aboutsummaryrefslogtreecommitdiff
blob: 2bdb3e7cdefaa5d34eea89ec34a3cc705d823689 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
# Copyright 1998-2007 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# $Id$

__all__ = ["dbapi"]

import os
import re
from portage.dep import match_from_list
from portage.locks import unlockfile
from portage.output import red
from portage.util import writemsg
from portage import auxdbkeys, dep_expand
from portage.versions import catpkgsplit, pkgcmp


class dbapi(object):
	"""
	Abstract base class for Portage package databases (ports tree,
	installed-package db, binary-package db).  Subclasses must implement
	cp_all(), cp_list(), aux_get() and aux_update(); the matching and
	package-move helpers defined here are built on those primitives.
	"""

	# Valid category name: leading word char, then word chars, '-', '.', '+'.
	_category_re = re.compile(r'^\w[-.+\w]*$')
	# Valid package directory name (note: '.' is not allowed here,
	# unlike in category names).
	_pkg_dir_name_re = re.compile(r'^\w[-+\w]*$')
	# Cached sorted category list (see the categories property).
	_categories = None
	# Cached implicit IUSE set, filled lazily by _iter_match_use().
	_iuse_implicit = None
	# True for databases where USE flags are not fixed at build time
	# (enables the mask/force checks in _iter_match_use()).
	_use_mutable = False
	# All real metadata keys, with the UNUSED_0* placeholder slots dropped.
	_known_keys = frozenset(x for x in auxdbkeys
		if not x.startswith("UNUSED_0"))

	def __init__(self):
		pass

	@property
	def categories(self):
		"""
		Use self.cp_all() to generate a category list. Mutable instances
		can delete the self._categories attribute in cases when the cached
		categories become invalid and need to be regenerated.
		"""
		if self._categories is not None:
			return self._categories
		categories = set()
		# Everything before the last '/' of a cp ("cat/pkg") is the category.
		cat_pattern = re.compile(r'(.*)/.*')
		for cp in self.cp_all():
			categories.add(cat_pattern.match(cp).group(1))
		self._categories = list(categories)
		self._categories.sort()
		return self._categories

	def close_caches(self):
		"""Hook for subclasses to flush/close any open caches. No-op here."""
		pass

	def cp_list(self, cp, use_cache=1):
		"""Return all cpvs in the db matching the given cp ("cat/pkg").
		Subclasses are expected to override this; the base implementation
		returns None."""
		return

	def _cpv_sort_ascending(self, cpv_list):
		"""
		Use this to sort self.cp_list() results in ascending
		order. It sorts in place and returns None.
		"""
		if len(cpv_list) > 1:
			# If the cpv includes explicit -r0, it has to be preserved
			# for consistency in findname and aux_get calls, so use a
			# dict to map strings back to their original values.
			str_map = {}
			for i, cpv in enumerate(cpv_list):
				mysplit = tuple(catpkgsplit(cpv)[1:])
				str_map[mysplit] = cpv
				cpv_list[i] = mysplit
			cpv_list.sort(pkgcmp)
			for i, mysplit in enumerate(cpv_list):
				cpv_list[i] = str_map[mysplit]

	def cpv_all(self):
		"""Return all CPVs in the db
		Args:
			None
		Returns:
			A list of Strings, 1 per CPV

		This function relies on a subclass implementing cp_all(); if it
		is not implemented, cp_all() itself raises NotImplementedError.
		"""
		cpv_list = []
		for cp in self.cp_all():
			cpv_list.extend(self.cp_list(cp))
		return cpv_list

	def cp_all(self):
		""" Implement this in a child class
		Args
			None
		Returns:
			A list of strings 1 per CP in the datastore
		"""
		# BUGFIX: this previously did "return NotImplementedError",
		# handing callers the exception class instead of raising it.
		raise NotImplementedError

	def aux_get(self, mycpv, mylist):
		"""Return the metadata keys in mylist for mycpv
		Args:
			mycpv - "sys-apps/foo-1.0"
			mylist - ["SLOT","DEPEND","HOMEPAGE"]
		Returns:
			a list of results, in order of keys in mylist, such as:
			["0",">=sys-libs/bar-1.0","http://www.foo.com"] or [] if mycpv not found
		"""
		raise NotImplementedError

	def aux_update(self, cpv, metadata_updates):
		"""Update the metadata of cpv in place.
		Args:
			cpv - "sys-apps/foo-1.0"
			metadata_updates = { key : newvalue }
		Returns:
			None
		"""
		raise NotImplementedError

	def match(self, origdep, use_cache=1):
		"""Given a dependency, try to find packages that match
		Args:
			origdep - Depend atom
			use_cache - Boolean indicating if we should use the cache or not
			NOTE: Do we ever not want the cache?
		Returns:
			a list of packages that match origdep
		"""
		mydep = dep_expand(origdep, mydb=self, settings=self.settings)
		return list(self._iter_match(mydep,
			self.cp_list(mydep.cp, use_cache=use_cache)))

	def _iter_match(self, atom, cpv_iter):
		"""Yield the cpvs from cpv_iter that match atom, filtering first
		by version, then by slot and USE dependencies when the atom
		specifies them."""
		cpv_iter = iter(match_from_list(atom, cpv_iter))
		if atom.slot:
			cpv_iter = self._iter_match_slot(atom, cpv_iter)
		if atom.use:
			cpv_iter = self._iter_match_use(atom, cpv_iter)
		return cpv_iter

	def _iter_match_slot(self, atom, cpv_iter):
		"""Yield only the cpvs whose SLOT metadata equals atom.slot.
		Packages whose metadata is unavailable (KeyError from aux_get)
		are silently skipped."""
		for cpv in cpv_iter:
			try:
				if self.aux_get(cpv, ["SLOT"])[0] == atom.slot:
					yield cpv
			except KeyError:
				continue

	def _iter_match_use(self, atom, cpv_iter):
		"""
		1) Check for required IUSE intersection (need implicit IUSE here).
		2) Check enabled/disabled flag states.
		"""
		if self._iuse_implicit is None:
			self._iuse_implicit = self.settings._get_implicit_iuse()
		for cpv in cpv_iter:
			try:
				iuse, slot, use = self.aux_get(cpv, ["IUSE", "SLOT", "USE"])
			except KeyError:
				# Metadata unavailable for this cpv; skip it.
				continue
			use = use.split()
			# IUSE entries may carry +/- default prefixes; strip them and
			# escape each flag since they are joined into a regex below.
			iuse = self._iuse_implicit.union(
				re.escape(x.lstrip("+-")) for x in iuse.split())
			iuse_re = re.compile("^(%s)$" % "|".join(iuse))
			# Every flag the atom references must exist in IUSE.
			missing_iuse = False
			for x in atom.use.required:
				if iuse_re.match(x) is None:
					missing_iuse = True
					break
			if missing_iuse:
				continue
			if not self._use_mutable:
				# Installed/binary packages: USE is fixed, so the atom's
				# enabled/disabled flags must match the recorded USE.
				if atom.use.enabled.difference(use):
					continue
				if atom.use.disabled.intersection(use):
					continue
			else:
				# Check masked and forced flags for repoman.
				mysettings = getattr(self, "mysettings", None)
				if mysettings is not None and not mysettings.local_config:

					pkg = "%s:%s" % (cpv, slot)
					usemask = mysettings._getUseMask(pkg)
					if usemask.intersection(atom.use.enabled):
						continue

					useforce = mysettings._getUseForce(pkg).difference(usemask)
					if useforce.intersection(atom.use.disabled):
						continue

			yield cpv

	def invalidentry(self, mypath):
		"""Handle an invalid db entry found at mypath: remove a stale
		lockfile, warn about an incomplete merge, or report the bad
		entry.  Writes messages via writemsg; returns None."""
		if mypath.endswith('portage_lockfile'):
			if "PORTAGE_MASTER_PID" not in os.environ:
				writemsg("Lockfile removed: %s\n" % mypath, 1)
				unlockfile((mypath, None, None))
			else:
				# Nothing we can do about it. We're probably sandboxed.
				pass
		elif '/-MERGING-' in mypath:
			if os.path.exists(mypath):
				writemsg(red("INCOMPLETE MERGE:")+" "+mypath+"\n", noiselevel=-1)
		else:
			writemsg("!!! Invalid db entry: %s\n" % mypath, noiselevel=-1)

	def update_ents(self, updates, onProgress=None):
		"""
		Update metadata of all packages for package moves.
		@param updates: A list of move commands
		@type updates: List
		@param onProgress: A progress callback function
		@type onProgress: a callable that takes 2 integer arguments: maxval and curval
		"""
		cpv_all = self.cpv_all()
		cpv_all.sort()
		maxval = len(cpv_all)
		# Bind bound methods locally; aux_get/aux_update run once per cpv.
		aux_get = self.aux_get
		aux_update = self.aux_update
		# Only dependency-ish keys can reference moved package names.
		update_keys = ["DEPEND", "RDEPEND", "PDEPEND", "PROVIDE"]
		from itertools import izip
		from portage.update import update_dbentries
		if onProgress:
			onProgress(maxval, 0)
		for i, cpv in enumerate(cpv_all):
			metadata = dict(izip(update_keys, aux_get(cpv, update_keys)))
			metadata_updates = update_dbentries(updates, metadata)
			if metadata_updates:
				aux_update(cpv, metadata_updates)
			if onProgress:
				onProgress(maxval, i+1)

	def move_slot_ent(self, mylist):
		"""This function takes a sequence:
		Args:
			mylist: a sequence of (command, package, originalslot, newslot)
			        (element 0 is the "slotmove" command token)
		Returns:
			The number of slotmoves this function did
		"""
		pkg = mylist[1]
		origslot = mylist[2]
		newslot = mylist[3]
		origmatches = self.match(pkg)
		moves = 0
		if not origmatches:
			return moves
		for mycpv in origmatches:
			slot = self.aux_get(mycpv, ["SLOT"])[0]
			if slot != origslot:
				continue
			moves += 1
			# SLOT values are stored newline-terminated.
			mydata = {"SLOT": newslot+"\n"}
			self.aux_update(mycpv, mydata)
		return moves