1 |
commit: c07bd5fd46d7b8adbdb4d8cf4d70bdc8fc66aea3 |
2 |
Author: Sebastian Luther <SebastianLuther <AT> gmx <DOT> de> |
3 |
AuthorDate: Thu Jan 23 20:37:32 2014 +0000 |
4 |
Commit: Sebastian Luther <SebastianLuther <AT> gmx <DOT> de> |
5 |
CommitDate: Wed Feb 5 19:39:21 2014 +0000 |
6 |
URL: http://git.overlays.gentoo.org/gitweb/?p=proj/portage.git;a=commit;h=c07bd5fd |
7 |
|
8 |
Replace mydbapi with _package_tracker |
9 |
|
10 |
--- |
11 |
pym/_emerge/depgraph.py | 211 +++++++++++++++++++++++------------------------- |
12 |
1 file changed, 101 insertions(+), 110 deletions(-) |
13 |
|
14 |
diff --git a/pym/_emerge/depgraph.py b/pym/_emerge/depgraph.py |
15 |
index fd59dda..9d234c2 100644 |
16 |
--- a/pym/_emerge/depgraph.py |
17 |
+++ b/pym/_emerge/depgraph.py |
18 |
@@ -344,7 +344,6 @@ class _dynamic_depgraph_config(object): |
19 |
self._allow_backtracking = allow_backtracking |
20 |
# Maps nodes to the reasons they were selected for reinstallation. |
21 |
self._reinstall_nodes = {} |
22 |
- self.mydbapi = {} |
23 |
# Contains a filtered view of preferred packages that are selected |
24 |
# from available repositories. |
25 |
self._filtered_trees = {} |
26 |
@@ -440,7 +439,6 @@ class _dynamic_depgraph_config(object): |
27 |
# have after new packages have been installed. |
28 |
fakedb = PackageTrackerDbapiWrapper(myroot, self._package_tracker) |
29 |
|
30 |
- self.mydbapi[myroot] = fakedb |
31 |
def graph_tree(): |
32 |
pass |
33 |
graph_tree.dbapi = fakedb |
34 |
@@ -558,8 +556,6 @@ class depgraph(object): |
35 |
|
36 |
if preload_installed_pkgs: |
37 |
vardb = fake_vartree.dbapi |
38 |
- fakedb = self._dynamic_config._graph_trees[ |
39 |
- myroot]["vartree"].dbapi |
40 |
|
41 |
if not dynamic_deps: |
42 |
for pkg in vardb: |
43 |
@@ -724,25 +720,23 @@ class depgraph(object): |
44 |
|
45 |
for pkg in list(self._dynamic_config.ignored_binaries): |
46 |
|
47 |
- selected_pkg = self._dynamic_config.mydbapi[pkg.root |
48 |
- ].match_pkgs(pkg.slot_atom) |
49 |
+ selected_pkg = list() |
50 |
|
51 |
- if not selected_pkg: |
52 |
- continue |
53 |
+ for selected_pkg in self._dynamic_config._package_tracker.match( |
54 |
+ pkg.root, pkg.slot_atom): |
55 |
|
56 |
- selected_pkg = selected_pkg[-1] |
57 |
- if selected_pkg > pkg: |
58 |
- self._dynamic_config.ignored_binaries.pop(pkg) |
59 |
- continue |
60 |
+ if selected_pkg > pkg: |
61 |
+ self._dynamic_config.ignored_binaries.pop(pkg) |
62 |
+ break |
63 |
|
64 |
- if selected_pkg.installed and \ |
65 |
- selected_pkg.cpv == pkg.cpv and \ |
66 |
- selected_pkg.build_time == pkg.build_time: |
67 |
- # We don't care about ignored binaries when an |
68 |
- # identical installed instance is selected to |
69 |
- # fill the slot. |
70 |
- self._dynamic_config.ignored_binaries.pop(pkg) |
71 |
- continue |
72 |
+ if selected_pkg.installed and \ |
73 |
+ selected_pkg.cpv == pkg.cpv and \ |
74 |
+ selected_pkg.build_time == pkg.build_time: |
75 |
+ # We don't care about ignored binaries when an |
76 |
+ # identical installed instance is selected to |
77 |
+ # fill the slot. |
78 |
+ self._dynamic_config.ignored_binaries.pop(pkg) |
79 |
+ break |
80 |
|
81 |
if not self._dynamic_config.ignored_binaries: |
82 |
return |
83 |
@@ -788,20 +782,25 @@ class depgraph(object): |
84 |
# Exclude installed here since we only |
85 |
# want to show available updates. |
86 |
continue |
87 |
- chosen_pkg = self._dynamic_config.mydbapi[pkg.root |
88 |
- ].match_pkgs(pkg.slot_atom) |
89 |
- if not chosen_pkg or chosen_pkg[-1] >= pkg: |
90 |
- continue |
91 |
- k = (pkg.root, pkg.slot_atom) |
92 |
- if k in missed_updates: |
93 |
- other_pkg, mask_type, parent_atoms = missed_updates[k] |
94 |
- if other_pkg > pkg: |
95 |
- continue |
96 |
- for mask_type, parent_atoms in mask_reasons.items(): |
97 |
- if not parent_atoms: |
98 |
- continue |
99 |
- missed_updates[k] = (pkg, mask_type, parent_atoms) |
100 |
- break |
101 |
+ missed_update = True |
102 |
+ any_selected = False |
103 |
+ for chosen_pkg in self._dynamic_config._package_tracker.match( |
104 |
+ pkg.root, pkg.slot_atom): |
105 |
+ any_selected = True |
106 |
+ if chosen_pkg >= pkg: |
107 |
+ missed_update = False |
108 |
+ break |
109 |
+ if any_selected and missed_update: |
110 |
+ k = (pkg.root, pkg.slot_atom) |
111 |
+ if k in missed_updates: |
112 |
+ other_pkg, mask_type, parent_atoms = missed_updates[k] |
113 |
+ if other_pkg > pkg: |
114 |
+ continue |
115 |
+ for mask_type, parent_atoms in mask_reasons.items(): |
116 |
+ if not parent_atoms: |
117 |
+ continue |
118 |
+ missed_updates[k] = (pkg, mask_type, parent_atoms) |
119 |
+ break |
120 |
|
121 |
return missed_updates |
122 |
|
123 |
@@ -2040,16 +2039,13 @@ class depgraph(object): |
124 |
# can show use flags and --tree portage.output. This node is |
125 |
# only being partially added to the graph. It must not be |
126 |
# allowed to interfere with the other nodes that have been |
127 |
- # added. Do not overwrite data for existing nodes in |
128 |
- # self._dynamic_config.mydbapi since that data will be used for blocker |
129 |
- # validation. |
130 |
+ # added. |
131 |
# Even though the graph is now invalid, continue to process |
132 |
# dependencies so that things like --fetchonly can still |
133 |
# function despite collisions. |
134 |
pass |
135 |
elif not previously_added: |
136 |
self._dynamic_config._package_tracker.add_pkg(pkg) |
137 |
- self._dynamic_config.mydbapi[pkg.root].cpv_inject(pkg) |
138 |
self._dynamic_config._filtered_trees[pkg.root]["porttree"].dbapi._clear_cache() |
139 |
self._dynamic_config._highest_pkg_cache.clear() |
140 |
self._check_masks(pkg) |
141 |
@@ -3639,35 +3635,37 @@ class depgraph(object): |
142 |
def _expand_virt_from_graph(self, root, atom): |
143 |
if not isinstance(atom, Atom): |
144 |
atom = Atom(atom) |
145 |
- graphdb = self._dynamic_config.mydbapi[root] |
146 |
- match = graphdb.match_pkgs(atom) |
147 |
- if not match: |
148 |
- yield atom |
149 |
- return |
150 |
- pkg = match[-1] |
151 |
- if not pkg.cpv.startswith("virtual/"): |
152 |
- yield atom |
153 |
- return |
154 |
- try: |
155 |
- rdepend = self._select_atoms_from_graph( |
156 |
- pkg.root, pkg._metadata.get("RDEPEND", ""), |
157 |
- myuse=self._pkg_use_enabled(pkg), |
158 |
- parent=pkg, strict=False) |
159 |
- except InvalidDependString as e: |
160 |
- writemsg_level("!!! Invalid RDEPEND in " + \ |
161 |
- "'%svar/db/pkg/%s/RDEPEND': %s\n" % \ |
162 |
- (pkg.root, pkg.cpv, e), |
163 |
- noiselevel=-1, level=logging.ERROR) |
164 |
+ |
165 |
+ if not atom.cp.startswith("virtual/"): |
166 |
yield atom |
167 |
return |
168 |
|
169 |
- for atoms in rdepend.values(): |
170 |
- for atom in atoms: |
171 |
- if hasattr(atom, "_orig_atom"): |
172 |
- # Ignore virtual atoms since we're only |
173 |
- # interested in expanding the real atoms. |
174 |
- continue |
175 |
- yield atom |
176 |
+ any_match = False |
177 |
+ for pkg in self._dynamic_config._package_tracker.match(root, atom): |
178 |
+ try: |
179 |
+ rdepend = self._select_atoms_from_graph( |
180 |
+ pkg.root, pkg._metadata.get("RDEPEND", ""), |
181 |
+ myuse=self._pkg_use_enabled(pkg), |
182 |
+ parent=pkg, strict=False) |
183 |
+ except InvalidDependString as e: |
184 |
+ writemsg_level("!!! Invalid RDEPEND in " + \ |
185 |
+ "'%svar/db/pkg/%s/RDEPEND': %s\n" % \ |
186 |
+ (pkg.root, pkg.cpv, e), |
187 |
+ noiselevel=-1, level=logging.ERROR) |
188 |
+ continue |
189 |
+ |
190 |
+ for atoms in rdepend.values(): |
191 |
+ for atom in atoms: |
192 |
+ if hasattr(atom, "_orig_atom"): |
193 |
+ # Ignore virtual atoms since we're only |
194 |
+ # interested in expanding the real atoms. |
195 |
+ continue |
196 |
+ yield atom |
197 |
+ |
198 |
+ any_match = True |
199 |
+ |
200 |
+ if not any_match: |
201 |
+ yield atom |
202 |
|
203 |
def _virt_deps_visible(self, pkg, ignore_use=False): |
204 |
""" |
205 |
@@ -5524,10 +5522,14 @@ class depgraph(object): |
206 |
installed=installed, onlydeps=onlydeps)) |
207 |
if pkg is None and onlydeps and not installed: |
208 |
# Maybe it already got pulled in as a "merge" node. |
209 |
- pkg = self._dynamic_config.mydbapi[root_config.root].get( |
210 |
- Package._gen_hash_key(cpv=cpv, type_name=type_name, |
211 |
- repo_name=myrepo, root_config=root_config, |
212 |
- installed=installed, onlydeps=False)) |
213 |
+ for candidate in self._dynamic_config._package_tracker.match( |
214 |
+ root_config.root, cpv): |
215 |
+ if candidate.type_name == type_name and \ |
216 |
+ candidate.repo_name == myrepo and \ |
217 |
+ candidate.root_config is root_config and \ |
218 |
+ candidate.installed == installed and \ |
219 |
+ not candidate.onlydeps: |
220 |
+ pkg = candidate |
221 |
|
222 |
if pkg is None: |
223 |
tree_type = self.pkg_tree_map[type_name] |
224 |
@@ -5587,7 +5589,8 @@ class depgraph(object): |
225 |
vardb = self._frozen_config.trees[myroot]["vartree"].dbapi |
226 |
pkgsettings = self._frozen_config.pkgsettings[myroot] |
227 |
root_config = self._frozen_config.roots[myroot] |
228 |
- final_db = self._dynamic_config.mydbapi[myroot] |
229 |
+ final_db = PackageTrackerDbapiWrapper( |
230 |
+ myroot, self._dynamic_config._package_tracker) |
231 |
|
232 |
blocker_cache = BlockerCache(myroot, vardb) |
233 |
stale_cache = set(blocker_cache) |
234 |
@@ -5604,7 +5607,7 @@ class depgraph(object): |
235 |
# the merge process or by --depclean. Always warn about |
236 |
# packages masked by license, since the user likely wants |
237 |
# to adjust ACCEPT_LICENSE. |
238 |
- if pkg in final_db: |
239 |
+ if pkg in self._dynamic_config._package_tracker: |
240 |
if not self._pkg_visibility_check(pkg, |
241 |
trust_graph=False) and \ |
242 |
(pkg_in_graph or 'LICENSE' in pkg.masks): |
243 |
@@ -5686,9 +5689,10 @@ class depgraph(object): |
244 |
del e |
245 |
raise |
246 |
if not success: |
247 |
- replacement_pkg = final_db.match_pkgs(pkg.slot_atom) |
248 |
- if replacement_pkg and \ |
249 |
- replacement_pkg[0].operation == "merge": |
250 |
+ replacement_pkgs = self._dynamic_config._package_tracker.match( |
251 |
+ myroot, pkg.slot_atom) |
252 |
+ if any(replacement_pkg[0].operation == "merge" for \ |
253 |
+ replacement_pkg in replacement_pkgs): |
254 |
# This package is being replaced anyway, so |
255 |
# ignore invalid dependencies so as not to |
256 |
# annoy the user too much (otherwise they'd be |
257 |
@@ -5733,7 +5737,6 @@ class depgraph(object): |
258 |
virtuals = root_config.settings.getvirtuals() |
259 |
myroot = blocker.root |
260 |
initial_db = self._frozen_config.trees[myroot]["vartree"].dbapi |
261 |
- final_db = self._dynamic_config.mydbapi[myroot] |
262 |
|
263 |
provider_virtual = False |
264 |
if blocker.cp in virtuals and \ |
265 |
@@ -5761,7 +5764,7 @@ class depgraph(object): |
266 |
|
267 |
blocked_final = set() |
268 |
for atom in atoms: |
269 |
- for pkg in final_db.match_pkgs(atom): |
270 |
+ for pkg in self._dynamic_config._package_tracker.match(myroot, atom): |
271 |
if atom_set.findAtomForPackage(pkg, modified_use=self._pkg_use_enabled(pkg)): |
272 |
blocked_final.add(pkg) |
273 |
|
274 |
@@ -5943,19 +5946,15 @@ class depgraph(object): |
275 |
libc_pkgs = {} |
276 |
implicit_libc_roots = (self._frozen_config._running_root.root,) |
277 |
for root in implicit_libc_roots: |
278 |
- graphdb = self._dynamic_config.mydbapi[root] |
279 |
vardb = self._frozen_config.trees[root]["vartree"].dbapi |
280 |
for atom in self._expand_virt_from_graph(root, |
281 |
portage.const.LIBC_PACKAGE_ATOM): |
282 |
if atom.blocker: |
283 |
continue |
284 |
- match = graphdb.match_pkgs(atom) |
285 |
- if not match: |
286 |
- continue |
287 |
- pkg = match[-1] |
288 |
- if pkg.operation == "merge" and \ |
289 |
- not vardb.cpv_exists(pkg.cpv): |
290 |
- libc_pkgs.setdefault(pkg.root, set()).add(pkg) |
291 |
+ for pkg in self._dynamic_config._package_tracker.match(root, atom): |
292 |
+ if pkg.operation == "merge" and \ |
293 |
+ not vardb.cpv_exists(pkg.cpv): |
294 |
+ libc_pkgs.setdefault(pkg.root, set()).add(pkg) |
295 |
|
296 |
if not libc_pkgs: |
297 |
return |
298 |
@@ -6156,8 +6155,8 @@ class depgraph(object): |
299 |
initial_atoms=[PORTAGE_PACKAGE_ATOM]) |
300 |
running_portage = self._frozen_config.trees[running_root]["vartree"].dbapi.match_pkgs( |
301 |
PORTAGE_PACKAGE_ATOM) |
302 |
- replacement_portage = self._dynamic_config.mydbapi[running_root].match_pkgs( |
303 |
- PORTAGE_PACKAGE_ATOM) |
304 |
+ replacement_portage = list(self._dynamic_config._package_tracker.match( |
305 |
+ running_root, Atom(PORTAGE_PACKAGE_ATOM))) |
306 |
|
307 |
if running_portage: |
308 |
running_portage = running_portage[0] |
309 |
@@ -6194,18 +6193,15 @@ class depgraph(object): |
310 |
for root in implicit_libc_roots: |
311 |
libc_pkgs = set() |
312 |
vardb = self._frozen_config.trees[root]["vartree"].dbapi |
313 |
- graphdb = self._dynamic_config.mydbapi[root] |
314 |
for atom in self._expand_virt_from_graph(root, |
315 |
portage.const.LIBC_PACKAGE_ATOM): |
316 |
if atom.blocker: |
317 |
continue |
318 |
- match = graphdb.match_pkgs(atom) |
319 |
- if not match: |
320 |
- continue |
321 |
- pkg = match[-1] |
322 |
- if pkg.operation == "merge" and \ |
323 |
- not vardb.cpv_exists(pkg.cpv): |
324 |
- libc_pkgs.add(pkg) |
325 |
+ |
326 |
+ for pkg in self._dynamic_config._package_tracker.match(root, atom): |
327 |
+ if pkg.operation == "merge" and \ |
328 |
+ not vardb.cpv_exists(pkg.cpv): |
329 |
+ libc_pkgs.add(pkg) |
330 |
|
331 |
if libc_pkgs: |
332 |
# If there's also an os-headers upgrade, we need to |
333 |
@@ -6214,13 +6210,11 @@ class depgraph(object): |
334 |
portage.const.OS_HEADERS_PACKAGE_ATOM): |
335 |
if atom.blocker: |
336 |
continue |
337 |
- match = graphdb.match_pkgs(atom) |
338 |
- if not match: |
339 |
- continue |
340 |
- pkg = match[-1] |
341 |
- if pkg.operation == "merge" and \ |
342 |
- not vardb.cpv_exists(pkg.cpv): |
343 |
- asap_nodes.append(pkg) |
344 |
+ |
345 |
+ for pkg in self._dynamic_config._package_tracker.match(root, atom): |
346 |
+ if pkg.operation == "merge" and \ |
347 |
+ not vardb.cpv_exists(pkg.cpv): |
348 |
+ asap_nodes.append(pkg) |
349 |
|
350 |
asap_nodes.extend(libc_pkgs) |
351 |
|
352 |
@@ -6562,13 +6556,12 @@ class depgraph(object): |
353 |
# For packages in the world set, go ahead an uninstall |
354 |
# when necessary, as long as the atom will be satisfied |
355 |
# in the final state. |
356 |
- graph_db = self._dynamic_config.mydbapi[task.root] |
357 |
skip = False |
358 |
try: |
359 |
for atom in root_config.sets[ |
360 |
"selected"].iterAtomsForPackage(task): |
361 |
satisfied = False |
362 |
- for pkg in graph_db.match_pkgs(atom): |
363 |
+ for pkg in self._dynamic_config._package_tracker.match(task.root, atom): |
364 |
if pkg == inst_pkg: |
365 |
continue |
366 |
satisfied = True |
367 |
@@ -6650,12 +6643,11 @@ class depgraph(object): |
368 |
# node unnecessary (due to occupying the same SLOT), |
369 |
# and we want to avoid executing a separate uninstall |
370 |
# task in that case. |
371 |
- slot_node = self._dynamic_config.mydbapi[uninst_task.root |
372 |
- ].match_pkgs(uninst_task.slot_atom) |
373 |
- if slot_node and \ |
374 |
- slot_node[0].operation == "merge": |
375 |
- mygraph.add(slot_node[0], uninst_task, |
376 |
- priority=BlockerDepPriority.instance) |
377 |
+ for slot_node in self._dynamic_config._package_tracker.match( |
378 |
+ uninst_task.root, uninst_task.slot_atom): |
379 |
+ if slot_node.operation == "merge": |
380 |
+ mygraph.add(slot_node, uninst_task, |
381 |
+ priority=BlockerDepPriority.instance) |
382 |
|
383 |
# Reset the state variables for leaf node selection and |
384 |
# continue trying to select leaf nodes. |
385 |
@@ -7624,7 +7616,6 @@ class depgraph(object): |
386 |
else: |
387 |
args = [] |
388 |
|
389 |
- fakedb = self._dynamic_config.mydbapi |
390 |
serialized_tasks = [] |
391 |
masked_tasks = [] |
392 |
for x in mergelist: |
393 |
@@ -7682,7 +7673,7 @@ class depgraph(object): |
394 |
self._dynamic_config._unsatisfied_deps_for_display.append( |
395 |
((pkg.root, "="+pkg.cpv), {"myparent":None})) |
396 |
|
397 |
- fakedb[myroot].cpv_inject(pkg) |
398 |
+ self._dynamic_config._package_tracker.add_pkg(pkg) |
399 |
serialized_tasks.append(pkg) |
400 |
self._spinner_update() |