Gentoo Archives: gentoo-portage-dev

From: Sebastian Luther <SebastianLuther@×××.de>
To: gentoo-portage-dev@l.g.o
Subject: [gentoo-portage-dev] [PATCH 03/10] Replace mydbapi with _package_tracker
Date: Wed, 29 Jan 2014 15:34:08
Message-Id: 1391009594-22750-4-git-send-email-SebastianLuther@gmx.de
In Reply to: [gentoo-portage-dev] [PATCH 00/10] First steps to get rid of backtracking by Sebastian Luther
---
 pym/_emerge/depgraph.py | 211 +++++++++++++++++++++++-------------------------
 1 file changed, 101 insertions(+), 110 deletions(-)
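This patch drops the per-root fake dbapi kept in self._dynamic_config.mydbapi
(populated via cpv_inject() and queried via match_pkgs()) in favour of a single
shared package tracker that is asked for matches per root. A minimal,
self-contained sketch of that lookup idiom follows; Pkg, SimpleTracker and the
cp-only matching are illustrative stand-ins for this note only, not the classes
the patch touches:

# Illustrative sketch only: Pkg and SimpleTracker are stand-ins, not portage API.
import collections

Pkg = collections.namedtuple("Pkg", "root cp cpv operation")

class SimpleTracker:
    """Keeps graph packages per (root, cp) and yields matches lazily."""

    def __init__(self):
        self._pkgs = collections.defaultdict(list)

    def add_pkg(self, pkg):
        # Fills the role that mydbapi[pkg.root].cpv_inject(pkg) used to fill.
        self._pkgs[(pkg.root, pkg.cp)].append(pkg)

    def match(self, root, cp):
        # Fills the role of mydbapi[root].match_pkgs(atom), but yields
        # packages one by one instead of returning a list.
        for pkg in self._pkgs.get((root, cp), []):
            yield pkg

tracker = SimpleTracker()
tracker.add_pkg(Pkg("/", "sys-apps/portage", "sys-apps/portage-2.2.8", "merge"))

for pkg in tracker.match("/", "sys-apps/portage"):
    if pkg.operation == "merge":
        print("scheduled for merge: %s" % (pkg.cpv,))

Because match() yields packages instead of returning a list, the call sites
below iterate over the result (or wrap it in list()) rather than testing
truthiness and indexing [-1].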
diff --git a/pym/_emerge/depgraph.py b/pym/_emerge/depgraph.py
index fd59dda..9d234c2 100644
--- a/pym/_emerge/depgraph.py
+++ b/pym/_emerge/depgraph.py
@@ -344,7 +344,6 @@ class _dynamic_depgraph_config(object):
self._allow_backtracking = allow_backtracking
# Maps nodes to the reasons they were selected for reinstallation.
self._reinstall_nodes = {}
- self.mydbapi = {}
# Contains a filtered view of preferred packages that are selected
# from available repositories.
self._filtered_trees = {}
@@ -440,7 +439,6 @@ class _dynamic_depgraph_config(object):
# have after new packages have been installed.
fakedb = PackageTrackerDbapiWrapper(myroot, self._package_tracker)

- self.mydbapi[myroot] = fakedb
def graph_tree():
pass
graph_tree.dbapi = fakedb
@@ -558,8 +556,6 @@ class depgraph(object):

if preload_installed_pkgs:
vardb = fake_vartree.dbapi
- fakedb = self._dynamic_config._graph_trees[
- myroot]["vartree"].dbapi

if not dynamic_deps:
for pkg in vardb:
@@ -724,25 +720,23 @@ class depgraph(object):

for pkg in list(self._dynamic_config.ignored_binaries):

- selected_pkg = self._dynamic_config.mydbapi[pkg.root
- ].match_pkgs(pkg.slot_atom)
+ selected_pkg = list()

- if not selected_pkg:
- continue
+ for selected_pkg in self._dynamic_config._package_tracker.match(
+ pkg.root, pkg.slot_atom):

- selected_pkg = selected_pkg[-1]
- if selected_pkg > pkg:
- self._dynamic_config.ignored_binaries.pop(pkg)
- continue
+ if selected_pkg > pkg:
+ self._dynamic_config.ignored_binaries.pop(pkg)
+ break

- if selected_pkg.installed and \
- selected_pkg.cpv == pkg.cpv and \
- selected_pkg.build_time == pkg.build_time:
- # We don't care about ignored binaries when an
- # identical installed instance is selected to
- # fill the slot.
- self._dynamic_config.ignored_binaries.pop(pkg)
- continue
+ if selected_pkg.installed and \
+ selected_pkg.cpv == pkg.cpv and \
+ selected_pkg.build_time == pkg.build_time:
+ # We don't care about ignored binaries when an
+ # identical installed instance is selected to
+ # fill the slot.
+ self._dynamic_config.ignored_binaries.pop(pkg)
+ break

if not self._dynamic_config.ignored_binaries:
return
@@ -788,20 +782,25 @@ class depgraph(object):
# Exclude installed here since we only
# want to show available updates.
continue
- chosen_pkg = self._dynamic_config.mydbapi[pkg.root
- ].match_pkgs(pkg.slot_atom)
- if not chosen_pkg or chosen_pkg[-1] >= pkg:
- continue
- k = (pkg.root, pkg.slot_atom)
- if k in missed_updates:
- other_pkg, mask_type, parent_atoms = missed_updates[k]
- if other_pkg > pkg:
- continue
- for mask_type, parent_atoms in mask_reasons.items():
- if not parent_atoms:
- continue
- missed_updates[k] = (pkg, mask_type, parent_atoms)
- break
+ missed_update = True
+ any_selected = False
+ for chosen_pkg in self._dynamic_config._package_tracker.match(
+ pkg.root, pkg.slot_atom):
+ any_selected = True
+ if chosen_pkg >= pkg:
+ missed_update = False
+ break
+ if any_selected and missed_update:
+ k = (pkg.root, pkg.slot_atom)
+ if k in missed_updates:
+ other_pkg, mask_type, parent_atoms = missed_updates[k]
+ if other_pkg > pkg:
+ continue
+ for mask_type, parent_atoms in mask_reasons.items():
+ if not parent_atoms:
+ continue
+ missed_updates[k] = (pkg, mask_type, parent_atoms)
+ break

return missed_updates

@@ -2040,16 +2039,13 @@ class depgraph(object):
# can show use flags and --tree portage.output. This node is
# only being partially added to the graph. It must not be
# allowed to interfere with the other nodes that have been
- # added. Do not overwrite data for existing nodes in
- # self._dynamic_config.mydbapi since that data will be used for blocker
- # validation.
+ # added.
# Even though the graph is now invalid, continue to process
# dependencies so that things like --fetchonly can still
# function despite collisions.
pass
elif not previously_added:
self._dynamic_config._package_tracker.add_pkg(pkg)
- self._dynamic_config.mydbapi[pkg.root].cpv_inject(pkg)
self._dynamic_config._filtered_trees[pkg.root]["porttree"].dbapi._clear_cache()
self._dynamic_config._highest_pkg_cache.clear()
self._check_masks(pkg)
@@ -3639,35 +3635,37 @@ class depgraph(object):
def _expand_virt_from_graph(self, root, atom):
if not isinstance(atom, Atom):
atom = Atom(atom)
- graphdb = self._dynamic_config.mydbapi[root]
- match = graphdb.match_pkgs(atom)
- if not match:
- yield atom
- return
- pkg = match[-1]
- if not pkg.cpv.startswith("virtual/"):
- yield atom
- return
- try:
- rdepend = self._select_atoms_from_graph(
- pkg.root, pkg._metadata.get("RDEPEND", ""),
- myuse=self._pkg_use_enabled(pkg),
- parent=pkg, strict=False)
- except InvalidDependString as e:
- writemsg_level("!!! Invalid RDEPEND in " + \
- "'%svar/db/pkg/%s/RDEPEND': %s\n" % \
- (pkg.root, pkg.cpv, e),
- noiselevel=-1, level=logging.ERROR)
+
+ if not atom.cp.startswith("virtual/"):
yield atom
return

- for atoms in rdepend.values():
- for atom in atoms:
- if hasattr(atom, "_orig_atom"):
- # Ignore virtual atoms since we're only
- # interested in expanding the real atoms.
- continue
- yield atom
+ any_match = False
+ for pkg in self._dynamic_config._package_tracker.match(root, atom):
+ try:
+ rdepend = self._select_atoms_from_graph(
+ pkg.root, pkg._metadata.get("RDEPEND", ""),
+ myuse=self._pkg_use_enabled(pkg),
+ parent=pkg, strict=False)
+ except InvalidDependString as e:
+ writemsg_level("!!! Invalid RDEPEND in " + \
+ "'%svar/db/pkg/%s/RDEPEND': %s\n" % \
+ (pkg.root, pkg.cpv, e),
+ noiselevel=-1, level=logging.ERROR)
+ continue
+
+ for atoms in rdepend.values():
+ for atom in atoms:
+ if hasattr(atom, "_orig_atom"):
+ # Ignore virtual atoms since we're only
+ # interested in expanding the real atoms.
+ continue
+ yield atom
+
+ any_match = True
+
+ if not any_match:
+ yield atom

def _virt_deps_visible(self, pkg, ignore_use=False):
"""
@@ -5524,10 +5522,14 @@ class depgraph(object):
installed=installed, onlydeps=onlydeps))
if pkg is None and onlydeps and not installed:
# Maybe it already got pulled in as a "merge" node.
- pkg = self._dynamic_config.mydbapi[root_config.root].get(
- Package._gen_hash_key(cpv=cpv, type_name=type_name,
- repo_name=myrepo, root_config=root_config,
- installed=installed, onlydeps=False))
+ for candidate in self._dynamic_config._package_tracker.match(
+ root_config.root, cpv):
+ if candidate.type_name == type_name and \
+ candidate.repo_name == myrepo and \
+ candidate.root_config is root_config and \
+ candidate.installed == installed and \
+ not candidate.onlydeps:
+ pkg = candidate

if pkg is None:
tree_type = self.pkg_tree_map[type_name]
@@ -5587,7 +5589,8 @@ class depgraph(object):
vardb = self._frozen_config.trees[myroot]["vartree"].dbapi
pkgsettings = self._frozen_config.pkgsettings[myroot]
root_config = self._frozen_config.roots[myroot]
- final_db = self._dynamic_config.mydbapi[myroot]
+ final_db = PackageTrackerDbapiWrapper(
+ myroot, self._dynamic_config._package_tracker)

blocker_cache = BlockerCache(myroot, vardb)
stale_cache = set(blocker_cache)
@@ -5604,7 +5607,7 @@ class depgraph(object):
# the merge process or by --depclean. Always warn about
# packages masked by license, since the user likely wants
# to adjust ACCEPT_LICENSE.
- if pkg in final_db:
+ if pkg in self._dynamic_config._package_tracker:
if not self._pkg_visibility_check(pkg,
trust_graph=False) and \
(pkg_in_graph or 'LICENSE' in pkg.masks):
@@ -5686,9 +5689,10 @@ class depgraph(object):
del e
raise
if not success:
- replacement_pkg = final_db.match_pkgs(pkg.slot_atom)
- if replacement_pkg and \
- replacement_pkg[0].operation == "merge":
+ replacement_pkgs = self._dynamic_config._package_tracker.match(
+ myroot, pkg.slot_atom)
+ if any(replacement_pkg.operation == "merge" for \
+ replacement_pkg in replacement_pkgs):
# This package is being replaced anyway, so
# ignore invalid dependencies so as not to
# annoy the user too much (otherwise they'd be
@@ -5733,7 +5737,6 @@ class depgraph(object):
virtuals = root_config.settings.getvirtuals()
myroot = blocker.root
initial_db = self._frozen_config.trees[myroot]["vartree"].dbapi
- final_db = self._dynamic_config.mydbapi[myroot]

provider_virtual = False
if blocker.cp in virtuals and \
@@ -5761,7 +5764,7 @@ class depgraph(object):

blocked_final = set()
for atom in atoms:
- for pkg in final_db.match_pkgs(atom):
+ for pkg in self._dynamic_config._package_tracker.match(myroot, atom):
if atom_set.findAtomForPackage(pkg, modified_use=self._pkg_use_enabled(pkg)):
blocked_final.add(pkg)

@@ -5943,19 +5946,15 @@ class depgraph(object):
libc_pkgs = {}
implicit_libc_roots = (self._frozen_config._running_root.root,)
for root in implicit_libc_roots:
- graphdb = self._dynamic_config.mydbapi[root]
vardb = self._frozen_config.trees[root]["vartree"].dbapi
for atom in self._expand_virt_from_graph(root,
portage.const.LIBC_PACKAGE_ATOM):
if atom.blocker:
continue
- match = graphdb.match_pkgs(atom)
- if not match:
- continue
- pkg = match[-1]
- if pkg.operation == "merge" and \
- not vardb.cpv_exists(pkg.cpv):
- libc_pkgs.setdefault(pkg.root, set()).add(pkg)
+ for pkg in self._dynamic_config._package_tracker.match(root, atom):
+ if pkg.operation == "merge" and \
+ not vardb.cpv_exists(pkg.cpv):
+ libc_pkgs.setdefault(pkg.root, set()).add(pkg)

if not libc_pkgs:
return
@@ -6156,8 +6155,8 @@ class depgraph(object):
initial_atoms=[PORTAGE_PACKAGE_ATOM])
running_portage = self._frozen_config.trees[running_root]["vartree"].dbapi.match_pkgs(
PORTAGE_PACKAGE_ATOM)
- replacement_portage = self._dynamic_config.mydbapi[running_root].match_pkgs(
- PORTAGE_PACKAGE_ATOM)
+ replacement_portage = list(self._dynamic_config._package_tracker.match(
+ running_root, Atom(PORTAGE_PACKAGE_ATOM)))

if running_portage:
running_portage = running_portage[0]
@@ -6194,18 +6193,15 @@ class depgraph(object):
for root in implicit_libc_roots:
libc_pkgs = set()
vardb = self._frozen_config.trees[root]["vartree"].dbapi
- graphdb = self._dynamic_config.mydbapi[root]
for atom in self._expand_virt_from_graph(root,
portage.const.LIBC_PACKAGE_ATOM):
if atom.blocker:
continue
- match = graphdb.match_pkgs(atom)
- if not match:
- continue
- pkg = match[-1]
- if pkg.operation == "merge" and \
- not vardb.cpv_exists(pkg.cpv):
- libc_pkgs.add(pkg)
+
+ for pkg in self._dynamic_config._package_tracker.match(root, atom):
+ if pkg.operation == "merge" and \
+ not vardb.cpv_exists(pkg.cpv):
+ libc_pkgs.add(pkg)

if libc_pkgs:
# If there's also an os-headers upgrade, we need to
@@ -6214,13 +6210,11 @@ class depgraph(object):
portage.const.OS_HEADERS_PACKAGE_ATOM):
if atom.blocker:
continue
- match = graphdb.match_pkgs(atom)
- if not match:
- continue
- pkg = match[-1]
- if pkg.operation == "merge" and \
- not vardb.cpv_exists(pkg.cpv):
- asap_nodes.append(pkg)
+
+ for pkg in self._dynamic_config._package_tracker.match(root, atom):
+ if pkg.operation == "merge" and \
+ not vardb.cpv_exists(pkg.cpv):
+ asap_nodes.append(pkg)

asap_nodes.extend(libc_pkgs)

@@ -6562,13 +6556,12 @@ class depgraph(object):
# For packages in the world set, go ahead an uninstall
# when necessary, as long as the atom will be satisfied
# in the final state.
- graph_db = self._dynamic_config.mydbapi[task.root]
skip = False
try:
for atom in root_config.sets[
"selected"].iterAtomsForPackage(task):
satisfied = False
- for pkg in graph_db.match_pkgs(atom):
+ for pkg in self._dynamic_config._package_tracker.match(task.root, atom):
if pkg == inst_pkg:
continue
satisfied = True
@@ -6650,12 +6643,11 @@ class depgraph(object):
# node unnecessary (due to occupying the same SLOT),
# and we want to avoid executing a separate uninstall
# task in that case.
- slot_node = self._dynamic_config.mydbapi[uninst_task.root
- ].match_pkgs(uninst_task.slot_atom)
- if slot_node and \
- slot_node[0].operation == "merge":
- mygraph.add(slot_node[0], uninst_task,
- priority=BlockerDepPriority.instance)
+ for slot_node in self._dynamic_config._package_tracker.match(
+ uninst_task.root, uninst_task.slot_atom):
+ if slot_node.operation == "merge":
+ mygraph.add(slot_node, uninst_task,
+ priority=BlockerDepPriority.instance)

# Reset the state variables for leaf node selection and
# continue trying to select leaf nodes.
@@ -7624,7 +7616,6 @@ class depgraph(object):
else:
args = []

- fakedb = self._dynamic_config.mydbapi
serialized_tasks = []
masked_tasks = []
for x in mergelist:
@@ -7682,7 +7673,7 @@ class depgraph(object):
self._dynamic_config._unsatisfied_deps_for_display.append(
((pkg.root, "="+pkg.cpv), {"myparent":None}))

- fakedb[myroot].cpv_inject(pkg)
+ self._dynamic_config._package_tracker.add_pkg(pkg)
serialized_tasks.append(pkg)
self._spinner_update()

--
1.8.3.2