Gentoo Archives: gentoo-commits

From: Zac Medico <zmedico@g.o>
To: gentoo-commits@l.g.o
Subject: [gentoo-commits] proj/portage:master commit in: bin/, pym/portage/dbapi/, pym/portage/, pym/portage/emaint/modules/binhost/, ...
Date: Wed, 04 Mar 2015 21:38:01
Message-Id: 1425504727.328dd4712f88cbb8ef390ae9eb471afa1ef781d7.zmedico@gentoo
1 commit: 328dd4712f88cbb8ef390ae9eb471afa1ef781d7
2 Author: Zac Medico <zmedico <AT> gentoo <DOT> org>
3 AuthorDate: Fri Feb 6 03:35:12 2015 +0000
4 Commit: Zac Medico <zmedico <AT> gentoo <DOT> org>
5 CommitDate: Wed Mar 4 21:32:07 2015 +0000
6 URL: http://sources.gentoo.org/gitweb/?p=proj/portage.git;a=commit;h=328dd471
7
8 binpkg-multi-instance 3 of 7
9
10 FEATURES=binpkg-multi-instance causes an integer build-id to be
11 associated with each binary package instance. Inclusion of the build-id
12 in the file name of the binary package file makes it possible to store
13 an arbitrary number of binary packages built from the same ebuild.
14
15 Having multiple instances is useful for a number of purposes, such as
16 retaining builds that were built with different USE flags or linked
17 against different versions of libraries. The location of any particular
18 package within PKGDIR can be expressed as follows:
19
20 ${PKGDIR}/${CATEGORY}/${PN}/${PF}-${BUILD_ID}.xpak
21
22 The build-id starts at 1 for the first build of a particular ebuild,
23 and is incremented by 1 for each new build. It is possible to share a
24 writable PKGDIR over NFS, and locking ensures that each package added
25 to PKGDIR will have a unique build-id. It is not necessary to migrate
26 an existing PKGDIR to the new layout, since portage is capable of
27 working with a mixed PKGDIR layout, where packages using the old layout
28 are allowed to remain in place.
29
30 The new PKGDIR layout is backward-compatible with binhost clients
31 running older portage, since the file format is identical, the
32 per-package PATH attribute in the 'Packages' index directs them to
33 download the file from the correct URI, and they automatically use
34 BUILD_TIME metadata to select the latest builds.
35
36 There is currently no automated way to prune old builds from PKGDIR,
37 although it is possible to remove packages manually, and then run
38 'emaint --fix binhost' to update the ${PKGDIR}/Packages index. Support
39 for FEATURES=binpkg-multi-instance is planned for eclean-pkg.
40
41 X-Gentoo-Bug: 150031
42 X-Gentoo-Bug-URL: https://bugs.gentoo.org/show_bug.cgi?id=150031
43
44 bin/quickpkg | 1 -
45 man/make.conf.5 | 27 +
46 pym/_emerge/Binpkg.py | 33 +-
47 pym/_emerge/BinpkgFetcher.py | 13 +-
48 pym/_emerge/BinpkgPrefetcher.py | 2 +-
49 pym/_emerge/BinpkgVerifier.py | 6 +-
50 pym/_emerge/EbuildBinpkg.py | 9 +-
51 pym/_emerge/EbuildBuild.py | 36 +-
52 pym/_emerge/Package.py | 16 +-
53 pym/_emerge/Scheduler.py | 6 +-
54 pym/_emerge/clear_caches.py | 1 -
55 pym/_emerge/depgraph.py | 16 +-
56 pym/portage/const.py | 2 +
57 pym/portage/dbapi/bintree.py | 683 +++++++++++++++++---------
58 pym/portage/emaint/modules/binhost/binhost.py | 47 +-
59 15 files changed, 613 insertions(+), 285 deletions(-)
60
61 diff --git a/bin/quickpkg b/bin/quickpkg
62 index 2c69a69..8b71c3e 100755
63 --- a/bin/quickpkg
64 +++ b/bin/quickpkg
65 @@ -63,7 +63,6 @@ def quickpkg_atom(options, infos, arg, eout):
66 pkgs_for_arg = 0
67 for cpv in matches:
68 excluded_config_files = []
69 - bintree.prevent_collision(cpv)
70 dblnk = vardb._dblink(cpv)
71 have_lock = False
72
73
74 diff --git a/man/make.conf.5 b/man/make.conf.5
75 index cd1ae21..1b71b97 100644
76 --- a/man/make.conf.5
77 +++ b/man/make.conf.5
78 @@ -256,6 +256,33 @@ has a \fB\-\-force\fR option that can be used to force regeneration of digests.
79 Keep logs from successful binary package merges. This is relevant only when
80 \fBPORT_LOGDIR\fR is set.
81 .TP
82 +.B binpkg\-multi\-instance
83 +Enable support for multiple binary package instances per ebuild.
84 +Having multiple instances is useful for a number of purposes, such as
85 +retaining builds that were built with different USE flags or linked
86 +against different versions of libraries. The location of any particular
87 +package within PKGDIR can be expressed as follows:
88 +
89 + ${PKGDIR}/${CATEGORY}/${PN}/${PF}\-${BUILD_ID}.xpak
90 +
91 +The build\-id starts at 1 for the first build of a particular ebuild,
92 +and is incremented by 1 for each new build. It is possible to share a
93 +writable PKGDIR over NFS, and locking ensures that each package added
94 +to PKGDIR will have a unique build\-id. It is not necessary to migrate
95 +an existing PKGDIR to the new layout, since portage is capable of
96 +working with a mixed PKGDIR layout, where packages using the old layout
97 +are allowed to remain in place.
98 +
99 +The new PKGDIR layout is backward\-compatible with binhost clients
100 +running older portage, since the file format is identical, the
101 +per\-package PATH attribute in the 'Packages' index directs them to
102 +download the file from the correct URI, and they automatically use
103 +BUILD_TIME metadata to select the latest builds.
104 +
105 +There is currently no automated way to prune old builds from PKGDIR,
106 +although it is possible to remove packages manually, and then run
107 +\(aqemaint \-\-fix binhost' to update the ${PKGDIR}/Packages index.
108 +.TP
109 .B buildpkg
110 Binary packages will be created for all packages that are merged. Also see
111 \fBquickpkg\fR(1) and \fBemerge\fR(1) \fB\-\-buildpkg\fR and
112
113 diff --git a/pym/_emerge/Binpkg.py b/pym/_emerge/Binpkg.py
114 index ded6dfd..7b7ae17 100644
115 --- a/pym/_emerge/Binpkg.py
116 +++ b/pym/_emerge/Binpkg.py
117 @@ -121,16 +121,11 @@ class Binpkg(CompositeTask):
118 fetcher = BinpkgFetcher(background=self.background,
119 logfile=self.settings.get("PORTAGE_LOG_FILE"), pkg=self.pkg,
120 pretend=self.opts.pretend, scheduler=self.scheduler)
121 - pkg_path = fetcher.pkg_path
122 - self._pkg_path = pkg_path
123 - # This gives bashrc users an opportunity to do various things
124 - # such as remove binary packages after they're installed.
125 - self.settings["PORTAGE_BINPKG_FILE"] = pkg_path
126
127 if self.opts.getbinpkg and self._bintree.isremote(pkg.cpv):
128 -
129 msg = " --- (%s of %s) Fetching Binary (%s::%s)" %\
130 - (pkg_count.curval, pkg_count.maxval, pkg.cpv, pkg_path)
131 + (pkg_count.curval, pkg_count.maxval, pkg.cpv,
132 + fetcher.pkg_path)
133 short_msg = "emerge: (%s of %s) %s Fetch" % \
134 (pkg_count.curval, pkg_count.maxval, pkg.cpv)
135 self.logger.log(msg, short_msg=short_msg)
136 @@ -149,7 +144,7 @@ class Binpkg(CompositeTask):
137 # The fetcher only has a returncode when
138 # --getbinpkg is enabled.
139 if fetcher.returncode is not None:
140 - self._fetched_pkg = True
141 + self._fetched_pkg = fetcher.pkg_path
142 if self._default_exit(fetcher) != os.EX_OK:
143 self._unlock_builddir()
144 self.wait()
145 @@ -163,9 +158,15 @@ class Binpkg(CompositeTask):
146
147 verifier = None
148 if self._verify:
149 + if self._fetched_pkg:
150 + path = self._fetched_pkg
151 + else:
152 + path = self.pkg.root_config.trees["bintree"].getname(
153 + self.pkg.cpv)
154 logfile = self.settings.get("PORTAGE_LOG_FILE")
155 verifier = BinpkgVerifier(background=self.background,
156 - logfile=logfile, pkg=self.pkg, scheduler=self.scheduler)
157 + logfile=logfile, pkg=self.pkg, scheduler=self.scheduler,
158 + _pkg_path=path)
159 self._start_task(verifier, self._verifier_exit)
160 return
161
162 @@ -181,10 +182,20 @@ class Binpkg(CompositeTask):
163 logger = self.logger
164 pkg = self.pkg
165 pkg_count = self.pkg_count
166 - pkg_path = self._pkg_path
167
168 if self._fetched_pkg:
169 - self._bintree.inject(pkg.cpv, filename=pkg_path)
170 + pkg_path = self._bintree.getname(
171 + self._bintree.inject(pkg.cpv,
172 + filename=self._fetched_pkg),
173 + allocate_new=False)
174 + else:
175 + pkg_path = self.pkg.root_config.trees["bintree"].getname(
176 + self.pkg.cpv)
177 +
178 + # This gives bashrc users an opportunity to do various things
179 + # such as remove binary packages after they're installed.
180 + self.settings["PORTAGE_BINPKG_FILE"] = pkg_path
181 + self._pkg_path = pkg_path
182
183 logfile = self.settings.get("PORTAGE_LOG_FILE")
184 if logfile is not None and os.path.isfile(logfile):
185
186 diff --git a/pym/_emerge/BinpkgFetcher.py b/pym/_emerge/BinpkgFetcher.py
187 index 543881e..a7f2d44 100644
188 --- a/pym/_emerge/BinpkgFetcher.py
189 +++ b/pym/_emerge/BinpkgFetcher.py
190 @@ -24,7 +24,8 @@ class BinpkgFetcher(SpawnProcess):
191 def __init__(self, **kwargs):
192 SpawnProcess.__init__(self, **kwargs)
193 pkg = self.pkg
194 - self.pkg_path = pkg.root_config.trees["bintree"].getname(pkg.cpv)
195 + self.pkg_path = pkg.root_config.trees["bintree"].getname(
196 + pkg.cpv) + ".partial"
197
198 def _start(self):
199
200 @@ -51,10 +52,12 @@ class BinpkgFetcher(SpawnProcess):
201 # urljoin doesn't work correctly with
202 # unrecognized protocols like sftp
203 if bintree._remote_has_index:
204 - rel_uri = bintree._remotepkgs[pkg.cpv].get("PATH")
205 + instance_key = bintree.dbapi._instance_key(pkg.cpv)
206 + rel_uri = bintree._remotepkgs[instance_key].get("PATH")
207 if not rel_uri:
208 rel_uri = pkg.cpv + ".tbz2"
209 - remote_base_uri = bintree._remotepkgs[pkg.cpv]["BASE_URI"]
210 + remote_base_uri = bintree._remotepkgs[
211 + instance_key]["BASE_URI"]
212 uri = remote_base_uri.rstrip("/") + "/" + rel_uri.lstrip("/")
213 else:
214 uri = settings["PORTAGE_BINHOST"].rstrip("/") + \
215 @@ -128,7 +131,9 @@ class BinpkgFetcher(SpawnProcess):
216 # the fetcher didn't already do it automatically.
217 bintree = self.pkg.root_config.trees["bintree"]
218 if bintree._remote_has_index:
219 - remote_mtime = bintree._remotepkgs[self.pkg.cpv].get("MTIME")
220 + remote_mtime = bintree._remotepkgs[
221 + bintree.dbapi._instance_key(
222 + self.pkg.cpv)].get("MTIME")
223 if remote_mtime is not None:
224 try:
225 remote_mtime = long(remote_mtime)
226
227 diff --git a/pym/_emerge/BinpkgPrefetcher.py b/pym/_emerge/BinpkgPrefetcher.py
228 index ffa4900..7ca8970 100644
229 --- a/pym/_emerge/BinpkgPrefetcher.py
230 +++ b/pym/_emerge/BinpkgPrefetcher.py
231 @@ -27,7 +27,7 @@ class BinpkgPrefetcher(CompositeTask):
232
233 verifier = BinpkgVerifier(background=self.background,
234 logfile=self.scheduler.fetch.log_file, pkg=self.pkg,
235 - scheduler=self.scheduler)
236 + scheduler=self.scheduler, _pkg_path=self.pkg_path)
237 self._start_task(verifier, self._verifier_exit)
238
239 def _verifier_exit(self, verifier):
240
241 diff --git a/pym/_emerge/BinpkgVerifier.py b/pym/_emerge/BinpkgVerifier.py
242 index 2c69792..7a6d15e 100644
243 --- a/pym/_emerge/BinpkgVerifier.py
244 +++ b/pym/_emerge/BinpkgVerifier.py
245 @@ -33,7 +33,6 @@ class BinpkgVerifier(CompositeTask):
246 digests = _apply_hash_filter(digests, hash_filter)
247
248 self._digests = digests
249 - self._pkg_path = bintree.getname(self.pkg.cpv)
250
251 try:
252 size = os.stat(self._pkg_path).st_size
253 @@ -90,8 +89,11 @@ class BinpkgVerifier(CompositeTask):
254 if portage.output.havecolor:
255 portage.output.havecolor = not self.background
256
257 + path = self._pkg_path
258 + if path.endswith(".partial"):
259 + path = path[:-len(".partial")]
260 eout = EOutput()
261 - eout.ebegin("%s %s ;-)" % (os.path.basename(self._pkg_path),
262 + eout.ebegin("%s %s ;-)" % (os.path.basename(path),
263 " ".join(sorted(self._digests))))
264 eout.eend(0)
265
266
267 diff --git a/pym/_emerge/EbuildBinpkg.py b/pym/_emerge/EbuildBinpkg.py
268 index 34a6aef..6e098eb 100644
269 --- a/pym/_emerge/EbuildBinpkg.py
270 +++ b/pym/_emerge/EbuildBinpkg.py
271 @@ -10,13 +10,12 @@ class EbuildBinpkg(CompositeTask):
272 This assumes that src_install() has successfully completed.
273 """
274 __slots__ = ('pkg', 'settings') + \
275 - ('_binpkg_tmpfile',)
276 + ('_binpkg_tmpfile', '_binpkg_info')
277
278 def _start(self):
279 pkg = self.pkg
280 root_config = pkg.root_config
281 bintree = root_config.trees["bintree"]
282 - bintree.prevent_collision(pkg.cpv)
283 binpkg_tmpfile = os.path.join(bintree.pkgdir,
284 pkg.cpv + ".tbz2." + str(os.getpid()))
285 bintree._ensure_dir(os.path.dirname(binpkg_tmpfile))
286 @@ -43,8 +42,12 @@ class EbuildBinpkg(CompositeTask):
287
288 pkg = self.pkg
289 bintree = pkg.root_config.trees["bintree"]
290 - bintree.inject(pkg.cpv, filename=self._binpkg_tmpfile)
291 + self._binpkg_info = bintree.inject(pkg.cpv,
292 + filename=self._binpkg_tmpfile)
293
294 self._current_task = None
295 self.returncode = os.EX_OK
296 self.wait()
297 +
298 + def get_binpkg_info(self):
299 + return self._binpkg_info
300
301 diff --git a/pym/_emerge/EbuildBuild.py b/pym/_emerge/EbuildBuild.py
302 index b5b1e87..0e98602 100644
303 --- a/pym/_emerge/EbuildBuild.py
304 +++ b/pym/_emerge/EbuildBuild.py
305 @@ -1,6 +1,10 @@
306 # Copyright 1999-2014 Gentoo Foundation
307 # Distributed under the terms of the GNU General Public License v2
308
309 +from __future__ import unicode_literals
310 +
311 +import io
312 +
313 import _emerge.emergelog
314 from _emerge.EbuildExecuter import EbuildExecuter
315 from _emerge.EbuildPhase import EbuildPhase
316 @@ -15,7 +19,7 @@ from _emerge.TaskSequence import TaskSequence
317
318 from portage.util import writemsg
319 import portage
320 -from portage import os
321 +from portage import _encodings, _unicode_decode, _unicode_encode, os
322 from portage.output import colorize
323 from portage.package.ebuild.digestcheck import digestcheck
324 from portage.package.ebuild.digestgen import digestgen
325 @@ -317,9 +321,13 @@ class EbuildBuild(CompositeTask):
326 phase="rpm", scheduler=self.scheduler,
327 settings=self.settings))
328 else:
329 - binpkg_tasks.add(EbuildBinpkg(background=self.background,
330 + task = EbuildBinpkg(
331 + background=self.background,
332 pkg=self.pkg, scheduler=self.scheduler,
333 - settings=self.settings))
334 + settings=self.settings)
335 + binpkg_tasks.add(task)
336 + task.addExitListener(
337 + self._record_binpkg_info)
338
339 if binpkg_tasks:
340 self._start_task(binpkg_tasks, self._buildpkg_exit)
341 @@ -356,6 +364,28 @@ class EbuildBuild(CompositeTask):
342 self.returncode = packager.returncode
343 self.wait()
344
345 + def _record_binpkg_info(self, task):
346 + if task.returncode != os.EX_OK:
347 + return
348 +
349 + # Save info about the created binary package, so that
350 + # identifying information can be passed to the install
351 + # task, to be recorded in the installed package database.
352 + pkg = task.get_binpkg_info()
353 + infoloc = os.path.join(self.settings["PORTAGE_BUILDDIR"],
354 + "build-info")
355 + info = {
356 + "BINPKGMD5": "%s\n" % pkg._metadata["MD5"],
357 + }
358 + if pkg.build_id is not None:
359 + info["BUILD_ID"] = "%s\n" % pkg.build_id
360 + for k, v in info.items():
361 + with io.open(_unicode_encode(os.path.join(infoloc, k),
362 + encoding=_encodings['fs'], errors='strict'),
363 + mode='w', encoding=_encodings['repo.content'],
364 + errors='strict') as f:
365 + f.write(v)
366 +
367 def _buildpkgonly_success_hook_exit(self, success_hooks):
368 self._default_exit(success_hooks)
369 self.returncode = None
370
371 diff --git a/pym/_emerge/Package.py b/pym/_emerge/Package.py
372 index 975335d..2c1a116 100644
373 --- a/pym/_emerge/Package.py
374 +++ b/pym/_emerge/Package.py
375 @@ -219,6 +219,8 @@ class Package(Task):
376 else:
377 raise TypeError("root_config argument is required")
378
379 + elements = [type_name, root, _unicode(cpv), operation]
380 +
381 # For installed (and binary) packages we don't care for the repo
382 # when it comes to hashing, because there can only be one cpv.
383 # So overwrite the repo_key with type_name.
384 @@ -229,14 +231,22 @@ class Package(Task):
385 raise AssertionError(
386 "Package._gen_hash_key() " + \
387 "called without 'repo_name' argument")
388 - repo_key = repo_name
389 + elements.append(repo_name)
390 + elif type_name == "binary":
391 + # Including a variety of fingerprints in the hash makes
392 + # it possible to simultaneously consider multiple similar
393 + # packages. Note that digests are not included here, since
394 + # they are relatively expensive to compute, and they may
395 + # not necessarily be available.
396 + elements.extend([cpv.build_id, cpv.file_size,
397 + cpv.build_time, cpv.mtime])
398 else:
399 # For installed (and binary) packages we don't care for the repo
400 # when it comes to hashing, because there can only be one cpv.
401 # So overwrite the repo_key with type_name.
402 - repo_key = type_name
403 + elements.append(type_name)
404
405 - return (type_name, root, _unicode(cpv), operation, repo_key)
406 + return tuple(elements)
407
408 def _validate_deps(self):
409 """
410
411 diff --git a/pym/_emerge/Scheduler.py b/pym/_emerge/Scheduler.py
412 index 6e3bf1a..6b39e3b 100644
413 --- a/pym/_emerge/Scheduler.py
414 +++ b/pym/_emerge/Scheduler.py
415 @@ -862,8 +862,12 @@ class Scheduler(PollScheduler):
416 continue
417 fetched = fetcher.pkg_path
418
419 + if fetched is False:
420 + filename = bintree.getname(x.cpv)
421 + else:
422 + filename = fetched
423 verifier = BinpkgVerifier(pkg=x,
424 - scheduler=sched_iface)
425 + scheduler=sched_iface, _pkg_path=filename)
426 current_task = verifier
427 verifier.start()
428 if verifier.wait() != os.EX_OK:
429
430 diff --git a/pym/_emerge/clear_caches.py b/pym/_emerge/clear_caches.py
431 index 513df62..cb0db10 100644
432 --- a/pym/_emerge/clear_caches.py
433 +++ b/pym/_emerge/clear_caches.py
434 @@ -7,7 +7,6 @@ def clear_caches(trees):
435 for d in trees.values():
436 d["porttree"].dbapi.melt()
437 d["porttree"].dbapi._aux_cache.clear()
438 - d["bintree"].dbapi._aux_cache.clear()
439 d["bintree"].dbapi._clear_cache()
440 if d["vartree"].dbapi._linkmap is None:
441 # preserve-libs is entirely disabled
442
443 diff --git a/pym/_emerge/depgraph.py b/pym/_emerge/depgraph.py
444 index 37292a6..ba897d0 100644
445 --- a/pym/_emerge/depgraph.py
446 +++ b/pym/_emerge/depgraph.py
447 @@ -5747,11 +5747,11 @@ class depgraph(object):
448 if want_reinstall and matched_packages:
449 continue
450
451 - # Ignore USE deps for the initial match since we want to
452 - # ensure that updates aren't missed solely due to the user's
453 - # USE configuration.
454 + # For unbuilt ebuilds, ignore USE deps for the initial
455 + # match since we want to ensure that updates aren't
456 + # missed solely due to the user's USE configuration.
457 for pkg in self._iter_match_pkgs(root_config, pkg_type,
458 - atom.without_use if atom.package else atom,
459 + atom.without_use if (atom.package and not built) else atom,
460 onlydeps=onlydeps):
461 if have_new_virt is True and pkg.cp != atom_cp:
462 # pull in a new-style virtual instead
463 @@ -6014,6 +6014,10 @@ class depgraph(object):
464 pkg, {}).setdefault(
465 "respect_use", set()).update(
466 reinstall_for_flags)
467 + # Continue searching for a binary
468 + # package instance built with the
469 + # desired USE settings.
470 + continue
471 break
472
473 if (((installed and changed_deps) or
474 @@ -6023,6 +6027,10 @@ class depgraph(object):
475 self._dynamic_config.\
476 ignored_binaries.setdefault(
477 pkg, {})["changed_deps"] = True
478 + # Continue searching for a binary
479 + # package instance built with the
480 + # desired USE settings.
481 + continue
482 break
483
484 # Compare current config to installed package
485
486 diff --git a/pym/portage/const.py b/pym/portage/const.py
487 index febdb4a..c7ecda2 100644
488 --- a/pym/portage/const.py
489 +++ b/pym/portage/const.py
490 @@ -122,6 +122,7 @@ EBUILD_PHASES = (
491 SUPPORTED_FEATURES = frozenset([
492 "assume-digests",
493 "binpkg-logs",
494 + "binpkg-multi-instance",
495 "buildpkg",
496 "buildsyspkg",
497 "candy",
498 @@ -268,6 +269,7 @@ LIVE_ECLASSES = frozenset([
499 ])
500
501 SUPPORTED_BINPKG_FORMATS = ("tar", "rpm")
502 +SUPPORTED_XPAK_EXTENSIONS = (".tbz2", ".xpak")
503
504 # Time formats used in various places like metadata.chk.
505 TIMESTAMP_FORMAT = "%a, %d %b %Y %H:%M:%S +0000" # to be used with time.gmtime()
506
507 diff --git a/pym/portage/dbapi/bintree.py b/pym/portage/dbapi/bintree.py
508 index cd30b67..9bc5d98 100644
509 --- a/pym/portage/dbapi/bintree.py
510 +++ b/pym/portage/dbapi/bintree.py
511 @@ -17,14 +17,13 @@ portage.proxy.lazyimport.lazyimport(globals(),
512 'portage.update:update_dbentries',
513 'portage.util:atomic_ofstream,ensure_dirs,normalize_path,' + \
514 'writemsg,writemsg_stdout',
515 - 'portage.util.listdir:listdir',
516 'portage.util.path:first_existing',
517 'portage.util._urlopen:urlopen@_urlopen',
518 'portage.versions:best,catpkgsplit,catsplit,_pkg_str',
519 )
520
521 from portage.cache.mappings import slot_dict_class
522 -from portage.const import CACHE_PATH
523 +from portage.const import CACHE_PATH, SUPPORTED_XPAK_EXTENSIONS
524 from portage.dbapi.virtual import fakedbapi
525 from portage.dep import Atom, use_reduce, paren_enclose
526 from portage.exception import AlarmSignal, InvalidData, InvalidPackageName, \
527 @@ -71,18 +70,26 @@ class bindbapi(fakedbapi):
528 _known_keys = frozenset(list(fakedbapi._known_keys) + \
529 ["CHOST", "repository", "USE"])
530 def __init__(self, mybintree=None, **kwargs):
531 - fakedbapi.__init__(self, **kwargs)
532 + # Always enable multi_instance mode for bindbapi indexing. This
533 + # does not affect the local PKGDIR file layout, since that is
534 + # controlled independently by FEATURES=binpkg-multi-instance.
535 + # The multi_instance mode is useful for the following reasons:
536 + # * binary packages with the same cpv from multiple binhosts
537 + # can be considered simultaneously
538 + # * if binpkg-multi-instance is disabled, it's still possible
539 + # to properly access a PKGDIR which has binpkg-multi-instance
540 + # layout (or mixed layout)
541 + fakedbapi.__init__(self, exclusive_slots=False,
542 + multi_instance=True, **kwargs)
543 self.bintree = mybintree
544 self.move_ent = mybintree.move_ent
545 - self.cpvdict={}
546 - self.cpdict={}
547 # Selectively cache metadata in order to optimize dep matching.
548 self._aux_cache_keys = set(
549 - ["BUILD_TIME", "CHOST", "DEPEND", "EAPI",
550 - "HDEPEND", "IUSE", "KEYWORDS",
551 - "LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE",
552 - "RDEPEND", "repository", "RESTRICT", "SLOT", "USE",
553 - "DEFINED_PHASES", "PROVIDES", "REQUIRES"
554 + ["BUILD_ID", "BUILD_TIME", "CHOST", "DEFINED_PHASES",
555 + "DEPEND", "EAPI", "HDEPEND", "IUSE", "KEYWORDS",
556 + "LICENSE", "MD5", "PDEPEND", "PROPERTIES", "PROVIDE",
557 + "PROVIDES", "RDEPEND", "repository", "REQUIRES", "RESTRICT",
558 + "SIZE", "SLOT", "USE", "_mtime_"
559 ])
560 self._aux_cache_slot_dict = slot_dict_class(self._aux_cache_keys)
561 self._aux_cache = {}
562 @@ -109,33 +116,49 @@ class bindbapi(fakedbapi):
563 return fakedbapi.cpv_exists(self, cpv)
564
565 def cpv_inject(self, cpv, **kwargs):
566 - self._aux_cache.pop(cpv, None)
567 - fakedbapi.cpv_inject(self, cpv, **kwargs)
568 + if not self.bintree.populated:
569 + self.bintree.populate()
570 + fakedbapi.cpv_inject(self, cpv,
571 + metadata=cpv._metadata, **kwargs)
572
573 def cpv_remove(self, cpv):
574 - self._aux_cache.pop(cpv, None)
575 + if not self.bintree.populated:
576 + self.bintree.populate()
577 fakedbapi.cpv_remove(self, cpv)
578
579 def aux_get(self, mycpv, wants, myrepo=None):
580 if self.bintree and not self.bintree.populated:
581 self.bintree.populate()
582 - cache_me = False
583 + # Support plain string for backward compatibility with API
584 + # consumers (including portageq, which passes in a cpv from
585 + # a command-line argument).
586 + instance_key = self._instance_key(mycpv,
587 + support_string=True)
588 if not self._known_keys.intersection(
589 wants).difference(self._aux_cache_keys):
590 - aux_cache = self._aux_cache.get(mycpv)
591 + aux_cache = self.cpvdict[instance_key]
592 if aux_cache is not None:
593 return [aux_cache.get(x, "") for x in wants]
594 - cache_me = True
595 mysplit = mycpv.split("/")
596 mylist = []
597 tbz2name = mysplit[1]+".tbz2"
598 if not self.bintree._remotepkgs or \
599 not self.bintree.isremote(mycpv):
600 - tbz2_path = self.bintree.getname(mycpv)
601 - if not os.path.exists(tbz2_path):
602 + try:
603 + tbz2_path = self.bintree._pkg_paths[instance_key]
604 + except KeyError:
605 + raise KeyError(mycpv)
606 + tbz2_path = os.path.join(self.bintree.pkgdir, tbz2_path)
607 + try:
608 + st = os.lstat(tbz2_path)
609 + except OSError:
610 raise KeyError(mycpv)
611 metadata_bytes = portage.xpak.tbz2(tbz2_path).get_data()
612 def getitem(k):
613 + if k == "_mtime_":
614 + return _unicode(st[stat.ST_MTIME])
615 + elif k == "SIZE":
616 + return _unicode(st.st_size)
617 v = metadata_bytes.get(_unicode_encode(k,
618 encoding=_encodings['repo.content'],
619 errors='backslashreplace'))
620 @@ -144,11 +167,9 @@ class bindbapi(fakedbapi):
621 encoding=_encodings['repo.content'], errors='replace')
622 return v
623 else:
624 - getitem = self.bintree._remotepkgs[mycpv].get
625 + getitem = self.cpvdict[instance_key].get
626 mydata = {}
627 mykeys = wants
628 - if cache_me:
629 - mykeys = self._aux_cache_keys.union(wants)
630 for x in mykeys:
631 myval = getitem(x)
632 # myval is None if the key doesn't exist
633 @@ -159,16 +180,24 @@ class bindbapi(fakedbapi):
634 if not mydata.setdefault('EAPI', '0'):
635 mydata['EAPI'] = '0'
636
637 - if cache_me:
638 - aux_cache = self._aux_cache_slot_dict()
639 - for x in self._aux_cache_keys:
640 - aux_cache[x] = mydata.get(x, '')
641 - self._aux_cache[mycpv] = aux_cache
642 return [mydata.get(x, '') for x in wants]
643
644 def aux_update(self, cpv, values):
645 if not self.bintree.populated:
646 self.bintree.populate()
647 + build_id = None
648 + try:
649 + build_id = cpv.build_id
650 + except AttributeError:
651 + if self.bintree._multi_instance:
652 + # The cpv.build_id attribute is required if we are in
653 + # multi-instance mode, since otherwise we won't know
654 + # which instance to update.
655 + raise
656 + else:
657 + cpv = self._instance_key(cpv, support_string=True)[0]
658 + build_id = cpv.build_id
659 +
660 tbz2path = self.bintree.getname(cpv)
661 if not os.path.exists(tbz2path):
662 raise KeyError(cpv)
663 @@ -187,7 +216,7 @@ class bindbapi(fakedbapi):
664 del mydata[k]
665 mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
666 # inject will clear stale caches via cpv_inject.
667 - self.bintree.inject(cpv)
668 + self.bintree.inject(cpv, filename=tbz2path)
669
670 def cp_list(self, *pargs, **kwargs):
671 if not self.bintree.populated:
672 @@ -219,7 +248,7 @@ class bindbapi(fakedbapi):
673 if not self.bintree.isremote(pkg):
674 pass
675 else:
676 - metadata = self.bintree._remotepkgs[pkg]
677 + metadata = self.bintree._remotepkgs[self._instance_key(pkg)]
678 try:
679 size = int(metadata["SIZE"])
680 except KeyError:
681 @@ -300,6 +329,13 @@ class binarytree(object):
682
683 if True:
684 self.pkgdir = normalize_path(pkgdir)
685 + # NOTE: Even if binpkg-multi-instance is disabled, it's
686 + # still possible to access a PKGDIR which uses the
687 + # binpkg-multi-instance layout (or mixed layout).
688 + self._multi_instance = ("binpkg-multi-instance" in
689 + settings.features)
690 + if self._multi_instance:
691 + self._allocate_filename = self._allocate_filename_multi
692 self.dbapi = bindbapi(self, settings=settings)
693 self.update_ents = self.dbapi.update_ents
694 self.move_slot_ent = self.dbapi.move_slot_ent
695 @@ -310,7 +346,6 @@ class binarytree(object):
696 self.invalids = []
697 self.settings = settings
698 self._pkg_paths = {}
699 - self._pkgindex_uri = {}
700 self._populating = False
701 self._all_directory = os.path.isdir(
702 os.path.join(self.pkgdir, "All"))
703 @@ -318,12 +353,14 @@ class binarytree(object):
704 self._pkgindex_hashes = ["MD5","SHA1"]
705 self._pkgindex_file = os.path.join(self.pkgdir, "Packages")
706 self._pkgindex_keys = self.dbapi._aux_cache_keys.copy()
707 - self._pkgindex_keys.update(["CPV", "MTIME", "SIZE"])
708 + self._pkgindex_keys.update(["CPV", "SIZE"])
709 self._pkgindex_aux_keys = \
710 - ["BUILD_TIME", "CHOST", "DEPEND", "DESCRIPTION", "EAPI",
711 - "HDEPEND", "IUSE", "KEYWORDS", "LICENSE", "PDEPEND", "PROPERTIES",
712 - "PROVIDE", "RESTRICT", "RDEPEND", "repository", "SLOT", "USE", "DEFINED_PHASES",
713 - "BASE_URI", "PROVIDES", "REQUIRES"]
714 + ["BASE_URI", "BUILD_ID", "BUILD_TIME", "CHOST",
715 + "DEFINED_PHASES", "DEPEND", "DESCRIPTION", "EAPI",
716 + "HDEPEND", "IUSE", "KEYWORDS", "LICENSE", "PDEPEND",
717 + "PKGINDEX_URI", "PROPERTIES", "PROVIDE", "PROVIDES",
718 + "RDEPEND", "repository", "REQUIRES", "RESTRICT",
719 + "SIZE", "SLOT", "USE"]
720 self._pkgindex_aux_keys = list(self._pkgindex_aux_keys)
721 self._pkgindex_use_evaluated_keys = \
722 ("DEPEND", "HDEPEND", "LICENSE", "RDEPEND",
723 @@ -336,6 +373,7 @@ class binarytree(object):
724 "USE_EXPAND", "USE_EXPAND_HIDDEN", "USE_EXPAND_IMPLICIT",
725 "USE_EXPAND_UNPREFIXED"])
726 self._pkgindex_default_pkg_data = {
727 + "BUILD_ID" : "",
728 "BUILD_TIME" : "",
729 "DEFINED_PHASES" : "",
730 "DEPEND" : "",
731 @@ -365,6 +403,7 @@ class binarytree(object):
732
733 self._pkgindex_translated_keys = (
734 ("DESCRIPTION" , "DESC"),
735 + ("_mtime_" , "MTIME"),
736 ("repository" , "REPO"),
737 )
738
739 @@ -455,16 +494,21 @@ class binarytree(object):
740 mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
741
742 self.dbapi.cpv_remove(mycpv)
743 - del self._pkg_paths[mycpv]
744 + del self._pkg_paths[self.dbapi._instance_key(mycpv)]
745 + metadata = self.dbapi._aux_cache_slot_dict()
746 + for k in self.dbapi._aux_cache_keys:
747 + v = mydata.get(_unicode_encode(k))
748 + if v is not None:
749 + v = _unicode_decode(v)
750 + metadata[k] = " ".join(v.split())
751 + mynewcpv = _pkg_str(mynewcpv, metadata=metadata)
752 new_path = self.getname(mynewcpv)
753 - self._pkg_paths[mynewcpv] = os.path.join(
754 + self._pkg_paths[
755 + self.dbapi._instance_key(mynewcpv)] = os.path.join(
756 *new_path.split(os.path.sep)[-2:])
757 if new_path != mytbz2:
758 self._ensure_dir(os.path.dirname(new_path))
759 _movefile(tbz2path, new_path, mysettings=self.settings)
760 - self._remove_symlink(mycpv)
761 - if new_path.split(os.path.sep)[-2] == "All":
762 - self._create_symlink(mynewcpv)
763 self.inject(mynewcpv)
764
765 return moves
766 @@ -645,55 +689,63 @@ class binarytree(object):
767 # prior to performing package moves since it only wants to
768 # operate on local packages (getbinpkgs=0).
769 self._remotepkgs = None
770 - self.dbapi._clear_cache()
771 - self.dbapi._aux_cache.clear()
772 + self.dbapi.clear()
773 + _instance_key = self.dbapi._instance_key
774 if True:
775 pkg_paths = {}
776 self._pkg_paths = pkg_paths
777 - dirs = listdir(self.pkgdir, dirsonly=True, EmptyOnError=True)
778 - if "All" in dirs:
779 - dirs.remove("All")
780 - dirs.sort()
781 - dirs.insert(0, "All")
782 + dir_files = {}
783 + for parent, dir_names, file_names in os.walk(self.pkgdir):
784 + relative_parent = parent[len(self.pkgdir)+1:]
785 + dir_files[relative_parent] = file_names
786 +
787 pkgindex = self._load_pkgindex()
788 - pf_index = None
789 if not self._pkgindex_version_supported(pkgindex):
790 pkgindex = self._new_pkgindex()
791 header = pkgindex.header
792 metadata = {}
793 + basename_index = {}
794 for d in pkgindex.packages:
795 - metadata[d["CPV"]] = d
796 + cpv = _pkg_str(d["CPV"], metadata=d,
797 + settings=self.settings)
798 + d["CPV"] = cpv
799 + metadata[_instance_key(cpv)] = d
800 + path = d.get("PATH")
801 + if not path:
802 + path = cpv + ".tbz2"
803 + basename = os.path.basename(path)
804 + basename_index.setdefault(basename, []).append(d)
805 +
806 update_pkgindex = False
807 - for mydir in dirs:
808 - for myfile in listdir(os.path.join(self.pkgdir, mydir)):
809 - if not myfile.endswith(".tbz2"):
810 + for mydir, file_names in dir_files.items():
811 + try:
812 + mydir = _unicode_decode(mydir,
813 + encoding=_encodings["fs"], errors="strict")
814 + except UnicodeDecodeError:
815 + continue
816 + for myfile in file_names:
817 + try:
818 + myfile = _unicode_decode(myfile,
819 + encoding=_encodings["fs"], errors="strict")
820 + except UnicodeDecodeError:
821 + continue
822 + if not myfile.endswith(SUPPORTED_XPAK_EXTENSIONS):
823 continue
824 mypath = os.path.join(mydir, myfile)
825 full_path = os.path.join(self.pkgdir, mypath)
826 s = os.lstat(full_path)
827 - if stat.S_ISLNK(s.st_mode):
828 +
829 + if not stat.S_ISREG(s.st_mode):
830 continue
831
832 # Validate data from the package index and try to avoid
833 # reading the xpak if possible.
834 - if mydir != "All":
835 - possibilities = None
836 - d = metadata.get(mydir+"/"+myfile[:-5])
837 - if d:
838 - possibilities = [d]
839 - else:
840 - if pf_index is None:
841 - pf_index = {}
842 - for mycpv in metadata:
843 - mycat, mypf = catsplit(mycpv)
844 - pf_index.setdefault(
845 - mypf, []).append(metadata[mycpv])
846 - possibilities = pf_index.get(myfile[:-5])
847 + possibilities = basename_index.get(myfile)
848 if possibilities:
849 match = None
850 for d in possibilities:
851 try:
852 - if long(d["MTIME"]) != s[stat.ST_MTIME]:
853 + if long(d["_mtime_"]) != s[stat.ST_MTIME]:
854 continue
855 except (KeyError, ValueError):
856 continue
857 @@ -707,15 +759,14 @@ class binarytree(object):
858 break
859 if match:
860 mycpv = match["CPV"]
861 - if mycpv in pkg_paths:
862 - # discard duplicates (All/ is preferred)
863 - continue
864 - mycpv = _pkg_str(mycpv)
865 - pkg_paths[mycpv] = mypath
866 + instance_key = _instance_key(mycpv)
867 + pkg_paths[instance_key] = mypath
868 # update the path if the package has been moved
869 oldpath = d.get("PATH")
870 if oldpath and oldpath != mypath:
871 update_pkgindex = True
872 + # Omit PATH if it is the default path for
873 + # the current Packages format version.
874 if mypath != mycpv + ".tbz2":
875 d["PATH"] = mypath
876 if not oldpath:
877 @@ -725,11 +776,6 @@ class binarytree(object):
878 if oldpath:
879 update_pkgindex = True
880 self.dbapi.cpv_inject(mycpv)
881 - if not self.dbapi._aux_cache_keys.difference(d):
882 - aux_cache = self.dbapi._aux_cache_slot_dict()
883 - for k in self.dbapi._aux_cache_keys:
884 - aux_cache[k] = d[k]
885 - self.dbapi._aux_cache[mycpv] = aux_cache
886 continue
887 if not os.access(full_path, os.R_OK):
888 writemsg(_("!!! Permission denied to read " \
889 @@ -737,13 +783,12 @@ class binarytree(object):
890 noiselevel=-1)
891 self.invalids.append(myfile[:-5])
892 continue
893 - metadata_bytes = portage.xpak.tbz2(full_path).get_data()
894 - mycat = _unicode_decode(metadata_bytes.get(b"CATEGORY", ""),
895 - encoding=_encodings['repo.content'], errors='replace')
896 - mypf = _unicode_decode(metadata_bytes.get(b"PF", ""),
897 - encoding=_encodings['repo.content'], errors='replace')
898 - slot = _unicode_decode(metadata_bytes.get(b"SLOT", ""),
899 - encoding=_encodings['repo.content'], errors='replace')
900 + pkg_metadata = self._read_metadata(full_path, s,
901 + keys=chain(self.dbapi._aux_cache_keys,
902 + ("PF", "CATEGORY")))
903 + mycat = pkg_metadata.get("CATEGORY", "")
904 + mypf = pkg_metadata.get("PF", "")
905 + slot = pkg_metadata.get("SLOT", "")
906 mypkg = myfile[:-5]
907 if not mycat or not mypf or not slot:
908 #old-style or corrupt package
909 @@ -767,16 +812,51 @@ class binarytree(object):
910 writemsg("!!! %s\n" % line, noiselevel=-1)
911 self.invalids.append(mypkg)
912 continue
913 - mycat = mycat.strip()
914 - slot = slot.strip()
915 - if mycat != mydir and mydir != "All":
916 +
917 + multi_instance = False
918 + invalid_name = False
919 + build_id = None
920 + if myfile.endswith(".xpak"):
921 + multi_instance = True
922 + build_id = self._parse_build_id(myfile)
923 + if build_id < 1:
924 + invalid_name = True
925 + elif myfile != "%s-%s.xpak" % (
926 + mypf, build_id):
927 + invalid_name = True
928 + else:
929 + mypkg = mypkg[:-len(str(build_id))-1]
930 + elif myfile != mypf + ".tbz2":
931 + invalid_name = True
932 +
933 + if invalid_name:
934 + writemsg(_("\n!!! Binary package name is "
935 + "invalid: '%s'\n") % full_path,
936 + noiselevel=-1)
937 + continue
938 +
939 + if pkg_metadata.get("BUILD_ID"):
940 + try:
941 + build_id = long(pkg_metadata["BUILD_ID"])
942 + except ValueError:
943 + writemsg(_("!!! Binary package has "
944 + "invalid BUILD_ID: '%s'\n") %
945 + full_path, noiselevel=-1)
946 + continue
947 + else:
948 + build_id = None
949 +
950 + if multi_instance:
951 + name_split = catpkgsplit("%s/%s" %
952 + (mycat, mypf))
953 + if (name_split is None or
954 + tuple(catsplit(mydir)) != name_split[:2]):
955 + continue
956 + elif mycat != mydir and mydir != "All":
957 continue
958 if mypkg != mypf.strip():
959 continue
960 mycpv = mycat + "/" + mypkg
961 - if mycpv in pkg_paths:
962 - # All is first, so it's preferred.
963 - continue
964 if not self.dbapi._category_re.match(mycat):
965 writemsg(_("!!! Binary package has an " \
966 "unrecognized category: '%s'\n") % full_path,
967 @@ -786,14 +866,23 @@ class binarytree(object):
968 (mycpv, self.settings["PORTAGE_CONFIGROOT"]),
969 noiselevel=-1)
970 continue
971 - mycpv = _pkg_str(mycpv)
972 - pkg_paths[mycpv] = mypath
973 + if build_id is not None:
974 + pkg_metadata["BUILD_ID"] = _unicode(build_id)
975 + pkg_metadata["SIZE"] = _unicode(s.st_size)
976 + # Discard items used only for validation above.
977 + pkg_metadata.pop("CATEGORY")
978 + pkg_metadata.pop("PF")
979 + mycpv = _pkg_str(mycpv,
980 + metadata=self.dbapi._aux_cache_slot_dict(
981 + pkg_metadata))
982 + pkg_paths[_instance_key(mycpv)] = mypath
983 self.dbapi.cpv_inject(mycpv)
984 update_pkgindex = True
985 - d = metadata.get(mycpv, {})
986 + d = metadata.get(_instance_key(mycpv),
987 + pkgindex._pkg_slot_dict())
988 if d:
989 try:
990 - if long(d["MTIME"]) != s[stat.ST_MTIME]:
991 + if long(d["_mtime_"]) != s[stat.ST_MTIME]:
992 d.clear()
993 except (KeyError, ValueError):
994 d.clear()
995 @@ -804,36 +893,30 @@ class binarytree(object):
996 except (KeyError, ValueError):
997 d.clear()
998
999 + for k in self._pkgindex_allowed_pkg_keys:
1000 + v = pkg_metadata.get(k)
1001 + if v is not None:
1002 + d[k] = v
1003 d["CPV"] = mycpv
1004 - d["SLOT"] = slot
1005 - d["MTIME"] = _unicode(s[stat.ST_MTIME])
1006 - d["SIZE"] = _unicode(s.st_size)
1007
1008 - d.update(zip(self._pkgindex_aux_keys,
1009 - self.dbapi.aux_get(mycpv, self._pkgindex_aux_keys)))
1010 try:
1011 self._eval_use_flags(mycpv, d)
1012 except portage.exception.InvalidDependString:
1013 writemsg(_("!!! Invalid binary package: '%s'\n") % \
1014 self.getname(mycpv), noiselevel=-1)
1015 self.dbapi.cpv_remove(mycpv)
1016 - del pkg_paths[mycpv]
1017 + del pkg_paths[_instance_key(mycpv)]
1018
1019 # record location if it's non-default
1020 if mypath != mycpv + ".tbz2":
1021 d["PATH"] = mypath
1022 else:
1023 d.pop("PATH", None)
1024 - metadata[mycpv] = d
1025 - if not self.dbapi._aux_cache_keys.difference(d):
1026 - aux_cache = self.dbapi._aux_cache_slot_dict()
1027 - for k in self.dbapi._aux_cache_keys:
1028 - aux_cache[k] = d[k]
1029 - self.dbapi._aux_cache[mycpv] = aux_cache
1030 + metadata[_instance_key(mycpv)] = d
1031
1032 - for cpv in list(metadata):
1033 - if cpv not in pkg_paths:
1034 - del metadata[cpv]
1035 + for instance_key in list(metadata):
1036 + if instance_key not in pkg_paths:
1037 + del metadata[instance_key]
1038
1039 # Do not bother to write the Packages index if $PKGDIR/All/ exists
1040 # since it will provide no benefit due to the need to read CATEGORY
1041 @@ -1058,45 +1141,24 @@ class binarytree(object):
1042 # The current user doesn't have permission to cache the
1043 # file, but that's alright.
1044 if pkgindex:
1045 - # Organize remote package list as a cpv -> metadata map.
1046 - remotepkgs = _pkgindex_cpv_map_latest_build(pkgindex)
1047 remote_base_uri = pkgindex.header.get("URI", base_url)
1048 - for cpv, remote_metadata in remotepkgs.items():
1049 - remote_metadata["BASE_URI"] = remote_base_uri
1050 - self._pkgindex_uri[cpv] = url
1051 - self._remotepkgs.update(remotepkgs)
1052 - self._remote_has_index = True
1053 - for cpv in remotepkgs:
1054 + for d in pkgindex.packages:
1055 + cpv = _pkg_str(d["CPV"], metadata=d,
1056 + settings=self.settings)
1057 + instance_key = _instance_key(cpv)
1058 + # Local package instances override remote instances
1059 + # with the same instance_key.
1060 + if instance_key in metadata:
1061 + continue
1062 +
1063 + d["CPV"] = cpv
1064 + d["BASE_URI"] = remote_base_uri
1065 + d["PKGINDEX_URI"] = url
1066 + self._remotepkgs[instance_key] = d
1067 + metadata[instance_key] = d
1068 self.dbapi.cpv_inject(cpv)
1069 - if True:
1070 - # Remote package instances override local package
1071 - # if they are not identical.
1072 - hash_names = ["SIZE"] + self._pkgindex_hashes
1073 - for cpv, local_metadata in metadata.items():
1074 - remote_metadata = self._remotepkgs.get(cpv)
1075 - if remote_metadata is None:
1076 - continue
1077 - # Use digests to compare identity.
1078 - identical = True
1079 - for hash_name in hash_names:
1080 - local_value = local_metadata.get(hash_name)
1081 - if local_value is None:
1082 - continue
1083 - remote_value = remote_metadata.get(hash_name)
1084 - if remote_value is None:
1085 - continue
1086 - if local_value != remote_value:
1087 - identical = False
1088 - break
1089 - if identical:
1090 - del self._remotepkgs[cpv]
1091 - else:
1092 - # Override the local package in the aux_get cache.
1093 - self.dbapi._aux_cache[cpv] = remote_metadata
1094 - else:
1095 - # Local package instances override remote instances.
1096 - for cpv in metadata:
1097 - self._remotepkgs.pop(cpv, None)
1098 +
1099 + self._remote_has_index = True
1100
1101 self.populated=1
1102
1103 @@ -1108,7 +1170,8 @@ class binarytree(object):
1104 @param filename: File path of the package to inject, or None if it's
1105 already in the location returned by getname()
1106 @type filename: string
1107 - @rtype: None
1108 + @rtype: _pkg_str or None
1109 + @return: A _pkg_str instance on success, or None on failure.
1110 """
1111 mycat, mypkg = catsplit(cpv)
1112 if not self.populated:
1113 @@ -1126,24 +1189,44 @@ class binarytree(object):
1114 writemsg(_("!!! Binary package does not exist: '%s'\n") % full_path,
1115 noiselevel=-1)
1116 return
1117 - mytbz2 = portage.xpak.tbz2(full_path)
1118 - slot = mytbz2.getfile("SLOT")
1119 + metadata = self._read_metadata(full_path, s)
1120 + slot = metadata.get("SLOT")
1121 + try:
1122 + self._eval_use_flags(cpv, metadata)
1123 + except portage.exception.InvalidDependString:
1124 + slot = None
1125 if slot is None:
1126 writemsg(_("!!! Invalid binary package: '%s'\n") % full_path,
1127 noiselevel=-1)
1128 return
1129 - slot = slot.strip()
1130 - self.dbapi.cpv_inject(cpv)
1131 +
1132 + fetched = False
1133 + try:
1134 + build_id = cpv.build_id
1135 + except AttributeError:
1136 + build_id = None
1137 + else:
1138 + instance_key = self.dbapi._instance_key(cpv)
1139 + if instance_key in self.dbapi.cpvdict:
1140 + # This means we've been called by aux_update (or
1141 + # similar). The instance key typically changes (due to
1142 + # file modification), so we need to discard existing
1143 + # instance key references.
1144 + self.dbapi.cpv_remove(cpv)
1145 + self._pkg_paths.pop(instance_key, None)
1146 + if self._remotepkgs is not None:
1147 + fetched = self._remotepkgs.pop(instance_key, None)
1148 +
1149 + cpv = _pkg_str(cpv, metadata=metadata, settings=self.settings)
1150
1151 # Reread the Packages index (in case it's been changed by another
1152 # process) and then update it, all while holding a lock.
1153 pkgindex_lock = None
1154 - created_symlink = False
1155 try:
1156 pkgindex_lock = lockfile(self._pkgindex_file,
1157 wantnewlockfile=1)
1158 if filename is not None:
1159 - new_filename = self.getname(cpv)
1160 + new_filename = self.getname(cpv, allocate_new=True)
1161 try:
1162 samefile = os.path.samefile(filename, new_filename)
1163 except OSError:
1164 @@ -1153,54 +1236,31 @@ class binarytree(object):
1165 _movefile(filename, new_filename, mysettings=self.settings)
1166 full_path = new_filename
1167
1168 - self._file_permissions(full_path)
1169 + basename = os.path.basename(full_path)
1170 + pf = catsplit(cpv)[1]
1171 + if (build_id is None and not fetched and
1172 + basename.endswith(".xpak")):
1173 + # Apply the newly assigned BUILD_ID. This is intended
1174 + # to occur only for locally built packages. If the
1175 + # package was fetched, we want to preserve its
1176 + # attributes, so that we can later distinguish that it
1177 + # is identical to its remote counterpart.
1178 + build_id = self._parse_build_id(basename)
1179 + metadata["BUILD_ID"] = _unicode(build_id)
1180 + cpv = _pkg_str(cpv, metadata=metadata,
1181 + settings=self.settings)
1182 + binpkg = portage.xpak.tbz2(full_path)
1183 + binary_data = binpkg.get_data()
1184 + binary_data[b"BUILD_ID"] = _unicode_encode(
1185 + metadata["BUILD_ID"])
1186 + binpkg.recompose_mem(portage.xpak.xpak_mem(binary_data))
1187
1188 - if self._all_directory and \
1189 - self.getname(cpv).split(os.path.sep)[-2] == "All":
1190 - self._create_symlink(cpv)
1191 - created_symlink = True
1192 + self._file_permissions(full_path)
1193 pkgindex = self._load_pkgindex()
1194 -
1195 if not self._pkgindex_version_supported(pkgindex):
1196 pkgindex = self._new_pkgindex()
1197
1198 - # Discard remote metadata to ensure that _pkgindex_entry
1199 - # gets the local metadata. This also updates state for future
1200 - # isremote calls.
1201 - if self._remotepkgs is not None:
1202 - self._remotepkgs.pop(cpv, None)
1203 -
1204 - # Discard cached metadata to ensure that _pkgindex_entry
1205 - # doesn't return stale metadata.
1206 - self.dbapi._aux_cache.pop(cpv, None)
1207 -
1208 - try:
1209 - d = self._pkgindex_entry(cpv)
1210 - except portage.exception.InvalidDependString:
1211 - writemsg(_("!!! Invalid binary package: '%s'\n") % \
1212 - self.getname(cpv), noiselevel=-1)
1213 - self.dbapi.cpv_remove(cpv)
1214 - del self._pkg_paths[cpv]
1215 - return
1216 -
1217 - # If found, remove package(s) with duplicate path.
1218 - path = d.get("PATH", "")
1219 - for i in range(len(pkgindex.packages) - 1, -1, -1):
1220 - d2 = pkgindex.packages[i]
1221 - if path and path == d2.get("PATH"):
1222 - # Handle path collisions in $PKGDIR/All
1223 - # when CPV is not identical.
1224 - del pkgindex.packages[i]
1225 - elif cpv == d2.get("CPV"):
1226 - if path == d2.get("PATH", ""):
1227 - del pkgindex.packages[i]
1228 - elif created_symlink and not d2.get("PATH", ""):
1229 - # Delete entry for the package that was just
1230 - # overwritten by a symlink to this package.
1231 - del pkgindex.packages[i]
1232 -
1233 - pkgindex.packages.append(d)
1234 -
1235 + d = self._inject_file(pkgindex, cpv, full_path)
1236 self._update_pkgindex_header(pkgindex.header)
1237 self._pkgindex_write(pkgindex)
1238
1239 @@ -1208,6 +1268,73 @@ class binarytree(object):
1240 if pkgindex_lock:
1241 unlockfile(pkgindex_lock)
1242
1243 + # This is used to record BINPKGMD5 in the installed package
1244 + # database, for a package that has just been built.
1245 + cpv._metadata["MD5"] = d["MD5"]
1246 +
1247 + return cpv
1248 +
1249 + def _read_metadata(self, filename, st, keys=None):
1250 + if keys is None:
1251 + keys = self.dbapi._aux_cache_keys
1252 + metadata = self.dbapi._aux_cache_slot_dict()
1253 + else:
1254 + metadata = {}
1255 + binary_metadata = portage.xpak.tbz2(filename).get_data()
1256 + for k in keys:
1257 + if k == "_mtime_":
1258 + metadata[k] = _unicode(st[stat.ST_MTIME])
1259 + elif k == "SIZE":
1260 + metadata[k] = _unicode(st.st_size)
1261 + else:
1262 + v = binary_metadata.get(_unicode_encode(k))
1263 + if v is not None:
1264 + v = _unicode_decode(v)
1265 + metadata[k] = " ".join(v.split())
1266 + metadata.setdefault("EAPI", "0")
1267 + return metadata
1268 +
1269 + def _inject_file(self, pkgindex, cpv, filename):
1270 + """
1271 + Add a package to internal data structures, and add an
1272 + entry to the given pkgindex.
1273 + @param pkgindex: The PackageIndex instance to which an entry
1274 + will be added.
1275 + @type pkgindex: PackageIndex
1276 + @param cpv: A _pkg_str instance corresponding to the package
1277 + being injected.
1278 + @type cpv: _pkg_str
1279 + @param filename: Absolute file path of the package to inject.
1280 + @type filename: string
1281 + @rtype: dict
1282 + @return: A dict corresponding to the new entry which has been
1283 + added to pkgindex. This may be used to access the checksums
1284 + which have just been generated.
1285 + """
1286 + # Update state for future isremote calls.
1287 + instance_key = self.dbapi._instance_key(cpv)
1288 + if self._remotepkgs is not None:
1289 + self._remotepkgs.pop(instance_key, None)
1290 +
1291 + self.dbapi.cpv_inject(cpv)
1292 + self._pkg_paths[instance_key] = filename[len(self.pkgdir)+1:]
1293 + d = self._pkgindex_entry(cpv)
1294 +
1295 + # If found, remove package(s) with duplicate path.
1296 + path = d.get("PATH", "")
1297 + for i in range(len(pkgindex.packages) - 1, -1, -1):
1298 + d2 = pkgindex.packages[i]
1299 + if path and path == d2.get("PATH"):
1300 + # Handle path collisions in $PKGDIR/All
1301 + # when CPV is not identical.
1302 + del pkgindex.packages[i]
1303 + elif cpv == d2.get("CPV"):
1304 + if path == d2.get("PATH", ""):
1305 + del pkgindex.packages[i]
1306 +
1307 + pkgindex.packages.append(d)
1308 + return d
1309 +
1310 def _pkgindex_write(self, pkgindex):
1311 contents = codecs.getwriter(_encodings['repo.content'])(io.BytesIO())
1312 pkgindex.write(contents)
1313 @@ -1233,7 +1360,7 @@ class binarytree(object):
1314
1315 def _pkgindex_entry(self, cpv):
1316 """
1317 - Performs checksums and evaluates USE flag conditionals.
1318 + Performs checksums, and gets size and mtime via lstat.
1319 Raises InvalidDependString if necessary.
1320 @rtype: dict
1321 @return: a dict containing an entry for the given cpv.
1322 @@ -1241,23 +1368,20 @@ class binarytree(object):
1323
1324 pkg_path = self.getname(cpv)
1325
1326 - d = dict(zip(self._pkgindex_aux_keys,
1327 - self.dbapi.aux_get(cpv, self._pkgindex_aux_keys)))
1328 -
1329 + d = dict(cpv._metadata.items())
1330 d.update(perform_multiple_checksums(
1331 pkg_path, hashes=self._pkgindex_hashes))
1332
1333 d["CPV"] = cpv
1334 - st = os.stat(pkg_path)
1335 - d["MTIME"] = _unicode(st[stat.ST_MTIME])
1336 + st = os.lstat(pkg_path)
1337 + d["_mtime_"] = _unicode(st[stat.ST_MTIME])
1338 d["SIZE"] = _unicode(st.st_size)
1339
1340 - rel_path = self._pkg_paths[cpv]
1341 + rel_path = pkg_path[len(self.pkgdir)+1:]
1342 # record location if it's non-default
1343 if rel_path != cpv + ".tbz2":
1344 d["PATH"] = rel_path
1345
1346 - self._eval_use_flags(cpv, d)
1347 return d
1348
1349 def _new_pkgindex(self):
1350 @@ -1311,15 +1435,17 @@ class binarytree(object):
1351 return False
1352
1353 def _eval_use_flags(self, cpv, metadata):
1354 - use = frozenset(metadata["USE"].split())
1355 + use = frozenset(metadata.get("USE", "").split())
1356 for k in self._pkgindex_use_evaluated_keys:
1357 if k.endswith('DEPEND'):
1358 token_class = Atom
1359 else:
1360 token_class = None
1361
1362 + deps = metadata.get(k)
1363 + if deps is None:
1364 + continue
1365 try:
1366 - deps = metadata[k]
1367 deps = use_reduce(deps, uselist=use, token_class=token_class)
1368 deps = paren_enclose(deps)
1369 except portage.exception.InvalidDependString as e:
1370 @@ -1349,46 +1475,129 @@ class binarytree(object):
1371 return ""
1372 return mymatch
1373
1374 - def getname(self, pkgname):
1375 - """Returns a file location for this package. The default location is
1376 - ${PKGDIR}/All/${PF}.tbz2, but will be ${PKGDIR}/${CATEGORY}/${PF}.tbz2
1377 - in the rare event of a collision. The prevent_collision() method can
1378 - be called to ensure that ${PKGDIR}/All/${PF}.tbz2 is available for a
1379 - specific cpv."""
1380 + def getname(self, cpv, allocate_new=None):
1381 + """Returns a file location for this package.
1382 + If cpv has both build_time and build_id attributes, then the
1383 + path to the specific corresponding instance is returned.
1384 + Otherwise, allocate a new path and return that. When allocating
1385 + a new path, behavior depends on the binpkg-multi-instance
1386 + FEATURES setting.
1387 + """
1388 if not self.populated:
1389 self.populate()
1390 - mycpv = pkgname
1391 - mypath = self._pkg_paths.get(mycpv, None)
1392 - if mypath:
1393 - return os.path.join(self.pkgdir, mypath)
1394 - mycat, mypkg = catsplit(mycpv)
1395 - if self._all_directory:
1396 - mypath = os.path.join("All", mypkg + ".tbz2")
1397 - if mypath in self._pkg_paths.values():
1398 - mypath = os.path.join(mycat, mypkg + ".tbz2")
1399 +
1400 + try:
1401 + cpv.cp
1402 + except AttributeError:
1403 + cpv = _pkg_str(cpv)
1404 +
1405 + filename = None
1406 + if allocate_new:
1407 + filename = self._allocate_filename(cpv)
1408 + elif self._is_specific_instance(cpv):
1409 + instance_key = self.dbapi._instance_key(cpv)
1410 + path = self._pkg_paths.get(instance_key)
1411 + if path is not None:
1412 + filename = os.path.join(self.pkgdir, path)
1413 +
1414 + if filename is None and not allocate_new:
1415 + try:
1416 + instance_key = self.dbapi._instance_key(cpv,
1417 + support_string=True)
1418 + except KeyError:
1419 + pass
1420 + else:
1421 + filename = self._pkg_paths.get(instance_key)
1422 + if filename is not None:
1423 + filename = os.path.join(self.pkgdir, filename)
1424 +
1425 + if filename is None:
1426 + if self._multi_instance:
1427 + pf = catsplit(cpv)[1]
1428 + filename = "%s-%s.xpak" % (
1429 + os.path.join(self.pkgdir, cpv.cp, pf), "1")
1430 + else:
1431 + filename = os.path.join(self.pkgdir, cpv + ".tbz2")
1432 +
1433 + return filename
1434 +
1435 + def _is_specific_instance(self, cpv):
1436 + specific = True
1437 + try:
1438 + build_time = cpv.build_time
1439 + build_id = cpv.build_id
1440 + except AttributeError:
1441 + specific = False
1442 else:
1443 - mypath = os.path.join(mycat, mypkg + ".tbz2")
1444 - self._pkg_paths[mycpv] = mypath # cache for future lookups
1445 - return os.path.join(self.pkgdir, mypath)
1446 + if build_time is None or build_id is None:
1447 + specific = False
1448 + return specific
1449 +
1450 + def _max_build_id(self, cpv):
1451 + max_build_id = 0
1452 + for x in self.dbapi.cp_list(cpv.cp):
1453 + if (x == cpv and x.build_id is not None and
1454 + x.build_id > max_build_id):
1455 + max_build_id = x.build_id
1456 + return max_build_id
1457 +
1458 + def _allocate_filename(self, cpv):
1459 + return os.path.join(self.pkgdir, cpv + ".tbz2")
1460 +
1461 + def _allocate_filename_multi(self, cpv):
1462 +
1463 + # First, get the max build_id found when _populate was
1464 + # called.
1465 + max_build_id = self._max_build_id(cpv)
1466 +
1467 + # A new package may have been added concurrently since the
1468 + # last _populate call, so increment build_id until
1469 + # we locate an unused id.
1470 + pf = catsplit(cpv)[1]
1471 + build_id = max_build_id + 1
1472 +
1473 + while True:
1474 + filename = "%s-%s.xpak" % (
1475 + os.path.join(self.pkgdir, cpv.cp, pf), build_id)
1476 + if os.path.exists(filename):
1477 + build_id += 1
1478 + else:
1479 + return filename
1480 +
1481 + @staticmethod
1482 + def _parse_build_id(filename):
1483 + build_id = -1
1484 + hyphen = filename.rfind("-", 0, -6)
1485 + if hyphen != -1:
1486 + build_id = filename[hyphen+1:-5]
1487 + try:
1488 + build_id = long(build_id)
1489 + except ValueError:
1490 + pass
1491 + return build_id
1492
1493 def isremote(self, pkgname):
1494 """Returns true if the package is kept remotely and it has not been
1495 downloaded (or it is only partially downloaded)."""
1496 - if self._remotepkgs is None or pkgname not in self._remotepkgs:
1497 + if (self._remotepkgs is None or
1498 + self.dbapi._instance_key(pkgname) not in self._remotepkgs):
1499 return False
1500 # Presence in self._remotepkgs implies that it's remote. When a
1501 # package is downloaded, state is updated by self.inject().
1502 return True
1503
1504 - def get_pkgindex_uri(self, pkgname):
1505 + def get_pkgindex_uri(self, cpv):
1506 """Returns the URI to the Packages file for a given package."""
1507 - return self._pkgindex_uri.get(pkgname)
1508 -
1509 -
1510 + uri = None
1511 + metadata = self._remotepkgs.get(self.dbapi._instance_key(cpv))
1512 + if metadata is not None:
1513 + uri = metadata["PKGINDEX_URI"]
1514 + return uri
1515
1516 def gettbz2(self, pkgname):
1517 """Fetches the package from a remote site, if necessary. Attempts to
1518 resume if the file appears to be partially downloaded."""
1519 + instance_key = self.dbapi._instance_key(pkgname)
1520 tbz2_path = self.getname(pkgname)
1521 tbz2name = os.path.basename(tbz2_path)
1522 resume = False
1523 @@ -1404,10 +1613,10 @@ class binarytree(object):
1524 self._ensure_dir(mydest)
1525 # urljoin doesn't work correctly with unrecognized protocols like sftp
1526 if self._remote_has_index:
1527 - rel_url = self._remotepkgs[pkgname].get("PATH")
1528 + rel_url = self._remotepkgs[instance_key].get("PATH")
1529 if not rel_url:
1530 rel_url = pkgname+".tbz2"
1531 - remote_base_uri = self._remotepkgs[pkgname]["BASE_URI"]
1532 + remote_base_uri = self._remotepkgs[instance_key]["BASE_URI"]
1533 url = remote_base_uri.rstrip("/") + "/" + rel_url.lstrip("/")
1534 else:
1535 url = self.settings["PORTAGE_BINHOST"].rstrip("/") + "/" + tbz2name
1536 @@ -1450,15 +1659,19 @@ class binarytree(object):
1537 except AttributeError:
1538 cpv = pkg
1539
1540 + _instance_key = self.dbapi._instance_key
1541 + instance_key = _instance_key(cpv)
1542 digests = {}
1543 - metadata = None
1544 - if self._remotepkgs is None or cpv not in self._remotepkgs:
1545 + metadata = (None if self._remotepkgs is None else
1546 + self._remotepkgs.get(instance_key))
1547 + if metadata is None:
1548 for d in self._load_pkgindex().packages:
1549 - if d["CPV"] == cpv:
1550 + if (d["CPV"] == cpv and
1551 + instance_key == _instance_key(_pkg_str(d["CPV"],
1552 + metadata=d, settings=self.settings))):
1553 metadata = d
1554 break
1555 - else:
1556 - metadata = self._remotepkgs[cpv]
1557 +
1558 if metadata is None:
1559 return digests
1560
1561
1562 diff --git a/pym/portage/emaint/modules/binhost/binhost.py b/pym/portage/emaint/modules/binhost/binhost.py
1563 index 1138a8c..cf1213e 100644
1564 --- a/pym/portage/emaint/modules/binhost/binhost.py
1565 +++ b/pym/portage/emaint/modules/binhost/binhost.py
1566 @@ -7,6 +7,7 @@ import stat
1567 import portage
1568 from portage import os
1569 from portage.util import writemsg
1570 +from portage.versions import _pkg_str
1571
1572 import sys
1573
1574 @@ -38,7 +39,7 @@ class BinhostHandler(object):
1575 if size is None:
1576 return True
1577
1578 - mtime = data.get("MTIME")
1579 + mtime = data.get("_mtime_")
1580 if mtime is None:
1581 return True
1582
1583 @@ -90,6 +91,7 @@ class BinhostHandler(object):
1584 def fix(self, **kwargs):
1585 onProgress = kwargs.get('onProgress', None)
1586 bintree = self._bintree
1587 + _instance_key = bintree.dbapi._instance_key
1588 cpv_all = self._bintree.dbapi.cpv_all()
1589 cpv_all.sort()
1590 missing = []
1591 @@ -98,16 +100,21 @@ class BinhostHandler(object):
1592 onProgress(maxval, 0)
1593 pkgindex = self._pkgindex
1594 missing = []
1595 + stale = []
1596 metadata = {}
1597 for d in pkgindex.packages:
1598 - metadata[d["CPV"]] = d
1599 -
1600 - for i, cpv in enumerate(cpv_all):
1601 - d = metadata.get(cpv)
1602 + cpv = _pkg_str(d["CPV"], metadata=d,
1603 + settings=bintree.settings)
1604 + d["CPV"] = cpv
1605 + metadata[_instance_key(cpv)] = d
1606 + if not bintree.dbapi.cpv_exists(cpv):
1607 + stale.append(cpv)
1608 +
1609 + for cpv in cpv_all:
1610 + d = metadata.get(_instance_key(cpv))
1611 if not d or self._need_update(cpv, d):
1612 missing.append(cpv)
1613
1614 - stale = set(metadata).difference(cpv_all)
1615 if missing or stale:
1616 from portage import locks
1617 pkgindex_lock = locks.lockfile(
1618 @@ -121,31 +128,39 @@ class BinhostHandler(object):
1619 pkgindex = bintree._load_pkgindex()
1620 self._pkgindex = pkgindex
1621
1622 + # Recount stale/missing packages, with lock held.
1623 + missing = []
1624 + stale = []
1625 metadata = {}
1626 for d in pkgindex.packages:
1627 - metadata[d["CPV"]] = d
1628 -
1629 - # Recount missing packages, with lock held.
1630 - del missing[:]
1631 - for i, cpv in enumerate(cpv_all):
1632 - d = metadata.get(cpv)
1633 + cpv = _pkg_str(d["CPV"], metadata=d,
1634 + settings=bintree.settings)
1635 + d["CPV"] = cpv
1636 + metadata[_instance_key(cpv)] = d
1637 + if not bintree.dbapi.cpv_exists(cpv):
1638 + stale.append(cpv)
1639 +
1640 + for cpv in cpv_all:
1641 + d = metadata.get(_instance_key(cpv))
1642 if not d or self._need_update(cpv, d):
1643 missing.append(cpv)
1644
1645 maxval = len(missing)
1646 for i, cpv in enumerate(missing):
1647 + d = bintree._pkgindex_entry(cpv)
1648 try:
1649 - metadata[cpv] = bintree._pkgindex_entry(cpv)
1650 + bintree._eval_use_flags(cpv, d)
1651 except portage.exception.InvalidDependString:
1652 writemsg("!!! Invalid binary package: '%s'\n" % \
1653 bintree.getname(cpv), noiselevel=-1)
1654 + else:
1655 + metadata[_instance_key(cpv)] = d
1656
1657 if onProgress:
1658 onProgress(maxval, i+1)
1659
1660 - for cpv in set(metadata).difference(
1661 - self._bintree.dbapi.cpv_all()):
1662 - del metadata[cpv]
1663 + for cpv in stale:
1664 + del metadata[_instance_key(cpv)]
1665
1666 # We've updated the pkgindex, so set it to
1667 # repopulate when necessary.