Gentoo Archives: gentoo-commits

From: Brian Dolbec <brian.dolbec@×××××.com>
To: gentoo-commits@l.g.o
Subject: [gentoo-commits] proj/portage:public_api commit in: /, pym/portage/package/ebuild/, pym/portage/emaint/modules/logs/, ...
Date: Wed, 26 Mar 2014 13:59:24
Message-Id: 1395841962.276611668673967d8dd76caebb1e427dd10b3ff2.dol-sen@gentoo
1 commit: 276611668673967d8dd76caebb1e427dd10b3ff2
2 Author: Brian Dolbec <dolsen <AT> gentoo <DOT> org>
3 AuthorDate: Wed Mar 26 13:52:42 2014 +0000
4 Commit: Brian Dolbec <brian.dolbec <AT> gmail <DOT> com>
5 CommitDate: Wed Mar 26 13:52:42 2014 +0000
6 URL: http://git.overlays.gentoo.org/gitweb/?p=proj/portage.git;a=commit;h=27661166
7
8 Merge branch 'master' of git+ssh://git.overlays.gentoo.org/proj/portage into public_api
9
10 ---
11 .gitignore | 5 +-
12 .portage_not_installed | 0
13 DEVELOPING | 37 +-
14 Makefile | 33 +-
15 NEWS | 22 +-
16 README | 49 +
17 RELEASE-NOTES | 57 +-
18 bin/archive-conf | 56 +-
19 bin/banned-helper | 6 -
20 bin/bashrc-functions.sh | 63 +-
21 bin/binhost-snapshot | 31 +-
22 bin/check-implicit-pointer-usage.py | 2 +-
23 bin/chpathtool.py | 78 +-
24 bin/clean_locks | 24 +-
25 bin/dispatch-conf | 36 +-
26 bin/dohtml.py | 56 +-
27 bin/eapi.sh | 145 ++
28 bin/ebuild | 105 +-
29 bin/ebuild-helpers/4/dodoc | 1 -
30 bin/ebuild-helpers/4/dohard | 1 -
31 bin/ebuild-helpers/4/dosed | 1 -
32 bin/ebuild-helpers/4/prepalldocs | 1 -
33 bin/ebuild-helpers/{ => bsd}/sed | 14 +-
34 bin/ebuild-helpers/dobin | 13 +-
35 bin/ebuild-helpers/doconfd | 2 +-
36 bin/ebuild-helpers/dodir | 9 +-
37 bin/ebuild-helpers/dodoc | 20 +-
38 bin/ebuild-helpers/doenvd | 2 +-
39 bin/ebuild-helpers/doexe | 16 +-
40 bin/ebuild-helpers/dohard | 14 +-
41 bin/ebuild-helpers/doheader | 19 +
42 bin/ebuild-helpers/dohtml | 11 +-
43 bin/ebuild-helpers/doinfo | 15 +-
44 bin/ebuild-helpers/doinitd | 2 +-
45 bin/ebuild-helpers/doins | 47 +-
46 bin/ebuild-helpers/dolib | 13 +-
47 bin/ebuild-helpers/doman | 13 +-
48 bin/ebuild-helpers/domo | 11 +-
49 bin/ebuild-helpers/dosbin | 13 +-
50 bin/ebuild-helpers/dosed | 14 +-
51 bin/ebuild-helpers/dosym | 9 +-
52 bin/ebuild-helpers/ecompress | 14 +-
53 bin/ebuild-helpers/ecompressdir | 79 +-
54 bin/ebuild-helpers/emake | 2 +-
55 bin/ebuild-helpers/fowners | 12 +-
56 bin/ebuild-helpers/fperms | 9 +-
57 bin/ebuild-helpers/keepdir | 20 +
58 bin/ebuild-helpers/newbin | 23 +-
59 bin/ebuild-helpers/newconfd | 23 +-
60 bin/ebuild-helpers/newdoc | 23 +-
61 bin/ebuild-helpers/newenvd | 23 +-
62 bin/ebuild-helpers/newexe | 23 +-
63 bin/ebuild-helpers/newheader | 1 +
64 bin/ebuild-helpers/newinitd | 23 +-
65 bin/ebuild-helpers/newins | 67 +-
66 bin/ebuild-helpers/newlib.a | 23 +-
67 bin/ebuild-helpers/newlib.so | 23 +-
68 bin/ebuild-helpers/newman | 23 +-
69 bin/ebuild-helpers/newsbin | 23 +-
70 bin/ebuild-helpers/portageq | 6 +-
71 bin/ebuild-helpers/prepall | 7 +-
72 bin/ebuild-helpers/prepalldocs | 14 +-
73 bin/ebuild-helpers/prepallinfo | 7 +-
74 bin/ebuild-helpers/prepallman | 14 +-
75 bin/ebuild-helpers/prepallstrip | 9 +-
76 bin/ebuild-helpers/prepinfo | 11 +-
77 bin/ebuild-helpers/preplib | 31 -
78 bin/ebuild-helpers/prepman | 16 +-
79 bin/ebuild-helpers/prepstrip | 205 +-
80 bin/ebuild-helpers/unprivileged/chgrp | 1 +
81 bin/ebuild-helpers/unprivileged/chown | 41 +
82 bin/ebuild-helpers/xattr/install | 12 +
83 bin/ebuild-ipc | 6 +-
84 bin/ebuild-ipc.py | 177 +-
85 bin/ebuild.sh | 182 +-
86 bin/egencache | 490 ++--
87 bin/emaint | 23 +-
88 bin/emerge | 104 +-
89 bin/emerge-webrsync | 178 +-
90 bin/emirrordist | 13 +
91 bin/env-update | 16 +-
92 bin/etc-update | 19 +-
93 bin/filter-bash-environment.py | 20 +-
94 bin/fixpackages | 24 +-
95 bin/glsa-check | 198 +-
96 bin/helper-functions.sh | 71 +-
97 bin/install.py | 253 ++
98 bin/isolated-functions.sh | 168 +-
99 bin/lock-helper.py | 5 +-
100 bin/misc-functions.sh | 356 +--
101 bin/phase-functions.sh | 412 ++--
102 bin/phase-helpers.sh | 610 +++--
103 bin/portageq | 665 +++++-
104 bin/quickpkg | 37 +-
105 bin/regenworld | 16 +-
106 bin/repoman | 1687 ++++++++------
107 bin/save-ebuild-env.sh | 67 +-
108 bin/xattr-helper.py | 190 ++
109 bin/xpak-helper.py | 11 +-
110 cnf/dispatch-conf.conf | 1 +
111 cnf/{make.conf => make.conf.example} | 21 +-
112 ...onf.alpha.diff => make.conf.example.alpha.diff} | 18 +-
113 ...fbsd.diff => make.conf.example.amd64-fbsd.diff} | 18 +-
114 ...onf.amd64.diff => make.conf.example.amd64.diff} | 18 +-
115 ...ke.conf.arm.diff => make.conf.example.arm.diff} | 12 +-
116 ....conf.hppa.diff => make.conf.example.hppa.diff} | 28 +-
117 ....conf.ia64.diff => make.conf.example.ia64.diff} | 10 +-
118 ....conf.m68k.diff => make.conf.example.m68k.diff} | 14 +-
119 ....conf.mips.diff => make.conf.example.mips.diff} | 18 +-
120 ...ke.conf.ppc.diff => make.conf.example.ppc.diff} | 26 +-
121 ...onf.ppc64.diff => make.conf.example.ppc64.diff} | 24 +-
122 ....conf.s390.diff => make.conf.example.s390.diff} | 10 +-
123 ...make.conf.sh.diff => make.conf.example.sh.diff} | 17 +-
124 ...fbsd.diff => make.conf.example.sparc-fbsd.diff} | 12 +-
125 ...onf.sparc.diff => make.conf.example.sparc.diff} | 18 +-
126 ...6-fbsd.diff => make.conf.example.x86-fbsd.diff} | 18 +-
127 ...ke.conf.x86.diff => make.conf.example.x86.diff} | 18 +-
128 cnf/make.globals | 44 +-
129 cnf/metadata.dtd | 7 +-
130 cnf/repos.conf | 7 +
131 cnf/sets/portage.conf | 2 +-
132 doc/config/sets.docbook | 5 +-
133 doc/package/ebuild.docbook | 3 +
134 doc/package/ebuild/eapi/4-python.docbook | 44 +-
135 doc/package/ebuild/eapi/4-slot-abi.docbook | 12 +-
136 doc/package/ebuild/eapi/5-hdepend.docbook | 32 +
137 doc/package/ebuild/eapi/5-progress.docbook | 247 ++
138 doc/package/ebuild/eapi/5.docbook | 232 ++
139 doc/portage.docbook | 3 +
140 doc/qa.docbook | 2 +-
141 make.conf-repatch.sh | 40 -
142 make.conf.example-repatch.sh | 41 +
143 man/color.map.5 | 18 +-
144 man/dispatch-conf.1 | 77 +-
145 man/ebuild.1 | 36 +-
146 man/ebuild.5 | 1173 ++++++----
147 man/egencache.1 | 78 +-
148 man/emaint.1 | 15 +-
149 man/emerge.1 | 423 ++--
150 man/emirrordist.1 | 148 ++
151 man/env-update.1 | 21 +-
152 man/etc-update.1 | 46 +-
153 man/make.conf.5 | 326 ++-
154 man/portage.5 | 664 ++++--
155 man/quickpkg.1 | 34 +-
156 man/repoman.1 | 139 +-
157 man/ru/color.map.5 | 217 ++
158 man/ru/dispatch-conf.1 | 100 +
159 man/ru/ebuild.1 | 249 ++
160 man/ru/env-update.1 | 35 +
161 man/ru/etc-update.1 | 63 +
162 man/ru/fixpackages.1 | 22 +
163 man/xpak.5 | 5 +-
164 misc/emerge-delta-webrsync | 809 +++++++
165 mkrelease.sh | 91 +-
166 pym/_emerge/AbstractDepPriority.py | 5 +-
167 pym/_emerge/AbstractEbuildProcess.py | 58 +-
168 pym/_emerge/AbstractPollTask.py | 2 +-
169 pym/_emerge/AsynchronousLock.py | 66 +-
170 pym/_emerge/AsynchronousTask.py | 14 +
171 pym/_emerge/Binpkg.py | 7 +-
172 pym/_emerge/BinpkgExtractorAsync.py | 15 +-
173 pym/_emerge/BinpkgFetcher.py | 18 +-
174 pym/_emerge/BinpkgVerifier.py | 143 +-
175 pym/_emerge/BlockerCache.py | 10 +-
176 pym/_emerge/BlockerDB.py | 12 +-
177 pym/_emerge/CompositeTask.py | 4 +
178 pym/_emerge/DepPriority.py | 29 +-
179 pym/_emerge/DepPrioritySatisfiedRange.py | 24 +-
180 pym/_emerge/DependencyArg.py | 10 +-
181 pym/_emerge/EbuildBuild.py | 36 +-
182 pym/_emerge/EbuildBuildDir.py | 11 +-
183 pym/_emerge/EbuildExecuter.py | 13 +-
184 pym/_emerge/EbuildFetcher.py | 68 +-
185 pym/_emerge/EbuildMetadataPhase.py | 66 +-
186 pym/_emerge/EbuildPhase.py | 63 +-
187 pym/_emerge/EbuildProcess.py | 12 +-
188 pym/_emerge/EbuildSpawnProcess.py | 10 +-
189 pym/_emerge/FakeVartree.py | 123 +-
190 pym/_emerge/FifoIpcDaemon.py | 43 +-
191 pym/_emerge/JobStatusDisplay.py | 44 +-
192 pym/_emerge/MergeListItem.py | 18 +-
193 pym/_emerge/MetadataRegen.py | 93 +-
194 pym/_emerge/MiscFunctionsProcess.py | 7 +-
195 pym/_emerge/Package.py | 317 ++-
196 pym/_emerge/PackageMerge.py | 7 +-
197 pym/_emerge/PackageUninstall.py | 6 +-
198 pym/_emerge/PackageVirtualDbapi.py | 4 +-
199 pym/_emerge/PipeReader.py | 37 +-
200 pym/_emerge/PollScheduler.py | 129 +-
201 pym/_emerge/QueueScheduler.py | 105 -
202 pym/_emerge/RootConfig.py | 13 +-
203 pym/_emerge/Scheduler.py | 240 +-
204 pym/_emerge/SpawnProcess.py | 269 +--
205 pym/_emerge/SubProcess.py | 30 +-
206 pym/_emerge/Task.py | 9 +-
207 pym/_emerge/TaskScheduler.py | 26 -
208 pym/_emerge/UnmergeDepPriority.py | 27 +-
209 pym/_emerge/UseFlagDisplay.py | 10 +-
210 pym/_emerge/actions.py | 1704 ++++++++++----
211 pym/_emerge/chk_updated_cfg_files.py | 42 +
212 pym/_emerge/clear_caches.py | 4 +-
213 pym/_emerge/countdown.py | 18 +-
214 pym/_emerge/create_depgraph_params.py | 23 +-
215 pym/_emerge/create_world_atom.py | 25 +-
216 pym/_emerge/depgraph.py | 2451 ++++++++++++++------
217 pym/_emerge/emergelog.py | 12 +-
218 pym/_emerge/getloadavg.py | 5 +-
219 pym/_emerge/help.py | 10 +-
220 pym/_emerge/is_valid_package_atom.py | 7 +-
221 pym/_emerge/main.py | 1297 ++---------
222 pym/_emerge/post_emerge.py | 165 ++
223 pym/_emerge/resolver/backtracking.py | 38 +-
224 pym/_emerge/resolver/circular_dependency.py | 24 +-
225 pym/_emerge/resolver/output.py | 537 +++--
226 pym/_emerge/resolver/output_helpers.py | 95 +-
227 pym/_emerge/resolver/package_tracker.py | 301 +++
228 pym/_emerge/resolver/slot_collision.py | 230 +-
229 pym/_emerge/search.py | 4 +-
230 pym/_emerge/stdout_spinner.py | 13 +-
231 pym/_emerge/unmerge.py | 5 +-
232 pym/portage/__init__.py | 211 +-
233 pym/portage/_emirrordist/Config.py | 132 ++
234 pym/portage/_emirrordist/DeletionIterator.py | 83 +
235 pym/portage/_emirrordist/DeletionTask.py | 129 ++
236 pym/portage/_emirrordist/FetchIterator.py | 147 ++
237 pym/portage/_emirrordist/FetchTask.py | 629 +++++
238 pym/portage/_emirrordist/MirrorDistTask.py | 219 ++
239 pym/portage/_emirrordist/__init__.py | 2 +
240 pym/portage/_emirrordist/main.py | 463 ++++
241 pym/portage/_global_updates.py | 238 +-
242 pym/portage/_legacy_globals.py | 3 +-
243 pym/portage/_selinux.py | 55 +-
244 pym/portage/_sets/__init__.py | 30 +-
245 pym/portage/_sets/base.py | 7 +-
246 pym/portage/_sets/dbapi.py | 111 +-
247 pym/portage/_sets/files.py | 10 +-
248 pym/portage/_sets/libs.py | 17 +-
249 pym/portage/_sets/security.py | 4 +-
250 pym/portage/cache/ebuild_xattr.py | 2 +-
251 pym/portage/cache/flat_hash.py | 32 +-
252 pym/portage/cache/flat_list.py | 134 --
253 pym/portage/cache/fs_template.py | 6 +-
254 pym/portage/cache/mappings.py | 6 +-
255 pym/portage/cache/metadata.py | 6 +-
256 pym/portage/cache/sqlite.py | 41 +-
257 pym/portage/cache/template.py | 14 +-
258 pym/portage/checksum.py | 100 +-
259 pym/portage/const.py | 198 +-
260 pym/portage/cvstree.py | 274 +--
261 pym/portage/data.py | 76 +-
262 pym/portage/dbapi/_MergeProcess.py | 214 +-
263 pym/portage/dbapi/_SyncfsProcess.py | 53 +
264 pym/portage/dbapi/__init__.py | 110 +-
265 pym/portage/dbapi/_expand_new_virt.py | 12 +-
266 pym/portage/dbapi/_similar_name_search.py | 57 +
267 pym/portage/dbapi/bintree.py | 338 +--
268 pym/portage/dbapi/cpv_expand.py | 4 +-
269 pym/portage/dbapi/dep_expand.py | 6 +-
270 pym/portage/dbapi/porttree.py | 144 +-
271 pym/portage/dbapi/vartree.py | 604 +++--
272 pym/portage/dbapi/virtual.py | 7 +-
273 pym/portage/debug.py | 10 +-
274 pym/portage/dep/__init__.py | 309 ++-
275 .../dep/{_slot_abi.py => _slot_operator.py} | 53 +-
276 pym/portage/dep/dep_check.py | 113 +-
277 pym/portage/dispatch_conf.py | 326 +--
278 pym/portage/eapi.py | 64 +-
279 pym/portage/eclass_cache.py | 26 +-
280 pym/portage/elog/__init__.py | 3 +-
281 pym/portage/elog/mod_echo.py | 3 +-
282 pym/portage/elog/mod_save.py | 24 +-
283 pym/portage/elog/mod_save_summary.py | 40 +-
284 pym/portage/elog/mod_syslog.py | 13 +-
285 pym/portage/emaint/__init__.py | 4 +-
286 pym/portage/emaint/defaults.py | 11 +-
287 pym/portage/emaint/main.py | 157 +-
288 pym/portage/emaint/module.py | 8 +-
289 pym/portage/emaint/modules/__init__.py | 4 +-
290 pym/portage/emaint/modules/binhost/__init__.py | 8 +-
291 pym/portage/emaint/modules/binhost/binhost.py | 12 +-
292 pym/portage/emaint/modules/config/__init__.py | 8 +-
293 pym/portage/emaint/modules/config/config.py | 66 +-
294 pym/portage/emaint/modules/logs/__init__.py | 22 +-
295 pym/portage/emaint/modules/logs/logs.py | 17 +-
296 pym/portage/emaint/modules/move/__init__.py | 9 +-
297 pym/portage/emaint/modules/move/move.py | 42 +-
298 pym/portage/emaint/modules/resume/__init__.py | 6 +-
299 pym/portage/emaint/modules/world/__init__.py | 8 +-
300 pym/portage/env/loaders.py | 26 +-
301 pym/portage/exception.py | 54 +-
302 pym/portage/getbinpkg.py | 255 +-
303 pym/portage/glsa.py | 313 +--
304 pym/portage/localization.py | 17 +-
305 pym/portage/locks.py | 104 +-
306 pym/portage/mail.py | 7 +-
307 pym/portage/manifest.py | 114 +-
308 pym/portage/news.py | 10 +-
309 pym/portage/output.py | 43 +-
310 .../package/ebuild/_config/KeywordsManager.py | 56 +-
311 .../package/ebuild/_config/LocationsManager.py | 135 +-
312 pym/portage/package/ebuild/_config/MaskManager.py | 33 +-
313 pym/portage/package/ebuild/_config/UseManager.py | 290 ++-
314 .../package/ebuild/_config/special_env_vars.py | 56 +-
315 .../package/ebuild/_config/unpack_dependencies.py | 38 +
316 pym/portage/package/ebuild/_ipc/QueryCommand.py | 91 +-
317 .../{_eapi_invalid.py => _metadata_invalid.py} | 13 -
318 .../ebuild/_parallel_manifest/ManifestProcess.py | 43 +
319 .../ebuild/_parallel_manifest/ManifestScheduler.py | 93 +
320 .../ebuild/_parallel_manifest/ManifestTask.py | 186 ++
321 .../package/ebuild/_parallel_manifest/__init__.py | 2 +
322 pym/portage/package/ebuild/_spawn_nofetch.py | 23 +-
323 pym/portage/package/ebuild/config.py | 610 +++--
324 .../package/ebuild/deprecated_profile_check.py | 63 +-
325 pym/portage/package/ebuild/digestcheck.py | 15 +-
326 pym/portage/package/ebuild/digestgen.py | 107 +-
327 pym/portage/package/ebuild/doebuild.py | 546 +++--
328 pym/portage/package/ebuild/fetch.py | 84 +-
329 pym/portage/package/ebuild/getmaskingreason.py | 30 +-
330 pym/portage/package/ebuild/getmaskingstatus.py | 32 +-
331 pym/portage/package/ebuild/prepare_build_dirs.py | 8 +-
332 pym/portage/process.py | 333 ++-
333 pym/portage/proxy/lazyimport.py | 5 +-
334 pym/portage/proxy/objectproxy.py | 9 +-
335 pym/portage/repository/config.py | 552 ++++-
336 pym/portage/tests/__init__.py | 93 +-
337 pym/portage/tests/bin/setup_env.py | 54 +-
338 pym/portage/tests/dbapi/test_fakedbapi.py | 10 +-
339 pym/portage/tests/dbapi/test_portdb_cache.py | 183 ++
340 pym/portage/tests/dep/testAtom.py | 267 +--
341 pym/portage/tests/dep/testCheckRequiredUse.py | 192 +-
342 pym/portage/tests/dep/testStandalone.py | 26 +-
343 pym/portage/tests/dep/test_best_match_to_list.py | 44 +-
344 pym/portage/tests/dep/test_dep_getcpv.py | 16 +-
345 pym/portage/tests/dep/test_dep_getrepo.py | 6 +-
346 pym/portage/tests/dep/test_dep_getslot.py | 10 +-
347 pym/portage/tests/dep/test_dep_getusedeps.py | 12 +-
348 pym/portage/tests/dep/test_get_operator.py | 24 +-
349 .../tests/dep/test_get_required_use_flags.py | 4 +-
350 pym/portage/tests/dep/test_isjustname.py | 14 +-
351 pym/portage/tests/dep/test_isvalidatom.py | 13 +-
352 pym/portage/tests/dep/test_match_from_list.py | 136 +-
353 pym/portage/tests/dep/test_paren_reduce.py | 61 +-
354 pym/portage/tests/dep/test_use_reduce.py | 519 +++--
355 pym/portage/tests/ebuild/test_config.py | 27 +-
356 pym/portage/tests/ebuild/test_doebuild_fd_pipes.py | 137 ++
357 pym/portage/tests/ebuild/test_doebuild_spawn.py | 46 +-
358 pym/portage/tests/ebuild/test_ipc_daemon.py | 78 +-
359 pym/portage/tests/ebuild/test_spawn.py | 15 +-
360 pym/portage/tests/emerge/test_emerge_slot_abi.py | 30 +-
361 pym/portage/tests/emerge/test_simple.py | 116 +-
362 .../tests/env/config/test_PackageKeywordsFile.py | 8 +-
363 .../tests/env/config/test_PackageUseFile.py | 6 +-
364 .../tests/env/config/test_PortageModulesFile.py | 11 +-
365 pym/portage/tests/glsa/__init__.py | 2 +
366 pym/portage/tests/glsa/__test__ | 0
367 pym/portage/tests/glsa/test_security_set.py | 144 ++
368 .../test_lazy_import_portage_baseline.py | 4 +-
369 pym/portage/tests/lint/test_bash_syntax.py | 26 +-
370 pym/portage/tests/lint/test_compile_modules.py | 32 +-
371 pym/portage/tests/lint/test_import_modules.py | 2 +-
372 pym/portage/tests/locks/test_asynchronous_lock.py | 10 +-
373 pym/portage/tests/process/test_PopenProcess.py | 85 +
374 .../tests/process/test_PopenProcessBlockingIO.py | 63 +
375 pym/portage/tests/process/test_poll.py | 35 +-
376 pym/portage/tests/repoman/test_echangelog.py | 6 +-
377 pym/portage/tests/repoman/test_simple.py | 83 +-
378 pym/portage/tests/resolver/ResolverPlayground.py | 390 ++--
379 pym/portage/tests/resolver/test_autounmask.py | 304 +--
380 .../tests/resolver/test_autounmask_multilib_use.py | 85 +
381 pym/portage/tests/resolver/test_backtracking.py | 48 +-
382 pym/portage/tests/resolver/test_blocker.py | 48 +
383 pym/portage/tests/resolver/test_complete_graph.py | 4 +-
384 ...test_complete_if_new_subslot_without_revbump.py | 74 +
385 pym/portage/tests/resolver/test_depclean.py | 100 +-
386 pym/portage/tests/resolver/test_depclean_order.py | 57 +
387 .../resolver/test_depclean_slot_unavailable.py | 78 +
388 .../tests/resolver/test_features_test_use.py | 68 +
389 pym/portage/tests/resolver/test_merge_order.py | 35 +-
390 pym/portage/tests/resolver/test_multirepo.py | 88 +-
391 pym/portage/tests/resolver/test_onlydeps.py | 34 +
392 pym/portage/tests/resolver/test_or_choices.py | 134 ++
393 pym/portage/tests/resolver/test_package_tracker.py | 261 +++
394 .../test_regular_slot_change_without_revbump.py | 59 +
395 pym/portage/tests/resolver/test_slot_abi.py | 111 +-
396 .../tests/resolver/test_slot_abi_downgrade.py | 8 +-
397 .../resolver/test_slot_change_without_revbump.py | 69 +
398 pym/portage/tests/resolver/test_slot_collisions.py | 106 +-
399 .../resolver/test_slot_conflict_mask_update.py | 41 +
400 .../tests/resolver/test_slot_conflict_rebuild.py | 408 ++++
401 .../tests/resolver/test_slot_conflict_update.py | 98 +
402 .../resolver/test_slot_operator_autounmask.py | 120 +
403 .../resolver/test_slot_operator_unsatisfied.py | 70 +
404 .../tests/resolver/test_slot_operator_unsolved.py | 88 +
405 pym/portage/tests/resolver/test_targetroot.py | 85 +
406 .../tests/resolver/test_unpack_dependencies.py | 65 +
407 pym/portage/tests/resolver/test_use_aliases.py | 131 ++
408 pym/portage/tests/resolver/test_useflags.py | 78 +
409 pym/portage/tests/runTests | 19 +-
410 pym/portage/tests/unicode/test_string_format.py | 52 +-
411 pym/portage/tests/update/test_move_ent.py | 6 +-
412 pym/portage/tests/update/test_move_slot_ent.py | 6 +-
413 pym/portage/tests/update/test_update_dbentry.py | 101 +-
414 pym/portage/tests/util/test_getconfig.py | 31 +-
415 pym/portage/tests/util/test_stackDictList.py | 12 +-
416 pym/portage/tests/util/test_stackDicts.py | 41 +-
417 pym/portage/tests/util/test_stackLists.py | 18 +-
418 pym/portage/tests/util/test_uniqueArray.py | 14 +-
419 pym/portage/tests/util/test_varExpand.py | 80 +-
420 pym/portage/tests/util/test_whirlpool.py | 4 +-
421 pym/portage/tests/versions/test_cpv_sort_key.py | 7 +-
422 pym/portage/tests/versions/test_vercmp.py | 38 +-
423 pym/portage/update.py | 137 +-
424 pym/portage/util/ExtractKernelVersion.py | 6 +-
425 pym/portage/util/SlotObject.py | 1 -
426 pym/portage/util/_ShelveUnicodeWrapper.py | 45 +
427 pym/portage/util/__init__.py | 394 ++--
428 pym/portage/util/_argparse.py | 42 +
429 pym/portage/util/_async/AsyncScheduler.py | 102 +
430 pym/portage/util/_async/FileCopier.py | 17 +
431 pym/portage/util/_async/FileDigester.py | 73 +
432 pym/portage/util/_async/ForkProcess.py | 65 +
433 pym/portage/util/_async/PipeLogger.py | 163 ++
434 pym/portage/util/_async/PipeReaderBlockingIO.py | 91 +
435 pym/portage/util/_async/PopenProcess.py | 33 +
436 pym/portage/util/_async/SchedulerInterface.py | 79 +
437 pym/portage/util/_async/TaskScheduler.py | 20 +
438 pym/portage/util/_async/__init__.py | 2 +
439 pym/portage/util/_async/run_main_scheduler.py | 41 +
440 pym/portage/util/_ctypes.py | 47 +
441 pym/portage/util/_desktop_entry.py | 85 +-
442 pym/portage/util/_dyn_libs/LinkageMapELF.py | 24 +-
443 .../util/_dyn_libs/PreservedLibsRegistry.py | 3 +-
444 .../util/_dyn_libs/display_preserved_libs.py | 98 +
445 pym/portage/util/_eventloop/EventLoop.py | 364 ++-
446 pym/portage/util/_eventloop/PollSelectAdapter.py | 2 +-
447 pym/portage/util/_get_vm_info.py | 80 +
448 pym/portage/util/_info_files.py | 138 ++
449 pym/portage/util/_path.py | 27 +
450 pym/portage/util/_urlopen.py | 102 +-
451 pym/portage/util/digraph.py | 46 +-
452 pym/portage/util/env_update.py | 78 +-
453 pym/portage/util/lafilefixer.py | 10 +-
454 pym/portage/util/listdir.py | 128 +-
455 pym/portage/util/movefile.py | 220 +-
456 pym/portage/util/whirlpool.py | 2 +
457 pym/portage/util/writeable_check.py | 79 +
458 pym/portage/versions.py | 85 +-
459 pym/portage/xml/metadata.py | 15 +-
460 pym/portage/xpak.py | 8 +-
461 pym/repoman/checks.py | 235 +-
462 pym/repoman/errors.py | 6 +-
463 pym/repoman/herdbase.py | 11 +-
464 pym/repoman/utilities.py | 146 +-
465 runtests.sh | 47 +-
466 tabcheck.py | 2 +-
467 456 files changed, 30379 insertions(+), 13080 deletions(-)
468
469 diff --git a/.gitignore b/.gitignore
470 index 808cc0c..074bb86 100644
471 --- a/.gitignore
472 +++ b/.gitignore
473 @@ -1,3 +1,4 @@
474 *.py[co]
475 -/pym/portage/public_api.bz2
476 -/testpath
477 +__pycache__/
478 +*.class
479 +/tags
480
481 diff --git a/.portage_not_installed b/.portage_not_installed
482 new file mode 100644
483 index 0000000..e69de29
484
485 diff --git a/DEVELOPING b/DEVELOPING
486 index ebe5d56..40b4ca2 100644
487 --- a/DEVELOPING
488 +++ b/DEVELOPING
489 @@ -24,7 +24,8 @@ Tabs
490 ----
491
492 The current code uses tabs, not spaces. Keep whitespace usage consistent
493 -between files. New files should use tabs.
494 +between files. New files should use tabs. Spaces are sometimes used for
495 +indentation in Python code, so the tab stop should be set to 4.
496
497 Line-Wrapping
498 -------------
499 @@ -51,13 +52,13 @@ wrapping is always clear (but you cannot convert spaces as easily as tabwidth).
500 Comparisons
501 -----------
502
503 -if foo == None
504 +if foo != None
505
506 should be replaced with:
507
508 if foo is not None:
509
510 -Is not does a reference comparison (address1 = address2 basically) and
511 +Is not does a reference comparison (address1 = address2 basically) and
512 the == forces a by value compare (with __eq__())
513
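As a quick illustration of the guideline above: a class that defines __eq__() can compare equal to None by value, yet it still fails the identity test. (The class and variable names below are only examples, not code from this tree.)

    class AlwaysEqual(object):
        def __eq__(self, other):
            return True               # consulted by ==

    foo = AlwaysEqual()
    print(foo == None)                # True  -- value comparison via __eq__()
    print(foo is None)                # False -- identity comparison
    print(foo is not None)            # True  -- the preferred spelling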
514 Dict Lookups
515 @@ -139,7 +140,7 @@ NO:
516
517 Try not to import large numbers of things into the namespace of a module.
518 I realize this is done all over the place in current code but it really makes it
519 -a pain to do code reflection when the namespace is cluttered with identifiers
520 +a pain to do code reflection when the namespace is cluttered with identifiers
521 from other modules.
522
523 YES:
524 @@ -159,13 +160,29 @@ just COLOR. However it means during introspection of the current namespace
525 The NO example just imports a set of functions from the output module. It is
526 somewhat annoying because the import line needs to be modified when functions
527 are needed and often unused functions are left in the import line until someone
528 -comes along with a linter to clean up (does not happen often). The color is a
529 -bit clearer as
530 +comes along with a linter to clean up (does not happen often).
531
532 - print red('blar')
533
534 -is shorter than:
535 +Releases
536 +--------
537
538 - print output.red('blar')
539 +First update the NEWS and RELEASE-NOTES files and commit.
540
541 -Rationale: python -c 'import portage; dir(portage)' (circa 02/2008)
542 +Second create a git tag for this release:
543 + git tag v2.2.8
544 +
545 +Then create the tarball and run the tests:
546 + ./mkrelease.sh --changelog-rev v2.2.7 --tag --runtests 2.2.8
547 +Make sure you have all supported python versions installed first
548 +(see PYTHON_SUPPORTED_VERSIONS in runtests.sh).
549 +
550 +Version bump the ebuild and verify it can re-install itself:
551 + emerge portage
552 + emerge portage
553 +
554 +Publish the results (no going back now):
555 + - Push the new git tag
556 + - Upload the tarball
557 + - Commit the new ebuild version
558 +
559 +Close the bugs blocking the tracker bug for this release.
560
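A short sketch of the import guideline above, assuming the portage tree is on the Python path (red and green are existing helpers in portage.output; everything else is illustrative):

    # YES: only the module name enters this namespace, so dir() stays readable.
    from portage import output
    print(output.red('blar'))

    # NO: every helper has to be added to the import line as it is needed.
    from portage.output import red, green
    print(red('blar'))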
561 diff --git a/Makefile b/Makefile
562 index f074dcf..92ea195 100644
563 --- a/Makefile
564 +++ b/Makefile
565 @@ -27,13 +27,14 @@ INSMODE = 0644
566 EXEMODE = 0755
567 DIRMODE = 0755
568 SYSCONFDIR_FILES = etc-update.conf dispatch-conf.conf
569 -PORTAGE_CONFDIR_FILES = make.globals
570 +PORTAGE_CONFDIR_FILES = make.conf.example make.globals repos.conf
571 LOGROTATE_FILES = elog-save-summary
572 BINDIR_FILES = ebuild egencache emerge emerge-webrsync \
573 - portageq quickpkg repoman
574 + emirrordist portageq quickpkg repoman
575 SBINDIR_FILES = archive-conf dispatch-conf emaint \
576 env-update etc-update fixpackages regenworld
577 DOCS = ChangeLog NEWS RELEASE-NOTES
578 +LINGUAS ?= $(shell cd "$(srcdir)/man" && find -mindepth 1 -type d)
579
580 ifdef PYTHONPATH
581 PYTHONPATH := $(srcdir)/pym:$(PYTHONPATH)
582 @@ -50,8 +51,6 @@ docbook:
583
584 epydoc:
585 set -e; \
586 - # workaround for bug 282760 \
587 - touch "$(srcdir)/pym/pysqlite2.py"; \
588 env PYTHONPATH="$(PYTHONPATH)" epydoc \
589 -o "$(WORKDIR)/epydoc" \
590 --name $(PN) \
591 @@ -63,9 +62,7 @@ epydoc:
592 -e s:^pym/:: \
593 -e s:/:.:g \
594 | sort); \
595 - rm -f "$(srcdir)/pym/pysqlite2.py"* \
596 - "$(WORKDIR)/epydoc/pysqlite2-"* \
597 - "$(WORKDIR)/epydoc/api-objects.txt"; \
598 + rm -f "$(WORKDIR)/epydoc/api-objects.txt"; \
599
600 test:
601 set -e; \
602 @@ -81,9 +78,6 @@ install:
603 cd "$(srcdir)/cnf"; \
604 install -m$(INSMODE) $(PORTAGE_CONFDIR_FILES) \
605 "$(DESTDIR)$(portage_confdir)"; \
606 - install -m$(INSMODE) "$(srcdir)/cnf/make.conf" \
607 - "$(DESTDIR)$(portage_confdir)/make.conf.example"; \
608 - \
609 install -d -m$(DIRMODE) "$(DESTDIR)$(portage_setsdir)"; \
610 cd "$(S)/cnf/sets"; \
611 install -m$(INSMODE) *.conf "$(DESTDIR)$(portage_setsdir)"; \
612 @@ -184,10 +178,18 @@ install:
613 cd "$(srcdir)"; \
614 install -m $(INSMODE) $(DOCS) "$(DESTDIR)$(docdir)"; \
615 \
616 - for x in 1 5 ; do \
617 - install -d -m$(DIRMODE) "$(DESTDIR)$(mandir)/man$$x"; \
618 - cd "$(srcdir)/man"; \
619 - install -m$(INSMODE) *.$$x "$(DESTDIR)$(mandir)/man$$x"; \
620 + for x in "" $(LINGUAS); do \
621 + for y in 1 5 ; do \
622 + if [ -d "$(srcdir)/man/$$x" ]; then \
623 + cd "$(srcdir)/man/$$x"; \
624 + files=$$(echo *.$$y); \
625 + if [ -z "$$files" ] || [ "$$files" = "*.$$y" ]; then \
626 + continue; \
627 + fi; \
628 + install -d -m$(DIRMODE) "$(DESTDIR)$(mandir)/$$x/man$$y"; \
629 + install -m$(INSMODE) *.$$y "$(DESTDIR)$(mandir)/$$x/man$$y"; \
630 + fi; \
631 + done; \
632 done; \
633 \
634 if [ -f "$(srcdir)/doc/portage.html" ] ; then \
635 @@ -208,7 +210,6 @@ install:
636 clean:
637 set -e; \
638 $(MAKE) -C "$(srcdir)/doc" clean; \
639 - rm -rf "$(srcdir)/pym/pysqlite2.py"* \
640 - "$(WORKDIR)/epydoc"; \
641 + rm -rf "$(WORKDIR)/epydoc"; \
642
643 .PHONY: all clean docbook epydoc install test
644
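The new man-page install loop above only descends into language directories that actually contain pages for a given section. Roughly the same selection logic, sketched in Python with an illustrative srcdir and LINGUAS value:

    import glob
    import os

    srcdir = "."                      # stands in for $(srcdir)
    linguas = ["", "ru"]              # "" is the untranslated man/ directory itself

    for lang in linguas:
        for section in ("1", "5"):
            mandir = os.path.join(srcdir, "man", lang)
            pages = glob.glob(os.path.join(mandir, "*." + section))
            if not pages:
                continue              # nothing to install for this language/section
            destdir = os.path.join("/usr/share/man", lang, "man" + section)
            print("install -m0644 %s -> %s" % (" ".join(pages), destdir))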
645 diff --git a/NEWS b/NEWS
646 index 9a2f24f..43d1797 100644
647 --- a/NEWS
648 +++ b/NEWS
649 @@ -1,22 +1,34 @@
650 News (mainly features/major bug fixes)
651
652 +portage-2.2.1
653 +-------------
654 +
655 +* Add cgroups, ipc-sandbox, and network-sandbox FEATURES.
656 +
657 portage-2.2
658 -------------
659
660 -* Add link level dependency awareness to emerge --depclean and --prune actions
661 - in order to protect against uninstallation of required libraries.
662 -* Add support for generic package sets (also see RELEASE-NOTES)
663 +* Add extended set configuration via /etc/portage/sets.conf. See
664 + /usr/share/portage/config/sets/portage.conf for examples.
665 +
666 +portage-2.1.11.20
667 +-------------
668 +* Add support for EAPI 5. Refer to the PMS EAPI Cheat Sheet, portage's html
669 + docs installed with USE=doc, or `man 5 ebuild` for more info about EAPI 5.
670 * Add support for FEATURES=preserve-libs which preserves libraries when the
671 sonames change during upgrade or downgrade, and the @preserved-rebuild
672 package set which rebuilds consumers of preserved libraries.
673 +* Add link level dependency awareness to emerge --depclean and --prune actions
674 + in order to protect against uninstallation of required libraries. Refer to
675 + the --depclean-lib-check option in the emerge(1) man page.
676
677 portage-2.1.11
678 -------------
679
680 * Add support for experimental EAPI "4-slot-abi". Refer to the corresponding
681 html documentation that is installed with USE=doc, and also to the emerge(1)
682 - man page for information about the related --ignore-built-slot-abi-deps and
683 - --rebuild-if-new-slot-abi options.
684 + man page for information about the related --ignore-built-slot-operator-deps and
685 + --rebuild-if-new-slot options.
686
687 portage-2.1.10
688 -------------
689
690 diff --git a/README b/README
691 new file mode 100644
692 index 0000000..5558dde
693 --- /dev/null
694 +++ b/README
695 @@ -0,0 +1,49 @@
696 +About Portage
697 +=============
698 +
699 +Portage is a package management system based on ports collections. The
700 +Package Manager Specification Project (PMS) standardises and documents
701 +the behaviour of Portage so that the Portage tree can be used by other
702 +package managers.
703 +
704 +
705 +Dependencies
706 +============
707 +
708 +Python and Bash should be the only hard dependencies. Python 2.6 is the
709 +minimum supported version.
710 +
711 +
712 +Licensing and Legalese
713 +=======================
714 +
715 +Portage is free software; you can redistribute it and/or
716 +modify it under the terms of the GNU General Public License
717 +version 2 as published by the Free Software Foundation.
718 +
719 +Portage is distributed in the hope that it will be useful,
720 +but WITHOUT ANY WARRANTY; without even the implied warranty of
721 +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
722 +GNU General Public License for more details.
723 +
724 +You should have received a copy of the GNU General Public License
725 +along with Portage; if not, write to the Free Software
726 +Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
727 +02110-1301, USA.
728 +
729 +
730 +More information
731 +================
732 +
733 +-DEVELOPING contains some code guidelines.
734 +-LICENSE contains the GNU General Public License version 2.
735 +-NEWS contains new features/major bug fixes for each version.
736 +-RELEASE-NOTES contains mainly upgrade information for each version.
737 +-TEST-NOTES contains Portage unit test information.
738 +
739 +
740 +Links
741 +=====
742 +Gentoo project page: <http://www.gentoo.org/proj/en/portage/>
743 +PMS: <https://dev.gentoo.org/~ulm/pms/head/pms.html>
744 +PMS git repo: <http://git.overlays.gentoo.org/gitweb/?p=proj/pms.git>
745
746 diff --git a/RELEASE-NOTES b/RELEASE-NOTES
747 index 93e67ed..72aa219 100644
748 --- a/RELEASE-NOTES
749 +++ b/RELEASE-NOTES
750 @@ -1,30 +1,53 @@
751 Release Notes; upgrade information mainly.
752 Features/major bugfixes are listed in NEWS
753
754 +portage-2.2.
755 +==================================
756 +* Bug Fixes:
757 + - Bug #450372: Russian translation update.
758 + - Bug #497238: Fix unnecessary rebuild caused by equal versions
759 + in different repositories.
760 + - Bug #501360 Only use Atoms with package_tracker.match
761 + - For a complete list of bug fixes and changes, see the ChangeLog installed at
762 + /usr/share/doc/portage-2.2.9/ChangeLog.bz2
763 +
764 +portage-2.2.8
765 +==================================
766 +* Bug Fixes:
767 + - Bug 488972 - sys-apps/portage-2.2.7:
768 + "egencache --update --rsync" does not create metadata/timestamp.chk
769 + - For a complete list of bug fixes and changes, see the ChangeLog installed at
770 + /usr/share/doc/portage-2.2.8/ChangeLog.bz2
771 +
772 portage-2.2
773 ==================================
774
775 * Portage now warns if an ebuild repository does not have a name, as several
776 new features in 2.2 make use of or require named repositories. The repository
777 name is stored in profiles/repo_name in each repository.
778 -* Package set support: There are several important notes regarding package
779 - sets:
780 - - they may currently only include simple and versioned atoms or other sets,
781 - use conditionals or any-of constructs aren't possible yet
782 - - sets can be referenced either in other file-based sets or as argument to
783 - emerge, but not in ebuilds, config files or other tools at this time.
784 - - packages won't be unmerged if they are referenced by an installed package
785 - set (with the exception of the world set, and installed being determined
786 - by the world_sets file).
787 -* The "selected" package set, which includes packages listed in
788 - /var/lib/portage/world, has been extended to include nested sets that may
789 - be listed /var/lib/portage/world_sets.
790 +
791 +portage-2.1.13
792 +==================================
793 +
794 +* FEATURES=userpriv and usersandbox are enabled by default.
795 +* FEATURES=usersync is enabled by default.
796 +* New sync-cvs-repo, sync-type and sync-uri attributes in repos.conf replace
797 + SYNC variable.
798 +
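The sync-type and sync-uri attributes mentioned above are set per repository in repos.conf; a minimal illustrative entry (repository name, location and URI are placeholders, not values taken from this commit) looks like:

    [gentoo]
    location = /usr/portage
    sync-type = rsync
    sync-uri = rsync://rsync.gentoo.org/gentoo-portage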
799 +portage-2.1.12
800 +==================================
801 +
802 +* FEATURES=preserve-libs is enabled by default.
803 +* ACCEPT_RESTRICT variable may be used to mask packages based on RESTRICT.
804
805 portage-2.1.11
806 ==================================
807 * User-defined package sets can now be created by placing files in the
808 /etc/portage/sets/ directory. Refer to the emerge(1) and portage(5) man
809 pages for more information.
810 +* The "selected" package set, which includes packages listed in
811 + /var/lib/portage/world, has been extended to include nested sets that may
812 + be listed /var/lib/portage/world_sets.
813
814 portage-2.1.10.61
815 ==================================
816 @@ -119,7 +142,7 @@ portage-2.1.6
817 * The python namespace for portage has been sanitized, all portage related code
818 is now contained within the portage namespace. External scripts should be
819 updated accordingly, though links exist for backward compatibility.
820 -* -* support in package.keywords was changed as it was inconsistent with
821 +* -* support in package.keywords was changed as it was inconsistent with
822 ACCEPT_KEYWORDS behavior (also see
823 http://dev.gentoo.org/~genone/docs/KEYWORDS.stupid).
824 Previously having -* in package.keywords matched packages with KEYWORDS="-*",
825 @@ -225,7 +248,7 @@ portage-2.1.1
826
827 * emerge --search doesn't use regular expressions now anymore by default, so
828 emerge --search dvd+rw-tools now works as expected. Regular expressions can be enabled
829 - by prefixing the search string with %.
830 + by prefixing the search string with %.
831 * emerge --depclean algorithm is much safer than the old one.
832 * emerge --newuse detects changes in IUSE that previously went undetected.
833
834 @@ -238,9 +261,9 @@ portage-2.1
835 by the name of --alphabetical. Adding the option to EMERGE_DEFAULT_OPTS
836 in make.conf will restore the old behaviour permanently.
837 * The deprecated --inject has been removed, use /etc/portage/profile/package.provided
838 -* The deprecated --upgradeonly has been removed, use /etc/portage/package.*
839 +* The deprecated --upgradeonly has been removed, use /etc/portage/package.*
840 instead.
841 -* 'emerge sync' has been deprecated, use 'emerge --sync' instead (same
842 +* 'emerge sync' has been deprecated, use 'emerge --sync' instead (same
843 for other actions)
844 * Tools that call emerge should override the EMERGE_DEFAULT_OPTS environment
845 variable or use the emerge --ignore-default-opts option.
846 @@ -249,6 +272,6 @@ portage-2.1
847 * autouse (use.defaults) has been deprecated by specifying USE_ORDER in make.defaults
848 Users may still turn this back on by specifying USE_ORDER="env:pkg:conf:auto:defaults"
849 in make.conf. Interested in figuring out what use flags were turned off? Check out
850 - /usr/portage/profiles/base/use.defaults and other use.defaults files that correspond
851 + /usr/portage/profiles/base/use.defaults and other use.defaults files that correspond
852 to your profile.
853
854
855 diff --git a/bin/archive-conf b/bin/archive-conf
856 index 7978668..f73ca42 100755
857 --- a/bin/archive-conf
858 +++ b/bin/archive-conf
859 @@ -1,5 +1,5 @@
860 -#!/usr/bin/python
861 -# Copyright 1999-2006 Gentoo Foundation
862 +#!/usr/bin/python -b
863 +# Copyright 1999-2014 Gentoo Foundation
864 # Distributed under the terms of the GNU General Public License v2
865
866 #
867 @@ -12,43 +12,21 @@
868 from __future__ import print_function
869
870 import sys
871 -try:
872 - import portage
873 -except ImportError:
874 - from os import path as osp
875 - sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym"))
876 - import portage
877
878 +from os import path as osp
879 +pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")
880 +sys.path.insert(0, pym_path)
881 +import portage
882 +portage._internal_caller = True
883 +
884 +import portage.dispatch_conf
885 from portage import os
886 -from portage import dispatch_conf
887 +from portage.checksum import perform_md5
888
889 FIND_EXTANT_CONTENTS = "find %s -name CONTENTS"
890
891 MANDATORY_OPTS = [ 'archive-dir' ]
892
893 -try:
894 - import fchksum
895 - def perform_checksum(filename): return fchksum.fmd5t(filename)
896 -except ImportError:
897 - import md5
898 - def md5_to_hex(md5sum):
899 - hexform = ""
900 - for ix in range(len(md5sum)):
901 - hexform = hexform + "%02x" % ord(md5sum[ix])
902 - return hexform.lower()
903 -
904 - def perform_checksum(filename):
905 - f = open(filename, 'rb')
906 - blocksize=32768
907 - data = f.read(blocksize)
908 - size = 0
909 - sum = md5.new()
910 - while data:
911 - sum.update(data)
912 - size = size + len(data)
913 - data = f.read(blocksize)
914 - return (md5_to_hex(sum.digest()),size)
915 -
916 def archive_conf():
917 args = []
918 content_files = []
919 @@ -63,19 +41,19 @@ def archive_conf():
920 md5_match_hash[conf] = ''
921
922 # Find all the CONTENT files in VDB_PATH.
923 - content_files += os.popen(FIND_EXTANT_CONTENTS %
924 - (os.path.join(portage.settings['EROOT'], portage.VDB_PATH))).readlines()
925 + with os.popen(FIND_EXTANT_CONTENTS % (os.path.join(portage.settings['EROOT'], portage.VDB_PATH))) as f:
926 + content_files += f.readlines()
927
928 # Search for the saved md5 checksum of all the specified config files
929 # and see if the current file is unmodified or not.
930 try:
931 todo_cnt = len(args)
932 - for file in content_files:
933 - file = file.rstrip()
934 + for filename in content_files:
935 + filename = filename.rstrip()
936 try:
937 - contents = open(file, "r")
938 + contents = open(filename, "r")
939 except IOError as e:
940 - print('archive-conf: Unable to open %s: %s' % (file, e), file=sys.stderr)
941 + print('archive-conf: Unable to open %s: %s' % (filename, e), file=sys.stderr)
942 sys.exit(1)
943 lines = contents.readlines()
944 for line in lines:
945 @@ -84,7 +62,7 @@ def archive_conf():
946 for conf in args:
947 if items[1] == conf:
948 stored = items[2].lower()
949 - real = perform_checksum(conf)[0].lower()
950 + real = perform_md5(conf).lower()
951 if stored == real:
952 md5_match_hash[conf] = conf
953 todo_cnt -= 1
954
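The fallback checksum code removed above is superseded by portage.checksum.perform_md5(); the chunked-hashing idea it implemented can be written with the standard library alone, as in this sketch (function name and block size are illustrative):

    import hashlib

    def md5_hexdigest(filename, blocksize=32768):
        """Hash the file in fixed-size blocks so large files stay cheap on memory."""
        md5 = hashlib.md5()
        with open(filename, 'rb') as f:
            block = f.read(blocksize)
            while block:
                md5.update(block)
                block = f.read(blocksize)
        return md5.hexdigest()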
955 diff --git a/bin/banned-helper b/bin/banned-helper
956 deleted file mode 100755
957 index 17ea991..0000000
958 --- a/bin/banned-helper
959 +++ /dev/null
960 @@ -1,6 +0,0 @@
961 -#!/bin/bash
962 -# Copyright 2009 Gentoo Foundation
963 -# Distributed under the terms of the GNU General Public License v2
964 -
965 -die "'${0##*/}' has been banned for EAPI '$EAPI'"
966 -exit 1
967
968 diff --git a/bin/bashrc-functions.sh b/bin/bashrc-functions.sh
969 index 4da5585..503b172 100644
970 --- a/bin/bashrc-functions.sh
971 +++ b/bin/bashrc-functions.sh
972 @@ -1,9 +1,9 @@
973 #!/bin/bash
974 -# Copyright 1999-2011 Gentoo Foundation
975 +# Copyright 1999-2013 Gentoo Foundation
976 # Distributed under the terms of the GNU General Public License v2
977
978 portageq() {
979 - PYTHONPATH=${PORTAGE_PYM_PATH}${PYTHONPATH:+:}${PYTHONPATH} \
980 + PYTHONPATH=${PORTAGE_PYTHONPATH:-${PORTAGE_PYM_PATH}}\
981 "${PORTAGE_PYTHON:-/usr/bin/python}" "${PORTAGE_BIN_PATH}/portageq" "$@"
982 }
983
984 @@ -23,71 +23,16 @@ register_success_hook() {
985 done
986 }
987
988 -strip_duplicate_slashes() {
989 +__strip_duplicate_slashes() {
990 if [[ -n $1 ]] ; then
991 local removed=$1
992 while [[ ${removed} == *//* ]] ; do
993 removed=${removed//\/\///}
994 done
995 - echo ${removed}
996 + echo "${removed}"
997 fi
998 }
999
1000 -# this is a function for removing any directory matching a passed in pattern from
1001 -# PATH
1002 -remove_path_entry() {
1003 - save_IFS
1004 - IFS=":"
1005 - stripped_path="${PATH}"
1006 - while [ -n "$1" ]; do
1007 - cur_path=""
1008 - for p in ${stripped_path}; do
1009 - if [ "${p/${1}}" == "${p}" ]; then
1010 - cur_path="${cur_path}:${p}"
1011 - fi
1012 - done
1013 - stripped_path="${cur_path#:*}"
1014 - shift
1015 - done
1016 - restore_IFS
1017 - PATH="${stripped_path}"
1018 -}
1019 -
1020 -# Set given variables unless these variable have been already set (e.g. during emerge
1021 -# invocation) to values different than values set in make.conf.
1022 -set_unless_changed() {
1023 - if [[ $# -lt 1 ]]; then
1024 - die "${FUNCNAME}() requires at least 1 argument: VARIABLE=VALUE"
1025 - fi
1026 -
1027 - local argument value variable
1028 - for argument in "$@"; do
1029 - if [[ ${argument} != *=* ]]; then
1030 - die "${FUNCNAME}(): Argument '${argument}' has incorrect syntax"
1031 - fi
1032 - variable="${argument%%=*}"
1033 - value="${argument#*=}"
1034 - if eval "[[ \${${variable}} == \$(env -u ${variable} portageq envvar ${variable}) ]]"; then
1035 - eval "${variable}=\"\${value}\""
1036 - fi
1037 - done
1038 -}
1039 -
1040 -# Unset given variables unless these variable have been set (e.g. during emerge
1041 -# invocation) to values different than values set in make.conf.
1042 -unset_unless_changed() {
1043 - if [[ $# -lt 1 ]]; then
1044 - die "${FUNCNAME}() requires at least 1 argument: VARIABLE"
1045 - fi
1046 -
1047 - local variable
1048 - for variable in "$@"; do
1049 - if eval "[[ \${${variable}} == \$(env -u ${variable} portageq envvar ${variable}) ]]"; then
1050 - unset ${variable}
1051 - fi
1052 - done
1053 -}
1054 -
1055 KV_major() {
1056 [[ -z $1 ]] && return 1
1057
1058
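For reference, the renamed __strip_duplicate_slashes() helper above just collapses repeated separators; the same loop in Python (the function name here is made up for the example):

    def strip_duplicate_slashes(path):
        # Keep collapsing "//" until none remain, mirroring the bash while-loop.
        while '//' in path:
            path = path.replace('//', '/')
        return path

    print(strip_duplicate_slashes('/usr//share///doc'))    # -> /usr/share/doc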
1059 diff --git a/bin/binhost-snapshot b/bin/binhost-snapshot
1060 index 9d2697d..c2204f0 100755
1061 --- a/bin/binhost-snapshot
1062 +++ b/bin/binhost-snapshot
1063 @@ -1,9 +1,8 @@
1064 -#!/usr/bin/python
1065 -# Copyright 2010-2011 Gentoo Foundation
1066 +#!/usr/bin/python -b
1067 +# Copyright 2010-2014 Gentoo Foundation
1068 # Distributed under the terms of the GNU General Public License v2
1069
1070 import io
1071 -import optparse
1072 import os
1073 import sys
1074 import textwrap
1075 @@ -13,13 +12,12 @@ try:
1076 except ImportError:
1077 from urlparse import urlparse
1078
1079 -try:
1080 - import portage
1081 -except ImportError:
1082 - from os import path as osp
1083 - sys.path.insert(0, osp.join(osp.dirname(osp.dirname(
1084 - osp.realpath(__file__))), "pym"))
1085 - import portage
1086 +from os import path as osp
1087 +pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")
1088 +sys.path.insert(0, pym_path)
1089 +import portage
1090 +portage._internal_caller = True
1091 +from portage.util._argparse import ArgumentParser
1092
1093 def parse_args(argv):
1094 prog_name = os.path.basename(argv[0])
1095 @@ -33,7 +31,7 @@ def parse_args(argv):
1096
1097 usage += "\n\n"
1098 for line in textwrap.wrap(prog_desc, 70):
1099 - usage += line + "\n"
1100 + usage += line + "\n"
1101
1102 usage += "\n"
1103 usage += "Required Arguments:\n\n"
1104 @@ -47,11 +45,12 @@ def parse_args(argv):
1105 "write Packages index with\n" + \
1106 " snapshot_uri"
1107
1108 - parser = optparse.OptionParser(usage=usage)
1109 - parser.add_option('--hardlinks', help='create hardlinks (y or n, default is y)',
1110 - choices=('y', 'n'))
1111 - parser.set_defaults(hardlinks='y')
1112 - options, args = parser.parse_args(argv[1:])
1113 + parser = ArgumentParser(usage=usage)
1114 + parser.add_argument('--hardlinks',
1115 + help='create hardlinks (y or n, default is y)',
1116 + choices=('y', 'n'),
1117 + default='y')
1118 + options, args = parser.parse_known_args(argv[1:])
1119
1120 if len(args) != 4:
1121 parser.error("Required 4 arguments, got %d" % (len(args),))
1122
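binhost-snapshot now builds its parser through portage.util._argparse; the underlying optparse-to-argparse translation can be reproduced with the standard library alone, as in this sketch (program name and sample arguments are illustrative):

    import argparse

    # optparse needed add_option() plus a separate set_defaults() call;
    # with argparse one add_argument() call carries the default as well.
    parser = argparse.ArgumentParser(prog='binhost-snapshot')
    parser.add_argument('--hardlinks',
        help='create hardlinks (y or n, default is y)',
        choices=('y', 'n'),
        default='y')
    options, args = parser.parse_known_args(['--hardlinks', 'n', 'src', 'dst'])
    print(options.hardlinks)    # -> n
    print(args)                 # -> ['src', 'dst']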
1123 diff --git a/bin/check-implicit-pointer-usage.py b/bin/check-implicit-pointer-usage.py
1124 index 8822c45..242436c 100755
1125 --- a/bin/check-implicit-pointer-usage.py
1126 +++ b/bin/check-implicit-pointer-usage.py
1127 @@ -1,4 +1,4 @@
1128 -#!/usr/bin/python
1129 +#!/usr/bin/python -b
1130
1131 # Ripped from HP and updated from Debian
1132 # Update by Gentoo to support unicode output
1133
1134 diff --git a/bin/chpathtool.py b/bin/chpathtool.py
1135 index d0d49cb..6460662 100755
1136 --- a/bin/chpathtool.py
1137 +++ b/bin/chpathtool.py
1138 @@ -1,15 +1,26 @@
1139 -#!/usr/bin/python
1140 -# Copyright 2011 Gentoo Foundation
1141 +#!/usr/bin/python -b
1142 +# Copyright 2011-2014 Gentoo Foundation
1143 # Distributed under the terms of the GNU General Public License v2
1144
1145 +"""Helper tool for converting installed files to custom prefixes.
1146 +
1147 +In other words, eprefixy $D for Gentoo/Prefix."""
1148 +
1149 import io
1150 -import optparse
1151 import os
1152 import stat
1153 import sys
1154
1155 -CONTENT_ENCODING = "utf_8"
1156 -FS_ENCODING = "utf_8"
1157 +from portage.util._argparse import ArgumentParser
1158 +
1159 +# Argument parsing compatibility for Python 2.6 using optparse.
1160 +if sys.hexversion < 0x2070000:
1161 + from optparse import OptionParser
1162 +
1163 +from optparse import OptionError
1164 +
1165 +CONTENT_ENCODING = 'utf_8'
1166 +FS_ENCODING = 'utf_8'
1167
1168 try:
1169 import magic
1170 @@ -41,7 +52,9 @@ class IsTextFile(object):
1171
1172 def _is_text_magic(self, filename):
1173 mime_type = self._m.file(filename)
1174 - return mime_type.startswith("text/")
1175 + if isinstance(mime_type, bytes):
1176 + mime_type = mime_type.decode('ascii', 'replace')
1177 + return mime_type.startswith('text/')
1178
1179 def _is_text_encoding(self, filename):
1180 try:
1181 @@ -64,7 +77,7 @@ def chpath_inplace(filename, is_text_file, old, new):
1182 try:
1183 orig_mode = stat.S_IMODE(os.lstat(filename).st_mode)
1184 except OSError as e:
1185 - sys.stderr.write("%s: %s\n" % (e, filename))
1186 + sys.stderr.write('%s: %s\n' % (e, filename))
1187 return
1188 temp_mode = 0o200 | orig_mode
1189 os.chmod(filename, temp_mode)
1190 @@ -121,8 +134,12 @@ def chpath_inplace(filename, is_text_file, old, new):
1191
1192 f.close()
1193 if modified:
1194 - orig_mtime = orig_stat[stat.ST_MTIME]
1195 - os.utime(filename, (orig_mtime, orig_mtime))
1196 + if sys.hexversion >= 0x3030000:
1197 + orig_mtime = orig_stat.st_mtime_ns
1198 + os.utime(filename, ns=(orig_mtime, orig_mtime))
1199 + else:
1200 + orig_mtime = orig_stat[stat.ST_MTIME]
1201 + os.utime(filename, (orig_mtime, orig_mtime))
1202 return modified
1203
1204 def chpath_inplace_symlink(filename, st, old, new):
1205 @@ -135,14 +152,37 @@ def chpath_inplace_symlink(filename, st, old, new):
1206
1207 def main(argv):
1208
1209 - usage = "%s [options] <location> <old> <new>" % (os.path.basename(argv[0],))
1210 - parser = optparse.OptionParser(usage=usage)
1211 - options, args = parser.parse_args(argv[1:])
1212 -
1213 - if len(args) != 3:
1214 - parser.error("3 args required, got %s" % (len(args),))
1215 -
1216 - location, old, new = args
1217 + parser = ArgumentParser(description=__doc__)
1218 + try:
1219 + parser.add_argument('location', default=None,
1220 + help='root directory (e.g. $D)')
1221 + parser.add_argument('old', default=None,
1222 + help='original build prefix (e.g. /)')
1223 + parser.add_argument('new', default=None,
1224 + help='new install prefix (e.g. $EPREFIX)')
1225 + opts = parser.parse_args(argv)
1226 +
1227 + location, old, new = opts.location, opts.old, opts.new
1228 + except OptionError:
1229 + # Argument parsing compatibility for Python 2.6 using optparse.
1230 + if sys.hexversion < 0x2070000:
1231 + parser = OptionParser(description=__doc__,
1232 + usage="usage: %prog [-h] location old new\n\n" + \
1233 + " location: root directory (e.g. $D)\n" + \
1234 + " old: original build prefix (e.g. /)\n" + \
1235 + " new: new install prefix (e.g. $EPREFIX)")
1236 +
1237 + (opts, args) = parser.parse_args()
1238 +
1239 + if len(args) != 3:
1240 + parser.print_usage()
1241 + print("%s: error: expected 3 arguments, got %i"
1242 + % (__file__, len(args)))
1243 + return
1244 +
1245 + location, old, new = args[0:3]
1246 + else:
1247 + raise
1248
1249 is_text_file = IsTextFile()
1250
1251 @@ -178,5 +218,5 @@ def main(argv):
1252
1253 return os.EX_OK
1254
1255 -if __name__ == "__main__":
1256 - sys.exit(main(sys.argv))
1257 +if __name__ == '__main__':
1258 + sys.exit(main(sys.argv[1:]))
1259
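The chpathtool.py hunk above preserves a rewritten file's modification time, using nanosecond precision where os.utime() supports it (Python 3.3+). In isolation the pattern looks like this (the file name is only an example):

    import os
    import sys

    def restore_mtime(filename, orig_stat):
        # Re-apply the original modification time after rewriting the file.
        if sys.hexversion >= 0x3030000:
            mtime = orig_stat.st_mtime_ns
            os.utime(filename, ns=(mtime, mtime))    # nanosecond precision
        else:
            mtime = orig_stat.st_mtime
            os.utime(filename, (mtime, mtime))       # float seconds

    with open('example.txt', 'w') as f:              # illustrative file
        f.write('data\n')
    orig = os.stat('example.txt')
    # ... rewrite example.txt in place ...
    restore_mtime('example.txt', orig)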
1260 diff --git a/bin/clean_locks b/bin/clean_locks
1261 index 8c4299c..3e969f2 100755
1262 --- a/bin/clean_locks
1263 +++ b/bin/clean_locks
1264 @@ -1,21 +1,17 @@
1265 -#!/usr/bin/python -O
1266 -# Copyright 1999-2006 Gentoo Foundation
1267 +#!/usr/bin/python -bO
1268 +# Copyright 1999-2014 Gentoo Foundation
1269 # Distributed under the terms of the GNU General Public License v2
1270
1271 from __future__ import print_function
1272
1273 import sys, errno
1274 -try:
1275 - import portage
1276 -except ImportError:
1277 - from os import path as osp
1278 - sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym"))
1279 - import portage
1280 -
1281 -from portage import os
1282 +from os import path as osp
1283 +pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")
1284 +sys.path.insert(0, pym_path)
1285 +import portage
1286 +portage._internal_caller = True
1287
1288 if not sys.argv[1:] or "--help" in sys.argv or "-h" in sys.argv:
1289 - import portage
1290 print()
1291 print("You must specify directories with hardlink-locks to clean.")
1292 print("You may optionally specify --force, which will remove all")
1293 @@ -26,11 +22,11 @@ if not sys.argv[1:] or "--help" in sys.argv or "-h" in sys.argv:
1294 print("%s --force %s/.locks" % (sys.argv[0], portage.settings["DISTDIR"]))
1295 print()
1296 sys.exit(1)
1297 -
1298 +
1299 force = False
1300 if "--force" in sys.argv[1:]:
1301 force=True
1302 -
1303 +
1304 for x in sys.argv[1:]:
1305 if x == "--force":
1306 continue
1307 @@ -38,7 +34,7 @@ for x in sys.argv[1:]:
1308 for y in portage.locks.hardlock_cleanup(x, remove_all_locks=force):
1309 print(y)
1310 print()
1311 -
1312 +
1313 except OSError as e:
1314 if e.errno in (errno.ENOENT, errno.ENOTDIR):
1315 print("!!! %s is not a directory or does not exist" % x)
1316
1317 diff --git a/bin/dispatch-conf b/bin/dispatch-conf
1318 index 139a001..4b0c0ac 100755
1319 --- a/bin/dispatch-conf
1320 +++ b/bin/dispatch-conf
1321 @@ -1,5 +1,5 @@
1322 -#!/usr/bin/python -O
1323 -# Copyright 1999-2011 Gentoo Foundation
1324 +#!/usr/bin/python -bO
1325 +# Copyright 1999-2014 Gentoo Foundation
1326 # Distributed under the terms of the GNU General Public License v2
1327
1328 #
1329 @@ -16,19 +16,15 @@ from __future__ import print_function
1330 from stat import ST_GID, ST_MODE, ST_UID
1331 from random import random
1332 import atexit, re, shutil, stat, sys
1333 -
1334 -try:
1335 - import portage
1336 -except ImportError:
1337 - from os import path as osp
1338 - sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym"))
1339 - import portage
1340 -
1341 +from os import path as osp
1342 +pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")
1343 +sys.path.insert(0, pym_path)
1344 +import portage
1345 +portage._internal_caller = True
1346 from portage import os
1347 -from portage import dispatch_conf
1348 from portage import _unicode_decode
1349 from portage.dispatch_conf import diffstatusoutput
1350 -from portage.process import find_binary
1351 +from portage.process import find_binary, spawn
1352
1353 FIND_EXTANT_CONFIGS = "find '%s' %s -name '._cfg????_%s' ! -name '.*~' ! -iname '.*.bak' -print"
1354 DIFF_CONTENTS = "diff -Nu '%s' '%s'"
1355 @@ -83,7 +79,7 @@ class dispatch:
1356 confs = []
1357 count = 0
1358
1359 - config_root = portage.const.EPREFIX or os.sep
1360 + config_root = portage.settings["EPREFIX"] or os.sep
1361 self.options = portage.dispatch_conf.read_config(MANDATORY_OPTS)
1362
1363 if "log-file" in self.options:
1364 @@ -411,7 +407,8 @@ class dispatch:
1365
1366
1367 def do_help (self):
1368 - print(); print
1369 + print()
1370 + print()
1371
1372 print(' u -- update current config with new config and continue')
1373 print(' z -- zap (delete) new config and continue')
1374 @@ -431,7 +428,7 @@ class dispatch:
1375 def getch ():
1376 # from ASPN - Danny Yoo
1377 #
1378 - import sys, tty, termios
1379 + import tty, termios
1380
1381 fd = sys.stdin.fileno()
1382 old_settings = termios.tcgetattr(fd)
1383 @@ -456,17 +453,18 @@ def clear_screen():
1384 pass
1385 os.system("clear 2>/dev/null")
1386
1387 -from portage.process import find_binary, spawn
1388 shell = os.environ.get("SHELL")
1389 if not shell or not os.access(shell, os.EX_OK):
1390 shell = find_binary("sh")
1391
1392 def spawn_shell(cmd):
1393 if shell:
1394 + sys.__stdout__.flush()
1395 + sys.__stderr__.flush()
1396 spawn([shell, "-c", cmd], env=os.environ,
1397 - fd_pipes = { 0 : sys.stdin.fileno(),
1398 - 1 : sys.stdout.fileno(),
1399 - 2 : sys.stderr.fileno()})
1400 + fd_pipes = { 0 : portage._get_stdin().fileno(),
1401 + 1 : sys.__stdout__.fileno(),
1402 + 2 : sys.__stderr__.fileno()})
1403 else:
1404 os.system(cmd)
1405
1406
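dispatch-conf's getch() (the ASPN recipe referenced above) reads a single keypress by switching the terminal into raw mode and restoring it afterwards; stripped down it is roughly:

    import sys
    import termios
    import tty

    def getch():
        # Read one keypress without waiting for Enter; requires a real tty.
        fd = sys.stdin.fileno()
        old_settings = termios.tcgetattr(fd)
        try:
            tty.setraw(fd)
            ch = sys.stdin.read(1)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
        return ch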
1407 diff --git a/bin/dohtml.py b/bin/dohtml.py
1408 index 3e80ef5..5359f5e 100755
1409 --- a/bin/dohtml.py
1410 +++ b/bin/dohtml.py
1411 @@ -1,5 +1,5 @@
1412 -#!/usr/bin/python
1413 -# Copyright 1999-2012 Gentoo Foundation
1414 +#!/usr/bin/python -b
1415 +# Copyright 1999-2014 Gentoo Foundation
1416 # Distributed under the terms of the GNU General Public License v2
1417
1418 #
1419 @@ -31,13 +31,25 @@
1420 from __future__ import print_function
1421
1422 import os
1423 +import shutil
1424 import sys
1425
1426 +from portage.util import normalize_path
1427 +
1428 +# Change back to original cwd _after_ all imports (bug #469338).
1429 +os.chdir(os.environ["__PORTAGE_HELPER_CWD"])
1430 +
1431 def dodir(path):
1432 - os.spawnlp(os.P_WAIT, "install", "install", "-d", path)
1433 + try:
1434 + os.makedirs(path, 0o755)
1435 + except OSError:
1436 + if not os.path.isdir(path):
1437 + raise
1438 + os.chmod(path, 0o755)
1439
1440 def dofile(src,dst):
1441 - os.spawnlp(os.P_WAIT, "install", "install", "-m0644", src, dst)
1442 + shutil.copy(src, dst)
1443 + os.chmod(dst, 0o644)
1444
1445 def eqawarn(lines):
1446 cmd = "source '%s/isolated-functions.sh' ; " % \
1447 @@ -55,14 +67,18 @@ unwarned_skipped_files = os.environ.get("PORTAGE_DOHTML_UNWARNED_SKIPPED_FILES",
1448 def install(basename, dirname, options, prefix=""):
1449 fullpath = basename
1450 if prefix:
1451 - fullpath = prefix + "/" + fullpath
1452 + fullpath = os.path.join(prefix, fullpath)
1453 if dirname:
1454 - fullpath = dirname + "/" + fullpath
1455 + fullpath = os.path.join(dirname, fullpath)
1456
1457 if options.DOCDESTTREE:
1458 - destdir = options.ED + "usr/share/doc/" + options.PF + "/" + options.DOCDESTTREE + "/" + options.doc_prefix + "/" + prefix
1459 + desttree = options.DOCDESTTREE
1460 else:
1461 - destdir = options.ED + "usr/share/doc/" + options.PF + "/html/" + options.doc_prefix + "/" + prefix
1462 + desttree = "html"
1463 +
1464 + destdir = os.path.join(options.ED, "usr", "share", "doc",
1465 + options.PF.lstrip(os.sep), desttree.lstrip(os.sep),
1466 + options.doc_prefix.lstrip(os.sep), prefix).rstrip(os.sep)
1467
1468 if not os.path.exists(fullpath):
1469 sys.stderr.write("!!! dohtml: %s does not exist\n" % fullpath)
1470 @@ -71,14 +87,15 @@ def install(basename, dirname, options, prefix=""):
1471 ext = os.path.splitext(basename)[1][1:]
1472 if ext in options.allowed_exts or basename in options.allowed_files:
1473 dodir(destdir)
1474 - dofile(fullpath, destdir + "/" + basename)
1475 + dofile(fullpath, os.path.join(destdir, basename))
1476 elif warn_on_skipped_files and ext not in unwarned_skipped_extensions and basename not in unwarned_skipped_files:
1477 skipped_files.append(fullpath)
1478 elif options.recurse and os.path.isdir(fullpath) and \
1479 basename not in options.disallowed_dirs:
1480 for i in os.listdir(fullpath):
1481 pfx = basename
1482 - if prefix: pfx = prefix + "/" + pfx
1483 + if prefix:
1484 + pfx = os.path.join(prefix, pfx)
1485 install(i, dirname, options, pfx)
1486 elif not options.recurse and os.path.isdir(fullpath):
1487 global skipped_directories
1488 @@ -97,16 +114,22 @@ class OptionsClass:
1489
1490 if "PF" in os.environ:
1491 self.PF = os.environ["PF"]
1492 + if self.PF:
1493 + self.PF = normalize_path(self.PF)
1494 if "force-prefix" not in os.environ.get("FEATURES", "").split() and \
1495 os.environ.get("EAPI", "0") in ("0", "1", "2"):
1496 self.ED = os.environ.get("D", "")
1497 else:
1498 self.ED = os.environ.get("ED", "")
1499 + if self.ED:
1500 + self.ED = normalize_path(self.ED)
1501 if "_E_DOCDESTTREE_" in os.environ:
1502 self.DOCDESTTREE = os.environ["_E_DOCDESTTREE_"]
1503 + if self.DOCDESTTREE:
1504 + self.DOCDESTTREE = normalize_path(self.DOCDESTTREE)
1505
1506 self.allowed_exts = ['css', 'gif', 'htm', 'html', 'jpeg', 'jpg', 'js', 'png']
1507 - if os.environ.get("EAPI", "0") in ("4-python",):
1508 + if os.environ.get("EAPI", "0") in ("4-python", "5-progress"):
1509 self.allowed_exts += ['ico', 'svg', 'xhtml', 'xml']
1510 self.allowed_files = []
1511 self.disallowed_dirs = ['CVS']
1512 @@ -153,6 +176,8 @@ def parse_args():
1513 sys.exit(0)
1514 elif arg == "-p":
1515 options.doc_prefix = sys.argv[x]
1516 + if options.doc_prefix:
1517 + options.doc_prefix = normalize_path(options.doc_prefix)
1518 else:
1519 values = sys.argv[x].split(",")
1520 if arg == "-A":
1521 @@ -179,8 +204,17 @@ def main():
1522 print("Allowed files :", options.allowed_files)
1523
1524 success = False
1525 + endswith_slash = (os.sep, os.sep + ".")
1526
1527 for x in args:
1528 + trailing_slash = x.endswith(endswith_slash)
1529 + x = normalize_path(x)
1530 + if trailing_slash:
1531 + # Modify behavior of basename and dirname
1532 + # as noted in bug #425214, causing foo/ to
1533 + # behave similarly to the way that foo/*
1534 + # behaves.
1535 + x += os.sep
1536 basename = os.path.basename(x)
1537 dirname = os.path.dirname(x)
1538 success |= install(basename, dirname, options)
1539
1540 diff --git a/bin/eapi.sh b/bin/eapi.sh
1541 new file mode 100644
1542 index 0000000..623b89f
1543 --- /dev/null
1544 +++ b/bin/eapi.sh
1545 @@ -0,0 +1,145 @@
1546 +#!/bin/bash
1547 +# Copyright 2012 Gentoo Foundation
1548 +# Distributed under the terms of the GNU General Public License v2
1549 +
1550 +# PHASES
1551 +
1552 +___eapi_has_pkg_pretend() {
1553 + [[ ! ${1-${EAPI}} =~ ^(0|1|2|3)$ ]]
1554 +}
1555 +
1556 +___eapi_has_src_prepare() {
1557 + [[ ! ${1-${EAPI}} =~ ^(0|1)$ ]]
1558 +}
1559 +
1560 +___eapi_has_src_configure() {
1561 + [[ ! ${1-${EAPI}} =~ ^(0|1)$ ]]
1562 +}
1563 +
1564 +___eapi_default_src_test_disables_parallel_jobs() {
1565 + [[ ${1-${EAPI}} =~ ^(0|1|2|3|4|4-python|4-slot-abi)$ ]]
1566 +}
1567 +
1568 +___eapi_has_S_WORKDIR_fallback() {
1569 + [[ ${1-${EAPI}} =~ ^(0|1|2|3)$ ]]
1570 +}
1571 +
1572 +# VARIABLES
1573 +
1574 +___eapi_has_prefix_variables() {
1575 + [[ ! ${1-${EAPI}} =~ ^(0|1|2)$ || " ${FEATURES} " == *" force-prefix "* ]]
1576 +}
1577 +
1578 +___eapi_has_HDEPEND() {
1579 + [[ ${1-${EAPI}} =~ ^(5-hdepend)$ ]]
1580 +}
1581 +
1582 +___eapi_has_RDEPEND_DEPEND_fallback() {
1583 + [[ ${1-${EAPI}} =~ ^(0|1|2|3)$ ]]
1584 +}
1585 +
1586 +# HELPERS PRESENCE
1587 +
1588 +___eapi_has_dohard() {
1589 + [[ ${1-${EAPI}} =~ ^(0|1|2|3)$ ]]
1590 +}
1591 +
1592 +___eapi_has_dosed() {
1593 + [[ ${1-${EAPI}} =~ ^(0|1|2|3)$ ]]
1594 +}
1595 +
1596 +___eapi_has_docompress() {
1597 + [[ ! ${1-${EAPI}} =~ ^(0|1|2|3)$ ]]
1598 +}
1599 +
1600 +___eapi_has_nonfatal() {
1601 + [[ ! ${1-${EAPI}} =~ ^(0|1|2|3)$ ]]
1602 +}
1603 +
1604 +___eapi_has_doheader() {
1605 + [[ ! ${1-${EAPI}} =~ ^(0|1|2|3|4|4-python|4-slot-abi)$ ]]
1606 +}
1607 +
1608 +___eapi_has_usex() {
1609 + [[ ! ${1-${EAPI}} =~ ^(0|1|2|3|4|4-python|4-slot-abi)$ ]]
1610 +}
1611 +
1612 +___eapi_has_master_repositories() {
1613 + [[ ${1-${EAPI}} =~ ^(5-progress)$ ]]
1614 +}
1615 +
1616 +___eapi_has_repository_path() {
1617 + [[ ${1-${EAPI}} =~ ^(5-progress)$ ]]
1618 +}
1619 +
1620 +___eapi_has_available_eclasses() {
1621 + [[ ${1-${EAPI}} =~ ^(5-progress)$ ]]
1622 +}
1623 +
1624 +___eapi_has_eclass_path() {
1625 + [[ ${1-${EAPI}} =~ ^(5-progress)$ ]]
1626 +}
1627 +
1628 +___eapi_has_license_path() {
1629 + [[ ${1-${EAPI}} =~ ^(5-progress)$ ]]
1630 +}
1631 +
1632 +___eapi_has_package_manager_build_user() {
1633 + [[ ${1-${EAPI}} =~ ^(5-progress)$ ]]
1634 +}
1635 +
1636 +___eapi_has_package_manager_build_group() {
1637 + [[ ${1-${EAPI}} =~ ^(5-progress)$ ]]
1638 +}
1639 +
1640 +# HELPERS BEHAVIOR
1641 +
1642 +___eapi_best_version_and_has_version_support_--host-root() {
1643 + [[ ! ${1-${EAPI}} =~ ^(0|1|2|3|4|4-python|4-slot-abi)$ ]]
1644 +}
1645 +
1646 +___eapi_unpack_supports_xz() {
1647 + [[ ! ${1-${EAPI}} =~ ^(0|1|2)$ ]]
1648 +}
1649 +
1650 +___eapi_econf_passes_--disable-dependency-tracking() {
1651 + [[ ! ${1-${EAPI}} =~ ^(0|1|2|3)$ ]]
1652 +}
1653 +
1654 +___eapi_econf_passes_--disable-silent-rules() {
1655 + [[ ! ${1-${EAPI}} =~ ^(0|1|2|3|4|4-python|4-slot-abi)$ ]]
1656 +}
1657 +
1658 +___eapi_use_enable_and_use_with_support_empty_third_argument() {
1659 + [[ ! ${1-${EAPI}} =~ ^(0|1|2|3)$ ]]
1660 +}
1661 +
1662 +___eapi_dodoc_supports_-r() {
1663 + [[ ! ${1-${EAPI}} =~ ^(0|1|2|3)$ ]]
1664 +}
1665 +
1666 +___eapi_doins_and_newins_preserve_symlinks() {
1667 + [[ ! ${1-${EAPI}} =~ ^(0|1|2|3)$ ]]
1668 +}
1669 +
1670 +___eapi_newins_supports_reading_from_standard_input() {
1671 + [[ ! ${1-${EAPI}} =~ ^(0|1|2|3|4|4-python|4-slot-abi)$ ]]
1672 +}
1673 +
1674 +___eapi_helpers_can_die() {
1675 + [[ ! ${1-${EAPI}} =~ ^(0|1|2|3)$ ]]
1676 +}
1677 +
1678 +___eapi_disallows_helpers_in_global_scope() {
1679 + [[ ${1-${EAPI}} =~ ^(4-python|5-progress)$ ]]
1680 +}
1681 +
1682 +___eapi_unpack_is_case_sensitive() {
1683 + [[ ${1-${EAPI}} =~ ^(0|1|2|3|4|4-python|4-slot-abi|5|5-hdepend)$ ]]
1684 +}
1685 +
1686 +# OTHERS
1687 +
1688 +___eapi_enables_globstar() {
1689 + [[ ${1-${EAPI}} =~ ^(4-python|5-progress)$ ]]
1690 +}
1691
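The helper scripts changed below all switch from open-coded `case "$EAPI" in ...` checks to the predicate functions defined in this new bin/eapi.sh. A minimal sketch of the intended call pattern (the predicate names are taken from the file above; the surrounding helper body is illustrative and assumes eapi.sh is pulled in via isolated-functions.sh):

    #!/bin/bash
    source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh

    # EAPIs 0-2 have no ED/EPREFIX unless force-prefix is set, so fall
    # back to D -- this replaces the old inline case statement.
    if ! ___eapi_has_prefix_variables; then
        ED=${D}
    fi

    # Each predicate defaults to ${EAPI} but also accepts an explicit
    # EAPI as its first argument:
    if ___eapi_has_doheader "5"; then
        echo "EAPI 5 provides the doheader helper"
    fi
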
1692 diff --git a/bin/ebuild b/bin/ebuild
1693 index 65e5bef..8f4b103 100755
1694 --- a/bin/ebuild
1695 +++ b/bin/ebuild
1696 @@ -1,5 +1,5 @@
1697 -#!/usr/bin/python -O
1698 -# Copyright 1999-2012 Gentoo Foundation
1699 +#!/usr/bin/python -bO
1700 +# Copyright 1999-2014 Gentoo Foundation
1701 # Distributed under the terms of the GNU General Public License v2
1702
1703 from __future__ import print_function
1704 @@ -10,7 +10,7 @@ import sys
1705 # This block ensures that ^C interrupts are handled quietly.
1706 try:
1707
1708 - def exithandler(signum,frame):
1709 + def exithandler(signum, _frame):
1710 signal.signal(signal.SIGINT, signal.SIG_IGN)
1711 signal.signal(signal.SIGTERM, signal.SIG_IGN)
1712 sys.exit(128 + signum)
1713 @@ -24,7 +24,7 @@ try:
1714 except KeyboardInterrupt:
1715 sys.exit(128 + signal.SIGINT)
1716
1717 -def debug_signal(signum, frame):
1718 +def debug_signal(_signum, _frame):
1719 import pdb
1720 pdb.set_trace()
1721
1722 @@ -35,51 +35,50 @@ else:
1723
1724 signal.signal(debug_signum, debug_signal)
1725
1726 -import imp
1727 import io
1728 -import optparse
1729 import os
1730 +from os import path as osp
1731 +pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")
1732 +sys.path.insert(0, pym_path)
1733 +import portage
1734 +portage._internal_caller = True
1735 +from portage import os
1736 +from portage import _encodings
1737 +from portage import _shell_quote
1738 +from portage import _unicode_decode
1739 +from portage import _unicode_encode
1740 +from portage.const import VDB_PATH
1741 +from portage.util._argparse import ArgumentParser
1742 +from _emerge.Package import Package
1743 +from _emerge.RootConfig import RootConfig
1744
1745 description = "See the ebuild(1) man page for more info"
1746 usage = "Usage: ebuild <ebuild file> <command> [command] ..."
1747 -parser = optparse.OptionParser(description=description, usage=usage)
1748 +parser = ArgumentParser(description=description, usage=usage)
1749
1750 force_help = "When used together with the digest or manifest " + \
1751 "command, this option forces regeneration of digests for all " + \
1752 "distfiles associated with the current ebuild. Any distfiles " + \
1753 "that do not already exist in ${DISTDIR} will be automatically fetched."
1754
1755 -parser.add_option("--force", help=force_help, action="store_true", dest="force")
1756 -parser.add_option("--color", help="enable or disable color output",
1757 - type="choice", choices=("y", "n"))
1758 -parser.add_option("--debug", help="show debug output",
1759 - action="store_true", dest="debug")
1760 -parser.add_option("--version", help="show version and exit",
1761 - action="store_true", dest="version")
1762 -parser.add_option("--ignore-default-opts",
1763 +parser.add_argument("--force", help=force_help, action="store_true")
1764 +parser.add_argument("--color", help="enable or disable color output",
1765 + choices=("y", "n"))
1766 +parser.add_argument("--debug", help="show debug output",
1767 + action="store_true")
1768 +parser.add_argument("--version", help="show version and exit",
1769 + action="store_true")
1770 +parser.add_argument("--ignore-default-opts",
1771 action="store_true",
1772 help="do not use the EBUILD_DEFAULT_OPTS environment variable")
1773 -parser.add_option("--skip-manifest", help="skip all manifest checks",
1774 - action="store_true", dest="skip_manifest")
1775 -
1776 -opts, pargs = parser.parse_args(args=sys.argv[1:])
1777 +parser.add_argument("--skip-manifest", help="skip all manifest checks",
1778 + action="store_true")
1779
1780 -try:
1781 - import portage
1782 -except ImportError:
1783 - from os import path as osp
1784 - sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym"))
1785 - import portage
1786 +opts, pargs = parser.parse_known_args(args=sys.argv[1:])
1787
1788 -portage.dep._internal_warnings = True
1789 -from portage import os
1790 -from portage import _encodings
1791 -from portage import _shell_quote
1792 -from portage import _unicode_decode
1793 -from portage import _unicode_encode
1794 -from portage.const import VDB_PATH
1795 -from _emerge.Package import Package
1796 -from _emerge.RootConfig import RootConfig
1797 +def err(txt):
1798 + portage.writemsg('ebuild: %s\n' % (txt,), noiselevel=-1)
1799 + sys.exit(1)
1800
1801 if opts.version:
1802 print("Portage", portage.VERSION)
1803 @@ -89,8 +88,9 @@ if len(pargs) < 2:
1804 parser.error("missing required args")
1805
1806 if not opts.ignore_default_opts:
1807 - default_opts = portage.settings.get("EBUILD_DEFAULT_OPTS", "").split()
1808 - opts, pargs = parser.parse_args(default_opts + sys.argv[1:])
1809 + default_opts = portage.util.shlex_split(
1810 + portage.settings.get("EBUILD_DEFAULT_OPTS", ""))
1811 + opts, pargs = parser.parse_known_args(default_opts + sys.argv[1:])
1812
1813 debug = opts.debug
1814 force = opts.force
1815 @@ -119,9 +119,7 @@ if ebuild.endswith(".ebuild"):
1816 pf = os.path.basename(ebuild)[:-7]
1817
1818 if pf is None:
1819 - portage.writemsg("'%s' does not end with '.ebuild'.\n" % \
1820 - (ebuild,), noiselevel=-1)
1821 - sys.exit(1)
1822 + err("%s: does not end with '.ebuild'" % (ebuild,))
1823
1824 if not os.path.isabs(ebuild):
1825 mycwd = os.getcwd()
1826 @@ -160,15 +158,14 @@ if ebuild_portdir != vdb_path and \
1827 encoding=_encodings['content'], errors='strict')
1828
1829 print("Appending %s to PORTDIR_OVERLAY..." % ebuild_portdir)
1830 - imp.reload(portage)
1831 + portage._reset_legacy_globals()
1832
1833 myrepo = None
1834 if ebuild_portdir != vdb_path:
1835 myrepo = portage.portdb.getRepositoryName(ebuild_portdir)
1836
1837 if not os.path.exists(ebuild):
1838 - print("'%s' does not exist." % ebuild)
1839 - sys.exit(1)
1840 + err('%s: does not exist' % (ebuild,))
1841
1842 ebuild_split = ebuild.split("/")
1843 cpv = "%s/%s" % (ebuild_split[-3], pf)
1844 @@ -179,8 +176,7 @@ with io.open(_unicode_encode(ebuild, encoding=_encodings['fs'], errors='strict')
1845 if eapi is None:
1846 eapi = "0"
1847 if not portage.catpkgsplit(cpv, eapi=eapi):
1848 - print("!!! %s does not follow correct package syntax." % (cpv))
1849 - sys.exit(1)
1850 + err('%s: %s: does not follow correct package syntax' % (ebuild, cpv))
1851
1852 if ebuild.startswith(vdb_path):
1853 mytree = "vartree"
1854 @@ -189,8 +185,7 @@ if ebuild.startswith(vdb_path):
1855 portage_ebuild = portage.db[portage.root][mytree].dbapi.findname(cpv, myrepo=myrepo)
1856
1857 if os.path.realpath(portage_ebuild) != ebuild:
1858 - print("!!! Portage seems to think that %s is at %s" % (cpv, portage_ebuild))
1859 - sys.exit(1)
1860 + err('Portage seems to think that %s is at %s' % (cpv, portage_ebuild))
1861
1862 else:
1863 mytree = "porttree"
1864 @@ -199,12 +194,10 @@ else:
1865 portage_ebuild = portage.portdb.findname(cpv, myrepo=myrepo)
1866
1867 if not portage_ebuild or portage_ebuild != ebuild:
1868 - print("!!! %s does not seem to have a valid PORTDIR structure." % ebuild)
1869 - sys.exit(1)
1870 + err('%s: does not seem to have a valid PORTDIR structure' % (ebuild,))
1871
1872 if len(pargs) > 1 and "config" in pargs:
1873 - print("config must be called on it's own, not combined with any other phase")
1874 - sys.exit(1)
1875 + err('"config" must not be called with any other phase')
1876
1877 def discard_digests(myebuild, mysettings, mydbapi):
1878 """Discard all distfiles digests for the given ebuild. This is useful when
1879 @@ -313,14 +306,16 @@ def stale_env_warning():
1880
1881 if ebuild_changed:
1882 open(os.path.join(tmpsettings['PORTAGE_BUILDDIR'],
1883 - '.ebuild_changed'), 'w')
1884 + '.ebuild_changed'), 'w').close()
1885
1886 from portage.exception import PermissionDenied, \
1887 PortagePackageException, UnsupportedAPIException
1888
1889 -if 'digest' in tmpsettings.features and \
1890 - not set(["digest", "manifest"]).intersection(pargs):
1891 - pargs = ['digest'] + pargs
1892 +if 'digest' in tmpsettings.features:
1893 + if pargs and pargs[0] not in ("digest", "manifest"):
1894 + pargs = ['digest'] + pargs
1895 + # We only need to build digests on the first pass.
1896 + tmpsettings.features.discard('digest')
1897
1898 checked_for_stale_env = False
1899
1900 @@ -334,7 +329,7 @@ for arg in pargs:
1901
1902 if arg in ("digest", "manifest") and force:
1903 discard_digests(ebuild, tmpsettings, portage.portdb)
1904 - a = portage.doebuild(ebuild, arg, portage.root, tmpsettings,
1905 + a = portage.doebuild(ebuild, arg, settings=tmpsettings,
1906 debug=debug, tree=mytree,
1907 vartree=portage.db[portage.root]['vartree'])
1908 except KeyboardInterrupt:
1909
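For reference, the reworked option handling above is still driven from the command line as before; an illustrative invocation (hypothetical ebuild path) combining the argparse-style options with phase arguments, plus the rule that "config" must be run on its own:

    # options and phases may be mixed; digests are regenerated on the
    # first pass when FEATURES=digest is active
    ebuild --debug --skip-manifest /usr/portage/app-misc/foo/foo-1.0.ebuild clean compile

    # "config" may not be combined with any other phase
    ebuild /usr/portage/app-misc/foo/foo-1.0.ebuild config
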
1910 diff --git a/bin/ebuild-helpers/4/dodoc b/bin/ebuild-helpers/4/dodoc
1911 deleted file mode 120000
1912 index 35080ad..0000000
1913 --- a/bin/ebuild-helpers/4/dodoc
1914 +++ /dev/null
1915 @@ -1 +0,0 @@
1916 -../doins
1917 \ No newline at end of file
1918
1919 diff --git a/bin/ebuild-helpers/4/dohard b/bin/ebuild-helpers/4/dohard
1920 deleted file mode 120000
1921 index 1a6b57a..0000000
1922 --- a/bin/ebuild-helpers/4/dohard
1923 +++ /dev/null
1924 @@ -1 +0,0 @@
1925 -../../banned-helper
1926 \ No newline at end of file
1927
1928 diff --git a/bin/ebuild-helpers/4/dosed b/bin/ebuild-helpers/4/dosed
1929 deleted file mode 120000
1930 index 1a6b57a..0000000
1931 --- a/bin/ebuild-helpers/4/dosed
1932 +++ /dev/null
1933 @@ -1 +0,0 @@
1934 -../../banned-helper
1935 \ No newline at end of file
1936
1937 diff --git a/bin/ebuild-helpers/4/prepalldocs b/bin/ebuild-helpers/4/prepalldocs
1938 deleted file mode 120000
1939 index 1a6b57a..0000000
1940 --- a/bin/ebuild-helpers/4/prepalldocs
1941 +++ /dev/null
1942 @@ -1 +0,0 @@
1943 -../../banned-helper
1944 \ No newline at end of file
1945
1946 diff --git a/bin/ebuild-helpers/sed b/bin/ebuild-helpers/bsd/sed
1947 similarity index 66%
1948 rename from bin/ebuild-helpers/sed
1949 rename to bin/ebuild-helpers/bsd/sed
1950 index b21e856..01b8847 100755
1951 --- a/bin/ebuild-helpers/sed
1952 +++ b/bin/ebuild-helpers/bsd/sed
1953 @@ -1,27 +1,27 @@
1954 #!/bin/bash
1955 -# Copyright 2007 Gentoo Foundation
1956 +# Copyright 2007-2012 Gentoo Foundation
1957 # Distributed under the terms of the GNU General Public License v2
1958
1959 scriptpath=${BASH_SOURCE[0]}
1960 scriptname=${scriptpath##*/}
1961
1962 -if [[ sed == ${scriptname} ]] && [[ -n ${ESED} ]]; then
1963 +if [[ sed == ${scriptname} && -n ${ESED} ]]; then
1964 exec ${ESED} "$@"
1965 elif type -P g${scriptname} > /dev/null ; then
1966 exec g${scriptname} "$@"
1967 else
1968 old_IFS="${IFS}"
1969 IFS=":"
1970 -
1971 +
1972 for path in $PATH; do
1973 - [[ ${path}/${scriptname} == ${scriptpath} ]] && continue
1974 if [[ -x ${path}/${scriptname} ]]; then
1975 - exec ${path}/${scriptname} "$@"
1976 + [[ ${path}/${scriptname} -ef ${scriptpath} ]] && continue
1977 + exec "${path}/${scriptname}" "$@"
1978 exit 0
1979 fi
1980 done
1981 -
1982 +
1983 IFS="${old_IFS}"
1984 fi
1985 -
1986 +
1987 exit 1
1988
1989 diff --git a/bin/ebuild-helpers/dobin b/bin/ebuild-helpers/dobin
1990 index f90d893..0ba1eb0 100755
1991 --- a/bin/ebuild-helpers/dobin
1992 +++ b/bin/ebuild-helpers/dobin
1993 @@ -1,19 +1,20 @@
1994 #!/bin/bash
1995 -# Copyright 1999-2011 Gentoo Foundation
1996 +# Copyright 1999-2012 Gentoo Foundation
1997 # Distributed under the terms of the GNU General Public License v2
1998
1999 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2000
2001 if [[ $# -lt 1 ]] ; then
2002 - helpers_die "${0##*/}: at least one argument needed"
2003 + __helpers_die "${0##*/}: at least one argument needed"
2004 exit 1
2005 fi
2006
2007 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2008 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
2009 +if ! ___eapi_has_prefix_variables; then
2010 + ED=${D}
2011 +fi
2012
2013 if [[ ! -d ${ED}${DESTTREE}/bin ]] ; then
2014 - install -d "${ED}${DESTTREE}/bin" || { helpers_die "${0##*/}: failed to install ${ED}${DESTTREE}/bin"; exit 2; }
2015 + install -d "${ED}${DESTTREE}/bin" || { __helpers_die "${0##*/}: failed to install ${ED}${DESTTREE}/bin"; exit 2; }
2016 fi
2017
2018 ret=0
2019 @@ -28,5 +29,5 @@ for x in "$@" ; do
2020 ((ret|=$?))
2021 done
2022
2023 -[[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2024 +[[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2025 exit ${ret}
2026
2027 diff --git a/bin/ebuild-helpers/doconfd b/bin/ebuild-helpers/doconfd
2028 index e146000..a3c09a5 100755
2029 --- a/bin/ebuild-helpers/doconfd
2030 +++ b/bin/ebuild-helpers/doconfd
2031 @@ -4,7 +4,7 @@
2032
2033 if [[ $# -lt 1 ]] ; then
2034 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2035 - helpers_die "${0##*/}: at least one argument needed"
2036 + __helpers_die "${0##*/}: at least one argument needed"
2037 exit 1
2038 fi
2039
2040
2041 diff --git a/bin/ebuild-helpers/dodir b/bin/ebuild-helpers/dodir
2042 index 90a3efe..e03ba9a 100755
2043 --- a/bin/ebuild-helpers/dodir
2044 +++ b/bin/ebuild-helpers/dodir
2045 @@ -1,13 +1,14 @@
2046 #!/bin/bash
2047 -# Copyright 1999-2011 Gentoo Foundation
2048 +# Copyright 1999-2012 Gentoo Foundation
2049 # Distributed under the terms of the GNU General Public License v2
2050
2051 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2052
2053 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2054 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
2055 +if ! ___eapi_has_prefix_variables; then
2056 + ED=${D}
2057 +fi
2058
2059 install -d ${DIROPTIONS} "${@/#/${ED}/}"
2060 ret=$?
2061 -[[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2062 +[[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2063 exit $ret
2064
2065 diff --git a/bin/ebuild-helpers/dodoc b/bin/ebuild-helpers/dodoc
2066 index 1f333a6..99122c4 100755
2067 --- a/bin/ebuild-helpers/dodoc
2068 +++ b/bin/ebuild-helpers/dodoc
2069 @@ -1,16 +1,24 @@
2070 #!/bin/bash
2071 -# Copyright 1999-2011 Gentoo Foundation
2072 +# Copyright 1999-2012 Gentoo Foundation
2073 # Distributed under the terms of the GNU General Public License v2
2074
2075 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2076
2077 +if ___eapi_dodoc_supports_-r; then
2078 + exec \
2079 + env \
2080 + __PORTAGE_HELPER="dodoc" \
2081 + doins "$@"
2082 +fi
2083 +
2084 if [ $# -lt 1 ] ; then
2085 - helpers_die "${0##*/}: at least one argument needed"
2086 - exit 1
2087 + __helpers_die "${0##*/}: at least one argument needed"
2088 + exit 1
2089 fi
2090
2091 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2092 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
2093 +if ! ___eapi_has_prefix_variables; then
2094 + ED=${D}
2095 +fi
2096
2097 dir="${ED}usr/share/doc/${PF}/${_E_DOCDESTTREE_}"
2098 if [ ! -d "${dir}" ] ; then
2099 @@ -30,5 +38,5 @@ for x in "$@" ; do
2100 fi
2101 done
2102
2103 -[[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2104 +[[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2105 exit ${ret}
2106
2107 diff --git a/bin/ebuild-helpers/doenvd b/bin/ebuild-helpers/doenvd
2108 index 28ab5d2..9287933 100755
2109 --- a/bin/ebuild-helpers/doenvd
2110 +++ b/bin/ebuild-helpers/doenvd
2111 @@ -4,7 +4,7 @@
2112
2113 if [[ $# -lt 1 ]] ; then
2114 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2115 - helpers_die "${0##*/}: at least one argument needed"
2116 + __helpers_die "${0##*/}: at least one argument needed"
2117 exit 1
2118 fi
2119
2120
2121 diff --git a/bin/ebuild-helpers/doexe b/bin/ebuild-helpers/doexe
2122 index fb228f9..c34fcae 100755
2123 --- a/bin/ebuild-helpers/doexe
2124 +++ b/bin/ebuild-helpers/doexe
2125 @@ -1,23 +1,23 @@
2126 #!/bin/bash
2127 -# Copyright 1999-2011 Gentoo Foundation
2128 +# Copyright 1999-2012 Gentoo Foundation
2129 # Distributed under the terms of the GNU General Public License v2
2130
2131 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2132
2133 if [[ $# -lt 1 ]] ; then
2134 - helpers_die "${0##*/}: at least one argument needed"
2135 + __helpers_die "${0##*/}: at least one argument needed"
2136 exit 1
2137 fi
2138
2139 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2140 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
2141 +if ! ___eapi_has_prefix_variables; then
2142 + ED=${D}
2143 +fi
2144
2145 if [[ ! -d ${ED}${_E_EXEDESTTREE_} ]] ; then
2146 install -d "${ED}${_E_EXEDESTTREE_}"
2147 fi
2148
2149 -TMP=$T/.doexe_tmp
2150 -mkdir "$TMP"
2151 +TMP=$(mktemp -d "${T}/.doexe_tmp_XXXXXX")
2152
2153 ret=0
2154
2155 @@ -26,7 +26,7 @@ for x in "$@" ; do
2156 cp "$x" "$TMP"
2157 mysrc=$TMP/${x##*/}
2158 elif [ -d "${x}" ] ; then
2159 - vecho "doexe: warning, skipping directory ${x}"
2160 + __vecho "doexe: warning, skipping directory ${x}"
2161 continue
2162 else
2163 mysrc="${x}"
2164 @@ -42,5 +42,5 @@ done
2165
2166 rm -rf "$TMP"
2167
2168 -[[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2169 +[[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2170 exit $ret
2171
2172 diff --git a/bin/ebuild-helpers/dohard b/bin/ebuild-helpers/dohard
2173 index b52fd7c..e0a44fa 100755
2174 --- a/bin/ebuild-helpers/dohard
2175 +++ b/bin/ebuild-helpers/dohard
2176 @@ -1,14 +1,22 @@
2177 #!/bin/bash
2178 -# Copyright 1999-2011 Gentoo Foundation
2179 +# Copyright 1999-2012 Gentoo Foundation
2180 # Distributed under the terms of the GNU General Public License v2
2181
2182 +source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2183 +
2184 +if ! ___eapi_has_dohard; then
2185 + die "'${0##*/}' has been banned for EAPI '$EAPI'"
2186 + exit 1
2187 +fi
2188 +
2189 if [[ $# -ne 2 ]] ; then
2190 echo "$0: two arguments needed" 1>&2
2191 exit 1
2192 fi
2193
2194 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2195 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
2196 +if ! ___eapi_has_prefix_variables; then
2197 + ED=${D}
2198 +fi
2199
2200 destdir=${2%/*}
2201 [[ ! -d ${ED}${destdir} ]] && dodir "${destdir}"
2202
2203 diff --git a/bin/ebuild-helpers/doheader b/bin/ebuild-helpers/doheader
2204 new file mode 100755
2205 index 0000000..3795365
2206 --- /dev/null
2207 +++ b/bin/ebuild-helpers/doheader
2208 @@ -0,0 +1,19 @@
2209 +#!/bin/bash
2210 +# Copyright 1999-2012 Gentoo Foundation
2211 +# Distributed under the terms of the GNU General Public License v2
2212 +
2213 +source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2214 +
2215 +if ! ___eapi_has_doheader; then
2216 + die "${0##*/} is not supported in EAPI ${EAPI}"
2217 +fi
2218 +
2219 +if [[ $# -lt 1 ]] || [[ $1 == -r && $# -lt 2 ]] ; then
2220 + __helpers_die "${0##*/}: at least one argument needed"
2221 + exit 1
2222 +fi
2223 +
2224 +exec \
2225 +env \
2226 +INSDESTTREE="/usr/include/" \
2227 +doins "$@"
2228
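The new doheader helper is a thin shim over doins: it re-executes doins with INSDESTTREE set to /usr/include/, so option handling (including -r for directories) comes for free. A usage sketch with hypothetical header names, valid in EAPI 5 and later per ___eapi_has_doheader:

    src_install() {
        doheader include/libfoo.h
        # -r is forwarded to doins and recurses into directories
        doheader -r include/libfoo/
    }
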
2229 diff --git a/bin/ebuild-helpers/dohtml b/bin/ebuild-helpers/dohtml
2230 index 630629a..75d3d00 100755
2231 --- a/bin/ebuild-helpers/dohtml
2232 +++ b/bin/ebuild-helpers/dohtml
2233 @@ -1,14 +1,19 @@
2234 #!/bin/bash
2235 -# Copyright 2009-2010 Gentoo Foundation
2236 +# Copyright 2009-2013 Gentoo Foundation
2237 # Distributed under the terms of the GNU General Public License v2
2238
2239 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2240
2241 PORTAGE_BIN_PATH=${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}
2242 PORTAGE_PYM_PATH=${PORTAGE_PYM_PATH:-/usr/lib/portage/pym}
2243 -PYTHONPATH=$PORTAGE_PYM_PATH${PYTHONPATH:+:}$PYTHONPATH \
2244 +# Use safe cwd, avoiding unsafe import for bug #469338.
2245 +export __PORTAGE_HELPER_CWD=${PWD}
2246 +cd "${PORTAGE_PYM_PATH}"
2247 +PYTHONPATH=${PORTAGE_PYTHONPATH:-${PORTAGE_PYM_PATH}} \
2248 "${PORTAGE_PYTHON:-/usr/bin/python}" "$PORTAGE_BIN_PATH/dohtml.py" "$@"
2249
2250 ret=$?
2251 -[[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2252 +# Restore cwd for display by __helpers_die
2253 +cd "${__PORTAGE_HELPER_CWD}"
2254 +[[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2255 exit $ret
2256
2257 diff --git a/bin/ebuild-helpers/doinfo b/bin/ebuild-helpers/doinfo
2258 index 8fd7d45..2edbdc5 100755
2259 --- a/bin/ebuild-helpers/doinfo
2260 +++ b/bin/ebuild-helpers/doinfo
2261 @@ -1,19 +1,20 @@
2262 #!/bin/bash
2263 -# Copyright 1999-2011 Gentoo Foundation
2264 +# Copyright 1999-2012 Gentoo Foundation
2265 # Distributed under the terms of the GNU General Public License v2
2266
2267 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2268
2269 if [[ -z $1 ]] ; then
2270 - helpers_die "${0##*/}: at least one argument needed"
2271 - exit 1
2272 + __helpers_die "${0##*/}: at least one argument needed"
2273 + exit 1
2274 fi
2275
2276 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2277 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
2278 +if ! ___eapi_has_prefix_variables; then
2279 + ED=${D}
2280 +fi
2281
2282 if [[ ! -d ${ED}usr/share/info ]] ; then
2283 - install -d "${ED}usr/share/info" || { helpers_die "${0##*/}: failed to install ${ED}usr/share/info"; exit 1; }
2284 + install -d "${ED}usr/share/info" || { __helpers_die "${0##*/}: failed to install ${ED}usr/share/info"; exit 1; }
2285 fi
2286
2287 install -m0644 "$@" "${ED}usr/share/info"
2288 @@ -22,6 +23,6 @@ if [ $rval -ne 0 ] ; then
2289 for x in "$@" ; do
2290 [ -e "$x" ] || echo "!!! ${0##*/}: $x does not exist" 1>&2
2291 done
2292 - helpers_die "${0##*/} failed"
2293 + __helpers_die "${0##*/} failed"
2294 fi
2295 exit $rval
2296
2297 diff --git a/bin/ebuild-helpers/doinitd b/bin/ebuild-helpers/doinitd
2298 index b711e19..476b858 100755
2299 --- a/bin/ebuild-helpers/doinitd
2300 +++ b/bin/ebuild-helpers/doinitd
2301 @@ -4,7 +4,7 @@
2302
2303 if [[ $# -lt 1 ]] ; then
2304 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2305 - helpers_die "${0##*/}: at least one argument needed"
2306 + __helpers_die "${0##*/}: at least one argument needed"
2307 exit 1
2308 fi
2309
2310
2311 diff --git a/bin/ebuild-helpers/doins b/bin/ebuild-helpers/doins
2312 index 443bfdb..c60e057 100755
2313 --- a/bin/ebuild-helpers/doins
2314 +++ b/bin/ebuild-helpers/doins
2315 @@ -1,14 +1,17 @@
2316 #!/bin/bash
2317 -# Copyright 1999-2011 Gentoo Foundation
2318 +# Copyright 1999-2012 Gentoo Foundation
2319 # Distributed under the terms of the GNU General Public License v2
2320
2321 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2322
2323 -if [[ ${0##*/} == dodoc ]] ; then
2324 +helper=${__PORTAGE_HELPER:-${0##*/}}
2325 +
2326 +if [[ ${helper} == dodoc ]] ; then
2327 if [ $# -eq 0 ] ; then
2328 # default_src_install may call dodoc with no arguments
2329 # when DOC is defined but empty, so simply return
2330 # sucessfully in this case.
2331 + eqawarn "QA Notice: dodoc called with no arguments"
2332 exit 0
2333 fi
2334 export INSOPTIONS=-m0644
2335 @@ -16,7 +19,7 @@ if [[ ${0##*/} == dodoc ]] ; then
2336 fi
2337
2338 if [ $# -lt 1 ] ; then
2339 - helpers_die "${0##*/}: at least one argument needed"
2340 + __helpers_die "${helper}: at least one argument needed"
2341 exit 1
2342 fi
2343
2344 @@ -27,28 +30,26 @@ else
2345 DOINSRECUR=n
2346 fi
2347
2348 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2349 - case "$EAPI" in 0|1|2) export ED="${D}" ;; esac
2350 +if ! ___eapi_has_prefix_variables; then
2351 + export ED="${D}"
2352 +fi
2353
2354 if [[ ${INSDESTTREE#${ED}} != "${INSDESTTREE}" ]]; then
2355 - vecho "-------------------------------------------------------" 1>&2
2356 - vecho "You should not use \${D} or \${ED} with helpers." 1>&2
2357 - vecho " --> ${INSDESTTREE}" 1>&2
2358 - vecho "-------------------------------------------------------" 1>&2
2359 - helpers_die "${0##*/} used with \${D} or \${ED}"
2360 + __vecho "-------------------------------------------------------" 1>&2
2361 + __vecho "You should not use \${D} or \${ED} with helpers." 1>&2
2362 + __vecho " --> ${INSDESTTREE}" 1>&2
2363 + __vecho "-------------------------------------------------------" 1>&2
2364 + __helpers_die "${helper} used with \${D} or \${ED}"
2365 exit 1
2366 fi
2367
2368 -case "$EAPI" in
2369 - 0|1|2|3|3_pre2)
2370 - PRESERVE_SYMLINKS=n
2371 - ;;
2372 - *)
2373 - PRESERVE_SYMLINKS=y
2374 - ;;
2375 -esac
2376 +if ___eapi_doins_and_newins_preserve_symlinks; then
2377 + PRESERVE_SYMLINKS=y
2378 +else
2379 + PRESERVE_SYMLINKS=n
2380 +fi
2381
2382 -export TMP=$T/.doins_tmp
2383 +export TMP=$(mktemp -d "${T}/.doins_tmp_XXXXXX")
2384 # Use separate directories to avoid potential name collisions.
2385 mkdir -p "$TMP"/{1,2}
2386
2387 @@ -79,7 +80,7 @@ _doins() {
2388 install ${INSOPTIONS} "${mysrc}" "${ED}${INSDESTTREE}/${mydir}"
2389 rval=$?
2390 [[ -n ${cleanup} ]] && rm -f "${cleanup}"
2391 - [ $rval -ne 0 ] && echo "!!! ${0##*/}: $mysrc does not exist" 1>&2
2392 + [ $rval -ne 0 ] && echo "!!! ${helper}: $mysrc does not exist" 1>&2
2393 return $rval
2394 }
2395
2396 @@ -99,8 +100,8 @@ for x in "$@" ; do
2397 if [[ $PRESERVE_SYMLINKS = n && -d $x ]] || \
2398 [[ $PRESERVE_SYMLINKS = y && -d $x && ! -L $x ]] ; then
2399 if [ "${DOINSRECUR}" == "n" ] ; then
2400 - if [[ ${0##*/} == dodoc ]] ; then
2401 - echo "!!! ${0##*/}: $x is a directory" 1>&2
2402 + if [[ ${helper} == dodoc ]] ; then
2403 + echo "!!! ${helper}: $x is a directory" 1>&2
2404 ((failed|=1))
2405 fi
2406 continue
2407 @@ -155,4 +156,4 @@ for x in "$@" ; do
2408 fi
2409 done
2410 rm -rf "$TMP"
2411 -[[ $failed -ne 0 || $success -eq 0 ]] && { helpers_die "${0##*/} failed"; exit 1; } || exit 0
2412 +[[ $failed -ne 0 || $success -eq 0 ]] && { __helpers_die "${helper} failed"; exit 1; } || exit 0
2413
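Two patterns introduced in doins recur throughout this commit: the helper reports errors under the name given in __PORTAGE_HELPER (so that dodoc and the new* shims can delegate to doins while still dying under their own names), and the scratch directory is created with mktemp -d rather than a fixed ${T}/.doins_tmp path, so concurrent invocations cannot collide. A condensed sketch of both, in the same environment the helpers already assume:

    # report under the delegating helper's name if one was set
    helper=${__PORTAGE_HELPER:-${0##*/}}

    # unpredictable, per-invocation temp dir under ${T};
    # doins removes it with rm -rf when it is done
    TMP=$(mktemp -d "${T}/.doins_tmp_XXXXXX")
    mkdir -p "${TMP}"/{1,2}
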
2414 diff --git a/bin/ebuild-helpers/dolib b/bin/ebuild-helpers/dolib
2415 index 9af5418..fd92d7f 100755
2416 --- a/bin/ebuild-helpers/dolib
2417 +++ b/bin/ebuild-helpers/dolib
2418 @@ -1,11 +1,12 @@
2419 #!/bin/bash
2420 -# Copyright 1999-2011 Gentoo Foundation
2421 +# Copyright 1999-2012 Gentoo Foundation
2422 # Distributed under the terms of the GNU General Public License v2
2423
2424 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2425
2426 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2427 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
2428 +if ! ___eapi_has_prefix_variables; then
2429 + ED=${D}
2430 +fi
2431
2432 # Setup ABI cruft
2433 LIBDIR_VAR="LIBDIR_${ABI}"
2434 @@ -19,11 +20,11 @@ libdir="${ED}${DESTTREE}/${CONF_LIBDIR}"
2435
2436
2437 if [[ $# -lt 1 ]] ; then
2438 - helpers_die "${0##*/}: at least one argument needed"
2439 + __helpers_die "${0##*/}: at least one argument needed"
2440 exit 1
2441 fi
2442 if [[ ! -d ${libdir} ]] ; then
2443 - install -d "${libdir}" || { helpers_die "${0##*/}: failed to install ${libdir}"; exit 1; }
2444 + install -d "${libdir}" || { __helpers_die "${0##*/}: failed to install ${libdir}"; exit 1; }
2445 fi
2446
2447 ret=0
2448 @@ -42,5 +43,5 @@ for x in "$@" ; do
2449 ((ret|=$?))
2450 done
2451
2452 -[[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2453 +[[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2454 exit ${ret}
2455
2456 diff --git a/bin/ebuild-helpers/doman b/bin/ebuild-helpers/doman
2457 index b4047ce..d680859 100755
2458 --- a/bin/ebuild-helpers/doman
2459 +++ b/bin/ebuild-helpers/doman
2460 @@ -1,16 +1,17 @@
2461 #!/bin/bash
2462 -# Copyright 1999-2011 Gentoo Foundation
2463 +# Copyright 1999-2012 Gentoo Foundation
2464 # Distributed under the terms of the GNU General Public License v2
2465
2466 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2467
2468 if [[ $# -lt 1 ]] ; then
2469 - helpers_die "${0##*/}: at least one argument needed"
2470 + __helpers_die "${0##*/}: at least one argument needed"
2471 exit 1
2472 fi
2473
2474 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2475 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
2476 +if ! ___eapi_has_prefix_variables; then
2477 + ED=${D}
2478 +fi
2479
2480 i18n=""
2481
2482 @@ -58,10 +59,10 @@ for x in "$@" ; do
2483 ((ret|=1))
2484 fi
2485 else
2486 - vecho "doman: '${x}' is probably not a man page; skipping" 1>&2
2487 + __vecho "doman: '${x}' is probably not a man page; skipping" 1>&2
2488 ((ret|=1))
2489 fi
2490 done
2491
2492 -[[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2493 +[[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2494 exit ${ret}
2495
2496 diff --git a/bin/ebuild-helpers/domo b/bin/ebuild-helpers/domo
2497 index d994343..9a8dda3 100755
2498 --- a/bin/ebuild-helpers/domo
2499 +++ b/bin/ebuild-helpers/domo
2500 @@ -1,17 +1,18 @@
2501 #!/bin/bash
2502 -# Copyright 1999-2011 Gentoo Foundation
2503 +# Copyright 1999-2012 Gentoo Foundation
2504 # Distributed under the terms of the GNU General Public License v2
2505
2506 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2507
2508 mynum=${#}
2509 if [ ${mynum} -lt 1 ] ; then
2510 - helpers_die "${0}: at least one argument needed"
2511 + __helpers_die "${0}: at least one argument needed"
2512 exit 1
2513 fi
2514
2515 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2516 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
2517 +if ! ___eapi_has_prefix_variables; then
2518 + ED=${D}
2519 +fi
2520
2521 if [ ! -d "${ED}${DESTTREE}/share/locale" ] ; then
2522 install -d "${ED}${DESTTREE}/share/locale/"
2523 @@ -34,5 +35,5 @@ for x in "$@" ; do
2524 ((ret|=$?))
2525 done
2526
2527 -[[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2528 +[[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2529 exit $ret
2530
2531 diff --git a/bin/ebuild-helpers/dosbin b/bin/ebuild-helpers/dosbin
2532 index d101c8a..361ca83 100755
2533 --- a/bin/ebuild-helpers/dosbin
2534 +++ b/bin/ebuild-helpers/dosbin
2535 @@ -1,19 +1,20 @@
2536 #!/bin/bash
2537 -# Copyright 1999-2011 Gentoo Foundation
2538 +# Copyright 1999-2012 Gentoo Foundation
2539 # Distributed under the terms of the GNU General Public License v2
2540
2541 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2542
2543 if [[ $# -lt 1 ]] ; then
2544 - helpers_die "${0##*/}: at least one argument needed"
2545 + __helpers_die "${0##*/}: at least one argument needed"
2546 exit 1
2547 fi
2548
2549 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2550 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
2551 +if ! ___eapi_has_prefix_variables; then
2552 + ED=${D}
2553 +fi
2554
2555 if [[ ! -d ${ED}${DESTTREE}/sbin ]] ; then
2556 - install -d "${ED}${DESTTREE}/sbin" || { helpers_die "${0##*/}: failed to install ${ED}${DESTTREE}/sbin"; exit 2; }
2557 + install -d "${ED}${DESTTREE}/sbin" || { __helpers_die "${0##*/}: failed to install ${ED}${DESTTREE}/sbin"; exit 2; }
2558 fi
2559
2560 ret=0
2561 @@ -28,5 +29,5 @@ for x in "$@" ; do
2562 ((ret|=$?))
2563 done
2564
2565 -[[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2566 +[[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2567 exit ${ret}
2568
2569 diff --git a/bin/ebuild-helpers/dosed b/bin/ebuild-helpers/dosed
2570 index f202df7..7db0629 100755
2571 --- a/bin/ebuild-helpers/dosed
2572 +++ b/bin/ebuild-helpers/dosed
2573 @@ -1,14 +1,22 @@
2574 #!/bin/bash
2575 -# Copyright 1999-2011 Gentoo Foundation
2576 +# Copyright 1999-2012 Gentoo Foundation
2577 # Distributed under the terms of the GNU General Public License v2
2578
2579 +source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2580 +
2581 +if ! ___eapi_has_dosed; then
2582 + die "'${0##*/}' has been banned for EAPI '$EAPI'"
2583 + exit 1
2584 +fi
2585 +
2586 if [[ $# -lt 1 ]] ; then
2587 echo "!!! ${0##*/}: at least one argument needed" >&2
2588 exit 1
2589 fi
2590
2591 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2592 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
2593 +if ! ___eapi_has_prefix_variables; then
2594 + ED=${D}
2595 +fi
2596
2597 ret=0
2598 file_found=0
2599
2600 diff --git a/bin/ebuild-helpers/dosym b/bin/ebuild-helpers/dosym
2601 index 2489e22..649b100 100755
2602 --- a/bin/ebuild-helpers/dosym
2603 +++ b/bin/ebuild-helpers/dosym
2604 @@ -5,12 +5,13 @@
2605 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2606
2607 if [[ $# -ne 2 ]] ; then
2608 - helpers_die "${0##*/}: two arguments needed"
2609 + __helpers_die "${0##*/}: two arguments needed"
2610 exit 1
2611 fi
2612
2613 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2614 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
2615 +if ! ___eapi_has_prefix_variables; then
2616 + ED=${D}
2617 +fi
2618
2619 if [[ ${2} == */ ]] || \
2620 [[ -d ${ED}${2} && ! -L ${ED}${2} ]] ; then
2621 @@ -26,5 +27,5 @@ target="${1}"
2622 ln -snf "${target}" "${ED}${2}"
2623
2624 ret=$?
2625 -[[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2626 +[[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2627 exit $ret
2628
2629 diff --git a/bin/ebuild-helpers/ecompress b/bin/ebuild-helpers/ecompress
2630 index b61421b..71287b4 100755
2631 --- a/bin/ebuild-helpers/ecompress
2632 +++ b/bin/ebuild-helpers/ecompress
2633 @@ -5,7 +5,7 @@
2634 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2635
2636 if [[ -z $1 ]] ; then
2637 - helpers_die "${0##*/}: at least one argument needed"
2638 + __helpers_die "${0##*/}: at least one argument needed"
2639 exit 1
2640 fi
2641
2642 @@ -68,7 +68,7 @@ decompress_args() {
2643
2644 case $1 in
2645 --suffix)
2646 - [[ -n $2 ]] && vecho "${0##*/}: --suffix takes no additional arguments" 1>&2
2647 + [[ -n $2 ]] && __vecho "${0##*/}: --suffix takes no additional arguments" 1>&2
2648
2649 if [[ ! -e ${T}/.ecompress.suffix ]] ; then
2650 set -e
2651 @@ -93,7 +93,7 @@ case $1 in
2652 cat "${T}/.ecompress.suffix"
2653 ;;
2654 --bin)
2655 - [[ -n $2 ]] && vecho "${0##*/}: --bin takes no additional arguments" 1>&2
2656 + [[ -n $2 ]] && __vecho "${0##*/}: --bin takes no additional arguments" 1>&2
2657
2658 echo "${PORTAGE_COMPRESS} ${PORTAGE_COMPRESS_FLAGS}"
2659 ;;
2660 @@ -104,18 +104,18 @@ case $1 in
2661 >> "$x"
2662 ((ret|=$?))
2663 done
2664 - [[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2665 + [[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2666 exit $ret
2667 ;;
2668 --dequeue)
2669 - [[ -n $2 ]] && vecho "${0##*/}: --dequeue takes no additional arguments" 1>&2
2670 + [[ -n $2 ]] && __vecho "${0##*/}: --dequeue takes no additional arguments" 1>&2
2671 find "${D}" -name '*.ecompress.file' -print0 \
2672 | sed -e 's:\.ecompress\.file::g' \
2673 | ${XARGS} -0 ecompress
2674 find "${D}" -name '*.ecompress.file' -print0 | ${XARGS} -0 rm -f
2675 ;;
2676 --*)
2677 - helpers_die "${0##*/}: unknown arguments '$*'"
2678 + __helpers_die "${0##*/}: unknown arguments '$*'"
2679 exit 1
2680 ;;
2681 *)
2682 @@ -155,7 +155,7 @@ case $1 in
2683 # Finally, let's actually do some real work
2684 "${PORTAGE_COMPRESS}" ${PORTAGE_COMPRESS_FLAGS} "$@"
2685 ret=$?
2686 - [[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2687 + [[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2688 exit $ret
2689 ;;
2690 esac
2691
2692 diff --git a/bin/ebuild-helpers/ecompressdir b/bin/ebuild-helpers/ecompressdir
2693 index 6801a07..eca5888 100755
2694 --- a/bin/ebuild-helpers/ecompressdir
2695 +++ b/bin/ebuild-helpers/ecompressdir
2696 @@ -1,18 +1,21 @@
2697 #!/bin/bash
2698 -# Copyright 1999-2011 Gentoo Foundation
2699 +# Copyright 1999-2013 Gentoo Foundation
2700 # Distributed under the terms of the GNU General Public License v2
2701
2702 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/helper-functions.sh
2703
2704 if [[ -z $1 ]] ; then
2705 - helpers_die "${0##*/}: at least one argument needed"
2706 + __helpers_die "${0##*/}: at least one argument needed"
2707 exit 1
2708 fi
2709
2710 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2711 - case "$EAPI" in 0|1|2) ED=${D} EPREFIX= ;; esac
2712 +if ! ___eapi_has_prefix_variables; then
2713 + ED=${D} EPREFIX=
2714 +fi
2715
2716 -case $1 in
2717 +SIZE_LIMIT=''
2718 +while [[ $# -gt 0 ]] ; do
2719 + case $1 in
2720 --ignore)
2721 shift
2722 for skip in "$@" ; do
2723 @@ -27,45 +30,66 @@ case $1 in
2724 set -- "${@/#/${ED}}"
2725 ret=0
2726 for x in "$@" ; do
2727 - >> "$x"
2728 + # Stash the limit in the .dir file so we can reload it later.
2729 + printf "${SIZE_LIMIT}" > "${x}"
2730 ((ret|=$?))
2731 done
2732 - [[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2733 + [[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2734 exit $ret
2735 ;;
2736 --dequeue)
2737 - [[ -n $2 ]] && vecho "${0##*/}: --dequeue takes no additional arguments" 1>&2
2738 + [[ -n $2 ]] && __vecho "${0##*/}: --dequeue takes no additional arguments" 1>&2
2739 find "${ED}" -name '*.ecompress.dir' -print0 \
2740 | sed -e 's:\.ecompress\.dir::g' -e "s:${ED}:/:g" \
2741 | ${XARGS} -0 ecompressdir
2742 find "${ED}" -name '*.ecompress.skip' -print0 | ${XARGS} -0 rm -f
2743 exit 0
2744 ;;
2745 + --limit)
2746 + SIZE_LIMIT=$2
2747 + shift
2748 + ;;
2749 --*)
2750 - helpers_die "${0##*/}: unknown arguments '$*'"
2751 + __helpers_die "${0##*/}: unknown arguments '$*'"
2752 exit 1
2753 ;;
2754 -esac
2755 + *)
2756 + break
2757 + ;;
2758 + esac
2759 + shift
2760 +done
2761
2762 # figure out the new suffix
2763 suffix=$(ecompress --suffix)
2764
2765 -# funk_up_dir(action, suffix, binary)
2766 +# funk_up_dir(action, suffix, binary, [size_limit])
2767 # - action: compress or decompress
2768 # - suffix: the compression suffix to work with
2769 # - binary: the program to execute that'll compress/decompress
2770 +# - size_limit: if compressing, skip files smaller than this
2771 # The directory we act on is implied in the ${dir} variable
2772 funk_up_dir() {
2773 - local act=$1 suffix=$2 binary=$3
2774 + local act=$1 suffix=$2 binary=$3 size_limit=$4
2775
2776 local negate=""
2777 [[ ${act} == "compress" ]] && negate="!"
2778
2779 local ret=0
2780 # first we act on all the files
2781 - find "${dir}" -type f ${negate} -iname '*'${suffix} -print0 | ${XARGS} -0 ${binary}
2782 + local args=(
2783 + -type f
2784 + ${negate} -iname "*${suffix}"
2785 + )
2786 + [[ -n ${size_limit} ]] && args+=( -size "+${size_limit}c" )
2787 + find "${dir}" "${args[@]}" -print0 | ${XARGS} -0 ${binary}
2788 ((ret|=$?))
2789
2790 + # Repeat until nothing changes, in order to handle multiple
2791 + # levels of indirection (see bug #470916).
2792 + local -i indirection=0
2793 + while true ; do
2794 + local something_changed=
2795 while read -r -d $'\0' brokenlink ; do
2796 [[ -e ${brokenlink} ]] && continue
2797 olddest=$(readlink "${brokenlink}")
2798 @@ -91,12 +115,22 @@ funk_up_dir() {
2799 else
2800 [[ -f "${dir}/${brokenlink%/*}/${newdest}" ]] || continue
2801 fi
2802 + something_changed=${brokenlink}
2803 rm -f "${brokenlink}"
2804 [[ ${act} == "compress" ]] \
2805 && ln -snf "${newdest}" "${brokenlink}${suffix}" \
2806 || ln -snf "${newdest}" "${brokenlink%${suffix}}"
2807 ((ret|=$?))
2808 done < <(find "${dir}" -type l -print0)
2809 + [[ -n ${something_changed} ]] || break
2810 + (( indirection++ ))
2811 + if (( indirection >= 100 )) ; then
2812 + # Protect against possibility of a bug triggering an endless loop.
2813 + eerror "ecompressdir: too many levels of indirection for" \
2814 + "'${actual_dir#${ED}}/${something_changed#./}'"
2815 + break
2816 + fi
2817 + done
2818 return ${ret}
2819 }
2820
2821 @@ -133,13 +167,13 @@ decompressors=(
2822 ".lzma" "unxz -f"
2823 )
2824
2825 -multijob_init
2826 +__multijob_init
2827
2828 for dir in "$@" ; do
2829 dir=${dir#/}
2830 dir="${ED}${dir}"
2831 if [[ ! -d ${dir} ]] ; then
2832 - vecho "${0##*/}: /${dir#${ED}} does not exist!"
2833 + __vecho "${0##*/}: /${dir#${ED}} does not exist!"
2834 continue
2835 fi
2836 cd "${dir}"
2837 @@ -151,24 +185,25 @@ for dir in "$@" ; do
2838
2839 # since we've been requested to compress the whole dir,
2840 # delete any individual queued requests
2841 + size_limit=${SIZE_LIMIT:-$(<"${actual_dir}.ecompress.dir")}
2842 rm -f "${actual_dir}.ecompress.dir"
2843 find "${dir}" -type f -name '*.ecompress.file' -print0 | ${XARGS} -0 rm -f
2844
2845 # not uncommon for packages to compress doc files themselves
2846 - for (( d = 0; d < ${#decompressors[@]}; d += 2 )) ; do
2847 + for (( i = 0; i < ${#decompressors[@]}; i += 2 )) ; do
2848 # It's faster to parallelize at this stage than to try to
2849 # parallelize the compressors. This is because the find|xargs
2850 # ends up launching less compressors overall, so the overhead
2851 # of forking children ends up dominating.
2852 (
2853 - multijob_child_init
2854 + __multijob_child_init
2855 funk_up_dir "decompress" "${decompressors[i]}" "${decompressors[i+1]}"
2856 ) &
2857 - multijob_post_fork
2858 + __multijob_post_fork
2859 : $(( ret |= $? ))
2860 done
2861
2862 - multijob_finish
2863 + __multijob_finish
2864 : $(( ret |= $? ))
2865
2866 # forcibly break all hard links as some compressors whine about it
2867 @@ -177,8 +212,8 @@ for dir in "$@" ; do
2868
2869 # now lets do our work
2870 if [[ -n ${suffix} ]] ; then
2871 - vecho "${0##*/}: $(ecompress --bin) /${actual_dir#${ED}}"
2872 - funk_up_dir "compress" "${suffix}" "ecompress"
2873 + __vecho "${0##*/}: $(ecompress --bin) /${actual_dir#${ED}}"
2874 + funk_up_dir "compress" "${suffix}" "ecompress" "${size_limit}"
2875 : $(( ret |= $? ))
2876 fi
2877
2878 @@ -186,5 +221,5 @@ for dir in "$@" ; do
2879 restore_skip_dirs
2880 done
2881
2882 -[[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2883 +[[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2884 exit ${ret}
2885
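ecompressdir now accepts a --limit SIZE option: the limit is stashed in the per-directory .ecompress.dir marker when the directory is queued, read back when the directory is actually processed, and turned into a find -size filter so that files at or below the threshold are never handed to the compressor. A sketch of the effective filter for a hypothetical 128-byte threshold:

    # the stored limit applies unless --limit was given on this invocation
    size_limit=${SIZE_LIMIT:-$(<"${actual_dir}.ecompress.dir")}

    # compress pass: skip already-compressed files and anything at or
    # below the size threshold (here 128 bytes)
    find "${dir}" -type f ! -iname "*${suffix}" -size "+${size_limit}c" -print0 \
        | ${XARGS} -0 ecompress
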
2886 diff --git a/bin/ebuild-helpers/emake b/bin/ebuild-helpers/emake
2887 index d842781..69d836f 100755
2888 --- a/bin/ebuild-helpers/emake
2889 +++ b/bin/ebuild-helpers/emake
2890 @@ -24,5 +24,5 @@ fi
2891
2892 ${MAKE:-make} ${MAKEOPTS} ${EXTRA_EMAKE} "$@"
2893 ret=$?
2894 -[[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2895 +[[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2896 exit $ret
2897
2898 diff --git a/bin/ebuild-helpers/fowners b/bin/ebuild-helpers/fowners
2899 index a213c9e..cee4108 100755
2900 --- a/bin/ebuild-helpers/fowners
2901 +++ b/bin/ebuild-helpers/fowners
2902 @@ -4,8 +4,9 @@
2903
2904 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2905
2906 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2907 - case "$EAPI" in 0|1|2) EPREFIX= ED=${D} ;; esac
2908 +if ! ___eapi_has_prefix_variables; then
2909 + EPREFIX= ED=${D}
2910 +fi
2911
2912 # we can't prefix all arguments because
2913 # chown takes random options
2914 @@ -13,10 +14,5 @@ slash="/"
2915 chown "${@/#${slash}/${ED}${slash}}"
2916 ret=$?
2917
2918 -if [[ ${ret} != 0 && -n ${EPREFIX} && ${EUID} != 0 ]] ; then
2919 - ewarn "fowners failure ignored in Prefix with non-privileged user"
2920 - exit 0
2921 -fi
2922 -
2923 -[[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2924 +[[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2925 exit $ret
2926
2927 diff --git a/bin/ebuild-helpers/fperms b/bin/ebuild-helpers/fperms
2928 index a2f77ea..d854ebb 100755
2929 --- a/bin/ebuild-helpers/fperms
2930 +++ b/bin/ebuild-helpers/fperms
2931 @@ -1,16 +1,17 @@
2932 #!/bin/bash
2933 -# Copyright 1999-2011 Gentoo Foundation
2934 +# Copyright 1999-2012 Gentoo Foundation
2935 # Distributed under the terms of the GNU General Public License v2
2936
2937 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2938
2939 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
2940 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
2941 +if ! ___eapi_has_prefix_variables; then
2942 + ED=${D}
2943 +fi
2944
2945 # we can't prefix all arguments because
2946 # chmod takes random options
2947 slash="/"
2948 chmod "${@/#${slash}/${ED}${slash}}"
2949 ret=$?
2950 -[[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
2951 +[[ $ret -ne 0 ]] && __helpers_die "${0##*/} failed"
2952 exit $ret
2953
2954 diff --git a/bin/ebuild-helpers/keepdir b/bin/ebuild-helpers/keepdir
2955 new file mode 100755
2956 index 0000000..bec2feb
2957 --- /dev/null
2958 +++ b/bin/ebuild-helpers/keepdir
2959 @@ -0,0 +1,20 @@
2960 +#!/bin/bash
2961 +# Copyright 1999-2013 Gentoo Foundation
2962 +# Distributed under the terms of the GNU General Public License v2
2963 +
2964 +source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2965 +
2966 +if ! ___eapi_has_prefix_variables; then
2967 + ED=${D}
2968 +fi
2969 +
2970 +dodir "$@"
2971 +ret=$?
2972 +
2973 +for x in "$@"; do
2974 + >> "${ED}${x}/.keep_${CATEGORY}_${PN}-${SLOT%/*}" || \
2975 + { echo "!!! ${0##*/}: cannot write .keep in ${ED}${x}" 1>&2; ret=1; }
2976 +done
2977 +
2978 +[[ ${ret} -ne 0 ]] && __helpers_die "${0##*/} failed"
2979 +exit ${ret}
2980
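The new keepdir helper simply calls dodir and then drops an empty .keep_${CATEGORY}_${PN}-${SLOT%/*} file into each directory, so that otherwise-empty directories survive merging. Illustrative use from a hypothetical app-misc/foo package with SLOT="0":

    src_install() {
        keepdir /var/lib/foo /var/log/foo
        # creates ${ED}var/lib/foo/.keep_app-misc_foo-0 and
        #         ${ED}var/log/foo/.keep_app-misc_foo-0
    }
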
2981 diff --git a/bin/ebuild-helpers/newbin b/bin/ebuild-helpers/newbin
2982 deleted file mode 100755
2983 index bf98744..0000000
2984 --- a/bin/ebuild-helpers/newbin
2985 +++ /dev/null
2986 @@ -1,22 +0,0 @@
2987 -#!/bin/bash
2988 -# Copyright 1999-2011 Gentoo Foundation
2989 -# Distributed under the terms of the GNU General Public License v2
2990 -
2991 -source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
2992 -
2993 -if [[ -z ${T} ]] || [[ -z ${2} ]] ; then
2994 - helpers_die "${0##*/}: Need two arguments, old file and new file"
2995 - exit 1
2996 -fi
2997 -
2998 -if [ ! -e "$1" ] ; then
2999 - helpers_die "!!! ${0##*/}: $1 does not exist"
3000 - exit 1
3001 -fi
3002 -
3003 -(($#>2)) && \
3004 - eqawarn "QA Notice: ${0##*/} called with more than 2 arguments: ${@:3}"
3005 -
3006 -rm -rf "${T}/${2}" && \
3007 -cp -f "${1}" "${T}/${2}" && \
3008 -exec dobin "${T}/${2}"
3009
3010 diff --git a/bin/ebuild-helpers/newbin b/bin/ebuild-helpers/newbin
3011 new file mode 120000
3012 index 0000000..59a0db2
3013 --- /dev/null
3014 +++ b/bin/ebuild-helpers/newbin
3015 @@ -0,0 +1 @@
3016 +newins
3017 \ No newline at end of file
3018
3019 diff --git a/bin/ebuild-helpers/newconfd b/bin/ebuild-helpers/newconfd
3020 deleted file mode 100755
3021 index fa3710d..0000000
3022 --- a/bin/ebuild-helpers/newconfd
3023 +++ /dev/null
3024 @@ -1,22 +0,0 @@
3025 -#!/bin/bash
3026 -# Copyright 1999-2011 Gentoo Foundation
3027 -# Distributed under the terms of the GNU General Public License v2
3028 -
3029 -source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3030 -
3031 -if [[ -z ${T} ]] || [[ -z ${2} ]] ; then
3032 - helpers_die "${0##*/}: Need two arguments, old file and new file"
3033 - exit 1
3034 -fi
3035 -
3036 -if [ ! -e "$1" ] ; then
3037 - helpers_die "!!! ${0##*/}: $1 does not exist"
3038 - exit 1
3039 -fi
3040 -
3041 -(($#>2)) && \
3042 - eqawarn "QA Notice: ${0##*/} called with more than 2 arguments: ${@:3}"
3043 -
3044 -rm -rf "${T}/${2}" && \
3045 -cp -f "${1}" "${T}/${2}" && \
3046 -exec doconfd "${T}/${2}"
3047
3048 diff --git a/bin/ebuild-helpers/newconfd b/bin/ebuild-helpers/newconfd
3049 new file mode 120000
3050 index 0000000..59a0db2
3051 --- /dev/null
3052 +++ b/bin/ebuild-helpers/newconfd
3053 @@ -0,0 +1 @@
3054 +newins
3055 \ No newline at end of file
3056
3057 diff --git a/bin/ebuild-helpers/newdoc b/bin/ebuild-helpers/newdoc
3058 deleted file mode 100755
3059 index df6fb1d..0000000
3060 --- a/bin/ebuild-helpers/newdoc
3061 +++ /dev/null
3062 @@ -1,22 +0,0 @@
3063 -#!/bin/bash
3064 -# Copyright 1999-2011 Gentoo Foundation
3065 -# Distributed under the terms of the GNU General Public License v2
3066 -
3067 -source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3068 -
3069 -if [[ -z ${T} ]] || [[ -z ${2} ]] ; then
3070 - helpers_die "${0##*/}: Need two arguments, old file and new file"
3071 - exit 1
3072 -fi
3073 -
3074 -if [ ! -e "$1" ] ; then
3075 - helpers_die "!!! ${0##*/}: $1 does not exist"
3076 - exit 1
3077 -fi
3078 -
3079 -(($#>2)) && \
3080 - eqawarn "QA Notice: ${0##*/} called with more than 2 arguments: ${@:3}"
3081 -
3082 -rm -rf "${T}/${2}" && \
3083 -cp -f "${1}" "${T}/${2}" && \
3084 -exec dodoc "${T}/${2}"
3085
3086 diff --git a/bin/ebuild-helpers/newdoc b/bin/ebuild-helpers/newdoc
3087 new file mode 120000
3088 index 0000000..59a0db2
3089 --- /dev/null
3090 +++ b/bin/ebuild-helpers/newdoc
3091 @@ -0,0 +1 @@
3092 +newins
3093 \ No newline at end of file
3094
3095 diff --git a/bin/ebuild-helpers/newenvd b/bin/ebuild-helpers/newenvd
3096 deleted file mode 100755
3097 index c54af05..0000000
3098 --- a/bin/ebuild-helpers/newenvd
3099 +++ /dev/null
3100 @@ -1,22 +0,0 @@
3101 -#!/bin/bash
3102 -# Copyright 1999-2011 Gentoo Foundation
3103 -# Distributed under the terms of the GNU General Public License v2
3104 -
3105 -source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3106 -
3107 -if [[ -z ${T} ]] || [[ -z ${2} ]] ; then
3108 - helpers_die "${0##*/}: Need two arguments, old file and new file"
3109 - exit 1
3110 -fi
3111 -
3112 -if [ ! -e "$1" ] ; then
3113 - helpers_die "!!! ${0##*/}: $1 does not exist"
3114 - exit 1
3115 -fi
3116 -
3117 -(($#>2)) && \
3118 - eqawarn "QA Notice: ${0##*/} called with more than 2 arguments: ${@:3}"
3119 -
3120 -rm -rf "${T}/${2}" && \
3121 -cp -f "${1}" "${T}/${2}" && \
3122 -exec doenvd "${T}/${2}"
3123
3124 diff --git a/bin/ebuild-helpers/newenvd b/bin/ebuild-helpers/newenvd
3125 new file mode 120000
3126 index 0000000..59a0db2
3127 --- /dev/null
3128 +++ b/bin/ebuild-helpers/newenvd
3129 @@ -0,0 +1 @@
3130 +newins
3131 \ No newline at end of file
3132
3133 diff --git a/bin/ebuild-helpers/newexe b/bin/ebuild-helpers/newexe
3134 deleted file mode 100755
3135 index 9bcf64b..0000000
3136 --- a/bin/ebuild-helpers/newexe
3137 +++ /dev/null
3138 @@ -1,22 +0,0 @@
3139 -#!/bin/bash
3140 -# Copyright 1999-2011 Gentoo Foundation
3141 -# Distributed under the terms of the GNU General Public License v2
3142 -
3143 -source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3144 -
3145 -if [[ -z ${T} ]] || [[ -z ${2} ]] ; then
3146 - helpers_die "${0##*/}: Need two arguments, old file and new file"
3147 - exit 1
3148 -fi
3149 -
3150 -if [ ! -e "$1" ] ; then
3151 - helpers_die "!!! ${0##*/}: $1 does not exist"
3152 - exit 1
3153 -fi
3154 -
3155 -(($#>2)) && \
3156 - eqawarn "QA Notice: ${0##*/} called with more than 2 arguments: ${@:3}"
3157 -
3158 -rm -rf "${T}/${2}" && \
3159 -cp -f "${1}" "${T}/${2}" && \
3160 -exec doexe "${T}/${2}"
3161
3162 diff --git a/bin/ebuild-helpers/newexe b/bin/ebuild-helpers/newexe
3163 new file mode 120000
3164 index 0000000..59a0db2
3165 --- /dev/null
3166 +++ b/bin/ebuild-helpers/newexe
3167 @@ -0,0 +1 @@
3168 +newins
3169 \ No newline at end of file
3170
3171 diff --git a/bin/ebuild-helpers/newheader b/bin/ebuild-helpers/newheader
3172 new file mode 120000
3173 index 0000000..59a0db2
3174 --- /dev/null
3175 +++ b/bin/ebuild-helpers/newheader
3176 @@ -0,0 +1 @@
3177 +newins
3178 \ No newline at end of file
3179
3180 diff --git a/bin/ebuild-helpers/newinitd b/bin/ebuild-helpers/newinitd
3181 deleted file mode 100755
3182 index 03bbe68..0000000
3183 --- a/bin/ebuild-helpers/newinitd
3184 +++ /dev/null
3185 @@ -1,22 +0,0 @@
3186 -#!/bin/bash
3187 -# Copyright 1999-2011 Gentoo Foundation
3188 -# Distributed under the terms of the GNU General Public License v2
3189 -
3190 -source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3191 -
3192 -if [[ -z ${T} ]] || [[ -z ${2} ]] ; then
3193 - helpers_die "${0##*/}: Need two arguments, old file and new file"
3194 - exit 1
3195 -fi
3196 -
3197 -if [ ! -e "$1" ] ; then
3198 - helpers_die "!!! ${0##*/}: $1 does not exist"
3199 - exit 1
3200 -fi
3201 -
3202 -(($#>2)) && \
3203 - eqawarn "QA Notice: ${0##*/} called with more than 2 arguments: ${@:3}"
3204 -
3205 -rm -rf "${T}/${2}" && \
3206 -cp -f "${1}" "${T}/${2}" && \
3207 -exec doinitd "${T}/${2}"
3208
3209 diff --git a/bin/ebuild-helpers/newinitd b/bin/ebuild-helpers/newinitd
3210 new file mode 120000
3211 index 0000000..59a0db2
3212 --- /dev/null
3213 +++ b/bin/ebuild-helpers/newinitd
3214 @@ -0,0 +1 @@
3215 +newins
3216 \ No newline at end of file
3217
3218 diff --git a/bin/ebuild-helpers/newins b/bin/ebuild-helpers/newins
3219 index adf2d80..0335985 100755
3220 --- a/bin/ebuild-helpers/newins
3221 +++ b/bin/ebuild-helpers/newins
3222 @@ -1,38 +1,57 @@
3223 #!/bin/bash
3224 -# Copyright 1999-2011 Gentoo Foundation
3225 +# Copyright 1999-2012 Gentoo Foundation
3226 # Distributed under the terms of the GNU General Public License v2
3227
3228 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3229
3230 +helper=${0##*/}
3231 +
3232 if [[ -z ${T} ]] || [[ -z ${2} ]] ; then
3233 - helpers_die "${0##*/}: Need two arguments, old file and new file"
3234 + __helpers_die "${helper}: Need two arguments, old file and new file"
3235 exit 1
3236 fi
3237
3238 -if [ ! -e "$1" ] ; then
3239 - helpers_die "!!! ${0##*/}: $1 does not exist"
3240 - exit 1
3241 +(($#>2)) && \
3242 + eqawarn "QA Notice: ${helper} called with more than 2 arguments: ${@:3}"
3243 +
3244 +stdin=
3245 +if ___eapi_newins_supports_reading_from_standard_input && [[ $1 == "-" ]]; then
3246 + stdin=yes
3247 fi
3248
3249 -(($#>2)) && \
3250 - eqawarn "QA Notice: ${0##*/} called with more than 2 arguments: ${@:3}"
3251 -
3252 -rm -rf "${T}/${2}" || exit $?
3253 -case "$EAPI" in
3254 - 0|1|2|3|3_pre2)
3255 - cp "$1" "$T/$2" || exit $?
3256 - ;;
3257 - *)
3258 - cp -P "$1" "$T/$2"
3259 - ret=$?
3260 - if [[ $ret -ne 0 ]] ; then
3261 - helpers_die "${0##*/} failed"
3262 - exit $ret
3263 +TMP=$(mktemp -d "${T}/.newins_tmp_XXXXXX")
3264 +trap 'rm -rf "${TMP}"' EXIT
3265 +
3266 +if [[ ${stdin} ]] ; then
3267 + if [[ -t 0 ]] ; then
3268 + __helpers_die "!!! ${helper}: Input is from a terminal"
3269 + exit 1
3270 + fi
3271 + cat > "${TMP}/$2"
3272 + ret=$?
3273 +else
3274 + if [[ ! -e $1 ]] ; then
3275 + __helpers_die "!!! ${helper}: $1 does not exist"
3276 + exit 1
3277 + fi
3278 +
3279 + cp_args="-f"
3280 + if [[ ${helper} == newins ]] ; then
3281 + if ___eapi_doins_and_newins_preserve_symlinks; then
3282 + cp_args+=" -P"
3283 fi
3284 - ;;
3285 -esac
3286 -doins "${T}/${2}"
3287 + fi
3288 +
3289 + cp ${cp_args} "$1" "${TMP}/$2"
3290 + ret=$?
3291 +fi
3292 +
3293 +if [[ ${ret} -ne 0 ]] ; then
3294 + __helpers_die "${0##*/} failed"
3295 + exit ${ret}
3296 +fi
3297 +
3298 +do${helper#new} "${TMP}/$2"
3299 ret=$?
3300 -rm -rf "${T}/${2}"
3301 -[[ $ret -ne 0 ]] && helpers_die "${0##*/} failed"
3302 +[[ $ret -ne 0 ]] && __helpers_die "${helper} failed"
3303 exit $ret
3304
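The rewritten newins above is now the single implementation behind the new* helpers: the deleted scripts become symlinks to it, helper=${0##*/} recovers the name it was invoked under, and do${helper#new} forwards to the matching install helper (newexe -> doexe, newman -> doman, and so on). A minimal sketch of that dispatch-by-name pattern, assuming the do* helpers are available on PATH:

    #!/bin/bash
    # Sketch: one script, many symlinks. Installed as "newexe", "newman",
    # ... (all links to the same file), each invocation copies the source
    # under its new name and forwards to the matching "do<thing>" helper.
    helper=${0##*/}                 # e.g. "newexe" when run via that symlink
    if [[ -z $2 ]] ; then
        echo "${helper}: need two arguments, old file and new file" >&2
        exit 1
    fi
    tmp=$(mktemp -d) || exit 1
    trap 'rm -rf "${tmp}"' EXIT
    cp -f -- "$1" "${tmp}/$2" || exit 1
    "do${helper#new}" "${tmp}/$2"   # newexe -> doexe, newman -> doman, ...
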
3305 diff --git a/bin/ebuild-helpers/newlib.a b/bin/ebuild-helpers/newlib.a
3306 deleted file mode 100755
3307 index 7ff8195..0000000
3308 --- a/bin/ebuild-helpers/newlib.a
3309 +++ /dev/null
3310 @@ -1,22 +0,0 @@
3311 -#!/bin/bash
3312 -# Copyright 1999-2011 Gentoo Foundation
3313 -# Distributed under the terms of the GNU General Public License v2
3314 -
3315 -source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3316 -
3317 -if [[ -z ${T} ]] || [[ -z ${2} ]] ; then
3318 - helpers_die "${0##*/}: Need two arguments, old file and new file"
3319 - exit 1
3320 -fi
3321 -
3322 -if [ ! -e "$1" ] ; then
3323 - helpers_die "!!! ${0##*/}: $1 does not exist"
3324 - exit 1
3325 -fi
3326 -
3327 -(($#>2)) && \
3328 - eqawarn "QA Notice: ${0##*/} called with more than 2 arguments: ${@:3}"
3329 -
3330 -rm -rf "${T}/${2}" && \
3331 -cp -f "${1}" "${T}/${2}" && \
3332 -exec dolib.a "${T}/${2}"
3333
3334 diff --git a/bin/ebuild-helpers/newlib.a b/bin/ebuild-helpers/newlib.a
3335 new file mode 120000
3336 index 0000000..59a0db2
3337 --- /dev/null
3338 +++ b/bin/ebuild-helpers/newlib.a
3339 @@ -0,0 +1 @@
3340 +newins
3341 \ No newline at end of file
3342
3343 diff --git a/bin/ebuild-helpers/newlib.so b/bin/ebuild-helpers/newlib.so
3344 deleted file mode 100755
3345 index fd4c097..0000000
3346 --- a/bin/ebuild-helpers/newlib.so
3347 +++ /dev/null
3348 @@ -1,22 +0,0 @@
3349 -#!/bin/bash
3350 -# Copyright 1999-2011 Gentoo Foundation
3351 -# Distributed under the terms of the GNU General Public License v2
3352 -
3353 -source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3354 -
3355 -if [[ -z ${T} ]] || [[ -z ${2} ]] ; then
3356 - helpers_die "${0##*/}: Need two arguments, old file and new file"
3357 - exit 1
3358 -fi
3359 -
3360 -if [ ! -e "$1" ] ; then
3361 - helpers_die "!!! ${0##*/}: $1 does not exist"
3362 - exit 1
3363 -fi
3364 -
3365 -(($#>2)) && \
3366 - eqawarn "QA Notice: ${0##*/} called with more than 2 arguments: ${@:3}"
3367 -
3368 -rm -rf "${T}/${2}" && \
3369 -cp -f "${1}" "${T}/${2}" && \
3370 -exec dolib.so "${T}/${2}"
3371
3372 diff --git a/bin/ebuild-helpers/newlib.so b/bin/ebuild-helpers/newlib.so
3373 new file mode 120000
3374 index 0000000..59a0db2
3375 --- /dev/null
3376 +++ b/bin/ebuild-helpers/newlib.so
3377 @@ -0,0 +1 @@
3378 +newins
3379 \ No newline at end of file
3380
3381 diff --git a/bin/ebuild-helpers/newman b/bin/ebuild-helpers/newman
3382 deleted file mode 100755
3383 index 889e0f9..0000000
3384 --- a/bin/ebuild-helpers/newman
3385 +++ /dev/null
3386 @@ -1,22 +0,0 @@
3387 -#!/bin/bash
3388 -# Copyright 1999-2011 Gentoo Foundation
3389 -# Distributed under the terms of the GNU General Public License v2
3390 -
3391 -source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3392 -
3393 -if [[ -z ${T} ]] || [[ -z ${2} ]] ; then
3394 - helpers_die "${0##*/}: Need two arguments, old file and new file"
3395 - exit 1
3396 -fi
3397 -
3398 -if [ ! -e "$1" ] ; then
3399 - helpers_die "!!! ${0##*/}: $1 does not exist"
3400 - exit 1
3401 -fi
3402 -
3403 -(($#>2)) && \
3404 - eqawarn "QA Notice: ${0##*/} called with more than 2 arguments: ${@:3}"
3405 -
3406 -rm -rf "${T}/${2}" && \
3407 -cp -f "${1}" "${T}/${2}" && \
3408 -exec doman "${T}/${2}"
3409
3410 diff --git a/bin/ebuild-helpers/newman b/bin/ebuild-helpers/newman
3411 new file mode 120000
3412 index 0000000..59a0db2
3413 --- /dev/null
3414 +++ b/bin/ebuild-helpers/newman
3415 @@ -0,0 +1 @@
3416 +newins
3417 \ No newline at end of file
3418
3419 diff --git a/bin/ebuild-helpers/newsbin b/bin/ebuild-helpers/newsbin
3420 deleted file mode 100755
3421 index 9df0af2..0000000
3422 --- a/bin/ebuild-helpers/newsbin
3423 +++ /dev/null
3424 @@ -1,22 +0,0 @@
3425 -#!/bin/bash
3426 -# Copyright 1999-2011 Gentoo Foundation
3427 -# Distributed under the terms of the GNU General Public License v2
3428 -
3429 -source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3430 -
3431 -if [[ -z ${T} ]] || [[ -z ${2} ]] ; then
3432 - helpers_die "${0##*/}: Need two arguments, old file and new file"
3433 - exit 1
3434 -fi
3435 -
3436 -if [ ! -e "$1" ] ; then
3437 - helpers_die "!!! ${0##*/}: $1 does not exist"
3438 - exit 1
3439 -fi
3440 -
3441 -(($#>2)) && \
3442 - eqawarn "QA Notice: ${0##*/} called with more than 2 arguments: ${@:3}"
3443 -
3444 -rm -rf "${T}/${2}" && \
3445 -cp -f "${1}" "${T}/${2}" && \
3446 -exec dosbin "${T}/${2}"
3447
3448 diff --git a/bin/ebuild-helpers/newsbin b/bin/ebuild-helpers/newsbin
3449 new file mode 120000
3450 index 0000000..59a0db2
3451 --- /dev/null
3452 +++ b/bin/ebuild-helpers/newsbin
3453 @@ -0,0 +1 @@
3454 +newins
3455 \ No newline at end of file
3456
3457 diff --git a/bin/ebuild-helpers/portageq b/bin/ebuild-helpers/portageq
3458 index ec30b66..b67b03f 100755
3459 --- a/bin/ebuild-helpers/portageq
3460 +++ b/bin/ebuild-helpers/portageq
3461 @@ -1,8 +1,10 @@
3462 #!/bin/bash
3463 -# Copyright 2009-2010 Gentoo Foundation
3464 +# Copyright 2009-2013 Gentoo Foundation
3465 # Distributed under the terms of the GNU General Public License v2
3466
3467 PORTAGE_BIN_PATH=${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}
3468 PORTAGE_PYM_PATH=${PORTAGE_PYM_PATH:-/usr/lib/portage/pym}
3469 -PYTHONPATH=$PORTAGE_PYM_PATH${PYTHONPATH:+:}$PYTHONPATH \
3470 +# Use safe cwd, avoiding unsafe import for bug #469338.
3471 +cd "${PORTAGE_PYM_PATH}"
3472 +PYTHONPATH=${PORTAGE_PYTHONPATH:-${PORTAGE_PYM_PATH}} \
3473 exec "${PORTAGE_PYTHON:-/usr/bin/python}" "$PORTAGE_BIN_PATH/portageq" "$@"
3474
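The portageq hunk above (and the ebuild-ipc and xattr/install wrappers further down) switches to a "safe cwd" before exec'ing Python: chdir into the Portage python tree so the interpreter is never started from an untrusted working directory (bug #469338), and point PYTHONPATH at that tree. A sketch of the wrapper shape, with some-helper.py standing in for the real target script:

    #!/bin/bash
    # Sketch of the safe-cwd exec wrapper; some-helper.py is a placeholder.
    PORTAGE_BIN_PATH=${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}
    PORTAGE_PYM_PATH=${PORTAGE_PYM_PATH:-/usr/lib/portage/pym}
    cd "${PORTAGE_PYM_PATH}" || exit 1
    PYTHONPATH=${PORTAGE_PYTHONPATH:-${PORTAGE_PYM_PATH}} \
        exec "${PORTAGE_PYTHON:-/usr/bin/python}" \
            "${PORTAGE_BIN_PATH}/some-helper.py" "$@"
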
3475 diff --git a/bin/ebuild-helpers/prepall b/bin/ebuild-helpers/prepall
3476 index 49e646c..fb5c2db 100755
3477 --- a/bin/ebuild-helpers/prepall
3478 +++ b/bin/ebuild-helpers/prepall
3479 @@ -1,11 +1,12 @@
3480 #!/bin/bash
3481 -# Copyright 1999-2011 Gentoo Foundation
3482 +# Copyright 1999-2012 Gentoo Foundation
3483 # Distributed under the terms of the GNU General Public License v2
3484
3485 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3486
3487 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
3488 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
3489 +if ! ___eapi_has_prefix_variables; then
3490 + ED=${D}
3491 +fi
3492
3493 if has chflags $FEATURES ; then
3494 # Save all the file flags for restoration at the end of prepall.
3495
3496 diff --git a/bin/ebuild-helpers/prepalldocs b/bin/ebuild-helpers/prepalldocs
3497 index 560a02b..3094661 100755
3498 --- a/bin/ebuild-helpers/prepalldocs
3499 +++ b/bin/ebuild-helpers/prepalldocs
3500 @@ -1,15 +1,21 @@
3501 #!/bin/bash
3502 -# Copyright 1999-2011 Gentoo Foundation
3503 +# Copyright 1999-2012 Gentoo Foundation
3504 # Distributed under the terms of the GNU General Public License v2
3505
3506 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3507
3508 +if ___eapi_has_docompress; then
3509 + die "'${0##*/}' has been banned for EAPI '$EAPI'"
3510 + exit 1
3511 +fi
3512 +
3513 if [[ -n $1 ]] ; then
3514 - vecho "${0##*/}: invalid usage; takes no arguments" 1>&2
3515 + __vecho "${0##*/}: invalid usage; takes no arguments" 1>&2
3516 fi
3517
3518 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
3519 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
3520 +if ! ___eapi_has_prefix_variables; then
3521 + ED=${D}
3522 +fi
3523
3524 [[ -d ${ED}usr/share/doc ]] || exit 0
3525
3526
3527 diff --git a/bin/ebuild-helpers/prepallinfo b/bin/ebuild-helpers/prepallinfo
3528 index db9bbfa..1a20275 100755
3529 --- a/bin/ebuild-helpers/prepallinfo
3530 +++ b/bin/ebuild-helpers/prepallinfo
3531 @@ -1,11 +1,12 @@
3532 #!/bin/bash
3533 -# Copyright 1999-2011 Gentoo Foundation
3534 +# Copyright 1999-2012 Gentoo Foundation
3535 # Distributed under the terms of the GNU General Public License v2
3536
3537 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3538
3539 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
3540 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
3541 +if ! ___eapi_has_prefix_variables; then
3542 + ED=${D}
3543 +fi
3544
3545 [[ -d ${ED}usr/share/info ]] || exit 0
3546
3547
3548 diff --git a/bin/ebuild-helpers/prepallman b/bin/ebuild-helpers/prepallman
3549 index dee1c72..5331eaf 100755
3550 --- a/bin/ebuild-helpers/prepallman
3551 +++ b/bin/ebuild-helpers/prepallman
3552 @@ -1,22 +1,22 @@
3553 #!/bin/bash
3554 -# Copyright 1999-2011 Gentoo Foundation
3555 +# Copyright 1999-2012 Gentoo Foundation
3556 # Distributed under the terms of the GNU General Public License v2
3557
3558 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3559
3560 # replaced by controllable compression in EAPI 4
3561 -has "${EAPI}" 0 1 2 3 || exit 0
3562 +___eapi_has_docompress && exit 0
3563
3564 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
3565 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
3566 +if ! ___eapi_has_prefix_variables; then
3567 + ED=${D}
3568 +fi
3569
3570 ret=0
3571
3572 -find "${ED}" -type d -name man > "${T}"/prepallman.filelist
3573 -while read -r mandir ; do
3574 +while IFS= read -r -d '' mandir ; do
3575 mandir=${mandir#${ED}}
3576 prepman "${mandir%/man}"
3577 ((ret|=$?))
3578 -done < "${T}"/prepallman.filelist
3579 +done < <(find "${ED}" -type d -name man -print0)
3580
3581 exit ${ret}
3582
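The prepallman hunk replaces the temporary file list with a process substitution reading NUL-delimited paths, so man directories containing whitespace can no longer split into bogus entries, while the loop still accumulates its exit status into ret. The same idiom in isolation, printing instead of calling prepman:

    #!/bin/bash
    # Sketch: iterate over every directory literally named "man" under $1,
    # NUL-delimited so unusual path characters cannot break the loop.
    root=${1:-.}
    ret=0
    while IFS= read -r -d '' mandir ; do
        printf 'would prep: %s\n' "${mandir}"
        ((ret|=$?))        # accumulate failures, as prepallman does
    done < <(find "${root}" -type d -name man -print0)
    exit ${ret}
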
3583 diff --git a/bin/ebuild-helpers/prepallstrip b/bin/ebuild-helpers/prepallstrip
3584 index 28320d9..1aa6686 100755
3585 --- a/bin/ebuild-helpers/prepallstrip
3586 +++ b/bin/ebuild-helpers/prepallstrip
3587 @@ -1,8 +1,11 @@
3588 #!/bin/bash
3589 -# Copyright 1999-2011 Gentoo Foundation
3590 +# Copyright 1999-2012 Gentoo Foundation
3591 # Distributed under the terms of the GNU General Public License v2
3592
3593 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
3594 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
3595 +source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3596 +
3597 +if ! ___eapi_has_prefix_variables; then
3598 + ED=${D}
3599 +fi
3600
3601 exec prepstrip "${ED}"
3602
3603 diff --git a/bin/ebuild-helpers/prepinfo b/bin/ebuild-helpers/prepinfo
3604 index ffe2ece..5afc18a 100755
3605 --- a/bin/ebuild-helpers/prepinfo
3606 +++ b/bin/ebuild-helpers/prepinfo
3607 @@ -1,11 +1,12 @@
3608 #!/bin/bash
3609 -# Copyright 1999-2011 Gentoo Foundation
3610 +# Copyright 1999-2012 Gentoo Foundation
3611 # Distributed under the terms of the GNU General Public License v2
3612
3613 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3614
3615 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
3616 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
3617 +if ! ___eapi_has_prefix_variables; then
3618 + ED=${D}
3619 +fi
3620
3621 if [[ -z $1 ]] ; then
3622 infodir="/usr/share/info"
3623 @@ -19,7 +20,7 @@ fi
3624
3625 if [[ ! -d ${ED}${infodir} ]] ; then
3626 if [[ -n $1 ]] ; then
3627 - vecho "${0##*/}: '${infodir}' does not exist!"
3628 + __vecho "${0##*/}: '${infodir}' does not exist!"
3629 exit 1
3630 else
3631 exit 0
3632 @@ -33,5 +34,5 @@ find "${ED}${infodir}" -type d -print0 | while read -r -d $'\0' x ; do
3633 rm -f "${x}"/dir{,.info}{,.gz,.bz2}
3634 done
3635
3636 -has "${EAPI}" 0 1 2 3 || exit 0
3637 +___eapi_has_docompress && exit 0
3638 exec ecompressdir --queue "${infodir}"
3639
3640 diff --git a/bin/ebuild-helpers/preplib b/bin/ebuild-helpers/preplib
3641 deleted file mode 100755
3642 index 6e91cf3..0000000
3643 --- a/bin/ebuild-helpers/preplib
3644 +++ /dev/null
3645 @@ -1,31 +0,0 @@
3646 -#!/bin/bash
3647 -# Copyright 1999-2011 Gentoo Foundation
3648 -# Distributed under the terms of the GNU General Public License v2
3649 -
3650 -source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3651 -
3652 -eqawarn "QA Notice: Deprecated call to 'preplib'"
3653 -
3654 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
3655 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
3656 -
3657 -LIBDIR_VAR="LIBDIR_${ABI}"
3658 -if [ -n "${ABI}" -a -n "${!LIBDIR_VAR}" ]; then
3659 - CONF_LIBDIR="${!LIBDIR_VAR}"
3660 -fi
3661 -unset LIBDIR_VAR
3662 -
3663 -if [ -z "${CONF_LIBDIR}" ]; then
3664 - # we need this to default to lib so that things dont break
3665 - CONF_LIBDIR="lib"
3666 -fi
3667 -
3668 -if [ -z "$1" ] ; then
3669 - z="${ED}usr/${CONF_LIBDIR}"
3670 -else
3671 - z="${ED}$1/${CONF_LIBDIR}"
3672 -fi
3673 -
3674 -if [ -d "${z}" ] ; then
3675 - ldconfig -n -N "${z}"
3676 -fi
3677
3678 diff --git a/bin/ebuild-helpers/prepman b/bin/ebuild-helpers/prepman
3679 index f96b641..fb5dcb4 100755
3680 --- a/bin/ebuild-helpers/prepman
3681 +++ b/bin/ebuild-helpers/prepman
3682 @@ -1,13 +1,17 @@
3683 #!/bin/bash
3684 -# Copyright 1999-2011 Gentoo Foundation
3685 +# Copyright 1999-2012 Gentoo Foundation
3686 # Distributed under the terms of the GNU General Public License v2
3687
3688 +# Do not compress man pages which are smaller than this (in bytes). #169260
3689 +SIZE_LIMIT='128'
3690 +
3691 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
3692
3693 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
3694 - case "$EAPI" in 0|1|2) ED=${D} ;; esac
3695 +if ! ___eapi_has_prefix_variables; then
3696 + ED=${D}
3697 +fi
3698
3699 -if [[ -z $1 ]] ; then
3700 +if [[ -z $1 ]] ; then
3701 mandir="${ED}usr/share/man"
3702 else
3703 mandir="${ED}$1/man"
3704 @@ -19,7 +23,7 @@ if [[ ! -d ${mandir} ]] ; then
3705 fi
3706
3707 # replaced by controllable compression in EAPI 4
3708 -has "${EAPI}" 0 1 2 3 || exit 0
3709 +___eapi_has_docompress && exit 0
3710
3711 shopt -s nullglob
3712
3713 @@ -30,6 +34,6 @@ for subdir in "${mandir}"/man* "${mandir}"/*/man* ; do
3714 [[ -d ${subdir} ]] && really_is_mandir=1 && break
3715 done
3716
3717 -[[ ${really_is_mandir} == 1 ]] && exec ecompressdir --queue "${mandir#${ED}}"
3718 +[[ ${really_is_mandir} == 1 ]] && exec ecompressdir --limit ${SIZE_LIMIT} --queue "${mandir#${ED}}"
3719
3720 exit 0
3721
3722 diff --git a/bin/ebuild-helpers/prepstrip b/bin/ebuild-helpers/prepstrip
3723 index fe5c1bc..2ef8a1a 100755
3724 --- a/bin/ebuild-helpers/prepstrip
3725 +++ b/bin/ebuild-helpers/prepstrip
3726 @@ -1,7 +1,8 @@
3727 #!/bin/bash
3728 -# Copyright 1999-2012 Gentoo Foundation
3729 +# Copyright 1999-2014 Gentoo Foundation
3730 # Distributed under the terms of the GNU General Public License v2
3731
3732 +PORTAGE_PYM_PATH=${PORTAGE_PYM_PATH:-/usr/lib/portage/pym}
3733 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/helper-functions.sh
3734
3735 # avoid multiple calls to `has`. this creates things like:
3736 @@ -15,11 +16,12 @@ exp_tf() {
3737 eval ${var}_${flag}=$(tf has ${flag} ${!var})
3738 done
3739 }
3740 -exp_tf FEATURES compressdebug installsources nostrip splitdebug
3741 -exp_tf RESTRICT binchecks installsources strip
3742 +exp_tf FEATURES compressdebug installsources nostrip splitdebug xattr
3743 +exp_tf RESTRICT binchecks installsources splitdebug strip
3744
3745 -[[ " ${FEATURES} " == *" force-prefix "* ]] || \
3746 - case "${EAPI}" in 0|1|2) EPREFIX= ED=${D} ;; esac
3747 +if ! ___eapi_has_prefix_variables; then
3748 + EPREFIX= ED=${D}
3749 +fi
3750
3751 banner=false
3752 SKIP_STRIP=false
3753 @@ -29,6 +31,30 @@ if ${RESTRICT_strip} || ${FEATURES_nostrip} ; then
3754 ${FEATURES_installsources} || exit 0
3755 fi
3756
3757 +PRESERVE_XATTR=false
3758 +if [[ ${KERNEL} == linux ]] && ${FEATURES_xattr} ; then
3759 + PRESERVE_XATTR=true
3760 + if type -P getfattr >/dev/null && type -P setfattr >/dev/null ; then
3761 + dump_xattrs() {
3762 + getfattr -d --absolute-names "$1"
3763 + }
3764 + restore_xattrs() {
3765 + setfattr --restore=-
3766 + }
3767 + else
3768 + dump_xattrs() {
3769 + PYTHONPATH=${PORTAGE_PYTHONPATH:-${PORTAGE_PYM_PATH}} \
3770 + "${PORTAGE_PYTHON:-/usr/bin/python}" \
3771 + "${PORTAGE_BIN_PATH}/xattr-helper.py" --dump < <(echo -n "$1")
3772 + }
3773 + restore_xattrs() {
3774 + PYTHONPATH=${PORTAGE_PYTHONPATH:-${PORTAGE_PYM_PATH}} \
3775 + "${PORTAGE_PYTHON:-/usr/bin/python}" \
3776 + "${PORTAGE_BIN_PATH}/xattr-helper.py" --restore
3777 + }
3778 + fi
3779 +fi
3780 +
3781 # look up the tools we might be using
3782 for t in STRIP:strip OBJCOPY:objcopy READELF:readelf ; do
3783 v=${t%:*} # STRIP
3784 @@ -51,7 +77,7 @@ case $(${STRIP} --version 2>/dev/null) in
3785 # We'll leave out -R .note for now until we can check out the relevance
3786 # of the section when it has the ALLOC flag set on it ...
3787 SAFE_STRIP_FLAGS="--strip-unneeded"
3788 - DEF_STRIP_FLAGS="-R .comment -R .GCC.command.line"
3789 + DEF_STRIP_FLAGS="-R .comment -R .GCC.command.line -R .note.gnu.gold-version"
3790 SPLIT_STRIP_FLAGS=
3791 ;;
3792 esac
3793 @@ -62,23 +88,13 @@ prepstrip_sources_dir=${EPREFIX}/usr/src/debug/${CATEGORY}/${PF}
3794 type -P debugedit >/dev/null && debugedit_found=true || debugedit_found=false
3795 debugedit_warned=false
3796
3797 -multijob_init
3798 +__multijob_init
3799
3800 # Setup $T filesystem layout that we care about.
3801 tmpdir="${T}/prepstrip"
3802 rm -rf "${tmpdir}"
3803 mkdir -p "${tmpdir}"/{inodes,splitdebug,sources}
3804
3805 -# Usage: inode_var_name: <file>
3806 -inode_file_link() {
3807 - echo -n "${tmpdir}/inodes/"
3808 - if [[ ${USERLAND} == "BSD" ]] ; then
3809 - stat -f '%i' "$1"
3810 - else
3811 - stat -c '%i' "$1"
3812 - fi
3813 -}
3814 -
3815 # Usage: save_elf_sources <elf>
3816 save_elf_sources() {
3817 ${FEATURES_installsources} || return 0
3818 @@ -93,7 +109,6 @@ save_elf_sources() {
3819 fi
3820
3821 local x=$1
3822 - [[ -f $(inode_file_link "${x}") ]] && return 0
3823
3824 # since we're editing the ELF here, we should recompute the build-id
3825 # (the -i flag below). save that output so we don't need to recompute
3826 @@ -101,20 +116,22 @@ save_elf_sources() {
3827 buildid=$(debugedit -i \
3828 -b "${WORKDIR}" \
3829 -d "${prepstrip_sources_dir}" \
3830 - -l "${tmpdir}/sources/${x##*/}.${BASHPID}" \
3831 + -l "${tmpdir}/sources/${x##*/}.${BASHPID:-$(__bashpid)}" \
3832 "${x}")
3833 }
3834
3835 # Usage: save_elf_debug <elf> [splitdebug file]
3836 save_elf_debug() {
3837 ${FEATURES_splitdebug} || return 0
3838 + ${RESTRICT_splitdebug} && return 0
3839
3840 # NOTE: Debug files must be installed in
3841 # ${EPREFIX}/usr/lib/debug/${EPREFIX} (note that ${EPREFIX} occurs
3842 # twice in this path) in order for gdb's debug-file-directory
3843 # lookup to work correctly.
3844 local x=$1
3845 - local splitdebug=$2
3846 + local inode_debug=$2
3847 + local splitdebug=$3
3848 local y=${ED}usr/lib/debug/${x:${#D}}.debug
3849
3850 # dont save debug info twice
3851 @@ -122,9 +139,8 @@ save_elf_debug() {
3852
3853 mkdir -p "${y%/*}"
3854
3855 - local inode=$(inode_file_link "${x}")
3856 - if [[ -f ${inode} ]] ; then
3857 - ln "${inode}" "${y}"
3858 + if [ -f "${inode_debug}" ] ; then
3859 + ln "${inode_debug}" "${y}" || die "ln failed unexpectedly"
3860 else
3861 if [[ -n ${splitdebug} ]] ; then
3862 mv "${splitdebug}" "${y}"
3863 @@ -134,64 +150,89 @@ save_elf_debug() {
3864 ${OBJCOPY} ${objcopy_flags} "${x}" "${y}"
3865 ${OBJCOPY} --add-gnu-debuglink="${y}" "${x}"
3866 fi
3867 - local args="a-x,o-w"
3868 - [[ -g ${x} || -u ${x} ]] && args+=",go-r"
3869 - chmod ${args} "${y}"
3870 - ln "${y}" "${inode}"
3871 + # Only do the following if the debug file was
3872 + # successfully created (see bug #446774).
3873 + if [ $? -eq 0 ] ; then
3874 + local args="a-x,o-w"
3875 + [[ -g ${x} || -u ${x} ]] && args+=",go-r"
3876 + chmod ${args} "${y}"
3877 + ln "${y}" "${inode_debug}" || die "ln failed unexpectedly"
3878 + fi
3879 fi
3880
3881 # if we don't already have build-id from debugedit, look it up
3882 if [[ -z ${buildid} ]] ; then
3883 # convert the readelf output to something useful
3884 - buildid=$(${READELF} -x .note.gnu.build-id "${x}" 2>/dev/null \
3885 - | awk '$NF ~ /GNU/ { getline; printf $2$3$4$5; getline; print $2 }')
3886 + buildid=$(${READELF} -n "${x}" 2>/dev/null | awk '/Build ID:/{ print $NF; exit }')
3887 fi
3888 if [[ -n ${buildid} ]] ; then
3889 local buildid_dir="${ED}usr/lib/debug/.build-id/${buildid:0:2}"
3890 local buildid_file="${buildid_dir}/${buildid:2}"
3891 mkdir -p "${buildid_dir}"
3892 - ln -s "../../${x:${#D}}.debug" "${buildid_file}.debug"
3893 - ln -s "/${x:${#D}}" "${buildid_file}"
3894 + [ -L "${buildid_file}".debug ] || ln -s "../../${x:${#D}}.debug" "${buildid_file}.debug"
3895 + [ -L "${buildid_file}" ] || ln -s "/${x:${#D}}" "${buildid_file}"
3896 fi
3897 }
3898
3899 # Usage: process_elf <elf>
3900 process_elf() {
3901 - local x=$1 strip_flags=${*:2}
3902 -
3903 - vecho " ${x:${#ED}}"
3904 - save_elf_sources "${x}"
3905 + local x=$1 inode_link=$2 strip_flags=${*:3}
3906 + local already_stripped lockfile xt_data
3907 +
3908 + __vecho " ${x:${#ED}}"
3909 +
3910 + # If two processes try to debugedit or strip the same hardlink at the
3911 + # same time, it may corrupt files or cause loss of splitdebug info.
3912 + # So, use a lockfile to prevent interference (easily observed with
3913 + # dev-vcs/git which creates ~111 hardlinks to one file in
3914 + # /usr/libexec/git-core).
3915 + lockfile=${inode_link}_lockfile
3916 + if ! ln "${inode_link}" "${lockfile}" 2>/dev/null ; then
3917 + while [[ -f ${lockfile} ]] ; do
3918 + sleep 1
3919 + done
3920 + unset lockfile
3921 + fi
3922
3923 - if ${strip_this} ; then
3924 + [ -f "${inode_link}_stripped" ] && already_stripped=true || already_stripped=false
3925
3926 - # If two processes try to strip the same hardlink at the same
3927 - # time, it will cause one of them to lose the splitdebug info.
3928 - # So, use a lockfile to prevent interference (easily observed
3929 - # with dev-vcs/git which creates ~109 hardlinks to one file in
3930 - # /usr/libexec/git-core).
3931 - local lockfile=$(inode_file_link "${x}")_lockfile
3932 - if ! ln "${x}" "${lockfile}" ; then
3933 - while [[ -f ${lockfile} ]] ; do
3934 - sleep 1
3935 - done
3936 - unset lockfile
3937 + if ! ${already_stripped} ; then
3938 + if ${PRESERVE_XATTR} ; then
3939 + xt_data=$(dump_xattrs "${x}")
3940 fi
3941 + save_elf_sources "${x}"
3942 + fi
3943 +
3944 + if ${strip_this} ; then
3945
3946 # see if we can split & strip at the same time
3947 if [[ -n ${SPLIT_STRIP_FLAGS} ]] ; then
3948 local shortname="${x##*/}.debug"
3949 - local splitdebug="${tmpdir}/splitdebug/${shortname}.${BASHPID}"
3950 + local splitdebug="${tmpdir}/splitdebug/${shortname}.${BASHPID:-$(__bashpid)}"
3951 + ${already_stripped} || \
3952 ${STRIP} ${strip_flags} \
3953 -f "${splitdebug}" \
3954 -F "${shortname}" \
3955 "${x}"
3956 - save_elf_debug "${x}" "${splitdebug}"
3957 + save_elf_debug "${x}" "${inode_link}_debug" "${splitdebug}"
3958 else
3959 - save_elf_debug "${x}"
3960 + save_elf_debug "${x}" "${inode_link}_debug"
3961 + ${already_stripped} || \
3962 ${STRIP} ${strip_flags} "${x}"
3963 fi
3964 - [[ -n ${lockfile} ]] && rm -f "${lockfile}"
3965 fi
3966 +
3967 + if ${already_stripped} ; then
3968 + rm -f "${x}" || die "rm failed unexpectedly"
3969 + ln "${inode_link}_stripped" "${x}" || die "ln failed unexpectedly"
3970 + else
3971 + ln "${x}" "${inode_link}_stripped" || die "ln failed unexpectedly"
3972 + if [[ ${xt_data} ]] ; then
3973 + restore_xattrs <<< "${xt_data}"
3974 + fi
3975 + fi
3976 +
3977 + [[ -n ${lockfile} ]] && rm -f "${lockfile}"
3978 }
3979
3980 # The existance of the section .symtab tells us that a binary is stripped.
3981 @@ -204,7 +245,7 @@ if ! ${RESTRICT_binchecks} && ! ${RESTRICT_strip} ; then
3982 log=${tmpdir}/scanelf-already-stripped.log
3983 scanelf -yqRBF '#k%F' -k '!.symtab' "$@" | sed -e "s#^${ED}##" > "${log}"
3984 (
3985 - multijob_child_init
3986 + __multijob_child_init
3987 qa_var="QA_PRESTRIPPED_${ARCH/-/_}"
3988 [[ -n ${!qa_var} ]] && QA_PRESTRIPPED="${!qa_var}"
3989 if [[ -n ${QA_PRESTRIPPED} && -s ${log} && \
3990 @@ -219,28 +260,49 @@ if ! ${RESTRICT_binchecks} && ! ${RESTRICT_strip} ; then
3991 fi
3992 sed -e "/^\$/d" -e "s#^#/#" -i "${log}"
3993 if [[ -s ${log} ]] ; then
3994 - vecho -e "\n"
3995 + __vecho -e "\n"
3996 eqawarn "QA Notice: Pre-stripped files found:"
3997 eqawarn "$(<"${log}")"
3998 else
3999 rm -f "${log}"
4000 fi
4001 ) &
4002 - multijob_post_fork
4003 + __multijob_post_fork
4004 +fi
4005 +
4006 +# Since strip creates a new inode, we need to know the initial set of
4007 +# inodes in advance, so that we can avoid interference due to trying
4008 +# to strip the same (hardlinked) file multiple times in parallel.
4009 +# See bug #421099.
4010 +if [[ ${USERLAND} == BSD ]] ; then
4011 + get_inode_number() { stat -f '%i' "$1"; }
4012 +else
4013 + get_inode_number() { stat -c '%i' "$1"; }
4014 fi
4015 +cd "${tmpdir}/inodes" || die "cd failed unexpectedly"
4016 +while read -r x ; do
4017 + inode_link=$(get_inode_number "${x}") || die "stat failed unexpectedly"
4018 + echo "${x}" >> "${inode_link}" || die "echo failed unexpectedly"
4019 +done < <(
4020 + # Use sort -u to eliminate duplicates for bug #445336.
4021 + (
4022 + scanelf -yqRBF '#k%F' -k '.symtab' "$@"
4023 + find "$@" -type f ! -type l -name '*.a'
4024 + ) | LC_ALL=C sort -u
4025 +)
4026
4027 # Now we look for unstripped binaries.
4028 -for x in \
4029 - $(scanelf -yqRBF '#k%F' -k '.symtab' "$@") \
4030 - $(find "$@" -type f -name '*.a')
4031 +for inode_link in $(shopt -s nullglob; echo *) ; do
4032 +while read -r x
4033 do
4034 +
4035 if ! ${banner} ; then
4036 - vecho "strip: ${STRIP} ${PORTAGE_STRIP_FLAGS}"
4037 + __vecho "strip: ${STRIP} ${PORTAGE_STRIP_FLAGS}"
4038 banner=true
4039 fi
4040
4041 (
4042 - multijob_child_init
4043 + __multijob_child_init
4044 f=$(file "${x}") || exit 0
4045 [[ -z ${f} ]] && exit 0
4046
4047 @@ -275,27 +337,34 @@ do
4048
4049 buildid=
4050 if [[ ${f} == *"current ar archive"* ]] ; then
4051 - vecho " ${x:${#ED}}"
4052 + __vecho " ${x:${#ED}}"
4053 if ${strip_this} ; then
4054 - # hmm, can we split debug/sources for .a ?
4055 - ${STRIP} -g "${x}"
4056 + # If we have split debug enabled, then do not strip this.
4057 + # There is no concept of splitdebug for objects not yet
4058 + # linked in (only for finally linked ELFs), so we have to
4059 + # retain the debug info in the archive itself.
4060 + if ! ${FEATURES_splitdebug} || ${RESTRICT_splitdebug} ; then
4061 + ${STRIP} -g "${x}"
4062 + fi
4063 fi
4064 elif [[ ${f} == *"SB executable"* || ${f} == *"SB shared object"* ]] ; then
4065 - process_elf "${x}" ${PORTAGE_STRIP_FLAGS}
4066 + process_elf "${x}" "${inode_link}" ${PORTAGE_STRIP_FLAGS}
4067 elif [[ ${f} == *"SB relocatable"* ]] ; then
4068 - process_elf "${x}" ${SAFE_STRIP_FLAGS}
4069 + process_elf "${x}" "${inode_link}" ${SAFE_STRIP_FLAGS}
4070 fi
4071
4072 if ${was_not_writable} ; then
4073 chmod u-w "${x}"
4074 fi
4075 ) &
4076 - multijob_post_fork
4077 + __multijob_post_fork
4078 +
4079 +done < "${inode_link}"
4080 done
4081
4082 # With a bit more work, we could run the rsync processes below in
4083 # parallel, but not sure that'd be an overall improvement.
4084 -multijob_finish
4085 +__multijob_finish
4086
4087 cd "${tmpdir}"/sources/ && cat * > "${tmpdir}/debug.sources" 2>/dev/null
4088 if [[ -s ${tmpdir}/debug.sources ]] && \
4089 @@ -303,11 +372,11 @@ if [[ -s ${tmpdir}/debug.sources ]] && \
4090 ! ${RESTRICT_installsources} && \
4091 ${debugedit_found}
4092 then
4093 - vecho "installsources: rsyncing source files"
4094 + __vecho "installsources: rsyncing source files"
4095 [[ -d ${D}${prepstrip_sources_dir} ]] || mkdir -p "${D}${prepstrip_sources_dir}"
4096 grep -zv '/<[^/>]*>$' "${tmpdir}"/debug.sources | \
4097 (cd "${WORKDIR}"; LANG=C sort -z -u | \
4098 - rsync -tL0 --files-from=- "${WORKDIR}/" "${D}${prepstrip_sources_dir}/" )
4099 + rsync -tL0 --chmod=ugo-st,a+r,go-w,Da+x,Fa-x --files-from=- "${WORKDIR}/" "${D}${prepstrip_sources_dir}/" )
4100
4101 # Preserve directory structure.
4102 # Needed after running save_elf_sources.
4103
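Most of the prepstrip rewrite is about hardlinks: since strip creates a new inode, the candidate files are grouped by inode up front (one list file per inode under ${tmpdir}/inodes), a lockfile taken with ln serializes workers that hit the same inode, an already-stripped copy is re-linked instead of stripped again, and extended attributes are dumped before and restored after when FEATURES=xattr is set. A stripped-down sketch of just the grouping step, using GNU stat -c (the hunk falls back to stat -f on BSD):

    #!/bin/bash
    # Sketch: group regular files under $1 by inode number so each
    # hardlink set is handled exactly once, even with parallel workers.
    tmpdir=$(mktemp -d) || exit 1
    trap 'rm -rf "${tmpdir}"' EXIT
    while read -r f ; do
        inode=$(stat -c '%i' "${f}") || continue
        echo "${f}" >> "${tmpdir}/${inode}"
    done < <(find "${1:-.}" -type f ! -type l | LC_ALL=C sort -u)
    shopt -s nullglob
    for inode_list in "${tmpdir}"/* ; do
        read -r first < "${inode_list}"
        printf 'inode %s: %s link(s), e.g. %s\n' \
            "${inode_list##*/}" "$(wc -l < "${inode_list}")" "${first}"
    done
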
4104 diff --git a/bin/ebuild-helpers/unprivileged/chgrp b/bin/ebuild-helpers/unprivileged/chgrp
4105 new file mode 120000
4106 index 0000000..6fb0fcd
4107 --- /dev/null
4108 +++ b/bin/ebuild-helpers/unprivileged/chgrp
4109 @@ -0,0 +1 @@
4110 +chown
4111 \ No newline at end of file
4112
4113 diff --git a/bin/ebuild-helpers/unprivileged/chown b/bin/ebuild-helpers/unprivileged/chown
4114 new file mode 100755
4115 index 0000000..08fa650
4116 --- /dev/null
4117 +++ b/bin/ebuild-helpers/unprivileged/chown
4118 @@ -0,0 +1,41 @@
4119 +#!/bin/bash
4120 +# Copyright 2012-2013 Gentoo Foundation
4121 +# Distributed under the terms of the GNU General Public License v2
4122 +
4123 +scriptpath=${BASH_SOURCE[0]}
4124 +scriptname=${scriptpath##*/}
4125 +
4126 +IFS=':'
4127 +
4128 +for path in ${PATH}; do
4129 + [[ -x ${path}/${scriptname} ]] || continue
4130 + [[ ${path}/${scriptname} -ef ${scriptpath} ]] && continue
4131 + IFS=$' \t\n'
4132 + output=$("${path}/${scriptname}" "$@" 2>&1)
4133 + if [[ $? -ne 0 ]] ; then
4134 +
4135 + # Avoid an extreme performance problem when the
4136 + # output is very long (bug #470992).
4137 + if [[ $(wc -l <<< "${output}") -gt 100 ]]; then
4138 + output=$(head -n100 <<< "${output}")
4139 + output="${output}\n ... (further messages truncated)"
4140 + fi
4141 +
4142 + source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
4143 +
4144 + if ! ___eapi_has_prefix_variables; then
4145 + EPREFIX=
4146 + fi
4147 + msg="${scriptname} failure ignored with unprivileged user:\n ${scriptname} $*\n ${output}"
4148 + # Reverse expansion of ${D} and ${EPREFIX}, for readability.
4149 + msg=${msg//${D}/'${D}'}
4150 + if [[ -n ${EPREFIX} ]] ; then
4151 + msg=${msg//${EPREFIX}/'${EPREFIX}'}
4152 + msg=${msg//${EPREFIX#/}/'${EPREFIX}'}
4153 + fi
4154 + ewarn "${msg}"
4155 + fi
4156 + exit 0
4157 +done
4158 +
4159 +exit 1
4160
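The new unprivileged chown helper (chgrp is a symlink to it) wraps the real tool: it walks PATH for a binary of the same name, skips itself via the -ef inode comparison, runs it, and demotes any failure to a warning (truncated to 100 lines) so installs by an unprivileged user keep going. The core of that wrapper pattern, without the Portage-specific message formatting:

    #!/bin/bash
    # Sketch: run the "real" tool of the same name found elsewhere in
    # PATH, skipping this wrapper itself, and only warn when it fails.
    scriptpath=${BASH_SOURCE[0]}
    scriptname=${scriptpath##*/}
    IFS=':'
    for dir in ${PATH}; do
        [[ -x ${dir}/${scriptname} ]] || continue
        [[ ${dir}/${scriptname} -ef ${scriptpath} ]] && continue
        IFS=$' \t\n'
        if ! output=$("${dir}/${scriptname}" "$@" 2>&1) ; then
            echo "${scriptname} failure ignored: ${output}" >&2
        fi
        exit 0
    done
    exit 1   # no real tool found
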
4161 diff --git a/bin/ebuild-helpers/xattr/install b/bin/ebuild-helpers/xattr/install
4162 new file mode 100755
4163 index 0000000..f51f621
4164 --- /dev/null
4165 +++ b/bin/ebuild-helpers/xattr/install
4166 @@ -0,0 +1,12 @@
4167 +#!/bin/bash
4168 +# Copyright 2013 Gentoo Foundation
4169 +# Distributed under the terms of the GNU General Public License v2
4170 +
4171 +PORTAGE_BIN_PATH=${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}
4172 +PORTAGE_PYM_PATH=${PORTAGE_PYM_PATH:-/usr/lib/portage/pym}
4173 +# Use safe cwd, avoiding unsafe import for bug #469338.
4174 +export __PORTAGE_HELPER_CWD=${PWD}
4175 +cd "${PORTAGE_PYM_PATH}"
4176 +export __PORTAGE_HELPER_PATH=${BASH_SOURCE[0]}
4177 +PYTHONPATH=${PORTAGE_PYTHONPATH:-${PORTAGE_PYM_PATH}} \
4178 + exec "${PORTAGE_PYTHON:-/usr/bin/python}" "${PORTAGE_BIN_PATH}/install.py" "$@"
4179
4180 diff --git a/bin/ebuild-ipc b/bin/ebuild-ipc
4181 index 43e4a02..820005f 100755
4182 --- a/bin/ebuild-ipc
4183 +++ b/bin/ebuild-ipc
4184 @@ -1,8 +1,10 @@
4185 #!/bin/bash
4186 -# Copyright 2010 Gentoo Foundation
4187 +# Copyright 2010-2013 Gentoo Foundation
4188 # Distributed under the terms of the GNU General Public License v2
4189
4190 PORTAGE_BIN_PATH=${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}
4191 PORTAGE_PYM_PATH=${PORTAGE_PYM_PATH:-/usr/lib/portage/pym}
4192 -PYTHONPATH=$PORTAGE_PYM_PATH${PYTHONPATH:+:}$PYTHONPATH \
4193 +# Use safe cwd, avoiding unsafe import for bug #469338.
4194 +cd "${PORTAGE_PYM_PATH}"
4195 +PYTHONPATH=${PORTAGE_PYTHONPATH:-${PORTAGE_PYM_PATH}} \
4196 exec "${PORTAGE_PYTHON:-/usr/bin/python}" "$PORTAGE_BIN_PATH/ebuild-ipc.py" "$@"
4197
4198 diff --git a/bin/ebuild-ipc.py b/bin/ebuild-ipc.py
4199 index 3caf2d1..00337ee 100755
4200 --- a/bin/ebuild-ipc.py
4201 +++ b/bin/ebuild-ipc.py
4202 @@ -1,20 +1,17 @@
4203 -#!/usr/bin/python
4204 -# Copyright 2010-2012 Gentoo Foundation
4205 +#!/usr/bin/python -b
4206 +# Copyright 2010-2014 Gentoo Foundation
4207 # Distributed under the terms of the GNU General Public License v2
4208 #
4209 # This is a helper which ebuild processes can use
4210 # to communicate with portage's main python process.
4211
4212 -import errno
4213 import logging
4214 import os
4215 import pickle
4216 import platform
4217 -import select
4218 import signal
4219 import sys
4220 import time
4221 -import traceback
4222
4223 def debug_signal(signum, frame):
4224 import pdb
4225 @@ -38,14 +35,28 @@ if os.environ.get("SANDBOX_ON") == "1":
4226 ":".join(filter(None, sandbox_write))
4227
4228 import portage
4229 +portage._internal_caller = True
4230 portage._disable_legacy_globals()
4231
4232 +from portage.util._async.ForkProcess import ForkProcess
4233 +from portage.util._eventloop.global_event_loop import global_event_loop
4234 +from _emerge.PipeReader import PipeReader
4235 +
4236 +class FifoWriter(ForkProcess):
4237 +
4238 + __slots__ = ('buf', 'fifo',)
4239 +
4240 + def _run(self):
4241 + # Atomically write the whole buffer into the fifo.
4242 + with open(self.fifo, 'wb', 0) as f:
4243 + f.write(self.buf)
4244 + return os.EX_OK
4245 +
4246 class EbuildIpc(object):
4247
4248 # Timeout for each individual communication attempt (we retry
4249 # as long as the daemon process appears to be alive).
4250 - _COMMUNICATE_RETRY_TIMEOUT_SECONDS = 15
4251 - _BUFSIZE = 4096
4252 + _COMMUNICATE_RETRY_TIMEOUT_MS = 15000
4253
4254 def __init__(self):
4255 self.fifo_dir = os.environ['PORTAGE_BUILDDIR']
4256 @@ -89,7 +100,7 @@ class EbuildIpc(object):
4257 'ebuild-ipc: daemon process not detected\n'),
4258 level=logging.ERROR, noiselevel=-1)
4259
4260 - def _wait(self, pid, pr, msg):
4261 + def _run_writer(self, fifo_writer, msg):
4262 """
4263 Wait on pid and return an appropriate exit code. This
4264 may return unsuccessfully due to timeout if the daemon
4265 @@ -98,88 +109,48 @@ class EbuildIpc(object):
4266
4267 start_time = time.time()
4268
4269 - while True:
4270 - try:
4271 - events = select.select([pr], [], [],
4272 - self._COMMUNICATE_RETRY_TIMEOUT_SECONDS)
4273 - except select.error as e:
4274 - portage.util.writemsg_level(
4275 - "ebuild-ipc: %s: %s\n" % \
4276 - (portage.localization._('during select'), e),
4277 - level=logging.ERROR, noiselevel=-1)
4278 - continue
4279 + fifo_writer.start()
4280 + eof = fifo_writer.poll() is not None
4281
4282 - if events[0]:
4283 - break
4284 + while not eof:
4285 + fifo_writer._wait_loop(timeout=self._COMMUNICATE_RETRY_TIMEOUT_MS)
4286
4287 - if self._daemon_is_alive():
4288 + eof = fifo_writer.poll() is not None
4289 + if eof:
4290 + break
4291 + elif self._daemon_is_alive():
4292 self._timeout_retry_msg(start_time, msg)
4293 else:
4294 + fifo_writer.cancel()
4295 self._no_daemon_msg()
4296 - try:
4297 - os.kill(pid, signal.SIGKILL)
4298 - os.waitpid(pid, 0)
4299 - except OSError as e:
4300 - portage.util.writemsg_level(
4301 - "ebuild-ipc: %s\n" % (e,),
4302 - level=logging.ERROR, noiselevel=-1)
4303 + fifo_writer.wait()
4304 return 2
4305
4306 - try:
4307 - wait_retval = os.waitpid(pid, 0)
4308 - except OSError as e:
4309 - portage.util.writemsg_level(
4310 - "ebuild-ipc: %s: %s\n" % (msg, e),
4311 - level=logging.ERROR, noiselevel=-1)
4312 - return 2
4313 + return fifo_writer.wait()
4314
4315 - if not os.WIFEXITED(wait_retval[1]):
4316 - portage.util.writemsg_level(
4317 - "ebuild-ipc: %s: %s\n" % (msg,
4318 - portage.localization._('subprocess failure: %s') % \
4319 - wait_retval[1]),
4320 - level=logging.ERROR, noiselevel=-1)
4321 - return 2
4322 + def _receive_reply(self, input_fd):
4323
4324 - return os.WEXITSTATUS(wait_retval[1])
4325 + start_time = time.time()
4326
4327 - def _receive_reply(self, input_fd):
4328 + pipe_reader = PipeReader(input_files={"input_fd":input_fd},
4329 + scheduler=global_event_loop())
4330 + pipe_reader.start()
4331
4332 - # Timeouts are handled by the parent process, so just
4333 - # block until input is available. For maximum portability,
4334 - # use a single atomic read.
4335 - buf = None
4336 - while True:
4337 - try:
4338 - events = select.select([input_fd], [], [])
4339 - except select.error as e:
4340 - portage.util.writemsg_level(
4341 - "ebuild-ipc: %s: %s\n" % \
4342 - (portage.localization._('during select for read'), e),
4343 - level=logging.ERROR, noiselevel=-1)
4344 - continue
4345 -
4346 - if events[0]:
4347 - # For maximum portability, use os.read() here since
4348 - # array.fromfile() and file.read() are both known to
4349 - # erroneously return an empty string from this
4350 - # non-blocking fifo stream on FreeBSD (bug #337465).
4351 - try:
4352 - buf = os.read(input_fd, self._BUFSIZE)
4353 - except OSError as e:
4354 - if e.errno != errno.EAGAIN:
4355 - portage.util.writemsg_level(
4356 - "ebuild-ipc: %s: %s\n" % \
4357 - (portage.localization._('read error'), e),
4358 - level=logging.ERROR, noiselevel=-1)
4359 - break
4360 - # Assume that another event will be generated
4361 - # if there's any relevant data.
4362 - continue
4363 -
4364 - # Only one (atomic) read should be necessary.
4365 - if buf:
4366 - break
4367 + eof = pipe_reader.poll() is not None
4368 +
4369 + while not eof:
4370 + pipe_reader._wait_loop(timeout=self._COMMUNICATE_RETRY_TIMEOUT_MS)
4371 + eof = pipe_reader.poll() is not None
4372 + if not eof:
4373 + if self._daemon_is_alive():
4374 + self._timeout_retry_msg(start_time,
4375 + portage.localization._('during read'))
4376 + else:
4377 + pipe_reader.cancel()
4378 + self._no_daemon_msg()
4379 + return 2
4380 +
4381 + buf = pipe_reader.getvalue()
4382
4383 retval = 2
4384
4385 @@ -232,32 +203,9 @@ class EbuildIpc(object):
4386 # un-interrupted, while the parent handles all timeout
4387 # considerations. This helps to avoid possible race conditions
4388 # from interference between timeouts and blocking IO operations.
4389 - pr, pw = os.pipe()
4390 - pid = os.fork()
4391 -
4392 - if pid == 0:
4393 - retval = 2
4394 - try:
4395 - os.close(pr)
4396 -
4397 - # File streams are in unbuffered mode since we do atomic
4398 - # read and write of whole pickles.
4399 - output_file = open(self.ipc_in_fifo, 'wb', 0)
4400 - output_file.write(pickle.dumps(args))
4401 - output_file.close()
4402 - retval = os.EX_OK
4403 - except SystemExit:
4404 - raise
4405 - except:
4406 - traceback.print_exc()
4407 - finally:
4408 - os._exit(retval)
4409 -
4410 - os.close(pw)
4411 -
4412 msg = portage.localization._('during write')
4413 - retval = self._wait(pid, pr, msg)
4414 - os.close(pr)
4415 + retval = self._run_writer(FifoWriter(buf=pickle.dumps(args),
4416 + fifo=self.ipc_in_fifo, scheduler=global_event_loop()), msg)
4417
4418 if retval != os.EX_OK:
4419 portage.util.writemsg_level(
4420 @@ -270,26 +218,7 @@ class EbuildIpc(object):
4421 self._no_daemon_msg()
4422 return 2
4423
4424 - pr, pw = os.pipe()
4425 - pid = os.fork()
4426 -
4427 - if pid == 0:
4428 - retval = 2
4429 - try:
4430 - os.close(pr)
4431 - retval = self._receive_reply(input_fd)
4432 - except SystemExit:
4433 - raise
4434 - except:
4435 - traceback.print_exc()
4436 - finally:
4437 - os._exit(retval)
4438 -
4439 - os.close(pw)
4440 - retval = self._wait(pid, pr, portage.localization._('during read'))
4441 - os.close(pr)
4442 - os.close(input_fd)
4443 - return retval
4444 + return self._receive_reply(input_fd)
4445
4446 def ebuild_ipc_main(args):
4447 ebuild_ipc = EbuildIpc()
4448
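ebuild-ipc.py drops the fork/select/waitpid plumbing in favour of the event-loop classes: a FifoWriter (a ForkProcess) writes the whole pickle into the fifo in one go, a PipeReader collects the reply, and both are polled with the 15000 ms retry timeout while checking that the daemon is still alive. As a much cruder stand-in for the same "write to a fifo, but give up if nobody ever reads" behaviour, here is coreutils timeout around a plain redirection; this is not what Portage does, only the shape of the problem:

    #!/bin/bash
    # Sketch only: opening a fifo for writing blocks until a reader
    # appears, so bound the attempt instead of hanging forever.
    fifo=$1 payload=$2
    if ! timeout 15 sh -c 'cat -- "$1" > "$2"' _ "${payload}" "${fifo}" ; then
        echo "no reader on ${fifo} within 15s, giving up" >&2
        exit 2
    fi
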
4449 diff --git a/bin/ebuild.sh b/bin/ebuild.sh
4450 index 9829f68..be044e0 100755
4451 --- a/bin/ebuild.sh
4452 +++ b/bin/ebuild.sh
4453 @@ -1,5 +1,5 @@
4454 #!/bin/bash
4455 -# Copyright 1999-2012 Gentoo Foundation
4456 +# Copyright 1999-2013 Gentoo Foundation
4457 # Distributed under the terms of the GNU General Public License v2
4458
4459 PORTAGE_BIN_PATH="${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"
4460 @@ -21,21 +21,23 @@ else
4461 # in global scope, even though they are completely useless during
4462 # the "depend" phase.
4463 for x in diropts docompress exeopts get_KV insopts \
4464 - keepdir KV_major KV_micro KV_minor KV_to_int \
4465 + KV_major KV_micro KV_minor KV_to_int \
4466 libopts register_die_hook register_success_hook \
4467 - remove_path_entry set_unless_changed strip_duplicate_slashes \
4468 - unset_unless_changed use_with use_enable ; do
4469 + __strip_duplicate_slashes \
4470 + use_with use_enable ; do
4471 eval "${x}() {
4472 - if has \"\${EAPI:-0}\" 4-python; then
4473 + if ___eapi_disallows_helpers_in_global_scope; then
4474 die \"\${FUNCNAME}() calls are not allowed in global scope\"
4475 fi
4476 }"
4477 done
4478 - # These dummy functions return false in older EAPIs, in order to ensure that
4479 + # These dummy functions return false in non-strict EAPIs, in order to ensure that
4480 # `use multislot` is false for the "depend" phase.
4481 - for x in use useq usev ; do
4482 + funcs="use useq usev"
4483 + ___eapi_has_usex && funcs+=" usex"
4484 + for x in ${funcs} ; do
4485 eval "${x}() {
4486 - if has \"\${EAPI:-0}\" 4-python; then
4487 + if ___eapi_disallows_helpers_in_global_scope; then
4488 die \"\${FUNCNAME}() calls are not allowed in global scope\"
4489 else
4490 return 1
4491 @@ -44,10 +46,16 @@ else
4492 done
4493 # These functions die because calls to them during the "depend" phase
4494 # are considered to be severe QA violations.
4495 - for x in best_version has_version portageq ; do
4496 + funcs="best_version has_version portageq"
4497 + ___eapi_has_master_repositories && funcs+=" master_repositories"
4498 + ___eapi_has_repository_path && funcs+=" repository_path"
4499 + ___eapi_has_available_eclasses && funcs+=" available_eclasses"
4500 + ___eapi_has_eclass_path && funcs+=" eclass_path"
4501 + ___eapi_has_license_path && funcs+=" license_path"
4502 + for x in ${funcs} ; do
4503 eval "${x}() { die \"\${FUNCNAME}() calls are not allowed in global scope\"; }"
4504 done
4505 - unset x
4506 + unset funcs x
4507 fi
4508
4509 # Don't use sandbox's BASH_ENV for new shells because it does
4510 @@ -66,7 +74,7 @@ export PORTAGE_BZIP2_COMMAND=${PORTAGE_BZIP2_COMMAND:-bzip2}
4511 # with shell opts (shopts). Ebuilds/eclasses changing shopts should reset them
4512 # when they are done.
4513
4514 -qa_source() {
4515 +__qa_source() {
4516 local shopts=$(shopt) OLDIFS="$IFS"
4517 local retval
4518 source "$@"
4519 @@ -79,7 +87,7 @@ qa_source() {
4520 return $retval
4521 }
4522
4523 -qa_call() {
4524 +__qa_call() {
4525 local shopts=$(shopt) OLDIFS="$IFS"
4526 local retval
4527 "$@"
4528 @@ -102,20 +110,19 @@ unset GZIP BZIP BZIP2 CDPATH GREP_OPTIONS GREP_COLOR GLOBIGNORE
4529 [[ $PORTAGE_QUIET != "" ]] && export PORTAGE_QUIET
4530
4531 # sandbox support functions; defined prior to profile.bashrc srcing, since the profile might need to add a default exception (/usr/lib64/conftest fex)
4532 -_sb_append_var() {
4533 +__sb_append_var() {
4534 local _v=$1 ; shift
4535 local var="SANDBOX_${_v}"
4536 - [[ -z $1 || -n $2 ]] && die "Usage: add$(echo ${_v} | \
4537 - LC_ALL=C tr [:upper:] [:lower:]) <colon-delimited list of paths>"
4538 + [[ -z $1 || -n $2 ]] && die "Usage: add$(LC_ALL=C tr "[:upper:]" "[:lower:]" <<< "${_v}") <colon-delimited list of paths>"
4539 export ${var}="${!var:+${!var}:}$1"
4540 }
4541 # bash-4 version:
4542 # local var="SANDBOX_${1^^}"
4543 -# addread() { _sb_append_var ${0#add} "$@" ; }
4544 -addread() { _sb_append_var READ "$@" ; }
4545 -addwrite() { _sb_append_var WRITE "$@" ; }
4546 -adddeny() { _sb_append_var DENY "$@" ; }
4547 -addpredict() { _sb_append_var PREDICT "$@" ; }
4548 +# addread() { __sb_append_var ${0#add} "$@" ; }
4549 +addread() { __sb_append_var READ "$@" ; }
4550 +addwrite() { __sb_append_var WRITE "$@" ; }
4551 +adddeny() { __sb_append_var DENY "$@" ; }
4552 +addpredict() { __sb_append_var PREDICT "$@" ; }
4553
4554 addwrite "${PORTAGE_TMPDIR}"
4555 addread "/:${PORTAGE_TMPDIR}"
4556 @@ -136,19 +143,11 @@ fi
4557 # the sandbox is disabled by default except when overridden in the relevant stages
4558 export SANDBOX_ON=0
4559
4560 -esyslog() {
4561 - # Custom version of esyslog() to take care of the "Red Star" bug.
4562 - # MUST follow functions.sh to override the "" parameter problem.
4563 - return 0
4564 -}
4565 -
4566 # Ensure that $PWD is sane whenever possible, to protect against
4567 # exploitation of insecure search path for python -c in ebuilds.
4568 -# See bug #239560.
4569 -if ! has "$EBUILD_PHASE" clean cleanrm depend help ; then
4570 - cd "$PORTAGE_BUILDDIR" || \
4571 - die "PORTAGE_BUILDDIR does not exist: '$PORTAGE_BUILDDIR'"
4572 -fi
4573 +# See bug #239560 and bug #469338.
4574 +cd "${PORTAGE_PYM_PATH}" || \
4575 + die "PORTAGE_PYM_PATH does not exist: '${PORTAGE_PYM_PATH}'"
4576
4577 #if no perms are specified, dirs/files will have decent defaults
4578 #(not secretive, but not stupid)
4579 @@ -178,8 +177,8 @@ debug-print() {
4580 # default target
4581 printf '%s\n' "${@}" >> "${T}/eclass-debug.log"
4582 # let the portage user own/write to this file
4583 - chgrp portage "${T}/eclass-debug.log" &>/dev/null
4584 - chmod g+w "${T}/eclass-debug.log" &>/dev/null
4585 + chgrp "${PORTAGE_GRPNAME:-portage}" "${T}/eclass-debug.log"
4586 + chmod g+w "${T}/eclass-debug.log"
4587 fi
4588 }
4589
4590 @@ -208,8 +207,9 @@ inherit() {
4591 | fmt -w 75 | while read -r ; do eqawarn "$REPLY" ; done
4592 fi
4593
4594 + local repo_location
4595 local location
4596 - local olocation
4597 + local potential_location
4598 local x
4599
4600 # These variables must be restored before returning.
4601 @@ -221,9 +221,10 @@ inherit() {
4602 local B_DEPEND
4603 local B_RDEPEND
4604 local B_PDEPEND
4605 + local B_HDEPEND
4606 while [ "$1" ]; do
4607 - location="${ECLASSDIR}/${1}.eclass"
4608 - olocation=""
4609 + location=""
4610 + potential_location=""
4611
4612 export ECLASS="$1"
4613 __export_funcs_var=__export_functions_$ECLASS_DEPTH
4614 @@ -244,43 +245,36 @@ inherit() {
4615 fi
4616 fi
4617
4618 - # any future resolution code goes here
4619 - if [ -n "$PORTDIR_OVERLAY" ]; then
4620 - local overlay
4621 - for overlay in ${PORTDIR_OVERLAY}; do
4622 - olocation="${overlay}/eclass/${1}.eclass"
4623 - if [ -e "$olocation" ]; then
4624 - location="${olocation}"
4625 - debug-print " eclass exists: ${location}"
4626 - fi
4627 - done
4628 - fi
4629 + for repo_location in "${PORTAGE_ECLASS_LOCATIONS[@]}"; do
4630 + potential_location="${repo_location}/eclass/${1}.eclass"
4631 + if [[ -f ${potential_location} ]]; then
4632 + location="${potential_location}"
4633 + debug-print " eclass exists: ${location}"
4634 + break
4635 + fi
4636 + done
4637 debug-print "inherit: $1 -> $location"
4638 - [ ! -e "$location" ] && die "${1}.eclass could not be found by inherit()"
4639 -
4640 - if [ "${location}" == "${olocation}" ] && \
4641 - ! has "${location}" ${EBUILD_OVERLAY_ECLASSES} ; then
4642 - EBUILD_OVERLAY_ECLASSES="${EBUILD_OVERLAY_ECLASSES} ${location}"
4643 - fi
4644 + [[ -z ${location} ]] && die "${1}.eclass could not be found by inherit()"
4645
4646 - #We need to back up the value of DEPEND and RDEPEND to B_DEPEND and B_RDEPEND
4647 + #We need to back up the values of *DEPEND to B_*DEPEND
4648 #(if set).. and then restore them after the inherit call.
4649
4650 #turn off glob expansion
4651 set -f
4652
4653 # Retain the old data and restore it later.
4654 - unset B_IUSE B_REQUIRED_USE B_DEPEND B_RDEPEND B_PDEPEND
4655 + unset B_IUSE B_REQUIRED_USE B_DEPEND B_RDEPEND B_PDEPEND B_HDEPEND
4656 [ "${IUSE+set}" = set ] && B_IUSE="${IUSE}"
4657 [ "${REQUIRED_USE+set}" = set ] && B_REQUIRED_USE="${REQUIRED_USE}"
4658 [ "${DEPEND+set}" = set ] && B_DEPEND="${DEPEND}"
4659 [ "${RDEPEND+set}" = set ] && B_RDEPEND="${RDEPEND}"
4660 [ "${PDEPEND+set}" = set ] && B_PDEPEND="${PDEPEND}"
4661 - unset IUSE REQUIRED_USE DEPEND RDEPEND PDEPEND
4662 + [ "${HDEPEND+set}" = set ] && B_HDEPEND="${HDEPEND}"
4663 + unset IUSE REQUIRED_USE DEPEND RDEPEND PDEPEND HDEPEND
4664 #turn on glob expansion
4665 set +f
4666
4667 - qa_source "$location" || die "died sourcing $location in inherit()"
4668 + __qa_source "$location" || die "died sourcing $location in inherit()"
4669
4670 #turn off glob expansion
4671 set -f
4672 @@ -292,6 +286,7 @@ inherit() {
4673 [ "${DEPEND+set}" = set ] && E_DEPEND+="${E_DEPEND:+ }${DEPEND}"
4674 [ "${RDEPEND+set}" = set ] && E_RDEPEND+="${E_RDEPEND:+ }${RDEPEND}"
4675 [ "${PDEPEND+set}" = set ] && E_PDEPEND+="${E_PDEPEND:+ }${PDEPEND}"
4676 + [ "${HDEPEND+set}" = set ] && E_HDEPEND+="${E_HDEPEND:+ }${HDEPEND}"
4677
4678 [ "${B_IUSE+set}" = set ] && IUSE="${B_IUSE}"
4679 [ "${B_IUSE+set}" = set ] || unset IUSE
4680 @@ -308,6 +303,9 @@ inherit() {
4681 [ "${B_PDEPEND+set}" = set ] && PDEPEND="${B_PDEPEND}"
4682 [ "${B_PDEPEND+set}" = set ] || unset PDEPEND
4683
4684 + [ "${B_HDEPEND+set}" = set ] && HDEPEND="${B_HDEPEND}"
4685 + [ "${B_HDEPEND+set}" = set ] || unset HDEPEND
4686 +
4687 #turn on glob expansion
4688 set +f
4689
4690 @@ -348,7 +346,7 @@ EXPORT_FUNCTIONS() {
4691
4692 PORTAGE_BASHRCS_SOURCED=0
4693
4694 -# @FUNCTION: source_all_bashrcs
4695 +# @FUNCTION: __source_all_bashrcs
4696 # @DESCRIPTION:
4697 # Source a relevant bashrc files and perform other miscellaneous
4698 # environment initialization when appropriate.
4699 @@ -359,7 +357,7 @@ PORTAGE_BASHRCS_SOURCED=0
4700 # * A "default" function which is an alias for the default phase
4701 # function for the current phase.
4702 #
4703 -source_all_bashrcs() {
4704 +__source_all_bashrcs() {
4705 [[ $PORTAGE_BASHRCS_SOURCED = 1 ]] && return 0
4706 PORTAGE_BASHRCS_SOURCED=1
4707 local x
4708 @@ -373,7 +371,7 @@ source_all_bashrcs() {
4709 local path_array=($PROFILE_PATHS)
4710 restore_IFS
4711 for x in "${path_array[@]}" ; do
4712 - [ -f "$x/profile.bashrc" ] && qa_source "$x/profile.bashrc"
4713 + [ -f "$x/profile.bashrc" ] && __qa_source "$x/profile.bashrc"
4714 done
4715 fi
4716
4717 @@ -390,7 +388,7 @@ source_all_bashrcs() {
4718 if [[ $EBUILD_PHASE != depend ]] ; then
4719 # The user's bashrc is the ONLY non-portage bit of code that can
4720 # change shopts without a QA violation.
4721 - for x in "${PM_EBUILD_HOOK_DIR}"/${CATEGORY}/{${PN},${PN}:${SLOT},${P},${PF}}; do
4722 + for x in "${PM_EBUILD_HOOK_DIR}"/${CATEGORY}/{${PN},${PN}:${SLOT%/*},${P},${PF}}; do
4723 if [ -r "${x}" ]; then
4724 # If $- contains x, then tracing has already been enabled
4725 # elsewhere for some reason. We preserve it's state so as
4726 @@ -470,7 +468,7 @@ if [[ -n ${QA_INTERCEPTORS} ]] ; then
4727 fi
4728
4729 # Subshell/helper die support (must export for the die helper).
4730 -export EBUILD_MASTER_PID=$BASHPID
4731 +export EBUILD_MASTER_PID=${BASHPID:-$(__bashpid)}
4732 trap 'exit 1' SIGTERM
4733
4734 if ! has "$EBUILD_PHASE" clean cleanrm depend && \
4735 @@ -479,7 +477,7 @@ if ! has "$EBUILD_PHASE" clean cleanrm depend && \
4736 # may have come from another version of ebuild.sh or something.
4737 # In any case, preprocess it to prevent any potential interference.
4738 # NOTE: export ${FOO}=... requires quoting, unlike normal exports
4739 - preprocess_ebuild_env || \
4740 + __preprocess_ebuild_env || \
4741 die "error processing environment"
4742 # Colon separated SANDBOX_* variables need to be cumulative.
4743 for x in SANDBOX_DENY SANDBOX_READ SANDBOX_PREDICT SANDBOX_WRITE ; do
4744 @@ -512,17 +510,22 @@ if ! has "$EBUILD_PHASE" clean cleanrm depend && \
4745 [[ -n $EAPI ]] || EAPI=0
4746 fi
4747
4748 -if has "${EAPI:-0}" 4-python; then
4749 +if ___eapi_enables_globstar; then
4750 shopt -s globstar
4751 fi
4752
4753 +# Convert quoted paths to array.
4754 +eval "PORTAGE_ECLASS_LOCATIONS=(${PORTAGE_ECLASS_LOCATIONS})"
4755 +
4756 +# Source the ebuild every time for FEATURES=noauto, so that ebuild
4757 +# modifications take effect immediately.
4758 if ! has "$EBUILD_PHASE" clean cleanrm ; then
4759 if [[ $EBUILD_PHASE = depend || ! -f $T/environment || \
4760 - -f $PORTAGE_BUILDDIR/.ebuild_changed ]] || \
4761 - has noauto $FEATURES ; then
4762 + -f $PORTAGE_BUILDDIR/.ebuild_changed || \
4763 + " ${FEATURES} " == *" noauto "* ]] ; then
4764 # The bashrcs get an opportunity here to set aliases that will be expanded
4765 # during sourcing of ebuilds and eclasses.
4766 - source_all_bashrcs
4767 + __source_all_bashrcs
4768
4769 # When EBUILD_PHASE != depend, INHERITED comes pre-initialized
4770 # from cache. In order to make INHERITED content independent of
4771 @@ -534,8 +537,9 @@ if ! has "$EBUILD_PHASE" clean cleanrm ; then
4772 # In order to ensure correct interaction between ebuilds and
4773 # eclasses, they need to be unset before this process of
4774 # interaction begins.
4775 - unset EAPI DEPEND RDEPEND PDEPEND INHERITED IUSE REQUIRED_USE \
4776 - ECLASS E_IUSE E_REQUIRED_USE E_DEPEND E_RDEPEND E_PDEPEND
4777 + unset EAPI DEPEND RDEPEND PDEPEND HDEPEND INHERITED IUSE REQUIRED_USE \
4778 + ECLASS E_IUSE E_REQUIRED_USE E_DEPEND E_RDEPEND E_PDEPEND \
4779 + E_HDEPEND
4780
4781 if [[ $PORTAGE_DEBUG != 1 || ${-/x/} != $- ]] ; then
4782 source "$EBUILD" || die "error sourcing ebuild"
4783 @@ -556,7 +560,7 @@ if ! has "$EBUILD_PHASE" clean cleanrm ; then
4784 # export EAPI for helpers (especially since we unset it above)
4785 export EAPI
4786
4787 - if has "$EAPI" 0 1 2 3 3_pre2 ; then
4788 + if ___eapi_has_RDEPEND_DEPEND_fallback; then
4789 export RDEPEND=${RDEPEND-${DEPEND}}
4790 debug-print "RDEPEND: not set... Setting to: ${DEPEND}"
4791 fi
4792 @@ -566,19 +570,20 @@ if ! has "$EBUILD_PHASE" clean cleanrm ; then
4793 DEPEND+="${DEPEND:+ }${E_DEPEND}"
4794 RDEPEND+="${RDEPEND:+ }${E_RDEPEND}"
4795 PDEPEND+="${PDEPEND:+ }${E_PDEPEND}"
4796 + HDEPEND+="${HDEPEND:+ }${E_HDEPEND}"
4797 REQUIRED_USE+="${REQUIRED_USE:+ }${E_REQUIRED_USE}"
4798
4799 - unset ECLASS E_IUSE E_REQUIRED_USE E_DEPEND E_RDEPEND E_PDEPEND \
4800 + unset ECLASS E_IUSE E_REQUIRED_USE E_DEPEND E_RDEPEND E_PDEPEND E_HDEPEND \
4801 __INHERITED_QA_CACHE
4802
4803 # alphabetically ordered by $EBUILD_PHASE value
4804 - case "$EAPI" in
4805 + case ${EAPI} in
4806 0|1)
4807 _valid_phases="src_compile pkg_config pkg_info src_install
4808 pkg_nofetch pkg_postinst pkg_postrm pkg_preinst pkg_prerm
4809 pkg_setup src_test src_unpack"
4810 ;;
4811 - 2|3|3_pre2)
4812 + 2|3)
4813 _valid_phases="src_compile pkg_config src_configure pkg_info
4814 src_install pkg_nofetch pkg_postinst pkg_postrm pkg_preinst
4815 src_prepare pkg_prerm pkg_setup src_test src_unpack"
4816 @@ -670,9 +675,13 @@ if [[ $EBUILD_PHASE = depend ]] ; then
4817
4818 auxdbkeys="DEPEND RDEPEND SLOT SRC_URI RESTRICT HOMEPAGE LICENSE
4819 DESCRIPTION KEYWORDS INHERITED IUSE REQUIRED_USE PDEPEND PROVIDE EAPI
4820 - PROPERTIES DEFINED_PHASES UNUSED_05 UNUSED_04
4821 + PROPERTIES DEFINED_PHASES HDEPEND UNUSED_04
4822 UNUSED_03 UNUSED_02 UNUSED_01"
4823
4824 + if ! ___eapi_has_HDEPEND; then
4825 + unset HDEPEND
4826 + fi
4827 +
4828 # The extra $(echo) commands remove newlines.
4829 if [ -n "${dbkey}" ] ; then
4830 > "${dbkey}"
4831 @@ -681,31 +690,28 @@ if [[ $EBUILD_PHASE = depend ]] ; then
4832 done
4833 else
4834 for f in ${auxdbkeys} ; do
4835 - echo $(echo ${!f}) 1>&9 || exit $?
4836 + eval "echo \$(echo \${!f}) 1>&${PORTAGE_PIPE_FD}" || exit $?
4837 done
4838 - exec 9>&-
4839 + eval "exec ${PORTAGE_PIPE_FD}>&-"
4840 fi
4841 set +f
4842 else
4843 - # Note: readonly variables interfere with preprocess_ebuild_env(), so
4844 + # Note: readonly variables interfere with __preprocess_ebuild_env(), so
4845 # declare them only after it has already run.
4846 declare -r $PORTAGE_READONLY_METADATA $PORTAGE_READONLY_VARS
4847 - case "$EAPI" in
4848 - 0|1|2)
4849 - [[ " ${FEATURES} " == *" force-prefix "* ]] && \
4850 - declare -r ED EPREFIX EROOT
4851 - ;;
4852 - *)
4853 - declare -r ED EPREFIX EROOT
4854 - ;;
4855 - esac
4856 + if ___eapi_has_prefix_variables; then
4857 + declare -r ED EPREFIX EROOT
4858 + fi
4859
4860 if [[ -n $EBUILD_SH_ARGS ]] ; then
4861 (
4862 # Don't allow subprocesses to inherit the pipe which
4863 # emerge uses to monitor ebuild.sh.
4864 - exec 9>&-
4865 - ebuild_main ${EBUILD_SH_ARGS}
4866 + if [[ -n ${PORTAGE_PIPE_FD} ]] ; then
4867 + eval "exec ${PORTAGE_PIPE_FD}>&-"
4868 + unset PORTAGE_PIPE_FD
4869 + fi
4870 + __ebuild_main ${EBUILD_SH_ARGS}
4871 exit 0
4872 )
4873 exit $?
4874
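The depend-phase hunk above retires the UNUSED_05 slot in favor of HDEPEND in auxdbkeys and routes the metadata dump through ${PORTAGE_PIPE_FD} instead of a hard-coded fd 9. As a rough Python sketch of the flat, positional format that ordered key list drives (key list shortened; the function name and sample values are purely illustrative, not Portage code):

# Illustrative sketch of the depend-phase output: one line per auxdbkeys
# entry, in order, with embedded newlines collapsed to spaces, which is
# the job of the extra $(echo) in the shell loop.
AUXDB_KEYS = [
	"DEPEND", "RDEPEND", "SLOT", "SRC_URI", "RESTRICT", "HOMEPAGE",
	"LICENSE", "DESCRIPTION", "KEYWORDS", "INHERITED", "IUSE",
	"REQUIRED_USE", "PDEPEND", "PROVIDE", "EAPI", "PROPERTIES",
	"DEFINED_PHASES", "HDEPEND",
]

def serialize_metadata(metadata):
	lines = []
	for key in AUXDB_KEYS:
		# Missing keys become empty lines so the positional format stays
		# intact for the consumer on the other end of the pipe.
		lines.append(" ".join(metadata.get(key, "").split()))
	return "\n".join(lines) + "\n"

if __name__ == "__main__":
	print(serialize_metadata({"EAPI": "5", "SLOT": "0", "DESCRIPTION": "demo"}))
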
4875 diff --git a/bin/egencache b/bin/egencache
4876 index a75a341..c14be93 100755
4877 --- a/bin/egencache
4878 +++ b/bin/egencache
4879 @@ -1,15 +1,17 @@
4880 -#!/usr/bin/python
4881 -# Copyright 2009-2012 Gentoo Foundation
4882 +#!/usr/bin/python -b
4883 +# Copyright 2009-2014 Gentoo Foundation
4884 # Distributed under the terms of the GNU General Public License v2
4885
4886 -from __future__ import print_function
4887 +# unicode_literals for compat with TextIOWrapper in Python 2
4888 +from __future__ import print_function, unicode_literals
4889
4890 +import platform
4891 import signal
4892 import sys
4893 # This block ensures that ^C interrupts are handled quietly.
4894 try:
4895
4896 - def exithandler(signum,frame):
4897 + def exithandler(signum, _frame):
4898 signal.signal(signal.SIGINT, signal.SIG_IGN)
4899 signal.signal(signal.SIGTERM, signal.SIG_IGN)
4900 sys.exit(128 + signum)
4901 @@ -20,26 +22,39 @@ try:
4902 except KeyboardInterrupt:
4903 sys.exit(128 + signal.SIGINT)
4904
4905 +def debug_signal(_signum, _frame):
4906 + import pdb
4907 + pdb.set_trace()
4908 +
4909 +if platform.python_implementation() == 'Jython':
4910 + debug_signum = signal.SIGUSR2 # bug #424259
4911 +else:
4912 + debug_signum = signal.SIGUSR1
4913 +
4914 +signal.signal(debug_signum, debug_signal)
4915 +
4916 import io
4917 import logging
4918 -import optparse
4919 import subprocess
4920 import time
4921 import textwrap
4922 import re
4923
4924 -try:
4925 - import portage
4926 -except ImportError:
4927 - from os import path as osp
4928 - sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym"))
4929 - import portage
4930 -
4931 +from os import path as osp
4932 +pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")
4933 +sys.path.insert(0, pym_path)
4934 +import portage
4935 +portage._internal_caller = True
4936 from portage import os, _encodings, _unicode_encode, _unicode_decode
4937 from _emerge.MetadataRegen import MetadataRegen
4938 from portage.cache.cache_errors import CacheError, StatCollision
4939 +from portage.const import TIMESTAMP_FORMAT
4940 from portage.manifest import guessManifestFileType
4941 +from portage.package.ebuild._parallel_manifest.ManifestScheduler import ManifestScheduler
4942 from portage.util import cmp_sort_key, writemsg_level
4943 +from portage.util._argparse import ArgumentParser
4944 +from portage.util._async.run_main_scheduler import run_main_scheduler
4945 +from portage.util._eventloop.global_event_loop import global_event_loop
4946 from portage import cpv_getkey
4947 from portage.dep import Atom, isjustname
4948 from portage.versions import pkgsplit, vercmp
4949 @@ -59,72 +74,98 @@ else:
4950 from repoman.utilities import FindVCS
4951
4952 if sys.hexversion >= 0x3000000:
4953 + # pylint: disable=W0622
4954 long = int
4955
4956 def parse_args(args):
4957 usage = "egencache [options] <action> ... [atom] ..."
4958 - parser = optparse.OptionParser(usage=usage)
4959 + parser = ArgumentParser(usage=usage)
4960
4961 - actions = optparse.OptionGroup(parser, 'Actions')
4962 - actions.add_option("--update",
4963 + actions = parser.add_argument_group('Actions')
4964 + actions.add_argument("--update",
4965 action="store_true",
4966 - help="update metadata/cache/ (generate as necessary)")
4967 - actions.add_option("--update-use-local-desc",
4968 + help="update metadata/md5-cache/ (generate as necessary)")
4969 + actions.add_argument("--update-use-local-desc",
4970 action="store_true",
4971 help="update the use.local.desc file from metadata.xml")
4972 - actions.add_option("--update-changelogs",
4973 + actions.add_argument("--update-changelogs",
4974 action="store_true",
4975 help="update the ChangeLog files from SCM logs")
4976 - parser.add_option_group(actions)
4977 + actions.add_argument("--update-manifests",
4978 + action="store_true",
4979 + help="update manifests")
4980
4981 - common = optparse.OptionGroup(parser, 'Common options')
4982 - common.add_option("--repo",
4983 + common = parser.add_argument_group('Common options')
4984 + common.add_argument("--repo",
4985 action="store",
4986 - help="name of repo to operate on (default repo is located at $PORTDIR)")
4987 - common.add_option("--config-root",
4988 + help="name of repo to operate on")
4989 + common.add_argument("--config-root",
4990 help="location of portage config files",
4991 dest="portage_configroot")
4992 - common.add_option("--portdir",
4993 - help="override the portage tree location",
4994 + common.add_argument("--gpg-dir",
4995 + help="override the PORTAGE_GPG_DIR variable",
4996 + dest="gpg_dir")
4997 + common.add_argument("--gpg-key",
4998 + help="override the PORTAGE_GPG_KEY variable",
4999 + dest="gpg_key")
5000 + common.add_argument("--portdir",
5001 + help="override the PORTDIR variable (deprecated in favor of --repositories-configuration)",
5002 dest="portdir")
5003 - common.add_option("--portdir-overlay",
5004 - help="override the PORTDIR_OVERLAY variable (requires that --repo is also specified)",
5005 + common.add_argument("--portdir-overlay",
5006 + help="override the PORTDIR_OVERLAY variable (deprecated in favor of --repositories-configuration)",
5007 dest="portdir_overlay")
5008 - common.add_option("--tolerant",
5009 + common.add_argument("--repositories-configuration",
5010 + help="override configuration of repositories (in format of repos.conf)",
5011 + dest="repositories_configuration")
5012 + common.add_argument("--sign-manifests",
5013 + choices=('y', 'n'),
5014 + metavar="<y|n>",
5015 + help="manually override layout.conf sign-manifests setting")
5016 + common.add_argument("--strict-manifests",
5017 + choices=('y', 'n'),
5018 + metavar="<y|n>",
5019 + help="manually override \"strict\" FEATURES setting")
5020 + common.add_argument("--thin-manifests",
5021 + choices=('y', 'n'),
5022 + metavar="<y|n>",
5023 + help="manually override layout.conf thin-manifests setting")
5024 + common.add_argument("--tolerant",
5025 action="store_true",
5026 help="exit successfully if only minor errors occurred")
5027 - common.add_option("--ignore-default-opts",
5028 + common.add_argument("--ignore-default-opts",
5029 action="store_true",
5030 help="do not use the EGENCACHE_DEFAULT_OPTS environment variable")
5031 - parser.add_option_group(common)
5032 + common.add_argument("--write-timestamp",
5033 + action="store_true",
5034 + help="write metadata/timestamp.chk as required for rsync repositories")
5035
5036 - update = optparse.OptionGroup(parser, '--update options')
5037 - update.add_option("--cache-dir",
5038 + update = parser.add_argument_group('--update options')
5039 + update.add_argument("--cache-dir",
5040 help="location of the metadata cache",
5041 dest="cache_dir")
5042 - update.add_option("--jobs",
5043 + update.add_argument("-j", "--jobs",
5044 + type=int,
5045 action="store",
5046 help="max ebuild processes to spawn")
5047 - update.add_option("--load-average",
5048 + update.add_argument("--load-average",
5049 + type=float,
5050 action="store",
5051 help="max load allowed when spawning multiple jobs",
5052 dest="load_average")
5053 - update.add_option("--rsync",
5054 + update.add_argument("--rsync",
5055 action="store_true",
5056 help="enable rsync stat collision workaround " + \
5057 "for bug 139134 (use with --update)")
5058 - parser.add_option_group(update)
5059
5060 - uld = optparse.OptionGroup(parser, '--update-use-local-desc options')
5061 - uld.add_option("--preserve-comments",
5062 + uld = parser.add_argument_group('--update-use-local-desc options')
5063 + uld.add_argument("--preserve-comments",
5064 action="store_true",
5065 help="preserve the comments from the existing use.local.desc file")
5066 - uld.add_option("--use-local-desc-output",
5067 + uld.add_argument("--use-local-desc-output",
5068 help="output file for use.local.desc data (or '-' for stdout)",
5069 dest="uld_output")
5070 - parser.add_option_group(uld)
5071
5072 - options, args = parser.parse_args(args)
5073 + options, args = parser.parse_known_args(args)
5074
5075 if options.jobs:
5076 jobs = None
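
The parse_args() hunk above migrates egencache from optparse to the ArgumentParser wrapper, grouping options with add_argument_group() and switching to parse_known_args() so that package atoms fall through as positional arguments. A minimal, self-contained sketch of that shape, using plain stdlib argparse and only a small illustrative subset of the options:

from argparse import ArgumentParser

def parse_args(args):
	parser = ArgumentParser(usage="egencache [options] <action> ... [atom] ...")

	actions = parser.add_argument_group("Actions")
	actions.add_argument("--update", action="store_true",
		help="update metadata/md5-cache/ (generate as necessary)")

	common = parser.add_argument_group("Common options")
	common.add_argument("--repo", action="store",
		help="name of repo to operate on")
	common.add_argument("-j", "--jobs", type=int,
		help="max ebuild processes to spawn")

	# Unknown arguments (the atoms) are returned instead of raising an error.
	options, extra = parser.parse_known_args(args)
	return parser, options, extra

if __name__ == "__main__":
	parser, options, atoms = parse_args(
		["--update", "--repo", "gentoo", "sys-apps/portage"])
	print(options.update, options.repo, atoms)
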
5077 @@ -171,9 +212,12 @@ def parse_args(args):
5078 parser.error("Write access denied: --cache-dir='%s'" % \
5079 (options.cache_dir,))
5080
5081 - if options.portdir_overlay is not None and \
5082 - options.repo is None:
5083 - parser.error("--portdir-overlay option requires --repo option")
5084 + if options.portdir is not None:
5085 + writemsg_level("egencache: warning: --portdir option is deprecated in favor of --repositories-configuration option\n",
5086 + level=logging.WARNING, noiselevel=-1)
5087 + if options.portdir_overlay is not None:
5088 + writemsg_level("egencache: warning: --portdir-overlay option is deprecated in favor of --repositories-configuration option\n",
5089 + level=logging.WARNING, noiselevel=-1)
5090
5091 for atom in args:
5092 try:
5093 @@ -215,9 +259,11 @@ class GenCache(object):
5094 else:
5095 self._cp_set = None
5096 self._cp_missing = set()
5097 + write_auxdb = "metadata-transfer" in portdb.settings.features
5098 self._regen = MetadataRegen(portdb, cp_iter=cp_iter,
5099 consumer=self._metadata_callback,
5100 - max_jobs=max_jobs, max_load=max_load)
5101 + max_jobs=max_jobs, max_load=max_load,
5102 + write_auxdb=write_auxdb, main=True)
5103 self.returncode = os.EX_OK
5104 conf = portdb.repositories.get_repo_for_location(tree)
5105 self._trg_caches = tuple(conf.iter_pregenerated_caches(
5106 @@ -255,98 +301,74 @@ class GenCache(object):
5107
5108 def _write_cache(self, trg_cache, cpv, repo_path, metadata, ebuild_hash):
5109
5110 - if not hasattr(trg_cache, 'raise_stat_collision'):
5111 - # This cache does not avoid redundant writes automatically,
5112 - # so check for an identical existing entry before writing.
5113 - # This prevents unnecessary disk writes and can also prevent
5114 - # unnecessary rsync transfers.
5115 - try:
5116 - dest = trg_cache[cpv]
5117 - except (KeyError, CacheError):
5118 - pass
5119 - else:
5120 - if trg_cache.validate_entry(dest,
5121 - ebuild_hash, self._eclass_db):
5122 - identical = True
5123 - for k in self._auxdbkeys:
5124 - if dest.get(k, '') != metadata.get(k, ''):
5125 - identical = False
5126 - break
5127 - if identical:
5128 - return
5129 + if not hasattr(trg_cache, 'raise_stat_collision'):
5130 + # This cache does not avoid redundant writes automatically,
5131 + # so check for an identical existing entry before writing.
5132 + # This prevents unnecessary disk writes and can also prevent
5133 + # unnecessary rsync transfers.
5134 + try:
5135 + dest = trg_cache[cpv]
5136 + except (KeyError, CacheError):
5137 + pass
5138 + else:
5139 + if trg_cache.validate_entry(dest,
5140 + ebuild_hash, self._eclass_db):
5141 + identical = True
5142 + for k in self._auxdbkeys:
5143 + if dest.get(k, '') != metadata.get(k, ''):
5144 + identical = False
5145 + break
5146 + if identical:
5147 + return
5148
5149 + try:
5150 + chf = trg_cache.validation_chf
5151 + metadata['_%s_' % chf] = getattr(ebuild_hash, chf)
5152 try:
5153 - chf = trg_cache.validation_chf
5154 - metadata['_%s_' % chf] = getattr(ebuild_hash, chf)
5155 + trg_cache[cpv] = metadata
5156 + except StatCollision as sc:
5157 + # If the content of a cache entry changes and neither the
5158 + # file mtime nor size changes, it will prevent rsync from
5159 + # detecting changes. Cache backends may raise this
5160 + # exception from _setitem() if they detect this type of stat
5161 + # collision. These exceptions are handled by bumping the
5162 + # mtime on the ebuild (and the corresponding cache entry).
5163 + # See bug #139134. It is convenient to include checks for
5164 + # redundant writes along with the internal StatCollision
5165 + # detection code, so for caches with the
5166 + # raise_stat_collision attribute, we do not need to
5167 + # explicitly check for redundant writes like we do for the
5168 + # other cache types above.
5169 + max_mtime = sc.mtime
5170 + for _ec, ec_hash in metadata['_eclasses_'].items():
5171 + if max_mtime < ec_hash.mtime:
5172 + max_mtime = ec_hash.mtime
5173 + if max_mtime == sc.mtime:
5174 + max_mtime += 1
5175 + max_mtime = long(max_mtime)
5176 try:
5177 + os.utime(ebuild_hash.location, (max_mtime, max_mtime))
5178 + except OSError as e:
5179 + self.returncode |= 1
5180 + writemsg_level(
5181 + "%s writing target: %s\n" % (cpv, e),
5182 + level=logging.ERROR, noiselevel=-1)
5183 + else:
5184 + ebuild_hash.mtime = max_mtime
5185 + metadata['_mtime_'] = max_mtime
5186 trg_cache[cpv] = metadata
5187 - except StatCollision as sc:
5188 - # If the content of a cache entry changes and neither the
5189 - # file mtime nor size changes, it will prevent rsync from
5190 - # detecting changes. Cache backends may raise this
5191 - # exception from _setitem() if they detect this type of stat
5192 - # collision. These exceptions are handled by bumping the
5193 - # mtime on the ebuild (and the corresponding cache entry).
5194 - # See bug #139134. It is convenient to include checks for
5195 - # redundant writes along with the internal StatCollision
5196 - # detection code, so for caches with the
5197 - # raise_stat_collision attribute, we do not need to
5198 - # explicitly check for redundant writes like we do for the
5199 - # other cache types above.
5200 - max_mtime = sc.mtime
5201 - for ec, ec_hash in metadata['_eclasses_'].items():
5202 - if max_mtime < ec_hash.mtime:
5203 - max_mtime = ec_hash.mtime
5204 - if max_mtime == sc.mtime:
5205 - max_mtime += 1
5206 - max_mtime = long(max_mtime)
5207 - try:
5208 - os.utime(ebuild_hash.location, (max_mtime, max_mtime))
5209 - except OSError as e:
5210 - self.returncode |= 1
5211 - writemsg_level(
5212 - "%s writing target: %s\n" % (cpv, e),
5213 - level=logging.ERROR, noiselevel=-1)
5214 - else:
5215 - ebuild_hash.mtime = max_mtime
5216 - metadata['_mtime_'] = max_mtime
5217 - trg_cache[cpv] = metadata
5218 - self._portdb.auxdb[repo_path][cpv] = metadata
5219 + self._portdb.auxdb[repo_path][cpv] = metadata
5220
5221 - except CacheError as ce:
5222 - self.returncode |= 1
5223 - writemsg_level(
5224 - "%s writing target: %s\n" % (cpv, ce),
5225 - level=logging.ERROR, noiselevel=-1)
5226 + except CacheError as ce:
5227 + self.returncode |= 1
5228 + writemsg_level(
5229 + "%s writing target: %s\n" % (cpv, ce),
5230 + level=logging.ERROR, noiselevel=-1)
5231
5232 def run(self):
5233 -
5234 - received_signal = []
5235 -
5236 - def sighandler(signum, frame):
5237 - signal.signal(signal.SIGINT, signal.SIG_IGN)
5238 - signal.signal(signal.SIGTERM, signal.SIG_IGN)
5239 - self._regen.terminate()
5240 - received_signal.append(128 + signum)
5241 -
5242 - earlier_sigint_handler = signal.signal(signal.SIGINT, sighandler)
5243 - earlier_sigterm_handler = signal.signal(signal.SIGTERM, sighandler)
5244 -
5245 - try:
5246 - self._regen.run()
5247 - finally:
5248 - # Restore previous handlers
5249 - if earlier_sigint_handler is not None:
5250 - signal.signal(signal.SIGINT, earlier_sigint_handler)
5251 - else:
5252 - signal.signal(signal.SIGINT, signal.SIG_DFL)
5253 - if earlier_sigterm_handler is not None:
5254 - signal.signal(signal.SIGTERM, earlier_sigterm_handler)
5255 - else:
5256 - signal.signal(signal.SIGTERM, signal.SIG_DFL)
5257 -
5258 - if received_signal:
5259 - sys.exit(received_signal[0])
5260 + signum = run_main_scheduler(self._regen)
5261 + if signum is not None:
5262 + sys.exit(128 + signum)
5263
5264 self.returncode |= self._regen.returncode
5265
5266 @@ -371,8 +393,8 @@ class GenCache(object):
5267 self.returncode |= 1
5268 writemsg_level(
5269 "Error listing cache entries for " + \
5270 - "'%s/metadata/cache': %s, continuing...\n" % \
5271 - (self._portdb.porttree_root, ce),
5272 + "'%s': %s, continuing...\n" % \
5273 + (trg_cache.location, ce),
5274 level=logging.ERROR, noiselevel=-1)
5275
5276 else:
5277 @@ -393,8 +415,8 @@ class GenCache(object):
5278 self.returncode |= 1
5279 writemsg_level(
5280 "Error listing cache entries for " + \
5281 - "'%s/metadata/cache': %s, continuing...\n" % \
5282 - (self._portdb.porttree_root, ce),
5283 + "'%s': %s, continuing...\n" % \
5284 + (trg_cache.location, ce),
5285 level=logging.ERROR, noiselevel=-1)
5286
5287 if cp_missing:
5288 @@ -436,7 +458,7 @@ class GenUseLocalDesc(object):
5289 self._portdb = portdb
5290 self._output = output
5291 self._preserve_comments = preserve_comments
5292 -
5293 +
5294 def run(self):
5295 repo_path = self._portdb.porttrees[0]
5296 ops = {'<':0, '<=':1, '=':2, '>=':3, '>':4}
5297 @@ -509,14 +531,14 @@ class GenUseLocalDesc(object):
5298 encoding=_encodings['fs'], errors='strict'),
5299 mode='a', encoding=_encodings['repo.content'],
5300 errors='backslashreplace')
5301 - output.write(_unicode_decode('\n'))
5302 + output.write('\n')
5303 else:
5304 - output.write(textwrap.dedent(_unicode_decode('''\
5305 + output.write(textwrap.dedent('''\
5306 # This file is deprecated as per GLEP 56 in favor of metadata.xml. Please add
5307 # your descriptions to your package's metadata.xml ONLY.
5308 # * generated automatically using egencache *
5309
5310 - ''')))
5311 + '''))
5312
5313 # The cmp function no longer exists in python3, so we'll
5314 # implement our own here under a slightly different name
5315 @@ -544,7 +566,8 @@ class GenUseLocalDesc(object):
5316 for cp in self._portdb.cp_all():
5317 metadata_path = os.path.join(repo_path, cp, 'metadata.xml')
5318 try:
5319 - metadata = ElementTree.parse(metadata_path,
5320 + metadata = ElementTree.parse(_unicode_encode(metadata_path,
5321 + encoding=_encodings['fs'], errors='strict'),
5322 parser=ElementTree.XMLParser(
5323 target=_MetadataTreeBuilder()))
5324 except IOError:
5325 @@ -600,8 +623,7 @@ class GenUseLocalDesc(object):
5326 resatoms = sorted(reskeys, key=cmp_sort_key(atomcmp))
5327 resdesc = resdict[reskeys[resatoms[-1]]]
5328
5329 - output.write(_unicode_decode(
5330 - '%s:%s - %s\n' % (cp, flag, resdesc)))
5331 + output.write('%s:%s - %s\n' % (cp, flag, resdesc))
5332
5333 output.close()
5334
5335 @@ -623,7 +645,8 @@ class _special_filename(_filename_base):
5336 self.file_name = file_name
5337 self.file_type = guessManifestFileType(file_name)
5338
5339 - def file_type_lt(self, a, b):
5340 + @staticmethod
5341 + def file_type_lt(a, b):
5342 """
5343 Defines an ordering between file types.
5344 """
5345 @@ -698,12 +721,12 @@ class GenChangeLogs(object):
5346 self.returncode |= 2
5347 return
5348
5349 - output.write(textwrap.dedent(_unicode_decode('''\
5350 + output.write(textwrap.dedent('''\
5351 # ChangeLog for %s
5352 # Copyright 1999-%s Gentoo Foundation; Distributed under the GPL v2
5353 # $Header: $
5354
5355 - ''' % (cp, time.strftime('%Y')))))
5356 + ''' % (cp, time.strftime('%Y'))))
5357
5358 # now grab all the commits
5359 commits = self.grab(['git', 'rev-list', 'HEAD', '--', '.']).split()
5360 @@ -767,11 +790,10 @@ class GenChangeLogs(object):
5361 # Reverse the sort order for headers.
5362 for c in reversed(changed):
5363 if c.startswith('+') and c.endswith('.ebuild'):
5364 - output.write(_unicode_decode(
5365 - '*%s (%s)\n' % (c[1:-7], date)))
5366 + output.write('*%s (%s)\n' % (c[1:-7], date))
5367 wroteheader = True
5368 if wroteheader:
5369 - output.write(_unicode_decode('\n'))
5370 + output.write('\n')
5371
5372 # strip '<cp>: ', '[<cp>] ', and similar
5373 body[0] = re.sub(r'^\W*' + re.escape(cp) + r'\W+', '', body[0])
5374 @@ -791,13 +813,12 @@ class GenChangeLogs(object):
5375
5376 # don't break filenames on hyphens
5377 self._wrapper.break_on_hyphens = False
5378 - output.write(_unicode_decode(
5379 - self._wrapper.fill(
5380 - '%s; %s %s:' % (date, author, ', '.join(changed)))))
5381 + output.write(self._wrapper.fill(
5382 + '%s; %s %s:' % (date, author, ', '.join(changed))))
5383 # but feel free to break commit messages there
5384 self._wrapper.break_on_hyphens = True
5385 - output.write(_unicode_decode(
5386 - '\n%s\n\n' % '\n'.join(self._wrapper.fill(x) for x in body)))
5387 + output.write(
5388 + '\n%s\n\n' % '\n'.join(self._wrapper.fill(x) for x in body))
5389
5390 output.close()
5391
5392 @@ -830,17 +851,22 @@ class GenChangeLogs(object):
5393 self.generate_changelog(cp)
5394
5395 def egencache_main(args):
5396 - parser, options, atoms = parse_args(args)
5397 -
5398 - config_root = options.config_root
5399
5400 # The calling environment is ignored, so the program is
5401 # completely controlled by commandline arguments.
5402 env = {}
5403
5404 - if options.repo is None:
5405 - env['PORTDIR_OVERLAY'] = ''
5406 - elif options.portdir_overlay:
5407 + if not sys.stdout.isatty():
5408 + portage.output.nocolor()
5409 + env['NOCOLOR'] = 'true'
5410 +
5411 + parser, options, atoms = parse_args(args)
5412 +
5413 + config_root = options.config_root
5414 +
5415 + if options.repositories_configuration is not None:
5416 + env['PORTAGE_REPOSITORIES'] = options.repositories_configuration
5417 + elif options.portdir_overlay is not None:
5418 env['PORTDIR_OVERLAY'] = options.portdir_overlay
5419
5420 if options.cache_dir is not None:
5421 @@ -854,7 +880,8 @@ def egencache_main(args):
5422
5423 default_opts = None
5424 if not options.ignore_default_opts:
5425 - default_opts = settings.get('EGENCACHE_DEFAULT_OPTS', '').split()
5426 + default_opts = portage.util.shlex_split(
5427 + settings.get('EGENCACHE_DEFAULT_OPTS', ''))
5428
5429 if default_opts:
5430 parser, options, args = parse_args(default_opts + args)
5431 @@ -865,18 +892,50 @@ def egencache_main(args):
5432 settings = portage.config(config_root=config_root,
5433 local_config=False, env=env)
5434
5435 - if not options.update and not options.update_use_local_desc \
5436 - and not options.update_changelogs:
5437 + if not (options.update or options.update_use_local_desc or
5438 + options.update_changelogs or options.update_manifests):
5439 parser.error('No action specified')
5440 return 1
5441
5442 + if options.repo is None:
5443 + if len(settings.repositories.prepos) == 2:
5444 + for repo in settings.repositories:
5445 + if repo.name != "DEFAULT":
5446 + options.repo = repo.name
5447 + break
5448 +
5449 + if options.repo is None:
5450 + parser.error("--repo option is required")
5451 +
5452 + repo_path = settings.repositories.treemap.get(options.repo)
5453 + if repo_path is None:
5454 + parser.error("Unable to locate repository named '%s'" % (options.repo,))
5455 + return 1
5456 +
5457 + repo_config = settings.repositories.get_repo_for_location(repo_path)
5458 +
5459 + if options.strict_manifests is not None:
5460 + if options.strict_manifests == "y":
5461 + settings.features.add("strict")
5462 + else:
5463 + settings.features.discard("strict")
5464 +
5465 if options.update and 'metadata-transfer' not in settings.features:
5466 - settings.features.add('metadata-transfer')
5467 + # Forcibly enable metadata-transfer if portdbapi has a pregenerated
5468 + # cache that does not support eclass validation.
5469 + cache = repo_config.get_pregenerated_cache(
5470 + portage.dbapi.dbapi._known_keys, readonly=True)
5471 + if cache is not None and not cache.complete_eclass_entries:
5472 + settings.features.add('metadata-transfer')
5473 + cache = None
5474
5475 settings.lock()
5476
5477 portdb = portage.portdbapi(mysettings=settings)
5478
5479 + # Limit ebuilds to the specified repo.
5480 + portdb.porttrees = [repo_path]
5481 +
5482 if options.update:
5483 if options.cache_dir is not None:
5484 # already validated earlier
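
When --repo is omitted, the hunk above falls back to the single configured repository besides the DEFAULT section and otherwise makes --repo mandatory. A compact sketch of that selection rule (helper name and inputs are invented for the example):

def pick_default_repo(repo_names):
	# settings.repositories.prepos includes a DEFAULT section, so a length
	# of two means exactly one real repository is configured.
	candidates = [name for name in repo_names if name != "DEFAULT"]
	return candidates[0] if len(candidates) == 1 else None

if __name__ == "__main__":
	print(pick_default_repo(["DEFAULT", "gentoo"]))            # gentoo
	print(pick_default_repo(["DEFAULT", "gentoo", "local"]))   # None, ambiguous
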
5485 @@ -892,17 +951,71 @@ def egencache_main(args):
5486 level=logging.ERROR, noiselevel=-1)
5487 return 1
5488
5489 - if options.repo is not None:
5490 - repo_path = portdb.getRepositoryPath(options.repo)
5491 - if repo_path is None:
5492 - parser.error("Unable to locate repository named '%s'" % \
5493 - (options.repo,))
5494 - return 1
5495 + if options.sign_manifests is not None:
5496 + repo_config.sign_manifest = options.sign_manifests == 'y'
5497
5498 - # Limit ebuilds to the specified repo.
5499 - portdb.porttrees = [repo_path]
5500 - else:
5501 - portdb.porttrees = [portdb.porttree_root]
5502 + if options.thin_manifests is not None:
5503 + repo_config.thin_manifest = options.thin_manifests == 'y'
5504 +
5505 + gpg_cmd = None
5506 + gpg_vars = None
5507 + force_sign_key = None
5508 +
5509 + if options.update_manifests:
5510 + if repo_config.sign_manifest:
5511 +
5512 + sign_problem = False
5513 + gpg_dir = None
5514 + gpg_cmd = settings.get("PORTAGE_GPG_SIGNING_COMMAND")
5515 + if gpg_cmd is None:
5516 + writemsg_level("egencache: error: "
5517 + "PORTAGE_GPG_SIGNING_COMMAND is unset! "
5518 + "Is make.globals missing?\n",
5519 + level=logging.ERROR, noiselevel=-1)
5520 + sign_problem = True
5521 + elif "${PORTAGE_GPG_KEY}" in gpg_cmd and \
5522 + options.gpg_key is None and \
5523 + "PORTAGE_GPG_KEY" not in settings:
5524 + writemsg_level("egencache: error: "
5525 + "PORTAGE_GPG_KEY is unset!\n",
5526 + level=logging.ERROR, noiselevel=-1)
5527 + sign_problem = True
5528 + elif "${PORTAGE_GPG_DIR}" in gpg_cmd:
5529 + if options.gpg_dir is not None:
5530 + gpg_dir = options.gpg_dir
5531 + elif "PORTAGE_GPG_DIR" not in settings:
5532 + gpg_dir = os.path.expanduser("~/.gnupg")
5533 + else:
5534 + gpg_dir = os.path.expanduser(settings["PORTAGE_GPG_DIR"])
5535 + if not os.access(gpg_dir, os.X_OK):
5536 + writemsg_level(("egencache: error: "
5537 + "Unable to access directory: "
5538 + "PORTAGE_GPG_DIR='%s'\n") % gpg_dir,
5539 + level=logging.ERROR, noiselevel=-1)
5540 + sign_problem = True
5541 +
5542 + if sign_problem:
5543 + writemsg_level("egencache: You may disable manifest "
5544 + "signatures with --sign-manifests=n or by setting "
5545 + "\"sign-manifests = false\" in metadata/layout.conf\n",
5546 + level=logging.ERROR, noiselevel=-1)
5547 + return 1
5548 +
5549 + gpg_vars = {}
5550 + if gpg_dir is not None:
5551 + gpg_vars["PORTAGE_GPG_DIR"] = gpg_dir
5552 + gpg_var_names = []
5553 + if options.gpg_key is None:
5554 + gpg_var_names.append("PORTAGE_GPG_KEY")
5555 + else:
5556 + gpg_vars["PORTAGE_GPG_KEY"] = options.gpg_key
5557 +
5558 + for k in gpg_var_names:
5559 + v = settings.get(k)
5560 + if v is not None:
5561 + gpg_vars[k] = v
5562 +
5563 + force_sign_key = gpg_vars.get("PORTAGE_GPG_KEY")
5564
5565 ret = [os.EX_OK]
5566
5567 @@ -921,6 +1034,29 @@ def egencache_main(args):
5568 else:
5569 ret.append(gen_cache.returncode)
5570
5571 + if options.update_manifests:
5572 +
5573 + cp_iter = None
5574 + if atoms:
5575 + cp_iter = iter(atoms)
5576 +
5577 + event_loop = global_event_loop()
5578 + scheduler = ManifestScheduler(portdb, cp_iter=cp_iter,
5579 + gpg_cmd=gpg_cmd, gpg_vars=gpg_vars,
5580 + force_sign_key=force_sign_key,
5581 + max_jobs=options.jobs,
5582 + max_load=options.load_average,
5583 + event_loop=event_loop)
5584 +
5585 + signum = run_main_scheduler(scheduler)
5586 + if signum is not None:
5587 + sys.exit(128 + signum)
5588 +
5589 + if options.tolerant:
5590 + ret.append(os.EX_OK)
5591 + else:
5592 + ret.append(scheduler.returncode)
5593 +
5594 if options.update_use_local_desc:
5595 gen_desc = GenUseLocalDesc(portdb,
5596 output=options.uld_output,
5597 @@ -933,6 +1069,16 @@ def egencache_main(args):
5598 gen_clogs.run()
5599 ret.append(gen_clogs.returncode)
5600
5601 + if options.write_timestamp:
5602 + timestamp_path = os.path.join(repo_path, 'metadata', 'timestamp.chk')
5603 + try:
5604 + with open(timestamp_path, 'w') as f:
5605 + f.write(time.strftime('%s\n' % TIMESTAMP_FORMAT, time.gmtime()))
5606 + except IOError:
5607 + ret.append(os.EX_IOERR)
5608 + else:
5609 + ret.append(os.EX_OK)
5610 +
5611 return max(ret)
5612
5613 if __name__ == "__main__":
5614
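The new --write-timestamp branch near the end of egencache_main() writes metadata/timestamp.chk for rsync mirrors, formatting the current UTC time with portage.const.TIMESTAMP_FORMAT. A stand-alone sketch of that step, with an assumed rsync-style format string standing in for the real constant:

import os
import time

TS_FORMAT = "%a, %d %b %Y %H:%M:%S +0000"  # assumed stand-in for TIMESTAMP_FORMAT

def write_timestamp_chk(repo_path):
	path = os.path.join(repo_path, "metadata", "timestamp.chk")
	with open(path, "w") as f:
		# One line, trailing newline, UTC, matching what the hunk writes.
		f.write(time.strftime(TS_FORMAT + "\n", time.gmtime()))
	return path
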
5615 diff --git a/bin/emaint b/bin/emaint
5616 index bee46c4..aeeb183 100755
5617 --- a/bin/emaint
5618 +++ b/bin/emaint
5619 @@ -1,9 +1,8 @@
5620 -#!/usr/bin/python -O
5621 -# Copyright 2005-2012 Gentoo Foundation
5622 +#!/usr/bin/python -bO
5623 +# Copyright 2005-2014 Gentoo Foundation
5624 # Distributed under the terms of the GNU General Public License v2
5625
5626 -"""'The emaint program provides an interface to system health
5627 - checks and maintenance.
5628 +"""System health checks and maintenance utilities.
5629 """
5630
5631 from __future__ import print_function
5632 @@ -14,10 +13,10 @@ import errno
5633 try:
5634 import signal
5635
5636 - def exithandler(signum,frame):
5637 + def exithandler(signum, _frame):
5638 signal.signal(signal.SIGINT, signal.SIG_IGN)
5639 signal.signal(signal.SIGTERM, signal.SIG_IGN)
5640 - sys.exit(1)
5641 + sys.exit(128 + signum)
5642
5643 signal.signal(signal.SIGINT, exithandler)
5644 signal.signal(signal.SIGTERM, exithandler)
5645 @@ -26,13 +25,11 @@ try:
5646 except KeyboardInterrupt:
5647 sys.exit(1)
5648
5649 -try:
5650 - import portage
5651 -except ImportError:
5652 - from os import path as osp
5653 - sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym"))
5654 - import portage
5655 -
5656 +from os import path as osp
5657 +pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")
5658 +sys.path.insert(0, pym_path)
5659 +import portage
5660 +portage._internal_caller = True
5661 from portage.emaint.main import emaint_main
5662
5663 try:
5664
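Like egencache, emaint drops the old try/except ImportError dance and now unconditionally prefers the modules shipped next to the script. The bootstrap that the bin/ scripts in this commit converge on is essentially the following sketch; it assumes the script lives in the tree's bin/ directory with pym/ one level up:

import sys
from os import path as osp

# Put the sibling pym/ directory first on sys.path so the bundled portage
# package shadows any system-installed copy.
pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")
sys.path.insert(0, pym_path)

import portage
portage._internal_caller = True  # mark this as one of portage's own tools
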
5665 diff --git a/bin/emerge b/bin/emerge
5666 index a9a5643..bb93d83 100755
5667 --- a/bin/emerge
5668 +++ b/bin/emerge
5669 @@ -1,5 +1,5 @@
5670 -#!/usr/bin/python
5671 -# Copyright 2006-2012 Gentoo Foundation
5672 +#!/usr/bin/python -b
5673 +# Copyright 2006-2014 Gentoo Foundation
5674 # Distributed under the terms of the GNU General Public License v2
5675
5676 from __future__ import print_function
5677 @@ -7,67 +7,73 @@ from __future__ import print_function
5678 import platform
5679 import signal
5680 import sys
5681 -# This block ensures that ^C interrupts are handled quietly.
5682 +
5683 +# This block ensures that ^C interrupts are handled quietly. We handle
5684 +# KeyboardInterrupt instead of installing a SIGINT handler, since
5685 +# exiting from signal handlers intermittently causes python to ignore
5686 +# the SystemExit exception with a message like this:
5687 +# Exception SystemExit: 130 in <function remove at 0x7fd2146c1320> ignored
5688 try:
5689
5690 - def exithandler(signum,frame):
5691 - signal.signal(signal.SIGINT, signal.SIG_IGN)
5692 + def exithandler(signum, _frame):
5693 signal.signal(signal.SIGTERM, signal.SIG_IGN)
5694 sys.exit(128 + signum)
5695
5696 - signal.signal(signal.SIGINT, exithandler)
5697 signal.signal(signal.SIGTERM, exithandler)
5698 # Prevent "[Errno 32] Broken pipe" exceptions when
5699 # writing to a pipe.
5700 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
5701
5702 -except KeyboardInterrupt:
5703 - sys.exit(128 + signal.SIGINT)
5704 + def debug_signal(_signum, _frame):
5705 + import pdb
5706 + pdb.set_trace()
5707
5708 -def debug_signal(signum, frame):
5709 - import pdb
5710 - pdb.set_trace()
5711 + if platform.python_implementation() == 'Jython':
5712 + debug_signum = signal.SIGUSR2 # bug #424259
5713 + else:
5714 + debug_signum = signal.SIGUSR1
5715
5716 -if platform.python_implementation() == 'Jython':
5717 - debug_signum = signal.SIGUSR2 # bug #424259
5718 -else:
5719 - debug_signum = signal.SIGUSR1
5720 + signal.signal(debug_signum, debug_signal)
5721
5722 -signal.signal(debug_signum, debug_signal)
5723 -
5724 -try:
5725 - from _emerge.main import emerge_main
5726 -except ImportError:
5727 from os import path as osp
5728 - import sys
5729 - sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym"))
5730 + pym_path = osp.join(osp.dirname(osp.dirname(
5731 + osp.realpath(__file__))), "pym")
5732 + sys.path.insert(0, pym_path)
5733 + import portage
5734 + portage._internal_caller = True
5735 + portage._disable_legacy_globals()
5736 from _emerge.main import emerge_main
5737
5738 -if __name__ == "__main__":
5739 - import sys
5740 - from portage.exception import ParseError, PermissionDenied
5741 - try:
5742 - retval = emerge_main()
5743 - except PermissionDenied as e:
5744 - sys.stderr.write("Permission denied: '%s'\n" % str(e))
5745 - sys.exit(e.errno)
5746 - except ParseError as e:
5747 - sys.stderr.write("%s\n" % str(e))
5748 - sys.exit(1)
5749 - except SystemExit:
5750 - raise
5751 - except Exception:
5752 - # If an unexpected exception occurs then we don't want the mod_echo
5753 - # output to obscure the traceback, so dump the mod_echo output before
5754 - # showing the traceback.
5755 - import traceback
5756 - tb_str = traceback.format_exc()
5757 + if __name__ == "__main__":
5758 + from portage.exception import ParseError, PermissionDenied
5759 try:
5760 - from portage.elog import mod_echo
5761 - except ImportError:
5762 - pass
5763 - else:
5764 - mod_echo.finalize()
5765 - sys.stderr.write(tb_str)
5766 - sys.exit(1)
5767 - sys.exit(retval)
5768 + retval = emerge_main()
5769 + except PermissionDenied as e:
5770 + sys.stderr.write("Permission denied: '%s'\n" % str(e))
5771 + sys.exit(e.errno)
5772 + except ParseError as e:
5773 + sys.stderr.write("%s\n" % str(e))
5774 + sys.exit(1)
5775 + except (KeyboardInterrupt, SystemExit):
5776 + raise
5777 + except Exception:
5778 + # If an unexpected exception occurs then we don't want the
5779 + # mod_echo output to obscure the traceback, so dump the
5780 + # mod_echo output before showing the traceback.
5781 + import traceback
5782 + tb_str = traceback.format_exc()
5783 + try:
5784 + from portage.elog import mod_echo
5785 + except ImportError:
5786 + pass
5787 + else:
5788 + mod_echo.finalize()
5789 + sys.stderr.write(tb_str)
5790 + sys.exit(1)
5791 + sys.exit(retval)
5792 +
5793 +except KeyboardInterrupt:
5794 + sys.stderr.write("\n\nExiting on signal %(signal)s\n" %
5795 + {"signal": signal.SIGINT})
5796 + sys.stderr.flush()
5797 + sys.exit(128 + signal.SIGINT)
5798
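The emerge rewrite above stops installing a SIGINT handler and instead wraps the whole program in try/except KeyboardInterrupt, since exiting from a signal handler can intermittently make Python ignore the resulting SystemExit. Reduced to a skeleton (main() here is a placeholder, not emerge_main):

import signal
import sys

def main():
	# Placeholder for the real entry point.
	return 0

try:
	def exithandler(signum, _frame):
		signal.signal(signal.SIGTERM, signal.SIG_IGN)
		sys.exit(128 + signum)

	# SIGTERM still gets a handler; ^C is left to raise KeyboardInterrupt.
	signal.signal(signal.SIGTERM, exithandler)
	# Avoid "[Errno 32] Broken pipe" exceptions when writing to a pipe.
	signal.signal(signal.SIGPIPE, signal.SIG_DFL)

	if __name__ == "__main__":
		sys.exit(main())

except KeyboardInterrupt:
	sys.stderr.write("\n\nExiting on signal %d\n" % signal.SIGINT)
	sys.stderr.flush()
	sys.exit(128 + signal.SIGINT)
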
5799 diff --git a/bin/emerge-webrsync b/bin/emerge-webrsync
5800 index bfd9aa2..2f0689c 100755
5801 --- a/bin/emerge-webrsync
5802 +++ b/bin/emerge-webrsync
5803 @@ -1,5 +1,5 @@
5804 #!/bin/bash
5805 -# Copyright 1999-2011 Gentoo Foundation
5806 +# Copyright 1999-2014 Gentoo Foundation
5807 # Distributed under the terms of the GNU General Public License v2
5808 # Author: Karl Trygve Kalleberg <karltk@g.o>
5809 # Rewritten from the old, Perl-based emerge-webrsync script
5810 @@ -22,9 +22,9 @@ vvecho() { [[ ${do_verbose} -eq 1 ]] && echo "$@" ; }
5811 # Only echo if not in verbose mode
5812 nvecho() { [[ ${do_verbose} -eq 0 ]] && echo "$@" ; }
5813 # warning echos
5814 -wecho() { echo "${argv0}: warning: $*" 1>&2 ; }
5815 +wecho() { echo "${argv0##*/}: warning: $*" 1>&2 ; }
5816 # error echos
5817 -eecho() { echo "${argv0}: error: $*" 1>&2 ; }
5818 +eecho() { echo "${argv0##*/}: error: $*" 1>&2 ; }
5819
5820 argv0=$0
5821
5822 @@ -39,23 +39,33 @@ else
5823 eecho "could not find 'portageq'; aborting"
5824 exit 1
5825 fi
5826 -eval $("${portageq}" envvar -v FEATURES FETCHCOMMAND GENTOO_MIRRORS \
5827 - PORTAGE_BIN_PATH PORTAGE_GPG_DIR \
5828 - PORTAGE_NICENESS PORTAGE_RSYNC_EXTRA_OPTS PORTAGE_TMPDIR PORTDIR \
5829 - SYNC http_proxy ftp_proxy)
5830 -DISTDIR="${PORTAGE_TMPDIR}/emerge-webrsync"
5831 +eval "$("${portageq}" envvar -v DISTDIR EPREFIX FEATURES \
5832 + FETCHCOMMAND GENTOO_MIRRORS \
5833 + PORTAGE_BIN_PATH PORTAGE_CONFIGROOT PORTAGE_GPG_DIR \
5834 + PORTAGE_NICENESS PORTAGE_REPOSITORIES PORTAGE_RSYNC_EXTRA_OPTS \
5835 + PORTAGE_RSYNC_OPTS PORTAGE_TMPDIR \
5836 + USERLAND http_proxy ftp_proxy)"
5837 export http_proxy ftp_proxy
5838
5839 +source "${PORTAGE_BIN_PATH}"/isolated-functions.sh || exit 1
5840 +
5841 +repo_name=gentoo
5842 +repo_location=$(__repo_attr "${repo_name}" location)
5843 +if [[ -z ${repo_location} ]]; then
5844 + eecho "Repository '${repo_name}' not found"
5845 + exit 1
5846 +fi
5847 +repo_sync_type=$(__repo_attr "${repo_name}" sync-type)
5848 +
5849 # If PORTAGE_NICENESS is overriden via the env then it will
5850 # still pass through the portageq call and override properly.
5851 if [ -n "${PORTAGE_NICENESS}" ]; then
5852 renice $PORTAGE_NICENESS $$ > /dev/null
5853 fi
5854
5855 -source "${PORTAGE_BIN_PATH}"/isolated-functions.sh || exit 1
5856 -
5857 do_verbose=0
5858 do_debug=0
5859 +keep=false
5860
5861 if has webrsync-gpg ${FEATURES} ; then
5862 WEBSYNC_VERIFY_SIGNATURE=1
5863 @@ -99,7 +109,9 @@ get_date_part() {
5864 get_utc_second_from_string() {
5865 local s="$1"
5866 if [[ ${USERLAND} == BSD ]] ; then
5867 - date -juf "%Y%m%d" "$s" +"%s"
5868 + # Specify zeros for the least significant digits, or else those
5869 + # digits are inherited from the current system clock time.
5870 + date -juf "%Y%m%d%H%M.%S" "${s}0000.00" +"%s"
5871 else
5872 date -d "${s:0:4}-${s:4:2}-${s:6:2}" -u +"%s"
5873 fi
5874 @@ -108,8 +120,8 @@ get_utc_second_from_string() {
5875 get_portage_timestamp() {
5876 local portage_current_timestamp=0
5877
5878 - if [ -f "${PORTDIR}/metadata/timestamp.x" ]; then
5879 - portage_current_timestamp=$(cut -f 1 -d " " "${PORTDIR}/metadata/timestamp.x" )
5880 + if [ -f "${repo_location}/metadata/timestamp.x" ]; then
5881 + portage_current_timestamp=$(cut -f 1 -d " " "${repo_location}/metadata/timestamp.x" )
5882 fi
5883
5884 echo "${portage_current_timestamp}"
5885 @@ -125,13 +137,18 @@ fetch_file() {
5886 elif [ "${FETCHCOMMAND/curl/}" != "${FETCHCOMMAND}" ]; then
5887 opts="--continue-at - $(nvecho -s -f)"
5888 else
5889 - rm -f "${FILE}"
5890 + rm -f "${DISTDIR}/${FILE}"
5891 fi
5892
5893 - vecho "Fetching file ${FILE} ..."
5894 + __vecho "Fetching file ${FILE} ..."
5895 # already set DISTDIR=
5896 - eval "${FETCHCOMMAND}" ${opts}
5897 - [ -s "${FILE}" ]
5898 + eval "${FETCHCOMMAND} ${opts}"
5899 + if [[ $? -eq 0 && -s ${DISTDIR}/${FILE} ]] ; then
5900 + return 0
5901 + else
5902 + rm -f "${DISTDIR}/${FILE}"
5903 + return 1
5904 + fi
5905 }
5906
5907 check_file_digest() {
5908 @@ -139,10 +156,12 @@ check_file_digest() {
5909 local file="$2"
5910 local r=1
5911
5912 - vecho "Checking digest ..."
5913 + __vecho "Checking digest ..."
5914
5915 if type -P md5sum > /dev/null; then
5916 - md5sum -c $digest && r=0
5917 + local md5sum_output=$(md5sum "${file}")
5918 + local digest_content=$(< "${digest}")
5919 + [ "${md5sum_output%%[[:space:]]*}" = "${digest_content%%[[:space:]]*}" ] && r=0
5920 elif type -P md5 > /dev/null; then
5921 [ "$(md5 -q "${file}")" == "$(cut -d ' ' -f 1 "${digest}")" ] && r=0
5922 else
5923 @@ -159,7 +178,7 @@ check_file_signature() {
5924
5925 if [ ${WEBSYNC_VERIFY_SIGNATURE} != 0 ]; then
5926
5927 - vecho "Checking signature ..."
5928 + __vecho "Checking signature ..."
5929
5930 if type -P gpg > /dev/null; then
5931 gpg --homedir "${PORTAGE_GPG_DIR}" --verify "$signature" "$file" && r=0
5932 @@ -183,13 +202,25 @@ get_snapshot_timestamp() {
5933 sync_local() {
5934 local file="$1"
5935
5936 - vecho "Syncing local tree ..."
5937 + __vecho "Syncing local tree ..."
5938 +
5939 + local ownership="portage:portage"
5940 + if has usersync ${FEATURES} ; then
5941 + case "${USERLAND}" in
5942 + BSD)
5943 + ownership=$(stat -f '%Su:%Sg' "${repo_location}")
5944 + ;;
5945 + *)
5946 + ownership=$(stat -c '%U:%G' "${repo_location}")
5947 + ;;
5948 + esac
5949 + fi
5950
5951 if type -P tarsync > /dev/null ; then
5952 - local chown_opts="-o portage -g portage"
5953 - chown portage:portage portage > /dev/null 2>&1 || chown_opts=""
5954 + local chown_opts="-o ${ownership%:*} -g ${ownership#*:}"
5955 + chown ${ownership} "${repo_location}" > /dev/null 2>&1 || chown_opts=""
5956 if ! tarsync $(vvecho -v) -s 1 ${chown_opts} \
5957 - -e /distfiles -e /packages -e /local "${file}" "${PORTDIR}"; then
5958 + -e /distfiles -e /packages -e /local "${file}" "${repo_location}"; then
5959 eecho "tarsync failed; tarball is corrupt? (${file})"
5960 return 1
5961 fi
5962 @@ -201,27 +232,29 @@ sync_local() {
5963 fi
5964
5965 # Free disk space
5966 - rm -f "${file}"
5967 + ${keep} || rm -f "${file}"
5968
5969 - chown portage:portage portage > /dev/null 2>&1 && \
5970 - chown -R portage:portage portage
5971 + local rsync_opts="${PORTAGE_RSYNC_OPTS} ${PORTAGE_RSYNC_EXTRA_OPTS}"
5972 + if chown ${ownership} portage > /dev/null 2>&1; then
5973 + chown -R ${ownership} portage
5974 + rsync_opts+=" --owner --group"
5975 + fi
5976 cd portage
5977 - rsync -av --progress --stats --delete --delete-after \
5978 - --exclude='/distfiles' --exclude='/packages' \
5979 - --exclude='/local' ${PORTAGE_RSYNC_EXTRA_OPTS} . "${PORTDIR%%/}"
5980 + rsync ${rsync_opts} . "${repo_location%%/}"
5981 cd ..
5982
5983 - vecho "Cleaning up ..."
5984 + __vecho "Cleaning up ..."
5985 rm -fr portage
5986 fi
5987
5988 if has metadata-transfer ${FEATURES} ; then
5989 - vecho "Updating cache ..."
5990 - emerge --metadata
5991 + __vecho "Updating cache ..."
5992 + "${PORTAGE_BIN_PATH}/emerge" --metadata
5993 fi
5994 - [ -x /etc/portage/bin/post_sync ] && /etc/portage/bin/post_sync
5995 + local post_sync=${PORTAGE_CONFIGROOT}etc/portage/bin/post_sync
5996 + [ -x "${post_sync}" ] && "${post_sync}"
5997 # --quiet suppresses output if there are no relevant news items
5998 - has news ${FEATURES} && emerge --check-news --quiet
5999 + has news ${FEATURES} && "${PORTAGE_BIN_PATH}/emerge" --check-news --quiet
6000 return 0
6001 }
6002
6003 @@ -251,14 +284,15 @@ do_snapshot() {
6004
6005 for mirror in ${GENTOO_MIRRORS} ; do
6006
6007 - vecho "Trying to retrieve ${date} snapshot from ${mirror} ..."
6008 + mirror=${mirror%/}
6009 + __vecho "Trying to retrieve ${date} snapshot from ${mirror} ..."
6010
6011 for compression in ${compressions} ; do
6012 local file="portage-${date}.tar.${compression}"
6013 local digest="${file}.md5sum"
6014 local signature="${file}.gpgsig"
6015
6016 - if [ -s "${file}" -a -s "${digest}" -a -s "${signature}" ] ; then
6017 + if [ -s "${DISTDIR}/${file}" -a -s "${DISTDIR}/${digest}" -a -s "${DISTDIR}/${signature}" ] ; then
6018 check_file_digest "${DISTDIR}/${digest}" "${DISTDIR}/${file}" && \
6019 check_file_signature "${DISTDIR}/${signature}" "${DISTDIR}/${file}" && \
6020 have_files=1
6021 @@ -280,8 +314,8 @@ do_snapshot() {
6022 #
6023 if [ ${have_files} -eq 1 ]; then
6024
6025 - vecho "Getting snapshot timestamp ..."
6026 - local snapshot_timestamp=$(get_snapshot_timestamp "${file}")
6027 + __vecho "Getting snapshot timestamp ..."
6028 + local snapshot_timestamp=$(get_snapshot_timestamp "${DISTDIR}/${file}")
6029
6030 if [ ${ignore_timestamp} == 0 ]; then
6031 if [ ${snapshot_timestamp} -lt $(get_portage_timestamp) ]; then
6032 @@ -310,7 +344,7 @@ do_snapshot() {
6033 #
6034 # Remove files and use a different mirror
6035 #
6036 - rm -f "${file}" "${digest}" "${signature}"
6037 + rm -f "${DISTDIR}/${file}" "${DISTDIR}/${digest}" "${DISTDIR}/${signature}"
6038 fi
6039 done
6040
6041 @@ -318,12 +352,12 @@ do_snapshot() {
6042 done
6043
6044 if [ ${have_files} -eq 1 ]; then
6045 - sync_local "${file}" && r=0
6046 + sync_local "${DISTDIR}/${file}" && r=0
6047 else
6048 - vecho "${date} snapshot was not found"
6049 + __vecho "${date} snapshot was not found"
6050 fi
6051 -
6052 - rm -f "${file}" "${digest}" "${signature}"
6053 +
6054 + ${keep} || rm -f "${DISTDIR}/${file}" "${DISTDIR}/${digest}" "${DISTDIR}/${signature}"
6055 return "${r}"
6056 }
6057
6058 @@ -331,9 +365,9 @@ do_latest_snapshot() {
6059 local attempts=0
6060 local r=1
6061
6062 - vecho "Fetching most recent snapshot ..."
6063 + __vecho "Fetching most recent snapshot ..."
6064
6065 - # The snapshot for a given day is generated at 01:45 UTC on the following
6066 + # The snapshot for a given day is generated at 00:45 UTC on the following
6067 # day, so the current day's snapshot (going by UTC time) hasn't been
6068 # generated yet. Therefore, always start by looking for the previous day's
6069 # snapshot (for attempts=1, subtract 1 day from the current UTC time).
6070 @@ -349,10 +383,10 @@ do_latest_snapshot() {
6071 local start_time=$(get_utc_date_in_seconds)
6072 local start_hour=$(get_date_part ${start_time} "%H")
6073
6074 - # Daily snapshots are created at 1:45 AM and are not
6075 - # available until after 2 AM. Don't waste time trying
6076 + # Daily snapshots are created at 00:45 and are not
6077 + # available until after 01:00. Don't waste time trying
6078 # to fetch a snapshot before it's been created.
6079 - if [ ${start_hour} -lt 2 ] ; then
6080 + if [ ${start_hour} -lt 1 ] ; then
6081 (( start_time -= 86400 ))
6082 fi
6083 local snapshot_date=$(get_date_part ${start_time} "%Y%m%d")
6084 @@ -361,8 +395,8 @@ do_latest_snapshot() {
6085 while (( ${attempts} < 40 )) ; do
6086 (( attempts++ ))
6087 (( snapshot_date_seconds -= 86400 ))
6088 - # snapshots are created at 1:45 AM
6089 - (( approx_snapshot_time = snapshot_date_seconds + 86400 + 6300 ))
6090 + # snapshots are created at 00:45
6091 + (( approx_snapshot_time = snapshot_date_seconds + 86400 + 2700 ))
6092 (( timestamp_difference = existing_timestamp - approx_snapshot_time ))
6093 [ ${timestamp_difference} -lt 0 ] && (( timestamp_difference = -1 * timestamp_difference ))
6094 snapshot_date=$(get_date_part ${snapshot_date_seconds} "%Y%m%d")
6095 @@ -388,7 +422,7 @@ do_latest_snapshot() {
6096 "snapshot. In order to force sync," \
6097 "use the --revert option or remove" \
6098 "the timestamp file located at" \
6099 - "'${PORTDIR}/metadata/timestamp.x'." | fmt -w 70 | \
6100 + "'${repo_location}/metadata/timestamp.x'." | fmt -w 70 | \
6101 while read -r line ; do
6102 ewarn "${line}"
6103 done
6104 @@ -408,9 +442,10 @@ do_latest_snapshot() {
6105 usage() {
6106 cat <<-EOF
6107 Usage: $0 [options]
6108 -
6109 +
6110 Options:
6111 --revert=yyyymmdd Revert to snapshot
6112 + -k, --keep Keep snapshots in DISTDIR (don't delete)
6113 -q, --quiet Only output errors
6114 -v, --verbose Enable verbose output
6115 -x, --debug Enable debug output
6116 @@ -427,14 +462,12 @@ usage() {
6117 main() {
6118 local arg
6119 local revert_date
6120 -
6121 - [ ! -d "${DISTDIR}" ] && mkdir -p "${DISTDIR}"
6122 - cd "${DISTDIR}"
6123
6124 for arg in "$@" ; do
6125 local v=${arg#*=}
6126 case ${arg} in
6127 -h|--help) usage ;;
6128 + -k|--keep) keep=true ;;
6129 -q|--quiet) PORTAGE_QUIET=1 ;;
6130 -v|--verbose) do_verbose=1 ;;
6131 -x|--debug) do_debug=1 ;;
6132 @@ -443,16 +476,39 @@ main() {
6133 esac
6134 done
6135
6136 + [[ -d ${repo_location} ]] || mkdir -p "${repo_location}"
6137 + if [[ ! -w ${repo_location} ]] ; then
6138 + eecho "Repository '${repo_name}' is not writable: ${repo_location}"
6139 + exit 1
6140 + fi
6141 +
6142 + [[ -d ${PORTAGE_TMPDIR}/portage ]] || mkdir -p "${PORTAGE_TMPDIR}/portage"
6143 + TMPDIR=$(mktemp -d "${PORTAGE_TMPDIR}/portage/webrsync-XXXXXX")
6144 + if [[ ! -w ${TMPDIR} ]] ; then
6145 + eecho "TMPDIR is not writable: ${TMPDIR}"
6146 + exit 1
6147 + fi
6148 + trap 'cd / ; rm -rf "${TMPDIR}"' EXIT
6149 + cd "${TMPDIR}" || exit 1
6150 +
6151 + ${keep} || DISTDIR=${TMPDIR}
6152 + [ ! -d "${DISTDIR}" ] && mkdir -p "${DISTDIR}"
6153 +
6154 + if ${keep} && [[ ! -w ${DISTDIR} ]] ; then
6155 + eecho "DISTDIR is not writable: ${DISTDIR}"
6156 + exit 1
6157 + fi
6158 +
6159 # This is a sanity check to help prevent people like funtoo users
6160 # from accidentally wiping out their git tree.
6161 - if [[ -n $SYNC && ${SYNC#rsync:} = $SYNC ]] ; then
6162 - echo "The current SYNC variable setting does not refer to an rsync URI:" >&2
6163 + if [[ -n ${repo_sync_type} && ${repo_sync_type} != rsync ]] ; then
6164 + echo "The current sync-type attribute of repository 'gentoo' is not set to 'rsync':" >&2
6165 echo >&2
6166 - echo " SYNC=$SYNC" >&2
6167 + echo " sync-type=${repo_sync_type}" >&2
6168 echo >&2
6169 echo "If you intend to use emerge-webrsync then please" >&2
6170 - echo "adjust SYNC to refer to an rsync URI." >&2
6171 - echo "emerge-webrsync exiting due to abnormal SYNC setting." >&2
6172 + echo "adjust sync-type and sync-uri attributes to refer to rsync." >&2
6173 + echo "emerge-webrsync exiting due to abnormal sync-type setting." >&2
6174 exit 1
6175 fi
6176
6177
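The webrsync changes above correct the assumed snapshot build time from 01:45 to 00:45 UTC, so a snapshot dated YYYYMMDD is expected at snapshot_date_seconds + 86400 + 2700. The same arithmetic in Python, as a quick illustration (function name invented):

import calendar
import time

def approx_snapshot_time(yyyymmdd):
	# A snapshot dated YYYYMMDD is cut at 00:45 UTC on the following day.
	date_seconds = calendar.timegm(time.strptime(yyyymmdd, "%Y%m%d"))
	return date_seconds + 86400 + 45 * 60

if __name__ == "__main__":
	print(approx_snapshot_time("20140325"))
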
6178 diff --git a/bin/emirrordist b/bin/emirrordist
6179 new file mode 100755
6180 index 0000000..0368eee
6181 --- /dev/null
6182 +++ b/bin/emirrordist
6183 @@ -0,0 +1,13 @@
6184 +#!/usr/bin/python -b
6185 +# Copyright 2013-2014 Gentoo Foundation
6186 +# Distributed under the terms of the GNU General Public License v2
6187 +
6188 +import sys
6189 +
6190 +import portage
6191 +portage._internal_caller = True
6192 +portage._disable_legacy_globals()
6193 +from portage._emirrordist.main import emirrordist_main
6194 +
6195 +if __name__ == "__main__":
6196 + sys.exit(emirrordist_main(sys.argv[1:]))
6197
6198 diff --git a/bin/env-update b/bin/env-update
6199 index 8a69f2b..7651ef9 100755
6200 --- a/bin/env-update
6201 +++ b/bin/env-update
6202 @@ -1,5 +1,5 @@
6203 -#!/usr/bin/python -O
6204 -# Copyright 1999-2006 Gentoo Foundation
6205 +#!/usr/bin/python -bO
6206 +# Copyright 1999-2014 Gentoo Foundation
6207 # Distributed under the terms of the GNU General Public License v2
6208
6209 from __future__ import print_function
6210 @@ -25,12 +25,12 @@ if len(sys.argv) > 1:
6211 print("!!! Invalid command line options!\n")
6212 usage(1)
6213
6214 -try:
6215 - import portage
6216 -except ImportError:
6217 - from os import path as osp
6218 - sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym"))
6219 - import portage
6220 +from os import path as osp
6221 +pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")
6222 +sys.path.insert(0, pym_path)
6223 +import portage
6224 +portage._internal_caller = True
6225 +
6226 try:
6227 portage.env_update(makelinks)
6228 except IOError as e:
6229
6230 diff --git a/bin/etc-update b/bin/etc-update
6231 index d763c1f..1a99231 100755
6232 --- a/bin/etc-update
6233 +++ b/bin/etc-update
6234 @@ -62,7 +62,7 @@ do_mv_ln() {
6235 }
6236
6237 scan() {
6238 - echo "Scanning Configuration files..."
6239 + ${QUIET} || echo "Scanning Configuration files..."
6240 rm -rf "${TMP}"/files > /dev/null 2>&1
6241 mkdir "${TMP}"/files || die "Failed mkdir command!"
6242 count=0
6243 @@ -107,13 +107,13 @@ scan() {
6244 for mpath in ${CONFIG_PROTECT_MASK}; do
6245 mpath="${EROOT%/}${mpath}"
6246 if [[ "${rpath}" == "${mpath}"* ]] ; then
6247 - echo "Updating masked file: ${live_file}"
6248 + ${QUIET} || echo "Updating masked file: ${live_file}"
6249 mv "${cfg_file}" "${live_file}"
6250 continue 2
6251 fi
6252 done
6253 if [[ ! -f ${file} ]] ; then
6254 - echo "Skipping non-file ${file} ..."
6255 + ${QUIET} || echo "Skipping non-file ${file} ..."
6256 continue
6257 fi
6258
6259 @@ -140,7 +140,7 @@ scan() {
6260 fi
6261
6262 if [[ ${MATCHES} == 1 ]] ; then
6263 - echo "Automerging trivial changes in: ${live_file}"
6264 + ${QUIET} || echo "Automerging trivial changes in: ${live_file}"
6265 do_mv_ln "${cfg_file}" "${live_file}"
6266 continue
6267 else
6268 @@ -548,9 +548,9 @@ die() {
6269 local msg=$1 exitcode=${2:-1}
6270
6271 if [ ${exitcode} -eq 0 ] ; then
6272 - printf 'Exiting: %b\n' "${msg}"
6273 + ${QUIET} || printf 'Exiting: %b\n' "${msg}"
6274 scan > /dev/null
6275 - [ ${count} -gt 0 ] && echo "NOTE: ${count} updates remaining"
6276 + ! ${QUIET} && [ ${count} -gt 0 ] && echo "NOTE: ${count} updates remaining"
6277 else
6278 error "${msg}"
6279 fi
6280 @@ -575,6 +575,7 @@ usage() {
6281 -d, --debug Enable shell debugging
6282 -h, --help Show help and run away
6283 -p, --preen Automerge trivial changes only and quit
6284 + -q, --quiet Show only essential output
6285 -v, --verbose Show settings and such along the way
6286 -V, --version Show version and trundle away
6287
6288 @@ -600,6 +601,7 @@ declare title="Gentoo's etc-update tool!"
6289
6290 PREEN=false
6291 SET_X=false
6292 +QUIET=false
6293 VERBOSE=false
6294 NONINTERACTIVE_MV=false
6295 while [[ -n $1 ]] ; do
6296 @@ -607,6 +609,7 @@ while [[ -n $1 ]] ; do
6297 -d|--debug) SET_X=true;;
6298 -h|--help) usage;;
6299 -p|--preen) PREEN=true;;
6300 + -q|--quiet) QUIET=true;;
6301 -v|--verbose) VERBOSE=true;;
6302 -V|--version) emerge --version; exit 0;;
6303 --automode) parse_automode_flag $2 && shift || usage 1 "Invalid mode '$2'";;
6304 @@ -617,7 +620,7 @@ while [[ -n $1 ]] ; do
6305 done
6306 ${SET_X} && set -x
6307
6308 -type portageq >/dev/null || die "missing portageq"
6309 +type -P portageq >/dev/null || die "missing portageq"
6310 portage_vars=(
6311 CONFIG_PROTECT{,_MASK}
6312 PORTAGE_CONFIGROOT
6313 @@ -627,7 +630,7 @@ portage_vars=(
6314 USERLAND
6315 NOCOLOR
6316 )
6317 -eval $(portageq envvar -v ${portage_vars[@]})
6318 +eval $(${PORTAGE_PYTHON:+"${PORTAGE_PYTHON}"} "$(type -P portageq)" envvar -v ${portage_vars[@]})
6319 export PORTAGE_TMPDIR
6320 SCAN_PATHS=${*:-${CONFIG_PROTECT}}
6321
6322
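etc-update keeps driving its settings by eval'ing the portageq envvar -v output, now invoked through the resolved portageq path and an optional PORTAGE_PYTHON interpreter. For comparison, a hypothetical Python consumer of the same VAR='value' output could look roughly like this (sample input made up for the example):

import shlex

def parse_envvar_output(text):
	# Each token is a shell-quoted KEY='value' assignment.
	env = {}
	for token in shlex.split(text):
		key, sep, value = token.partition("=")
		if sep:
			env[key] = value
	return env

if __name__ == "__main__":
	sample = "PORTAGE_TMPDIR='/var/tmp'\nNOCOLOR='false'\n"
	print(parse_envvar_output(sample))
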
6323 diff --git a/bin/filter-bash-environment.py b/bin/filter-bash-environment.py
6324 index b9aec96..a4cdc54 100755
6325 --- a/bin/filter-bash-environment.py
6326 +++ b/bin/filter-bash-environment.py
6327 @@ -1,10 +1,9 @@
6328 -#!/usr/bin/python
6329 -# Copyright 1999-2011 Gentoo Foundation
6330 +#!/usr/bin/python -b
6331 +# Copyright 1999-2014 Gentoo Foundation
6332 # Distributed under the terms of the GNU General Public License v2
6333
6334 import codecs
6335 import io
6336 -import optparse
6337 import os
6338 import re
6339 import sys
6340 @@ -126,10 +125,19 @@ if __name__ == "__main__":
6341 "intact. The PATTERN is a space separated list of variable names" + \
6342 " and it supports python regular expression syntax."
6343 usage = "usage: %s PATTERN" % os.path.basename(sys.argv[0])
6344 - parser = optparse.OptionParser(description=description, usage=usage)
6345 - options, args = parser.parse_args(sys.argv[1:])
6346 + args = sys.argv[1:]
6347 +
6348 + if '-h' in args or '--help' in args:
6349 + sys.stdout.write(usage + "\n")
6350 + sys.stdout.flush()
6351 + sys.exit(os.EX_OK)
6352 +
6353 if len(args) != 1:
6354 - parser.error("Missing required PATTERN argument.")
6355 + sys.stderr.write(usage + "\n")
6356 + sys.stderr.write("Exactly one PATTERN argument required.\n")
6357 + sys.stderr.flush()
6358 + sys.exit(2)
6359 +
6360 file_in = sys.stdin
6361 file_out = sys.stdout
6362 if sys.hexversion >= 0x3000000:
6363
6364 diff --git a/bin/fixpackages b/bin/fixpackages
6365 index dc43ed2..cec0030 100755
6366 --- a/bin/fixpackages
6367 +++ b/bin/fixpackages
6368 @@ -1,5 +1,5 @@
6369 -#!/usr/bin/python
6370 -# Copyright 1999-2011 Gentoo Foundation
6371 +#!/usr/bin/python -b
6372 +# Copyright 1999-2014 Gentoo Foundation
6373 # Distributed under the terms of the GNU General Public License v2
6374
6375 from __future__ import print_function
6376 @@ -7,21 +7,27 @@ from __future__ import print_function
6377 import os
6378 import sys
6379
6380 -try:
6381 - import portage
6382 -except ImportError:
6383 - from os import path as osp
6384 - sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym"))
6385 - import portage
6386 -
6387 +from os import path as osp
6388 +pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")
6389 +sys.path.insert(0, pym_path)
6390 +import portage
6391 +portage._internal_caller = True
6392 from portage import os
6393 from portage.output import EOutput
6394 +from portage.util._argparse import ArgumentParser
6395 from textwrap import wrap
6396 from portage._global_updates import _global_updates
6397 mysettings = portage.settings
6398 mytrees = portage.db
6399 mtimedb = portage.mtimedb
6400
6401 +description = """The fixpackages program performs package move updates on
6402 + configuration files, installed packages, and binary packages."""
6403 +description = " ".join(description.split())
6404 +
6405 +parser = ArgumentParser(description=description)
6406 +parser.parse_args()
6407 +
6408 if mysettings['ROOT'] != "/":
6409 out = EOutput()
6410 msg = "The fixpackages program is not intended for use with " + \
6411
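In fixpackages above, the multi-line description string is flattened with " ".join(description.split()) before being passed to ArgumentParser. A two-line demonstration of that trick:

# split()/join() collapses every run of whitespace, including the newline
# and the leading indentation, into a single space.
description = """The fixpackages program performs package move updates on
	configuration files, installed packages, and binary packages."""
print(" ".join(description.split()))
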
6412 diff --git a/bin/glsa-check b/bin/glsa-check
6413 index a840c32..972679a 100755
6414 --- a/bin/glsa-check
6415 +++ b/bin/glsa-check
6416 @@ -1,81 +1,79 @@
6417 -#!/usr/bin/python
6418 -# Copyright 2008-2011 Gentoo Foundation
6419 +#!/usr/bin/python -b
6420 +# Copyright 2008-2014 Gentoo Foundation
6421 # Distributed under the terms of the GNU General Public License v2
6422
6423 from __future__ import print_function
6424
6425 import sys
6426 +import codecs
6427
6428 -try:
6429 - import portage
6430 -except ImportError:
6431 - from os import path as osp
6432 - sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym"))
6433 - import portage
6434 -
6435 +from os import path as osp
6436 +pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")
6437 +sys.path.insert(0, pym_path)
6438 +import portage
6439 +portage._internal_caller = True
6440 from portage import os
6441 -from portage.output import *
6442 -
6443 -from optparse import OptionGroup, OptionParser
6444 +from portage.output import green, red, nocolor, white
6445 +from portage.util._argparse import ArgumentParser
6446
6447 __program__ = "glsa-check"
6448 __author__ = "Marius Mauch <genone@g.o>"
6449 __version__ = "1.0"
6450
6451 -def cb_version(*args, **kwargs):
6452 - """Callback for --version"""
6453 - sys.stderr.write("\n"+ __program__ + ", version " + __version__ + "\n")
6454 - sys.stderr.write("Author: " + __author__ + "\n")
6455 - sys.stderr.write("This program is licensed under the GPL, version 2\n\n")
6456 - sys.exit(0)
6457 -
6458 # option parsing
6459 -parser = OptionParser(usage="%prog <option> [glsa-list]",
6460 - version="%prog "+ __version__)
6461 -parser.epilog = "glsa-list can contain an arbitrary number of GLSA ids," \
6462 +epilog = "glsa-list can contain an arbitrary number of GLSA ids," \
6463 " filenames containing GLSAs or the special identifiers" \
6464 " 'all', 'new' and 'affected'"
6465 +parser = ArgumentParser(usage=__program__ + " <option> [glsa-list]",
6466 + epilog=epilog)
6467
6468 -modes = OptionGroup(parser, "Modes")
6469 -modes.add_option("-l", "--list", action="store_const",
6470 +modes = parser.add_argument_group("Modes")
6471 +modes.add_argument("-l", "--list", action="store_const",
6472 const="list", dest="mode",
6473 help="List all unapplied GLSA")
6474 -modes.add_option("-d", "--dump", action="store_const",
6475 +modes.add_argument("-d", "--dump", action="store_const",
6476 const="dump", dest="mode",
6477 help="Show all information about the given GLSA")
6478 -modes.add_option("", "--print", action="store_const",
6479 +modes.add_argument("--print", action="store_const",
6480 const="dump", dest="mode",
6481 help="Alias for --dump")
6482 -modes.add_option("-t", "--test", action="store_const",
6483 +modes.add_argument("-t", "--test", action="store_const",
6484 const="test", dest="mode",
6485 help="Test if this system is affected by the given GLSA")
6486 -modes.add_option("-p", "--pretend", action="store_const",
6487 +modes.add_argument("-p", "--pretend", action="store_const",
6488 const="pretend", dest="mode",
6489 help="Show the necessary commands to apply this GLSA")
6490 -modes.add_option("-f", "--fix", action="store_const",
6491 +modes.add_argument("-f", "--fix", action="store_const",
6492 const="fix", dest="mode",
6493 help="Try to auto-apply this GLSA (experimental)")
6494 -modes.add_option("-i", "--inject", action="store_const", dest="mode",
6495 - help="Inject the given GLSA into the checkfile")
6496 -modes.add_option("-m", "--mail", action="store_const",
6497 +modes.add_argument("-i", "--inject", action="store_const",
6498 + const="inject", dest="mode",
6499 + help="inject the given GLSA into the glsa_injected file")
6500 +modes.add_argument("-m", "--mail", action="store_const",
6501 const="mail", dest="mode",
6502 help="Send a mail with the given GLSAs to the administrator")
6503 -parser.add_option_group(modes)
6504
6505 -parser.remove_option("--version")
6506 -parser.add_option("-V", "--version", action="callback",
6507 - callback=cb_version, help="Some information about this tool")
6508 -parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
6509 +parser.add_argument("-V", "--version", action="store_true",
6510 + help="Some information about this tool")
6511 +parser.add_argument("-v", "--verbose", action="store_true", dest="verbose",
6512 help="Print more information")
6513 -parser.add_option("-n", "--nocolor", action="callback",
6514 - callback=lambda *args, **kwargs: nocolor(),
6515 +parser.add_argument("-n", "--nocolor", action="store_true",
6516 help="Disable colors")
6517 -parser.add_option("-e", "--emergelike", action="store_false", dest="least_change",
6518 +parser.add_argument("-e", "--emergelike", action="store_false", dest="least_change",
6519 help="Do not use a least-change algorithm")
6520 -parser.add_option("-c", "--cve", action="store_true", dest="list_cve",
6521 +parser.add_argument("-c", "--cve", action="store_true", dest="list_cve",
6522 help="Show CAN ids in listing mode")
6523
6524 -options, params = parser.parse_args()
6525 +options, params = parser.parse_known_args()
6526 +
6527 +if options.nocolor:
6528 + nocolor()
6529 +
6530 +if options.version:
6531 + sys.stderr.write("\n"+ __program__ + ", version " + __version__ + "\n")
6532 + sys.stderr.write("Author: " + __author__ + "\n")
6533 + sys.stderr.write("This program is licensed under the GPL, version 2\n\n")
6534 + sys.exit(0)
6535
6536 mode = options.mode
6537 least_change = options.least_change
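The hunk above migrates glsa-check from optparse to the argparse-style parser:
the old --version and --nocolor callbacks become plain store_true flags that
are acted on after parse_known_args(). A stripped-down sketch of that pattern
using stdlib argparse (the script itself uses portage.util._argparse; help
strings are omitted here):

    import sys
    from argparse import ArgumentParser

    parser = ArgumentParser(usage="glsa-check <option> [glsa-list]")
    parser.add_argument("-V", "--version", action="store_true")
    parser.add_argument("-n", "--nocolor", action="store_true")
    options, params = parser.parse_known_args()

    # Callbacks are gone; act on the flags once parsing is complete.
    if options.nocolor:
        pass  # the real script calls portage.output.nocolor() here
    if options.version:
        sys.stderr.write("glsa-check, version 1.0\n")
        sys.exit(0)
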
6538 @@ -101,7 +99,8 @@ elif mode == "list" and not params:
6539 params.append("new")
6540
6541 # delay this for speed increase
6542 -from portage.glsa import *
6543 +from portage.glsa import (Glsa, GlsaTypeException, GlsaFormatException,
6544 + get_applied_glsas, get_glsa_list)
6545
6546 eroot = portage.settings['EROOT']
6547 vardb = portage.db[eroot]["vartree"].dbapi
6548 @@ -117,7 +116,7 @@ glsalist = []
6549 if "new" in params:
6550 glsalist = todolist
6551 params.remove("new")
6552 -
6553 +
6554 if "all" in params:
6555 glsalist = completelist
6556 params.remove("all")
6557 @@ -142,8 +141,17 @@ for p in params[:]:
6558
6559 glsalist.extend([g for g in params if g not in glsalist])
6560
6561 -def summarylist(myglsalist, fd1=sys.stdout, fd2=sys.stderr):
6562 - fd2.write(white("[A]")+" means this GLSA was already applied,\n")
6563 +def summarylist(myglsalist, fd1=sys.stdout, fd2=sys.stderr, encoding="utf-8"):
6564 + # Get to the raw streams in py3k before wrapping them with an encoded writer
6565 + # to avoid writing bytes to a text stream (stdout/stderr are text streams
6566 + # by default in py3k)
6567 + if hasattr(fd1, "buffer"):
6568 + fd1 = fd1.buffer
6569 + if hasattr(fd2, "buffer"):
6570 + fd2 = fd2.buffer
6571 + fd1 = codecs.getwriter(encoding)(fd1)
6572 + fd2 = codecs.getwriter(encoding)(fd2)
6573 + fd2.write(white("[A]")+" means this GLSA was marked as applied (injected),\n")
6574 fd2.write(green("[U]")+" means the system is not affected and\n")
6575 fd2.write(red("[N]")+" indicates that the system might be affected.\n\n")
6576
6577 @@ -155,7 +163,7 @@ def summarylist(myglsalist, fd1=sys.stdout, fd2=sys.stderr):
6578 if verbose:
6579 fd2.write(("invalid GLSA: %s (error message was: %s)\n" % (myid, e)))
6580 continue
6581 - if myglsa.isApplied():
6582 + if myglsa.isInjected():
6583 status = "[A]"
6584 color = white
6585 elif myglsa.isVulnerable():
6586 @@ -186,7 +194,7 @@ def summarylist(myglsalist, fd1=sys.stdout, fd2=sys.stderr):
6587 fd1.write(")")
6588 if list_cve:
6589 fd1.write(" "+(",".join([r[:13] for r in myglsa.references if r[:4] in ["CAN-", "CVE-"]])))
6590 - fd1.write("\n")
6591 + fd1.write("\n")
6592 return 0
6593
6594 if mode == "list":
6595 @@ -204,39 +212,46 @@ if mode in ["dump", "fix", "inject", "pretend"]:
6596 if mode == "dump":
6597 myglsa.dump()
6598 elif mode == "fix":
6599 - sys.stdout.write("fixing "+myid+"\n")
6600 - mergelist = myglsa.getMergeList(least_change=least_change)
6601 - for pkg in mergelist:
6602 - sys.stdout.write(">>> merging "+pkg+"\n")
6603 - # using emerge for the actual merging as it contains the dependency
6604 - # code and we want to be consistent in behaviour. Also this functionality
6605 - # will be integrated in emerge later, so it shouldn't hurt much.
6606 - emergecmd = "emerge --oneshot " + portage.settings["EMERGE_OPTS"] + " =" + pkg
6607 - if verbose:
6608 - sys.stderr.write(emergecmd+"\n")
6609 - exitcode = os.system(emergecmd)
6610 - # system() returns the exitcode in the high byte of a 16bit integer
6611 - if exitcode >= 1<<8:
6612 - exitcode >>= 8
6613 - if exitcode:
6614 - sys.exit(exitcode)
6615 - myglsa.inject()
6616 + sys.stdout.write("Fixing GLSA "+myid+"\n")
6617 + if not myglsa.isVulnerable():
6618 + sys.stdout.write(">>> no vulnerable packages installed\n")
6619 + else:
6620 + mergelist = myglsa.getMergeList(least_change=least_change)
6621 + if mergelist == []:
6622 + sys.stdout.write(">>> cannot fix GLSA, no unaffected packages available\n")
6623 + sys.exit(2)
6624 + for pkg in mergelist:
6625 + sys.stdout.write(">>> merging "+pkg+"\n")
6626 + # using emerge for the actual merging as it contains the dependency
6627 + # code and we want to be consistent in behaviour. Also this functionality
6628 + # will be integrated in emerge later, so it shouldn't hurt much.
6629 + emergecmd = "emerge --oneshot " + " =" + pkg
6630 + if verbose:
6631 + sys.stderr.write(emergecmd+"\n")
6632 + exitcode = os.system(emergecmd)
6633 + # system() returns the exitcode in the high byte of a 16bit integer
6634 + if exitcode >= 1<<8:
6635 + exitcode >>= 8
6636 + if exitcode:
6637 + sys.exit(exitcode)
6638 + if len(mergelist):
6639 + sys.stdout.write("\n")
6640 elif mode == "pretend":
6641 sys.stdout.write("Checking GLSA "+myid+"\n")
6642 - mergelist = myglsa.getMergeList(least_change=least_change)
6643 - if mergelist:
6644 - sys.stdout.write("The following updates will be performed for this GLSA:\n")
6645 - for pkg in mergelist:
6646 - oldver = None
6647 - for x in vardb.match(portage.cpv_getkey(pkg)):
6648 - if vardb.aux_get(x, ["SLOT"]) == portdb.aux_get(pkg, ["SLOT"]):
6649 - oldver = x
6650 - if oldver == None:
6651 - raise ValueError("could not find old version for package %s" % pkg)
6652 - oldver = oldver[len(portage.cpv_getkey(oldver))+1:]
6653 - sys.stdout.write(" " + pkg + " (" + oldver + ")\n")
6654 + if not myglsa.isVulnerable():
6655 + sys.stdout.write(">>> no vulnerable packages installed\n")
6656 else:
6657 - sys.stdout.write("Nothing to do for this GLSA\n")
6658 + mergedict = {}
6659 + for (vuln, update) in myglsa.getAffectionTable(least_change=least_change):
6660 + mergedict.setdefault(update, []).append(vuln)
6661 +
6662 + sys.stdout.write(">>> The following updates will be performed for this GLSA:\n")
6663 + for pkg in mergedict:
6664 + if pkg != "":
6665 + sys.stdout.write(" " + pkg + " (vulnerable: " + ", ".join(mergedict[pkg]) + ")\n")
6666 + if "" in mergedict:
6667 + sys.stdout.write("\n>>> For the following packages, no upgrade path exists:\n")
6668 + sys.stdout.write(" " + ", ".join(mergedict[""]))
6669 elif mode == "inject":
6670 sys.stdout.write("injecting " + myid + "\n")
6671 myglsa.inject()
6672 @@ -268,9 +283,9 @@ if mode == "test":
6673 # mail mode as requested by solar
6674 if mode == "mail":
6675 import portage.mail, socket
6676 - from io import StringIO
6677 + from io import BytesIO
6678 from email.mime.text import MIMEText
6679 -
6680 +
6681 # color doesn't make any sense for mail
6682 nocolor()
6683
6684 @@ -278,7 +293,7 @@ if mode == "mail":
6685 myrecipient = portage.settings["PORTAGE_ELOG_MAILURI"].split()[0]
6686 else:
6687 myrecipient = "root@localhost"
6688 -
6689 +
6690 if "PORTAGE_ELOG_MAILFROM" in portage.settings:
6691 myfrom = portage.settings["PORTAGE_ELOG_MAILFROM"]
6692 else:
6693 @@ -287,11 +302,13 @@ if mode == "mail":
6694 mysubject = "[glsa-check] Summary for %s" % socket.getfqdn()
6695
6696 # need a file object for summarylist()
6697 - myfd = StringIO()
6698 - myfd.write("GLSA Summary report for host %s\n" % socket.getfqdn())
6699 - myfd.write("(Command was: %s)\n\n" % " ".join(sys.argv))
6700 + myfd = BytesIO()
6701 + line = "GLSA Summary report for host %s\n" % socket.getfqdn()
6702 + myfd.write(line.encode("utf-8"))
6703 + line = "(Command was: %s)\n\n" % " ".join(sys.argv)
6704 + myfd.write(line.encode("utf-8"))
6705 summarylist(glsalist, fd1=myfd, fd2=myfd)
6706 - summary = str(myfd.getvalue())
6707 + summary = myfd.getvalue().decode("utf-8")
6708 myfd.close()
6709
6710 myattachments = []
6711 @@ -302,16 +319,17 @@ if mode == "mail":
6712 if verbose:
6713 sys.stderr.write(("invalid GLSA: %s (error message was: %s)\n" % (myid, e)))
6714 continue
6715 - myfd = StringIO()
6716 + myfd = BytesIO()
6717 myglsa.dump(outstream=myfd)
6718 - myattachments.append(MIMEText(str(myfd.getvalue()), _charset="utf8"))
6719 + attachment = myfd.getvalue().decode("utf-8")
6720 + myattachments.append(MIMEText(attachment, _charset="utf8"))
6721 myfd.close()
6722 -
6723 +
6724 mymessage = portage.mail.create_message(myfrom, myrecipient, mysubject, summary, myattachments)
6725 portage.mail.send_mail(portage.settings, mymessage)
6726 -
6727 +
6728 sys.exit(0)
6729 -
6730 +
6731 # something wrong here, all valid paths are covered with sys.exit()
6732 sys.stderr.write("nothing more to do\n")
6733 sys.exit(2)
6734
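The mail-mode hunks above switch from StringIO to BytesIO so the encoded
writes made by summarylist() land in a byte buffer that is decoded exactly
once when the message body and attachments are assembled. A minimal sketch of
that round trip (hostname and command line are placeholders):

    from email.mime.text import MIMEText
    from io import BytesIO

    buf = BytesIO()
    buf.write(u"GLSA Summary report for host example.local\n".encode("utf-8"))
    buf.write(u"(Command was: glsa-check --mail)\n\n".encode("utf-8"))
    summary = buf.getvalue().decode("utf-8")
    buf.close()

    attachment = MIMEText(summary, _charset="utf-8")
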
6735 diff --git a/bin/helper-functions.sh b/bin/helper-functions.sh
6736 index c7400fa..b9bc74a 100644
6737 --- a/bin/helper-functions.sh
6738 +++ b/bin/helper-functions.sh
6739 @@ -10,42 +10,45 @@ source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/isolated-functions.sh
6740 #
6741 # API functions for doing parallel processing
6742 #
6743 -numjobs() {
6744 +makeopts_jobs() {
6745 # Copied from eutils.eclass:makeopts_jobs()
6746 local jobs=$(echo " ${MAKEOPTS} " | \
6747 sed -r -n 's:.*[[:space:]](-j|--jobs[=[:space:]])[[:space:]]*([0-9]+).*:\2:p')
6748 echo ${jobs:-1}
6749 }
6750
6751 -multijob_init() {
6752 +__multijob_init() {
6753 # Setup a pipe for children to write their pids to when they finish.
6754 - mj_control_pipe=$(mktemp -t multijob.XXXXXX)
6755 - rm "${mj_control_pipe}"
6756 - mkfifo "${mj_control_pipe}"
6757 - redirect_alloc_fd mj_control_fd "${mj_control_pipe}"
6758 - rm -f "${mj_control_pipe}"
6759 + # We have to allocate two fd's because POSIX has undefined behavior
6760 + # when you open a FIFO for simultaneous read/write. #487056
6761 + local pipe=$(mktemp -t multijob.XXXXXX)
6762 + rm -f "${pipe}"
6763 + mkfifo -m 600 "${pipe}"
6764 + __redirect_alloc_fd mj_write_fd "${pipe}"
6765 + __redirect_alloc_fd mj_read_fd "${pipe}"
6766 + rm -f "${pipe}"
6767
6768 # See how many children we can fork based on the user's settings.
6769 - mj_max_jobs=$(numjobs)
6770 + mj_max_jobs=$(makeopts_jobs "$@")
6771 mj_num_jobs=0
6772 }
6773
6774 -multijob_child_init() {
6775 - trap 'echo ${BASHPID} $? >&'${mj_control_fd} EXIT
6776 +__multijob_child_init() {
6777 + trap 'echo ${BASHPID:-$(__bashpid)} $? >&'${mj_write_fd} EXIT
6778 trap 'exit 1' INT TERM
6779 }
6780
6781 -multijob_finish_one() {
6782 +__multijob_finish_one() {
6783 local pid ret
6784 - read -r -u ${mj_control_fd} pid ret
6785 + read -r -u ${mj_read_fd} pid ret
6786 : $(( --mj_num_jobs ))
6787 return ${ret}
6788 }
6789
6790 -multijob_finish() {
6791 +__multijob_finish() {
6792 local ret=0
6793 while [[ ${mj_num_jobs} -gt 0 ]] ; do
6794 - multijob_finish_one
6795 + __multijob_finish_one
6796 : $(( ret |= $? ))
6797 done
6798 # Let bash clean up its internal child tracking state.
6799 @@ -53,38 +56,42 @@ multijob_finish() {
6800 return ${ret}
6801 }
6802
6803 -multijob_post_fork() {
6804 +__multijob_post_fork() {
6805 : $(( ++mj_num_jobs ))
6806 if [[ ${mj_num_jobs} -ge ${mj_max_jobs} ]] ; then
6807 - multijob_finish_one
6808 + __multijob_finish_one
6809 fi
6810 return $?
6811 }
6812
6813 -# @FUNCTION: redirect_alloc_fd
6814 +# @FUNCTION: __redirect_alloc_fd
6815 # @USAGE: <var> <file> [redirection]
6816 # @DESCRIPTION:
6817 # Find a free fd and redirect the specified file via it. Store the new
6818 # fd in the specified variable. Useful for the cases where we don't care
6819 # about the exact fd #.
6820 -redirect_alloc_fd() {
6821 +__redirect_alloc_fd() {
6822 local var=$1 file=$2 redir=${3:-"<>"}
6823
6824 if [[ $(( (BASH_VERSINFO[0] << 8) + BASH_VERSINFO[1] )) -ge $(( (4 << 8) + 1 )) ]] ; then
6825 - # Newer bash provides this functionality.
6826 - eval "exec {${var}}${redir}'${file}'"
6827 + # Newer bash provides this functionality.
6828 + eval "exec {${var}}${redir}'${file}'"
6829 else
6830 - # Need to provide the functionality ourselves.
6831 - local fd=10
6832 - while :; do
6833 - # Make sure the fd isn't open. It could be a char device,
6834 - # or a symlink (possibly broken) to something else.
6835 - if [[ ! -e /dev/fd/${fd} ]] && [[ ! -L /dev/fd/${fd} ]] ; then
6836 - eval "exec ${fd}${redir}'${file}'" && break
6837 - fi
6838 - [[ ${fd} -gt 1024 ]] && die "redirect_alloc_fd failed"
6839 - : $(( ++fd ))
6840 - done
6841 - : $(( ${var} = fd ))
6842 + # Need to provide the functionality ourselves.
6843 + local fd=10
6844 + local fddir=/dev/fd
6845 + # Prefer /proc/self/fd if available (/dev/fd
6846 + # doesn't work on solaris, see bug #474536).
6847 + [[ -d /proc/self/fd ]] && fddir=/proc/self/fd
6848 + while :; do
6849 + # Make sure the fd isn't open. It could be a char device,
6850 + # or a symlink (possibly broken) to something else.
6851 + if [[ ! -e ${fddir}/${fd} ]] && [[ ! -L ${fddir}/${fd} ]] ; then
6852 + eval "exec ${fd}${redir}'${file}'" && break
6853 + fi
6854 + [[ ${fd} -gt 1024 ]] && die 'could not locate a free temp fd !?'
6855 + : $(( ++fd ))
6856 + done
6857 + : $(( ${var} = fd ))
6858 fi
6859 }
6860
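makeopts_jobs() above recovers the -j/--jobs value from MAKEOPTS with sed. For
reference, the same extraction expressed in Python (an illustration, not part
of the commit):

    import os
    import re

    def makeopts_jobs(makeopts=None):
        # Mirror the sed expression: accept -jN, -j N, --jobs=N or --jobs N,
        # and default to 1 when nothing matches.
        if makeopts is None:
            makeopts = os.environ.get("MAKEOPTS", "")
        m = re.search(r"\s(?:-j|--jobs[=\s])\s*(\d+)", " %s " % makeopts)
        return int(m.group(1)) if m else 1

    print(makeopts_jobs("-j4 --load-average=5"))  # prints 4
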
6861 diff --git a/bin/install.py b/bin/install.py
6862 new file mode 100755
6863 index 0000000..3c5e0de
6864 --- /dev/null
6865 +++ b/bin/install.py
6866 @@ -0,0 +1,253 @@
6867 +#!/usr/bin/python -b
6868 +# Copyright 2013-2014 Gentoo Foundation
6869 +# Distributed under the terms of the GNU General Public License v2
6870 +
6871 +import os
6872 +import stat
6873 +import sys
6874 +import subprocess
6875 +import traceback
6876 +
6877 +import portage
6878 +from portage.util._argparse import ArgumentParser
6879 +from portage.util.movefile import _copyxattr
6880 +from portage.exception import OperationNotSupported
6881 +
6882 +# Change back to original cwd _after_ all imports (bug #469338).
6883 +os.chdir(os.environ["__PORTAGE_HELPER_CWD"])
6884 +
6885 +def parse_args(args):
6886 + """
6887 + Parse the command line arguments using portage.util._argparse.ArgumentParser for python 2.6 compatibility
6888 + Args:
6889 + args: a list of the whitespace delimited command line arguments
6890 + Returns:
6891 + tuple of the Namespace of parsed options, and a list of ordered parameters
6892 + """
6893 + parser = ArgumentParser(add_help=False)
6894 +
6895 + parser.add_argument(
6896 + "-b",
6897 + action="store_true",
6898 + dest="shortopt_b"
6899 + )
6900 + parser.add_argument(
6901 + "--backup",
6902 + action="store",
6903 + dest="backup"
6904 + )
6905 + parser.add_argument(
6906 + "-c",
6907 + action="store_true",
6908 + dest="shortopt_c"
6909 + )
6910 + parser.add_argument(
6911 + "--compare",
6912 + "-C",
6913 + action="store_true",
6914 + dest="compare"
6915 + )
6916 + parser.add_argument(
6917 + "--directory",
6918 + "-d",
6919 + action="store_true",
6920 + dest="directory"
6921 + )
6922 + parser.add_argument(
6923 + "-D",
6924 + action="store_true",
6925 + dest="shortopt_D"
6926 + )
6927 + parser.add_argument(
6928 + "--owner",
6929 + "-o",
6930 + action="store",
6931 + dest="owner"
6932 + )
6933 + parser.add_argument(
6934 + "--group",
6935 + "-g",
6936 + action="store",
6937 + dest="group"
6938 + )
6939 + parser.add_argument(
6940 + "--mode",
6941 + "-m",
6942 + action="store",
6943 + dest="mode"
6944 + )
6945 + parser.add_argument(
6946 + "--preserve-timestamps",
6947 + "-p",
6948 + action="store_true",
6949 + dest="preserve_timestamps"
6950 + )
6951 + parser.add_argument(
6952 + "--strip",
6953 + "-s",
6954 + action="store_true",
6955 + dest="strip"
6956 + )
6957 + parser.add_argument(
6958 + "--strip-program",
6959 + action="store",
6960 + dest="strip_program"
6961 + )
6962 + parser.add_argument(
6963 + "--suffix",
6964 + "-S",
6965 + action="store",
6966 + dest="suffix"
6967 + )
6968 + parser.add_argument(
6969 + "--target-directory",
6970 + "-t",
6971 + action="store",
6972 + dest="target_directory"
6973 + )
6974 + parser.add_argument(
6975 + "--no-target-directory",
6976 + "-T",
6977 + action="store_true",
6978 + dest="no_target_directory"
6979 + )
6980 + parser.add_argument(
6981 + "--context",
6982 + "-Z",
6983 + action="store",
6984 + dest="context"
6985 + )
6986 + parser.add_argument(
6987 + "--verbose",
6988 + "-v",
6989 + action="store_true",
6990 + dest="verbose"
6991 + )
6992 + parser.add_argument(
6993 + "--help",
6994 + action="store_true",
6995 + dest="help"
6996 + )
6997 + parser.add_argument(
6998 + "--version",
6999 + action="store_true",
7000 + dest="version"
7001 + )
7002 +
7003 + # Use parse_known_args for maximum compatibility with
7004 + # getopt handling of non-option file arguments. Note
7005 + # that parser.add_argument("files", nargs='+') would
7006 + # be subtly incompatible because it requires that all
7007 + # of the file arguments be grouped sequentially. Also
7008 + # note that we have to explicitly call add_argument
7009 + # for known options in order for argparse to correctly
7010 + # separate option arguments from file arguments in all
7011 + # cases (it also allows for optparse compatibility).
7012 + parsed_args = parser.parse_known_args()
7013 +
7014 + opts = parsed_args[0]
7015 + files = parsed_args[1]
7016 + files = [f for f in files if f != "--"] # filter out "--"
7017 +
7018 + return (opts, files)
7019 +
7020 +
7021 +def copy_xattrs(opts, files):
7022 + """
7023 + Copy the extended attributes using portage.util.movefile._copyxattr
7024 + Args:
7025 + opts: Namespace of the parsed command line options
7026 + files: list of ordered command line parameters which should be files/directories
7027 + Returns:
7028 + system exit code
7029 + """
7030 + if opts.directory or not files:
7031 + return os.EX_OK
7032 +
7033 + if opts.target_directory is None:
7034 + source, target = files[:-1], files[-1]
7035 + target_is_directory = os.path.isdir(target)
7036 + else:
7037 + source, target = files, opts.target_directory
7038 + target_is_directory = True
7039 +
7040 + exclude = os.environ.get("PORTAGE_XATTR_EXCLUDE", "security.* system.nfs4_acl")
7041 +
7042 + try:
7043 + if target_is_directory:
7044 + for s in source:
7045 + abs_path = os.path.join(target, os.path.basename(s))
7046 + _copyxattr(s, abs_path, exclude=exclude)
7047 + else:
7048 + _copyxattr(source[0], target, exclude=exclude)
7049 + return os.EX_OK
7050 +
7051 + except OperationNotSupported:
7052 + traceback.print_exc()
7053 + return os.EX_OSERR
7054 +
7055 +
7056 +def Which(filename, path=None, exclude=None):
7057 + """
7058 + Find the absolute path of 'filename' in a given search 'path'
7059 + Args:
7060 + filename: basename of the file
7061 + path: colon delimited search path
7062 + exclude: path of file to exclude
7063 + """
7064 + if path is None:
7065 + path = os.environ.get('PATH', '')
7066 +
7067 + if exclude is not None:
7068 + st = os.stat(exclude)
7069 + exclude = (st.st_ino, st.st_dev)
7070 +
7071 + for p in path.split(':'):
7072 + p = os.path.join(p, filename)
7073 + if os.access(p, os.X_OK):
7074 + try:
7075 + st = os.stat(p)
7076 + except OSError:
7077 + # file disappeared?
7078 + pass
7079 + else:
7080 + if stat.S_ISREG(st.st_mode) and \
7081 + (exclude is None or exclude != (st.st_ino, st.st_dev)):
7082 + return p
7083 +
7084 + return None
7085 +
7086 +
7087 +def main(args):
7088 + opts, files = parse_args(args)
7089 + install_binary = Which('install', exclude=os.environ["__PORTAGE_HELPER_PATH"])
7090 + if install_binary is None:
7091 + sys.stderr.write("install: command not found\n")
7092 + return 127
7093 +
7094 + cmdline = [install_binary]
7095 + cmdline += args
7096 +
7097 + if sys.hexversion >= 0x3000000:
7098 + # We can't trust that the filesystem encoding (locale dependent)
7099 + # correctly matches the arguments, so use surrogateescape to
7100 + # pass through the original argv bytes for Python 3.
7101 + fs_encoding = sys.getfilesystemencoding()
7102 + cmdline = [x.encode(fs_encoding, 'surrogateescape') for x in cmdline]
7103 + files = [x.encode(fs_encoding, 'surrogateescape') for x in files]
7104 + if opts.target_directory is not None:
7105 + opts.target_directory = \
7106 + opts.target_directory.encode(fs_encoding, 'surrogateescape')
7107 +
7108 + returncode = subprocess.call(cmdline)
7109 + if returncode == os.EX_OK:
7110 + returncode = copy_xattrs(opts, files)
7111 + if returncode != os.EX_OK:
7112 + portage.util.writemsg("!!! install: copy_xattrs failed with the "
7113 + "following arguments: %s\n" %
7114 + " ".join(portage._shell_quote(x) for x in args), noiselevel=-1)
7115 + return returncode
7116 +
7117 +
7118 +if __name__ == "__main__":
7119 + sys.exit(main(sys.argv[1:]))
7120
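bin/install.py above wraps the system install(1): it forwards argv to the real
binary and then copies extended attributes itself. The surrogateescape
re-encoding is the key Python 3 detail, since it lets filename bytes that do
not decode in the current locale survive the str round trip. A reduced sketch
of the forwarding step (the install path here is an assumption):

    import subprocess
    import sys

    cmdline = ["/usr/bin/install"] + sys.argv[1:]
    if sys.hexversion >= 0x3000000:
        # Re-encode the decoded argv so arbitrary filename bytes reach the
        # child process unchanged.
        fs_encoding = sys.getfilesystemencoding()
        cmdline = [x.encode(fs_encoding, "surrogateescape") for x in cmdline]
    sys.exit(subprocess.call(cmdline))
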
7121 diff --git a/bin/isolated-functions.sh b/bin/isolated-functions.sh
7122 index dbf988b..a22af57 100644
7123 --- a/bin/isolated-functions.sh
7124 +++ b/bin/isolated-functions.sh
7125 @@ -1,7 +1,9 @@
7126 #!/bin/bash
7127 -# Copyright 1999-2011 Gentoo Foundation
7128 +# Copyright 1999-2014 Gentoo Foundation
7129 # Distributed under the terms of the GNU General Public License v2
7130
7131 +source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}/eapi.sh"
7132 +
7133 # We need this next line for "die" and "assert". It expands
7134 # It _must_ preceed all the calls to die and assert.
7135 shopt -s expand_aliases
7136 @@ -15,7 +17,7 @@ assert() {
7137 done
7138 }
7139
7140 -assert_sigpipe_ok() {
7141 +__assert_sigpipe_ok() {
7142 # When extracting a tar file like this:
7143 #
7144 # bzip2 -dc foo.tar.bz2 | tar xof -
7145 @@ -43,21 +45,21 @@ assert_sigpipe_ok() {
7146
7147 shopt -s extdebug
7148
7149 -# dump_trace([number of funcs on stack to skip],
7150 +# __dump_trace([number of funcs on stack to skip],
7151 # [whitespacing for filenames],
7152 # [whitespacing for line numbers])
7153 -dump_trace() {
7154 +__dump_trace() {
7155 local funcname="" sourcefile="" lineno="" s="yes" n p
7156 declare -i strip=${1:-1}
7157 local filespacing=$2 linespacing=$3
7158
7159 - # The qa_call() function and anything before it are portage internals
7160 + # The __qa_call() function and anything before it are portage internals
7161 # that the user will not be interested in. Therefore, the stack trace
7162 - # should only show calls that come after qa_call().
7163 + # should only show calls that come after __qa_call().
7164 (( n = ${#FUNCNAME[@]} - 1 ))
7165 (( p = ${#BASH_ARGV[@]} ))
7166 while (( n > 0 )) ; do
7167 - [ "${FUNCNAME[${n}]}" == "qa_call" ] && break
7168 + [ "${FUNCNAME[${n}]}" == "__qa_call" ] && break
7169 (( p -= ${BASH_ARGC[${n}]} ))
7170 (( n-- ))
7171 done
7172 @@ -86,7 +88,7 @@ dump_trace() {
7173 }
7174
7175 nonfatal() {
7176 - if has "${EAPI:-0}" 0 1 2 3 3_pre2 ; then
7177 + if ! ___eapi_has_nonfatal; then
7178 die "$FUNCNAME() not supported in this EAPI"
7179 fi
7180 if [[ $# -lt 1 ]]; then
7181 @@ -96,18 +98,24 @@ nonfatal() {
7182 PORTAGE_NONFATAL=1 "$@"
7183 }
7184
7185 -helpers_die() {
7186 - case "${EAPI:-0}" in
7187 - 0|1|2|3)
7188 - echo -e "$@" >&2
7189 - ;;
7190 - *)
7191 - die "$@"
7192 - ;;
7193 - esac
7194 +__bashpid() {
7195 + # The BASHPID variable is new to bash-4.0, so add a hack for older
7196 + # versions. This must be used like so:
7197 + # ${BASHPID:-$(__bashpid)}
7198 + sh -c 'echo ${PPID}'
7199 +}
7200 +
7201 +__helpers_die() {
7202 + if ___eapi_helpers_can_die; then
7203 + die "$@"
7204 + else
7205 + echo -e "$@" >&2
7206 + fi
7207 }
7208
7209 die() {
7210 + local IFS=$' \t\n'
7211 +
7212 if [[ $PORTAGE_NONFATAL -eq 1 ]]; then
7213 echo -e " $WARN*$NORMAL ${FUNCNAME[1]}: WARNING: $@" >&2
7214 return 1
7215 @@ -124,7 +132,7 @@ die() {
7216 # setup spacing to make output easier to read
7217 (( n = ${#FUNCNAME[@]} - 1 ))
7218 while (( n > 0 )) ; do
7219 - [ "${FUNCNAME[${n}]}" == "qa_call" ] && break
7220 + [ "${FUNCNAME[${n}]}" == "__qa_call" ] && break
7221 (( n-- ))
7222 done
7223 (( n == 0 )) && (( n = ${#FUNCNAME[@]} - 1 ))
7224 @@ -140,14 +148,14 @@ die() {
7225 # get a stack trace, so at least report the phase that failed.
7226 local phase_str=
7227 [[ -n $EBUILD_PHASE ]] && phase_str=" ($EBUILD_PHASE phase)"
7228 - eerror "ERROR: $CATEGORY/$PF failed${phase_str}:"
7229 + eerror "ERROR: ${CATEGORY}/${PF}::${PORTAGE_REPO_NAME} failed${phase_str}:"
7230 eerror " ${*:-(no error message)}"
7231 eerror
7232 - # dump_trace is useless when the main script is a helper binary
7233 + # __dump_trace is useless when the main script is a helper binary
7234 local main_index
7235 (( main_index = ${#BASH_SOURCE[@]} - 1 ))
7236 if has ${BASH_SOURCE[$main_index]##*/} ebuild.sh misc-functions.sh ; then
7237 - dump_trace 2 ${filespacing} ${linespacing}
7238 + __dump_trace 2 ${filespacing} ${linespacing}
7239 eerror " $(printf "%${filespacing}s" "${BASH_SOURCE[1]##*/}"), line $(printf "%${linespacing}s" "${BASH_LINENO[0]}"): Called die"
7240 eerror "The specific snippet of code:"
7241 # This scans the file that called die and prints out the logic that
7242 @@ -173,39 +181,12 @@ die() {
7243 | while read -r n ; do eerror " ${n#RETAIN-LEADING-SPACE}" ; done
7244 eerror
7245 fi
7246 - eerror "If you need support, post the output of \`emerge --info '=$CATEGORY/$PF'\`,"
7247 - eerror "the complete build log and the output of \`emerge -pqv '=$CATEGORY/$PF'\`."
7248 - if [[ -n ${EBUILD_OVERLAY_ECLASSES} ]] ; then
7249 - eerror "This ebuild used the following eclasses from overlays:"
7250 - local x
7251 - for x in ${EBUILD_OVERLAY_ECLASSES} ; do
7252 - eerror " ${x}"
7253 - done
7254 - fi
7255 - if [ "${EMERGE_FROM}" != "binary" ] && \
7256 - ! has ${EBUILD_PHASE} prerm postrm && \
7257 - [ "${EBUILD#${PORTDIR}/}" == "${EBUILD}" ] ; then
7258 - local overlay=${EBUILD%/*}
7259 - overlay=${overlay%/*}
7260 - overlay=${overlay%/*}
7261 - if [[ -n $PORTAGE_REPO_NAME ]] ; then
7262 - eerror "This ebuild is from an overlay named" \
7263 - "'$PORTAGE_REPO_NAME': '${overlay}/'"
7264 - else
7265 - eerror "This ebuild is from an overlay: '${overlay}/'"
7266 - fi
7267 - elif [[ -n $PORTAGE_REPO_NAME && -f "$PORTDIR"/profiles/repo_name ]] ; then
7268 - local portdir_repo_name=$(<"$PORTDIR"/profiles/repo_name)
7269 - if [[ -n $portdir_repo_name && \
7270 - $portdir_repo_name != $PORTAGE_REPO_NAME ]] ; then
7271 - eerror "This ebuild is from a repository" \
7272 - "named '$PORTAGE_REPO_NAME'"
7273 - fi
7274 - fi
7275 + eerror "If you need support, post the output of \`emerge --info '=${CATEGORY}/${PF}::${PORTAGE_REPO_NAME}'\`,"
7276 + eerror "the complete build log and the output of \`emerge -pqv '=${CATEGORY}/${PF}::${PORTAGE_REPO_NAME}'\`."
7277
7278 # Only call die hooks here if we are executed via ebuild.sh or
7279 # misc-functions.sh, since those are the only cases where the environment
7280 - # contains the hook functions. When necessary (like for helpers_die), die
7281 + # contains the hook functions. When necessary (like for __helpers_die), die
7282 # hooks are automatically called later by a misc-functions.sh invocation.
7283 if has ${BASH_SOURCE[$main_index]##*/} ebuild.sh misc-functions.sh && \
7284 [[ ${EBUILD_PHASE} != depend ]] ; then
7285 @@ -218,7 +199,8 @@ die() {
7286
7287 if [[ -n ${PORTAGE_LOG_FILE} ]] ; then
7288 eerror "The complete build log is located at '${PORTAGE_LOG_FILE}'."
7289 - if [[ ${PORTAGE_LOG_FILE} != ${T}/* ]] ; then
7290 + if [[ ${PORTAGE_LOG_FILE} != ${T}/* ]] && \
7291 + ! has fail-clean ${FEATURES} ; then
7292 # Display path to symlink in ${T}, as requested in bug #412865.
7293 local log_ext=log
7294 [[ ${PORTAGE_LOG_FILE} != *.log ]] && log_ext+=.${PORTAGE_LOG_FILE##*.}
7295 @@ -241,26 +223,20 @@ die() {
7296 [[ -n $PORTAGE_IPC_DAEMON ]] && "$PORTAGE_BIN_PATH"/ebuild-ipc exit 1
7297
7298 # subshell die support
7299 - [[ $BASHPID = $EBUILD_MASTER_PID ]] || kill -s SIGTERM $EBUILD_MASTER_PID
7300 + [[ ${BASHPID:-$(__bashpid)} == ${EBUILD_MASTER_PID} ]] || kill -s SIGTERM ${EBUILD_MASTER_PID}
7301 exit 1
7302 }
7303
7304 -# We need to implement diefunc() since environment.bz2 files contain
7305 -# calls to it (due to alias expansion).
7306 -diefunc() {
7307 - die "${@}"
7308 -}
7309 -
7310 -quiet_mode() {
7311 +__quiet_mode() {
7312 [[ ${PORTAGE_QUIET} -eq 1 ]]
7313 }
7314
7315 -vecho() {
7316 - quiet_mode || echo "$@"
7317 +__vecho() {
7318 + __quiet_mode || echo "$@"
7319 }
7320
7321 # Internal logging function, don't use this in ebuilds
7322 -elog_base() {
7323 +__elog_base() {
7324 local messagetype
7325 [ -z "${1}" -o -z "${T}" -o ! -d "${T}/logging" ] && return 1
7326 case "${1}" in
7327 @@ -269,7 +245,7 @@ elog_base() {
7328 shift
7329 ;;
7330 *)
7331 - vecho -e " ${BAD}*${NORMAL} Invalid use of internal function elog_base(), next message will not be logged"
7332 + __vecho -e " ${BAD}*${NORMAL} Invalid use of internal function __elog_base(), next message will not be logged"
7333 return 1
7334 ;;
7335 esac
7336 @@ -281,17 +257,17 @@ elog_base() {
7337 }
7338
7339 eqawarn() {
7340 - elog_base QA "$*"
7341 + __elog_base QA "$*"
7342 [[ ${RC_ENDCOL} != "yes" && ${LAST_E_CMD} == "ebegin" ]] && echo
7343 echo -e "$@" | while read -r ; do
7344 - vecho " $WARN*$NORMAL $REPLY" >&2
7345 + __vecho " $WARN*$NORMAL $REPLY" >&2
7346 done
7347 LAST_E_CMD="eqawarn"
7348 return 0
7349 }
7350
7351 elog() {
7352 - elog_base LOG "$*"
7353 + __elog_base LOG "$*"
7354 [[ ${RC_ENDCOL} != "yes" && ${LAST_E_CMD} == "ebegin" ]] && echo
7355 echo -e "$@" | while read -r ; do
7356 echo " $GOOD*$NORMAL $REPLY"
7357 @@ -300,26 +276,8 @@ elog() {
7358 return 0
7359 }
7360
7361 -esyslog() {
7362 - local pri=
7363 - local tag=
7364 -
7365 - if [ -x /usr/bin/logger ]
7366 - then
7367 - pri="$1"
7368 - tag="$2"
7369 -
7370 - shift 2
7371 - [ -z "$*" ] && return 0
7372 -
7373 - /usr/bin/logger -p "${pri}" -t "${tag}" -- "$*"
7374 - fi
7375 -
7376 - return 0
7377 -}
7378 -
7379 einfo() {
7380 - elog_base INFO "$*"
7381 + __elog_base INFO "$*"
7382 [[ ${RC_ENDCOL} != "yes" && ${LAST_E_CMD} == "ebegin" ]] && echo
7383 echo -e "$@" | while read -r ; do
7384 echo " $GOOD*$NORMAL $REPLY"
7385 @@ -329,7 +287,7 @@ einfo() {
7386 }
7387
7388 einfon() {
7389 - elog_base INFO "$*"
7390 + __elog_base INFO "$*"
7391 [[ ${RC_ENDCOL} != "yes" && ${LAST_E_CMD} == "ebegin" ]] && echo
7392 echo -ne " ${GOOD}*${NORMAL} $*"
7393 LAST_E_CMD="einfon"
7394 @@ -337,7 +295,7 @@ einfon() {
7395 }
7396
7397 ewarn() {
7398 - elog_base WARN "$*"
7399 + __elog_base WARN "$*"
7400 [[ ${RC_ENDCOL} != "yes" && ${LAST_E_CMD} == "ebegin" ]] && echo
7401 echo -e "$@" | while read -r ; do
7402 echo " $WARN*$NORMAL $RC_INDENTATION$REPLY" >&2
7403 @@ -347,7 +305,7 @@ ewarn() {
7404 }
7405
7406 eerror() {
7407 - elog_base ERROR "$*"
7408 + __elog_base ERROR "$*"
7409 [[ ${RC_ENDCOL} != "yes" && ${LAST_E_CMD} == "ebegin" ]] && echo
7410 echo -e "$@" | while read -r ; do
7411 echo " $BAD*$NORMAL $RC_INDENTATION$REPLY" >&2
7412 @@ -372,7 +330,7 @@ ebegin() {
7413 return 0
7414 }
7415
7416 -_eend() {
7417 +__eend() {
7418 local retval=${1:-0} efunc=${2:-eerror} msg
7419 shift 2
7420
7421 @@ -399,13 +357,13 @@ eend() {
7422 local retval=${1:-0}
7423 shift
7424
7425 - _eend ${retval} eerror "$*"
7426 + __eend ${retval} eerror "$*"
7427
7428 LAST_E_CMD="eend"
7429 return ${retval}
7430 }
7431
7432 -unset_colors() {
7433 +__unset_colors() {
7434 COLS=80
7435 ENDCOL=
7436
7437 @@ -417,7 +375,7 @@ unset_colors() {
7438 BRACKET=
7439 }
7440
7441 -set_colors() {
7442 +__set_colors() {
7443 COLS=${COLUMNS:-0} # bash's internal COLUMNS variable
7444 # Avoid wasteful stty calls during the "depend" phases.
7445 # If stdout is a pipe, the parent process can export COLUMNS
7446 @@ -450,10 +408,10 @@ RC_DOT_PATTERN=''
7447
7448 case "${NOCOLOR:-false}" in
7449 yes|true)
7450 - unset_colors
7451 + __unset_colors
7452 ;;
7453 no|false)
7454 - set_colors
7455 + __set_colors
7456 ;;
7457 esac
7458
7459 @@ -504,4 +462,24 @@ has() {
7460 return 1
7461 }
7462
7463 +__repo_attr() {
7464 + local appropriate_section=0 exit_status=1 line saved_extglob_shopt=$(shopt -p extglob)
7465 + shopt -s extglob
7466 + while read line; do
7467 + [[ ${appropriate_section} == 0 && ${line} == "[$1]" ]] && appropriate_section=1 && continue
7468 + [[ ${appropriate_section} == 1 && ${line} == "["*"]" ]] && appropriate_section=0 && continue
7469 + # If a conditional expression like [[ ${line} == $2*( )=* ]] is used
7470 + # then bash-3.2 produces an error like the following when the file is
7471 + # sourced: syntax error in conditional expression: unexpected token `('
7472 + # Therefore, use a regular expression for compatibility.
7473 + if [[ ${appropriate_section} == 1 && ${line} =~ ^${2}[[:space:]]*= ]]; then
7474 + echo "${line##$2*( )=*( )}"
7475 + exit_status=0
7476 + break
7477 + fi
7478 + done <<< "${PORTAGE_REPOSITORIES}"
7479 + eval "${saved_extglob_shopt}"
7480 + return ${exit_status}
7481 +}
7482 +
7483 true
7484
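__repo_attr() added above extracts a single key from the INI-style
PORTAGE_REPOSITORIES blob from within bash. Roughly the same lookup in
Python 3, for comparison (an illustration; configparser is somewhat more
forgiving than the shell loop):

    import configparser
    import os

    def repo_attr(section, key):
        parser = configparser.ConfigParser()
        parser.read_string(os.environ.get("PORTAGE_REPOSITORIES", ""))
        return parser.get(section, key, fallback=None)

    # e.g. repo_attr("gentoo", "location")
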
7485 diff --git a/bin/lock-helper.py b/bin/lock-helper.py
7486 index dfb8876..aa2dd60 100755
7487 --- a/bin/lock-helper.py
7488 +++ b/bin/lock-helper.py
7489 @@ -1,11 +1,12 @@
7490 -#!/usr/bin/python
7491 -# Copyright 2010-2011 Gentoo Foundation
7492 +#!/usr/bin/python -b
7493 +# Copyright 2010-2014 Gentoo Foundation
7494 # Distributed under the terms of the GNU General Public License v2
7495
7496 import os
7497 import sys
7498 sys.path.insert(0, os.environ['PORTAGE_PYM_PATH'])
7499 import portage
7500 +portage._internal_caller = True
7501 portage._disable_legacy_globals()
7502
7503 def main(args):
7504
7505 diff --git a/bin/misc-functions.sh b/bin/misc-functions.sh
7506 index 9eec8bb..5ccf7c2 100755
7507 --- a/bin/misc-functions.sh
7508 +++ b/bin/misc-functions.sh
7509 @@ -1,5 +1,5 @@
7510 #!/bin/bash
7511 -# Copyright 1999-2011 Gentoo Foundation
7512 +# Copyright 1999-2013 Gentoo Foundation
7513 # Distributed under the terms of the GNU General Public License v2
7514 #
7515 # Miscellaneous shell functions that make use of the ebuild env but don't need
7516 @@ -17,8 +17,9 @@ shift $#
7517 source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}/ebuild.sh"
7518
7519 install_symlink_html_docs() {
7520 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
7521 - case "$EAPI" in 0|1|2) local ED=${D} ;; esac
7522 + if ! ___eapi_has_prefix_variables; then
7523 + local ED=${D}
7524 + fi
7525 cd "${ED}" || die "cd failed"
7526 #symlink the html documentation (if DOC_SYMLINKS_DIR is set in make.conf)
7527 if [ -n "${DOC_SYMLINKS_DIR}" ] ; then
7528 @@ -30,10 +31,10 @@ install_symlink_html_docs() {
7529 done
7530 if [ -n "${mydocdir}" ] ; then
7531 local mysympath
7532 - if [ -z "${SLOT}" -o "${SLOT}" = "0" ] ; then
7533 + if [ -z "${SLOT}" -o "${SLOT%/*}" = "0" ] ; then
7534 mysympath="${DOC_SYMLINKS_DIR}/${CATEGORY}/${PN}"
7535 else
7536 - mysympath="${DOC_SYMLINKS_DIR}/${CATEGORY}/${PN}-${SLOT}"
7537 + mysympath="${DOC_SYMLINKS_DIR}/${CATEGORY}/${PN}-${SLOT%/*}"
7538 fi
7539 einfo "Symlinking ${mysympath} to the HTML documentation"
7540 dodir "${DOC_SYMLINKS_DIR}/${CATEGORY}"
7541 @@ -43,7 +44,20 @@ install_symlink_html_docs() {
7542 }
7543
7544 # replacement for "readlink -f" or "realpath"
7545 +READLINK_F_WORKS=""
7546 canonicalize() {
7547 + if [[ -z ${READLINK_F_WORKS} ]] ; then
7548 + if [[ $(readlink -f -- /../ 2>/dev/null) == "/" ]] ; then
7549 + READLINK_F_WORKS=true
7550 + else
7551 + READLINK_F_WORKS=false
7552 + fi
7553 + fi
7554 + if ${READLINK_F_WORKS} ; then
7555 + readlink -f -- "$@"
7556 + return
7557 + fi
7558 +
7559 local f=$1 b n=10 wd=$(pwd)
7560 while (( n-- > 0 )); do
7561 while [[ ${f: -1} = / && ${#f} -gt 1 ]]; do
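canonicalize() now probes once whether the platform's readlink supports -f,
caching the answer in READLINK_F_WORKS before falling back to the manual loop.
The same probe expressed in Python, for illustration only:

    import subprocess

    def readlink_f_works():
        # "readlink -f -- /../" prints "/" on implementations that support -f.
        try:
            out = subprocess.check_output(
                ["readlink", "-f", "--", "/../"], stderr=subprocess.DEVNULL)
        except (OSError, subprocess.CalledProcessError):
            return False
        return out.strip() == b"/"
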
7562 @@ -66,8 +80,9 @@ canonicalize() {
7563 prepcompress() {
7564 local -a include exclude incl_d incl_f
7565 local f g i real_f real_d
7566 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
7567 - case "$EAPI" in 0|1|2) local ED=${D} ;; esac
7568 + if ! ___eapi_has_prefix_variables; then
7569 + local ED=${D}
7570 + fi
7571
7572 # Canonicalize path names and check for their existence.
7573 real_d=$(canonicalize "${ED}")
7574 @@ -141,7 +156,7 @@ prepcompress() {
7575
7576 # Queue up for compression.
7577 # ecompress{,dir} doesn't like to be called with empty argument lists.
7578 - [[ ${#incl_d[@]} -gt 0 ]] && ecompressdir --queue "${incl_d[@]}"
7579 + [[ ${#incl_d[@]} -gt 0 ]] && ecompressdir --limit ${PORTAGE_DOCOMPRESS_SIZE_LIMIT:-0} --queue "${incl_d[@]}"
7580 [[ ${#incl_f[@]} -gt 0 ]] && ecompress --queue "${incl_f[@]/#/${ED}}"
7581 [[ ${#exclude[@]} -gt 0 ]] && ecompressdir --ignore "${exclude[@]}"
7582 return 0
7583 @@ -149,13 +164,12 @@ prepcompress() {
7584
7585 install_qa_check() {
7586 local f i qa_var x
7587 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
7588 - case "$EAPI" in 0|1|2) local EPREFIX= ED=${D} ;; esac
7589 + if ! ___eapi_has_prefix_variables; then
7590 + local EPREFIX= ED=${D}
7591 + fi
7592
7593 cd "${ED}" || die "cd failed"
7594
7595 - # Merge QA_FLAGS_IGNORED and QA_DT_HASH into a single array, since
7596 - # QA_DT_HASH is deprecated.
7597 qa_var="QA_FLAGS_IGNORED_${ARCH/-/_}"
7598 eval "[[ -n \${!qa_var} ]] && QA_FLAGS_IGNORED=(\"\${${qa_var}[@]}\")"
7599 if [[ ${#QA_FLAGS_IGNORED[@]} -eq 1 ]] ; then
7600 @@ -166,29 +180,6 @@ install_qa_check() {
7601 set -${shopts}
7602 fi
7603
7604 - qa_var="QA_DT_HASH_${ARCH/-/_}"
7605 - eval "[[ -n \${!qa_var} ]] && QA_DT_HASH=(\"\${${qa_var}[@]}\")"
7606 - if [[ ${#QA_DT_HASH[@]} -eq 1 ]] ; then
7607 - local shopts=$-
7608 - set -o noglob
7609 - QA_DT_HASH=(${QA_DT_HASH})
7610 - set +o noglob
7611 - set -${shopts}
7612 - fi
7613 -
7614 - if [[ -n ${QA_DT_HASH} ]] ; then
7615 - QA_FLAGS_IGNORED=("${QA_FLAGS_IGNORED[@]}" "${QA_DT_HASH[@]}")
7616 - unset QA_DT_HASH
7617 - fi
7618 -
7619 - # Merge QA_STRICT_FLAGS_IGNORED and QA_STRICT_DT_HASH, since
7620 - # QA_STRICT_DT_HASH is deprecated
7621 - if [ "${QA_STRICT_FLAGS_IGNORED-unset}" = unset ] && \
7622 - [ "${QA_STRICT_DT_HASH-unset}" != unset ] ; then
7623 - QA_STRICT_FLAGS_IGNORED=1
7624 - unset QA_STRICT_DT_HASH
7625 - fi
7626 -
7627 # Check for files built without respecting *FLAGS. Note that
7628 # -frecord-gcc-switches must be in all *FLAGS variables, in
7629 # order to avoid false positive results here.
7630 @@ -200,8 +191,7 @@ install_qa_check() {
7631 [[ "${FFLAGS}" == *-frecord-gcc-switches* ]] && \
7632 [[ "${FCFLAGS}" == *-frecord-gcc-switches* ]] ; then
7633 rm -f "${T}"/scanelf-ignored-CFLAGS.log
7634 - for x in $(scanelf -qyRF '%k %p' -k \!.GCC.command.line "${ED}" | \
7635 - sed -e "s:\!.GCC.command.line ::") ; do
7636 + for x in $(scanelf -qyRF '#k%p' -k '!.GCC.command.line' "${ED}") ; do
7637 # Separate out file types that are known to support
7638 # .GCC.command.line sections, using the `file` command
7639 # similar to how prepstrip uses it.
7640 @@ -226,11 +216,11 @@ install_qa_check() {
7641 -i "${T}"/scanelf-ignored-CFLAGS.log
7642 f=$(<"${T}"/scanelf-ignored-CFLAGS.log)
7643 if [[ -n ${f} ]] ; then
7644 - vecho -ne '\n'
7645 + __vecho -ne '\n'
7646 eqawarn "${BAD}QA Notice: Files built without respecting CFLAGS have been detected${NORMAL}"
7647 eqawarn " Please include the following list of files in your report:"
7648 eqawarn "${f}"
7649 - vecho -ne '\n'
7650 + __vecho -ne '\n'
7651 sleep 1
7652 else
7653 rm -f "${T}"/scanelf-ignored-CFLAGS.log
7654 @@ -240,7 +230,7 @@ install_qa_check() {
7655
7656 export STRIP_MASK
7657 prepall
7658 - has "${EAPI}" 0 1 2 3 || prepcompress
7659 + ___eapi_has_docompress && prepcompress
7660 ecompressdir --dequeue
7661 ecompress --dequeue
7662
7663 @@ -251,32 +241,50 @@ install_qa_check() {
7664 for x in etc/app-defaults usr/man usr/info usr/X11R6 usr/doc usr/locale ; do
7665 [[ -d ${ED}/$x ]] && f+=" $x\n"
7666 done
7667 -
7668 if [[ -n $f ]] ; then
7669 eqawarn "QA Notice: This ebuild installs into the following deprecated directories:"
7670 eqawarn
7671 eqawarn "$f"
7672 fi
7673
7674 - if [[ -d ${ED}/etc/udev/rules.d ]] ; then
7675 - f=
7676 - for x in $(ls "${ED}/etc/udev/rules.d") ; do
7677 - f+=" etc/udev/rules.d/$x\n"
7678 - done
7679 - if [[ -n $f ]] ; then
7680 - eqawarn "QA Notice: udev rules should be installed in /lib/udev/rules.d:"
7681 - eqawarn
7682 - eqawarn "$f"
7683 + # It's ok to create these directories, but not to install into them. #493154
7684 + # TODO: We should add var/lib to this list.
7685 + f=
7686 + for x in var/cache var/lock var/run run ; do
7687 + if [[ ! -L ${ED}/${x} && -d ${ED}/${x} ]] ; then
7688 + if [[ -z $(find "${ED}/${x}" -prune -empty) ]] ; then
7689 + f+=$(cd "${ED}"; find "${x}" -printf ' %p\n')
7690 + fi
7691 fi
7692 + done
7693 + if [[ -n ${f} ]] ; then
7694 + eqawarn "QA Notice: This ebuild installs into paths that should be created at runtime."
7695 + eqawarn " To fix, simply do not install into these directories. Instead, your package"
7696 + eqawarn " should create dirs on the fly at runtime as needed via init scripts/etc..."
7697 + eqawarn
7698 + eqawarn "${f}"
7699 + fi
7700 +
7701 + set +f
7702 + f=
7703 + for x in "${ED}etc/udev/rules.d/"* "${ED}lib"*"/udev/rules.d/"* ; do
7704 + [[ -e ${x} ]] || continue
7705 + [[ ${x} == ${ED}lib/udev/rules.d/* ]] && continue
7706 + f+=" ${x#${ED}}\n"
7707 + done
7708 + if [[ -n $f ]] ; then
7709 + eqawarn "QA Notice: udev rules should be installed in /lib/udev/rules.d:"
7710 + eqawarn
7711 + eqawarn "$f"
7712 fi
7713
7714 # Now we look for all world writable files.
7715 local unsafe_files=$(find "${ED}" -type f -perm -2 | sed -e "s:^${ED}:- :")
7716 if [[ -n ${unsafe_files} ]] ; then
7717 - vecho "QA Security Notice: world writable file(s):"
7718 - vecho "${unsafe_files}"
7719 - vecho "- This may or may not be a security problem, most of the time it is one."
7720 - vecho "- Please double check that $PF really needs a world writeable bit and file bugs accordingly."
7721 + __vecho "QA Security Notice: world writable file(s):"
7722 + __vecho "${unsafe_files}"
7723 + __vecho "- This may or may not be a security problem, most of the time it is one."
7724 + __vecho "- Please double check that $PF really needs a world writeable bit and file bugs accordingly."
7725 sleep 1
7726 fi
7727
7728 @@ -307,7 +315,7 @@ install_qa_check() {
7729 for l in $(echo "${rpath_files}" | grep -E ":${dir}|::|: "); do
7730 f+=" ${l%%:*}\n"
7731 if ! has stricter ${FEATURES}; then
7732 - vecho "Auto fixing rpaths for ${l%%:*}"
7733 + __vecho "Auto fixing rpaths for ${l%%:*}"
7734 TMPDIR="${dir}" scanelf -BXr "${l%%:*}" -o /dev/null
7735 fi
7736 done
7737 @@ -321,12 +329,12 @@ install_qa_check() {
7738
7739 # Print QA notice.
7740 if [[ -n ${f}${x} ]] ; then
7741 - vecho -ne '\n'
7742 + __vecho -ne '\n'
7743 eqawarn "QA Notice: The following files contain insecure RUNPATHs"
7744 eqawarn " Please file a bug about this at http://bugs.gentoo.org/"
7745 eqawarn " with the maintaining herd of the package."
7746 eqawarn "${f}${f:+${x:+\n}}${x}"
7747 - vecho -ne '\n'
7748 + __vecho -ne '\n'
7749 if [[ -n ${x} ]] || has stricter ${FEATURES} ; then
7750 insecure_rpath=1
7751 fi
7752 @@ -344,7 +352,7 @@ install_qa_check() {
7753 f=$(scanelf -qyRF '%t %p' "${ED}" | grep -v 'usr/lib/debug/')
7754 if [[ -n ${f} ]] ; then
7755 scanelf -qyRAF '%T %p' "${PORTAGE_BUILDDIR}"/ &> "${T}"/scanelf-textrel.log
7756 - vecho -ne '\n'
7757 + __vecho -ne '\n'
7758 eqawarn "QA Notice: The following files contain runtime text relocations"
7759 eqawarn " Text relocations force the dynamic linker to perform extra"
7760 eqawarn " work at startup, waste system resources, and may pose a security"
7761 @@ -353,7 +361,7 @@ install_qa_check() {
7762 eqawarn " For more information, see http://hardened.gentoo.org/pic-fix-guide.xml"
7763 eqawarn " Please include the following list of files in your report:"
7764 eqawarn "${f}"
7765 - vecho -ne '\n'
7766 + __vecho -ne '\n'
7767 die_msg="${die_msg} textrels,"
7768 sleep 1
7769 fi
7770 @@ -364,7 +372,7 @@ install_qa_check() {
7771 *-linux-gnu*)
7772 # Check for files with executable stacks, but only on arches which
7773 # are supported at the moment. Keep this list in sync with
7774 - # http://hardened.gentoo.org/gnu-stack.xml (Arch Status)
7775 + # http://www.gentoo.org/proj/en/hardened/gnu-stack.xml (Arch Status)
7776 case ${CTARGET:-${CHOST}} in
7777 arm*|i?86*|ia64*|m68k*|s390*|sh*|x86_64*)
7778 # Allow devs to mark things as ignorable ... e.g. things
7779 @@ -389,7 +397,7 @@ install_qa_check() {
7780 if [[ -n ${f} ]] ; then
7781 # One more pass to help devs track down the source
7782 scanelf -qyRAF '%e %p' "${PORTAGE_BUILDDIR}"/ &> "${T}"/scanelf-execstack.log
7783 - vecho -ne '\n'
7784 + __vecho -ne '\n'
7785 eqawarn "QA Notice: The following files contain writable and executable sections"
7786 eqawarn " Files with such sections will not work properly (or at all!) on some"
7787 eqawarn " architectures/operating systems. A bug should be filed at"
7788 @@ -399,15 +407,15 @@ install_qa_check() {
7789 eqawarn " Note: Bugs should be filed for the respective maintainers"
7790 eqawarn " of the package in question and not hardened@g.o."
7791 eqawarn "${f}"
7792 - vecho -ne '\n'
7793 + __vecho -ne '\n'
7794 die_msg="${die_msg} execstacks"
7795 sleep 1
7796 fi
7797
7798 # Check for files built without respecting LDFLAGS
7799 if [[ "${LDFLAGS}" == *,--hash-style=gnu* ]] && \
7800 - ! has binchecks ${RESTRICT} ; then
7801 - f=$(scanelf -qyRF '%k %p' -k .hash "${ED}" | sed -e "s:\.hash ::")
7802 + ! has binchecks ${RESTRICT} ; then
7803 + f=$(scanelf -qyRF '#k%p' -k .hash "${ED}")
7804 if [[ -n ${f} ]] ; then
7805 echo "${f}" > "${T}"/scanelf-ignored-LDFLAGS.log
7806 if [ "${QA_STRICT_FLAGS_IGNORED-unset}" = unset ] ; then
7807 @@ -421,11 +429,11 @@ install_qa_check() {
7808 -i "${T}"/scanelf-ignored-LDFLAGS.log
7809 f=$(<"${T}"/scanelf-ignored-LDFLAGS.log)
7810 if [[ -n ${f} ]] ; then
7811 - vecho -ne '\n'
7812 + __vecho -ne '\n'
7813 eqawarn "${BAD}QA Notice: Files built without respecting LDFLAGS have been detected${NORMAL}"
7814 eqawarn " Please include the following list of files in your report:"
7815 eqawarn "${f}"
7816 - vecho -ne '\n'
7817 + __vecho -ne '\n'
7818 sleep 1
7819 else
7820 rm -f "${T}"/scanelf-ignored-LDFLAGS.log
7821 @@ -442,7 +450,7 @@ install_qa_check() {
7822 # Check for shared libraries lacking SONAMEs
7823 qa_var="QA_SONAME_${ARCH/-/_}"
7824 eval "[[ -n \${!qa_var} ]] && QA_SONAME=(\"\${${qa_var}[@]}\")"
7825 - f=$(scanelf -ByF '%S %p' "${ED}"{,usr/}lib*/lib*.so* | gawk '$2 == "" { print }' | sed -e "s:^[[:space:]]${ED}:/:")
7826 + f=$(scanelf -ByF '%S %p' "${ED}"{,usr/}lib*/lib*.so* | awk '$2 == "" { print }' | sed -e "s:^[[:space:]]${ED}:/:")
7827 if [[ -n ${f} ]] ; then
7828 echo "${f}" > "${T}"/scanelf-missing-SONAME.log
7829 if [[ "${QA_STRICT_SONAME-unset}" == unset ]] ; then
7830 @@ -463,10 +471,10 @@ install_qa_check() {
7831 sed -e "/^\$/d" -i "${T}"/scanelf-missing-SONAME.log
7832 f=$(<"${T}"/scanelf-missing-SONAME.log)
7833 if [[ -n ${f} ]] ; then
7834 - vecho -ne '\n'
7835 + __vecho -ne '\n'
7836 eqawarn "QA Notice: The following shared libraries lack a SONAME"
7837 eqawarn "${f}"
7838 - vecho -ne '\n'
7839 + __vecho -ne '\n'
7840 sleep 1
7841 else
7842 rm -f "${T}"/scanelf-missing-SONAME.log
7843 @@ -476,7 +484,7 @@ install_qa_check() {
7844 # Check for shared libraries lacking NEEDED entries
7845 qa_var="QA_DT_NEEDED_${ARCH/-/_}"
7846 eval "[[ -n \${!qa_var} ]] && QA_DT_NEEDED=(\"\${${qa_var}[@]}\")"
7847 - f=$(scanelf -ByF '%n %p' "${ED}"{,usr/}lib*/lib*.so* | gawk '$2 == "" { print }' | sed -e "s:^[[:space:]]${ED}:/:")
7848 + f=$(scanelf -ByF '%n %p' "${ED}"{,usr/}lib*/lib*.so* | awk '$2 == "" { print }' | sed -e "s:^[[:space:]]${ED}:/:")
7849 if [[ -n ${f} ]] ; then
7850 echo "${f}" > "${T}"/scanelf-missing-NEEDED.log
7851 if [[ "${QA_STRICT_DT_NEEDED-unset}" == unset ]] ; then
7852 @@ -497,10 +505,10 @@ install_qa_check() {
7853 sed -e "/^\$/d" -i "${T}"/scanelf-missing-NEEDED.log
7854 f=$(<"${T}"/scanelf-missing-NEEDED.log)
7855 if [[ -n ${f} ]] ; then
7856 - vecho -ne '\n'
7857 + __vecho -ne '\n'
7858 eqawarn "QA Notice: The following shared libraries lack NEEDED entries"
7859 eqawarn "${f}"
7860 - vecho -ne '\n'
7861 + __vecho -ne '\n'
7862 sleep 1
7863 else
7864 rm -f "${T}"/scanelf-missing-NEEDED.log
7865 @@ -545,14 +553,13 @@ install_qa_check() {
7866 die "Unsafe files found in \${D}. Portage will not install them."
7867 fi
7868
7869 - if [[ -d ${D}/${D} ]] ; then
7870 - declare -i INSTALLTOD=0
7871 - for i in $(find "${D}/${D}/"); do
7872 - eqawarn "QA Notice: /${i##${D}/${D}} installed in \${D}/\${D}"
7873 + if [[ -d ${D%/}${D} ]] ; then
7874 + local -i INSTALLTOD=0
7875 + while read -r -d $'\0' i ; do
7876 + eqawarn "QA Notice: /${i##${D%/}${D}} installed in \${D}/\${D}"
7877 ((INSTALLTOD++))
7878 - done
7879 - die "Aborting due to QA concerns: ${INSTALLTOD} files installed in ${D}/${D}"
7880 - unset INSTALLTOD
7881 + done < <(find "${D%/}${D}" -print0)
7882 + die "Aborting due to QA concerns: ${INSTALLTOD} files installed in ${D%/}${D}"
7883 fi
7884
7885 # Sanity check syntax errors in init.d scripts
7886 @@ -563,10 +570,31 @@ install_qa_check() {
7887 [[ -L ${i} ]] && continue
7888 # if empty conf.d/init.d dir exists (baselayout), then i will be "/etc/conf.d/*" and not exist
7889 [[ ! -e ${i} ]] && continue
7890 + if [[ ${d} == /etc/init.d && ${i} != *.sh ]] ; then
7891 + # skip non-shell-script for bug #451386
7892 + [[ $(head -n1 "${i}") =~ ^#!.*[[:space:]/](runscript|sh)$ ]] || continue
7893 + fi
7894 bash -n "${i}" || die "The init.d file has syntax errors: ${i}"
7895 done
7896 done
7897
7898 + local checkbashisms=$(type -P checkbashisms)
7899 + if [[ -n ${checkbashisms} ]] ; then
7900 + for d in /etc/init.d ; do
7901 + [[ -d ${ED}${d} ]] || continue
7902 + for i in "${ED}${d}"/* ; do
7903 + [[ -e ${i} ]] || continue
7904 + [[ -L ${i} ]] && continue
7905 + f=$("${checkbashisms}" -f "${i}" 2>&1)
7906 + [[ $? != 0 && -n ${f} ]] || continue
7907 + eqawarn "QA Notice: shell script appears to use non-POSIX feature(s):"
7908 + while read -r ;
7909 + do eqawarn " ${REPLY}"
7910 + done <<< "${f//${ED}}"
7911 + done
7912 + done
7913 + fi
7914 +
7915 # Look for leaking LDFLAGS into pkg-config files
7916 f=$(egrep -sH '^Libs.*-Wl,(-O[012]|--hash-style)' "${ED}"/usr/*/pkgconfig/*.pc)
7917 if [[ -n ${f} ]] ; then
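The checkbashisms loop added above only runs when the tool is installed and
only warns; it never fails the build. A hedged Python sketch of the same check
for a single script (the -f flag usage is taken from the hunk above):

    import subprocess

    def checkbashisms_warnings(path):
        # Return checkbashisms' combined output when it flags non-POSIX
        # constructs, or an empty string when the script is clean or the
        # tool is unavailable.
        try:
            subprocess.check_output(
                ["checkbashisms", "-f", path], stderr=subprocess.STDOUT)
        except OSError:
            return ""
        except subprocess.CalledProcessError as e:
            return e.output.decode("utf-8", "replace")
        return ""
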
7918 @@ -577,17 +605,16 @@ install_qa_check() {
7919 # this should help to ensure that all (most?) shared libraries are executable
7920 # and that all libtool scripts / static libraries are not executable
7921 local j
7922 - for i in "${ED}"opt/*/lib{,32,64} \
7923 - "${ED}"lib{,32,64} \
7924 - "${ED}"usr/lib{,32,64} \
7925 - "${ED}"usr/X11R6/lib{,32,64} ; do
7926 + for i in "${ED}"opt/*/lib* \
7927 + "${ED}"lib* \
7928 + "${ED}"usr/lib* ; do
7929 [[ ! -d ${i} ]] && continue
7930
7931 for j in "${i}"/*.so.* "${i}"/*.so ; do
7932 [[ ! -e ${j} ]] && continue
7933 [[ -L ${j} ]] && continue
7934 [[ -x ${j} ]] && continue
7935 - vecho "making executable: ${j#${ED}}"
7936 + __vecho "making executable: ${j#${ED}}"
7937 chmod +x "${j}"
7938 done
7939
7940 @@ -595,7 +622,7 @@ install_qa_check() {
7941 [[ ! -e ${j} ]] && continue
7942 [[ -L ${j} ]] && continue
7943 [[ ! -x ${j} ]] && continue
7944 - vecho "removing executable bit: ${j#${ED}}"
7945 + __vecho "removing executable bit: ${j#${ED}}"
7946 chmod -x "${j}"
7947 done
7948
7949 @@ -604,7 +631,7 @@ install_qa_check() {
7950 [[ ! -L ${j} ]] && continue
7951 linkdest=$(readlink "${j}")
7952 if [[ ${linkdest} == /* ]] ; then
7953 - vecho -ne '\n'
7954 + __vecho -ne '\n'
7955 eqawarn "QA Notice: Found an absolute symlink in a library directory:"
7956 eqawarn " ${j#${D}} -> ${linkdest}"
7957 eqawarn " It should be a relative symlink if in the same directory"
7958 @@ -613,8 +640,8 @@ install_qa_check() {
7959 done
7960 done
7961
7962 - # When installing static libraries into /usr/lib and shared libraries into
7963 - # /lib, we have to make sure we have a linker script in /usr/lib along side
7964 + # When installing static libraries into /usr/lib and shared libraries into
7965 + # /lib, we have to make sure we have a linker script in /usr/lib along side
7966 # the static library, or gcc will utilize the static lib when linking :(.
7967 # http://bugs.gentoo.org/4411
7968 abort="no"
7969 @@ -624,7 +651,7 @@ install_qa_check() {
7970 if [[ ! -e ${s} ]] ; then
7971 s=${s%usr/*}${s##*/usr/}
7972 if [[ -e ${s} ]] ; then
7973 - vecho -ne '\n'
7974 + __vecho -ne '\n'
7975 eqawarn "QA Notice: Missing gen_usr_ldscript for ${s##*/}"
7976 abort="yes"
7977 fi
7978 @@ -635,11 +662,11 @@ install_qa_check() {
7979 # Make sure people don't store libtool files or static libs in /lib
7980 f=$(ls "${ED}"lib*/*.{a,la} 2>/dev/null)
7981 if [[ -n ${f} ]] ; then
7982 - vecho -ne '\n'
7983 + __vecho -ne '\n'
7984 eqawarn "QA Notice: Excessive files found in the / partition"
7985 eqawarn "${f}"
7986 - vecho -ne '\n'
7987 - die "static archives (*.a) and libtool library files (*.la) do not belong in /"
7988 + __vecho -ne '\n'
7989 + die "static archives (*.a) and libtool library files (*.la) belong in /usr/lib*, not /lib*"
7990 fi
7991
7992 # Verify that the libtool files don't contain bogus $D entries.
7993 @@ -647,7 +674,7 @@ install_qa_check() {
7994 for a in "${ED}"usr/lib*/*.la ; do
7995 s=${a##*/}
7996 if grep -qs "${ED}" "${a}" ; then
7997 - vecho -ne '\n'
7998 + __vecho -ne '\n'
7999 eqawarn "QA Notice: ${s} appears to contain PORTAGE_TMPDIR paths"
8000 abort="yes"
8001 fi
8002 @@ -688,6 +715,8 @@ install_qa_check() {
8003 ": warning: reference to local variable .* returned"
8004 ": warning: returning reference to temporary"
8005 ": warning: function returns address of local variable"
8006 + ": warning: .*\\[-Wsizeof-pointer-memaccess\\]"
8007 + ": warning: .*\\[-Waggressive-loop-optimizations\\]"
8008 # this may be valid code :/
8009 #": warning: multi-character character constant"
8010 # need to check these two ...
8011 @@ -726,18 +755,19 @@ install_qa_check() {
8012 eerror " with the maintaining herd of the package."
8013 eerror
8014 else
8015 - vecho -ne '\n'
8016 + __vecho -ne '\n'
8017 eqawarn "QA Notice: Package triggers severe warnings which indicate that it"
8018 eqawarn " may exhibit random runtime failures."
8019 eqawarn "${f}"
8020 - vecho -ne '\n'
8021 + __vecho -ne '\n'
8022 fi
8023 fi
8024 done
8025 local cat_cmd=cat
8026 [[ $PORTAGE_LOG_FILE = *.gz ]] && cat_cmd=zcat
8027 [[ $reset_debug = 1 ]] && set -x
8028 - f=$($cat_cmd "${PORTAGE_LOG_FILE}" | \
8029 + # Use safe cwd, avoiding unsafe import for bug #469338.
8030 + f=$(cd "${PORTAGE_PYM_PATH}" ; $cat_cmd "${PORTAGE_LOG_FILE}" | \
8031 "${PORTAGE_PYTHON:-/usr/bin/python}" "$PORTAGE_BIN_PATH"/check-implicit-pointer-usage.py || die "check-implicit-pointer-usage.py failed")
8032 if [[ -n ${f} ]] ; then
8033
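The cd "${PORTAGE_PYM_PATH}" added above (and repeated at the other python-helper call sites in this patch) is the "safe cwd" fix referenced by bug #469338: the helper is launched with its working directory pinned to Portage's own module tree rather than whatever $PWD happens to be, so the interpreter does not pick modules up from an untrusted current directory (the in-line comment calls this an unsafe import). The shape of the call, with hypothetical paths:

    PORTAGE_PYM_PATH=/usr/lib/portage/pym     # hypothetical install locations
    PORTAGE_BIN_PATH=/usr/lib/portage/bin
    log=/var/tmp/portage/sys-apps/foo-1.0/temp/build.log
    f=$(cd "${PORTAGE_PYM_PATH}" && cat "${log}" | \
        "${PORTAGE_PYTHON:-/usr/bin/python}" \
        "${PORTAGE_BIN_PATH}/check-implicit-pointer-usage.py")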
8034 @@ -763,11 +793,11 @@ install_qa_check() {
8035 eerror " with the maintaining herd of the package."
8036 eerror
8037 else
8038 - vecho -ne '\n'
8039 + __vecho -ne '\n'
8040 eqawarn "QA Notice: Package triggers severe warnings which indicate that it"
8041 eqawarn " will almost certainly crash on 64bit architectures."
8042 eqawarn "${f}"
8043 - vecho -ne '\n'
8044 + __vecho -ne '\n'
8045 fi
8046
8047 fi
8048 @@ -793,32 +823,42 @@ install_qa_check() {
8049 [[ -x /usr/bin/file && -x /usr/bin/find ]] && \
8050 [[ -n ${MULTILIB_STRICT_DIRS} && -n ${MULTILIB_STRICT_DENY} ]]
8051 then
8052 - local abort=no dir file firstrun=yes
8053 + rm -f "${T}/multilib-strict.log"
8054 + local abort=no dir file
8055 MULTILIB_STRICT_EXEMPT=$(echo ${MULTILIB_STRICT_EXEMPT} | sed -e 's:\([(|)]\):\\\1:g')
8056 for dir in ${MULTILIB_STRICT_DIRS} ; do
8057 [[ -d ${ED}/${dir} ]] || continue
8058 for file in $(find ${ED}/${dir} -type f | grep -v "^${ED}/${dir}/${MULTILIB_STRICT_EXEMPT}"); do
8059 if file ${file} | egrep -q "${MULTILIB_STRICT_DENY}" ; then
8060 - if [[ ${firstrun} == yes ]] ; then
8061 - echo "Files matching a file type that is not allowed:"
8062 - firstrun=no
8063 - fi
8064 - abort=yes
8065 - echo " ${file#${ED}//}"
8066 + echo "${file#${ED}//}" >> "${T}/multilib-strict.log"
8067 fi
8068 done
8069 done
8070 - [[ ${abort} == yes ]] && die "multilib-strict check failed!"
8071 - fi
8072
8073 - # ensure packages don't install systemd units automagically
8074 - if ! has systemd ${INHERITED} && \
8075 - [[ -d "${ED}"/lib/systemd/system ]]
8076 - then
8077 - eqawarn "QA Notice: package installs systemd unit files (/lib/systemd/system)"
8078 - eqawarn " but does not inherit systemd.eclass."
8079 - has stricter ${FEATURES} \
8080 - && die "install aborted due to missing inherit of systemd.eclass"
8081 + if [[ -s ${T}/multilib-strict.log ]] ; then
8082 + if [[ ${#QA_MULTILIB_PATHS[@]} -eq 1 ]] ; then
8083 + local shopts=$-
8084 + set -o noglob
8085 + QA_MULTILIB_PATHS=(${QA_MULTILIB_PATHS})
8086 + set +o noglob
8087 + set -${shopts}
8088 + fi
8089 + if [ "${QA_STRICT_MULTILIB_PATHS-unset}" = unset ] ; then
8090 + for x in "${QA_MULTILIB_PATHS[@]}" ; do
8091 + sed -e "s#^${x#/}\$##" -i "${T}/multilib-strict.log"
8092 + done
8093 + sed -e "/^\$/d" -i "${T}/multilib-strict.log"
8094 + fi
8095 + if [[ -s ${T}/multilib-strict.log ]] ; then
8096 + abort=yes
8097 + echo "Files matching a file type that is not allowed:"
8098 + while read -r ; do
8099 + echo " ${REPLY}"
8100 + done < "${T}/multilib-strict.log"
8101 + fi
8102 + fi
8103 +
8104 + [[ ${abort} == yes ]] && die "multilib-strict check failed!"
8105 fi
8106 }
8107
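The multilib-strict check above no longer dies on the first hit: offending files are collected in ${T}/multilib-strict.log, entries matching one of the QA_MULTILIB_PATHS regexes are blanked out with sed unless QA_STRICT_MULTILIB_PATHS is set, and only what survives the filter aborts the merge. (The set -o noglob block is just a way to word-split a scalar QA_MULTILIB_PATHS into an array without glob-expanding the patterns.) A standalone sketch of the filtering step, with hypothetical file names and exemptions:

    T=$(mktemp -d)
    printf '%s\n' usr/lib/libfoo.so.1 usr/lib/foo/private-plugin.so \
        > "${T}/multilib-strict.log"
    QA_MULTILIB_PATHS=( "usr/lib/foo/.*" )    # regexes relative to ${ED}
    for x in "${QA_MULTILIB_PATHS[@]}"; do
        sed -e "s#^${x#/}\$##" -i "${T}/multilib-strict.log"
    done
    sed -e "/^\$/d" -i "${T}/multilib-strict.log"
    cat "${T}/multilib-strict.log"    # only usr/lib/libfoo.so.1 remains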
8108 @@ -851,16 +891,6 @@ install_qa_check_prefix() {
8109 # all further checks rely on ${ED} existing
8110 [[ -d ${ED} ]] || return
8111
8112 - # this does not really belong here, but it's closely tied to
8113 - # the code below; many runscripts generate positives here, and we
8114 - # know they don't work (bug #196294) so as long as that one
8115 - # remains an issue, simply remove them as they won't work
8116 - # anyway, avoid etc/init.d/functions.sh from being thrown away
8117 - if [[ ( -d "${ED}"/etc/conf.d || -d "${ED}"/etc/init.d ) && ! -f "${ED}"/etc/init.d/functions.sh ]] ; then
8118 - ewarn "removed /etc/init.d and /etc/conf.d directories until bug #196294 has been resolved"
8119 - rm -Rf "${ED}"/etc/{conf,init}.d
8120 - fi
8121 -
8122 # check shebangs, bug #282539
8123 rm -f "${T}"/non-prefix-shebangs-errs
8124 local WHITELIST=" /usr/bin/env "
8125 @@ -952,7 +982,7 @@ install_mask() {
8126 local no_inst
8127 for no_inst in ${install_mask}; do
8128 set +o noglob
8129 - quiet_mode || einfo "Removing ${no_inst}"
8130 + __quiet_mode || einfo "Removing ${no_inst}"
8131 # normal stuff
8132 rm -Rf "${root}"/${no_inst} >&/dev/null
8133
8134 @@ -971,8 +1001,9 @@ preinst_mask() {
8135 return 1
8136 fi
8137
8138 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
8139 - case "$EAPI" in 0|1|2) local ED=${D} ;; esac
8140 + if ! ___eapi_has_prefix_variables; then
8141 + local ED=${D}
8142 + fi
8143
8144 # Make sure $PWD is not ${D} so that we don't leave gmon.out files
8145 # in there in case any tools were built with -pg in CFLAGS.
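preinst_mask is the first of many places in this patch where the inline test (FEATURES containing force-prefix, or an EAPI newer than 2) is replaced by the ___eapi_has_prefix_variables predicate; the ___eapi_* helpers are defined in bin/eapi.sh. A sketch consistent with the inline check being replaced, offered as illustration rather than as the exact eapi.sh code:

    ___eapi_has_prefix_variables() {
        [[ ! ${1-${EAPI-0}} =~ ^(0|1|2)$ || " ${FEATURES} " == *" force-prefix "* ]]
    }

With that in place every caller collapses to the same three lines seen above: if the EAPI has no prefix variables, shadow ED (and EPREFIX where needed) locally with the unprefixed values.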
8146 @@ -1000,8 +1031,9 @@ preinst_sfperms() {
8147 return 1
8148 fi
8149
8150 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
8151 - case "$EAPI" in 0|1|2) local ED=${D} ;; esac
8152 + if ! ___eapi_has_prefix_variables; then
8153 + local ED=${D}
8154 + fi
8155
8156 # Smart FileSystem Permissions
8157 if has sfperms $FEATURES; then
8158 @@ -1039,8 +1071,9 @@ preinst_suid_scan() {
8159 return 1
8160 fi
8161
8162 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
8163 - case "$EAPI" in 0|1|2) local ED=${D} ;; esac
8164 + if ! ___eapi_has_prefix_variables; then
8165 + local ED=${D}
8166 + fi
8167
8168 # total suid control.
8169 if has suidctl $FEATURES; then
8170 @@ -1050,19 +1083,19 @@ preinst_suid_scan() {
8171 # to files outside of the sandbox, but this
8172 # can easly be bypassed using the addwrite() function
8173 addwrite "${sfconf}"
8174 - vecho ">>> Performing suid scan in ${ED}"
8175 + __vecho ">>> Performing suid scan in ${ED}"
8176 for i in $(find "${ED}" -type f \( -perm -4000 -o -perm -2000 \) ); do
8177 if [ -s "${sfconf}" ]; then
8178 install_path=/${i#${ED}}
8179 if grep -q "^${install_path}\$" "${sfconf}" ; then
8180 - vecho "- ${install_path} is an approved suid file"
8181 + __vecho "- ${install_path} is an approved suid file"
8182 else
8183 - vecho ">>> Removing sbit on non registered ${install_path}"
8184 + __vecho ">>> Removing sbit on non registered ${install_path}"
8185 for x in 5 4 3 2 1 0; do sleep 0.25 ; done
8186 ls_ret=$(ls -ldh "${i}")
8187 chmod ugo-s "${i}"
8188 grep "^#${install_path}$" "${sfconf}" > /dev/null || {
8189 - vecho ">>> Appending commented out entry to ${sfconf} for ${PF}"
8190 + __vecho ">>> Appending commented out entry to ${sfconf} for ${PF}"
8191 echo "## ${ls_ret%${ED}*}${install_path}" >> "${sfconf}"
8192 echo "#${install_path}" >> "${sfconf}"
8193 # no delwrite() eh?
8194 @@ -1070,7 +1103,7 @@ preinst_suid_scan() {
8195 }
8196 fi
8197 else
8198 - vecho "suidctl feature set but you are lacking a ${sfconf}"
8199 + __vecho "suidctl feature set but you are lacking a ${sfconf}"
8200 fi
8201 done
8202 fi
8203 @@ -1082,34 +1115,35 @@ preinst_selinux_labels() {
8204 return 1
8205 fi
8206 if has selinux ${FEATURES}; then
8207 - # SELinux file labeling (needs to always be last in dyn_preinst)
8208 + # SELinux file labeling (needs to execute after preinst)
8209 # only attempt to label if setfiles is executable
8210 # and 'context' is available on selinuxfs.
8211 if [ -f /selinux/context -o -f /sys/fs/selinux/context ] && \
8212 [ -x /usr/sbin/setfiles -a -x /usr/sbin/selinuxconfig ]; then
8213 - vecho ">>> Setting SELinux security labels"
8214 + __vecho ">>> Setting SELinux security labels"
8215 (
8216 eval "$(/usr/sbin/selinuxconfig)" || \
8217 die "Failed to determine SELinux policy paths.";
8218 -
8219 +
8220 addwrite /selinux/context
8221 addwrite /sys/fs/selinux/context
8222 -
8223 +
8224 /usr/sbin/setfiles "${file_contexts_path}" -r "${D}" "${D}"
8225 ) || die "Failed to set SELinux security labels."
8226 else
8227 # nonfatal, since merging can happen outside a SE kernel
8228 # like during a recovery situation
8229 - vecho "!!! Unable to set SELinux security labels"
8230 + __vecho "!!! Unable to set SELinux security labels"
8231 fi
8232 fi
8233 }
8234
8235 -dyn_package() {
8236 +__dyn_package() {
8237 local PROOT
8238
8239 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
8240 - case "$EAPI" in 0|1|2) local EPREFIX= ED=${D} ;; esac
8241 + if ! ___eapi_has_prefix_variables; then
8242 + local EPREFIX= ED=${D}
8243 + fi
8244
8245 # Make sure $PWD is not ${D} so that we don't leave gmon.out files
8246 # in there in case any tools were built with -pg in CFLAGS.
8247 @@ -1132,6 +1166,7 @@ dyn_package() {
8248
8249 local tar_options=""
8250 [[ $PORTAGE_VERBOSE = 1 ]] && tar_options+=" -v"
8251 + has xattr ${FEATURES} && [[ $(tar --help 2> /dev/null) == *--xattrs* ]] && tar_options+=" --xattrs"
8252 # Sandbox is disabled in case the user wants to use a symlink
8253 # for $PKGDIR and/or $PKGDIR/All.
8254 export SANDBOX_ON="0"
8255 @@ -1141,7 +1176,7 @@ dyn_package() {
8256 tar $tar_options -cf - $PORTAGE_BINPKG_TAR_OPTS -C "${PROOT}" . | \
8257 $PORTAGE_BZIP2_COMMAND -c > "$PORTAGE_BINPKG_TMPFILE"
8258 assert "failed to pack binary package: '$PORTAGE_BINPKG_TMPFILE'"
8259 - PYTHONPATH=${PORTAGE_PYM_PATH}${PYTHONPATH:+:}${PYTHONPATH} \
8260 + PYTHONPATH=${PORTAGE_PYTHONPATH:-${PORTAGE_PYM_PATH}} \
8261 "${PORTAGE_PYTHON:-/usr/bin/python}" "$PORTAGE_BIN_PATH"/xpak-helper.py recompose \
8262 "$PORTAGE_BINPKG_TMPFILE" "$PORTAGE_BUILDDIR/build-info"
8263 if [ $? -ne 0 ]; then
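The single line added to the packing phase above enables extended-attribute preservation only when FEATURES=xattr is set and the installed tar actually advertises --xattrs in its help output, so older or non-GNU tar keeps working unchanged. The same capability probe works standalone (has is stubbed here for illustration):

    has() { local n=$1 e; shift; for e; do [[ ${e} == "${n}" ]] && return 0; done; return 1; }
    FEATURES="sandbox xattr"    # hypothetical
    tar_options=""
    has xattr ${FEATURES} && [[ $(tar --help 2>/dev/null) == *--xattrs* ]] && \
        tar_options+=" --xattrs"
    echo "selected tar options:${tar_options}"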
8264 @@ -1158,7 +1193,7 @@ dyn_package() {
8265 fi
8266 [ -n "${md5_hash}" ] && \
8267 echo ${md5_hash} > "${PORTAGE_BUILDDIR}"/build-info/BINPKGMD5
8268 - vecho ">>> Done."
8269 + __vecho ">>> Done."
8270
8271 # cleanup our temp tree
8272 [[ -n ${PKG_INSTALL_MASK} ]] && rm -rf "${PROOT}"
8273 @@ -1167,8 +1202,8 @@ dyn_package() {
8274 die "Failed to create $PORTAGE_BUILDDIR/.packaged"
8275 }
8276
8277 -dyn_spec() {
8278 - local sources_dir=/usr/src/rpm/SOURCES
8279 +__dyn_spec() {
8280 + local sources_dir=${T}/rpmbuild/SOURCES
8281 mkdir -p "${sources_dir}"
8282 declare -a tar_args=("${EBUILD}")
8283 [[ -d ${FILESDIR} ]] && tar_args=("${EBUILD}" "${FILESDIR}")
8284 @@ -1181,10 +1216,9 @@ Summary: ${DESCRIPTION}
8285 Name: ${PN}
8286 Version: ${PV}
8287 Release: ${PR}
8288 -Copyright: GPL
8289 +License: GPL
8290 Group: portage/${CATEGORY}
8291 Source: ${PF}.tar.gz
8292 -Buildroot: ${D}
8293 %description
8294 ${DESCRIPTION}
8295
8296 @@ -1205,18 +1239,18 @@ __END1__
8297
8298 }
8299
8300 -dyn_rpm() {
8301 -
8302 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
8303 - case "$EAPI" in 0|1|2) local EPREFIX= ;; esac
8304 +__dyn_rpm() {
8305 + if ! ___eapi_has_prefix_variables; then
8306 + local EPREFIX=
8307 + fi
8308
8309 cd "${T}" || die "cd failed"
8310 - local machine_name=$(uname -m)
8311 - local dest_dir=${EPREFIX}/usr/src/rpm/RPMS/${machine_name}
8312 - addwrite ${EPREFIX}/usr/src/rpm
8313 + local machine_name=${CHOST%%-*}
8314 + local dest_dir=${T}/rpmbuild/RPMS/${machine_name}
8315 addwrite "${RPMDIR}"
8316 - dyn_spec
8317 - rpmbuild -bb --clean --rmsource "${PF}.spec" || die "Failed to integrate rpm spec file"
8318 + __dyn_spec
8319 + HOME=${T} \
8320 + rpmbuild -bb --clean --nodeps --rmsource "${PF}.spec" --buildroot "${D}" --target "${CHOST}" || die "Failed to integrate rpm spec file"
8321 install -D "${dest_dir}/${PN}-${PV}-${PR}.${machine_name}.rpm" \
8322 "${RPMDIR}/${CATEGORY}/${PN}-${PV}-${PR}.rpm" || \
8323 die "Failed to move rpm"
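__dyn_rpm now keeps everything inside the build's temporary directory: sources go to ${T}/rpmbuild/SOURCES (see __dyn_spec above), HOME=${T} points rpmbuild at ${T}/rpmbuild, the build root is ${D}, and the target architecture comes from CHOST instead of uname -m. The CHOST expansion, with a hypothetical value:

    CHOST=x86_64-pc-linux-gnu
    machine_name=${CHOST%%-*}                      # -> x86_64
    dest_dir=${T}/rpmbuild/RPMS/${machine_name}

Setting HOME works because current rpm releases default %_topdir to ${HOME}/rpmbuild.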
8324 @@ -1254,7 +1288,7 @@ install_hooks() {
8325 }
8326
8327 if [ -n "${MISC_FUNCTIONS_ARGS}" ]; then
8328 - source_all_bashrcs
8329 + __source_all_bashrcs
8330 [ "$PORTAGE_DEBUG" == "1" ] && set -x
8331 for x in ${MISC_FUNCTIONS_ARGS}; do
8332 ${x}
8333
8334 diff --git a/bin/phase-functions.sh b/bin/phase-functions.sh
8335 index ce251ce..f39a024 100644
8336 --- a/bin/phase-functions.sh
8337 +++ b/bin/phase-functions.sh
8338 @@ -1,5 +1,5 @@
8339 #!/bin/bash
8340 -# Copyright 1999-2012 Gentoo Foundation
8341 +# Copyright 1999-2013 Gentoo Foundation
8342 # Distributed under the terms of the GNU General Public License v2
8343
8344 # Hardcoded bash lists are needed for backward compatibility with
8345 @@ -8,28 +8,31 @@
8346 # when portage is upgrading itself.
8347
8348 PORTAGE_READONLY_METADATA="DEFINED_PHASES DEPEND DESCRIPTION
8349 - EAPI HOMEPAGE INHERITED IUSE REQUIRED_USE KEYWORDS LICENSE
8350 + EAPI HDEPEND HOMEPAGE INHERITED IUSE REQUIRED_USE KEYWORDS LICENSE
8351 PDEPEND PROVIDE RDEPEND REPOSITORY RESTRICT SLOT SRC_URI"
8352
8353 -PORTAGE_READONLY_VARS="D EBUILD EBUILD_PHASE \
8354 +PORTAGE_READONLY_VARS="D EBUILD EBUILD_PHASE EBUILD_PHASE_FUNC \
8355 EBUILD_SH_ARGS ECLASSDIR EMERGE_FROM FILESDIR MERGE_TYPE \
8356 PM_EBUILD_HOOK_DIR \
8357 PORTAGE_ACTUAL_DISTDIR PORTAGE_ARCHLIST PORTAGE_BASHRC \
8358 PORTAGE_BINPKG_FILE PORTAGE_BINPKG_TAR_OPTS PORTAGE_BINPKG_TMPFILE \
8359 - PORTAGE_BIN_PATH PORTAGE_BUILDDIR PORTAGE_BUNZIP2_COMMAND \
8360 + PORTAGE_BIN_PATH PORTAGE_BUILDDIR PORTAGE_BUILD_GROUP \
8361 + PORTAGE_BUILD_USER PORTAGE_BUNZIP2_COMMAND \
8362 PORTAGE_BZIP2_COMMAND PORTAGE_COLORMAP PORTAGE_CONFIGROOT \
8363 PORTAGE_DEBUG PORTAGE_DEPCACHEDIR PORTAGE_EBUILD_EXIT_FILE \
8364 + PORTAGE_ECLASS_LOCATIONS \
8365 PORTAGE_GID PORTAGE_GRPNAME PORTAGE_INST_GID PORTAGE_INST_UID \
8366 - PORTAGE_IPC_DAEMON PORTAGE_IUSE PORTAGE_LOG_FILE \
8367 + PORTAGE_INTERNAL_CALLER PORTAGE_IPC_DAEMON PORTAGE_IUSE PORTAGE_LOG_FILE \
8368 PORTAGE_MUTABLE_FILTERED_VARS PORTAGE_OVERRIDE_EPREFIX \
8369 - PORTAGE_PYM_PATH PORTAGE_PYTHON \
8370 + PORTAGE_PYM_PATH PORTAGE_PYTHON PORTAGE_PYTHONPATH \
8371 PORTAGE_READONLY_METADATA PORTAGE_READONLY_VARS \
8372 - PORTAGE_REPO_NAME PORTAGE_RESTRICT \
8373 + PORTAGE_REPO_NAME PORTAGE_REPOSITORIES PORTAGE_RESTRICT \
8374 PORTAGE_SAVED_READONLY_VARS PORTAGE_SIGPIPE_STATUS \
8375 PORTAGE_TMPDIR PORTAGE_UPDATE_ENV PORTAGE_USERNAME \
8376 - PORTAGE_VERBOSE PORTAGE_WORKDIR_MODE PORTDIR PORTDIR_OVERLAY \
8377 + PORTAGE_VERBOSE PORTAGE_WORKDIR_MODE PORTAGE_XATTR_EXCLUDE \
8378 + PORTDIR \
8379 PROFILE_PATHS REPLACING_VERSIONS REPLACED_BY_VERSION T WORKDIR \
8380 - __PORTAGE_TEST_HARDLINK_LOCKS"
8381 + __PORTAGE_HELPER __PORTAGE_TEST_HARDLINK_LOCKS"
8382
8383 PORTAGE_SAVED_READONLY_VARS="A CATEGORY P PF PN PR PV PVR"
8384
8385 @@ -39,7 +42,7 @@ PORTAGE_SAVED_READONLY_VARS="A CATEGORY P PF PN PR PV PVR"
8386 # it is saved or loaded (any mutations do not persist).
8387 PORTAGE_MUTABLE_FILTERED_VARS="AA HOSTNAME"
8388
8389 -# @FUNCTION: filter_readonly_variables
8390 +# @FUNCTION: __filter_readonly_variables
8391 # @DESCRIPTION: [--filter-sandbox] [--allow-extra-vars]
8392 # Read an environment from stdin and echo to stdout while filtering variables
8393 # with names that are known to cause interference:
8394 @@ -81,14 +84,14 @@ PORTAGE_MUTABLE_FILTERED_VARS="AA HOSTNAME"
8395 # readonly variable cause the shell to exit while executing the "source"
8396 # builtin command. To avoid this problem, this function filters those
8397 # variables out and discards them. See bug #190128.
8398 -filter_readonly_variables() {
8399 +__filter_readonly_variables() {
8400 local x filtered_vars
8401 local readonly_bash_vars="BASHOPTS BASHPID DIRSTACK EUID
8402 FUNCNAME GROUPS PIPESTATUS PPID SHELLOPTS UID"
8403 local bash_misc_vars="BASH BASH_.* COLUMNS COMP_WORDBREAKS HISTCMD
8404 HISTFILE HOSTNAME HOSTTYPE IFS LINENO MACHTYPE OLDPWD
8405 OPTERR OPTIND OSTYPE POSIXLY_CORRECT PS4 PWD RANDOM
8406 - SECONDS SHELL SHLVL _"
8407 + SECONDS SHLVL _"
8408 local filtered_sandbox_vars="SANDBOX_ACTIVE SANDBOX_BASHRC
8409 SANDBOX_DEBUG_LOG SANDBOX_DISABLED SANDBOX_LIB
8410 SANDBOX_LOG SANDBOX_ON"
8411 @@ -100,15 +103,9 @@ filter_readonly_variables() {
8412
8413 # Don't filter/interfere with prefix variables unless they are
8414 # supported by the current EAPI.
8415 - case "${EAPI:-0}" in
8416 - 0|1|2)
8417 - [[ " ${FEATURES} " == *" force-prefix "* ]] && \
8418 - filtered_vars+=" ED EPREFIX EROOT"
8419 - ;;
8420 - *)
8421 - filtered_vars+=" ED EPREFIX EROOT"
8422 - ;;
8423 - esac
8424 + if ___eapi_has_prefix_variables; then
8425 + filtered_vars+=" ED EPREFIX EROOT"
8426 + fi
8427
8428 if has --filter-sandbox $* ; then
8429 filtered_vars="${filtered_vars} SANDBOX_.*"
8430 @@ -140,14 +137,14 @@ filter_readonly_variables() {
8431 "${PORTAGE_PYTHON:-/usr/bin/python}" "${PORTAGE_BIN_PATH}"/filter-bash-environment.py "${filtered_vars}" || die "filter-bash-environment.py failed"
8432 }
8433
8434 -# @FUNCTION: preprocess_ebuild_env
8435 +# @FUNCTION: __preprocess_ebuild_env
8436 # @DESCRIPTION:
8437 # Filter any readonly variables from ${T}/environment, source it, and then
8438 -# save it via save_ebuild_env(). This process should be sufficient to prevent
8439 +# save it via __save_ebuild_env(). This process should be sufficient to prevent
8440 # any stale variables or functions from an arbitrary environment from
8441 # interfering with the current environment. This is useful when an existing
8442 # environment needs to be loaded from a binary or installed package.
8443 -preprocess_ebuild_env() {
8444 +__preprocess_ebuild_env() {
8445 local _portage_filter_opts="--filter-features --filter-locale --filter-path --filter-sandbox"
8446
8447 # If environment.raw is present, this is a signal from the python side,
8448 @@ -156,7 +153,7 @@ preprocess_ebuild_env() {
8449 # Otherwise, we don't need to filter the environment.
8450 [ -f "${T}/environment.raw" ] || return 0
8451
8452 - filter_readonly_variables $_portage_filter_opts < "${T}"/environment \
8453 + __filter_readonly_variables $_portage_filter_opts < "${T}"/environment \
8454 >> "$T/environment.filtered" || return $?
8455 unset _portage_filter_opts
8456 mv "${T}"/environment.filtered "${T}"/environment || return $?
8457 @@ -174,20 +171,20 @@ preprocess_ebuild_env() {
8458 # until we've merged them with our current values.
8459 export SANDBOX_ON=0
8460
8461 - # It's remotely possible that save_ebuild_env() has been overridden
8462 + # It's remotely possible that __save_ebuild_env() has been overridden
8463 # by the above source command. To protect ourselves, we override it
8464 # here with our own version. ${PORTAGE_BIN_PATH} is safe to use here
8465 # because it's already filtered above.
8466 source "${PORTAGE_BIN_PATH}/save-ebuild-env.sh" || exit $?
8467
8468 - # Rely on save_ebuild_env() to filter out any remaining variables
8469 + # Rely on __save_ebuild_env() to filter out any remaining variables
8470 # and functions that could interfere with the current environment.
8471 - save_ebuild_env || exit $?
8472 + __save_ebuild_env || exit $?
8473 >> "$T/environment.success" || exit $?
8474 ) > "${T}/environment.filtered"
8475 local retval
8476 if [ -e "${T}/environment.success" ] ; then
8477 - filter_readonly_variables --filter-features < \
8478 + __filter_readonly_variables --filter-features < \
8479 "${T}/environment.filtered" > "${T}/environment"
8480 retval=$?
8481 else
8482 @@ -197,62 +194,62 @@ preprocess_ebuild_env() {
8483 return ${retval}
8484 }
8485
8486 -ebuild_phase() {
8487 - declare -F "$1" >/dev/null && qa_call $1
8488 +__ebuild_phase() {
8489 + declare -F "$1" >/dev/null && __qa_call $1
8490 }
8491
8492 -ebuild_phase_with_hooks() {
8493 +__ebuild_phase_with_hooks() {
8494 local x phase_name=${1}
8495 for x in {pre_,,post_}${phase_name} ; do
8496 - ebuild_phase ${x}
8497 + __ebuild_phase ${x}
8498 done
8499 }
8500
8501 -dyn_pretend() {
8502 +__dyn_pretend() {
8503 if [[ -e $PORTAGE_BUILDDIR/.pretended ]] ; then
8504 - vecho ">>> It appears that '$PF' is already pretended; skipping."
8505 - vecho ">>> Remove '$PORTAGE_BUILDDIR/.pretended' to force pretend."
8506 + __vecho ">>> It appears that '$PF' is already pretended; skipping."
8507 + __vecho ">>> Remove '$PORTAGE_BUILDDIR/.pretended' to force pretend."
8508 return 0
8509 fi
8510 - ebuild_phase pre_pkg_pretend
8511 - ebuild_phase pkg_pretend
8512 + __ebuild_phase pre_pkg_pretend
8513 + __ebuild_phase pkg_pretend
8514 >> "$PORTAGE_BUILDDIR/.pretended" || \
8515 die "Failed to create $PORTAGE_BUILDDIR/.pretended"
8516 - ebuild_phase post_pkg_pretend
8517 + __ebuild_phase post_pkg_pretend
8518 }
8519
8520 -dyn_setup() {
8521 +__dyn_setup() {
8522 if [[ -e $PORTAGE_BUILDDIR/.setuped ]] ; then
8523 - vecho ">>> It appears that '$PF' is already setup; skipping."
8524 - vecho ">>> Remove '$PORTAGE_BUILDDIR/.setuped' to force setup."
8525 + __vecho ">>> It appears that '$PF' is already setup; skipping."
8526 + __vecho ">>> Remove '$PORTAGE_BUILDDIR/.setuped' to force setup."
8527 return 0
8528 fi
8529 - ebuild_phase pre_pkg_setup
8530 - ebuild_phase pkg_setup
8531 + __ebuild_phase pre_pkg_setup
8532 + __ebuild_phase pkg_setup
8533 >> "$PORTAGE_BUILDDIR/.setuped" || \
8534 die "Failed to create $PORTAGE_BUILDDIR/.setuped"
8535 - ebuild_phase post_pkg_setup
8536 + __ebuild_phase post_pkg_setup
8537 }
8538
8539 -dyn_unpack() {
8540 +__dyn_unpack() {
8541 if [[ -f ${PORTAGE_BUILDDIR}/.unpacked ]] ; then
8542 - vecho ">>> WORKDIR is up-to-date, keeping..."
8543 + __vecho ">>> WORKDIR is up-to-date, keeping..."
8544 return 0
8545 fi
8546 if [ ! -d "${WORKDIR}" ]; then
8547 install -m${PORTAGE_WORKDIR_MODE:-0700} -d "${WORKDIR}" || die "Failed to create dir '${WORKDIR}'"
8548 fi
8549 cd "${WORKDIR}" || die "Directory change failed: \`cd '${WORKDIR}'\`"
8550 - ebuild_phase pre_src_unpack
8551 - vecho ">>> Unpacking source..."
8552 - ebuild_phase src_unpack
8553 + __ebuild_phase pre_src_unpack
8554 + __vecho ">>> Unpacking source..."
8555 + __ebuild_phase src_unpack
8556 >> "$PORTAGE_BUILDDIR/.unpacked" || \
8557 die "Failed to create $PORTAGE_BUILDDIR/.unpacked"
8558 - vecho ">>> Source unpacked in ${WORKDIR}"
8559 - ebuild_phase post_src_unpack
8560 + __vecho ">>> Source unpacked in ${WORKDIR}"
8561 + __ebuild_phase post_src_unpack
8562 }
8563
8564 -dyn_clean() {
8565 +__dyn_clean() {
8566 if [ -z "${PORTAGE_BUILDDIR}" ]; then
8567 echo "Aborting clean phase because PORTAGE_BUILDDIR is unset!"
8568 return 1
8569 @@ -299,7 +296,7 @@ dyn_clean() {
8570 true
8571 }
8572
8573 -abort_handler() {
8574 +__abort_handler() {
8575 local msg
8576 if [ "$2" != "fail" ]; then
8577 msg="${EBUILD}: ${1} aborted; exiting."
8578 @@ -314,37 +311,37 @@ abort_handler() {
8579 trap - SIGINT SIGQUIT
8580 }
8581
8582 -abort_prepare() {
8583 - abort_handler src_prepare $1
8584 +__abort_prepare() {
8585 + __abort_handler src_prepare $1
8586 rm -f "$PORTAGE_BUILDDIR/.prepared"
8587 exit 1
8588 }
8589
8590 -abort_configure() {
8591 - abort_handler src_configure $1
8592 +__abort_configure() {
8593 + __abort_handler src_configure $1
8594 rm -f "$PORTAGE_BUILDDIR/.configured"
8595 exit 1
8596 }
8597
8598 -abort_compile() {
8599 - abort_handler "src_compile" $1
8600 +__abort_compile() {
8601 + __abort_handler "src_compile" $1
8602 rm -f "${PORTAGE_BUILDDIR}/.compiled"
8603 exit 1
8604 }
8605
8606 -abort_test() {
8607 - abort_handler "dyn_test" $1
8608 +__abort_test() {
8609 + __abort_handler "__dyn_test" $1
8610 rm -f "${PORTAGE_BUILDDIR}/.tested"
8611 exit 1
8612 }
8613
8614 -abort_install() {
8615 - abort_handler "src_install" $1
8616 +__abort_install() {
8617 + __abort_handler "src_install" $1
8618 rm -rf "${PORTAGE_BUILDDIR}/image"
8619 exit 1
8620 }
8621
8622 -has_phase_defined_up_to() {
8623 +__has_phase_defined_up_to() {
8624 local phase
8625 for phase in unpack prepare configure compile install; do
8626 has ${phase} ${DEFINED_PHASES} && return 0
8627 @@ -354,89 +351,89 @@ has_phase_defined_up_to() {
8628 return 1
8629 }
8630
8631 -dyn_prepare() {
8632 +__dyn_prepare() {
8633
8634 if [[ -e $PORTAGE_BUILDDIR/.prepared ]] ; then
8635 - vecho ">>> It appears that '$PF' is already prepared; skipping."
8636 - vecho ">>> Remove '$PORTAGE_BUILDDIR/.prepared' to force prepare."
8637 + __vecho ">>> It appears that '$PF' is already prepared; skipping."
8638 + __vecho ">>> Remove '$PORTAGE_BUILDDIR/.prepared' to force prepare."
8639 return 0
8640 fi
8641
8642 if [[ -d $S ]] ; then
8643 cd "${S}"
8644 - elif has $EAPI 0 1 2 3 3_pre2 ; then
8645 + elif ___eapi_has_S_WORKDIR_fallback; then
8646 cd "${WORKDIR}"
8647 - elif [[ -z ${A} ]] && ! has_phase_defined_up_to prepare; then
8648 + elif [[ -z ${A} ]] && ! __has_phase_defined_up_to prepare; then
8649 cd "${WORKDIR}"
8650 else
8651 die "The source directory '${S}' doesn't exist"
8652 fi
8653
8654 - trap abort_prepare SIGINT SIGQUIT
8655 + trap __abort_prepare SIGINT SIGQUIT
8656
8657 - ebuild_phase pre_src_prepare
8658 - vecho ">>> Preparing source in $PWD ..."
8659 - ebuild_phase src_prepare
8660 + __ebuild_phase pre_src_prepare
8661 + __vecho ">>> Preparing source in $PWD ..."
8662 + __ebuild_phase src_prepare
8663 >> "$PORTAGE_BUILDDIR/.prepared" || \
8664 die "Failed to create $PORTAGE_BUILDDIR/.prepared"
8665 - vecho ">>> Source prepared."
8666 - ebuild_phase post_src_prepare
8667 + __vecho ">>> Source prepared."
8668 + __ebuild_phase post_src_prepare
8669
8670 trap - SIGINT SIGQUIT
8671 }
8672
8673 -dyn_configure() {
8674 +__dyn_configure() {
8675
8676 if [[ -e $PORTAGE_BUILDDIR/.configured ]] ; then
8677 - vecho ">>> It appears that '$PF' is already configured; skipping."
8678 - vecho ">>> Remove '$PORTAGE_BUILDDIR/.configured' to force configuration."
8679 + __vecho ">>> It appears that '$PF' is already configured; skipping."
8680 + __vecho ">>> Remove '$PORTAGE_BUILDDIR/.configured' to force configuration."
8681 return 0
8682 fi
8683
8684 if [[ -d $S ]] ; then
8685 cd "${S}"
8686 - elif has $EAPI 0 1 2 3 3_pre2 ; then
8687 + elif ___eapi_has_S_WORKDIR_fallback; then
8688 cd "${WORKDIR}"
8689 - elif [[ -z ${A} ]] && ! has_phase_defined_up_to configure; then
8690 + elif [[ -z ${A} ]] && ! __has_phase_defined_up_to configure; then
8691 cd "${WORKDIR}"
8692 else
8693 die "The source directory '${S}' doesn't exist"
8694 fi
8695
8696 - trap abort_configure SIGINT SIGQUIT
8697 + trap __abort_configure SIGINT SIGQUIT
8698
8699 - ebuild_phase pre_src_configure
8700 + __ebuild_phase pre_src_configure
8701
8702 - vecho ">>> Configuring source in $PWD ..."
8703 - ebuild_phase src_configure
8704 + __vecho ">>> Configuring source in $PWD ..."
8705 + __ebuild_phase src_configure
8706 >> "$PORTAGE_BUILDDIR/.configured" || \
8707 die "Failed to create $PORTAGE_BUILDDIR/.configured"
8708 - vecho ">>> Source configured."
8709 + __vecho ">>> Source configured."
8710
8711 - ebuild_phase post_src_configure
8712 + __ebuild_phase post_src_configure
8713
8714 trap - SIGINT SIGQUIT
8715 }
8716
8717 -dyn_compile() {
8718 +__dyn_compile() {
8719
8720 if [[ -e $PORTAGE_BUILDDIR/.compiled ]] ; then
8721 - vecho ">>> It appears that '${PF}' is already compiled; skipping."
8722 - vecho ">>> Remove '$PORTAGE_BUILDDIR/.compiled' to force compilation."
8723 + __vecho ">>> It appears that '${PF}' is already compiled; skipping."
8724 + __vecho ">>> Remove '$PORTAGE_BUILDDIR/.compiled' to force compilation."
8725 return 0
8726 fi
8727
8728 if [[ -d $S ]] ; then
8729 cd "${S}"
8730 - elif has $EAPI 0 1 2 3 3_pre2 ; then
8731 + elif ___eapi_has_S_WORKDIR_fallback; then
8732 cd "${WORKDIR}"
8733 - elif [[ -z ${A} ]] && ! has_phase_defined_up_to compile; then
8734 + elif [[ -z ${A} ]] && ! __has_phase_defined_up_to compile; then
8735 cd "${WORKDIR}"
8736 else
8737 die "The source directory '${S}' doesn't exist"
8738 fi
8739
8740 - trap abort_compile SIGINT SIGQUIT
8741 + trap __abort_compile SIGINT SIGQUIT
8742
8743 if has distcc $FEATURES && has distcc-pump $FEATURES ; then
8744 if [[ -z $INCLUDE_SERVER_PORT ]] || [[ ! -w $INCLUDE_SERVER_PORT ]] ; then
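Throughout the __dyn_* phase functions, the hard-coded 'has $EAPI 0 1 2 3 3_pre2' list gives way to ___eapi_has_S_WORKDIR_fallback: if ${S} does not exist, only those EAPIs may silently fall back to ${WORKDIR}. A sketch consistent with the list it replaces (the canonical predicate lives in bin/eapi.sh):

    ___eapi_has_S_WORKDIR_fallback() {
        [[ ${1-${EAPI-0}} =~ ^(0|1|2|3|3_pre2)$ ]]
    }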
8745 @@ -445,90 +442,96 @@ dyn_compile() {
8746 fi
8747 fi
8748
8749 - ebuild_phase pre_src_compile
8750 + __ebuild_phase pre_src_compile
8751
8752 - vecho ">>> Compiling source in $PWD ..."
8753 - ebuild_phase src_compile
8754 + __vecho ">>> Compiling source in $PWD ..."
8755 + __ebuild_phase src_compile
8756 >> "$PORTAGE_BUILDDIR/.compiled" || \
8757 die "Failed to create $PORTAGE_BUILDDIR/.compiled"
8758 - vecho ">>> Source compiled."
8759 + __vecho ">>> Source compiled."
8760
8761 - ebuild_phase post_src_compile
8762 + __ebuild_phase post_src_compile
8763
8764 trap - SIGINT SIGQUIT
8765 }
8766
8767 -dyn_test() {
8768 +__dyn_test() {
8769
8770 if [[ -e $PORTAGE_BUILDDIR/.tested ]] ; then
8771 - vecho ">>> It appears that ${PN} has already been tested; skipping."
8772 - vecho ">>> Remove '${PORTAGE_BUILDDIR}/.tested' to force test."
8773 + __vecho ">>> It appears that ${PN} has already been tested; skipping."
8774 + __vecho ">>> Remove '${PORTAGE_BUILDDIR}/.tested' to force test."
8775 return
8776 fi
8777
8778 - if [ "${EBUILD_FORCE_TEST}" == "1" ] ; then
8779 - # If USE came from ${T}/environment then it might not have USE=test
8780 - # like it's supposed to here.
8781 - ! has test ${USE} && export USE="${USE} test"
8782 - fi
8783 -
8784 - trap "abort_test" SIGINT SIGQUIT
8785 + trap "__abort_test" SIGINT SIGQUIT
8786 if [ -d "${S}" ]; then
8787 cd "${S}"
8788 else
8789 cd "${WORKDIR}"
8790 fi
8791
8792 - if ! has test $FEATURES && [ "${EBUILD_FORCE_TEST}" != "1" ]; then
8793 - vecho ">>> Test phase [not enabled]: ${CATEGORY}/${PF}"
8794 - elif has test $RESTRICT; then
8795 + if has test ${RESTRICT} ; then
8796 einfo "Skipping make test/check due to ebuild restriction."
8797 - vecho ">>> Test phase [explicitly disabled]: ${CATEGORY}/${PF}"
8798 + __vecho ">>> Test phase [disabled because of RESTRICT=test]: ${CATEGORY}/${PF}"
8799 +
8800 + # If ${EBUILD_FORCE_TEST} == 1 and FEATURES came from ${T}/environment
8801 + # then it might not have FEATURES=test like it's supposed to here.
8802 + elif [[ ${EBUILD_FORCE_TEST} != 1 ]] && ! has test ${FEATURES} ; then
8803 + __vecho ">>> Test phase [not enabled]: ${CATEGORY}/${PF}"
8804 else
8805 + # If ${EBUILD_FORCE_TEST} == 1 and USE came from ${T}/environment
8806 + # then it might not have USE=test like it's supposed to here.
8807 + if [[ ${EBUILD_FORCE_TEST} == 1 && test =~ ${PORTAGE_IUSE} ]] && \
8808 + ! has test ${USE} ; then
8809 + export USE="${USE} test"
8810 + fi
8811 +
8812 local save_sp=${SANDBOX_PREDICT}
8813 addpredict /
8814 - ebuild_phase pre_src_test
8815 - ebuild_phase src_test
8816 + __ebuild_phase pre_src_test
8817 + __ebuild_phase src_test
8818 >> "$PORTAGE_BUILDDIR/.tested" || \
8819 die "Failed to create $PORTAGE_BUILDDIR/.tested"
8820 - ebuild_phase post_src_test
8821 + __ebuild_phase post_src_test
8822 SANDBOX_PREDICT=${save_sp}
8823 fi
8824
8825 trap - SIGINT SIGQUIT
8826 }
8827
8828 -dyn_install() {
8829 +__dyn_install() {
8830 [ -z "$PORTAGE_BUILDDIR" ] && die "${FUNCNAME}: PORTAGE_BUILDDIR is unset"
8831 if has noauto $FEATURES ; then
8832 rm -f "${PORTAGE_BUILDDIR}/.installed"
8833 elif [[ -e $PORTAGE_BUILDDIR/.installed ]] ; then
8834 - vecho ">>> It appears that '${PF}' is already installed; skipping."
8835 - vecho ">>> Remove '${PORTAGE_BUILDDIR}/.installed' to force install."
8836 + __vecho ">>> It appears that '${PF}' is already installed; skipping."
8837 + __vecho ">>> Remove '${PORTAGE_BUILDDIR}/.installed' to force install."
8838 return 0
8839 fi
8840 - trap "abort_install" SIGINT SIGQUIT
8841 - ebuild_phase pre_src_install
8842 + trap "__abort_install" SIGINT SIGQUIT
8843 + __ebuild_phase pre_src_install
8844
8845 - _x=${ED}
8846 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
8847 - case "$EAPI" in 0|1|2) _x=${D} ;; esac
8848 + if ___eapi_has_prefix_variables; then
8849 + _x=${ED}
8850 + else
8851 + _x=${D}
8852 + fi
8853 rm -rf "${D}"
8854 mkdir -p "${_x}"
8855 unset _x
8856
8857 if [[ -d $S ]] ; then
8858 cd "${S}"
8859 - elif has $EAPI 0 1 2 3 3_pre2 ; then
8860 + elif ___eapi_has_S_WORKDIR_fallback; then
8861 cd "${WORKDIR}"
8862 - elif [[ -z ${A} ]] && ! has_phase_defined_up_to install; then
8863 + elif [[ -z ${A} ]] && ! __has_phase_defined_up_to install; then
8864 cd "${WORKDIR}"
8865 else
8866 die "The source directory '${S}' doesn't exist"
8867 fi
8868
8869 - vecho
8870 - vecho ">>> Install ${PF} into ${D} category ${CATEGORY}"
8871 + __vecho
8872 + __vecho ">>> Install ${PF} into ${D} category ${CATEGORY}"
8873 #our custom version of libtool uses $S and $D to fix
8874 #invalid paths in .la files
8875 export S D
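The reworked __dyn_test above checks RESTRICT=test first, then FEATURES=test / EBUILD_FORCE_TEST, and only force-appends USE=test when 'test' is actually part of PORTAGE_IUSE. A standalone sketch of the gating, with a stubbed has() and hypothetical values:

    has() { local n=$1 e; shift; for e; do [[ ${e} == "${n}" ]] && return 0; done; return 1; }
    RESTRICT="" FEATURES="sandbox" EBUILD_FORCE_TEST=1
    PORTAGE_IUSE='^(debug|test)$' USE="debug"
    if has test ${RESTRICT}; then
        echo "test phase disabled by RESTRICT=test"
    elif [[ ${EBUILD_FORCE_TEST} != 1 ]] && ! has test ${FEATURES}; then
        echo "test phase not enabled"
    else
        if [[ ${EBUILD_FORCE_TEST} == 1 && test =~ ${PORTAGE_IUSE} ]] && ! has test ${USE}; then
            USE="${USE} test"
        fi
        echo "would run src_test with USE='${USE}'"
    fi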
8876 @@ -541,12 +544,12 @@ dyn_install() {
8877 export _E_EXEDESTTREE_=""
8878 export _E_DOCDESTTREE_=""
8879
8880 - ebuild_phase src_install
8881 + __ebuild_phase src_install
8882 >> "$PORTAGE_BUILDDIR/.installed" || \
8883 die "Failed to create $PORTAGE_BUILDDIR/.installed"
8884 - vecho ">>> Completed installing ${PF} into ${D}"
8885 - vecho
8886 - ebuild_phase post_src_install
8887 + __vecho ">>> Completed installing ${PF} into ${D}"
8888 + __vecho
8889 + __ebuild_phase post_src_install
8890
8891 cd "${PORTAGE_BUILDDIR}"/build-info
8892 set -f
8893 @@ -560,10 +563,15 @@ dyn_install() {
8894 if [[ $CATEGORY != virtual ]] ; then
8895 for f in ASFLAGS CBUILD CC CFLAGS CHOST CTARGET CXX \
8896 CXXFLAGS EXTRA_ECONF EXTRA_EINSTALL EXTRA_MAKE \
8897 - LDFLAGS LIBCFLAGS LIBCXXFLAGS ; do
8898 + LDFLAGS LIBCFLAGS LIBCXXFLAGS QA_CONFIGURE_OPTIONS \
8899 + QA_DESKTOP_FILE ; do
8900 x=$(echo -n ${!f})
8901 [[ -n $x ]] && echo "$x" > $f
8902 done
8903 + # whitespace preserved
8904 + for f in QA_AM_MAINTAINER_MODE ; do
8905 + [[ -n ${!f} ]] && echo "${!f}" > $f
8906 + done
8907 fi
8908 echo "${USE}" > USE
8909 echo "${EAPI:-0}" > EAPI
8910 @@ -571,24 +579,22 @@ dyn_install() {
8911 # Save EPREFIX, since it makes it easy to use chpathtool to
8912 # adjust the content of a binary package so that it will
8913 # work in a different EPREFIX from the one is was built for.
8914 - case "${EAPI:-0}" in
8915 - 0|1|2)
8916 - [[ " ${FEATURES} " == *" force-prefix "* ]] && \
8917 - [ -n "${EPREFIX}" ] && echo "${EPREFIX}" > EPREFIX
8918 - ;;
8919 - *)
8920 - [ -n "${EPREFIX}" ] && echo "${EPREFIX}" > EPREFIX
8921 - ;;
8922 - esac
8923 + if ___eapi_has_prefix_variables && [[ -n ${EPREFIX} ]]; then
8924 + echo "${EPREFIX}" > EPREFIX
8925 + fi
8926
8927 set +f
8928
8929 # local variables can leak into the saved environment.
8930 unset f
8931
8932 - save_ebuild_env --exclude-init-phases | filter_readonly_variables \
8933 - --filter-path --filter-sandbox --allow-extra-vars > environment
8934 - assert "save_ebuild_env failed"
8935 + # Use safe cwd, avoiding unsafe import for bug #469338.
8936 + cd "${PORTAGE_PYM_PATH}"
8937 + __save_ebuild_env --exclude-init-phases | __filter_readonly_variables \
8938 + --filter-path --filter-sandbox --allow-extra-vars > \
8939 + "${PORTAGE_BUILDDIR}"/build-info/environment
8940 + assert "__save_ebuild_env failed"
8941 + cd "${PORTAGE_BUILDDIR}"/build-info || die
8942
8943 ${PORTAGE_BZIP2_COMMAND} -f9 environment
8944
8945 @@ -601,15 +607,7 @@ dyn_install() {
8946 trap - SIGINT SIGQUIT
8947 }
8948
8949 -dyn_preinst() {
8950 - if [ -z "${D}" ]; then
8951 - eerror "${FUNCNAME}: D is unset"
8952 - return 1
8953 - fi
8954 - ebuild_phase_with_hooks pkg_preinst
8955 -}
8956 -
8957 -dyn_help() {
8958 +__dyn_help() {
8959 echo
8960 echo "Portage"
8961 echo "Copyright 1999-2010 Gentoo Foundation"
8962 @@ -625,6 +623,7 @@ dyn_help() {
8963 echo " pretend : execute package specific pretend actions"
8964 echo " setup : execute package specific setup actions"
8965 echo " fetch : download source archive(s) and patches"
8966 + echo " nofetch : display special fetch instructions"
8967 echo " digest : create a manifest file for the package"
8968 echo " manifest : create a manifest file for the package"
8969 echo " unpack : unpack sources (auto-dependencies if needed)"
8970 @@ -672,19 +671,18 @@ dyn_help() {
8971 echo
8972 }
8973
8974 -# @FUNCTION: _ebuild_arg_to_phase
8975 +# @FUNCTION: __ebuild_arg_to_phase
8976 # @DESCRIPTION:
8977 # Translate a known ebuild(1) argument into the precise
8978 # name of it's corresponding ebuild phase.
8979 -_ebuild_arg_to_phase() {
8980 - [ $# -ne 2 ] && die "expected exactly 2 args, got $#: $*"
8981 - local eapi=$1
8982 - local arg=$2
8983 +__ebuild_arg_to_phase() {
8984 + [ $# -ne 1 ] && die "expected exactly 1 arg, got $#: $*"
8985 + local arg=$1
8986 local phase_func=""
8987
8988 case "$arg" in
8989 pretend)
8990 - ! has $eapi 0 1 2 3 3_pre2 && \
8991 + ___eapi_has_pkg_pretend && \
8992 phase_func=pkg_pretend
8993 ;;
8994 setup)
8995 @@ -697,11 +695,11 @@ _ebuild_arg_to_phase() {
8996 phase_func=src_unpack
8997 ;;
8998 prepare)
8999 - ! has $eapi 0 1 && \
9000 + ___eapi_has_src_prepare && \
9001 phase_func=src_prepare
9002 ;;
9003 configure)
9004 - ! has $eapi 0 1 && \
9005 + ___eapi_has_src_configure && \
9006 phase_func=src_configure
9007 ;;
9008 compile)
9009 @@ -732,7 +730,7 @@ _ebuild_arg_to_phase() {
9010 return 0
9011 }
9012
9013 -_ebuild_phase_funcs() {
9014 +__ebuild_phase_funcs() {
9015 [ $# -ne 2 ] && die "expected exactly 2 args, got $#: $*"
9016 local eapi=$1
9017 local phase_func=$2
9018 @@ -742,20 +740,20 @@ _ebuild_phase_funcs() {
9019
9020 for x in pkg_nofetch src_unpack src_test ; do
9021 declare -F $x >/dev/null || \
9022 - eval "$x() { _eapi0_$x \"\$@\" ; }"
9023 + eval "$x() { __eapi0_$x \"\$@\" ; }"
9024 done
9025
9026 - case $eapi in
9027 + case "$eapi" in
9028
9029 0|1)
9030
9031 if ! declare -F src_compile >/dev/null ; then
9032 - case $eapi in
9033 + case "$eapi" in
9034 0)
9035 - src_compile() { _eapi0_src_compile "$@" ; }
9036 + src_compile() { __eapi0_src_compile "$@" ; }
9037 ;;
9038 *)
9039 - src_compile() { _eapi1_src_compile "$@" ; }
9040 + src_compile() { __eapi1_src_compile "$@" ; }
9041 ;;
9042 esac
9043 fi
9044 @@ -775,35 +773,35 @@ _ebuild_phase_funcs() {
9045 *)
9046
9047 declare -F src_configure >/dev/null || \
9048 - src_configure() { _eapi2_src_configure "$@" ; }
9049 + src_configure() { __eapi2_src_configure "$@" ; }
9050
9051 declare -F src_compile >/dev/null || \
9052 - src_compile() { _eapi2_src_compile "$@" ; }
9053 + src_compile() { __eapi2_src_compile "$@" ; }
9054
9055 - has $eapi 2 3 3_pre2 || declare -F src_install >/dev/null || \
9056 - src_install() { _eapi4_src_install "$@" ; }
9057 + has $eapi 2 3 || declare -F src_install >/dev/null || \
9058 + src_install() { __eapi4_src_install "$@" ; }
9059
9060 if has $phase_func $default_phases ; then
9061
9062 - _eapi2_pkg_nofetch () { _eapi0_pkg_nofetch "$@" ; }
9063 - _eapi2_src_unpack () { _eapi0_src_unpack "$@" ; }
9064 - _eapi2_src_prepare () { true ; }
9065 - _eapi2_src_test () { _eapi0_src_test "$@" ; }
9066 - _eapi2_src_install () { die "$FUNCNAME is not supported" ; }
9067 + __eapi2_pkg_nofetch () { __eapi0_pkg_nofetch "$@" ; }
9068 + __eapi2_src_unpack () { __eapi0_src_unpack "$@" ; }
9069 + __eapi2_src_prepare () { true ; }
9070 + __eapi2_src_test () { __eapi0_src_test "$@" ; }
9071 + __eapi2_src_install () { die "$FUNCNAME is not supported" ; }
9072
9073 for x in $default_phases ; do
9074 - eval "default_$x() { _eapi2_$x \"\$@\" ; }"
9075 + eval "default_$x() { __eapi2_$x \"\$@\" ; }"
9076 done
9077
9078 - eval "default() { _eapi2_$phase_func \"\$@\" ; }"
9079 + eval "default() { __eapi2_$phase_func \"\$@\" ; }"
9080
9081 - case $eapi in
9082 + case "$eapi" in
9083 2|3)
9084 ;;
9085 *)
9086 - eval "default_src_install() { _eapi4_src_install \"\$@\" ; }"
9087 + eval "default_src_install() { __eapi4_src_install \"\$@\" ; }"
9088 [[ $phase_func = src_install ]] && \
9089 - eval "default() { _eapi4_$phase_func \"\$@\" ; }"
9090 + eval "default() { __eapi4_$phase_func \"\$@\" ; }"
9091 ;;
9092 esac
9093
9094 @@ -825,14 +823,14 @@ _ebuild_phase_funcs() {
9095 esac
9096 }
9097
9098 -ebuild_main() {
9099 +__ebuild_main() {
9100
9101 # Subshell/helper die support (must export for the die helper).
9102 # Since this function is typically executed in a subshell,
9103 # setup EBUILD_MASTER_PID to refer to the current $BASHPID,
9104 # which seems to give the best results when further
9105 # nested subshells call die.
9106 - export EBUILD_MASTER_PID=$BASHPID
9107 + export EBUILD_MASTER_PID=${BASHPID:-$(__bashpid)}
9108 trap 'exit 1' SIGTERM
9109
9110 #a reasonable default for $S
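EBUILD_MASTER_PID now falls back to __bashpid when BASHPID is unset, since BASHPID only exists in bash 4.0 and later. The fallback can be sketched like this (the real helper is defined with Portage's other __-prefixed support functions): a child shell's ${PPID} is the PID of the shell that spawned it.

    __bashpid_sketch() {
        sh -c 'echo ${PPID}'
    }
    ( echo "subshell pid: ${BASHPID:-$(__bashpid_sketch)}" )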
9111 @@ -861,37 +859,39 @@ ebuild_main() {
9112 # respect FEATURES="-ccache".
9113 has ccache $FEATURES || export CCACHE_DISABLE=1
9114
9115 - local phase_func=$(_ebuild_arg_to_phase "$EAPI" "$EBUILD_PHASE")
9116 - [[ -n $phase_func ]] && _ebuild_phase_funcs "$EAPI" "$phase_func"
9117 + local phase_func=$(__ebuild_arg_to_phase "$EBUILD_PHASE")
9118 + [[ -n $phase_func ]] && __ebuild_phase_funcs "$EAPI" "$phase_func"
9119 unset phase_func
9120
9121 - source_all_bashrcs
9122 + __source_all_bashrcs
9123
9124 case ${1} in
9125 nofetch)
9126 - ebuild_phase_with_hooks pkg_nofetch
9127 + __ebuild_phase_with_hooks pkg_nofetch
9128 ;;
9129 - prerm|postrm|postinst|config|info)
9130 + prerm|postrm|preinst|postinst|config|info)
9131 if has "${1}" config info && \
9132 ! declare -F "pkg_${1}" >/dev/null ; then
9133 ewarn "pkg_${1}() is not defined: '${EBUILD##*/}'"
9134 fi
9135 export SANDBOX_ON="0"
9136 if [ "${PORTAGE_DEBUG}" != "1" ] || [ "${-/x/}" != "$-" ]; then
9137 - ebuild_phase_with_hooks pkg_${1}
9138 + __ebuild_phase_with_hooks pkg_${1}
9139 else
9140 set -x
9141 - ebuild_phase_with_hooks pkg_${1}
9142 + __ebuild_phase_with_hooks pkg_${1}
9143 set +x
9144 fi
9145 - if [[ $EBUILD_PHASE == postinst ]] && [[ -n $PORTAGE_UPDATE_ENV ]]; then
9146 + if [[ -n $PORTAGE_UPDATE_ENV ]] ; then
9147 # Update environment.bz2 in case installation phases
9148 # need to pass some variables to uninstallation phases.
9149 - save_ebuild_env --exclude-init-phases | \
9150 - filter_readonly_variables --filter-path \
9151 + # Use safe cwd, avoiding unsafe import for bug #469338.
9152 + cd "${PORTAGE_PYM_PATH}"
9153 + __save_ebuild_env --exclude-init-phases | \
9154 + __filter_readonly_variables --filter-path \
9155 --filter-sandbox --allow-extra-vars \
9156 | ${PORTAGE_BZIP2_COMMAND} -c -f9 > "$PORTAGE_UPDATE_ENV"
9157 - assert "save_ebuild_env failed"
9158 + assert "__save_ebuild_env failed"
9159 fi
9160 ;;
9161 unpack|prepare|configure|compile|test|clean|install)
9162 @@ -917,7 +917,7 @@ ebuild_main() {
9163
9164 x=LIBDIR_$ABI
9165 [ -z "$PKG_CONFIG_PATH" -a -n "$ABI" -a -n "${!x}" ] && \
9166 - export PKG_CONFIG_PATH=/usr/${!x}/pkgconfig
9167 + export PKG_CONFIG_PATH=${EPREFIX}/usr/${!x}/pkgconfig
9168
9169 if has noauto $FEATURES && \
9170 [[ ! -f $PORTAGE_BUILDDIR/.unpacked ]] ; then
9171 @@ -952,24 +952,24 @@ ebuild_main() {
9172 esac
9173
9174 if [ "${PORTAGE_DEBUG}" != "1" ] || [ "${-/x/}" != "$-" ]; then
9175 - dyn_${1}
9176 + __dyn_${1}
9177 else
9178 set -x
9179 - dyn_${1}
9180 + __dyn_${1}
9181 set +x
9182 fi
9183 export SANDBOX_ON="0"
9184 ;;
9185 - help|pretend|setup|preinst)
9186 + help|pretend|setup)
9187 #pkg_setup needs to be out of the sandbox for tmp file creation;
9188 #for example, awking and piping a file in /tmp requires a temp file to be created
9189 #in /etc. If pkg_setup is in the sandbox, both our lilo and apache ebuilds break.
9190 export SANDBOX_ON="0"
9191 if [ "${PORTAGE_DEBUG}" != "1" ] || [ "${-/x/}" != "$-" ]; then
9192 - dyn_${1}
9193 + __dyn_${1}
9194 else
9195 set -x
9196 - dyn_${1}
9197 + __dyn_${1}
9198 set +x
9199 fi
9200 ;;
9201 @@ -979,7 +979,7 @@ ebuild_main() {
9202 export SANDBOX_ON="1"
9203 echo "Unrecognized arg '${1}'"
9204 echo
9205 - dyn_help
9206 + __dyn_help
9207 exit 1
9208 ;;
9209 esac
9210 @@ -987,11 +987,13 @@ ebuild_main() {
9211 # Save the env only for relevant phases.
9212 if ! has "${1}" clean help info nofetch ; then
9213 umask 002
9214 - save_ebuild_env | filter_readonly_variables \
9215 + # Use safe cwd, avoiding unsafe import for bug #469338.
9216 + cd "${PORTAGE_PYM_PATH}"
9217 + __save_ebuild_env | __filter_readonly_variables \
9218 --filter-features > "$T/environment"
9219 - assert "save_ebuild_env failed"
9220 - chown portage:portage "$T/environment" &>/dev/null
9221 - chmod g+w "$T/environment" &>/dev/null
9222 + assert "__save_ebuild_env failed"
9223 + chgrp "${PORTAGE_GRPNAME:-portage}" "$T/environment"
9224 + chmod g+w "$T/environment"
9225 fi
9226 [[ -n $PORTAGE_EBUILD_EXIT_FILE ]] && > "$PORTAGE_EBUILD_EXIT_FILE"
9227 if [[ -n $PORTAGE_IPC_DAEMON ]] ; then
9228
9229 diff --git a/bin/phase-helpers.sh b/bin/phase-helpers.sh
9230 index 946520b..412decb 100644
9231 --- a/bin/phase-helpers.sh
9232 +++ b/bin/phase-helpers.sh
9233 @@ -1,5 +1,5 @@
9234 #!/bin/bash
9235 -# Copyright 1999-2011 Gentoo Foundation
9236 +# Copyright 1999-2013 Gentoo Foundation
9237 # Distributed under the terms of the GNU General Public License v2
9238
9239 export DESTTREE=/usr
9240 @@ -11,6 +11,8 @@ export EXEOPTIONS="-m0755"
9241 export LIBOPTIONS="-m0644"
9242 export DIROPTIONS="-m0755"
9243 export MOPREFIX=${PN}
9244 +# Do not compress files which are smaller than this (in bytes). #169260
9245 +export PORTAGE_DOCOMPRESS_SIZE_LIMIT="128"
9246 declare -a PORTAGE_DOCOMPRESS=( /usr/share/{doc,info,man} )
9247 declare -a PORTAGE_DOCOMPRESS_SKIP=( /usr/share/doc/${PF}/html )
9248
9249 @@ -19,13 +21,14 @@ into() {
9250 export DESTTREE=""
9251 else
9252 export DESTTREE=$1
9253 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
9254 - case "$EAPI" in 0|1|2) local ED=${D} ;; esac
9255 + if ! ___eapi_has_prefix_variables; then
9256 + local ED=${D}
9257 + fi
9258 if [ ! -d "${ED}${DESTTREE}" ]; then
9259 install -d "${ED}${DESTTREE}"
9260 local ret=$?
9261 if [[ $ret -ne 0 ]] ; then
9262 - helpers_die "${FUNCNAME[0]} failed"
9263 + __helpers_die "${FUNCNAME[0]} failed"
9264 return $ret
9265 fi
9266 fi
9267 @@ -37,13 +40,14 @@ insinto() {
9268 export INSDESTTREE=""
9269 else
9270 export INSDESTTREE=$1
9271 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
9272 - case "$EAPI" in 0|1|2) local ED=${D} ;; esac
9273 + if ! ___eapi_has_prefix_variables; then
9274 + local ED=${D}
9275 + fi
9276 if [ ! -d "${ED}${INSDESTTREE}" ]; then
9277 install -d "${ED}${INSDESTTREE}"
9278 local ret=$?
9279 if [[ $ret -ne 0 ]] ; then
9280 - helpers_die "${FUNCNAME[0]} failed"
9281 + __helpers_die "${FUNCNAME[0]} failed"
9282 return $ret
9283 fi
9284 fi
9285 @@ -55,13 +59,14 @@ exeinto() {
9286 export _E_EXEDESTTREE_=""
9287 else
9288 export _E_EXEDESTTREE_="$1"
9289 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
9290 - case "$EAPI" in 0|1|2) local ED=${D} ;; esac
9291 + if ! ___eapi_has_prefix_variables; then
9292 + local ED=${D}
9293 + fi
9294 if [ ! -d "${ED}${_E_EXEDESTTREE_}" ]; then
9295 install -d "${ED}${_E_EXEDESTTREE_}"
9296 local ret=$?
9297 if [[ $ret -ne 0 ]] ; then
9298 - helpers_die "${FUNCNAME[0]} failed"
9299 + __helpers_die "${FUNCNAME[0]} failed"
9300 return $ret
9301 fi
9302 fi
9303 @@ -73,13 +78,14 @@ docinto() {
9304 export _E_DOCDESTTREE_=""
9305 else
9306 export _E_DOCDESTTREE_="$1"
9307 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
9308 - case "$EAPI" in 0|1|2) local ED=${D} ;; esac
9309 + if ! ___eapi_has_prefix_variables; then
9310 + local ED=${D}
9311 + fi
9312 if [ ! -d "${ED}usr/share/doc/${PF}/${_E_DOCDESTTREE_}" ]; then
9313 install -d "${ED}usr/share/doc/${PF}/${_E_DOCDESTTREE_}"
9314 local ret=$?
9315 if [[ $ret -ne 0 ]] ; then
9316 - helpers_die "${FUNCNAME[0]} failed"
9317 + __helpers_die "${FUNCNAME[0]} failed"
9318 return $ret
9319 fi
9320 fi
9321 @@ -112,13 +118,13 @@ libopts() {
9322 }
9323
9324 docompress() {
9325 - has "${EAPI}" 0 1 2 3 && die "'docompress' not supported in this EAPI"
9326 + ___eapi_has_docompress || die "'docompress' not supported in this EAPI"
9327
9328 local f g
9329 if [[ $1 = "-x" ]]; then
9330 shift
9331 for f; do
9332 - f=$(strip_duplicate_slashes "${f}"); f=${f%/}
9333 + f=$(__strip_duplicate_slashes "${f}"); f=${f%/}
9334 [[ ${f:0:1} = / ]] || f="/${f}"
9335 for g in "${PORTAGE_DOCOMPRESS_SKIP[@]}"; do
9336 [[ ${f} = "${g}" ]] && continue 2
9337 @@ -127,7 +133,7 @@ docompress() {
9338 done
9339 else
9340 for f; do
9341 - f=$(strip_duplicate_slashes "${f}"); f=${f%/}
9342 + f=$(__strip_duplicate_slashes "${f}"); f=${f%/}
9343 [[ ${f:0:1} = / ]] || f="/${f}"
9344 for g in "${PORTAGE_DOCOMPRESS[@]}"; do
9345 [[ ${f} = "${g}" ]] && continue 2
9346 @@ -137,29 +143,6 @@ docompress() {
9347 fi
9348 }
9349
9350 -# adds ".keep" files so that dirs aren't auto-cleaned
9351 -keepdir() {
9352 - dodir "$@"
9353 - local x
9354 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
9355 - case "$EAPI" in 0|1|2) local ED=${D} ;; esac
9356 - if [ "$1" == "-R" ] || [ "$1" == "-r" ]; then
9357 - shift
9358 - find "$@" -type d -printf "${ED}%p/.keep_${CATEGORY}_${PN}-${SLOT}\n" \
9359 - | tr "\n" "\0" | \
9360 - while read -r -d $'\0' ; do
9361 - >> "$REPLY" || \
9362 - die "Failed to recursively create .keep files"
9363 - done
9364 - else
9365 - for x in "$@"; do
9366 - >> "${ED}${x}/.keep_${CATEGORY}_${PN}-${SLOT}" || \
9367 - die "Failed to create .keep in ${ED}${x}"
9368 - done
9369 - fi
9370 -}
9371 -
9372 -
9373 useq() {
9374 has $EBUILD_PHASE prerm postrm || eqawarn \
9375 "QA Notice: The 'useq' function is deprecated (replaced by 'use')"
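The inline keepdir definition is dropped from phase-helpers.sh here; Portage provides it as a standalone ebuild helper instead. What any replacement has to preserve is simply the creation of empty .keep_* markers so that otherwise-empty directories survive merging; a minimal sketch of the non-recursive case, assuming the usual ebuild environment variables and helpers:

    keepdir_sketch() {
        local x
        dodir "$@"
        for x in "$@"; do
            >> "${ED}${x}/.keep_${CATEGORY}_${PN}-${SLOT}" || \
                die "Failed to create .keep in ${ED}${x}"
        done
    }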
9376 @@ -174,6 +157,17 @@ usev() {
9377 return 1
9378 }
9379
9380 +if ___eapi_has_usex; then
9381 + usex() {
9382 + if use "$1"; then
9383 + echo "${2-yes}$4"
9384 + else
9385 + echo "${3-no}$5"
9386 + fi
9387 + return 0
9388 + }
9389 +fi
9390 +
9391 use() {
9392 local u=$1
9393 local found=0
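usex, added just above, is only defined when the EAPI supports it (___eapi_has_usex) and simply echoes one of two values, each with an optional suffix, depending on a USE flag. A usage sketch with a stubbed use() and hypothetical flags:

    use() { [[ " ${USE} " == *" $1 "* ]]; }    # stub for illustration only
    usex() { if use "$1"; then echo "${2-yes}$4"; else echo "${3-no}$5"; fi; }
    USE="debug"
    usex debug                       # prints: yes
    usex debug dbg nodbg             # prints: dbg
    usex threads                     # prints: no
    usex debug -Ddebug= '' ON OFF    # prints: -Ddebug=ON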
9394 @@ -194,18 +188,31 @@ use() {
9395 #fi
9396 true
9397
9398 - # Make sure we have this USE flag in IUSE
9399 - elif [[ -n $PORTAGE_IUSE && -n $EBUILD_PHASE ]] ; then
9400 - [[ $u =~ $PORTAGE_IUSE ]] || \
9401 + # Make sure we have this USE flag in IUSE, but exempt binary
9402 + # packages for API consumers like Entropy which do not require
9403 + # a full profile with IUSE_IMPLICIT and stuff (see bug #456830).
9404 + elif [[ -n $PORTAGE_IUSE && -n $EBUILD_PHASE &&
9405 + -n $PORTAGE_INTERNAL_CALLER ]] ; then
9406 + if [[ ! $u =~ $PORTAGE_IUSE ]] ; then
9407 + if [[ ! ${EAPI} =~ ^(0|1|2|3|4|4-python|4-slot-abi)$ ]] ; then
9408 + # This is only strict starting with EAPI 5, since implicit IUSE
9409 + # is not well defined for earlier EAPIs (see bug #449708).
9410 + die "USE Flag '${u}' not in IUSE for ${CATEGORY}/${PF}"
9411 + fi
9412 eqawarn "QA Notice: USE Flag '${u}' not" \
9413 "in IUSE for ${CATEGORY}/${PF}"
9414 + fi
9415 fi
9416
9417 + local IFS=$' \t\n' prev_shopts=$- ret
9418 + set -f
9419 if has ${u} ${USE} ; then
9420 - return ${found}
9421 + ret=${found}
9422 else
9423 - return $((!found))
9424 + ret=$((!found))
9425 fi
9426 + [[ ${prev_shopts} == *f* ]] || set +f
9427 + return ${ret}
9428 }
9429
9430 use_with() {
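Two things change in use() above: a flag missing from IUSE becomes a hard die from EAPI 5 onward (it stays a QA warning for EAPI 0 through 4 and their variants, and the check is skipped for external API consumers that do not set PORTAGE_INTERNAL_CALLER, per bug #456830), and the final has test runs under set -f with a fixed IFS so USE tokens are never glob-expanded. A sketch of the EAPI gate with hypothetical values:

    EAPI=5
    PORTAGE_IUSE='^(debug|ssl)$'
    u=threads
    if [[ ! ${u} =~ ${PORTAGE_IUSE} ]]; then
        if [[ ! ${EAPI} =~ ^(0|1|2|3|4|4-python|4-slot-abi)$ ]]; then
            echo "die: USE Flag '${u}' not in IUSE"      # strict from EAPI 5 on
        else
            echo "QA Notice: USE Flag '${u}' not in IUSE"
        fi
    fi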
9431 @@ -215,7 +222,7 @@ use_with() {
9432 return 1
9433 fi
9434
9435 - if ! has "${EAPI:-0}" 0 1 2 3 ; then
9436 + if ___eapi_use_enable_and_use_with_support_empty_third_argument; then
9437 local UW_SUFFIX=${3+=$3}
9438 else
9439 local UW_SUFFIX=${3:+=$3}
9440 @@ -237,7 +244,7 @@ use_enable() {
9441 return 1
9442 fi
9443
9444 - if ! has "${EAPI:-0}" 0 1 2 3 ; then
9445 + if ___eapi_use_enable_and_use_with_support_empty_third_argument; then
9446 local UE_SUFFIX=${3+=$3}
9447 else
9448 local UE_SUFFIX=${3:+=$3}
9449 @@ -255,15 +262,19 @@ use_enable() {
9450 unpack() {
9451 local srcdir
9452 local x
9453 - local y
9454 + local y y_insensitive
9455 + local suffix suffix_insensitive
9456 local myfail
9457 local eapi=${EAPI:-0}
9458 [ -z "$*" ] && die "Nothing passed to the 'unpack' command"
9459
9460 for x in "$@"; do
9461 - vecho ">>> Unpacking ${x} to ${PWD}"
9462 + __vecho ">>> Unpacking ${x} to ${PWD}"
9463 + suffix=${x##*.}
9464 + suffix_insensitive=$(LC_ALL=C tr "[:upper:]" "[:lower:]" <<< "${suffix}")
9465 y=${x%.*}
9466 y=${y##*.}
9467 + y_insensitive=$(LC_ALL=C tr "[:upper:]" "[:lower:]" <<< "${y}")
9468
9469 if [[ ${x} == "./"* ]] ; then
9470 srcdir=""
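unpack() now matches archive suffixes case-insensitively: the suffix (and the secondary suffix for names like .tar.gz) is lowercased with tr before the case statement that follows, and for EAPIs where unpack is officially case-sensitive a QA notice is emitted whenever the literal spelling is not one of the officially supported ones. The suffix split itself, with a hypothetical filename:

    x=Foo-1.0.TAR.BZ2
    suffix=${x##*.}                                                           # BZ2
    suffix_insensitive=$(LC_ALL=C tr "[:upper:]" "[:lower:]" <<< "${suffix}") # bz2
    y=${x%.*}; y=${y##*.}                                                     # TAR
    y_insensitive=$(LC_ALL=C tr "[:upper:]" "[:lower:]" <<< "${y}")           # tar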
9471 @@ -276,10 +287,16 @@ unpack() {
9472 fi
9473 [[ ! -s ${srcdir}${x} ]] && die "${x} does not exist"
9474
9475 - _unpack_tar() {
9476 - if [ "${y}" == "tar" ]; then
9477 + __unpack_tar() {
9478 + if [[ ${y_insensitive} == tar ]] ; then
9479 + if ___eapi_unpack_is_case_sensitive && \
9480 + [[ tar != ${y} ]] ; then
9481 + eqawarn "QA Notice: unpack called with" \
9482 + "secondary suffix '${y}' which is unofficially" \
9483 + "supported with EAPI '${EAPI}'. Instead use 'tar'."
9484 + fi
9485 $1 -c -- "$srcdir$x" | tar xof -
9486 - assert_sigpipe_ok "$myfail"
9487 + __assert_sigpipe_ok "$myfail"
9488 else
9489 local cwd_dest=${x##*/}
9490 cwd_dest=${cwd_dest%.*}
9491 @@ -288,30 +305,67 @@ unpack() {
9492 }
9493
9494 myfail="failure unpacking ${x}"
9495 - case "${x##*.}" in
9496 + case "${suffix_insensitive}" in
9497 tar)
9498 + if ___eapi_unpack_is_case_sensitive && \
9499 + [[ tar != ${suffix} ]] ; then
9500 + eqawarn "QA Notice: unpack called with" \
9501 + "suffix '${suffix}' which is unofficially supported" \
9502 + "with EAPI '${EAPI}'. Instead use 'tar'."
9503 + fi
9504 tar xof "$srcdir$x" || die "$myfail"
9505 ;;
9506 tgz)
9507 + if ___eapi_unpack_is_case_sensitive && \
9508 + [[ tgz != ${suffix} ]] ; then
9509 + eqawarn "QA Notice: unpack called with" \
9510 + "suffix '${suffix}' which is unofficially supported" \
9511 + "with EAPI '${EAPI}'. Instead use 'tgz'."
9512 + fi
9513 tar xozf "$srcdir$x" || die "$myfail"
9514 ;;
9515 tbz|tbz2)
9516 + if ___eapi_unpack_is_case_sensitive && \
9517 + [[ " tbz tbz2 " != *" ${suffix} "* ]] ; then
9518 + eqawarn "QA Notice: unpack called with" \
9519 + "suffix '${suffix}' which is unofficially supported" \
9520 + "with EAPI '${EAPI}'. Instead use 'tbz' or 'tbz2'."
9521 + fi
9522 ${PORTAGE_BUNZIP2_COMMAND:-${PORTAGE_BZIP2_COMMAND} -d} -c -- "$srcdir$x" | tar xof -
9523 - assert_sigpipe_ok "$myfail"
9524 + __assert_sigpipe_ok "$myfail"
9525 ;;
9526 - ZIP|zip|jar)
9527 + zip|jar)
9528 + if ___eapi_unpack_is_case_sensitive && \
9529 + [[ " ZIP zip jar " != *" ${suffix} "* ]] ; then
9530 + eqawarn "QA Notice: unpack called with" \
9531 + "suffix '${suffix}' which is unofficially supported" \
9532 + "with EAPI '${EAPI}'." \
9533 + "Instead use 'ZIP', 'zip', or 'jar'."
9534 + fi
9535 # unzip will interactively prompt under some error conditions,
9536 # as reported in bug #336285
9537 ( set +x ; while true ; do echo n || break ; done ) | \
9538 unzip -qo "${srcdir}${x}" || die "$myfail"
9539 ;;
9540 - gz|Z|z)
9541 - _unpack_tar "gzip -d"
9542 + gz|z)
9543 + if ___eapi_unpack_is_case_sensitive && \
9544 + [[ " gz z Z " != *" ${suffix} "* ]] ; then
9545 + eqawarn "QA Notice: unpack called with" \
9546 + "suffix '${suffix}' which is unofficially supported" \
9547 + "with EAPI '${EAPI}'. Instead use 'gz', 'z', or 'Z'."
9548 + fi
9549 + __unpack_tar "gzip -d"
9550 ;;
9551 bz2|bz)
9552 - _unpack_tar "${PORTAGE_BUNZIP2_COMMAND:-${PORTAGE_BZIP2_COMMAND} -d}"
9553 + if ___eapi_unpack_is_case_sensitive && \
9554 + [[ " bz bz2 " != *" ${suffix} "* ]] ; then
9555 + eqawarn "QA Notice: unpack called with" \
9556 + "suffix '${suffix}' which is unofficially supported" \
9557 + "with EAPI '${EAPI}'. Instead use 'bz' or 'bz2'."
9558 + fi
9559 + __unpack_tar "${PORTAGE_BUNZIP2_COMMAND:-${PORTAGE_BZIP2_COMMAND} -d}"
9560 ;;
9561 - 7Z|7z)
9562 + 7z)
9563 local my_output
9564 my_output="$(7z x -y "${srcdir}${x}")"
9565 if [ $? -ne 0 ]; then
9566 @@ -319,16 +373,41 @@ unpack() {
9567 die "$myfail"
9568 fi
9569 ;;
9570 - RAR|rar)
9571 + rar)
9572 + if ___eapi_unpack_is_case_sensitive && \
9573 + [[ " rar RAR " != *" ${suffix} "* ]] ; then
9574 + eqawarn "QA Notice: unpack called with" \
9575 + "suffix '${suffix}' which is unofficially supported" \
9576 + "with EAPI '${EAPI}'. Instead use 'rar' or 'RAR'."
9577 + fi
9578 unrar x -idq -o+ "${srcdir}${x}" || die "$myfail"
9579 ;;
9580 - LHa|LHA|lha|lzh)
9581 + lha|lzh)
9582 + if ___eapi_unpack_is_case_sensitive && \
9583 + [[ " LHA LHa lha lzh " != *" ${suffix} "* ]] ; then
9584 + eqawarn "QA Notice: unpack called with" \
9585 + "suffix '${suffix}' which is unofficially supported" \
9586 + "with EAPI '${EAPI}'." \
9587 + "Instead use 'LHA', 'LHa', 'lha', or 'lzh'."
9588 + fi
9589 lha xfq "${srcdir}${x}" || die "$myfail"
9590 ;;
9591 a)
9592 + if ___eapi_unpack_is_case_sensitive && \
9593 + [[ " a " != *" ${suffix} "* ]] ; then
9594 + eqawarn "QA Notice: unpack called with" \
9595 + "suffix '${suffix}' which is unofficially supported" \
9596 + "with EAPI '${EAPI}'. Instead use 'a'."
9597 + fi
9598 ar x "${srcdir}${x}" || die "$myfail"
9599 ;;
9600 deb)
9601 + if ___eapi_unpack_is_case_sensitive && \
9602 + [[ " deb " != *" ${suffix} "* ]] ; then
9603 + eqawarn "QA Notice: unpack called with" \
9604 + "suffix '${suffix}' which is unofficially supported" \
9605 + "with EAPI '${EAPI}'. Instead use 'deb'."
9606 + fi
9607 # Unpacking .deb archives can not always be done with
9608 # `ar`. For instance on AIX this doesn't work out. If
9609 # we have `deb2targz` installed, prefer it over `ar` for
9610 @@ -356,17 +435,29 @@ unpack() {
9611 fi
9612 ;;
9613 lzma)
9614 - _unpack_tar "lzma -d"
9615 + if ___eapi_unpack_is_case_sensitive && \
9616 + [[ " lzma " != *" ${suffix} "* ]] ; then
9617 + eqawarn "QA Notice: unpack called with" \
9618 + "suffix '${suffix}' which is unofficially supported" \
9619 + "with EAPI '${EAPI}'. Instead use 'lzma'."
9620 + fi
9621 + __unpack_tar "lzma -d"
9622 ;;
9623 xz)
9624 - if has $eapi 0 1 2 ; then
9625 - vecho "unpack ${x}: file format not recognized. Ignoring."
9626 + if ___eapi_unpack_is_case_sensitive && \
9627 + [[ " xz " != *" ${suffix} "* ]] ; then
9628 + eqawarn "QA Notice: unpack called with" \
9629 + "suffix '${suffix}' which is unofficially supported" \
9630 + "with EAPI '${EAPI}'. Instead use 'xz'."
9631 + fi
9632 + if ___eapi_unpack_supports_xz; then
9633 + __unpack_tar "xz -d"
9634 else
9635 - _unpack_tar "xz -d"
9636 + __vecho "unpack ${x}: file format not recognized. Ignoring."
9637 fi
9638 ;;
9639 *)
9640 - vecho "unpack ${x}: file format not recognized. Ignoring."
9641 + __vecho "unpack ${x}: file format not recognized. Ignoring."
9642 ;;
9643 esac
9644 done
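
(For context, a rough sketch of how the suffix check added above surfaces to ebuild authors; the package name, archive suffix, and EAPI below are illustrative assumptions, not part of this commit:

    # hypothetical foo-1.0.ebuild with EAPI=5, whose SRC_URI fetches foo-1.0.TGZ
    src_unpack() {
        # The archive still unpacks, but portage now emits:
        #   QA Notice: unpack called with suffix 'TGZ' which is unofficially
        #   supported with EAPI '5'. Instead use 'tgz'.
        unpack ${A}
    }
)
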
9645 @@ -378,22 +469,24 @@ unpack() {
9646
9647 econf() {
9648 local x
9649 + local pid=${BASHPID:-$(__bashpid)}
9650
9651 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
9652 - case "$EAPI" in 0|1|2) local EPREFIX= ;; esac
9653 + if ! ___eapi_has_prefix_variables; then
9654 + local EPREFIX=
9655 + fi
9656
9657 - _hasg() {
9658 + __hasg() {
9659 local x s=$1
9660 shift
9661 for x ; do [[ ${x} == ${s} ]] && echo "${x}" && return 0 ; done
9662 return 1
9663 }
9664
9665 - _hasgq() { _hasg "$@" >/dev/null ; }
9666 + __hasgq() { __hasg "$@" >/dev/null ; }
9667
9668 - local phase_func=$(_ebuild_arg_to_phase "$EAPI" "$EBUILD_PHASE")
9669 + local phase_func=$(__ebuild_arg_to_phase "$EBUILD_PHASE")
9670 if [[ -n $phase_func ]] ; then
9671 - if has "$EAPI" 0 1 ; then
9672 + if ! ___eapi_has_src_configure; then
9673 [[ $phase_func != src_compile ]] && \
9674 eqawarn "QA Notice: econf called in" \
9675 "$phase_func instead of src_compile"
9676 @@ -408,23 +501,44 @@ econf() {
9677 if [ -x "${ECONF_SOURCE}/configure" ]; then
9678 if [[ -n $CONFIG_SHELL && \
9679 "$(head -n1 "$ECONF_SOURCE/configure")" =~ ^'#!'[[:space:]]*/bin/sh([[:space:]]|$) ]] ; then
9680 - sed -e "1s:^#![[:space:]]*/bin/sh:#!$CONFIG_SHELL:" -i "$ECONF_SOURCE/configure" || \
9681 - die "Substition of shebang in '$ECONF_SOURCE/configure' failed"
9682 + # preserve timestamp, see bug #440304
9683 + touch -r "${ECONF_SOURCE}/configure" "${ECONF_SOURCE}/configure._portage_tmp_.${pid}" || die
9684 + sed -i \
9685 + -e "1s:^#![[:space:]]*/bin/sh:#!$CONFIG_SHELL:" \
9686 + "${ECONF_SOURCE}/configure" \
9687 + || die "Substitution of shebang in '${ECONF_SOURCE}/configure' failed"
9688 + touch -r "${ECONF_SOURCE}/configure._portage_tmp_.${pid}" "${ECONF_SOURCE}/configure" || die
9689 + rm -f "${ECONF_SOURCE}/configure._portage_tmp_.${pid}"
9690 fi
9691 if [ -e "${EPREFIX}"/usr/share/gnuconfig/ ]; then
9692 find "${WORKDIR}" -type f '(' \
9693 -name config.guess -o -name config.sub ')' -print0 | \
9694 while read -r -d $'\0' x ; do
9695 - vecho " * econf: updating ${x/${WORKDIR}\/} with ${EPREFIX}/usr/share/gnuconfig/${x##*/}"
9696 - cp -f "${EPREFIX}"/usr/share/gnuconfig/"${x##*/}" "${x}"
9697 + __vecho " * econf: updating ${x/${WORKDIR}\/} with ${EPREFIX}/usr/share/gnuconfig/${x##*/}"
9698 + # Make sure we do this atomically in case we're run in parallel. #487478
9699 + cp -f "${EPREFIX}"/usr/share/gnuconfig/"${x##*/}" "${x}.${pid}"
9700 + mv -f "${x}.${pid}" "${x}"
9701 done
9702 fi
9703
9704 - # EAPI=4 adds --disable-dependency-tracking to econf
9705 - if ! has "$EAPI" 0 1 2 3 3_pre2 && \
9706 - "${ECONF_SOURCE}/configure" --help 2>/dev/null | \
9707 - grep -q disable-dependency-tracking ; then
9708 - set -- --disable-dependency-tracking "$@"
9709 + if ___eapi_econf_passes_--disable-dependency-tracking || ___eapi_econf_passes_--disable-silent-rules; then
9710 + local conf_help=$("${ECONF_SOURCE}/configure" --help 2>/dev/null)
9711 +
9712 + if ___eapi_econf_passes_--disable-dependency-tracking; then
9713 + case "${conf_help}" in
9714 + *--disable-dependency-tracking*)
9715 + set -- --disable-dependency-tracking "$@"
9716 + ;;
9717 + esac
9718 + fi
9719 +
9720 + if ___eapi_econf_passes_--disable-silent-rules; then
9721 + case "${conf_help}" in
9722 + *--disable-silent-rules*)
9723 + set -- --disable-silent-rules "$@"
9724 + ;;
9725 + esac
9726 + fi
9727 fi
9728
9729 # if the profile defines a location to install libs to aside from default, pass it on.
9730 @@ -433,16 +547,19 @@ econf() {
9731 if [[ -n ${ABI} && -n ${!LIBDIR_VAR} ]] ; then
9732 CONF_LIBDIR=${!LIBDIR_VAR}
9733 fi
9734 - if [[ -n ${CONF_LIBDIR} ]] && ! _hasgq --libdir=\* "$@" ; then
9735 - export CONF_PREFIX=$(_hasg --exec-prefix=\* "$@")
9736 - [[ -z ${CONF_PREFIX} ]] && CONF_PREFIX=$(_hasg --prefix=\* "$@")
9737 + if [[ -n ${CONF_LIBDIR} ]] && ! __hasgq --libdir=\* "$@" ; then
9738 + export CONF_PREFIX=$(__hasg --exec-prefix=\* "$@")
9739 + [[ -z ${CONF_PREFIX} ]] && CONF_PREFIX=$(__hasg --prefix=\* "$@")
9740 : ${CONF_PREFIX:=${EPREFIX}/usr}
9741 CONF_PREFIX=${CONF_PREFIX#*=}
9742 [[ ${CONF_PREFIX} != /* ]] && CONF_PREFIX="/${CONF_PREFIX}"
9743 [[ ${CONF_LIBDIR} != /* ]] && CONF_LIBDIR="/${CONF_LIBDIR}"
9744 - set -- --libdir="$(strip_duplicate_slashes ${CONF_PREFIX}${CONF_LIBDIR})" "$@"
9745 + set -- --libdir="$(__strip_duplicate_slashes "${CONF_PREFIX}${CONF_LIBDIR}")" "$@"
9746 fi
9747
9748 + # Handle arguments containing quoted whitespace (see bug #457136).
9749 + eval "local -a EXTRA_ECONF=(${EXTRA_ECONF})"
9750 +
9751 set -- \
9752 --prefix="${EPREFIX}"/usr \
9753 ${CBUILD:+--build=${CBUILD}} \
9754 @@ -454,8 +571,8 @@ econf() {
9755 --sysconfdir="${EPREFIX}"/etc \
9756 --localstatedir="${EPREFIX}"/var/lib \
9757 "$@" \
9758 - ${EXTRA_ECONF}
9759 - vecho "${ECONF_SOURCE}/configure" "$@"
9760 + "${EXTRA_ECONF[@]}"
9761 + __vecho "${ECONF_SOURCE}/configure" "$@"
9762
9763 if ! "${ECONF_SOURCE}/configure" "$@" ; then
9764
9765 @@ -476,8 +593,9 @@ econf() {
9766 einstall() {
9767 # CONF_PREFIX is only set if they didn't pass in libdir above.
9768 local LOCAL_EXTRA_EINSTALL="${EXTRA_EINSTALL}"
9769 - [[ " ${FEATURES} " == *" force-prefix "* ]] || \
9770 - case "$EAPI" in 0|1|2) local ED=${D} ;; esac
9771 + if ! ___eapi_has_prefix_variables; then
9772 + local ED=${D}
9773 + fi
9774 LIBDIR_VAR="LIBDIR_${ABI}"
9775 if [ -n "${ABI}" -a -n "${!LIBDIR_VAR}" ]; then
9776 CONF_LIBDIR="${!LIBDIR_VAR}"
9777 @@ -485,7 +603,7 @@ einstall() {
9778 unset LIBDIR_VAR
9779 if [ -n "${CONF_LIBDIR}" ] && [ "${CONF_PREFIX:+set}" = set ]; then
9780 EI_DESTLIBDIR="${D}/${CONF_PREFIX}/${CONF_LIBDIR}"
9781 - EI_DESTLIBDIR="$(strip_duplicate_slashes ${EI_DESTLIBDIR})"
9782 + EI_DESTLIBDIR="$(__strip_duplicate_slashes "${EI_DESTLIBDIR}")"
9783 LOCAL_EXTRA_EINSTALL="libdir=${EI_DESTLIBDIR} ${LOCAL_EXTRA_EINSTALL}"
9784 unset EI_DESTLIBDIR
9785 fi
9786 @@ -516,7 +634,7 @@ einstall() {
9787 fi
9788 }
9789
9790 -_eapi0_pkg_nofetch() {
9791 +__eapi0_pkg_nofetch() {
9792 [ -z "${SRC_URI}" ] && return
9793
9794 elog "The following are listed in SRC_URI for ${PN}:"
9795 @@ -526,55 +644,59 @@ _eapi0_pkg_nofetch() {
9796 done
9797 }
9798
9799 -_eapi0_src_unpack() {
9800 +__eapi0_src_unpack() {
9801 [[ -n ${A} ]] && unpack ${A}
9802 }
9803
9804 -_eapi0_src_compile() {
9805 +__eapi0_src_compile() {
9806 if [ -x ./configure ] ; then
9807 econf
9808 fi
9809 - _eapi2_src_compile
9810 + __eapi2_src_compile
9811 }
9812
9813 -_eapi0_src_test() {
9814 +__eapi0_src_test() {
9815 # Since we don't want emake's automatic die
9816 # support (EAPI 4 and later), and we also don't
9817 # want the warning messages that it produces if
9818 # we call it in 'nonfatal' mode, we use emake_cmd
9819 # to emulate the desired parts of emake behavior.
9820 local emake_cmd="${MAKE:-make} ${MAKEOPTS} ${EXTRA_EMAKE}"
9821 - if $emake_cmd -j1 check -n &> /dev/null; then
9822 - vecho ">>> Test phase [check]: ${CATEGORY}/${PF}"
9823 - $emake_cmd -j1 check || \
9824 + local internal_opts=
9825 + if ___eapi_default_src_test_disables_parallel_jobs; then
9826 + internal_opts+=" -j1"
9827 + fi
9828 + if $emake_cmd ${internal_opts} check -n &> /dev/null; then
9829 + __vecho ">>> Test phase [check]: ${CATEGORY}/${PF}"
9830 + $emake_cmd ${internal_opts} check || \
9831 die "Make check failed. See above for details."
9832 - elif $emake_cmd -j1 test -n &> /dev/null; then
9833 - vecho ">>> Test phase [test]: ${CATEGORY}/${PF}"
9834 - $emake_cmd -j1 test || \
9835 + elif $emake_cmd ${internal_opts} test -n &> /dev/null; then
9836 + __vecho ">>> Test phase [test]: ${CATEGORY}/${PF}"
9837 + $emake_cmd ${internal_opts} test || \
9838 die "Make test failed. See above for details."
9839 else
9840 - vecho ">>> Test phase [none]: ${CATEGORY}/${PF}"
9841 + __vecho ">>> Test phase [none]: ${CATEGORY}/${PF}"
9842 fi
9843 }
9844
9845 -_eapi1_src_compile() {
9846 - _eapi2_src_configure
9847 - _eapi2_src_compile
9848 +__eapi1_src_compile() {
9849 + __eapi2_src_configure
9850 + __eapi2_src_compile
9851 }
9852
9853 -_eapi2_src_configure() {
9854 +__eapi2_src_configure() {
9855 if [[ -x ${ECONF_SOURCE:-.}/configure ]] ; then
9856 econf
9857 fi
9858 }
9859
9860 -_eapi2_src_compile() {
9861 +__eapi2_src_compile() {
9862 if [ -f Makefile ] || [ -f GNUmakefile ] || [ -f makefile ]; then
9863 emake || die "emake failed"
9864 fi
9865 }
9866
9867 -_eapi4_src_install() {
9868 +__eapi4_src_install() {
9869 if [[ -f Makefile || -f GNUmakefile || -f makefile ]] ; then
9870 emake DESTDIR="${D}" install
9871 fi
9872 @@ -593,71 +715,285 @@ _eapi4_src_install() {
9873 }
9874
9875 # @FUNCTION: has_version
9876 -# @USAGE: <DEPEND ATOM>
9877 +# @USAGE: [--host-root] <DEPEND ATOM>
9878 # @DESCRIPTION:
9879 # Return true if given package is installed. Otherwise return false.
9880 # Callers may override the ROOT variable in order to match packages from an
9881 # alternative ROOT.
9882 has_version() {
9883
9884 - local eroot
9885 - case "$EAPI" in
9886 - 0|1|2)
9887 - [[ " ${FEATURES} " == *" force-prefix "* ]] && \
9888 - eroot=${ROOT%/}${EPREFIX}/ || eroot=${ROOT}
9889 - ;;
9890 - *)
9891 - eroot=${ROOT%/}${EPREFIX}/
9892 - ;;
9893 - esac
9894 + local atom eroot host_root=false root=${ROOT}
9895 + if [[ $1 == --host-root ]] ; then
9896 + host_root=true
9897 + shift
9898 + fi
9899 + atom=$1
9900 + shift
9901 + [ $# -gt 0 ] && die "${FUNCNAME[0]}: unused argument(s): $*"
9902 +
9903 + if ${host_root} ; then
9904 + if ! ___eapi_best_version_and_has_version_support_--host-root; then
9905 + die "${FUNCNAME[0]}: option --host-root is not supported with EAPI ${EAPI}"
9906 + fi
9907 + root=/
9908 + fi
9909 +
9910 + if ___eapi_has_prefix_variables; then
9911 + # [[ ${root} == / ]] would be ambiguous here,
9912 + # since both prefixes can share root=/ while
9913 + # having different EPREFIX offsets.
9914 + if ${host_root} ; then
9915 + eroot=${root%/}${PORTAGE_OVERRIDE_EPREFIX}/
9916 + else
9917 + eroot=${root%/}${EPREFIX}/
9918 + fi
9919 + else
9920 + eroot=${root}
9921 + fi
9922 if [[ -n $PORTAGE_IPC_DAEMON ]] ; then
9923 - "$PORTAGE_BIN_PATH"/ebuild-ipc has_version "${eroot}" "$1"
9924 + "$PORTAGE_BIN_PATH"/ebuild-ipc has_version "${eroot}" "${atom}"
9925 else
9926 - PYTHONPATH=${PORTAGE_PYM_PATH}${PYTHONPATH:+:}${PYTHONPATH} \
9927 - "${PORTAGE_PYTHON:-/usr/bin/python}" "${PORTAGE_BIN_PATH}/portageq" has_version "${eroot}" "$1"
9928 + "${PORTAGE_BIN_PATH}/ebuild-helpers/portageq" has_version "${eroot}" "${atom}"
9929 fi
9930 local retval=$?
9931 case "${retval}" in
9932 0|1)
9933 return ${retval}
9934 ;;
9935 + 2)
9936 + die "${FUNCNAME[0]}: invalid atom: ${atom}"
9937 + ;;
9938 *)
9939 - die "unexpected portageq exit code: ${retval}"
9940 + if [[ -n ${PORTAGE_IPC_DAEMON} ]]; then
9941 + die "${FUNCNAME[0]}: unexpected ebuild-ipc exit code: ${retval}"
9942 + else
9943 + die "${FUNCNAME[0]}: unexpected portageq exit code: ${retval}"
9944 + fi
9945 ;;
9946 esac
9947 }
9948
9949 # @FUNCTION: best_version
9950 -# @USAGE: <DEPEND ATOM>
9951 +# @USAGE: [--host-root] <DEPEND ATOM>
9952 # @DESCRIPTION:
9953 # Returns the best/most-current match.
9954 # Callers may override the ROOT variable in order to match packages from an
9955 # alternative ROOT.
9956 best_version() {
9957
9958 - local eroot
9959 - case "$EAPI" in
9960 - 0|1|2)
9961 - [[ " ${FEATURES} " == *" force-prefix "* ]] && \
9962 - eroot=${ROOT%/}${EPREFIX}/ || eroot=${ROOT}
9963 - ;;
9964 - *)
9965 - eroot=${ROOT%/}${EPREFIX}/
9966 - ;;
9967 - esac
9968 + local atom eroot host_root=false root=${ROOT}
9969 + if [[ $1 == --host-root ]] ; then
9970 + host_root=true
9971 + shift
9972 + fi
9973 + atom=$1
9974 + shift
9975 + [ $# -gt 0 ] && die "${FUNCNAME[0]}: unused argument(s): $*"
9976 +
9977 + if ${host_root} ; then
9978 + if ! ___eapi_best_version_and_has_version_support_--host-root; then
9979 + die "${FUNCNAME[0]}: option --host-root is not supported with EAPI ${EAPI}"
9980 + fi
9981 + root=/
9982 + fi
9983 +
9984 + if ___eapi_has_prefix_variables; then
9985 + # [[ ${root} == / ]] would be ambiguous here,
9986 + # since both prefixes can share root=/ while
9987 + # having different EPREFIX offsets.
9988 + if ${host_root} ; then
9989 + eroot=${root%/}${PORTAGE_OVERRIDE_EPREFIX}/
9990 + else
9991 + eroot=${root%/}${EPREFIX}/
9992 + fi
9993 + else
9994 + eroot=${root}
9995 + fi
9996 if [[ -n $PORTAGE_IPC_DAEMON ]] ; then
9997 - "$PORTAGE_BIN_PATH"/ebuild-ipc best_version "${eroot}" "$1"
9998 + "$PORTAGE_BIN_PATH"/ebuild-ipc best_version "${eroot}" "${atom}"
9999 else
10000 - PYTHONPATH=${PORTAGE_PYM_PATH}${PYTHONPATH:+:}${PYTHONPATH} \
10001 - "${PORTAGE_PYTHON:-/usr/bin/python}" "${PORTAGE_BIN_PATH}/portageq" best_version "${eroot}" "$1"
10002 + "${PORTAGE_BIN_PATH}/ebuild-helpers/portageq" best_version "${eroot}" "${atom}"
10003 fi
10004 local retval=$?
10005 case "${retval}" in
10006 0|1)
10007 return ${retval}
10008 ;;
10009 + 2)
10010 + die "${FUNCNAME[0]}: invalid atom: ${atom}"
10011 + ;;
10012 *)
10013 - die "unexpected portageq exit code: ${retval}"
10014 + if [[ -n ${PORTAGE_IPC_DAEMON} ]]; then
10015 + die "${FUNCNAME[0]}: unexpected ebuild-ipc exit code: ${retval}"
10016 + else
10017 + die "${FUNCNAME[0]}: unexpected portageq exit code: ${retval}"
10018 + fi
10019 ;;
10020 esac
10021 }
10022 +
10023 +if ___eapi_has_master_repositories; then
10024 + master_repositories() {
10025 + local output repository=$1 retval
10026 + shift
10027 + [[ $# -gt 0 ]] && die "${FUNCNAME[0]}: unused argument(s): $*"
10028 +
10029 + if [[ -n ${PORTAGE_IPC_DAEMON} ]]; then
10030 + "${PORTAGE_BIN_PATH}/ebuild-ipc" master_repositories "${EROOT}" "${repository}"
10031 + else
10032 + output=$("${PORTAGE_BIN_PATH}/ebuild-helpers/portageq" master_repositories "${EROOT}" "${repository}")
10033 + fi
10034 + retval=$?
10035 + [[ -n ${output} ]] && echo "${output}"
10036 + case "${retval}" in
10037 + 0|1)
10038 + return ${retval}
10039 + ;;
10040 + 2)
10041 + die "${FUNCNAME[0]}: invalid repository: ${repository}"
10042 + ;;
10043 + *)
10044 + if [[ -n ${PORTAGE_IPC_DAEMON} ]]; then
10045 + die "${FUNCNAME[0]}: unexpected ebuild-ipc exit code: ${retval}"
10046 + else
10047 + die "${FUNCNAME[0]}: unexpected portageq exit code: ${retval}"
10048 + fi
10049 + ;;
10050 + esac
10051 + }
10052 +fi
10053 +
10054 +if ___eapi_has_repository_path; then
10055 + repository_path() {
10056 + local output repository=$1 retval
10057 + shift
10058 + [[ $# -gt 0 ]] && die "${FUNCNAME[0]}: unused argument(s): $*"
10059 +
10060 + if [[ -n ${PORTAGE_IPC_DAEMON} ]]; then
10061 + "${PORTAGE_BIN_PATH}/ebuild-ipc" repository_path "${EROOT}" "${repository}"
10062 + else
10063 + output=$("${PORTAGE_BIN_PATH}/ebuild-helpers/portageq" get_repo_path "${EROOT}" "${repository}")
10064 + fi
10065 + retval=$?
10066 + [[ -n ${output} ]] && echo "${output}"
10067 + case "${retval}" in
10068 + 0|1)
10069 + return ${retval}
10070 + ;;
10071 + 2)
10072 + die "${FUNCNAME[0]}: invalid repository: ${repository}"
10073 + ;;
10074 + *)
10075 + if [[ -n ${PORTAGE_IPC_DAEMON} ]]; then
10076 + die "${FUNCNAME[0]}: unexpected ebuild-ipc exit code: ${retval}"
10077 + else
10078 + die "${FUNCNAME[0]}: unexpected portageq exit code: ${retval}"
10079 + fi
10080 + ;;
10081 + esac
10082 + }
10083 +fi
10084 +
10085 +if ___eapi_has_available_eclasses; then
10086 + available_eclasses() {
10087 + local output repository=${PORTAGE_REPO_NAME} retval
10088 + [[ $# -gt 0 ]] && die "${FUNCNAME[0]}: unused argument(s): $*"
10089 +
10090 + if [[ -n ${PORTAGE_IPC_DAEMON} ]]; then
10091 + "${PORTAGE_BIN_PATH}/ebuild-ipc" available_eclasses "${EROOT}" "${repository}"
10092 + else
10093 + output=$("${PORTAGE_BIN_PATH}/ebuild-helpers/portageq" available_eclasses "${EROOT}" "${repository}")
10094 + fi
10095 + retval=$?
10096 + [[ -n ${output} ]] && echo "${output}"
10097 + case "${retval}" in
10098 + 0|1)
10099 + return ${retval}
10100 + ;;
10101 + 2)
10102 + die "${FUNCNAME[0]}: invalid repository: ${repository}"
10103 + ;;
10104 + *)
10105 + if [[ -n ${PORTAGE_IPC_DAEMON} ]]; then
10106 + die "${FUNCNAME[0]}: unexpected ebuild-ipc exit code: ${retval}"
10107 + else
10108 + die "${FUNCNAME[0]}: unexpected portageq exit code: ${retval}"
10109 + fi
10110 + ;;
10111 + esac
10112 + }
10113 +fi
10114 +
10115 +if ___eapi_has_eclass_path; then
10116 + eclass_path() {
10117 + local eclass=$1 output repository=${PORTAGE_REPO_NAME} retval
10118 + shift
10119 + [[ $# -gt 0 ]] && die "${FUNCNAME[0]}: unused argument(s): $*"
10120 +
10121 + if [[ -n ${PORTAGE_IPC_DAEMON} ]]; then
10122 + "${PORTAGE_BIN_PATH}/ebuild-ipc" eclass_path "${EROOT}" "${repository}" "${eclass}"
10123 + else
10124 + output=$("${PORTAGE_BIN_PATH}/ebuild-helpers/portageq" eclass_path "${EROOT}" "${repository}" "${eclass}")
10125 + fi
10126 + retval=$?
10127 + [[ -n ${output} ]] && echo "${output}"
10128 + case "${retval}" in
10129 + 0|1)
10130 + return ${retval}
10131 + ;;
10132 + 2)
10133 + die "${FUNCNAME[0]}: invalid repository: ${repository}"
10134 + ;;
10135 + *)
10136 + if [[ -n ${PORTAGE_IPC_DAEMON} ]]; then
10137 + die "${FUNCNAME[0]}: unexpected ebuild-ipc exit code: ${retval}"
10138 + else
10139 + die "${FUNCNAME[0]}: unexpected portageq exit code: ${retval}"
10140 + fi
10141 + ;;
10142 + esac
10143 + }
10144 +fi
10145 +
10146 +if ___eapi_has_license_path; then
10147 + license_path() {
10148 + local license=$1 output repository=${PORTAGE_REPO_NAME} retval
10149 + shift
10150 + [[ $# -gt 0 ]] && die "${FUNCNAME[0]}: unused argument(s): $*"
10151 +
10152 + if [[ -n ${PORTAGE_IPC_DAEMON} ]]; then
10153 + "${PORTAGE_BIN_PATH}/ebuild-ipc" license_path "${EROOT}" "${repository}" "${license}"
10154 + else
10155 + output=$("${PORTAGE_BIN_PATH}/ebuild-helpers/portageq" license_path "${EROOT}" "${repository}" "${license}")
10156 + fi
10157 + retval=$?
10158 + [[ -n ${output} ]] && echo "${output}"
10159 + case "${retval}" in
10160 + 0|1)
10161 + return ${retval}
10162 + ;;
10163 + 2)
10164 + die "${FUNCNAME[0]}: invalid repository: ${repository}"
10165 + ;;
10166 + *)
10167 + if [[ -n ${PORTAGE_IPC_DAEMON} ]]; then
10168 + die "${FUNCNAME[0]}: unexpected ebuild-ipc exit code: ${retval}"
10169 + else
10170 + die "${FUNCNAME[0]}: unexpected portageq exit code: ${retval}"
10171 + fi
10172 + ;;
10173 + esac
10174 + }
10175 +fi
10176 +
10177 +if ___eapi_has_package_manager_build_user; then
10178 + package_manager_build_user() {
10179 + echo "${PORTAGE_BUILD_USER}"
10180 + }
10181 +fi
10182 +
10183 +if ___eapi_has_package_manager_build_group; then
10184 + package_manager_build_group() {
10185 + echo "${PORTAGE_BUILD_GROUP}"
10186 + }
10187 +fi
10188
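
For reference, a minimal ebuild-side sketch of the query helpers introduced above (the package atoms, repository name, eclass, and license names are illustrative assumptions; the repository/eclass/license and build-user helpers are only defined when the active EAPI passes the corresponding ___eapi_has_* checks, and --host-root likewise needs an EAPI that supports it):

    # hypothetical ebuild fragment exercising the new query helpers
    pkg_setup() {
        # Query the build host instead of ${ROOT} when cross-compiling.
        if has_version --host-root ">=dev-lang/python-2.7" ; then
            einfo "host python: $(best_version --host-root dev-lang/python)"
        fi

        # Repository-level queries; a return status of 1 means "not found".
        if declare -F repository_path >/dev/null ; then
            einfo "gentoo repo: $(repository_path gentoo)"
            einfo "masters: $(master_repositories "${PORTAGE_REPO_NAME}")"
            einfo "eutils.eclass: $(eclass_path eutils)"
            einfo "GPL-2 text: $(license_path GPL-2)"
        fi

        # Build ownership, when the EAPI exposes it.
        if declare -F package_manager_build_user >/dev/null ; then
            einfo "building as $(package_manager_build_user):$(package_manager_build_group)"
        fi
    }
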
10189 diff --git a/bin/portageq b/bin/portageq
10190 index d9abb0b..79818f6 100755
10191 --- a/bin/portageq
10192 +++ b/bin/portageq
10193 @@ -1,15 +1,15 @@
10194 -#!/usr/bin/python -O
10195 -# Copyright 1999-2012 Gentoo Foundation
10196 +#!/usr/bin/python -bO
10197 +# Copyright 1999-2014 Gentoo Foundation
10198 # Distributed under the terms of the GNU General Public License v2
10199
10200 -from __future__ import print_function
10201 +from __future__ import print_function, unicode_literals
10202
10203 import signal
10204 import sys
10205 # This block ensures that ^C interrupts are handled quietly.
10206 try:
10207
10208 - def exithandler(signum, frame):
10209 + def exithandler(signum, _frame):
10210 signal.signal(signal.SIGINT, signal.SIG_IGN)
10211 signal.signal(signal.SIGTERM, signal.SIG_IGN)
10212 sys.exit(128 + signum)
10213 @@ -34,23 +34,22 @@ if os.environ.get("SANDBOX_ON") == "1":
10214 ":".join(filter(None, sandbox_write))
10215 del sandbox_write
10216
10217 -try:
10218 - import portage
10219 -except ImportError:
10220 - sys.path.insert(0, pym_path)
10221 - import portage
10222 -del pym_path
10223 -
10224 +sys.path.insert(0, pym_path)
10225 +import portage
10226 +portage._internal_caller = True
10227 from portage import os
10228 from portage.eapi import eapi_has_repo_deps
10229 from portage.util import writemsg, writemsg_stdout
10230 -from portage.output import colormap
10231 +from portage.util._argparse import ArgumentParser
10232 portage.proxy.lazyimport.lazyimport(globals(),
10233 + 're',
10234 'subprocess',
10235 '_emerge.Package:Package',
10236 '_emerge.RootConfig:RootConfig',
10237 + '_emerge.is_valid_package_atom:insert_category_into_atom',
10238 'portage.dbapi._expand_new_virt:expand_new_virt',
10239 'portage._sets.base:InternalPackageSet',
10240 + 'portage.xml.metadata:MetaDataXML'
10241 )
10242
10243 def eval_atom_use(atom):
10244 @@ -59,6 +58,10 @@ def eval_atom_use(atom):
10245 atom = atom.evaluate_conditionals(use)
10246 return atom
10247
10248 +def uses_eroot(function):
10249 + function.uses_eroot = True
10250 + return function
10251 +
10252 #-----------------------------------------------------------------------------
10253 #
10254 # To add functionality to this tool, add a function below.
10255 @@ -80,13 +83,14 @@ def eval_atom_use(atom):
10256 # and will automaticly add a command by the same name as the function!
10257 #
10258
10259 +@uses_eroot
10260 def has_version(argv):
10261 """<eroot> <category/package>
10262 Return code 0 if it's available, 1 otherwise.
10263 """
10264 if (len(argv) < 2):
10265 print("ERROR: insufficient parameters!")
10266 - return 2
10267 + return 3
10268
10269 warnings = []
10270
10271 @@ -105,9 +109,7 @@ def has_version(argv):
10272 try:
10273 atom = portage.dep.Atom(argv[1], allow_repo=allow_repo, eapi=eapi)
10274 except portage.exception.InvalidAtom as e:
10275 - warnings.append(
10276 - portage._unicode_decode("QA Notice: %s: %s") % \
10277 - ('has_version', e))
10278 + warnings.append("QA Notice: %s: %s" % ('has_version', e))
10279 atom = eval_atom_use(atom)
10280
10281 if warnings:
10282 @@ -125,16 +127,16 @@ def has_version(argv):
10283 portage.writemsg("ERROR: Invalid atom: '%s'\n" % argv[1],
10284 noiselevel=-1)
10285 return 2
10286 -has_version.uses_root = True
10287
10288
10289 +@uses_eroot
10290 def best_version(argv):
10291 """<eroot> <category/package>
10292 Returns category/package-version (without .ebuild).
10293 """
10294 if (len(argv) < 2):
10295 print("ERROR: insufficient parameters!")
10296 - return 2
10297 + return 3
10298
10299 warnings = []
10300
10301 @@ -153,9 +155,7 @@ def best_version(argv):
10302 try:
10303 atom = portage.dep.Atom(argv[1], allow_repo=allow_repo, eapi=eapi)
10304 except portage.exception.InvalidAtom as e:
10305 - warnings.append(
10306 - portage._unicode_decode("QA Notice: %s: %s") % \
10307 - ('best_version', e))
10308 + warnings.append("QA Notice: %s: %s" % ('best_version', e))
10309 atom = eval_atom_use(atom)
10310
10311 if warnings:
10312 @@ -166,9 +166,9 @@ def best_version(argv):
10313 print(portage.best(mylist))
10314 except KeyError:
10315 return 1
10316 -best_version.uses_root = True
10317
10318
10319 +@uses_eroot
10320 def mass_best_version(argv):
10321 """<eroot> [<category/package>]+
10322 Returns category/package-version (without .ebuild).
10323 @@ -178,23 +178,25 @@ def mass_best_version(argv):
10324 return 2
10325 try:
10326 for pack in argv[1:]:
10327 - mylist=portage.db[argv[0]]["vartree"].dbapi.match(pack)
10328 - print(pack+":"+portage.best(mylist))
10329 + mylist = portage.db[argv[0]]['vartree'].dbapi.match(pack)
10330 + print('%s:%s' % (pack, portage.best(mylist)))
10331 except KeyError:
10332 return 1
10333 -mass_best_version.uses_root = True
10334
10335 +
10336 +@uses_eroot
10337 def metadata(argv):
10338 if (len(argv) < 4):
10339 - print("ERROR: insufficient parameters!", file=sys.stderr)
10340 + print('ERROR: insufficient parameters!', file=sys.stderr)
10341 return 2
10342
10343 eroot, pkgtype, pkgspec = argv[0:3]
10344 metakeys = argv[3:]
10345 type_map = {
10346 - "ebuild":"porttree",
10347 - "binary":"bintree",
10348 - "installed":"vartree"}
10349 + 'ebuild': 'porttree',
10350 + 'binary': 'bintree',
10351 + 'installed': 'vartree'
10352 + }
10353 if pkgtype not in type_map:
10354 print("Unrecognized package type: '%s'" % pkgtype, file=sys.stderr)
10355 return 1
10356 @@ -202,9 +204,9 @@ def metadata(argv):
10357 repo = portage.dep.dep_getrepo(pkgspec)
10358 pkgspec = portage.dep.remove_slot(pkgspec)
10359 try:
10360 - values = trees[eroot][type_map[pkgtype]].dbapi.aux_get(
10361 - pkgspec, metakeys, myrepo=repo)
10362 - writemsg_stdout(''.join('%s\n' % x for x in values), noiselevel=-1)
10363 + values = trees[eroot][type_map[pkgtype]].dbapi.aux_get(
10364 + pkgspec, metakeys, myrepo=repo)
10365 + writemsg_stdout(''.join('%s\n' % x for x in values), noiselevel=-1)
10366 except KeyError:
10367 print("Package not found: '%s'" % pkgspec, file=sys.stderr)
10368 return 1
10369 @@ -216,8 +218,8 @@ Available keys: %s
10370 """ % ','.join(sorted(x for x in portage.auxdbkeys \
10371 if not x.startswith('UNUSED_')))
10372
10373 -metadata.uses_root = True
10374
10375 +@uses_eroot
10376 def contents(argv):
10377 """<eroot> <category/package>
10378 List the files that are installed for a given package, with
10379 @@ -238,8 +240,9 @@ def contents(argv):
10380 treetype="vartree", vartree=vartree)
10381 writemsg_stdout(''.join('%s\n' % x for x in sorted(db.getcontents())),
10382 noiselevel=-1)
10383 -contents.uses_root = True
10384
10385 +
10386 +@uses_eroot
10387 def owners(argv):
10388 """<eroot> [<filename>]+
10389 Given a list of files, print the packages that own the files and which
10390 @@ -253,7 +256,6 @@ def owners(argv):
10391 sys.stderr.flush()
10392 return 2
10393
10394 - from portage import catsplit, dblink
10395 eroot = argv[0]
10396 vardb = portage.db[eroot]["vartree"].dbapi
10397 root = portage.settings['ROOT']
10398 @@ -319,8 +321,8 @@ def owners(argv):
10399 return 0
10400 return 1
10401
10402 -owners.uses_root = True
10403
10404 +@uses_eroot
10405 def is_protected(argv):
10406 """<eroot> <filename>
10407 Given a single filename, return code 0 if it's protected, 1 otherwise.
10408 @@ -366,8 +368,8 @@ def is_protected(argv):
10409 return 0
10410 return 1
10411
10412 -is_protected.uses_root = True
10413
10414 +@uses_eroot
10415 def filter_protected(argv):
10416 """<eroot>
10417 Read filenames from stdin and write them to stdout if they are protected.
10418 @@ -395,7 +397,6 @@ def filter_protected(argv):
10419 settings.get("CONFIG_PROTECT_MASK", ""))
10420 protect_obj = ConfigProtect(root, protect, protect_mask)
10421
10422 - protected = 0
10423 errors = 0
10424
10425 for line in sys.stdin:
10426 @@ -417,7 +418,6 @@ def filter_protected(argv):
10427 continue
10428
10429 if protect_obj.isprotected(f):
10430 - protected += 1
10431 out.write("%s\n" % filename)
10432 out.flush()
10433
10434 @@ -426,8 +426,8 @@ def filter_protected(argv):
10435
10436 return 0
10437
10438 -filter_protected.uses_root = True
10439
10440 +@uses_eroot
10441 def best_visible(argv):
10442 """<eroot> [pkgtype] <atom>
10443 Returns category/package-version (without .ebuild).
10444 @@ -465,8 +465,7 @@ def best_visible(argv):
10445 noiselevel=-1)
10446 return 2
10447
10448 - root_config = RootConfig(portage.settings,
10449 - portage.db[eroot], None)
10450 + root_config = RootConfig(portage.settings, portage.db[eroot], None)
10451
10452 if hasattr(db, "xmatch"):
10453 cpv_list = db.xmatch("match-all-cpv-only", atom)
10454 @@ -508,11 +507,11 @@ def best_visible(argv):
10455 writemsg_stdout("\n", noiselevel=-1)
10456
10457 return 1
10458 -best_visible.uses_root = True
10459
10460
10461 +@uses_eroot
10462 def mass_best_visible(argv):
10463 - """<root> [<type>] [<category/package>]+
10464 + """<eroot> [<type>] [<category/package>]+
10465 Returns category/package-version (without .ebuild).
10466 The pkgtype argument defaults to "ebuild" if unspecified,
10467 otherwise it must be one of ebuild, binary, or installed.
10468 @@ -535,9 +534,9 @@ def mass_best_visible(argv):
10469 best_visible([root, pkgtype, pack])
10470 except KeyError:
10471 return 1
10472 -mass_best_visible.uses_root = True
10473
10474
10475 +@uses_eroot
10476 def all_best_visible(argv):
10477 """<eroot>
10478 Returns all best_visible packages (without .ebuild).
10479 @@ -552,9 +551,9 @@ def all_best_visible(argv):
10480 mybest=portage.best(portage.db[argv[0]]["porttree"].dbapi.match(pkg))
10481 if mybest:
10482 print(mybest)
10483 -all_best_visible.uses_root = True
10484
10485
10486 +@uses_eroot
10487 def match(argv):
10488 """<eroot> <atom>
10489 Returns a \\n separated list of category/package-version.
10490 @@ -601,8 +600,9 @@ def match(argv):
10491 results = vardb.match(atom)
10492 for cpv in results:
10493 print(cpv)
10494 -match.uses_root = True
10495
10496 +
10497 +@uses_eroot
10498 def expand_virtual(argv):
10499 """<eroot> <atom>
10500 Returns a \\n separated list of atoms expanded from a
10501 @@ -637,9 +637,8 @@ def expand_virtual(argv):
10502
10503 return os.EX_OK
10504
10505 -expand_virtual.uses_root = True
10506
10507 -def vdb_path(argv):
10508 +def vdb_path(_argv):
10509 """
10510 Returns the path used for the var(installed) package database for the
10511 set environment/configuration options.
10512 @@ -649,56 +648,79 @@ def vdb_path(argv):
10513 out.flush()
10514 return os.EX_OK
10515
10516 -def gentoo_mirrors(argv):
10517 +def gentoo_mirrors(_argv):
10518 """
10519 Returns the mirrors set to use in the portage configuration.
10520 """
10521 print(portage.settings["GENTOO_MIRRORS"])
10522
10523
10524 -def portdir(argv):
10525 +@uses_eroot
10526 +def repositories_configuration(argv):
10527 + """<eroot>
10528 + Returns the configuration of repositories.
10529 + """
10530 + if len(argv) < 1:
10531 + print("ERROR: insufficient parameters!", file=sys.stderr)
10532 + return 3
10533 + sys.stdout.write(portage.db[argv[0]]["vartree"].settings.repositories.config_string())
10534 + sys.stdout.flush()
10535 +
10536 +@uses_eroot
10537 +def repos_config(argv):
10538 + """
10539 + <eroot>
10540 + This is an alias for the repositories_configuration command.
10541 + """
10542 + return repositories_configuration(argv)
10543 +
10544 +def portdir(_argv):
10545 """
10546 Returns the PORTDIR path.
10547 + Deprecated in favor of repositories_configuration command.
10548 """
10549 + print("WARNING: 'portageq portdir' is deprecated. Use 'portageq repositories_configuration' instead.", file=sys.stderr)
10550 print(portage.settings["PORTDIR"])
10551
10552
10553 -def config_protect(argv):
10554 +def config_protect(_argv):
10555 """
10556 Returns the CONFIG_PROTECT paths.
10557 """
10558 print(portage.settings["CONFIG_PROTECT"])
10559
10560
10561 -def config_protect_mask(argv):
10562 +def config_protect_mask(_argv):
10563 """
10564 Returns the CONFIG_PROTECT_MASK paths.
10565 """
10566 print(portage.settings["CONFIG_PROTECT_MASK"])
10567
10568
10569 -def portdir_overlay(argv):
10570 +def portdir_overlay(_argv):
10571 """
10572 Returns the PORTDIR_OVERLAY path.
10573 + Deprecated in favor of repositories_configuration command.
10574 """
10575 + print("WARNING: 'portageq portdir_overlay' is deprecated. Use 'portageq repositories_configuration' instead.", file=sys.stderr)
10576 print(portage.settings["PORTDIR_OVERLAY"])
10577
10578
10579 -def pkgdir(argv):
10580 +def pkgdir(_argv):
10581 """
10582 Returns the PKGDIR path.
10583 """
10584 print(portage.settings["PKGDIR"])
10585
10586
10587 -def distdir(argv):
10588 +def distdir(_argv):
10589 """
10590 Returns the DISTDIR path.
10591 """
10592 print(portage.settings["DISTDIR"])
10593
10594
10595 -def colormap(argv):
10596 +def colormap(_argv):
10597 """
10598 Display the color.map as environment variables.
10599 """
10600 @@ -719,11 +741,15 @@ def envvar(argv):
10601 return 2
10602
10603 for arg in argv:
10604 + if arg in ("PORTDIR", "PORTDIR_OVERLAY", "SYNC"):
10605 + print("WARNING: 'portageq envvar %s' is deprecated. Use 'portageq repositories_configuration' instead." % arg, file=sys.stderr)
10606 if verbose:
10607 - print(arg +"='"+ portage.settings[arg] +"'")
10608 + print(arg + "=" + portage._shell_quote(portage.settings[arg]))
10609 else:
10610 print(portage.settings[arg])
10611
10612 +
10613 +@uses_eroot
10614 def get_repos(argv):
10615 """<eroot>
10616 Returns all repos with names (repo_name file) argv[0] = $EROOT
10617 @@ -731,25 +757,137 @@ def get_repos(argv):
10618 if len(argv) < 1:
10619 print("ERROR: insufficient parameters!")
10620 return 2
10621 - print(" ".join(portage.db[argv[0]]["porttree"].dbapi.getRepositories()))
10622 + print(" ".join(reversed(portage.db[argv[0]]["vartree"].settings.repositories.prepos_order)))
10623 +
10624 +
10625 +@uses_eroot
10626 +def master_repositories(argv):
10627 + """<eroot> <repo_id>+
10628 + Returns space-separated list of master repositories for specified repository.
10629 + """
10630 + if len(argv) < 2:
10631 + print("ERROR: insufficient parameters!", file=sys.stderr)
10632 + return 3
10633 + for arg in argv[1:]:
10634 + if portage.dep._repo_name_re.match(arg) is None:
10635 + print("ERROR: invalid repository: %s" % arg, file=sys.stderr)
10636 + return 2
10637 + try:
10638 + repo = portage.db[argv[0]]["vartree"].settings.repositories[arg]
10639 + except KeyError:
10640 + print("")
10641 + return 1
10642 + else:
10643 + print(" ".join(x.name for x in repo.masters))
10644
10645 -get_repos.uses_root = True
10646 +@uses_eroot
10647 +def master_repos(argv):
10648 + """<eroot> <repo_id>+
10649 + This is an alias for the master_repositories command.
10650 + """
10651 + return master_repositories(argv)
10652
10653 +@uses_eroot
10654 def get_repo_path(argv):
10655 """<eroot> <repo_id>+
10656 Returns the path to the repo named argv[1], argv[0] = $EROOT
10657 """
10658 if len(argv) < 2:
10659 - print("ERROR: insufficient parameters!")
10660 - return 2
10661 + print("ERROR: insufficient parameters!", file=sys.stderr)
10662 + return 3
10663 for arg in argv[1:]:
10664 - path = portage.db[argv[0]]["porttree"].dbapi.getRepositoryPath(arg)
10665 + if portage.dep._repo_name_re.match(arg) is None:
10666 + print("ERROR: invalid repository: %s" % arg, file=sys.stderr)
10667 + return 2
10668 + path = portage.db[argv[0]]["vartree"].settings.repositories.treemap.get(arg)
10669 if path is None:
10670 - path = ""
10671 + print("")
10672 + return 1
10673 print(path)
10674
10675 -get_repo_path.uses_root = True
10676
10677 +@uses_eroot
10678 +def available_eclasses(argv):
10679 + """<eroot> <repo_id>+
10680 + Returns space-separated list of available eclasses for specified repository.
10681 + """
10682 + if len(argv) < 2:
10683 + print("ERROR: insufficient parameters!", file=sys.stderr)
10684 + return 3
10685 + for arg in argv[1:]:
10686 + if portage.dep._repo_name_re.match(arg) is None:
10687 + print("ERROR: invalid repository: %s" % arg, file=sys.stderr)
10688 + return 2
10689 + try:
10690 + repo = portage.db[argv[0]]["vartree"].settings.repositories[arg]
10691 + except KeyError:
10692 + print("")
10693 + return 1
10694 + else:
10695 + print(" ".join(sorted(repo.eclass_db.eclasses)))
10696 +
10697 +
10698 +@uses_eroot
10699 +def eclass_path(argv):
10700 + """<eroot> <repo_id> <eclass>+
10701 + Returns the path to specified eclass for specified repository.
10702 + """
10703 + if len(argv) < 3:
10704 + print("ERROR: insufficient parameters!", file=sys.stderr)
10705 + return 3
10706 + if portage.dep._repo_name_re.match(argv[1]) is None:
10707 + print("ERROR: invalid repository: %s" % argv[1], file=sys.stderr)
10708 + return 2
10709 + try:
10710 + repo = portage.db[argv[0]]["vartree"].settings.repositories[argv[1]]
10711 + except KeyError:
10712 + print("")
10713 + return 1
10714 + else:
10715 + retval = 0
10716 + for arg in argv[2:]:
10717 + try:
10718 + eclass = repo.eclass_db.eclasses[arg]
10719 + except KeyError:
10720 + print("")
10721 + retval = 1
10722 + else:
10723 + print(eclass.location)
10724 + return retval
10725 +
10726 +
10727 +@uses_eroot
10728 +def license_path(argv):
10729 + """<eroot> <repo_id> <license>+
10730 + Returns the path to specified license for specified repository.
10731 + """
10732 + if len(argv) < 3:
10733 + print("ERROR: insufficient parameters!", file=sys.stderr)
10734 + return 3
10735 + if portage.dep._repo_name_re.match(argv[1]) is None:
10736 + print("ERROR: invalid repository: %s" % argv[1], file=sys.stderr)
10737 + return 2
10738 + try:
10739 + repo = portage.db[argv[0]]["vartree"].settings.repositories[argv[1]]
10740 + except KeyError:
10741 + print("")
10742 + return 1
10743 + else:
10744 + retval = 0
10745 + for arg in argv[2:]:
10746 + eclass_path = ""
10747 + paths = reversed([os.path.join(x.location, 'licenses', arg) for x in list(repo.masters) + [repo]])
10748 + for path in paths:
10749 + if os.path.exists(path):
10750 + eclass_path = path
10751 + break
10752 + if eclass_path == "":
10753 + retval = 1
10754 + print(eclass_path)
10755 + return retval
10756 +
10757 +
10758 +@uses_eroot
10759 def list_preserved_libs(argv):
10760 """<eroot>
10761 Print a list of libraries preserved during a package update in the form
10762 @@ -771,21 +909,296 @@ def list_preserved_libs(argv):
10763 msg.append('\n')
10764 writemsg_stdout(''.join(msg), noiselevel=-1)
10765 return rValue
10766 -list_preserved_libs.uses_root = True
10767 +
10768 +
10769 +class MaintainerEmailMatcher(object):
10770 + def __init__(self, maintainer_emails):
10771 + self._re = re.compile("^(%s)$" % "|".join(maintainer_emails))
10772 +
10773 + def __call__(self, metadata_xml):
10774 + match = False
10775 + matcher = self._re.match
10776 + for x in metadata_xml.maintainers():
10777 + if x.email is not None and matcher(x.email) is not None:
10778 + match = True
10779 + break
10780 + return match
10781 +
10782 +class HerdMatcher(object):
10783 + def __init__(self, herds):
10784 + self._herds = frozenset(herds)
10785 +
10786 + def __call__(self, metadata_xml):
10787 + herds = self._herds
10788 + return any(x in herds for x in metadata_xml.herds())
10789 +
10790 +
10791 +def pquery(parser, opts, args):
10792 + """[options] [atom]+
10793 + Emulates a subset of Pkgcore's pquery tool.
10794 + """
10795 +
10796 + portdb = portage.db[portage.root]['porttree'].dbapi
10797 + root_config = RootConfig(portdb.settings,
10798 + portage.db[portage.root], None)
10799 +
10800 + def _pkg(cpv, repo_name):
10801 + try:
10802 + metadata = dict(zip(
10803 + Package.metadata_keys,
10804 + portdb.aux_get(cpv,
10805 + Package.metadata_keys,
10806 + myrepo=repo_name)))
10807 + except KeyError:
10808 + raise portage.exception.PackageNotFound(cpv)
10809 + return Package(built=False, cpv=cpv,
10810 + installed=False, metadata=metadata,
10811 + root_config=root_config,
10812 + type_name="ebuild")
10813 +
10814 + need_metadata = False
10815 + atoms = []
10816 + for arg in args:
10817 + if "/" not in arg.split(":")[0]:
10818 + atom = insert_category_into_atom(arg, '*')
10819 + if atom is None:
10820 + writemsg("ERROR: Invalid atom: '%s'\n" % arg,
10821 + noiselevel=-1)
10822 + return 2
10823 + else:
10824 + atom = arg
10825 +
10826 + try:
10827 + atom = portage.dep.Atom(atom, allow_wildcard=True, allow_repo=True)
10828 + except portage.exception.InvalidAtom:
10829 + writemsg("ERROR: Invalid atom: '%s'\n" % arg,
10830 + noiselevel=-1)
10831 + return 2
10832 +
10833 + if atom.slot is not None:
10834 + need_metadata = True
10835 +
10836 + atoms.append(atom)
10837 +
10838 + if "*/*" in atoms:
10839 + del atoms[:]
10840 + need_metadata = False
10841 +
10842 + if not opts.no_filters:
10843 + need_metadata = True
10844 +
10845 + xml_matchers = []
10846 + if opts.maintainer_email:
10847 + maintainer_emails = []
10848 + for x in opts.maintainer_email:
10849 + maintainer_emails.extend(x.split(","))
10850 + xml_matchers.append(MaintainerEmailMatcher(maintainer_emails))
10851 + if opts.herd is not None:
10852 + herds = []
10853 + for x in opts.herd:
10854 + herds.extend(x.split(","))
10855 + xml_matchers.append(HerdMatcher(herds))
10856 +
10857 + repos = []
10858 + if opts.all_repos:
10859 + repos.extend(portdb.repositories.get_repo_for_location(location)
10860 + for location in portdb.porttrees)
10861 + elif opts.repo is not None:
10862 + repos.append(portdb.repositories[opts.repo])
10863 + else:
10864 + repos.append(portdb.repositories.mainRepo())
10865 +
10866 + if not atoms:
10867 + names = None
10868 + categories = list(portdb.categories)
10869 + else:
10870 + category_wildcard = False
10871 + name_wildcard = False
10872 + categories = []
10873 + names = []
10874 + for atom in atoms:
10875 + category, name = portage.catsplit(atom.cp)
10876 + categories.append(category)
10877 + names.append(name)
10878 + if "*" in category:
10879 + category_wildcard = True
10880 + if "*" in name:
10881 + name_wildcard = True
10882 +
10883 + if category_wildcard:
10884 + categories = list(portdb.categories)
10885 + else:
10886 + categories = list(set(categories))
10887 +
10888 + if name_wildcard:
10889 + names = None
10890 + else:
10891 + names = sorted(set(names))
10892 +
10893 + no_version = opts.no_version
10894 + categories.sort()
10895 +
10896 + for category in categories:
10897 + if names is None:
10898 + cp_list = portdb.cp_all(categories=(category,))
10899 + else:
10900 + cp_list = [category + "/" + name for name in names]
10901 + for cp in cp_list:
10902 + matches = []
10903 + for repo in repos:
10904 + match = True
10905 + if xml_matchers:
10906 + metadata_xml_path = os.path.join(
10907 + repo.location, cp, 'metadata.xml')
10908 + try:
10909 + metadata_xml = MetaDataXML(metadata_xml_path, None)
10910 + except (EnvironmentError, SyntaxError):
10911 + match = False
10912 + else:
10913 + for matcher in xml_matchers:
10914 + if not matcher(metadata_xml):
10915 + match = False
10916 + break
10917 + if not match:
10918 + continue
10919 + cpv_list = portdb.cp_list(cp, mytree=[repo.location])
10920 + if atoms:
10921 + for cpv in cpv_list:
10922 + pkg = None
10923 + for atom in atoms:
10924 + if atom.repo is not None and \
10925 + atom.repo != repo.name:
10926 + continue
10927 + if not portage.match_from_list(atom, [cpv]):
10928 + continue
10929 + if need_metadata:
10930 + if pkg is None:
10931 + try:
10932 + pkg = _pkg(cpv, repo.name)
10933 + except portage.exception.PackageNotFound:
10934 + continue
10935 +
10936 + if not (opts.no_filters or pkg.visible):
10937 + continue
10938 + if not portage.match_from_list(atom, [pkg]):
10939 + continue
10940 + matches.append(cpv)
10941 + break
10942 + if no_version and matches:
10943 + break
10944 + elif opts.no_filters:
10945 + matches.extend(cpv_list)
10946 + else:
10947 + for cpv in cpv_list:
10948 + try:
10949 + pkg = _pkg(cpv, repo.name)
10950 + except portage.exception.PackageNotFound:
10951 + continue
10952 + else:
10953 + if pkg.visible:
10954 + matches.append(cpv)
10955 + if no_version:
10956 + break
10957 +
10958 + if no_version and matches:
10959 + break
10960 +
10961 + if not matches:
10962 + continue
10963 +
10964 + if no_version:
10965 + writemsg_stdout("%s\n" % (cp,), noiselevel=-1)
10966 + else:
10967 + matches = list(set(matches))
10968 + portdb._cpv_sort_ascending(matches)
10969 + for cpv in matches:
10970 + writemsg_stdout("%s\n" % (cpv,), noiselevel=-1)
10971 +
10972 + return os.EX_OK
10973 +
10974
10975 #-----------------------------------------------------------------------------
10976 #
10977 # DO NOT CHANGE CODE BEYOND THIS POINT - IT'S NOT NEEDED!
10978 #
10979
10980 -if not portage.const._ENABLE_PRESERVE_LIBS:
10981 - del list_preserved_libs
10982 -
10983 -non_commands = frozenset(['elog', 'eval_atom_use',
10984 - 'exithandler', 'expand_new_virt', 'main',
10985 - 'usage', 'writemsg', 'writemsg_stdout'])
10986 +non_commands = frozenset(['elog', 'eval_atom_use', 'exithandler', 'main', 'usage', 'uses_eroot'])
10987 commands = sorted(k for k, v in globals().items() \
10988 - if k not in non_commands and isinstance(v, types.FunctionType))
10989 + if k not in non_commands and isinstance(v, types.FunctionType) and v.__module__ == "__main__")
10990 +
10991 +
10992 +def add_pquery_arguments(parser):
10993 + pquery_option_groups = (
10994 + (
10995 + 'Repository matching options',
10996 + (
10997 + {
10998 + "longopt": "--no-filters",
10999 + "action": "store_true",
11000 + "help": "no visibility filters (ACCEPT_KEYWORDS, package masking, etc)"
11001 + },
11002 + {
11003 + "longopt": "--repo",
11004 + "help": "repo to use (default is PORTDIR if omitted)"
11005 + },
11006 + {
11007 + "longopt": "--all-repos",
11008 + "help": "search all repos"
11009 + }
11010 + )
11011 + ),
11012 + (
11013 + 'Package matching options',
11014 + (
11015 + {
11016 + "longopt": "--herd",
11017 + "action": "append",
11018 + "help": "exact match on a herd"
11019 + },
11020 + {
11021 + "longopt": "--maintainer-email",
11022 + "action": "append",
11023 + "help": "comma-separated list of maintainer email regexes to search for"
11024 + }
11025 + )
11026 + ),
11027 + (
11028 + 'Output formatting',
11029 + (
11030 + {
11031 + "shortopt": "-n",
11032 + "longopt": "--no-version",
11033 + "action": "store_true",
11034 + "help": "collapse multiple matching versions together"
11035 + },
11036 + )
11037 + ),
11038 + )
11039 +
11040 + for group_title, opt_data in pquery_option_groups:
11041 + arg_group = parser.add_argument_group(group_title)
11042 + for opt_info in opt_data:
11043 + pargs = []
11044 + try:
11045 + pargs.append(opt_info["shortopt"])
11046 + except KeyError:
11047 + pass
11048 + try:
11049 + pargs.append(opt_info["longopt"])
11050 + except KeyError:
11051 + pass
11052 +
11053 + kwargs = {}
11054 + try:
11055 + kwargs["action"] = opt_info["action"]
11056 + except KeyError:
11057 + pass
11058 + try:
11059 + kwargs["help"] = opt_info["help"]
11060 + except KeyError:
11061 + pass
11062 + arg_group.add_argument(*pargs, **portage._native_kwargs(kwargs))
11063 +
11064
11065 def usage(argv):
11066 print(">>> Portage information query tool")
11067 @@ -798,7 +1211,7 @@ def usage(argv):
11068 # Show our commands -- we do this by scanning the functions in this
11069 # file, and formatting each functions documentation.
11070 #
11071 - help_mode = '--help' in sys.argv
11072 + help_mode = '--help' in argv
11073 for name in commands:
11074 # Drop non-functions
11075 obj = globals()[name]
11076 @@ -812,12 +1225,21 @@ def usage(argv):
11077
11078 lines = doc.lstrip("\n").split("\n")
11079 print(" " + name + " " + lines[0].strip())
11080 - if (len(sys.argv) > 1):
11081 + if len(argv) > 1:
11082 if (not help_mode):
11083 lines = lines[:-1]
11084 for line in lines[1:]:
11085 print(" " + line.strip())
11086 - if (len(sys.argv) == 1):
11087 +
11088 + print()
11089 + print('Pkgcore pquery compatible options:')
11090 + print()
11091 + parser = ArgumentParser(add_help=False,
11092 + usage='portageq pquery [options] [atom ...]')
11093 + add_pquery_arguments(parser)
11094 + parser.print_help()
11095 +
11096 + if len(argv) == 1:
11097 print("\nRun portageq with --help for info")
11098
11099 atom_validate_strict = "EBUILD_PHASE" in os.environ
11100 @@ -836,52 +1258,84 @@ else:
11101 def elog(elog_funcname, lines):
11102 pass
11103
11104 -def main():
11105 +def main(argv):
11106 +
11107 + argv = portage._decode_argv(argv)
11108
11109 nocolor = os.environ.get('NOCOLOR')
11110 if nocolor in ('yes', 'true'):
11111 portage.output.nocolor()
11112
11113 - if len(sys.argv) < 2:
11114 - usage(sys.argv)
11115 - sys.exit(os.EX_USAGE)
11116 + parser = ArgumentParser(add_help=False)
11117
11118 - for x in sys.argv:
11119 - if x in ("-h", "--help"):
11120 - usage(sys.argv)
11121 - sys.exit(os.EX_OK)
11122 - elif x == "--version":
11123 - print("Portage", portage.VERSION)
11124 - sys.exit(os.EX_OK)
11125 -
11126 - cmd = sys.argv[1]
11127 - function = globals().get(cmd)
11128 - if function is None or cmd not in commands:
11129 - usage(sys.argv)
11130 + # used by envvar
11131 + parser.add_argument("-v", dest="verbose", action="store_true")
11132 +
11133 + actions = parser.add_argument_group('Actions')
11134 + actions.add_argument("-h", "--help", action="store_true")
11135 + actions.add_argument("--version", action="store_true")
11136 +
11137 + add_pquery_arguments(parser)
11138 +
11139 + opts, args = parser.parse_known_args(argv[1:])
11140 +
11141 + if opts.help:
11142 + usage(argv)
11143 + return os.EX_OK
11144 + elif opts.version:
11145 + print("Portage", portage.VERSION)
11146 + return os.EX_OK
11147 +
11148 + cmd = None
11149 + if args and args[0] in commands:
11150 + cmd = args[0]
11151 +
11152 + if cmd == 'pquery':
11153 + cmd = None
11154 + args = args[1:]
11155 +
11156 + if cmd is None:
11157 + return pquery(parser, opts, args)
11158 +
11159 + if opts.verbose:
11160 + # used by envvar
11161 + args.append("-v")
11162 +
11163 + argv = argv[:1] + args
11164 +
11165 + if len(argv) < 2:
11166 + usage(argv)
11167 sys.exit(os.EX_USAGE)
11168 +
11169 function = globals()[cmd]
11170 - uses_root = getattr(function, "uses_root", False) and len(sys.argv) > 2
11171 - if uses_root:
11172 - if not os.path.isdir(sys.argv[2]):
11173 - sys.stderr.write("Not a directory: '%s'\n" % sys.argv[2])
11174 + uses_eroot = getattr(function, "uses_eroot", False) and len(argv) > 2
11175 + if uses_eroot:
11176 + if not os.path.isdir(argv[2]):
11177 + sys.stderr.write("Not a directory: '%s'\n" % argv[2])
11178 sys.stderr.write("Run portageq with --help for info\n")
11179 sys.stderr.flush()
11180 sys.exit(os.EX_USAGE)
11181 - eprefix = portage.const.EPREFIX
11182 - eroot = portage.util.normalize_path(sys.argv[2])
11183 + eprefix = portage.settings["EPREFIX"]
11184 + eroot = portage.util.normalize_path(argv[2])
11185 +
11186 if eprefix:
11187 - root = eroot[:1-len(eprefix)]
11188 + if not eroot.endswith(eprefix):
11189 + sys.stderr.write("ERROR: This version of portageq"
11190 + " only supports <eroot>s ending in"
11191 + " '%s'. The provided <eroot>, '%s',"
11192 + " doesn't.\n" % (eprefix, eroot))
11193 + sys.stderr.flush()
11194 + sys.exit(os.EX_USAGE)
11195 + root = eroot[:1 - len(eprefix)]
11196 else:
11197 root = eroot
11198 +
11199 os.environ["ROOT"] = root
11200
11201 - args = sys.argv[2:]
11202 - if args and isinstance(args[0], bytes):
11203 - for i in range(len(args)):
11204 - args[i] = portage._unicode_decode(args[i])
11205 + args = argv[2:]
11206
11207 try:
11208 - if uses_root:
11209 + if uses_eroot:
11210 args[0] = portage.settings['EROOT']
11211 retval = function(args)
11212 if retval:
11213 @@ -902,6 +1356,7 @@ def main():
11214 portage.writemsg("\nPlease use a more specific atom.\n", noiselevel=-1)
11215 sys.exit(1)
11216
11217 -main()
11218 +if __name__ == '__main__':
11219 + sys.exit(main(sys.argv))
11220
11221 #-----------------------------------------------------------------------------
11222
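
The portageq side of the same API can also be exercised directly from a shell; a rough sketch (the repository names, eclass and license arguments, and an empty EPREFIX are assumptions):

    portageq repositories_configuration /        # repos.conf-style dump of all repos
    portageq get_repo_path / gentoo              # filesystem location of a repo
    portageq master_repositories / my-overlay    # masters of an overlay, if any
    portageq eclass_path / gentoo eutils         # where an eclass resolves from
    portageq license_path / gentoo GPL-2         # where a license file resolves from
    portageq envvar -v PORTDIR                   # still works, but now warns that
                                                 # repositories_configuration is preferred

    # Pkgcore-compatible pquery mode, routed to the new pquery() function:
    portageq pquery --repo gentoo --maintainer-email '.*@gentoo\.org' -n 'app-editors/*'
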
11223 diff --git a/bin/quickpkg b/bin/quickpkg
11224 index 76259c5..90277ad 100755
11225 --- a/bin/quickpkg
11226 +++ b/bin/quickpkg
11227 @@ -1,23 +1,20 @@
11228 -#!/usr/bin/python
11229 -# Copyright 1999-2012 Gentoo Foundation
11230 +#!/usr/bin/python -b
11231 +# Copyright 1999-2014 Gentoo Foundation
11232 # Distributed under the terms of the GNU General Public License v2
11233
11234 from __future__ import print_function
11235
11236 import errno
11237 import math
11238 -import optparse
11239 import signal
11240 import sys
11241 import tarfile
11242
11243 -try:
11244 - import portage
11245 -except ImportError:
11246 - from os import path as osp
11247 - sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym"))
11248 - import portage
11249 -
11250 +from os import path as osp
11251 +pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")
11252 +sys.path.insert(0, pym_path)
11253 +import portage
11254 +portage._internal_caller = True
11255 from portage import os
11256 from portage import xpak
11257 from portage.dbapi.dep_expand import dep_expand
11258 @@ -28,6 +25,7 @@ from portage.util import ConfigProtect, ensure_dirs, shlex_split
11259 from portage.dbapi.vartree import dblink, tar_contents
11260 from portage.checksum import perform_md5
11261 from portage._sets import load_default_config, SETPREFIX
11262 +from portage.util._argparse import ArgumentParser
11263
11264 def quickpkg_atom(options, infos, arg, eout):
11265 settings = portage.settings
11266 @@ -291,29 +289,28 @@ def quickpkg_main(options, args, eout):
11267
11268 if __name__ == "__main__":
11269 usage = "quickpkg [options] <list of package atoms or package sets>"
11270 - parser = optparse.OptionParser(usage=usage)
11271 - parser.add_option("--umask",
11272 + parser = ArgumentParser(usage=usage)
11273 + parser.add_argument("--umask",
11274 default="0077",
11275 help="umask used during package creation (default is 0077)")
11276 - parser.add_option("--ignore-default-opts",
11277 + parser.add_argument("--ignore-default-opts",
11278 action="store_true",
11279 help="do not use the QUICKPKG_DEFAULT_OPTS environment variable")
11280 - parser.add_option("--include-config",
11281 - type="choice",
11282 + parser.add_argument("--include-config",
11283 choices=["y","n"],
11284 default="n",
11285 metavar="<y|n>",
11286 help="include all files protected by CONFIG_PROTECT (as a security precaution, default is 'n')")
11287 - parser.add_option("--include-unmodified-config",
11288 - type="choice",
11289 + parser.add_argument("--include-unmodified-config",
11290 choices=["y","n"],
11291 default="n",
11292 metavar="<y|n>",
11293 help="include files protected by CONFIG_PROTECT that have not been modified since installation (as a security precaution, default is 'n')")
11294 - options, args = parser.parse_args(sys.argv[1:])
11295 + options, args = parser.parse_known_args(sys.argv[1:])
11296 if not options.ignore_default_opts:
11297 - default_opts = portage.settings.get("QUICKPKG_DEFAULT_OPTS","").split()
11298 - options, args = parser.parse_args(default_opts + sys.argv[1:])
11299 + default_opts = shlex_split(
11300 + portage.settings.get("QUICKPKG_DEFAULT_OPTS", ""))
11301 + options, args = parser.parse_known_args(default_opts + sys.argv[1:])
11302 if not args:
11303 parser.error("no packages atoms given")
11304 try:
11305
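
With quickpkg now parsing options through ArgumentParser and tokenizing QUICKPKG_DEFAULT_OPTS with shlex_split, quoted default values survive splitting; a small sketch under that assumption (the make.conf value and package atom are illustrative, not from this commit):

    # make.conf
    QUICKPKG_DEFAULT_OPTS="--umask 0022 --include-unmodified-config y"

    # command line; atoms and sets are accepted per the usage string above
    quickpkg app-editors/nano
    quickpkg --include-config y @world
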
11306 diff --git a/bin/regenworld b/bin/regenworld
11307 index 3199fdf..32e8e5c 100755
11308 --- a/bin/regenworld
11309 +++ b/bin/regenworld
11310 @@ -1,17 +1,15 @@
11311 -#!/usr/bin/python
11312 -# Copyright 1999-2011 Gentoo Foundation
11313 +#!/usr/bin/python -b
11314 +# Copyright 1999-2014 Gentoo Foundation
11315 # Distributed under the terms of the GNU General Public License v2
11316
11317 from __future__ import print_function
11318
11319 import sys
11320 -try:
11321 - import portage
11322 -except ImportError:
11323 - from os import path as osp
11324 - sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym"))
11325 - import portage
11326 -
11327 +from os import path as osp
11328 +pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")
11329 +sys.path.insert(0, pym_path)
11330 +import portage
11331 +portage._internal_caller = True
11332 from portage import os
11333 from portage._sets.files import StaticFileSet, WorldSelectedSet
11334
11335
11336 diff --git a/bin/repoman b/bin/repoman
11337 index 795c7ce..888892b 100755
11338 --- a/bin/repoman
11339 +++ b/bin/repoman
11340 @@ -1,20 +1,19 @@
11341 -#!/usr/bin/python -O
11342 -# Copyright 1999-2012 Gentoo Foundation
11343 +#!/usr/bin/python -bO
11344 +# Copyright 1999-2014 Gentoo Foundation
11345 # Distributed under the terms of the GNU General Public License v2
11346
11347 # Next to do: dep syntax checking in mask files
11348 # Then, check to make sure deps are satisfiable (to avoid "can't find match for" problems)
11349 # that last one is tricky because multiple profiles need to be checked.
11350
11351 -from __future__ import print_function
11352 +from __future__ import print_function, unicode_literals
11353
11354 -import calendar
11355 +import codecs
11356 import copy
11357 import errno
11358 import formatter
11359 import io
11360 import logging
11361 -import optparse
11362 import re
11363 import signal
11364 import stat
11365 @@ -24,23 +23,20 @@ import tempfile
11366 import textwrap
11367 import time
11368 import platform
11369 -
11370 -try:
11371 - from urllib.request import urlopen as urllib_request_urlopen
11372 -except ImportError:
11373 - from urllib import urlopen as urllib_request_urlopen
11374 -
11375 from itertools import chain
11376 from stat import S_ISDIR
11377
11378 try:
11379 - import portage
11380 + from urllib.parse import urlparse
11381 except ImportError:
11382 - from os import path as osp
11383 - sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym"))
11384 - import portage
11385 + from urlparse import urlparse
11386 +
11387 +from os import path as osp
11388 +pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")
11389 +sys.path.insert(0, pym_path)
11390 +import portage
11391 +portage._internal_caller = True
11392 portage._disable_legacy_globals()
11393 -portage.dep._internal_warnings = True
11394
11395 try:
11396 import xml.etree.ElementTree
11397 @@ -58,9 +54,9 @@ except (ImportError, SystemError, RuntimeError, Exception):
11398 sys.exit(1)
11399
11400 from portage import os
11401 -from portage import subprocess_getstatusoutput
11402 from portage import _encodings
11403 from portage import _unicode_encode
11404 +import repoman.checks
11405 from repoman.checks import run_checks
11406 from repoman import utilities
11407 from repoman.herdbase import make_herd_base
11408 @@ -69,18 +65,18 @@ from _emerge.RootConfig import RootConfig
11409 from _emerge.userquery import userquery
11410 import portage.checksum
11411 import portage.const
11412 +import portage.repository.config
11413 from portage import cvstree, normalize_path
11414 from portage import util
11415 -from portage.exception import (FileNotFound, MissingParameter,
11416 +from portage.exception import (FileNotFound, InvalidAtom, MissingParameter,
11417 ParseError, PermissionDenied)
11418 -from portage.manifest import _prohibited_filename_chars_re as \
11419 - disallowed_filename_chars_re
11420 +from portage.dep import Atom
11421 from portage.process import find_binary, spawn
11422 from portage.output import bold, create_color_func, \
11423 green, nocolor, red
11424 from portage.output import ConsoleStyleFile, StyleWriter
11425 from portage.util import writemsg_level
11426 -from portage.util._desktop_entry import validate_desktop_entry
11427 +from portage.util._argparse import ArgumentParser
11428 from portage.package.ebuild.digestgen import digestgen
11429 from portage.eapi import eapi_has_iuse_defaults, eapi_has_required_use
11430
11431 @@ -93,6 +89,7 @@ util.initialize_logger()
11432 max_desc_len = 100
11433 allowed_filename_chars="a-zA-Z0-9._-+:"
11434 pv_toolong_re = re.compile(r'[0-9]{19,}')
11435 +GPG_KEY_ID_REGEX = r'(0x)?([0-9a-fA-F]{8}|[0-9a-fA-F]{16}|[0-9a-fA-F]{24}|[0-9a-fA-F]{32}|[0-9a-fA-F]{40})!?'
11436 bad = create_color_func("BAD")
11437
11438 # A sane umask is needed for files that portage creates.
11439 @@ -116,41 +113,14 @@ def err(txt):
11440 warn(txt)
11441 sys.exit(1)
11442
11443 -def exithandler(signum=None, frame=None):
11444 +def exithandler(signum=None, _frame=None):
11445 logging.fatal("Interrupted; exiting...")
11446 if signum is None:
11447 sys.exit(1)
11448 else:
11449 sys.exit(128 + signum)
11450
11451 -signal.signal(signal.SIGINT,exithandler)
11452 -
11453 -class RepomanHelpFormatter(optparse.IndentedHelpFormatter):
11454 - """Repoman needs it's own HelpFormatter for now, because the default ones
11455 - murder the help text."""
11456 -
11457 - def __init__(self, indent_increment=1, max_help_position=24, width=150, short_first=1):
11458 - optparse.HelpFormatter.__init__(self, indent_increment, max_help_position, width, short_first)
11459 -
11460 - def format_description(self, description):
11461 - return description
11462 -
11463 -class RepomanOptionParser(optparse.OptionParser):
11464 - """Add the on_tail function, ruby has it, optionParser should too
11465 - """
11466 -
11467 - def __init__(self, *args, **kwargs):
11468 - optparse.OptionParser.__init__(self, *args, **kwargs)
11469 - self.tail = ""
11470 -
11471 - def on_tail(self, description):
11472 - self.tail += description
11473 -
11474 - def format_help(self, formatter=None):
11475 - result = optparse.OptionParser.format_help(self, formatter)
11476 - result += self.tail
11477 - return result
11478 -
11479 +signal.signal(signal.SIGINT, exithandler)
11480
11481 def ParseArgs(argv, qahelp):
11482 """This function uses a customized optionParser to parse command line arguments for repoman
11483 @@ -161,8 +131,7 @@ def ParseArgs(argv, qahelp):
11484 (opts, args), just like a call to parser.parse_args()
11485 """
11486
11487 - if argv and isinstance(argv[0], bytes):
11488 - argv = [portage._unicode_decode(x) for x in argv]
11489 + argv = portage._decode_argv(argv)
11490
11491 modes = {
11492 'commit' : 'Run a scan then commit changes',
11493 @@ -172,102 +141,113 @@ def ParseArgs(argv, qahelp):
11494 'help' : 'Show this screen',
11495 'manifest' : 'Generate a Manifest (fetches files if necessary)',
11496 'manifest-check' : 'Check Manifests for missing or incorrect digests',
11497 - 'scan' : 'Scan directory tree for QA issues'
11498 + 'scan' : 'Scan directory tree for QA issues'
11499 + }
11500 +
11501 + output_choices = {
11502 + 'default' : 'The normal output format',
11503 + 'column' : 'Columnar output suitable for use with grep'
11504 }
11505
11506 mode_keys = list(modes)
11507 mode_keys.sort()
11508
11509 - parser = RepomanOptionParser(formatter=RepomanHelpFormatter(), usage="%prog [options] [mode]")
11510 - parser.description = green(" ".join((os.path.basename(argv[0]), "1.2")))
11511 - parser.description += "\nCopyright 1999-2007 Gentoo Foundation"
11512 - parser.description += "\nDistributed under the terms of the GNU General Public License v2"
11513 - parser.description += "\nmodes: " + " | ".join(map(green,mode_keys))
11514 + output_keys = sorted(output_choices)
11515
11516 - parser.add_option('-a', '--ask', dest='ask', action='store_true', default=False,
11517 + parser = ArgumentParser(usage="repoman [options] [mode]",
11518 + description="Modes: %s" % " | ".join(mode_keys),
11519 + epilog="For more help consult the man page.")
11520 +
11521 + parser.add_argument('-a', '--ask', dest='ask', action='store_true', default=False,
11522 help='Request a confirmation before committing')
11523
11524 - parser.add_option('-m', '--commitmsg', dest='commitmsg',
11525 + parser.add_argument('-m', '--commitmsg', dest='commitmsg',
11526 help='specify a commit message on the command line')
11527
11528 - parser.add_option('-M', '--commitmsgfile', dest='commitmsgfile',
11529 + parser.add_argument('-M', '--commitmsgfile', dest='commitmsgfile',
11530 help='specify a path to a file that contains a commit message')
11531
11532 - parser.add_option('--digest',
11533 - type='choice', choices=('y', 'n'), metavar='<y|n>',
11534 + parser.add_argument('--digest',
11535 + choices=('y', 'n'), metavar='<y|n>',
11536 help='Automatically update Manifest digests for modified files')
11537
11538 - parser.add_option('-p', '--pretend', dest='pretend', default=False,
11539 + parser.add_argument('-p', '--pretend', dest='pretend', default=False,
11540 action='store_true', help='don\'t commit or fix anything; just show what would be done')
11541 -
11542 - parser.add_option('-q', '--quiet', dest="quiet", action="count", default=0,
11543 +
11544 + parser.add_argument('-q', '--quiet', dest="quiet", action="count", default=0,
11545 help='do not print unnecessary messages')
11546
11547 - parser.add_option(
11548 - '--echangelog', type='choice', choices=('y', 'n', 'force'), metavar="<y|n|force>",
11549 + parser.add_argument(
11550 + '--echangelog', choices=('y', 'n', 'force'), metavar="<y|n|force>",
11551 help='for commit mode, call echangelog if ChangeLog is unmodified (or '
11552 'regardless of modification if \'force\' is specified)')
11553
11554 - parser.add_option('-f', '--force', dest='force', default=False, action='store_true',
11555 + parser.add_argument('--experimental-inherit', choices=('y', 'n'),
11556 + metavar="<y|n>", default='n',
11557 + help='Enable experimental inherit.missing checks which may misbehave'
11558 + ' when the internal eclass database becomes outdated')
11559 +
11560 + parser.add_argument('-f', '--force', dest='force', default=False, action='store_true',
11561 help='Commit with QA violations')
11562
11563 - parser.add_option('--vcs', dest='vcs',
11564 + parser.add_argument('--vcs', dest='vcs',
11565 help='Force using specific VCS instead of autodetection')
11566
11567 - parser.add_option('-v', '--verbose', dest="verbosity", action='count',
11568 + parser.add_argument('-v', '--verbose', dest="verbosity", action='count',
11569 help='be very verbose in output', default=0)
11570
11571 - parser.add_option('-V', '--version', dest='version', action='store_true',
11572 + parser.add_argument('-V', '--version', dest='version', action='store_true',
11573 help='show version info')
11574
11575 - parser.add_option('-x', '--xmlparse', dest='xml_parse', action='store_true',
11576 + parser.add_argument('-x', '--xmlparse', dest='xml_parse', action='store_true',
11577 default=False, help='forces the metadata.xml parse check to be carried out')
11578
11579 - parser.add_option(
11580 - '--if-modified', type='choice', choices=('y', 'n'), default='n',
11581 + parser.add_argument(
11582 + '--if-modified', choices=('y', 'n'), default='n',
11583 metavar="<y|n>",
11584 help='only check packages that have uncommitted modifications')
11585
11586 - parser.add_option('-i', '--ignore-arches', dest='ignore_arches', action='store_true',
11587 + parser.add_argument('-i', '--ignore-arches', dest='ignore_arches', action='store_true',
11588 default=False, help='ignore arch-specific failures (where arch != host)')
11589
11590 - parser.add_option("--ignore-default-opts",
11591 + parser.add_argument("--ignore-default-opts",
11592 action="store_true",
11593 help="do not use the REPOMAN_DEFAULT_OPTS environment variable")
11594
11595 - parser.add_option('-I', '--ignore-masked', dest='ignore_masked', action='store_true',
11596 + parser.add_argument('-I', '--ignore-masked', dest='ignore_masked', action='store_true',
11597 default=False, help='ignore masked packages (not allowed with commit mode)')
11598
11599 - parser.add_option('-d', '--include-dev', dest='include_dev', action='store_true',
11600 + parser.add_argument('--include-arches', dest='include_arches',
11601 + metavar='ARCHES', action='append',
11602 + help='A space separated list of arches used to '
11603 + 'filter the selection of profiles for dependency checks')
11604 +
11605 + parser.add_argument('-d', '--include-dev', dest='include_dev', action='store_true',
11606 default=False, help='include dev profiles in dependency checks')
11607
11608 - parser.add_option('--unmatched-removal', dest='unmatched_removal', action='store_true',
11609 + parser.add_argument('-e', '--include-exp-profiles', choices=('y', 'n'),
11610 + default=False, help='include exp profiles in dependency checks',
11611 + metavar='<y|n>')
11612 +
11613 + parser.add_argument('--unmatched-removal', dest='unmatched_removal', action='store_true',
11614 default=False, help='enable strict checking of package.mask and package.unmask files for unmatched removal atoms')
11615
11616 - parser.add_option('--without-mask', dest='without_mask', action='store_true',
11617 + parser.add_argument('--without-mask', dest='without_mask', action='store_true',
11618 default=False, help='behave as if no package.mask entries exist (not allowed with commit mode)')
11619
11620 - parser.add_option('--mode', type='choice', dest='mode', choices=list(modes),
11621 - help='specify which mode repoman will run in (default=full)')
11622 -
11623 - parser.on_tail("\n " + green("Modes".ljust(20) + " Description\n"))
11624 + parser.add_argument('--output-style', dest='output_style', choices=output_keys,
11625 + help='select output type', default='default')
11626
11627 - for k in mode_keys:
11628 - parser.on_tail(" %s %s\n" % (k.ljust(20), modes[k]))
11629 -
11630 - parser.on_tail("\n " + green("QA keyword".ljust(20) + " Description\n"))
11631 -
11632 - sorted_qa = list(qahelp)
11633 - sorted_qa.sort()
11634 - for k in sorted_qa:
11635 - parser.on_tail(" %s %s\n" % (k.ljust(20), qahelp[k]))
11636 + parser.add_argument('--mode', dest='mode', choices=mode_keys,
11637 + help='specify which mode repoman will run in (default=full)')
11638
11639 - opts, args = parser.parse_args(argv[1:])
11640 + opts, args = parser.parse_known_args(argv[1:])
11641
11642 if not opts.ignore_default_opts:
11643 - default_opts = repoman_settings.get("REPOMAN_DEFAULT_OPTS", "").split()
11644 + default_opts = portage.util.shlex_split(
11645 + repoman_settings.get("REPOMAN_DEFAULT_OPTS", ""))
11646 if default_opts:
11647 - opts, args = parser.parse_args(default_opts + sys.argv[1:])
11648 + opts, args = parser.parse_known_args(default_opts + sys.argv[1:])
11649
11650 if opts.mode == 'help':
11651 parser.print_help(short=False)
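The removed RepomanOptionParser.on_tail() machinery appended the mode table to the help text after the fact; with argparse the same information fits into description= plus a --mode argument with choices=. A stripped-down sketch with an abridged mode list (the real dict is larger):

    from argparse import ArgumentParser

    modes = {
        'commit': 'Run a scan then commit changes',
        'scan': 'Scan directory tree for QA issues',
        'manifest': 'Generate a Manifest (fetches files if necessary)',
    }
    mode_keys = sorted(modes)

    parser = ArgumentParser(
        usage="repoman [options] [mode]",
        description="Modes: %s" % " | ".join(mode_keys),
        epilog="For more help consult the man page.")
    parser.add_argument('--mode', choices=mode_keys,
        help='specify which mode repoman will run in (default=full)')

    print(parser.format_help())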
11652 @@ -282,16 +262,10 @@ def ParseArgs(argv, qahelp):
11653
11654 if not opts.mode:
11655 opts.mode = 'full'
11656 -
11657 +
11658 if opts.mode == 'ci':
11659 opts.mode = 'commit' # backwards compat shortcut
11660
11661 - if opts.mode == 'commit' and not (opts.force or opts.pretend):
11662 - if opts.ignore_masked:
11663 - parser.error('Commit mode and --ignore-masked are not compatible')
11664 - if opts.without_mask:
11665 - parser.error('Commit mode and --without-mask are not compatible')
11666 -
11667 # Use the verbosity and quiet options to fiddle with the loglevel appropriately
11668 for val in range(opts.verbosity):
11669 logger = logging.getLogger()
11670 @@ -301,101 +275,99 @@ def ParseArgs(argv, qahelp):
11671 logger = logging.getLogger()
11672 logger.setLevel(logger.getEffectiveLevel() + 10)
11673
11674 + if opts.mode == 'commit' and not (opts.force or opts.pretend):
11675 + if opts.ignore_masked:
11676 + opts.ignore_masked = False
11677 + logging.warn('Commit mode automatically disables --ignore-masked')
11678 + if opts.without_mask:
11679 + opts.without_mask = False
11680 + logging.warn('Commit mode automatically disables --without-mask')
11681 +
11682 return (opts, args)
11683
11684 -qahelp={
11685 - "CVS/Entries.IO_error":"Attempting to commit, and an IO error was encountered access the Entries file",
11686 - "desktop.invalid":"desktop-file-validate reports errors in a *.desktop file",
11687 - "ebuild.invalidname":"Ebuild files with a non-parseable or syntactically incorrect name (or using 2.1 versioning extensions)",
11688 - "ebuild.namenomatch":"Ebuild files that do not have the same name as their parent directory",
11689 - "changelog.ebuildadded":"An ebuild was added but the ChangeLog was not modified",
11690 - "changelog.missing":"Missing ChangeLog files",
11691 - "ebuild.notadded":"Ebuilds that exist but have not been added to cvs",
11692 - "ebuild.patches":"PATCHES variable should be a bash array to ensure white space safety",
11693 - "changelog.notadded":"ChangeLogs that exist but have not been added to cvs",
11694 - "dependency.unknown" : "Ebuild has a dependency that refers to an unknown package (which may be valid if it is a blocker for a renamed/removed package, or is an alternative choice provided by an overlay)",
11695 - "file.executable":"Ebuilds, digests, metadata.xml, Manifest, and ChangeLog do not need the executable bit",
11696 - "file.size":"Files in the files directory must be under 20 KiB",
11697 - "file.size.fatal":"Files in the files directory must be under 60 KiB",
11698 - "file.name":"File/dir name must be composed of only the following chars: %s " % allowed_filename_chars,
11699 - "file.UTF8":"File is not UTF8 compliant",
11700 - "inherit.deprecated":"Ebuild inherits a deprecated eclass",
11701 - "inherit.missing":"Ebuild uses functions from an eclass but does not inherit it",
11702 - "inherit.unused":"Ebuild inherits an eclass but does not use it",
11703 - "java.eclassesnotused":"With virtual/jdk in DEPEND you must inherit a java eclass",
11704 - "wxwidgets.eclassnotused":"Ebuild DEPENDs on x11-libs/wxGTK without inheriting wxwidgets.eclass",
11705 - "KEYWORDS.dropped":"Ebuilds that appear to have dropped KEYWORDS for some arch",
11706 - "KEYWORDS.missing":"Ebuilds that have a missing or empty KEYWORDS variable",
11707 - "KEYWORDS.stable":"Ebuilds that have been added directly with stable KEYWORDS",
11708 - "KEYWORDS.stupid":"Ebuilds that use KEYWORDS=-* instead of package.mask",
11709 - "LICENSE.missing":"Ebuilds that have a missing or empty LICENSE variable",
11710 - "LICENSE.virtual":"Virtuals that have a non-empty LICENSE variable",
11711 - "DESCRIPTION.missing":"Ebuilds that have a missing or empty DESCRIPTION variable",
11712 - "DESCRIPTION.toolong":"DESCRIPTION is over %d characters" % max_desc_len,
11713 - "EAPI.definition":"EAPI definition does not conform to PMS section 7.3.1 (first non-comment, non-blank line)",
11714 - "EAPI.deprecated":"Ebuilds that use features that are deprecated in the current EAPI",
11715 - "EAPI.incompatible":"Ebuilds that use features that are only available with a different EAPI",
11716 - "EAPI.unsupported":"Ebuilds that have an unsupported EAPI version (you must upgrade portage)",
11717 - "SLOT.invalid":"Ebuilds that have a missing or invalid SLOT variable value",
11718 - "HOMEPAGE.missing":"Ebuilds that have a missing or empty HOMEPAGE variable",
11719 - "HOMEPAGE.virtual":"Virtuals that have a non-empty HOMEPAGE variable",
11720 - "DEPEND.bad":"User-visible ebuilds with bad DEPEND settings (matched against *visible* ebuilds)",
11721 - "RDEPEND.bad":"User-visible ebuilds with bad RDEPEND settings (matched against *visible* ebuilds)",
11722 - "PDEPEND.bad":"User-visible ebuilds with bad PDEPEND settings (matched against *visible* ebuilds)",
11723 - "DEPEND.badmasked":"Masked ebuilds with bad DEPEND settings (matched against *all* ebuilds)",
11724 - "RDEPEND.badmasked":"Masked ebuilds with RDEPEND settings (matched against *all* ebuilds)",
11725 - "PDEPEND.badmasked":"Masked ebuilds with PDEPEND settings (matched against *all* ebuilds)",
11726 - "DEPEND.badindev":"User-visible ebuilds with bad DEPEND settings (matched against *visible* ebuilds) in developing arch",
11727 - "RDEPEND.badindev":"User-visible ebuilds with bad RDEPEND settings (matched against *visible* ebuilds) in developing arch",
11728 - "PDEPEND.badindev":"User-visible ebuilds with bad PDEPEND settings (matched against *visible* ebuilds) in developing arch",
11729 - "DEPEND.badmaskedindev":"Masked ebuilds with bad DEPEND settings (matched against *all* ebuilds) in developing arch",
11730 - "RDEPEND.badmaskedindev":"Masked ebuilds with RDEPEND settings (matched against *all* ebuilds) in developing arch",
11731 - "PDEPEND.badmaskedindev":"Masked ebuilds with PDEPEND settings (matched against *all* ebuilds) in developing arch",
11732 - "PDEPEND.suspect":"PDEPEND contains a package that usually only belongs in DEPEND.",
11733 - "DEPEND.syntax":"Syntax error in DEPEND (usually an extra/missing space/parenthesis)",
11734 - "RDEPEND.syntax":"Syntax error in RDEPEND (usually an extra/missing space/parenthesis)",
11735 - "PDEPEND.syntax":"Syntax error in PDEPEND (usually an extra/missing space/parenthesis)",
11736 - "DEPEND.badtilde":"DEPEND uses the ~ dep operator with a non-zero revision part, which is useless (the revision is ignored)",
11737 - "RDEPEND.badtilde":"RDEPEND uses the ~ dep operator with a non-zero revision part, which is useless (the revision is ignored)",
11738 - "PDEPEND.badtilde":"PDEPEND uses the ~ dep operator with a non-zero revision part, which is useless (the revision is ignored)",
11739 - "LICENSE.syntax":"Syntax error in LICENSE (usually an extra/missing space/parenthesis)",
11740 - "PROVIDE.syntax":"Syntax error in PROVIDE (usually an extra/missing space/parenthesis)",
11741 - "PROPERTIES.syntax":"Syntax error in PROPERTIES (usually an extra/missing space/parenthesis)",
11742 - "RESTRICT.syntax":"Syntax error in RESTRICT (usually an extra/missing space/parenthesis)",
11743 - "REQUIRED_USE.syntax":"Syntax error in REQUIRED_USE (usually an extra/missing space/parenthesis)",
11744 - "SRC_URI.syntax":"Syntax error in SRC_URI (usually an extra/missing space/parenthesis)",
11745 - "SRC_URI.mirror":"A uri listed in profiles/thirdpartymirrors is found in SRC_URI",
11746 - "ebuild.syntax":"Error generating cache entry for ebuild; typically caused by ebuild syntax error or digest verification failure",
11747 - "ebuild.output":"A simple sourcing of the ebuild produces output; this breaks ebuild policy.",
11748 - "ebuild.nesteddie":"Placing 'die' inside ( ) prints an error, but doesn't stop the ebuild.",
11749 - "variable.invalidchar":"A variable contains an invalid character that is not part of the ASCII character set",
11750 - "variable.readonly":"Assigning a readonly variable",
11751 - "variable.usedwithhelpers":"Ebuild uses D, ROOT, ED, EROOT or EPREFIX with helpers",
11752 - "LIVEVCS.stable":"This ebuild is a live checkout from a VCS but has stable keywords.",
11753 - "LIVEVCS.unmasked":"This ebuild is a live checkout from a VCS but has keywords and is not masked in the global package.mask.",
11754 - "IUSE.invalid":"This ebuild has a variable in IUSE that is not in the use.desc or its metadata.xml file",
11755 - "IUSE.missing":"This ebuild has a USE conditional which references a flag that is not listed in IUSE",
11756 - "IUSE.undefined":"This ebuild does not define IUSE (style guideline says to define IUSE even when empty)",
11757 - "LICENSE.invalid":"This ebuild is listing a license that doesnt exist in portages license/ dir.",
11758 - "KEYWORDS.invalid":"This ebuild contains KEYWORDS that are not listed in profiles/arch.list or for which no valid profile was found",
11759 - "RDEPEND.implicit":"RDEPEND is unset in the ebuild which triggers implicit RDEPEND=$DEPEND assignment (prior to EAPI 4)",
11760 - "RDEPEND.suspect":"RDEPEND contains a package that usually only belongs in DEPEND.",
11761 - "RESTRICT.invalid":"This ebuild contains invalid RESTRICT values.",
11762 - "digest.assumed":"Existing digest must be assumed correct (Package level only)",
11763 - "digest.missing":"Some files listed in SRC_URI aren't referenced in the Manifest",
11764 - "digest.unused":"Some files listed in the Manifest aren't referenced in SRC_URI",
11765 - "ebuild.majorsyn":"This ebuild has a major syntax error that may cause the ebuild to fail partially or fully",
11766 - "ebuild.minorsyn":"This ebuild has a minor syntax error that contravenes gentoo coding style",
11767 - "ebuild.badheader":"This ebuild has a malformed header",
11768 - "manifest.bad":"Manifest has missing or incorrect digests",
11769 - "metadata.missing":"Missing metadata.xml files",
11770 - "metadata.bad":"Bad metadata.xml files",
11771 - "metadata.warning":"Warnings in metadata.xml files",
11772 - "portage.internal":"The ebuild uses an internal Portage function",
11773 - "virtual.oldstyle":"The ebuild PROVIDEs an old-style virtual (see GLEP 37)",
11774 - "virtual.suspect":"Ebuild contains a package that usually should be pulled via virtual/, not directly.",
11775 - "usage.obsolete":"The ebuild makes use of an obsolete construct",
11776 - "upstream.workaround":"The ebuild works around an upstream bug, an upstream bug should be filed and tracked in bugs.gentoo.org"
11777 +qahelp = {
11778 + "CVS/Entries.IO_error": "Attempting to commit, and an IO error was encountered access the Entries file",
11779 + "ebuild.invalidname": "Ebuild files with a non-parseable or syntactically incorrect name (or using 2.1 versioning extensions)",
11780 + "ebuild.namenomatch": "Ebuild files that do not have the same name as their parent directory",
11781 + "changelog.ebuildadded": "An ebuild was added but the ChangeLog was not modified",
11782 + "changelog.missing": "Missing ChangeLog files",
11783 + "ebuild.notadded": "Ebuilds that exist but have not been added to cvs",
11784 + "ebuild.patches": "PATCHES variable should be a bash array to ensure white space safety",
11785 + "changelog.notadded": "ChangeLogs that exist but have not been added to cvs",
11786 + "dependency.bad": "User-visible ebuilds with unsatisfied dependencies (matched against *visible* ebuilds)",
11787 + "dependency.badmasked": "Masked ebuilds with unsatisfied dependencies (matched against *all* ebuilds)",
11788 + "dependency.badindev": "User-visible ebuilds with unsatisfied dependencies (matched against *visible* ebuilds) in developing arch",
11789 + "dependency.badmaskedindev": "Masked ebuilds with unsatisfied dependencies (matched against *all* ebuilds) in developing arch",
11790 + "dependency.badtilde": "Uses the ~ dep operator with a non-zero revision part, which is useless (the revision is ignored)",
11791 + "dependency.syntax": "Syntax error in dependency string (usually an extra/missing space/parenthesis)",
11792 + "dependency.unknown": "Ebuild has a dependency that refers to an unknown package (which may be valid if it is a blocker for a renamed/removed package, or is an alternative choice provided by an overlay)",
11793 + "file.executable": "Ebuilds, digests, metadata.xml, Manifest, and ChangeLog do not need the executable bit",
11794 + "file.size": "Files in the files directory must be under 20 KiB",
11795 + "file.size.fatal": "Files in the files directory must be under 60 KiB",
11796 + "file.name": "File/dir name must be composed of only the following chars: %s " % allowed_filename_chars,
11797 + "file.UTF8": "File is not UTF8 compliant",
11798 + "inherit.deprecated": "Ebuild inherits a deprecated eclass",
11799 + "inherit.missing": "Ebuild uses functions from an eclass but does not inherit it",
11800 + "inherit.unused": "Ebuild inherits an eclass but does not use it",
11801 + "java.eclassesnotused": "With virtual/jdk in DEPEND you must inherit a java eclass",
11802 + "wxwidgets.eclassnotused": "Ebuild DEPENDs on x11-libs/wxGTK without inheriting wxwidgets.eclass",
11803 + "KEYWORDS.dropped": "Ebuilds that appear to have dropped KEYWORDS for some arch",
11804 + "KEYWORDS.missing": "Ebuilds that have a missing or empty KEYWORDS variable",
11805 + "KEYWORDS.stable": "Ebuilds that have been added directly with stable KEYWORDS",
11806 + "KEYWORDS.stupid": "Ebuilds that use KEYWORDS=-* instead of package.mask",
11807 + "LICENSE.missing": "Ebuilds that have a missing or empty LICENSE variable",
11808 + "LICENSE.virtual": "Virtuals that have a non-empty LICENSE variable",
11809 + "DESCRIPTION.missing": "Ebuilds that have a missing or empty DESCRIPTION variable",
11810 + "DESCRIPTION.toolong": "DESCRIPTION is over %d characters" % max_desc_len,
11811 + "EAPI.definition": "EAPI definition does not conform to PMS section 7.3.1 (first non-comment, non-blank line)",
11812 + "EAPI.deprecated": "Ebuilds that use features that are deprecated in the current EAPI",
11813 + "EAPI.incompatible": "Ebuilds that use features that are only available with a different EAPI",
11814 + "EAPI.unsupported": "Ebuilds that have an unsupported EAPI version (you must upgrade portage)",
11815 + "SLOT.invalid": "Ebuilds that have a missing or invalid SLOT variable value",
11816 + "HOMEPAGE.missing": "Ebuilds that have a missing or empty HOMEPAGE variable",
11817 + "HOMEPAGE.virtual": "Virtuals that have a non-empty HOMEPAGE variable",
11818 + "PDEPEND.suspect": "PDEPEND contains a package that usually only belongs in DEPEND.",
11819 + "LICENSE.syntax": "Syntax error in LICENSE (usually an extra/missing space/parenthesis)",
11820 + "PROVIDE.syntax": "Syntax error in PROVIDE (usually an extra/missing space/parenthesis)",
11821 + "PROPERTIES.syntax": "Syntax error in PROPERTIES (usually an extra/missing space/parenthesis)",
11822 + "RESTRICT.syntax": "Syntax error in RESTRICT (usually an extra/missing space/parenthesis)",
11823 + "REQUIRED_USE.syntax": "Syntax error in REQUIRED_USE (usually an extra/missing space/parenthesis)",
11824 + "SRC_URI.syntax": "Syntax error in SRC_URI (usually an extra/missing space/parenthesis)",
11825 + "SRC_URI.mirror": "A uri listed in profiles/thirdpartymirrors is found in SRC_URI",
11826 + "ebuild.syntax": "Error generating cache entry for ebuild; typically caused by ebuild syntax error or digest verification failure",
11827 + "ebuild.output": "A simple sourcing of the ebuild produces output; this breaks ebuild policy.",
11828 + "ebuild.nesteddie": "Placing 'die' inside ( ) prints an error, but doesn't stop the ebuild.",
11829 + "variable.invalidchar": "A variable contains an invalid character that is not part of the ASCII character set",
11830 + "variable.readonly": "Assigning a readonly variable",
11831 + "variable.usedwithhelpers": "Ebuild uses D, ROOT, ED, EROOT or EPREFIX with helpers",
11832 + "LIVEVCS.stable": "This ebuild is a live checkout from a VCS but has stable keywords.",
11833 + "LIVEVCS.unmasked": "This ebuild is a live checkout from a VCS but has keywords and is not masked in the global package.mask.",
11834 + "IUSE.invalid": "This ebuild has a variable in IUSE that is not in the use.desc or its metadata.xml file",
11835 + "IUSE.missing": "This ebuild has a USE conditional which references a flag that is not listed in IUSE",
11836 + "IUSE.rubydeprecated": "The ebuild has set a ruby interpreter in USE_RUBY, that is not available as a ruby target anymore",
11837 + "LICENSE.invalid": "This ebuild is listing a license that doesnt exist in portages license/ dir.",
11838 + "LICENSE.deprecated": "This ebuild is listing a deprecated license.",
11839 + "KEYWORDS.invalid": "This ebuild contains KEYWORDS that are not listed in profiles/arch.list or for which no valid profile was found",
11840 + "RDEPEND.implicit": "RDEPEND is unset in the ebuild which triggers implicit RDEPEND=$DEPEND assignment (prior to EAPI 4)",
11841 + "RDEPEND.suspect": "RDEPEND contains a package that usually only belongs in DEPEND.",
11842 + "RESTRICT.invalid": "This ebuild contains invalid RESTRICT values.",
11843 + "digest.assumed": "Existing digest must be assumed correct (Package level only)",
11844 + "digest.missing": "Some files listed in SRC_URI aren't referenced in the Manifest",
11845 + "digest.unused": "Some files listed in the Manifest aren't referenced in SRC_URI",
11846 + "ebuild.majorsyn": "This ebuild has a major syntax error that may cause the ebuild to fail partially or fully",
11847 + "ebuild.minorsyn": "This ebuild has a minor syntax error that contravenes gentoo coding style",
11848 + "ebuild.badheader": "This ebuild has a malformed header",
11849 + "manifest.bad": "Manifest has missing or incorrect digests",
11850 + "metadata.missing": "Missing metadata.xml files",
11851 + "metadata.bad": "Bad metadata.xml files",
11852 + "metadata.warning": "Warnings in metadata.xml files",
11853 + "portage.internal": "The ebuild uses an internal Portage function or variable",
11854 + "repo.eapi.banned": "The ebuild uses an EAPI which is banned by the repository's metadata/layout.conf settings",
11855 + "repo.eapi.deprecated": "The ebuild uses an EAPI which is deprecated by the repository's metadata/layout.conf settings",
11856 + "virtual.oldstyle": "The ebuild PROVIDEs an old-style virtual (see GLEP 37)",
11857 + "virtual.suspect": "Ebuild contains a package that usually should be pulled via virtual/, not directly.",
11858 + "usage.obsolete": "The ebuild makes use of an obsolete construct",
11859 + "upstream.workaround": "The ebuild works around an upstream bug, an upstream bug should be filed and tracked in bugs.gentoo.org"
11860 }
11861
11862 qacats = list(qahelp)
11863 @@ -409,19 +381,18 @@ qawarnings = set((
11864 "digest.unused",
11865 "ebuild.notadded",
11866 "ebuild.nesteddie",
11867 -"desktop.invalid",
11868 -"DEPEND.badmasked","RDEPEND.badmasked","PDEPEND.badmasked",
11869 -"DEPEND.badindev","RDEPEND.badindev","PDEPEND.badindev",
11870 -"DEPEND.badmaskedindev","RDEPEND.badmaskedindev","PDEPEND.badmaskedindev",
11871 -"DEPEND.badtilde", "RDEPEND.badtilde", "PDEPEND.badtilde",
11872 +"dependency.badmasked",
11873 +"dependency.badindev",
11874 +"dependency.badmaskedindev",
11875 +"dependency.badtilde",
11876 "DESCRIPTION.toolong",
11877 "EAPI.deprecated",
11878 "HOMEPAGE.virtual",
11879 +"LICENSE.deprecated",
11880 "LICENSE.virtual",
11881 "KEYWORDS.dropped",
11882 "KEYWORDS.stupid",
11883 "KEYWORDS.missing",
11884 -"IUSE.undefined",
11885 "PDEPEND.suspect",
11886 "RDEPEND.implicit",
11887 "RDEPEND.suspect",
11888 @@ -437,23 +408,21 @@ qawarnings = set((
11889 "wxwidgets.eclassnotused",
11890 "metadata.warning",
11891 "portage.internal",
11892 +"repo.eapi.deprecated",
11893 "usage.obsolete",
11894 "upstream.workaround",
11895 "LIVEVCS.stable",
11896 "LIVEVCS.unmasked",
11897 +"IUSE.rubydeprecated",
11898 ))
11899
11900 -if portage.const._ENABLE_INHERIT_CHECK:
11901 - # This is experimental, so it's non-fatal.
11902 - qawarnings.add("inherit.missing")
11903 -
11904 non_ascii_re = re.compile(r'[^\x00-\x7f]')
11905
11906 missingvars = ["KEYWORDS", "LICENSE", "DESCRIPTION", "HOMEPAGE"]
11907 allvars = set(x for x in portage.auxdbkeys if not x.startswith("UNUSED_"))
11908 allvars.update(Package.metadata_keys)
11909 allvars = sorted(allvars)
11910 -commitmessage=None
11911 +commitmessage = None
11912 for x in missingvars:
11913 x += ".missing"
11914 if x not in qacats:
11915 @@ -462,19 +431,10 @@ for x in missingvars:
11916 qawarnings.add(x)
11917
11918 valid_restrict = frozenset(["binchecks", "bindist",
11919 - "fetch", "installsources", "mirror",
11920 - "primaryuri", "strip", "test", "userpriv"])
11921 -
11922 -live_eclasses = frozenset([
11923 - "bzr",
11924 - "cvs",
11925 - "darcs",
11926 - "git",
11927 - "git-2",
11928 - "mercurial",
11929 - "subversion",
11930 - "tla",
11931 -])
11932 + "fetch", "installsources", "mirror", "preserve-libs",
11933 + "primaryuri", "splitdebug", "strip", "test", "userpriv"])
11934 +
11935 +live_eclasses = portage.const.LIVE_ECLASSES
11936
11937 suspect_rdepend = frozenset([
11938 "app-arch/cabextract",
11939 @@ -520,14 +480,25 @@ suspect_virtual = {
11940 "dev-util/pkgconf":"virtual/pkgconfig",
11941 "dev-util/pkgconfig":"virtual/pkgconfig",
11942 "dev-util/pkgconfig-openbsd":"virtual/pkgconfig",
11943 + "dev-libs/libusb":"virtual/libusb",
11944 + "dev-libs/libusbx":"virtual/libusb",
11945 + "dev-libs/libusb-compat":"virtual/libusb",
11946 }
11947
11948 +ruby_deprecated = frozenset([
11949 + "ruby_targets_ree18",
11950 +])
11951 +
11952 +metadata_xml_encoding = 'UTF-8'
11953 +metadata_xml_declaration = '<?xml version="1.0" encoding="%s"?>' % \
11954 + (metadata_xml_encoding,)
11955 +metadata_doctype_name = 'pkgmetadata'
11956 metadata_dtd_uri = 'http://www.gentoo.org/dtd/metadata.dtd'
11957 # force refetch if the local copy creation time is older than this
11958 metadata_dtd_ctime_interval = 60 * 60 * 24 * 7 # 7 days
11959
11960 # file.executable
11961 -no_exec = frozenset(["Manifest","ChangeLog","metadata.xml"])
11962 +no_exec = frozenset(["Manifest", "ChangeLog", "metadata.xml"])
11963
11964 options, arguments = ParseArgs(sys.argv, qahelp)
11965
11966 @@ -535,6 +506,11 @@ if options.version:
11967 print("Portage", portage.VERSION)
11968 sys.exit(0)
11969
11970 +if options.experimental_inherit == 'y':
11971 + # This is experimental, so it's non-fatal.
11972 + qawarnings.add("inherit.missing")
11973 + repoman.checks._init(experimental_inherit=True)
11974 +
11975 # Set this to False when an extraordinary issue (generally
11976 # something other than a QA issue) makes it impossible to
11977 # commit (like if Manifest generation fails).
11978 @@ -584,14 +560,29 @@ if options.mode == 'commit' and not options.pretend and not vcs:
11979 logging.info("Not in a version controlled repository; enabling pretend mode.")
11980 options.pretend = True
11981
11982 -# Ensure that PORTDIR_OVERLAY contains the repository corresponding to $PWD.
11983 -repoman_settings['PORTDIR_OVERLAY'] = "%s %s" % \
11984 - (repoman_settings.get('PORTDIR_OVERLAY', ''),
11985 - portage._shell_quote(portdir_overlay))
11986 -# We have to call the config constructor again so
11987 -# that config.repositories is initialized correctly.
11988 -repoman_settings = portage.config(config_root=config_root, local_config=False,
11989 - env=dict(os.environ, PORTDIR_OVERLAY=repoman_settings['PORTDIR_OVERLAY']))
11990 +# Ensure that current repository is in the list of enabled repositories.
11991 +repodir = os.path.realpath(portdir_overlay)
11992 +try:
11993 + repoman_settings.repositories.get_repo_for_location(repodir)
11994 +except KeyError:
11995 + repo_name = portage.repository.config.RepoConfig._read_valid_repo_name(portdir_overlay)[0]
11996 + layout_conf_data = portage.repository.config.parse_layout_conf(portdir_overlay)[0]
11997 + if layout_conf_data['repo-name']:
11998 + repo_name = layout_conf_data['repo-name']
11999 + tmp_conf_file = io.StringIO(textwrap.dedent("""
12000 + [%s]
12001 + location = %s
12002 + """) % (repo_name, portdir_overlay))
12003 + # Ensure that the repository corresponding to $PWD overrides a
12004 + # repository of the same name referenced by the existing PORTDIR
12005 + # or PORTDIR_OVERLAY settings.
12006 + repoman_settings['PORTDIR_OVERLAY'] = "%s %s" % \
12007 + (repoman_settings.get('PORTDIR_OVERLAY', ''),
12008 + portage._shell_quote(portdir_overlay))
12009 + repositories = portage.repository.config.load_repository_config(repoman_settings, extra_files=[tmp_conf_file])
12010 + # We have to call the config constructor again so that attributes
12011 + # dependent on config.repositories are initialized correctly.
12012 + repoman_settings = portage.config(config_root=config_root, local_config=False, repositories=repositories)
12013
12014 root = repoman_settings['EROOT']
12015 trees = {
12016 @@ -601,10 +592,15 @@ portdb = trees[root]['porttree'].dbapi
12017
12018 # Constrain dependency resolution to the master(s)
12019 # that are specified in layout.conf.
12020 -repodir = os.path.realpath(portdir_overlay)
12021 repo_config = repoman_settings.repositories.get_repo_for_location(repodir)
12022 portdb.porttrees = list(repo_config.eclass_db.porttrees)
12023 portdir = portdb.porttrees[0]
12024 +commit_env = os.environ.copy()
12025 +# list() is for iteration on a copy.
12026 +for repo in list(repoman_settings.repositories):
12027 + # all paths are canonical
12028 + if repo.location not in repo_config.eclass_db.porttrees:
12029 + del repoman_settings.repositories[repo.name]
12030
12031 if repo_config.allow_provide_virtual:
12032 qawarnings.add("virtual.oldstyle")
12033 @@ -615,6 +611,15 @@ if repo_config.sign_commit:
12034 # the commit arguments. If key_id is unspecified, then it must be
12035 # configured by `git config user.signingkey key_id`.
12036 vcs_local_opts.append("--gpg-sign")
12037 + if repoman_settings.get("PORTAGE_GPG_DIR"):
12038 + # Pass GNUPGHOME to git for bug #462362.
12039 + commit_env["GNUPGHOME"] = repoman_settings["PORTAGE_GPG_DIR"]
12040 +
12041 + # Pass GPG_TTY to git for bug #477728.
12042 + try:
12043 + commit_env["GPG_TTY"] = os.ttyname(sys.stdin.fileno())
12044 + except OSError:
12045 + pass
12046
12047 # In order to disable manifest signatures, repos may set
12048 # "sign-manifests = false" in metadata/layout.conf. This
12049 @@ -623,6 +628,25 @@ if repo_config.sign_commit:
12050 sign_manifests = "sign" in repoman_settings.features and \
12051 repo_config.sign_manifest
12052
12053 +if repo_config.sign_manifest and repo_config.name == "gentoo" and \
12054 + options.mode in ("commit",) and not sign_manifests:
12055 + msg = ("The '%s' repository has manifest signatures enabled, "
12056 + "but FEATURES=sign is currently disabled. In order to avoid this "
12057 + "warning, enable FEATURES=sign in make.conf. Alternatively, "
12058 + "repositories can disable manifest signatures by setting "
12059 + "'sign-manifests = false' in metadata/layout.conf.") % \
12060 + (repo_config.name,)
12061 + for line in textwrap.wrap(msg, 60):
12062 + logging.warn(line)
12063 +
12064 +if sign_manifests and options.mode in ("commit",) and \
12065 + repoman_settings.get("PORTAGE_GPG_KEY") and \
12066 + re.match(r'^%s$' % GPG_KEY_ID_REGEX,
12067 + repoman_settings["PORTAGE_GPG_KEY"]) is None:
12068 + logging.error("PORTAGE_GPG_KEY value is invalid: %s" %
12069 + repoman_settings["PORTAGE_GPG_KEY"])
12070 + sys.exit(1)
12071 +
12072 manifest_hashes = repo_config.manifest_hashes
12073 if manifest_hashes is None:
12074 manifest_hashes = portage.const.MANIFEST2_HASH_DEFAULTS
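The new PORTAGE_GPG_KEY check anchors GPG_KEY_ID_REGEX with re.match(r'^...$') so that only a complete key id is accepted. A self-contained sketch of that validation, reusing the pattern added near the top of the file:

    import re

    GPG_KEY_ID_REGEX = (r'(0x)?([0-9a-fA-F]{8}|[0-9a-fA-F]{16}|[0-9a-fA-F]{24}'
        r'|[0-9a-fA-F]{32}|[0-9a-fA-F]{40})!?')

    def valid_gpg_key_id(value):
        # Accept an optional 0x prefix, a hex id of 8/16/24/32/40 digits,
        # and an optional trailing '!'.
        return re.match(r'^%s$' % GPG_KEY_ID_REGEX, value) is not None

    assert valid_gpg_key_id("0xDEADBEEF")
    assert valid_gpg_key_id("0123456789ABCDEF0123456789ABCDEF01234567!")
    assert not valid_gpg_key_id("not-a-key")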
12075 @@ -652,19 +676,6 @@ if options.mode in ("commit", "fix", "manifest"):
12076 logging.error(line)
12077 sys.exit(1)
12078
12079 -if "commit" == options.mode and \
12080 - repo_config.name == "gentoo" and \
12081 - "RMD160" in manifest_hashes and \
12082 - "RMD160" not in portage.checksum.hashorigin_map:
12083 - msg = "Please install " \
12084 - "pycrypto or enable python's ssl USE flag in order " \
12085 - "to enable RMD160 hash support. See bug #198398 for " \
12086 - "more information."
12087 - prefix = bad(" * ")
12088 - for line in textwrap.wrap(msg, 70):
12089 - print(prefix + line)
12090 - sys.exit(1)
12091 -
12092 if options.echangelog is None and repo_config.update_changelog:
12093 options.echangelog = 'y'
12094
12095 @@ -689,18 +700,9 @@ logging.debug("vcs: %s" % (vcs,))
12096 logging.debug("repo config: %s" % (repo_config,))
12097 logging.debug("options: %s" % (options,))
12098
12099 -# Generate an appropriate PORTDIR_OVERLAY value for passing into the
12100 -# profile-specific config constructor calls.
12101 -env = os.environ.copy()
12102 -env['PORTDIR'] = portdir
12103 -env['PORTDIR_OVERLAY'] = ' '.join(portdb.porttrees[1:])
12104 -
12105 -logging.info('Setting paths:')
12106 -logging.info('PORTDIR = "' + portdir + '"')
12107 -logging.info('PORTDIR_OVERLAY = "%s"' % env['PORTDIR_OVERLAY'])
12108 -
12109 # It's confusing if these warnings are displayed without the user
12110 # being told which profile they come from, so disable them.
12111 +env = os.environ.copy()
12112 env['FEATURES'] = env.get('FEATURES', '') + ' -unknown-features-warn'
12113
12114 categories = []
12115 @@ -724,7 +726,7 @@ repolevel = len(reposplit)
12116 # check if it's in $PORTDIR/$CATEGORY/$PN , otherwise bail if commiting.
12117 # Reason for this is if they're trying to commit in just $FILESDIR/*, the Manifest needs updating.
12118 # this check ensures that repoman knows where it is, and the manifest recommit is at least possible.
12119 -if options.mode == 'commit' and repolevel not in [1,2,3]:
12120 +if options.mode == 'commit' and repolevel not in [1, 2, 3]:
12121 print(red("***")+" Commit attempts *must* be from within a vcs co, category, or package directory.")
12122 print(red("***")+" Attempting to commit from a packages files directory will be blocked for instance.")
12123 print(red("***")+" This is intended behaviour, to ensure the manifest is recommitted for a package.")
12124 @@ -737,10 +739,76 @@ if repolevel == 1:
12125 startdir = repodir
12126 else:
12127 startdir = normalize_path(mydir)
12128 - startdir = os.path.join(repodir, *startdir.split(os.sep)[-2-repolevel+3:])
12129 + startdir = os.path.join(repodir, *startdir.split(os.sep)[-2 - repolevel + 3:])
12130
12131 def caterror(mycat):
12132 - err(mycat+" is not an official category. Skipping QA checks in this directory.\nPlease ensure that you add "+catdir+" to "+repodir+"/profiles/categories\nif it is a new category.")
12133 + err(mycat + " is not an official category. Skipping QA checks in this directory.\nPlease ensure that you add " + catdir + " to " + repodir + "/profiles/categories\nif it is a new category.")
12134 +
12135 +def repoman_getstatusoutput(cmd):
12136 + """
12137 + Implements an interface similar to getstatusoutput(), but with
12138 + customized unicode handling (see bug #310789) and without the shell.
12139 + """
12140 + args = portage.util.shlex_split(cmd)
12141 +
12142 + if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000 and \
12143 + not os.path.isabs(args[0]):
12144 + # Python 3.1 _execvp throws TypeError for non-absolute executable
12145 + # path passed as bytes (see http://bugs.python.org/issue8513).
12146 + fullname = find_binary(args[0])
12147 + if fullname is None:
12148 + raise portage.exception.CommandNotFound(args[0])
12149 + args[0] = fullname
12150 +
12151 + encoding = _encodings['fs']
12152 + args = [_unicode_encode(x,
12153 + encoding=encoding, errors='strict') for x in args]
12154 + proc = subprocess.Popen(args, stdout=subprocess.PIPE,
12155 + stderr=subprocess.STDOUT)
12156 + output = portage._unicode_decode(proc.communicate()[0],
12157 + encoding=encoding, errors='strict')
12158 + if output and output[-1] == "\n":
12159 + # getstatusoutput strips one newline
12160 + output = output[:-1]
12161 + return (proc.wait(), output)
12162 +
12163 +class repoman_popen(portage.proxy.objectproxy.ObjectProxy):
12164 + """
12165 + Implements an interface similar to os.popen(), but with customized
12166 + unicode handling (see bug #310789) and without the shell.
12167 + """
12168 +
12169 + __slots__ = ('_proc', '_stdout')
12170 +
12171 + def __init__(self, cmd):
12172 + args = portage.util.shlex_split(cmd)
12173 +
12174 + if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000 and \
12175 + not os.path.isabs(args[0]):
12176 + # Python 3.1 _execvp throws TypeError for non-absolute executable
12177 + # path passed as bytes (see http://bugs.python.org/issue8513).
12178 + fullname = find_binary(args[0])
12179 + if fullname is None:
12180 + raise portage.exception.CommandNotFound(args[0])
12181 + args[0] = fullname
12182 +
12183 + encoding = _encodings['fs']
12184 + args = [_unicode_encode(x,
12185 + encoding=encoding, errors='strict') for x in args]
12186 + proc = subprocess.Popen(args, stdout=subprocess.PIPE)
12187 + object.__setattr__(self, '_proc', proc)
12188 + object.__setattr__(self, '_stdout',
12189 + codecs.getreader(encoding)(proc.stdout, 'strict'))
12190 +
12191 + def _get_target(self):
12192 + return object.__getattribute__(self, '_stdout')
12193 +
12194 + __enter__ = _get_target
12195 +
12196 + def __exit__(self, exc_type, exc_value, traceback):
12197 + proc = object.__getattribute__(self, '_proc')
12198 + proc.wait()
12199 + proc.stdout.close()
12200
12201 class ProfileDesc(object):
12202 __slots__ = ('abs_path', 'arch', 'status', 'sub_path', 'tree_path',)
12203 @@ -818,18 +886,18 @@ for path in portdb.porttrees:
12204 continue
12205 if len(arch) != 3:
12206 err("wrong format: \"" + bad(x.strip()) + "\" in " + \
12207 - desc_path + " line %d" % (i+1, ))
12208 + desc_path + " line %d" % (i + 1, ))
12209 elif arch[0] not in kwlist:
12210 err("invalid arch: \"" + bad(arch[0]) + "\" in " + \
12211 - desc_path + " line %d" % (i+1, ))
12212 + desc_path + " line %d" % (i + 1, ))
12213 elif arch[2] not in valid_profile_types:
12214 err("invalid profile type: \"" + bad(arch[2]) + "\" in " + \
12215 - desc_path + " line %d" % (i+1, ))
12216 + desc_path + " line %d" % (i + 1, ))
12217 profile_desc = ProfileDesc(arch[0], arch[2], arch[1], path)
12218 if not os.path.isdir(profile_desc.abs_path):
12219 logging.error(
12220 "Invalid %s profile (%s) for arch %s in %s line %d",
12221 - arch[2], arch[1], arch[0], desc_path, i+1)
12222 + arch[2], arch[1], arch[0], desc_path, i + 1)
12223 continue
12224 if os.path.exists(
12225 os.path.join(profile_desc.abs_path, 'deprecated')):
12226 @@ -876,11 +944,16 @@ for x in repoman_settings.archlist():
12227 if x[0] == "~":
12228 continue
12229 if x not in profiles:
12230 - print(red("\""+x+"\" doesn't have a valid profile listed in profiles.desc."))
12231 + print(red("\"" + x + "\" doesn't have a valid profile listed in profiles.desc."))
12232 print(red("You need to either \"cvs update\" your profiles dir or follow this"))
12233 - print(red("up with the "+x+" team."))
12234 + print(red("up with the " + x + " team."))
12235 print()
12236
12237 +liclist_deprecated = set()
12238 +if "DEPRECATED" in repoman_settings._license_manager._license_groups:
12239 + liclist_deprecated.update(
12240 + repoman_settings._license_manager.expandLicenseTokens(["@DEPRECATED"]))
12241 +
12242 if not liclist:
12243 logging.fatal("Couldn't find licenses?")
12244 sys.exit(1)
12245 @@ -893,34 +966,34 @@ if not uselist:
12246 logging.fatal("Couldn't find use.desc?")
12247 sys.exit(1)
12248
12249 -scanlist=[]
12250 -if repolevel==2:
12251 - #we are inside a category directory
12252 - catdir=reposplit[-1]
12253 +scanlist = []
12254 +if repolevel == 2:
12255 + # we are inside a category directory
12256 + catdir = reposplit[-1]
12257 if catdir not in categories:
12258 caterror(catdir)
12259 - mydirlist=os.listdir(startdir)
12260 + mydirlist = os.listdir(startdir)
12261 for x in mydirlist:
12262 if x == "CVS" or x.startswith("."):
12263 continue
12264 - if os.path.isdir(startdir+"/"+x):
12265 - scanlist.append(catdir+"/"+x)
12266 + if os.path.isdir(startdir + "/" + x):
12267 + scanlist.append(catdir + "/" + x)
12268 repo_subdir = catdir + os.sep
12269 -elif repolevel==1:
12270 +elif repolevel == 1:
12271 for x in categories:
12272 - if not os.path.isdir(startdir+"/"+x):
12273 + if not os.path.isdir(startdir + "/" + x):
12274 continue
12275 - for y in os.listdir(startdir+"/"+x):
12276 + for y in os.listdir(startdir + "/" + x):
12277 if y == "CVS" or y.startswith("."):
12278 continue
12279 - if os.path.isdir(startdir+"/"+x+"/"+y):
12280 - scanlist.append(x+"/"+y)
12281 + if os.path.isdir(startdir + "/" + x + "/" + y):
12282 + scanlist.append(x + "/" + y)
12283 repo_subdir = ""
12284 -elif repolevel==3:
12285 +elif repolevel == 3:
12286 catdir = reposplit[-2]
12287 if catdir not in categories:
12288 caterror(catdir)
12289 - scanlist.append(catdir+"/"+reposplit[-1])
12290 + scanlist.append(catdir + "/" + reposplit[-1])
12291 repo_subdir = scanlist[-1] + os.sep
12292 else:
12293 msg = 'Repoman is unable to determine PORTDIR or PORTDIR_OVERLAY' + \
12294 @@ -952,7 +1025,7 @@ def vcs_files_to_cps(vcs_file_iter):
12295 if category in categories:
12296 for filename in vcs_file_iter:
12297 f_split = filename.split(os.sep)
12298 - # ['.', pn,...]
12299 + # ['.', pn, ...]
12300 if len(f_split) > 2:
12301 modified_cps.append(category + "/" + f_split[1])
12302
12303 @@ -960,7 +1033,7 @@ def vcs_files_to_cps(vcs_file_iter):
12304 # repolevel == 1
12305 for filename in vcs_file_iter:
12306 f_split = filename.split(os.sep)
12307 - # ['.', category, pn,...]
12308 + # ['.', category, pn, ...]
12309 if len(f_split) > 3 and f_split[1] in categories:
12310 modified_cps.append("/".join(f_split[1:3]))
12311
12312 @@ -968,12 +1041,12 @@ def vcs_files_to_cps(vcs_file_iter):
12313
12314 def git_supports_gpg_sign():
12315 status, cmd_output = \
12316 - subprocess_getstatusoutput("git --version")
12317 + repoman_getstatusoutput("git --version")
12318 cmd_output = cmd_output.split()
12319 if cmd_output:
12320 version = re.match(r'^(\d+)\.(\d+)\.(\d+)', cmd_output[-1])
12321 if version is not None:
12322 - version = [int(x) for x in version.groups()[1:]]
12323 + version = [int(x) for x in version.groups()]
12324 if version[0] > 1 or \
12325 (version[0] == 1 and version[1] > 7) or \
12326 (version[0] == 1 and version[1] == 7 and version[2] >= 9):
12327 @@ -1002,47 +1075,16 @@ def dev_keywords(profiles):
12328
12329 dev_keywords = dev_keywords(profiles)
12330
12331 -stats={}
12332 -fails={}
12333 -
12334 -# provided by the desktop-file-utils package
12335 -desktop_file_validate = find_binary("desktop-file-validate")
12336 -desktop_pattern = re.compile(r'.*\.desktop$')
12337 +stats = {}
12338 +fails = {}
12339
12340 for x in qacats:
12341 - stats[x]=0
12342 - fails[x]=[]
12343 + stats[x] = 0
12344 + fails[x] = []
12345
12346 xmllint_capable = False
12347 metadata_dtd = os.path.join(repoman_settings["DISTDIR"], 'metadata.dtd')
12348
12349 -def parsedate(s):
12350 - """Parse a RFC 822 date and time string.
12351 - This is required for python3 compatibility, since the
12352 - rfc822.parsedate() function is not available."""
12353 -
12354 - s_split = []
12355 - for x in s.upper().split():
12356 - for y in x.split(','):
12357 - if y:
12358 - s_split.append(y)
12359 -
12360 - if len(s_split) != 6:
12361 - return None
12362 -
12363 - # %a, %d %b %Y %H:%M:%S %Z
12364 - a, d, b, Y, H_M_S, Z = s_split
12365 -
12366 - # Convert month to integer, since strptime %w is locale-dependent.
12367 - month_map = {'JAN':1, 'FEB':2, 'MAR':3, 'APR':4, 'MAY':5, 'JUN':6,
12368 - 'JUL':7, 'AUG':8, 'SEP':9, 'OCT':10, 'NOV':11, 'DEC':12}
12369 - m = month_map.get(b)
12370 - if m is None:
12371 - return None
12372 - m = str(m).rjust(2, '0')
12373 -
12374 - return time.strptime(':'.join((Y, m, d, H_M_S)), '%Y:%m:%d:%H:%M:%S')
12375 -
12376 def fetch_metadata_dtd():
12377 """
12378 Fetch metadata.dtd if it doesn't exist or the ctime is older than
12379 @@ -1071,45 +1113,40 @@ def fetch_metadata_dtd():
12380 print(green("***") + " the local copy of metadata.dtd " + \
12381 "needs to be refetched, doing that now")
12382 print()
12383 + parsed_url = urlparse(metadata_dtd_uri)
12384 + setting = 'FETCHCOMMAND_' + parsed_url.scheme.upper()
12385 + fcmd = repoman_settings.get(setting)
12386 + if not fcmd:
12387 + fcmd = repoman_settings.get('FETCHCOMMAND')
12388 + if not fcmd:
12389 + logging.error("FETCHCOMMAND is unset")
12390 + return False
12391 +
12392 + destdir = repoman_settings["DISTDIR"]
12393 + fd, metadata_dtd_tmp = tempfile.mkstemp(
12394 + prefix='metadata.dtd.', dir=destdir)
12395 + os.close(fd)
12396 +
12397 try:
12398 - url_f = urllib_request_urlopen(metadata_dtd_uri)
12399 - msg_info = url_f.info()
12400 - last_modified = msg_info.get('last-modified')
12401 - if last_modified is not None:
12402 - last_modified = parsedate(last_modified)
12403 - if last_modified is not None:
12404 - last_modified = calendar.timegm(last_modified)
12405 -
12406 - metadata_dtd_tmp = "%s.%s" % (metadata_dtd, os.getpid())
12407 - try:
12408 - local_f = open(metadata_dtd_tmp, mode='wb')
12409 - local_f.write(url_f.read())
12410 - local_f.close()
12411 - if last_modified is not None:
12412 - try:
12413 - os.utime(metadata_dtd_tmp,
12414 - (int(last_modified), int(last_modified)))
12415 - except OSError:
12416 - # This fails on some odd non-unix-like filesystems.
12417 - # We don't really need the mtime to be preserved
12418 - # anyway here (currently we use ctime to trigger
12419 - # fetch), so just ignore it.
12420 - pass
12421 - os.rename(metadata_dtd_tmp, metadata_dtd)
12422 - finally:
12423 - try:
12424 - os.unlink(metadata_dtd_tmp)
12425 - except OSError:
12426 - pass
12427 + if not portage.getbinpkg.file_get(metadata_dtd_uri,
12428 + destdir, fcmd=fcmd,
12429 + filename=os.path.basename(metadata_dtd_tmp)):
12430 + logging.error("failed to fetch metadata.dtd from '%s'" %
12431 + metadata_dtd_uri)
12432 + return False
12433
12434 - url_f.close()
12435 + try:
12436 + portage.util.apply_secpass_permissions(metadata_dtd_tmp,
12437 + gid=portage.data.portage_gid, mode=0o664, mask=0o2)
12438 + except portage.exception.PortageException:
12439 + pass
12440
12441 - except EnvironmentError as e:
12442 - print()
12443 - print(red("!!!")+" attempting to fetch '%s', caught" % metadata_dtd_uri)
12444 - print(red("!!!")+" exception '%s' though." % (e,))
12445 - print(red("!!!")+" fetching new metadata.dtd failed, aborting")
12446 - return False
12447 + os.rename(metadata_dtd_tmp, metadata_dtd)
12448 + finally:
12449 + try:
12450 + os.unlink(metadata_dtd_tmp)
12451 + except OSError:
12452 + pass
12453
12454 return True
12455
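The rewritten fetch_metadata_dtd() above hands the transfer to the configured FETCHCOMMAND and downloads into a tempfile inside the destination directory before an atomic rename into place. A generic sketch of that download-to-temp-then-rename pattern using only the standard library; urllib stands in for the FETCHCOMMAND machinery, which is Portage-specific:

    import os
    import tempfile
    from urllib.request import urlopen

    def fetch_atomically(uri, dest_path):
        destdir = os.path.dirname(dest_path) or "."
        # Create the tempfile next to the destination so the final rename
        # never crosses a filesystem boundary.
        fd, tmp_path = tempfile.mkstemp(
            prefix=os.path.basename(dest_path) + ".", dir=destdir)
        try:
            with os.fdopen(fd, "wb") as tmp, urlopen(uri) as resp:
                tmp.write(resp.read())
            os.rename(tmp_path, dest_path)
        finally:
            try:
                os.unlink(tmp_path)
            except OSError:
                # Already renamed into place (or never written).
                pass

    # fetch_atomically('http://www.gentoo.org/dtd/metadata.dtd', 'metadata.dtd')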
12456 @@ -1117,14 +1154,14 @@ if options.mode == "manifest":
12457 pass
12458 elif not find_binary('xmllint'):
12459 print(red("!!! xmllint not found. Can't check metadata.xml.\n"))
12460 - if options.xml_parse or repolevel==3:
12461 + if options.xml_parse or repolevel == 3:
12462 print(red("!!!")+" sorry, xmllint is needed. failing\n")
12463 sys.exit(1)
12464 else:
12465 if not fetch_metadata_dtd():
12466 sys.exit(1)
12467 - #this can be problematic if xmllint changes their output
12468 - xmllint_capable=True
12469 + # this can be problematic if xmllint changes their output
12470 + xmllint_capable = True
12471
12472 if options.mode == 'commit' and vcs:
12473 utilities.detect_vcs_conflicts(options, vcs)
12474 @@ -1151,45 +1188,46 @@ if vcs == "cvs":
12475 myremoved = cvstree.findremoved(mycvstree, recursive=1, basedir="./")
12476
12477 elif vcs == "svn":
12478 - with os.popen("svn status") as f:
12479 + with repoman_popen("svn status") as f:
12480 svnstatus = f.readlines()
12481 - mychanged = [ "./" + elem.split()[-1:][0] for elem in svnstatus if elem and elem[:1] in "MR" ]
12482 - mynew = [ "./" + elem.split()[-1:][0] for elem in svnstatus if elem.startswith("A") ]
12483 + mychanged = ["./" + elem.split()[-1:][0] for elem in svnstatus if elem and elem[:1] in "MR"]
12484 + mynew = ["./" + elem.split()[-1:][0] for elem in svnstatus if elem.startswith("A")]
12485 if options.if_modified == "y":
12486 - myremoved = [ "./" + elem.split()[-1:][0] for elem in svnstatus if elem.startswith("D")]
12487 + myremoved = ["./" + elem.split()[-1:][0] for elem in svnstatus if elem.startswith("D")]
12488
12489 elif vcs == "git":
12490 - with os.popen("git diff-index --name-only "
12491 + with repoman_popen("git diff-index --name-only "
12492 "--relative --diff-filter=M HEAD") as f:
12493 mychanged = f.readlines()
12494 mychanged = ["./" + elem[:-1] for elem in mychanged]
12495
12496 - with os.popen("git diff-index --name-only "
12497 + with repoman_popen("git diff-index --name-only "
12498 "--relative --diff-filter=A HEAD") as f:
12499 mynew = f.readlines()
12500 mynew = ["./" + elem[:-1] for elem in mynew]
12501 if options.if_modified == "y":
12502 - with os.popen("git diff-index --name-only "
12503 + with repoman_popen("git diff-index --name-only "
12504 "--relative --diff-filter=D HEAD") as f:
12505 myremoved = f.readlines()
12506 myremoved = ["./" + elem[:-1] for elem in myremoved]
12507
12508 elif vcs == "bzr":
12509 - with os.popen("bzr status -S .") as f:
12510 + with repoman_popen("bzr status -S .") as f:
12511 bzrstatus = f.readlines()
12512 - mychanged = [ "./" + elem.split()[-1:][0].split('/')[-1:][0] for elem in bzrstatus if elem and elem[1:2] == "M" ]
12513 - mynew = [ "./" + elem.split()[-1:][0].split('/')[-1:][0] for elem in bzrstatus if elem and ( elem[1:2] == "NK" or elem[0:1] == "R" ) ]
12514 + mychanged = ["./" + elem.split()[-1:][0].split('/')[-1:][0] for elem in bzrstatus if elem and elem[1:2] == "M"]
12515 + mynew = ["./" + elem.split()[-1:][0].split('/')[-1:][0] for elem in bzrstatus if elem and (elem[1:2] == "NK" or elem[0:1] == "R")]
12516 if options.if_modified == "y":
12517 - myremoved = [ "./" + elem.split()[-3:-2][0].split('/')[-1:][0] for elem in bzrstatus if elem and ( elem[1:2] == "K" or elem[0:1] == "R" ) ]
12518 + myremoved = ["./" + elem.split()[-3:-2][0].split('/')[-1:][0] for elem in bzrstatus if elem and (elem[1:2] == "K" or elem[0:1] == "R")]
12519
12520 elif vcs == "hg":
12521 - with os.popen("hg status --no-status --modified .") as f:
12522 + with repoman_popen("hg status --no-status --modified .") as f:
12523 mychanged = f.readlines()
12524 mychanged = ["./" + elem.rstrip() for elem in mychanged]
12525 - mynew = os.popen("hg status --no-status --added .").readlines()
12526 + with repoman_popen("hg status --no-status --added .") as f:
12527 + mynew = f.readlines()
12528 mynew = ["./" + elem.rstrip() for elem in mynew]
12529 if options.if_modified == "y":
12530 - with os.popen("hg status --no-status --removed .") as f:
12531 + with repoman_popen("hg status --no-status --removed .") as f:
12532 myremoved = f.readlines()
12533 myremoved = ["./" + elem.rstrip() for elem in myremoved]
12534
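The os.popen calls above are swapped for repoman_popen, a helper defined earlier in this script; from its use here it behaves like a popen that returns a with-capable handle yielding text lines. A rough stdlib stand-in under that assumption (popen_lines is a made-up name, not portage API):

import shlex
import subprocess
from contextlib import contextmanager

@contextmanager
def popen_lines(cmd):
    # Run without a shell and hand back decoded lines, usable in a 'with' block.
    proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
    try:
        yield [line.decode('utf-8', 'replace') for line in proc.stdout]
    finally:
        proc.stdout.close()
        proc.wait()

# Example (run inside a git checkout):
with popen_lines("git diff-index --name-only --relative --diff-filter=M HEAD") as f:
    mychanged = ["./" + elem.rstrip("\n") for elem in f]
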
12535 @@ -1211,10 +1249,15 @@ dofail = 0
12536
12537 # NOTE: match-all caches are not shared due to potential
12538 # differences between profiles in _get_implicit_iuse.
12539 -arch_caches={}
12540 +arch_caches = {}
12541 arch_xmatch_caches = {}
12542 shared_xmatch_caches = {"cp-list":{}}
12543
12544 +include_arches = None
12545 +if options.include_arches:
12546 + include_arches = set()
12547 + include_arches.update(*[x.split() for x in options.include_arches])
12548 +
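The --include-arches handling flattens one or more space-separated option values into a single set; update(*...) unpacks each split list as its own argument. For example, with made-up option values:

include_arches = set()
option_values = ["amd64 x86", "arm"]      # stand-in for options.include_arches
include_arches.update(*[x.split() for x in option_values])
print(sorted(include_arches))             # ['amd64', 'arm', 'x86']
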
12549 # Disable the "ebuild.notadded" check when not in commit mode and
12550 # running `svn status` in every package dir will be too expensive.
12551
12552 @@ -1222,12 +1265,37 @@ check_ebuild_notadded = not \
12553 (vcs == "svn" and repolevel < 3 and options.mode != "commit")
12554
12555 # Build a regex from thirdpartymirrors for the SRC_URI.mirror check.
12556 -thirdpartymirrors = []
12557 -for v in repoman_settings.thirdpartymirrors().values():
12558 +thirdpartymirrors = {}
12559 +for k, v in repoman_settings.thirdpartymirrors().items():
12560 for v in v:
12561 if not v.endswith("/"):
12562 v += "/"
12563 - thirdpartymirrors.append(v)
12564 + thirdpartymirrors[v] = k
12565 +
12566 +class _XMLParser(xml.etree.ElementTree.XMLParser):
12567 +
12568 + def __init__(self, data, **kwargs):
12569 + xml.etree.ElementTree.XMLParser.__init__(self, **kwargs)
12570 + self._portage_data = data
12571 + if hasattr(self, 'parser'):
12572 + self._base_XmlDeclHandler = self.parser.XmlDeclHandler
12573 + self.parser.XmlDeclHandler = self._portage_XmlDeclHandler
12574 + self._base_StartDoctypeDeclHandler = \
12575 + self.parser.StartDoctypeDeclHandler
12576 + self.parser.StartDoctypeDeclHandler = \
12577 + self._portage_StartDoctypeDeclHandler
12578 +
12579 + def _portage_XmlDeclHandler(self, version, encoding, standalone):
12580 + if self._base_XmlDeclHandler is not None:
12581 + self._base_XmlDeclHandler(version, encoding, standalone)
12582 + self._portage_data["XML_DECLARATION"] = (version, encoding, standalone)
12583 +
12584 + def _portage_StartDoctypeDeclHandler(self, doctypeName, systemId, publicId,
12585 + has_internal_subset):
12586 + if self._base_StartDoctypeDeclHandler is not None:
12587 + self._base_StartDoctypeDeclHandler(doctypeName, systemId, publicId,
12588 + has_internal_subset)
12589 + self._portage_data["DOCTYPE"] = (doctypeName, systemId, publicId)
12590
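The _XMLParser subclass hooks expat's XmlDeclHandler and StartDoctypeDeclHandler so that metadata.xml's XML declaration and DOCTYPE can be checked after parsing. The same capture can be sketched standalone against the expat module (illustrative only, not the repoman code path):

import xml.parsers.expat

captured = {}

def xml_decl(version, encoding, standalone):
    captured["XML_DECLARATION"] = (version, encoding, standalone)

def start_doctype(name, sysid, pubid, has_internal_subset):
    captured["DOCTYPE"] = (name, sysid, pubid)

p = xml.parsers.expat.ParserCreate()
p.XmlDeclHandler = xml_decl
p.StartDoctypeDeclHandler = start_doctype
p.Parse(b'<?xml version="1.0" encoding="UTF-8"?>\n'
        b'<!DOCTYPE pkgmetadata SYSTEM "http://www.gentoo.org/dtd/metadata.dtd">\n'
        b'<pkgmetadata/>\n', True)
print(captured)
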
12591 class _MetadataTreeBuilder(xml.etree.ElementTree.TreeBuilder):
12592 """
12593 @@ -1252,13 +1320,13 @@ if options.if_modified == "y":
12594 chain(mychanged, mynew, myremoved)))
12595
12596 for x in effective_scanlist:
12597 - #ebuilds and digests added to cvs respectively.
12598 + # ebuilds and digests added to cvs respectively.
12599 logging.info("checking package %s" % x)
12600 # save memory by discarding xmatch caches from previous package(s)
12601 arch_xmatch_caches.clear()
12602 - eadded=[]
12603 - catdir,pkgdir=x.split("/")
12604 - checkdir=repodir+"/"+x
12605 + eadded = []
12606 + catdir, pkgdir = x.split("/")
12607 + checkdir = repodir + "/" + x
12608 checkdir_relative = ""
12609 if repolevel < 3:
12610 checkdir_relative = os.path.join(pkgdir, checkdir_relative)
12611 @@ -1340,15 +1408,15 @@ for x in effective_scanlist:
12612 if options.mode == 'manifest-check':
12613 continue
12614
12615 - checkdirlist=os.listdir(checkdir)
12616 - ebuildlist=[]
12617 + checkdirlist = os.listdir(checkdir)
12618 + ebuildlist = []
12619 pkgs = {}
12620 allvalid = True
12621 for y in checkdirlist:
12622 if (y in no_exec or y.endswith(".ebuild")) and \
12623 - stat.S_IMODE(os.stat(os.path.join(checkdir, y)).st_mode) & 0o111:
12624 - stats["file.executable"] += 1
12625 - fails["file.executable"].append(os.path.join(checkdir, y))
12626 + stat.S_IMODE(os.stat(os.path.join(checkdir, y)).st_mode) & 0o111:
12627 + stats["file.executable"] += 1
12628 + fails["file.executable"].append(os.path.join(checkdir, y))
12629 if y.endswith(".ebuild"):
12630 pf = y[:-7]
12631 ebuildlist.append(pf)
12632 @@ -1389,19 +1457,19 @@ for x in effective_scanlist:
12633 ebuildlist = [pkg.pf for pkg in ebuildlist]
12634
12635 for y in checkdirlist:
12636 - m = disallowed_filename_chars_re.search(y.strip(os.sep))
12637 - if m is not None:
12638 + index = repo_config.find_invalid_path_char(y)
12639 + if index != -1:
12640 y_relative = os.path.join(checkdir_relative, y)
12641 if vcs is not None and not vcs_new_changed(y_relative):
12642 # If the file isn't in the VCS new or changed set, then
12643 # assume that it's an irrelevant temporary file (Manifest
12644 # entries are not generated for file names containing
12645 # prohibited characters). See bug #406877.
12646 - m = None
12647 - if m is not None:
12648 + index = -1
12649 + if index != -1:
12650 stats["file.name"] += 1
12651 fails["file.name"].append("%s/%s: char '%s'" % \
12652 - (checkdir, y, m.group(0)))
12653 + (checkdir, y, y[index]))
12654
12655 if not (y in ("ChangeLog", "metadata.xml") or y.endswith(".ebuild")):
12656 continue
12657 @@ -1412,7 +1480,7 @@ for x in effective_scanlist:
12658 encoding=_encodings['fs'], errors='strict'),
12659 mode='r', encoding=_encodings['repo.content'])
12660 for l in f:
12661 - line +=1
12662 + line += 1
12663 except UnicodeDecodeError as ue:
12664 stats["file.UTF8"] += 1
12665 s = ue.object[:ue.start]
12666 @@ -1427,10 +1495,10 @@ for x in effective_scanlist:
12667
12668 if vcs in ("git", "hg") and check_ebuild_notadded:
12669 if vcs == "git":
12670 - myf = os.popen("git ls-files --others %s" % \
12671 + myf = repoman_popen("git ls-files --others %s" % \
12672 (portage._shell_quote(checkdir_relative),))
12673 if vcs == "hg":
12674 - myf = os.popen("hg status --no-status --unknown %s" % \
12675 + myf = repoman_popen("hg status --no-status --unknown %s" % \
12676 (portage._shell_quote(checkdir_relative),))
12677 for l in myf:
12678 if l[:-1][-7:] == ".ebuild":
12679 @@ -1442,21 +1510,23 @@ for x in effective_scanlist:
12680 if vcs in ("cvs", "svn", "bzr") and check_ebuild_notadded:
12681 try:
12682 if vcs == "cvs":
12683 - myf=open(checkdir+"/CVS/Entries","r")
12684 + myf = open(checkdir + "/CVS/Entries", "r")
12685 if vcs == "svn":
12686 - myf = os.popen("svn status --depth=files --verbose " + checkdir)
12687 + myf = repoman_popen("svn status --depth=files --verbose " +
12688 + portage._shell_quote(checkdir))
12689 if vcs == "bzr":
12690 - myf = os.popen("bzr ls -v --kind=file " + checkdir)
12691 + myf = repoman_popen("bzr ls -v --kind=file " +
12692 + portage._shell_quote(checkdir))
12693 myl = myf.readlines()
12694 myf.close()
12695 for l in myl:
12696 if vcs == "cvs":
12697 - if l[0]!="/":
12698 + if l[0] != "/":
12699 continue
12700 - splitl=l[1:].split("/")
12701 + splitl = l[1:].split("/")
12702 if not len(splitl):
12703 continue
12704 - if splitl[0][-7:]==".ebuild":
12705 + if splitl[0][-7:] == ".ebuild":
12706 eadded.append(splitl[0][:-7])
12707 if vcs == "svn":
12708 if l[:1] == "?":
12709 @@ -1474,8 +1544,9 @@ for x in effective_scanlist:
12710 if l[-7:] == ".ebuild":
12711 eadded.append(os.path.basename(l[:-7]))
12712 if vcs == "svn":
12713 - myf = os.popen("svn status " + checkdir)
12714 - myl=myf.readlines()
12715 + myf = repoman_popen("svn status " +
12716 + portage._shell_quote(checkdir))
12717 + myl = myf.readlines()
12718 myf.close()
12719 for l in myl:
12720 if l[0] == "A":
12721 @@ -1485,7 +1556,7 @@ for x in effective_scanlist:
12722 except IOError:
12723 if vcs == "cvs":
12724 stats["CVS/Entries.IO_error"] += 1
12725 - fails["CVS/Entries.IO_error"].append(checkdir+"/CVS/Entries")
12726 + fails["CVS/Entries.IO_error"].append(checkdir + "/CVS/Entries")
12727 else:
12728 raise
12729 continue
12730 @@ -1493,7 +1564,7 @@ for x in effective_scanlist:
12731 mf = repoman_settings.repositories.get_repo_for_location(
12732 os.path.dirname(os.path.dirname(checkdir)))
12733 mf = mf.load_manifest(checkdir, repoman_settings["DISTDIR"])
12734 - mydigests=mf.getTypeDigests("DIST")
12735 + mydigests = mf.getTypeDigests("DIST")
12736
12737 fetchlist_dict = portage.FetchlistDict(checkdir, repoman_settings, portdb)
12738 myfiles_all = []
12739 @@ -1509,7 +1580,7 @@ for x in effective_scanlist:
12740 # This will be reported as an "ebuild.syntax" error.
12741 pass
12742 else:
12743 - stats["SRC_URI.syntax"] = stats["SRC_URI.syntax"] + 1
12744 + stats["SRC_URI.syntax"] += 1
12745 fails["SRC_URI.syntax"].append(
12746 "%s.ebuild SRC_URI: %s" % (mykey, e))
12747 del fetchlist_dict
12748 @@ -1523,15 +1594,15 @@ for x in effective_scanlist:
12749 for entry in mydigests:
12750 if entry not in myfiles_all:
12751 stats["digest.unused"] += 1
12752 - fails["digest.unused"].append(checkdir+"::"+entry)
12753 + fails["digest.unused"].append(checkdir + "::" + entry)
12754 for entry in myfiles_all:
12755 if entry not in mydigests:
12756 stats["digest.missing"] += 1
12757 - fails["digest.missing"].append(checkdir+"::"+entry)
12758 + fails["digest.missing"].append(checkdir + "::" + entry)
12759 del myfiles_all
12760
12761 - if os.path.exists(checkdir+"/files"):
12762 - filesdirlist=os.listdir(checkdir+"/files")
12763 + if os.path.exists(checkdir + "/files"):
12764 + filesdirlist = os.listdir(checkdir + "/files")
12765
12766 # recurse through files directory
12767 # use filesdirlist as a stack, appending directories as needed so people can't hide > 20k files in a subdirectory.
12768 @@ -1551,77 +1622,110 @@ for x in effective_scanlist:
12769 # !!! VCS "portability" alert! Need some function isVcsDir() or alike !!!
12770 if y == "CVS" or y == ".svn":
12771 continue
12772 - for z in os.listdir(checkdir+"/files/"+y):
12773 + for z in os.listdir(checkdir + "/files/" + y):
12774 if z == "CVS" or z == ".svn":
12775 continue
12776 - filesdirlist.append(y+"/"+z)
12777 + filesdirlist.append(y + "/" + z)
12778 # Current policy is no files over 20 KiB, these are the checks. File size between
12779 # 20 KiB and 60 KiB causes a warning, while file size over 60 KiB causes an error.
12780 elif mystat.st_size > 61440:
12781 stats["file.size.fatal"] += 1
12782 - fails["file.size.fatal"].append("("+ str(mystat.st_size//1024) + " KiB) "+x+"/files/"+y)
12783 + fails["file.size.fatal"].append("(" + str(mystat.st_size//1024) + " KiB) " + x + "/files/" + y)
12784 elif mystat.st_size > 20480:
12785 stats["file.size"] += 1
12786 - fails["file.size"].append("("+ str(mystat.st_size//1024) + " KiB) "+x+"/files/"+y)
12787 + fails["file.size"].append("(" + str(mystat.st_size//1024) + " KiB) " + x + "/files/" + y)
12788
12789 - m = disallowed_filename_chars_re.search(
12790 - os.path.basename(y.rstrip(os.sep)))
12791 - if m is not None:
12792 + index = repo_config.find_invalid_path_char(y)
12793 + if index != -1:
12794 y_relative = os.path.join(checkdir_relative, "files", y)
12795 if vcs is not None and not vcs_new_changed(y_relative):
12796 # If the file isn't in the VCS new or changed set, then
12797 # assume that it's an irrelevant temporary file (Manifest
12798 # entries are not generated for file names containing
12799 # prohibited characters). See bug #406877.
12800 - m = None
12801 - if m is not None:
12802 + index = -1
12803 + if index != -1:
12804 stats["file.name"] += 1
12805 fails["file.name"].append("%s/files/%s: char '%s'" % \
12806 - (checkdir, y, m.group(0)))
12807 -
12808 - if desktop_file_validate and desktop_pattern.match(y):
12809 - cmd_output = validate_desktop_entry(full_path)
12810 - if cmd_output:
12811 - # Note: in the future we may want to grab the
12812 - # warnings in addition to the errors. We're
12813 - # just doing errors now since we don't want
12814 - # to generate too much noise at first.
12815 - error_re = re.compile(r'.*\s*error:\s*(.*)')
12816 - for line in cmd_output:
12817 - error_match = error_re.match(line)
12818 - if error_match is None:
12819 - continue
12820 - stats["desktop.invalid"] += 1
12821 - fails["desktop.invalid"].append(
12822 - relative_path + ': %s' % error_match.group(1))
12823 -
12824 + (checkdir, y, y[index]))
12825 del mydigests
12826
12827 if check_changelog and "ChangeLog" not in checkdirlist:
12828 - stats["changelog.missing"]+=1
12829 - fails["changelog.missing"].append(x+"/ChangeLog")
12830 -
12831 + stats["changelog.missing"] += 1
12832 + fails["changelog.missing"].append(x + "/ChangeLog")
12833 +
12834 musedict = {}
12835 - #metadata.xml file check
12836 + # metadata.xml file check
12837 if "metadata.xml" not in checkdirlist:
12838 - stats["metadata.missing"]+=1
12839 - fails["metadata.missing"].append(x+"/metadata.xml")
12840 - #metadata.xml parse check
12841 + stats["metadata.missing"] += 1
12842 + fails["metadata.missing"].append(x + "/metadata.xml")
12843 + # metadata.xml parse check
12844 else:
12845 metadata_bad = False
12846 + xml_info = {}
12847 + xml_parser = _XMLParser(xml_info, target=_MetadataTreeBuilder())
12848
12849 # read metadata.xml into memory
12850 try:
12851 _metadata_xml = xml.etree.ElementTree.parse(
12852 - os.path.join(checkdir, "metadata.xml"),
12853 - parser=xml.etree.ElementTree.XMLParser(
12854 - target=_MetadataTreeBuilder()))
12855 + _unicode_encode(os.path.join(checkdir, "metadata.xml"),
12856 + encoding=_encodings['fs'], errors='strict'),
12857 + parser=xml_parser)
12858 except (ExpatError, SyntaxError, EnvironmentError) as e:
12859 metadata_bad = True
12860 stats["metadata.bad"] += 1
12861 fails["metadata.bad"].append("%s/metadata.xml: %s" % (x, e))
12862 del e
12863 else:
12864 + if not hasattr(xml_parser, 'parser') or \
12865 + sys.hexversion < 0x2070000 or \
12866 + (sys.hexversion > 0x3000000 and sys.hexversion < 0x3020000):
12867 + # doctype is not parsed with python 2.6 or 3.1
12868 + pass
12869 + else:
12870 + if "XML_DECLARATION" not in xml_info:
12871 + stats["metadata.bad"] += 1
12872 + fails["metadata.bad"].append("%s/metadata.xml: "
12873 + "xml declaration is missing on first line, "
12874 + "should be '%s'" % (x, metadata_xml_declaration))
12875 + else:
12876 + xml_version, xml_encoding, xml_standalone = \
12877 + xml_info["XML_DECLARATION"]
12878 + if xml_encoding is None or \
12879 + xml_encoding.upper() != metadata_xml_encoding:
12880 + stats["metadata.bad"] += 1
12881 + if xml_encoding is None:
12882 + encoding_problem = "but it is undefined"
12883 + else:
12884 + encoding_problem = "not '%s'" % xml_encoding
12885 + fails["metadata.bad"].append("%s/metadata.xml: "
12886 + "xml declaration encoding should be '%s', %s" %
12887 + (x, metadata_xml_encoding, encoding_problem))
12888 +
12889 + if "DOCTYPE" not in xml_info:
12890 + metadata_bad = True
12891 + stats["metadata.bad"] += 1
12892 + fails["metadata.bad"].append("%s/metadata.xml: %s" % (x,
12893 + "DOCTYPE is missing"))
12894 + else:
12895 + doctype_name, doctype_system, doctype_pubid = \
12896 + xml_info["DOCTYPE"]
12897 + if doctype_system != metadata_dtd_uri:
12898 + stats["metadata.bad"] += 1
12899 + if doctype_system is None:
12900 + system_problem = "but it is undefined"
12901 + else:
12902 + system_problem = "not '%s'" % doctype_system
12903 + fails["metadata.bad"].append("%s/metadata.xml: "
12904 + "DOCTYPE: SYSTEM should refer to '%s', %s" %
12905 + (x, metadata_dtd_uri, system_problem))
12906 +
12907 + if doctype_name != metadata_doctype_name:
12908 + stats["metadata.bad"] += 1
12909 + fails["metadata.bad"].append("%s/metadata.xml: "
12910 + "DOCTYPE: name should be '%s', not '%s'" %
12911 + (x, metadata_doctype_name, doctype_name))
12912 +
12913 # load USE flags from metadata.xml
12914 try:
12915 musedict = utilities.parse_metadata_use(_metadata_xml)
12916 @@ -1629,6 +1733,22 @@ for x in effective_scanlist:
12917 metadata_bad = True
12918 stats["metadata.bad"] += 1
12919 fails["metadata.bad"].append("%s/metadata.xml: %s" % (x, e))
12920 + else:
12921 + for atom in chain(*musedict.values()):
12922 + if atom is None:
12923 + continue
12924 + try:
12925 + atom = Atom(atom)
12926 + except InvalidAtom as e:
12927 + stats["metadata.bad"] += 1
12928 + fails["metadata.bad"].append(
12929 + "%s/metadata.xml: Invalid atom: %s" % (x, e))
12930 + else:
12931 + if atom.cp != x:
12932 + stats["metadata.bad"] += 1
12933 + fails["metadata.bad"].append(
12934 + ("%s/metadata.xml: Atom contains "
12935 + "unexpected cat/pn: %s") % (x, atom))
12936
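The new else-branch validates every atom named in metadata.xml USE-flag descriptions: each must parse as a dependency atom and its category/package must match the package being checked. A self-contained sketch using portage's dep API (assumes portage is importable; the sample musedict is made up):

from portage.dep import Atom
from portage.exception import InvalidAtom

def check_metadata_atoms(musedict, cp):
    problems = []
    for flag_atoms in musedict.values():
        for atom in flag_atoms:
            if atom is None:
                continue
            try:
                parsed = Atom(atom)
            except InvalidAtom as e:
                problems.append("Invalid atom: %s" % (e,))
            else:
                if parsed.cp != cp:
                    problems.append("unexpected cat/pn: %s" % (parsed,))
    return problems

print(check_metadata_atoms({"static": ["dev-libs/libfoo"], "gtk": ["x11-libs/gtk+", None]},
                           "dev-libs/libfoo"))
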
12937 # Run other metadata.xml checkers
12938 try:
12939 @@ -1639,19 +1759,20 @@ for x in effective_scanlist:
12940 fails["metadata.bad"].append("%s/metadata.xml: %s" % (x, e))
12941 del e
12942
12943 - #Only carry out if in package directory or check forced
12944 + # Only carry out if in package directory or check forced
12945 if xmllint_capable and not metadata_bad:
12946 # xmllint can produce garbage output even on success, so only dump
12947 # the output when it fails.
12948 - st, out = subprocess_getstatusoutput(
12949 - "xmllint --nonet --noout --dtdvalid '%s' '%s'" % \
12950 - (metadata_dtd, os.path.join(checkdir, "metadata.xml")))
12951 + st, out = repoman_getstatusoutput(
12952 + "xmllint --nonet --noout --dtdvalid %s %s" % \
12953 + (portage._shell_quote(metadata_dtd),
12954 + portage._shell_quote(os.path.join(checkdir, "metadata.xml"))))
12955 if st != os.EX_OK:
12956 print(red("!!!") + " metadata.xml is invalid:")
12957 for z in out.splitlines():
12958 - print(red("!!! ")+z)
12959 - stats["metadata.bad"]+=1
12960 - fails["metadata.bad"].append(x+"/metadata.xml")
12961 + print(red("!!! ") + z)
12962 + stats["metadata.bad"] += 1
12963 + fails["metadata.bad"].append(x + "/metadata.xml")
12964
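The xmllint invocation now shell-quotes both the DTD path and the metadata.xml path rather than relying on hand-placed single quotes. A rough Python 3 stdlib equivalent of the same precaution (portage._shell_quote plays the role shlex.quote plays here; paths are illustrative):

import shlex
import subprocess

dtd = "/usr/portage/metadata/dtd/metadata.dtd"
xml_path = "dev-libs/libfoo/metadata.xml"
cmd = "xmllint --nonet --noout --dtdvalid %s %s" % (
    shlex.quote(dtd), shlex.quote(xml_path))
status, output = subprocess.getstatusoutput(cmd)
if status != 0:
    print(output)
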
12965 del metadata_bad
12966 muselist = frozenset(musedict)
12967 @@ -1677,20 +1798,20 @@ for x in effective_scanlist:
12968 fails['changelog.ebuildadded'].append(relative_path)
12969
12970 if vcs in ("cvs", "svn", "bzr") and check_ebuild_notadded and y not in eadded:
12971 - #ebuild not added to vcs
12972 - stats["ebuild.notadded"]=stats["ebuild.notadded"]+1
12973 - fails["ebuild.notadded"].append(x+"/"+y+".ebuild")
12974 - myesplit=portage.pkgsplit(y)
12975 + # ebuild not added to vcs
12976 + stats["ebuild.notadded"] += 1
12977 + fails["ebuild.notadded"].append(x + "/" + y + ".ebuild")
12978 + myesplit = portage.pkgsplit(y)
12979 if myesplit is None or myesplit[0] != x.split("/")[-1] \
12980 or pv_toolong_re.search(myesplit[1]) \
12981 or pv_toolong_re.search(myesplit[2]):
12982 - stats["ebuild.invalidname"]=stats["ebuild.invalidname"]+1
12983 - fails["ebuild.invalidname"].append(x+"/"+y+".ebuild")
12984 + stats["ebuild.invalidname"] += 1
12985 + fails["ebuild.invalidname"].append(x + "/" + y + ".ebuild")
12986 continue
12987 - elif myesplit[0]!=pkgdir:
12988 - print(pkgdir,myesplit[0])
12989 - stats["ebuild.namenomatch"]=stats["ebuild.namenomatch"]+1
12990 - fails["ebuild.namenomatch"].append(x+"/"+y+".ebuild")
12991 + elif myesplit[0] != pkgdir:
12992 + print(pkgdir, myesplit[0])
12993 + stats["ebuild.namenomatch"] += 1
12994 + fails["ebuild.namenomatch"].append(x + "/" + y + ".ebuild")
12995 continue
12996
12997 pkg = pkgs[y]
12998 @@ -1699,15 +1820,25 @@ for x in effective_scanlist:
12999 allvalid = False
13000 for k, msgs in pkg.invalid.items():
13001 for msg in msgs:
13002 - stats[k] = stats[k] + 1
13003 - fails[k].append("%s %s" % (relative_path, msg))
13004 + stats[k] += 1
13005 + fails[k].append("%s: %s" % (relative_path, msg))
13006 continue
13007
13008 - myaux = pkg.metadata
13009 + myaux = pkg._metadata
13010 eapi = myaux["EAPI"]
13011 inherited = pkg.inherited
13012 live_ebuild = live_eclasses.intersection(inherited)
13013
13014 + if repo_config.eapi_is_banned(eapi):
13015 + stats["repo.eapi.banned"] += 1
13016 + fails["repo.eapi.banned"].append(
13017 + "%s: %s" % (relative_path, eapi))
13018 +
13019 + elif repo_config.eapi_is_deprecated(eapi):
13020 + stats["repo.eapi.deprecated"] += 1
13021 + fails["repo.eapi.deprecated"].append(
13022 + "%s: %s" % (relative_path, eapi))
13023 +
13024 for k, v in myaux.items():
13025 if not isinstance(v, basestring):
13026 continue
13027 @@ -1724,20 +1855,21 @@ for x in effective_scanlist:
13028 for uri in portage.dep.use_reduce( \
13029 myaux["SRC_URI"], matchall=True, is_src_uri=True, eapi=eapi, flat=True):
13030 contains_mirror = False
13031 - for mirror in thirdpartymirrors:
13032 + for mirror, mirror_alias in thirdpartymirrors.items():
13033 if uri.startswith(mirror):
13034 contains_mirror = True
13035 break
13036 if not contains_mirror:
13037 continue
13038
13039 + new_uri = "mirror://%s/%s" % (mirror_alias, uri[len(mirror):])
13040 stats["SRC_URI.mirror"] += 1
13041 fails["SRC_URI.mirror"].append(
13042 - "%s: '%s' found in thirdpartymirrors" % \
13043 - (relative_path, mirror))
13044 + "%s: '%s' found in thirdpartymirrors, use '%s'" % \
13045 + (relative_path, mirror, new_uri))
13046
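Because thirdpartymirrors is now a mapping from mirror URL prefix to mirror alias, the SRC_URI.mirror warning can suggest the concrete mirror:// replacement. The rewrite logic in isolation (the sample mirror entry and URI are made up):

thirdpartymirrors = {"http://downloads.sourceforge.net/": "sourceforge"}

uri = "http://downloads.sourceforge.net/project/foo/foo-1.0.tar.gz"
for mirror, mirror_alias in thirdpartymirrors.items():
    if uri.startswith(mirror):
        new_uri = "mirror://%s/%s" % (mirror_alias, uri[len(mirror):])
        print("'%s' found in thirdpartymirrors, use '%s'" % (mirror, new_uri))
        break
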
13047 if myaux.get("PROVIDE"):
13048 - stats["virtual.oldstyle"]+=1
13049 + stats["virtual.oldstyle"] += 1
13050 fails["virtual.oldstyle"].append(relative_path)
13051
13052 for pos, missing_var in enumerate(missingvars):
13053 @@ -1747,15 +1879,15 @@ for x in effective_scanlist:
13054 continue
13055 if live_ebuild and missing_var == "KEYWORDS":
13056 continue
13057 - myqakey=missingvars[pos]+".missing"
13058 - stats[myqakey]=stats[myqakey]+1
13059 - fails[myqakey].append(x+"/"+y+".ebuild")
13060 + myqakey = missingvars[pos] + ".missing"
13061 + stats[myqakey] += 1
13062 + fails[myqakey].append(x + "/" + y + ".ebuild")
13063
13064 if catdir == "virtual":
13065 for var in ("HOMEPAGE", "LICENSE"):
13066 if myaux.get(var):
13067 myqakey = var + ".virtual"
13068 - stats[myqakey] = stats[myqakey] + 1
13069 + stats[myqakey] += 1
13070 fails[myqakey].append(relative_path)
13071
13072 # 14 is the length of DESCRIPTION=""
13073 @@ -1772,7 +1904,7 @@ for x in effective_scanlist:
13074 not keyword.startswith("-"):
13075 stable_keywords.append(keyword)
13076 if stable_keywords:
13077 - if ebuild_path in new_ebuilds:
13078 + if ebuild_path in new_ebuilds and catdir != "virtual":
13079 stable_keywords.sort()
13080 stats["KEYWORDS.stable"] += 1
13081 fails["KEYWORDS.stable"].append(
13082 @@ -1782,10 +1914,10 @@ for x in effective_scanlist:
13083 ebuild_archs = set(kw.lstrip("~") for kw in keywords \
13084 if not kw.startswith("-"))
13085
13086 - previous_keywords = slot_keywords.get(myaux["SLOT"])
13087 + previous_keywords = slot_keywords.get(pkg.slot)
13088 if previous_keywords is None:
13089 - slot_keywords[myaux["SLOT"]] = set()
13090 - elif ebuild_archs and not live_ebuild:
13091 + slot_keywords[pkg.slot] = set()
13092 + elif ebuild_archs and "*" not in ebuild_archs and not live_ebuild:
13093 dropped_keywords = previous_keywords.difference(ebuild_archs)
13094 if dropped_keywords:
13095 stats["KEYWORDS.dropped"] += 1
13096 @@ -1793,7 +1925,7 @@ for x in effective_scanlist:
13097 relative_path + ": %s" % \
13098 " ".join(sorted(dropped_keywords)))
13099
13100 - slot_keywords[myaux["SLOT"]].update(ebuild_archs)
13101 + slot_keywords[pkg.slot].update(ebuild_archs)
13102
13103 # KEYWORDS="-*" is a stupid replacement for package.mask and screws general KEYWORDS semantics
13104 if "-*" in keywords:
13105 @@ -1805,7 +1937,7 @@ for x in effective_scanlist:
13106 haskeyword = True
13107 if not haskeyword:
13108 stats["KEYWORDS.stupid"] += 1
13109 - fails["KEYWORDS.stupid"].append(x+"/"+y+".ebuild")
13110 + fails["KEYWORDS.stupid"].append(x + "/" + y + ".ebuild")
13111
13112 """
13113 Ebuilds that inherit a "Live" eclass (darcs,subversion,git,cvs,etc..) should
13114 @@ -1833,37 +1965,53 @@ for x in effective_scanlist:
13115 arches = [[repoman_settings["ARCH"], repoman_settings["ARCH"],
13116 repoman_settings["ACCEPT_KEYWORDS"].split()]]
13117 else:
13118 - arches=[]
13119 - for keyword in myaux["KEYWORDS"].split():
13120 - if (keyword[0]=="-"):
13121 + arches = set()
13122 + for keyword in keywords:
13123 + if keyword[0] == "-":
13124 continue
13125 - elif (keyword[0]=="~"):
13126 - arches.append([keyword, keyword[1:], [keyword[1:], keyword]])
13127 + elif keyword[0] == "~":
13128 + arch = keyword[1:]
13129 + if arch == "*":
13130 + for expanded_arch in profiles:
13131 + if expanded_arch == "**":
13132 + continue
13133 + arches.add((keyword, expanded_arch,
13134 + (expanded_arch, "~" + expanded_arch)))
13135 + else:
13136 + arches.add((keyword, arch, (arch, keyword)))
13137 else:
13138 - arches.append([keyword, keyword, [keyword]])
13139 + if keyword == "*":
13140 + for expanded_arch in profiles:
13141 + if expanded_arch == "**":
13142 + continue
13143 + arches.add((keyword, expanded_arch,
13144 + (expanded_arch,)))
13145 + else:
13146 + arches.add((keyword, keyword, (keyword,)))
13147 if not arches:
13148 # Use an empty profile for checking dependencies of
13149 # packages that have empty KEYWORDS.
13150 - arches.append(['**', '**', ['**']])
13151 + arches.add(('**', '**', ('**',)))
13152
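The keyword handling now builds a set of (keyword, arch, accept_keywords_groups) tuples and expands the '*' and '~*' wildcards against the known profile arches. A compact sketch of that expansion with a made-up profiles mapping:

profiles = {"amd64": [], "x86": [], "**": []}   # stand-in for repoman's profiles dict

def expand_keywords(keywords):
    arches = set()
    for keyword in keywords:
        if keyword.startswith("-"):
            continue
        unstable = keyword.startswith("~")
        arch = keyword.lstrip("~")
        expanded = [a for a in profiles if a != "**"] if arch == "*" else [arch]
        for a in expanded:
            groups = (a, "~" + a) if unstable else (a,)
            arches.add((keyword, a, groups))
    if not arches:
        # empty KEYWORDS: fall back to checking against an empty profile
        arches.add(("**", "**", ("**",)))
    return arches

print(sorted(expand_keywords(["~*", "-sparc"])))
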
13153 unknown_pkgs = set()
13154 baddepsyntax = False
13155 badlicsyntax = False
13156 badprovsyntax = False
13157 - catpkg = catdir+"/"+y
13158 + catpkg = catdir + "/" + y
13159
13160 inherited_java_eclass = "java-pkg-2" in inherited or \
13161 "java-pkg-opt-2" in inherited
13162 inherited_wxwidgets_eclass = "wxwidgets" in inherited
13163 operator_tokens = set(["||", "(", ")"])
13164 type_list, badsyntax = [], []
13165 - for mytype in ("DEPEND", "RDEPEND", "PDEPEND",
13166 - "LICENSE", "PROPERTIES", "PROVIDE"):
13167 + for mytype in Package._dep_keys + ("LICENSE", "PROPERTIES", "PROVIDE"):
13168 mydepstr = myaux[mytype]
13169
13170 + buildtime = mytype in Package._buildtime_keys
13171 + runtime = mytype in Package._runtime_keys
13172 token_class = None
13173 - if mytype in ("DEPEND", "RDEPEND", "PDEPEND"):
13174 - token_class=portage.dep.Atom
13175 + if mytype.endswith("DEPEND"):
13176 + token_class = portage.dep.Atom
13177
13178 try:
13179 atoms = portage.dep.use_reduce(mydepstr, matchall=1, flat=True, \
13180 @@ -1872,8 +2020,8 @@ for x in effective_scanlist:
13181 atoms = None
13182 badsyntax.append(str(e))
13183
13184 - if atoms and mytype in ("DEPEND", "RDEPEND", "PDEPEND"):
13185 - if mytype in ("RDEPEND", "PDEPEND") and \
13186 + if atoms and mytype.endswith("DEPEND"):
13187 + if runtime and \
13188 "test?" in mydepstr.split():
13189 stats[mytype + '.suspect'] += 1
13190 fails[mytype + '.suspect'].append(relative_path + \
13191 @@ -1902,21 +2050,21 @@ for x in effective_scanlist:
13192 ": %s: consider using '%s' instead of '%s'" %
13193 (mytype, suspect_virtual[atom.cp], atom))
13194
13195 - if mytype == "DEPEND" and \
13196 + if buildtime and \
13197 not is_blocker and \
13198 not inherited_java_eclass and \
13199 atom.cp == "virtual/jdk":
13200 stats['java.eclassesnotused'] += 1
13201 fails['java.eclassesnotused'].append(relative_path)
13202 - elif mytype == "DEPEND" and \
13203 + elif buildtime and \
13204 not is_blocker and \
13205 not inherited_wxwidgets_eclass and \
13206 atom.cp == "x11-libs/wxGTK":
13207 stats['wxwidgets.eclassnotused'] += 1
13208 fails['wxwidgets.eclassnotused'].append(
13209 - relative_path + ": DEPENDs on x11-libs/wxGTK"
13210 - " without inheriting wxwidgets.eclass")
13211 - elif mytype in ("PDEPEND", "RDEPEND"):
13212 + (relative_path + ": %ss on x11-libs/wxGTK"
13213 + " without inheriting wxwidgets.eclass") % mytype)
13214 + elif runtime:
13215 if not is_blocker and \
13216 atom.cp in suspect_rdepend:
13217 stats[mytype + '.suspect'] += 1
13218 @@ -1925,21 +2073,26 @@ for x in effective_scanlist:
13219
13220 if atom.operator == "~" and \
13221 portage.versions.catpkgsplit(atom.cpv)[3] != "r0":
13222 - stats[mytype + '.badtilde'] += 1
13223 - fails[mytype + '.badtilde'].append(
13224 + qacat = 'dependency.badtilde'
13225 + stats[qacat] += 1
13226 + fails[qacat].append(
13227 (relative_path + ": %s uses the ~ operator"
13228 " with a non-zero revision:" + \
13229 " '%s'") % (mytype, atom))
13230
13231 type_list.extend([mytype] * (len(badsyntax) - len(type_list)))
13232
13233 - for m,b in zip(type_list, badsyntax):
13234 - stats[m+".syntax"] += 1
13235 - fails[m+".syntax"].append(catpkg+".ebuild "+m+": "+b)
13236 + for m, b in zip(type_list, badsyntax):
13237 + if m.endswith("DEPEND"):
13238 + qacat = "dependency.syntax"
13239 + else:
13240 + qacat = m + ".syntax"
13241 + stats[qacat] += 1
13242 + fails[qacat].append("%s: %s: %s" % (relative_path, m, b))
13243
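The tilde check, now reported under the single dependency.badtilde category, flags atoms such as ~cat/pkg-1.0-r1 where the ~ operator makes the explicit revision meaningless. The same test in isolation (assumes portage is importable):

import portage.dep
import portage.versions

atom = portage.dep.Atom("~dev-libs/foo-1.2-r1")
if atom.operator == "~" and portage.versions.catpkgsplit(atom.cpv)[3] != "r0":
    print("dependency.badtilde: %s uses ~ with a non-zero revision" % (atom,))
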
13244 badlicsyntax = len([z for z in type_list if z == "LICENSE"])
13245 badprovsyntax = len([z for z in type_list if z == "PROVIDE"])
13246 - baddepsyntax = len(type_list) != badlicsyntax + badprovsyntax
13247 + baddepsyntax = len(type_list) != badlicsyntax + badprovsyntax
13248 badlicsyntax = badlicsyntax > 0
13249 badprovsyntax = badprovsyntax > 0
13250
13251 @@ -1955,7 +2108,7 @@ for x in effective_scanlist:
13252 myuse.append(flag_name)
13253
13254 # uselist checks - metadata
13255 - for mypos in range(len(myuse)-1,-1,-1):
13256 + for mypos in range(len(myuse)-1, -1, -1):
13257 if myuse[mypos] and (myuse[mypos] in muselist):
13258 del myuse[mypos]
13259
13260 @@ -1968,8 +2121,17 @@ for x in effective_scanlist:
13261 " '%s'") % (eapi, myflag))
13262
13263 for mypos in range(len(myuse)):
13264 - stats["IUSE.invalid"]=stats["IUSE.invalid"]+1
13265 - fails["IUSE.invalid"].append(x+"/"+y+".ebuild: %s" % myuse[mypos])
13266 + stats["IUSE.invalid"] += 1
13267 + fails["IUSE.invalid"].append(x + "/" + y + ".ebuild: %s" % myuse[mypos])
13268 +
13269 + # Check for outdated RUBY targets
13270 + if "ruby-ng" in inherited or "ruby-fakegem" in inherited or "ruby" in inherited:
13271 + ruby_intersection = pkg.iuse.all.intersection(ruby_deprecated)
13272 + if ruby_intersection:
13273 + for myruby in ruby_intersection:
13274 + stats["IUSE.rubydeprecated"] += 1
13275 + fails["IUSE.rubydeprecated"].append(
13276 + (relative_path + ": Deprecated ruby target: %s") % myruby)
13277
13278 # license checks
13279 if not badlicsyntax:
13280 @@ -1982,10 +2144,13 @@ for x in effective_scanlist:
13281 # Need to check for "||" manually as no portage
13282 # function will remove it without removing values.
13283 if lic not in liclist and lic != "||":
13284 - stats["LICENSE.invalid"]=stats["LICENSE.invalid"]+1
13285 - fails["LICENSE.invalid"].append(x+"/"+y+".ebuild: %s" % lic)
13286 + stats["LICENSE.invalid"] += 1
13287 + fails["LICENSE.invalid"].append(x + "/" + y + ".ebuild: %s" % lic)
13288 + elif lic in liclist_deprecated:
13289 + stats["LICENSE.deprecated"] += 1
13290 + fails["LICENSE.deprecated"].append("%s: %s" % (relative_path, lic))
13291
13292 - #keyword checks
13293 + # keyword checks
13294 myuse = myaux["KEYWORDS"].split()
13295 for mykey in myuse:
13296 if mykey not in ("-*", "*", "~*"):
13297 @@ -1996,17 +2161,17 @@ for x in effective_scanlist:
13298 myskey = myskey[1:]
13299 if myskey not in kwlist:
13300 stats["KEYWORDS.invalid"] += 1
13301 - fails["KEYWORDS.invalid"].append(x+"/"+y+".ebuild: %s" % mykey)
13302 + fails["KEYWORDS.invalid"].append(x + "/" + y + ".ebuild: %s" % mykey)
13303 elif myskey not in profiles:
13304 stats["KEYWORDS.invalid"] += 1
13305 - fails["KEYWORDS.invalid"].append(x+"/"+y+".ebuild: %s (profile invalid)" % mykey)
13306 + fails["KEYWORDS.invalid"].append(x + "/" + y + ".ebuild: %s (profile invalid)" % mykey)
13307
13308 - #restrict checks
13309 + # restrict checks
13310 myrestrict = None
13311 try:
13312 myrestrict = portage.dep.use_reduce(myaux["RESTRICT"], matchall=1, flat=True)
13313 except portage.exception.InvalidDependString as e:
13314 - stats["RESTRICT.syntax"] = stats["RESTRICT.syntax"] + 1
13315 + stats["RESTRICT.syntax"] += 1
13316 fails["RESTRICT.syntax"].append(
13317 "%s: RESTRICT: %s" % (relative_path, e))
13318 del e
13319 @@ -2016,8 +2181,8 @@ for x in effective_scanlist:
13320 if mybadrestrict:
13321 stats["RESTRICT.invalid"] += len(mybadrestrict)
13322 for mybad in mybadrestrict:
13323 - fails["RESTRICT.invalid"].append(x+"/"+y+".ebuild: %s" % mybad)
13324 - #REQUIRED_USE check
13325 + fails["RESTRICT.invalid"].append(x + "/" + y + ".ebuild: %s" % mybad)
13326 + # REQUIRED_USE check
13327 required_use = myaux["REQUIRED_USE"]
13328 if required_use:
13329 if not eapi_has_required_use(eapi):
13330 @@ -2027,9 +2192,9 @@ for x in effective_scanlist:
13331 " not supported with EAPI='%s'" % (eapi,))
13332 try:
13333 portage.dep.check_required_use(required_use, (),
13334 - pkg.iuse.is_valid_flag)
13335 + pkg.iuse.is_valid_flag, eapi=eapi)
13336 except portage.exception.InvalidDependString as e:
13337 - stats["REQUIRED_USE.syntax"] = stats["REQUIRED_USE.syntax"] + 1
13338 + stats["REQUIRED_USE.syntax"] += 1
13339 fails["REQUIRED_USE.syntax"].append(
13340 "%s: REQUIRED_USE: %s" % (relative_path, e))
13341 del e
13342 @@ -2062,127 +2227,154 @@ for x in effective_scanlist:
13343 # user is intent on forcing the commit anyway.
13344 continue
13345
13346 - for keyword,arch,groups in arches:
13347 -
13348 + relevant_profiles = []
13349 + for keyword, arch, groups in arches:
13350 if arch not in profiles:
13351 # A missing profile will create an error further down
13352 # during the KEYWORDS verification.
13353 continue
13354 -
13355 - for prof in profiles[arch]:
13356
13357 - if prof.status not in ("stable", "dev") or \
13358 - prof.status == "dev" and not options.include_dev:
13359 + if include_arches is not None:
13360 + if arch not in include_arches:
13361 continue
13362
13363 - dep_settings = arch_caches.get(prof.sub_path)
13364 - if dep_settings is None:
13365 - dep_settings = portage.config(
13366 - config_profile_path=prof.abs_path,
13367 - config_incrementals=repoman_incrementals,
13368 - config_root=config_root,
13369 - local_config=False,
13370 - _unmatched_removal=options.unmatched_removal,
13371 - env=env)
13372 - dep_settings.categories = repoman_settings.categories
13373 - if options.without_mask:
13374 - dep_settings._mask_manager_obj = \
13375 - copy.deepcopy(dep_settings._mask_manager)
13376 - dep_settings._mask_manager._pmaskdict.clear()
13377 - arch_caches[prof.sub_path] = dep_settings
13378 -
13379 - xmatch_cache_key = (prof.sub_path, tuple(groups))
13380 - xcache = arch_xmatch_caches.get(xmatch_cache_key)
13381 - if xcache is None:
13382 - portdb.melt()
13383 - portdb.freeze()
13384 - xcache = portdb.xcache
13385 - xcache.update(shared_xmatch_caches)
13386 - arch_xmatch_caches[xmatch_cache_key] = xcache
13387 -
13388 - trees[root]["porttree"].settings = dep_settings
13389 - portdb.settings = dep_settings
13390 - portdb.xcache = xcache
13391 - # for package.use.mask support inside dep_check
13392 - dep_settings.setcpv(pkg)
13393 - dep_settings["ACCEPT_KEYWORDS"] = " ".join(groups)
13394 - # just in case, prevent config.reset() from nuking these.
13395 - dep_settings.backup_changes("ACCEPT_KEYWORDS")
13396 -
13397 - if not baddepsyntax:
13398 - ismasked = not ebuild_archs or \
13399 - pkg.cpv not in portdb.xmatch("match-visible", pkg.cp)
13400 - if ismasked:
13401 - if not have_pmasked:
13402 - have_pmasked = bool(dep_settings._getMaskAtom(
13403 - pkg.cpv, pkg.metadata))
13404 - if options.ignore_masked:
13405 - continue
13406 - #we are testing deps for a masked package; give it some lee-way
13407 - suffix="masked"
13408 - matchmode = "minimum-all"
13409 - else:
13410 - suffix=""
13411 - matchmode = "minimum-visible"
13412 -
13413 - if not have_dev_keywords:
13414 - have_dev_keywords = \
13415 - bool(dev_keywords.intersection(keywords))
13416 -
13417 - if prof.status == "dev":
13418 - suffix=suffix+"indev"
13419 -
13420 - for mytype,mypos in [["DEPEND",len(missingvars)],["RDEPEND",len(missingvars)+1],["PDEPEND",len(missingvars)+2]]:
13421 -
13422 - mykey=mytype+".bad"+suffix
13423 - myvalue = myaux[mytype]
13424 - if not myvalue:
13425 - continue
13426 -
13427 - success, atoms = portage.dep_check(myvalue, portdb,
13428 - dep_settings, use="all", mode=matchmode,
13429 - trees=trees)
13430 -
13431 - if success:
13432 - if atoms:
13433 -
13434 - # Don't bother with dependency.unknown for
13435 - # cases in which *DEPEND.bad is triggered.
13436 - for atom in atoms:
13437 - # dep_check returns all blockers and they
13438 - # aren't counted for *DEPEND.bad, so we
13439 - # ignore them here.
13440 - if not atom.blocker:
13441 - unknown_pkgs.discard(
13442 - (mytype, atom.unevaluated_atom))
13443 -
13444 - if not prof.sub_path:
13445 - # old-style virtuals currently aren't
13446 - # resolvable with empty profile, since
13447 - # 'virtuals' mappings are unavailable
13448 - # (it would be expensive to search
13449 - # for PROVIDE in all ebuilds)
13450 - atoms = [atom for atom in atoms if not \
13451 - (atom.cp.startswith('virtual/') and \
13452 - not portdb.cp_list(atom.cp))]
13453 -
13454 - #we have some unsolvable deps
13455 - #remove ! deps, which always show up as unsatisfiable
13456 - atoms = [str(atom.unevaluated_atom) \
13457 - for atom in atoms if not atom.blocker]
13458 -
13459 - #if we emptied out our list, continue:
13460 - if not atoms:
13461 - continue
13462 - stats[mykey]=stats[mykey]+1
13463 - fails[mykey].append("%s: %s(%s) %s" % \
13464 - (relative_path, keyword,
13465 - prof, repr(atoms)))
13466 - else:
13467 - stats[mykey]=stats[mykey]+1
13468 - fails[mykey].append("%s: %s(%s) %s" % \
13469 - (relative_path, keyword,
13470 + relevant_profiles.extend((keyword, groups, prof)
13471 + for prof in profiles[arch])
13472 +
13473 + def sort_key(item):
13474 + return item[2].sub_path
13475 +
13476 + relevant_profiles.sort(key=sort_key)
13477 +
13478 + for keyword, groups, prof in relevant_profiles:
13479 +
13480 + if not (prof.status == "stable" or \
13481 + (prof.status == "dev" and options.include_dev) or \
13482 + (prof.status == "exp" and options.include_exp_profiles == 'y')):
13483 + continue
13484 +
13485 + dep_settings = arch_caches.get(prof.sub_path)
13486 + if dep_settings is None:
13487 + dep_settings = portage.config(
13488 + config_profile_path=prof.abs_path,
13489 + config_incrementals=repoman_incrementals,
13490 + config_root=config_root,
13491 + local_config=False,
13492 + _unmatched_removal=options.unmatched_removal,
13493 + env=env, repositories=repoman_settings.repositories)
13494 + dep_settings.categories = repoman_settings.categories
13495 + if options.without_mask:
13496 + dep_settings._mask_manager_obj = \
13497 + copy.deepcopy(dep_settings._mask_manager)
13498 + dep_settings._mask_manager._pmaskdict.clear()
13499 + arch_caches[prof.sub_path] = dep_settings
13500 +
13501 + xmatch_cache_key = (prof.sub_path, tuple(groups))
13502 + xcache = arch_xmatch_caches.get(xmatch_cache_key)
13503 + if xcache is None:
13504 + portdb.melt()
13505 + portdb.freeze()
13506 + xcache = portdb.xcache
13507 + xcache.update(shared_xmatch_caches)
13508 + arch_xmatch_caches[xmatch_cache_key] = xcache
13509 +
13510 + trees[root]["porttree"].settings = dep_settings
13511 + portdb.settings = dep_settings
13512 + portdb.xcache = xcache
13513 +
13514 + dep_settings["ACCEPT_KEYWORDS"] = " ".join(groups)
13515 + # just in case, prevent config.reset() from nuking these.
13516 + dep_settings.backup_changes("ACCEPT_KEYWORDS")
13517 +
13518 + # This attribute is used in dbapi._match_use() to apply
13519 + # use.stable.{mask,force} settings based on the stable
13520 + # status of the parent package. This is required in order
13521 + # for USE deps of unstable packages to be resolved correctly,
13522 + # since otherwise use.stable.{mask,force} settings of
13523 + # dependencies may conflict (see bug #456342).
13524 + dep_settings._parent_stable = dep_settings._isStable(pkg)
13525 +
13526 + # Handle package.use*.{force,mask} calculation, for use
13527 + # in dep_check.
13528 + dep_settings.useforce = dep_settings._use_manager.getUseForce(
13529 + pkg, stable=dep_settings._parent_stable)
13530 + dep_settings.usemask = dep_settings._use_manager.getUseMask(
13531 + pkg, stable=dep_settings._parent_stable)
13532 +
13533 + if not baddepsyntax:
13534 + ismasked = not ebuild_archs or \
13535 + pkg.cpv not in portdb.xmatch("match-visible", pkg.cp)
13536 + if ismasked:
13537 + if not have_pmasked:
13538 + have_pmasked = bool(dep_settings._getMaskAtom(
13539 + pkg.cpv, pkg._metadata))
13540 + if options.ignore_masked:
13541 + continue
13542 + # we are testing deps for a masked package; give it some lee-way
13543 + suffix = "masked"
13544 + matchmode = "minimum-all"
13545 + else:
13546 + suffix = ""
13547 + matchmode = "minimum-visible"
13548 +
13549 + if not have_dev_keywords:
13550 + have_dev_keywords = \
13551 + bool(dev_keywords.intersection(keywords))
13552 +
13553 + if prof.status == "dev":
13554 + suffix = suffix + "indev"
13555 +
13556 + for mytype in Package._dep_keys:
13557 +
13558 + mykey = "dependency.bad" + suffix
13559 + myvalue = myaux[mytype]
13560 + if not myvalue:
13561 + continue
13562 +
13563 + success, atoms = portage.dep_check(myvalue, portdb,
13564 + dep_settings, use="all", mode=matchmode,
13565 + trees=trees)
13566 +
13567 + if success:
13568 + if atoms:
13569 +
13570 + # Don't bother with dependency.unknown for
13571 + # cases in which *DEPEND.bad is triggered.
13572 + for atom in atoms:
13573 + # dep_check returns all blockers and they
13574 + # aren't counted for *DEPEND.bad, so we
13575 + # ignore them here.
13576 + if not atom.blocker:
13577 + unknown_pkgs.discard(
13578 + (mytype, atom.unevaluated_atom))
13579 +
13580 + if not prof.sub_path:
13581 + # old-style virtuals currently aren't
13582 + # resolvable with empty profile, since
13583 + # 'virtuals' mappings are unavailable
13584 + # (it would be expensive to search
13585 + # for PROVIDE in all ebuilds)
13586 + atoms = [atom for atom in atoms if not \
13587 + (atom.cp.startswith('virtual/') and \
13588 + not portdb.cp_list(atom.cp))]
13589 +
13590 + # we have some unsolvable deps
13591 + # remove ! deps, which always show up as unsatisfiable
13592 + atoms = [str(atom.unevaluated_atom) \
13593 + for atom in atoms if not atom.blocker]
13594 +
13595 + # if we emptied out our list, continue:
13596 + if not atoms:
13597 + continue
13598 + stats[mykey] += 1
13599 + fails[mykey].append("%s: %s: %s(%s) %s" % \
13600 + (relative_path, mytype, keyword,
13601 prof, repr(atoms)))
13602 + else:
13603 + stats[mykey] += 1
13604 + fails[mykey].append("%s: %s: %s(%s) %s" % \
13605 + (relative_path, mytype, keyword,
13606 + prof, repr(atoms)))
13607
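In the reworked loop, all *DEPEND types report under a single dependency.bad[masked][indev] key with the offending dep type carried in the message, and the (keyword, groups, profile) combinations are flattened and sorted by profile sub_path so each profile's config is built once and then reused from arch_caches. The caching pattern, reduced to its essentials (the plain dict stands in for portage.config):

arch_caches = {}   # prof.sub_path -> settings object

def get_dep_settings(sub_path):
    settings = arch_caches.get(sub_path)
    if settings is None:
        settings = {"profile": sub_path}   # expensive portage.config(...) in the real code
        arch_caches[sub_path] = settings
    return settings

for sub_path in ["default/linux/amd64/13.0", "default/linux/amd64/13.0",
                 "default/linux/x86/13.0"]:
    get_dep_settings(sub_path)
print(len(arch_caches))   # 2 -- each profile constructed only once
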
13608 if not baddepsyntax and unknown_pkgs:
13609 type_map = {}
13610 @@ -2208,11 +2400,11 @@ if options.if_modified == "y" and len(effective_scanlist) < 1:
13611 if options.mode == "manifest":
13612 sys.exit(dofail)
13613
13614 -#dofail will be set to 1 if we have failed in at least one non-warning category
13615 -dofail=0
13616 -#dowarn will be set to 1 if we tripped any warnings
13617 -dowarn=0
13618 -#dofull will be set if we should print a "repoman full" informational message
13619 +# dofail will be set to 1 if we have failed in at least one non-warning category
13620 +dofail = 0
13621 +# dowarn will be set to 1 if we tripped any warnings
13622 +dowarn = 0
13623 +# dofull will be set if we should print a "repoman full" informational message
13624 dofull = options.mode != 'full'
13625
13626 for x in qacats:
13627 @@ -2240,29 +2432,20 @@ console_writer.style_listener = style_file.new_styles
13628
13629 f = formatter.AbstractFormatter(console_writer)
13630
13631 -utilities.format_qa_output(f, stats, fails, dofull, dofail, options, qawarnings)
13632 +format_outputs = {
13633 + 'column': utilities.format_qa_output_column,
13634 + 'default': utilities.format_qa_output
13635 +}
13636 +
13637 +format_output = format_outputs.get(options.output_style,
13638 + format_outputs['default'])
13639 +format_output(f, stats, fails, dofull, dofail, options, qawarnings)
13640
13641 style_file.flush()
13642 del console_writer, f, style_file
13643 qa_output = qa_output.getvalue()
13644 qa_output = qa_output.splitlines(True)
13645
13646 -def grouplist(mylist,seperator="/"):
13647 - """(list,seperator="/") -- Takes a list of elements; groups them into
13648 - same initial element categories. Returns a dict of {base:[sublist]}
13649 - From: ["blah/foo","spork/spatula","blah/weee/splat"]
13650 - To: {"blah":["foo","weee/splat"], "spork":["spatula"]}"""
13651 - mygroups={}
13652 - for x in mylist:
13653 - xs=x.split(seperator)
13654 - if xs[0]==".":
13655 - xs=xs[1:]
13656 - if xs[0] not in mygroups:
13657 - mygroups[xs[0]]=[seperator.join(xs[1:])]
13658 - else:
13659 - mygroups[xs[0]]+=[seperator.join(xs[1:])]
13660 - return mygroups
13661 -
13662 suggest_ignore_masked = False
13663 suggest_include_dev = False
13664
13665 @@ -2311,65 +2494,65 @@ else:
13666 myunadded = []
13667 if vcs == "cvs":
13668 try:
13669 - myvcstree=portage.cvstree.getentries("./",recursive=1)
13670 - myunadded=portage.cvstree.findunadded(myvcstree,recursive=1,basedir="./")
13671 + myvcstree = portage.cvstree.getentries("./", recursive=1)
13672 + myunadded = portage.cvstree.findunadded(myvcstree, recursive=1, basedir="./")
13673 except SystemExit as e:
13674 raise # TODO propagate this
13675 except:
13676 err("Error retrieving CVS tree; exiting.")
13677 if vcs == "svn":
13678 try:
13679 - with os.popen("svn status --no-ignore") as f:
13680 + with repoman_popen("svn status --no-ignore") as f:
13681 svnstatus = f.readlines()
13682 - myunadded = [ "./"+elem.rstrip().split()[1] for elem in svnstatus if elem.startswith("?") or elem.startswith("I") ]
13683 + myunadded = ["./" + elem.rstrip().split()[1] for elem in svnstatus if elem.startswith("?") or elem.startswith("I")]
13684 except SystemExit as e:
13685 raise # TODO propagate this
13686 except:
13687 err("Error retrieving SVN info; exiting.")
13688 if vcs == "git":
13689 # get list of files not under version control or missing
13690 - myf = os.popen("git ls-files --others")
13691 - myunadded = [ "./" + elem[:-1] for elem in myf ]
13692 + myf = repoman_popen("git ls-files --others")
13693 + myunadded = ["./" + elem[:-1] for elem in myf]
13694 myf.close()
13695 if vcs == "bzr":
13696 try:
13697 - with os.popen("bzr status -S .") as f:
13698 + with repoman_popen("bzr status -S .") as f:
13699 bzrstatus = f.readlines()
13700 - myunadded = [ "./"+elem.rstrip().split()[1].split('/')[-1:][0] for elem in bzrstatus if elem.startswith("?") or elem[0:2] == " D" ]
13701 + myunadded = ["./" + elem.rstrip().split()[1].split('/')[-1:][0] for elem in bzrstatus if elem.startswith("?") or elem[0:2] == " D"]
13702 except SystemExit as e:
13703 raise # TODO propagate this
13704 except:
13705 err("Error retrieving bzr info; exiting.")
13706 if vcs == "hg":
13707 - with os.popen("hg status --no-status --unknown .") as f:
13708 + with repoman_popen("hg status --no-status --unknown .") as f:
13709 myunadded = f.readlines()
13710 myunadded = ["./" + elem.rstrip() for elem in myunadded]
13711 -
13712 +
13713 # Mercurial doesn't handle manually deleted files as removed from
13714 # the repository, so the user needs to remove them before committing,
13715 # using "hg remove [FILES]"
13716 - with os.popen("hg status --no-status --deleted .") as f:
13717 + with repoman_popen("hg status --no-status --deleted .") as f:
13718 mydeleted = f.readlines()
13719 mydeleted = ["./" + elem.rstrip() for elem in mydeleted]
13720
13721
13722 - myautoadd=[]
13723 + myautoadd = []
13724 if myunadded:
13725 - for x in range(len(myunadded)-1,-1,-1):
13726 - xs=myunadded[x].split("/")
13727 - if xs[-1]=="files":
13728 + for x in range(len(myunadded)-1, -1, -1):
13729 + xs = myunadded[x].split("/")
13730 + if xs[-1] == "files":
13731 print("!!! files dir is not added! Please correct this.")
13732 sys.exit(-1)
13733 - elif xs[-1]=="Manifest":
13734 + elif xs[-1] == "Manifest":
13735 # It's a manifest... auto add
13736 - myautoadd+=[myunadded[x]]
13737 + myautoadd += [myunadded[x]]
13738 del myunadded[x]
13739
13740 if myunadded:
13741 print(red("!!! The following files are in your local tree but are not added to the master"))
13742 print(red("!!! tree. Please remove them from the local tree or add them to the master tree."))
13743 for x in myunadded:
13744 - print(" ",x)
13745 + print(" ", x)
13746 print()
13747 print()
13748 sys.exit(1)
13749 @@ -2378,7 +2561,7 @@ else:
13750 print(red("!!! The following files are removed manually from your local tree but are not"))
13751 print(red("!!! removed from the repository. Please remove them, using \"hg remove [FILES]\"."))
13752 for x in mydeleted:
13753 - print(" ",x)
13754 + print(" ", x)
13755 print()
13756 print()
13757 sys.exit(1)
13758 @@ -2387,60 +2570,59 @@ else:
13759 mycvstree = cvstree.getentries("./", recursive=1)
13760 mychanged = cvstree.findchanged(mycvstree, recursive=1, basedir="./")
13761 mynew = cvstree.findnew(mycvstree, recursive=1, basedir="./")
13762 - myremoved=portage.cvstree.findremoved(mycvstree,recursive=1,basedir="./")
13763 + myremoved = portage.cvstree.findremoved(mycvstree, recursive=1, basedir="./")
13764 bin_blob_pattern = re.compile("^-kb$")
13765 no_expansion = set(portage.cvstree.findoption(mycvstree, bin_blob_pattern,
13766 recursive=1, basedir="./"))
13767
13768 -
13769 if vcs == "svn":
13770 - with os.popen("svn status") as f:
13771 + with repoman_popen("svn status") as f:
13772 svnstatus = f.readlines()
13773 - mychanged = [ "./" + elem.split()[-1:][0] for elem in svnstatus if (elem[:1] in "MR" or elem[1:2] in "M")]
13774 - mynew = [ "./" + elem.split()[-1:][0] for elem in svnstatus if elem.startswith("A")]
13775 - myremoved = [ "./" + elem.split()[-1:][0] for elem in svnstatus if elem.startswith("D")]
13776 + mychanged = ["./" + elem.split()[-1:][0] for elem in svnstatus if (elem[:1] in "MR" or elem[1:2] in "M")]
13777 + mynew = ["./" + elem.split()[-1:][0] for elem in svnstatus if elem.startswith("A")]
13778 + myremoved = ["./" + elem.split()[-1:][0] for elem in svnstatus if elem.startswith("D")]
13779
13780 # Subversion expands keywords specified in svn:keywords properties.
13781 - with os.popen("svn propget -R svn:keywords") as f:
13782 + with repoman_popen("svn propget -R svn:keywords") as f:
13783 props = f.readlines()
13784 expansion = dict(("./" + prop.split(" - ")[0], prop.split(" - ")[1].split()) \
13785 for prop in props if " - " in prop)
13786
13787 elif vcs == "git":
13788 - with os.popen("git diff-index --name-only "
13789 + with repoman_popen("git diff-index --name-only "
13790 "--relative --diff-filter=M HEAD") as f:
13791 mychanged = f.readlines()
13792 mychanged = ["./" + elem[:-1] for elem in mychanged]
13793
13794 - with os.popen("git diff-index --name-only "
13795 + with repoman_popen("git diff-index --name-only "
13796 "--relative --diff-filter=A HEAD") as f:
13797 mynew = f.readlines()
13798 mynew = ["./" + elem[:-1] for elem in mynew]
13799
13800 - with os.popen("git diff-index --name-only "
13801 + with repoman_popen("git diff-index --name-only "
13802 "--relative --diff-filter=D HEAD") as f:
13803 myremoved = f.readlines()
13804 myremoved = ["./" + elem[:-1] for elem in myremoved]
13805
13806 if vcs == "bzr":
13807 - with os.popen("bzr status -S .") as f:
13808 + with repoman_popen("bzr status -S .") as f:
13809 bzrstatus = f.readlines()
13810 - mychanged = [ "./" + elem.split()[-1:][0].split('/')[-1:][0] for elem in bzrstatus if elem and elem[1:2] == "M" ]
13811 - mynew = [ "./" + elem.split()[-1:][0].split('/')[-1:][0] for elem in bzrstatus if elem and ( elem[1:2] in "NK" or elem[0:1] == "R" ) ]
13812 - myremoved = [ "./" + elem.split()[-1:][0].split('/')[-1:][0] for elem in bzrstatus if elem.startswith("-") ]
13813 - myremoved = [ "./" + elem.split()[-3:-2][0].split('/')[-1:][0] for elem in bzrstatus if elem and ( elem[1:2] == "K" or elem[0:1] == "R" ) ]
13814 + mychanged = ["./" + elem.split()[-1:][0].split('/')[-1:][0] for elem in bzrstatus if elem and elem[1:2] == "M"]
13815 + mynew = ["./" + elem.split()[-1:][0].split('/')[-1:][0] for elem in bzrstatus if elem and (elem[1:2] in "NK" or elem[0:1] == "R")]
13816 + myremoved = ["./" + elem.split()[-1:][0].split('/')[-1:][0] for elem in bzrstatus if elem.startswith("-")]
13817 + myremoved = ["./" + elem.split()[-3:-2][0].split('/')[-1:][0] for elem in bzrstatus if elem and (elem[1:2] == "K" or elem[0:1] == "R")]
13818 # Bazaar expands nothing.
13819
13820 if vcs == "hg":
13821 - with os.popen("hg status --no-status --modified .") as f:
13822 + with repoman_popen("hg status --no-status --modified .") as f:
13823 mychanged = f.readlines()
13824 mychanged = ["./" + elem.rstrip() for elem in mychanged]
13825
13826 - with os.popen("hg status --no-status --added .") as f:
13827 + with repoman_popen("hg status --no-status --added .") as f:
13828 mynew = f.readlines()
13829 mynew = ["./" + elem.rstrip() for elem in mynew]
13830
13831 - with os.popen("hg status --no-status --removed .") as f:
13832 + with repoman_popen("hg status --no-status --removed .") as f:
13833 myremoved = f.readlines()
13834 myremoved = ["./" + elem.rstrip() for elem in myremoved]
13835
13836 @@ -2499,21 +2681,54 @@ else:
13837 commitmessage = commitmessage.rstrip()
13838 changelog_msg = commitmessage
13839 portage_version = getattr(portage, "VERSION", None)
13840 + gpg_key = repoman_settings.get("PORTAGE_GPG_KEY", "")
13841 + dco_sob = repoman_settings.get("DCO_SIGNED_OFF_BY", "")
13842 if portage_version is None:
13843 sys.stderr.write("Failed to insert portage version in message!\n")
13844 sys.stderr.flush()
13845 portage_version = "Unknown"
13846 - unameout = platform.system() + " "
13847 - if platform.system() in ["Darwin", "SunOS"]:
13848 - unameout += platform.processor()
13849 - else:
13850 - unameout += platform.machine()
13851 - commitmessage += "\n\n(Portage version: %s/%s/%s" % \
13852 - (portage_version, vcs, unameout)
13853 +
13854 + report_options = []
13855 if options.force:
13856 - commitmessage += ", RepoMan options: --force"
13857 - commitmessage += ")"
13858 + report_options.append("--force")
13859 + if options.ignore_arches:
13860 + report_options.append("--ignore-arches")
13861 + if include_arches is not None:
13862 + report_options.append("--include-arches=\"%s\"" %
13863 + " ".join(sorted(include_arches)))
13864 +
13865 + if vcs == "git":
13866 + # Use new footer only for git (see bug #438364).
13867 + commit_footer = "\n\nPackage-Manager: portage-%s" % portage_version
13868 + if report_options:
13869 + commit_footer += "\nRepoMan-Options: " + " ".join(report_options)
13870 + if sign_manifests:
13871 + commit_footer += "\nManifest-Sign-Key: %s" % (gpg_key, )
13872 + if dco_sob:
13873 + commit_footer += "\nSigned-off-by: %s" % (dco_sob, )
13874 + else:
13875 + unameout = platform.system() + " "
13876 + if platform.system() in ["Darwin", "SunOS"]:
13877 + unameout += platform.processor()
13878 + else:
13879 + unameout += platform.machine()
13880 + commit_footer = "\n\n"
13881 + if dco_sob:
13882 + commit_footer += "Signed-off-by: %s\n" % (dco_sob, )
13883 + commit_footer += "(Portage version: %s/%s/%s" % \
13884 + (portage_version, vcs, unameout)
13885 + if report_options:
13886 + commit_footer += ", RepoMan options: " + " ".join(report_options)
13887 + if sign_manifests:
13888 + commit_footer += ", signed Manifest commit with key %s" % \
13889 + (gpg_key, )
13890 + else:
13891 + commit_footer += ", unsigned Manifest commit"
13892 + commit_footer += ")"
13893 +
13894 + commitmessage += commit_footer
13895
13896 + broken_changelog_manifests = []
13897 if options.echangelog in ('y', 'force'):
13898 logging.info("checking for unmodified ChangeLog files")
13899 committer_name = utilities.get_committer_name(env=repoman_settings)
13900 @@ -2569,6 +2784,8 @@ else:
13901 # regenerate Manifest for modified ChangeLog (bug #420735)
13902 repoman_settings["O"] = checkdir
13903 digestgen(mysettings=repoman_settings, myportdb=portdb)
13904 + else:
13905 + broken_changelog_manifests.append(x)
13906
13907 if myautoadd:
13908 print(">>> Auto-Adding missing Manifest/ChangeLog file(s)...")
13909 @@ -2578,15 +2795,17 @@ else:
13910 portage.writemsg_stdout("(%s)\n" % " ".join(add_cmd),
13911 noiselevel=-1)
13912 else:
13913 - if not (sys.hexversion >= 0x3000000 and sys.hexversion < 0x3020000):
13914 - # Python 3.1 produces the following TypeError if raw bytes are
13915 - # passed to subprocess.call():
13916 - # File "/usr/lib/python3.1/subprocess.py", line 646, in __init__
13917 - # errread, errwrite)
13918 - # File "/usr/lib/python3.1/subprocess.py", line 1157, in _execute_child
13919 - # raise child_exception
13920 - # TypeError: expected an object with the buffer interface
13921 - add_cmd = [_unicode_encode(arg) for arg in add_cmd]
13922 +
13923 + if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000 and \
13924 + not os.path.isabs(add_cmd[0]):
13925 + # Python 3.1 _execvp throws TypeError for non-absolute executable
13926 + # path passed as bytes (see http://bugs.python.org/issue8513).
13927 + fullname = find_binary(add_cmd[0])
13928 + if fullname is None:
13929 + raise portage.exception.CommandNotFound(add_cmd[0])
13930 + add_cmd[0] = fullname
13931 +
13932 + add_cmd = [_unicode_encode(arg) for arg in add_cmd]
13933 retcode = subprocess.call(add_cmd)
13934 if retcode != os.EX_OK:
13935 logging.error(
13936 @@ -2631,7 +2850,7 @@ else:
13937 elif vcs == "svn":
13938 if myfile not in expansion:
13939 continue
13940 -
13941 +
13942 # Subversion keywords are case-insensitive in svn:keywords properties, but case-sensitive in contents of files.
13943 enabled_keywords = []
13944 for k in expansion[myfile]:
13945 @@ -2641,7 +2860,8 @@ else:
13946
13947 headerstring = "'\$(%s).*\$'" % "|".join(enabled_keywords)
13948
13949 - myout = subprocess_getstatusoutput("egrep -q "+headerstring+" "+myfile)
13950 + myout = repoman_getstatusoutput("egrep -q " + headerstring + " " +
13951 + portage._shell_quote(myfile))
13952 if myout[0] == 0:
13953 myheaders.append(myfile)
13954
13955 @@ -2688,7 +2908,7 @@ else:
13956 if options.pretend:
13957 print("(%s)" % (" ".join(commit_cmd),))
13958 else:
13959 - retval = spawn(commit_cmd, env=os.environ)
13960 + retval = spawn(commit_cmd, env=commit_env)
13961 if retval != os.EX_OK:
13962 writemsg_level(("!!! Exiting on %s (shell) " + \
13963 "error code: %s\n") % (vcs, retval),
13964 @@ -2729,14 +2949,38 @@ else:
13965 gpgvars[k] = v
13966 gpgcmd = portage.util.varexpand(gpgcmd, mydict=gpgvars)
13967 if options.pretend:
13968 - print("("+gpgcmd+")")
13969 + print("(" + gpgcmd + ")")
13970 else:
13971 - rValue = os.system(gpgcmd)
13972 + # Encode unicode manually for bug #310789.
13973 + gpgcmd = portage.util.shlex_split(gpgcmd)
13974 +
13975 + if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000 and \
13976 + not os.path.isabs(gpgcmd[0]):
13977 + # Python 3.1 _execvp throws TypeError for non-absolute executable
13978 + # path passed as bytes (see http://bugs.python.org/issue8513).
13979 + fullname = find_binary(gpgcmd[0])
13980 + if fullname is None:
13981 + raise portage.exception.CommandNotFound(gpgcmd[0])
13982 + gpgcmd[0] = fullname
13983 +
13984 + gpgcmd = [_unicode_encode(arg,
13985 + encoding=_encodings['fs'], errors='strict') for arg in gpgcmd]
13986 + rValue = subprocess.call(gpgcmd)
13987 if rValue == os.EX_OK:
13988 - os.rename(filename+".asc", filename)
13989 + os.rename(filename + ".asc", filename)
13990 else:
13991 raise portage.exception.PortageException("!!! gpg exited with '" + str(rValue) + "' status")
13992
13993 + def need_signature(filename):
13994 + try:
13995 + with open(_unicode_encode(filename,
13996 + encoding=_encodings['fs'], errors='strict'), 'rb') as f:
13997 + return b"BEGIN PGP SIGNED MESSAGE" not in f.readline()
13998 + except IOError as e:
13999 + if e.errno in (errno.ENOENT, errno.ESTALE):
14000 + return False
14001 + raise
14002 +
14003 # When files are removed and re-added, the cvs server will put /Attic/
14004 # inside the $Header path. This code detects the problem and corrects it
14005 # so that the Manifest will generate correctly. See bug #169500.
14006 @@ -2771,6 +3015,11 @@ else:
14007 repoman_settings["O"] = os.path.join(repodir, x)
14008 digestgen(mysettings=repoman_settings, myportdb=portdb)
14009
14010 + elif broken_changelog_manifests:
14011 + for x in broken_changelog_manifests:
14012 + repoman_settings["O"] = os.path.join(repodir, x)
14013 + digestgen(mysettings=repoman_settings, myportdb=portdb)
14014 +
14015 signed = False
14016 if sign_manifests:
14017 signed = True
14018 @@ -2779,7 +3028,7 @@ else:
14019 chain(myupdates, myremoved, mymanifests))):
14020 repoman_settings["O"] = os.path.join(repodir, x)
14021 manifest_path = os.path.join(repoman_settings["O"], "Manifest")
14022 - if not os.path.exists(manifest_path):
14023 + if not need_signature(manifest_path):
14024 continue
14025 gpgsign(manifest_path)
14026 except portage.exception.PortageException as e:
14027 @@ -2809,7 +3058,6 @@ else:
14028 sys.exit(retval)
14029
14030 if True:
14031 -
14032 myfiles = mymanifests[:]
14033 # If there are no header (SVN/CVS keywords) changes in
14034 # the files, this Manifest commit must include the
14035 @@ -2821,14 +3069,7 @@ else:
14036
14037 fd, commitmessagefile = tempfile.mkstemp(".repoman.msg")
14038 mymsg = os.fdopen(fd, "wb")
14039 - # strip the closing parenthesis
14040 - mymsg.write(_unicode_encode(commitmessage[:-1]))
14041 - if signed:
14042 - mymsg.write(_unicode_encode(
14043 - ", signed Manifest commit with key %s)" % \
14044 - repoman_settings["PORTAGE_GPG_KEY"]))
14045 - else:
14046 - mymsg.write(b", unsigned Manifest commit)")
14047 + mymsg.write(_unicode_encode(commitmessage))
14048 mymsg.close()
14049
14050 commit_cmd = []
14051 @@ -2851,9 +3092,8 @@ else:
14052 if options.pretend:
14053 print("(%s)" % (" ".join(commit_cmd),))
14054 else:
14055 - retval = spawn(commit_cmd, env=os.environ)
14056 + retval = spawn(commit_cmd, env=commit_env)
14057 if retval != os.EX_OK:
14058 -
14059 if repo_config.sign_commit and vcs == 'git' and \
14060 not git_supports_gpg_sign():
14061 # Inform user that newer git is needed (bug #403323).
14062 @@ -2877,4 +3117,3 @@ else:
14063 print("repoman was too scared by not seeing any familiar version control file that he forgot to commit anything")
14064 print(green("RepoMan sez:"), "\"If everyone were like you, I'd be out of business!\"\n")
14065 sys.exit(0)
14066 -
14067
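The repoman hunks above replace the old parenthesized "(Portage version: ...)"
trailer with structured git footer lines (Package-Manager, RepoMan-Options,
Manifest-Sign-Key, Signed-off-by; see bug #438364), and the full message is now
assembled before it is written to the temporary commit file. A minimal sketch of
how such a footer comes together, using hypothetical placeholder values rather
than anything taken from a real repository:

# Python sketch only; every value below is a made-up placeholder.
portage_version = "2.2.8"
report_options = ["--force", "--include-arches=\"amd64 x86\""]
sign_manifests = True
gpg_key = "0xDEADBEEF"                    # stands in for PORTAGE_GPG_KEY
dco_sob = "Jane Dev <jane@example.org>"   # stands in for DCO_SIGNED_OFF_BY

commit_footer = "\n\nPackage-Manager: portage-%s" % portage_version
if report_options:
    commit_footer += "\nRepoMan-Options: " + " ".join(report_options)
if sign_manifests:
    commit_footer += "\nManifest-Sign-Key: %s" % (gpg_key,)
if dco_sob:
    commit_footer += "\nSigned-off-by: %s" % (dco_sob,)

print("one-line summary" + commit_footer)

The non-git branch keeps the old parenthesized trailer, so CVS, SVN and the
other VCS backends continue to record the format they always used.
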
14068 diff --git a/bin/save-ebuild-env.sh b/bin/save-ebuild-env.sh
14069 index 47a2aca..98cff83 100644
14070 --- a/bin/save-ebuild-env.sh
14071 +++ b/bin/save-ebuild-env.sh
14072 @@ -1,8 +1,8 @@
14073 #!/bin/bash
14074 -# Copyright 1999-2011 Gentoo Foundation
14075 +# Copyright 1999-2014 Gentoo Foundation
14076 # Distributed under the terms of the GNU General Public License v2
14077
14078 -# @FUNCTION: save_ebuild_env
14079 +# @FUNCTION: __save_ebuild_env
14080 # @DESCRIPTION:
14081 # echo the current environment to stdout, filtering out redundant info.
14082 #
14083 @@ -10,11 +10,12 @@
14084 # be excluded from the output. These function are not needed for installation
14085 # or removal of the packages, and can therefore be safely excluded.
14086 #
14087 -save_ebuild_env() {
14088 +__save_ebuild_env() {
14089 (
14090 if has --exclude-init-phases $* ; then
14091 unset S _E_DOCDESTTREE_ _E_EXEDESTTREE_ \
14092 - PORTAGE_DOCOMPRESS PORTAGE_DOCOMPRESS_SKIP
14093 + PORTAGE_DOCOMPRESS_SIZE_LIMIT PORTAGE_DOCOMPRESS \
14094 + PORTAGE_DOCOMPRESS_SKIP
14095 if [[ -n $PYTHONPATH &&
14096 ${PYTHONPATH%%:*} -ef $PORTAGE_PYM_PATH ]] ; then
14097 if [[ $PYTHONPATH == *:* ]] ; then
14098 @@ -42,35 +43,51 @@ save_ebuild_env() {
14099 for x in pkg_setup pkg_nofetch src_unpack src_prepare src_configure \
14100 src_compile src_test src_install pkg_preinst pkg_postinst \
14101 pkg_prerm pkg_postrm ; do
14102 - unset -f default_$x _eapi{0,1,2,3,4}_$x
14103 + unset -f default_$x __eapi{0,1,2,3,4}_$x
14104 done
14105 unset x
14106
14107 - unset -f assert assert_sigpipe_ok dump_trace die diefunc \
14108 - quiet_mode vecho elog_base eqawarn elog \
14109 - esyslog einfo einfon ewarn eerror ebegin _eend eend KV_major \
14110 - KV_minor KV_micro KV_to_int get_KV unset_colors set_colors has \
14111 - has_phase_defined_up_to \
14112 - hasv hasq qa_source qa_call \
14113 - addread addwrite adddeny addpredict _sb_append_var \
14114 + unset -f assert __assert_sigpipe_ok \
14115 + __dump_trace die \
14116 + __quiet_mode __vecho __elog_base eqawarn elog \
14117 + einfo einfon ewarn eerror ebegin __eend eend KV_major \
14118 + KV_minor KV_micro KV_to_int get_KV __1 __1 has \
14119 + __has_phase_defined_up_to \
14120 + hasv hasq __qa_source __qa_call \
14121 + addread addwrite adddeny addpredict __sb_append_var \
14122 use usev useq has_version portageq \
14123 best_version use_with use_enable register_die_hook \
14124 - keepdir unpack strip_duplicate_slashes econf einstall \
14125 - dyn_setup dyn_unpack dyn_clean into insinto exeinto docinto \
14126 + unpack __strip_duplicate_slashes econf einstall \
14127 + __dyn_setup __dyn_unpack __dyn_clean \
14128 + into insinto exeinto docinto \
14129 insopts diropts exeopts libopts docompress \
14130 - abort_handler abort_prepare abort_configure abort_compile \
14131 - abort_test abort_install dyn_prepare dyn_configure \
14132 - dyn_compile dyn_test dyn_install \
14133 - dyn_preinst dyn_pretend dyn_help debug-print debug-print-function \
14134 - debug-print-section helpers_die inherit EXPORT_FUNCTIONS \
14135 - nonfatal register_success_hook remove_path_entry \
14136 - save_ebuild_env filter_readonly_variables preprocess_ebuild_env \
14137 - set_unless_changed unset_unless_changed source_all_bashrcs \
14138 - ebuild_main ebuild_phase ebuild_phase_with_hooks \
14139 - _ebuild_arg_to_phase _ebuild_phase_funcs default \
14140 - _hasg _hasgq _unpack_tar \
14141 + __abort_handler __abort_prepare __abort_configure __abort_compile \
14142 + __abort_test __abort_install __dyn_prepare __dyn_configure \
14143 + __dyn_compile __dyn_test __dyn_install \
14144 + __dyn_pretend __dyn_help \
14145 + debug-print debug-print-function \
14146 + debug-print-section __helpers_die inherit EXPORT_FUNCTIONS \
14147 + nonfatal register_success_hook \
14148 + __hasg __hasgq \
14149 + __save_ebuild_env __set_colors __filter_readonly_variables \
14150 + __preprocess_ebuild_env \
14151 + __repo_attr __source_all_bashrcs \
14152 + __ebuild_main __ebuild_phase __ebuild_phase_with_hooks \
14153 + __ebuild_arg_to_phase __ebuild_phase_funcs default \
14154 + __unpack_tar __unset_colors \
14155 ${QA_INTERCEPTORS}
14156
14157 + ___eapi_has_usex && unset -f usex
14158 + ___eapi_has_master_repositories && unset -f master_repositories
14159 + ___eapi_has_repository_path && unset -f repository_path
14160 + ___eapi_has_available_eclasses && unset -f available_eclasses
14161 + ___eapi_has_eclass_path && unset -f eclass_path
14162 + ___eapi_has_license_path && unset -f license_path
14163 + ___eapi_has_package_manager_build_user && unset -f package_manager_build_user
14164 + ___eapi_has_package_manager_build_group && unset -f package_manager_build_group
14165 +
14166 + unset -f $(compgen -A function ___eapi_)
14167 +
14168 # portage config variables and variables set directly by portage
14169 unset ACCEPT_LICENSE BAD BRACKET BUILD_PREFIX COLS \
14170 DISTCC_DIR DISTDIR DOC_SYMLINKS_DIR \
14171
14172 diff --git a/bin/xattr-helper.py b/bin/xattr-helper.py
14173 new file mode 100755
14174 index 0000000..ea83a5e
14175 --- /dev/null
14176 +++ b/bin/xattr-helper.py
14177 @@ -0,0 +1,190 @@
14178 +#!/usr/bin/python -b
14179 +# Copyright 2012-2014 Gentoo Foundation
14180 +# Distributed under the terms of the GNU General Public License v2
14181 +
14182 +"""Dump and restore extended attributes.
14183 +
14184 +We use formats like that used by getfattr --dump. This is meant for shell
14185 +helpers to save/restore. If you're looking for a python/portage API, see
14186 +portage.util.movefile._copyxattr instead.
14187 +
14188 +https://en.wikipedia.org/wiki/Extended_file_attributes
14189 +"""
14190 +
14191 +import array
14192 +import os
14193 +import re
14194 +import sys
14195 +
14196 +from portage.util._argparse import ArgumentParser
14197 +
14198 +if hasattr(os, "getxattr"):
14199 +
14200 + class xattr(object):
14201 + get = os.getxattr
14202 + set = os.setxattr
14203 + list = os.listxattr
14204 +
14205 +else:
14206 + import xattr
14207 +
14208 +
14209 +_UNQUOTE_RE = re.compile(br'\\[0-7]{3}')
14210 +_FS_ENCODING = sys.getfilesystemencoding()
14211 +
14212 +
14213 +if sys.hexversion < 0x3000000:
14214 +
14215 + def octal_quote_byte(b):
14216 + return b'\\%03o' % ord(b)
14217 +
14218 + def unicode_encode(s):
14219 + if isinstance(s, unicode):
14220 + s = s.encode(_FS_ENCODING)
14221 + return s
14222 +else:
14223 +
14224 + def octal_quote_byte(b):
14225 + return ('\\%03o' % ord(b)).encode('ascii')
14226 +
14227 + def unicode_encode(s):
14228 + if isinstance(s, str):
14229 + s = s.encode(_FS_ENCODING)
14230 + return s
14231 +
14232 +
14233 +def quote(s, quote_chars):
14234 + """Convert all |quote_chars| in |s| to escape sequences
14235 +
14236 + This is normally used to escape any embedded quotation marks.
14237 + """
14238 + quote_re = re.compile(b'[' + quote_chars + b']')
14239 + result = []
14240 + pos = 0
14241 + s_len = len(s)
14242 +
14243 + while pos < s_len:
14244 + m = quote_re.search(s, pos=pos)
14245 + if m is None:
14246 + result.append(s[pos:])
14247 + pos = s_len
14248 + else:
14249 + start = m.start()
14250 + result.append(s[pos:start])
14251 + result.append(octal_quote_byte(s[start:start+1]))
14252 + pos = start + 1
14253 +
14254 + return b''.join(result)
14255 +
14256 +
14257 +def unquote(s):
14258 + """Process all escape sequences in |s|"""
14259 + result = []
14260 + pos = 0
14261 + s_len = len(s)
14262 +
14263 + while pos < s_len:
14264 + m = _UNQUOTE_RE.search(s, pos=pos)
14265 + if m is None:
14266 + result.append(s[pos:])
14267 + pos = s_len
14268 + else:
14269 + start = m.start()
14270 + result.append(s[pos:start])
14271 + pos = start + 4
14272 + a = array.array('B')
14273 + a.append(int(s[start + 1:pos], 8))
14274 + try:
14275 + # Python >= 3.2
14276 + result.append(a.tobytes())
14277 + except AttributeError:
14278 + result.append(a.tostring())
14279 +
14280 + return b''.join(result)
14281 +
14282 +
14283 +def dump_xattrs(pathnames, file_out):
14284 + """Dump the xattr data for |pathnames| to |file_out|"""
14285 + # NOTE: Always quote backslashes, in order to ensure that they are
14286 + # not interpreted as quotes when they are processed by unquote.
14287 + quote_chars = b'\n\r\\\\'
14288 +
14289 + for pathname in pathnames:
14290 + attrs = xattr.list(pathname)
14291 + if not attrs:
14292 + continue
14293 +
14294 + file_out.write(b'# file: %s\n' % quote(pathname, quote_chars))
14295 + for attr in attrs:
14296 + attr = unicode_encode(attr)
14297 + value = xattr.get(pathname, attr)
14298 + file_out.write(b'%s="%s"\n' % (
14299 + quote(attr, b'=' + quote_chars),
14300 + quote(value, b'\0"' + quote_chars)))
14301 +
14302 +
14303 +def restore_xattrs(file_in):
14304 + """Read |file_in| and restore xattrs content from it
14305 +
14306 + This expects textual data in the format written by dump_xattrs.
14307 + """
14308 + pathname = None
14309 + for i, line in enumerate(file_in):
14310 + if line.startswith(b'# file: '):
14311 + pathname = unquote(line.rstrip(b'\n')[8:])
14312 + else:
14313 + parts = line.split(b'=', 1)
14314 + if len(parts) == 2:
14315 + if pathname is None:
14316 + raise ValueError('line %d: missing pathname' % (i + 1,))
14317 + attr = unquote(parts[0])
14318 + # strip trailing newline and quotes
14319 + value = unquote(parts[1].rstrip(b'\n')[1:-1])
14320 + xattr.set(pathname, attr, value)
14321 + elif line.strip():
14322 + raise ValueError('line %d: malformed entry' % (i + 1,))
14323 +
14324 +
14325 +def main(argv):
14326 +
14327 + parser = ArgumentParser(description=__doc__)
14328 + parser.add_argument('paths', nargs='*', default=[])
14329 +
14330 + actions = parser.add_argument_group('Actions')
14331 + actions.add_argument('--dump',
14332 + action='store_true',
14333 + help='Dump the values of all extended '
14334 + 'attributes associated with null-separated'
14335 + ' paths read from stdin.')
14336 + actions.add_argument('--restore',
14337 + action='store_true',
14338 + help='Restore extended attributes using'
14339 + ' a dump read from stdin.')
14340 +
14341 + options = parser.parse_args(argv)
14342 +
14343 + if sys.hexversion >= 0x3000000:
14344 + file_in = sys.stdin.buffer.raw
14345 + else:
14346 + file_in = sys.stdin
14347 + if not options.paths:
14348 + options.paths += [x for x in file_in.read().split(b'\0') if x]
14349 +
14350 + if options.dump:
14351 + if sys.hexversion >= 0x3000000:
14352 + file_out = sys.stdout.buffer
14353 + else:
14354 + file_out = sys.stdout
14355 + dump_xattrs(options.paths, file_out)
14356 +
14357 + elif options.restore:
14358 + restore_xattrs(file_in)
14359 +
14360 + else:
14361 + parser.error('missing action!')
14362 +
14363 + return os.EX_OK
14364 +
14365 +
14366 +if __name__ == '__main__':
14367 + sys.exit(main(sys.argv[1:]))
14368
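The new bin/xattr-helper.py above serializes attributes in the getfattr --dump
style, octal-escaping newlines, carriage returns and backslashes so that paths
and values survive a line-oriented dump. A standalone round-trip sketch of that
escaping (Python 3 only; the helper names are local to this sketch, not part of
the portage API):

import re

def octal_quote_byte(b):
    return ('\\%03o' % b).encode('ascii')

def quote(s, quote_chars):
    # Escape every byte listed in quote_chars as a \NNN octal sequence.
    out = bytearray()
    for b in s:
        if b in quote_chars:
            out += octal_quote_byte(b)
        else:
            out.append(b)
    return bytes(out)

def unquote(s):
    # Reverse quote(): turn each \NNN sequence back into a single byte.
    return re.sub(br'\\([0-7]{3})',
                  lambda m: bytes([int(m.group(1), 8)]), s)

quote_chars = b'\n\r\\\\'   # backslashes are always quoted, as noted above
path = b'dir/with\nnewline'
line = b'# file: ' + quote(path, quote_chars) + b'\n'
print(line)                                  # b'# file: dir/with\\012newline\n'
assert unquote(line.rstrip(b'\n')[8:]) == path
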
14369 diff --git a/bin/xpak-helper.py b/bin/xpak-helper.py
14370 index ef74920..c4391cd 100755
14371 --- a/bin/xpak-helper.py
14372 +++ b/bin/xpak-helper.py
14373 @@ -1,11 +1,12 @@
14374 -#!/usr/bin/python
14375 -# Copyright 2009-2011 Gentoo Foundation
14376 +#!/usr/bin/python -b
14377 +# Copyright 2009-2014 Gentoo Foundation
14378 # Distributed under the terms of the GNU General Public License v2
14379
14380 -import optparse
14381 import sys
14382 import portage
14383 +portage._internal_caller = True
14384 from portage import os
14385 +from portage.util._argparse import ArgumentParser
14386
14387 def command_recompose(args):
14388
14389 @@ -45,8 +46,8 @@ def main(argv):
14390 usage = "usage: %s COMMAND [args]" % \
14391 os.path.basename(argv[0])
14392
14393 - parser = optparse.OptionParser(description=description, usage=usage)
14394 - options, args = parser.parse_args(argv[1:])
14395 + parser = ArgumentParser(description=description, usage=usage)
14396 + options, args = parser.parse_known_args(argv[1:])
14397
14398 if not args:
14399 parser.error("missing command argument")
14400
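xpak-helper.py above moves from optparse to the ArgumentParser wrapper and
switches to parse_known_args(), which hands positional arguments back untouched
so the COMMAND dispatch keeps working. A rough sketch of that pattern using the
stdlib argparse (portage.util._argparse is assumed here to expose the same
interface):

import argparse
import sys

def main(argv):
    parser = argparse.ArgumentParser(usage="%(prog)s COMMAND [args]")
    # parse_known_args() returns unrecognized positionals in a list instead
    # of erroring out, much like optparse's parse_args() used to.
    options, args = parser.parse_known_args(argv)
    if not args:
        parser.error("missing command argument")
    command, command_args = args[0], args[1:]
    print("command=%r args=%r" % (command, command_args))
    return 0

if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
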
14401 diff --git a/cnf/dispatch-conf.conf b/cnf/dispatch-conf.conf
14402 index 7eea44c..125b7cc 100644
14403 --- a/cnf/dispatch-conf.conf
14404 +++ b/cnf/dispatch-conf.conf
14405 @@ -6,6 +6,7 @@
14406 archive-dir=${EPREFIX}/etc/config-archive
14407
14408 # Use rcs for storing files in the archive directory?
14409 +# NOTE: You should install dev-vcs/rcs before enabling this option.
14410 # WARNING: When configured to use rcs, read and execute permissions of
14411 # archived files may be inherited from the first check in of a working
14412 # file, as documented in the ci(1) man page. This means that even if
14413
14414 diff --git a/cnf/make.conf b/cnf/make.conf.example
14415 similarity index 95%
14416 rename from cnf/make.conf
14417 rename to cnf/make.conf.example
14418 index ad2a5b7..6603b42 100644
14419 --- a/cnf/make.conf
14420 +++ b/cnf/make.conf.example
14421 @@ -1,4 +1,4 @@
14422 -# Copyright 1999-2011 Gentoo Foundation
14423 +# Copyright 1999-2013 Gentoo Foundation
14424 # Distributed under the terms of the GNU General Public License v2
14425 # Contains local system settings for Portage system
14426
14427 @@ -78,6 +78,18 @@
14428 #
14429 #ACCEPT_KEYWORDS="~arch"
14430
14431 +# ACCEPT_LICENSE is used to mask packages based on licensing restrictions.
14432 +# It may contain both license and group names, where group names are
14433 +# prefixed with the '@' symbol. License groups are defined in the
14434 +# license_groups file (see portage(5) man page). In addition to license
14435 +# and group names, the * and -* wildcard tokens are also supported.
14436 +#
14437 +# Accept any license except those in the EULA license group (default).
14438 +#ACCEPT_LICENSE="* -@EULA"
14439 +#
14440 +# Only accept licenses in the FREE license group (i.e. Free Software).
14441 +#ACCEPT_LICENSE="-* @FREE"
14442 +
14443 # Portage Directories
14444 # ===================
14445 #
14446 @@ -92,7 +104,7 @@
14447 #
14448 # PORTDIR is the location of the portage tree. This is the repository
14449 # for all profile information as well as all ebuilds. If you change
14450 -# this, you must update your /etc/make.profile symlink accordingly.
14451 +# this, you must update your /etc/portage/make.profile symlink accordingly.
14452 # ***Warning***
14453 # Data stored inside PORTDIR is in peril of being overwritten or deleted by
14454 # the emerge --sync command. The default value of PORTAGE_RSYNC_OPTS
14455 @@ -289,8 +301,7 @@
14456 # logging related variables:
14457 # PORTAGE_ELOG_CLASSES: selects messages to be logged, possible values are:
14458 # info, warn, error, log, qa, *
14459 -# Warning: commenting this will disable elog
14460 -PORTAGE_ELOG_CLASSES="log warn error"
14461 +#PORTAGE_ELOG_CLASSES="log warn error"
14462
14463 # PORTAGE_ELOG_SYSTEM: selects the module(s) to process the log messages. Modules
14464 # included in portage are (empty means logging is disabled):
14465 @@ -312,7 +323,7 @@ PORTAGE_ELOG_CLASSES="log warn error"
14466 # separated list of loglevels to override PORTAGE_ELOG_CLASSES
14467 # for this module (e.g.
14468 # PORTAGE_ELOG_SYSTEM="mail:warn,error syslog:* save")
14469 -#PORTAGE_ELOG_SYSTEM="save_summary echo"
14470 +#PORTAGE_ELOG_SYSTEM="save_summary:log,warn,error,qa echo"
14471
14472 # PORTAGE_ELOG_COMMAND: only used with the "custom" logging module. Specifies a command
14473 # to process log messages. Two variables are expanded:
14474
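The ACCEPT_LICENSE text added to make.conf.example above describes a small
token language: plain license names, @-prefixed group names, the * wildcard,
and a leading - for removal, applied in order. A toy illustration of those
semantics follows; it is not portage's actual matcher, and the group contents
are invented for the example:

def accepted(license_name, accept_license, license_groups):
    # Walk tokens left to right; the last matching token decides.
    verdict = False
    for token in accept_license.split():
        negate = token.startswith("-")
        name = token.lstrip("-")
        if name == "*":
            match = True
        elif name.startswith("@"):
            match = license_name in license_groups.get(name[1:], ())
        else:
            match = license_name == name
        if match:
            verdict = not negate
    return verdict

groups = {"EULA": ["some-eula-license"], "FREE": ["GPL-2", "MIT"]}
print(accepted("GPL-2", "* -@EULA", groups))              # True
print(accepted("some-eula-license", "* -@EULA", groups))  # False
print(accepted("MIT", "-* @FREE", groups))                # True
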
14475 diff --git a/cnf/make.conf.alpha.diff b/cnf/make.conf.example.alpha.diff
14476 similarity index 80%
14477 rename from cnf/make.conf.alpha.diff
14478 rename to cnf/make.conf.example.alpha.diff
14479 index f0a4e38..5306999 100644
14480 --- a/cnf/make.conf.alpha.diff
14481 +++ b/cnf/make.conf.example.alpha.diff
14482 @@ -1,6 +1,6 @@
14483 ---- make.conf 2006-03-19 18:40:11.000000000 +0100
14484 -+++ make.conf.alpha 2006-03-19 18:26:21.000000000 +0100
14485 -@@ -23,6 +23,17 @@
14486 +--- make.conf.example
14487 ++++ make.conf.example
14488 +@@ -22,6 +22,17 @@
14489 # Example:
14490 #USE="X gtk gnome -alsa"
14491
14492 @@ -18,9 +18,9 @@
14493 # Host and optimization settings
14494 # ==============================
14495 #
14496 -@@ -33,10 +44,18 @@
14497 - # package (and in some cases the libraries it uses) at default optimizations
14498 - # before reporting errors to developers.
14499 +@@ -39,10 +50,18 @@
14500 + # -frecord-gcc-switches, since otherwise the check could result in false
14501 + # positive results.
14502 #
14503 -# Please refer to the GCC manual for a list of possible values.
14504 +# -mcpu=<cpu-type> means optimize code for the particular type of CPU. In
14505 @@ -39,12 +39,12 @@
14506 # If you set a CFLAGS above, then this line will set your default C++ flags to
14507 # the same settings.
14508 #CXXFLAGS="${CFLAGS}"
14509 -@@ -61,7 +80,7 @@
14510 +@@ -76,7 +95,7 @@
14511 # DO NOT PUT ANYTHING BUT YOUR SPECIFIC ~ARCHITECTURE IN THE LIST.
14512 # IF YOU ARE UNSURE OF YOUR ARCH, OR THE IMPLICATIONS, DO NOT MODIFY THIS.
14513 #
14514 -#ACCEPT_KEYWORDS="~arch"
14515 +#ACCEPT_KEYWORDS="~alpha"
14516
14517 - # Portage Directories
14518 - # ===================
14519 + # ACCEPT_LICENSE is used to mask packages based on licensing restrictions.
14520 + # It may contain both license and group names, where group names are
14521
14522 diff --git a/cnf/make.conf.amd64-fbsd.diff b/cnf/make.conf.example.amd64-fbsd.diff
14523 similarity index 85%
14524 rename from cnf/make.conf.amd64-fbsd.diff
14525 rename to cnf/make.conf.example.amd64-fbsd.diff
14526 index aa07d06..1277b6d 100644
14527 --- a/cnf/make.conf.amd64-fbsd.diff
14528 +++ b/cnf/make.conf.example.amd64-fbsd.diff
14529 @@ -1,6 +1,6 @@
14530 ---- make.conf 2006-03-19 18:40:11.000000000 +0100
14531 -+++ make.conf.amd64-fbsd 2006-03-19 18:26:21.000000000 +0100
14532 -@@ -23,6 +23,11 @@
14533 +--- make.conf.example
14534 ++++ make.conf.example
14535 +@@ -22,6 +22,11 @@
14536 # Example:
14537 #USE="X gtk gnome -alsa"
14538
14539 @@ -12,9 +12,9 @@
14540 # Host and optimization settings
14541 # ==============================
14542 #
14543 -@@ -33,10 +38,35 @@
14544 - # package (and in some cases the libraries it uses) at default optimizations
14545 - # before reporting errors to developers.
14546 +@@ -39,10 +44,35 @@
14547 + # -frecord-gcc-switches, since otherwise the check could result in false
14548 + # positive results.
14549 #
14550 -# Please refer to the GCC manual for a list of possible values.
14551 +# -mcpu=<cpu-type> means optimize code for the particular type of CPU without
14552 @@ -50,12 +50,12 @@
14553 # If you set a CFLAGS above, then this line will set your default C++ flags to
14554 # the same settings.
14555 #CXXFLAGS="${CFLAGS}"
14556 -@@ -61,7 +91,7 @@
14557 +@@ -76,7 +106,7 @@
14558 # DO NOT PUT ANYTHING BUT YOUR SPECIFIC ~ARCHITECTURE IN THE LIST.
14559 # IF YOU ARE UNSURE OF YOUR ARCH, OR THE IMPLICATIONS, DO NOT MODIFY THIS.
14560 #
14561 -#ACCEPT_KEYWORDS="~arch"
14562 +#ACCEPT_KEYWORDS="~amd64-fbsd"
14563
14564 - # Portage Directories
14565 - # ===================
14566 + # ACCEPT_LICENSE is used to mask packages based on licensing restrictions.
14567 + # It may contain both license and group names, where group names are
14568
14569 diff --git a/cnf/make.conf.amd64.diff b/cnf/make.conf.example.amd64.diff
14570 similarity index 85%
14571 rename from cnf/make.conf.amd64.diff
14572 rename to cnf/make.conf.example.amd64.diff
14573 index b4a93fe..dfa4b42 100644
14574 --- a/cnf/make.conf.amd64.diff
14575 +++ b/cnf/make.conf.example.amd64.diff
14576 @@ -1,6 +1,6 @@
14577 ---- make.conf 2006-03-19 18:40:11.000000000 +0100
14578 -+++ make.conf.amd64 2006-03-19 18:26:21.000000000 +0100
14579 -@@ -23,6 +23,11 @@
14580 +--- make.conf.example
14581 ++++ make.conf.example
14582 +@@ -22,6 +22,11 @@
14583 # Example:
14584 #USE="X gtk gnome -alsa"
14585
14586 @@ -12,9 +12,9 @@
14587 # Host and optimization settings
14588 # ==============================
14589 #
14590 -@@ -33,10 +38,35 @@
14591 - # package (and in some cases the libraries it uses) at default optimizations
14592 - # before reporting errors to developers.
14593 +@@ -39,10 +44,35 @@
14594 + # -frecord-gcc-switches, since otherwise the check could result in false
14595 + # positive results.
14596 #
14597 -# Please refer to the GCC manual for a list of possible values.
14598 +# -mcpu=<cpu-type> means optimize code for the particular type of CPU without
14599 @@ -50,12 +50,12 @@
14600 # If you set a CFLAGS above, then this line will set your default C++ flags to
14601 # the same settings.
14602 #CXXFLAGS="${CFLAGS}"
14603 -@@ -61,7 +91,7 @@
14604 +@@ -76,7 +106,7 @@
14605 # DO NOT PUT ANYTHING BUT YOUR SPECIFIC ~ARCHITECTURE IN THE LIST.
14606 # IF YOU ARE UNSURE OF YOUR ARCH, OR THE IMPLICATIONS, DO NOT MODIFY THIS.
14607 #
14608 -#ACCEPT_KEYWORDS="~arch"
14609 +#ACCEPT_KEYWORDS="~amd64"
14610
14611 - # Portage Directories
14612 - # ===================
14613 + # ACCEPT_LICENSE is used to mask packages based on licensing restrictions.
14614 + # It may contain both license and group names, where group names are
14615
14616 diff --git a/cnf/make.conf.arm.diff b/cnf/make.conf.example.arm.diff
14617 similarity index 84%
14618 rename from cnf/make.conf.arm.diff
14619 rename to cnf/make.conf.example.arm.diff
14620 index e6924ad..bb93563 100644
14621 --- a/cnf/make.conf.arm.diff
14622 +++ b/cnf/make.conf.example.arm.diff
14623 @@ -1,6 +1,6 @@
14624 ---- make.conf 2006-03-19 18:40:11.000000000 +0100
14625 -+++ make.conf.arm 2006-03-19 18:26:21.000000000 +0100
14626 -@@ -23,6 +23,19 @@
14627 +--- make.conf.example
14628 ++++ make.conf.example
14629 +@@ -22,6 +22,19 @@
14630 # Example:
14631 #USE="X gtk gnome -alsa"
14632
14633 @@ -20,9 +20,9 @@
14634 # Host and optimization settings
14635 # ==============================
14636 #
14637 -@@ -33,10 +46,22 @@
14638 - # package (and in some cases the libraries it uses) at default optimizations
14639 - # before reporting errors to developers.
14640 +@@ -39,10 +52,22 @@
14641 + # -frecord-gcc-switches, since otherwise the check could result in false
14642 + # positive results.
14643 #
14644 -# Please refer to the GCC manual for a list of possible values.
14645 -#
14646
14647 diff --git a/cnf/make.conf.hppa.diff b/cnf/make.conf.example.hppa.diff
14648 similarity index 86%
14649 rename from cnf/make.conf.hppa.diff
14650 rename to cnf/make.conf.example.hppa.diff
14651 index a1fa5ef..2d17b37 100644
14652 --- a/cnf/make.conf.hppa.diff
14653 +++ b/cnf/make.conf.example.hppa.diff
14654 @@ -1,6 +1,6 @@
14655 ---- make.conf 2006-03-19 18:40:11.000000000 +0100
14656 -+++ make.conf.hppa 2006-03-24 18:36:24.000000000 +0100
14657 -@@ -23,6 +23,18 @@
14658 +--- make.conf.example
14659 ++++ make.conf.example
14660 +@@ -22,6 +22,18 @@
14661 # Example:
14662 #USE="X gtk gnome -alsa"
14663
14664 @@ -19,9 +19,9 @@
14665 # Host and optimization settings
14666 # ==============================
14667 #
14668 -@@ -33,14 +45,38 @@
14669 - # package (and in some cases the libraries it uses) at default optimizations
14670 - # before reporting errors to developers.
14671 +@@ -39,14 +51,38 @@
14672 + # -frecord-gcc-switches, since otherwise the check could result in false
14673 + # positive results.
14674 #
14675 -# Please refer to the GCC manual for a list of possible values.
14676 +# -march=<cpu-type> means to take full advantage of the ABI and instructions
14677 @@ -35,10 +35,9 @@
14678 +#
14679 +# Architectures types supported in gcc-3.2 and higher: 1.0, 1.1 and 2.0
14680 +# Note that 64bit userspace is not yet implemented.
14681 - #
14682 --#CFLAGS="-O2 -pipe"
14683 ++#
14684 +# Decent examples:
14685 - #
14686 ++#
14687 +#
14688 +# Use this one if you have a hppa1.1
14689 +#CFLAGS="-march=1.1 -O2 -pipe -mschedule=7100LC"
14690 @@ -47,8 +46,9 @@
14691 +# Note that -march=2.0 was unstable on some stations.
14692 +# -march=1.0 will create problems too.
14693 +#CFLAGS="-O2 -pipe -mschedule=8000"
14694 -+#
14695 -+#
14696 + #
14697 +-#CFLAGS="-O2 -pipe"
14698 + #
14699 # If you set a CFLAGS above, then this line will set your default C++ flags to
14700 # the same settings.
14701 #CXXFLAGS="${CFLAGS}"
14702 @@ -60,12 +60,12 @@
14703 # If you set a CFLAGS above, then this line will set your default FORTRAN 77
14704 # flags to the same settings.
14705 #FFLAGS="${CFLAGS}"
14706 -@@ -61,7 +97,7 @@
14707 +@@ -76,7 +112,7 @@
14708 # DO NOT PUT ANYTHING BUT YOUR SPECIFIC ~ARCHITECTURE IN THE LIST.
14709 # IF YOU ARE UNSURE OF YOUR ARCH, OR THE IMPLICATIONS, DO NOT MODIFY THIS.
14710 #
14711 -#ACCEPT_KEYWORDS="~arch"
14712 +#ACCEPT_KEYWORDS="~hppa"
14713
14714 - # Portage Directories
14715 - # ===================
14716 + # ACCEPT_LICENSE is used to mask packages based on licensing restrictions.
14717 + # It may contain both license and group names, where group names are
14718
14719 diff --git a/cnf/make.conf.ia64.diff b/cnf/make.conf.example.ia64.diff
14720 similarity index 68%
14721 rename from cnf/make.conf.ia64.diff
14722 rename to cnf/make.conf.example.ia64.diff
14723 index 3b144d7..68a0cb0 100644
14724 --- a/cnf/make.conf.ia64.diff
14725 +++ b/cnf/make.conf.example.ia64.diff
14726 @@ -1,5 +1,5 @@
14727 ---- make.conf
14728 -+++ make.conf.ia64
14729 +--- make.conf.example
14730 ++++ make.conf.example
14731 @@ -22,6 +22,13 @@
14732 # Example:
14733 #USE="X gtk gnome -alsa"
14734 @@ -14,12 +14,12 @@
14735 # Host and optimization settings
14736 # ==============================
14737 #
14738 -@@ -75,7 +82,7 @@
14739 +@@ -76,7 +83,7 @@
14740 # DO NOT PUT ANYTHING BUT YOUR SPECIFIC ~ARCHITECTURE IN THE LIST.
14741 # IF YOU ARE UNSURE OF YOUR ARCH, OR THE IMPLICATIONS, DO NOT MODIFY THIS.
14742 #
14743 -#ACCEPT_KEYWORDS="~arch"
14744 +#ACCEPT_KEYWORDS="~ia64"
14745
14746 - # Portage Directories
14747 - # ===================
14748 + # ACCEPT_LICENSE is used to mask packages based on licensing restrictions.
14749 + # It may contain both license and group names, where group names are
14750
14751 diff --git a/cnf/make.conf.m68k.diff b/cnf/make.conf.example.m68k.diff
14752 similarity index 72%
14753 rename from cnf/make.conf.m68k.diff
14754 rename to cnf/make.conf.example.m68k.diff
14755 index ac3d074..f967461 100644
14756 --- a/cnf/make.conf.m68k.diff
14757 +++ b/cnf/make.conf.example.m68k.diff
14758 @@ -1,6 +1,6 @@
14759 ---- make.conf
14760 -+++ make.conf.m68k
14761 -@@ -23,6 +23,13 @@
14762 +--- make.conf.example
14763 ++++ make.conf.example
14764 +@@ -22,6 +22,13 @@
14765 # Example:
14766 #USE="X gtk gnome -alsa"
14767
14768 @@ -14,7 +14,7 @@
14769 # Host and optimization settings
14770 # ==============================
14771 #
14772 -@@ -35,7 +42,7 @@
14773 +@@ -41,7 +48,7 @@
14774 #
14775 # Please refer to the GCC manual for a list of possible values.
14776 #
14777 @@ -23,12 +23,12 @@
14778 #
14779 # If you set a CFLAGS above, then this line will set your default C++ flags to
14780 # the same settings.
14781 -@@ -61,7 +68,7 @@
14782 +@@ -76,7 +83,7 @@
14783 # DO NOT PUT ANYTHING BUT YOUR SPECIFIC ~ARCHITECTURE IN THE LIST.
14784 # IF YOU ARE UNSURE OF YOUR ARCH, OR THE IMPLICATIONS, DO NOT MODIFY THIS.
14785 #
14786 -#ACCEPT_KEYWORDS="~arch"
14787 +#ACCEPT_KEYWORDS="~m68k"
14788
14789 - # Portage Directories
14790 - # ===================
14791 + # ACCEPT_LICENSE is used to mask packages based on licensing restrictions.
14792 + # It may contain both license and group names, where group names are
14793
14794 diff --git a/cnf/make.conf.mips.diff b/cnf/make.conf.example.mips.diff
14795 similarity index 75%
14796 rename from cnf/make.conf.mips.diff
14797 rename to cnf/make.conf.example.mips.diff
14798 index 1ee10ec..7d3d83d 100644
14799 --- a/cnf/make.conf.mips.diff
14800 +++ b/cnf/make.conf.example.mips.diff
14801 @@ -1,6 +1,6 @@
14802 ---- make.conf 2006-03-19 18:40:11.000000000 +0100
14803 -+++ make.conf.mips 2006-03-19 18:26:21.000000000 +0100
14804 -@@ -23,6 +23,13 @@
14805 +--- make.conf.example
14806 ++++ make.conf.example
14807 +@@ -22,6 +22,13 @@
14808 # Example:
14809 #USE="X gtk gnome -alsa"
14810
14811 @@ -14,9 +14,9 @@
14812 # Host and optimization settings
14813 # ==============================
14814 #
14815 -@@ -33,10 +40,15 @@
14816 - # package (and in some cases the libraries it uses) at default optimizations
14817 - # before reporting errors to developers.
14818 +@@ -39,10 +46,15 @@
14819 + # -frecord-gcc-switches, since otherwise the check could result in false
14820 + # positive results.
14821 #
14822 -# Please refer to the GCC manual for a list of possible values.
14823 +# -mcpu=<cpu-type> for MIPS systems selects the type of processor you want
14824 @@ -32,12 +32,12 @@
14825 # If you set a CFLAGS above, then this line will set your default C++ flags to
14826 # the same settings.
14827 #CXXFLAGS="${CFLAGS}"
14828 -@@ -61,7 +73,7 @@
14829 +@@ -76,7 +88,7 @@
14830 # DO NOT PUT ANYTHING BUT YOUR SPECIFIC ~ARCHITECTURE IN THE LIST.
14831 # IF YOU ARE UNSURE OF YOUR ARCH, OR THE IMPLICATIONS, DO NOT MODIFY THIS.
14832 #
14833 -#ACCEPT_KEYWORDS="~arch"
14834 +#ACCEPT_KEYWORDS="~mips"
14835
14836 - # Portage Directories
14837 - # ===================
14838 + # ACCEPT_LICENSE is used to mask packages based on licensing restrictions.
14839 + # It may contain both license and group names, where group names are
14840
14841 diff --git a/cnf/make.conf.ppc.diff b/cnf/make.conf.example.ppc.diff
14842 similarity index 90%
14843 rename from cnf/make.conf.ppc.diff
14844 rename to cnf/make.conf.example.ppc.diff
14845 index 76a97d3..b34de8e 100644
14846 --- a/cnf/make.conf.ppc.diff
14847 +++ b/cnf/make.conf.example.ppc.diff
14848 @@ -1,6 +1,6 @@
14849 ---- make.conf 2006-03-19 18:40:11.000000000 +0100
14850 -+++ make.conf.ppc 2006-03-19 18:26:21.000000000 +0100
14851 -@@ -23,6 +23,13 @@
14852 +--- make.conf.example
14853 ++++ make.conf.example
14854 +@@ -22,6 +22,13 @@
14855 # Example:
14856 #USE="X gtk gnome -alsa"
14857
14858 @@ -14,9 +14,9 @@
14859 # Host and optimization settings
14860 # ==============================
14861 #
14862 -@@ -33,10 +40,56 @@
14863 - # package (and in some cases the libraries it uses) at default optimizations
14864 - # before reporting errors to developers.
14865 +@@ -39,10 +46,56 @@
14866 + # -frecord-gcc-switches, since otherwise the check could result in false
14867 + # positive results.
14868 #
14869 -# Please refer to the GCC manual for a list of possible values.
14870 +# -mcpu=<cpu-type> for PowerPC systems selects the type of processor you want
14871 @@ -44,13 +44,13 @@
14872 +#
14873 +# Long term testing has shown that -O3 opts can be unreliable on G4's but work
14874 +# on G3 series processors or earlier.
14875 -+#
14876 -+# The following is the suggested CFLAGS for a generic G4 cpu
14877 -+#
14878 -+#CFLAGS="-O2 -pipe -mcpu=G4 -maltivec -mabi=altivec -fno-strict-aliasing"
14879 #
14880 -#CFLAGS="-O2 -pipe"
14881 ++# The following is the suggested CFLAGS for a generic G4 cpu
14882 #
14883 ++#CFLAGS="-O2 -pipe -mcpu=G4 -maltivec -mabi=altivec -fno-strict-aliasing"
14884 ++#
14885 ++#
14886 +# All non G4 PPC boxen should choose this next option. It will work fine for
14887 +# all G3 and pre machines. (note it will not hurt pre G3 machines either to
14888 +# use this mcpu option as it is the default for gcc 3.2.x anyway)
14889 @@ -73,7 +73,7 @@
14890 # If you set a CFLAGS above, then this line will set your default C++ flags to
14891 # the same settings.
14892 #CXXFLAGS="${CFLAGS}"
14893 -@@ -61,7 +114,10 @@
14894 +@@ -76,7 +129,10 @@
14895 # DO NOT PUT ANYTHING BUT YOUR SPECIFIC ~ARCHITECTURE IN THE LIST.
14896 # IF YOU ARE UNSURE OF YOUR ARCH, OR THE IMPLICATIONS, DO NOT MODIFY THIS.
14897 #
14898 @@ -83,5 +83,5 @@
14899 +#
14900 +#ACCEPT_KEYWORDS="~ppc"
14901
14902 - # Portage Directories
14903 - # ===================
14904 + # ACCEPT_LICENSE is used to mask packages based on licensing restrictions.
14905 + # It may contain both license and group names, where group names are
14906
14907 diff --git a/cnf/make.conf.ppc64.diff b/cnf/make.conf.example.ppc64.diff
14908 similarity index 85%
14909 rename from cnf/make.conf.ppc64.diff
14910 rename to cnf/make.conf.example.ppc64.diff
14911 index 2aaec01..961508e 100644
14912 --- a/cnf/make.conf.ppc64.diff
14913 +++ b/cnf/make.conf.example.ppc64.diff
14914 @@ -1,6 +1,6 @@
14915 ---- make.conf 2006-03-19 18:40:11.000000000 +0100
14916 -+++ make.conf.ppc64 2006-03-19 18:26:21.000000000 +0100
14917 -@@ -23,6 +23,13 @@
14918 +--- make.conf.example
14919 ++++ make.conf.example
14920 +@@ -22,6 +22,13 @@
14921 # Example:
14922 #USE="X gtk gnome -alsa"
14923
14924 @@ -14,9 +14,9 @@
14925 # Host and optimization settings
14926 # ==============================
14927 #
14928 -@@ -33,9 +40,38 @@
14929 - # package (and in some cases the libraries it uses) at default optimizations
14930 - # before reporting errors to developers.
14931 +@@ -39,9 +46,38 @@
14932 + # -frecord-gcc-switches, since otherwise the check could result in false
14933 + # positive results.
14934 #
14935 -# Please refer to the GCC manual for a list of possible values.
14936 +# -mtune=<cpu-type> for PowerPC64 systems instructs the gcc compiler that
14937 @@ -33,15 +33,15 @@
14938 +# 970 (aka G5), and power5.
14939 +#
14940 +# RS64 processors should specify power3.
14941 -+#
14942 + #
14943 +-#CFLAGS="-O2 -pipe"
14944 +# Additional options of interest:
14945 +#
14946 +# -maltivec enables optional altivec support and should be used
14947 +# only for 970 processors. It also requires that you have
14948 +# the alitvec option compiled into your kernel to take full advantage of this
14949 +# feature. Note: you should also include -mabi=altivec flag if using this option.
14950 - #
14951 --#CFLAGS="-O2 -pipe"
14952 ++#
14953 +# -O3 for the most part seems ok but should be used with caution as
14954 +# for instance app-editors/vim has problems if it is used. -O2 is a
14955 +# good selection.
14956 @@ -55,7 +55,7 @@
14957 #
14958 # If you set a CFLAGS above, then this line will set your default C++ flags to
14959 # the same settings.
14960 -@@ -61,7 +97,10 @@
14961 +@@ -76,7 +112,10 @@
14962 # DO NOT PUT ANYTHING BUT YOUR SPECIFIC ~ARCHITECTURE IN THE LIST.
14963 # IF YOU ARE UNSURE OF YOUR ARCH, OR THE IMPLICATIONS, DO NOT MODIFY THIS.
14964 #
14965 @@ -65,5 +65,5 @@
14966 +#
14967 +#ACCEPT_KEYWORDS="ppc64"
14968
14969 - # Portage Directories
14970 - # ===================
14971 + # ACCEPT_LICENSE is used to mask packages based on licensing restrictions.
14972 + # It may contain both license and group names, where group names are
14973
14974 diff --git a/cnf/make.conf.s390.diff b/cnf/make.conf.example.s390.diff
14975 similarity index 68%
14976 rename from cnf/make.conf.s390.diff
14977 rename to cnf/make.conf.example.s390.diff
14978 index 2acb085..c780762 100644
14979 --- a/cnf/make.conf.s390.diff
14980 +++ b/cnf/make.conf.example.s390.diff
14981 @@ -1,5 +1,5 @@
14982 ---- make.conf
14983 -+++ make.conf.s390
14984 +--- make.conf.example
14985 ++++ make.conf.example
14986 @@ -22,6 +22,13 @@
14987 # Example:
14988 #USE="X gtk gnome -alsa"
14989 @@ -14,12 +14,12 @@
14990 # Host and optimization settings
14991 # ==============================
14992 #
14993 -@@ -75,7 +82,7 @@
14994 +@@ -76,7 +83,7 @@
14995 # DO NOT PUT ANYTHING BUT YOUR SPECIFIC ~ARCHITECTURE IN THE LIST.
14996 # IF YOU ARE UNSURE OF YOUR ARCH, OR THE IMPLICATIONS, DO NOT MODIFY THIS.
14997 #
14998 -#ACCEPT_KEYWORDS="~arch"
14999 +#ACCEPT_KEYWORDS="~s390"
15000
15001 - # Portage Directories
15002 - # ===================
15003 + # ACCEPT_LICENSE is used to mask packages based on licensing restrictions.
15004 + # It may contain both license and group names, where group names are
15005
15006 diff --git a/cnf/make.conf.sh.diff b/cnf/make.conf.example.sh.diff
15007 similarity index 80%
15008 rename from cnf/make.conf.sh.diff
15009 rename to cnf/make.conf.example.sh.diff
15010 index f2784e6..9699a70 100644
15011 --- a/cnf/make.conf.sh.diff
15012 +++ b/cnf/make.conf.example.sh.diff
15013 @@ -1,6 +1,6 @@
15014 ---- make.conf 2006-03-19 18:40:11.000000000 +0100
15015 -+++ make.conf.sh 2006-03-19 18:26:21.000000000 +0100
15016 -@@ -23,6 +23,19 @@
15017 +--- make.conf.example
15018 ++++ make.conf.example
15019 +@@ -22,6 +22,19 @@
15020 # Example:
15021 #USE="X gtk gnome -alsa"
15022
15023 @@ -20,20 +20,19 @@
15024 # Host and optimization settings
15025 # ==============================
15026 #
15027 -@@ -33,10 +46,15 @@
15028 - # package (and in some cases the libraries it uses) at default optimizations
15029 - # before reporting errors to developers.
15030 +@@ -39,10 +52,15 @@
15031 + # -frecord-gcc-switches, since otherwise the check could result in false
15032 + # positive results.
15033 #
15034 -# Please refer to the GCC manual for a list of possible values.
15035 --#
15036 --#CFLAGS="-O2 -pipe"
15037 +# -m# optimize code for the particular type of CPU. The number should match
15038 +# your CHOST so if you are using "sh4-unknown-linux-gnu", you should have
15039 +# -m4 below.
15040 #
15041 +-#CFLAGS="-O2 -pipe"
15042 +# For a full listing of supported CPU models, please refer to the GCC website:
15043 +# http://gcc.gnu.org/onlinedocs/gcc-3.3/gcc/SH-Options.html
15044 -+#
15045 + #
15046 +#CFLAGS="-m4 -O2 -pipe"
15047 +
15048 # If you set a CFLAGS above, then this line will set your default C++ flags to
15049
15050 diff --git a/cnf/make.conf.sparc-fbsd.diff b/cnf/make.conf.example.sparc-fbsd.diff
15051 similarity index 68%
15052 rename from cnf/make.conf.sparc-fbsd.diff
15053 rename to cnf/make.conf.example.sparc-fbsd.diff
15054 index f3d3bca..25e6f46 100644
15055 --- a/cnf/make.conf.sparc-fbsd.diff
15056 +++ b/cnf/make.conf.example.sparc-fbsd.diff
15057 @@ -1,6 +1,6 @@
15058 ---- make.conf 2006-10-16 17:06:32 +0100
15059 -+++ make.conf.sparc-fbsd 2006-10-16 17:09:22 +0100
15060 -@@ -23,6 +23,13 @@
15061 +--- make.conf.example
15062 ++++ make.conf.example
15063 +@@ -22,6 +22,13 @@
15064 # Example:
15065 #USE="X gtk gnome -alsa"
15066
15067 @@ -14,12 +14,12 @@
15068 # Host and optimization settings
15069 # ==============================
15070 #
15071 -@@ -61,7 +68,7 @@
15072 +@@ -76,7 +83,7 @@
15073 # DO NOT PUT ANYTHING BUT YOUR SPECIFIC ~ARCHITECTURE IN THE LIST.
15074 # IF YOU ARE UNSURE OF YOUR ARCH, OR THE IMPLICATIONS, DO NOT MODIFY THIS.
15075 #
15076 -#ACCEPT_KEYWORDS="~arch"
15077 +ACCEPT_KEYWORDS="~sparc-fbsd"
15078
15079 - # Portage Directories
15080 - # ===================
15081 + # ACCEPT_LICENSE is used to mask packages based on licensing restrictions.
15082 + # It may contain both license and group names, where group names are
15083
15084 diff --git a/cnf/make.conf.sparc.diff b/cnf/make.conf.example.sparc.diff
15085 similarity index 84%
15086 rename from cnf/make.conf.sparc.diff
15087 rename to cnf/make.conf.example.sparc.diff
15088 index e016a0b..c68a95a 100644
15089 --- a/cnf/make.conf.sparc.diff
15090 +++ b/cnf/make.conf.example.sparc.diff
15091 @@ -1,6 +1,6 @@
15092 ---- make.conf 2006-03-19 18:40:11.000000000 +0100
15093 -+++ make.conf.sparc 2006-03-19 18:26:21.000000000 +0100
15094 -@@ -23,6 +23,15 @@
15095 +--- make.conf.example
15096 ++++ make.conf.example
15097 +@@ -22,6 +22,15 @@
15098 # Example:
15099 #USE="X gtk gnome -alsa"
15100
15101 @@ -16,9 +16,9 @@
15102 # Host and optimization settings
15103 # ==============================
15104 #
15105 -@@ -33,10 +42,34 @@
15106 - # package (and in some cases the libraries it uses) at default optimizations
15107 - # before reporting errors to developers.
15108 +@@ -39,10 +48,34 @@
15109 + # -frecord-gcc-switches, since otherwise the check could result in false
15110 + # positive results.
15111 #
15112 -# Please refer to the GCC manual for a list of possible values.
15113 -#
15114 @@ -55,12 +55,12 @@
15115 # If you set a CFLAGS above, then this line will set your default C++ flags to
15116 # the same settings.
15117 #CXXFLAGS="${CFLAGS}"
15118 -@@ -61,7 +94,7 @@
15119 +@@ -76,7 +109,7 @@
15120 # DO NOT PUT ANYTHING BUT YOUR SPECIFIC ~ARCHITECTURE IN THE LIST.
15121 # IF YOU ARE UNSURE OF YOUR ARCH, OR THE IMPLICATIONS, DO NOT MODIFY THIS.
15122 #
15123 -#ACCEPT_KEYWORDS="~arch"
15124 +#ACCEPT_KEYWORDS="~sparc"
15125
15126 - # Portage Directories
15127 - # ===================
15128 + # ACCEPT_LICENSE is used to mask packages based on licensing restrictions.
15129 + # It may contain both license and group names, where group names are
15130
15131 diff --git a/cnf/make.conf.x86-fbsd.diff b/cnf/make.conf.example.x86-fbsd.diff
15132 similarity index 86%
15133 rename from cnf/make.conf.x86-fbsd.diff
15134 rename to cnf/make.conf.example.x86-fbsd.diff
15135 index 9fec4f1..d5e02fe 100644
15136 --- a/cnf/make.conf.x86-fbsd.diff
15137 +++ b/cnf/make.conf.example.x86-fbsd.diff
15138 @@ -1,6 +1,6 @@
15139 ---- make.conf 2006-03-19 18:40:11.000000000 +0100
15140 -+++ make.conf.x86-fbsd 2006-03-19 18:26:21.000000000 +0100
15141 -@@ -23,6 +23,16 @@
15142 +--- make.conf.example
15143 ++++ make.conf.example
15144 +@@ -22,6 +22,16 @@
15145 # Example:
15146 #USE="X gtk gnome -alsa"
15147
15148 @@ -17,9 +17,9 @@
15149 # Host and optimization settings
15150 # ==============================
15151 #
15152 -@@ -33,10 +43,34 @@
15153 - # package (and in some cases the libraries it uses) at default optimizations
15154 - # before reporting errors to developers.
15155 +@@ -39,10 +49,34 @@
15156 + # -frecord-gcc-switches, since otherwise the check could result in false
15157 + # positive results.
15158 #
15159 -# Please refer to the GCC manual for a list of possible values.
15160 +# -mtune=<cpu-type> means optimize code for the particular type of CPU without
15161 @@ -54,12 +54,12 @@
15162 # If you set a CFLAGS above, then this line will set your default C++ flags to
15163 # the same settings.
15164 #CXXFLAGS="${CFLAGS}"
15165 -@@ -61,7 +95,7 @@
15166 +@@ -76,7 +110,7 @@
15167 # DO NOT PUT ANYTHING BUT YOUR SPECIFIC ~ARCHITECTURE IN THE LIST.
15168 # IF YOU ARE UNSURE OF YOUR ARCH, OR THE IMPLICATIONS, DO NOT MODIFY THIS.
15169 #
15170 -#ACCEPT_KEYWORDS="~arch"
15171 +ACCEPT_KEYWORDS="~x86-fbsd"
15172
15173 - # Portage Directories
15174 - # ===================
15175 + # ACCEPT_LICENSE is used to mask packages based on licensing restrictions.
15176 + # It may contain both license and group names, where group names are
15177
15178 diff --git a/cnf/make.conf.x86.diff b/cnf/make.conf.example.x86.diff
15179 similarity index 90%
15180 rename from cnf/make.conf.x86.diff
15181 rename to cnf/make.conf.example.x86.diff
15182 index f908451..3247126 100644
15183 --- a/cnf/make.conf.x86.diff
15184 +++ b/cnf/make.conf.example.x86.diff
15185 @@ -1,6 +1,6 @@
15186 ---- make.conf 2007-01-10 03:22:07.410548112 +0100
15187 -+++ make.conf.x86 2007-01-10 03:22:13.206910362 +0100
15188 -@@ -23,6 +23,15 @@
15189 +--- make.conf.example
15190 ++++ make.conf.example
15191 +@@ -22,6 +22,15 @@
15192 # Example:
15193 #USE="X gtk gnome -alsa"
15194
15195 @@ -16,9 +16,9 @@
15196 # Host and optimization settings
15197 # ==============================
15198 #
15199 -@@ -33,10 +42,65 @@
15200 - # package (and in some cases the libraries it uses) at default optimizations
15201 - # before reporting errors to developers.
15202 +@@ -39,10 +48,65 @@
15203 + # -frecord-gcc-switches, since otherwise the check could result in false
15204 + # positive results.
15205 #
15206 -# Please refer to the GCC manual for a list of possible values.
15207 -#
15208 @@ -85,12 +85,12 @@
15209 # If you set a CFLAGS above, then this line will set your default C++ flags to
15210 # the same settings.
15211 #CXXFLAGS="${CFLAGS}"
15212 -@@ -61,7 +125,7 @@
15213 +@@ -76,7 +140,7 @@
15214 # DO NOT PUT ANYTHING BUT YOUR SPECIFIC ~ARCHITECTURE IN THE LIST.
15215 # IF YOU ARE UNSURE OF YOUR ARCH, OR THE IMPLICATIONS, DO NOT MODIFY THIS.
15216 #
15217 -#ACCEPT_KEYWORDS="~arch"
15218 +#ACCEPT_KEYWORDS="~x86"
15219
15220 - # Portage Directories
15221 - # ===================
15222 + # ACCEPT_LICENSE is used to mask packages based on licensing restrictions.
15223 + # It may contain both license and group names, where group names are
15224
15225 diff --git a/cnf/make.globals b/cnf/make.globals
15226 index ada91f8..013c556 100644
15227 --- a/cnf/make.globals
15228 +++ b/cnf/make.globals
15229 @@ -1,4 +1,4 @@
15230 -# Copyright 1999-2011 Gentoo Foundation
15231 +# Copyright 1999-2013 Gentoo Foundation
15232 # Distributed under the terms of the GNU General Public License v2
15233 # System-wide defaults for the Portage system
15234
15235 @@ -19,24 +19,21 @@ LDFLAGS=""
15236 FFLAGS=""
15237 FCFLAGS=""
15238
15239 -# Default rsync mirror
15240 -SYNC="rsync://rsync.gentoo.org/gentoo-portage"
15241 -
15242 # Default distfiles mirrors. This rotation has multiple hosts and is reliable.
15243 # Approved by the mirror-admin team.
15244 GENTOO_MIRRORS="http://distfiles.gentoo.org"
15245
15246 ACCEPT_LICENSE="* -@EULA"
15247 ACCEPT_PROPERTIES="*"
15248 +ACCEPT_RESTRICT="*"
15249
15250 -# Repository Paths
15251 -PORTDIR=/usr/portage
15252 -DISTDIR=${PORTDIR}/distfiles
15253 -PKGDIR=${PORTDIR}/packages
15254 -RPMDIR=${PORTDIR}/rpm
15255 +# Miscellaneous paths
15256 +DISTDIR="/usr/portage/distfiles"
15257 +PKGDIR="/usr/portage/packages"
15258 +RPMDIR="/usr/portage/rpm"
15259
15260 # Temporary build directory
15261 -PORTAGE_TMPDIR=/var/tmp
15262 +PORTAGE_TMPDIR="/var/tmp"
15263
15264 # Fetching command (3 tries, passive ftp for firewall compatibility)
15265 FETCHCOMMAND="wget -t 3 -T 60 --passive-ftp -O \"\${DISTDIR}/\${FILE}\" \"\${URI}\""
15266 @@ -45,17 +42,19 @@ RESUMECOMMAND="wget -c -t 3 -T 60 --passive-ftp -O \"\${DISTDIR}/\${FILE}\" \"\$
15267 FETCHCOMMAND_RSYNC="rsync -avP \"\${URI}\" \"\${DISTDIR}/\${FILE}\""
15268 RESUMECOMMAND_RSYNC="rsync -avP \"\${URI}\" \"\${DISTDIR}/\${FILE}\""
15269
15270 -FETCHCOMMAND_SSH="bash -c \"x=\\\${2#ssh://} ; host=\\\${x%%/*} ; port=\\\${host##*:} ; host=\\\${host%:*} ; [[ \\\${host} = \\\${port} ]] && port=22 ; exec rsync --rsh=\\\"ssh -p\\\${port}\\\" -avP \\\"\\\${host}:/\\\${x#*/}\\\" \\\"\\\$1\\\"\" rsync \"\${DISTDIR}/\${FILE}\" \"\${URI}\""
15271 +# NOTE: rsync will evaluate quotes embedded inside PORTAGE_SSH_OPTS
15272 +FETCHCOMMAND_SSH="bash -c \"x=\\\${2#ssh://} ; host=\\\${x%%/*} ; port=\\\${host##*:} ; host=\\\${host%:*} ; [[ \\\${host} = \\\${port} ]] && port=22 ; exec rsync --rsh=\\\"ssh -p\\\${port} \\\${3}\\\" -avP \\\"\\\${host}:/\\\${x#*/}\\\" \\\"\\\$1\\\"\" rsync \"\${DISTDIR}/\${FILE}\" \"\${URI}\" \"\${PORTAGE_SSH_OPTS}\""
15273 RESUMECOMMAND_SSH=${FETCHCOMMAND_SSH}
15274
15275 -FETCHCOMMAND_SFTP="bash -c \"x=\\\${2#sftp://} ; host=\\\${x%%/*} ; port=\\\${host##*:} ; host=\\\${host%:*} ; [[ \\\${host} = \\\${port} ]] && port=22 ; exec sftp -P \\\${port} \\\"\\\${host}:/\\\${x#*/}\\\" \\\"\\\$1\\\"\" sftp \"\${DISTDIR}/\${FILE}\" \"\${URI}\""
15276 +# NOTE: bash eval is used to evaluate quotes embedded inside PORTAGE_SSH_OPTS
15277 +FETCHCOMMAND_SFTP="bash -c \"x=\\\${2#sftp://} ; host=\\\${x%%/*} ; port=\\\${host##*:} ; host=\\\${host%:*} ; [[ \\\${host} = \\\${port} ]] && port=22 ; eval \\\"declare -a ssh_opts=(\\\${3})\\\" ; exec sftp -P \\\${port} \\\"\\\${ssh_opts[@]}\\\" \\\"\\\${host}:/\\\${x#*/}\\\" \\\"\\\$1\\\"\" sftp \"\${DISTDIR}/\${FILE}\" \"\${URI}\" \"\${PORTAGE_SSH_OPTS}\""
15278
15279 # Default user options
15280 FEATURES="assume-digests binpkg-logs
15281 config-protect-if-modified distlocks ebuild-locks
15282 - fixlafiles news parallel-fetch parse-eapi-ebuild-head protect-owned
15283 + fixlafiles merge-sync news parallel-fetch preserve-libs protect-owned
15284 sandbox sfperms strict unknown-features-warn unmerge-logs
15285 - unmerge-orphans userfetch"
15286 + unmerge-orphans userfetch userpriv usersandbox usersync"
15287
15288 # Ignore file collisions in /lib/modules since files inside this directory
15289 # are never unmerged, and therefore collisions must be ignored in order for
15290 @@ -63,16 +62,9 @@ FEATURES="assume-digests binpkg-logs
15291 # Ignore file collisions for unowned *.pyo and *.pyc files, this helps during
15292 # transition from compiling python modules in live file system to compiling
15293 # them in src_install() function.
15294 -COLLISION_IGNORE="/lib/modules/* *.py[co]"
15295 +COLLISION_IGNORE="/lib/modules/* *.py[co] *\$py.class"
15296 UNINSTALL_IGNORE="/lib/modules/*"
15297
15298 -# Enable preserve-libs for testing with portage versions that support it.
15299 -# This setting is commented out for portage versions that don't support it.
15300 -FEATURES="${FEATURES} preserve-libs"
15301 -
15302 -# Default chunksize for binhost comms
15303 -PORTAGE_BINHOST_CHUNKSIZE="3000"
15304 -
15305 # By default wait 5 secs before cleaning a package
15306 CLEAN_DELAY="5"
15307
15308 @@ -100,7 +92,7 @@ PORTAGE_RSYNC_RETRIES="-1"
15309 # Number of seconds rsync will wait before timing out.
15310 #RSYNC_TIMEOUT="180"
15311
15312 -PORTAGE_RSYNC_OPTS="--recursive --links --safe-links --perms --times --compress --force --whole-file --delete --stats --human-readable --timeout=180 --exclude=/distfiles --exclude=/local --exclude=/packages"
15313 +PORTAGE_RSYNC_OPTS="--recursive --links --safe-links --perms --times --omit-dir-times --compress --force --whole-file --delete --stats --human-readable --timeout=180 --exclude=/distfiles --exclude=/local --exclude=/packages"
15314
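Rather than overriding this whole default, additional rsync switches are normally appended through PORTAGE_RSYNC_EXTRA_OPTS in make.conf; for example (the chosen switch is illustrative):

    PORTAGE_RSYNC_EXTRA_OPTS="--quiet"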
15315 # The number of days after the last `emerge --sync` that a warning
15316 # message should be produced.
15317 @@ -121,7 +113,7 @@ PORTAGE_WORKDIR_MODE="0700"
15318
15319 # Some defaults for elog
15320 PORTAGE_ELOG_CLASSES="log warn error"
15321 -PORTAGE_ELOG_SYSTEM="save_summary echo"
15322 +PORTAGE_ELOG_SYSTEM="save_summary:log,warn,error,qa echo"
15323
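The new default uses the module:classes syntax, which restricts a logging module to a subset of PORTAGE_ELOG_CLASSES. A hypothetical user override in make.conf that additionally mails errors:

    PORTAGE_ELOG_SYSTEM="save_summary:log,warn,error,qa mail_summary:error"
    PORTAGE_ELOG_MAILURI="admin@example.org"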
15324 PORTAGE_ELOG_MAILURI="root"
15325 PORTAGE_ELOG_MAILSUBJECT="[portage] ebuild log for \${PACKAGE} on \${HOST}"
15326 @@ -130,6 +122,10 @@ PORTAGE_ELOG_MAILFROM="portage@localhost"
15327 # Signing command used by repoman
15328 PORTAGE_GPG_SIGNING_COMMAND="gpg --sign --digest-algo SHA256 --clearsign --yes --default-key \"\${PORTAGE_GPG_KEY}\" --homedir \"\${PORTAGE_GPG_DIR}\" \"\${FILE}\""
15329
15330 +# Security labels are special, see bug #461868.
15331 +# system.nfs4_acl attributes are irrelevant, see bug #475496.
15332 +PORTAGE_XATTR_EXCLUDE="security.* system.nfs4_acl"
15333 +
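A user who needs to skip further attribute namespaces can set the variable in make.conf, for example (the extra pattern is illustrative, and the defaults are restated alongside it):

    PORTAGE_XATTR_EXCLUDE="security.* system.nfs4_acl btrfs.*"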
15334 # *****************************
15335 # ** DO NOT EDIT THIS FILE **
15336 # ***************************************************
15337
15338 diff --git a/cnf/metadata.dtd b/cnf/metadata.dtd
15339 index d97642a..ff2649c 100644
15340 --- a/cnf/metadata.dtd
15341 +++ b/cnf/metadata.dtd
15342 @@ -5,7 +5,7 @@
15343 <!ATTLIST catmetadata pkgname CDATA "">
15344
15345 <!-- Metadata for a package -->
15346 -<!ELEMENT pkgmetadata ( (herd|maintainer|longdescription|use|upstream)* )>
15347 +<!ELEMENT pkgmetadata ( (herd|maintainer|natural-name|longdescription|use|upstream)* )>
15348 <!ATTLIST pkgmetadata pkgname CDATA "">
15349
15350 <!-- One tag for each herd this package is assigned to. -->
15351 @@ -14,6 +14,9 @@
15352 <!-- One tag for each maintainer of a package, multiple allowed-->
15353 <!ELEMENT maintainer ( email, (description| name)* )>
15354
15355 + <!-- Natural name for package, example: LibreOffice (for app-office/libreoffice) -->
15356 + <!ELEMENT natural-name (#PCDATA) >
15357 +
15358 <!-- A long description of the package in freetext-->
15359 <!ELEMENT longdescription (#PCDATA|pkg|cat)* >
15360
15361 @@ -61,7 +64,7 @@
15362 <!ELEMENT bugs-to (#PCDATA)>
15363 <!-- specify a type of package identification tracker -->
15364 <!ELEMENT remote-id (#PCDATA)>
15365 - <!ATTLIST remote-id type (freshmeat|sourceforge|sourceforge-jp|cpan|vim|google-code|ctan|pypi|rubyforge|cran) #REQUIRED>
15366 + <!ATTLIST remote-id type (bitbucket|cpan|cpan-module|cpe|cran|ctan|freecode|freshmeat|github|gitorious|google-code|launchpad|pear|pecl|pypi|rubyforge|rubygems|sourceforge|sourceforge-jp|vim) #REQUIRED>
15367
15368 <!-- category/package information for cross-linking in descriptions
15369 and useflag descriptions -->
15370
15371 diff --git a/cnf/repos.conf b/cnf/repos.conf
15372 new file mode 100644
15373 index 0000000..8c657da
15374 --- /dev/null
15375 +++ b/cnf/repos.conf
15376 @@ -0,0 +1,7 @@
15377 +[DEFAULT]
15378 +main-repo = gentoo
15379 +
15380 +[gentoo]
15381 +location = /usr/portage
15382 +sync-type = rsync
15383 +sync-uri = rsync://rsync.gentoo.org/gentoo-portage
15384
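The same INI-style layout works for registering extra repositories from /etc/portage/repos.conf; a hypothetical, manually managed local overlay could be added as:

    [myoverlay]
    location = /var/lib/overlays/myoverlay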
15385 diff --git a/cnf/sets/portage.conf b/cnf/sets/portage.conf
15386 index c5c787b..b73afb1 100644
15387 --- a/cnf/sets/portage.conf
15388 +++ b/cnf/sets/portage.conf
15389 @@ -51,7 +51,7 @@ class = portage.sets.libs.PreservedLibraryConsumerSet
15390 [live-rebuild]
15391 class = portage.sets.dbapi.VariableSet
15392 variable = INHERITED
15393 -includes = bzr cvs darcs git git-2 mercurial subversion tla
15394 +includes = bzr cvs darcs git git-2 git-r3 mercurial subversion tla
15395
15396 # Installed packages that own files inside /lib/modules.
15397 [module-rebuild]
15398
15399 diff --git a/doc/config/sets.docbook b/doc/config/sets.docbook
15400 index f7eea77..8f74412 100644
15401 --- a/doc/config/sets.docbook
15402 +++ b/doc/config/sets.docbook
15403 @@ -17,9 +17,8 @@
15404 <varname>system</varname> or <varname>security</varname>.
15405 <!-- TODO: Add reference to currently non-existing documentation about
15406 set usage and default sets -->
15407 - After that it will read repository specific configurations from
15408 - <envar>PORTDIR</envar> and <envar>PORTDIR_OVERLAY</envar> that might
15409 - include definitions of sets included in the repository.
15410 + After that it will read configurations located in repositories
15411 + configured in <filename>repos.conf</filename>.
15412 Finally a system-specific set configuration may reside in
15413 <filename>/etc/portage</filename> to either define additional sets or
15414 alter the default and repository sets.
15415
15416 diff --git a/doc/package/ebuild.docbook b/doc/package/ebuild.docbook
15417 index ba146ca..c3b6cac 100644
15418 --- a/doc/package/ebuild.docbook
15419 +++ b/doc/package/ebuild.docbook
15420 @@ -11,5 +11,8 @@
15421 &package_ebuild_eapi_4;
15422 &package_ebuild_eapi_4_python;
15423 &package_ebuild_eapi_4_slot_abi;
15424 +&package_ebuild_eapi_5;
15425 +&package_ebuild_eapi_5_progress;
15426 +&package_ebuild_eapi_5_hdepend;
15427 </section>
15428 </chapter>
15429
15430 diff --git a/doc/package/ebuild/eapi/4-python.docbook b/doc/package/ebuild/eapi/4-python.docbook
15431 index ec5fd83..a61ac05 100644
15432 --- a/doc/package/ebuild/eapi/4-python.docbook
15433 +++ b/doc/package/ebuild/eapi/4-python.docbook
15434 @@ -19,7 +19,6 @@
15435 <listitem><para>docompress</para></listitem>
15436 <listitem><para>exeopts</para></listitem>
15437 <listitem><para>insopts</para></listitem>
15438 - <listitem><para>keepdir</para></listitem>
15439 <listitem><para>libopts</para></listitem>
15440 <listitem><para>use</para></listitem>
15441 <listitem><para>use_enable</para></listitem>
15442 @@ -97,7 +96,7 @@
15443 <section id='package-ebuild-eapi-4-python-repo-level-config'>
15444 <title>Extended Repository-Level Configuration</title>
15445 <para>
15446 - Repository-level configuration in ${repository}/profiles is supported for the following files:
15447 + Repository-level configuration in ${repository_path}/profiles is supported for the following files:
15448 <itemizedlist>
15449 <listitem><para>make.defaults</para></listitem>
15450 <listitem><para>package.use</para></listitem>
15451 @@ -107,8 +106,11 @@
15452 <listitem><para>use.mask</para></listitem>
15453 </itemizedlist>
15454 </para>
15455 + </section>
15456 + <section id='package-ebuild-eapi-4-python-directories'>
15457 + <title>Directories Allowed for Profile-Level and Repository-Level Configuration</title>
15458 <para>
15459 - By default, the following files in ${repository}/profiles can be also directories:
15460 + The following files can be directories:
15461 <itemizedlist>
15462 <listitem><para>package.mask</para></listitem>
15463 <listitem><para>package.use</para></listitem>
15464 @@ -119,4 +121,40 @@
15465 </itemizedlist>
15466 </para>
15467 </section>
15468 + <section id='package-ebuild-eapi-4-python-use-aliases'>
15469 + <title>USE Flag Aliases</title>
15470 + <para>
15471 + USE flag aliases make it possible to satisfy dependencies of packages from other repositories that require differently named USE flags. USE flag aliases are defined in the ${repository_path}/profiles/use.aliases and ${repository_path}/profiles/package.use.aliases files.
15472 + </para>
15473 + <table><title>use.aliases Example</title>
15474 + <tgroup cols='1' align='left'>
15475 + <tbody>
15476 + <row>
15477 + <entry>real_flag1 alias1 alias2</entry>
15478 + </row>
15479 + <row>
15480 + <entry>real_flag2 alias3 alias4</entry>
15481 + </row>
15482 + </tbody>
15483 + </tgroup>
15484 + </table>
15485 + <table><title>package.use.aliases Example</title>
15486 + <tgroup cols='1' align='left'>
15487 + <tbody>
15488 + <row>
15489 + <entry>category/package1 real_flag1 alias1 alias2</entry>
15490 + </row>
15491 + <row>
15492 + <entry>category/package1 real_flag2 alias3 alias4</entry>
15493 + </row>
15494 + <row>
15495 + <entry>=category/package2-1* real_flag3 alias5 alias6</entry>
15496 + </row>
15497 + <row>
15498 + <entry>=category/package2-2* real_flag4 alias5 alias6</entry>
15499 + </row>
15500 + </tbody>
15501 + </tgroup>
15502 + </table>
15503 + </section>
15504 </section>
15505
15506 diff --git a/doc/package/ebuild/eapi/4-slot-abi.docbook b/doc/package/ebuild/eapi/4-slot-abi.docbook
15507 index 696d0bf..08e2cef 100644
15508 --- a/doc/package/ebuild/eapi/4-slot-abi.docbook
15509 +++ b/doc/package/ebuild/eapi/4-slot-abi.docbook
15510 @@ -28,12 +28,12 @@ Refer to the
15511 </para>
15512 </section>
15513 <section id='package-ebuild-eapi-4-slot-abi-metadata-dependency-atom-slot-abi-equal-operator'>
15514 -<title>Dependency Atom SLOT/ABI := Operator</title>
15515 +<title>Dependency Atom slot/sub-slot := Operator</title>
15516 <para>
15517 -Dependency atom syntax now supports SLOT/ABI := operators which allow the
15518 -specific SLOT/ABI that a package is built against to be recorded, so that it's
15519 +Dependency atom syntax now supports slot/sub-slot := operators which allow the
15520 +specific slot/sub-slot that a package is built against to be recorded, so that it's
15521 possible to automatically determine when a package needs to be rebuilt due to
15522 -having a dependency upgraded to a different SLOT/ABI.
15523 +having a dependency upgraded to a different slot/sub-slot.
15524 </para>
15525 <para>
15526 For example, if a package is built
15527 @@ -50,13 +50,13 @@ not contain a sub-slot part, the sub-slot is considered to be implicitly equal
15528 to "4.8".
15529 </para>
15530 <para>
15531 -When dependencies are rewritten as described above, the SLOT/ABI recorded in
15532 +When dependencies are rewritten as described above, the slot/sub-slot recorded in
15533 the atom is always equal to that of the highest matched version that is
15534 installed at build time.
15535 </para>
15536 </section>
15537 <section id='package-ebuild-eapi-4-slot-abi-metadata-dependency-atom-slot-abi-asterisk-operator'>
15538 -<title>Dependency Atom SLOT/ABI :* Operator</title>
15539 +<title>Dependency Atom slot/sub-slot :* Operator</title>
15540 <para>
15541 The new :* operator is used to express dependencies that can change versions
15542 at runtime without requiring reverse dependencies to be rebuilt. For example,
15543
15544 diff --git a/doc/package/ebuild/eapi/5-hdepend.docbook b/doc/package/ebuild/eapi/5-hdepend.docbook
15545 new file mode 100644
15546 index 0000000..0f568bc
15547 --- /dev/null
15548 +++ b/doc/package/ebuild/eapi/5-hdepend.docbook
15549 @@ -0,0 +1,32 @@
15550 +<section id='package-ebuild-eapi-5-hdepend'>
15551 +<title>EAPI 5-hdepend</title>
15552 +<section id='package-ebuild-eapi-5-hdepend-metadata'>
15553 +<title>Metadata</title>
15554 +<section id='package-ebuild-eapi-5-hdepend-metadata-dependencies'>
15555 +<title>Dependencies</title>
15556 +<section id='package-ebuild-eapi-5-hdepend-metadata-dependencies-hdepend'>
15557 +<title>HDEPEND</title>
15558 +<para>
15559 +The HDEPEND variable is used to represent build-time host dependencies. For
15560 +build-time target dependencies, use DEPEND (if the host is the target then both
15561 +HDEPEND and DEPEND will be installed on it). For EAPIs that support HDEPEND,
15562 +the emerge --root-deps option will have no effect, since it is not needed
15563 +when build-time dependencies are correctly specified with HDEPEND and DEPEND.
15564 +If ebuilds using EAPIs which <emphasis>do not</emphasis> support HDEPEND are
15565 +built in the same emerge run as those using EAPIs which <emphasis>do</emphasis>
15566 +support HDEPEND, the emerge --root-deps option will only apply to the former.
15567 +</para>
15568 +</section>
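A minimal sketch of how the split might look in an ebuild using this EAPI (package names are placeholders):

    EAPI="5-hdepend"

    # Tools that must run on the build host:
    HDEPEND="virtual/pkgconfig
        dev-util/cmake"
    # Libraries built for and linked into the target ROOT:
    DEPEND="dev-libs/openssl:="
    RDEPEND="${DEPEND}"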
15569 +<section id='package-ebuild-eapi-5-hdepend-metadata-dependencies-targetroot'>
15570 +<title>Special "targetroot" USE flag</title>
15571 +<para>
15572 +The special "targetroot" USE flag will be automatically enabled for packages
15573 +that are built for installation into a target ROOT, and will otherwise be
15574 +automatically disabled. This flag may be used to control conditional
15575 +dependencies, and ebuilds that use this flag need to add it to IUSE unless it
15576 +happens to be included in the profile's IUSE_IMPLICIT variable.
15577 +</para>
15578 +</section>
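For instance (the package is hypothetical), a dependency needed only when building for a target ROOT could be expressed as:

    IUSE="targetroot"
    DEPEND="targetroot? ( dev-lang/perl )"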
15579 +</section>
15580 +</section>
15581 +</section>
15582
15583 diff --git a/doc/package/ebuild/eapi/5-progress.docbook b/doc/package/ebuild/eapi/5-progress.docbook
15584 new file mode 100644
15585 index 0000000..6493d7e
15586 --- /dev/null
15587 +++ b/doc/package/ebuild/eapi/5-progress.docbook
15588 @@ -0,0 +1,247 @@
15589 +<section id='package-ebuild-eapi-5-progress'>
15590 + <title>EAPI 5-progress</title>
15591 + <para>
15592 + Also see the <ulink url="http://people.apache.org/~Arfrever/EAPI_5-progress_Specification">official EAPI 5-progress Specification</ulink>.
15593 + </para>
15594 + <section id='package-ebuild-eapi-5-progress-helpers'>
15595 + <title>Helpers</title>
15596 + <section id='package-ebuild-eapi-5-progress-helpers-master-repositories'>
15597 + <title>master_repositories</title>
15598 + <para>
15599 + The new master_repositories function prints a space-separated list of master repositories for the specified repository.
15600 + </para>
15601 + </section>
15602 + <section id='package-ebuild-eapi-5-progress-helpers-repository-path'>
15603 + <title>repository_path</title>
15604 + <para>
15605 + The new repository_path function prints the path to the specified repository.
15606 + </para>
15607 + </section>
15608 + <section id='package-ebuild-eapi-5-progress-helpers-available-eclasses'>
15609 + <title>available_eclasses</title>
15610 + <para>
15611 + The new available_eclasses function prints a space-separated list of eclasses available in the current repository.
15612 + </para>
15613 + </section>
15614 + <section id='package-ebuild-eapi-5-progress-helpers-eclass-path'>
15615 + <title>eclass_path</title>
15616 + <para>
15617 + The new eclass_path function prints the path to the specified eclass in the current repository.
15618 + </para>
15619 + </section>
15620 + <section id='package-ebuild-eapi-5-progress-helpers-license-path'>
15621 + <title>license_path</title>
15622 + <para>
15623 + The new license_path function prints the path to the specified license in the current repository.
15624 + </para>
15625 + </section>
15626 + <section id='package-ebuild-eapi-5-progress-helpers-package-manager-build-user'>
15627 + <title>package_manager_build_user</title>
15628 + <para>
15629 + The new package_manager_build_user function prints the name of the user used by the package manager in build phases.
15630 + </para>
15631 + </section>
15632 + <section id='package-ebuild-eapi-5-progress-helpers-package-manager-build-group'>
15633 + <title>package_manager_build_group</title>
15634 + <para>
15635 + The new package_manager_build_group function prints the name of the group used by the package manager in build phases.
15636 + </para>
15637 + </section>
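A sketch of how these query helpers might be used from an ebuild phase, assuming they behave exactly as described above (the REPOSITORY variable is introduced later in this document):

    pkg_setup() {
        einfo "Ebuild repository: ${REPOSITORY} ($(repository_path "${REPOSITORY}"))"
        einfo "Masters: $(master_repositories "${REPOSITORY}")"
        einfo "Build user/group: $(package_manager_build_user):$(package_manager_build_group)"
    }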
15638 + <section id='package-ebuild-eapi-5-progress-helpers-dohtml-extended-default-list-of-extensions'>
15639 + <title>Extended default list of extensions in dohtml</title>
15640 + <para>
15641 + dohtml by default additionally installs files with .ico, .svg, .xhtml and .xml extensions.
15642 + </para>
15643 + </section>
15644 + <section id='package-ebuild-eapi-5-progress-helpers-unpack-case-insensitive'>
15645 + <title>Case-insensitive matching of extensions in unpack</title>
15646 + <para>
15647 + unpack matches extensions case-insensitively.
15648 + </para>
15649 + </section>
15650 + <section id='package-ebuild-eapi-5-progress-helpers-banned-in-global-scope'>
15651 + <title>Helpers Banned in Global Scope</title>
15652 + <para>
15653 + <itemizedlist>
15654 + <listitem><para>diropts</para></listitem>
15655 + <listitem><para>docompress</para></listitem>
15656 + <listitem><para>exeopts</para></listitem>
15657 + <listitem><para>insopts</para></listitem>
15658 + <listitem><para>libopts</para></listitem>
15659 + <listitem><para>use</para></listitem>
15660 + <listitem><para>use_enable</para></listitem>
15661 + <listitem><para>use_with</para></listitem>
15662 + <listitem><para>useq</para></listitem>
15663 + <listitem><para>usev</para></listitem>
15664 + <listitem><para>usex</para></listitem>
15665 + </itemizedlist>
15666 + </para>
15667 + </section>
15668 + </section>
15669 + <section id='package-ebuild-eapi-5-progress-metadata'>
15670 + <title>Metadata</title>
15671 + <section id='package-ebuild-eapi-5-progress-metadata-package-names-allow-period-characters'>
15672 + <title>Support for Period Characters in Package Names</title>
15673 + <para>
15674 + The "." character is allowed in package names.
15675 + </para>
15676 + </section>
15677 + <section id='package-ebuild-eapi-5-progress-metadata-use-flags-allow-period-characters'>
15678 + <title>Support for Period Characters in USE Flags</title>
15679 + <para>
15680 + The "." character is allowed in USE flags.
15681 + </para>
15682 + </section>
15683 + <section id='package-ebuild-eapi-5-progress-metadata-repository-dependencies'>
15684 + <title>Repository Dependencies</title>
15685 + <para>
15686 + Repository dependencies are supported in atoms in DEPEND, PDEPEND and RDEPEND, and in atoms passed to the best_version and has_version functions.
15687 + A repository dependency is specified by two colons followed by the repository name.
15688 + </para>
15689 + <table><title>Repository Dependency Examples</title>
15690 + <tgroup cols='1' align='left'>
15691 + <colspec colname='atom'/>
15692 + <thead>
15693 + <row>
15694 + <entry>Atom</entry>
15695 + </row>
15696 + </thead>
15697 + <tbody>
15698 + <row>
15699 + <entry>dev-lang/python::progress</entry>
15700 + </row>
15701 + <row>
15702 + <entry>&gt;=dev-lang/python-3.2::progress</entry>
15703 + </row>
15704 + <row>
15705 + <entry>dev-lang/python:3.2::progress</entry>
15706 + </row>
15707 + <row>
15708 + <entry>dev-lang/python::progress[xml]</entry>
15709 + </row>
15710 + <row>
15711 + <entry>dev-lang/python:3.2::progress[xml]</entry>
15712 + </row>
15713 + </tbody>
15714 + </tgroup>
15715 + </table>
15716 + </section>
15717 + <section id='package-ebuild-eapi-5-progress-metadata-automatic-unpack-dependencies'>
15718 + <title>Automatic Unpack Dependencies</title>
15719 + <para>
15720 + Dependencies on packages required to unpack archives specified in SRC_URI are automatically appended to DEPEND. These dependencies are calculated from the filename extensions of the archives specified in SRC_URI. The dependencies corresponding to given filename extensions (for ebuilds using a given EAPI) are configured in ${repository_path}/profiles/unpack_dependencies/${EAPI} files.
15721 + </para>
15722 + <table><title>Unpack Dependencies Configuration Examples</title>
15723 + <tgroup cols='1' align='left'>
15724 + <tbody>
15725 + <row>
15726 + <entry>bz2 app-arch/bzip2</entry>
15727 + </row>
15728 + <row>
15729 + <entry>gz app-arch/gzip</entry>
15730 + </row>
15731 + <row>
15732 + <entry>tar app-arch/tar</entry>
15733 + </row>
15734 + <row>
15735 + <entry>tar.bz2 app-arch/tar app-arch/bzip2</entry>
15736 + </row>
15737 + <row>
15738 + <entry>tar.gz app-arch/tar app-arch/gzip</entry>
15739 + </row>
15740 + <row>
15741 + <entry>zip app-arch/unzip</entry>
15742 + </row>
15743 + </tbody>
15744 + </tgroup>
15745 + </table>
15746 + </section>
15747 + </section>
15748 + <section id='package-ebuild-eapi-5-progress-globstar'>
15749 + <title>globstar shell option enabled by default</title>
15750 + <para>
15751 + The globstar shell option is enabled by default, which enables recursive expansion of the ** pattern in pathname expansion contexts.
15752 + </para>
15753 + </section>
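With globstar enabled, a recursive glob can replace an explicit find; a hypothetical install fragment:

    src_install() {
        local f
        for f in "${S}"/docs/**/*.txt; do
            dodoc "${f}"
        done
    }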
15754 + <section id='package-ebuild-eapi-5-progress-variables'>
15755 + <title>Variables</title>
15756 + <section id='package-ebuild-eapi-5-progress-variables-repository'>
15757 + <title>REPOSITORY Variable</title>
15758 + <para>
15759 + The new REPOSITORY variable is set in the ebuild environment. It contains the name of the repository that provides the currently used ebuild.
15760 + </para>
15761 + </section>
15762 + </section>
15763 + <section id='package-ebuild-eapi-5-progress-repo-level-config'>
15764 + <title>Extended Repository-Level Configuration</title>
15765 + <para>
15766 + Repository-level configuration in ${repository_path}/profiles is supported for the following files:
15767 + <itemizedlist>
15768 + <listitem><para>make.defaults</para></listitem>
15769 + <listitem><para>package.use</para></listitem>
15770 + <listitem><para>package.use.force</para></listitem>
15771 + <listitem><para>package.use.mask</para></listitem>
15772 + <listitem><para>package.use.stable.force</para></listitem>
15773 + <listitem><para>package.use.stable.mask</para></listitem>
15774 + <listitem><para>use.force</para></listitem>
15775 + <listitem><para>use.mask</para></listitem>
15776 + <listitem><para>use.stable.force</para></listitem>
15777 + <listitem><para>use.stable.mask</para></listitem>
15778 + </itemizedlist>
15779 + </para>
15780 + </section>
15781 + <section id='package-ebuild-eapi-5-progress-directories'>
15782 + <title>Directories Allowed for Profile-Level and Repository-Level Configuration</title>
15783 + <para>
15784 + The following files can be directories:
15785 + <itemizedlist>
15786 + <listitem><para>package.mask</para></listitem>
15787 + <listitem><para>package.use</para></listitem>
15788 + <listitem><para>package.use.force</para></listitem>
15789 + <listitem><para>package.use.mask</para></listitem>
15790 + <listitem><para>package.use.stable.force</para></listitem>
15791 + <listitem><para>package.use.stable.mask</para></listitem>
15792 + <listitem><para>use.force</para></listitem>
15793 + <listitem><para>use.mask</para></listitem>
15794 + <listitem><para>use.stable.force</para></listitem>
15795 + <listitem><para>use.stable.mask</para></listitem>
15796 + </itemizedlist>
15797 + </para>
15798 + </section>
15799 + <section id='package-ebuild-eapi-5-progress-use-aliases'>
15800 + <title>USE Flag Aliases</title>
15801 + <para>
15802 + USE flag aliases make it possible to satisfy dependencies of packages from other repositories that require differently named USE flags. USE flag aliases are defined in the ${repository_path}/profiles/use.aliases and ${repository_path}/profiles/package.use.aliases files.
15803 + </para>
15804 + <table><title>use.aliases Example</title>
15805 + <tgroup cols='1' align='left'>
15806 + <tbody>
15807 + <row>
15808 + <entry>real_flag1 alias1 alias2</entry>
15809 + </row>
15810 + <row>
15811 + <entry>real_flag2 alias3 alias4</entry>
15812 + </row>
15813 + </tbody>
15814 + </tgroup>
15815 + </table>
15816 + <table><title>package.use.aliases Example</title>
15817 + <tgroup cols='1' align='left'>
15818 + <tbody>
15819 + <row>
15820 + <entry>category/package1 real_flag1 alias1 alias2</entry>
15821 + </row>
15822 + <row>
15823 + <entry>category/package1 real_flag2 alias3 alias4</entry>
15824 + </row>
15825 + <row>
15826 + <entry>=category/package2-1* real_flag3 alias5 alias6</entry>
15827 + </row>
15828 + <row>
15829 + <entry>=category/package2-2* real_flag4 alias5 alias6</entry>
15830 + </row>
15831 + </tbody>
15832 + </tgroup>
15833 + </table>
15834 + </section>
15835 +</section>
15836
15837 diff --git a/doc/package/ebuild/eapi/5.docbook b/doc/package/ebuild/eapi/5.docbook
15838 new file mode 100644
15839 index 0000000..376262e
15840 --- /dev/null
15841 +++ b/doc/package/ebuild/eapi/5.docbook
15842 @@ -0,0 +1,232 @@
15843 +<section id='package-ebuild-eapi-5'>
15844 +<title>EAPI 5</title>
15845 +<section id='package-ebuild-eapi-5-metadata'>
15846 +<title>Metadata</title>
15847 +<section id='package-ebuild-eapi-5-metadata-required-use-at-most-one-of'>
15848 +<title>REQUIRED_USE supports new at-most-one-of operator</title>
15849 +<para>
15850 +The new at-most-one-of operator consists of the string '??',
15851 +and is satisfied if zero or one (but no more) of its child
15852 +elements is matched.
15853 +</para>
15854 +</section>
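For example (the flag names are illustrative), to allow at most one GUI toolkit to be selected:

    REQUIRED_USE="?? ( gtk qt4 qt5 )"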
15855 +<section id='package-ebuild-eapi-5-metadata-slot-sub-slot'>
15856 +<title>SLOT supports optional "sub-slot" part</title>
15857 +<para>
15858 +The SLOT variable may contain an optional sub-slot part that
15859 +follows the regular slot and is delimited by a / character.
15860 +The sub-slot must be a valid slot name. The sub-slot is used
15861 +to represent cases in which an upgrade to a new version of a
15862 +package with a different sub-slot may require dependent
15863 +packages to be rebuilt. When the sub-slot part is omitted from
15864 +the SLOT definition, the package is considered to have an
15865 +implicit sub-slot which is equal to the regular slot.
15866 +</para>
15867 +<para>
15868 +Refer to the
15869 +<link linkend="package-ebuild-eapi-5-metadata-dependency-atom-slot-operators">
15870 +slot operators</link> documentation for more information about sub-slot usage.
15871 +</para>
15872 +</section>
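A library ebuild might encode its ABI version in the sub-slot, e.g. (the values are illustrative):

    # Regular slot "0", sub-slot "49"; consumers depending with := are
    # scheduled for rebuild when the sub-slot changes.
    SLOT="0/49"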
15873 +<section id='package-ebuild-eapi-5-metadata-dependency-atom-slot-operators'>
15874 +<title>Dependency atom slot operators</title>
15875 +<para>
15876 +A slot dependency may contain an optional sub-slot part that
15877 +follows the regular slot and is delimited by a / character.
15878 +An operator slot dependency consists of a colon followed by
15879 +one of the following operators:
15880 +<itemizedlist>
15881 +<listitem><para>
15882 +* Indicates that any slot value is acceptable. In addition,
15883 +for runtime dependencies, indicates that the package will not
15884 +break if the matched package is uninstalled and replaced by
15885 +a different matching package in a different slot.
15886 +</para></listitem>
15887 +<listitem><para>
15888 += Indicates that any slot value is acceptable. In addition,
15889 +for runtime dependencies, indicates that the package will
15890 +break unless a matching package with slot and sub-slot equal
15891 +to the slot and sub-slot of the best installed version at the
15892 +time the package was installed is available.
15893 +</para></listitem>
15894 +<listitem><para>
15895 +slot= Indicates that only a specific slot value is acceptable,
15896 +and otherwise behaves identically to the plain equals slot
15897 +operator.
15898 +</para></listitem>
15899 +</itemizedlist>
15900 +</para>
15901 +<para>
15902 +To implement the equals slot operator, the package manager
15903 +will need to store the slot/sub-slot pair of the best installed
15904 +version of the matching package. This syntax is only for package
15905 +manager use and must not be used by ebuilds. The package manager
15906 +may do this by inserting the appropriate slot/sub-slot pair
15907 +between the colon and equals sign when saving the package's
15908 +dependencies. The sub-slot part must not be omitted here
15909 +(when the SLOT variable omits the sub-slot part, the package
15910 +is considered to have an implicit sub-slot which is equal to
15911 +the regular slot).
15912 +</para>
15913 +</section>
15914 +</section>
15915 +<section id='package-ebuild-eapi-5-profile'>
15916 +<title>Profiles</title>
15917 +<section id='package-ebuild-eapi-5-profile-iuse-injection'>
15918 +<title>Profile IUSE Injection</title>
15919 +<para>
15920 +IUSE_EFFECTIVE is a variable calculated from IUSE and
15921 +a variety of other sources described below. It is purely
15922 +a conceptual variable; it is not exported to the ebuild
15923 +environment. Values in IUSE_EFFECTIVE may legally be
15924 +used in queries about an ebuild's state (for example, for use
15925 +dependencies, for the use function, and for use in dependency
15926 +specification conditional blocks).
15927 +</para>
15928 +<para>
15929 +For EAPIs that support profile defined IUSE injection, IUSE_EFFECTIVE
15930 +contains the following values:
15931 +<itemizedlist>
15932 +<listitem><para>
15933 +All values in the calculated IUSE value.
15934 +</para></listitem>
15935 +<listitem><para>
15936 +All values in the profile IUSE_IMPLICIT variable.
15937 +</para></listitem>
15938 +<listitem><para>
15939 +All values in the profile variable named USE_EXPAND_VALUES_${v},
15940 +where ${v} is any value in the intersection of the profile
15941 +USE_EXPAND_UNPREFIXED and USE_EXPAND_IMPLICIT variables.
15942 +</para></listitem>
15943 +<listitem><para>
15944 +All values for ${lower_v}_${x}, where ${x} is all values in
15945 +the profile variable named USE_EXPAND_VALUES_${v}, where ${v}
15946 +is any value in the intersection of the profile USE_EXPAND and
15947 +USE_EXPAND_IMPLICIT variables and ${lower_v} is the lowercase
15948 +equivalent of ${v}.
15949 +</para></listitem>
15950 +</itemizedlist>
15951 +</para>
15952 +<para>
15953 +<table><title>Example Variable Settings</title>
15954 +<tgroup cols='2' align='left' >
15955 +<colspec colname='source'/>
15956 +<colspec colname='destination'/>
15957 +<thead>
15958 +<row>
15959 +<entry>Variable</entry>
15960 +<entry>Value</entry>
15961 +</row>
15962 +</thead>
15963 +<tbody>
15964 +<row>
15965 +<entry>IUSE_IMPLICIT</entry>
15966 +<entry>prefix selinux</entry>
15967 +</row>
15968 +<row>
15969 +<entry>USE_EXPAND</entry>
15970 +<entry>ELIBC KERNEL USERLAND</entry>
15971 +</row>
15972 +<row>
15973 +<entry>USE_EXPAND_UNPREFIXED</entry>
15974 +<entry>ARCH</entry>
15975 +</row>
15976 +<row>
15977 +<entry>USE_EXPAND_IMPLICIT</entry>
15978 +<entry>ARCH ELIBC KERNEL USERLAND</entry>
15979 +</row>
15980 +<row>
15981 +<entry>USE_EXPAND_VALUES_ARCH</entry>
15982 +<entry>amd64 ppc ppc64 x86 x86-fbsd x86-solaris</entry>
15983 +</row>
15984 +<row>
15985 +<entry>USE_EXPAND_VALUES_ELIBC</entry>
15986 +<entry>FreeBSD glibc</entry>
15987 +</row>
15988 +<row>
15989 +<entry>USE_EXPAND_VALUES_KERNEL</entry>
15990 +<entry>FreeBSD linux SunOS</entry>
15991 +</row>
15992 +<row>
15993 +<entry>USE_EXPAND_VALUES_USERLAND</entry>
15994 +<entry>BSD GNU</entry>
15995 +</row>
15996 +</tbody>
15997 +</tgroup>
15998 +</table>
15999 +</para>
16000 +</section>
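For instance, with the example settings above and a hypothetical ebuild whose IUSE is "doc", IUSE_EFFECTIVE would contain doc, prefix, selinux, the unprefixed ARCH values (amd64 ppc ppc64 x86 x86-fbsd x86-solaris), and the prefixed expansions elibc_FreeBSD, elibc_glibc, kernel_FreeBSD, kernel_linux, kernel_SunOS, userland_BSD and userland_GNU.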
16001 +<section id='package-ebuild-eapi-5-profile-stable-use-masking'>
16002 +<title>Profile stable USE forcing and masking</title>
16003 +<para>
16004 +In profile directories with an EAPI supporting stable masking,
16005 +new USE configuration files are supported: use.stable.mask,
16006 +use.stable.force, package.use.stable.mask and
16007 +package.use.stable.force. These files behave similarly to
16008 +previously supported USE configuration files, except that they
16009 +only influence packages that are merged due to a stable keyword.
16010 +</para>
16011 +</section>
16012 +</section>
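A profile could, for example, forbid a flag only for users running stable keywords; a sketch with hypothetical flag and package names:

    # use.stable.mask
    systemd

    # package.use.stable.mask
    dev-db/postgresql uuid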
16013 +<section id='package-ebuild-eapi-5-helpers'>
16014 +<title>Helpers</title>
16015 +<section id='package-ebuild-eapi-5-helpers-econf-disable-silent-rules'>
16016 +<title>econf adds --disable-silent-rules</title>
16017 +<para>
16018 +This option will automatically be passed if
16019 +--disable-silent-rules occurs in the output of configure --help.
16020 +</para>
16021 +</section>
16022 +<section id='package-ebuild-eapi-5-helpers-newfoo-stdin'>
16023 +<title>new* commands can read from standard input</title>
16024 +<para>
16025 +Standard input is read when the first parameter is - (a hyphen).
16026 +</para>
16027 +</section>
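For example, a small conf.d file can be generated inline and installed without a temporary file (the name and contents are illustrative):

    printf 'SOME_OPTION="yes"\n' | newconfd - foo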
16028 +<section id='package-ebuild-eapi-5-helpers-foo-version-host-root'>
16029 +<title>New option --host-root for {has,best}_version</title>
16030 +<para>
16031 +The --host-root option causes the query to apply to the
16032 +host root instead of ROOT.
16033 +</para>
16034 +</section>
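A cross-build ebuild could use this to test for a tool on the build host rather than inside ROOT; a sketch with an example atom:

    if has_version --host-root ">=dev-lang/python-2.7"; then
        einfo "Host Python is recent enough"
    fi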
16035 +<section id='package-ebuild-eapi-5-helpers-doheader'>
16036 +<title>New doheader helper function</title>
16037 +<para>
16038 +Installs the given header files into /usr/include/, by default
16039 +with file mode 0644. This can be overridden by setting
16040 +INSOPTIONS with the insopts function.
16041 +</para>
16042 +</section>
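A minimal usage sketch (file names and mode are illustrative):

    src_install() {
        doheader mylib.h
        insopts -m0600        # subsequent installs use this mode
        doheader mylib_private.h
    }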
16043 +<section id='package-ebuild-eapi-5-helpers-usex'>
16044 +<title>New usex helper function</title>
16045 +<programlisting>
16046 +USAGE: usex &lt;USE flag&gt; [true output] [false output] [true suffix] [false suffix]
16047 +DESCRIPTION:
16048 + If USE flag is set, echo [true output][true suffix] (defaults to "yes"),
16049 + otherwise echo [false output][false suffix] (defaults to "no").
16050 +</programlisting>
16051 +</section>
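For instance (the flag name is an example), usex shortens the usual use-conditional idiom:

    econf $(usex debug --enable-debug --disable-debug)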
16052 +</section>
16053 +<section id='package-ebuild-eapi-5-phases'>
16054 +<title>Phases</title>
16055 +<section id='package-ebuild-eapi-5-phases-src-test-parallel'>
16056 +<title>src_test supports parallel tests</title>
16057 +<para>
16058 +Unlike older EAPIs, the default src_test implementation will not
16059 +pass the -j1 option to emake.
16060 +</para>
16061 +</section>
16062 +</section>
16063 +<section id='package-ebuild-eapi-5-ebuild-environment-variables'>
16064 +<title>Ebuild Environment Variables</title>
16065 +<section id='package-ebuild-eapi-5-ebuild-environment-variables-ebuild-phase-func'>
16066 +<title>New EBUILD_PHASE_FUNC variable</title>
16067 +<para>
16068 +During execution of an ebuild phase function (such as pkg_setup
16069 +or src_unpack), the EBUILD_PHASE_FUNC variable will contain the
16070 +name of the phase function that is currently executing.
16071 +</para>
16072 +</section>
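A function shared between phases could branch on it, e.g. (the logic is illustrative):

    if [[ ${EBUILD_PHASE_FUNC} == pkg_postinst ]]; then
        elog "Run foo-update to finish configuration."
    fi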
16073 +</section>
16074 +</section>
16075
16076 diff --git a/doc/portage.docbook b/doc/portage.docbook
16077 index 781915c..811544f 100644
16078 --- a/doc/portage.docbook
16079 +++ b/doc/portage.docbook
16080 @@ -22,6 +22,9 @@
16081 <!ENTITY package_ebuild_eapi_4 SYSTEM "package/ebuild/eapi/4.docbook">
16082 <!ENTITY package_ebuild_eapi_4_python SYSTEM "package/ebuild/eapi/4-python.docbook">
16083 <!ENTITY package_ebuild_eapi_4_slot_abi SYSTEM "package/ebuild/eapi/4-slot-abi.docbook">
16084 + <!ENTITY package_ebuild_eapi_5 SYSTEM "package/ebuild/eapi/5.docbook">
16085 + <!ENTITY package_ebuild_eapi_5_progress SYSTEM "package/ebuild/eapi/5-progress.docbook">
16086 + <!ENTITY package_ebuild_eapi_5_hdepend SYSTEM "package/ebuild/eapi/5-hdepend.docbook">
16087 <!ENTITY qa SYSTEM "qa.docbook">
16088 <!ENTITY config SYSTEM "config.docbook">
16089 <!ENTITY config_bashrc SYSTEM "config/bashrc.docbook">
16090
16091 diff --git a/doc/qa.docbook b/doc/qa.docbook
16092 index b9ec375..d0986e2 100644
16093 --- a/doc/qa.docbook
16094 +++ b/doc/qa.docbook
16095 @@ -70,7 +70,7 @@
16096 </programlisting>
16097 </para>
16098 <para>
16099 - Please see the Gentoo Hardened <ulink url="http://hardened.gentoo.org/gnu-stack.xml">GNU Stack Guide</ulink>.
16100 + Please see the Gentoo Hardened <ulink url="http://www.gentoo.org/proj/en/hardened/gnu-stack.xml">GNU Stack Guide</ulink>.
16101 </para>
16102 </sect1>
16103
16104
16105 diff --git a/make.conf-repatch.sh b/make.conf-repatch.sh
16106 deleted file mode 100644
16107 index 6589e6b..0000000
16108 --- a/make.conf-repatch.sh
16109 +++ /dev/null
16110 @@ -1,40 +0,0 @@
16111 -#!/bin/bash
16112 -
16113 -die() {
16114 - echo "ERROR: $*" > /dev/stderr
16115 - patch -p0 make.conf < make.conf.diff
16116 - exit 1
16117 -}
16118 -
16119 -if [ ! -f "make.conf" -o ! -f "make.conf.x86.diff" -o ! -d ".svn" ]; then
16120 - echo "ERROR: current directory is invalid" > /dev/stderr
16121 - exit 1
16122 -fi
16123 -
16124 -svn diff make.conf > make.conf.diff
16125 -svn revert make.conf
16126 -
16127 -for x in make.conf.*.diff; do
16128 - archs="$archs $(basename ${x:10} .diff)"
16129 -done
16130 -
16131 -
16132 -for arch in $archs; do
16133 - echo "* Patching $arch"
16134 - cp make.conf make.conf.$arch || die "copy failed"
16135 - patch -p0 make.conf.$arch < make.conf.${arch}.diff > /dev/null || die "arch-patch failed"
16136 - patch -p0 make.conf.$arch < make.conf.diff > /dev/null || die "patch failed"
16137 -done
16138 -
16139 -echo "* Re-patching make.conf"
16140 -patch -p0 make.conf < make.conf.diff > /dev/null || die "repatch failed"
16141 -
16142 -for arch in $archs; do
16143 - echo "* Creating diff for $arch"
16144 - diff -u make.conf make.conf.$arch > make.conf.${arch}.diff
16145 - [ -z "${KEEP_ARCH_MAKE_CONF}" ] && rm -f make.conf.$arch make.conf.${arch}.orig
16146 -done
16147 -
16148 -rm make.conf.diff
16149 -
16150 -echo "Done"
16151 \ No newline at end of file
16152
16153 diff --git a/make.conf.example-repatch.sh b/make.conf.example-repatch.sh
16154 new file mode 100755
16155 index 0000000..c97c6f2
16156 --- /dev/null
16157 +++ b/make.conf.example-repatch.sh
16158 @@ -0,0 +1,41 @@
16159 +#!/bin/bash
16160 +
16161 +die() {
16162 + echo "ERROR: $*" > /dev/stderr
16163 + patch -p0 make.conf.example < make.conf.example.diff
16164 + exit 1
16165 +}
16166 +
16167 +if [[ ! -f make.conf.example || ! -f make.conf.example.x86.diff || ! -d ../.git ]]; then
16168 + echo "ERROR: current directory is invalid" > /dev/stderr
16169 + exit 1
16170 +fi
16171 +
16172 +git diff --no-prefix --relative="$(basename "$(pwd)")" make.conf.example > make.conf.example.diff
16173 +git checkout -- make.conf.example
16174 +
16175 +archs=()
16176 +for x in make.conf.example.*.diff; do
16177 + archs+=("$(basename ${x:18} .diff)")
16178 +done
16179 +
16180 +
16181 +for arch in "${archs[@]}"; do
16182 + echo "* Patching ${arch}"
16183 + cp make.conf.example make.conf.example.${arch} || die "copy failed"
16184 + patch -p0 make.conf.example.${arch} < make.conf.example.${arch}.diff > /dev/null || die "arch-patch failed"
16185 + patch -p0 make.conf.example.${arch} < make.conf.example.diff > /dev/null || die "patch failed"
16186 +done
16187 +
16188 +echo "* Re-patching make.conf.example"
16189 +patch -p0 make.conf.example < make.conf.example.diff > /dev/null || die "repatch failed"
16190 +
16191 +for arch in "${archs[@]}"; do
16192 + echo "* Creating diff for ${arch}"
16193 + diff -u make.conf.example make.conf.example.${arch} > make.conf.example.${arch}.diff
16194 + [[ -z ${KEEP_ARCH_MAKE_CONF_EXAMPLE} ]] && rm -f make.conf.example.${arch} make.conf.example.${arch}.orig
16195 +done
16196 +
16197 +rm make.conf.example.diff
16198 +
16199 +echo "Done"
16200
16201 diff --git a/man/color.map.5 b/man/color.map.5
16202 index ca6b17d..5543628 100644
16203 --- a/man/color.map.5
16204 +++ b/man/color.map.5
16205 @@ -1,4 +1,4 @@
16206 -.TH "COLOR.MAP" "5" "Mar 2010" "Portage VERSION" "Portage"
16207 +.TH "COLOR.MAP" "5" "Jul 2013" "Portage VERSION" "Portage"
16208 .SH "NAME"
16209 color.map \- custom color settings for Portage
16210 .SH "SYNOPSIS"
16211 @@ -9,9 +9,11 @@ Portage will check this file first for color classes settings. If no setting
16212 of given color class is found in /etc/portage/color.map, Portage uses default
16213 value defined internally.
16214 .SH "SYNTAX"
16215 -\fBVARIABLE\fR = \fI[space delimited list of attributes or ansi code pattern]\fR
16216 +\fBVARIABLE\fR = \fI[space delimited list of attributes or ansi code
16217 +pattern]\fR
16218 .TP
16219 -\fBATTRIBUTE\fR = \fI[space delimited list of attributes or ansi code pattern]\fR
16220 +\fBATTRIBUTE\fR = \fI[space delimited list of attributes or ansi code \
16221 +pattern]\fR
16222 .SH "VARIABLES"
16223 .TP
16224 \fBNORMAL\fR = \fI"normal"\fR
16225 @@ -54,10 +56,12 @@ Defines color used for world packages planned to be merged.
16226 Defines color used for packages planned to be merged using a binary package.
16227 .TP
16228 \fBPKG_BINARY_MERGE_SYSTEM\fR = \fI"purple"\fR
16229 -Defines color used for system packages planned to be merged using a binary package.
16230 +Defines color used for system packages planned to be merged using a binary
16231 +package.
16232 .TP
16233 \fBPKG_BINARY_MERGE_WORLD\fR = \fI"fuchsia"\fR
16234 -Defines color used for world packages planned to be merged using a binary package.
16235 +Defines color used for world packages planned to be merged using a binary
16236 +package.
16237 .TP
16238 \fBPKG_NOMERGE\fR = \fI"darkblue"\fR
16239 Defines color used for packages not planned to be merged.
16240 @@ -185,14 +189,14 @@ Defines color used for warnings.
16241 Please report bugs via http://bugs.gentoo.org/
16242 .SH "AUTHORS"
16243 .nf
16244 -Arfrever Frehtes Taifersar Arahesis <Arfrever.FTA@×××××.com>
16245 +Arfrever Frehtes Taifersar Arahesis <arfrever@××××××.org>
16246 .fi
16247 .SH "FILES"
16248 .TP
16249 .B /etc/portage/color.map
16250 Contains variables customizing colors.
16251 .TP
16252 -.B /etc/make.conf
16253 +.B /etc/portage/make.conf
16254 Contains other variables.
16255 .SH "SEE ALSO"
16256 .BR console_codes (4),
16257
16258 diff --git a/man/dispatch-conf.1 b/man/dispatch-conf.1
16259 index b82c215..3a5264a 100644
16260 --- a/man/dispatch-conf.1
16261 +++ b/man/dispatch-conf.1
16262 @@ -1,46 +1,36 @@
16263 .TH "DISPATCH-CONF" "1" "Jan 2011" "Portage VERSION" "Portage"
16264 -.SH NAME
16265 -dispatch-conf \- Sanely update configuration files after emerging new packages
16266 -.SH SYNOPSIS
16267 -.B dispatch-conf
16268 -.SH DESCRIPTION
16269 -.I dispatch-conf
16270 -is designed to be run after merging new packages in order to see if
16271 -there are updates to the configuration files. If a new
16272 -configuration file will overwrite an old one,
16273 -.I dispatch-conf
16274 -will prompt the user for a decision about how to resolve the
16275 -discrepancy.
16276 -Advantages of
16277 -.I dispatch-conf
16278 -include easy rollback (changes to config files are stored either using
16279 -patches or rcs) and the ability to automatically update config files
16280 -that the user has never modified or
16281 +.SH "NAME"
16282 +dispatch\-conf \- Sanely update configuration files after emerging new packages
16283 +.SH "SYNOPSIS"
16284 +.B dispatch\-conf
16285 +.SH "DESCRIPTION"
16286 +\fIdispatch\-conf\fR is designed to be run after merging new packages
16287 +in order to see if there are updates to the configuration files.
16288 +If a new configuration file will overwrite an old one, \fIdispatch\-conf\fR
16289 +will prompt the user for a decision about how to resolve the discrepancy.
16290 +Advantages of \fIdispatch\-conf\fR include easy rollback (changes to config
16291 +files are stored either using patches or rcs) and the ability to
16292 +automatically update config files that the user has never modified or
16293 that differ from the current version only in CVS cruft or white space.
16294
16295 -.I dispatch-conf
16296 -will check all directories in the \fICONFIG_PROTECT\fR variable. All
16297 -config files found in \fICONFIG_PROTECT_MASK\fR will automatically be
16298 -updated for you by \fIdispatch-conf\fR. See \fBmake.conf\fR(5) for more
16299 -information.
16300 -.SH OPTIONS
16301 +\fIdispatch\-conf\fR will check all directories in the \fICONFIG_PROTECT\fR
16302 +variable. All config files found in \fICONFIG_PROTECT_MASK\fR will
16303 +automatically be updated for you by \fIdispatch\-conf\fR. See
16304 +\fBmake.conf\fR(5) for more information.
16305 +.SH "OPTIONS"
16306 .TP
16307 None.
16308 -.SH USAGE
16309 -.I dispatch-conf
16310 -must be run as root, since the config files to be replaced are generally
16311 -owned by root. Before running
16312 -.I dispatch-conf
16313 -for the first time the settings in
16314 -.B /etc/dispatch-conf.conf
16315 -should be edited and the archive directory specified in
16316 -\fI/etc/dispatch-conf.conf\fR will need to be created. All changes to
16317 +.SH "USAGE"
16318 +\fIdispatch\-conf\fR must be run as root, since the config files to be
16319 +replaced are generally owned by root. Before running \fIdispatch\-conf\fR
16320 +for the first time the settings in \fB/etc/dispatch\-conf.conf\fR
16321 +should be edited and the archive directory specified in
16322 +\fB/etc/dispatch\-conf.conf\fR will need to be created. All changes to
16323 config files will be saved in the archive directory either as patches
16324 or using rcs, making restoration to an earlier version rather simple.
16325
16326 -When dispatch-conf finds a config file that has a new update the user
16327 -is provided
16328 -with a menu of options for how to handle the update:
16329 +When \fIdispatch\-conf\fR finds a config file that has a new update the user
16330 +is provided with a menu of options for how to handle the update:
16331 .TP
16332 .B u
16333 Update (replace) the current config file with the new config file and continue.
16334 @@ -53,7 +43,7 @@ Skip to the next config file, leaving both the original config file and any
16335 \fICONFIG_PROTECT\fRed files.
16336 .TP
16337 .B e
16338 -Edit the new config file, using the editor defined in \fI$EDITOR\fR.
16339 +Edit the new config file, using the editor defined in \fIEDITOR\fR.
16340 .TP
16341 .B m
16342 Interactively merge the current and new config files.
16343 @@ -63,17 +53,14 @@ Look at the differences between the pre-merged and merged config files.
16344 .TP
16345 .B t
16346 Toggle between the merged and pre-merged config files (in terms of which
16347 -should be installed using the
16348 -.Qt u
16349 -command).
16350 +should be installed using the \fBu\fR command).
16351 .TP
16352 .B h
16353 Display a help screen.
16354 .TP
16355 .B q
16356 -Quit
16357 -.I dispatch-conf.
16358 -.SH FILE MODES
16359 +Quit \fIdispatch\-conf\fR.
16360 +.SH "FILE MODES"
16361 \fBWARNING:\fR When \fB/etc/dispatch\-conf.conf\fR is configured
16362 to use \fBrcs\fR(1), read and execute permissions of archived
16363 files may be inherited from the first check in of a working file,
16364 @@ -85,7 +72,7 @@ to RCS files by setting the permissions of the directory
16365 containing the files.
16366 .SH "REPORTING BUGS"
16367 Please report bugs via http://bugs.gentoo.org/
16368 -.SH AUTHORS
16369 +.SH "AUTHORS"
16370 .nf
16371 Jeremy Wohl
16372 Karl Trygve Kalleberg <karltk@g.o>
16373 @@ -94,8 +81,8 @@ Grant Goodyear <g2boojum@g.o>
16374 .fi
16375 .SH "FILES"
16376 .TP
16377 -.B /etc/dispatch-conf.conf
16378 -Configuration settings for \fIdispatch-conf\fR are stored here.
16379 +.B /etc/dispatch\-conf.conf
16380 +Configuration settings for \fIdispatch\-conf\fR are stored here.
16381 .SH "SEE ALSO"
16382 .BR make.conf (5),
16383 .BR ci (1),
16384
16385 diff --git a/man/ebuild.1 b/man/ebuild.1
16386 index e74779a..29f88b0 100644
16387 --- a/man/ebuild.1
16388 +++ b/man/ebuild.1
16389 @@ -1,4 +1,4 @@
16390 -.TH "EBUILD" "1" "Feb 2011" "Portage VERSION" "Portage"
16391 +.TH "EBUILD" "1" "Mar 2013" "Portage VERSION" "Portage"
16392 .SH "NAME"
16393 ebuild \- a low level interface to the Portage system
16394 .SH "SYNOPSIS"
16395 @@ -19,11 +19,15 @@ This must be a valid ebuild script. For further information read
16396 \fBebuild\fR(5).
16397 .SH "COMMANDS"
16398 By default, portage will execute all the functions in order up to the
16399 -one actually specified. For example, simply issuing the command \fBcompile\fR
16400 -will trigger the functions before it to also be run (such as \fBsetup\fR
16401 -and \fBunpack\fR). If you wish to only have the specified command run, then
16402 -you should use the \fInoauto\fR option in the \fBFEATURES\fR environment
16403 -variable. See the \fBmake.conf\fR(5) man page for more information.
16404 +one actually specified, except for the functions that have already been
16405 +executed in a previous invocation of ebuild. For example, simply issuing the
16406 +command \fBcompile\fR will trigger the functions before it to also be run (such
16407 +as \fBsetup\fR and \fBunpack\fR), unless they were run in a previous invocation
16408 +of ebuild. If you want to make sure they are all run, you need to use
16409 +the command \fBclean\fR first. If you wish to only have the specified command
16410 +run, then you should use the \fInoauto\fR option in the \fBFEATURES\fR
16411 +environment variable. See the \fBmake.conf\fR(5) man page for more
16412 +information.
16413
16414 .TP
16415 .BR help
16416 @@ -53,13 +57,13 @@ manually clean these files with \fIrm \-rf /var/tmp/portage\fR.
16417 .BR fetch
16418 Checks to see if all the sources specified in SRC_URI are available in
16419 DISTDIR (see \fBmake.conf\fR(5) for more information) and have a valid
16420 -md5 checksum. If the sources aren't available, an attempt is made to
16421 +checksum. If the sources aren't available, an attempt is made to
16422 download them from the locations specified in SRC_URI. If multiple
16423 download locations are listed for a particular file, Portage pings
16424 each location to see which location is closer. (May not be true
16425 presently.) The Gentoo Linux mirrors defined by GENTOO_MIRRORS is
16426 always considered first. If for some reason the current or
16427 -just\-downloaded sources' md5 digests don't match those recorded
16428 +just\-downloaded sources' checksums don't match those recorded
16429 in files/digest\-[package]\-[version\-rev], a warning is printed
16430 and ebuild exits with an error code of 1.
16431 .TP
16432 @@ -74,7 +78,7 @@ for all of the files listed in SRC_URI for each ebuild. For further
16433 information regarding the behavior of this command, see the documentation for
16434 the \fIassume\-digests\fR value of the \fBFEATURES\fR variable in
16435 \fBmake.conf\fR(5). See the \fB\-\-force\fR option if you would like to
16436 -prevent digests from being assumed.
16437 +prevent digests from being assumed.
16438 .TP
16439 .BR unpack
16440 Extracts the sources to a subdirectory in the \fIbuild directory\fR
16441 @@ -112,7 +116,7 @@ current working directory will be set to ${S}. When src_compile()
16442 completes, the sources should be fully compiled.
16443 .TP
16444 .BR test
16445 -Runs package-specific test cases to verify that everything was built
16446 +Runs package-specific test cases to verify that everything was built
16447 properly.
16448 .TP
16449 .BR preinst
16450 @@ -135,8 +139,8 @@ shown here.
16451 This function installs all the files in the \fIinstall directory\fR
16452 to the live filesystem. The process works as follows: first, the
16453 \fIpkg_preinst()\fR function (if specified) is run. Then, the files
16454 -are merged into the live filesystem, and the installed files' md5
16455 -digests are recorded in
16456 +are merged into the live filesystem, and the installed files'
16457 +checksums are recorded in
16458 \fI/var/db/pkg/${CATEGORY}/${PN}\-${PVR}/CONTENTS\fR. After
16459 all the files have been merged, the \fIpkg_postinst()\fR function
16460 (if specified) is executed.
16461 @@ -150,7 +154,7 @@ particular step doesn't complete successfully.
16462 .TP
16463 .BR unmerge
16464 This function first executes the \fIpkg_prerm()\fR function (if specified).
16465 -Then it removes all files from the live filesystem that have a valid md5
16466 +Then it removes all files from the live filesystem that have a valid
16467 checksum and mtime in the package contents file. Any empty directories
16468 are recursively removed. Finally, it runs \fIpkg_postrm()\fR function (if
16469 specified). It is safe to merge a new version of a package first and
16470 @@ -179,7 +183,7 @@ tarball is created and stored in \fBPKGDIR\fR (see \fBmake.conf\fR(5)).
16471 Builds a RedHat RPM package from the files in the temporary
16472 \fIinstall directory\fR. At the moment, the ebuild's dependency
16473 information is not incorporated into the RPM.
16474 -.SH OPTIONS
16475 +.SH "OPTIONS"
16476 .TP
16477 .BR "\-\-debug"
16478 Run bash with the \-x option, causing it to output verbose debugging
16479 @@ -212,7 +216,7 @@ Mike Frysinger <vapier@g.o>
16480 .fi
16481 .SH "FILES"
16482 .TP
16483 -.B /etc/make.conf
16484 +.B /etc/portage/make.conf
16485 Contains variables for the build\-process and overwrites those
16486 in make.globals.
16487 .TP
16488 @@ -224,6 +228,6 @@ Contains variables customizing colors.
16489 .BR make.conf (5),
16490 .BR color.map (5)
16491 .TP
16492 -The \fI/usr/sbin/ebuild.sh\fR script.
16493 +The \fI/usr/lib/portage/bin/ebuild.sh\fR script.
16494 .TP
16495 The helper apps in \fI/usr/lib/portage/bin\fR.
16496
16497 diff --git a/man/ebuild.5 b/man/ebuild.5
16498 index e9e718e..89bd6a2 100644
16499 --- a/man/ebuild.5
16500 +++ b/man/ebuild.5
16501 @@ -1,57 +1,373 @@
16502 -.TH "EBUILD" "5" "Dec 2011" "Portage VERSION" "Portage"
16503 +.TH "EBUILD" "5" "Jan 2014" "Portage VERSION" "Portage"
16504 +
16505 .SH "NAME"
16506 ebuild \- the internal format, variables, and functions in an ebuild script
16507 +
16508 .SH "DESCRIPTION"
16509 -The
16510 -.BR ebuild (1)
16511 -program accepts a single ebuild script as an argument. This script
16512 -contains variables and commands that specify how to download, unpack,
16513 -patch, compile, install and merge a particular software package from
16514 -its original sources. In addition to all of this, the ebuild script
16515 -can also contain pre/post install/remove commands, as required. All
16516 -ebuild scripts are written in bash.
16517 -.SH "EXAMPLES"
16518 -Here's a simple example ebuild:
16519 +The \fBebuild\fR(1) program accepts a single ebuild script as an argument.
16520 +This script contains variables and commands that specify how to download,
16521 +unpack, patch, compile, install and merge a particular software package from
16522 +its original sources. In addition to all of this, the ebuild script can also
16523 +contain pre/post install/remove commands, as required. All ebuild scripts are
16524 +written in bash.
16525
16526 -.DS
16527 +.SS "Dependencies"
16528 +A \fIdepend atom\fR is simply a dependency that is used by portage when
16529 +calculating relationships between packages. Please note that if the atom has
16530 +not already been emerged, then the latest version available is matched.
16531 +.TP
16532 +.B Atom Bases
16533 +The base atom is just a full category/packagename.
16534 +
16535 +Examples:
16536 .nf
16537 -# Copyright 1999\-2009 Gentoo Foundation
16538 -# Distributed under the terms of the GNU General Public License v2
16539 -# $Header: $
16540 +.I sys\-apps/sed
16541 +.I sys\-libs/zlib
16542 +.I net\-misc/dhcp
16543 +.fi
16544 +.TP
16545 +.B Atom Versions
16546 +It is nice to be more specific and say that only certain versions of atoms are
16547 +acceptable. Note that versions must be combined with a prefix (see below).
16548 +Hence you may add a version number as a postfix to the base.
16549
16550 -EAPI="4"
16551 +Examples:
16552 +.nf
16553 + sys\-apps/sed\fI\-4.0.5\fR
16554 + sys\-libs/zlib\fI\-1.1.4\-r1\fR
16555 + net\-misc/dhcp\fI\-3.0_p2\fR
16556 +.fi
16557
16558 -inherit some_eclass another_eclass
16559 +Versions are normally made up of two or three numbers separated by periods,
16560 +such as 1.2 or 4.5.2. This string may be followed by a single letter, as in 1.2a
16561 +or 4.5.2z. Note that this letter is \fInot\fR meant to indicate alpha, beta,
16562 +etc... status. For that, use the optional suffix; either _alpha, _beta, _pre
16563 +(pre\-release), _rc (release candidate), or _p (patch). This means for the
16564 +3rd pre\-release of a package, you would use something like 1.2_pre3. The
16565 +suffixes here can be arbitrarily chained without limitation.
16566 +.TP
16567 +.B Atom Prefix Operators [> >= = <= <]
16568 +Sometimes you want to be able to depend on general versions rather than
16569 +specifying exact versions all the time. Hence we provide standard boolean
16570 +operators:
16571
16572 -DESCRIPTION="Super\-useful stream editor (sed)"
16573 -HOMEPAGE="http://www.gnu.org/software/sed/sed.html"
16574 -SRC_URI="ftp://alpha.gnu.org/pub/gnu/${PN}/${P}.tar.gz"
16575 +Examples:
16576 +.nf
16577 + \fI>\fRmedia\-libs/libgd\-1.6
16578 + \fI>=\fRmedia\-libs/libgd\-1.6
16579 + \fI=\fRmedia\-libs/libgd\-1.6
16580 + \fI<=\fRmedia\-libs/libgd\-1.6
16581 + \fI<\fRmedia\-libs/libgd\-1.6
16582 +.fi
16583 +.TP
16584 +.B Extended Atom Prefixes [!~] and Postfixes [*]
16585 +Now to get even fancier, we provide the ability to define blocking packages and
16586 +version range matching. Also note that these extended prefixes/postfixes may
16587 +be combined in any way with the atom classes defined above.
16588 +.RS
16589 +.TP
16590 +.I ~
16591 +means match any revision of the base version specified. So in the
16592 +example below, we would match versions '1.0.2a', '1.0.2a\-r1', '1.0.2a\-r2',
16593 +etc...
16594
16595 -LICENSE="GPL\-2"
16596 -SLOT="0"
16597 -KEYWORDS="~x86"
16598 -IUSE=""
16599 +Example:
16600 +.nf
16601 + \fI~\fRnet\-libs/libnet\-1.0.2a
16602 +.fi
16603 +.TP
16604 +.I !
16605 +means block packages from being installed at the same time.
16606
16607 -RDEPEND=""
16608 -DEPEND="nls? ( sys-devel/gettext )"
16609 +Example:
16610 +.nf
16611 + \fI!\fRapp\-text/dos2unix
16612 +.fi
16613 +.TP
16614 +.I !!
16615 +means block packages from being installed at the same time
16616 +and explicitly disallow them from being temporarily installed
16617 +simultaneously during a series of upgrades. This syntax is supported
16618 +beginning with \fBEAPI 2\fR.
16619
16620 -src_configure() {
16621 - econf \\
16622 - \-\-bindir="${EPREFIX}"/bin
16623 -}
16624 +Example:
16625 +.nf
16626 + \fI!!\fR<sys\-apps/portage\-2.1.4_rc1
16627 +.fi
16628 +.TP
16629 +.I *
16630 +means match any version of the package so long
16631 +as the specified string prefix is matched. So with a
16632 +version of '2*', we can match '2.1', '2.2', '2.2.1',
16633 +etc... and not match version '1.0', '3.0', '4.1', etc...
16634 +Beware that, due to the string matching nature, '20'
16635 +will also be matched by '2*'. The version part
16636 +that comes before the '*' must be a valid version in the absence of the '*'.
16637 +For example, '2' is a valid version and '2.' is not. Therefore, '2*' is
16638 +allowed and '2.*' is not.
16639
16640 -src_install() {
16641 - emake DESTDIR="${D}" install
16642 - dodoc NEWS README* THANKS AUTHORS BUGS ChangeLog
16643 -}
16644 +Examples:
16645 +.nf
16646 + =dev\-libs/glib\-2\fI*\fR
16647 + \fI!\fR=net\-fs/samba\-2\fI*\fR
16648 +.fi
16649 +.RE
16650 +.TP
16651 +.B Atom Slots
16652 +Beginning with \fBEAPI 1\fR, any atom can be constrained to match a specific
16653 +\fBSLOT\fR. This is accomplished by appending a colon followed by a
16654 +\fBSLOT\fR:
16655 +
16656 +Examples:
16657 +.nf
16658 + x11\-libs/qt:3
16659 + \fI~\fRx11\-libs/qt-3.3.8:3
16660 + \fI>=\fRx11\-libs/qt-3.3.8:3
16661 + \fI=\fRx11\-libs/qt-3.3*:3
16662 +.fi
16663 +.TP
16664 +.B Sub Slots
16665 +Beginning with \fBEAPI 5\fR, a slot dependency may contain an
16666 +optional sub\-slot part that follows the regular slot and is
16667 +delimited by a \fI/\fR character.
16668 +
16669 +Examples:
16670 +.nf
16671 + dev\-libs/icu:0/0
16672 + dev\-libs/icu:0/49
16673 + dev\-lang/perl:0/5.12
16674 + dev\-libs/glib:2/2.30
16675 +.fi
16676 +.TP
16677 +.B Atom Slot Operators
16678 +Beginning with \fBEAPI 5\fR, a slot operator dependency consists
16679 +of a colon followed by one of the following operators:
16680 +.RS
16681 +.TP
16682 +.I *
16683 +Indicates that any slot value is acceptable. In addition,
16684 +for runtime dependencies, indicates that the package will not
16685 +break if the matched package is uninstalled and replaced by
16686 +a different matching package in a different slot.
16687 +
16688 +Examples:
16689 +.nf
16690 + dev\-libs/icu:*
16691 + dev\-lang/perl:*
16692 + dev-libs/glib:*
16693 +.fi
16694 +.TP
16695 +.I =
16696 +Indicates that any slot value is acceptable. In addition,
16697 +for runtime dependencies, indicates that the package will
16698 +break unless a matching package with slot and sub\-slot equal
16699 +to the slot and sub\-slot of the best installed version at the
16700 +time the package was installed is available.
16701 +
16702 +Examples:
16703 +.nf
16704 + dev\-libs/icu:=
16705 + dev\-lang/perl:=
16706 + dev-libs/glib:=
16707 +.fi
16708 +.TP
16709 +.I slot=
16710 +Indicates that only a specific slot value is acceptable, and
16711 +otherwise behaves identically to the plain equals slot operator.
16712 +
16713 +Examples:
16714 +.nf
16715 + dev\-libs/icu:0=
16716 + dev\-lang/perl:0=
16717 + dev-libs/glib:2=
16718 .fi
16719 +.PP
16720 +To implement the equals slot operator, the package manager
16721 +will need to store the slot/sub\-slot pair of the best installed
16722 +version of the matching package. This syntax is only for package
16723 +manager use and must not be used by ebuilds. The package manager
16724 +may do this by inserting the appropriate slot/sub\-slot pair
16725 +between the colon and equals sign when saving the package's
16726 +dependencies. The sub\-slot part must not be omitted here
16727 +(when the SLOT variable omits the sub\-slot part, the package
16728 +is considered to have an implicit sub\-slot which is equal to
16729 +the regular slot).
16730 +
16731 +Examples:
16732 +.nf
16733 + dev\-libs/icu:0/0=
16734 + dev\-libs/icu:0/49=
16735 + dev\-lang/perl:0/5.12=
16736 + dev-libs/glib:2/2.30=
16737 +.fi
16738 +.RE
16739 +.TP
16740 +.B Atom USE
16741 +Beginning with \fBEAPI 2\fR, any atom can be constrained to match specific
16742 +\fBUSE\fR flag settings. When used together with \fBSLOT\fR dependencies,
16743 +\fBUSE\fR dependencies appear on the right hand side of \fBSLOT\fR
16744 +dependencies.
16745 +.RS
16746 +.TP
16747 +.B Unconditional USE Dependencies
16748 +.TS
16749 +l l
16750 +__
16751 +l l.
16752 +Example Meaning
16753 +foo[bar] foo must have bar enabled
16754 +foo[bar,baz] foo must have both bar and baz enabled
16755 +foo[\-bar,baz] foo must have bar disabled and baz enabled
16756 +.TE
16757 +.TP
16758 +.B Conditional USE Dependencies
16759 +.TS
16760 +l l
16761 +__
16762 +l l.
16763 +Compact Form Equivalent Expanded Form
16764 +foo[bar?] bar? ( foo[bar] ) !bar? ( foo )
16765 +foo[!bar?] bar? ( foo ) !bar? ( foo[\-bar] )
16766 +foo[bar=] bar? ( foo[bar] ) !bar? ( foo[\-bar] )
16767 +foo[!bar=] bar? ( foo[\-bar] ) !bar? ( foo[bar] )
16768 +.TE
16769 +.RE
16770 +.TP
16771 +.B Atom USE defaults
16772 +Beginning with \fBEAPI 4\fR, \fBUSE\fR dependencies may specify default
16773 +assumptions about values for flags that may or may not be missing from
16774 +the \fBIUSE\fR of the matched package. Such defaults are specified by
16775 +immediately following a flag with either \fI(+)\fR or \fI(\-)\fR. Use
16776 +\fI(+)\fR to behave as if a missing flag is present and enabled, or
16777 +\fI(\-)\fR to behave as if it is present and disabled:
16778 +
16779 +Examples:
16780 +.nf
16781 + media\-video/ffmpeg[threads(+)]
16782 + media\-video/ffmpeg[-threads(\-)]
16783 +.fi
16784 +.TP
16785 +.B Dynamic Dependencies
16786 +Sometimes programs may depend on different things depending on the USE
16787 +variable. Portage offers a few options to handle this. Note that when
16788 +using the following syntaxes, each case is considered as a single Atom in the
16789 +scope in which it appears. That means that each Atom can both conditionally
16790 +include multiple Atoms and be nested to an infinite depth.
16791 +.RS
16792 +.TP
16793 +.B usevar? ( Atom )
16794 +To include the jpeg library when the user has jpeg in \fBUSE\fR, simply use the
16795 +following syntax:
16796 +
16797 +jpeg? ( media\-libs/jpeg )
16798 +.TP
16799 +.B !usevar? ( Atom )
16800 +If you want to include a package only if the user does not have a certain
16801 +option in their \fBUSE\fR variable, then use the following syntax:
16802 +
16803 +!nophysfs? ( dev\-games/physfs )
16804 +
16805 +This is often useful for those times when you want to add optional
16806 +support for a feature and have it enabled by default.
16807 +.TP
16808 +.B usevar? ( Atom if true ) !usevar? ( Atom if false )
16809 +For functionality like the ternary operator found in C you must use
16810 +two statements, one normal and one inverted. If a package uses
16811 +GTK2 or GTK1, but not both, then you can handle that like this:
16812 +
16813 +gtk2? ( =x11\-libs/gtk+\-2* ) !gtk2? ( =x11\-libs/gtk+\-1* )
16814 +
16815 +That way the default is the superior GTK2 library.
16816 +.TP
16817 +.B || ( Atom Atom ... )
16818 +When a package can work with a few different packages but a virtual is not
16819 +appropriate, this syntax can easily be used.
16820 +
16821 +Example:
16822 +.nf
16823 +|| (
16824 + app\-games/unreal\-tournament
16825 + app\-games/unreal\-tournament\-goty
16826 +)
16827 +.fi
16828 +
16829 +Here we see that unreal\-tournament has a normal version and a goty
16830 +version. Since they provide the same base set of files, another package can
16831 +use either. Adding a virtual is inappropriate due to its small scope.
16832 +
16833 +Another good example is when a package can be built with multiple video
16834 +interfaces, but it can only ever have just one.
16835 +
16836 +Example:
16837 +.nf
16838 +|| (
16839 + sdl? ( media\-libs/libsdl )
16840 + svga? ( media\-libs/svgalib )
16841 + opengl? ( virtual/opengl )
16842 + ggi? ( media\-libs/libggi )
16843 + virtual/x11
16844 +)
16845 +.fi
16846 +
16847 +Here only one of the packages will be chosen, and the order of preference is
16848 +determined by the order in which they appear. So sdl has the best chance of
16849 +being chosen, followed by svga, then opengl, then ggi, with a default of X if
16850 +the user does not specify any of the previous choices.
16851 +
16852 +Note that if any of the packages listed are already merged, the package manager
16853 +will use that to consider the dependency satisfied.
16854 +
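+The conditional and any\-of forms above may be freely combined and nested.
+A short sketch (hypothetical USE flags; package names for illustration only):
+.nf
+DEPEND="media\-libs/libpng
+	truetype? ( media\-libs/freetype )
+	!minimal? (
+		gtk? ( x11\-libs/gtk+:2 )
+		|| ( media\-libs/libsdl virtual/x11 )
+	)"
+.fi
+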
16855 +.SS "Cross-compilation"
16856 +Portage supports cross-compilation into a subdirectory specified by \fBROOT\fR.
16857 +.TP
16858 +.B Host
16859 +\fIHost\fR in this context means the platform hosting the build process, i.e.
16860 +what autotools calls CBUILD.
16861 +Its packages are contained in the root of the filesystem ("\fI/\fR").
16862 +
16863 +If \fBROOT\fR is "\fI/\fR", all dependency types will be installed there.
16864 +Otherwise, for EAPIs that support \fBHDEPEND\fR (experimental
16865 +\fBEAPI 5-hdepend\fR), only \fBHDEPEND\fR is installed into "\fI/\fR".
16866 +For EAPIs that do not support \fBHDEPEND\fR, the behaviour is controlled by the
16867 +\fI\-\-root-deps\fR flag to \fBemerge\fR(1), defaulting to install only
16868 +\fBDEPEND\fR into the \fIhost\fR.
16869 +.TP
16870 +.B Target
16871 +\fITarget\fR refers to the platform that the package will later run on, i.e.
16872 +what autotools calls CHOST.
16873 +The directory housing this system is specified by \fBROOT\fR.
16874 +If it is different from "\fI/\fR", i.e. \fIhost\fR and \fItarget\fR are not the
16875 +same, this variable contains the path to the directory housing the \fItarget\fR
16876 +system.
16877 +
16878 +For EAPIs that support \fBHDEPEND\fR (experimental \fBEAPI 5-hdepend\fR),
16879 +\fBDEPEND\fR, \fBRDEPEND\fR, and \fBPDEPEND\fR
16880 +list the \fItarget\fR dependencies, i.e. those to be installed into \fBROOT\fR.
16881 +For EAPIs that do not support \fBHDEPEND\fR, the \fBemerge\fR(1) flag
16882 +\fI\-\-root-deps\fR controls what the package manager installs there.
16883 +Without it, \fBemerge\fR defaults to install only runtime dependencies (i.e.
16884 +\fBRDEPEND\fR and \fBPDEPEND\fR) into \fBROOT\fR.
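+
+For illustration, installing a package and only its runtime dependencies into
+a hypothetical target root might look like (the path is an example only):
+.nf
+	ROOT=/usr/armv7a\-rootfs emerge net\-misc/dhcp
+.fi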
16885 +.PP
16886 +See section \fBVARIABLES\fR for more information about the \fBDEPEND\fR,
16887 +\fBRDEPEND\fR and \fBHDEPEND\fR variables.
16888 +.TP
16889 +.B The targetroot USE flag
16890 +For EAPIs that support the "\fItargetroot\fR" USE flag, that flag is
16891 +automatically enabled by the package manager if \fIhost\fR and \fItarget\fR
16892 +system are not the same, i.e. if the \fBROOT\fR is not "\fI/\fR".
16893 +This is necessary where the package to be built needs an executable copy of
16894 +itself during the build process.
16895 +A known example is dev-lang/python, which needs to run a Python interpreter
16896 +during compilation.
16897 +
16898 .SH "VARIABLES"
16899 .TP
16900 -.B MISC USAGE NOTES
16901 -\- All variables defined in \fBmake.conf\fR(5) are available for use in
16902 -ebuilds (such as the PORTAGE* and PORTDIR* variables)
16903 +.B Usage Notes
16904 +\- Variables defined in \fBmake.conf\fR(5) are available for use in
16905 +ebuilds (except Portage\-specific variables, which might not be supported by
16906 +other package managers).
16907 .br
16908 -\- When assigning values to variables in ebuilds, you \fBcannot have a
16909 +\- When assigning values to variables in ebuilds, you \fIcannot have a
16910 space\fR between the variable name and the equal sign.
16911 .br
16912 \- Variable values should only contain characters that are members of the
16913 @@ -60,47 +376,57 @@ space\fR between the variable name and the equal sign.
16914 .B P
16915 This variable contains the package name without the ebuild revision.
16916 This variable must NEVER be modified.
16917 -.br
16918 -\fBxfree\-4.2.1\-r2.ebuild\fR \-\-> \fB$P\fR=='\fIxfree\-4.2.1\fR'
16919 +
16920 +xfree\-4.2.1\-r2.ebuild \-\-> $P=='xfree\-4.2.1'
16921 .TP
16922 .B PN
16923 Contains the name of the script without the version number.
16924 -.br
16925 -\fBxfree\-4.2.1\-r2.ebuild\fR \-\-> \fB$PN\fR=='\fIxfree\fR'
16926 +
16927 +xfree\-4.2.1\-r2.ebuild \-\-> $PN=='xfree'
16928 .TP
16929 .B PV
16930 Contains the version number without the revision.
16931 -.br
16932 -\fBxfree\-4.2.1\-r2.ebuild\fR \-\-> \fB$PV\fR=='\fI4.2.1\fR'
16933 +
16934 +xfree\-4.2.1\-r2.ebuild \-\-> $PV=='4.2.1'
16935 .TP
16936 .B PR
16937 Contains the revision number or 'r0' if no revision number exists.
16938 -.br
16939 -\fBxfree\-4.2.1\-r2.ebuild\fR \-\-> \fB$PR\fR=='\fIr2\fR'
16940 +
16941 +xfree\-4.2.1\-r2.ebuild \-\-> $PR=='r2'
16942 .TP
16943 .B PVR
16944 Contains the version number with the revision.
16945 -.br
16946 -\fBxfree\-4.2.1\-r2.ebuild\fR \-\-> \fB$PVR\fR=='\fI4.2.1\-r2\fR'
16947 +
16948 +xfree\-4.2.1\-r2.ebuild \-\-> $PVR=='4.2.1\-r2'
16949 .TP
16950 .B PF
16951 -Contains the full package name \fI[PN]\-[PVR]\fR
16952 -.br
16953 -\fBxfree\-4.2.1\-r2.ebuild\fR \-\-> \fB$PF\fR=='\fIxfree\-4.2.1\-r2\fR'
16954 +Contains the full package name \fBPN\fR\-\fBPVR\fR
16955 +
16956 +xfree\-4.2.1\-r2.ebuild \-\-> $PF=='xfree\-4.2.1\-r2'
16957 .TP
16958 .B CATEGORY
16959 Contains the package category name.
16960 .TP
16961 .B A
16962 Contains all source files required for the package. This variable must
16963 -not be defined. It is autogenerated from the \fISRC_URI\fR variable.
16964 +not be defined. It is autogenerated from the \fBSRC_URI\fR variable.
16965 .TP
16966 -\fBWORKDIR\fR = \fI"${PORTAGE_TMPDIR}/portage/${CATEGORY}/${PF}/work"\fR
16967 +.B WORKDIR\fR = \fI"${PORTAGE_TMPDIR}/portage/${CATEGORY}/${PF}/work"
16968 Contains the path to the package build root. Do not modify this variable.
16969 .TP
16970 -\fBFILESDIR\fR = \fI"${PORTDIR}/${CATEGORY}/${PN}/files"\fR
16971 -Contains the path to the 'files' sub folder in the package specific
16972 -location in the portage tree. Do not modify this variable.
16973 +.B FILESDIR\fR = \fI"${repository_location}/${CATEGORY}/${PN}/files"
16974 +Contains the path to the 'files' subdirectory in the package specific
16975 +location in given repository. Do not modify this variable.
16976 +.TP
16977 +.B EBUILD_PHASE
16978 +Contains the abbreviated name of the phase function that is
16979 +currently executing, such as "setup", "unpack", "compile", or
16980 +"preinst".
16981 +.TP
16982 +.B EBUILD_PHASE_FUNC
16983 +Beginning with \fBEAPI 5\fR, contains the full name of the phase
16984 +function that is currently executing, such as "pkg_setup",
16985 +"src_unpack", "src_compile", or "pkg_preinst".
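+
+A sketch of how an ebuild might branch on it:
+.nf
+	if [[ ${EBUILD_PHASE_FUNC} == pkg_postinst ]]; then
+		elog "configuration notes go here"
+	fi
+.fi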
16986 .TP
16987 .B EPREFIX
16988 Beginning with \fBEAPI 3\fR, contains the offset
16989 @@ -110,17 +436,17 @@ and is available in such cases as ${EPREFIX}. EPREFIX does not contain
16990 a trailing slash, therefore an absent offset is represented by the empty
16991 string. Do not modify this variable.
16992 .TP
16993 -\fBS\fR = \fI"${WORKDIR}/${P}"\fR
16994 +.B S\fR = \fI"${WORKDIR}/${P}"
16995 Contains the path to the temporary \fIbuild directory\fR. This variable
16996 is used by the functions \fIsrc_compile\fR and \fIsrc_install\fR. Both
16997 are executed with \fIS\fR as the current directory. This variable may
16998 be modified to match the extraction directory of a tarball for the package.
16999 .TP
17000 -\fBT\fR = \fI"${PORTAGE_TMPDIR}/portage/${CATEGORY}/${PF}/temp"\fR
17001 +.B T\fR = \fI"${PORTAGE_TMPDIR}/portage/${CATEGORY}/${PF}/temp"
17002 Contains the path to a \fItemporary directory\fR. You may use this for
17003 whatever you like.
17004 .TP
17005 -\fBD\fR = \fI"${PORTAGE_TMPDIR}/portage/${CATEGORY}/${PF}/image/"\fR
17006 +.B D\fR = \fI"${PORTAGE_TMPDIR}/portage/${CATEGORY}/${PF}/image/"
17007 Contains the path to the temporary \fIinstall directory\fR. Every write
17008 operation that does not involve the helper tools and functions (found below)
17009 should be prefixed with ${D}.
17010 @@ -129,12 +455,12 @@ to be taken into account here, for which the variable
17011 ${ED} is provided (see below).
17012 Do not modify this variable.
17013 .TP
17014 -\fBED\fT = \fI"${PORTAGE_TMPDIR}/portage/${CATEGORY}/${PF}/image/${EPREFIX}/"\fR
17015 +.B ED\fR = \fI"${PORTAGE_TMPDIR}/portage/${CATEGORY}/${PF}/image/${EPREFIX}/"
17016 Beginning with \fBEAPI 3\fR, contains the path
17017 "${D%/}${EPREFIX}/" for convenience purposes.
17018 -For \fBEAPI\fR values prior to \fBEAPI 3\fR which do
17019 -not support \fB${ED}\fR, helpers use \fB${D}\fR where
17020 -they would otherwise use \fB${ED}\fR.
17021 +For EAPI values prior to \fBEAPI 3\fR which do
17022 +not support ED, helpers use \fBD\fR where
17023 +they would otherwise use ED.
17024 Do not modify this variable.
17025 .TP
17026 .B MERGE_TYPE
17027 @@ -148,7 +474,6 @@ l l
17028 __
17029 l l.
17030 Value Meaning
17031 -
17032 binary previously\-built which is scheduled for merge
17033 buildonly source\-build which is not scheduled for merge
17034 source source\-build which is scheduled for merge
17035 @@ -157,7 +482,7 @@ source source\-build which is scheduled for merge
17036 .TP
17037 .B PORTAGE_LOG_FILE
17038 Contains the path of the build log. If \fBPORT_LOGDIR\fR variable is unset then
17039 -\fBPORTAGE_LOG_FILE\fR=\fB"${T}/build.log"\fR.
17040 +PORTAGE_LOG_FILE=\fI"${T}/build.log"\fR.
17041 .TP
17042 .B REPLACED_BY_VERSION
17043 Beginning with \fBEAPI 4\fR, the REPLACED_BY_VERSION variable can be
17044 @@ -176,22 +501,22 @@ to the package version(s) being replaced. Typically, this variable will
17045 not contain more than one version, but according to PMS it can contain
17046 more.
17047 .TP
17048 -\fBROOT\fR = \fI"/"\fR
17049 +.B ROOT\fR = \fI"/"
17050 Contains the path that portage should use as the root of the live filesystem.
17051 When packages wish to make changes to the live filesystem, they should do so in
17052 the tree prefixed by ${ROOT}. Often the offset prefix needs to be taken
17053 into account here, for which the variable ${EROOT} is provided (see
17054 below). Do not modify this variable.
17055 .TP
17056 -\fBEROOT\fR = \fI"${ROOT%/}${EPREFIX}/"\fR
17057 +.B EROOT\fR = \fI"${ROOT%/}${EPREFIX}/"
17058 Beginning with \fBEAPI 3\fR, contains
17059 "${ROOT%/}${EPREFIX}/" for convenience
17060 purposes. Do not modify this variable.
17061 .TP
17062 -\fBDESCRIPTION\fR = \fI"A happy little package"\fR
17063 +.B DESCRIPTION\fR = \fI"A happy little package"
17064 Should contain a short description of the package.
17065 .TP
17066 -\fBEAPI\fR = \fI"0"\fR
17067 +.B EAPI\fR = \fI"0"
17068 Defines the ebuild API version to which this package conforms. If not
17069 defined then it defaults to "0". If portage does not recognize the
17070 EAPI value then it will mask the package and refuse to perform any
17071 @@ -202,7 +527,7 @@ who uses the \fBebuild\fR(1) and \fBrepoman\fR(1) commands with this
17072 package will be required to have a version of portage that recognizes
17073 the EAPI to which this package conforms.
17074 .TP
17075 -\fBSRC_URI\fR = \fI"http://example.com/path/${P}.tar.gz"\fR
17076 +.B SRC_URI\fR = \fI"http://example.com/path/${P}.tar.gz"
17077 Contains a list of URIs for the required source files. It can contain
17078 multiple URIs for a single source file. The list is processed in order
17079 if the file was not found on any of the \fIGENTOO_MIRRORS\fR.
17080 @@ -211,11 +536,11 @@ customized with a "->" operator on the right hand side, followed by the
17081 desired output file name. All tokens, including the operator and output
17082 file name, should be separated by whitespace.
17083 .TP
17084 -\fBHOMEPAGE\fR = \fI"http://example.com/"\fR
17085 +.B HOMEPAGE\fR = \fI"http://example.com/"
17086 Should contain a list of URIs for the sources main sites and other further
17087 package dependent information.
17088 .TP
17089 -\fBKEYWORDS\fR = \fI[\-~][x86,ppc,sparc,mips,alpha,arm,hppa]\fR
17090 +.B KEYWORDS\fR = \fI[\-~][x86,ppc,sparc,mips,alpha,arm,hppa]
17091 Should contain an appropriate list of arches that the ebuild is known to
17092 work/not work on. By default, if you do not know if an ebuild runs under
17093 a particular arch, simply omit that KEYWORD. If the ebuild will not
17094 @@ -226,19 +551,31 @@ unmasked for testing by setting ACCEPT_KEYWORDS="~arch" on the command
17095 line, or in \fBmake.conf\fR(5)) For an authoritative list please review
17096 /usr/portage/profiles/arch.list. Please keep this list in alphabetical order.
17097 .TP
17098 -\fBSLOT\fR
17099 +.B SLOT
17100 This sets the SLOT for packages that may need to have multiple versions
17101 co\-exist. By default you should set \fBSLOT\fR="0". If you are unsure, then
17102 do not fiddle with this until you seek some guidance from some guru. This
17103 value should \fINEVER\fR be left undefined.
17104 -.TP
17105 -\fBLICENSE\fR
17106 +
17107 +Beginning with \fBEAPI 5\fR, the SLOT variable may contain
17108 +an optional sub\-slot part that follows the regular slot and
17109 +is delimited by a / character. The sub\-slot must be a valid
17110 +slot name. The sub\-slot is used to represent cases in which
17111 +an upgrade to a new version of a package with a different
17112 +sub\-slot may require dependent packages to be rebuilt. When
17113 +the sub\-slot part is omitted from the SLOT definition, the
17114 +package is considered to have an implicit sub\-slot which is
17115 +equal to the regular slot. Refer to the \fBAtom Slot
17116 +Operators\fR section for more information about sub\-slot
17117 +usage.
17118 +.TP
17119 +.B LICENSE
17120 This should be a space delimited list of licenses that the package falls
17121 under. This \fB_must_\fR be set to a matching license in
17122 /usr/portage/licenses/. If the license does not exist in portage yet, you
17123 must add it first.
17124 .TP
17125 -\fBIUSE\fR
17126 +.B IUSE
17127 This should be a list of any and all USE flags that are leveraged within
17128 your build script. The only USE flags that should not be listed here are
17129 arch related flags (see \fBKEYWORDS\fR). Beginning with \fBEAPI 1\fR, it
17130 @@ -250,237 +587,51 @@ negative IUSE default settings are effective only for negation of
17131 repo\-level USE settings, since profile and user configuration settings
17132 override them.
17133 .TP
17134 -\fBDEPEND\fR
17135 -This should contain a list of all packages that are required for the
17136 -program to compile.
17137 -.RS
17138 -.TP
17139 -.B DEPEND Atoms
17140 -A depend atom is simply a dependency that is used by portage when calculating
17141 -relationships between packages. Please note that if the atom has not already
17142 -been emerged, then the latest version available is matched.
17143 -.RS
17144 -.TP
17145 -.B Atom Bases
17146 -The base atom is just a full category/packagename. Hence, these are base atoms:
17147 +.B DEPEND
17148 +This should contain a list of all packages that are required for the program
17149 +to compile (aka \fIbuildtime\fR dependencies). These are usually libraries and
17150 +headers.
17151
17152 -.nf
17153 -.I sys\-apps/sed
17154 -.I sys\-libs/zlib
17155 -.I net\-misc/dhcp
17156 -.fi
17157 -.TP
17158 -.B Atom Versions
17159 -It is nice to be more specific and say that only certain versions of atoms are
17160 -acceptable. Note that versions must be combined with a prefix (see below).
17161 -Hence you may add a version number as a postfix to the base:
17162 +Starting from experimental \fBEAPI 5-hdepend\fR, tools should go into the
17163 +\fBHDEPEND\fR variable instead, as \fBDEPEND\fR will only be installed into the
17164 +\fItarget\fR system and hence cannot be executed in a cross\-compile setting.
17165 +(See section \fBCross\-compilation\fR for more information.)
17166
17167 -.nf
17168 -sys\-apps/sed\fI\-4.0.5\fR
17169 -sys\-libs/zlib\fI\-1.1.4\-r1\fR
17170 -net\-misc/dhcp\fI\-3.0_p2\fR
17171 -.fi
17172 -
17173 -Versions are normally made up of two or three numbers separated by periods, such
17174 -as 1.2 or 4.5.2. This string may be followed by a character such as 1.2a or
17175 -4.5.2z. Note that this letter is \fBnot\fR meant to indicate alpha, beta,
17176 -etc... status. For that, use the optional suffix; either _alpha, _beta, _pre
17177 -(pre\-release), _rc (release candidate), or _p (patch). This means for the
17178 -3rd pre\-release of a package, you would use something like 1.2_pre3. The
17179 -suffixes here can be arbitrarily chained without limitation.
17180 +You may use the syntax described above in the \fBDependencies\fR section.
17181 .TP
17182 -.B Atom Prefix Operators [> >= = <= <]
17183 -Sometimes you want to be able to depend on general versions rather than specifying
17184 -exact versions all the time. Hence we provide standard boolean operators:
17185 -
17186 -.nf
17187 -\fI>\fRmedia\-libs/libgd\-1.6
17188 -\fI>=\fRmedia\-libs/libgd\-1.6
17189 -\fI=\fRmedia\-libs/libgd\-1.6
17190 -\fI<=\fRmedia\-libs/libgd\-1.6
17191 -\fI<\fRmedia\-libs/libgd\-1.6
17192 -.fi
17193 -.TP
17194 -.B Extended Atom Prefixes [!~] and Postfixes [*]
17195 -Now to get even fancier, we provide the ability to define blocking packages and
17196 -version range matching. Also note that these extended prefixes/postfixes may
17197 -be combined in any way with the atom classes defined above. Here are some common
17198 -examples you may find in the portage tree:
17199 -
17200 -.nf
17201 -\fI!\fRapp\-text/dos2unix
17202 -=dev\-libs/glib\-2\fI*\fR
17203 -\fI!\fR=net\-fs/samba\-2\fI*\fR
17204 -\fI~\fRnet\-libs/libnet\-1.0.2a
17205 -\fI!!\fR<sys\-apps/portage\-2.1.4_rc1\fI\fR
17206 -.fi
17207 -
17208 -\fI!\fR means block packages from being installed at the same time.
17209 -.br
17210 -\fI!!\fR means block packages from being installed at the same time
17211 -and explicitly disallow them from being temporarily installed
17212 -simultaneously during a series of upgrades. This syntax is supported
17213 -beginning with \fBEAPI 2\fR.
17214 -.br
17215 -\fI*\fR means match any version of the package so long
17216 -as the specified string prefix is matched. So with a
17217 -version of '2*', we can match '2.1', '2.2', '2.2.1',
17218 -etc... and not match version '1.0', '3.0', '4.1', etc...
17219 -Beware that, due to the string matching nature, '20'
17220 -will also be matched by '2*'. The version part
17221 -that comes before the '*' must be a valid version in the absence of the '*'.
17222 -For example, '2' is a valid version and '2.' is not. Therefore, '2*' is
17223 -allowed and '2.*' is not.
17224 -.br
17225 -\fI~\fR means match any revision of the base version specified. So in the
17226 -above example, we would match versions '1.0.2a', '1.0.2a\-r1', '1.0.2a\-r2',
17227 -etc...
17228 -.TP
17229 -.B Atom Slots
17230 -Beginning with \fBEAPI 1\fR, any atom can be constrained to match a specific
17231 -\fBSLOT\fR. This is accomplished by appending a colon followed by a
17232 -\fBSLOT\fR:
17233 +.B RDEPEND
17234 +This should contain a list of all packages that are required for this
17235 +program to run (aka \fIruntime\fR dependencies). These are usually libraries.
17236
17237 -.nf
17238 -x11\-libs/qt:3
17239 -\fI~\fRx11\-libs/qt-3.3.8:3
17240 -\fI>=\fRx11\-libs/qt-3.3.8:3
17241 -\fI=\fRx11\-libs/qt-3.3*:3
17242 -.fi
17243 -.TP
17244 -.B Atom USE
17245 -Beginning with \fBEAPI 2\fR, any atom can be constrained to match specific
17246 -\fBUSE\fR flag settings. When used together with \fBSLOT\fR dependencies,
17247 -\fBUSE\fR dependencies appear on the right hand side of \fBSLOT\fR
17248 -dependencies.
17249 +In \fBEAPI 3\fR or earlier, if this is not set, then it defaults to the value
17250 +of \fBDEPEND\fR. In \fBEAPI 4\fR or later, \fBRDEPEND\fR will never be
17251 +implicitly set.
17252
17253 -.RS
17254 +You may use the syntax described above in the \fBDependencies\fR section.
17255 .TP
17256 -.B Unconditional USE Dependencies
17257 -.TS
17258 -l l
17259 -__
17260 -l l.
17261 -Example Meaning
17262 +.B HDEPEND
17263 +This should contain a list of all packages that are required to be executable
17264 +during compilation of this program (aka \fIhost\fR buildtime dependencies).
17265 +These are usually tools, like interpreters or (cross\-)compilers.
17266
17267 -foo[bar] foo must have bar enabled
17268 -foo[bar,baz] foo must have both bar and baz enabled
17269 -foo[\-bar,baz] foo must have bar disabled and baz enabled
17270 -.TE
17271 +This variable is new in experimental \fBEAPI 5-hdepend\fR; the packages it
17272 +lists will be installed into the \fIhost\fR system.
17273 +(See section \fBCross-compilation\fR for more information.)
17274
17275 +You may use the syntax described above in the \fBDependencies\fR section.
17276 .TP
17277 -.B Conditional USE Dependencies
17278 -.TS
17279 -l l
17280 -__
17281 -l l.
17282 -Compact Form Equivalent Expanded Form
17283 +.B PDEPEND
17284 +This should contain a list of all packages that should be merged after this
17285 +one (aka \fIpost\fR merge dependencies); if that is not possible, the
17286 +package manager may install them at any time.
17287
17288 -foo[bar?] bar? ( foo[bar] ) !bar? ( foo )
17289 -foo[!bar?] bar? ( foo ) !bar? ( foo[\-bar] )
17290 -foo[bar=] bar? ( foo[bar] ) !bar? ( foo[\-bar] )
17291 -foo[!bar=] bar? ( foo[\-bar] ) !bar? ( foo[bar] )
17292 -.TE
17293 -.RE
17294 -.TP
17295 -.B Atom USE defaults
17296 -Beginning with \fBEAPI 4\fR, \fBUSE\fR dependencies may specify default
17297 -assumptions about values for flags that may or may not be missing from
17298 -the \fBIUSE\fR of the matched package. Such defaults are specified by
17299 -immediately following a flag with either \fB(+)\fR or \fB(\-)\fR. Use
17300 -\fB(+)\fR to behave as if a missing flag is present and enabled, or
17301 -\fB(\-)\fR to behave as if it is present and disabled:
17302 -
17303 -.RS
17304 -.nf
17305 -media\-video/ffmpeg[threads(+)]
17306 -media\-video/ffmpeg[-threads(\-)]
17307 -.fi
17308 -.RE
17309 -.RE
17310 -.TP
17311 -.B Dynamic DEPENDs
17312 -Sometimes programs may depend on different things depending on the USE
17313 -variable. Portage offers a few options to handle this. Note that when
17314 -using the following syntaxes, each case is considered as 1 Atom in the
17315 -scope it appears. That means that each Atom both conditionally include
17316 -multiple Atoms and be nested to an infinite depth.
17317 -.RS
17318 -.TP
17319 -.B usevar? ( DEPEND Atom )
17320 -To include the jpeg library when the user has jpeg in \fBUSE\fR, simply use the
17321 -following syntax:
17322 -.br
17323 -.B jpeg? ( media\-libs/jpeg )
17324 -.TP
17325 -.B !usevar? ( Atom )
17326 -If you want to include a package only if the user does not have a certain option
17327 -in their \fBUSE\fR variable, then use the following syntax:
17328 -.br
17329 -.B !nophysfs? ( dev\-games/physfs )
17330 -.br
17331 -This is often useful for those times when you want to want to add optional support
17332 -for a feature and have it enabled by default.
17333 -.TP
17334 -.B usevar? ( Atom if true ) !usevar? ( Atom if false )
17335 -For functionality like the tertiary operator found in C you must use
17336 -two statements, one normal and one inverted. If a package uses
17337 -GTK2 or GTK1, but not both, then you can handle that like this:
17338 -.br
17339 -.B gtk2? ( =x11\-libs/gtk+\-2* ) !gtk2? ( =x11\-libs/gtk+\-1* )
17340 -.br
17341 -That way the default is the superior GTK2 library.
17342 -.TP
17343 -.B || ( Atom Atom ... )
17344 -When a package can work with a few different packages but a virtual is not
17345 -appropriate, this syntax can easily be used.
17346 -.nf
17347 -.B || (
17348 -.B app\-games/unreal\-tournament
17349 -.B app\-games/unreal\-tournament\-goty
17350 -.B )
17351 -.fi
17352 -Here we see that unreal\-tournament has a normal version and it has a goty
17353 -version. Since they provide the same base set of files, another package can
17354 -use either. Adding a virtual is inappropriate due to the small scope of it.
17355 -.br
17356 -Another good example is when a package can be built with multiple video
17357 -interfaces, but it can only ever have just one.
17358 -.nf
17359 -.B || (
17360 -.B sdl? ( media\-libs/libsdl )
17361 -.B svga? ( media\-libs/svgalib )
17362 -.B opengl? ( virtual/opengl )
17363 -.B ggi? ( media\-libs/libggi )
17364 -.B virtual/x11
17365 -.B )
17366 -.fi
17367 -Here only one of the packages will be chosen, and the order of preference is
17368 -determined by the order in which they appear. So sdl has the best chance of
17369 -being chosen, followed by svga, then opengl, then ggi, with a default of X if
17370 -the user does not specify any of the previous choices.
17371 +.B ***WARNING***
17372 .br
17373 -Note that if any of the packages listed are already merged, the package manager
17374 -will use that to consider the dependency satisfied.
17375 -.RE
17376 +Use this only as a last resort to break cyclic dependencies!
17377
17378 -.RE
17379 -.TP
17380 -\fBRDEPEND\fR
17381 -This should contain a list of all packages that are required for this
17382 -program to run (aka runtime depend). If this is not set in \fBEAPI 3\fR
17383 -or earlier, then it defaults to the value of \fBDEPEND\fR. In
17384 -\fBEAPI 4\fR or later, \fBRDEPEND\fR will never be implicitly set.
17385 -.br
17386 -You may use the same syntax to vary dependencies as seen above in \fBDEPEND\fR.
17387 +You may use the syntax described above in the \fBDependencies\fR section.
17388 .TP
17389 -\fBPDEPEND\fR
17390 -This should contain a list of all packages that should be merged after this one,
17391 -but may be merged before if need be.
17392 -.br
17393 -You may use the same syntax to vary dependencies as seen above in \fBDEPEND\fR.
17394 -.TP
17395 -\fBREQUIRED_USE\fR
17396 +.B REQUIRED_USE
17397 Beginning with \fBEAPI 4\fR, the \fBREQUIRED_USE\fR variable can be
17398 used to specify combinations of \fBUSE\fR flags that are allowed
17399 or not allowed. Elements can be nested when necessary.
17400 @@ -489,16 +640,16 @@ l l
17401 __
17402 l l.
17403 Behavior Expression
17404 -
17405 If flag1 enabled then flag2 disabled flag1? ( !flag2 )
17406 If flag1 enabled then flag2 enabled flag1? ( flag2 )
17407 If flag1 disabled then flag2 enabled !flag1? ( flag2 )
17408 If flag1 disabled then flag2 disabled !flag1? ( !flag2 )
17409 Must enable any one or more (inclusive or) || ( flag1 flag2 flag3 )
17410 Must enable exactly one but not more (exclusive or) ^^ ( flag1 flag2 flag3 )
17411 +May enable at most one (EAPI 5 or later) ?? ( flag1 flag2 flag3 )
17412 .TE
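+
+Example (hypothetical USE flags):
+.nf
+	REQUIRED_USE="gui? ( ^^ ( gtk qt4 ) )"
+.fi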
17413 .TP
17414 -\fBRESTRICT\fR = \fI[strip,mirror,fetch,userpriv]\fR
17415 +.B RESTRICT\fR = \fI[strip,mirror,fetch,userpriv]
17416 This should be a space delimited list of portage features to restrict.
17417 You may use conditional syntax to vary restrictions as seen above in DEPEND.
17418 .PD 0
17419 @@ -524,9 +675,19 @@ binaries that are not compatible with debugedit.
17420 .I mirror
17421 files in \fBSRC_URI\fR will not be downloaded from the \fBGENTOO_MIRRORS\fR.
17422 .TP
17423 +.I preserve\-libs
17424 +Disables preserve\-libs for specific packages. Note that when a package is
17425 +merged, RESTRICT=preserve\-libs applies if either the new instance or the
17426 +old instance sets RESTRICT=preserve\-libs.
17427 +.TP
17428 .I primaryuri
17429 fetch from URIs in \fBSRC_URI\fR before \fBGENTOO_MIRRORS\fR.
17430 .TP
17431 +.I splitdebug
17432 +Disables splitdebug for specific packages. This is for packages with
17433 +binaries that trigger problems with splitdebug, such as file\-collisions
17434 +between symlinks in /usr/lib/debug/.build-id (triggered by bundled libraries).
17435 +.TP
17436 .I strip
17437 final binaries/libraries will not be stripped of debug symbols.
17438 .TP
17439 @@ -538,7 +699,7 @@ Disables userpriv for specific packages.
17440 .RE
17441 .PD 1
17442 .TP
17443 -\fBPROPERTIES\fR = \fI[interactive]\fR
17444 +.B PROPERTIES\fR = \fI[interactive]
17445 A space delimited list of properties, with conditional syntax support.
17446 .PD 0
17447 .RS
17448 @@ -548,30 +709,35 @@ One or more ebuild phases will produce a prompt that requires user interaction.
17449 .RE
17450 .PD 1
17451 .TP
17452 -\fBPROVIDE\fR = \fI"virtual/TARGET"\fR
17453 +.B PROVIDE\fR = \fI"virtual/TARGET"
17454 This variable should only be used when a package provides a virtual target.
17455 For example, blackdown\-jdk and sun\-jdk provide \fIvirtual/jdk\fR. This
17456 allows for packages to depend on \fIvirtual/jdk\fR rather than on blackdown
17457 or sun specifically.
17458 +
17459 +The \fBPROVIDE\fR variable has been deprecated. See
17460 +\fIhttp://www.gentoo.org/proj/en/glep/glep-0037.html\fR for details.
17461 +
17462 .TP
17463 -\fBDOCS\fR
17464 +.B DOCS
17465 Beginning with \fBEAPI 4\fR, an array or space\-delimited list of documentation
17466 files for the default src_install function to install using dodoc. If
17467 undefined, a reasonable default list is used. See the documentation for
17468 src_install below.
17469 -.SH "QA CONTROL VARIABLES"
17470 +
17471 +.SS "QA Control Variables:"
17472 .TP
17473 -.B USAGE NOTES
17474 +.B Usage Notes
17475 Several QA variables are provided which allow an ebuild to manipulate some
17476 of the QA checks performed by portage. Use of these variables in ebuilds
17477 should be kept to an absolute minimum otherwise they defeat the purpose
17478 of the QA checks, and their use is subject to agreement of the QA team.
17479 They are primarily intended for use by ebuilds that install closed\-source
17480 binary objects that cannot be altered.
17481 -.br
17482 +
17483 Note that objects that violate these rules may fail on some architectures.
17484 .TP
17485 -\fBQA_PREBUILT\fR
17486 +.B QA_PREBUILT
17487 This should contain a list of file paths, relative to the image
17488 directory, of files that are pre\-built binaries. Paths
17489 listed here will be appended to each of the QA_* variables
17490 @@ -581,65 +747,78 @@ the QA_* variables that support regular expressions instead
17491 of fnmatch patterns. The translation mechanism simply replaces
17492 "*" with ".*".
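+
+Example (hypothetical install paths):
+.nf
+	QA_PREBUILT="opt/${PN}/bin/* opt/${PN}/lib/*"
+.fi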
17493 .TP
17494 -\fBQA_TEXTRELS\fR
17495 +.B QA_TEXTRELS
17496 This variable can be set to a list of file paths, relative to the image
17497 directory, of files that contain text relocations that cannot be eliminated.
17498 The paths may contain fnmatch patterns.
17499 -.br
17500 +
17501 This variable is intended to be used on closed\-source binary objects that
17502 cannot be altered.
17503 .TP
17504 -\fBQA_EXECSTACK\fR
17505 +.B QA_EXECSTACK
17506 This should contain a list of file paths, relative to the image directory, of
17507 objects that require executable stack in order to run.
17508 The paths may contain fnmatch patterns.
17509 -.br
17510 +
17511 This variable is intended to be used on objects that truly need executable
17512 stack (i.e. not those marked to need it which in fact do not).
17513 .TP
17514 -\fBQA_WX_LOAD\fR
17515 +.B QA_WX_LOAD
17516 This should contain a list of file paths, relative to the image directory, of
17517 files that contain writable and executable segments. These are rare.
17518 The paths may contain fnmatch patterns.
17519 .TP
17520 -\fBQA_FLAGS_IGNORED\fR
17521 +.B QA_FLAGS_IGNORED
17522 This should contain a list of file paths, relative to the image directory, of
17523 files that do not contain .GCC.command.line sections or contain .hash sections.
17524 -The paths may contain regular expressions with escape\-quoted special characters.
17525 -.br
17526 +The paths may contain regular expressions with escape\-quoted special
17527 +characters.
17528 +
17529 This variable is intended to be used on files of binary packages which ignore
17530 CFLAGS, CXXFLAGS, FFLAGS, FCFLAGS, and LDFLAGS variables.
17531 .TP
17532 -.TP
17533 -\fBQA_DT_HASH\fR
17534 +.B QA_MULTILIB_PATHS
17535 This should contain a list of file paths, relative to the image directory, of
17536 -files that contain .hash sections. The paths may contain regular expressions
17537 -with escape\-quoted special characters. This variable is deprecated. Use
17538 -\fBQA_FLAGS_IGNORED\fR instead.
17539 -.br
17540 -This variable is intended to be used on files of binary packages which ignore
17541 -LDFLAGS variable.
17542 +files that should be ignored for the multilib\-strict checks.
17543 +The paths may contain regular expressions with escape\-quoted special
17544 +characters.
17545 .TP
17546 -\fBQA_PRESTRIPPED\fR
17547 +.B QA_PRESTRIPPED
17548 This should contain a list of file paths, relative to the image directory, of
17549 files that contain pre-stripped binaries. The paths may contain regular
17550 expressions with escape\-quoted special characters.
17551 .TP
17552 -\fBQA_SONAME\fR
17553 +.B QA_SONAME
17554 This should contain a list of file paths, relative to the image directory, of
17555 shared libraries that lack SONAMEs. The paths may contain regular expressions
17556 with escape\-quoted special characters.
17557 .TP
17558 -\fBQA_SONAME_NO_SYMLINK\fR
17559 +.B QA_SONAME_NO_SYMLINK
17560 This should contain a list of file paths, relative to the image directory, of
17561 shared libraries that have SONAMEs but should not have a corresponding SONAME
17562 symlink in the same directory. The paths may contain regular expressions
17563 with escape\-quoted special characters.
17564 .TP
17565 -\fBQA_DT_NEEDED\fR
17566 +.B QA_AM_MAINTAINER_MODE
17567 +This should contain a list of lines containing automake missing \-\-run
17568 +commands. The lines may contain regular expressions with escape\-quoted
17569 +special characters.
17570 +.TP
17571 +.B QA_CONFIGURE_OPTIONS
17572 +This should contain a list of configure options which trigger warnings about
17573 +unrecognized options. The options may contain regular expressions with
17574 +escape\-quoted special characters.
17575 +.TP
17576 +.B QA_DT_NEEDED
17577 This should contain a list of file paths, relative to the image directory, of
17578 shared libraries that lack NEEDED entries. The paths may contain regular
17579 expressions with escape\-quoted special characters.
17580 +.TP
17581 +.B QA_DESKTOP_FILE
17582 +This should contain a list of file paths, relative to the image directory, of
17583 +desktop files which should not be validated. The paths may contain regular
17584 +expressions with escape\-quoted special characters.
17585 +
17586 .SH "PORTAGE DECLARATIONS"
17587 .TP
17588 .B inherit
17589 @@ -652,6 +831,7 @@ ebuild. Specification of the eclasses contains only their name and not the
17590 \fI.eclass\fR extension. Also note that the inherit statement must come
17591 before other variable declarations unless these variables are used in global
17592 scope of eclasses.
17593 +
17594 .SH "PHASE FUNCTIONS"
17595 .TP
17596 .B pkg_pretend
17597 @@ -664,9 +844,10 @@ is used to execute pkg_pretend is not saved and therefore is not
17598 available in phases that execute afterwards.
17599 .TP
17600 .B pkg_nofetch
17601 -If you turn on \fIfetch\fR in \fBRESTRICT\fR, then this function will be
17602 -run when the files in \fBSRC_URI\fR cannot be found. Useful for
17603 -displaying information to the user on *how* to obtain said files. All
17604 +This function will be executed when the files in \fBSRC_URI\fR
17605 +cannot be fetched for any reason. If you turn on \fIfetch\fR in
17606 +\fBRESTRICT\fR, this is useful for displaying information to the
17607 +user on *how* to obtain said files. All
17608 you have to do is output a message and let the function return. Do not
17609 end the function with a call to \fBdie\fR.
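+
+A minimal sketch of such a function:
+.nf
+	pkg_nofetch() {
+		elog "Please download ${A} manually from ${HOMEPAGE}"
+		elog "and place it in ${DISTDIR}."
+	}
+.fi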
17610 .TP
17611 @@ -674,45 +855,48 @@ end the function with a call to \fBdie\fR.
17612 This function can be used if the package needs specific setup actions or
17613 checks to be performed before anything else.
17614 .br
17615 -Initial working directory of ${PORTAGE_TMPDIR}.
17616 +Initial working directory: $PORTAGE_TMPDIR
17617 .TP
17618 .B src_unpack
17619 This function is used to unpack all the sources in \fIA\fR to \fIWORKDIR\fR.
17620 If not defined in the \fIebuild script\fR it calls \fIunpack ${A}\fR. Any
17621 patches and other pre configure/compile modifications should be done here.
17622 .br
17623 -Initial working directory of $WORKDIR.
17624 +Initial working directory: $WORKDIR
17625 .TP
17626 .B src_prepare
17627 All preparation of source code, such as application of patches, should be done
17628 here. This function is supported beginning with \fBEAPI 2\fR.
17629 .br
17630 -Initial working directory of $S.
17631 +Initial working directory: $S
17632 .TP
17633 .B src_configure
17634 All necessary steps for configuration should be done here. This function is
17635 supported beginning with \fBEAPI 2\fR.
17636 .br
17637 -Initial working directory of $S.
17638 +Initial working directory: $S
17639 .TP
17640 .B src_compile
17641 With less than \fBEAPI 2\fR, all necessary steps for both configuration and
17642 compilation should be done here. Beginning with \fBEAPI 2\fR, only compilation
17643 steps should be done here.
17644 .br
17645 -Initial working directory of $S.
17646 +Initial working directory: $S
17647 .TP
17648 .B src_test
17649 -Run all package specific test cases. The default is to run 'make check'
17650 -followed 'make test'.
17651 +Run all package specific test cases. The default is to run
17652 +\'emake check\' followed by \'emake test\'. Prior to \fBEAPI 5\fR,
17653 +the default src_test implementation will automatically pass the
17654 +\-j1 option as the last argument to emake, and beginning with
17655 +\fBEAPI 5\fR it will allow the tests to run in parallel.
17656 .br
17657 -Initial working directory of $S.
17658 +Initial working directory: $S
17659 .TP
17660 .B src_install
17661 Should contain everything required to install the package in the temporary
17662 \fIinstall directory\fR.
17663 .br
17664 -Initial working directory of $S.
17665 +Initial working directory: $S
17666
17667 Beginning with \fBEAPI 4\fR, if src_install is undefined then the
17668 following default implementation is used:
17669 @@ -742,18 +926,20 @@ All modifications required on the live\-filesystem before and after the
17670 package is merged should be placed here. Also commentary for the user
17671 should be listed here as it will be displayed last.
17672 .br
17673 -Initial working directory of $PWD.
17674 +Initial working directory: $PWD
17675 .TP
17676 .B pkg_prerm pkg_postrm
17677 Like the pkg_*inst functions but for unmerge.
17678 .br
17679 -Initial working directory of $PWD.
17680 +Initial working directory: $PWD
17681 .TP
17682 .B pkg_config
17683 This function should contain optional basic configuration steps.
17684 .br
17685 -Initial working directory of $PWD.
17686 -.SH "HELPER FUNCTIONS: PHASES"
17687 +Initial working directory: $PWD
17688 +
17689 +.SH "HELPER FUNCTIONS"
17690 +.SS "Phases:"
17691 .TP
17692 .B default
17693 Calls the default phase function implementation for the currently executing
17694 @@ -772,7 +958,6 @@ l
17695 _
17696 l.
17697 Default Phase Functions
17698 -
17699 default_pkg_nofetch
17700 default_src_unpack
17701 default_src_prepare
17702 @@ -781,9 +966,10 @@ default_src_compile
17703 default_src_test
17704 .TE
17705 .RE
17706 -.SH "HELPER FUNCTIONS: GENERAL"
17707 +
17708 +.SS "General:"
17709 .TP
17710 -\fBdie\fR \fI[reason]\fR
17711 +.B die\fR \fI[reason]
17712 Causes the current emerge process to be aborted. The final display will
17713 include \fIreason\fR.
17714
17715 @@ -791,11 +977,11 @@ Beginning with \fBEAPI 4\fR, all helpers automatically call \fBdie\fR
17716 whenever some sort of error occurs. Helper calls may be prefixed with
17717 the \fBnonfatal\fR helper in order to prevent errors from being fatal.
17718 .TP
17719 -\fBnonfatal\fR \fI<helper>\fR
17720 +.B nonfatal\fR \fI<helper>
17721 Execute \fIhelper\fR and \fIdo not\fR call die if it fails.
17722 The \fBnonfatal\fR helper is available beginning with \fBEAPI 4\fR.
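+
+Example (sketch):
+.nf
+	nonfatal emake check || ewarn "test suite failed"
+.fi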
17723 .TP
17724 -\fBuse\fR \fI<USE item>\fR
17725 +.B use\fR \fI<USE item>
17726 If \fIUSE item\fR is in the \fBUSE\fR variable, the function will silently
17727 return 0 (aka shell true). If \fIUSE item\fR is not in the \fBUSE\fR
17728 variable, the function will silently return 1 (aka shell false). \fBusev\fR
17729 @@ -817,17 +1003,26 @@ fi
17730 .fi
17731 .RE
17732 .TP
17733 -\fBuse_with\fR \fI<USE item>\fR \fI[configure name]\fR \fI[configure opt]\fR
17734 +.B usev\fR \fI<USE item>
17735 +Like \fBuse\fR, but also echoes \fIUSE item\fR when \fBuse\fR returns true.
17736 +.TP
17737 +.B usex\fR \fI<USE flag>\fR \fI[true output]\fR \fI[false output]\fR \fI[true \
17738 +suffix]\fR \fI[false suffix]
17739 +If USE flag is set, echo [true output][true suffix] (defaults to
17740 +"yes"), otherwise echo [false output][false suffix] (defaults to
17741 +"no"). The usex helper is available beginning with \fBEAPI 5\fR.
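+
+Example (sketch):
+.nf
+	myconf="$(usex debug \-\-enable\-debug \-\-disable\-debug)"
+.fi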
17742 +.TP
17743 +.B use_with\fR \fI<USE item>\fR \fI[configure name]\fR \fI[configure opt]
17744 Useful for creating custom options to pass to a configure script. If \fIUSE
17745 item\fR is in the \fBUSE\fR variable and a \fIconfigure opt\fR is specified,
17746 -then the string \fI\-\-with\-[configure name]=[configure opt]\fR will be echoed.
17747 -If \fIconfigure opt\fR is not specified, then just \fI\-\-with\-[configure
17748 -name]\fR will be echoed. If \fIUSE item\fR is not in the \fBUSE\fR variable,
17749 -then the string \fI\-\-without\-[configure name]\fR will be echoed. If
17750 -\fIconfigure name\fR is not specified, then \fIUSE item\fR will be used in
17751 -its place. Beginning with \fBEAPI 4\fR, an empty \fIconfigure opt\fR argument
17752 -is recognized. In \fBEAPI 3\fR and earlier, an empty \fIconfigure opt\fR
17753 -argument is treated as if it weren't provided.
17754 +then the string \fI\-\-with\-[configure name]=[configure opt]\fR will be
17755 +echoed. If \fIconfigure opt\fR is not specified, then just
17756 +\fI\-\-with\-[configure name]\fR will be echoed. If \fIUSE item\fR is not in
17757 +the \fBUSE\fR variable, then the string \fI\-\-without\-[configure name]\fR
17758 +will be echoed. If \fIconfigure name\fR is not specified, then \fIUSE item\fR
17759 +will be used in its place. Beginning with \fBEAPI 4\fR, an empty \fIconfigure
17760 +opt\fR argument is recognized. In \fBEAPI 3\fR and earlier, an empty
17761 +\fIconfigure opt\fR argument is treated as if it weren't provided.
17762 .RS
17763 .TP
17764 .I Examples:
17765 @@ -850,88 +1045,96 @@ myconf=$(use_with sdl SDL all\-plugins)
17766 .fi
17767 .RE
17768 .TP
17769 -\fBuse_enable\fR \fI<USE item>\fR \fI[configure name]\fR \fI[configure opt]\fR
17770 +.B use_enable\fR \fI<USE item>\fR \fI[configure name]\fR \fI[configure opt]
17771 Same as \fBuse_with\fR above, except that the configure options are
17772 -\fI\-\-enable\-\fR instead of \fI\-\-with\-\fR and \fI\-\-disable\-\fR instead of
17773 -\fI\-\-without\-\fR. Beginning with \fBEAPI 4\fR, an empty \fIconfigure opt\fR
17774 -argument is recognized. In \fBEAPI 3\fR and earlier, an empty
17775 +\fI\-\-enable\-\fR instead of \fI\-\-with\-\fR and \fI\-\-disable\-\fR instead
17776 +of \fI\-\-without\-\fR. Beginning with \fBEAPI 4\fR, an empty \fIconfigure
17777 +opt\fR argument is recognized. In \fBEAPI 3\fR and earlier, an empty
17778 \fIconfigure opt\fR argument is treated as if it weren't provided.
17779 .TP
17780 -\fBhasv\fR \fI<item>\fR \fI<item list>\fR
17781 -If \fIitem\fR is in \fIitem list\fR, then \fIitem\fR is echoed and \fBhasv\fR
17782 -returns 0. Otherwise, nothing is echoed and 1 is returned. As indicated with
17783 -use, there is a non\-echoing version \fBhas\fR. Please use \fBhas\fR in all
17784 -places where output is to be disregarded. Never use the output for calculation.
17785 +.B has\fR \fI<item>\fR \fI<item list>
17786 +If \fIitem\fR is in \fIitem list\fR, then \fBhas\fR returns
17787 +0. Otherwise, 1 is returned. There is another version, \fBhasv\fR, that
17788 +will conditionally echo \fIitem\fR.
17789 .br
17790 The \fIitem list\fR is delimited by the \fIIFS\fR variable. This variable
17791 has a default value of ' ', or a space. It is a \fBbash\fR(1) setting.
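+
+Example (sketch):
+.nf
+	if has ccache ${FEATURES}; then
+		einfo "ccache is active"
+	fi
+.fi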
17792 .TP
17793 -\fBhas_version\fR \fI<category/package\-version>\fR
17794 +.B hasv\fR \fI<item>\fR \fI<item list>
17795 +Like \fBhas\fR, but also echoes \fIitem\fR when \fBhas\fR returns true.
17796 +.TP
17797 +.B has_version\fR \fI[\-\-host\-root]\fR \fI<category/package\-version>
17798 Check to see if \fIcategory/package\-version\fR is installed on the system.
17799 The parameter accepts all values that are acceptable in the \fBDEPEND\fR
17800 variable. The function returns 0 if \fIcategory/package\-version\fR is
17801 -installed, 1 otherwise.
17802 +installed, 1 otherwise. Beginning with \fBEAPI 5\fR, the
17803 +\-\-host\-root option may be used in order to cause the query
17804 +to apply to the host root instead of ${ROOT}.
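+
+Example (sketch):
+.nf
+	if has_version ">=sys\-libs/zlib\-1.2.3"; then
+		elog "a recent zlib is already installed"
+	fi
+.fi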
17805 .TP
17806 -\fBbest_version\fR \fI<package name>\fR
17807 +.B best_version\fR \fI[\-\-host\-root]\fR \fI<package name>
17808 This function will look up \fIpackage name\fR in the database of currently
17809 installed programs and echo the "best version" of the package that is
17810 -currently installed.
17811 -.RS
17812 -.TP
17813 -.I Example:
17814 -VERINS="$(best_version net\-ftp/glftpd)"
17815 -.br
17816 -(VERINS now has the value "net\-ftp/glftpd\-1.27" if glftpd\-1.27 is installed)
17817 -.RE
17818 -.SH "HELPER FUNCTIONS: HOOKS"
17819 +currently installed. Beginning with \fBEAPI 5\fR, the
17820 +\-\-host\-root option may be used in order to cause the query
17821 +to apply to the host root instead of ${ROOT}.
17822 +
17823 +Example:
17824 +.nf
17825 + VERINS="$(best_version net\-ftp/glftpd)"
17826 + (VERINS now has the value "net\-ftp/glftpd\-1.27" if glftpd\-1.27 is \
17827 + installed)
17828 +.fi
17829 +
17830 +.SS "Hooks:"
17831 .TP
17832 -\fBregister_die_hook\fR \fI[list of function names]\fR
17833 +.B register_die_hook\fR \fI[list of function names]
17834 Register one or more functions to call when the ebuild fails for any reason,
17835 including file collisions with other packages.
17836 .TP
17837 -\fBregister_success_hook\fR \fI[list of function names]\fR
17838 +.B register_success_hook\fR \fI[list of function names]
17839 Register one or more functions to call when the ebuild builds and/or installs
17840 successfully.
17841 +
17842 +.SS "Output:"
17843 .TP
17844 -.RE
17845 -.SH "HELPER FUNCTIONS: OUTPUT"
17846 -.TP
17847 -\fBeinfo\fR \fI"disposable message"\fR
17848 +.B einfo\fR \fI"disposable message"
17849 Same as \fBelog\fR, but should be used when the message isn't important to the
17850 user (like progress or status messages during the build process).
17851 .TP
17852 -\fBelog\fR \fI"informative message"\fR
17853 +.B elog\fR \fI"informative message"
17854 If you need to display a message that you wish the user to read and take
17855 notice of, then use \fBelog\fR. It works just like \fBecho\fR(1), but
17856 adds a little more to the output so as to catch the user's eye. The message
17857 will also be logged by portage for later review.
17858 .TP
17859 -\fBewarn\fR \fI"warning message"\fR
17860 +.B ewarn\fR \fI"warning message"
17861 Same as \fBeinfo\fR, but should be used when showing a warning to the user.
17862 .TP
17863 -\fBeqawarn\fR \fI"QA warning message"\fR
17864 +.B eqawarn\fR \fI"QA warning message"
17865 Same as \fBeinfo\fR, but should be used when showing a QA warning to the user.
17866 .TP
17867 -\fBeerror\fR \fI"error message"\fR
17868 +.B eerror\fR \fI"error message"
17869 Same as \fBeinfo\fR, but should be used when showing an error to the user.
17870 .TP
17871 -\fBebegin\fR \fI"helpful message"\fR
17872 +.B ebegin\fR \fI"helpful message"
17873 Like \fBeinfo\fR, we output a \fIhelpful message\fR and then hint that the
17874 following operation may take some time to complete. Once the task is
17875 finished, you need to call \fBeend\fR.
17876 .TP
17877 -\fBeend\fR \fI<status>\fR \fI["error message"]\fR
17878 +.B eend\fR \fI<status>\fR \fI["error message"]
17879 Followup the \fBebegin\fR message with an appropriate "OK" or "!!" (for
17880 errors) marker. If \fIstatus\fR is non\-zero, then the additional \fIerror
17881 message\fR is displayed.
17882 -.SH "HELPER FUNCTIONS: UNPACK"
17883 +
17884 +.SS "Unpack:"
17885 .TP
17886 -\fBunpack\fR \fI<source>\fR \fI[list of more sources]\fR
17887 +.B unpack\fR \fI<source>\fR \fI[list of more sources]
17888 This function uncompresses and/or untars a list of sources into the current
17889 directory. The function will append \fIsource\fR to the \fBDISTDIR\fR variable.
17890 -.SH "HELPER FUNCTIONS: COMPILE"
17891 +
17892 +.SS "Compile:"
17893 .TP
17894 -\fBeconf\fR \fI[configure options]\fR
17895 +.B econf\fR \fI[configure options]
17896 This is used as a replacement for configure. Performs:
17897 .nf
17898 ${\fIECONF_SOURCE\fR:-.}/configure \\
17899 @@ -958,21 +1161,26 @@ Beginning with \fBEAPI 4\fR, \fBeconf\fR adds
17900 \fI\-\-disable\-dependency\-tracking\fR to the arguments if the
17901 string \fIdisable\-dependency\-tracking\fR occurs in the output
17902 of \fIconfigure \-\-help\fR.
17903 +Beginning with \fBEAPI 5\fR, \fBeconf\fR adds
17904 +\fIdisable\-silent\-rules\fR to the arguments if the
17905 +string \fIdisable\-silent\-rules\fR occurs in the output
17906 +of \fIconfigure \-\-help\fR.
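+
+Example (sketch; the configure options are illustrative):
+.nf
+	src_configure() {
+		econf \-\-disable\-static $(use_enable nls)
+	}
+.fi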
17907 .TP
17908 -\fBemake\fR \fI[make options]\fR
17909 +.B emake\fR \fI[make options]
17910 This is used as a replacement for make. Performs 'make ${MAKEOPTS}
17911 \fImake options\fR' (as set in make.globals), default is MAKEOPTS="\-j2".
17912
17913 -\fB***warning***\fR
17914 +.B ***WARNING***
17915 .br
17916 if you are going to use \fBemake\fR, make sure your build is happy with
17917 parallel makes (make \-j2). It should be tested thoroughly as parallel
17918 makes are notorious for failing _sometimes_ but not always. If you determine
17919 that your package fails to build in parallel, and you are unable to resolve
17920 the issue, then you should run '\fBemake\fR \-j1' instead of 'make'.
17921 -.SH "HELPER FUNCTIONS: INSTALL"
17922 +
17923 +.SS "Install:"
17924 .TP
17925 -\fBeinstall\fR \fI[make options]\fR
17926 +.B einstall\fR \fI[make options]
17927 This is used as a replacement for make install. Performs:
17928 .nf
17929 make \\
17930 @@ -1029,11 +1237,11 @@ Strips all executable files of debugging symbols. This includes libraries.
17931 .RE
17932
17933 .TP
17934 -\fBprepinfo\fR \fI[dir]\fR
17935 +.B prepinfo\fR \fI[dir]
17936 .TP
17937 -\fBprepman\fR \fI[dir]\fR
17938 +.B prepman\fR \fI[dir]
17939 .TP
17940 -\fBprepstrip\fR \fI[dir]\fR
17941 +.B prepstrip\fR \fI[dir]
17942 .PD 1
17943 Similar to the \fBprepall\fR functions, these are subtle in their differences.
17944 .RS
17945 @@ -1055,7 +1263,7 @@ multiple directories.
17946 .RE
17947 .PD 1
17948 .TP
17949 -\fBdocompress\fR \fI[\-x] <path> [list of more paths]\fR
17950 +.B docompress\fR \fI[\-x] <path> [list of more paths]
17951 .RS
17952 Beginning with \fBEAPI 4\fR, the \fBdocompress\fR helper is used to
17953 manage lists of files to be included or excluded from optional compression.
17954 @@ -1094,7 +1302,7 @@ If the item does not exist, it is ignored.
17955 .RE
17956 .RE
17957 .TP
17958 -\fBdosed\fR \fI"s:orig:change:g" <filename>\fR
17959 +.B dosed\fR \fI"s:orig:change:g" <filename>
17960 Beginning with \fBEAPI 4\fR, the \fBdosed\fR helper no longer exists. Ebuilds
17961 should call \fBsed(1)\fR directly (and assume that it is GNU sed).
17962
17963 @@ -1105,66 +1313,66 @@ that this expression does \fBNOT\fR use the offset prefix.
17964 .BR 'dosed\ "s:/usr/local:/usr:g"\ /usr/bin/some\-script'
17965 runs sed on ${ED}/usr/bin/some\-script
17966 .TP
17967 -\fBdodir\fR \fI<path> [more paths]\fR
17968 +.B dodir\fR \fI<path> [more paths]
17969 Creates directories inside of ${ED}.
17970 .br
17971 .BR 'dodir\ /usr/lib/apache'
17972 creates ${ED}/usr/lib/apache. Note that the do* functions will run
17973 \fBdodir\fR for you.
17974 .TP
17975 -\fBdiropts\fR \fI[options for install(1)]\fR
17976 +.B diropts\fR \fI[options for install(1)]
17977 Can be used to define options for the install function used in
17978 \fBdodir\fR. The default is \fI\-m0755\fR.
17979 .TP
17980 -\fBinto\fR \fI<path>\fR
17981 +.B into\fR \fI<path>
17982 Sets the root (\fIDESTTREE\fR) for other functions like \fBdobin\fR,
17983 \fBdosbin\fR, \fBdoman\fR, \fBdoinfo\fR, \fBdolib\fR.
17984 .br
17985 The default root is /usr.
17986 .TP
17987 -\fBkeepdir\fR \fI<path> [more paths]\fR
17988 +.B keepdir\fR \fI<path> [more paths]
17989 Tells portage to leave directories behind even if they're empty. Functions
17990 the same as \fBdodir\fR.
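
A small sketch of dodir, diropts and keepdir working together (the paths are made up for the example):

    src_install() {
        diropts -m0750
        dodir /var/lib/${PN}
        # keep this directory even though it is installed empty
        keepdir /var/lib/${PN}/cache
    }
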
17991 .TP
17992 -\fBdobin\fR \fI<binary> [list of more binaries]\fR
17993 +.B dobin\fR \fI<binary> [list of more binaries]
17994 Installs a \fIbinary\fR or a list of binaries into \fIDESTTREE\fR/bin.
17995 Creates all necessary dirs.
17996 .TP
17997 -\fBdosbin\fR \fI<binary> [list of more binaries]\fR
17998 +.B dosbin\fR \fI<binary> [list of more binaries]
17999 Installs a \fIbinary\fR or a list of binaries into \fIDESTTREE\fR/sbin.
18000 Creates all necessary dirs.
18001 .TP
18002 -\fBdoinitd\fR \fI<init.d script> [list of more init.d scripts]\fR
18003 +.B doinitd\fR \fI<init.d script> [list of more init.d scripts]
18004 Install Gentoo \fIinit.d scripts\fR. They will be installed into the
18005 correct location for Gentoo init.d scripts (/etc/init.d/). Creates all
18006 necessary dirs.
18007 .TP
18008 -\fBdoconfd\fR \fI<conf.d file> [list of more conf.d file]\fR
18009 +.B doconfd\fR \fI<conf.d file> [list of more conf.d files]
18010 Install Gentoo \fIconf.d files\fR. They will be installed into the
18011 correct location for Gentoo conf.d files (/etc/conf.d/). Creates all
18012 necessary dirs.
18013 .TP
18014 -\fBdoenvd\fR \fI<env.d entry> [list of more env.d entries]\fR
18015 +.B doenvd\fR \fI<env.d entry> [list of more env.d entries]
18016 Install Gentoo \fIenv.d entries\fR. They will be installed into the
18017 correct location for Gentoo env.d entries (/etc/env.d/). Creates all
18018 necessary dirs.
18019
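
A minimal sketch of installing the init.d/conf.d/env.d pieces described above; the file names under ${FILESDIR} are hypothetical:

    src_install() {
        doinitd "${FILESDIR}"/${PN}.initd
        doconfd "${FILESDIR}"/${PN}.confd
        doenvd "${FILESDIR}"/99${PN}
    }
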
18020 .PD 0
18021 .TP
18022 -\fBdolib\fR \fI<library>\fR \fI[list of more libraries]\fR
18023 +.B dolib\fR \fI<library>\fR \fI[list of more libraries]
18024 .TP
18025 -\fBdolib.a\fR \fI<library>\fR \fI[list of more libraries]\fR
18026 +.B dolib.a\fR \fI<library>\fR \fI[list of more libraries]
18027 .TP
18028 -\fBdolib.so\fR \fI<library>\fR \fI[list of more libraries]\fR
18029 +.B dolib.so\fR \fI<library>\fR \fI[list of more libraries]
18030 .PD 1
18031 Installs a library or a list of libraries into \fIDESTTREE\fR/lib.
18032 Creates all necessary dirs.
18033 .TP
18034 -\fBlibopts\fR \fI[options for install(1)]\fR
18035 +.B libopts\fR \fI[options for install(1)]
18036 Can be used to define options for the install function used in
18037 the \fBdolib\fR functions. The default is \fI\-m0644\fR.
18038 .TP
18039 -\fBdoman\fR \fI[\-i18n=<locale>]\fR \fI<man\-page> [list of more man\-pages]\fR
18040 +.B doman\fR \fI[\-i18n=<locale>]\fR \fI<man\-page> [list of more man\-pages]
18041 Installs manual\-pages into /usr/share/man/man[0\-9n] depending on the
18042 manual file ending. The files are compressed if they are not already. You
18043 can specify locale\-specific manpages with the \fI\-i18n\fR option. Then the
18044 @@ -1177,135 +1385,186 @@ foo.\fI<locale>\fR.1 will be installed as
18045 /usr/share/man/\fI<locale>\fR/man1/foo.1. Beginning with \fBEAPI 4\fR,
18046 the \fI\-i18n\fR option takes precedence over the locale suffix of the
18047 file name.
18048 +
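
A sketch of doman with and without the \-i18n option (the manpage file names are invented for the example):

    src_install() {
        doman man/foo.1
        # installs into /usr/share/man/de/man1/foo.1
        doman -i18n=de man/de/foo.1
    }
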
18049 .PD 0
18050 .TP
18051 -\fBdohard\fR \fI<filename> <linkname>\fR
18052 +.B dohard\fR \fI<filename> <linkname>
18053 Beginning with \fBEAPI 4\fR, the \fBdohard\fR helper no longer exists. Ebuilds
18054 should call \fBln(1)\fR directly.
18055 .TP
18056 -\fBdosym\fR \fI<filename> <linkname>\fR
18057 +.B dosym\fR \fI<filename> <linkname>
18058 .PD 1
18059 Performs the ln command to create a symlink.
18060 .TP
18061 -\fBdohtml\fR \fI [\-a filetypes] [\-r] [\-x list\-of\-dirs\-to\-ignore] [list\-of\-files\-and\-dirs]\fR
18062 +.B doheader\fR \fI[\-r] <file> [list of more files]
18063 +Installs the given header files into /usr/include/, by default
18064 +with file mode \fI0644\fR (this can be overridden with the
18065 +\fBinsopts\fR function). Setting \-r sets recursive. The
18066 +\fBdoheader\fR helper is available beginning with \fBEAPI 5\fR.
18067 +.TP
18068 +.B dohtml\fR \fI [\-a filetypes] [\-r] [\-x list\-of\-dirs\-to\-ignore] \
18069 +[list\-of\-files\-and\-dirs]
18070 Installs the files in the list of files (space\-separated list) into
18071 -/usr/share/doc/${PF}/html provided the file ends in .htm, .html, .css, .js, .gif, .jpeg, .jpg, or .png.
18072 +/usr/share/doc/${PF}/html provided the file ends in .htm, .html, .css, .js, \
18073 +.gif, .jpeg, .jpg, or .png.
18074 Setting \fI\-a\fR limits what types of files will be included,
18075 \fI\-A\fR appends to the default list, setting \fI\-x\fR sets which dirs to
18076 -exclude (CVS excluded by default), \fI\-p\fR sets a document prefix, \fI\-r\fR sets recursive.
18077 +exclude (CVS excluded by default), \fI\-p\fR sets a document prefix,
18078 +\fI\-r\fR sets recursive.
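
A hedged sketch of the doheader (EAPI 5) and dohtml helpers just described, using hypothetical source paths:

    src_install() {
        doheader -r include/${PN}
        dohtml -r doc/html
    }
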
18079 .TP
18080 -\fBdoinfo\fR \fI<info\-file> [list of more info\-files]\fR
18081 +.B doinfo\fR \fI<info\-file> [list of more info\-files]
18082 Installs info\-pages into \fIDESTDIR\fR/info. Files are automatically
18083 gzipped. Creates all necessary dirs.
18084 .TP
18085 -\fBdomo\fR \fI<locale\-file> [list of more locale\-files] \fR
18086 +.B domo\fR \fI<locale\-file> [list of more locale\-files]
18087 Installs locale\-files into \fIDESTDIR\fR/usr/share/locale/[LANG]
18088 depending on local\-file's ending. Creates all necessary dirs.
18089
18090 .PD 0
18091 .TP
18092 -\fBfowners\fR \fI<permissions> <file> [files]\fR
18093 +.B fowners\fR \fI<permissions> <file> [files]
18094 .TP
18095 -\fBfperms\fR \fI<permissions> <file> [files]\fR
18096 +.B fperms\fR \fI<permissions> <file> [files]
18097 .PD 1
18098 Performs chown (\fBfowners\fR) or chmod (\fBfperms\fR), applying
18099 \fIpermissions\fR to \fIfiles\fR.
18100 .TP
18101 -\fBinsinto\fR \fI[path]\fR
18102 +.B insinto\fR \fI[path]
18103 Sets the destination path for the \fBdoins\fR function.
18104 .br
18105 The default path is /.
18106 .TP
18107 -\fBinsopts\fR \fI[options for install(1)]\fR
18108 +.B insopts\fR \fI[options for install(1)]
18109 Can be used to define options for the install function used in
18110 \fBdoins\fR. The default is \fI\-m0644\fR.
18111 .TP
18112 -\fBdoins\fR \fI[\-r] <file> [list of more files]\fR
18113 +.B doins\fR \fI[\-r] <file> [list of more files]
18114 Installs files into the path controlled by \fBinsinto\fR. This function
18115 uses \fBinstall\fR(1). Creates all necessary dirs.
18116 Setting \-r sets recursive. Beginning with \fBEAPI 4\fR, both
18117 \fBdoins\fR and \fBnewins\fR preserve symlinks. In \fBEAPI 3\fR and
18118 earlier, symlinks are dereferenced rather than preserved.
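
As a sketch, insinto, insopts and doins are typically combined like this (the data directory is an assumption for the example):

    src_install() {
        insinto /usr/share/${PN}
        insopts -m0644
        doins -r data/
    }
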
18119 .TP
18120 -\fBexeinto\fR \fI[path]\fR
18121 +.B exeinto\fR \fI[path]
18122 Sets the destination path for the \fBdoexe\fR function.
18123 .br
18124 The default path is /.
18125 .TP
18126 -\fBexeopts\fR \fI[options for install(1)]\fR
18127 +.B exeopts\fR \fI[options for install(1)]
18128 Can be used to define options for the install function used in \fBdoexe\fR.
18129 The default is \fI\-m0755\fR.
18130 .TP
18131 -\fBdoexe\fR \fI<executable> [list of more executables]\fR
18132 +.B doexe\fR \fI<executable> [list of more executables]
18133 Installs executables into the path controlled by \fBexeinto\fR. This function
18134 uses \fBinstall\fR(1). Creates all necessary dirs.
18135 .TP
18136 -\fBdocinto\fR \fI[path]\fR
18137 +.B docinto\fR \fI[path]
18138 Sets the subdir used by \fBdodoc\fR and \fBdohtml\fR
18139 when installing into the document tree
18140 (based in /usr/share/doc/${PF}/). Default is no subdir, or just "".
18141 .TP
18142 -\fBdodoc\fR \fI[-r] <document> [list of more documents]\fR
18143 -Installs a document or a list of documents into /usr/share/doc/${PF}/\fI<docinto path>\fR.
18144 +.B dodoc\fR \fI[-r] <document> [list of more documents]
18145 +Installs a document or a list of documents into
18146 +/usr/share/doc/${PF}/\fI<docinto path>\fR.
18147 Documents are marked for compression. Creates all necessary dirs.
18148 Beginning with \fBEAPI 4\fR, there is support for recursion, enabled by the
18149 new \fI\-r\fR option.
18150
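
A short sketch of docinto together with recursive dodoc (EAPI 4 or later; the examples/ directory is made up):

    src_install() {
        dodoc README NEWS
        docinto examples
        dodoc -r examples/
    }
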
18151 .PD 0
18152 .TP
18153 -\fBnewbin\fR \fI<old file> <new filename>\fR
18154 +.B newbin\fR \fI<old file> <new filename>
18155 .TP
18156 -\fBnewsbin\fR \fI<old file> <new filename>\fR
18157 +.B newsbin\fR \fI<old file> <new filename>
18158 .TP
18159 -\fBnewinitd\fR \fI<old file> <new filename>\fR
18160 +.B newinitd\fR \fI<old file> <new filename>
18161 .TP
18162 -\fBnewconfd\fR \fI<old file> <new filename>\fR
18163 +.B newconfd\fR \fI<old file> <new filename>
18164 .TP
18165 -\fBnewenvd\fR \fI<old file> <new filename>\fR
18166 +.B newenvd\fR \fI<old file> <new filename>
18167 .TP
18168 -\fBnewlib.so\fR \fI<old file> <new filename>\fR
18169 +.B newlib.so\fR \fI<old file> <new filename>
18170 .TP
18171 -\fBnewlib.a\fR \fI<old file> <new filename>\fR
18172 +.B newlib.a\fR \fI<old file> <new filename>
18173 .TP
18174 -\fBnewman\fR \fI<old file> <new filename>\fR
18175 +.B newman\fR \fI<old file> <new filename>
18176 .TP
18177 -\fBnewinfo\fR \fI<old file> <new filename>\fR
18178 +.B newins\fR \fI<old file> <new filename>
18179 .TP
18180 -\fBnewins\fR \fI<old file> <new filename>\fR
18181 +.B newexe\fR \fI<old file> <new filename>
18182 .TP
18183 -\fBnewexe\fR \fI<old file> <new filename>\fR
18184 -.TP
18185 -\fBnewdoc\fR \fI<old file> <new filename>\fR
18186 +.B newdoc\fR \fI<old file> <new filename>
18187 .PD 1
18188 All these functions act like the do* functions, but they only work with one
18189 file and the file is installed as \fI[new filename]\fR.
18190 -.SH "REPORTING BUGS"
18191 -Please report bugs via http://bugs.gentoo.org/
18192 -.SH "AUTHORS"
18193 +Beginning with \fBEAPI 5\fR, standard input is read when the
18194 +first parameter is \- (a hyphen).
18195 +
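
A sketch of the new* helpers, including the EAPI 5 stdin form described above; all file names are hypothetical:

    src_install() {
        newinitd "${FILESDIR}"/${PN}.rc ${PN}
        newbin build/${PN}-cli ${PN}
        # EAPI 5: the installed file's content is read from stdin
        echo "See /usr/share/doc/${PF} for details." | newdoc - README.Gentoo
    }
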
18196 +.SH "EXAMPLES"
18197 +.DS
18198 .nf
18199 -Achim Gottinger <achim@g.o>
18200 -Mark Guertin <gerk@g.o>
18201 -Nicholas Jones <carpaski@g.o>
18202 -Mike Frysinger <vapier@g.o>
18203 -Arfrever Frehtes Taifersar Arahesis <Arfrever.FTA@×××××.com>
18204 -Fabian Groffen <grobian@g.o>
18205 +# Copyright 1999\-2013 Gentoo Foundation
18206 +# Distributed under the terms of the GNU General Public License v2
18207 +# $Header: $
18208 +
18209 +EAPI="5"
18210 +
18211 +inherit some_eclass another_eclass
18212 +
18213 +DESCRIPTION="Super\-useful stream editor (sed)"
18214 +HOMEPAGE="http://www.gnu.org/software/sed/sed.html"
18215 +SRC_URI="ftp://alpha.gnu.org/pub/gnu/${PN}/${P}.tar.gz"
18216 +
18217 +LICENSE="GPL\-2"
18218 +SLOT="0"
18219 +KEYWORDS="~x86"
18220 +IUSE="nls"
18221 +
18222 +RDEPEND=""
18223 +DEPEND="nls? ( sys-devel/gettext )"
18224 +
18225 +src_configure() {
18226 + econf \\
18227 + \-\-bindir="${EPREFIX}"/bin
18228 +}
18229 +
18230 +src_install() {
18231 + emake DESTDIR="${D}" install
18232 + dodoc NEWS README* THANKS AUTHORS BUGS ChangeLog
18233 +}
18234 .fi
18235 +.DE
18236 +
18237 .SH "FILES"
18238 .TP
18239 -The \fI/usr/sbin/ebuild.sh\fR script.
18240 +The \fI/usr/lib/portage/bin/ebuild.sh\fR script.
18241 .TP
18242 The helper apps in \fI/usr/lib/portage/bin\fR.
18243 .TP
18244 -.B /etc/make.conf
18245 -Contains variables for the build\-process and overwrites those in make.defaults.
18246 +.B /etc/portage/make.conf
18247 +Contains variables for the build\-process and overwrites those in
18248 +make.defaults.
18249 .TP
18250 .B /usr/share/portage/config/make.globals
18251 Contains the default variables for the build\-process, you should edit
18252 -\fI/etc/make.conf\fR instead.
18253 +\fI/etc/portage/make.conf\fR instead.
18254 .TP
18255 .B /etc/portage/color.map
18256 Contains variables customizing colors.
18257 +
18258 .SH "SEE ALSO"
18259 .BR ebuild (1),
18260 .BR make.conf (5),
18261 .BR color.map (5)
18262 +
18263 +.SH "REPORTING BUGS"
18264 +Please report bugs via http://bugs.gentoo.org/
18265 +
18266 +.SH "AUTHORS"
18267 +.nf
18268 +Achim Gottinger <achim@g.o>
18269 +Mark Guertin <gerk@g.o>
18270 +Nicholas Jones <carpaski@g.o>
18271 +Mike Frysinger <vapier@g.o>
18272 +Arfrever Frehtes Taifersar Arahesis <arfrever@××××××.org>
18273 +Fabian Groffen <grobian@g.o>
18274 +.fi
18275
18276 diff --git a/man/egencache.1 b/man/egencache.1
18277 index 9094595..f71feb3 100644
18278 --- a/man/egencache.1
18279 +++ b/man/egencache.1
18280 @@ -1,4 +1,4 @@
18281 -.TH "EGENCACHE" "1" "Oct 2010" "Portage VERSION" "Portage"
18282 +.TH "EGENCACHE" "1" "Jul 2013" "Portage VERSION" "Portage"
18283 .SH "NAME"
18284 egencache \- generate metadata cache for ebuild repositories
18285 .SH "SYNOPSIS"
18286 @@ -6,12 +6,13 @@ egencache \- generate metadata cache for ebuild repositories
18287 .I [options] --update [ATOM]\fR...
18288 .SH "DESCRIPTION"
18289 The egencache program generates metadata cache for ebuild repositories and
18290 -stores it in the \fImetadata/cache/\fR directory within the repository itself,
18291 -for distribution.
18292 +stores it in the \fImetadata/md5\-cache/\fR directory within the repository
18293 +itself, for distribution.
18294 .SH ACTIONS
18295 .TP
18296 .BR "\-\-update [ATOM] ... "
18297 -Update the \fImetadata/cache/\fR directory (generate metadata as necessary).
18298 +Update the \fImetadata/md5\-cache/\fR directory (generate metadata as
18299 +necessary).
18300 If no package atoms are specified then all will be updated. See ebuild(5)
18301 for the details on package atom syntax.
18302 .TP
18303 @@ -20,6 +21,12 @@ Update the ChangeLog files from SCM logs (supported only in git repos).
18304 .TP
18305 .BR "\-\-update\-use\-local\-desc"
18306 Update the \fIprofiles/use.local.desc\fR file from metadata.xml.
18307 +.TP
18308 +.BR "\-\-update\-manifests"
18309 +Update manifest files, and sign them if signing is enabled. This supports
18310 +parallelization if enabled via the \-\-jobs option. The \-\-thin\-manifests
18311 +and \-\-sign\-manifests options may be used to manually override layout.conf
18312 +settings.
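
For illustration, a hedged invocation that combines the new action with options documented below; the repository name and job count are arbitrary:

    egencache --repo=gentoo --update --update-manifests --jobs=4
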
18313 .SH OPTIONS
18314 .TP
18315 .BR "\-\-cache\-dir=CACHE_DIR"
18316 @@ -34,6 +41,12 @@ Location of portage config files.
18317 .br
18318 Defaults to /.
18319 .TP
18320 +.BR "\-\-gpg\-dir"
18321 +Override the PORTAGE_GPG_DIR variable.
18322 +.TP
18323 +.BR "\-\-gpg\-key"
18324 +Override the PORTAGE_GPG_KEY variable.
18325 +.TP
18326 .BR "\-\-ignore-default-opts"
18327 Causes \fIEGENCACHE_DEFAULT_OPTS\fR to be ignored.
18328 .TP
18329 @@ -45,21 +58,24 @@ Also see the related \fB\-\-load\-average\fR option.
18330 Specifies that maximum load allowed when spawning multiple jobs.
18331 .TP
18332 .BR "\-\-portdir=PORTDIR"
18333 -Override the portage tree location.
18334 +Override the PORTDIR variable. This option is deprecated in favor of
18335 +the \-\-repositories\-configuration option.
18336 .TP
18337 .BR "\-\-portdir\-overlay=PORTDIR_OVERLAY"
18338 -Override the PORTDIR_OVERLAY variable (requires that
18339 -\-\-repo is also specified).
18340 +Override the PORTDIR_OVERLAY variable. This option is deprecated in favor of
18341 +the \-\-repositories\-configuration option.
18342 .TP
18343 .BR "\-\-preserve\-comments"
18344 Preserve the comments found in the output use.local.desc file. This requires
18345 the output file to exist before egencache is called.
18346 .TP
18347 .BR "\-\-repo=REPO"
18348 -Name of the repo to operate on (default repo is located at \fBPORTDIR\fR).
18349 -The name should correspond the value of a \fBrepo_name\fR entry (see
18350 -\fBportage\fR(5)) from one of the repositories that is configured via the
18351 -\fBPORTDIR\fR or \fBPORTDIR_OVERLAY\fR variables (see \fBmake.conf\fR(5)).
18352 +Name of the repo to operate on. The name should correspond to the value of
18353 +a \fBrepo_name\fR entry (see \fBportage\fR(5)) from one of the repositories.
18354 +.TP
18355 +.BR "\-\-repositories\-configuration=REPOSITORIES_CONFIGURATION"
18356 +Override configuration of repositories. The argument of this option has
18357 +the same format as repos.conf (see \fBportage\fR(5)).
18358 .TP
18359 .BR "\-\-rsync"
18360 When used together with the \fB\-\-update\fR action, this enables a workaround
18361 @@ -72,6 +88,15 @@ This option should only be needed for distribution via something like
18362 more thorough mechanism which allows it to detect changed inode numbers
18363 (described in \fIracy-git.txt\fR in the git technical docs).
18364 .TP
18365 +.BR "\-\-sign\-manifests< y | n >"
18366 +Manually override layout.conf sign-manifests setting.
18367 +.TP
18368 +.BR "\-\-strict\-manifests< y | n >"
18369 +Manually override "strict" FEATURES setting.
18370 +.TP
18371 +.BR "\-\-thin\-manifests< y | n >"
18372 +Manually override layout.conf thin-manifests setting.
18373 +.TP
18374 .BR "\-\-tolerant"
18375 Exit successfully if only minor errors occurred, such as skipped cache
18376 updates due to ebuilds that either fail to source or are not sourced
18377 @@ -87,10 +112,10 @@ contains will be added to the beginning of the command line on every
18378 invocation. These options will not be added if the
18379 \fB\-\-ignore-default\-opts\fR option is specified.
18380 .SH "BUGS"
18381 -There are significant limitations associated with the metadata
18382 -cache format that is distributed in the \fImetadata/cache/\fR directory
18383 -of the repository. These limitations are related to the cache validation
18384 -mechanism. Currently, the validation mechanism involves comparison of
18385 +Prior to portage-2.1.11.32, the 'pms' cache format was enabled by default.
18386 +This 'pms' format, which is distributed in the \fImetadata/cache/\fR
18387 +directory of the repository, has significant limitations related to the
18388 +cache validation mechanism which involves comparison of
18389 a cache entry mtime to the mtime of the corresponding \fBebuild(5)\fR. This
18390 mechanism is unreliable in cases when eclass changes result in metadata
18391 changes, since no information about eclass state is available in the cache.
18392 @@ -102,11 +127,21 @@ implemented in \fBemerge\fR(1) \fB\-\-sync\fR which updates ebuild mtimes
18393 to match their corresponding cache entries (except for ebuilds that are
18394 modified relative to HEAD).
18395
18396 -In order to solve the above problems, a future extension
18397 -to the cache format will include additional
18398 -validation data in the form of digests for both the ebuild
18399 -and its inherited eclasses. Until the
18400 -cache format has been extended in this way, it is necessary to enable
18401 +In order to solve the above problems, the newer 'md5-dict' format has been
18402 +enabled by default since portage-2.1.11.32. This format is distributed in
18403 +the \fImetadata/md5-cache/\fR directory of the repository, and includes
18404 +additional validation data in the form of digests for both the ebuild
18405 +and its inherited eclasses. \fBWARNING:\fR Portage versions prior to
18406 +portage-2.1.11.14 will \fBNOT\fR recognize the 'md5-dict' format unless it is
18407 +explicitly listed in \fImetadata/layout.conf\fR (refer to \fBportage\fR(5)
18408 +for example usage).
18409 +
18410 +\fBWARNING:\fR For backward compatibility, the obsolete 'pms' cache format
18411 +will still be generated by default if the \fImetadata/cache/\fR directory
18412 +exists in the repository. It can also be explicitly enabled via the
18413 +cache\-formats setting in \fImetadata/layout.conf\fR (refer to \fBportage\fR(5)
18414 +for example usage). If the 'pms' cache format is enabled and the 'md5-dict'
18415 +format is not enabled, then it is necessary to enable
18416 \fBmetadata-transfer\fR in \fBFEATURES\fR (see \fBmake.conf(5)\fR).
18417 This causes intermediate cache (in a different format that includes
18418 eclass state) to be generated inside the directory which is configurable
18419 @@ -116,10 +151,11 @@ Please report bugs via http://bugs.gentoo.org/
18420 .SH "AUTHORS"
18421 .nf
18422 Zac Medico <zmedico@g.o>
18423 +Arfrever Frehtes Taifersar Arahesis <arfrever@××××××.org>
18424 .fi
18425 .SH "FILES"
18426 .TP
18427 -.B /etc/make.conf
18428 +.B /etc/portage/make.conf
18429 Contains variables.
18430 .SH "SEE ALSO"
18431 .BR emerge (1),
18432
18433 diff --git a/man/emaint.1 b/man/emaint.1
18434 index c588a0b..8356299 100644
18435 --- a/man/emaint.1
18436 +++ b/man/emaint.1
18437 @@ -26,9 +26,10 @@ Discard no longer installed config tracker entries.
18438 Discard merge lists saved for the \fBemerge\fR(1) \fB--resume\fR action.
18439 .TP
18440 .BR logs
18441 -Clean out old logs from the \fBPORT_LOGDIR\fR using the command \fBPORT_LOGDIR_CLEAN\fR
18442 -See the \fBmake.conf\fR(5) man page for additional information as well as enabling the
18443 -\fB'clean-logs'\fR feature in emerge to do this automatically.
18444 +Clean out old logs from the \fBPORT_LOGDIR\fR using the command
18445 +\fBPORT_LOGDIR_CLEAN\fR.
18446 +See the \fBmake.conf\fR(5) man page for additional information, as well as
18447 +for enabling the \fB'clean-logs'\fR feature in emerge to do this automatically.
18448 .TP
18449 .BR movebin
18450 Perform package move updates for binary packages located in \fBPKGDIR\fR.
18451 @@ -38,7 +39,7 @@ Perform package move updates for installed packages.
18452 .TP
18453 .BR world
18454 Fix problems in the \fIworld\fR file.
18455 -.SH DEFAULT OPTIONS
18456 +.SH DEFAULT OPTIONS
18457 .TP
18458 .B \-c, \-\-check
18459 Check for any problems that may exist. (all commands)
18460 @@ -51,10 +52,12 @@ Fix any problems that may exist. (not all commands)
18461 Cleans the logs from \fBPORT_LOGDIR\fR (logs command only)
18462 .TP
18463 .B \-p, \-\-pretend
18464 -Sets pretend mode (same as \-c, \-\-check) for use with the \-C, \-\-clean OPTION (logs command only)
18465 +Sets pretend mode (same as \-c, \-\-check) for use with the \-C, \-\-clean
18466 +OPTION (logs command only)
18467 .TP
18468 .B \-t NUM, \-\-time NUM
18469 -Changes the minimum age \fBNUM\fR (in days) of the logs to be listed or deleted. (logs command only)
18470 +Changes the minimum age \fBNUM\fR (in days) of the logs to be listed or
18471 +deleted. (logs command only)
18472 .SH "REPORTING BUGS"
18473 Please report bugs via http://bugs.gentoo.org/
18474 .SH AUTHORS
18475
18476 diff --git a/man/emerge.1 b/man/emerge.1
18477 index 7aa4622..abb0ed8 100644
18478 --- a/man/emerge.1
18479 +++ b/man/emerge.1
18480 @@ -1,10 +1,11 @@
18481 -.TH "EMERGE" "1" "Jun 2012" "Portage VERSION" "Portage"
18482 +.TH "EMERGE" "1" "Mar 2014" "Portage VERSION" "Portage"
18483 .SH "NAME"
18484 emerge \- Command\-line interface to the Portage system
18485 .SH "SYNOPSIS"
18486 .TP
18487 .BR emerge
18488 -[\fIoptions\fR] [\fIaction\fR] [\fIebuild\fR | \fItbz2file\fR | \fIfile\fR | \fI@set\fR | \fIatom\fR] ...
18489 +[\fIoptions\fR] [\fIaction\fR] [\fIebuild\fR | \fItbz2file\fR | \fIfile\fR |
18490 +\fI@set\fR | \fIatom\fR] ...
18491 .TP
18492 .BR emerge
18493 \fB\-\-sync\fR | \fB\-\-version\fR
18494 @@ -16,7 +17,7 @@ emerge \- Command\-line interface to the Portage system
18495 \fB\-\-search\fR \fIsomestring\fR
18496 .TP
18497 .BR emerge
18498 -\fB\-\-help\fR [\fB\-\-verbose\fR]
18499 +\fB\-\-help\fR
18500 .SH "DESCRIPTION"
18501 \fBemerge\fR is the definitive command\-line interface to the Portage
18502 system. It is primarily used for installing packages, and \fBemerge\fR
18503 @@ -48,7 +49,7 @@ so this syntax shouldn't be used.
18504 .TP
18505 .BR tbz2file
18506 A \fItbz2file\fR must be a valid .tbz2 created with \fBebuild
18507 -<package>\-<version>.ebuild package\fR or \fBemerge \-\-buildpkg
18508 +<package>\-<version>.ebuild package\fR or \fBemerge \-\-buildpkg
18509 [category/]<package>\fR or \fBquickpkg /var/db/pkg/<category>/<package>\fR.
18510 .TP
18511 .BR file
18512 @@ -72,20 +73,22 @@ on the current configuration. The default set configuration is located
18513 in the \fB/usr/share/portage/config/sets\fR directory.
18514 User sets may be created by placing files in the \fB/etc/portage/sets/\fR
18515 directory (see \fBportage\fR(5)). Note that a \fIset\fR
18516 -is generally used in conjunction with \fB\-\-update\fR. When used as
18517 +is generally used in conjunction with \fB\-\-update\fR. When used as
18518 arguments to \fBemerge\fR sets have to be prefixed with \fB@\fR to be
18519 recognized. Use the \fB\-\-list\-sets\fR action to display a list of
18520 available package sets.
18521 .TP
18522 .BR atom
18523 -An \fIatom\fR describes bounds on a package that you wish to install.
18524 +An \fIatom\fR describes bounds on a package that you wish to install.
18525 \fISee ebuild(5) for the details on atom syntax.\fR For example,
18526 -\fB>=dev\-lang/python\-2.2.1\-r2\fR matches the latest available version of
18527 -Python greater than or equal to 2.2.1\-r2. Similarly,
18528 -\fB<dev\-lang/python\-2.0\fR matches the latest available version of Python
18529 -before 2.0. Note that in many shells you will need to escape characters such
18530 -as '<' and '='; use single\- or double\-quotes around the \fIatom\fR
18531 -to get around escaping problems.
18532 +\fB>=dev\-lang/python\-2.2.1\-r2\fR matches the latest available version of
18533 +Python greater than or equal to 2.2.1\-r2. Similarly,
18534 +\fB<dev\-lang/python\-2.0\fR matches the latest available version of Python
18535 +before 2.0. Note that in many shells you will need to escape characters such
18536 +as '<' and '='; use single\- or double\-quotes around the \fIatom\fR
18537 +to get around escaping problems. You may also constrain an atom to match a
18538 +specific \fBSLOT\fR by appending a colon and a \fBSLOT\fR. Example:
18539 +\fBx11\-libs/qt:3\fR.
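
A hedged shell example of the atom forms described above; the quotes keep the shell from interpreting '<' and '=':

    emerge --ask '>=dev-lang/python-2.2.1-r2'
    emerge --ask 'x11-libs/qt:3'
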
18540 .SH "ACTIONS"
18541 .TP
18542 .BR "No action"
18543 @@ -100,18 +103,20 @@ later updating.
18544 .TP
18545 .BR \-\-check\-news
18546 Scan all repositories for relevant unread GLEP 42 news items, and display
18547 -how many are found. See \fIhttp://www.gentoo.org/proj/en/glep/glep-0042.html\fR.
18548 +how many are found. See
18549 +\fIhttp://www.gentoo.org/proj/en/glep/glep-0042.html\fR.
18550 .TP
18551 .BR \-\-clean
18552 Cleans up the system by examining the installed packages and removing older
18553 -packages. This is accomplished by looking at each installed package and separating
18554 -the installed versions by \fBslot\fR. Clean will \fBremove all but the most recently
18555 -installed version in each \fbslot\fR. Clean should not remove unslotted packages.
18556 -Note: Most recently installed means most \fBrecent\fR, not highest version.
18557 +packages. This is accomplished by looking at each installed package and
18558 +separating the installed versions by \fBslot\fR. Clean will \fBremove all but
18559 +the most recently installed version in each \fBslot\fR. Clean should not
18560 +remove unslotted packages. Note: Most recently installed means most
18561 +\fBrecent\fR, not highest version.
18562 .TP
18563 .BR "\-\-config "
18564 -Run package specific actions needed to be executed after the emerge process
18565 -has completed. This usually entails configuration file setup or other similar
18566 +Run package specific actions needed to be executed after the emerge process
18567 +has completed. This usually entails configuration file setup or other similar
18568 setups that the user may wish to run.
18569 .TP
18570 .BR "\-\-depclean (-c)"
18571 @@ -161,21 +166,21 @@ updated more frequently than this man page; check it out if you
18572 are having problems that this man page does not help resolve.
18573 .TP
18574 .BR \-\-info
18575 -Produces a list of information to include in bug reports which aids the
18576 -developers when fixing the reported problem. \fBPlease include this
18577 -information when submitting a bug report.\fR Expanded output can be obtained
18578 +Produces a list of information to include in bug reports which aids the
18579 +developers when fixing the reported problem. \fBPlease include this
18580 +information when submitting a bug report.\fR Expanded output can be obtained
18581 with the \fI\-\-verbose\fR option.
18582 .TP
18583 .BR \-\-list\-sets
18584 Displays a list of available package sets.
18585 .TP
18586 .BR \-\-metadata
18587 -Transfers metadata cache from ${PORTDIR}/metadata/cache/ to
18588 -/var/cache/edb/dep/ as is normally done on the
18589 -tail end of an rsync update using \fBemerge \-\-sync\fR. This process
18590 -populates the cache database that portage uses for pre-parsed lookups of
18591 -package data. It does not populate cache for the overlays listed in
18592 -PORTDIR_OVERLAY. In order to generate cache for overlays, use \fB\-\-regen\fR.
18593 +Transfers pregenerated metadata cache from ${repository_location}/metadata/md5\-cache/
18594 +to /var/cache/edb/dep/ as is normally done on the tail end of an rsync update using
18595 +\fBemerge \-\-sync\fR. This process populates the cache database that Portage uses
18596 +for pre-parsed lookups of package data. It does not populate cache for repositories
18597 +not distributing pregenerated metadata cache. In order to generate cache for these
18598 +repositories, use \fB\-\-regen\fR.
18599 In versions of portage >=2.1.5 the \-\-metadata action is totally unnecessary
18600 unless the user has enabled FEATURES="metadata-transfer" in \fBmake.conf\fR(5).
18601 .TP
18602 @@ -188,40 +193,45 @@ the emerge output of the next \-\-depclean run carefully! Use
18603 \-\-depclean to avoid this issue.\fR
18604 .TP
18605 .BR \-\-regen
18606 -Causes portage to check and update the dependency cache of all ebuilds in the
18607 -portage tree. The cache is used to speed up searches and the building of
18608 -dependency trees. This command is not recommended for rsync users as rsync
18609 -updates the cache using server\-side caches. If you do not know the
18610 -differences between a 'rsync user' and some other user, then you are a 'rsync
18611 -user' :). Rsync users should simply run \fBemerge \-\-sync\fR to regenerate
18612 -the cache. After a portage update, rsync users may find it convenient to run
18613 -\fBemerge \-\-metadata\fR to rebuild the cache as portage does at the end of
18614 +Causes portage to check and update the dependency cache of all ebuilds in the
18615 +portage tree. The cache is used to speed up searches and the building of
18616 +dependency trees. This command is not recommended for rsync users as rsync
18617 +updates the cache using server\-side caches. If you do not know the
18618 +differences between a 'rsync user' and some other user, then you are a 'rsync
18619 +user' :). Rsync users should simply run \fBemerge \-\-sync\fR to regenerate
18620 +the cache. After a portage update, rsync users may find it convenient to run
18621 +\fBemerge \-\-metadata\fR to rebuild the cache as portage does at the end of
18622 a sync operation. In order to specify parallel \fB\-\-regen\fR behavior, use
18623 the \fB\-\-jobs\fR and \fB\-\-load\-average\fR options. If you would like to
18624 generate and distribute cache for use by others, use \fBegencache\fR(1).
18625 .TP
18626 .BR "\-\-resume" (\fB\-r\fR)
18627 Resumes the most recent merge list that has been aborted due to an error.
18628 -This re\-uses the options that were given with the original
18629 +This re\-uses the arguments and options that were given with the original
18630 command that's being resumed, and the user may also provide
18631 -additional options when calling \fB\-\-resume\fR.
18632 +additional options when calling \fB\-\-resume\fR. It is an error to provide
18633 +atoms or sets as arguments to \fB\-\-resume\fR, since the arguments from the
18634 +resumed command are used instead.
18635 Please note that this operation will only return an error on failure. If there
18636 is nothing for portage to do, then portage will exit with a message and a
18637 success condition. A resume list will persist until it has been completed in
18638 entirety or until another aborted merge list replaces it. The resume history
18639 is capable of storing two merge lists. After one resume list completes, it is
18640 possible to invoke \-\-resume once again in order to resume an older list.
18641 +The resume lists are stored in \fI/var/cache/edb/mtimedb\fR, and may be
18642 +explicitly discarded by running `emaint \-\-fix cleanresume` (see
18643 +\fBemaint\fR(1)).
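
A minimal sketch of the resume workflow described above, dropping the failing package and then discarding the saved list:

    emerge --resume --skipfirst
    # explicitly clear any remaining resume lists (see emaint(1))
    emaint --fix cleanresume
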
18644 .TP
18645 .BR "\-\-search " (\fB\-s\fR)
18646 Searches for matches of the supplied string in the portage tree.
18647 -By default emerge uses a case-insensitive simple search, but you can
18648 +By default emerge uses a case-insensitive simple search, but you can
18649 enable a regular expression search by prefixing the search string with %.
18650 -For example, \fBemerge \-\-search "%^kde"\fR searches for any package whose
18651 -name starts with "kde"; \fBemerge \-\-search "%gcc$"\fR searches for any
18652 -package that ends with "gcc"; \fBemerge \-\-search "office"\fR searches for
18653 -any package that contains the word "office". If you want to include the
18654 -category into the search string, prepend an @: \fBemerge \-\-search
18655 -"%@^dev-java.*jdk"\fR. If you want to search the package descriptions as well,
18656 +For example, \fBemerge \-\-search "%^kde"\fR searches for any package whose
18657 +name starts with "kde"; \fBemerge \-\-search "%gcc$"\fR searches for any
18658 +package that ends with "gcc"; \fBemerge \-\-search "office"\fR searches for
18659 +any package that contains the word "office". If you want to include the
18660 +category into the search string, prepend an @: \fBemerge \-\-search
18661 +"%@^dev-java.*jdk"\fR. If you want to search the package descriptions as well,
18662 use the \fB\-\-searchdesc\fR action.
18663 .TP
18664 .BR "\-\-searchdesc " (\fB\-S\fR)
18665 @@ -230,20 +240,15 @@ the package name. \fBTake caution\fR as the descriptions are also
18666 matched as regular expressions.
18667 .TP
18668 .BR \-\-sync
18669 -This updates the portage tree that is located in the
18670 -directory that the PORTDIR variable refers to (default
18671 -location is /usr/portage). The SYNC variable specifies
18672 -the remote URI from which files will be synchronized.
18673 +Updates repositories, for which sync\-type and sync\-uri attributes are
18674 +set in repos.conf. See \fBportage\fR(5) for more information.
18675 The \fBPORTAGE_SYNC_STALE\fR variable configures
18676 warnings that are shown when emerge \-\-sync has not
18677 been executed recently.
18678
18679 \fBWARNING:\fR
18680 -The emerge \-\-sync action will modify and/or delete
18681 -files located inside the directory that the PORTDIR
18682 -variable refers to (default location is /usr/portage).
18683 -For more information, see the PORTDIR documentation in
18684 -the make.conf(5) man page.
18685 +The emerge \-\-sync action will revert local changes (e.g. modifications or
18686 +additions of files) inside repositories synchronized using rsync.
18687
18688 \fBNOTE:\fR
18689 The \fBemerge\-webrsync\fR program will download the entire
18690 @@ -277,6 +282,21 @@ temporarily mask interactive packages. With default
18691 configuration, this would result in an effective
18692 \fBACCEPT_PROPERTIES\fR value of "* -interactive".
18693 .TP
18694 +.BR \-\-accept\-restrict=ACCEPT_RESTRICT
18695 +This option temporarily overrides the \fBACCEPT_RESTRICT\fR
18696 +variable. The \fBACCEPT_RESTRICT\fR variable is incremental,
18697 +which means that the specified setting is appended to the
18698 +existing value from your configuration. The special \fB-*\fR
18699 +token can be used to discard the existing configuration
18700 +value and start fresh. See the \fBMASKED PACKAGES\fR section
18701 +and \fBmake.conf\fR(5) for more information about
18702 +ACCEPT_RESTRICT. A typical usage example for this option
18703 +would be to use \fI\-\-accept\-restrict=\-bindist\fR to
18704 +temporarily mask packages that are not binary
18705 +re\-distributable. With default
18706 +configuration, this would result in an effective
18707 +\fBACCEPT_RESTRICT\fR value of "* -bindist".
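
A hedged example of the usage mentioned above; the package atom is only illustrative:

    emerge --ask --accept-restrict=-bindist media-video/ffmpeg
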
18708 +.TP
18709 .BR "\-\-alphabetical "
18710 When displaying USE and other flag output, combines the enabled and
18711 disabled lists into one list and sorts the whole list alphabetically.
18712 @@ -286,9 +306,10 @@ Before performing the action, display what will take place (server info for
18713 \fB\-\-sync\fR, \fB\-\-pretend\fR output for merge, and so forth), then ask
18714 whether to proceed with the action or abort. Using \fB\-\-ask\fR is more
18715 efficient than using \fB\-\-pretend\fR and then executing the same command
18716 -without \fB\-\-pretend\fR, as dependencies will only need to be calculated once.
18717 -\fBWARNING: If the "Enter" key is pressed at the prompt (with no other input),
18718 -it is interpreted as acceptance of the first choice. Note that the input
18719 +without \fB\-\-pretend\fR, as dependencies will only need to be calculated
18720 +once. \fBWARNING: If the "Enter" key is pressed at the prompt (with no other
18721 +input), it is interpreted as acceptance of the first choice. Note that the
18722 +input
18723 buffer is not cleared prior to the prompt, so an accidental press of the
18724 "Enter" key at any time prior to the prompt will be interpreted as a choice!
18725 Use the \-\-ask\-enter\-invalid option if you want a single "Enter" key
18726 @@ -360,7 +381,7 @@ possible ways to enable building of binary packages.
18727 .TP
18728 .BR "\-\-buildpkgonly " (\fB\-B\fR)
18729 Creates binary packages for all ebuilds processed without actually
18730 -merging the packages. This comes with the caveat that all build-time
18731 +merging the packages. This comes with the caveat that all build-time
18732 dependencies must already be emerged on the system.
18733 .TP
18734 .BR "\-\-changed\-use"
18735 @@ -369,6 +390,10 @@ changed since installation. This option also implies the
18736 \fB\-\-selective\fR option. Unlike \fB\-\-newuse\fR, the
18737 \fB\-\-changed\-use\fR option does not trigger reinstallation when
18738 flags that the user has not enabled are added or removed.
18739 +
18740 +NOTE: This option ignores the state of the "test" USE flag, since that flag
18741 +has a special binding to FEATURES="test" (see \fBmake.conf\fR(5) for more
18742 +information about \fBFEATURES\fR settings).
18743 .TP
18744 .BR "\-\-changelog " (\fB\-l\fR)
18745 Use this in conjunction with the \fB\-\-pretend\fR option. This will
18746 @@ -380,7 +405,7 @@ Enable or disable color output. This option will override \fINOCOLOR\fR
18747 is not a tty (by default, color is disabled unless stdout is a tty).
18748 .TP
18749 .BR "\-\-columns"
18750 -Used alongside \fB\-\-pretend\fR to cause the package name, new version,
18751 +Used alongside \fB\-\-pretend\fR to cause the package name, new version,
18752 and old version to be displayed in an aligned format for easy cut\-n\-paste.
18753 .TP
18754 .BR "\-\-complete\-graph [ y | n ]"
18755 @@ -409,7 +434,7 @@ Set the \fBPORTAGE_CONFIGROOT\fR environment variable.
18756 .TP
18757 .BR "\-\-debug " (\fB\-d\fR)
18758 Tells emerge to run the emerge command in \fB\-\-debug\fR mode. In this
18759 -mode the bash build environment will run with the \-x option, causing
18760 +mode the bash build environment will run with the \-x option, causing
18761 it to output verbose debugging information to stdout. This also enables
18762 a plethora of other output (mostly dependency resolution messages).
18763 .TP
18764 @@ -424,12 +449,17 @@ required.
18765 .TP
18766 .BR "\-\-depclean\-lib\-check [ y | n ]"
18767 Account for library link-level dependencies during
18768 -\fB\-\-depclean\fR and \fB\-\-prune\fR actions. This
18769 -option is enabled by default. In some cases this can
18770 -be somewhat time\-consuming. This option is ignored
18771 -when FEATURES="preserve\-libs" is enabled in
18772 -\fBmake.conf\fR(5), since any libraries that have
18773 -consumers will simply be preserved.
18774 +\fB\-\-depclean\fR and \fB\-\-prune\fR actions.
18775 +This option is enabled by default. If FEATURES="preserve\-libs" is
18776 +enabled in \fBmake.conf\fR(5), and preserve\-libs is not restricted
18777 +for any of the packages selected for removal, then this option is
18778 +ignored because any libraries that have consumers will simply be
18779 +preserved.
18780 +.TP
18781 +.BR \-\-digest
18782 +Prevent corruption from being noticed. The `repoman manifest` command is the
18783 +preferred way to generate manifests and it is capable of doing an entire
18784 +repository or category at once (see \fBrepoman\fR(1)).
18785 .TP
18786 .BR "\-\-dynamic\-deps < y | n >"
18787 In dependency calculations, substitute the dependencies of installed
18788 @@ -472,10 +502,10 @@ Instead of doing any package building, just perform fetches for all
18789 packages (fetch everything in SRC_URI regardless of USE setting).
18790 .TP
18791 .BR "\-\-getbinpkg [ y | n ] (\-g short option)"
18792 -Using the server and location defined in \fIPORTAGE_BINHOST\fR (see
18793 -\fBmake.conf\fR(5)), portage will download the information from each binary
18794 -package found and it will use that information to help build the dependency
18795 -list. This option implies \fB\-k\fR. (Use \fB\-gK\fR for binary\-only
18796 +Using the server and location defined in \fIPORTAGE_BINHOST\fR (see
18797 +\fBmake.conf\fR(5)), portage will download the information from each binary
18798 +package found and it will use that information to help build the dependency
18799 +list. This option implies \fB\-k\fR. (Use \fB\-gK\fR for binary\-only
18800 merging.)
18801 .TP
18802 .BR "\-\-getbinpkgonly [ y | n ] (\-G short option)"
18803 @@ -485,12 +515,12 @@ remote server are preferred over local packages if they are not identical.
18804 .BR "\-\-ignore-default-opts"
18805 Causes \fIEMERGE_DEFAULT_OPTS\fR (see \fBmake.conf\fR(5)) to be ignored.
18806 .TP
18807 -.BR "\-\-ignore\-built\-slot\-abi\-deps < y | n >"
18808 -Ignore the SLOT/ABI := operator parts of dependencies that have
18809 +.BR "\-\-ignore\-built\-slot\-operator\-deps < y | n >"
18810 +Ignore the slot/sub\-slot := operator parts of dependencies that have
18811 been recorded when packages were built. This option is intended
18812 only for debugging purposes, and it only affects built packages
18813 -that specify SLOT/ABI := operator dependencies using the
18814 -experimental "4\-slot\-abi" EAPI.
18815 +that specify slot/sub\-slot := operator dependencies which are
18816 +supported beginning with \fBEAPI 5\fR.
18817 .TP
18818 .BR "-j [JOBS], \-\-jobs[=JOBS]"
18819 Specifies the number of packages to build simultaneously. If this option is
18820 @@ -508,9 +538,10 @@ dependencies are recalculated for remaining packages and any with
18821 unsatisfied dependencies are automatically dropped. Also see
18822 the related \fB\-\-skipfirst\fR option.
18823 .TP
18824 -.BR \-\-load\-average=LOAD
18825 +.BR "\-\-load\-average [LOAD]"
18826 Specifies that no new builds should be started if there are other builds
18827 running and the load average is at least LOAD (a floating-point number).
18828 +With no argument, removes a previous load limit.
18829 This option is recommended for use in combination with \fB\-\-jobs\fR in
18830 order to avoid excess load. See \fBmake\fR(1) for information about
18831 analogous options that should be configured via \fBMAKEOPTS\fR in
18832 @@ -522,6 +553,11 @@ a list of packages with similar names when a package doesn't exist.
18833 The \fIEMERGE_DEFAULT_OPTS\fR variable may be used to disable this
18834 option by default.
18835 .TP
18836 +.BR "\-\-newrepo "
18837 +Tells emerge to recompile a package if it is now being pulled from a
18838 +different repository. This option also implies the
18839 +\fB\-\-selective\fR option.
18840 +.TP
18841 .BR "\-\-newuse " (\fB\-N\fR)
18842 Tells emerge to include installed packages where USE
18843 flags have changed since compilation. This option
18844 @@ -538,6 +574,10 @@ settings. If you would like to skip rebuilds for which disabled flags have
18845 been added to or removed from IUSE, see the related
18846 \fB\-\-changed\-use\fR option. If you would like to skip rebuilds for
18847 specific packages, see the \fB\-\-exclude\fR option.
18848 +
18849 +NOTE: This option ignores the state of the "test" USE flag, since that flag
18850 +has a special binding to FEATURES="test" (see \fBmake.conf\fR(5) for more
18851 +information about \fBFEATURES\fR settings).
18852 .TP
18853 .BR "\-\-noconfmem"
18854 Causes portage to disregard merge records indicating that a config file
18855 @@ -555,7 +595,8 @@ Skips the packages specified on the command\-line that have already
18856 been installed. Without this option, any package atoms or package sets
18857 you specify on the command\-line \fBwill\fR cause Portage to remerge
18858 the package, even if it is already installed. Note that Portage will
18859 -not remerge dependencies by default.
18860 +not remerge dependencies by default. This option can be used to update the
18861 +world file without rebuilding the packages.
18862 .TP
18863 .BR "\-\-nospinner"
18864 Disables the spinner for the session. The spinner is active when the
18865 @@ -599,6 +640,13 @@ exhaustively apply the entire history of package moves,
18866 regardless of whether or not any of the package moves have
18867 been previously applied.
18868 .TP
18869 +.BR \-\-pkg\-format
18870 +Specify which binary package format will be created as target.
18871 +Currently, the possible choices are tar, rpm, or their combinations.
18872 +.TP
18873 +.BR \-\-prefix=DIR
18874 +Set the \fBEPREFIX\fR environment variable.
18875 +.TP
18876 .BR "\-\-pretend " (\fB\-p\fR)
18877 Instead of actually performing the merge, simply display what *would*
18878 have been installed if \fB\-\-pretend\fR weren't used. Using \fB\-\-pretend\fR
18879 @@ -608,10 +656,11 @@ the printout:
18880 .TS
18881 lI l.
18882 N new (not yet installed)
18883 -S new SLOT installation (side-by-side versions)
18884 +S new SLOT installation (side-by-side versions)
18885 U updating (to another version)
18886 D downgrading (best version seems lower)
18887 -R replacing (remerging same version))
18888 +r reinstall (forced for some reason, possibly due to slot or sub\-slot)
18889 +R replacing (remerging same version)
18890 F fetch restricted (must be manually downloaded)
18891 f fetch restricted (already downloaded)
18892 I interactive (requires user input)
18893 @@ -626,7 +675,8 @@ output from portage's displays.
18894 .BR "\-\-quiet\-build [ y | n ]"
18895 Redirect all build output to logs alone, and do not display it on
18896 stdout. If a build failure occurs for a single package, the build
18897 -log will be automatically displayed on stdout. If there are multiple
18898 +log will be automatically displayed on stdout (unless the
18899 +\fI\-\-quiet\-fail\fR option is enabled). If there are multiple
18900 build failures (due to options like \-\-keep\-going or \-\-jobs),
18901 then the content of the log files will not be displayed, and instead
18902 the paths of the log files will be displayed together with the
18903 @@ -635,6 +685,12 @@ Note that interactive packages currently force all build output to
18904 be displayed on stdout. This issue can be temporarily avoided
18905 by specifying \fI\-\-accept\-properties=\-interactive\fR.
18906 .TP
18907 +.BR "\-\-quiet\-fail [ y | n ]"
18908 +Suppresses display of the build log on stdout when build output is hidden
18909 +due to options such as \fI\-\-jobs\fR, \fI\-\-quiet\fR, or
18910 +\fI\-\-quiet\-build\fR. Only the die message and the path of the build log
18911 +will be displayed on stdout.
18912 +.TP
18913 .BR "\-\-quiet\-repo\-display"
18914 In the package merge list display, suppress ::repository output, and
18915 instead use numbers to indicate which repositories packages come from.
18916 @@ -645,16 +701,23 @@ Disable the warning message that's shown prior to
18917 to be set in the \fBmake.conf\fR(5)
18918 \fBEMERGE_DEFAULT_OPTS\fR variable.
18919 .TP
18920 -.BR "\-\-rebuild\-if\-new\-slot\-abi [ y | n ]"
18921 -Automatically rebuild or reinstall packages when SLOT/ABI :=
18922 +.BR "\-\-rebuild\-if\-new\-slot [ y | n ]"
18923 +Automatically rebuild or reinstall packages when slot/sub\-slot :=
18924 operator dependencies can be satisfied by a newer slot, so that
18925 older packages slots will become eligible for removal by the
18926 \-\-depclean action as soon as possible. This option only
18927 -affects packages that specify SLOT/ABI dependencies using the
18928 -experimental "4\-slot\-abi" EAPI. Since this option requires
18929 +affects packages that specify slot/sub\-slot := dependencies
18930 +which are supported beginning with \fBEAPI 5\fR.
18931 +Since this option requires
18932 checking of reverse dependencies, it enables \-\-complete\-graph
18933 mode whenever a new slot is installed. This option is enabled by
18934 default.
18935 +
18936 +NOTE: If you want to skip all rebuilds involving slot\-operator
18937 +dependencies (including those that involve sub\-slot changes alone),
18938 +then \fI\-\-ignore\-built\-slot\-operator\-deps=y\fR is the option
18939 +that you are looking for, since \fI\-\-rebuild\-if\-new\-slot\fR
18940 +does not affect rebuilds triggered by sub\-slot changes alone.
18941 .TP
18942 .BR "\-\-rebuild\-if\-new\-rev [ y | n ]"
18943 Rebuild packages when build\-time dependencies are built from source, if the
18944 @@ -696,16 +759,20 @@ Set the \fBROOT\fR environment variable.
18945 .TP
18946 .BR "\-\-root\-deps[=rdeps]"
18947 If no argument is given then build\-time dependencies of packages for
18948 -\fBROOT\fR are installed to
18949 -\fBROOT\fR instead of /. If the \fBrdeps\fR argument is given then discard
18950 -all build\-time dependencies of packages for \fBROOT\fR. This option is
18951 -only meaningful when used together with \fBROOT\fR and it should not
18952 -be enabled under normal circumstances. For currently supported
18953 -\fBEAPI\fR values, the build-time dependencies are specified in the
18954 -\fBDEPEND\fR variable. However, behavior may change for new
18955 -\fBEAPI\fRs when related extensions are added in the future.
18956 -.TP
18957 -.BR "\-\-select [ y | n ]"
18958 +\fBROOT\fR are installed to \fBROOT\fR instead of /.
18959 +If the \fBrdeps\fR argument is given then discard all build\-time dependencies
18960 +of packages for \fBROOT\fR.
18961 +This option is only meaningful when used together with \fBROOT\fR and it should
18962 +not be enabled under normal circumstances!
18963 +
18964 +Does not affect EAPIs that support \fBHDEPEND\fR.
18965 +Experimental \fBEAPI 5-hdepend\fR provides \fBHDEPEND\fR as a new
18966 +means to adjust installation into "\fI/\fR" and \fBROOT\fR.
18967 +If ebuilds using EAPIs which \fIdo not\fR support \fBHDEPEND\fR are built in
18968 +the same \fBemerge\fR run as those using EAPIs which \fIdo\fR support
18969 +\fBHDEPEND\fR, this option affects only the former.
18970 +.TP
18971 +.BR "\-\-select [ y | n ] (\-w short option)"
18972 Add specified packages to the world set (inverse of
18973 \fB\-\-oneshot\fR). This is useful if you want to
18974 use \fBEMERGE_DEFAULT_OPTS\fR to make
18975 @@ -719,7 +786,7 @@ Use \fB\-\-selective=n\fR if you want to forcefully disable
18976 \fB\-\-newuse\fR, \fB\-\-noreplace\fR, or \fB\-\-update\fR.
18977 .TP
18978 .BR "\-\-skipfirst"
18979 -This option is only valid when used with \fB\-\-resume\fR. It removes the
18980 +This option is only valid when used with \fB\-\-resume\fR. It removes the
18981 first package in the resume list. Dependencies are recalculated for
18982 remaining packages and any that have unsatisfied dependencies or are
18983 masked will be automatically dropped. Also see the related
18984 @@ -727,7 +794,7 @@ masked will be automatically dropped. Also see the related
18985 .TP
18986 .BR "\-\-tree " (\fB\-t\fR)
18987 Shows the dependency tree for the given target by indenting dependencies.
18988 -This is only really useful in combination with \fB\-\-emptytree\fR or
18989 +This is only really useful in combination with \fB\-\-emptytree\fR or
18990 \fB\-\-update\fR and \fB\-\-deep\fR.
18991 .TP
18992 .BR "\-\-unordered\-display"
18993 @@ -753,20 +820,21 @@ A space separated list of package names or slot atoms. Emerge will prefer
18994 matching binary packages over newer unbuilt packages.
18995 .TP
18996 .BR "\-\-usepkg [ y | n ] (\-k short option)"
18997 -Tells emerge to use binary packages (from $PKGDIR) if they are available, thus
18998 -possibly avoiding some time\-consuming compiles. This option is useful for CD
18999 -installs; you can export PKGDIR=/mnt/cdrom/packages and then use this option to
19000 -have emerge "pull" binary packages from the CD in order to satisfy dependencies.
19001 +Tells emerge to use binary packages (from $PKGDIR) if they are available, thus
19002 +possibly avoiding some time\-consuming compiles. This option is useful for CD
19003 +installs; you can export PKGDIR=/mnt/cdrom/packages and then use this option to
19004 +have emerge "pull" binary packages from the CD in order to satisfy
19005 +dependencies.
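
A sketch of the CD-install scenario described above, using the mount point given in the text (the package atom is hypothetical):

    export PKGDIR=/mnt/cdrom/packages
    emerge --ask --usepkg app-editors/vim
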
19006 .TP
19007 .BR "\-\-usepkgonly [ y | n ] (\-K short option)"
19008 -Tells emerge to only use binary packages (from $PKGDIR). All the binary
19009 -packages must be available at the time of dependency calculation or emerge
19010 -will simply abort. Portage does not use $PORTDIR when calculating dependency
19011 -information so all masking information is ignored.
19012 -.TP
19013 -.BR "\-\-verbose " (\fB\-v\fR)
19014 -Tell emerge to run in verbose mode. Currently this flag causes emerge to print
19015 -out GNU info errors, if any, and to show the USE flags that will be used for
19016 +Tells emerge to only use binary packages (from $PKGDIR). All the binary
19017 +packages must be available at the time of dependency calculation or emerge
19018 +will simply abort. Portage does not use ebuild repositories when calculating
19019 +dependency information so all masking information is ignored.
19020 +.TP
19021 +.BR "\-\-verbose [ y | n ] (\-v short option)"
19022 +Tell emerge to run in verbose mode. Currently this flag causes emerge to print
19023 +out GNU info errors, if any, and to show the USE flags that will be used for
19024 each package when pretending. The following symbols are affixed to USE flags
19025 in order to indicate their status:
19026
19027 @@ -780,11 +848,20 @@ Symbol Location Meaning
19028 * suffix transition to or from the enabled state
19029 % suffix newly added or removed
19030 () circumfix forced, masked, or removed
19031 +{} circumfix state is bound to FEATURES settings
19032 .TE
19033 .TP
19034 +.BR "\-\-verbose\-conflicts"
19035 +Make slot conflicts more verbose. Note that this may in some cases output
19036 +hundreds of packages for slot conflicts.
19037 +.TP
19038 .BR "\-\-verbose\-main\-repo\-display"
19039 In the package merge list display, print ::repository even for main repository.
19040 .TP
19041 +.BR "\-\-verbose\-slot\-rebuilds [ y | n ]"
19042 +Turns on/off the extra emerge output that lists which packages are causing
19043 +rebuilds. The default is set to "y" (on).
19044 +.TP
19045 .BR "\-\-with\-bdeps < y | n >"
19046 In dependency calculations, pull in build time dependencies
19047 that are not strictly required. This defaults to \'n\' for
19048 @@ -795,6 +872,13 @@ This setting can be added to
19049 command line.
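As a hedged make.conf sketch, assuming --with-bdeps may be placed in EMERGE_DEFAULT_OPTS as the (partly elided) text above suggests and later overridden on the command line:

    EMERGE_DEFAULT_OPTS="--with-bdeps=y"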
19050 .SH "ENVIRONMENT OPTIONS"
19051 .TP
19052 +\fBEPREFIX\fR = \fI[path]\fR
19053 +Use \fBEPREFIX\fR to specify the target prefix to be used for merging packages
19054 +or ebuilds. This variable can be set via the \fB\-\-prefix\fR
19055 +option or in \fBmake.conf\fR(5) (the command line overrides other settings).
19056 +.br
19057 +Defaults to the prefix where portage is currently installed.
19058 +.TP
19059 \fBROOT\fR = \fI[path]\fR
19060 Use \fBROOT\fR to specify the target root filesystem to be used for
19061 merging packages or ebuilds. This variable can be set via the \fB\-\-root\fR
19062 @@ -803,18 +887,19 @@ option or in \fBmake.conf\fR(5) (the command line overrides other settings).
19063 Defaults to /.
19064 .TP
19065 \fBPORTAGE_CONFIGROOT\fR = \fI[path]\fR
19066 -Use \fBPORTAGE_CONFIGROOT\fR to specify the location for various portage
19067 +Use \fBPORTAGE_CONFIGROOT\fR to specify the location for various portage
19068 configuration files
19069 (see \fBFILES\fR for a detailed list of configuration files). This variable
19070 can be set via the \fB\-\-config\-root\fR option.
19071 .br
19072 Defaults to /.
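To illustrate these environment options, a hedged one-liner (the target path is only an example):

    ROOT=/mnt/newroot emerge --usepkgonly --pretend sys-apps/sed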
19073 .SH "OUTPUT"
19074 -When utilizing \fBemerge\fR with the \fB\-\-pretend\fR and \fB\-\-verbose\fR
19075 +When utilizing \fBemerge\fR with the \fB\-\-pretend\fR and \fB\-\-verbose\fR
19076 flags, the output may be a little hard to understand at first. This section
19077 explains the abbreviations.
19078 .TP
19079 -.B [blocks B ] app\-text/dos2unix ("app\-text/dos2unix" is blocking app\-text/hd2u\-0.8.0)
19080 +.B [blocks B ] app\-text/dos2unix ("app\-text/dos2unix" is blocking \
19081 +app\-text/hd2u\-0.8.0)
19082 Dos2unix is Blocking hd2u from being emerged. Blockers are defined when
19083 two packages will clobber each others files, or otherwise cause some form
19084 of breakage in your system. However, blockers usually do not need to be
19085 @@ -824,34 +909,34 @@ simultaneously emerged because they usually provide the same functionality.
19086 Qstat is New to your system, and will be emerged for the first time.
19087 .TP
19088 .B [ebuild NS ] dev-libs/glib-2.4.7
19089 -You already have a version of glib installed, but a 'new' version in
19090 +You already have a version of glib installed, but a 'new' version in
19091 a different SLOT is available.
19092 .TP
19093 .B [ebuild R ] sys\-apps/sed\-4.0.5
19094 -Sed 4.0.5 has already been emerged, but if you run the command, then
19095 +Sed 4.0.5 has already been emerged, but if you run the command, then
19096 portage will Re\-emerge the specified package (sed in this case).
19097 .TP
19098 .B [ebuild F ] media\-video/realplayer\-8\-r6
19099 -The realplayer package requires that you Fetch the sources manually.
19100 -When you attempt to emerge the package, if the sources are not found,
19101 -then portage will halt and you will be provided with instructions on how
19102 +The realplayer package requires that you Fetch the sources manually.
19103 +When you attempt to emerge the package, if the sources are not found,
19104 +then portage will halt and you will be provided with instructions on how
19105 to download the required files.
19106 .TP
19107 .B [ebuild f ] media\-video/realplayer\-8\-r6
19108 The realplayer package's files are already downloaded.
19109 .TP
19110 .B [ebuild U ] net\-fs/samba\-2.2.8_pre1 [2.2.7a]
19111 -Samba 2.2.7a has already been emerged and can be Updated to version
19112 +Samba 2.2.7a has already been emerged and can be Updated to version
19113 2.2.8_pre1.
19114 .TP
19115 .B [ebuild UD] media\-libs/libgd\-1.8.4 [2.0.11]
19116 -Libgd 2.0.11 is already emerged, but if you run the command, then
19117 +Libgd 2.0.11 is already emerged, but if you run the command, then
19118 portage will Downgrade to version 1.8.4 for you.
19119 -.br
19120 +.br
19121 This may occur if a newer version of a package has been masked because it is
19122 broken or it creates a security risk on your system and a fix has not been
19123 released yet.
19124 -.br
19125 +.br
19126 Another reason this may occur is if a package you are trying to emerge requires
19127 an older version of a package in order to emerge successfully. In this case,
19128 libgd 2.x is incompatible with libgd 1.x. This means that packages that were
19129 @@ -872,6 +957,19 @@ displayed when you use the \fB\-\-pretend\fR and \fB\-\-verbose\fR options.
19130 Using the \fB\-\-quiet\fR option will prevent all information from being
19131 displayed.
19132 .TP
19133 +.B [ebuild r U ] dev\-libs/icu\-50.1.1:0/50.1.1 [50.1\-r2:0/50.1]
19134 +Icu 50.1\-r2 has already been emerged and can be Updated to version
19135 +50.1.1. The \fBr\fR symbol indicates that a sub\-slot change (from 50.1
19136 +to 50.1.1 in this case) will force packages having slot\-operator
19137 +dependencies on it to be rebuilt (as libxml2 will be rebuilt in the next
19138 +example).
19139 +.TP
19140 +.B [ebuild rR ] dev\-libs/libxml2\-2.9.0\-r1:2 USE="icu"
19141 +Libxml2 2.9.0\-r1 has already been emerged, but if you run the command,
19142 +then portage will Re\-emerge it in order to satisfy a slot\-operator
19143 +dependency which forces it to be rebuilt when the icu sub\-slot changes
19144 +(as it changed in the previous example).
19145 +.TP
19146 .B [ebuild U *] sys\-apps/portage\-2.2.0_alpha6 [2.1.9.25]
19147 Portage 2.1.9.25 is installed, but if you run the command, then
19148 portage will upgrade to version 2.2.0_alpha6. In this case,
19149 @@ -899,14 +997,14 @@ globally via \fBACCEPT_KEYWORDS\fR.
19150
19151
19152 .SH "NOTES"
19153 -You should almost always precede any package install or update attempt with a
19154 -\fB\-\-pretend\fR install or update. This lets you see how much will be
19155 -done, and shows you any blocking packages that you will have to rectify.
19156 -This goes doubly so for the \fBsystem\fR and \fBworld\fR sets, which can
19157 -update a large number of packages if the portage tree has been particularly
19158 +You should almost always precede any package install or update attempt with a
19159 +\fB\-\-pretend\fR install or update. This lets you see how much will be
19160 +done, and shows you any blocking packages that you will have to rectify.
19161 +This goes doubly so for the \fBsystem\fR and \fBworld\fR sets, which can
19162 +update a large number of packages if the portage tree has been particularly
19163 active.
19164 .LP
19165 -You also want to typically use \fB\-\-update\fR, which ignores packages that
19166 +You also want to typically use \fB\-\-update\fR, which ignores packages that
19167 are already fully updated but updates those that are not.
19168 .LP
19169 When you install a package with uninstalled dependencies and do
19170 @@ -921,7 +1019,7 @@ avoid using some dependencies you may not want to have. \fBUSE
19171 flags specified on the command line are NOT remembered\fR. For
19172 example, \fBenv USE="\-X \-gnome" emerge mc\fR will emerge mc with
19173 those USE settings (on Bourne-compatible shells you may omit the \fBenv\fR
19174 -part). If you want those USE settings to be more
19175 +part). If you want those USE settings to be more
19176 permanent, you can put them in /etc/portage/package.use instead.
19177 .LP
19178 If \fBemerge \-\-update @system\fR or \fBemerge \-\-update @world\fR
19179 @@ -939,7 +1037,7 @@ Masks in \fBportage\fR have many uses: they allow a
19180 testing period where the packages can be used in live machines; they
19181 prevent the use of a package when it will fail; and they mask existing
19182 packages that are broken or could pose a security risk. Read below
19183 -to find out how to unmask in various cases. Also note that if you give
19184 +to find out how to unmask in various cases. Also note that if you give
19185 \fBemerge\fR an ebuild, then all forms of masking will be ignored and
19186 \fBemerge\fR will attempt to emerge the package.
19187 .TP
19188 @@ -965,15 +1063,15 @@ that are not supported by the current version of portage. Packages masked by
19189 \fBEAPI\fR can only be installed after portage has been upgraded.
19190 .TP
19191 .BR KEYWORDS
19192 -The \fBKEYWORDS\fR variable in an \fBebuild\fR file is also used for masking
19193 -a package still in testing. There are architecture\-specific keywords for
19194 -each package that let \fBportage\fR know which systems are compatible with
19195 -the package. Packages which compile on an architecture, but have not been
19196 -proven to be "stable", are masked with a tilde (\fB~\fR) in front of the
19197 -architecture name. \fBemerge\fR examines the \fBACCEPT_KEYWORDS\fR environment
19198 -variable to allow or disallow the emerging of a package masked by
19199 -\fBKEYWORDS\fR. To inform \fBemerge\fR that it should build these 'testing'
19200 -versions of packages, you should update your
19201 +The \fBKEYWORDS\fR variable in an \fBebuild\fR file is also used for masking
19202 +a package still in testing. There are architecture\-specific keywords for
19203 +each package that let \fBportage\fR know which systems are compatible with
19204 +the package. Packages which compile on an architecture, but have not been
19205 +proven to be "stable", are masked with a tilde (\fB~\fR) in front of the
19206 +architecture name. \fBemerge\fR examines the \fBACCEPT_KEYWORDS\fR environment
19207 +variable to allow or disallow the emerging of a package masked by
19208 +\fBKEYWORDS\fR. To inform \fBemerge\fR that it should build these 'testing'
19209 +versions of packages, you should update your
19210 \fI/etc/portage/package.accept_keywords\fR
19211 file to list the packages you want the
19212 \'testing\' version. See \fBportage\fR(5) for more information.
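As a hypothetical example of such an entry in /etc/portage/package.accept_keywords (the package atom and arch are illustrative):

    # accept the testing keyword for a single package on amd64
    app-portage/gentoolkit ~amd64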
19213 @@ -994,6 +1092,15 @@ of a package masked by \fBPROPERTIES\fR. See \fBmake.conf\fR(5) for information
19214 about \fBACCEPT_PROPERTIES\fR, and see \fBportage\fR(5) for information about
19215 \fI/etc/portage/package.properties\fR. Use the \fB\-\-accept\-properties\fR
19216 option to temporarily override \fBACCEPT_PROPERTIES\fR.
19217 +.TP
19218 +.BR RESTRICT
19219 +The \fBRESTRICT\fR variable in an \fBebuild\fR file can be used to mask
19220 +packages based on RESTRICT tokens. \fBemerge\fR examines the
19221 +\fBACCEPT_RESTRICT\fR environment variable to allow or disallow the emerging
19222 +of a package masked by \fBRESTRICT\fR. See \fBmake.conf\fR(5) for information
19223 +about \fBACCEPT_RESTRICT\fR, and see \fBportage\fR(5) for information about
19224 +\fI/etc/portage/package.accept_restrict\fR. Use the \fB\-\-accept\-restrict\fR
19225 +option to temporarily override \fBACCEPT_RESTRICT\fR.
19226 .SH "CONFIGURATION FILES"
19227 Portage has a special feature called "config file protection". The purpose of
19228 this feature is to prevent new package installs from clobbering existing
19229 @@ -1002,8 +1109,8 @@ and the KDE configuration dirs; more may be added in the future.
19230 .LP
19231 When Portage installs a file into a protected directory tree like /etc, any
19232 existing files will not be overwritten. If a file of the same name already
19233 -exists, Portage will change the name of the to\-be\-installed file from 'foo' to
19234 -\'._cfg0000_foo\'. If \'._cfg0000_foo\' already exists, this name becomes
19235 +exists, Portage will change the name of the to\-be\-installed file from 'foo'
19236 +to \'._cfg0000_foo\'. If \'._cfg0000_foo\' already exists, this name becomes
19237 \'._cfg0001_foo\', etc. In this way, existing files are not overwritten,
19238 allowing the administrator to manually merge the new config files and avoid any
19239 unexpected changes.
19240 @@ -1015,21 +1122,23 @@ deleted, which is of paramount importance.
19241 .LP
19242 Protected directories are set using the \fICONFIG_PROTECT\fR variable, normally
19243 defined in make.globals. Directory exceptions to the CONFIG_PROTECTed
19244 -directories can be specified using the \fICONFIG_PROTECT_MASK\fR variable. To find
19245 -files that need to be updated in /etc, type \fBfind /etc \-iname \'._cfg????_*\'\fR.
19246 +directories can be specified using the \fICONFIG_PROTECT_MASK\fR variable.
19247 +To find files that need to be updated in /etc, type \fBfind /etc \-name
19248 +\[aq]._cfg????_*\[aq]\fR.
19249 .LP
19250 -You can disable this feature by setting \fICONFIG_PROTECT="\-*"\fR in /etc/make.conf.
19251 +You can disable this feature by setting \fICONFIG_PROTECT="\-*"\fR in
19252 +\fBmake.conf\fR(5).
19253 Then, Portage will mercilessly auto\-update your config files. Alternatively,
19254 you can leave Config File Protection on but tell Portage that it can overwrite
19255 files in certain specific /etc subdirectories. For example, if you wanted
19256 Portage to automatically update your rc scripts and your wget configuration,
19257 but didn't want any other changes made without your explicit approval, you'd
19258 -add this to /etc/make.conf:
19259 +add this to \fBmake.conf\fR(5):
19260 .LP
19261 .I CONFIG_PROTECT_MASK="/etc/wget /etc/rc.d"
19262 .LP
19263 -Tools such as dispatch\-conf, cfg\-update, and etc\-update are also available to
19264 -aid in the merging of these files. They provide interactive merging and can
19265 +Tools such as dispatch\-conf, cfg\-update, and etc\-update are also available
19266 +to aid in the merging of these files. They provide interactive merging and can
19267 auto\-merge trivial changes.
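Putting the commands from this section together, a typical (illustrative) update check looks like:

    find /etc -name '._cfg????_*'   # list pending configuration updates
    dispatch-conf                   # then merge them interactively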
19268 .SH "REPORTING BUGS"
19269 Please report any bugs you encounter through our website:
19270 @@ -1050,9 +1159,10 @@ Marius Mauch <genone@g.o>
19271 Jason Stubbs <jstubbs@g.o>
19272 Brian Harring <ferringb@×××××.com>
19273 Zac Medico <zmedico@g.o>
19274 +Arfrever Frehtes Taifersar Arahesis <arfrever@××××××.org>
19275 .fi
19276 .SH "FILES"
19277 -Here is a common list of files you will probably be interested in. For a
19278 +Here is a common list of files you will probably be interested in. For a
19279 complete listing, please refer to the \fBportage\fR(5) man page.
19280 .TP
19281 .B /usr/share/portage/config/sets/
19282 @@ -1068,7 +1178,7 @@ This is like the world file but instead of package atoms it contains
19283 packages sets which always begin with the \fB@\fR character. Use
19284 \fB/etc/portage/sets/\fR to define user package sets.
19285 .TP
19286 -.B /etc/make.conf
19287 +.B /etc/portage/make.conf
19288 Contains variables for the build process, overriding those in
19289 \fBmake.globals\fR.
19290 .TP
19291 @@ -1079,10 +1189,10 @@ Contains variables customizing colors.
19292 Contains user package set definitions (see \fBportage\fR(5)).
19293 .TP
19294 .B /etc/dispatch\-conf.conf
19295 -Contains settings to handle automatic updates/backups of configuration
19296 +Contains settings to handle automatic updates/backups of configuration
19297 files.
19298 .TP
19299 -.B /etc/make.profile/make.defaults
19300 +.B /etc/portage/make.profile/make.defaults
19301 Contains profile\-specific variables for the build process. \fBDo not
19302 edit this file\fR.
19303 .TP
19304 @@ -1090,17 +1200,28 @@ edit this file\fR.
19305 Contains the master list of USE flags with descriptions of their
19306 functions. \fBDo not edit this file\fR.
19307 .TP
19308 -.B /etc/make.profile/virtuals
19309 +.B /etc/portage/make.profile/virtuals
19310 Contains a list of default packages used to resolve virtual dependencies.
19311 \fBDo not edit this file\fR.
19312 .TP
19313 -.B /etc/make.profile/packages
19314 +.B /etc/portage/make.profile/packages
19315 Contains a list of packages used for the base system. The \fBsystem\fR
19316 and \fBworld\fR sets consult this file. \fBDo not edit this file\fR.
19317 .TP
19318 .B /usr/share/portage/config/make.globals
19319 Contains the default variables for the build process. \fBDo not edit
19320 this file\fR.
19321 +.TP
19322 +.B /var/log/emerge.log
19323 +Contains a log of all emerge output. This file is always appended to, so if you
19324 +want to clean it, you need to do so manually.
19325 +.TP
19326 +.B /var/log/emerge-fetch.log
19327 +Contains a log of all the fetches in the previous emerge invocation.
19328 +.TP
19329 +.B
19330 +/var/log/portage/elog/summary.log
19331 +Contains the emerge summaries. Installs \fI/etc/logrotate/elog-save-summary\fR.
19332 .SH "SEE ALSO"
19333 .BR "emerge \-\-help",
19334 .BR quickpkg (1),
19335 @@ -1112,5 +1233,5 @@ this file\fR.
19336 .LP
19337 A number of helper applications reside in \fI/usr/lib/portage/bin\fR.
19338 .LP
19339 -The \fBapp\-portage/gentoolkit\fR package contains useful scripts such as
19340 +The \fBapp\-portage/gentoolkit\fR package contains useful scripts such as
19341 \fBequery\fR (a package query tool).
19342
19343 diff --git a/man/emirrordist.1 b/man/emirrordist.1
19344 new file mode 100644
19345 index 0000000..2c93830
19346 --- /dev/null
19347 +++ b/man/emirrordist.1
19348 @@ -0,0 +1,148 @@
19349 +.TH "EMIRRORDIST" "1" "Jul 2013" "Portage VERSION" "Portage"
19350 +.SH "NAME"
19351 +emirrordist \- a fetch tool for mirroring of package distfiles
19352 +.SH SYNOPSIS
19353 +.B emirrordist
19354 +[\fIoptions\fR] \fI<action>\fR
19355 +.SH ACTIONS
19356 +.TP
19357 +\fB\-h\fR, \fB\-\-help\fR
19358 +Show a help message and exit.
19359 +.TP
19360 +\fB\-\-version\fR
19361 +Display portage version and exit.
19362 +.TP
19363 +\fB\-\-mirror\fR
19364 +Mirror distfiles for the selected repository.
19365 +.SH OPTIONS
19366 +.TP
19367 +\fB\-\-dry\-run\fR
19368 +Perform a trial run with no changes made (typically combined
19369 +with \fI\-v\fR or \fI\-vv\fR).
19370 +.TP
19371 +\fB\-v\fR, \fB\-\-verbose\fR
19372 +Display extra information on stderr (multiple occurrences
19373 +increase verbosity).
19374 +.TP
19375 +\fB\-\-ignore\-default\-opts\fR
19376 +Do not use the \fIEMIRRORDIST_DEFAULT_OPTS\fR environment
19377 +variable.
19378 +.TP
19379 +\fB\-\-distfiles\fR=\fIDIR\fR
19380 +Distfiles directory to use (required).
19381 +.TP
19382 +\fB\-j\fR JOBS, \fB\-\-jobs\fR=\fIJOBS\fR
19383 +Number of concurrent jobs to run.
19384 +.TP
19385 +\fB\-l\fR LOAD, \fB\-\-load\-average\fR=\fILOAD\fR
19386 +Load average limit for spawning of new concurrent jobs.
19387 +.TP
19388 +\fB\-\-tries\fR=\fITRIES\fR
19389 +Maximum number of tries per file, 0 means unlimited
19390 +(default is 10).
19391 +.TP
19392 +\fB\-\-repo\fR=\fIREPO\fR
19393 +Name of repo to operate on.
19394 +.TP
19395 +\fB\-\-config\-root\fR=\fIDIR\fR
19396 +Location of portage config files.
19397 +.TP
19398 +\fB\-\-portdir\fR=\fIDIR\fR
19399 +Override the PORTDIR variable. This option is deprecated in favor of the
19400 +\-\-repositories\-configuration option.
19401 +.TP
19402 +\fB\-\-portdir\-overlay\fR=\fIPORTDIR_OVERLAY\fR
19403 +Override the PORTDIR_OVERLAY variable. This option is deprecated in favor
19404 +of the \-\-repositories\-configuration option.
19405 +.TP
19406 +\fB\-\-repositories\-configuration\fR=\fIREPOSITORIES_CONFIGURATION\fR
19407 +Override configuration of repositories. The argument of this option has
19408 +the same format as repos.conf (see \fBportage\fR(5)).
19409 +.TP
19410 +\fB\-\-strict\-manifests=\fR<y|n>
19411 +Manually override the "strict" FEATURES setting.
19412 +.TP
19413 +\fB\-\-failure\-log\fR=\fIFILE\fR
19414 +Log file for fetch failures, with tab\-delimited output, for
19415 +reporting purposes. Opened in append mode.
19416 +.TP
19417 +\fB\-\-success\-log\fR=\fIFILE\fR
19418 +Log file for fetch successes, with tab\-delimited output, for
19419 +reporting purposes. Opened in append mode.
19420 +.TP
19421 +\fB\-\-scheduled\-deletion\-log\fR=\fIFILE\fR
19422 +Log file for scheduled deletions, with tab\-delimited output, for
19423 +reporting purposes. Overwritten with each run.
19424 +.TP
19425 +\fB\-\-delete\fR
19426 +Enable deletion of unused distfiles.
19427 +.TP
19428 +\fB\-\-deletion\-db\fR=\fIFILE\fR
19429 +Database file used to track lifetime of files scheduled for
19430 +delayed deletion.
19431 +.TP
19432 +\fB\-\-deletion\-delay\fR=\fISECONDS\fR
19433 +Delay time for deletion of unused distfiles, measured in seconds.
19434 +.TP
19435 +\fB\-\-temp\-dir\fR=\fIDIR\fR
19436 +Temporary directory for downloads.
19437 +.TP
19438 +\fB\-\-mirror\-overrides\fR=\fIFILE\fR
19439 +File holding a list of mirror overrides.
19440 +.TP
19441 +\fB\-\-mirror\-skip\fR=\fIMIRROR_SKIP\fR
19442 +Comma delimited list of mirror targets to skip when
19443 +fetching.
19444 +.TP
19445 +\fB\-\-restrict\-mirror\-exemptions\fR=\fIRESTRICT_MIRROR_EXEMPTIONS\fR
19446 +Comma delimited list of mirror targets for which to ignore
19447 +RESTRICT="mirror" (see \fBebuild\fR(5)).
19448 +.TP
19449 +\fB\-\-verify\-existing\-digest\fR
19450 +Use digest as a verification of whether existing
19451 +distfiles are valid.
19452 +.TP
19453 +\fB\-\-distfiles\-local\fR=\fIDIR\fR
19454 +The distfiles\-local directory to use.
19455 +.TP
19456 +\fB\-\-distfiles\-db\fR=\fIFILE\fR
19457 +Database file used to track which ebuilds a distfile belongs to.
19458 +.TP
19459 +\fB\-\-recycle\-dir\fR=\fIDIR\fR
19460 +Directory for extended retention of files that are removed from
19461 +distdir with the \-\-delete option. These files may be recycled if
19462 +they are needed again, instead of downloading them again.
19463 +.TP
19464 +\fB\-\-recycle\-db\fR=\fIFILE\fR
19465 +Database file used to track lifetime of files in recycle dir.
19466 +.TP
19467 +\fB\-\-recycle\-deletion\-delay\fR=\fISECONDS\fR
19468 +Delay time for deletion of unused files from recycle dir,
19469 +measured in seconds (defaults to the equivalent of 60 days).
19470 +.TP
19471 +\fB\-\-fetch\-log\-dir\fR=\fIDIR\fR
19472 +Directory for individual fetch logs.
19473 +.TP
19474 +\fB\-\-whitelist\-from\fR=\fIFILE\fR
19475 +Specifies a file containing a list of files to whitelist, one per line;
19476 +lines prefixed with # are ignored. Use this option multiple times in order
19477 +to specify multiple whitelists.
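For illustration, an emirrordist invocation built only from the options documented above (the repository name and distfiles path are examples):

    emirrordist --mirror --repo=gentoo --distfiles=/srv/distfiles \
        --jobs=4 --dry-run -v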
19478 +.SH "REPORTING BUGS"
19479 +Please report bugs via http://bugs.gentoo.org/
19480 +.SH "THANKS"
19481 +Special thanks to Brian Harring, author of the mirror\-dist program from
19482 +which emirrordist is derived.
19483 +.SH "AUTHORS"
19484 +.nf
19485 +Zac Medico <zmedico@g.o>
19486 +Arfrever Frehtes Taifersar Arahesis <arfrever@××××××.org>
19487 +.fi
19488 +.SH "FILES"
19489 +.TP
19490 +.B /etc/portage/make.conf
19491 +Contains variables.
19492 +.SH "SEE ALSO"
19493 +.BR ebuild (5),
19494 +.BR egencache (1),
19495 +.BR make.conf (5),
19496 +.BR portage (5)
19497
19498 diff --git a/man/env-update.1 b/man/env-update.1
19499 index 4561ab4..9ceddab 100644
19500 --- a/man/env-update.1
19501 +++ b/man/env-update.1
19502 @@ -1,26 +1,27 @@
19503 .TH "ENV-UPDATE" "1" "Aug 2008" "Portage VERSION" "Portage"
19504 -.SH NAME
19505 +.SH "NAME"
19506 env\-update \- updates environment settings automatically
19507 -.SH SYNOPSIS
19508 +.SH "SYNOPSIS"
19509 \fBenv\-update\fR \fI[options]\fR
19510 -.SH DESCRIPTION
19511 +.SH "DESCRIPTION"
19512 .B env\-update
19513 reads the files in \fI/etc/env.d\fR and automatically generates
19514 \fI/etc/profile.env\fR and \fI/etc/ld.so.conf\fR. Then \fBldconfig\fR(8)
19515 is run to update \fI/etc/ld.so.cache\fR. \fBenv-update\fR is run by
19516 \fBemerge\fR(1) automatically after each package merge. Also, if you
19517 -make changes to \fI/etc/env.d\fR, you should run \fIenv-update\fR
19518 -yourself for changes to take effect immediately. Note that this would
19519 -only affect new processes. In order for the changes to affect your
19520 -active shell, you will probably have to run \fIsource /etc/profile\fR
19521 +make changes to \fI/etc/env.d\fR, you should run \fBenv-update\fR
19522 +yourself for changes to take effect immediately. Note that this would
19523 +only affect new processes. In order for the changes to affect your
19524 +active shell, you will probably have to run \fIsource /etc/profile\fR
19525 first.
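The usual sequence after editing files in /etc/env.d, per the description above:

    env-update && source /etc/profile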
19526 -.SH OPTIONS
19527 +.SH "OPTIONS"
19528 .TP
19529 .B \-\-no\-ldconfig
19530 -Do not run ldconfig (and thus skip rebuilding the ld.so cache, etc...).
19531 +Do not run \fBldconfig\fR (and thus skip rebuilding the \fIld.so.cache\fR,
19532 +etc...).
19533 .SH "REPORTING BUGS"
19534 Please report bugs via http://bugs.gentoo.org/
19535 -.SH AUTHORS
19536 +.SH "AUTHORS"
19537 Daniel Robbins <drobbins@g.o>
19538 .SH "SEE ALSO"
19539 .BR emerge (1),
19540
19541 diff --git a/man/etc-update.1 b/man/etc-update.1
19542 index 366e850..71900da 100644
19543 --- a/man/etc-update.1
19544 +++ b/man/etc-update.1
19545 @@ -1,28 +1,24 @@
19546 .TH "ETC-UPDATE" "1" "Mar 2012" "Portage VERSION" "Portage"
19547 -.SH NAME
19548 -etc-update \- handle configuration file updates
19549 -.SH SYNOPSIS
19550 -.BR etc-update
19551 +.SH "NAME"
19552 +etc\-update \- handle configuration file updates
19553 +.SH "SYNOPSIS"
19554 +.BR etc\-update
19555 [\fIoptions\fR] [\fI--automode <mode>\fR] [\fIpaths to scan\fR]
19556 -.SH DESCRIPTION
19557 -.I etc-update
19558 -is supposed to be run after merging a new package to see if
19559 +.SH "DESCRIPTION"
19560 +\fIetc\-update\fR is supposed to be run after merging a new package to see if
19561 there are updates to the configuration files. If a new
19562 -configuration file will override an old one,
19563 -.I etc-update
19564 -will prompt the user for a decision.
19565 +configuration file will override an old one,
19566 +\fIetc\-update\fR will prompt the user for a decision.
19567 .PP
19568 -.I etc-update
19569 -will check all directories specified on the command line. If no paths
19570 -are given, then the \fICONFIG_PROTECT\fR variable will be used. All
19571 -config files found in \fICONFIG_PROTECT_MASK\fR will automatically be
19572 -updated for you by \fIetc-update\fR. See \fBmake.conf\fR(5) for more
19573 -information.
19574 +\fIetc\-update\fR will check all directories specified on the command
19575 +line. If no paths are given, then the \fICONFIG_PROTECT\fR variable
19576 +will be used. All config files found in \fICONFIG_PROTECT_MASK\fR will
19577 +automatically be updated for you by \fIetc\-update\fR.
19578 +See \fBmake.conf\fR(5) for more information.
19579 .PP
19580 -.I etc-update
19581 -respects the normal \fIPORTAGE_CONFIGROOT\fR and \fIEROOT\fR variables
19582 -for finding the aforementioned config protect variables.
19583 -.SH OPTIONS
19584 +\fIetc\-update\fR respects the normal \fIPORTAGE_CONFIGROOT\fR and
19585 +\fIEROOT\fR variables for finding the aforementioned config protect variables.
19586 +.SH "OPTIONS"
19587 .TP
19588 .BR \-d ", " \-\-debug
19589 Run with shell tracing enabled.
19590 @@ -37,11 +33,11 @@ Automerge trivial changes only and quit.
19591 Show settings and important decision info while running.
19592 .TP
19593 .BR "\-\-automode <mode>"
19594 -Select one of the automatic merge modes. Valid modes are: -3 -5 -7 -9.
19595 +Select one of the automatic merge modes. Valid modes are: \-3 \-5 \-7 \-9.
19596 See the \fI\-\-help\fR text for more details.
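A hedged example using one of the modes listed above, limited to /etc:

    etc-update --automode -3 /etc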
19597 .SH "REPORTING BUGS"
19598 Please report bugs via http://bugs.gentoo.org/
19599 -.SH AUTHORS
19600 +.SH "AUTHORS"
19601 .nf
19602 Jochem Kossen and Leo Lipelis
19603 Karl Trygve Kalleberg <karltk@g.o>
19604 @@ -49,8 +45,8 @@ Mike Frysinger <vapier@g.o>
19605 .fi
19606 .SH "FILES"
19607 .TP
19608 -.B /etc/etc-update.conf
19609 -Configuration settings for \fIetc-update\fR are stored here.
19610 +.B /etc/etc\-update.conf
19611 +Configuration settings for \fIetc\-update\fR are stored here.
19612 .SH "SEE ALSO"
19613 -.BR dispatch-conf (1),
19614 +.BR dispatch\-conf (1),
19615 .BR make.conf (5)
19616
19617 diff --git a/man/make.conf.5 b/man/make.conf.5
19618 index 876a8a3..2281014 100644
19619 --- a/man/make.conf.5
19620 +++ b/man/make.conf.5
19621 @@ -1,10 +1,20 @@
19622 -.TH "MAKE.CONF" "5" "Jul 2012" "Portage VERSION" "Portage"
19623 +.TH "MAKE.CONF" "5" "Jan 2014" "Portage VERSION" "Portage"
19624 .SH "NAME"
19625 make.conf \- custom settings for Portage
19626 .SH "SYNOPSIS"
19627 \fB/etc/make.conf\fR and \fB/etc/portage/make.conf\fR
19628 .SH "DESCRIPTION"
19629 -This file contains various variables that are used by Portage.
19630 +
19631 +This file contains various variables that are used by Portage. The file has a
19632 +newline\-delimited list of \fI<key>=<value>\fR pairs (see the default file for
19633 +examples) which are accessible from the environment of ebuilds. It supports
19634 +simple shell\-like expansion of the form \fIvar="${var}"\fR, the source
19635 +keyword and variable substitution, but not some of the more advanced BASH
19636 +features like arrays and special parameter expansions. For more details, see
19637 +the Simple lexical analysis documentation:
19638 +\fLhttp://docs.python.org/3/library/shlex.html\fR. Note that if you source
19639 +files, they need to be in the same shlex syntax for portage to read them.
19640 +.br
19641 Portage will check the currently\-defined environment variables
19642 first for any settings. If no environment settings are found,
19643 Portage then checks the make.conf files. Both /etc/make.conf and
19644 @@ -13,7 +23,7 @@ Portage then checks the make.conf files. Both /etc/make.conf and
19645 If no setting is found in the make.conf files, Portage checks
19646 make.globals. If no
19647 setting is found there, the profile's default setting is grabbed
19648 -from /etc/make.profile/make.defaults. Please note that all user
19649 +from /etc/portage/make.profile/make.defaults. Please note that all user
19650 settings should be made in the environment or in the make.conf
19651 files, which are intended to be customized by the user.
19652 .br
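For illustration, a minimal make.conf sketch in the shlex-style syntax described above (the sourced filename is hypothetical):

    CFLAGS="-O2 -pipe"
    CXXFLAGS="${CFLAGS}"                  # simple ${var} expansion
    source /etc/portage/make.conf.local   # sourced files use the same syntax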
19653 @@ -37,7 +47,7 @@ Defaults to the value of $CHOST.
19654 \fBACCEPT_KEYWORDS\fR = \fI[space delimited list of KEYWORDS]\fR
19655 Enable testing of ebuilds that have not yet been deemed 'stable'. Users
19656 of the 'x86' architecture would set this to '~x86' while ppc users would
19657 -set this to '~ppc'. This is an incremental variable. Only define a
19658 +set this to '~ppc'. This is an incremental variable. Only define a
19659 ~arch.
19660 .br
19661 Defaults to the value of $ARCH.
19662 @@ -55,12 +65,12 @@ Defaults to the value of * -@EULA.
19663 .br
19664 .I Examples:
19665 .nf
19666 -# Accept any license
19667 -ACCEPT_LICENSE="*"
19668 -# Accept any license except the "public-domain" license
19669 -ACCEPT_LICENSE="* -public-domain"
19670 -# Only accept licenses in the FSF-APPROVED license group
19671 -ACCEPT_LICENSE="-* @FSF-APPROVED"
19672 +# Only accept licenses in the FREE license group (i.e. Free Software)
19673 +ACCEPT_LICENSE="-* @FREE"
19674 +# As before, but exclude the "Artistic" license
19675 +ACCEPT_LICENSE="-* @FREE -Artistic"
19676 +# Accept any license except those in the EULA license group (default)
19677 +ACCEPT_LICENSE="* -@EULA"
19678 .fi
19679 .TP
19680 \fBACCEPT_PROPERTIES\fR = \fI[space delimited list of properties]\fR
19681 @@ -68,7 +78,7 @@ This variable is used to mask packages based on PROPERTIES restrictions.
19682 In addition to property names, the \fI*\fR and \fI-*\fR wildcard tokens are
19683 also supported. This variable can be temporarily overridden using the
19684 \fB\-\-accept\-properties\fR option of \fBemerge\fR(1).
19685 -See \fBebuild\fR(5) for more information about PROPERTIES.
19686 +See \fBebuild\fR(5) for more information about PROPERTIES.
19687 .br
19688 Defaults to the value of *.
19689 .br
19690 @@ -80,6 +90,23 @@ ACCEPT_PROPERTIES="*"
19691 ACCEPT_PROPERTIES="* -interactive"
19692 .fi
19693 .TP
19694 +\fBACCEPT_RESTRICT\fR = \fI[space delimited list of RESTRICT tokens]\fR
19695 +This variable is used to mask packages based on RESTRICT tokens.
19696 +In addition to RESTRICT tokens, the \fI*\fR and \fI-*\fR wildcard tokens are
19697 +also supported. This variable can be temporarily overridden using the
19698 +\fB\-\-accept\-restrict\fR option of \fBemerge\fR(1).
19699 +See \fBebuild\fR(5) for more information about RESTRICT.
19700 +.br
19701 +Defaults to the value of *.
19702 +.br
19703 +.I Examples:
19704 +.nf
19705 +# Accept any restrict tokens
19706 +ACCEPT_RESTRICT="*"
19707 +# Accept any tokens except "bindist"
19708 +ACCEPT_RESTRICT="* -bindist"
19709 +.fi
19710 +.TP
19711 .B CBUILD
19712 This variable is passed by the \fIebuild scripts\fR to the \fIconfigure\fR
19713 as \fI\-\-build=${CBUILD}\fR only if it is defined. Do not set this yourself
19714 @@ -92,7 +119,7 @@ man page for more information.
19715 Defaults to /var/tmp/ccache
19716 .TP
19717 \fBCCACHE_SIZE\fR = \fI"size"\fR
19718 -This controls the space use limitations for ccache. The default is 2 gigabytes
19719 +This controls the space use limitations for ccache. The default is 2 gigabytes
19720 ('2G'). Sizes are specified with 'G', 'M', or 'K'.
19721 .TP
19722 .B CFLAGS CXXFLAGS
19723 @@ -137,13 +164,14 @@ automatically have /* appended to them.
19724 Defaults to "/lib/modules/* *.py[co]".
19725 .TP
19726 \fBCONFIG_PROTECT\fR = \fI[space delimited list of files and/or directories]\fR
19727 -All files and/or directories that are defined here will have "config file protection"
19728 -enabled for them. See the \fBCONFIGURATION FILES\fR section
19729 +All files and/or directories that are defined here will have "config file
19730 +protection" enabled for them. See the \fBCONFIGURATION FILES\fR section
19731 of \fBemerge\fR(1) for more information.
19732 .TP
19733 -\fBCONFIG_PROTECT_MASK\fR = \fI[space delimited list of files and/or directories]\fR
19734 -All files and/or directories that are defined here will have "config file protection"
19735 -disabled for them. See the \fBCONFIGURATION FILES\fR section
19736 +\fBCONFIG_PROTECT_MASK\fR = \fI[space delimited list of files and/or \
19737 +directories]\fR
19738 +All files and/or directories that are defined here will have "config file
19739 +protection" disabled for them. See the \fBCONFIGURATION FILES\fR section
19740 of \fBemerge\fR(1) for more information.
19741 .TP
19742 .B CTARGET
19743 @@ -166,6 +194,10 @@ See the \fBPORTDIR\fR documentation for more information.
19744 .br
19745 Defaults to /usr/portage/distfiles.
19746 .TP
19747 +.B DCO_SIGNED_OFF_BY
19748 +This variable may contain a name and email address which will be used by
19749 +\fBrepoman\fR(1) to add a Signed\-off\-by line to each commit message.
19750 +.TP
19751 .B DOC_SYMLINKS_DIR
19752 If this variable contains a directory then symlinks to html documentation will
19753 be installed into it.
19754 @@ -175,9 +207,9 @@ Defines whether or not to ignore audible beeps when displaying important
19755 informational messages. This variable is unset by default.
19756 .TP
19757 .B EMERGE_DEFAULT_OPTS
19758 -Options to append to the end of the \fBemerge\fR(1) command line on every invocation.
19759 -These options will not be appended to the command line if \-\-ignore\-default\-opts
19760 -is specified.
19761 +Options to append to the end of the \fBemerge\fR(1) command line on every
19762 +invocation. These options will not be appended to the command line if
19763 +\-\-ignore\-default\-opts is specified.
19764 .TP
19765 .B EMERGE_LOG_DIR
19766 Controls the location of emerge.log and emerge-fetch.log.
19767 @@ -207,7 +239,7 @@ should not be disabled by default.
19768 .RS
19769 .TP
19770 .B assume\-digests
19771 -When commiting work to cvs with \fBrepoman\fR(1), assume that all existing
19772 +When committing work to cvs with \fBrepoman\fR(1), assume that all existing
19773 SRC_URI digests are correct. This feature also affects digest generation via
19774 \fBebuild\fR(1) and \fBemerge\fR(1) (emerge generates digests only when the
19775 \fIdigest\fR feature is enabled). Existing digests for files that do not exist
19776 @@ -234,8 +266,8 @@ Enable a special progress indicator when \fBemerge\fR(1) is calculating
19777 dependencies.
19778 .TP
19779 .B ccache
19780 -Enable portage support for the ccache package. If the ccache dir is not
19781 -present in the user's environment, then portage will default to
19782 +Enable portage support for the ccache package. If the ccache dir is not
19783 +present in the user's environment, then portage will default to
19784 ${PORTAGE_TMPDIR}/ccache.
19785
19786 \fBWarning\fR: This feature is known to cause numerous compilation failures.
19787 @@ -245,6 +277,10 @@ like "File not recognized: File truncated"), try recompiling the application
19788 with ccache disabled before reporting a bug. Unless you are doing development
19789 work, do not enable ccache.
19790 .TP
19791 +.B cgroup
19792 +Use a Linux control group to control processes spawned by ebuilds. This
19793 +allows emerge to safely kill all subprocesses when an ebuild phase exits.
19794 +.TP
19795 .B clean\-logs
19796 Enable automatic execution of the command specified by the
19797 PORT_LOGDIR_CLEAN variable. The default PORT_LOGDIR_CLEAN setting will
19798 @@ -268,6 +304,13 @@ space. Make sure you have built both binutils and gdb with USE=zlib
19799 support for this to work. See \fBsplitdebug\fR for general split debug
19800 information (upon which this feature depends).
19801 .TP
19802 +.B compress\-index
19803 +If set then a compressed copy of the 'Packages' index file will be written.
19804 +This feature is intended for Gentoo binhosts using certain webservers
19805 +(such as, but not limited to, Nginx with the gzip_static module) to avoid
19806 +redundant on\-the\-fly compression. The resulting file will be called
19807 +\[aq]Packages.gz' and its modification time will match that of 'Packages'.
19808 +.TP
19809 .B config\-protect\-if\-modified
19810 This causes the \fBCONFIG_PROTECT\fR behavior to be skipped for files
19811 that have not been modified since they were installed. This feature is
19812 @@ -315,7 +358,7 @@ Both the \fBebuild\fR(1) command and the \fInoclean\fR feature cause the
19813 \fIfail\-clean\fR feature to be automatically disabled.
19814 .TP
19815 .B getbinpkg
19816 -Force emerges to always try to fetch files from the \fIPORTAGE_BINHOST\fR. See
19817 +Force emerges to always try to fetch files from the \fIPORTAGE_BINHOST\fR. See
19818 \fBmake.conf\fR(5) for more information.
19819 .TP
19820 .B installsources
19821 @@ -347,6 +390,10 @@ would otherwise be useless with prefix configurations. This brings
19822 compatibility with the prefix branch of portage, which also supports EPREFIX
19823 for all EAPIs (for obvious reasons).
19824 .TP
19825 +.B ipc\-sandbox
19826 +Isolate the ebuild phase functions from the host IPC namespace. Supported
19827 +only on Linux. Requires IPC namespace support in the kernel.
19828 +.TP
19829 .B lmirror
19830 When \fImirror\fR is enabled in \fBFEATURES\fR, fetch files even
19831 when \fImirror\fR is also in the \fBebuild\fR(5) \fBRESTRICT\fR variable.
19832 @@ -354,18 +401,17 @@ Do \fBNOT\fR use \fIlmirror\fR for clients that need to override \fBRESTRICT\fR
19833 when fetching from a local mirror, but instead use a "local" mirror setting
19834 in \fI/etc/portage/mirrors\fR, as described in \fBportage\fR(5).
19835 .TP
19836 +.B merge\-sync
19837 +After a package is merged or unmerged, sync relevant files to
19838 +disk in order to avoid data\-loss in the event of a power failure.
19839 +This feature is enabled by default.
19840 +.TP
19841 .B metadata\-transfer
19842 Automatically perform a metadata transfer when `emerge \-\-sync` is run.
19843 In versions of portage >=2.1.5, this feature is disabled by
19844 default. When metadata\-transfer is disabled, metadata cache from the
19845 -${PORTDIR}/metadata/cache/ directory will be used directly (if available)
19846 -and eclasses in ${PORTDIR}/eclass/ must not be modified except by
19847 -`emerge \-\-sync` operations since the cache validation mechanism
19848 -will not recognize eclass modifications. Normally, this issue only
19849 -pertains to users of the rsync tree since the cvs tree does not contain
19850 -a metadata/cache/ directory. Users of the rsync tree who want to modify
19851 -eclasses should use \fBPORTDIR_OVERLAY\fR in order for the cache
19852 -validation mechanism to work correctly.
19853 +${repository_location}/metadata/md5\-cache/ directory will be used directly
19854 +(if available).
19855 .TP
19856 .B mirror
19857 Fetch everything in \fBSRC_URI\fR regardless of \fBUSE\fR settings,
19858 @@ -378,12 +424,18 @@ isn't a symlink to /usr/lib64. To find the bad packages, we have a
19859 portage feature called \fImultilib\-strict\fR. It will prevent emerge
19860 from putting 64bit libraries into anything other than (/usr)/lib64.
19861 .TP
19862 +.B network\-sandbox
19863 +Isolate the ebuild phase functions from host network interfaces.
19864 +Supported only on Linux. Requires network namespace support in the kernel.
19865 +.TP
19866 .B news
19867 Enable GLEP 42 news support. See
19868 \fIhttp://www.gentoo.org/proj/en/glep/glep-0042.html\fR.
19869 .TP
19870 .B noauto
19871 -When utilizing \fBebuild\fR(1), only run the function requested.
19872 +When utilizing \fBebuild\fR(1), only run the function requested. This also
19873 +forces the corresponding ebuild and eclasses to be sourced again for each
19874 +phase, in order to account for modifications.
19875 .TP
19876 .B noclean
19877 Do not delete the source and temporary files after the merge process.
19878 @@ -413,11 +465,6 @@ Use finer\-grained locks when installing packages, allowing for greater
19879 parallelization. For additional parallelization, disable
19880 \fIebuild\-locks\fR.
19881 .TP
19882 -.B parse\-eapi\-ebuild\-head
19883 -Parse \fBEAPI\fR from the head of the ebuild as specified in PMS section
19884 -7.3.1, and treat non\-conformant ebuilds as invalid. This feature is
19885 -enabled by default, and will soon become enabled unconditionally.
19886 -.TP
19887 .B prelink\-checksums
19888 If \fBprelink\fR(8) is installed then use it to undo any prelinks on files
19889 before computing checksums for merge and unmerge. This feature is
19890 @@ -434,6 +481,9 @@ already prelinked files to be merged.
19891 .B preserve\-libs
19892 Preserve libraries when the sonames change during upgrade or downgrade.
19893 Libraries are preserved only if consumers of those libraries are detected.
19894 +Preserved libraries are automatically removed when there are no remaining
19895 +consumers. Run `emerge @preserved\-rebuild` in order to rebuild all
19896 +consumers of preserved libraries.
19897 .TP
19898 .B protect\-owned
19899 This is identical to the \fIcollision\-protect\fR feature except that files
19900 @@ -447,10 +497,10 @@ selectively disable this feature. It is recommended to leave either
19901 since otherwise file collisions between packages may result in files being
19902 overwritten or uninstalled at inappropriate times.
19903 If \fIcollision\-protect\fR is enabled then it takes precedence over
19904 -\fIprotect\-owned\fR.
19905 +\fIprotect\-owned\fR.
19906 .TP
19907 .B python\-trace
19908 -Output a verbose trace of python execution to stderr when a command's
19909 +Output a verbose trace of python execution to stderr when a command's
19910 \-\-debug option is enabled.
19911 .TP
19912 .B sandbox
19913 @@ -460,15 +510,16 @@ Enable sandbox\-ing when running \fBemerge\fR(1) and \fBebuild\fR(1).
19914 Enable SELinux sandbox\-ing. Do not toggle this \fBFEATURE\fR yourself.
19915 .TP
19916 .B sfperms
19917 -Stands for Smart Filesystem Permissions. Before merging packages to the
19918 -live filesystem, automatically search for and set permissions on setuid
19919 -and setgid files. Files that are setuid have the group and other read
19920 -bits removed while files that are setgid have the other read bit removed.
19921 +Stands for Smart Filesystem Permissions. Before merging packages to the
19922 +live filesystem, automatically search for and set permissions on setuid
19923 +and setgid files. Files that are setuid have the group and other read
19924 +bits removed while files that are setgid have the other read bit removed.
19925 See also \fIsuidctl\fR below.
19926 .TP
19927 .B sign
19928 -When commiting work to cvs with \fBrepoman\fR(1), sign the Manifest with
19929 -a GPG key. Read about the \fIPORTAGE_GPG_KEY\fR variable in \fBmake.conf\fR(5).
19930 +When committing work to cvs with \fBrepoman\fR(1), sign the Manifest with
19931 +a GPG key. Read about the \fIPORTAGE_GPG_KEY\fR variable in
19932 +\fBmake.conf\fR(5).
19933 .TP
19934 .B skiprocheck
19935 Skip write access checks on \fBDISTDIR\fR when fetching files. This is
19936 @@ -483,20 +534,20 @@ incompatibility.
19937 Store logs created by \fBPORTAGE_ELOG_SYSTEM="save"\fR in category
19938 subdirectories of \fBPORT_LOGDIR/elog\fR, instead of using
19939 \fBPORT_LOGDIR/elog\fR directly.
19940 -.TP
19941 +.TP
19942 .B split\-log
19943 Store build logs in category subdirectories of \fBPORT_LOGDIR/build\fR,
19944 instead of using \fBPORT_LOGDIR\fR directly.
19945 .TP
19946 .B splitdebug
19947 -Prior to stripping ELF etdyn and etexec files, the debugging info is
19948 +Prior to stripping ELF etdyn and etexec files, the debugging info is
19949 stored for later use by various debuggers. This feature is disabled by
19950 \fBnostrip\fR. You should also consider setting \fBcompressdebug\fR so
19951 the files don't suck up a lot of space. For installation of source code,
19952 see \fBinstallsources\fR.
19953 .TP
19954 .B strict
19955 -Have portage react strongly to conditions that have the potential to be
19956 +Have portage react strongly to conditions that have the potential to be
19957 dangerous (like missing or incorrect digests for ebuilds).
19958 .TP
19959 .B stricter
19960 @@ -505,14 +556,17 @@ security provisions (for example textrels, executable stack). Read about
19961 the \fIQA_STRICT_*\fR variables in \fBmake.conf\fR(5).
19962 .TP
19963 .B suidctl
19964 -Before merging packages to the live filesystem, automatically strip setuid
19965 +Before merging packages to the live filesystem, automatically strip setuid
19966 bits from any file that is not listed in \fI/etc/portage/suidctl.conf\fR.
19967 .TP
19968 .B test
19969 -Run package\-specific tests during each merge to help make sure
19970 -the package compiled properly. See \fItest\fR in \fBebuild\fR(1)
19971 +Run package\-specific tests during each merge to help make sure
19972 +the package compiled properly. See \fItest\fR in \fBebuild\fR(1)
19973 and \fIsrc_test()\fR in \fBebuild\fR(5). This feature implies the "test"
19974 -\fBUSE\fR flag.
19975 +\fBUSE\fR flag if it is a member of \fBIUSE\fR, either explicitly or
19976 +implicitly (see \fBebuild\fR(5) for more information about \fBIUSE\fR).
19977 +The "test" \fBUSE\fR flag is also automatically disabled when the
19978 +"test" feature is disabled.
19979 .TP
19980 .B test\-fail\-continue
19981 If "test" is enabled \fBFEATURES\fR and the test phase of an ebuild fails,
19982 @@ -545,22 +599,28 @@ When portage is run as root, drop privileges to portage:portage during the
19983 fetching of package sources.
19984 .TP
19985 .B userpriv
19986 -Allow portage to drop root privileges and compile packages as
19987 +Allow portage to drop root privileges and compile packages as
19988 portage:portage without a sandbox (unless \fIusersandbox\fR is also used).
19989 .TP
19990 .B usersandbox
19991 -Enable the sandbox in the compile phase, when running without root privs (\fIuserpriv\fR).
19992 +Enable the sandbox in the compile phase, when running without root privs
19993 +(\fIuserpriv\fR).
19994 .TP
19995 .B usersync
19996 -Drop privileges to the owner of \fBPORTDIR\fR for \fBemerge(1) --sync\fR
19997 -operations.
19998 +Drop privileges to the owner of ${repository_location} for \fBemerge(1) --sync\fR
19999 +operations. Note that this feature assumes that all subdirectories of
20000 +${repository_location} have the same ownership as ${repository_location} itself.
20001 +It is the user's responsibility to ensure correct ownership, since otherwise
20002 +Portage would have to waste time validating ownership for each and every sync
20003 +operation.
20004 .TP
20005 .B webrsync-gpg
20006 Enable GPG verification when using \fIemerge\-webrsync\fR.
20007 .TP
20008 .B xattr
20009 Preserve extended attributes (filesystem-stored metadata) when installing
20010 -files (see \fBattr\fR(1)).
20011 +files (see \fBattr\fR(1)). The \fBPORTAGE_XATTR_EXCLUDE\fR variable may be
20012 +used to exclude specific attributes from being preserved.
20013 .RE
20014 .TP
20015 .B FETCHCOMMAND
20016 @@ -587,9 +647,9 @@ the \fIebuild scripts\fR. Merging 'mirrorselect' can help. Entries in this
20017 variable that have no protocol and simply start with a '/' path separator may
20018 be used to specify mounted filesystem mirrors.
20019 .TP
20020 -\fBhttp_proxy ftp_proxy\fR = \fI[protocol://host:port]\fR
20021 -These vars are used if the sources must be downloaded from the
20022 -internet by \fBwget\fR(1). They are only required if you use a
20023 +\fBhttp_proxy ftp_proxy RSYNC_PROXY\fR = \fI[protocol://host:port]\fR
20024 +These variables are used by network clients such as \fBwget\fR(1) and
20025 +\fBrsync\fR(1). They are only required if you use a
20026 proxy server for internet access.
20027 .TP
20028 \fBINSTALL_MASK\fR = \fI[space delimited list of file names]\fR
20029 @@ -615,7 +675,7 @@ enabled for these flags may be closed as INVALID.
20030 .TP
20031 .B MAKEOPTS
20032 Use this variable if you want to use parallel make. For example, if you
20033 -have a dual\-processor system, set this variable to "\-j2" or "\-j3" for
20034 +have a dual\-processor system, set this variable to "\-j2" or "\-j3" for
20035 enhanced build performance with many packages. Suggested settings are
20036 between \fICPUs+1\fR and \fI2*CPUs+1\fR. In order to avoid
20037 excess load, the \fB\-\-load\-average\fR option is recommended.
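A hedged example for a machine with 4 CPUs, following the CPUs+1 guideline above:

    MAKEOPTS="-j5 --load-average=5"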
20038 @@ -644,9 +704,9 @@ Defaults to /usr/portage/packages.
20039 This variable defines the directory in which per\-ebuild logs are kept.
20040 Logs are created only when this is set. They are stored as
20041 ${CATEGORY}:${PF}:YYYYMMDD\-HHMMSS.log in the directory specified. If the
20042 -directory does not exist, it will be created automatically and group permissions
20043 -will be applied to it. If the directory already exists, portage will not
20044 -modify it's permissions.
20045 +directory does not exist, it will be created automatically and group
20046 +permissions will be applied to it. If the directory already exists, portage
20047 +will not modify its permissions.
20048 .TP
20049 .B PORT_LOGDIR_CLEAN
20050 This variable should contain a command for portage to call in order
20051 @@ -658,16 +718,13 @@ unless \fBclean\-logs\fR is enabled in \fBFEATURES\fR.
20052 \fBPORTAGE_BINHOST\fR = \fI[space delimited URI list]\fR
20053 This is a list of hosts from which portage will grab prebuilt\-binary packages.
20054 Each entry in the list must specify the full address of a directory
20055 -serving tbz2's for your system. This is only used when running with
20056 -the get binary pkg options are given to \fBemerge\fR. Review \fBemerge\fR(1)
20057 -for more information. For versions of portage less that 2.1.6, this variable
20058 -should point to the 'All' directory on the host that creates the binary
20059 -packages and not to the root of the \fBPKGDIR\fR. Starting with portage 2.1.6,
20060 -it should point to a directory containing a 'Packages' index file. If
20061 -${PORTAGE_BINHOST}/Packages does not exist then portage will attempt to use
20062 -the older protocol.
20063 -.TP
20064 -\fBPORTAGE_BINHOST_HEADER_URI\fR = \fI"ftp://login:pass@××××××××××.site/pub/grp/i686/athlon\-xp/"\fR
20065 +serving tbz2's for your system (this directory must contain a 'Packages' index
20066 +file). This is only used when binary package options such as
20067 +\fB\-\-getbinpkg\fR are given to \fBemerge\fR. Review \fBemerge\fR(1)
20068 +for more information.
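For illustration, a client-side make.conf sketch; the URI is only an example and must point at a directory containing a 'Packages' index, as noted above:

    PORTAGE_BINHOST="http://binhost.example.org/packages"
    FEATURES="getbinpkg"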
20069 +.TP
20070 +\fBPORTAGE_BINHOST_HEADER_URI\fR = \
20071 +\fI"ftp://login:pass@××××××××××.site/pub/grp/i686/athlon\-xp/"\fR
20072 This variable only makes sense on a system that will serve as a binhost and
20073 build packages for clients. It defines the URI header field for the package
20074 index file which is located at ${PKGDIR}/Packages. Clients that have
20075 @@ -680,6 +737,10 @@ setting as the base URI.
20076 This variable contains options to be passed to the tar command for creation
20077 of binary packages.
20078 .TP
20079 +.B PORTAGE_BINPKG_FORMAT
20080 +This variable sets the default format used for binary packages. Possible
20081 +values are "tar" and "rpm", or both.
20082 +.TP
20083 \fBPORTAGE_BUNZIP2_COMMAND\fR = \fI[bunzip2 command string]\fR
20084 This variable should contain a command that is suitable for portage to call
20085 for bunzip2 extraction operations.
20086 @@ -690,6 +751,23 @@ for bzip2 compression operations. \fBPORTAGE_BZIP2_COMMAND\fR will also be
20087 called for extraction operation, with -d appended, unless the
20088 \fBPORTAGE_BUNZIP2_COMMAND\fR variable is set.
20089 .TP
20090 +\fBPORTAGE_CHECKSUM_FILTER\fR = \fI[space delimited list of hash names]\fR
20091 +This variable may be used to filter the hash functions that are used to
20092 +verify integrity of files. Hash function names are case\-insensitive, and
20093 +the \fI*\fR and \fI\-*\fR wildcard tokens are supported.
20094 +.br
20095 +Defaults to the value of *.
20096 +.br
20097 +.I Examples:
20098 +.nf
20099 +# Use all available hash functions
20100 +PORTAGE_CHECKSUM_FILTER="*"
20101 +# Use any function except whirlpool
20102 +PORTAGE_CHECKSUM_FILTER="* \-whirlpool"
20103 +# Only use sha256
20104 +PORTAGE_CHECKSUM_FILTER="\-* sha256"
20105 +.fi
20106 +.TP
20107 \fBPORTAGE_COMPRESS\fR = \fI"bzip2"\fR
20108 This variable contains the command used to compress documentation during the
20109 install phase.
20110 @@ -794,7 +872,7 @@ Additional rsync options to be used by \fBemerge \-\-sync\fR.
20111 Defaults to no value.
20112 .TP
20113 \fBPORTAGE_RSYNC_OPTS\fR = \fI[rsync options string]\fR
20114 -Default rsync options to be used by \fBemerge \-\-sync\fR.
20115 +Default rsync options to be used by \fBemerge \-\-sync\fR.
20116 .br
20117 \fBDon't change this unless you know exactly what you're doing!\fR
20118 .br
20119 @@ -810,6 +888,14 @@ addresses are exhausted.
20120 .br
20121 Defaults to -1.
20122 .TP
20123 +\fBPORTAGE_SSH_OPTS\fR = \fI[list of ssh options]\fR
20124 +Additional ssh options to be used when portage executes ssh or sftp.
20125 +This variable supports use of embedded quote characters to quote
20126 +whitespace or special shell characters within arguments (embedded
20127 +quotes must be escaped in make.conf settings).
20128 +.br
20129 +Defaults to no value.
20130 +.TP
20131 \fBPORTAGE_SYNC_STALE\fR = \fI[NUMBER]\fR
20132 Defines the number of days after the last `emerge \-\-sync` that a warning
20133 message should be produced. A value of 0 will disable warnings.
20134 @@ -820,6 +906,8 @@ Defaults to 30.
20135 Defines the location of the temporary build directories.
20136 .br
20137 Defaults to /var/tmp.
20138 +
20139 +This should not be set to point anywhere under the location of any repository.
20140 .TP
20141 \fBPORTAGE_USERNAME\fR = \fI[user]\fR
20142 Defines the username to use when executing in userpriv/etc... modes (i.e.
20143 @@ -830,12 +918,18 @@ Defaults to portage.
20144 \fBPORTAGE_WORKDIR_MODE\fR = \fI"0700"\fR
20145 This variable controls permissions for \fIWORKDIR\fR (see \fBebuild\fR(5)).
20146 .TP
20147 +\fBPORTAGE_XATTR_EXCLUDE\fR = \fI[space delimited list of fnmatch patterns]\fR
20148 +This variable may be used to exclude specific attributes from being preserved
20149 +when \fBxattr\fR is in \fBFEATURES\fR.
20150 +.br
20151 +Defaults to "security.*" (security labels are special, see bug #461868).
20152 +.TP
20153 \fBPORTDIR\fR = \fI[path]\fR
20154 -Defines the location of the Portage tree. This is the repository for all
20155 -profile information as well as all ebuilds. If you change this, you must update
20156 -your /etc/make.profile symlink accordingly.
20157 +Defines the location of the main repository. This variable is deprecated in
20158 +favor of settings in \fBrepos.conf\fR. If you change this, you must update
20159 +your /etc/portage/make.profile symlink accordingly.
20160 .br
20161 -Defaults to /usr/portage.
20162 +Defaults to /usr/portage.
20163 .br
20164 \fB***Warning***\fR
20165 .br
20166 @@ -843,13 +937,13 @@ Data stored inside \fBPORTDIR\fR is in peril of being overwritten or deleted by
20167 the emerge \-\-sync command. The default value of
20168 \fBPORTAGE_RSYNC_OPTS\fR will protect the default locations of
20169 \fBDISTDIR\fR and \fBPKGDIR\fR, but users are warned that any other locations
20170 -inside \fBPORTDIR\fR are not necessarily safe for data storage. You should not
20171 -put other data (such as overlays) in your \fBPORTDIR\fB. Portage will walk
20172 -directory structures and may arbitrary add invalid categories as packages.
20173 +inside \fBPORTDIR\fR are not necessarily safe for data storage. You should not
20174 +put other data (such as overlays) in your \fBPORTDIR\fR. Portage will walk
20175 +directory structures and may arbitrarily add invalid categories as packages.
20176 .TP
20177 \fBPORTDIR_OVERLAY\fR = \fI"[path] [different\-path] [etc...]"\fR
20178 -Defines the directories in which user made ebuilds may be stored and not
20179 -overwriten when `emerge \-\-sync` is run. This is a space delimited list of
20180 +Defines the locations of other repositories. This variable is deprecated in
20181 +favor of settings in \fBrepos.conf\fR. This variable is a space\-delimited list of
20182 directories.
20183 .br
20184 Defaults to no value.
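As a sketch of the repos.conf replacement suggested above (repository name and path are hypothetical), an extra repository could be declared with just a location attribute:

    [my-overlay]
    location = /usr/local/portage/my-overlay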
20185 @@ -870,10 +964,9 @@ settings from ebuilds. See also \fBebuild\fR(5).
20186 Set this to cause portage to ignore any \fIQA_FLAGS_IGNORED\fR override
20187 settings from ebuilds. See also \fBebuild\fR(5).
20188 .TP
20189 -\fBQA_STRICT_DT_HASH = \fI"set"\fR
20190 -Set this to cause portage to ignore any \fIQA_DT_HASH\fR override
20191 -settings from ebuilds. This variable is deprecated. Use
20192 -\fIQA_STRICT_FLAGS_IGNORED\fR instead.
20193 +\fBQA_STRICT_MULTILIB_PATHS = \fI"set"\fR
20194 +Set this to cause portage to ignore any \fIQA_MULTILIB_PATHS\fR override
20195 +settings from ebuilds. See also \fBebuild\fR(5).
20196 .TP
20197 \fBQA_STRICT_PRESTRIPPED = \fI"set"\fR
20198 Set this to cause portage to ignore any \fIQA_PRESTRIPPED\fR override
20199 @@ -887,25 +980,46 @@ be necessary in order to continue a partially downloaded file located at
20200 \\${DISTDIR}/\\${FILE}.
20201 .TP
20202 \fBROOT\fR = \fI[path]\fR
20203 -Use \fBROOT\fR to specify the target root filesystem to be used for merging
20204 -packages or ebuilds. All \fBRDEPEND\fR and \fBPDEPEND\fR will be installed
20205 -into \fBROOT\fR while all \fBDEPEND\fR will be still be installed into /.
20206 -Typically, you should set this setting in the environment rather than in
20207 -\fI/etc/make.conf\fR itself. It's commonly used for creating new build
20208 -images. Make sure you use an absolute path.
20209 +Use \fBROOT\fR to specify the target root filesystem to be used for merging
20210 +packages or ebuilds.
20211 +Typically, you should set this setting in the environment rather than in
20212 +\fImake.conf\fR itself. It's commonly used for creating new build
20213 +images. Make sure you use an absolute path. Refer to the
20214 +\fBCross-compilation\fR section of \fBebuild\fR(5) for information about
20215 +how dependencies are handled for \fBROOT\fR.
20216 .br
20217 Defaults to /.
20218 .TP
20219 \fBRPMDIR\fR = \fI[path]\fR
20220 Defines the location where created RPM packages will be stored.
20221 .br
20222 -Defaults to ${PORTDIR}/rpm.
20223 +Defaults to /usr/portage/rpm.
20224 .TP
20225 \fBSYNC\fR = \fI[RSYNC]\fR
20226 Insert your preferred rsync mirror here. This rsync server
20227 is used to sync the local portage tree when `emerge \-\-sync` is run.
20228 -.br
20229 +
20230 +Note that the \fBSYNC\fR variable is now deprecated, and instead the
20231 +sync\-type and sync\-uri attributes in repos.conf should be used. See
20232 +\fBportage\fR(5) for more information.
20233 +
20234 Defaults to rsync://rsync.gentoo.org/gentoo\-portage
20235 +.RS
20236 +.TP
20237 +.B Usage:
20238 +(rsync|ssh)://[username@]hostname[:port]/(module|path)
20239 +.TP
20240 +.B Examples:
20241 +rsync://private\-mirror.com/portage\-module
20242 +.br
20243 +rsync://rsync\-user@private\-mirror.com:873/gentoo\-portage
20244 +.br
20245 +ssh://ssh\-user@192.168.0.1:22/usr/portage
20246 +.br
20247 +ssh://ssh\-user@192.168.0.1:22/\\${HOME}/portage\-storage
20248 +.TP
20249 +Note: For the ssh:// scheme, key\-based authentication might be of interest.
20250 +.RE
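A repos.conf sketch equivalent to the default SYNC value above, expressed with the sync-type and sync-uri attributes it points to:

    [gentoo]
    location = /usr/portage
    sync-type = rsync
    sync-uri = rsync://rsync.gentoo.org/gentoo-portage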
20251 .TP
20252 \fBUNINSTALL_IGNORE\fR = \fI[space delimited list of fnmatch patterns]\fR
20253 This variable prevents uninstallation of files that match
20254 @@ -920,15 +1034,6 @@ This variable contains options that control the build behavior of several
20255 packages. More information in \fBebuild\fR(5). Possible USE values
20256 can be found in \fI/usr/portage/profiles/use.desc\fR.
20257 .TP
20258 -\fBUSE_EXPAND\fR = \fI[space delimited list of variable names]\fR
20259 -Any variable listed here will be used to augment USE by inserting a new flag
20260 -for every value in that variable, so USE_EXPAND="FOO" and FOO="bar bla" results
20261 -in USE="foo_bar foo_bla".
20262 -.TP
20263 -\fBUSE_EXPAND_HIDDEN\fR = \fI[space delimited list of variable names]\fR
20264 -Names of \fBUSE_EXPAND\fR variables that should not be shown in the verbose merge
20265 -list output of the \fBemerge\fR(1) command.
20266 -.TP
20267 \fBUSE_ORDER\fR = \fI"env:pkg:conf:defaults:pkginternal:repo:env.d"\fR
20268 Determines the precedence of layers in the incremental stacking of the USE
20269 variable. Precedence decreases from left to right such that env overrides
20270 @@ -975,14 +1080,17 @@ Daniel Robbins <drobbins@g.o>
20271 Nicholas Jones <carpaski@g.o>
20272 Mike Frysinger <vapier@g.o>
20273 Saleem Abdulrasool <compnerd@g.o>
20274 +Arfrever Frehtes Taifersar Arahesis <arfrever@××××××.org>
20275 .fi
20276 .SH "FILES"
20277 .TP
20278 \fB/etc/make.conf\fR and \fB/etc/portage/make.conf\fR
20279 -Contains variables for the build\-process and overwrites those in make.defaults.
20280 +Contains variables for the build\-process and overwrites those in
20281 +make.defaults.
20282 .TP
20283 .B /usr/share/portage/config/make.globals
20284 -Contains the default variables for the build\-process, you should edit \fI/etc/make.conf\fR instead.
20285 +Contains the default variables for the build\-process, you should edit
20286 +\fI/etc/portage/make.conf\fR instead.
20287 .TP
20288 .B /etc/portage/color.map
20289 Contains variables customizing colors.
20290 @@ -998,6 +1106,6 @@ Contains a list of all local USE variables.
20291 .BR ebuild (1),
20292 .BR ebuild (5)
20293 .TP
20294 -The \fI/usr/sbin/ebuild.sh\fR script.
20295 +The \fI/usr/lib/portage/bin/ebuild.sh\fR script.
20296 .TP
20297 The helper apps in \fI/usr/lib/portage/bin\fR.
20298
20299 diff --git a/man/portage.5 b/man/portage.5
20300 index ad84ff1..e399f0f 100644
20301 --- a/man/portage.5
20302 +++ b/man/portage.5
20303 @@ -1,26 +1,21 @@
20304 -.TH "PORTAGE" "5" "Jun 2012" "Portage VERSION" "Portage"
20305 +.TH "PORTAGE" "5" "Feb 2014" "Portage VERSION" "Portage"
20306 .SH NAME
20307 portage \- the heart of Gentoo
20308 .SH "DESCRIPTION"
20309 -The current portage code uses many different configuration files, most of which
20310 -are unknown to users and normal developers. Here we will try to collect all
20311 -the odds and ends so as to help users more effectively utilize portage. This
20312 +The current portage code uses many different configuration files, most of which
20313 +are unknown to users and normal developers. Here we will try to collect all
20314 +the odds and ends so as to help users more effectively utilize portage. This
20315 is a reference only for files which do not already have a man page.
20316
20317 -All files in the make.profile directory may be tweaked via parent profiles
20318 -when using cascading profiles. For more info, please see
20319 +All files in the make.profile directory may be tweaked via parent profiles
20320 +when using cascading profiles. For more info, please see
20321 http://www.gentoo.org/proj/en/releng/docs/cascading-profiles.xml
20322 .IP Note:
20323 -If you are looking for information on how to emerge something, please see
20324 +If you are looking for information on how to emerge something, please see
20325 .BR emerge (1).
20326 .SH "SYNOPSIS"
20327 .TP
20328 -.BR /etc/
20329 -.nf
20330 -.BR make.conf (5)
20331 -.fi
20332 -.TP
20333 -\fB/etc/make.profile/\fR or \fB/etc/portage/make.profile/\fR
20334 +\fB/etc/portage/make.profile/\fR or \fB/etc/make.profile/\fR
20335 site\-specific overrides go in \fB/etc/portage/profile/\fR
20336 .nf
20337 deprecated
20338 @@ -36,10 +31,14 @@ package.unmask
20339 package.use
20340 package.use.force
20341 package.use.mask
20342 +package.use.stable.force
20343 +package.use.stable.mask
20344 parent
20345 profile.bashrc
20346 use.force
20347 use.mask
20348 +use.stable.mask
20349 +use.stable.force
20350 virtuals
20351 .fi
20352 .TP
20353 @@ -49,10 +48,11 @@ bashrc
20354 categories
20355 color.map
20356 license_groups
20357 -make.conf
20358 +.BR make.conf (5)
20359 mirrors
20360 modules
20361 package.accept_keywords
20362 +package.accept_restrict
20363 package.env
20364 package.keywords
20365 package.license
20366 @@ -67,7 +67,7 @@ repos.conf
20367 package-specific bashrc files
20368 .TP
20369 .BR /etc/portage/profile/
20370 -site-specific overrides of \fB/etc/make.profile/\fR
20371 +site-specific overrides of \fB/etc/portage/make.profile/\fR
20372 .TP
20373 .BR /etc/portage/sets/
20374 user\-defined package sets
20375 @@ -90,6 +90,8 @@ package.unmask
20376 package.use
20377 package.use.force
20378 package.use.mask
20379 +package.use.stable.force
20380 +package.use.stable.mask
20381 profiles.desc
20382 repo_name
20383 thirdpartymirrors
20384 @@ -97,11 +99,14 @@ use.desc
20385 use.force
20386 use.local.desc
20387 use.mask
20388 +use.stable.mask
20389 +use.stable.force
20390 .fi
20391 .TP
20392 .BR /usr/share/portage/config/
20393 .nf
20394 make.globals
20395 +repos.conf
20396 .fi
20397 .TP
20398 .BR /var/cache/edb/
20399 @@ -117,8 +122,8 @@ world
20400 world_sets
20401 .fi
20402 .SH "GLOSSARY"
20403 -In the following sections, some terminology may be foreign to you or used
20404 -with meaning specific to Portage. Please see the referenced manpages for
20405 +In the following sections, some terminology may be foreign to you or used
20406 +with meaning specific to Portage. Please see the referenced manpages for
20407 more detailed explanations.
20408 .RS
20409 .TP
20410 @@ -127,7 +132,7 @@ An atom is either of the form category/package or consists of an operator
20411 followed by category/package followed by a hyphen and a version specification.
20412 An atom might be suffixed by a slot specification.
20413 .br
20414 -More reading:
20415 +More reading:
20416 .BR ebuild (5)
20417
20418 .B Extended Atom Syntax
20419 @@ -141,8 +146,7 @@ configuration files and command line arguments for programs such as
20420 Atoms with repository constraints have a '::' separator appended to the
20421 right side, followed by a repository name. Each repository name should
20422 correspond to the value of a \fBrepo_name\fR entry from one of the
20423 -repositories that is configured via the \fBPORTDIR\fR or
20424 -\fBPORTDIR_OVERLAY\fR variables (see \fBmake.conf\fR(5)).
20425 +repositories that is configured in the \fBrepos.conf\fR file.
20426
20427 .I Examples:
20428 .nf
20429 @@ -165,6 +169,8 @@ next to each other.
20430 # match anything with a version containing 9999, which can be used in
20431 # package.mask to prevent emerge --autounmask from selecting live ebuilds
20432 =*/*-*9999*
20433 +# match anything with a version containing _beta
20434 +=*/*-*_beta*
20435 # match anything from the 'sys\-apps' category
20436 sys\-apps/*
20437 # match packages named 'zlib' from any category
20438 @@ -181,38 +187,31 @@ net\-*/*
20439 .B KEYWORD
20440 Each architecture has a unique KEYWORD.
20441 .br
20442 -More reading:
20443 +More reading:
20444 .BR ebuild (5)
20445 .TP
20446 .B virtual
20447 -A DEPEND atom that is part of the "virtual" category. They are used
20448 -when different packages can satisfy a dependency and only one of them is
20449 +A DEPEND atom that is part of the "virtual" category. They are used
20450 +when different packages can satisfy a dependency and only one of them is
20451 needed.
20452 .br
20453 -More reading:
20454 +More reading:
20455 .BR ebuild (5)
20456 .RE
20457 .SH "SPECIFIC FILE DESCRIPTIONS"
20458 .TP
20459 -.BR /etc/
20460 -.RS
20461 -.TP
20462 -.BR make.conf
20463 -The global custom settings for Portage. See \fBmake.conf\fR(5).
20464 -.RE
20465 -.TP
20466 -\fB/etc/make.profile/\fR or \fB/etc/portage/make.profile/\fR
20467 -This is usually just a symlink to the correct profile in
20468 -\fB/usr/portage/profiles/\fR. Since it is part of the portage tree, it
20469 -may easily be updated/regenerated by running `emerge \-\-sync`. It defines
20470 -what a profile is (usually arch specific stuff). If you need a custom
20471 -profile, then you should make your own \fB/etc/make.profile/\fR
20472 -directory and populate it. However, if you just wish to override some
20473 +\fB/etc/portage/make.profile/\fR or \fB/etc/make.profile/\fR
20474 +This is usually just a symlink to the correct profile in
20475 +\fB/usr/portage/profiles/\fR. Since it is part of the portage tree, it
20476 +may easily be updated/regenerated by running `emerge \-\-sync`. It defines
20477 +what a profile is (usually arch specific stuff). If you need a custom
20478 +profile, then you should make your own \fBmake.profile\fR
20479 +directory and populate it. However, if you just wish to override some
20480 settings, use \fB/etc/portage/profile/\fR (it supports all of the same file
20481 -types that \fB/etc/make.profile/\fR does, except parent). Do NOT edit the
20482 -settings in \fB/etc/make.profile/\fR because they WILL be lost with the next
20483 -`emerge \-\-sync`. If both \fB/etc/make.profile/\fR and
20484 -\fB/etc/portage/make.profile/\fR exist, then \fB/etc/make.profile/\fR
20485 +types that \fBmake.profile\fR does, except parent). Do NOT edit the
20486 +settings in \fBmake.profile\fR because they WILL be lost with the next
20487 +`emerge \-\-sync`. If both \fB/etc/portage/make.profile/\fR and
20488 +\fB/etc/make.profile/\fR exist, then \fB/etc/portage/make.profile/\fR
20489 will be preferred.
20490
20491 Any file in this directory, directories of other profiles or top-level
20492 @@ -224,33 +223,39 @@ portage-2.1.6.7, and it is not included in PMS at this time.
20493
20494 .I Example:
20495 .nf
20496 -${PORTDIR}/profiles/package.mask/removals
20497 -${PORTDIR}/profiles/package.mask/testing
20498 +${repository_location}/profiles/package.mask/removals
20499 +${repository_location}/profiles/package.mask/testing
20500 .fi
20501 .RS
20502 .TP
20503 .BR deprecated
20504 -The existence of this file marks a profile as deprecated, meaning it is
20505 -not supported by Gentoo anymore. The first line must be the profile to which
20506 -users are encouraged to upgrade, optionally followed by some instructions
20507 +The existence of this file marks a profile as deprecated, meaning it is
20508 +not supported by Gentoo anymore. The first line must be the profile to which
20509 +users are encouraged to upgrade, optionally followed by some instructions
20510 explaining how they can upgrade.
20511
20512 .I Example:
20513 .nf
20514 default-linux/x86/2005.0
20515 # emerge -n '>=sys-apps/portage-2.0.51'
20516 -# rm -f /etc/make.profile
20517 -# ln -s /usr/portage/profiles/default-linux/alpha/2005.0 /etc/make.profile
20518 +# rm -f /etc/portage/make.profile
20519 +# ln -s /usr/portage/profiles/default-linux/alpha/2005.0 \
20520 +/etc/portage/make.profile
20521 .fi
20522 .TP
20523 .BR eapi
20524 The first line of this file specifies the \fBEAPI\fR to which files in the
20525 same directory conform. See \fBebuild\fR(5) for information about \fBEAPI\fR
20526 -and related features.
20527 +and related features. Beginning with \fBEAPI 5\fR, new USE
20528 +configuration files are supported: use.stable.mask,
20529 +use.stable.force, package.use.stable.mask and
20530 +package.use.stable.force. These files behave similarly to
20531 +previously supported USE configuration files, except that they
20532 +only influence packages that are merged due to a stable keyword.
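A profile-directory sketch of the EAPI 5 stable files described above; the profile path and the masked flag are hypothetical:

    # profiles/targets/example/eapi (first line declares the EAPI of this directory)
    5
    # profiles/targets/example/use.stable.mask (masks the flag only for stable-keyword installs)
    systemd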
20533 .TP
20534 .BR make.defaults
20535 -The profile default settings for Portage. The general format is described
20536 -in \fBmake.conf\fR(5). The \fImake.defaults\fR for your profile defines a
20537 +The profile default settings for Portage. The general format is described
20538 +in \fBmake.conf\fR(5). The \fImake.defaults\fR for your profile defines a
20539 few specific variables too:
20540
20541 .PD 0
20542 @@ -259,21 +264,66 @@ few specific variables too:
20543 .BR ARCH
20544 Architecture type (x86/ppc/hppa/etc...).
20545 .TP
20546 +\fBIUSE_IMPLICIT\fR = \fI[space delimited list of USE flags]\fR
20547 +Defines implicit \fBIUSE\fR for ebuilds using \fBEAPI 5\fR or
20548 +later. Flags that come from \fBUSE_EXPAND\fR or
20549 +\fBUSE_EXPAND_UNPREFIXED\fR variables do not belong in
20550 +\fBIUSE_IMPLICIT\fR, since \fBUSE_EXPAND_VALUES_*\fR variables
20551 +are used to define implicit \fBIUSE\fR for those flags. See
20552 +\fBebuild\fR(5) for more information about \fBIUSE\fR.
20553 +.TP
20554 .B USERLAND = \fI"GNU"\fR
20555 Support BSD/cygwin/etc...
20556 .TP
20557 +\fBUSE_EXPAND\fR = \fI[space delimited list of variable names]\fR
20558 +Any variable listed here will be used to augment USE by inserting a new flag
20559 +for every value in that variable, so USE_EXPAND="FOO" and FOO="bar bla" results
20560 +in USE="foo_bar foo_bla".
20561 +.TP
20562 +\fBUSE_EXPAND_HIDDEN\fR = \fI[space delimited list of variable names]\fR
20563 +Names of \fBUSE_EXPAND\fR variables that should not be shown in the verbose
20564 +merge list output of the \fBemerge\fR(1) command.
20565 +.TP
20566 +\fBUSE_EXPAND_IMPLICIT\fR = \fI[space delimited list of variable names]\fR
20567 +Defines \fBUSE_EXPAND\fR and \fBUSE_EXPAND_UNPREFIXED\fR
20568 +variables for which the corresponding USE flags may have
20569 +implicit \fBIUSE\fR for ebuilds using \fBEAPI 5\fR or later.
20570 +.TP
20571 +\fBUSE_EXPAND_UNPREFIXED\fR = \fI[space delimited list of variable names]\fR
20572 +Any variable listed here will be used to augment USE by
20573 +inserting a new flag for every value in that variable, so
20574 +USE_EXPAND_UNPREFIXED="FOO" and FOO="bar bla" results in
20575 +USE="bar bla".
20576 +.TP
20577 +\fBUSE_EXPAND_VALUES_ARCH\fR = \fI[space delimited list of ARCH values]\fR
20578 +Defines ARCH values used to generate implicit
20579 +\fBIUSE\fR for ebuilds using \fBEAPI 5\fR or later.
20580 +.TP
20581 +\fBUSE_EXPAND_VALUES_ELIBC\fR = \fI[space delimited list of ELIBC values]\fR
20582 +Defines ELIBC values used to generate implicit
20583 +\fBIUSE\fR for ebuilds using \fBEAPI 5\fR or later.
20584 +.TP
20585 +\fBUSE_EXPAND_VALUES_KERNEL\fR = \fI[space delimited list of KERNEL values]\fR
20586 +Defines KERNEL values used to generate implicit
20587 +\fBIUSE\fR for ebuilds using \fBEAPI 5\fR or later.
20588 +.TP
20589 +\fBUSE_EXPAND_VALUES_USERLAND\fR = \fI[space delimited list of USERLAND \
20590 +values]\fR
20591 +Defines USERLAND values used to generate implicit
20592 +\fBIUSE\fR for ebuilds using \fBEAPI 5\fR or later.
20593 +.TP
20594 .B ELIBC = \fI"glibc"\fR
20595 Support uClibc/BSD libc/etc...
20596 .TP
20597 .B PROFILE_ONLY_VARIABLES = \fI"ARCH"\fR
20598 -Prevent critical variables from being changed by the user in make.conf
20599 +Prevent critical variables from being changed by the user in make.conf
20600 or the env.
20601 .TP
20602 .BR PROFILE_ARCH
20603 -Distinguish machines classes that have the same \fBARCH\fR. All sparc
20604 +Distinguish machines classes that have the same \fBARCH\fR. All sparc
20605 machines have ARCH=sparc but set this to either 'sparc32' or 'sparc64'.
20606 .TP
20607 -.BR STAGE1_USE
20608 +.BR BOOTSTRAP_USE
20609 Special USE flags which may be needed when bootstrapping from stage1 to stage2.
20610 .RE
20611 .PD 1
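A make.defaults sketch tying together the implicit-IUSE variables described above; all values are illustrative, not taken from a real profile:

    USE_EXPAND="ELIBC KERNEL"
    USE_EXPAND_UNPREFIXED="ARCH"
    USE_EXPAND_IMPLICIT="ARCH ELIBC KERNEL"
    USE_EXPAND_VALUES_ARCH="amd64 x86"
    USE_EXPAND_VALUES_ELIBC="glibc uclibc"
    USE_EXPAND_VALUES_KERNEL="linux"
    IUSE_IMPLICIT="prefix"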
20612 @@ -289,8 +339,8 @@ Provides the list of packages that compose the special \fIsystem\fR set.
20613 \- atoms without * only appear for legacy reasons
20614 .fi
20615 .I Note:
20616 -In a cascading profile setup, you can remove packages in children
20617 -profiles which were added by parent profiles by prefixing the atom with
20618 +In a cascading profile setup, you can remove packages in children
20619 +profiles which were added by parent profiles by prefixing the atom with
20620 a '\-'.
20621
20622 .I Example:
20623 @@ -305,16 +355,16 @@ a '\-'.
20624 .fi
20625 .TP
20626 .BR packages.build
20627 -A list of packages (one per line) that make up a stage1 tarball. Really only
20628 +A list of packages (one per line) that make up a stage1 tarball. Really only
20629 useful for stage builders.
20630 .TP
20631 .BR package.provided
20632 -A list of packages (one per line) that portage should assume have been
20633 +A list of packages (one per line) that portage should assume have been
20634 provided. Useful for porting to non-Linux systems. Basically, it's a
20635 list that replaces the \fBemerge \-\-inject\fR syntax.
20636
20637 -For example, if you manage your own copy of a 2.6 kernel, then you can
20638 -tell portage that 'sys-kernel/development-sources-2.6.7' is already taken
20639 +For example, if you manage your own copy of a 2.6 kernel, then you can
20640 +tell portage that 'sys-kernel/development-sources-2.6.7' is already taken
20641 care of and it should get off your back about it.
20642
20643 Portage will not attempt to update a package that is listed here unless
20644 @@ -324,7 +374,9 @@ entries may cause installed packages satisfying equivalent dependencies
20645 to be removed by \fBemerge\fR(1) \fB\-\-depclean\fR actions (see the
20646 \fBACTIONS\fR section of the \fBemerge\fR(1) man page for more information).
20647
20648 -Virtual packages (virtual/*) should not be specified in package.provided.
20649 +Virtual packages (virtual/*) should not be specified in package.provided,
20650 +since virtual packages themselves do not provide any files, and
20651 +package.provided is intended to represent packages that do provide files.
20652 Depending on the type of virtual, it may be necessary to add an entry to the
20653 virtuals file and/or add a package that satisfies a virtual to
20654 package.provided.
20655 @@ -349,12 +401,12 @@ x11-libs/qt-3.3.0
20656 x11-base/xorg-x11-6.8
20657 .fi
20658 .TP
20659 -.BR package.use.force
20660 +\fBpackage.use.force\fR and \fBpackage.use.stable.force\fR
20661 Per\-package USE flag forcing.
20662
20663 .I Note:
20664 -In a cascading profile setup, you can remove USE flags in children
20665 -profiles which were added by parent profiles by prefixing the flag with
20666 +In a cascading profile setup, you can remove USE flags in children
20667 +profiles which were added by parent profiles by prefixing the flag with
20668 a '\-'.
20669
20670 .I Format:
20671 @@ -371,12 +423,12 @@ a '\-'.
20672 x11\-libs/qt \-mysql
20673 .fi
20674 .TP
20675 -.BR package.use.mask
20676 +\fBpackage.use.mask\fR and \fBpackage.use.stable.mask\fR
20677 Per\-package USE flag masks.
20678
20679 .I Note:
20680 -In a cascading profile setup, you can remove USE flags in children
20681 -profiles which were added by parent profiles by prefixing the flag with
20682 +In a cascading profile setup, you can remove USE flags in children
20683 +profiles which were added by parent profiles by prefixing the flag with
20684 a '\-'.
20685
20686 .I Format:
20687 @@ -394,23 +446,31 @@ x11\-libs/qt \-mysql
20688 .fi
20689 .TP
20690 .BR parent
20691 -This contains a path to the parent profile. It may be either relative or
20692 -absolute. The paths will be relative to the location of the profile. Most
20693 -commonly this file contains '..' to indicate the directory above. Utilized
20694 -only in cascading profiles.
20695 +This contains paths to the parent profiles (one per line). They may be either
20696 +relative (to the location of the profile) or absolute. Most commonly this file
20697 +contains '..' to indicate the directory above. Utilized only in cascading
20698 +profiles.
20699 +
20700 +When multiple parent profiles are specified, they are inherited in order from
20701 +the first line to the last.
20702 +
20703 +If \fBlayout.conf\fR enables \fBprofile\-formats\fR = portage-2, you can also use the <repo>:<path>
20704 +syntax. The <repo> is the same string as is stored in the \fBrepo_name\fR
20705 +file (or omitted to refer to the current repo), and <path> is a subdir starting
20706 +at profiles/.
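A parent-file sketch combining both forms described above, assuming the repository enables the portage-2 profile format; the relative path and the gentoo:targets/desktop entry are illustrative:

    ..
    gentoo:targets/desktop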
20707 .TP
20708 .BR profile.bashrc
20709 If needed, this file can be used to set up a special environment for ebuilds,
20710 different from the standard root environment. The syntax is the same as for
20711 any other bash script.
20712 .TP
20713 -.BR use.force
20714 +\fBuse.force\fR and \fBuse.stable.force\fR
20715 Some USE flags don't make sense to disable under certain conditions. Here we
20716 list forced flags.
20717
20718 .I Note:
20719 -In a cascading profile setup, you can remove USE flags in children
20720 -profiles which were added by parent profiles by prefixing the flag with
20721 +In a cascading profile setup, you can remove USE flags in children
20722 +profiles which were added by parent profiles by prefixing the flag with
20723 a '\-'.
20724
20725 .I Format:
20726 @@ -419,14 +479,14 @@ a '\-'.
20727 \- one USE flag per line
20728 .fi
20729 .TP
20730 -.BR use.mask
20731 -Some USE flags don't make sense on some archs (for example altivec on
20732 -non\-ppc or mmx on non\-x86), or haven't yet been tested. Here we list
20733 +\fBuse.mask\fR and \fBuse.stable.mask\fR
20734 +Some USE flags don't make sense on some archs (for example altivec on
20735 +non\-ppc or mmx on non\-x86), or haven't yet been tested. Here we list
20736 the masked ones.
20737
20738 .I Note:
20739 -In a cascading profile setup, you can remove USE flags in children
20740 -profiles which were added by parent profiles by prefixing the flag with
20741 +In a cascading profile setup, you can remove USE flags in children
20742 +profiles which were added by parent profiles by prefixing the flag with
20743 a '\-'.
20744
20745 .I Format:
20746 @@ -444,12 +504,13 @@ doc
20747 .fi
20748 .TP
20749 .BR virtuals
20750 -This controls what packages will provide a virtual by default. For example,
20751 -if a package needs to send e\-mail, it will need virtual/mta. In the absence
20752 -of a package that provides virtual/mta (like qmail, sendmail, postfix, etc...),
20753 -portage will look here to see what package to use. In this case, Gentoo uses
20754 -net\-mail/ssmtp as the default (as defined in the virtuals file) because it's
20755 -the package that does the very bare minimum to send e\-mail.
20756 +The virtuals file controls default preferences for virtuals that
20757 +are defined via the \fBPROVIDE\fR ebuild variable (see
20758 +\fBebuild\fR(5)). Since Gentoo now uses \fBGLEP 37\fR virtuals
20759 +instead of \fBPROVIDE\fR virtuals, the virtuals file is
20760 +irrelevant for all Gentoo ebuilds. However, it is still possible
20761 +for third\-parties to distribute ebuilds that make use of
20762 +\fBPROVIDE\fR.
20763
20764 .I Format:
20765 .nf
20766 @@ -467,10 +528,10 @@ virtual/aspell\-dict app\-dicts/aspell\-en
20767 .RE
20768 .TP
20769 .BR /etc/portage/
20770 -Any file in this directory that begins with "package." can be more than just a
20771 -flat file. If it is a directory, then all the files in that directory will be
20772 -sorted in ascending alphabetical order by file name and summed together as if
20773 -it were a single file.
20774 +Any file in this directory that begins with "package.", as well as repos.conf,
20775 +can be more than just a flat file. If it is a directory, then all the files in
20776 +that directory will be sorted in ascending alphabetical order by file name and
20777 +summed together as if it were a single file.
20778
20779 .I Example:
20780 .nf
20781 @@ -488,9 +549,8 @@ any other bash script.
20782 Additional package-specific bashrc files can be created in /etc/portage/env.
20783 .TP
20784 .BR categories
20785 -A simple list of valid categories that may be used in /usr/portage,
20786 -PORTDIR_OVERLAY, and PKGDIR (see \fBmake.conf\fR(5)). This allows for custom
20787 -categories to be created.
20788 +A simple list of valid categories that may be used in repositories and PKGDIR
20789 +(see \fBmake.conf\fR(5)). This allows for custom categories to be created.
20790
20791 .I Format:
20792 .nf
20793 @@ -508,14 +568,13 @@ Contains variables customizing colors. See \fBcolor.map\fR(5).
20794 .TP
20795 .BR make.conf
20796 The global custom settings for Portage. See \fBmake.conf\fR(5).
20797 -If present, this file will override settings from /etc/make.conf.
20798 .TP
20799 .BR mirrors
20800 -Whenever portage encounters a mirror:// style URI it will look up the actual
20801 -hosts here. If the mirror set is not found here, it will check the global
20802 -mirrors file at /usr/portage/profiles/thirdpartymirrors. You may also set a
20803 -special mirror type called "local". This list of mirrors will be checked
20804 -before GENTOO_MIRRORS and will be used even if the package has
20805 +Whenever portage encounters a mirror:// style URI it will look up the actual
20806 +hosts here. If the mirror set is not found here, it will check the global
20807 +mirrors file at /usr/portage/profiles/thirdpartymirrors. You may also set a
20808 +special mirror type called "local". This list of mirrors will be checked
20809 +before GENTOO_MIRRORS and will be used even if the package has
20810 RESTRICT="mirror" or RESTRICT="fetch".
20811
20812 .I Format:
20813 @@ -550,15 +609,15 @@ After changing the portdbapi.auxdbmodule setting, it may be necessary to
20814 transfer or regenerate metadata cache. Users of the rsync tree need to
20815 run `emerge \-\-metadata` if they have enabled FEATURES="metadata-transfer"
20816 in \fBmake.conf\fR(5). In order to regenerate metadata for repositories
20817 -listed in \fBPORTDIR_OVERLAY\fR or a cvs tree, run `emerge \-\-regen`
20818 +not distributing pregenerated metadata cache, run `emerge \-\-regen`
20819 (see \fBemerge\fR(1)). If you use something like the sqlite module and want
20820 to keep all metadata in that format alone (useful for querying), enable
20821 FEATURES="metadata-transfer" in \fBmake.conf\fR(5).
20822 .TP
20823 \fBpackage.accept_keywords\fR and \fBpackage.keywords\fR
20824 -Per\-package ACCEPT_KEYWORDS. Useful for mixing unstable packages in with a normally
20825 -stable system or vice versa. This will allow ACCEPT_KEYWORDS to be augmented
20826 -for a single package. If both \fBpackage.accept_keywords\fR and
20827 +Per\-package ACCEPT_KEYWORDS. Useful for mixing unstable packages in with a
20828 +normally stable system or vice versa. This will allow ACCEPT_KEYWORDS to be
20829 +augmented for a single package. If both \fBpackage.accept_keywords\fR and
20830 \fBpackage.keywords\fR are present, both of them will be used, and values
20831 from \fBpackage.accept_keywords\fR will override values from
20832 \fBpackage.keywords\fR. The \fBpackage.accept_keywords\fR file is
20833 @@ -593,22 +652,37 @@ three special tokens:
20834 .fi
20835
20836 .I Additional Note:
20837 -If you encounter the \fB-*\fR KEYWORD, this indicates that the package is known
20838 -to be broken on all systems which are not otherwise listed in KEYWORDS. For
20839 +If you encounter the \fB-*\fR KEYWORD, this indicates that the package is known
20840 +to be broken on all systems which are not otherwise listed in KEYWORDS. For
20841 example, a binary only package which is built for x86 will look like:
20842
20843 games-fps/quake3-demo-1.11.ebuild:KEYWORDS="-* x86"
20844
20845 -If you wish to accept this package anyways, then use one of the other keywords in your
20846 -package.accept_keywords like this:
20847 +If you wish to accept this package anyways, then use one of the other keywords
20848 +in your package.accept_keywords like this:
20849
20850 games-fps/quake3-demo x86
20851
20852 .TP
20853 +.BR package.accept_restrict
20854 +This will allow ACCEPT_RESTRICT (see \fBmake.conf\fR(5)) to be augmented for a
20855 +single package.
20856 +
20857 +.I Format:
20858 +.nf
20859 +\- comment lines begin with # (no inline comments)
20860 +\- one DEPEND atom per line followed by additional RESTRICT tokens
20861 +.fi
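A package.accept_restrict sketch matching the format above; the atom and RESTRICT token are illustrative:

    # accept the bindist restriction for one package only
    www-plugins/adobe-flash bindist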
20862 +.TP
20863 .BR package.env
20864 Per\-package environment variable settings. Entries refer to
20865 environment files that are placed in the \fB/etc/portage/env/\fR
20866 -directory and have the same format as \fBmake.conf\fR(5).
20867 +directory and have the same format as \fBmake.conf\fR(5). Note that these
20868 +files are interpreted much earlier than the package\-specific \fIbashrc\fR
20869 +files which are described in a later section about \fB/etc/portage/env/\fR.
20870 +Beginners should be careful to recognize the difference between these two types
20871 +of files. When environment variable settings are all that's needed,
20872 +\fBpackage.env\fR is the recommended approach to use.
20873
20874 .I Format:
20875 .nf
20876 @@ -618,13 +692,15 @@ directory and have the same format as \fBmake.conf\fR(5).
20877
20878 .I Example:
20879 .nf
20880 -# use environment variables from /etc/portage/env/glibc.conf for the glibc package
20881 +# use environment variables from /etc/portage/env/glibc.conf for the glibc \
20882 +package
20883 sys\-libs/glibc glibc.conf
20884 .fi
20885
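A sketch of what the referenced /etc/portage/env/glibc.conf might contain (settings are illustrative); such files use the same format as make.conf:

    MAKEOPTS="-j1"
    FEATURES="splitdebug"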
20886 .TP
20887 .BR package.license
20888 -This will allow ACCEPT_LICENSE to be augmented for a single package.
20889 +This will allow ACCEPT_LICENSE (see \fBmake.conf\fR(5)) to be augmented for a
20890 +single package.
20891
20892 .I Format:
20893 .nf
20894 @@ -652,7 +728,8 @@ versions earlier than 1.0.4496. No problem!
20895 .fi
20896 .TP
20897 .BR package.properties
20898 -This will allow ACCEPT_PROPERTIES to be augmented for a single package.
20899 +This will allow ACCEPT_PROPERTIES (see \fBmake.conf\fR(5)) to be augmented for
20900 +a single package.
20901
20902 .I Format:
20903 .nf
20904 @@ -661,15 +738,15 @@ This will allow ACCEPT_PROPERTIES to be augmented for a single package.
20905 .fi
20906 .TP
20907 .BR package.unmask
20908 -Just like package.mask above, except here you list packages you want to
20909 -unmask. Useful for overriding the global package.mask file (see
20910 -above). Note that this does not override packages that are masked via
20911 +Just like package.mask above, except here you list packages you want to
20912 +unmask. Useful for overriding the global package.mask file (see
20913 +above). Note that this does not override packages that are masked via
20914 KEYWORDS.
20915 .TP
20916 .BR package.use
20917 -Per\-package USE flags. Useful for tracking local USE flags or for
20918 -enabling USE flags for certain packages only. Perhaps you develop GTK
20919 -and thus you want documentation for it, but you don't want
20920 +Per\-package USE flags. Useful for tracking local USE flags or for
20921 +enabling USE flags for certain packages only. Perhaps you develop GTK
20922 +and thus you want documentation for it, but you don't want
20923 documentation for QT. Easy as pie my friend!
20924
20925 .I Format:
20926 @@ -687,30 +764,153 @@ x11\-libs/qt \-mysql
20927 .fi
20928 .TP
20929 .BR repos.conf
20930 -Specifies \fIsite\-specific\fR repository configuration information. Note that
20931 -configuration settings which are specified here do not apply to tools
20932 -such as \fBrepoman\fR(1) and \fBegencache\fR(1), since operations
20933 -performed by these tools
20934 -are inherently \fBnot\fR \fIsite\-specific\fR. \fBWARNING:\fR Use of
20935 -\fBrepos.conf\fR is generally not recommended since resulting changes in
20936 -eclass inheritance (especially due to \fBeclass\-overrides\fR) may trigger
20937 -performance issues under some circumstances (see \fBbug #124041\fR). When
20938 -using \fBeclass\-overrides\fR, due to bug #276264, you must ensure that
20939 -your portage tree does not contain a metadata/cache/ directory. If that
20940 -directory exists then you should remove it entirely, and set
20941 -PORTAGE_RSYNC_EXTRA_OPTS="\-\-exclude=/metadata/cache" in
20942 -make.conf in order to exclude the metadata/cache/ directory during
20943 -\fBemerge\fR(1) \-\-sync operations.
20944 +Specifies \fIsite\-specific\fR repository configuration information.
20945 +.br
20946 +Configuration specified in \fBrepos.conf\fR can be overridden by the \fBPORTAGE_REPOSITORIES\fR
20947 +environment variable, which has the same format as \fBrepos.conf\fR.
20948 +
20949 +.I Format:
20950 +.nf
20951 +\- comments begin with # (no inline comments)
20952 +\- configuration of each repository is specified in a section starting with \
20953 +"[${repository_name}]"
20954 +\- attributes are specified in "${attribute} = ${value}" format
20955 +.fi
20956 +
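A sketch of the PORTAGE_REPOSITORIES override mentioned above, passing a repos.conf-format value from the environment for a single invocation (the path is illustrative):

    PORTAGE_REPOSITORIES="$(printf '[gentoo]\nlocation = /usr/portage\n')" emerge --info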
20957 +.I Attributes supported in DEFAULT section:
20958 +.RS
20959 +.RS
20960 +.TP
20961 +.B main\-repo
20962 +Specifies the main repository.
20963 +.TP
20964 +.B eclass\-overrides
20965 +Makes all repositories inherit eclasses from the specified repositories.
20966 +.br
20967 +Setting this attribute is generally not recommended since resulting changes
20968 +in eclass inheritance may trigger performance issues due to invalidation
20969 +of metadata cache.
20970 +.br
20971 +When 'force = eclass\-overrides' attribute is not set, \fBegencache\fR(1),
20972 +\fBemirrordist\fR(1) and \fBrepoman\fR(1) ignore this attribute,
20973 +since operations performed by these tools are inherently
20974 +\fBnot\fR \fIsite\-specific\fR.
20975 +.TP
20976 +.B force
20977 +Specifies the names of attributes that should be forcefully respected by
20978 +\fBegencache\fR(1), \fBemirrordist\fR(1) and \fBrepoman\fR(1).
20979 +.br
20980 +Valid values: aliases, eclass\-overrides, masters
20981 +.RE
20982 +
20983 +.I Attributes supported in sections of repositories:
20984 +.RS
20985 +.TP
20986 +.B aliases
20987 +Specifies aliases of the given repository.
20988 +.br
20989 +Setting this attribute is generally not recommended since resulting changes
20990 +in eclass inheritance may trigger performance issues due to invalidation
20991 +of metadata cache.
20992 +.br
20993 +When 'force = aliases' attribute is not set, \fBegencache\fR(1),
20994 +\fBemirrordist\fR(1) and \fBrepoman\fR(1) ignore this attribute,
20995 +since operations performed by these tools are inherently
20996 +\fBnot\fR \fIsite\-specific\fR.
20997 +.TP
20998 +.B eclass\-overrides
20999 +Makes the given repository inherit eclasses from the specified repositories.
21000 +.br
21001 +Setting this attribute is generally not recommended since resulting changes
21002 +in eclass inheritance may trigger performance issues due to invalidation
21003 +of metadata cache.
21004 +.br
21005 +When 'force = eclass\-overrides' attribute is not set, \fBegencache\fR(1),
21006 +\fBemirrordist\fR(1) and \fBrepoman\fR(1) ignore this attribute,
21007 +since operations performed by these tools are inherently
21008 +\fBnot\fR \fIsite\-specific\fR.
21009 +.TP
21010 +.B force
21011 +Specifies the names of attributes that should be forcefully respected by
21012 +\fBegencache\fR(1), \fBemirrordist\fR(1) and \fBrepoman\fR(1).
21013 +.br
21014 +Valid values: aliases, eclass\-overrides, masters
21015 +.TP
21016 +.B location
21017 +Specifies the location of the given repository.
21018 +.TP
21019 +.B masters
21020 +Specifies the master repositories of the given repository.
21021 +.br
21022 +Setting this attribute is generally not recommended since resulting changes
21023 +in eclass inheritance may trigger performance issues due to invalidation
21024 +of metadata cache.
21025 +.br
21026 +When 'force = masters' attribute is not set, \fBegencache\fR(1),
21027 +\fBemirrordist\fR(1) and \fBrepoman\fR(1) ignore this attribute,
21028 +since operations performed by these tools are inherently
21029 +\fBnot\fR \fIsite\-specific\fR.
21030 +.TP
21031 +.B priority
21032 +Specifies the priority of the given repository.
21033 +.TP
21034 +.B sync\-cvs\-repo
21035 +Specifies the CVS repository (module) used when sync\-type is cvs.
21036 +.TP
21037 +.B sync\-type
21038 +Specifies the type of synchronization performed by `emerge \-\-sync`.
21039 +.br
21040 +Valid non\-empty values: cvs, git, rsync
21041 +.br
21042 +This attribute can be set to an empty value to disable synchronization of the
21043 +given repository. An empty value is the default.
21044 +.TP
21045 +.B sync\-uri
21046 +Specifies the URI of the repository used for synchronization performed by
21047 +`emerge \-\-sync`.
21048 +.br
21049 +This attribute can be set to an empty value to disable synchronization of the
21050 +given repository. An empty value is the default.
21051 +.RS
21052 +.TP
21053 +Syntax:
21054 +cvs: [cvs://]:access_method:[username@]hostname[:port]:/path
21055 +.br
21056 +git: (git|git+ssh|http|https)://[username@]hostname[:port]/path
21057 +.br
21058 +rsync: (rsync|ssh)://[username@]hostname[:port]/(module|path)
21059 +.TP
21060 +Examples:
21061 +.RS
21062 +rsync://private\-mirror.com/portage\-module
21063 +.br
21064 +rsync://rsync\-user@private\-mirror.com:873/gentoo\-portage
21065 +.br
21066 +ssh://ssh\-user@192.168.0.1:22/usr/portage
21067 +.br
21068 +ssh://ssh\-user@192.168.0.1:22/\\${HOME}/portage\-storage
21069 +.RE
21070 +.TP
21071 +Note: For the ssh:// scheme, key\-based authentication might be of interest.
21072 +.RE
21073 +.RE
21074
21075 .I Example:
21076 .nf
21077 [DEFAULT]
21078 +# make gentoo the main repository, which makes it the default master
21079 +# repository for repositories that do not specify masters
21080 +main\-repo = gentoo
21081 # make all repositories inherit eclasses from the java\-overlay and
21082 # java\-experimental repositories, with eclasses from java\-experimental
21083 # taking precedence over those from java\-overlay
21084 eclass\-overrides = java\-overlay java\-experimental
21085
21086 [gentoo]
21087 +# repos with higher priorities are preferred when ebuilds with equal versions
21088 +# are found in multiple repos (see the `emerge \-\-info \-\-verbose` repo
21089 +# display for a listing of repos and their corresponding priorities).
21090 +priority = 9999
21091 # disable all eclass overrides for ebuilds from the gentoo repository
21092 eclass\-overrides =
21093 # when processing metadata/layout.conf from other repositories, substitute
21094 @@ -728,8 +928,16 @@ masters = gentoo kde
21095 # those master repos won't be required as dependencies (the user must
21096 # ensure that any required dependencies such as eclasses are satisfied)
21097 masters =
21098 +
21099 +# Repository 'gentoo' synchronized using CVS
21100 +[gentoo]
21101 +location = /usr/portage
21102 +sync\-type = cvs
21103 +sync\-uri = :pserver:anonymous@××××××××××××××.org:/var/cvsroot
21104 +sync\-cvs\-repo = gentoo\-x86
21105 .fi
21106 .RE
21107 +.RE
21108 .TP
21109 .BR /etc/portage/env/
21110 .RS
21111 @@ -739,20 +947,9 @@ needed, then \fB/etc/portage/package.env\fR should be used instead of the
21112 bashrc approach that is described here. Also note that special variables
21113 such as \fBFEATURES\fR and \fBINSTALL_MASK\fR will not produce the intended
21114 results if they are set in bashrc, and therefore
21115 -\fB/etc/portage/package.env\fR should be used instead.
21116 -
21117 -set_unless_changed and unset_unless_changed functions can be used to set or
21118 -unset given variables only if these variable have not been set to values
21119 -different than values set in make.conf. This functionality can be useful for
21120 -temporary overriding of these variables during emerge invocation. Variables
21121 -set without using set_unless_changed will unconditionally override variables
21122 -set during emerge invocation.
21123 -
21124 -.I Syntax:
21125 -.nf
21126 -set_unless_changed VARIABLE=VALUE
21127 -unset_unless_changed VALUE
21128 -.fi
21129 +\fB/etc/portage/package.env\fR should be used instead. Lastly, note that these
21130 +files are interpreted much later than the portage environment file
21131 +\fBpackage.env\fR.
21132
21133 Portage will source all of these bashrc files after \fB/etc/portage/bashrc\fR
21134 in the following order:
21135 @@ -771,8 +968,10 @@ in the following order:
21136 .RS
21137 For each file in this directory, a package set is created with its name
21138 corresponding to the name of the file. Each file should contain a list
21139 -of package atoms, one per line. When referencing package sets in
21140 -\fBemerge\fR(1) arguments, the set name is prefixed with \fB@\fR.
21141 +of package atoms and nested package sets, one per line. When a package
21142 +set is referenced as an \fBemerge\fR(1) argument or when it is
21143 +referenced as a nested package set (inside of another package set), the
21144 +set name is prefixed with \fB@\fR.
21145
21146 Also see \fB/var/lib/portage/world_sets\fR and the \fBemerge\fR(1)
21147 \fB\-\-list\-sets\fR option.
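A sketch of such a set file, /etc/portage/sets/office in this example; the atom and the nested @fonts set are hypothetical:

    app-office/libreoffice
    @fonts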
21148 @@ -782,49 +981,127 @@ Also see \fB/var/lib/portage/world_sets\fR and the \fBemerge\fR(1)
21149 .RS
21150 .TP
21151 .BR layout.conf
21152 -Specifies information about the repository layout. A
21153 -"masters" attribute is supported, which is used to specify names of
21154 -repositories which satisfy dependencies on eclasses and/or ebuilds. Each
21155 -repository name should correspond the value of a \fBrepo_name\fR entry
21156 -from one of the repositories that is configured via the \fBPORTDIR\fR or
21157 -\fBPORTDIR_OVERLAY\fR variables (see \fBmake.conf\fR(5)). Repositories listed
21158 -toward the right of the \fBmasters\fR list take precedence over those listed
21159 -toward the left of the list. An "aliases" attribute is also supported, which
21160 -behaves like an "aliases" attribute in \fBrepos.conf\fR. \fISite-specific\fR
21161 -overrides to \fBlayout.conf\fR settings may be specified in
21162 -\fB/etc/portage/repos.conf\fR. Settings in \fBrepos.conf\fR take
21163 -precedence over settings in \fBlayout.conf\fR, except tools such as
21164 -\fBrepoman\fR(1) and \fBegencache\fR(1) will entirely ignore
21165 +Specifies information about the repository layout.
21166 +\fISite-specific\fR overrides to \fBlayout.conf\fR settings may be specified in
21167 +\fB/etc/portage/repos.conf\fR.
21168 +Settings in \fBrepos.conf\fR take precedence over settings in
21169 +\fBlayout.conf\fR, except tools such as \fBrepoman\fR(1) and \fBegencache\fR(1)
21170 +ignore "aliases", "eclass-overrides" and "masters" attributes set in
21171 \fBrepos.conf\fR since their operations are inherently \fBnot\fR
21172 \fIsite\-specific\fR.
21173
21174 +.I Format:
21175 +.nf
21176 +\- comments begin with # (no inline comments)
21177 +\- attributes are specified in "${attribute} = ${value}" format
21178 +.fi
21179 +
21180 +.I Supported attributes:
21181 +.RS
21182 +.RS
21183 +.TP
21184 +.BR aliases
21185 +Behaves like an "aliases" attribute in \fBrepos.conf\fR.
21186 +.TP
21187 +.BR eapis\-banned
21188 +List of EAPIs which are not allowed in this repo.
21189 +.TP
21190 +.BR eapis\-deprecated
21191 +List of EAPIs which are allowed but generate warnings when used.
21192 +.TP
21193 +.BR masters
21194 +Names of repositories which satisfy dependencies on eclasses and from which
21195 +settings specified in various repository\-level files (\fBpackage.mask\fR,
21196 +\fBpackage.use.mask\fR, \fBuse.mask\fR etc.) are inherited. Each repository
21197 +name should correspond to the value of a \fBrepo_name\fR entry from one of
21198 +the repositories that is configured in the \fBrepos.conf\fR file. Repositories
21199 +listed toward the right of the \fBmasters\fR list take precedence over those
21200 +listed toward the left of the list.
21201 +.TP
21202 +.BR repo\-name " = <value of profiles/repo_name>"
21203 +The name of this repository (overrides profiles/repo_name if it exists).
21204 +.TP
21205 +.BR sign\-commits " = [true|" false "]"
21206 +Boolean value indicating whether commits in this repo should be signed.
21207 +.TP
21208 +.BR sign\-manifests " = [" true "|false]"
21209 +Boolean value indicating whether Manifest files in this repo should be signed.
21210 +.TP
21211 +.BR thin\-manifests " = [true|" false "]"
21212 +Boolean value indicating whether Manifest files contain only DIST entries.
21213 +.TP
21214 +.BR use\-manifests " = [" strict "|true|false]"
21215 +How Manifest files get used. Possible values are "strict" (require an entry
21216 +for every file), "true" (if an entry exists for a file, enforce it), or "false"
21217 +(don't check Manifest files at all).
21218 +.TP
21219 +.BR manifest\-hashes
21220 +List of hashes to generate/check in Manifest files. Valid hashes depend on the
21221 +current version of portage; see the portage.const.MANIFEST2_HASH_FUNCTIONS
21222 +constant for the current list.
21223 +.TP
21224 +.BR update\-changelog " = [true|" false "]"
21225 +The default setting for repoman's --echangelog option.
21226 +.TP
21227 +.BR cache\-formats " = [pms] [md5-dict]"
21228 +The cache formats supported in the metadata tree. There is the old "pms" format
21229 +and the newer/faster "md5-dict" format. The default is to detect which dirs exist.
21230 +.TP
21231 +.BR profile\-formats " = [pms|portage-1|portage-2]"
21232 +Control functionality available to profiles in this repo such as which files
21233 +may be dirs, or the syntax available in parent files. Use "portage-2" if you're
21234 +unsure. The default is "portage-1-compat" mode which is meant to be compatible
21235 +with old profiles, but is not allowed to be opted into directly.
21236 +.RE
21237 +.RE
21238 +
21239 +.RS
21240 .I Example:
21241 .nf
21242 +# Specify the repository name (overriding profiles/repo_name).
21243 +repo\-name = foo-overlay
21244 +
21245 # eclasses provided by java-overlay take precedence over identically named
21246 # eclasses that are provided by gentoo
21247 masters = gentoo java-overlay
21248 +
21249 # indicate that this repo can be used as a substitute for foo-overlay
21250 aliases = foo-overlay
21251 +
21252 +# indicate that ebuilds with the specified EAPIs are banned
21253 +eapis\-banned = 0 1
21254 +
21255 +# indicate that ebuilds with the specified EAPIs are deprecated
21256 +eapis\-deprecated = 2 3
21257 +
21258 # sign commits in this repo, which requires Git >=1.7.9, and
21259 # key configured by `git config user.signingkey key_id`
21260 sign\-commits = true
21261 -# do not sign manifests in this repo
21262 +
21263 +# do not sign Manifest files in this repo
21264 sign\-manifests = false
21265 -# thin\-manifests only contain DIST entries
21266 +
21267 +# Manifest files only contain DIST entries
21268 thin\-manifests = true
21269 +
21270 # indicate that this repo requires manifests for each package, and is
21271 # considered a failure if a manifest file is missing/incorrect
21272 use\-manifests = strict
21273 +
21274 # customize the set of hashes generated for Manifest entries
21275 manifest\-hashes = SHA256 SHA512 WHIRLPOOL
21276 +
21277 # indicate that this repo enables repoman's --echangelog=y option automatically
21278 update\-changelog = true
21279 +
21280 # indicate that this repo contains both md5-dict and pms cache formats,
21281 # which may be generated by egencache(1)
21282 cache\-formats = md5-dict pms
21283 +
21284 # indicate that this repo contains profiles that may use directories for
21285 -# package.mask, package.provided, package.use, package.use.mask,
21286 -# package.use.force, use.mask and use.force.
21287 +# package.mask, package.provided, package.use, package.use.force,
21288 +# package.use.mask, package.use.stable.force, package.use.stable.mask,
21289 +# use.force, use.mask, use.stable.force, and use.stable.mask.
21290 # profile\-formats = portage-1
21291 # indicate that paths such as 'gentoo:targets/desktop' or ':targets/desktop' in
21292 # profile parent files can be used to express paths relative to the root
21293 @@ -833,9 +1110,10 @@ cache\-formats = md5-dict pms
21294 profile\-formats = portage-2
21295 .fi
21296 .RE
21297 +.RE
21298 .TP
21299 .BR /usr/portage/profiles/
21300 -Global Gentoo settings that are controlled by the developers. To override
21301 +Global Gentoo settings that are controlled by the developers. To override
21302 these settings, you can use the files in \fB/etc/portage/\fR.
21303 .RS
21304 .TP
21305 @@ -855,8 +1133,8 @@ sparc
21306 .fi
21307 .TP
21308 .BR categories
21309 -A simple list of valid categories that may be used in /usr/portage,
21310 -PORTDIR_OVERLAY, and PKGDIR (see \fBmake.conf\fR(5)).
21311 +A simple list of valid categories that may be used in repositories and PKGDIR
21312 +(see \fBmake.conf\fR(5)).
21313
21314 .I Format:
21315 .nf
21316 @@ -925,10 +1203,10 @@ net-analyzer/netcat -*
21317 .fi
21318 .TP
21319 .BR package.mask
21320 -This contains a list of DEPEND atoms for packages that should not be installed
21321 -in any profile. Useful for adding the latest KDE betas and making sure no
21322 -one accidentally upgrades to them. Also useful for quickly masking specific
21323 -versions due to security issues. ALWAYS include a comment explaining WHY the
21324 +This contains a list of DEPEND atoms for packages that should not be installed
21325 +in any profile. Useful for adding the latest KDE betas and making sure no
21326 +one accidentally upgrades to them. Also useful for quickly masking specific
21327 +versions due to security issues. ALWAYS include a comment explaining WHY the
21328 package has been masked and WHO is doing the masking.
21329
21330 .I Format:
21331 @@ -948,7 +1226,7 @@ package has been masked and WHO is doing the masking.
21332 .fi
21333 .TP
21334 .BR profiles.desc
21335 -List all the current stable and development profiles. If a profile is listed
21336 +List all the current stable and development profiles. If a profile is listed
21337 here, then it will be checked by repoman.
21338 .I Format:
21339 .nf
21340 @@ -970,10 +1248,11 @@ x86-linux prefix/linux/x86 exp
21341 .BR repo_name
21342 The first line of the file should define a unique repository name. The name
21343 may contain any of the characters [A\-Za\-z0\-9_\-]. It must not begin with a
21344 -hyphen.
21345 +hyphen. If the repo\-name attribute is specified in layout.conf, then that
21346 +setting will take precedence.
21347 .TP
21348 .BR thirdpartymirrors
21349 -Controls the mapping of mirror:// style URIs to actual lists of
21350 +Controls the mapping of mirror:// style URIs to actual lists of
21351 mirrors. Keeps us from overloading a single server.
21352
21353 .I Format:
21354 @@ -984,15 +1263,17 @@ mirrors. Keeps us from overloading a single server.
21355
21356 .I Example:
21357 .nf
21358 -sourceforge http://aleron.dl.sourceforge.net/sourceforge http://unc.dl.sourceforge.net/sourceforge
21359 +sourceforge http://aleron.dl.sourceforge.net/sourceforge \
21360 +http://unc.dl.sourceforge.net/sourceforge
21361
21362 -gentoo http://distro.ibiblio.org/pub/linux/distributions/gentoo/distfiles/ ftp://ftp.gtlib.cc.gatech.edu/pub/gentoo/distfiles
21363 +gentoo http://distro.ibiblio.org/pub/linux/distributions/gentoo/distfiles/ \
21364 +ftp://ftp.gtlib.cc.gatech.edu/pub/gentoo/distfiles
21365
21366 kernel http://www.kernel.org/pub http://www.us.kernel.org/pub
21367 .fi
21368 .TP
21369 .BR use.desc
21370 -All global USE flags must be listed here with a description of what they do.
21371 +All global USE flags must be listed here with a description of what they do.
21372
21373 .I Format:
21374 .nf
21375 @@ -1008,7 +1289,7 @@ doc \- Adds extra documentation
21376 .fi
21377 .TP
21378 .BR use.local.desc
21379 -All local USE flags are listed here along with the package and a
21380 +All local USE flags are listed here along with the package and a
21381 description. This file is automatically generated from the
21382 metadata.xml files that are included with each individual package.
21383 Refer to GLEP 56 for further information:
21384 @@ -1030,10 +1311,15 @@ games\-emulation/xmess:net \- Adds network support
21385 .RS
21386 .TP
21387 .BR make.globals
21388 -The global default settings for Portage. This comes from the portage package
21389 -itself. Settings in \fBmake.conf\fR or \fBpackage.env\fR
21390 -override values here. The format
21391 -is described extensivly in \fBmake.conf\fR(5).
21392 +The global default settings for Portage. This comes from the portage package
21393 +itself. Settings in \fBmake.conf\fR or \fBpackage.env\fR override values set
21394 +here. The format is described extensively in \fBmake.conf\fR(5).
21395 +.TP
21396 +.BR repos.conf
21397 +The default configuration of repositories for Portage. This comes from
21398 +the portage package itself. Settings in \fB/etc/portage/repos.conf\fR
21399 +override values set here. The format is described extensively in the section
21400 +for \fB/etc/portage/repos.conf\fR.
21401 .RE
21402 .TP
21403 .BR /var/cache/edb/
21404 @@ -1077,12 +1363,12 @@ directories have been modified since being installed. Files which have not
21405 been modified will automatically be unmerged.
21406 .TP
21407 .BR world
21408 -Every time you emerge a package, the package that you requested is
21409 -recorded here. Then when you run `emerge world \-up`, the list of
21410 -packages is read from this file. Note that this does not mean that the
21411 -packages that were installed as dependencies are listed here. For
21412 -example, if you run `emerge mod_wsgi` and you do not have apache
21413 -already, then "www\-apache/mod_wsgi" is recorded in the world file but
21414 +Every time you emerge a package, the package that you requested is
21415 +recorded here. Then when you run `emerge world \-up`, the list of
21416 +packages is read from this file. Note that this does not mean that the
21417 +packages that were installed as dependencies are listed here. For
21418 +example, if you run `emerge mod_wsgi` and you do not have apache
21419 +already, then "www\-apache/mod_wsgi" is recorded in the world file but
21420 "www\-servers/apache" is not. For more information, review \fBemerge\fR(1).
21421
21422 .I Format:
21423 @@ -1114,7 +1400,7 @@ Please report bugs via http://bugs.gentoo.org/
21424 Marius Mauch <genone@g.o>
21425 Mike Frysinger <vapier@g.o>
21426 Drake Wyrm <wyrm@×××××.com>
21427 -Arfrever Frehtes Taifersar Arahesis <arfrever@g.o>
21428 +Arfrever Frehtes Taifersar Arahesis <arfrever@××××××.org>
21429 .fi
21430 .SH "SEE ALSO"
21431 .BR emerge (1),
21432
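The thirdpartymirrors entry above only shows the file format, so a short sketch of how the mapping is applied may help; the package and tarball names are hypothetical, and the mirror list is the sourceforge example from the hunk.

  # An ebuild that declares (hypothetical package):
  #   SRC_URI="mirror://sourceforge/pyogg/pyogg-1.1.tar.gz"
  # has the mirror:// prefix expanded against the matching thirdpartymirrors
  # line, so Portage tries each listed mirror in turn:
  #   http://aleron.dl.sourceforge.net/sourceforge/pyogg/pyogg-1.1.tar.gz
  #   http://unc.dl.sourceforge.net/sourceforge/pyogg/pyogg-1.1.tar.gz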
21433 diff --git a/man/quickpkg.1 b/man/quickpkg.1
21434 index 7389400..a3f6165 100644
21435 --- a/man/quickpkg.1
21436 +++ b/man/quickpkg.1
21437 @@ -1,22 +1,22 @@
21438 -.TH "QUICKPKG" "1" "Mar 2010" "Portage VERSION" "Portage"
21439 +.TH "QUICKPKG" "1" "Dec 2012" "Portage VERSION" "Portage"
21440 .SH NAME
21441 quickpkg \- creates portage packages
21442 .SH SYNOPSIS
21443 -.B quickpkg <list of packages or package\-sets>
21444 +.B quickpkg [options] <list of packages or package\-sets>
21445 .SH DESCRIPTION
21446 .I quickpkg
21447 can be utilized to quickly create a package for portage by
21448 utilizing the files already on your filesystem. This package
21449 -then can be emerged on any system. To review syntax for
21450 +then can be emerged on any system. To review syntax for
21451 emerging binary packages, review \fBemerge\fR(1). The upside
21452 -of this process is that you don't have to wait for the package
21453 -to unpack, configure, compile, and install before you can have
21454 -the package ready to go. The downside is that the package will
21455 -contain the files that exist on your filesystem even if they have
21456 +of this process is that you don't have to wait for the package
21457 +to unpack, configure, compile, and install before you can have
21458 +the package ready to go. The downside is that the package will
21459 +contain the files that exist on your filesystem even if they have been
21460 modified since they were first installed.
21461 .br
21462 -The packages, after being created, will be placed in \fBPKGDIR\fR.
21463 -This variable is defined in \fBmake.conf\fR(5) and defaults to
21464 +The packages, after being created, will be placed in \fBPKGDIR\fR.
21465 +This variable is defined in \fBmake.conf\fR(5) and defaults to
21466 /usr/portage/packages.
21467 .SH OPTIONS
21468 .TP
21469 @@ -28,6 +28,20 @@ The second form is a portage depend atom or a portage package
21470 set. The atom or set is of the same form that you would give
21471 \fBemerge\fR if you wanted to emerge something.
21472 See \fBebuild\fR(5) for full definition.
21473 +.TP
21474 +.BR "\-\-ignore\-default\-opts"
21475 +Causes the \fIQUICKPKG_DEFAULT_OPTS\fR environment variable to be ignored.
21476 +.TP
21477 +.BR "\-\-include\-config < y | n >"
21478 +Include all files protected by CONFIG_PROTECT (as a security precaution,
21479 +default is 'n').
21480 +.TP
21481 +.BR "\-\-include\-unmodified\-config < y | n >"
21482 +Include files protected by CONFIG_PROTECT that have not been modified
21483 +since installation (as a security precaution, default is 'n').
21484 +.TP
21485 +.BR \-\-umask=UMASK
21486 +The umask used during package creation (default is 0077).
21487 .SH "EXAMPLES"
21488 .B quickpkg
21489 /var/db/pkg/dev-python/pyogg-1.1
21490 @@ -52,7 +66,7 @@ Mike Frysinger <vapier@g.o> (revamped version)
21491 .fi
21492 .SH "FILES"
21493 .TP
21494 -.B /etc/make.conf
21495 +.B /etc/portage/make.conf
21496 The \fBPKGDIR\fR variable is defined here.
21497 .SH "SEE ALSO"
21498 .BR ebuild (5),
21499
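A minimal usage sketch for the options documented above; the atom is hypothetical, and the exact option syntax (space vs. '=' before the y/n value) should be checked against quickpkg's own help output, so treat the spelling as illustrative.

  # rebuild a binary package from the installed files, keeping unmodified
  # CONFIG_PROTECT'ed files and using a restrictive umask
  quickpkg --include-unmodified-config=y --umask=0077 app-editors/nano

  # the resulting tarball lands in ${PKGDIR}, by default /usr/portage/packages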
21500 diff --git a/man/repoman.1 b/man/repoman.1
21501 index b8c0f48..a78f94e 100644
21502 --- a/man/repoman.1
21503 +++ b/man/repoman.1
21504 @@ -1,6 +1,7 @@
21505 -.TH "REPOMAN" "1" "June 2012" "Portage VERSION" "Portage"
21506 +.TH "REPOMAN" "1" "Aug 2013" "Portage VERSION" "Portage"
21507 .SH NAME
21508 -repoman \- Gentoo's program to enforce a minimal level of quality assurance in packages added to the portage tree
21509 +repoman \- Gentoo's program to enforce a minimal level of quality assurance in
21510 +packages added to the portage tree
21511 .SH SYNOPSIS
21512 \fBrepoman\fR [\fIoption\fR] [\fImode\fR]
21513 .SH DESCRIPTION
21514 @@ -9,7 +10,8 @@ repoman \- Gentoo's program to enforce a minimal level of quality assurance in p
21515 .BR repoman
21516 checks the quality of ebuild repositories.
21517
21518 -Note: \fBrepoman commit\fR only works \fIinside local\fR cvs, git, or subversion repositories.
21519 +Note: \fBrepoman commit\fR only works \fIinside local\fR cvs, git, or
21520 +subversion repositories.
21521 .SH OPTIONS
21522 .TP
21523 \fB-a\fR, \fB--ask\fR
21524 @@ -63,6 +65,10 @@ can be enabled by default for a particular repository by setting
21525 "update\-changelog = true" in metadata/layout.conf (see
21526 \fBportage(5)\fR).
21527 .TP
21528 +\fB\-\-experimental\-inherit=<y|n>\fR
21529 +Enable experimental inherit.missing checks which may misbehave when the
21530 +internal eclass database becomes outdated.
21531 +.TP
21532 \fB\-\-if\-modified=<y|n>\fR
21533 Only check packages that have uncommitted modifications
21534 .TP
21535 @@ -75,9 +81,16 @@ Do not use the \fIREPOMAN_DEFAULT_OPTS\fR environment variable.
21536 \fB\-I\fR, \fB\-\-ignore\-masked\fR
21537 Ignore masked packages (not allowed with commit mode)
21538 .TP
21539 +.BR "\-\-include\-arches " ARCHES
21540 +A space separated list of arches used to filter the selection of
21541 +profiles for dependency checks.
21542 +.TP
21543 \fB\-d\fR, \fB\-\-include\-dev\fR
21544 Include dev profiles in dependency checks.
21545 .TP
21546 +\fB\-e <y|n>\fR, \fB\-\-include\-exp\-profiles=<y|n>\fR
21547 +Include exp profiles in dependency checks.
21548 +.TP
21549 \fB\-\-unmatched\-removal\fR
21550 Enable strict checking of package.mask and package.unmask files for
21551 unmatched removal atoms.
21552 @@ -124,24 +137,6 @@ Scan directory tree for QA issues; if OK, commit via cvs
21553 .B CVS/Entries.IO_error
21554 Attempting to commit, and an IO error was encountered while accessing the Entries file
21555 .TP
21556 -.B DEPEND.bad
21557 -User-visible ebuilds with bad DEPEND settings (matched against *visible* ebuilds)
21558 -.TP
21559 -.B DEPEND.badindev
21560 -User-visible ebuilds with bad DEPEND settings (matched against *visible* ebuilds) in developing arch
21561 -.TP
21562 -.B DEPEND.badmasked
21563 -Masked ebuilds with bad DEPEND settings (matched against *all* ebuilds)
21564 -.TP
21565 -.B DEPEND.badmaskedindev
21566 -Masked ebuilds with bad DEPEND settings (matched against *all* ebuilds) in developing arch
21567 -.TP
21568 -.B DEPEND.badtilde
21569 -DEPEND uses the ~ dep operator with a non-zero revision part, which is useless (the revision is ignored)
21570 -.TP
21571 -.B DEPEND.syntax
21572 -Syntax error in DEPEND (usually an extra/missing space/parenthesis)
21573 -.TP
21574 .B DESCRIPTION.missing
21575 Ebuilds that have a missing or empty DESCRIPTION variable
21576 .TP
21577 @@ -165,19 +160,19 @@ Ebuilds that have a missing or empty HOMEPAGE variable
21578 Virtuals that have a non-empty HOMEPAGE variable
21579 .TP
21580 .B IUSE.invalid
21581 -This ebuild has a variable in IUSE that is not in the use.desc or its metadata.xml file
21582 +This ebuild has a variable in IUSE that is not in the use.desc or its
21583 +metadata.xml file
21584 .TP
21585 .B IUSE.missing
21586 -This ebuild has a USE conditional which references a flag that is not listed in IUSE
21587 -.TP
21588 -.B IUSE.undefined
21589 -This ebuild does not define IUSE (style guideline says to define IUSE even when empty)
21590 +This ebuild has a USE conditional which references a flag that is not listed in
21591 +IUSE
21592 .TP
21593 .B KEYWORDS.dropped
21594 Ebuilds that appear to have dropped KEYWORDS for some arch
21595 .TP
21596 .B KEYWORDS.invalid
21597 -This ebuild contains KEYWORDS that are not listed in profiles/arch.list or for which no valid profile was found
21598 +This ebuild contains KEYWORDS that are not listed in profiles/arch.list or for
21599 +which no valid profile was found
21600 .TP
21601 .B KEYWORDS.missing
21602 Ebuilds that have a missing or empty KEYWORDS variable
21603 @@ -188,6 +183,9 @@ Ebuilds that have been added directly with stable KEYWORDS
21604 .B KEYWORDS.stupid
21605 Ebuilds that use KEYWORDS=-* instead of package.mask
21606 .TP
21607 +.B LICENSE.deprecated
21608 +This ebuild is listing a deprecated license.
21609 +.TP
21610 .B LICENSE.invalid
21611 This ebuild is listing a license that doesn't exist in portage's license/ dir.
21612 .TP
21613 @@ -201,51 +199,19 @@ Syntax error in LICENSE (usually an extra/missing space/parenthesis)
21614 Virtuals that have a non-empty LICENSE variable
21615 .TP
21616 .B LIVEVCS.stable
21617 -Ebuild is a live ebuild (cvs, git, darcs, svn, etc) checkout with stable keywords.
21618 +Ebuild is a live ebuild (cvs, git, darcs, svn, etc) checkout with stable
21619 +keywords.
21620 .TP
21621 .B LIVEVCS.unmasked
21622 Ebuild is a live ebuild (cvs, git, darcs, svn, etc) checkout but has keywords
21623 and is not masked in the global package.mask.
21624 .TP
21625 -.B PDEPEND.bad
21626 -User-visible ebuilds with bad PDEPEND settings (matched against *visible* ebuilds)
21627 -.TP
21628 -.B PDEPEND.badindev
21629 -User-visible ebuilds with bad PDEPEND settings (matched against *visible* ebuilds) in developing arch
21630 -.TP
21631 -.B PDEPEND.badmasked
21632 -Masked ebuilds with PDEPEND settings (matched against *all* ebuilds)
21633 -.TP
21634 -.B PDEPEND.badmaskedindev
21635 -Masked ebuilds with PDEPEND settings (matched against *all* ebuilds) in developing arch
21636 -.TP
21637 -.B PDEPEND.badtilde
21638 -PDEPEND uses the ~ dep operator with a non-zero revision part, which is useless (the revision is ignored)
21639 -.TP
21640 .B PDEPEND.suspect
21641 PDEPEND contains a package that usually only belongs in DEPEND
21642 .TP
21643 -.B PDEPEND.syntax
21644 -Syntax error in PDEPEND (usually an extra/missing space/parenthesis)
21645 -.TP
21646 .B PROVIDE.syntax
21647 Syntax error in PROVIDE (usually an extra/missing space/parenthesis)
21648 .TP
21649 -.B RDEPEND.bad
21650 -User-visible ebuilds with bad RDEPEND settings (matched against *visible* ebuilds)
21651 -.TP
21652 -.B RDEPEND.badindev
21653 -User-visible ebuilds with bad RDEPEND settings (matched against *visible* ebuilds) in developing arch
21654 -.TP
21655 -.B RDEPEND.badmasked
21656 -Masked ebuilds with RDEPEND settings (matched against *all* ebuilds)
21657 -.TP
21658 -.B RDEPEND.badmaskedindev
21659 -Masked ebuilds with RDEPEND settings (matched against *all* ebuilds) in developing arch
21660 -.TP
21661 -.B RDEPEND.badtilde
21662 -RDEPEND uses the ~ dep operator with a non-zero revision part, which is useless (the revision is ignored)
21663 -.TP
21664 .B RDEPEND.implicit
21665 RDEPEND is unset in the ebuild which triggers implicit RDEPEND=$DEPEND
21666 assignment (prior to EAPI 4)
21667 @@ -253,9 +219,6 @@ assignment (prior to EAPI 4)
21668 .B RDEPEND.suspect
21669 RDEPEND contains a package that usually only belongs in DEPEND
21670 .TP
21671 -.B RDEPEND.syntax
21672 -Syntax error in RDEPEND (usually an extra/missing space/parenthesis)
21673 -.TP
21674 .B PROPERTIES.syntax
21675 Syntax error in PROPERTIES (usually an extra/missing space/parenthesis)
21676 .TP
21677 @@ -276,6 +239,28 @@ Missing ChangeLog files
21678 .B changelog.notadded
21679 ChangeLogs that exist but have not been added to cvs
21680 .TP
21681 +.B dependency.bad
21682 +User-visible ebuilds with unsatisfied dependencies (matched against *visible*
21683 +ebuilds)
21684 +.TP
21685 +.B dependency.badindev
21686 +User-visible ebuilds with unsatisfied dependencies (matched against *visible*
21687 +ebuilds) in developing arch
21688 +.TP
21689 +.B dependency.badmasked
21690 +Masked ebuilds with unsatisfied dependencies (matched against *all* ebuilds)
21691 +.TP
21692 +.B dependency.badmaskedindev
21693 +Masked ebuilds with unsatisfied dependencies (matched against *all* ebuilds) in
21694 +developing arch
21695 +.TP
21696 +.B dependency.badtilde
21697 +Uses the ~ dep operator with a non-zero revision part, which is useless (the
21698 +revision is ignored)
21699 +.TP
21700 +.B dependency.syntax
21701 +Syntax error in dependency string (usually an extra/missing space/parenthesis)
21702 +.TP
21703 .B dependency.unknown
21704 Ebuild has a dependency that refers to an unknown package (which may be
21705 valid if it is a blocker for a renamed/removed package, or is an
21706 @@ -294,10 +279,12 @@ Some files listed in the Manifest aren't referenced in SRC_URI
21707 This ebuild has a malformed header
21708 .TP
21709 .B ebuild.invalidname
21710 -Ebuild files with a non-parseable or syntactically incorrect name (or using 2.1 versioning extensions)
21711 +Ebuild files with a non-parseable or syntactically incorrect name (or using 2.1
21712 +versioning extensions)
21713 .TP
21714 .B ebuild.majorsyn
21715 -This ebuild has a major syntax error that may cause the ebuild to fail partially or fully
21716 +This ebuild has a major syntax error that may cause the ebuild to fail
21717 +partially or fully
21718 .TP
21719 .B ebuild.minorsyn
21720 This ebuild has a minor syntax error that contravenes gentoo coding style
21721 @@ -318,14 +305,15 @@ A simple sourcing of the ebuild produces output; this breaks ebuild policy.
21722 PATCHES variable should be a bash array to ensure white space safety
21723 .TP
21724 .B ebuild.syntax
21725 -Error generating cache entry for ebuild; typically caused by ebuild syntax error
21726 -or digest verification failure.
21727 +Error generating cache entry for ebuild; typically caused by ebuild syntax
21728 +error or digest verification failure.
21729 .TP
21730 .B file.UTF8
21731 File is not UTF8 compliant
21732 .TP
21733 .B file.executable
21734 -Ebuilds, digests, metadata.xml, Manifest, and ChangeLog do not need the executable bit
21735 +Ebuilds, digests, metadata.xml, Manifest, and ChangeLog do not need the
21736 +executable bit
21737 .TP
21738 .B file.name
21739 File/dir name must be composed of only the following chars: a-zA-Z0-9._-+:
21740 @@ -358,8 +346,19 @@ Missing metadata.xml files
21741 .B metadata.warning
21742 Warnings in metadata.xml files
21743 .TP
21744 +.B repo.eapi.banned
21745 +The ebuild uses an EAPI which is banned by the repository's
21746 +metadata/layout.conf settings.
21747 +.TP
21748 +.B repo.eapi.deprecated
21749 +The ebuild uses an EAPI which is deprecated by the repository's
21750 +metadata/layout.conf settings.
21751 +.TP
21752 +.B IUSE.rubydeprecated
21753 +The ebuild has set a ruby interpreter in USE_RUBY that is not available as a ruby target anymore
21754 +.TP
21755 .B portage.internal
21756 -The ebuild uses an internal Portage function
21757 +The ebuild uses an internal Portage function or variable
21758 .TP
21759 .B upstream.workaround
21760 The ebuild works around an upstream bug, an upstream bug should be filed and
21761
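To tie the new repoman options together, a hedged invocation sketch follows; the package directory and arch list are only examples, and 'full' is assumed to be a valid mode here. Note that the per-variable DEPEND.*/RDEPEND.*/PDEPEND.* keys removed above appear to be reported under the consolidated dependency.* names from this version on.

  cd /usr/portage/app-editors/nano   # hypothetical package directory
  repoman --include-arches "amd64 x86" --experimental-inherit=y full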
21762 diff --git a/man/ru/color.map.5 b/man/ru/color.map.5
21763 new file mode 100644
21764 index 0000000..f7e65e3
21765 --- /dev/null
21766 +++ b/man/ru/color.map.5
21767 @@ -0,0 +1,217 @@
21768 +.TH "COLOR.MAP" "5" "Jul 2013" "Portage VERSION" "Portage"
21769 +.SH "НАЗВАНИЕ"
21770 +color.map \- пользовательские настройки цвета в Portage
21771 +.SH "ПАРАМЕТРЫ"
21772 +.B /etc/portage/color.map
21773 +.SH "ОПИСАНИЕ"
21774 +Указанный файл содержит переменные, определяющие классы цвета, которые
21775 +использует Portage. Проверяя настройки цвета, Portage в первую очередь
21776 +обращается к нему. Если тот или иной класс цвета не определен в
21777 +\fB/etc/portage/color.map\fR, Portage использует внутренние значения,
21778 +принятые по умолчанию.
21779 +.SH "СИНТАКСИС"
21780 +\fBПЕРЕМЕННАЯ\fR = \fI[атрибуты или коды ansi, через пробел]\fR
21781 +.TP
21782 +\fBАТРИБУТ\fR = \fI[атрибуты или коды ansi, через пробел]\fR
21783 +.SH "ПЕРЕМЕННЫЕ"
21784 +.TP
21785 +\fBNORMAL\fR = \fI"normal"\fR
21786 +Определяет цвет, используемый для некоторых слов, встречающихся в контекстах,
21787 +отличных от перечисленных ниже.
21788 +.TP
21789 +\fBBAD\fR = \fI"red"\fR
21790 +Определяет цвет, используемый для некоторых слов, встречающихся в отрицательном
21791 +контексте.
21792 +.TP
21793 +\fBBRACKET\fR = \fI"blue"\fR
21794 +Определяет цвет, используемый для скобок.
21795 +.TP
21796 +\fBGOOD\fR = \fI"green"\fR
21797 +Определяет цвет, используемый для некоторых слов, встречающихся в положительном
21798 +контексте.
21799 +.TP
21800 +\fBHILITE\fR = \fI"teal"\fR
21801 +Определяет цвет, используемый для выделения слов.
21802 +.TP
21803 +\fBINFORM\fR = \fI"darkgreen"\fR
21804 +Определяет цвет, используемый для информационных сообщений.
21805 +.TP
21806 +\fBMERGE_LIST_PROGRESS\fR = \fI"yellow"\fR
21807 +Определяет цвет, используемый для чисел, отображающих ход установки.
21808 +.TP
21809 +\fBPKG_BLOCKER\fR = \fI"red"\fR
21810 +Определяет цвет, используемый для пакетов, создающих неразрешенный конфликт.
21811 +.TP
21812 +\fBPKG_BLOCKER_SATISFIED\fR = \fI"darkblue"\fR
21813 +Определяет цвет, используемый для пакетов, создававших конфликт, который
21814 +затем был разрешен.
21815 +.TP
21816 +\fBPKG_MERGE\fR = \fI"darkgreen"\fR
21817 +Определяет цвет, используемый для пакетов, которые будут установлены.
21818 +.TP
21819 +\fBPKG_MERGE_SYSTEM\fR = \fI"darkgreen"\fR
21820 +Определяет цвет, используемый для system-пакетов, которые будут установлены.
21821 +.TP
21822 +\fBPKG_MERGE_WORLD\fR = \fI"green"\fR
21823 +Определяет цвет, используемый для world-пакетов, которые будут установлены.
21824 +.TP
21825 +\fBPKG_BINARY_MERGE\fR = \fI"purple"\fR
21826 +Определяет цвет, используемый для пакетов, которые будут установлены в бинарной версии.
21827 +.TP
21828 +\fBPKG_BINARY_MERGE_SYSTEM\fR = \fI"purple"\fR
21829 +Определяет цвет, используемый для system-пакетов, которые будут установлены в
21830 +бинарной версии.
21831 +.TP
21832 +\fBPKG_BINARY_MERGE_WORLD\fR = \fI"fuchsia"\fR
21833 +Определяет цвет, используемый для world-пакетов, которые будут установлены в
21834 +бинарной версии.
21835 +.TP
21836 +\fBPKG_NOMERGE\fR = \fI"darkblue"\fR
21837 +Определяет цвет, используемый для имен пакетов, которые не будут установлены.
21838 +.TP
21839 +\fBPKG_NOMERGE_SYSTEM\fR = \fI"darkblue"\fR
21840 +Определяет цвет, используемый для имен system-пакетов, которые не будут установлены.
21841 +.TP
21842 +\fBPKG_NOMERGE_WORLD\fR = \fI"blue"\fR
21843 +Определяет цвет, используемый для имен world-пакетов, которые не будут установлены.
21844 +.TP
21845 +\fBPKG_UNINSTALL\fR = \fI"red"\fR
21846 +Определяет цвет, используемый для имен пакетов, которые должны быть удалены для
21847 +разрешения конфликтов.
21848 +.TP
21849 +\fBPROMPT_CHOICE_DEFAULT\fR = \fI"green"\fR
21850 +Определяет цвет, используемый для предлагаемого на выбор значения по умолчанию.
21851 +.TP
21852 +\fBPROMPT_CHOICE_OTHER\fR = \fI"red"\fR
21853 +Определяет цвет, используемый для предлагаемого на выбор значения не по умолчанию.
21854 +.TP
21855 +\fBSECURITY_WARN\fR = \fI"red"\fR
21856 +Определяет цвет, используемый для предупреждений о безопасности.
21857 +.TP
21858 +\fBUNMERGE_WARN\fR = \fI"red"\fR
21859 +Определяет цвет, используемый для предупреждений об удалении пакета.
21860 +.TP
21861 +\fBWARN\fR = \fI"yellow"\fR
21862 +Определяет цвет, используемый для предупреждений.
21863 +.SH "ДОПУСТИМЫЕ АТРИБУТЫ"
21864 +.TP
21865 +.B Цвет текста
21866 +.RS
21867 +.TP
21868 +.B black - черный
21869 +.TP
21870 +.B darkgray - темно-серый
21871 +.TP
21872 +.B darkred - темно-красный
21873 +.TP
21874 +.B red - красный
21875 +.TP
21876 +.B darkgreen - темно-зеленый
21877 +.TP
21878 +.B green - зеленый
21879 +.TP
21880 +.B brown - коричневый
21881 +.TP
21882 +.B yellow - желтый
21883 +.TP
21884 +.B darkyellow - темно-желтый
21885 +.TP
21886 +.B darkblue - темно-синий
21887 +.TP
21888 +.B blue - синий
21889 +.TP
21890 +.B purple - фиолетовый
21891 +.TP
21892 +.B fuchsia - лиловый
21893 +.TP
21894 +.B teal - серо-зеленый
21895 +.TP
21896 +\fBturquoise\fR = \fBdarkteal\fR - \fBбирюзовый\fR
21897 +.TP
21898 +.B lightgray - светло-серый
21899 +.TP
21900 +.B white - белый
21901 +.RE
21902 +.TP
21903 +.B Цвет фона
21904 +.RS
21905 +.TP
21906 +.B bg_black - черный фон
21907 +.TP
21908 +.B bg_darkred - темно-красный фон
21909 +.TP
21910 +.B bg_darkgreen - темно-зеленый фон
21911 +.TP
21912 +\fBbg_brown\fR = \fBbg_darkyellow\fR - \fBкоричневый фон\fR
21913 +.TP
21914 +.B bg_darkblue - темно-синий фон
21915 +.TP
21916 +.B bg_purple - фиолетовый фон
21917 +.TP
21918 +.B bg_teal - серо-зеленый фон
21919 +.TP
21920 +.B bg_lightgray - светло-серый фон
21921 +.RE
21922 +.TP
21923 +.B Другие атрибуты
21924 +.RS
21925 +.TP
21926 +.B normal - обычный
21927 +.TP
21928 +.B no\-attr - без атрибутов
21929 +.TP
21930 +.B reset - переопределить
21931 +.TP
21932 +.B bold - жирный
21933 +.TP
21934 +.B faint - бледный
21935 +.TP
21936 +.B standout - стандартный вывод
21937 +.TP
21938 +.B no\-standout - не использовать стандартный вывод
21939 +.TP
21940 +.B underline - с подчеркиванием
21941 +.TP
21942 +.B no\-underline - без подчеркивания
21943 +.TP
21944 +.B blink - мигающий
21945 +.TP
21946 +.B no\-blink - без мигания
21947 +.TP
21948 +.B overline - с надчеркиванием
21949 +.TP
21950 +.B no\-overline - без надчеркивания
21951 +.TP
21952 +.B reverse - негатив
21953 +.TP
21954 +.B no\-reverse - не отражать цвет
21955 +.TP
21956 +.B invisible - невидимый
21957 +.RE
21958 +.SH "БАГТРЕКЕР"
21959 +Об обнаруженных ошибках сообщайте на http://bugs.gentoo.org/
21960 +.SH "АВТОРЫ"
21961 +.nf
21962 +Arfrever Frehtes Taifersar Arahesis <arfrever@××××××.org>
21963 +.fi
21964 +.SH "ФАЙЛЫ"
21965 +.TP
21966 +.B /etc/portage/color.map
21967 +Содержит переменные, используемые для пользовательской настройки цветного вывода.
21968 +.TP
21969 +.B /etc/portage/make.conf
21970 +Содержит другие переменные.
21971 +.SH "СМ. ТАКЖЕ"
21972 +.BR console_codes (4),
21973 +.BR make.conf (5),
21974 +.BR portage (5),
21975 +.BR emerge (1),
21976 +.BR ebuild (1),
21977 +.BR ebuild (5)
21978 +.TP
21979 +Модуль \fIPython /usr/lib/portage/pym/portage/output.py\fR.
21980 +.SH "ПЕРЕВОД"
21981 +.nf
21982 +\fRПереводчик\fR - Елена Гаврилова <e.vl.gavrilova@××××××.ru>
21983 +\fRРедактор\fR - Романов Владимир <blueboar2@×××××.com>
21984 +.fi
21985
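Since the new Russian page documents the same variables and the same VARIABLE = "attributes" syntax as the English color.map(5), a short illustrative /etc/portage/color.map fragment follows; the particular values are arbitrary overrides, not recommendations.

  # /etc/portage/color.map -- illustrative overrides only
  PKG_MERGE = "green"
  PKG_BINARY_MERGE = "purple"
  WARN = "yellow"
  BAD = "red"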
21986 diff --git a/man/ru/dispatch-conf.1 b/man/ru/dispatch-conf.1
21987 new file mode 100644
21988 index 0000000..a511e97
21989 --- /dev/null
21990 +++ b/man/ru/dispatch-conf.1
21991 @@ -0,0 +1,100 @@
21992 +.TH "DISPATCH-CONF" "1" "Jan 2011" "Portage VERSION" "Portage"
21993 +.SH "НАЗВАНИЕ"
21994 +dispatch\-conf \- безопасное обновление конфигурационных файлов после
21995 +установки новых пакетов
21996 +.SH "СИНТАКСИС"
21997 +.B dispatch\-conf
21998 +.SH "ОПИСАНИЕ"
21999 +Утилиту \fIdispatch\-conf\fR следует запускать после установки новых
22000 +пакетов для проверки конфигурационных файлов на обновления. Если
22001 +новый конфигурационный файл попытается затереть текущий,
22002 +\fIdispatch\-conf\fR предложит пользователю самому решить, каким
22003 +образом разрешить эту ситуацию. Среди достоинств \fIdispatch\-conf\fR \-
22004 +легкость отката изменений (изменения конфигурационных файлов сохраняются
22005 +с помощью либо патчей, либо RCS) и возможность автоматического
22006 +обновления тех файлов, которые пользователь не изменял, и тех, которые
22007 +отличаются от текущей версии только CVS-мусором или пробелом.
22008 +
22009 +\fIdispatch\-conf\fR проверит на обновления все каталоги, указанные в
22010 +переменной \fICONFIG_PROTECT\fR. Также, программа \fIdispatch\-conf\fR
22011 +автоматически обновит все файлы конфигурации, найденные в
22012 +\fICONFIG_PROTECT_MASK\fR. Подробнее см. в \fBmake.conf\fR(5).
22013 +.SH "ОПЦИИ"
22014 +.TP
22015 +Нет.
22016 +.SH "ИСПОЛЬЗОВАНИЕ"
22017 +\fIdispatch\-conf\fR следует запускать от пользователя root, поскольку
22018 +владельцем файлов, с которыми работает утилита, как правило, является
22019 +именно пользователь root. Перед первым запуском \fIdispatch\-conf\fR
22020 +необходимо отредактировать настройки в файле \fB/etc/dispatch\-conf.conf\fR
22021 +и создать каталог архивов, указанный в \fB/etc/dispatch\-conf.conf\fR.
22022 +Все изменения конфигурационных файлов сохраняются в каталоге архивов \-
22023 +либо как патчи, либо с помощью RCS, благодаря чему довольно просто
22024 +вернуться к предыдущей версии.
22025 +
22026 +Всякий раз, когда \fIdispatch\-conf\fR обнаруживает конфигурационный файл,
22027 +который был обновлен, пользователю дается возможность выбрать один из
22028 +следующих вариантов, чтобы решить, что делать с предлагаемым обновлением:
22029 +.TP
22030 +.B u
22031 +Обновить (заменить) текущий конфигурационный файл новым и продолжить.
22032 +.TP
22033 +.B z
22034 +Затереть (удалить) новый конфигурационный файл и продолжить.
22035 +.TP
22036 +.B n
22037 +Пропустить и перейти к следующему конфигурационному файлу, не удаляя ни
22038 +исходную версию, ни файлы, защищенные \fICONFIG_PROTECT\fR.
22039 +.TP
22040 +.B e
22041 +Редактировать новый конфигурационный файл в редакторе текста,
22042 +определенном переменной \fIEDITOR\fR.
22043 +.TP
22044 +.B m
22045 +В интерактивном режиме произвести слияние текущего и нового конфигурационных файлов.
22046 +.TP
22047 +.B l
22048 +Просмотреть различия между текущим и новым конфигурационными файлами.
22049 +.TP
22050 +.B t
22051 +Переключаться между текущим и новым конфигурационными файлами
22052 +(в конечном итоге потребуется установить конечную версию, нажав \fBu\fR).
22053 +.TP
22054 +.B h
22055 +Вывести справку.
22056 +.TP
22057 +.B q
22058 +Выйти из \fIdispatch\-conf\fR.
22059 +.SH "ПРАВА НА ФАЙЛЫ"
22060 +\fBВНИМАНИЕ\fR: Если \fB/etc/dispatch\-conf.conf\fR сконфигурирован
22061 +для использования \fBrcs\fR(1), права на чтение и исполнение
22062 +архивированных файлов могут быть унаследованы от первой проверки
22063 +рабочего файла, как описано в man\-руководстве \fBci\fR(1). Это
22064 +означает, что даже если права доступа к рабочему файлу изменились,
22065 +прежние права, действовавшие при первой проверке, могут быть
22066 +возвращены. Согласно руководству \fBci\fR(1), пользователи могут
22067 +управлять доступом к RCS\-файлам, изменив права на доступ к
22068 +каталогу, в котором они лежат.
22069 +.SH "БАГТРЕКЕР"
22070 +Об обнаруженных ошибках сообщайте на http://bugs.gentoo.org/
22071 +.SH "АВТОРЫ"
22072 +.nf
22073 +Jeremy Wohl
22074 +Karl Trygve Kalleberg <karltk@g.o>
22075 +Mike Frysinger <vapier@g.o>
22076 +Grant Goodyear <g2boojum@g.o>
22077 +.fi
22078 +.SH "ФАЙЛЫ"
22079 +.TP
22080 +.B /etc/dispatch\-conf.conf
22081 +Здесь хранятся настройки конфигурации для \fIdispatch\-conf\fR.
22082 +.SH "СМОТРИ ТАКЖЕ"
22083 +.BR make.conf (5),
22084 +.BR ci (1),
22085 +.BR etc-update (1),
22086 +.BR rcs (1)
22087 +.SH "ПЕРЕВОД"
22088 +.nf
22089 +Переводчик: Елена Гаврилова <e.vl.gavrilova@××××××.ru>
22090 +Редактор: Романов Владимир <blueboar2@×××××.com>
22091 +.fi
22092
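A small workflow sketch for the dispatch-conf page above (in short: run it after installing packages and answer with one of the u/z/n/e/m/l/t/h/q actions for each pending update); the emerge invocation is just a typical example.

  emerge -uDN @world   # update packages as usual
  dispatch-conf        # then review each pending config update:
                       #   u = use the new file, z = zap it, m = merge, q = quit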
22093 diff --git a/man/ru/ebuild.1 b/man/ru/ebuild.1
22094 new file mode 100644
22095 index 0000000..98d4b5a
22096 --- /dev/null
22097 +++ b/man/ru/ebuild.1
22098 @@ -0,0 +1,249 @@
22099 +.TH "EBUILD" "1" "Jan 2014" "Portage VERSION" "Portage"
22100 +.SH "НАЗВАНИЕ"
22101 +ebuild \- низкоуровневый интерфейс системы Portage
22102 +.SH "СИНТАКСИС"
22103 +.B ebuild
22104 +.I файл команда [команда]\fR...
22105 +.SH "ОПИСАНИЕ"
22106 +Программа ebuild представляет собой низкоуровневый интерфейс
22107 +системы Portage. Она обеспечивает возможность непосредственного
22108 +взаимодействия со сценарием ebuild при помощи специальных
22109 +подкоманд или групп команд, выполняемых в контексте данного файла,
22110 +и функций. Утилита ebuild принимает в качестве аргументов
22111 +ebuild-сценарий и одну или более команд, подвергает сценарий
22112 +синтаксическому анализу и выполняет указанные команды. Имеются
22113 +отдельные команды для загрузки исходных файлов, их распаковки,
22114 +компиляции, установки объектных файлов во временный каталог image,
22115 +установки образа в локальную файловую систему, создания архива
22116 +пакета из образа и т.д.
22117 +.SH "ФАЙЛ"
22118 +Должен быть рабочим ebuild-сценарием. Подробнее смотри в руководстве
22119 +по \fBebuild\fR(5).
22120 +.SH "КОМАНДЫ"
22121 +По умолчанию portage выполняет все функции по порядку вплоть до
22122 +указанной, кроме функций, которые уже были вызваны в предыдущих
22123 +вызовах ebuild. Например, если вы дадите команду \fBcompile\fR, то
22124 +вызовете тем самым и предшествующие ей функции (такие как
22125 +\fBsetup\fR и \fBunpack\fR), если они не были запущены в
22126 +предыдущем запуске ebuild. Если вы хотите быть уверенным, что
22127 +все они были выполнены, вам нужно сначала использовать команду
22128 +\fBclean\fR. Если вы хотите, чтобы запускалась только одна команда,
22129 +вам следует добавить опцию \fInoauto\fR к значению переменной
22130 +окружения \fBFEATURES\fR. Подробнее смотри в справке по \fBmake.conf\fR(5).
22131 +
22132 +.TP
22133 +.BR help
22134 +Выводит справочную информацию о программе в сжатом изложении,
22135 +а также целый ряд сведений о пакете.
22136 +.TP
22137 +.BR setup
22138 +Запускает все действия по настройке данного пакета, в том числе
22139 +специфические системные тесты.
22140 +.TP
22141 +.BR clean
22142 +Очищает временный каталог сборки, созданный Portage специально для
22143 +этого файла ebuild. Временный каталог сборки обычно содержит
22144 +извлеченный из архива исходный код, а также, возможно, так называемый
22145 +установочный образ (все файлы, которые будут установлены в локальную
22146 +файловую систему или сохранены в пакете). Расположение каталога
22147 +сборки определяется значением переменной PORTAGE_TMPDIR. Чтобы узнать
22148 +ее текущее значение, выполните \fIemerge \-\-info\fR. О том, как
22149 +переопределить эту переменную, смотри \fBmake.conf\fR(5).
22150 +
22151 +Примечание: Portage удаляет практически все данные, оставшиеся после
22152 +успешной установки пакета, за исключением тех случаев, когда в
22153 +переменной FEATURES явно указано 'noclean'. Если вы добавите noclean
22154 +в значение FEATURES, очень скоро большой объем дискового пространства
22155 +будет занят ненужными файлами. Не рекомендуется пользоваться этим
22156 +режимом постоянно, а лишь в том случае, если исходники пакетов
22157 +потребуются вам после установки. Впрочем, возможно и ручное удаление
22158 +этих файлов: для этого следует выполнить \fIrm \-rf /var/tmp/portage\fR.
22159 +.TP
22160 +.BR fetch
22161 +Проверяет, все ли источники данных, фигурирующие в SRC_URI, доступны
22162 +в каталоге DISTDIR (подробнее см. в \fBmake.conf\fR(5)) и имеют
22163 +верную контрольную сумму. Если исходные коды недоступны, будет
22164 +предпринята попытка загрузить их с серверов, адреса которых указаны
22165 +в SRC_URI. Если для того или иного файла имеется несколько адресов
22166 +загрузки, Portage проверит каждый из них и выберет тот сервер,
22167 +который ближе. (Точность этого выбора на данный момент не
22168 +гарантируется.) В первую очередь всегда обрабатываются зеркала Gentoo
22169 +Linux, содержащиеся в переменной GENTOO_MIRRORS. Если по какой-либо
22170 +причине контрольная сумма текущих или только что загруженных исходных
22171 +кодов не совпадает с контрольной суммой, записанной в файле
22172 +files/digest\-[пакет]\-[версия\-ревизия], выводится предупреждение, и
22173 +программа ebuild завершает работу с кодом ошибки 1.
22174 +.TP
22175 +.BR digest
22176 +В настоящее время \- эквивалент команды \fImanifest\fR.
22177 +.TP
22178 +.BR manifest
22179 +Обновляет Manifest\-файл пакета. В результате создаются контрольные суммы
22180 +для всех файлов, обнаруженных в одном каталоге с обрабатываемым файлом
22181 +ebuild, а также содержимое вложенных каталогов подкаталога files.
22182 +При этом контрольные суммы генерируются и для всех файлов, перечисленных
22183 +в SRC_URI для каждого файла ebuild. Подробнее о поведении данной команды,
22184 +см. в разделе о смысле значения \fIassume\-digests\fR переменной
22185 +\fBFEATURES\fR справочного руководства по \fBmake.conf\fR(5). Если вы
22186 +не хотите, чтобы дайджесты принимались неявно, см. опцию \fB\-\-force\fR.
22187 +.TP
22188 +.BR unpack
22189 +Извлекает исходные коды в подкаталог \fIкаталога сборки\fR (BUILD_PREFIX),
22190 +вызывая функцию \fIsrc_unpack()\fR внутри файла ebuild. Если функция
22191 +src_unpack() не определена, для распаковки всех файлов, перечисленных в
22192 +SRC_URI, будет использована стандартная src_unpack(). Как правило,
22193 +исходники распаковываются в каталог ${BUILD_PREFIX}/[пакет]\-[версия-ревизия]/work.
22194 +Обращаться к нему можно с помощью переменной ${WORKDIR}.
22195 +
22196 +Создавая файл ebuild самостоятельно, убедитесь, что переменная S
22197 +(каталог исходных файлов), определенная в начале ebuild-сценария, указывает
22198 +на каталог, в котором действительно содержатся распакованные исходные коды.
22199 +По умолчанию он определяется как ${WORKDIR}/${P}, поэтому, как правило,
22200 +ничего не требуется исправлять. Функция src_unpack() также отвечает за
22201 +наложение патчей перед компиляцией пакетов.
22202 +.TP
22203 +.BR prepare
22204 +Подготавливает извлеченные из архива исходные коды, вызывая функцию
22205 +\fIsrc_prepare()\fR, определенную в ebuild-файле. При запуске src_prepare()
22206 +текущим рабочим каталогом становится ${S}. Данная функция поддерживается,
22207 +начиная с \fBEAPI 2\fR.
22208 +.TP
22209 +.BR configure
22210 +Производит конфигурирование распакованных исходных кодов, вызывая функцию
22211 +\fIsrc_configure()\fR, определенную в ebuild-файле. При запуске src_configure()
22212 +текущим рабочим каталогом становится ${S}. Данная функция поддерживается
22213 +начиная с \fBEAPI 2\fR.
22214 +.TP
22215 +.BR compile
22216 +Компилирует распакованные исходные коды, вызывая функцию \fIsrc_compile()\fR,
22217 +определенную в ebuild-файле. При запуске src_compile() текущим рабочим
22218 +каталогом становится ${S}. По завершении работы src_compile() исходные
22219 +коды должны быть полностью скомпилированы.
22220 +.TP
22221 +.BR test
22222 +Выполняет специальные тесты для отдельных пакетов, проверяя сборку.
22223 +.TP
22224 +.BR preinst
22225 +Выполняет специальные действия для отдельных пакетов, которые
22226 +требуется произвести до установки пакета в текущую файловую систему.
22227 +.TP
22228 +.BR install
22229 +Устанавливает пакет во временный \fIкаталог установки\fR, вызывая
22230 +функцию \fIsrc_install()\fR. По завершении каталог установки в
22231 +(${BUILD_PREFIX}/[пакет]\-[версия\-ревизия]/image) будет содержать
22232 +все файлы, которые должны быть либо установлены в текущую файловую
22233 +систему, либо включены в бинарный пакет.
22234 +.TP
22235 +.BR postinst
22236 +Выполняет специальные действия для отдельных пакетов, которые
22237 +требуется произвести после установки пакета в текущую файловую
22238 +систему. Как правило, при этом выводятся полезные сообщения.
22239 +.TP
22240 +.BR qmerge
22241 +Эта функция устанавливает все файлы в \fIкаталоге установки\fR на
22242 +текущую файловую систему. Это производится следующим образом:
22243 +сначала запускается функция \fIpkg_preinst()\fR (если она существует).
22244 +Затем все файлы устанавливаются в файловую систему, а их
22245 +контрольные суммы записываются в
22246 +\fI/var/db/pkg/${CATEGORY}/${PN}-${PVR}/CONTENTS\fR. Наконец, по
22247 +завершении установки всех файлов выполняется функция
22248 +\fIpkg_postinst()\fR (если она существует).
22249 +.TP
22250 +.BR merge
22251 +Обычно для установки файла ebuild, необходимо последовательно выполнить
22252 +следующие действия: \fIfetch\fR, \fIunpack\fR, \fIcompile\fR,
22253 +\fIinstall\fR и \fIqmerge\fR. Если вам нужно только установить
22254 +файл ebuild, вы можете использовать данную команду: она сама выполнит
22255 +все перечисленные операции и остановится в процессе выполнения только
22256 +в том случае, если какая-либо функция отрабатывает с ошибкой.
22257 +.TP
22258 +.BR unmerge
22259 +Эта команда сначала вызывает функцию \fIpkg_prerm()\fR (если она существует).
22260 +Затем она удаляет все файлы из текущих файловых систем, файл содержимого
22261 +пакета для которых имеет верную контрольную сумму и время изменения.
22262 +Все пустые каталоги удаляются вместе с вложенными. Наконец, команда
22263 +запускает функцию \fIpkg_postrm()\fR (если она существует). Можно сначала
22264 +установить новую версию пакета, а затем удалить прежнюю - собственно,
22265 +именно в этом заключается рекомендуемый метод обновления.
22266 +.TP
22267 +.BR prerm
22268 +Запускает для определенного пакета действия, которые необходимо выполнить
22269 +до удаления пакета из файловой системы. См. также \fIunmerge\fR.
22270 +.TP
22271 +.BR postrm
22272 +Запускает для определенного пакета действия, которые необходимо выполнить
22273 +после удаления пакета из файловой системы. См. также \fIunmerge\fR.
22274 +.TP
22275 +.BR config
22276 +Запускает для определенного пакета действия, которые необходимо выполнить
22277 +до начала установки. Как правило, это настройка конфигурационных файлов
22278 +или другие настроечные действия, которые пользователь может захотеть
22279 +выполнить.
22280 +.TP
22281 +.BR package
22282 +Эта команда очень напоминает \fImerge\fR, за исключением того, что после
22283 +загрузки, распаковки, компиляции и установки создается .tbz2-архив
22284 +бинарного пакета, который затем сохраняется в каталоге \fBPKGDIR\fR
22285 +(см. \fBmake.conf\fR(5)).
22286 +.TP
22287 +.BR rpm
22288 +Собирает RPM\-пакет RedHat из файлов во временном \fIкаталоге установки\fR.
22289 +На данный момент сведения о зависимостях файла ebuild не включаются в RPM.
22290 +.SH "ОПЦИИ"
22291 +.TP
22292 +.BR "\-\-debug"
22293 +Запустить bash с опцией \-x, в результате чего стандартный вывод будет
22294 +включать подробную отладочную информацию.
22295 +.TP
22296 +.BR "\-\-color < y | n >"
22297 +Включить или отключить цветное отображение. Эта опция переопределяет
22298 +значение переменной \fINOCOLOR\fR (см. \fBmake.conf\fR(5)) и может быть
22299 +использована для принудительного назначения цвета в том случае, если
22300 +стандартный вывод - не терминал (по умолчанию цвет включен только в том
22301 +случае, если стандартный вывод - терминал).
22302 +.TP
22303 +.BR "\-\-force"
22304 +При использовании в связке с командой digest или manifest данная опция
22305 +принудительно генерирует новые дайджесты для всех файлов исходного кода,
22306 +относящихся к данному файлу ebuild. Если в каталоге ${DISTDIR} требуемых
22307 +исходников нет, они будут автоматически загружены.
22308 +.TP
22309 +.BR "\-\-ignore\-default\-opts"
22310 +Не использовать переменную окружения \fIEBUILD_DEFAULT_OPTS\fR.
22311 +.TP
22312 +.BR "\-\-skip\-manifest"
22313 +Пропустить проверку Manifest-файлов.
22314 +.SH "БАГТРЕКЕР"
22315 +Об обнаруженных ошибках сообщайте на http://bugs.gentoo.org/
22316 +.SH "АВТОРЫ"
22317 +.nf
22318 +Achim Gottinger <achim@g.o>
22319 +Daniel Robbins <drobbins@g.o>
22320 +Nicholas Jones <carpaski@g.o>
22321 +Mike Frysinger <vapier@g.o>
22322 +.fi
22323 +.SH "ФАЙЛЫ"
22324 +.TP
22325 +.B /etc/portage/make.conf
22326 +Содержит переменные сборки, имеющие приоритет перед значениями,
22327 +указанными в файле make.globals.
22328 +.TP
22329 +.B /etc/portage/color.map
22330 +Содержит переменные, позволяющие назначать пользовательские настройки
22331 +цветного вывода.
22332 +.SH "СМОТРИ ТАКЖЕ"
22333 +.BR emerge (1),
22334 +.BR ebuild (5),
22335 +.BR make.conf (5),
22336 +.BR color.map (5)
22337 +.TP
22338 +Сценарий \fI/usr/lib/portage/bin/ebuild.sh\fR.
22339 +.TP
22340 +Вспомогательные приложения в \fI/usr/lib/portage/bin\fR.
22341 +
22342 +.SH "ПЕРЕВОД"
22343 +.nf
22344 +Переводчик: Елена Гаврилова <e.vl.gavrilova@××××××.ru>
22345 +Правка и обновление: Романов Владимир <blueboar2@×××××.com>
22346 +Переведенная версия соответствует английской версии от 2013-07-31
22347 +.fi
22348
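The page above (a Russian translation of ebuild(1)) explains that each command implies the phases before it unless FEATURES=noauto is set; a hedged sketch with a hypothetical ebuild path:

  eb=/usr/portage/app-editors/nano/nano-2.3.2.ebuild   # hypothetical path
  ebuild "$eb" clean manifest             # fresh workdir, regenerate Manifest
  ebuild "$eb" install                    # implies setup/unpack/.../compile first
  FEATURES="noauto" ebuild "$eb" qmerge   # with noauto, run one phase at a time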
22349 diff --git a/man/ru/env-update.1 b/man/ru/env-update.1
22350 new file mode 100644
22351 index 0000000..9e0775e
22352 --- /dev/null
22353 +++ b/man/ru/env-update.1
22354 @@ -0,0 +1,35 @@
22355 +.TH "ENV-UPDATE" "1" "Aug 2008" "Portage VERSION" "Portage"
22356 +.SH "НАЗВАНИЕ"
22357 +env\-update \- автоматическое обновление настроек окружения
22358 +.SH "СИНТАКСИС"
22359 +\fBenv\-update\fR \fI[опции]\fR
22360 +.SH "ОПИСАНИЕ"
22361 +.B env\-update
22362 +читает файлы в каталоге \fI/etc/env.d\fR и автоматически генерирует
22363 +\fI/etc/profile.env\fR и \fI/etc/ld.so.conf\fR. Затем для обновления
22364 +\fI/etc/ld.so.cache\fR запускается \fBldconfig\fR(8). \fBemerge\fR(1)
22365 +автоматически вызывает \fBenv-update\fR после каждой установки пакета.
22366 +Если же вы вносите изменения в \fI/etc/env.d\fR, вам следует
22367 +самостоятельно выполнить \fBenv-update\fR, чтобы внесенные
22368 +изменения вступили в силу. Обратите внимание, что это повлияет
22369 +только на последующие операции. Чтобы изменения отразились на уже
22370 +запущенных процессах, вероятно, понадобится выполнить
22371 +\fIsource /etc/profile\fR.
22372 +.SH "ОПЦИИ"
22373 +.TP
22374 +.B \-\-no\-ldconfig
22375 +Не запускать \fBldconfig\fR (и, тем самым, опустить пересборку
22376 +кэша \fIld.so.cache\fR и т.д.).
22377 +.SH "БАГТРЕКЕР"
22378 +Об обнаруженных ошибках сообщайте на http://bugs.gentoo.org/
22379 +.SH "АВТОРЫ"
22380 +Daniel Robbins <drobbins@g.o>
22381 +.SH "СМОТРИ ТАКЖЕ"
22382 +.BR emerge (1),
22383 +.BR ldconfig (8)
22384 +
22385 +.SH "ПЕРЕВОД"
22386 +.nf
22387 +Переводчик: Елена Гаврилова <e.vl.gavrilova@××××××.ru>
22388 +Правка и обновление: Романов Владимир <blueboar2@×××××.com>
22389 +.fi
22390
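A one-line example matching the env-update page above: after editing files under /etc/env.d, regenerate the environment and re-source the profile so the current shell sees the change.

  env-update && source /etc/profile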
22391 diff --git a/man/ru/etc-update.1 b/man/ru/etc-update.1
22392 new file mode 100644
22393 index 0000000..f799317
22394 --- /dev/null
22395 +++ b/man/ru/etc-update.1
22396 @@ -0,0 +1,63 @@
22397 +.TH "ETC-UPDATE" "1" "Mar 2012" "Portage VERSION" "Portage"
22398 +.SH "НАЗВАНИЕ"
22399 +etc\-update \- обработка изменений конфигурационных файлов
22400 +.SH "СИНТАКСИС"
22401 +.BR etc\-update
22402 +[\fIопции\fR] [\fI--automode <режим>\fR] [\fIпути для сканирования\fR]
22403 +.SH "ОПИСАНИЕ"
22404 +Утилиту \fIetc\-update\fR следует запускать после установки
22405 +новых пакетов для проверки предлагаемых обновлений
22406 +конфигурационных файлов. Если новый конфигурационный файл
22407 +может перезаписать имеющийся, \fIetc\-update\fR спросит
22408 +у пользователя, как с ним поступить.
22409 +.PP
22410 +\fIetc\-update\fR проверяет все каталоги, заданные в командной
22411 +строке. Если никаких путей не задано, тогда будет использована
22412 +переменная \fICONFIG_PROTECT\fR. Все конфигурационные файлы,
22413 +которые будут найдены в переменной \fICONFIG_PROTECT_MASK\fR
22414 +будут обновлены программой \fIetc\-update\fR автоматически.
22415 +Подробнее об этом смотри в справке по \fBmake.conf\fR(5).
22416 +.PP
22417 +\fIetc\-update\fR учитывает переменные \fIPORTAGE_CONFIGROOT\fR
22418 +и \fIEROOT\fR при использовании ранее означенных переменных
22419 +(\fICONFIG_PROTECT\fR и \fICONFIG_PROTECT_MASK\fR).
22420 +.SH "ОПЦИИ"
22421 +.TP
22422 +.BR \-d ", " \-\-debug
22423 +Запускает оболочку со включенным режимом отладки.
22424 +.TP
22425 +.BR \-h ", " \-\-help
22426 +Неожиданно, показывает помощь.
22427 +.TP
22428 +.BR \-p ", " \-\-preen
22429 +Автоматически применить тривиальные изменения и выйти.
22430 +.TP
22431 +.BR \-v ", " \-\-verbose
22432 +Показывать в процессе работы настройки и информацию о
22433 +важных решениях.
22434 +.TP
22435 +.BR "\-\-automode <режим>"
22436 +Выбрать один из автоматических режимов работы. Разрешенные
22437 +режимы работы это \-3, \-5, \-7, \-9. Для более подробной
22438 +информации смотри текст, выдаваемый опцией \fI\-\-help\fR.
22439 +.SH "БАГТРЕКЕР"
22440 +Об обнаруженных ошибках сообщайте на http://bugs.gentoo.org/
22441 +.SH "АВТОРЫ"
22442 +.nf
22443 +Jochem Kossen and Leo Lipelis
22444 +Karl Trygve Kalleberg <karltk@g.o>
22445 +Mike Frysinger <vapier@g.o>
22446 +.fi
22447 +.SH "ФАЙЛЫ"
22448 +.TP
22449 +.B /etc/etc-update.conf
22450 +Здесь хранятся настройки \fIetc-update\fR.
22451 +.SH "СМОТРИ ТАКЖЕ"
22452 +.BR dispatch-conf (1),
22453 +.BR make.conf (5)
22454 +
22455 +.SH "ПЕРЕВОД"
22456 +.nf
22457 +Переводчик: Елена Гаврилова <e.vl.gavrilova@××××××.ru>
22458 +Правка и обновление: Романов Владимир <blueboar2@×××××.com>
22459 +.fi
22460
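And the corresponding sketch for etc-update; --preen is the documented "apply trivial changes and exit" switch, and the optional path argument restricts the scan (otherwise CONFIG_PROTECT is used).

  etc-update --preen   # auto-merge trivial updates and exit
  etc-update /etc      # interactively review what is left under /etc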
22461 diff --git a/man/ru/fixpackages.1 b/man/ru/fixpackages.1
22462 new file mode 100644
22463 index 0000000..41eb284
22464 --- /dev/null
22465 +++ b/man/ru/fixpackages.1
22466 @@ -0,0 +1,22 @@
22467 +.TH "FIXPACKAGES" "1" "Dec 2011" "Portage VERSION" "Portage"
22468 +.SH "НАЗВАНИЕ"
22469 +fixpackages \- выполняет переносы пакетов при обновлениях
22470 +для всех пакетов
22471 +.SH "СИНТАКСИС"
22472 +\fBfixpackages\fR
22473 +.SH "ОПИСАНИЕ"
22474 +Программа fixpackages выполняет переносы пакетов при обновлениях
22475 +для конфигурационных файлов, установленных пакетов и двоичных
22476 +пакетов.
22477 +.SH "БАГТРЕКЕР"
22478 +Об обнаруженных ошибках сообщайте на http://bugs.gentoo.org/
22479 +.SH "АВТОРЫ"
22480 +Zac Medico <zmedico@g.o>
22481 +.SH "СМОТРИ ТАКЖЕ"
22482 +.BR emaint (1),
22483 +.BR emerge (1)
22484 +
22485 +.SH "ПЕРЕВОД"
22486 +.nf
22487 +Переводчик: Романов Владимир <blueboar2@×××××.com>
22488 +.fi
22489
22490 diff --git a/man/xpak.5 b/man/xpak.5
22491 index 0b5b874..536810d 100644
22492 --- a/man/xpak.5
22493 +++ b/man/xpak.5
22494 @@ -11,7 +11,8 @@ The following conventions cover all occurrences in this documentation
22495 .IP Integer
22496 All offsets/lengths are big endian unsigned 32bit integers
22497 .IP String
22498 -All strings are ASCII encoded, and not NUL terminated (quotes are for illustration only)
22499 +All strings are ASCII encoded, and not NUL terminated (quotes are for
22500 +illustration only)
22501 .IP Values
22502 The actual values of the individual xpak entries are stored as Strings
22503 .P
22504 @@ -46,7 +47,7 @@ String \fI"STOP"\fR.
22505 |<xpak_offset>|
22506 <tar>|<---xpak---->|<xpak_offset>"STOP"
22507
22508 -Here you see the \fItar\fR archive, the attached \fIxpak\fR blob, the
22509 +Here you see the \fItar\fR archive, the attached \fIxpak\fR blob, the
22510 \fIxpak_offset\fR and the string \fI"STOP"\fR at the end. This metadata
22511 is not considered "part" of the \fIxpak\fR, but rather part of the binpkg.
22512
22513
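A small shell sketch of the binpkg trailer described above: a 4-byte big-endian integer holding the xpak length, followed by the literal string "STOP". The package path is hypothetical and this is only a quick inspection aid, not the tooling Portage itself uses.

  pkg=/usr/portage/packages/app-editors/nano-2.3.2.tbz2   # hypothetical binpkg

  tail -c 4 "$pkg"    # should print: STOP
  # the four bytes before "STOP" hold the xpak blob length (big endian)
  hex=$(tail -c 8 "$pkg" | head -c 4 | od -An -tx1 | tr -d ' \n')
  len=$((16#$hex))
  # copy just the xpak segment (everything between the tar and the trailer)
  tail -c $(( len + 8 )) "$pkg" | head -c "$len" > nano.xpak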
22514 diff --git a/misc/emerge-delta-webrsync b/misc/emerge-delta-webrsync
22515 new file mode 100755
22516 index 0000000..96564af
22517 --- /dev/null
22518 +++ b/misc/emerge-delta-webrsync
22519 @@ -0,0 +1,809 @@
22520 +#!/bin/bash
22521 +# Copyright 1999-2014 Gentoo Foundation
22522 +# Distributed under the terms of the GNU General Public License v2
22523 +# Author: Brian Harring <ferringb@g.o>, karltk@g.o originally.
22524 +# Rewritten from the old, Perl-based emerge-webrsync script
22525 +
22526 +#
22527 +# gpg key import
22528 +# KEY_ID=0x96D8BF6D
22529 +# gpg --homedir /etc/portage/gnupg --keyserver subkeys.pgp.net --recv-keys $KEY_ID
22530 +# gpg --homedir /etc/portage/gnupg --edit-key $KEY_ID trust
22531 +#
22532 +
22533 +argv0=$0
22534 +
22535 +# Only echo if not in verbose mode
22536 +nvecho() { [[ ${do_verbose} -eq 0 ]] && echo "$@" ; }
22537 +# warning echos
22538 +wecho() { echo "${argv0##*/}: warning: $*" 1>&2 ; }
22539 +# error echos
22540 +eecho() { echo "${argv0##*/}: error: $*" 1>&2 ; }
22541 +
22542 +
22543 +#-------------------
22544 +#initialization
22545 +#------------------
22546 +
22547 +# Use portageq from the same directory/prefix as the current script, so
22548 +# that we don't have to rely on PATH including the current EPREFIX.
22549 +scriptpath=${BASH_SOURCE[0]}
22550 +if [ -x "${scriptpath%/*}/portageq" ]; then
22551 + portageq=${scriptpath%/*}/portageq
22552 +elif type -P portageq > /dev/null ; then
22553 + portageq=portageq
22554 +else
22555 + eecho "could not find 'portageq'; aborting"
22556 + exit 1
22557 +fi
22558 +eval "$("${portageq}" envvar -v DISTDIR EPREFIX FEATURES \
22559 + FETCHCOMMAND GENTOO_MIRRORS \
22560 + PORTAGE_BIN_PATH PORTAGE_CONFIGROOT PORTAGE_GPG_DIR \
22561 + PORTAGE_NICENESS PORTAGE_REPOSITORIES PORTAGE_RSYNC_EXTRA_OPTS \
22562 + PORTAGE_RSYNC_OPTS PORTAGE_TMPDIR \
22563 + USERLAND http_proxy ftp_proxy)"
22564 +export http_proxy ftp_proxy
22565 +
22566 +source "${PORTAGE_BIN_PATH}"/isolated-functions.sh || exit
22567 +
22568 +repo_name=gentoo
22569 +repo_location=$(__repo_attr "${repo_name}" location)
22570 +if [[ -z ${repo_location} ]]; then
22571 + eecho "Repository '${repo_name}' not found"
22572 + exit 1
22573 +fi
22574 +
22575 +if [ -z "$NICENESS_PULLED" ]; then
22576 + if [ -n "${PORTAGE_NICENESS}" ]; then
22577 + export NICENESS_PULLED=asdf
22578 + exec nice -n "${PORTAGE_NICENESS}" "$0" "$@"
22579 + echo "failed setting PORTAGE_NICENESS to '$PORTAGE_NICENESS', disabling"
22580 + fi
22581 +fi
22582 +
22583 +STATE_DIR="${EPREFIX}/var/delta-webrsync/"
22584 +
22585 +# hack. bug 92224
22586 +if [ "${FETCHCOMMAND/getdelta.sh}" != "${FETCHCOMMAND}" ]; then
22587 + # evil evil evil evil
22588 + eval "$(grep "^FETCHCOMMAND=" "${EPREFIX}/usr/share/portage/config/make.globals")"
22589 +fi
22590 +
22591 +unset f
22592 +unset IFS
22593 +
22594 +do_verbose=0
22595 +MUST_SYNC='1'
22596 +unset PUKE_HELP
22597 +for x in $*; do
22598 + case "${x}" in
22599 + -q|--quiet)
22600 + PORTAGE_QUIET=1
22601 + continue
22602 + ;;
22603 + esac
22604 + if [[ $x == "-u" ]]; then
22605 + MUST_SYNC=''
22606 + elif [[ $x == "-k" ]]; then
22607 + KEEP_OLDIES='asdf'
22608 + elif [[ $x == "-h" ]]; then
22609 + PUKE_HELP=1
22610 + elif [[ $x == "-v" ]]; then
22611 + do_verbose=1
22612 + else
22613 + PUKE_HELP=1
22614 + echo "$x isn't a valid arg. bailing."
22615 + fi
22616 + if [[ -n $PUKE_HELP ]]; then
22617 + echo "-u for upgrade; sync only if new snapshots are found"
22618 + echo "-k for keep; keep old tree snapshots around"
22619 + exit -1
22620 + fi
22621 +done
22622 +
22623 +if [[ ! -d $STATE_DIR ]]; then
22624 + echo "$STATE_DIR doesn't exist. don't have the ability to compensate for compressor differences without it!"
22625 + exit -2
22626 +fi
22627 +
22628 +if has webrsync-gpg ${FEATURES} ; then
22629 + WEBSYNC_VERIFY_SIGNATURE=1
22630 +else
22631 + WEBSYNC_VERIFY_SIGNATURE=0
22632 +fi
22633 +if [ ${WEBSYNC_VERIFY_SIGNATURE} != 0 -a -z "${PORTAGE_GPG_DIR}" ]; then
22634 + eecho "please set PORTAGE_GPG_DIR in make.conf"
22635 + exit 1
22636 +fi
22637 +
22638 +[[ -d ${repo_location} ]] || mkdir -p "${repo_location}"
22639 +if [[ ! -w ${repo_location} ]] ; then
22640 + eecho "Repository '${repo_name}' is not writable: ${repo_location}"
22641 + exit 1
22642 +fi
22643 +
22644 +[[ -d ${DISTDIR} ]] || mkdir -p "${DISTDIR}"
22645 +if [[ ! -w ${DISTDIR} ]] ; then
22646 + eecho "DISTDIR is not writable: ${DISTDIR}"
22647 + exit 1
22648 +fi
22649 +
22650 +[[ -d ${PORTAGE_TMPDIR}/portage ]] || mkdir -p "${PORTAGE_TMPDIR}/portage"
22651 +TMPDIR=$(mktemp -d "${PORTAGE_TMPDIR}/portage/delta-webrsync-XXXXXX")
22652 +if [[ ! -w ${TMPDIR} ]] ; then
22653 + eecho "TMPDIR is not writable: ${TMPDIR}"
22654 + exit 1
22655 +fi
22656 +
22657 +cd "$DISTDIR"
22658 +
22659 +found=0
22660 +
22661 +if type -p md5sum > /dev/null; then
22662 + md5_com='md5sum -c "${MD5_LOC}" &> /dev/null'
22663 +elif type -p md5 > /dev/null; then
22664 + md5_com='[ "$(md5 -q ${FILE})" == "$(cut -d \ -f 1 ${MD5_LOC})" ]'
22665 +else
22666 + echo "warning, unable to do md5 verification of the snapshot!"
22667 + echo "no suitable md5/md5sum binary was found!"
22668 + md5_com='true'
22669 +fi
22670 +
22671 +#---------------
22672 +#funcs
22673 +#---------------
22674 +
22675 +cleanse_state_dir() {
22676 + [[ ${STATE_DIR:-/} != '/' ]] && rm -f "${STATE_DIR}"/* &> /dev/null
22677 +}
22678 +
22679 +do_tar() {
22680 + local file=$1; shift
22681 + local decompressor
22682 + case ${file} in
22683 + *.xz) decompressor="xzcat" ;;
22684 + *.bz2) decompressor="bzcat" ;;
22685 + *.gz) decompressor="zcat" ;;
22686 + *) decompressor="cat" ;;
22687 + esac
22688 + ${decompressor} "${file}" | tar "$@"
22689 + _pipestatus=${PIPESTATUS[*]}
22690 + [[ ${_pipestatus// /} -eq 0 ]]
22691 +}
22692 +
22693 +get_utc_date_in_seconds() {
22694 + date -u +"%s"
22695 +}
22696 +
22697 +get_date_part() {
22698 + local utc_time_in_secs="$1"
22699 + local part="$2"
22700 +
22701 + if [[ ${USERLAND} == BSD ]] ; then
22702 + date -r ${utc_time_in_secs} -u +"${part}"
22703 + else
22704 + date -d @${utc_time_in_secs} -u +"${part}"
22705 + fi
22706 +}
22707 +
22708 +get_utc_second_from_string() {
22709 + local s="$1"
22710 + if [[ ${USERLAND} == BSD ]] ; then
22711 + # Specify zeros for the least significant digits, or else those
22712 + # digits are inherited from the current system clock time.
22713 + date -juf "%Y%m%d%H%M.%S" "${s}0000.00" +"%s"
22714 + else
22715 + date -d "${s:0:4}-${s:4:2}-${s:6:2}" -u +"%s"
22716 + fi
22717 +}
22718 +
22719 +get_portage_timestamp() {
22720 + local portage_current_timestamp=0
22721 +
22722 + if [ -f "${repo_location}/metadata/timestamp.x" ]; then
22723 + portage_current_timestamp=$(cut -f 1 -d " " "${repo_location}/metadata/timestamp.x" )
22724 + fi
22725 +
22726 + echo "${portage_current_timestamp}"
22727 +}
22728 +
22729 +increment_date() {
22730 + local s="$1" inc="$2"
22731 + if [[ ${USERLAND} == BSD ]] ; then
22732 + # Specify zeros for the least significant digits, or else those
22733 + # digits are inherited from the current system clock time.
22734 + date -v${inc}d -juf "%Y%m%d%H%M.%S" "${s}0000.00" +"%Y%m%d"
22735 + else
22736 + date -d "${s:0:4}-${s:4:2}-${s:6:2} ${inc} day" -u +"%Y%m%d"
22737 + fi
22738 +}
22739 +
22740 +
22741 +fetch_file() {
22742 + local URI="$1"
22743 + local FILE="$2"
22744 + local opts
22745 +
22746 + if [ "${FETCHCOMMAND/wget/}" != "${FETCHCOMMAND}" ]; then
22747 + opts="--continue $(nvecho -q)"
22748 + elif [ "${FETCHCOMMAND/curl/}" != "${FETCHCOMMAND}" ]; then
22749 + opts="--continue-at - $(nvecho -s -f)"
22750 + else
22751 + rm -f "${DISTDIR}/${FILE}"
22752 + fi
22753 +
22754 + __vecho "Fetching file ${FILE} ..."
22755 + # already set DISTDIR=
22756 + eval "${FETCHCOMMAND} ${opts}"
22757 + if [[ $? -eq 0 && -s ${DISTDIR}/${FILE} ]] ; then
22758 + return 0
22759 + else
22760 + rm -f "${DISTDIR}/${FILE}"
22761 + return 1
22762 + fi
22763 +}
22764 +
22765 +check_file_digest() {
22766 + local digest="$1"
22767 + local file="$2"
22768 + local r=1
22769 +
22770 + __vecho "Checking digest ..."
22771 +
22772 + if type -P md5sum > /dev/null; then
22773 + local md5sum_output=$(md5sum "${file}")
22774 + local digest_content=$(< "${digest}")
22775 + [ "${md5sum_output%%[[:space:]]*}" = "${digest_content%%[[:space:]]*}" ] && r=0
22776 + elif type -P md5 > /dev/null; then
22777 + [ "$(md5 -q "${file}")" == "$(cut -d ' ' -f 1 "${digest}")" ] && r=0
22778 + else
22779 + eecho "cannot check digest: no suitable md5/md5sum binaries found"
22780 + fi
22781 +
22782 + return "${r}"
22783 +}
22784 +
22785 +check_file_signature() {
22786 + local signature="$1"
22787 + local file="$2"
22788 + local r=1
22789 +
22790 + if [[ ${WEBSYNC_VERIFY_SIGNATURE} != 0 ]] ; then
22791 +
22792 + __vecho "Checking signature ..."
22793 +
22794 + if type -P gpg > /dev/null; then
22795 + gpg --homedir "${PORTAGE_GPG_DIR}" --verify "$signature" "$file" && r=0
22796 + else
22797 + eecho "cannot check signature: gpg binary not found"
22798 + exit 1
22799 + fi
22800 + else
22801 + r=0
22802 + fi
22803 +
22804 + return "${r}"
22805 +}
22806 +
22807 +get_snapshot_timestamp() {
22808 + local file="$1"
22809 +
22810 + do_tar "${file}" --to-stdout -xf - portage/metadata/timestamp.x | cut -f 1 -d " "
22811 +}
22812 +
22813 +sync_local() {
22814 + local file="$1"
22815 +
22816 + __vecho "Syncing local tree ..."
22817 +
22818 + local ownership="portage:portage"
22819 + if has usersync ${FEATURES} ; then
22820 + case "${USERLAND}" in
22821 + BSD)
22822 + ownership=$(stat -f '%Su:%Sg' "${repo_location}")
22823 + ;;
22824 + *)
22825 + ownership=$(stat -c '%U:%G' "${repo_location}")
22826 + ;;
22827 + esac
22828 + fi
22829 +
22830 + if type -P tarsync > /dev/null ; then
22831 + local chown_opts="-o ${ownership%:*} -g ${ownership#*:}"
22832 + chown ${ownership} "${repo_location}" > /dev/null 2>&1 || chown_opts=""
22833 + if ! tarsync $(__vecho -v) -s 1 ${chown_opts} \
22834 + -e /distfiles -e /packages -e /local "${file}" "${repo_location}"; then
22835 + eecho "tarsync failed; tarball is corrupt? (${file})"
22836 + return 1
22837 + fi
22838 + else
22839 + if ! do_tar "${file}" xf - -C "${TMPDIR}" ; then
22840 + eecho "tar failed to extract the image. tarball is corrupt? (${file})"
22841 + rm -fr "${TMPDIR}"/portage
22842 + return 1
22843 + fi
22844 +
22845 + local rsync_opts="${PORTAGE_RSYNC_OPTS} ${PORTAGE_RSYNC_EXTRA_OPTS}"
22846 + if chown ${ownership} "${TMPDIR}"/portage > /dev/null 2>&1; then
22847 + chown -R ${ownership} "${TMPDIR}"/portage
22848 + rsync_opts+=" --owner --group"
22849 + fi
22850 + cd "${TMPDIR}"/portage
22851 + rsync ${rsync_opts} . "${repo_location%%/}"
22852 + cd "${DISTDIR}"
22853 +
22854 + __vecho "Cleaning up ..."
22855 + rm -fr "${TMPDIR}"
22856 + fi
22857 +
22858 + if has metadata-transfer ${FEATURES} ; then
22859 + __vecho "Updating cache ..."
22860 + "${PORTAGE_BIN_PATH}/emerge" --metadata
22861 + fi
22862 + local post_sync=${PORTAGE_CONFIGROOT}etc/portage/bin/post_sync
22863 + [ -x "${post_sync}" ] && "${post_sync}"
22864 + # --quiet suppresses output if there are no relevant news items
22865 + has news ${FEATURES} && "${PORTAGE_BIN_PATH}/emerge" --check-news --quiet
22866 + return 0
22867 +}
22868 +
22869 +do_snapshot() {
22870 + local ignore_timestamp="$1"
22871 + local date="$2"
22872 +
22873 + local r=1
22874 +
22875 + local base_file="portage-${date}.tar"
22876 +
22877 + local have_files=0
22878 + local mirror
22879 +
22880 + local compressions=""
22881 + type -P bzcat > /dev/null && compressions="${compressions} bz2"
22882 +
22883 + if [[ -z ${compressions} ]] ; then
22884 + eecho "unable to locate any decompressors (xzcat or bzcat or zcat)"
22885 + exit 1
22886 + fi
22887 +
22888 + for mirror in ${GENTOO_MIRRORS} ; do
22889 +
22890 + mirror=${mirror%/}
22891 + __vecho "Trying to retrieve ${date} snapshot from ${mirror} ..."
22892 +
22893 + for compression in ${compressions} ; do
22894 + local file="portage-${date}.tar.${compression}"
22895 + local digest="${file}.md5sum"
22896 + local signature="${file}.gpgsig"
22897 +
22898 + if [ -s "${DISTDIR}/${file}" -a -s "${DISTDIR}/${digest}" -a -s "${DISTDIR}/${signature}" ] ; then
22899 + check_file_digest "${DISTDIR}/${digest}" "${DISTDIR}/${file}" && \
22900 + check_file_signature "${DISTDIR}/${signature}" "${DISTDIR}/${file}" && \
22901 + have_files=1
22902 + fi
22903 +
22904 + if [ ${have_files} -eq 0 ] ; then
22905 + fetch_file "${mirror}/snapshots/${digest}" "${digest}" && \
22906 + fetch_file "${mirror}/snapshots/${signature}" "${signature}" && \
22907 + fetch_file "${mirror}/snapshots/${file}" "${file}" && \
22908 + check_file_digest "${DISTDIR}/${digest}" "${DISTDIR}/${file}" && \
22909 + check_file_signature "${DISTDIR}/${signature}" "${DISTDIR}/${file}" && \
22910 + have_files=1
22911 + fi
22912 +
22913 + #
22914 + # If timestamp is invalid
22915 + # we want to try and retrieve
22916 + # from a different mirror
22917 + #
22918 + if [ ${have_files} -eq 1 ]; then
22919 +
22920 + __vecho "Getting snapshot timestamp ..."
22921 + local snapshot_timestamp=$(get_snapshot_timestamp "${DISTDIR}/${file}")
22922 +
22923 + if [ ${ignore_timestamp} == 0 ]; then
22924 + if [ ${snapshot_timestamp} -lt $(get_portage_timestamp) ]; then
22925 + wecho "portage is newer than snapshot"
22926 + have_files=0
22927 + fi
22928 + else
22929 + local utc_seconds=$(get_utc_second_from_string "${date}")
22930 +
22931 + #
22932 + # Check that this snapshot
22933 + # is what it claims to be ...
22934 + #
22935 + if [ ${snapshot_timestamp} -lt ${utc_seconds} ] || \
22936 + [ ${snapshot_timestamp} -gt $((${utc_seconds}+ 2*86400)) ]; then
22937 +
22938 + wecho "snapshot timestamp is not in acceptable period"
22939 + have_files=0
22940 + fi
22941 + fi
22942 + fi
22943 +
22944 + if [ ${have_files} -eq 1 ]; then
22945 + break
22946 + else
22947 + #
22948 + # Remove files and use a different mirror
22949 + #
22950 + rm -f "${DISTDIR}/${file}" "${DISTDIR}/${digest}" "${DISTDIR}/${signature}"
22951 + fi
22952 + done
22953 +
22954 + [ ${have_files} -eq 1 ] && break
22955 + done
22956 +
22957 + if [ ${have_files} -eq 1 ]; then
22958 + sync_local "${DISTDIR}/${file}" && r=0
22959 + else
22960 + __vecho "${date} snapshot was not found"
22961 + fi
22962 +
22963 + return "${r}"
22964 +}
22965 +
22966 +do_latest_snapshot() {
22967 + local attempts=0
22968 + local r=1
22969 +
22970 + __vecho "Fetching most recent snapshot ..."
22971 +
22972 + # The snapshot for a given day is generated at 00:45 UTC on the following
22973 + # day, so the current day's snapshot (going by UTC time) hasn't been
22974 + # generated yet. Therefore, always start by looking for the previous day's
22975 + # snapshot (for attempts=1, subtract 1 day from the current UTC time).
22976 +
22977 + # Timestamps that differ by less than 2 hours
22978 + # are considered to be approximately equal.
22979 + local min_time_diff=$(( 2 * 60 * 60 ))
22980 +
22981 + local existing_timestamp=$(get_portage_timestamp)
22982 + local timestamp_difference
22983 + local timestamp_problem
22984 + local approx_snapshot_time
22985 + local start_time=$(get_utc_date_in_seconds)
22986 + local start_hour=$(get_date_part ${start_time} "%H")
22987 +
22988 + # Daily snapshots are created at 00:45 and are not
22989 + # available until after 01:00. Don't waste time trying
22990 + # to fetch a snapshot before it's been created.
22991 + if [ ${start_hour} -lt 1 ] ; then
22992 + (( start_time -= 86400 ))
22993 + fi
22994 + local snapshot_date=$(get_date_part ${start_time} "%Y%m%d")
22995 + local snapshot_date_seconds=$(get_utc_second_from_string ${snapshot_date})
22996 +
22997 + while (( ${attempts} < 40 )) ; do
22998 + (( attempts++ ))
22999 + (( snapshot_date_seconds -= 86400 ))
23000 + # snapshots are created at 00:45
23001 + (( approx_snapshot_time = snapshot_date_seconds + 86400 + 2700 ))
23002 + (( timestamp_difference = existing_timestamp - approx_snapshot_time ))
23003 + [ ${timestamp_difference} -lt 0 ] && (( timestamp_difference = -1 * timestamp_difference ))
23004 + snapshot_date=$(get_date_part ${snapshot_date_seconds} "%Y%m%d")
23005 +
23006 + timestamp_problem=""
23007 + if [ ${timestamp_difference} -eq 0 ]; then
23008 + timestamp_problem="is identical to"
23009 + elif [ ${timestamp_difference} -lt ${min_time_diff} ]; then
23010 + timestamp_problem="is possibly identical to"
23011 + elif [ ${approx_snapshot_time} -lt ${existing_timestamp} ] ; then
23012 + timestamp_problem="is newer than"
23013 + fi
23014 +
23015 + if [ -n "${timestamp_problem}" ]; then
23016 + ewarn "Latest snapshot date: ${snapshot_date}"
23017 + ewarn
23018 + ewarn "Approximate snapshot timestamp: ${approx_snapshot_time}"
23019 + ewarn " Current local timestamp: ${existing_timestamp}"
23020 + ewarn
23021 + echo -e "The current local timestamp" \
23022 + "${timestamp_problem} the" \
23023 + "timestamp of the latest" \
23024 + "snapshot. In order to force sync," \
23025 + "use the --revert option or remove" \
23026 + "the timestamp file located at" \
23027 + "'${repo_location}/metadata/timestamp.x'." | fmt -w 70 | \
23028 + while read -r line ; do
23029 + ewarn "${line}"
23030 + done
23031 + r=0
23032 + break
23033 + fi
23034 +
23035 + if do_snapshot 0 "${snapshot_date}"; then
23036 + r=0
23037 + break;
23038 + fi
23039 + done
23040 +
23041 + return "${r}"
23042 +}
23043 +
23044 +fetch_from_mirrors() {
23045 + local i URI FILE MIRRORS
23046 + if [[ "$#" == 3 ]]; then
23047 + MIRRORS="${3}"
23048 + else
23049 + MIRRORS=$GENTOO_MIRRORS
23050 + fi
23051 + FILE="$2"
23052 + for i in $MIRRORS ; do
23053 + URI="${i%/}/${1#/}"
23054 + fetch_file "${URI}" "${FILE}" && return 0
23055 + done
23056 + return 1
23057 +}
23058 +
23059 +verify_md5_file() {
23060 + local FILE MD5_LOC
23061 + FILE="$1"
23062 + if [[ $# == 2 ]]; then
23063 + MD5_LOC="$2"
23064 + else
23065 + MD5_LOC="$(pwd)/$1.md5sum"
23066 + fi
23067 + check_file_digest "${MD5_LOC}" "${FILE}"
23068 +}
23069 +
23070 +#--------------------
23071 +#inline actual script
23072 +#--------------------
23073 +
23074 +if ! type -p patcher &> /dev/null; then
23075 + echo "!!!"
23076 + echo "!!! cannot find patcher, did you emerge dev-util/diffball?"
23077 + echo "!!! lack of patcher == have to do full fetch"
23078 + echo "!!!"
23079 + sleep 10
23080 + if do_latest_snapshot; then
23081 + rm -fr "${TMPDIR}"
23082 + cleanse_state_dir
23083 + exit 0
23084 + fi
23085 + exit 1
23086 +fi
23087 +
23088 +echo "Looking for available base versions for a delta"
23089 +
23090 +#note we're already in distdir
23091 +
23092 +unset base_version
23093 +# portage-snapshots in reverse order.
23094 +# icky.
23095 +unset dfile
23096 +potentials="$(ls -1 portage-2[[:digit:]][[:digit:]][[:digit:]][[:digit:]][[:digit:]][[:digit:]][[:digit:]].tar.bz2 ${STATE_DIR}/portage-2[[:digit:]][[:digit:]][[:digit:]][[:digit:]][[:digit:]][[:digit:]][[:digit:]].tar.bz2 2> /dev/null | sed -e 's:^.*/::' | sort -r)"
23097 +for basef in ${potentials}; do
23098 + chksum=''
23099 + found="dar"
23100 + if [ -e "${STATE_DIR}/${basef}.md5sum" ]; then
23101 + chksum="${STATE_DIR}/${basef}.md5sum"
23102 + elif [ -e "${basef}.md5sum" ]; then
23103 + chksum="${DISTDIR}/${basef}.md5sum"
23104 + else
23105 + echo "attempting to get md5sum for $basef"
23106 + if ! fetch_from_mirrors "/snapshots/${basef}.md5sum" "${basef}.md5sum"; then
23107 + echo "can't get md5 for ${basef}"
23108 + continue
23109 + fi
23110 + chksum="${basef}.md5sum"
23111 + fi
23112 + if [ -e "${basef}" ]; then
23113 + dfile="${DISTDIR}/${basef}"
23114 + else
23115 + dfile="${STATE_DIR}/${basef}"
23116 + fi
23117 + if ! verify_md5_file "${dfile}" "${chksum}"; then
23118 + echo "found a stale snapshot. cleansing"
23119 + rm -f "${dfile}" &> /dev/null
23120 + rm -f "${chksum}.md5sum" &> /dev/null
23121 + dar=""
23122 + else
23123 + base_version="${basef}"
23124 + break
23125 + fi
23126 +done
23127 +
23128 +#by this point, we either have a base_version, or we don't.
23129 +if [[ -z ${base_version} ]]; then
23130 + echo "no base found. resorting to pulling a full version"
23131 + if do_latest_snapshot; then
23132 + rm -fr "${TMPDIR}"
23133 + cleanse_state_dir
23134 + exit 0
23135 + fi
23136 + exit 1
23137 +fi
23138 +
23139 +#we have a md5 verified base. now we get the patch.
23140 +
23141 +base_date="${base_version%.tar.bz2}"
23142 +base_date="${base_date#portage-}"
23143 +# we now have yyyymmdd
23144 +
23145 +patches=''
23146 +echo "fetching patches"
23147 +fetched='asdf'
23148 +while [[ -n ${fetched} ]]; do
23149 + next_day=$(increment_date ${base_date} +1)
23150 + # if we can't get a *single* patch or md5, even one missing, do full.
23151 + p="snapshot-${base_date}-${next_day}.patch.bz2"
23152 + if [[ ! -e ${p}.md5sum ]] && ! fetch_from_mirrors "/snapshots/deltas/${p}.md5sum" "${p}.md5sum"; then
23153 + echo "failed fetching ${p}.md5sum"
23154 + fetched=''
23155 + break
23156 + fi
23157 + fetch="yes"
23158 + if [[ -e ${p} ]]; then
23159 + if ! verify_md5_file "${p}"; then
23160 + rm -f "${p}" &> /dev/null
23161 + else
23162 + fetch=""
23163 + fi
23164 + fi
23165 + if [[ -n $fetch ]]; then
23166 + if ! fetch_from_mirrors "/snapshots/deltas/${p}" "${p}"; then
23167 + echo "failed fetching ${p}"
23168 + fetched=''
23169 + fi
23170 + fi
23171 + if [[ -z ${fetched} ]]; then
23172 + break
23173 + fi
23174 + if ! verify_md5_file "${p}"; then
23175 + echo "md5 failed on ${p}"
23176 + fetched=''
23177 + break
23178 + fi
23179 + patches="${patches} ${p}"
23180 + base_date="${next_day}"
23181 +done
23182 +final_date=${base_date}
23183 +
23184 +if [[ -z $patches ]]; then
23185 + echo "no patches found? up to date?"
23186 + if [[ -n $MUST_SYNC ]]; then
23187 + echo "syncing with existing file"
23188 + if [[ ${WEBSYNC_VERIFY_SIGNATURE} == 1 &&
23189 + ! -e ${DISTDIR}/portage-${base_date}.tar.bz2.gpgsig ]] && \
23190 + ! fetch_from_mirrors "/snapshots/portage-${base_date}.tar.bz2.gpgsig" "portage-${base_date}.tar.bz2.gpgsig" ; then
23191 + eecho "Couldn't fetch portage-${base_date}.tar.bz2.gpgsig"
23192 + exit 5
23193 + fi
23194 + if [[ ${WEBSYNC_VERIFY_SIGNATURE} == 1 ]] ; then
23195 + check_file_signature "${DISTDIR}/portage-${base_date}.tar.bz2.gpgsig" "${dfile}" || exit 1
23196 + fi
23197 + sync_local "${dfile}" && rm -fr "${TMPDIR}"
23198 + else
23199 + rm -fr "${TMPDIR}"
23200 + fi
23201 + exit $?
23202 +fi
23203 +
23204 +unset got_umd5
23205 +#grab the md5 for later usage.
23206 +if [[ ! -e portage-${final_date}.tar.bz2.md5sum ]] && ! fetch_from_mirrors "/snapshots/portage-${final_date}.tar.bz2.md5sum" "portage-${final_date}.tar.bz2.md5sum"; then
23207 + echo "warning... couldn't grab the md5sum for ${final_date}. which is odd"
23208 + echo "thus, bailing (sorry)"
23209 + exit 5
23210 +else
23211 + if [[ ! -e portage-${final_date}.tar.bz2.umd5sum ]] && ! fetch_from_mirrors "/snapshots/portage-${final_date}.tar.bz2.umd5sum" "portage-${final_date}.tar.bz2.umd5sum"; then
23212 + if ! fetch_from_mirrors "/snapshots/portage-${final_date}.tar.bz2.umd5sum" "portage-${final_date}.tar.bz2.umd5sum"; then
23213 + echo "couldn't grab umd5sum (uncompressed md5sum) for ${final_date}."
23214 + echo "can't compensate for bzip2 version differences iow."
23215 + else
23216 + got_umd5=1
23217 + fi
23218 + else
23219 + got_umd5=1
23220 + fi
23221 +fi
23222 +
23223 +if [[ ${WEBSYNC_VERIFY_SIGNATURE} == 1 && ! -e portage-${final_date}.tar.bz2.gpgsig ]] && \
23224 + ! fetch_from_mirrors "/snapshots/portage-${final_date}.tar.bz2.gpgsig" "portage-${final_date}.tar.bz2.gpgsig" ; then
23225 + echo "warning... couldn't grab the gpgsig for ${final_date}. which is odd"
23226 + echo "thus, bailing (sorry)"
23227 + exit 5
23228 +fi
23229 +
23230 +# got our patches.
23231 +if ! patcher -v "${dfile}" ${patches} "${TMPDIR}/portage-${final_date}.tar"; then
23232 + echo "reconstruction failed (contact the author with the error from the reconstructor please)"
23233 + rm -f "${TMPDIR}/portage-${final_date}.tar"
23234 + if do_latest_snapshot; then
23235 + rm -fr "${TMPDIR}"
23236 + cleanse_state_dir
23237 + exit 0
23238 + fi
23239 + exit 1
23240 +fi
23241 +verified=0
23242 +if [[ -n $got_umd5 ]]; then
23243 + echo "verifying uncompressed md5"
23244 + if ! verify_md5_file "${TMPDIR}/portage-${final_date}.tar" "${DISTDIR}/portage-${final_date}.tar.bz2.umd5sum"; then
23245 + echo "uncompressed verification failed. This means either you found a bug in diffball, or something odd is going on"
23246 + echo "with upstream patch generation"
23247 + echo "trying md5sum next, which probably will fail."
23248 + else
23249 + verified="1"
23250 + fi
23251 +fi
23252 +
23253 +unset need_last_sync
23254 +if [ "$verified" == "1" ]; then
23255 + need_last_sync="dar"
23256 + if [[ ${WEBSYNC_VERIFY_SIGNATURE} == 1 ]] ; then
23257 + # BUG: Signature verification will fail if the local bzip2
23258 + # program does not produce output that is perfectly identical
23259 + # to the bzip2 program used to compress the signed tar file.
23260 + echo "recompressing ..."
23261 + bzip2 -vk9 "${TMPDIR}/portage-${final_date}.tar"
23262 + check_file_signature "${DISTDIR}/portage-${final_date}.tar.bz2.gpgsig" "${TMPDIR}/portage-${final_date}.tar.bz2" || exit 1
23263 + else
23264 + echo "recompressing. (backgrounding)"
23265 + bzip2 -vk9 "${TMPDIR}/portage-${final_date}.tar" &
23266 + fi
23267 +
23268 + echo "beginning update to the tree"
23269 + sync_local "${TMPDIR}/portage-${final_date}.tar"
23270 + echo "doing final md5 stuff"
23271 + wait
23272 + # bzip2 is finished now.
23273 + rm -f "${TMPDIR}/portage-${final_date}.tar"
23274 +else
23275 + echo "recompressing."
23276 + bzip2 -v9 "${TMPDIR}/portage-${final_date}.tar"
23277 +fi
23278 +
23279 +echo "verifying generated tarball"
23280 +
23281 +if ! verify_md5_file "${TMPDIR}/portage-${final_date}.tar.bz2" "${DISTDIR}/portage-${final_date}.tar.bz2.md5sum"; then
23282 + if [[ ${verified} != "1" ]]; then
23283 + echo "couldn't verify the generated tarball. bug, most likely."
23284 + exit 5
23285 + fi
23286 + # hokay. md5 doesn't agree with umd5. bzip2 issue in effect.
23287 + echo "compressed md5 differs, but uncompressed md5 says it right. bzip2 version incompatability in other words"
23288 + echo "saving the md5"
23289 + if type -p md5sum &> /dev/null; then
23290 + md5sum "${TMPDIR}/portage-${final_date}.tar.bz2" | sed -e "s:${TMPDIR}/\?::" > \
23291 + "${STATE_DIR}/portage-${final_date}.tar.bz2.md5sum"
23292 + elif type -p md5 &> /dev/null; then
23293 + echo "$(md5 -q "${TMPDIR}/portage-${final_date}.tar.bz2") portage-${final_date}.tar.bz2" > \
23294 + "${STATE_DIR}/portage-${final_date}.tar.bz2.md5sum"
23295 + else
23296 + echo "couldn't find either md5 or md5sum. something is screwed... (bailing, sorry)"
23297 + exit 7
23298 + fi
23299 + mv "${DISTDIR}/portage-${final_date}.tar.bz2.umd5sum" "${TMPDIR}/portage-${final_date}.tar.bz2" "${STATE_DIR}/"
23300 + dfile="${STATE_DIR}/portage-${final_date}.tar.bz2"
23301 +else
23302 + dfile="${DISTDIR}/portage-${final_date}.tar.bz2"
23303 + mv "${TMPDIR}/portage-${final_date}.tar.bz2" "${DISTDIR}/"
23304 +fi
23305 +
23306 +if [ -z "${need_last_sync}" ]; then
23307 + if [[ ${WEBSYNC_VERIFY_SIGNATURE} == 1 ]] ; then
23308 + check_file_signature "${DISTDIR}/portage-${final_date}.tar.bz2.gpgsig" "${dfile}" || exit 1
23309 + fi
23310 + echo "beginning update to the tree"
23311 + sync_local "${dfile}"
23312 +fi
23313 +
23314 +for x in ${patches} ; do
23315 + rm -f "${DISTDIR}/${x}"{,.md5sum}
23316 +done
23317 +
23318 +if [[ -z $KEEP_OLDIES ]]; then
23319 + echo "cleansing"
23320 + for x in $potentials; do
23321 + echo "removing ${x}"
23322 + rm -f "${DISTDIR}/${x}"{,.md5sum,.umd5sum,.gpgsig} &> /dev/null
23323 + rm -f "${STATE_DIR}/${x}"{,.md5sum,.umd5sum} &> /dev/null
23324 + done
23325 +fi
23326 +rm -rf "${TMPDIR}"
23327 +echo "done."
23328 +
23329
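do_latest_snapshot() above walks backwards one day at a time, for at most 40 attempts, on the assumption that the daily snapshot is generated at 00:45 UTC and is not usable before 01:00. A minimal Python sketch of that date arithmetic only (standard library; the function name and the do_snapshot reference in the usage comment are illustrative, and the timestamp comparisons against the local tree are omitted):

    from datetime import datetime, timedelta, timezone

    def candidate_snapshot_dates(now=None, max_attempts=40):
        """Yield YYYYMMDD snapshot dates to try, newest first.

        Mirrors the shell logic: the snapshot for day D is built at
        00:45 UTC on D+1, so before 01:00 UTC we step back an extra day.
        """
        now = now or datetime.now(timezone.utc)
        start = now - timedelta(days=1) if now.hour < 1 else now
        day = start.replace(hour=0, minute=0, second=0, microsecond=0)
        for _ in range(max_attempts):
            day -= timedelta(days=1)
            yield day.strftime("%Y%m%d")

    # for date in candidate_snapshot_dates():
    #     if do_snapshot(0, date):    # shell function above; shown for shape only
    #         break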
23330 diff --git a/mkrelease.sh b/mkrelease.sh
23331 index 87bb4bf5..f9f7564 100755
23332 --- a/mkrelease.sh
23333 +++ b/mkrelease.sh
23334 @@ -1,4 +1,6 @@
23335 #!/bin/bash
23336 +# Copyright 2008-2014 Gentoo Foundation
23337 +# Distributed under the terms of the GNU General Public License v2
23338
23339 RELEASE_BUILDDIR=${RELEASE_BUILDDIR:-/var/tmp/portage-release}
23340 SOURCE_DIR=${RELEASE_BUILDDIR}/checkout
23341 @@ -6,21 +8,27 @@ BRANCH=${BRANCH:-master}
23342 USE_TAG=false
23343 CHANGELOG_REVISION=
23344 UPLOAD_LOCATION=
23345 +RUNTESTS=false
23346 +USER=
23347 +
23348 +usage() {
23349 + echo "Usage: ${0##*/} [--changelog-rev <tree-ish>] [-t|--tag] [-u|--upload <location>] [--user <username>] [--runtests] <version>"
23350 + exit ${1:-0}
23351 +}
23352
23353 die() {
23354 - echo $@
23355 - echo "Usage: ${0##*/} [--changelog-rev <tree-ish>] [-t|--tag] [-u|--upload <location>] <version>"
23356 - exit 1
23357 + printf 'error: %s\n' "$*"
23358 + usage 1
23359 }
23360
23361 -ARGS=$(getopt -o tu: --long changelog-rev:,tag,upload: \
23362 - -n ${0##*/} -- "$@")
23363 +ARGS=$(getopt -o htu: --long help,changelog-rev:,runtests,tag,upload:,user: \
23364 + -n "${0##*/}" -- "$@")
23365 [ $? != 0 ] && die "initialization error"
23366
23367 eval set -- "${ARGS}"
23368
23369 while true; do
23370 - case "$1" in
23371 + case $1 in
23372 --changelog-rev)
23373 CHANGELOG_REVISION=$2
23374 shift 2
23375 @@ -30,9 +38,20 @@ while true; do
23376 shift
23377 ;;
23378 -u|--upload)
23379 - UPLOAD_LOCATION=${2}
23380 + UPLOAD_LOCATION=$2
23381 shift 2
23382 ;;
23383 + --user)
23384 + USER=$2"@"
23385 + shift 2
23386 + ;;
23387 + -h|--help)
23388 + usage
23389 + ;;
23390 + --runtests)
23391 + RUNTESTS=true
23392 + shift
23393 + ;;
23394 --)
23395 shift
23396 break
23397 @@ -43,56 +62,62 @@ while true; do
23398 esac
23399 done
23400
23401 -[ -z "$1" ] && die "Need version argument"
23402 -[ -n "${1/[0-9]*}" ] && die "Invalid version argument"
23403 +[ $# != 1 ] && die "Need version argument"
23404 +[[ -n ${1/[0-9]*} ]] && die "Invalid version argument"
23405
23406 -VERSION=${1}
23407 +VERSION=$1
23408 RELEASE=portage-${VERSION}
23409 RELEASE_DIR=${RELEASE_BUILDDIR}/${RELEASE}
23410 RELEASE_TARBALL="${RELEASE_BUILDDIR}/${RELEASE}.tar.bz2"
23411 -TREE_ISH=$BRANCH
23412 -if [[ $USE_TAG = true ]] ; then
23413 - TREE_ISH=v$VERSION
23414 +TREE_ISH=${BRANCH}
23415 +if [[ ${USE_TAG} == "true" ]] ; then
23416 + TREE_ISH="v${VERSION}"
23417 fi
23418
23419 echo ">>> Cleaning working directories ${RELEASE_DIR} ${SOURCE_DIR}"
23420 rm -rf "${RELEASE_DIR}" "${SOURCE_DIR}" || die "directory cleanup failed"
23421 mkdir -p "${RELEASE_DIR}" || die "directory creation failed"
23422 -mkdir -p "$SOURCE_DIR" || die "mkdir failed"
23423 +mkdir -p "${SOURCE_DIR}" || die "mkdir failed"
23424
23425 echo ">>> Starting GIT archive"
23426 -git archive --format=tar $TREE_ISH | \
23427 - tar -xf - -C "$SOURCE_DIR" || die "git archive failed"
23428 +git archive --format=tar ${TREE_ISH} | \
23429 + tar -xf - -C "${SOURCE_DIR}" || die "git archive failed"
23430
23431 echo ">>> Building release tree"
23432 -cp -a "${SOURCE_DIR}/"{bin,cnf,doc,man,pym} "${RELEASE_DIR}/" || die "directory copy failed"
23433 -cp "${SOURCE_DIR}/"{DEVELOPING,LICENSE,Makefile,NEWS,RELEASE-NOTES,TEST-NOTES} \
23434 +cp -a "${SOURCE_DIR}/"{bin,cnf,doc,man,misc,pym} "${RELEASE_DIR}/" || die "directory copy failed"
23435 +cp "${SOURCE_DIR}/"{.portage_not_installed,DEVELOPING,LICENSE,Makefile,NEWS,README,RELEASE-NOTES,TEST-NOTES} \
23436 "${RELEASE_DIR}/" || die "file copy failed"
23437
23438 -rm -rf "$SOURCE_DIR" || die "directory cleanup failed"
23439 +if [[ ${RUNTESTS} == "true" ]] ; then
23440 + pushd "${SOURCE_DIR}" >/dev/null
23441 + ./runtests.sh --python-versions=supported || die "tests failed"
23442 + popd >/dev/null
23443 +fi
23444 +
23445 +rm -rf "${SOURCE_DIR}" || die "directory cleanup failed"
23446
23447 echo ">>> Setting portage.VERSION"
23448 -sed -e "s/^VERSION=.*/VERSION=\"${VERSION}\"/" \
23449 +sed -e "s/^VERSION = .*/VERSION = \"${VERSION}\"/" \
23450 -i "${RELEASE_DIR}/pym/portage/__init__.py" || \
23451 die "Failed to patch portage.VERSION"
23452
23453 echo ">>> Creating Changelog"
23454 git_log_opts=""
23455 -if [ -n "$CHANGELOG_REVISION" ] ; then
23456 - git_log_opts+=" $CHANGELOG_REVISION^..$TREE_ISH"
23457 +if [[ -n ${CHANGELOG_REVISION} ]] ; then
23458 + git_log_opts+=" ${CHANGELOG_REVISION}^..${TREE_ISH}"
23459 else
23460 - git_log_opts+=" $TREE_ISH"
23461 + git_log_opts+=" ${TREE_ISH}"
23462 fi
23463 skip_next=false
23464 -git log $git_log_opts | fmt -w 80 -p " " | while read -r ; do
23465 - if [[ $skip_next = true ]] ; then
23466 +git log ${git_log_opts} | fmt -w 80 -p " " | while read -r ; do
23467 + if [[ ${skip_next} == "true" ]] ; then
23468 skip_next=false
23469 - elif [[ $REPLY = " svn path="* ]] ; then
23470 + elif [[ ${REPLY} == " svn path="* ]] ; then
23471 skip_next=true
23472 else
23473 - echo "$REPLY"
23474 + echo "${REPLY}"
23475 fi
23476 -done > "$RELEASE_DIR/ChangeLog" || die "ChangeLog creation failed"
23477 +done > "${RELEASE_DIR}/ChangeLog" || die "ChangeLog creation failed"
23478
23479 cd "${RELEASE_BUILDDIR}"
23480
23481 @@ -101,16 +126,16 @@ tar --owner portage --group portage -cjf "${RELEASE_TARBALL}" "${RELEASE}" || \
23482 die "tarball creation failed"
23483
23484 DISTDIR=$(portageq distdir)
23485 -if [ -n "${DISTDIR}" -a -d "${DISTDIR}" -a -w "${DISTDIR}" ]; then
23486 +if [[ -n ${DISTDIR} && -d ${DISTDIR} && -w ${DISTDIR} ]] ; then
23487 echo ">>> Copying release tarball into ${DISTDIR}"
23488 cp "${RELEASE_TARBALL}" "${DISTDIR}"/ || echo "!!! tarball copy failed"
23489 fi
23490
23491 -if [ -n "${UPLOAD_LOCATION}" ]; then
23492 - echo ">>> Uploading ${RELEASE_TARBALL} to ${UPLOAD_LOCATION}"
23493 - scp "${RELEASE_TARBALL}" "dev.gentoo.org:${UPLOAD_LOCATION}" || die "upload failed"
23494 +if [[ -n ${UPLOAD_LOCATION} ]] ; then
23495 + echo ">>> Uploading ${RELEASE_TARBALL} to ${USER}dev.gentoo.org:${UPLOAD_LOCATION}"
23496 + scp "${RELEASE_TARBALL}" "${USER}dev.gentoo.org:${UPLOAD_LOCATION}" || die "upload failed"
23497 else
23498 - echo "${RELEASE_TARBALL} created"
23499 + du -h "${RELEASE_TARBALL}"
23500 fi
23501
23502 exit 0
23503
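The ChangeLog loop in mkrelease.sh drops every line beginning with " svn path=" together with the line that follows it, using a one-line-lookahead flag. The same filter, sketched as a Python generator for clarity (the function name is made up):

    def filter_changelog(lines):
        """Drop ' svn path=' lines plus the line immediately after each."""
        skip_next = False
        for line in lines:
            if skip_next:
                skip_next = False
            elif line.startswith(" svn path="):
                skip_next = True
            else:
                yield line

    # e.g. "\n".join(filter_changelog(git_log_output.splitlines()))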
23504 diff --git a/pym/_emerge/AbstractDepPriority.py b/pym/_emerge/AbstractDepPriority.py
23505 index 94f26ef..1fcd043 100644
23506 --- a/pym/_emerge/AbstractDepPriority.py
23507 +++ b/pym/_emerge/AbstractDepPriority.py
23508 @@ -1,11 +1,12 @@
23509 -# Copyright 1999-2012 Gentoo Foundation
23510 +# Copyright 1999-2013 Gentoo Foundation
23511 # Distributed under the terms of the GNU General Public License v2
23512
23513 import copy
23514 from portage.util.SlotObject import SlotObject
23515
23516 class AbstractDepPriority(SlotObject):
23517 - __slots__ = ("buildtime", "runtime", "runtime_post")
23518 + __slots__ = ("buildtime", "buildtime_slot_op",
23519 + "runtime", "runtime_post", "runtime_slot_op")
23520
23521 def __lt__(self, other):
23522 return self.__int__() < other
23523
23524 diff --git a/pym/_emerge/AbstractEbuildProcess.py b/pym/_emerge/AbstractEbuildProcess.py
23525 index c7b8f83..31127f4 100644
23526 --- a/pym/_emerge/AbstractEbuildProcess.py
23527 +++ b/pym/_emerge/AbstractEbuildProcess.py
23528 @@ -1,8 +1,10 @@
23529 -# Copyright 1999-2011 Gentoo Foundation
23530 +# Copyright 1999-2012 Gentoo Foundation
23531 # Distributed under the terms of the GNU General Public License v2
23532
23533 import io
23534 +import platform
23535 import stat
23536 +import subprocess
23537 import textwrap
23538 from _emerge.SpawnProcess import SpawnProcess
23539 from _emerge.EbuildBuildDir import EbuildBuildDir
23540 @@ -20,8 +22,10 @@ class AbstractEbuildProcess(SpawnProcess):
23541
23542 __slots__ = ('phase', 'settings',) + \
23543 ('_build_dir', '_ipc_daemon', '_exit_command', '_exit_timeout_id')
23544 +
23545 _phases_without_builddir = ('clean', 'cleanrm', 'depend', 'help',)
23546 _phases_interactive_whitelist = ('config',)
23547 + _phases_without_cgroup = ('preinst', 'postinst', 'prerm', 'postrm', 'config')
23548
23549 # Number of milliseconds to allow natural exit of the ebuild
23550 # process after it has called the exit command via IPC. It
23551 @@ -52,13 +56,48 @@ class AbstractEbuildProcess(SpawnProcess):
23552 if need_builddir and \
23553 not os.path.isdir(self.settings['PORTAGE_BUILDDIR']):
23554 msg = _("The ebuild phase '%s' has been aborted "
23555 - "since PORTAGE_BUILDIR does not exist: '%s'") % \
23556 + "since PORTAGE_BUILDDIR does not exist: '%s'") % \
23557 (self.phase, self.settings['PORTAGE_BUILDDIR'])
23558 self._eerror(textwrap.wrap(msg, 72))
23559 self._set_returncode((self.pid, 1 << 8))
23560 - self.wait()
23561 + self._async_wait()
23562 return
23563
23564 + # Check if the cgroup hierarchy is in place. If it's not, mount it.
23565 + if (os.geteuid() == 0 and platform.system() == 'Linux'
23566 + and 'cgroup' in self.settings.features
23567 + and self.phase not in self._phases_without_cgroup):
23568 + cgroup_root = '/sys/fs/cgroup'
23569 + cgroup_portage = os.path.join(cgroup_root, 'portage')
23570 + cgroup_path = os.path.join(cgroup_portage,
23571 + '%s:%s' % (self.settings["CATEGORY"],
23572 + self.settings["PF"]))
23573 + try:
23574 + # cgroup tmpfs
23575 + if not os.path.ismount(cgroup_root):
23576 + # we expect /sys/fs to be there already
23577 + if not os.path.isdir(cgroup_root):
23578 + os.mkdir(cgroup_root, 0o755)
23579 + subprocess.check_call(['mount', '-t', 'tmpfs',
23580 + '-o', 'rw,nosuid,nodev,noexec,mode=0755',
23581 + 'tmpfs', cgroup_root])
23582 +
23583 + # portage subsystem
23584 + if not os.path.ismount(cgroup_portage):
23585 + if not os.path.isdir(cgroup_portage):
23586 + os.mkdir(cgroup_portage, 0o755)
23587 + subprocess.check_call(['mount', '-t', 'cgroup',
23588 + '-o', 'rw,nosuid,nodev,noexec,none,name=portage',
23589 + 'tmpfs', cgroup_portage])
23590 +
23591 + # the ebuild cgroup
23592 + if not os.path.isdir(cgroup_path):
23593 + os.mkdir(cgroup_path)
23594 + except (subprocess.CalledProcessError, OSError):
23595 + pass
23596 + else:
23597 + self.cgroup = cgroup_path
23598 +
23599 if self.background:
23600 # Automatically prevent color codes from showing up in logs,
23601 # since we're not displaying to a terminal anyway.
23602 @@ -67,7 +106,7 @@ class AbstractEbuildProcess(SpawnProcess):
23603 if self._enable_ipc_daemon:
23604 self.settings.pop('PORTAGE_EBUILD_EXIT_FILE', None)
23605 if self.phase not in self._phases_without_builddir:
23606 - if 'PORTAGE_BUILDIR_LOCKED' not in self.settings:
23607 + if 'PORTAGE_BUILDDIR_LOCKED' not in self.settings:
23608 self._build_dir = EbuildBuildDir(
23609 scheduler=self.scheduler, settings=self.settings)
23610 self._build_dir.lock()
23611 @@ -143,9 +182,14 @@ class AbstractEbuildProcess(SpawnProcess):
23612 self._exit_command.reply_hook = self._exit_command_callback
23613 query_command = QueryCommand(self.settings, self.phase)
23614 commands = {
23615 - 'best_version' : query_command,
23616 - 'exit' : self._exit_command,
23617 - 'has_version' : query_command,
23618 + 'available_eclasses' : query_command,
23619 + 'best_version' : query_command,
23620 + 'eclass_path' : query_command,
23621 + 'exit' : self._exit_command,
23622 + 'has_version' : query_command,
23623 + 'license_path' : query_command,
23624 + 'master_repositories' : query_command,
23625 + 'repository_path' : query_command,
23626 }
23627 input_fifo, output_fifo = self._init_ipc_fifos()
23628 self._ipc_daemon = EbuildIpcDaemon(commands=commands,
23629
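The new cgroup block in AbstractEbuildProcess only creates the hierarchy and records the per-ebuild directory in self.cgroup; how the spawned phase is attached is not part of this hunk. For the cgroup-v1 style, name=portage hierarchy mounted here, attachment amounts to writing the child PID into the cgroup's tasks file. A sketch of that mechanism (not Portage's SpawnProcess code; Linux and root are assumed, matching the guards above):

    import os

    def add_pid_to_cgroup(cgroup_path, pid):
        """Attach a process to a v1 cgroup by appending its PID to 'tasks'."""
        with open(os.path.join(cgroup_path, "tasks"), "a") as f:
            f.write("%d\n" % pid)

    # e.g. add_pid_to_cgroup("/sys/fs/cgroup/portage/app-misc:foo-1.0", child_pid)
    # (the category:PF path mirrors cgroup_path as constructed above)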
23630 diff --git a/pym/_emerge/AbstractPollTask.py b/pym/_emerge/AbstractPollTask.py
23631 index 2c84709..3f6dd6c 100644
23632 --- a/pym/_emerge/AbstractPollTask.py
23633 +++ b/pym/_emerge/AbstractPollTask.py
23634 @@ -151,4 +151,4 @@ class AbstractPollTask(AsynchronousTask):
23635 while self._registered and not timeout_cb.timed_out:
23636 self.scheduler.iteration()
23637 finally:
23638 - self.scheduler.unregister(timeout_cb.timeout_id)
23639 + self.scheduler.source_remove(timeout_cb.timeout_id)
23640
23641 diff --git a/pym/_emerge/AsynchronousLock.py b/pym/_emerge/AsynchronousLock.py
23642 index 587aa46..c0b9b26 100644
23643 --- a/pym/_emerge/AsynchronousLock.py
23644 +++ b/pym/_emerge/AsynchronousLock.py
23645 @@ -1,4 +1,4 @@
23646 -# Copyright 2010-2012 Gentoo Foundation
23647 +# Copyright 2010-2013 Gentoo Foundation
23648 # Distributed under the terms of the GNU General Public License v2
23649
23650 import dummy_threading
23651 @@ -49,7 +49,7 @@ class AsynchronousLock(AsynchronousTask):
23652 pass
23653 else:
23654 self.returncode = os.EX_OK
23655 - self.wait()
23656 + self._async_wait()
23657 return
23658
23659 if self._force_process or \
23660 @@ -105,44 +105,27 @@ class _LockThread(AbstractPollTask):
23661 """
23662
23663 __slots__ = ('path',) + \
23664 - ('_files', '_force_dummy', '_lock_obj',
23665 - '_thread', '_reg_id',)
23666 + ('_force_dummy', '_lock_obj', '_thread',)
23667
23668 def _start(self):
23669 - pr, pw = os.pipe()
23670 - self._files = {}
23671 - self._files['pipe_read'] = pr
23672 - self._files['pipe_write'] = pw
23673 - for f in self._files.values():
23674 - fcntl.fcntl(f, fcntl.F_SETFL,
23675 - fcntl.fcntl(f, fcntl.F_GETFL) | os.O_NONBLOCK)
23676 - self._reg_id = self.scheduler.register(self._files['pipe_read'],
23677 - self.scheduler.IO_IN, self._output_handler)
23678 self._registered = True
23679 threading_mod = threading
23680 if self._force_dummy:
23681 threading_mod = dummy_threading
23682 self._thread = threading_mod.Thread(target=self._run_lock)
23683 + self._thread.daemon = True
23684 self._thread.start()
23685
23686 def _run_lock(self):
23687 self._lock_obj = lockfile(self.path, wantnewlockfile=True)
23688 - os.write(self._files['pipe_write'], b'\0')
23689 -
23690 - def _output_handler(self, f, event):
23691 - buf = None
23692 - if event & self.scheduler.IO_IN:
23693 - try:
23694 - buf = os.read(self._files['pipe_read'], self._bufsize)
23695 - except OSError as e:
23696 - if e.errno not in (errno.EAGAIN,):
23697 - raise
23698 - if buf:
23699 - self._unregister()
23700 - self.returncode = os.EX_OK
23701 - self.wait()
23702 + # Thread-safe callback to EventLoop
23703 + self.scheduler.idle_add(self._run_lock_cb)
23704
23705 - return True
23706 + def _run_lock_cb(self):
23707 + self._unregister()
23708 + self.returncode = os.EX_OK
23709 + self.wait()
23710 + return False
23711
23712 def _cancel(self):
23713 # There's currently no way to force thread termination.
23714 @@ -163,15 +146,6 @@ class _LockThread(AbstractPollTask):
23715 self._thread.join()
23716 self._thread = None
23717
23718 - if self._reg_id is not None:
23719 - self.scheduler.unregister(self._reg_id)
23720 - self._reg_id = None
23721 -
23722 - if self._files is not None:
23723 - for f in self._files.values():
23724 - os.close(f)
23725 - self._files = None
23726 -
23727 class _LockProcess(AbstractPollTask):
23728 """
23729 This uses the portage.locks module to acquire a lock asynchronously,
23730 @@ -190,16 +164,28 @@ class _LockProcess(AbstractPollTask):
23731 self._files = {}
23732 self._files['pipe_in'] = in_pr
23733 self._files['pipe_out'] = out_pw
23734 +
23735 fcntl.fcntl(in_pr, fcntl.F_SETFL,
23736 fcntl.fcntl(in_pr, fcntl.F_GETFL) | os.O_NONBLOCK)
23737 - self._reg_id = self.scheduler.register(in_pr,
23738 +
23739 + # FD_CLOEXEC is enabled by default in Python >=3.4.
23740 + if sys.hexversion < 0x3040000:
23741 + try:
23742 + fcntl.FD_CLOEXEC
23743 + except AttributeError:
23744 + pass
23745 + else:
23746 + fcntl.fcntl(in_pr, fcntl.F_SETFD,
23747 + fcntl.fcntl(in_pr, fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
23748 +
23749 + self._reg_id = self.scheduler.io_add_watch(in_pr,
23750 self.scheduler.IO_IN, self._output_handler)
23751 self._registered = True
23752 self._proc = SpawnProcess(
23753 args=[portage._python_interpreter,
23754 os.path.join(portage._bin_path, 'lock-helper.py'), self.path],
23755 env=dict(os.environ, PORTAGE_PYM_PATH=portage._pym_path),
23756 - fd_pipes={0:out_pr, 1:in_pw, 2:sys.stderr.fileno()},
23757 + fd_pipes={0:out_pr, 1:in_pw, 2:sys.__stderr__.fileno()},
23758 scheduler=self.scheduler)
23759 self._proc.addExitListener(self._proc_exit)
23760 self._proc.start()
23761 @@ -273,7 +259,7 @@ class _LockProcess(AbstractPollTask):
23762 self._registered = False
23763
23764 if self._reg_id is not None:
23765 - self.scheduler.unregister(self._reg_id)
23766 + self.scheduler.source_remove(self._reg_id)
23767 self._reg_id = None
23768
23769 if self._files is not None:
23770
23771 diff --git a/pym/_emerge/AsynchronousTask.py b/pym/_emerge/AsynchronousTask.py
23772 index 7a193ce..da58261 100644
23773 --- a/pym/_emerge/AsynchronousTask.py
23774 +++ b/pym/_emerge/AsynchronousTask.py
23775 @@ -60,6 +60,20 @@ class AsynchronousTask(SlotObject):
23776 def _wait(self):
23777 return self.returncode
23778
23779 + def _async_wait(self):
23780 + """
23781 + For cases where _start exits synchronously, this method is a
23782 + convenient way to trigger an asynchronous call to self.wait()
23783 + (in order to notify exit listeners), avoiding excessive event
23784 + loop recursion (or stack overflow) that synchronous calling of
23785 + exit listeners can cause. This method is thread-safe.
23786 + """
23787 + self.scheduler.idle_add(self._async_wait_cb)
23788 +
23789 + def _async_wait_cb(self):
23790 + self.wait()
23791 + return False
23792 +
23793 def cancel(self):
23794 """
23795 Cancel the task, but do not wait for exit status. If asynchronous exit
23796
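The _async_wait() helper added above exists because a task whose _start() completes synchronously must not call self.wait() directly: wait() notifies exit listeners, which may start more tasks and re-enter the event loop, and the docstring notes this can recurse the loop or overflow the stack. Scheduling the call with idle_add defers it to the next loop iteration. A toy illustration of the pattern (ToyLoop and ToyTask are simplified stand-ins, not the Portage classes):

    class ToyLoop:
        def __init__(self):
            self._idle = []

        def idle_add(self, cb):
            # run cb on the next iteration; keep it only while it returns True
            self._idle.append(cb)

        def iterate(self):
            self._idle = [cb for cb in self._idle if cb()]

    class ToyTask:
        def __init__(self, loop):
            self.loop = loop
            self.returncode = None

        def start(self):
            self.returncode = 0                        # work finished synchronously
            self.loop.idle_add(self._async_wait_cb)    # defer listener callbacks

        def _async_wait_cb(self):
            print("listeners notified, returncode =", self.returncode)
            return False                               # one-shot callback

    loop = ToyLoop()
    ToyTask(loop).start()
    loop.iterate()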
23797 diff --git a/pym/_emerge/Binpkg.py b/pym/_emerge/Binpkg.py
23798 index ea8a1ad..a740efd 100644
23799 --- a/pym/_emerge/Binpkg.py
23800 +++ b/pym/_emerge/Binpkg.py
23801 @@ -1,4 +1,4 @@
23802 -# Copyright 1999-2012 Gentoo Foundation
23803 +# Copyright 1999-2013 Gentoo Foundation
23804 # Distributed under the terms of the GNU General Public License v2
23805
23806 from _emerge.EbuildPhase import EbuildPhase
23807 @@ -298,6 +298,7 @@ class Binpkg(CompositeTask):
23808
23809 extractor = BinpkgExtractorAsync(background=self.background,
23810 env=self.settings.environ(),
23811 + features=self.settings.features,
23812 image_dir=self._image_dir,
23813 pkg=self.pkg, pkg_path=self._pkg_path,
23814 logfile=self.settings.get("PORTAGE_LOG_FILE"),
23815 @@ -328,11 +329,13 @@ class Binpkg(CompositeTask):
23816 self.wait()
23817 return
23818
23819 + env = self.settings.environ()
23820 + env["PYTHONPATH"] = self.settings["PORTAGE_PYTHONPATH"]
23821 chpathtool = SpawnProcess(
23822 args=[portage._python_interpreter,
23823 os.path.join(self.settings["PORTAGE_BIN_PATH"], "chpathtool.py"),
23824 self.settings["D"], self._build_prefix, self.settings["EPREFIX"]],
23825 - background=self.background, env=self.settings.environ(),
23826 + background=self.background, env=env,
23827 scheduler=self.scheduler,
23828 logfile=self.settings.get('PORTAGE_LOG_FILE'))
23829 self._writemsg_level(">>> Adjusting Prefix to %s\n" % self.settings["EPREFIX"])
23830
23831 diff --git a/pym/_emerge/BinpkgExtractorAsync.py b/pym/_emerge/BinpkgExtractorAsync.py
23832 index f25cbf9..be74c2f 100644
23833 --- a/pym/_emerge/BinpkgExtractorAsync.py
23834 +++ b/pym/_emerge/BinpkgExtractorAsync.py
23835 @@ -1,23 +1,31 @@
23836 -# Copyright 1999-2011 Gentoo Foundation
23837 +# Copyright 1999-2013 Gentoo Foundation
23838 # Distributed under the terms of the GNU General Public License v2
23839
23840 from _emerge.SpawnProcess import SpawnProcess
23841 import portage
23842 import signal
23843 +import subprocess
23844
23845 class BinpkgExtractorAsync(SpawnProcess):
23846
23847 - __slots__ = ("image_dir", "pkg", "pkg_path")
23848 + __slots__ = ("features", "image_dir", "pkg", "pkg_path")
23849
23850 _shell_binary = portage.const.BASH_BINARY
23851
23852 def _start(self):
23853 + tar_options = ""
23854 + if "xattr" in self.features:
23855 + process = subprocess.Popen(["tar", "--help"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
23856 + output = process.communicate()[0]
23857 + if b"--xattrs" in output:
23858 + tar_options = "--xattrs"
23859 +
23860 # Add -q to bzip2 opts, in order to avoid "trailing garbage after
23861 # EOF ignored" warning messages due to xpak trailer.
23862 # SIGPIPE handling (128 + SIGPIPE) should be compatible with
23863 # assert_sigpipe_ok() that's used by the ebuild unpack() helper.
23864 self.args = [self._shell_binary, "-c",
23865 - ("${PORTAGE_BUNZIP2_COMMAND:-${PORTAGE_BZIP2_COMMAND} -d} -cq -- %s | tar -xp -C %s -f - ; " + \
23866 + ("${PORTAGE_BUNZIP2_COMMAND:-${PORTAGE_BZIP2_COMMAND} -d} -cq -- %s | tar -xp %s -C %s -f - ; " + \
23867 "p=(${PIPESTATUS[@]}) ; " + \
23868 "if [[ ${p[0]} != 0 && ${p[0]} != %d ]] ; then " % (128 + signal.SIGPIPE) + \
23869 "echo bzip2 failed with status ${p[0]} ; exit ${p[0]} ; fi ; " + \
23870 @@ -25,6 +33,7 @@ class BinpkgExtractorAsync(SpawnProcess):
23871 "echo tar failed with status ${p[1]} ; exit ${p[1]} ; fi ; " + \
23872 "exit 0 ;") % \
23873 (portage._shell_quote(self.pkg_path),
23874 + tar_options,
23875 portage._shell_quote(self.image_dir))]
23876
23877 SpawnProcess._start(self)
23878
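BinpkgExtractorAsync now probes tar once by scanning its --help output and only passes --xattrs when the option is advertised. The same capability check, isolated as a helper (the function name is made up):

    import subprocess

    def tar_supports_xattrs(tar="tar"):
        """Return True if the installed tar advertises --xattrs in --help."""
        proc = subprocess.Popen([tar, "--help"],
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        output = proc.communicate()[0]
        return b"--xattrs" in output

    # tar_options = "--xattrs" if tar_supports_xattrs() else ""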
23879 diff --git a/pym/_emerge/BinpkgFetcher.py b/pym/_emerge/BinpkgFetcher.py
23880 index f415e2e..543881e 100644
23881 --- a/pym/_emerge/BinpkgFetcher.py
23882 +++ b/pym/_emerge/BinpkgFetcher.py
23883 @@ -1,4 +1,4 @@
23884 -# Copyright 1999-2011 Gentoo Foundation
23885 +# Copyright 1999-2013 Gentoo Foundation
23886 # Distributed under the terms of the GNU General Public License v2
23887
23888 from _emerge.AsynchronousLock import AsynchronousLock
23889 @@ -63,7 +63,7 @@ class BinpkgFetcher(SpawnProcess):
23890 if pretend:
23891 portage.writemsg_stdout("\n%s\n" % uri, noiselevel=-1)
23892 self._set_returncode((self.pid, os.EX_OK << 8))
23893 - self.wait()
23894 + self._async_wait()
23895 return
23896
23897 protocol = urllib_parse_urlparse(uri)[0]
23898 @@ -80,6 +80,12 @@ class BinpkgFetcher(SpawnProcess):
23899 "FILE" : os.path.basename(pkg_path)
23900 }
23901
23902 + for k in ("PORTAGE_SSH_OPTS",):
23903 + try:
23904 + fcmd_vars[k] = settings[k]
23905 + except KeyError:
23906 + pass
23907 +
23908 fetch_env = dict(settings.items())
23909 fetch_args = [portage.util.varexpand(x, mydict=fcmd_vars) \
23910 for x in portage.util.shlex_split(fcmd)]
23911 @@ -91,9 +97,9 @@ class BinpkgFetcher(SpawnProcess):
23912 # Redirect all output to stdout since some fetchers like
23913 # wget pollute stderr (if portage detects a problem then it
23914 # can send it's own message to stderr).
23915 - fd_pipes.setdefault(0, sys.stdin.fileno())
23916 - fd_pipes.setdefault(1, sys.stdout.fileno())
23917 - fd_pipes.setdefault(2, sys.stdout.fileno())
23918 + fd_pipes.setdefault(0, portage._get_stdin().fileno())
23919 + fd_pipes.setdefault(1, sys.__stdout__.fileno())
23920 + fd_pipes.setdefault(2, sys.__stdout__.fileno())
23921
23922 self.args = fetch_args
23923 self.env = fetch_env
23924 @@ -104,7 +110,7 @@ class BinpkgFetcher(SpawnProcess):
23925 def _pipe(self, fd_pipes):
23926 """When appropriate, use a pty so that fetcher progress bars,
23927 like wget has, will work properly."""
23928 - if self.background or not sys.stdout.isatty():
23929 + if self.background or not sys.__stdout__.isatty():
23930 # When the output only goes to a log file,
23931 # there's no point in creating a pty.
23932 return os.pipe()
23933
23934 diff --git a/pym/_emerge/BinpkgVerifier.py b/pym/_emerge/BinpkgVerifier.py
23935 index 0052967..2c69792 100644
23936 --- a/pym/_emerge/BinpkgVerifier.py
23937 +++ b/pym/_emerge/BinpkgVerifier.py
23938 @@ -1,75 +1,120 @@
23939 -# Copyright 1999-2011 Gentoo Foundation
23940 +# Copyright 1999-2013 Gentoo Foundation
23941 # Distributed under the terms of the GNU General Public License v2
23942
23943 -from _emerge.AsynchronousTask import AsynchronousTask
23944 -from portage.util import writemsg
23945 +import errno
23946 import io
23947 import sys
23948 +
23949 +from _emerge.CompositeTask import CompositeTask
23950 import portage
23951 from portage import os
23952 +from portage.checksum import (_apply_hash_filter,
23953 + _filter_unaccelarated_hashes, _hash_filter)
23954 +from portage.output import EOutput
23955 +from portage.util._async.FileDigester import FileDigester
23956 from portage.package.ebuild.fetch import _checksum_failure_temp_file
23957
23958 -class BinpkgVerifier(AsynchronousTask):
23959 - __slots__ = ("logfile", "pkg", "scheduler")
23960 +class BinpkgVerifier(CompositeTask):
23961 + __slots__ = ("logfile", "pkg", "_digests", "_pkg_path")
23962
23963 def _start(self):
23964 - """
23965 - Note: Unlike a normal AsynchronousTask.start() method,
23966 - this one does all work is synchronously. The returncode
23967 - attribute will be set before it returns.
23968 - """
23969 -
23970 - pkg = self.pkg
23971 - root_config = pkg.root_config
23972 - bintree = root_config.trees["bintree"]
23973 - rval = os.EX_OK
23974 +
23975 + bintree = self.pkg.root_config.trees["bintree"]
23976 + digests = bintree._get_digests(self.pkg)
23977 + if "size" not in digests:
23978 + self.returncode = os.EX_OK
23979 + self._async_wait()
23980 + return
23981 +
23982 + digests = _filter_unaccelarated_hashes(digests)
23983 + hash_filter = _hash_filter(
23984 + bintree.settings.get("PORTAGE_CHECKSUM_FILTER", ""))
23985 + if not hash_filter.transparent:
23986 + digests = _apply_hash_filter(digests, hash_filter)
23987 +
23988 + self._digests = digests
23989 + self._pkg_path = bintree.getname(self.pkg.cpv)
23990 +
23991 + try:
23992 + size = os.stat(self._pkg_path).st_size
23993 + except OSError as e:
23994 + if e.errno not in (errno.ENOENT, errno.ESTALE):
23995 + raise
23996 + self.scheduler.output(("!!! Fetching Binary failed "
23997 + "for '%s'\n") % self.pkg.cpv, log_path=self.logfile,
23998 + background=self.background)
23999 + self.returncode = 1
24000 + self._async_wait()
24001 + return
24002 + else:
24003 + if size != digests["size"]:
24004 + self._digest_exception("size", size, digests["size"])
24005 + self.returncode = 1
24006 + self._async_wait()
24007 + return
24008 +
24009 + self._start_task(FileDigester(file_path=self._pkg_path,
24010 + hash_names=(k for k in digests if k != "size"),
24011 + background=self.background, logfile=self.logfile,
24012 + scheduler=self.scheduler),
24013 + self._digester_exit)
24014 +
24015 + def _digester_exit(self, digester):
24016 +
24017 + if self._default_exit(digester) != os.EX_OK:
24018 + self.wait()
24019 + return
24020 +
24021 + for hash_name in digester.hash_names:
24022 + if digester.digests[hash_name] != self._digests[hash_name]:
24023 + self._digest_exception(hash_name,
24024 + digester.digests[hash_name], self._digests[hash_name])
24025 + self.returncode = 1
24026 + self.wait()
24027 + return
24028 +
24029 + if self.pkg.root_config.settings.get("PORTAGE_QUIET") != "1":
24030 + self._display_success()
24031 +
24032 + self.returncode = os.EX_OK
24033 + self.wait()
24034 +
24035 + def _display_success(self):
24036 stdout_orig = sys.stdout
24037 stderr_orig = sys.stderr
24038 global_havecolor = portage.output.havecolor
24039 out = io.StringIO()
24040 - file_exists = True
24041 try:
24042 sys.stdout = out
24043 sys.stderr = out
24044 if portage.output.havecolor:
24045 portage.output.havecolor = not self.background
24046 - try:
24047 - bintree.digestCheck(pkg)
24048 - except portage.exception.FileNotFound:
24049 - writemsg("!!! Fetching Binary failed " + \
24050 - "for '%s'\n" % pkg.cpv, noiselevel=-1)
24051 - rval = 1
24052 - file_exists = False
24053 - except portage.exception.DigestException as e:
24054 - writemsg("\n!!! Digest verification failed:\n",
24055 - noiselevel=-1)
24056 - writemsg("!!! %s\n" % e.value[0],
24057 - noiselevel=-1)
24058 - writemsg("!!! Reason: %s\n" % e.value[1],
24059 - noiselevel=-1)
24060 - writemsg("!!! Got: %s\n" % e.value[2],
24061 - noiselevel=-1)
24062 - writemsg("!!! Expected: %s\n" % e.value[3],
24063 - noiselevel=-1)
24064 - rval = 1
24065 - if rval == os.EX_OK:
24066 - pass
24067 - elif file_exists:
24068 - pkg_path = bintree.getname(pkg.cpv)
24069 - head, tail = os.path.split(pkg_path)
24070 - temp_filename = _checksum_failure_temp_file(head, tail)
24071 - writemsg("File renamed to '%s'\n" % (temp_filename,),
24072 - noiselevel=-1)
24073 +
24074 + eout = EOutput()
24075 + eout.ebegin("%s %s ;-)" % (os.path.basename(self._pkg_path),
24076 + " ".join(sorted(self._digests))))
24077 + eout.eend(0)
24078 +
24079 finally:
24080 sys.stdout = stdout_orig
24081 sys.stderr = stderr_orig
24082 portage.output.havecolor = global_havecolor
24083
24084 - msg = out.getvalue()
24085 - if msg:
24086 - self.scheduler.output(msg, log_path=self.logfile,
24087 - background=self.background)
24088 + self.scheduler.output(out.getvalue(), log_path=self.logfile,
24089 + background=self.background)
24090
24091 - self.returncode = rval
24092 - self.wait()
24093 + def _digest_exception(self, name, value, expected):
24094 +
24095 + head, tail = os.path.split(self._pkg_path)
24096 + temp_filename = _checksum_failure_temp_file(head, tail)
24097
24098 + self.scheduler.output((
24099 + "\n!!! Digest verification failed:\n"
24100 + "!!! %s\n"
24101 + "!!! Reason: Failed on %s verification\n"
24102 + "!!! Got: %s\n"
24103 + "!!! Expected: %s\n"
24104 + "File renamed to '%s'\n") %
24105 + (self._pkg_path, name, value, expected, temp_filename),
24106 + log_path=self.logfile,
24107 + background=self.background)
24108
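The rewritten BinpkgVerifier checks the recorded size with os.stat() first and only then hands the remaining hashes to an asynchronous FileDigester, reporting mismatches through the scheduler and renaming the bad file. A compressed, synchronous sketch of the same decision order (illustrative only: it reads the whole file into memory and assumes hash names that hashlib knows, e.g. sha256, whereas Portage's checksum module also handles filtered and non-hashlib digests):

    import hashlib, os

    def verify_binpkg(pkg_path, digests):
        """Return True if pkg_path matches the recorded size and hashes."""
        if "size" in digests and os.stat(pkg_path).st_size != digests["size"]:
            return False
        with open(pkg_path, "rb") as f:
            data = f.read()
        for name, expected in digests.items():
            if name == "size":
                continue
            if hashlib.new(name.lower(), data).hexdigest() != expected:
                return False
        return True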
24109 diff --git a/pym/_emerge/BlockerCache.py b/pym/_emerge/BlockerCache.py
24110 index fce81f8..53342d6 100644
24111 --- a/pym/_emerge/BlockerCache.py
24112 +++ b/pym/_emerge/BlockerCache.py
24113 @@ -1,4 +1,4 @@
24114 -# Copyright 1999-2012 Gentoo Foundation
24115 +# Copyright 1999-2013 Gentoo Foundation
24116 # Distributed under the terms of the GNU General Public License v2
24117
24118 import errno
24119 @@ -62,7 +62,9 @@ class BlockerCache(portage.cache.mappings.MutableMapping):
24120 self._cache_data = mypickle.load()
24121 f.close()
24122 del f
24123 - except (AttributeError, EOFError, EnvironmentError, ValueError, pickle.UnpicklingError) as e:
24124 + except (SystemExit, KeyboardInterrupt):
24125 + raise
24126 + except Exception as e:
24127 if isinstance(e, EnvironmentError) and \
24128 getattr(e, 'errno', None) in (errno.ENOENT, errno.EACCES):
24129 pass
24130 @@ -126,9 +128,9 @@ class BlockerCache(portage.cache.mappings.MutableMapping):
24131 self._modified.clear()
24132
24133 def flush(self):
24134 - """If the current user has permission and the internal blocker cache
24135 + """If the current user has permission and the internal blocker cache has
24136 been updated, save it to disk and mark it unmodified. This is called
24137 - by emerge after it has proccessed blockers for all installed packages.
24138 + by emerge after it has processed blockers for all installed packages.
24139 Currently, the cache is only written if the user has superuser
24140 privileges (since that's required to obtain a lock), but all users
24141 have read access and benefit from faster blocker lookups (as long as
24142
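The BlockerCache change broadens the guard around unpickling: rather than enumerating every exception a stale or corrupt cache might raise, it catches Exception while explicitly re-raising SystemExit and KeyboardInterrupt. The same defensive pattern in isolation (the path in the usage comment is illustrative):

    import errno
    import pickle

    def load_cache(path):
        """Return the unpickled cache, or None if it is missing or corrupt."""
        try:
            with open(path, "rb") as f:
                return pickle.load(f)
        except (SystemExit, KeyboardInterrupt):
            raise                  # never swallow shutdown or ^C
        except Exception as e:
            if isinstance(e, EnvironmentError) and \
                getattr(e, "errno", None) in (errno.ENOENT, errno.EACCES):
                pass               # a missing or unreadable cache is normal
            return None            # anything else: treat the cache as invalid

    # cache = load_cache("/var/cache/edb/example_blocker_cache") or {}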
24143 diff --git a/pym/_emerge/BlockerDB.py b/pym/_emerge/BlockerDB.py
24144 index 459affd..8bb8f5f 100644
24145 --- a/pym/_emerge/BlockerDB.py
24146 +++ b/pym/_emerge/BlockerDB.py
24147 @@ -1,4 +1,4 @@
24148 -# Copyright 1999-2011 Gentoo Foundation
24149 +# Copyright 1999-2012 Gentoo Foundation
24150 # Distributed under the terms of the GNU General Public License v2
24151
24152 import sys
24153 @@ -9,6 +9,7 @@ from portage import digraph
24154 from portage._sets.base import InternalPackageSet
24155
24156 from _emerge.BlockerCache import BlockerCache
24157 +from _emerge.Package import Package
24158 from _emerge.show_invalid_depstring_notice import show_invalid_depstring_notice
24159
24160 if sys.hexversion >= 0x3000000:
24161 @@ -38,7 +39,7 @@ class BlockerDB(object):
24162 """
24163 blocker_cache = BlockerCache(None,
24164 self._vartree.dbapi)
24165 - dep_keys = ["RDEPEND", "PDEPEND"]
24166 + dep_keys = Package._runtime_keys
24167 settings = self._vartree.settings
24168 stale_cache = set(blocker_cache)
24169 fake_vartree = self._fake_vartree
24170 @@ -50,7 +51,7 @@ class BlockerDB(object):
24171 stale_cache.discard(inst_pkg.cpv)
24172 cached_blockers = blocker_cache.get(inst_pkg.cpv)
24173 if cached_blockers is not None and \
24174 - cached_blockers.counter != long(inst_pkg.metadata["COUNTER"]):
24175 + cached_blockers.counter != inst_pkg.counter:
24176 cached_blockers = None
24177 if cached_blockers is not None:
24178 blocker_atoms = cached_blockers.atoms
24179 @@ -71,9 +72,8 @@ class BlockerDB(object):
24180 blocker_atoms = [atom for atom in atoms \
24181 if atom.startswith("!")]
24182 blocker_atoms.sort()
24183 - counter = long(inst_pkg.metadata["COUNTER"])
24184 blocker_cache[inst_pkg.cpv] = \
24185 - blocker_cache.BlockerData(counter, blocker_atoms)
24186 + blocker_cache.BlockerData(inst_pkg.counter, blocker_atoms)
24187 for cpv in stale_cache:
24188 del blocker_cache[cpv]
24189 blocker_cache.flush()
24190 @@ -92,7 +92,7 @@ class BlockerDB(object):
24191 blocking_pkgs.update(blocker_parents.parent_nodes(atom))
24192
24193 # Check for blockers in the other direction.
24194 - depstr = " ".join(new_pkg.metadata[k] for k in dep_keys)
24195 + depstr = " ".join(new_pkg._metadata[k] for k in dep_keys)
24196 success, atoms = portage.dep_check(depstr,
24197 vardb, settings, myuse=new_pkg.use.enabled,
24198 trees=dep_check_trees, myroot=new_pkg.root)
24199
24200 diff --git a/pym/_emerge/CompositeTask.py b/pym/_emerge/CompositeTask.py
24201 index 3e43478..40cf859 100644
24202 --- a/pym/_emerge/CompositeTask.py
24203 +++ b/pym/_emerge/CompositeTask.py
24204 @@ -142,6 +142,10 @@ class CompositeTask(AsynchronousTask):
24205 a task.
24206
24207 """
24208 + try:
24209 + task.scheduler = self.scheduler
24210 + except AttributeError:
24211 + pass
24212 task.addExitListener(exit_handler)
24213 self._current_task = task
24214 task.start()
24215
24216 diff --git a/pym/_emerge/DepPriority.py b/pym/_emerge/DepPriority.py
24217 index 3c2256a..34fdb48 100644
24218 --- a/pym/_emerge/DepPriority.py
24219 +++ b/pym/_emerge/DepPriority.py
24220 @@ -1,4 +1,4 @@
24221 -# Copyright 1999-2011 Gentoo Foundation
24222 +# Copyright 1999-2013 Gentoo Foundation
24223 # Distributed under the terms of the GNU General Public License v2
24224
24225 from _emerge.AbstractDepPriority import AbstractDepPriority
24226 @@ -16,31 +16,38 @@ class DepPriority(AbstractDepPriority):
24227
24228 Attributes Hardness
24229
24230 - buildtime 0
24231 - runtime -1
24232 - runtime_post -2
24233 - optional -3
24234 - (none of the above) -4
24235 + buildtime_slot_op 0
24236 + buildtime -1
24237 + runtime -2
24238 + runtime_post -3
24239 + optional -4
24240 + (none of the above) -5
24241
24242 """
24243
24244 if self.optional:
24245 - return -3
24246 - if self.buildtime:
24247 + return -4
24248 + if self.buildtime_slot_op:
24249 return 0
24250 - if self.runtime:
24251 + if self.buildtime:
24252 return -1
24253 - if self.runtime_post:
24254 + if self.runtime:
24255 return -2
24256 - return -4
24257 + if self.runtime_post:
24258 + return -3
24259 + return -5
24260
24261 def __str__(self):
24262 if self.ignored:
24263 return "ignored"
24264 if self.optional:
24265 return "optional"
24266 + if self.buildtime_slot_op:
24267 + return "buildtime_slot_op"
24268 if self.buildtime:
24269 return "buildtime"
24270 + if self.runtime_slot_op:
24271 + return "runtime_slot_op"
24272 if self.runtime:
24273 return "runtime"
24274 if self.runtime_post:
24275
24276 diff --git a/pym/_emerge/DepPrioritySatisfiedRange.py b/pym/_emerge/DepPrioritySatisfiedRange.py
24277 index edb29df..391f540 100644
24278 --- a/pym/_emerge/DepPrioritySatisfiedRange.py
24279 +++ b/pym/_emerge/DepPrioritySatisfiedRange.py
24280 @@ -1,4 +1,4 @@
24281 -# Copyright 1999-2011 Gentoo Foundation
24282 +# Copyright 1999-2013 Gentoo Foundation
24283 # Distributed under the terms of the GNU General Public License v2
24284
24285 from _emerge.DepPriority import DepPriority
24286 @@ -7,17 +7,18 @@ class DepPrioritySatisfiedRange(object):
24287 DepPriority Index Category
24288
24289 not satisfied and buildtime HARD
24290 - not satisfied and runtime 6 MEDIUM
24291 - not satisfied and runtime_post 5 MEDIUM_SOFT
24292 + not satisfied and runtime 7 MEDIUM
24293 + not satisfied and runtime_post 6 MEDIUM_SOFT
24294 + satisfied and buildtime_slot_op 5 SOFT
24295 satisfied and buildtime 4 SOFT
24296 satisfied and runtime 3 SOFT
24297 satisfied and runtime_post 2 SOFT
24298 optional 1 SOFT
24299 (none of the above) 0 NONE
24300 """
24301 - MEDIUM = 6
24302 - MEDIUM_SOFT = 5
24303 - SOFT = 4
24304 + MEDIUM = 7
24305 + MEDIUM_SOFT = 6
24306 + SOFT = 5
24307 NONE = 0
24308
24309 @classmethod
24310 @@ -50,6 +51,16 @@ class DepPrioritySatisfiedRange(object):
24311 def _ignore_satisfied_buildtime(cls, priority):
24312 if priority.__class__ is not DepPriority:
24313 return False
24314 + if priority.optional:
24315 + return True
24316 + if priority.buildtime_slot_op:
24317 + return False
24318 + return bool(priority.satisfied)
24319 +
24320 + @classmethod
24321 + def _ignore_satisfied_buildtime_slot_op(cls, priority):
24322 + if priority.__class__ is not DepPriority:
24323 + return False
24324 return bool(priority.optional or \
24325 priority.satisfied)
24326
24327 @@ -80,6 +91,7 @@ DepPrioritySatisfiedRange.ignore_priority = (
24328 DepPrioritySatisfiedRange._ignore_satisfied_runtime_post,
24329 DepPrioritySatisfiedRange._ignore_satisfied_runtime,
24330 DepPrioritySatisfiedRange._ignore_satisfied_buildtime,
24331 + DepPrioritySatisfiedRange._ignore_satisfied_buildtime_slot_op,
24332 DepPrioritySatisfiedRange._ignore_runtime_post,
24333 DepPrioritySatisfiedRange._ignore_runtime
24334 )
24335
24336 diff --git a/pym/_emerge/DependencyArg.py b/pym/_emerge/DependencyArg.py
24337 index 80134c8..29a0072 100644
24338 --- a/pym/_emerge/DependencyArg.py
24339 +++ b/pym/_emerge/DependencyArg.py
24340 @@ -1,9 +1,11 @@
24341 -# Copyright 1999-2012 Gentoo Foundation
24342 +# Copyright 1999-2013 Gentoo Foundation
24343 # Distributed under the terms of the GNU General Public License v2
24344
24345 +from __future__ import unicode_literals
24346 +
24347 import sys
24348
24349 -from portage import _encodings, _unicode_encode, _unicode_decode
24350 +from portage import _encodings, _unicode_encode
24351
24352 class DependencyArg(object):
24353
24354 @@ -31,10 +33,10 @@ class DependencyArg(object):
24355 return hash((self.arg, self.root_config.root))
24356
24357 def __str__(self):
24358 - # Force unicode format string for python-2.x safety,
24359 + # Use unicode_literals format string for python-2.x safety,
24360 # ensuring that self.arg.__unicode__() is used
24361 # when necessary.
24362 - return _unicode_decode("%s") % (self.arg,)
24363 + return "%s" % (self.arg,)
24364
24365 if sys.hexversion < 0x3000000:
24366
24367
24368 diff --git a/pym/_emerge/EbuildBuild.py b/pym/_emerge/EbuildBuild.py
24369 index 784a3e2..e13b1cf 100644
24370 --- a/pym/_emerge/EbuildBuild.py
24371 +++ b/pym/_emerge/EbuildBuild.py
24372 @@ -1,4 +1,4 @@
24373 -# Copyright 1999-2012 Gentoo Foundation
24374 +# Copyright 1999-2013 Gentoo Foundation
24375 # Distributed under the terms of the GNU General Public License v2
24376
24377 from _emerge.EbuildExecuter import EbuildExecuter
24378 @@ -10,11 +10,14 @@ from _emerge.EbuildMerge import EbuildMerge
24379 from _emerge.EbuildFetchonly import EbuildFetchonly
24380 from _emerge.EbuildBuildDir import EbuildBuildDir
24381 from _emerge.MiscFunctionsProcess import MiscFunctionsProcess
24382 +from _emerge.TaskSequence import TaskSequence
24383 +
24384 from portage.util import writemsg
24385 import portage
24386 from portage import os
24387 from portage.output import colorize
24388 from portage.package.ebuild.digestcheck import digestcheck
24389 +from portage.package.ebuild.digestgen import digestgen
24390 from portage.package.ebuild.doebuild import _check_temp_dir
24391 from portage.package.ebuild._spawn_nofetch import spawn_nofetch
24392
24393 @@ -35,7 +38,7 @@ class EbuildBuild(CompositeTask):
24394 if rval != os.EX_OK:
24395 self.returncode = rval
24396 self._current_task = None
24397 - self.wait()
24398 + self._async_wait()
24399 return
24400
24401 root_config = pkg.root_config
24402 @@ -60,7 +63,7 @@ class EbuildBuild(CompositeTask):
24403 if not self._check_manifest():
24404 self.returncode = 1
24405 self._current_task = None
24406 - self.wait()
24407 + self._async_wait()
24408 return
24409
24410 prefetcher = self.prefetcher
24411 @@ -91,7 +94,8 @@ class EbuildBuild(CompositeTask):
24412 success = True
24413
24414 settings = self.settings
24415 - if 'strict' in settings.features:
24416 + if 'strict' in settings.features and \
24417 + 'digest' not in settings.features:
24418 settings['O'] = os.path.dirname(self._ebuild_path)
24419 quiet_setting = settings.get('PORTAGE_QUIET')
24420 settings['PORTAGE_QUIET'] = '1'
24421 @@ -160,6 +164,10 @@ class EbuildBuild(CompositeTask):
24422 if self.returncode != os.EX_OK:
24423 portdb = self.pkg.root_config.trees[self._tree].dbapi
24424 spawn_nofetch(portdb, self._ebuild_path, settings=self.settings)
24425 + elif 'digest' in self.settings.features:
24426 + if not digestgen(mysettings=self.settings,
24427 + myportdb=self.pkg.root_config.trees[self._tree].dbapi):
24428 + self.returncode = 1
24429 self.wait()
24430
24431 def _pre_clean_exit(self, pre_clean_phase):
24432 @@ -260,8 +268,8 @@ class EbuildBuild(CompositeTask):
24433 # to be displayed for problematic packages even though they do
24434 # not set RESTRICT=fetch (bug #336499).
24435
24436 - if 'fetch' not in self.pkg.metadata.restrict and \
24437 - 'nofetch' not in self.pkg.metadata.defined_phases:
24438 + if 'fetch' not in self.pkg.restrict and \
24439 + 'nofetch' not in self.pkg.defined_phases:
24440 self._unlock_builddir()
24441 self.wait()
24442 return
24443 @@ -300,10 +308,20 @@ class EbuildBuild(CompositeTask):
24444 self.scheduler.output(msg,
24445 log_path=self.settings.get("PORTAGE_LOG_FILE"))
24446
24447 - packager = EbuildBinpkg(background=self.background, pkg=self.pkg,
24448 - scheduler=self.scheduler, settings=self.settings)
24449 + binpkg_tasks = TaskSequence()
24450 + requested_binpkg_formats = self.settings.get("PORTAGE_BINPKG_FORMAT", "tar").split()
24451 + for pkg_fmt in portage.const.SUPPORTED_BINPKG_FORMATS:
24452 + if pkg_fmt in requested_binpkg_formats:
24453 + if pkg_fmt == "rpm":
24454 + binpkg_tasks.add(EbuildPhase(background=self.background,
24455 + phase="rpm", scheduler=self.scheduler,
24456 + settings=self.settings))
24457 + else:
24458 + binpkg_tasks.add(EbuildBinpkg(background=self.background,
24459 + pkg=self.pkg, scheduler=self.scheduler,
24460 + settings=self.settings))
24461
24462 - self._start_task(packager, self._buildpkg_exit)
24463 + self._start_task(binpkg_tasks, self._buildpkg_exit)
24464
24465 def _buildpkg_exit(self, packager):
24466 """
24467
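The packaging hunk above replaces the single EbuildBinpkg task with a sequence of tasks, one per requested binary-package format. A standalone sketch of that selection loop (SUPPORTED_FORMATS, pack_tar and pack_rpm are placeholders here, not Portage APIs):

    SUPPORTED_FORMATS = ("tar", "rpm")

    def pack_tar(pkg):
        print("packaging %s as tbz2" % pkg)

    def pack_rpm(pkg):
        print("packaging %s as rpm" % pkg)

    def binpkg_tasks(pkg, requested="tar"):
        # Queue one packaging step per supported format that was requested,
        # mirroring the PORTAGE_BINPKG_FORMAT loop in the hunk above.
        tasks = []
        for fmt in SUPPORTED_FORMATS:
            if fmt in requested.split():
                tasks.append(pack_rpm if fmt == "rpm" else pack_tar)
        return tasks

    for task in binpkg_tasks("app-misc/foo-1.0", requested="tar rpm"):
        task("app-misc/foo-1.0")
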
24468 diff --git a/pym/_emerge/EbuildBuildDir.py b/pym/_emerge/EbuildBuildDir.py
24469 index 9773bd7..58905c2 100644
24470 --- a/pym/_emerge/EbuildBuildDir.py
24471 +++ b/pym/_emerge/EbuildBuildDir.py
24472 @@ -7,7 +7,6 @@ import portage
24473 from portage import os
24474 from portage.exception import PortageException
24475 from portage.util.SlotObject import SlotObject
24476 -import errno
24477
24478 class EbuildBuildDir(SlotObject):
24479
24480 @@ -60,7 +59,7 @@ class EbuildBuildDir(SlotObject):
24481 builddir_lock.wait()
24482 self._assert_lock(builddir_lock)
24483 self._lock_obj = builddir_lock
24484 - self.settings['PORTAGE_BUILDIR_LOCKED'] = '1'
24485 + self.settings['PORTAGE_BUILDDIR_LOCKED'] = '1'
24486 finally:
24487 self.locked = self._lock_obj is not None
24488 catdir_lock.unlock()
24489 @@ -92,16 +91,14 @@ class EbuildBuildDir(SlotObject):
24490 self._lock_obj.unlock()
24491 self._lock_obj = None
24492 self.locked = False
24493 - self.settings.pop('PORTAGE_BUILDIR_LOCKED', None)
24494 + self.settings.pop('PORTAGE_BUILDDIR_LOCKED', None)
24495 catdir_lock = AsynchronousLock(path=self._catdir, scheduler=self.scheduler)
24496 catdir_lock.start()
24497 if catdir_lock.wait() == os.EX_OK:
24498 try:
24499 os.rmdir(self._catdir)
24500 - except OSError as e:
24501 - if e.errno not in (errno.ENOENT,
24502 - errno.ENOTEMPTY, errno.EEXIST, errno.EPERM):
24503 - raise
24504 + except OSError:
24505 + pass
24506 finally:
24507 catdir_lock.unlock()
24508
24509
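Besides fixing the PORTAGE_BUILDDIR_LOCKED spelling, the hunk above collapses the old errno whitelist into a bare OSError handler, since the category directory is only removed opportunistically. A minimal standalone sketch of that cleanup:

    import os

    def remove_catdir(path):
        # Another build directory may still exist under this category, or
        # the directory may already be gone; neither case is an error here.
        try:
            os.rmdir(path)
        except OSError:
            pass
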
24510 diff --git a/pym/_emerge/EbuildExecuter.py b/pym/_emerge/EbuildExecuter.py
24511 index fd663a4..5587d4e 100644
24512 --- a/pym/_emerge/EbuildExecuter.py
24513 +++ b/pym/_emerge/EbuildExecuter.py
24514 @@ -16,16 +16,7 @@ class EbuildExecuter(CompositeTask):
24515
24516 _phases = ("prepare", "configure", "compile", "test", "install")
24517
24518 - _live_eclasses = frozenset([
24519 - "bzr",
24520 - "cvs",
24521 - "darcs",
24522 - "git",
24523 - "git-2",
24524 - "mercurial",
24525 - "subversion",
24526 - "tla",
24527 - ])
24528 + _live_eclasses = portage.const.LIVE_ECLASSES
24529
24530 def _start(self):
24531 pkg = self.pkg
24532 @@ -83,7 +74,7 @@ class EbuildExecuter(CompositeTask):
24533
24534 pkg = self.pkg
24535 phases = self._phases
24536 - eapi = pkg.metadata["EAPI"]
24537 + eapi = pkg.eapi
24538 if not eapi_has_src_prepare_and_src_configure(eapi):
24539 # skip src_prepare and src_configure
24540 phases = phases[2:]
24541
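The second hunk above still relies on the phase tuple being ordered, so unsupported phases can simply be dropped from the front. A standalone sketch, assuming only that src_prepare and src_configure first appeared in EAPI 2:

    PHASES = ("prepare", "configure", "compile", "test", "install")

    def phases_for_eapi(eapi):
        # EAPIs 0 and 1 predate src_prepare/src_configure, so skip them.
        if str(eapi) in ("0", "1"):
            return PHASES[2:]
        return PHASES

    # phases_for_eapi("1") -> ('compile', 'test', 'install')
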
24542 diff --git a/pym/_emerge/EbuildFetcher.py b/pym/_emerge/EbuildFetcher.py
24543 index c0a7fdd..d98d007 100644
24544 --- a/pym/_emerge/EbuildFetcher.py
24545 +++ b/pym/_emerge/EbuildFetcher.py
24546 @@ -1,23 +1,22 @@
24547 # Copyright 1999-2012 Gentoo Foundation
24548 # Distributed under the terms of the GNU General Public License v2
24549
24550 -import traceback
24551 -
24552 -from _emerge.SpawnProcess import SpawnProcess
24553 import copy
24554 import io
24555 -import signal
24556 import sys
24557 +
24558 import portage
24559 from portage import os
24560 from portage import _encodings
24561 from portage import _unicode_encode
24562 from portage import _unicode_decode
24563 +from portage.checksum import _hash_filter
24564 from portage.elog.messages import eerror
24565 from portage.package.ebuild.fetch import _check_distfile, fetch
24566 +from portage.util._async.ForkProcess import ForkProcess
24567 from portage.util._pty import _create_pty_or_pipe
24568
24569 -class EbuildFetcher(SpawnProcess):
24570 +class EbuildFetcher(ForkProcess):
24571
24572 __slots__ = ("config_pool", "ebuild_path", "fetchonly", "fetchall",
24573 "pkg", "prefetch") + \
24574 @@ -57,6 +56,9 @@ class EbuildFetcher(SpawnProcess):
24575 if st.st_size != expected_size:
24576 return False
24577
24578 + hash_filter = _hash_filter(settings.get("PORTAGE_CHECKSUM_FILTER", ""))
24579 + if hash_filter.transparent:
24580 + hash_filter = None
24581 stdout_orig = sys.stdout
24582 stderr_orig = sys.stderr
24583 global_havecolor = portage.output.havecolor
24584 @@ -78,7 +80,7 @@ class EbuildFetcher(SpawnProcess):
24585 break
24586 continue
24587 ok, st = _check_distfile(os.path.join(distdir, filename),
24588 - mydigests, eout, show_errors=False)
24589 + mydigests, eout, show_errors=False, hash_filter=hash_filter)
24590 if not ok:
24591 success = False
24592 break
24593 @@ -115,13 +117,13 @@ class EbuildFetcher(SpawnProcess):
24594 msg_lines.append(msg)
24595 self._eerror(msg_lines)
24596 self._set_returncode((self.pid, 1 << 8))
24597 - self.wait()
24598 + self._async_wait()
24599 return
24600
24601 if not uri_map:
24602 # Nothing to fetch.
24603 self._set_returncode((self.pid, os.EX_OK << 8))
24604 - self.wait()
24605 + self._async_wait()
24606 return
24607
24608 settings = self.config_pool.allocate()
24609 @@ -133,7 +135,7 @@ class EbuildFetcher(SpawnProcess):
24610 self._prefetch_size_ok(uri_map, settings, ebuild_path):
24611 self.config_pool.deallocate(settings)
24612 self._set_returncode((self.pid, os.EX_OK << 8))
24613 - self.wait()
24614 + self._async_wait()
24615 return
24616
24617 nocolor = settings.get("NOCOLOR")
24618 @@ -148,7 +150,7 @@ class EbuildFetcher(SpawnProcess):
24619 settings["NOCOLOR"] = nocolor
24620
24621 self._settings = settings
24622 - SpawnProcess._start(self)
24623 + ForkProcess._start(self)
24624
24625 # Free settings now since it's no longer needed in
24626 # this process (the subprocess has a private copy).
24627 @@ -156,48 +158,20 @@ class EbuildFetcher(SpawnProcess):
24628 settings = None
24629 self._settings = None
24630
24631 - def _spawn(self, args, fd_pipes=None, **kwargs):
24632 - """
24633 - Fork a subprocess, apply local settings, and call fetch().
24634 - """
24635 -
24636 - pid = os.fork()
24637 - if pid != 0:
24638 - if not isinstance(pid, int):
24639 - raise AssertionError(
24640 - "fork returned non-integer: %s" % (repr(pid),))
24641 - portage.process.spawned_pids.append(pid)
24642 - return [pid]
24643 -
24644 - portage.locks._close_fds()
24645 - # Disable close_fds since we don't exec (see _setup_pipes docstring).
24646 - portage.process._setup_pipes(fd_pipes, close_fds=False)
24647 -
24648 - # Use default signal handlers in order to avoid problems
24649 - # killing subprocesses as reported in bug #353239.
24650 - signal.signal(signal.SIGINT, signal.SIG_DFL)
24651 - signal.signal(signal.SIGTERM, signal.SIG_DFL)
24652 -
24653 + def _run(self):
24654 # Force consistent color output, in case we are capturing fetch
24655 # output through a normal pipe due to unavailability of ptys.
24656 portage.output.havecolor = self._settings.get('NOCOLOR') \
24657 not in ('yes', 'true')
24658
24659 rval = 1
24660 - allow_missing = self._get_manifest().allow_missing
24661 - try:
24662 - if fetch(self._uri_map, self._settings, fetchonly=self.fetchonly,
24663 - digests=copy.deepcopy(self._get_digests()),
24664 - allow_missing_digests=allow_missing):
24665 - rval = os.EX_OK
24666 - except SystemExit:
24667 - raise
24668 - except:
24669 - traceback.print_exc()
24670 - finally:
24671 - # Call os._exit() from finally block, in order to suppress any
24672 - # finally blocks from earlier in the call stack. See bug #345289.
24673 - os._exit(rval)
24674 + allow_missing = self._get_manifest().allow_missing or \
24675 + 'digest' in self._settings.features
24676 + if fetch(self._uri_map, self._settings, fetchonly=self.fetchonly,
24677 + digests=copy.deepcopy(self._get_digests()),
24678 + allow_missing_digests=allow_missing):
24679 + rval = os.EX_OK
24680 + return rval
24681
24682 def _get_ebuild_path(self):
24683 if self.ebuild_path is not None:
24684 @@ -297,7 +271,7 @@ class EbuildFetcher(SpawnProcess):
24685 self.scheduler.output(msg, log_path=self.logfile)
24686
24687 def _set_returncode(self, wait_retval):
24688 - SpawnProcess._set_returncode(self, wait_retval)
24689 + ForkProcess._set_returncode(self, wait_retval)
24690 # Collect elog messages that might have been
24691 # created by the pkg_nofetch phase.
24692 # Skip elog messages for prefetch, in order to avoid duplicates.
24693
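The largest hunk above removes the hand-rolled _spawn() (manual os.fork, signal resets, os._exit in a finally block) in favor of the ForkProcess base class, leaving the subclass with a plain _run() method that returns an exit status. A standalone sketch of that division of labour (ForkTask is illustrative, not the real portage.util._async.ForkProcess API):

    import os

    class ForkTask(object):
        """Run self._run() in a forked child and record the child's pid."""

        def start(self):
            pid = os.fork()
            if pid != 0:
                self.pid = pid
                return
            # Child process: run the job and always _exit(), so that no
            # finally blocks from earlier in the call stack run here.
            status = 1
            try:
                status = self._run()
            finally:
                os._exit(status)

        def _run(self):
            raise NotImplementedError

    class Fetch(ForkTask):
        def _run(self):
            # The real fetcher calls fetch() here and maps its result
            # to os.EX_OK or 1.
            return os.EX_OK
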
24694 diff --git a/pym/_emerge/EbuildMetadataPhase.py b/pym/_emerge/EbuildMetadataPhase.py
24695 index c2d3747..bbb1ca9 100644
24696 --- a/pym/_emerge/EbuildMetadataPhase.py
24697 +++ b/pym/_emerge/EbuildMetadataPhase.py
24698 @@ -1,4 +1,4 @@
24699 -# Copyright 1999-2012 Gentoo Foundation
24700 +# Copyright 1999-2013 Gentoo Foundation
24701 # Distributed under the terms of the GNU General Public License v2
24702
24703 from _emerge.SubProcess import SubProcess
24704 @@ -6,12 +6,14 @@ import sys
24705 from portage.cache.mappings import slot_dict_class
24706 import portage
24707 portage.proxy.lazyimport.lazyimport(globals(),
24708 - 'portage.package.ebuild._eapi_invalid:eapi_invalid',
24709 + 'portage.package.ebuild._metadata_invalid:eapi_invalid',
24710 )
24711 from portage import os
24712 from portage import _encodings
24713 from portage import _unicode_decode
24714 from portage import _unicode_encode
24715 +from portage.dep import extract_unpack_dependencies
24716 +from portage.eapi import eapi_has_automatic_unpack_dependencies
24717
24718 import errno
24719 import fcntl
24720 @@ -25,12 +27,11 @@ class EbuildMetadataPhase(SubProcess):
24721 """
24722
24723 __slots__ = ("cpv", "eapi_supported", "ebuild_hash", "fd_pipes",
24724 - "metadata", "portdb", "repo_path", "settings") + \
24725 + "metadata", "portdb", "repo_path", "settings", "write_auxdb") + \
24726 ("_eapi", "_eapi_lineno", "_raw_metadata",)
24727
24728 _file_names = ("ebuild",)
24729 _files_dict = slot_dict_class(_file_names, prefix="")
24730 - _metadata_fd = 9
24731
24732 def _start(self):
24733 ebuild_path = self.ebuild_hash.location
24734 @@ -49,14 +50,14 @@ class EbuildMetadataPhase(SubProcess):
24735 # An empty EAPI setting is invalid.
24736 self._eapi_invalid(None)
24737 self._set_returncode((self.pid, 1 << 8))
24738 - self.wait()
24739 + self._async_wait()
24740 return
24741
24742 self.eapi_supported = portage.eapi_is_supported(parsed_eapi)
24743 if not self.eapi_supported:
24744 self.metadata = {"EAPI": parsed_eapi}
24745 self._set_returncode((self.pid, os.EX_OK << 8))
24746 - self.wait()
24747 + self._async_wait()
24748 return
24749
24750 settings = self.settings
24751 @@ -74,28 +75,41 @@ class EbuildMetadataPhase(SubProcess):
24752
24753 null_input = open('/dev/null', 'rb')
24754 fd_pipes.setdefault(0, null_input.fileno())
24755 - fd_pipes.setdefault(1, sys.stdout.fileno())
24756 - fd_pipes.setdefault(2, sys.stderr.fileno())
24757 + fd_pipes.setdefault(1, sys.__stdout__.fileno())
24758 + fd_pipes.setdefault(2, sys.__stderr__.fileno())
24759
24760 # flush any pending output
24761 + stdout_filenos = (sys.__stdout__.fileno(), sys.__stderr__.fileno())
24762 for fd in fd_pipes.values():
24763 - if fd == sys.stdout.fileno():
24764 - sys.stdout.flush()
24765 - if fd == sys.stderr.fileno():
24766 - sys.stderr.flush()
24767 + if fd in stdout_filenos:
24768 + sys.__stdout__.flush()
24769 + sys.__stderr__.flush()
24770 + break
24771
24772 self._files = self._files_dict()
24773 files = self._files
24774
24775 master_fd, slave_fd = os.pipe()
24776 +
24777 fcntl.fcntl(master_fd, fcntl.F_SETFL,
24778 fcntl.fcntl(master_fd, fcntl.F_GETFL) | os.O_NONBLOCK)
24779
24780 - fd_pipes[self._metadata_fd] = slave_fd
24781 + # FD_CLOEXEC is enabled by default in Python >=3.4.
24782 + if sys.hexversion < 0x3040000:
24783 + try:
24784 + fcntl.FD_CLOEXEC
24785 + except AttributeError:
24786 + pass
24787 + else:
24788 + fcntl.fcntl(master_fd, fcntl.F_SETFD,
24789 + fcntl.fcntl(master_fd, fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
24790 +
24791 + fd_pipes[slave_fd] = slave_fd
24792 + settings["PORTAGE_PIPE_FD"] = str(slave_fd)
24793
24794 self._raw_metadata = []
24795 files.ebuild = master_fd
24796 - self._reg_id = self.scheduler.register(files.ebuild,
24797 + self._reg_id = self.scheduler.io_add_watch(files.ebuild,
24798 self._registered_events, self._output_handler)
24799 self._registered = True
24800
24801 @@ -103,6 +117,7 @@ class EbuildMetadataPhase(SubProcess):
24802 settings=settings, debug=debug,
24803 mydbapi=self.portdb, tree="porttree",
24804 fd_pipes=fd_pipes, returnpid=True)
24805 + settings.pop("PORTAGE_PIPE_FD", None)
24806
24807 os.close(slave_fd)
24808 null_input.close()
24809 @@ -111,11 +126,10 @@ class EbuildMetadataPhase(SubProcess):
24810 # doebuild failed before spawning
24811 self._unregister()
24812 self._set_returncode((self.pid, retval << 8))
24813 - self.wait()
24814 + self._async_wait()
24815 return
24816
24817 self.pid = retval[0]
24818 - portage.process.spawned_pids.remove(self.pid)
24819
24820 def _output_handler(self, fd, event):
24821
24822 @@ -141,8 +155,7 @@ class EbuildMetadataPhase(SubProcess):
24823 def _set_returncode(self, wait_retval):
24824 SubProcess._set_returncode(self, wait_retval)
24825 # self._raw_metadata is None when _start returns
24826 - # early due to an unsupported EAPI detected with
24827 - # FEATURES=parse-eapi-ebuild-head
24828 + # early due to an unsupported EAPI
24829 if self.returncode == os.EX_OK and \
24830 self._raw_metadata is not None:
24831 metadata_lines = _unicode_decode(b''.join(self._raw_metadata),
24832 @@ -163,8 +176,7 @@ class EbuildMetadataPhase(SubProcess):
24833 if (not metadata["EAPI"] or self.eapi_supported) and \
24834 metadata["EAPI"] != parsed_eapi:
24835 self._eapi_invalid(metadata)
24836 - if 'parse-eapi-ebuild-head' in self.settings.features:
24837 - metadata_valid = False
24838 + metadata_valid = False
24839
24840 if metadata_valid:
24841 # Since we're supposed to be able to efficiently obtain the
24842 @@ -181,8 +193,18 @@ class EbuildMetadataPhase(SubProcess):
24843 metadata["_eclasses_"] = {}
24844 metadata.pop("INHERITED", None)
24845
24846 - self.portdb._write_cache(self.cpv,
24847 - self.repo_path, metadata, self.ebuild_hash)
24848 + if eapi_has_automatic_unpack_dependencies(metadata["EAPI"]):
24849 + repo = self.portdb.repositories.get_name_for_location(self.repo_path)
24850 + unpackers = self.settings.unpack_dependencies.get(repo, {}).get(metadata["EAPI"], {})
24851 + unpack_dependencies = extract_unpack_dependencies(metadata["SRC_URI"], unpackers)
24852 + if unpack_dependencies:
24853 + metadata["DEPEND"] += (" " if metadata["DEPEND"] else "") + unpack_dependencies
24854 +
24855 + # If called by egencache, this cache write is
24856 + # undesirable when metadata-transfer is disabled.
24857 + if self.write_auxdb is not False:
24858 + self.portdb._write_cache(self.cpv,
24859 + self.repo_path, metadata, self.ebuild_hash)
24860 else:
24861 metadata = {"EAPI": metadata["EAPI"]}
24862 self.metadata = metadata
24863
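Two mechanical details in the hunks above recur elsewhere in this commit: the metadata pipe's read end is made non-blocking, and on Python < 3.4 (where descriptors are still inherited across exec by default) it is also marked close-on-exec. A standalone sketch of that pipe setup:

    import fcntl
    import os
    import sys

    def metadata_pipe():
        master_fd, slave_fd = os.pipe()
        # Non-blocking reads on the parent's end of the pipe.
        fcntl.fcntl(master_fd, fcntl.F_SETFL,
            fcntl.fcntl(master_fd, fcntl.F_GETFL) | os.O_NONBLOCK)
        # FD_CLOEXEC is only enabled by default in Python >= 3.4.
        if sys.hexversion < 0x3040000 and hasattr(fcntl, "FD_CLOEXEC"):
            fcntl.fcntl(master_fd, fcntl.F_SETFD,
                fcntl.fcntl(master_fd, fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
        return master_fd, slave_fd
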
24864 diff --git a/pym/_emerge/EbuildPhase.py b/pym/_emerge/EbuildPhase.py
24865 index fe44abc..b1f7c21 100644
24866 --- a/pym/_emerge/EbuildPhase.py
24867 +++ b/pym/_emerge/EbuildPhase.py
24868 @@ -1,4 +1,4 @@
24869 -# Copyright 1999-2011 Gentoo Foundation
24870 +# Copyright 1999-2013 Gentoo Foundation
24871 # Distributed under the terms of the GNU General Public License v2
24872
24873 import gzip
24874 @@ -11,6 +11,7 @@ from _emerge.BinpkgEnvExtractor import BinpkgEnvExtractor
24875 from _emerge.MiscFunctionsProcess import MiscFunctionsProcess
24876 from _emerge.EbuildProcess import EbuildProcess
24877 from _emerge.CompositeTask import CompositeTask
24878 +from portage.package.ebuild.prepare_build_dirs import _prepare_workdir
24879 from portage.util import writemsg
24880
24881 try:
24882 @@ -38,7 +39,7 @@ from portage import _unicode_encode
24883
24884 class EbuildPhase(CompositeTask):
24885
24886 - __slots__ = ("actionmap", "phase", "settings") + \
24887 + __slots__ = ("actionmap", "fd_pipes", "phase", "settings") + \
24888 ("_ebuild_lock",)
24889
24890 # FEATURES displayed prior to setup phase
24891 @@ -156,8 +157,7 @@ class EbuildPhase(CompositeTask):
24892 return
24893 self._start_ebuild()
24894
24895 - def _start_ebuild(self):
24896 -
24897 + def _get_log_path(self):
24898 # Don't open the log file during the clean phase since the
24899 # open file can result in an nfs lock on $T/build.log which
24900 # prevents the clean phase from removing $T.
24901 @@ -165,17 +165,21 @@ class EbuildPhase(CompositeTask):
24902 if self.phase not in ("clean", "cleanrm") and \
24903 self.settings.get("PORTAGE_BACKGROUND") != "subprocess":
24904 logfile = self.settings.get("PORTAGE_LOG_FILE")
24905 + return logfile
24906 +
24907 + def _start_ebuild(self):
24908
24909 - fd_pipes = None
24910 - if not self.background and self.phase == 'nofetch':
24911 - # All the pkg_nofetch output goes to stderr since
24912 - # it's considered to be an error message.
24913 - fd_pipes = {1 : sys.stderr.fileno()}
24914 + fd_pipes = self.fd_pipes
24915 + if fd_pipes is None:
24916 + if not self.background and self.phase == 'nofetch':
24917 + # All the pkg_nofetch output goes to stderr since
24918 + # it's considered to be an error message.
24919 + fd_pipes = {1 : sys.__stderr__.fileno()}
24920
24921 ebuild_process = EbuildProcess(actionmap=self.actionmap,
24922 - background=self.background, fd_pipes=fd_pipes, logfile=logfile,
24923 - phase=self.phase, scheduler=self.scheduler,
24924 - settings=self.settings)
24925 + background=self.background, fd_pipes=fd_pipes,
24926 + logfile=self._get_log_path(), phase=self.phase,
24927 + scheduler=self.scheduler, settings=self.settings)
24928
24929 self._start_task(ebuild_process, self._ebuild_exit)
24930
24931 @@ -189,16 +193,21 @@ class EbuildPhase(CompositeTask):
24932 if self._default_exit(ebuild_process) != os.EX_OK:
24933 if self.phase == "test" and \
24934 "test-fail-continue" in self.settings.features:
24935 - pass
24936 + # mark test phase as complete (bug #452030)
24937 + try:
24938 + open(_unicode_encode(os.path.join(
24939 + self.settings["PORTAGE_BUILDDIR"], ".tested"),
24940 + encoding=_encodings['fs'], errors='strict'),
24941 + 'wb').close()
24942 + except OSError:
24943 + pass
24944 else:
24945 fail = True
24946
24947 if not fail:
24948 self.returncode = None
24949
24950 - logfile = None
24951 - if self.settings.get("PORTAGE_BACKGROUND") != "subprocess":
24952 - logfile = self.settings.get("PORTAGE_LOG_FILE")
24953 + logfile = self._get_log_path()
24954
24955 if self.phase == "install":
24956 out = io.StringIO()
24957 @@ -213,7 +222,14 @@ class EbuildPhase(CompositeTask):
24958 settings = self.settings
24959 _post_phase_userpriv_perms(settings)
24960
24961 - if self.phase == "install":
24962 + if self.phase == "unpack":
24963 + # Bump WORKDIR timestamp, in case tar gave it a timestamp
24964 + # that will interfere with distfiles / WORKDIR timestamp
24965 + # comparisons as reported in bug #332217. Also, fix
24966 + # ownership since tar can change that too.
24967 + os.utime(settings["WORKDIR"], None)
24968 + _prepare_workdir(settings)
24969 + elif self.phase == "install":
24970 out = io.StringIO()
24971 _post_src_install_write_metadata(settings)
24972 _post_src_install_uid_fix(settings, out)
24973 @@ -235,8 +251,9 @@ class EbuildPhase(CompositeTask):
24974 fd, logfile = tempfile.mkstemp()
24975 os.close(fd)
24976 post_phase = MiscFunctionsProcess(background=self.background,
24977 - commands=post_phase_cmds, logfile=logfile, phase=self.phase,
24978 - scheduler=self.scheduler, settings=settings)
24979 + commands=post_phase_cmds, fd_pipes=self.fd_pipes,
24980 + logfile=logfile, phase=self.phase, scheduler=self.scheduler,
24981 + settings=settings)
24982 self._start_task(post_phase, self._post_phase_exit)
24983 return
24984
24985 @@ -311,8 +328,9 @@ class EbuildPhase(CompositeTask):
24986 self.returncode = None
24987 phase = 'die_hooks'
24988 die_hooks = MiscFunctionsProcess(background=self.background,
24989 - commands=[phase], phase=phase,
24990 - scheduler=self.scheduler, settings=self.settings)
24991 + commands=[phase], phase=phase, logfile=self._get_log_path(),
24992 + fd_pipes=self.fd_pipes, scheduler=self.scheduler,
24993 + settings=self.settings)
24994 self._start_task(die_hooks, self._die_hooks_exit)
24995
24996 def _die_hooks_exit(self, die_hooks):
24997 @@ -331,7 +349,8 @@ class EbuildPhase(CompositeTask):
24998 portage.elog.elog_process(self.settings.mycpv, self.settings)
24999 phase = "clean"
25000 clean_phase = EbuildPhase(background=self.background,
25001 - phase=phase, scheduler=self.scheduler, settings=self.settings)
25002 + fd_pipes=self.fd_pipes, phase=phase, scheduler=self.scheduler,
25003 + settings=self.settings)
25004 self._start_task(clean_phase, self._fail_clean_exit)
25005 return
25006
25007
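The _get_log_path() helper factored out above encodes two existing rules: the clean phases must not hold build.log open (an NFS lock on $T/build.log can keep $T from being removed), and background subprocesses already have their output handled elsewhere. A standalone sketch, treating settings as any mapping:

    def get_log_path(phase, settings):
        # No log file during clean/cleanrm, and none when running as a
        # background subprocess whose output is already redirected.
        if phase in ("clean", "cleanrm"):
            return None
        if settings.get("PORTAGE_BACKGROUND") == "subprocess":
            return None
        return settings.get("PORTAGE_LOG_FILE")

    # get_log_path("compile", {"PORTAGE_LOG_FILE": "/var/log/build.log"})
    #   -> '/var/log/build.log'
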
25008 diff --git a/pym/_emerge/EbuildProcess.py b/pym/_emerge/EbuildProcess.py
25009 index ce97aff..333ad7b 100644
25010 --- a/pym/_emerge/EbuildProcess.py
25011 +++ b/pym/_emerge/EbuildProcess.py
25012 @@ -1,4 +1,4 @@
25013 -# Copyright 1999-2010 Gentoo Foundation
25014 +# Copyright 1999-2013 Gentoo Foundation
25015 # Distributed under the terms of the GNU General Public License v2
25016
25017 from _emerge.AbstractEbuildProcess import AbstractEbuildProcess
25018 @@ -17,5 +17,11 @@ class EbuildProcess(AbstractEbuildProcess):
25019 if actionmap is None:
25020 actionmap = _spawn_actionmap(self.settings)
25021
25022 - return _doebuild_spawn(self.phase, self.settings,
25023 - actionmap=actionmap, **kwargs)
25024 + if self._dummy_pipe_fd is not None:
25025 + self.settings["PORTAGE_PIPE_FD"] = str(self._dummy_pipe_fd)
25026 +
25027 + try:
25028 + return _doebuild_spawn(self.phase, self.settings,
25029 + actionmap=actionmap, **kwargs)
25030 + finally:
25031 + self.settings.pop("PORTAGE_PIPE_FD", None)
25032
25033 diff --git a/pym/_emerge/EbuildSpawnProcess.py b/pym/_emerge/EbuildSpawnProcess.py
25034 index e1f682a..26d26fc 100644
25035 --- a/pym/_emerge/EbuildSpawnProcess.py
25036 +++ b/pym/_emerge/EbuildSpawnProcess.py
25037 @@ -1,4 +1,4 @@
25038 -# Copyright 2010 Gentoo Foundation
25039 +# Copyright 2010-2013 Gentoo Foundation
25040 # Distributed under the terms of the GNU General Public License v2
25041
25042 from _emerge.AbstractEbuildProcess import AbstractEbuildProcess
25043 @@ -13,4 +13,10 @@ class EbuildSpawnProcess(AbstractEbuildProcess):
25044 __slots__ = ('fakeroot_state', 'spawn_func')
25045
25046 def _spawn(self, args, **kwargs):
25047 - return self.spawn_func(args, env=self.settings.environ(), **kwargs)
25048 +
25049 + env = self.settings.environ()
25050 +
25051 + if self._dummy_pipe_fd is not None:
25052 + env["PORTAGE_PIPE_FD"] = str(self._dummy_pipe_fd)
25053 +
25054 + return self.spawn_func(args, env=env, **kwargs)
25055
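Both small hunks above (EbuildProcess and EbuildSpawnProcess) follow the same pattern: the daemon pipe's descriptor number is exported as PORTAGE_PIPE_FD only for the duration of the spawn and is scrubbed again afterwards. A standalone sketch of that pattern, with spawn_func and settings standing in for the real objects:

    def spawn_with_pipe_fd(spawn_func, args, settings, pipe_fd=None):
        # settings is a mutable mapping shared with later phases, so the
        # temporary variable must not be left behind.
        if pipe_fd is not None:
            settings["PORTAGE_PIPE_FD"] = str(pipe_fd)
        try:
            return spawn_func(args, env=dict(settings))
        finally:
            settings.pop("PORTAGE_PIPE_FD", None)
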
25056 diff --git a/pym/_emerge/FakeVartree.py b/pym/_emerge/FakeVartree.py
25057 index ce15f5a..14be50c 100644
25058 --- a/pym/_emerge/FakeVartree.py
25059 +++ b/pym/_emerge/FakeVartree.py
25060 @@ -1,6 +1,8 @@
25061 -# Copyright 1999-2011 Gentoo Foundation
25062 +# Copyright 1999-2013 Gentoo Foundation
25063 # Distributed under the terms of the GNU General Public License v2
25064
25065 +from __future__ import unicode_literals
25066 +
25067 import sys
25068 import warnings
25069
25070 @@ -10,11 +12,11 @@ from _emerge.Package import Package
25071 from _emerge.PackageVirtualDbapi import PackageVirtualDbapi
25072 from portage.const import VDB_PATH
25073 from portage.dbapi.vartree import vartree
25074 -from portage.dep._slot_abi import find_built_slot_abi_atoms
25075 +from portage.dep._slot_operator import find_built_slot_operator_atoms
25076 from portage.eapi import _get_eapi_attrs
25077 -from portage.exception import InvalidDependString
25078 -from portage.repository.config import _gen_valid_repo
25079 +from portage.exception import InvalidData, InvalidDependString
25080 from portage.update import grab_updates, parse_updates, update_dbentries
25081 +from portage.versions import _pkg_str
25082
25083 if sys.hexversion >= 0x3000000:
25084 long = int
25085 @@ -33,6 +35,9 @@ class FakeVardbapi(PackageVirtualDbapi):
25086 path =os.path.join(path, filename)
25087 return path
25088
25089 +class _DynamicDepsNotApplicable(Exception):
25090 + pass
25091 +
25092 class FakeVartree(vartree):
25093 """This is implements an in-memory copy of a vartree instance that provides
25094 all the interfaces required for use by the depgraph. The vardb is locked
25095 @@ -45,10 +50,10 @@ class FakeVartree(vartree):
25096 is not a matching ebuild in the tree). Instances of this class are not
25097 populated until the sync() method is called."""
25098 def __init__(self, root_config, pkg_cache=None, pkg_root_config=None,
25099 - dynamic_deps=True, ignore_built_slot_abi_deps=False):
25100 + dynamic_deps=True, ignore_built_slot_operator_deps=False):
25101 self._root_config = root_config
25102 self._dynamic_deps = dynamic_deps
25103 - self._ignore_built_slot_abi_deps = ignore_built_slot_abi_deps
25104 + self._ignore_built_slot_operator_deps = ignore_built_slot_operator_deps
25105 if pkg_root_config is None:
25106 pkg_root_config = self._root_config
25107 self._pkg_root_config = pkg_root_config
25108 @@ -75,7 +80,7 @@ class FakeVartree(vartree):
25109 self.dbapi.aux_get = self._aux_get_wrapper
25110 self.dbapi.match = self._match_wrapper
25111 self._aux_get_history = set()
25112 - self._portdb_keys = ["EAPI", "KEYWORDS", "DEPEND", "RDEPEND", "PDEPEND"]
25113 + self._portdb_keys = Package._dep_keys + ("EAPI", "KEYWORDS")
25114 self._portdb = portdb
25115 self._global_updates = None
25116
25117 @@ -102,29 +107,30 @@ class FakeVartree(vartree):
25118 self._aux_get_wrapper(cpv, [])
25119 return matches
25120
25121 - def _aux_get_wrapper(self, pkg, wants, myrepo=None):
25122 - if pkg in self._aux_get_history:
25123 - return self._aux_get(pkg, wants)
25124 - self._aux_get_history.add(pkg)
25125 - # We need to check the EAPI, and this also raises
25126 - # a KeyError to the caller if appropriate.
25127 - pkg_obj = self.dbapi._cpv_map[pkg]
25128 - installed_eapi = pkg_obj.metadata['EAPI']
25129 - repo = pkg_obj.metadata['repository']
25130 - eapi_attrs = _get_eapi_attrs(installed_eapi)
25131 - built_slot_abi_atoms = None
25132 -
25133 - if eapi_attrs.slot_abi and not self._ignore_built_slot_abi_deps:
25134 - try:
25135 - built_slot_abi_atoms = find_built_slot_abi_atoms(pkg_obj)
25136 - except InvalidDependString:
25137 - pass
25138 + def _aux_get_wrapper(self, cpv, wants, myrepo=None):
25139 + if cpv in self._aux_get_history:
25140 + return self._aux_get(cpv, wants)
25141 + self._aux_get_history.add(cpv)
25142 +
25143 + # This raises a KeyError to the caller if appropriate.
25144 + pkg = self.dbapi._cpv_map[cpv]
25145
25146 try:
25147 - # Use the live ebuild metadata if possible.
25148 - repo = _gen_valid_repo(repo)
25149 live_metadata = dict(zip(self._portdb_keys,
25150 - self._portdb.aux_get(pkg, self._portdb_keys, myrepo=repo)))
25151 + self._portdb.aux_get(cpv, self._portdb_keys,
25152 + myrepo=pkg.repo)))
25153 + except (KeyError, portage.exception.PortageException):
25154 + live_metadata = None
25155 +
25156 + self._apply_dynamic_deps(pkg, live_metadata)
25157 +
25158 + return self._aux_get(cpv, wants)
25159 +
25160 + def _apply_dynamic_deps(self, pkg, live_metadata):
25161 +
25162 + try:
25163 + if live_metadata is None:
25164 + raise _DynamicDepsNotApplicable()
25165 # Use the metadata from the installed instance if the EAPI
25166 # of either instance is unsupported, since if the installed
25167 # instance has an unsupported or corrupt EAPI then we don't
25168 @@ -134,26 +140,46 @@ class FakeVartree(vartree):
25169 # order to respect dep updates without revision bump or EAPI
25170 # bump, as in bug #368725.
25171 if not (portage.eapi_is_supported(live_metadata["EAPI"]) and \
25172 - portage.eapi_is_supported(installed_eapi)):
25173 - raise KeyError(pkg)
25174 + portage.eapi_is_supported(pkg.eapi)):
25175 + raise _DynamicDepsNotApplicable()
25176
25177 - # preserve built SLOT/ABI := operator deps
25178 - if built_slot_abi_atoms:
25179 + # preserve built slot/sub-slot := operator deps
25180 + built_slot_operator_atoms = None
25181 + if not self._ignore_built_slot_operator_deps and \
25182 + _get_eapi_attrs(pkg.eapi).slot_operator:
25183 + try:
25184 + built_slot_operator_atoms = \
25185 + find_built_slot_operator_atoms(pkg)
25186 + except InvalidDependString:
25187 + pass
25188 +
25189 + if built_slot_operator_atoms:
25190 live_eapi_attrs = _get_eapi_attrs(live_metadata["EAPI"])
25191 - if not live_eapi_attrs.slot_abi:
25192 - raise KeyError(pkg)
25193 - for k, v in built_slot_abi_atoms.items():
25194 + if not live_eapi_attrs.slot_operator:
25195 + raise _DynamicDepsNotApplicable()
25196 + for k, v in built_slot_operator_atoms.items():
25197 live_metadata[k] += (" " +
25198 " ".join(_unicode(atom) for atom in v))
25199
25200 - self.dbapi.aux_update(pkg, live_metadata)
25201 - except (KeyError, portage.exception.PortageException):
25202 + self.dbapi.aux_update(pkg.cpv, live_metadata)
25203 + except _DynamicDepsNotApplicable:
25204 if self._global_updates is None:
25205 self._global_updates = \
25206 grab_global_updates(self._portdb)
25207 +
25208 + # Bypass _aux_get_wrapper, since calling that
25209 + # here would trigger infinite recursion.
25210 + aux_keys = Package._dep_keys + self.dbapi._pkg_str_aux_keys
25211 + aux_dict = dict(zip(aux_keys, self._aux_get(pkg.cpv, aux_keys)))
25212 perform_global_updates(
25213 - pkg, self.dbapi, self._global_updates)
25214 - return self._aux_get(pkg, wants)
25215 + pkg.cpv, aux_dict, self.dbapi, self._global_updates)
25216 +
25217 + def dynamic_deps_preload(self, pkg, metadata):
25218 + if metadata is not None:
25219 + metadata = dict((k, metadata.get(k, ''))
25220 + for k in self._portdb_keys)
25221 + self._apply_dynamic_deps(pkg, metadata)
25222 + self._aux_get_history.add(pkg.cpv)
25223
25224 def cpv_discard(self, pkg):
25225 """
25226 @@ -251,12 +277,6 @@ class FakeVartree(vartree):
25227 root_config=self._pkg_root_config,
25228 type_name="installed")
25229
25230 - try:
25231 - mycounter = long(pkg.metadata["COUNTER"])
25232 - except ValueError:
25233 - mycounter = 0
25234 - pkg.metadata["COUNTER"] = str(mycounter)
25235 -
25236 self._pkg_cache[pkg] = pkg
25237 return pkg
25238
25239 @@ -285,13 +305,14 @@ def grab_global_updates(portdb):
25240
25241 return retupdates
25242
25243 -def perform_global_updates(mycpv, mydb, myupdates):
25244 - aux_keys = ["DEPEND", "EAPI", "RDEPEND", "PDEPEND", 'repository']
25245 - aux_dict = dict(zip(aux_keys, mydb.aux_get(mycpv, aux_keys)))
25246 - eapi = aux_dict.pop('EAPI')
25247 - repository = aux_dict.pop('repository')
25248 +def perform_global_updates(mycpv, aux_dict, mydb, myupdates):
25249 + try:
25250 + pkg = _pkg_str(mycpv, metadata=aux_dict, settings=mydb.settings)
25251 + except InvalidData:
25252 + return
25253 + aux_dict = dict((k, aux_dict[k]) for k in Package._dep_keys)
25254 try:
25255 - mycommands = myupdates[repository]
25256 + mycommands = myupdates[pkg.repo]
25257 except KeyError:
25258 try:
25259 mycommands = myupdates['DEFAULT']
25260 @@ -301,6 +322,6 @@ def perform_global_updates(mycpv, mydb, myupdates):
25261 if not mycommands:
25262 return
25263
25264 - updates = update_dbentries(mycommands, aux_dict, eapi=eapi)
25265 + updates = update_dbentries(mycommands, aux_dict, parent=pkg)
25266 if updates:
25267 mydb.aux_update(mycpv, updates)
25268
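The rewrite above replaces the overloaded KeyError with a private exception type, so the fallback path (keep the installed metadata and just apply package moves) can be reached from several places without being mistaken for a genuine missing-package KeyError. A much-reduced standalone sketch of that control flow:

    class _DynamicDepsNotApplicable(Exception):
        pass

    def apply_dynamic_deps(installed_metadata, live_metadata,
                           live_eapi_supported=True):
        try:
            if live_metadata is None:
                # No matching ebuild in the tree.
                raise _DynamicDepsNotApplicable()
            if not live_eapi_supported:
                raise _DynamicDepsNotApplicable()
            installed_metadata.update(live_metadata)
        except _DynamicDepsNotApplicable:
            # Fall back to the installed instance's metadata; the real
            # code additionally applies global package moves here.
            pass
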
25269 diff --git a/pym/_emerge/FifoIpcDaemon.py b/pym/_emerge/FifoIpcDaemon.py
25270 index fcc4ab4..7468de5 100644
25271 --- a/pym/_emerge/FifoIpcDaemon.py
25272 +++ b/pym/_emerge/FifoIpcDaemon.py
25273 @@ -1,6 +1,14 @@
25274 -# Copyright 2010-2012 Gentoo Foundation
25275 +# Copyright 2010-2013 Gentoo Foundation
25276 # Distributed under the terms of the GNU General Public License v2
25277
25278 +import sys
25279 +
25280 +try:
25281 + import fcntl
25282 +except ImportError:
25283 + # http://bugs.jython.org/issue1074
25284 + fcntl = None
25285 +
25286 from portage import os
25287 from _emerge.AbstractPollTask import AbstractPollTask
25288 from portage.cache.mappings import slot_dict_class
25289 @@ -21,7 +29,18 @@ class FifoIpcDaemon(AbstractPollTask):
25290 self._files.pipe_in = \
25291 os.open(self.input_fifo, os.O_RDONLY|os.O_NONBLOCK)
25292
25293 - self._reg_id = self.scheduler.register(
25294 + # FD_CLOEXEC is enabled by default in Python >=3.4.
25295 + if sys.hexversion < 0x3040000 and fcntl is not None:
25296 + try:
25297 + fcntl.FD_CLOEXEC
25298 + except AttributeError:
25299 + pass
25300 + else:
25301 + fcntl.fcntl(self._files.pipe_in, fcntl.F_SETFD,
25302 + fcntl.fcntl(self._files.pipe_in,
25303 + fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
25304 +
25305 + self._reg_id = self.scheduler.io_add_watch(
25306 self._files.pipe_in,
25307 self._registered_events, self._input_handler)
25308
25309 @@ -32,11 +51,23 @@ class FifoIpcDaemon(AbstractPollTask):
25310 Re-open the input stream, in order to suppress
25311 POLLHUP events (bug #339976).
25312 """
25313 - self.scheduler.unregister(self._reg_id)
25314 + self.scheduler.source_remove(self._reg_id)
25315 os.close(self._files.pipe_in)
25316 self._files.pipe_in = \
25317 os.open(self.input_fifo, os.O_RDONLY|os.O_NONBLOCK)
25318 - self._reg_id = self.scheduler.register(
25319 +
25320 + # FD_CLOEXEC is enabled by default in Python >=3.4.
25321 + if sys.hexversion < 0x3040000 and fcntl is not None:
25322 + try:
25323 + fcntl.FD_CLOEXEC
25324 + except AttributeError:
25325 + pass
25326 + else:
25327 + fcntl.fcntl(self._files.pipe_in, fcntl.F_SETFD,
25328 + fcntl.fcntl(self._files.pipe_in,
25329 + fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
25330 +
25331 + self._reg_id = self.scheduler.io_add_watch(
25332 self._files.pipe_in,
25333 self._registered_events, self._input_handler)
25334
25335 @@ -47,6 +78,8 @@ class FifoIpcDaemon(AbstractPollTask):
25336 if self.returncode is None:
25337 self.returncode = 1
25338 self._unregister()
25339 + # notify exit listeners
25340 + self.wait()
25341
25342 def _wait(self):
25343 if self.returncode is not None:
25344 @@ -67,7 +100,7 @@ class FifoIpcDaemon(AbstractPollTask):
25345 self._registered = False
25346
25347 if self._reg_id is not None:
25348 - self.scheduler.unregister(self._reg_id)
25349 + self.scheduler.source_remove(self._reg_id)
25350 self._reg_id = None
25351
25352 if self._files is not None:
25353
25354 diff --git a/pym/_emerge/JobStatusDisplay.py b/pym/_emerge/JobStatusDisplay.py
25355 index 5b9b221..9f6f09b 100644
25356 --- a/pym/_emerge/JobStatusDisplay.py
25357 +++ b/pym/_emerge/JobStatusDisplay.py
25358 @@ -1,6 +1,8 @@
25359 -# Copyright 1999-2011 Gentoo Foundation
25360 +# Copyright 1999-2013 Gentoo Foundation
25361 # Distributed under the terms of the GNU General Public License v2
25362
25363 +from __future__ import unicode_literals
25364 +
25365 import formatter
25366 import io
25367 import sys
25368 @@ -9,7 +11,6 @@ import time
25369 import portage
25370 from portage import os
25371 from portage import _encodings
25372 -from portage import _unicode_decode
25373 from portage import _unicode_encode
25374 from portage.output import xtermTitle
25375
25376 @@ -121,7 +122,8 @@ class JobStatusDisplay(object):
25377
25378 term_codes = {}
25379 for k, capname in self._termcap_name_map.items():
25380 - code = tigetstr(capname)
25381 + # Use _native_string for PyPy compat (bug #470258).
25382 + code = tigetstr(portage._native_string(capname))
25383 if code is None:
25384 code = self._default_term_codes[capname]
25385 term_codes[k] = code
25386 @@ -233,10 +235,10 @@ class JobStatusDisplay(object):
25387 def _display_status(self):
25388 # Don't use len(self._completed_tasks) here since that also
25389 # can include uninstall tasks.
25390 - curval_str = str(self.curval)
25391 - maxval_str = str(self.maxval)
25392 - running_str = str(self.running)
25393 - failed_str = str(self.failed)
25394 + curval_str = "%s" % (self.curval,)
25395 + maxval_str = "%s" % (self.maxval,)
25396 + running_str = "%s" % (self.running,)
25397 + failed_str = "%s" % (self.failed,)
25398 load_avg_str = self._load_avg_str()
25399
25400 color_output = io.StringIO()
25401 @@ -248,36 +250,36 @@ class JobStatusDisplay(object):
25402 f = formatter.AbstractFormatter(style_writer)
25403
25404 number_style = "INFORM"
25405 - f.add_literal_data(_unicode_decode("Jobs: "))
25406 + f.add_literal_data("Jobs: ")
25407 f.push_style(number_style)
25408 - f.add_literal_data(_unicode_decode(curval_str))
25409 + f.add_literal_data(curval_str)
25410 f.pop_style()
25411 - f.add_literal_data(_unicode_decode(" of "))
25412 + f.add_literal_data(" of ")
25413 f.push_style(number_style)
25414 - f.add_literal_data(_unicode_decode(maxval_str))
25415 + f.add_literal_data(maxval_str)
25416 f.pop_style()
25417 - f.add_literal_data(_unicode_decode(" complete"))
25418 + f.add_literal_data(" complete")
25419
25420 if self.running:
25421 - f.add_literal_data(_unicode_decode(", "))
25422 + f.add_literal_data(", ")
25423 f.push_style(number_style)
25424 - f.add_literal_data(_unicode_decode(running_str))
25425 + f.add_literal_data(running_str)
25426 f.pop_style()
25427 - f.add_literal_data(_unicode_decode(" running"))
25428 + f.add_literal_data(" running")
25429
25430 if self.failed:
25431 - f.add_literal_data(_unicode_decode(", "))
25432 + f.add_literal_data(", ")
25433 f.push_style(number_style)
25434 - f.add_literal_data(_unicode_decode(failed_str))
25435 + f.add_literal_data(failed_str)
25436 f.pop_style()
25437 - f.add_literal_data(_unicode_decode(" failed"))
25438 + f.add_literal_data(" failed")
25439
25440 padding = self._jobs_column_width - len(plain_output.getvalue())
25441 if padding > 0:
25442 - f.add_literal_data(padding * _unicode_decode(" "))
25443 + f.add_literal_data(padding * " ")
25444
25445 - f.add_literal_data(_unicode_decode("Load avg: "))
25446 - f.add_literal_data(_unicode_decode(load_avg_str))
25447 + f.add_literal_data("Load avg: ")
25448 + f.add_literal_data(load_avg_str)
25449
25450 # Truncate to fit width, to avoid making the terminal scroll if the
25451 # line overflows (happens when the load average is large).
25452
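The tigetstr() change above works around the fact that, with unicode_literals in effect, the capability names would otherwise be unicode on Python 2, which PyPy's curses binding rejects (bug #470258). A standalone sketch of the kind of native-string conversion involved (the real helper is portage._native_string; this only shows the idea):

    import sys

    def native_string(s, encoding="utf-8"):
        # Return the interpreter's native str type: decode bytes on
        # Python 3, encode unicode on Python 2.
        if sys.hexversion >= 0x3000000:
            return s.decode(encoding) if isinstance(s, bytes) else s
        return s if isinstance(s, bytes) else s.encode(encoding)
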
25453 diff --git a/pym/_emerge/MergeListItem.py b/pym/_emerge/MergeListItem.py
25454 index 8086c68..938f801 100644
25455 --- a/pym/_emerge/MergeListItem.py
25456 +++ b/pym/_emerge/MergeListItem.py
25457 @@ -1,7 +1,8 @@
25458 -# Copyright 1999-2011 Gentoo Foundation
25459 +# Copyright 1999-2014 Gentoo Foundation
25460 # Distributed under the terms of the GNU General Public License v2
25461
25462 from portage import os
25463 +from portage.dep import _repo_separator
25464 from portage.output import colorize
25465
25466 from _emerge.AsynchronousTask import AsynchronousTask
25467 @@ -32,7 +33,7 @@ class MergeListItem(CompositeTask):
25468 if pkg.installed:
25469 # uninstall, executed by self.merge()
25470 self.returncode = os.EX_OK
25471 - self.wait()
25472 + self._async_wait()
25473 return
25474
25475 args_set = self.args_set
25476 @@ -47,7 +48,9 @@ class MergeListItem(CompositeTask):
25477
25478 action_desc = "Emerging"
25479 preposition = "for"
25480 + pkg_color = "PKG_MERGE"
25481 if pkg.type_name == "binary":
25482 + pkg_color = "PKG_BINARY_MERGE"
25483 action_desc += " binary"
25484
25485 if build_opts.fetchonly:
25486 @@ -57,16 +60,7 @@ class MergeListItem(CompositeTask):
25487 (action_desc,
25488 colorize("MERGE_LIST_PROGRESS", str(pkg_count.curval)),
25489 colorize("MERGE_LIST_PROGRESS", str(pkg_count.maxval)),
25490 - colorize("GOOD", pkg.cpv))
25491 -
25492 - portdb = pkg.root_config.trees["porttree"].dbapi
25493 - portdir_repo_name = portdb.getRepositoryName(portdb.porttree_root)
25494 - if portdir_repo_name:
25495 - pkg_repo_name = pkg.repo
25496 - if pkg_repo_name != portdir_repo_name:
25497 - if pkg_repo_name == pkg.UNKNOWN_REPO:
25498 - pkg_repo_name = "unknown repo"
25499 - msg += " from %s" % pkg_repo_name
25500 + colorize(pkg_color, pkg.cpv + _repo_separator + pkg.repo))
25501
25502 if pkg.root_config.settings["ROOT"] != "/":
25503 msg += " %s %s" % (preposition, pkg.root)
25504
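The message hunk above drops the old conditional "from <repo>" suffix and instead always prints the repository with the "::" separator, colorized differently for binary merges. A standalone sketch of the resulting format:

    def merge_message(curval, maxval, cpv, repo, binary=False):
        action = "Emerging binary" if binary else "Emerging"
        return "%s (%s of %s) %s::%s" % (action, curval, maxval, cpv, repo)

    # merge_message(3, 10, "dev-lang/python-2.7.5-r2", "gentoo")
    #   -> 'Emerging (3 of 10) dev-lang/python-2.7.5-r2::gentoo'
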
25505 diff --git a/pym/_emerge/MetadataRegen.py b/pym/_emerge/MetadataRegen.py
25506 index e82015f..d92b6a0 100644
25507 --- a/pym/_emerge/MetadataRegen.py
25508 +++ b/pym/_emerge/MetadataRegen.py
25509 @@ -1,18 +1,20 @@
25510 -# Copyright 1999-2012 Gentoo Foundation
25511 +# Copyright 1999-2013 Gentoo Foundation
25512 # Distributed under the terms of the GNU General Public License v2
25513
25514 import portage
25515 from portage import os
25516 from portage.dep import _repo_separator
25517 from _emerge.EbuildMetadataPhase import EbuildMetadataPhase
25518 -from _emerge.PollScheduler import PollScheduler
25519 +from portage.cache.cache_errors import CacheError
25520 +from portage.util._async.AsyncScheduler import AsyncScheduler
25521
25522 -class MetadataRegen(PollScheduler):
25523 +class MetadataRegen(AsyncScheduler):
25524
25525 def __init__(self, portdb, cp_iter=None, consumer=None,
25526 - max_jobs=None, max_load=None):
25527 - PollScheduler.__init__(self, main=True)
25528 + write_auxdb=True, **kwargs):
25529 + AsyncScheduler.__init__(self, **kwargs)
25530 self._portdb = portdb
25531 + self._write_auxdb = write_auxdb
25532 self._global_cleanse = False
25533 if cp_iter is None:
25534 cp_iter = self._iter_every_cp()
25535 @@ -22,34 +24,21 @@ class MetadataRegen(PollScheduler):
25536 self._cp_iter = cp_iter
25537 self._consumer = consumer
25538
25539 - if max_jobs is None:
25540 - max_jobs = 1
25541 -
25542 - self._max_jobs = max_jobs
25543 - self._max_load = max_load
25544 -
25545 self._valid_pkgs = set()
25546 self._cp_set = set()
25547 self._process_iter = self._iter_metadata_processes()
25548 - self.returncode = os.EX_OK
25549 - self._error_count = 0
25550 self._running_tasks = set()
25551 - self._remaining_tasks = True
25552
25553 - def _terminate_tasks(self):
25554 - for task in list(self._running_tasks):
25555 - task.cancel()
25556 + def _next_task(self):
25557 + return next(self._process_iter)
25558
25559 def _iter_every_cp(self):
25560 - portage.writemsg_stdout("Listing available packages...\n")
25561 - every_cp = self._portdb.cp_all()
25562 - portage.writemsg_stdout("Regenerating cache entries...\n")
25563 - every_cp.sort(reverse=True)
25564 - try:
25565 - while not self._terminated_tasks:
25566 - yield every_cp.pop()
25567 - except IndexError:
25568 - pass
25569 + # List categories individually, in order to start yielding quicker,
25570 + # and in order to reduce latency in case of a signal interrupt.
25571 + cp_all = self._portdb.cp_all
25572 + for category in sorted(self._portdb.categories):
25573 + for cp in cp_all(categories=(category,)):
25574 + yield cp
25575
25576 def _iter_metadata_processes(self):
25577 portdb = self._portdb
25578 @@ -57,8 +46,9 @@ class MetadataRegen(PollScheduler):
25579 cp_set = self._cp_set
25580 consumer = self._consumer
25581
25582 + portage.writemsg_stdout("Regenerating cache entries...\n")
25583 for cp in self._cp_iter:
25584 - if self._terminated_tasks:
25585 + if self._terminated.is_set():
25586 break
25587 cp_set.add(cp)
25588 portage.writemsg_stdout("Processing %s\n" % cp)
25589 @@ -68,7 +58,7 @@ class MetadataRegen(PollScheduler):
25590 repo = portdb.repositories.get_repo_for_location(mytree)
25591 cpv_list = portdb.cp_list(cp, mytree=[repo.location])
25592 for cpv in cpv_list:
25593 - if self._terminated_tasks:
25594 + if self._terminated.is_set():
25595 break
25596 valid_pkgs.add(cpv)
25597 ebuild_path, repo_path = portdb.findname2(cpv, myrepo=repo.name)
25598 @@ -84,22 +74,21 @@ class MetadataRegen(PollScheduler):
25599 yield EbuildMetadataPhase(cpv=cpv,
25600 ebuild_hash=ebuild_hash,
25601 portdb=portdb, repo_path=repo_path,
25602 - settings=portdb.doebuild_settings)
25603 + settings=portdb.doebuild_settings,
25604 + write_auxdb=self._write_auxdb)
25605
25606 - def _keep_scheduling(self):
25607 - return self._remaining_tasks and not self._terminated_tasks
25608 + def _wait(self):
25609
25610 - def run(self):
25611 + AsyncScheduler._wait(self)
25612
25613 portdb = self._portdb
25614 - from portage.cache.cache_errors import CacheError
25615 dead_nodes = {}
25616
25617 - self._main_loop()
25618 -
25619 + self._termination_check()
25620 if self._terminated_tasks:
25621 - self.returncode = 1
25622 - return
25623 + portdb.flush_cache()
25624 + self.returncode = self._cancelled_returncode
25625 + return self.returncode
25626
25627 if self._global_cleanse:
25628 for mytree in portdb.porttrees:
25629 @@ -142,29 +131,12 @@ class MetadataRegen(PollScheduler):
25630 except (KeyError, CacheError):
25631 pass
25632
25633 - def _schedule_tasks(self):
25634 - if self._terminated_tasks:
25635 - return
25636 -
25637 - while self._can_add_job():
25638 - try:
25639 - metadata_process = next(self._process_iter)
25640 - except StopIteration:
25641 - self._remaining_tasks = False
25642 - return
25643 -
25644 - self._jobs += 1
25645 - self._running_tasks.add(metadata_process)
25646 - metadata_process.scheduler = self.sched_iface
25647 - metadata_process.addExitListener(self._metadata_exit)
25648 - metadata_process.start()
25649 -
25650 - def _metadata_exit(self, metadata_process):
25651 - self._jobs -= 1
25652 - self._running_tasks.discard(metadata_process)
25653 + portdb.flush_cache()
25654 + return self.returncode
25655 +
25656 + def _task_exit(self, metadata_process):
25657 +
25658 if metadata_process.returncode != os.EX_OK:
25659 - self.returncode = 1
25660 - self._error_count += 1
25661 self._valid_pkgs.discard(metadata_process.cpv)
25662 if not self._terminated_tasks:
25663 portage.writemsg("Error processing %s, continuing...\n" % \
25664 @@ -179,5 +151,4 @@ class MetadataRegen(PollScheduler):
25665 metadata_process.ebuild_hash,
25666 metadata_process.eapi_supported)
25667
25668 - self._schedule()
25669 -
25670 + AsyncScheduler._task_exit(self, metadata_process)
25671
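Besides moving MetadataRegen onto AsyncScheduler, the hunk above changes _iter_every_cp() to walk one category at a time, which starts yielding work sooner and shortens the window in which a signal has nothing to interrupt. A standalone sketch of that iteration, with cp_all standing in for portdb.cp_all:

    def iter_every_cp(categories, cp_all):
        # Yield category/package names one category at a time instead of
        # materializing and sorting the full list up front.
        for category in sorted(categories):
            for cp in cp_all(categories=(category,)):
                yield cp

    # Example with a stub in place of portdb.cp_all:
    def cp_all(categories=None):
        tree = {"app-misc": ["app-misc/foo"], "dev-lang": ["dev-lang/python"]}
        return [cp for cat in categories for cp in tree.get(cat, [])]

    print(list(iter_every_cp(("dev-lang", "app-misc"), cp_all)))
    # -> ['app-misc/foo', 'dev-lang/python']
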
25672 diff --git a/pym/_emerge/MiscFunctionsProcess.py b/pym/_emerge/MiscFunctionsProcess.py
25673 index afa44fb..bada79d 100644
25674 --- a/pym/_emerge/MiscFunctionsProcess.py
25675 +++ b/pym/_emerge/MiscFunctionsProcess.py
25676 @@ -1,4 +1,4 @@
25677 -# Copyright 1999-2011 Gentoo Foundation
25678 +# Copyright 1999-2013 Gentoo Foundation
25679 # Distributed under the terms of the GNU General Public License v2
25680
25681 from _emerge.AbstractEbuildProcess import AbstractEbuildProcess
25682 @@ -29,6 +29,10 @@ class MiscFunctionsProcess(AbstractEbuildProcess):
25683 AbstractEbuildProcess._start(self)
25684
25685 def _spawn(self, args, **kwargs):
25686 +
25687 + if self._dummy_pipe_fd is not None:
25688 + self.settings["PORTAGE_PIPE_FD"] = str(self._dummy_pipe_fd)
25689 +
25690 # Temporarily unset EBUILD_PHASE so that bashrc code doesn't
25691 # think this is a real phase.
25692 phase_backup = self.settings.pop("EBUILD_PHASE", None)
25693 @@ -37,3 +41,4 @@ class MiscFunctionsProcess(AbstractEbuildProcess):
25694 finally:
25695 if phase_backup is not None:
25696 self.settings["EBUILD_PHASE"] = phase_backup
25697 + self.settings.pop("PORTAGE_PIPE_FD", None)
25698
25699 diff --git a/pym/_emerge/Package.py b/pym/_emerge/Package.py
25700 index 14d0694..a09f73c 100644
25701 --- a/pym/_emerge/Package.py
25702 +++ b/pym/_emerge/Package.py
25703 @@ -1,8 +1,12 @@
25704 -# Copyright 1999-2012 Gentoo Foundation
25705 +# Copyright 1999-2014 Gentoo Foundation
25706 # Distributed under the terms of the GNU General Public License v2
25707
25708 +from __future__ import unicode_literals
25709 +
25710 import sys
25711 from itertools import chain
25712 +import warnings
25713 +
25714 import portage
25715 from portage import _encodings, _unicode_decode, _unicode_encode
25716 from portage.cache.mappings import slot_dict_class
25717 @@ -10,67 +14,82 @@ from portage.const import EBUILD_PHASES
25718 from portage.dep import Atom, check_required_use, use_reduce, \
25719 paren_enclose, _slot_separator, _repo_separator
25720 from portage.versions import _pkg_str, _unknown_repo
25721 -from portage.eapi import _get_eapi_attrs
25722 +from portage.eapi import _get_eapi_attrs, eapi_has_use_aliases
25723 from portage.exception import InvalidDependString
25724 +from portage.localization import _
25725 from _emerge.Task import Task
25726
25727 if sys.hexversion >= 0x3000000:
25728 basestring = str
25729 long = int
25730 + _unicode = str
25731 +else:
25732 + _unicode = unicode
25733
25734 class Package(Task):
25735
25736 __hash__ = Task.__hash__
25737 __slots__ = ("built", "cpv", "depth",
25738 - "installed", "metadata", "onlydeps", "operation",
25739 + "installed", "onlydeps", "operation",
25740 "root_config", "type_name",
25741 "category", "counter", "cp", "cpv_split",
25742 "inherited", "iuse", "mtime",
25743 - "pf", "root", "slot", "slot_abi", "slot_atom", "version") + \
25744 - ("_invalid", "_raw_metadata", "_masks", "_use",
25745 + "pf", "root", "slot", "sub_slot", "slot_atom", "version") + \
25746 + ("_invalid", "_masks", "_metadata", "_raw_metadata", "_use",
25747 "_validated_atoms", "_visible")
25748
25749 metadata_keys = [
25750 "BUILD_TIME", "CHOST", "COUNTER", "DEPEND", "EAPI",
25751 - "INHERITED", "IUSE", "KEYWORDS",
25752 + "HDEPEND", "INHERITED", "IUSE", "KEYWORDS",
25753 "LICENSE", "PDEPEND", "PROVIDE", "RDEPEND",
25754 "repository", "PROPERTIES", "RESTRICT", "SLOT", "USE",
25755 "_mtime_", "DEFINED_PHASES", "REQUIRED_USE"]
25756
25757 - _dep_keys = ('DEPEND', 'PDEPEND', 'RDEPEND',)
25758 + _dep_keys = ('DEPEND', 'HDEPEND', 'PDEPEND', 'RDEPEND')
25759 + _buildtime_keys = ('DEPEND', 'HDEPEND')
25760 + _runtime_keys = ('PDEPEND', 'RDEPEND')
25761 _use_conditional_misc_keys = ('LICENSE', 'PROPERTIES', 'RESTRICT')
25762 UNKNOWN_REPO = _unknown_repo
25763
25764 def __init__(self, **kwargs):
25765 + metadata = _PackageMetadataWrapperBase(kwargs.pop('metadata'))
25766 Task.__init__(self, **kwargs)
25767 # the SlotObject constructor assigns self.root_config from keyword args
25768 # and is an instance of a '_emerge.RootConfig.RootConfig class
25769 self.root = self.root_config.root
25770 - self._raw_metadata = _PackageMetadataWrapperBase(self.metadata)
25771 - self.metadata = _PackageMetadataWrapper(self, self._raw_metadata)
25772 + self._raw_metadata = metadata
25773 + self._metadata = _PackageMetadataWrapper(self, metadata)
25774 if not self.built:
25775 - self.metadata['CHOST'] = self.root_config.settings.get('CHOST', '')
25776 - eapi_attrs = _get_eapi_attrs(self.metadata["EAPI"])
25777 - self.cpv = _pkg_str(self.cpv, slot=self.metadata["SLOT"],
25778 - repo=self.metadata.get('repository', ''),
25779 - eapi=self.metadata["EAPI"])
25780 + self._metadata['CHOST'] = self.root_config.settings.get('CHOST', '')
25781 + eapi_attrs = _get_eapi_attrs(self.eapi)
25782 + self.cpv = _pkg_str(self.cpv, metadata=self._metadata,
25783 + settings=self.root_config.settings)
25784 if hasattr(self.cpv, 'slot_invalid'):
25785 self._invalid_metadata('SLOT.invalid',
25786 - "SLOT: invalid value: '%s'" % self.metadata["SLOT"])
25787 + "SLOT: invalid value: '%s'" % self._metadata["SLOT"])
25788 + self.cpv_split = self.cpv.cpv_split
25789 + self.category, self.pf = portage.catsplit(self.cpv)
25790 self.cp = self.cpv.cp
25791 + self.version = self.cpv.version
25792 self.slot = self.cpv.slot
25793 - self.slot_abi = self.cpv.slot_abi
25794 + self.sub_slot = self.cpv.sub_slot
25795 + self.slot_atom = Atom("%s%s%s" % (self.cp, _slot_separator, self.slot))
25796 # sync metadata with validated repo (may be UNKNOWN_REPO)
25797 - self.metadata['repository'] = self.cpv.repo
25798 + self._metadata['repository'] = self.cpv.repo
25799 +
25800 + if eapi_attrs.iuse_effective:
25801 + implicit_match = self.root_config.settings._iuse_effective_match
25802 + else:
25803 + implicit_match = self.root_config.settings._iuse_implicit_match
25804 + usealiases = self.root_config.settings._use_manager.getUseAliases(self)
25805 + self.iuse = self._iuse(self, self._metadata["IUSE"].split(), implicit_match,
25806 + usealiases, self.eapi)
25807 +
25808 if (self.iuse.enabled or self.iuse.disabled) and \
25809 not eapi_attrs.iuse_defaults:
25810 if not self.installed:
25811 self._invalid_metadata('EAPI.incompatible',
25812 "IUSE contains defaults, but EAPI doesn't allow them")
25813 - self.slot_atom = Atom("%s%s%s" % (self.cp, _slot_separator, self.slot))
25814 - self.category, self.pf = portage.catsplit(self.cpv)
25815 - self.cpv_split = self.cpv.cpv_split
25816 - self.version = self.cpv.version
25817 if self.inherited is None:
25818 self.inherited = frozenset()
25819
25820 @@ -87,6 +106,37 @@ class Package(Task):
25821 type_name=self.type_name)
25822 self._hash_value = hash(self._hash_key)
25823
25824 + @property
25825 + def eapi(self):
25826 + return self._metadata["EAPI"]
25827 +
25828 + @property
25829 + def build_time(self):
25830 + if not self.built:
25831 + raise AttributeError('build_time')
25832 + try:
25833 + return long(self._metadata['BUILD_TIME'])
25834 + except (KeyError, ValueError):
25835 + return 0
25836 +
25837 + @property
25838 + def defined_phases(self):
25839 + return self._metadata.defined_phases
25840 +
25841 + @property
25842 + def properties(self):
25843 + return self._metadata.properties
25844 +
25845 + @property
25846 + def restrict(self):
25847 + return self._metadata.restrict
25848 +
25849 + @property
25850 + def metadata(self):
25851 + warnings.warn("_emerge.Package.Package.metadata is deprecated",
25852 + DeprecationWarning, stacklevel=3)
25853 + return self._metadata
25854 +
25855 # These are calculated on-demand, so that they are calculated
25856 # after FakeVartree applies its metadata tweaks.
25857 @property
25858 @@ -120,6 +170,10 @@ class Package(Task):
25859 self._validate_deps()
25860 return self._validated_atoms
25861
25862 + @property
25863 + def stable(self):
25864 + return self.cpv.stable
25865 +
25866 @classmethod
25867 def _gen_hash_key(cls, cpv=None, installed=None, onlydeps=None,
25868 operation=None, repo_name=None, root_config=None,
25869 @@ -154,15 +208,15 @@ class Package(Task):
25870 # So overwrite the repo_key with type_name.
25871 repo_key = type_name
25872
25873 - return (type_name, root, cpv, operation, repo_key)
25874 + return (type_name, root, _unicode(cpv), operation, repo_key)
25875
25876 def _validate_deps(self):
25877 """
25878 Validate deps. This does not trigger USE calculation since that
25879 is expensive for ebuilds and therefore we want to avoid doing
25880 - in unnecessarily (like for masked packages).
25881 + it unnecessarily (like for masked packages).
25882 """
25883 - eapi = self.metadata['EAPI']
25884 + eapi = self.eapi
25885 dep_eapi = eapi
25886 dep_valid_flag = self.iuse.is_valid_flag
25887 if self.installed:
25888 @@ -175,31 +229,42 @@ class Package(Task):
25889
25890 validated_atoms = []
25891 for k in self._dep_keys:
25892 - v = self.metadata.get(k)
25893 + v = self._metadata.get(k)
25894 if not v:
25895 continue
25896 try:
25897 - validated_atoms.extend(use_reduce(v, eapi=dep_eapi,
25898 + atoms = use_reduce(v, eapi=dep_eapi,
25899 matchall=True, is_valid_flag=dep_valid_flag,
25900 - token_class=Atom, flat=True))
25901 + token_class=Atom, flat=True)
25902 except InvalidDependString as e:
25903 self._metadata_exception(k, e)
25904 + else:
25905 + validated_atoms.extend(atoms)
25906 + if not self.built:
25907 + for atom in atoms:
25908 + if not isinstance(atom, Atom):
25909 + continue
25910 + if atom.slot_operator_built:
25911 + e = InvalidDependString(
25912 + _("Improper context for slot-operator "
25913 + "\"built\" atom syntax: %s") %
25914 + (atom.unevaluated_atom,))
25915 + self._metadata_exception(k, e)
25916
25917 self._validated_atoms = tuple(set(atom for atom in
25918 validated_atoms if isinstance(atom, Atom)))
25919
25920 k = 'PROVIDE'
25921 - v = self.metadata.get(k)
25922 + v = self._metadata.get(k)
25923 if v:
25924 try:
25925 use_reduce(v, eapi=dep_eapi, matchall=True,
25926 is_valid_flag=dep_valid_flag, token_class=Atom)
25927 except InvalidDependString as e:
25928 - self._invalid_metadata("PROVIDE.syntax",
25929 - _unicode_decode("%s: %s") % (k, e))
25930 + self._invalid_metadata("PROVIDE.syntax", "%s: %s" % (k, e))
25931
25932 for k in self._use_conditional_misc_keys:
25933 - v = self.metadata.get(k)
25934 + v = self._metadata.get(k)
25935 if not v:
25936 continue
25937 try:
25938 @@ -209,24 +274,20 @@ class Package(Task):
25939 self._metadata_exception(k, e)
25940
25941 k = 'REQUIRED_USE'
25942 - v = self.metadata.get(k)
25943 - if v:
25944 + v = self._metadata.get(k)
25945 + if v and not self.built:
25946 if not _get_eapi_attrs(eapi).required_use:
25947 self._invalid_metadata('EAPI.incompatible',
25948 "REQUIRED_USE set, but EAPI='%s' doesn't allow it" % eapi)
25949 else:
25950 try:
25951 check_required_use(v, (),
25952 - self.iuse.is_valid_flag)
25953 + self.iuse.is_valid_flag, eapi=eapi)
25954 except InvalidDependString as e:
25955 - # Force unicode format string for python-2.x safety,
25956 - # ensuring that PortageException.__unicode__() is used
25957 - # when necessary.
25958 - self._invalid_metadata(k + ".syntax",
25959 - _unicode_decode("%s: %s") % (k, e))
25960 + self._invalid_metadata(k + ".syntax", "%s: %s" % (k, e))
25961
25962 k = 'SRC_URI'
25963 - v = self.metadata.get(k)
25964 + v = self._metadata.get(k)
25965 if v:
25966 try:
25967 use_reduce(v, is_src_uri=True, eapi=eapi, matchall=True,
25968 @@ -248,36 +309,45 @@ class Package(Task):
25969 if self.invalid is not False:
25970 masks['invalid'] = self.invalid
25971
25972 - if not settings._accept_chost(self.cpv, self.metadata):
25973 - masks['CHOST'] = self.metadata['CHOST']
25974 + if not settings._accept_chost(self.cpv, self._metadata):
25975 + masks['CHOST'] = self._metadata['CHOST']
25976
25977 - eapi = self.metadata["EAPI"]
25978 + eapi = self.eapi
25979 if not portage.eapi_is_supported(eapi):
25980 masks['EAPI.unsupported'] = eapi
25981 if portage._eapi_is_deprecated(eapi):
25982 masks['EAPI.deprecated'] = eapi
25983
25984 missing_keywords = settings._getMissingKeywords(
25985 - self.cpv, self.metadata)
25986 + self.cpv, self._metadata)
25987 if missing_keywords:
25988 masks['KEYWORDS'] = missing_keywords
25989
25990 try:
25991 missing_properties = settings._getMissingProperties(
25992 - self.cpv, self.metadata)
25993 + self.cpv, self._metadata)
25994 if missing_properties:
25995 masks['PROPERTIES'] = missing_properties
25996 except InvalidDependString:
25997 # already recorded as 'invalid'
25998 pass
25999
26000 - mask_atom = settings._getMaskAtom(self.cpv, self.metadata)
26001 + try:
26002 + missing_restricts = settings._getMissingRestrict(
26003 + self.cpv, self._metadata)
26004 + if missing_restricts:
26005 + masks['RESTRICT'] = missing_restricts
26006 + except InvalidDependString:
26007 + # already recorded as 'invalid'
26008 + pass
26009 +
26010 + mask_atom = settings._getMaskAtom(self.cpv, self._metadata)
26011 if mask_atom is not None:
26012 masks['package.mask'] = mask_atom
26013
26014 try:
26015 missing_licenses = settings._getMissingLicenses(
26016 - self.cpv, self.metadata)
26017 + self.cpv, self._metadata)
26018 if missing_licenses:
26019 masks['LICENSE'] = missing_licenses
26020 except InvalidDependString:
26021 @@ -303,7 +373,8 @@ class Package(Task):
26022 'CHOST' in masks or \
26023 'EAPI.deprecated' in masks or \
26024 'KEYWORDS' in masks or \
26025 - 'PROPERTIES' in masks):
26026 + 'PROPERTIES' in masks or \
26027 + 'RESTRICT' in masks):
26028 return False
26029
26030 if 'package.mask' in masks or \
26031 @@ -316,7 +387,7 @@ class Package(Task):
26032 """returns None, 'missing', or 'unstable'."""
26033
26034 missing = self.root_config.settings._getRawMissingKeywords(
26035 - self.cpv, self.metadata)
26036 + self.cpv, self._metadata)
26037
26038 if not missing:
26039 return None
26040 @@ -337,17 +408,22 @@ class Package(Task):
26041 """returns a bool if the cpv is in the list of
26042 expanded pmaskdict[cp] available ebuilds"""
26043 pmask = self.root_config.settings._getRawMaskAtom(
26044 - self.cpv, self.metadata)
26045 + self.cpv, self._metadata)
26046 return pmask is not None
26047
26048 def _metadata_exception(self, k, e):
26049
26050 + if k.endswith('DEPEND'):
26051 + qacat = 'dependency.syntax'
26052 + else:
26053 + qacat = k + ".syntax"
26054 +
26055 # For unicode safety with python-2.x we need to avoid
26056 # using the string format operator with a non-unicode
26057 # format string, since that will result in the
26058 # PortageException.__str__() method being invoked,
26059 # followed by unsafe decoding that may result in a
26060 - # UnicodeDecodeError. Therefore, use _unicode_decode()
26061 + # UnicodeDecodeError. Therefore, use unicode_literals
26062 # to ensure that format strings are unicode, so that
26063 # PortageException.__unicode__() is used when necessary
26064 # in python-2.x.
26065 @@ -359,19 +435,17 @@ class Package(Task):
26066 continue
26067 categorized_error = True
26068 self._invalid_metadata(error.category,
26069 - _unicode_decode("%s: %s") % (k, error))
26070 + "%s: %s" % (k, error))
26071
26072 if not categorized_error:
26073 - self._invalid_metadata(k + ".syntax",
26074 - _unicode_decode("%s: %s") % (k, e))
26075 + self._invalid_metadata(qacat, "%s: %s" % (k, e))
26076 else:
26077 # For installed packages, show the path of the file
26078 # containing the invalid metadata, since the user may
26079 # want to fix the deps by hand.
26080 vardb = self.root_config.trees['vartree'].dbapi
26081 path = vardb.getpath(self.cpv, filename=k)
26082 - self._invalid_metadata(k + ".syntax",
26083 - _unicode_decode("%s: %s in '%s'") % (k, e, path))
26084 + self._invalid_metadata(qacat, "%s: %s in '%s'" % (k, e, path))
26085
26086 def _invalid_metadata(self, msg_type, msg):
26087 if self._invalid is None:
26088 @@ -394,7 +468,8 @@ class Package(Task):
26089 cpv_color = "PKG_NOMERGE"
26090
26091 s = "(%s, %s" \
26092 - % (portage.output.colorize(cpv_color, self.cpv + _repo_separator + self.repo) , self.type_name)
26093 + % (portage.output.colorize(cpv_color, self.cpv + _slot_separator + \
26094 + self.slot + "/" + self.sub_slot + _repo_separator + self.repo) , self.type_name)
26095
26096 if self.type_name == "installed":
26097 if self.root_config.settings['ROOT'] != "/":
26098 @@ -425,13 +500,16 @@ class Package(Task):
26099 # Share identical frozenset instances when available.
26100 _frozensets = {}
26101
26102 - def __init__(self, pkg, use_str):
26103 + def __init__(self, pkg, enabled_flags):
26104 self._pkg = pkg
26105 self._expand = None
26106 self._expand_hidden = None
26107 self._force = None
26108 self._mask = None
26109 - self.enabled = frozenset(use_str.split())
26110 + if eapi_has_use_aliases(pkg.eapi):
26111 + for enabled_flag in enabled_flags:
26112 + enabled_flags.extend(pkg.iuse.alias_mapping.get(enabled_flag, []))
26113 + self.enabled = frozenset(enabled_flags)
26114 if pkg.built:
26115 # Use IUSE to validate USE settings for built packages,
26116 # in case the package manager that built this package
26117 @@ -481,7 +559,7 @@ class Package(Task):
26118
26119 @property
26120 def repo(self):
26121 - return self.metadata['repository']
26122 + return self._metadata['repository']
26123
26124 @property
26125 def repo_priority(self):
26126 @@ -493,7 +571,7 @@ class Package(Task):
26127 @property
26128 def use(self):
26129 if self._use is None:
26130 - self.metadata._init_use()
26131 + self._init_use()
26132 return self._use
26133
26134 def _get_pkgsettings(self):
26135 @@ -502,28 +580,81 @@ class Package(Task):
26136 pkgsettings.setcpv(self)
26137 return pkgsettings
26138
26139 + def _init_use(self):
26140 + if self.built:
26141 + # Use IUSE to validate USE settings for built packages,
26142 + # in case the package manager that built this package
26143 + # failed to do that for some reason (or in case of
26144 + # data corruption). The enabled flags must be consistent
26145 + # with implicit IUSE, in order to avoid potential
26146 + # inconsistencies in USE dep matching (see bug #453400).
26147 + use_str = self._metadata['USE']
26148 + is_valid_flag = self.iuse.is_valid_flag
26149 + enabled_flags = [x for x in use_str.split() if is_valid_flag(x)]
26150 + use_str = " ".join(enabled_flags)
26151 + self._use = self._use_class(
26152 + self, enabled_flags)
26153 + else:
26154 + try:
26155 + use_str = _PackageMetadataWrapperBase.__getitem__(
26156 + self._metadata, 'USE')
26157 + except KeyError:
26158 + use_str = None
26159 + calculated_use = False
26160 + if not use_str:
26161 + use_str = self._get_pkgsettings()["PORTAGE_USE"]
26162 + calculated_use = True
26163 + self._use = self._use_class(
26164 + self, use_str.split())
26165 + # Initialize these now, since USE access has just triggered
26166 + # setcpv, and we want to cache the result of the force/mask
26167 + # calculations that were done.
26168 + if calculated_use:
26169 + self._use._init_force_mask()
26170 +
26171 + _PackageMetadataWrapperBase.__setitem__(
26172 + self._metadata, 'USE', use_str)
26173 +
26174 + return use_str
26175 +
26176 class _iuse(object):
26177
26178 - __slots__ = ("__weakref__", "all", "enabled", "disabled",
26179 - "tokens") + ("_iuse_implicit_match",)
26180 + __slots__ = ("__weakref__", "_iuse_implicit_match", "_pkg", "alias_mapping",
26181 + "all", "all_aliases", "enabled", "disabled", "tokens")
26182
26183 - def __init__(self, tokens, iuse_implicit_match):
26184 + def __init__(self, pkg, tokens, iuse_implicit_match, aliases, eapi):
26185 + self._pkg = pkg
26186 self.tokens = tuple(tokens)
26187 self._iuse_implicit_match = iuse_implicit_match
26188 enabled = []
26189 disabled = []
26190 other = []
26191 + enabled_aliases = []
26192 + disabled_aliases = []
26193 + other_aliases = []
26194 + aliases_supported = eapi_has_use_aliases(eapi)
26195 + self.alias_mapping = {}
26196 for x in tokens:
26197 prefix = x[:1]
26198 if prefix == "+":
26199 enabled.append(x[1:])
26200 + if aliases_supported:
26201 + self.alias_mapping[x[1:]] = aliases.get(x[1:], [])
26202 + enabled_aliases.extend(self.alias_mapping[x[1:]])
26203 elif prefix == "-":
26204 disabled.append(x[1:])
26205 + if aliases_supported:
26206 + self.alias_mapping[x[1:]] = aliases.get(x[1:], [])
26207 + disabled_aliases.extend(self.alias_mapping[x[1:]])
26208 else:
26209 other.append(x)
26210 - self.enabled = frozenset(enabled)
26211 - self.disabled = frozenset(disabled)
26212 + if aliases_supported:
26213 + self.alias_mapping[x] = aliases.get(x, [])
26214 + other_aliases.extend(self.alias_mapping[x])
26215 + self.enabled = frozenset(chain(enabled, enabled_aliases))
26216 + self.disabled = frozenset(chain(disabled, disabled_aliases))
26217 self.all = frozenset(chain(enabled, disabled, other))
26218 + self.all_aliases = frozenset(chain(enabled_aliases, disabled_aliases, other_aliases))
26219
26220 def is_valid_flag(self, flags):
26221 """
26222 @@ -534,7 +665,7 @@ class Package(Task):
26223 flags = [flags]
26224
26225 for flag in flags:
26226 - if not flag in self.all and \
26227 + if not flag in self.all and not flag in self.all_aliases and \
26228 not self._iuse_implicit_match(flag):
26229 return False
26230 return True
26231 @@ -547,11 +678,28 @@ class Package(Task):
26232 flags = [flags]
26233 missing_iuse = []
26234 for flag in flags:
26235 - if not flag in self.all and \
26236 + if not flag in self.all and not flag in self.all_aliases and \
26237 not self._iuse_implicit_match(flag):
26238 missing_iuse.append(flag)
26239 return missing_iuse
26240
26241 + def get_real_flag(self, flag):
26242 + """
26243 + Returns the flag's name within the scope of this package
26244 + (accounting for aliases), or None if the flag is unknown.
26245 + """
26246 + if flag in self.all:
26247 + return flag
26248 + elif flag in self.all_aliases:
26249 + for k, v in self.alias_mapping.items():
26250 + if flag in v:
26251 + return k
26252 +
26253 + if self._iuse_implicit_match(flag):
26254 + return flag
26255 +
26256 + return None
26257 +
26258 def __len__(self):
26259 return 4
26260
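
The new get_real_flag() above resolves a USE flag that may have been given by one of its aliases back to the real IUSE name. The same lookup in isolation (standalone sketch; the example mapping is made up):

    # Map each real IUSE flag to its aliases, then resolve either name
    # back to the real flag, or None when the flag is unknown.
    alias_mapping = {"ssl": ["openssl"], "threads": []}
    all_flags = frozenset(alias_mapping)
    all_aliases = frozenset(a for names in alias_mapping.values() for a in names)

    def get_real_flag(flag):
        if flag in all_flags:
            return flag
        if flag in all_aliases:
            for real, aliases in alias_mapping.items():
                if flag in aliases:
                    return real
        return None

    print(get_real_flag("openssl"))   # -> ssl
    print(get_real_flag("foo"))       # -> None
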
26261 @@ -604,7 +752,7 @@ class _PackageMetadataWrapper(_PackageMetadataWrapperBase):
26262
26263 __slots__ = ("_pkg",)
26264 _wrapped_keys = frozenset(
26265 - ["COUNTER", "INHERITED", "IUSE", "USE", "_mtime_"])
26266 + ["COUNTER", "INHERITED", "USE", "_mtime_"])
26267 _use_conditional_keys = frozenset(
26268 ['LICENSE', 'PROPERTIES', 'PROVIDE', 'RESTRICT',])
26269
26270 @@ -617,31 +765,6 @@ class _PackageMetadataWrapper(_PackageMetadataWrapperBase):
26271
26272 self.update(metadata)
26273
26274 - def _init_use(self):
26275 - if self._pkg.built:
26276 - use_str = self['USE']
26277 - self._pkg._use = self._pkg._use_class(
26278 - self._pkg, use_str)
26279 - else:
26280 - try:
26281 - use_str = _PackageMetadataWrapperBase.__getitem__(self, 'USE')
26282 - except KeyError:
26283 - use_str = None
26284 - calculated_use = False
26285 - if not use_str:
26286 - use_str = self._pkg._get_pkgsettings()["PORTAGE_USE"]
26287 - calculated_use = True
26288 - _PackageMetadataWrapperBase.__setitem__(self, 'USE', use_str)
26289 - self._pkg._use = self._pkg._use_class(
26290 - self._pkg, use_str)
26291 - # Initialize these now, since USE access has just triggered
26292 - # setcpv, and we want to cache the result of the force/mask
26293 - # calculations that were done.
26294 - if calculated_use:
26295 - self._pkg._use._init_force_mask()
26296 -
26297 - return use_str
26298 -
26299 def __getitem__(self, k):
26300 v = _PackageMetadataWrapperBase.__getitem__(self, k)
26301 if k in self._use_conditional_keys:
26302 @@ -659,7 +782,7 @@ class _PackageMetadataWrapper(_PackageMetadataWrapperBase):
26303 elif k == 'USE' and not self._pkg.built:
26304 if not v:
26305 # This is lazy because it's expensive.
26306 - v = self._init_use()
26307 + v = self._pkg._init_use()
26308
26309 return v
26310
26311 @@ -673,10 +796,6 @@ class _PackageMetadataWrapper(_PackageMetadataWrapperBase):
26312 v = frozenset(v.split())
26313 self._pkg.inherited = v
26314
26315 - def _set_iuse(self, k, v):
26316 - self._pkg.iuse = self._pkg._iuse(
26317 - v.split(), self._pkg.root_config.settings._iuse_implicit_match)
26318 -
26319 def _set_counter(self, k, v):
26320 if isinstance(v, basestring):
26321 try:
26322
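
Taken together, the Package.py changes above retire the public metadata attribute in favour of _metadata plus typed accessors, keeping only a deprecated metadata property as a shim. A minimal standalone sketch of that shim pattern (hypothetical class, not portage code):

    import warnings

    class Pkg:
        def __init__(self, metadata):
            self._metadata = dict(metadata)

        @property
        def eapi(self):                      # new-style typed accessor
            return self._metadata["EAPI"]

        @property
        def metadata(self):                  # old spelling kept, but warns
            warnings.warn("Pkg.metadata is deprecated",
                DeprecationWarning, stacklevel=2)
            return self._metadata

    pkg = Pkg({"EAPI": "5", "SLOT": "0"})
    print(pkg.eapi)                          # no warning
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        pkg.metadata["SLOT"]                 # emits DeprecationWarning
        print(caught[0].category.__name__)
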
26323 diff --git a/pym/_emerge/PackageMerge.py b/pym/_emerge/PackageMerge.py
26324 index eed34e9..ef298ca 100644
26325 --- a/pym/_emerge/PackageMerge.py
26326 +++ b/pym/_emerge/PackageMerge.py
26327 @@ -1,4 +1,4 @@
26328 -# Copyright 1999-2011 Gentoo Foundation
26329 +# Copyright 1999-2012 Gentoo Foundation
26330 # Distributed under the terms of the GNU General Public License v2
26331
26332 from _emerge.CompositeTask import CompositeTask
26333 @@ -11,6 +11,9 @@ class PackageMerge(CompositeTask):
26334 self.scheduler = self.merge.scheduler
26335 pkg = self.merge.pkg
26336 pkg_count = self.merge.pkg_count
26337 + pkg_color = "PKG_MERGE"
26338 + if pkg.type_name == "binary":
26339 + pkg_color = "PKG_BINARY_MERGE"
26340
26341 if pkg.installed:
26342 action_desc = "Uninstalling"
26343 @@ -26,7 +29,7 @@ class PackageMerge(CompositeTask):
26344 msg = "%s %s%s" % \
26345 (action_desc,
26346 counter_str,
26347 - colorize("GOOD", pkg.cpv))
26348 + colorize(pkg_color, pkg.cpv))
26349
26350 if pkg.root_config.settings["ROOT"] != "/":
26351 msg += " %s %s" % (preposition, pkg.root)
26352
26353 diff --git a/pym/_emerge/PackageUninstall.py b/pym/_emerge/PackageUninstall.py
26354 index eb6a947..16c2f74 100644
26355 --- a/pym/_emerge/PackageUninstall.py
26356 +++ b/pym/_emerge/PackageUninstall.py
26357 @@ -1,4 +1,4 @@
26358 -# Copyright 1999-2011 Gentoo Foundation
26359 +# Copyright 1999-2012 Gentoo Foundation
26360 # Distributed under the terms of the GNU General Public License v2
26361
26362 import logging
26363 @@ -33,7 +33,7 @@ class PackageUninstall(CompositeTask):
26364 # Apparently the package got uninstalled
26365 # already, so we can safely return early.
26366 self.returncode = os.EX_OK
26367 - self.wait()
26368 + self._async_wait()
26369 return
26370
26371 self.settings.setcpv(self.pkg)
26372 @@ -67,7 +67,7 @@ class PackageUninstall(CompositeTask):
26373 if retval != os.EX_OK:
26374 self._builddir_lock.unlock()
26375 self.returncode = retval
26376 - self.wait()
26377 + self._async_wait()
26378 return
26379
26380 self._writemsg_level(">>> Unmerging %s...\n" % (self.pkg.cpv,),
26381
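
The PackageUninstall change above swaps the blocking wait() for _async_wait() on the early-return paths, so a task that finishes inside _start() still reports completion through the event loop instead of notifying exit listeners re-entrantly. A toy illustration of that idea, using asyncio only for the sake of a runnable example (portage uses its own event loop; the class below is hypothetical):

    import asyncio

    class ToyTask:
        def __init__(self, loop):
            self.loop = loop
            self.returncode = None
            self._exit_listeners = []

        def addExitListener(self, cb):
            self._exit_listeners.append(cb)

        def start(self):
            # Nothing to do: record the result now, but let the event
            # loop deliver the notification (the _async_wait() idea).
            self.returncode = 0
            self.loop.call_soon(self._notify_exit)

        def _notify_exit(self):
            for cb in self._exit_listeners:
                cb(self)

    async def main():
        loop = asyncio.get_running_loop()
        task = ToyTask(loop)
        task.addExitListener(lambda t: print("exited with", t.returncode))
        task.start()
        await asyncio.sleep(0)   # let the scheduled callback run

    asyncio.run(main())
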
26382 diff --git a/pym/_emerge/PackageVirtualDbapi.py b/pym/_emerge/PackageVirtualDbapi.py
26383 index 0f7be44..56a5576 100644
26384 --- a/pym/_emerge/PackageVirtualDbapi.py
26385 +++ b/pym/_emerge/PackageVirtualDbapi.py
26386 @@ -140,10 +140,10 @@ class PackageVirtualDbapi(dbapi):
26387 self._clear_cache()
26388
26389 def aux_get(self, cpv, wants, myrepo=None):
26390 - metadata = self._cpv_map[cpv].metadata
26391 + metadata = self._cpv_map[cpv]._metadata
26392 return [metadata.get(x, "") for x in wants]
26393
26394 def aux_update(self, cpv, values):
26395 - self._cpv_map[cpv].metadata.update(values)
26396 + self._cpv_map[cpv]._metadata.update(values)
26397 self._clear_cache()
26398
26399
26400 diff --git a/pym/_emerge/PipeReader.py b/pym/_emerge/PipeReader.py
26401 index 90febdf..a8392c3 100644
26402 --- a/pym/_emerge/PipeReader.py
26403 +++ b/pym/_emerge/PipeReader.py
26404 @@ -1,9 +1,11 @@
26405 -# Copyright 1999-2012 Gentoo Foundation
26406 +# Copyright 1999-2013 Gentoo Foundation
26407 # Distributed under the terms of the GNU General Public License v2
26408
26409 +import fcntl
26410 +import sys
26411 +
26412 from portage import os
26413 from _emerge.AbstractPollTask import AbstractPollTask
26414 -import fcntl
26415
26416 class PipeReader(AbstractPollTask):
26417
26418 @@ -27,18 +29,28 @@ class PipeReader(AbstractPollTask):
26419 output_handler = self._output_handler
26420
26421 for f in self.input_files.values():
26422 - fcntl.fcntl(f.fileno(), fcntl.F_SETFL,
26423 - fcntl.fcntl(f.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
26424 - self._reg_ids.add(self.scheduler.register(f.fileno(),
26425 + fd = isinstance(f, int) and f or f.fileno()
26426 + fcntl.fcntl(fd, fcntl.F_SETFL,
26427 + fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
26428 +
26429 + # FD_CLOEXEC is enabled by default in Python >=3.4.
26430 + if sys.hexversion < 0x3040000:
26431 + try:
26432 + fcntl.FD_CLOEXEC
26433 + except AttributeError:
26434 + pass
26435 + else:
26436 + fcntl.fcntl(fd, fcntl.F_SETFD,
26437 + fcntl.fcntl(fd, fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
26438 +
26439 + self._reg_ids.add(self.scheduler.io_add_watch(fd,
26440 self._registered_events, output_handler))
26441 self._registered = True
26442
26443 - def isAlive(self):
26444 - return self._registered
26445 -
26446 def _cancel(self):
26447 + self._unregister()
26448 if self.returncode is None:
26449 - self.returncode = 1
26450 + self.returncode = self._cancelled_returncode
26451
26452 def _wait(self):
26453 if self.returncode is not None:
26454 @@ -102,11 +114,14 @@ class PipeReader(AbstractPollTask):
26455
26456 if self._reg_ids is not None:
26457 for reg_id in self._reg_ids:
26458 - self.scheduler.unregister(reg_id)
26459 + self.scheduler.source_remove(reg_id)
26460 self._reg_ids = None
26461
26462 if self.input_files is not None:
26463 for f in self.input_files.values():
26464 - f.close()
26465 + if isinstance(f, int):
26466 + os.close(f)
26467 + else:
26468 + f.close()
26469 self.input_files = None
26470
26471
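
PipeReader now accepts raw integer file descriptors, switches them to non-blocking mode, and on Python < 3.4 also marks them close-on-exec so they are not leaked into spawned children. The same fd preparation in isolation (POSIX-only sketch; the helper name is ours):

    import fcntl
    import os
    import sys

    def prepare_fd(fd):
        # Non-blocking reads, so the event loop never stalls on this pipe.
        fcntl.fcntl(fd, fcntl.F_SETFL,
            fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
        # FD_CLOEXEC is the default for new fds on Python >= 3.4 (PEP 446).
        if sys.hexversion < 0x3040000 and hasattr(fcntl, "FD_CLOEXEC"):
            fcntl.fcntl(fd, fcntl.F_SETFD,
                fcntl.fcntl(fd, fcntl.F_GETFD) | fcntl.FD_CLOEXEC)

    r, w = os.pipe()
    prepare_fd(r)
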
26472 diff --git a/pym/_emerge/PollScheduler.py b/pym/_emerge/PollScheduler.py
26473 index 5103e31..b118ac1 100644
26474 --- a/pym/_emerge/PollScheduler.py
26475 +++ b/pym/_emerge/PollScheduler.py
26476 @@ -1,18 +1,13 @@
26477 -# Copyright 1999-2012 Gentoo Foundation
26478 +# Copyright 1999-2013 Gentoo Foundation
26479 # Distributed under the terms of the GNU General Public License v2
26480
26481 -import gzip
26482 -import errno
26483 -
26484 try:
26485 import threading
26486 except ImportError:
26487 import dummy_threading as threading
26488
26489 -from portage import _encodings
26490 -from portage import _unicode_encode
26491 -from portage.util import writemsg_level
26492 -from portage.util.SlotObject import SlotObject
26493 +import portage
26494 +from portage.util._async.SchedulerInterface import SchedulerInterface
26495 from portage.util._eventloop.EventLoop import EventLoop
26496 from portage.util._eventloop.global_event_loop import global_event_loop
26497
26498 @@ -20,14 +15,10 @@ from _emerge.getloadavg import getloadavg
26499
26500 class PollScheduler(object):
26501
26502 - class _sched_iface_class(SlotObject):
26503 - __slots__ = ("IO_ERR", "IO_HUP", "IO_IN", "IO_NVAL", "IO_OUT",
26504 - "IO_PRI", "child_watch_add",
26505 - "idle_add", "io_add_watch", "iteration",
26506 - "output", "register", "run",
26507 - "source_remove", "timeout_add", "unregister")
26508 + # max time between loadavg checks (milliseconds)
26509 + _loadavg_latency = None
26510
26511 - def __init__(self, main=False):
26512 + def __init__(self, main=False, event_loop=None):
26513 """
26514 @param main: If True then use global_event_loop(), otherwise use
26515 a local EventLoop instance (default is False, for safe use in
26516 @@ -38,29 +29,20 @@ class PollScheduler(object):
26517 self._terminated_tasks = False
26518 self._max_jobs = 1
26519 self._max_load = None
26520 - self._jobs = 0
26521 self._scheduling = False
26522 self._background = False
26523 - if main:
26524 + if event_loop is not None:
26525 + self._event_loop = event_loop
26526 + elif main:
26527 self._event_loop = global_event_loop()
26528 else:
26529 - self._event_loop = EventLoop(main=False)
26530 - self.sched_iface = self._sched_iface_class(
26531 - IO_ERR=self._event_loop.IO_ERR,
26532 - IO_HUP=self._event_loop.IO_HUP,
26533 - IO_IN=self._event_loop.IO_IN,
26534 - IO_NVAL=self._event_loop.IO_NVAL,
26535 - IO_OUT=self._event_loop.IO_OUT,
26536 - IO_PRI=self._event_loop.IO_PRI,
26537 - child_watch_add=self._event_loop.child_watch_add,
26538 - idle_add=self._event_loop.idle_add,
26539 - io_add_watch=self._event_loop.io_add_watch,
26540 - iteration=self._event_loop.iteration,
26541 - output=self._task_output,
26542 - register=self._event_loop.io_add_watch,
26543 - source_remove=self._event_loop.source_remove,
26544 - timeout_add=self._event_loop.timeout_add,
26545 - unregister=self._event_loop.source_remove)
26546 + self._event_loop = (portage._internal_caller and
26547 + global_event_loop() or EventLoop(main=False))
26548 + self._sched_iface = SchedulerInterface(self._event_loop,
26549 + is_background=self._is_background)
26550 +
26551 + def _is_background(self):
26552 + return self._background
26553
26554 def terminate(self):
26555 """
26556 @@ -135,48 +117,23 @@ class PollScheduler(object):
26557 Calls _schedule_tasks() and automatically returns early from
26558 any recursive calls to this method that the _schedule_tasks()
26559 call might trigger. This makes _schedule() safe to call from
26560 - inside exit listeners.
26561 + inside exit listeners. This method always returns True, so that
26562 + it may be scheduled continuously via EventLoop.timeout_add().
26563 """
26564 if self._scheduling:
26565 - return False
26566 + return True
26567 self._scheduling = True
26568 try:
26569 self._schedule_tasks()
26570 finally:
26571 self._scheduling = False
26572 -
26573 - def _main_loop(self):
26574 - term_check_id = self.sched_iface.idle_add(self._termination_check)
26575 - try:
26576 - # Populate initial event sources. Unless we're scheduling
26577 - # based on load average, we only need to do this once
26578 - # here, since it can be called during the loop from within
26579 - # event handlers.
26580 - self._schedule()
26581 - max_load = self._max_load
26582 -
26583 - # Loop while there are jobs to be scheduled.
26584 - while self._keep_scheduling():
26585 - self.sched_iface.iteration()
26586 -
26587 - if max_load is not None:
26588 - # We have to schedule periodically, in case the load
26589 - # average has changed since the last call.
26590 - self._schedule()
26591 -
26592 - # Clean shutdown of previously scheduled jobs. In the
26593 - # case of termination, this allows for basic cleanup
26594 - # such as flushing of buffered output to logs.
26595 - while self._is_work_scheduled():
26596 - self.sched_iface.iteration()
26597 - finally:
26598 - self.sched_iface.source_remove(term_check_id)
26599 + return True
26600
26601 def _is_work_scheduled(self):
26602 return bool(self._running_job_count())
26603
26604 def _running_job_count(self):
26605 - return self._jobs
26606 + raise NotImplementedError(self)
26607
26608 def _can_add_job(self):
26609 if self._terminated_tasks:
26610 @@ -201,47 +158,3 @@ class PollScheduler(object):
26611 return False
26612
26613 return True
26614 -
26615 - def _task_output(self, msg, log_path=None, background=None,
26616 - level=0, noiselevel=-1):
26617 - """
26618 - Output msg to stdout if not self._background. If log_path
26619 - is not None then append msg to the log (appends with
26620 - compression if the filename extension of log_path
26621 - corresponds to a supported compression type).
26622 - """
26623 -
26624 - if background is None:
26625 - # If the task does not have a local background value
26626 - # (like for parallel-fetch), then use the global value.
26627 - background = self._background
26628 -
26629 - msg_shown = False
26630 - if not background:
26631 - writemsg_level(msg, level=level, noiselevel=noiselevel)
26632 - msg_shown = True
26633 -
26634 - if log_path is not None:
26635 - try:
26636 - f = open(_unicode_encode(log_path,
26637 - encoding=_encodings['fs'], errors='strict'),
26638 - mode='ab')
26639 - f_real = f
26640 - except IOError as e:
26641 - if e.errno not in (errno.ENOENT, errno.ESTALE):
26642 - raise
26643 - if not msg_shown:
26644 - writemsg_level(msg, level=level, noiselevel=noiselevel)
26645 - else:
26646 -
26647 - if log_path.endswith('.gz'):
26648 - # NOTE: The empty filename argument prevents us from
26649 - # triggering a bug in python3 which causes GzipFile
26650 - # to raise AttributeError if fileobj.name is bytes
26651 - # instead of unicode.
26652 - f = gzip.GzipFile(filename='', mode='ab', fileobj=f)
26653 -
26654 - f.write(_unicode_encode(msg))
26655 - f.close()
26656 - if f_real is not f:
26657 - f_real.close()
26658
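
_schedule() now always returns True so it can be handed straight to EventLoop.timeout_add(), which follows the usual glib convention: a timeout callback stays scheduled while it returns True and is dropped once it returns False. A toy demonstration of that convention (the mini loop below is ours, not portage's EventLoop):

    import time

    def run_timeouts(callbacks, interval, iterations):
        # Keep re-running each callback until it returns False.
        for _ in range(iterations):
            time.sleep(interval)
            callbacks[:] = [cb for cb in callbacks if cb()]

    ticks = []
    def periodic():
        ticks.append(time.time())
        return len(ticks) < 3      # reschedule until it has run three times

    run_timeouts([periodic], 0.01, 5)
    print(len(ticks))              # -> 3
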
26659 diff --git a/pym/_emerge/QueueScheduler.py b/pym/_emerge/QueueScheduler.py
26660 deleted file mode 100644
26661 index 206087c..0000000
26662 --- a/pym/_emerge/QueueScheduler.py
26663 +++ /dev/null
26664 @@ -1,105 +0,0 @@
26665 -# Copyright 1999-2012 Gentoo Foundation
26666 -# Distributed under the terms of the GNU General Public License v2
26667 -
26668 -from _emerge.PollScheduler import PollScheduler
26669 -
26670 -class QueueScheduler(PollScheduler):
26671 -
26672 - """
26673 - Add instances of SequentialTaskQueue and then call run(). The
26674 - run() method returns when no tasks remain.
26675 - """
26676 -
26677 - def __init__(self, main=True, max_jobs=None, max_load=None):
26678 - PollScheduler.__init__(self, main=main)
26679 -
26680 - if max_jobs is None:
26681 - max_jobs = 1
26682 -
26683 - self._max_jobs = max_jobs
26684 - self._max_load = max_load
26685 -
26686 - self._queues = []
26687 - self._schedule_listeners = []
26688 -
26689 - def add(self, q):
26690 - self._queues.append(q)
26691 -
26692 - def remove(self, q):
26693 - self._queues.remove(q)
26694 -
26695 - def clear(self):
26696 - for q in self._queues:
26697 - q.clear()
26698 -
26699 - def run(self, timeout=None):
26700 -
26701 - timeout_callback = None
26702 - if timeout is not None:
26703 - def timeout_callback():
26704 - timeout_callback.timed_out = True
26705 - return False
26706 - timeout_callback.timed_out = False
26707 - timeout_callback.timeout_id = self.sched_iface.timeout_add(
26708 - timeout, timeout_callback)
26709 -
26710 - term_check_id = self.sched_iface.idle_add(self._termination_check)
26711 - try:
26712 - while not (timeout_callback is not None and
26713 - timeout_callback.timed_out):
26714 - # We don't have any callbacks to trigger _schedule(),
26715 - # so we have to call it explicitly here.
26716 - self._schedule()
26717 - if self._keep_scheduling():
26718 - self.sched_iface.iteration()
26719 - else:
26720 - break
26721 -
26722 - while self._is_work_scheduled() and \
26723 - not (timeout_callback is not None and
26724 - timeout_callback.timed_out):
26725 - self.sched_iface.iteration()
26726 - finally:
26727 - self.sched_iface.source_remove(term_check_id)
26728 - if timeout_callback is not None:
26729 - self.sched_iface.unregister(timeout_callback.timeout_id)
26730 -
26731 - def _schedule_tasks(self):
26732 - """
26733 - @rtype: bool
26734 - @return: True if there may be remaining tasks to schedule,
26735 - False otherwise.
26736 - """
26737 - if self._terminated_tasks:
26738 - return
26739 -
26740 - while self._can_add_job():
26741 - n = self._max_jobs - self._running_job_count()
26742 - if n < 1:
26743 - break
26744 -
26745 - if not self._start_next_job(n):
26746 - return
26747 -
26748 - def _keep_scheduling(self):
26749 - return not self._terminated_tasks and any(self._queues)
26750 -
26751 - def _running_job_count(self):
26752 - job_count = 0
26753 - for q in self._queues:
26754 - job_count += len(q.running_tasks)
26755 - self._jobs = job_count
26756 - return job_count
26757 -
26758 - def _start_next_job(self, n=1):
26759 - started_count = 0
26760 - for q in self._queues:
26761 - initial_job_count = len(q.running_tasks)
26762 - q.schedule()
26763 - final_job_count = len(q.running_tasks)
26764 - if final_job_count > initial_job_count:
26765 - started_count += (final_job_count - initial_job_count)
26766 - if started_count >= n:
26767 - break
26768 - return started_count
26769 -
26770
26771 diff --git a/pym/_emerge/RootConfig.py b/pym/_emerge/RootConfig.py
26772 index bb0d768..3648d01 100644
26773 --- a/pym/_emerge/RootConfig.py
26774 +++ b/pym/_emerge/RootConfig.py
26775 @@ -1,10 +1,10 @@
26776 -# Copyright 1999-2011 Gentoo Foundation
26777 +# Copyright 1999-2013 Gentoo Foundation
26778 # Distributed under the terms of the GNU General Public License v2
26779
26780 class RootConfig(object):
26781 """This is used internally by depgraph to track information about a
26782 particular $ROOT."""
26783 - __slots__ = ("root", "setconfig", "sets", "settings", "trees")
26784 + __slots__ = ("mtimedb", "root", "setconfig", "sets", "settings", "trees")
26785
26786 pkg_tree_map = {
26787 "ebuild" : "porttree",
26788 @@ -31,4 +31,11 @@ class RootConfig(object):
26789 Shallow copy all attributes from another instance.
26790 """
26791 for k in self.__slots__:
26792 - setattr(self, k, getattr(other, k))
26793 + try:
26794 + setattr(self, k, getattr(other, k))
26795 + except AttributeError:
26796 + # mtimedb is currently not a required attribute
26797 + try:
26798 + delattr(self, k)
26799 + except AttributeError:
26800 + pass
26801
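
RootConfig.update() above now tolerates a source instance that lacks the new, optional mtimedb slot: the attribute is simply cleared on the destination instead of raising. The same copy pattern as a standalone sketch (hypothetical class):

    class Cfg:
        __slots__ = ("mtimedb", "root")

        def update(self, other):
            for k in self.__slots__:
                try:
                    setattr(self, k, getattr(other, k))
                except AttributeError:
                    # Optional attribute missing on the source: clear it here too.
                    try:
                        delattr(self, k)
                    except AttributeError:
                        pass

    a, b = Cfg(), Cfg()
    a.root = "/"
    b.root = "/chroot"
    b.mtimedb = {}
    b.update(a)                            # 'a' has no mtimedb, so it is cleared on 'b'
    print(b.root, hasattr(b, "mtimedb"))   # -> / False
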
26802 diff --git a/pym/_emerge/Scheduler.py b/pym/_emerge/Scheduler.py
26803 index 0b72a4c..dd268f7 100644
26804 --- a/pym/_emerge/Scheduler.py
26805 +++ b/pym/_emerge/Scheduler.py
26806 @@ -1,7 +1,7 @@
26807 -# Copyright 1999-2012 Gentoo Foundation
26808 +# Copyright 1999-2014 Gentoo Foundation
26809 # Distributed under the terms of the GNU General Public License v2
26810
26811 -from __future__ import print_function
26812 +from __future__ import print_function, unicode_literals
26813
26814 from collections import deque
26815 import gc
26816 @@ -18,7 +18,7 @@ import zlib
26817 import portage
26818 from portage import os
26819 from portage import _encodings
26820 -from portage import _unicode_decode, _unicode_encode
26821 +from portage import _unicode_encode
26822 from portage.cache.mappings import slot_dict_class
26823 from portage.elog.messages import eerror
26824 from portage.localization import _
26825 @@ -28,6 +28,8 @@ from portage._sets import SETPREFIX
26826 from portage._sets.base import InternalPackageSet
26827 from portage.util import ensure_dirs, writemsg, writemsg_level
26828 from portage.util.SlotObject import SlotObject
26829 +from portage.util._async.SchedulerInterface import SchedulerInterface
26830 +from portage.util._eventloop.EventLoop import EventLoop
26831 from portage.package.ebuild.digestcheck import digestcheck
26832 from portage.package.ebuild.digestgen import digestgen
26833 from portage.package.ebuild.doebuild import (_check_temp_dir,
26834 @@ -50,6 +52,7 @@ from _emerge.EbuildFetcher import EbuildFetcher
26835 from _emerge.EbuildPhase import EbuildPhase
26836 from _emerge.emergelog import emergelog
26837 from _emerge.FakeVartree import FakeVartree
26838 +from _emerge.getloadavg import getloadavg
26839 from _emerge._find_deep_system_runtime_deps import _find_deep_system_runtime_deps
26840 from _emerge._flush_elog_mod_echo import _flush_elog_mod_echo
26841 from _emerge.JobStatusDisplay import JobStatusDisplay
26842 @@ -64,6 +67,9 @@ if sys.hexversion >= 0x3000000:
26843
26844 class Scheduler(PollScheduler):
26845
26846 + # max time between loadavg checks (milliseconds)
26847 + _loadavg_latency = 30000
26848 +
26849 # max time between display status updates (milliseconds)
26850 _max_display_latency = 3000
26851
26852 @@ -79,7 +85,7 @@ class Scheduler(PollScheduler):
26853 _opts_no_self_update = frozenset(["--buildpkgonly",
26854 "--fetchonly", "--fetch-all-uri", "--pretend"])
26855
26856 - class _iface_class(PollScheduler._sched_iface_class):
26857 + class _iface_class(SchedulerInterface):
26858 __slots__ = ("fetch",
26859 "scheduleSetup", "scheduleUnpack")
26860
26861 @@ -135,8 +141,7 @@ class Scheduler(PollScheduler):
26862 portage.exception.PortageException.__init__(self, value)
26863
26864 def __init__(self, settings, trees, mtimedb, myopts,
26865 - spinner, mergelist=None, favorites=None, graph_config=None,
26866 - uninstall_only=False):
26867 + spinner, mergelist=None, favorites=None, graph_config=None):
26868 PollScheduler.__init__(self, main=True)
26869
26870 if mergelist is not None:
26871 @@ -152,7 +157,6 @@ class Scheduler(PollScheduler):
26872 self._spinner = spinner
26873 self._mtimedb = mtimedb
26874 self._favorites = favorites
26875 - self._uninstall_only = uninstall_only
26876 self._args_set = InternalPackageSet(favorites, allow_repo=True)
26877 self._build_opts = self._build_opts_class()
26878
26879 @@ -161,6 +165,8 @@ class Scheduler(PollScheduler):
26880 self._build_opts.buildpkg_exclude = InternalPackageSet( \
26881 initial_atoms=" ".join(myopts.get("--buildpkg-exclude", [])).split(), \
26882 allow_wildcard=True, allow_repo=True)
26883 + if "mirror" in self.settings.features:
26884 + self._build_opts.fetch_all_uri = True
26885
26886 self._binpkg_opts = self._binpkg_opts_class()
26887 for k in self._binpkg_opts.__slots__:
26888 @@ -217,14 +223,15 @@ class Scheduler(PollScheduler):
26889 fetch_iface = self._fetch_iface_class(log_file=self._fetch_log,
26890 schedule=self._schedule_fetch)
26891 self._sched_iface = self._iface_class(
26892 + self._event_loop,
26893 + is_background=self._is_background,
26894 fetch=fetch_iface,
26895 scheduleSetup=self._schedule_setup,
26896 - scheduleUnpack=self._schedule_unpack,
26897 - **dict((k, getattr(self.sched_iface, k))
26898 - for k in self.sched_iface.__slots__))
26899 + scheduleUnpack=self._schedule_unpack)
26900
26901 self._prefetchers = weakref.WeakValueDictionary()
26902 self._pkg_queue = []
26903 + self._jobs = 0
26904 self._running_tasks = {}
26905 self._completed_tasks = set()
26906
26907 @@ -243,10 +250,15 @@ class Scheduler(PollScheduler):
26908 # The load average takes some time to respond when new
26909 # jobs are added, so we need to limit the rate of adding
26910 # new jobs.
26911 - self._job_delay_max = 10
26912 - self._job_delay_factor = 1.0
26913 - self._job_delay_exp = 1.5
26914 + self._job_delay_max = 5
26915 self._previous_job_start_time = None
26916 + self._job_delay_timeout_id = None
26917 +
26918 + # The load average takes some time to respond when after
26919 + # a SIGSTOP/SIGCONT cycle, so delay scheduling for some
26920 + # time after SIGCONT is received.
26921 + self._sigcont_delay = 5
26922 + self._sigcont_time = None
26923
26924 # This is used to memoize the _choose_pkg() result when
26925 # no packages can be chosen until one of the existing
26926 @@ -300,15 +312,10 @@ class Scheduler(PollScheduler):
26927 if not portage.dep.match_from_list(
26928 portage.const.PORTAGE_PACKAGE_ATOM, [x]):
26929 continue
26930 - if self._running_portage is None or \
26931 - self._running_portage.cpv != x.cpv or \
26932 - '9999' in x.cpv or \
26933 - 'git' in x.inherited or \
26934 - 'git-2' in x.inherited:
26935 - rval = _check_temp_dir(self.settings)
26936 - if rval != os.EX_OK:
26937 - return rval
26938 - _prepare_self_update(self.settings)
26939 + rval = _check_temp_dir(self.settings)
26940 + if rval != os.EX_OK:
26941 + return rval
26942 + _prepare_self_update(self.settings)
26943 break
26944
26945 return os.EX_OK
26946 @@ -328,15 +335,13 @@ class Scheduler(PollScheduler):
26947 self._set_graph_config(graph_config)
26948 self._blocker_db = {}
26949 dynamic_deps = self.myopts.get("--dynamic-deps", "y") != "n"
26950 - ignore_built_slot_abi_deps = self.myopts.get(
26951 - "--ignore-built-slot-abi-deps", "n") == "y"
26952 + ignore_built_slot_operator_deps = self.myopts.get(
26953 + "--ignore-built-slot-operator-deps", "n") == "y"
26954 for root in self.trees:
26955 - if self._uninstall_only:
26956 - continue
26957 if graph_config is None:
26958 fake_vartree = FakeVartree(self.trees[root]["root_config"],
26959 pkg_cache=self._pkg_cache, dynamic_deps=dynamic_deps,
26960 - ignore_built_slot_abi_deps=ignore_built_slot_abi_deps)
26961 + ignore_built_slot_operator_deps=ignore_built_slot_operator_deps)
26962 fake_vartree.sync()
26963 else:
26964 fake_vartree = graph_config.trees[root]['vartree']
26965 @@ -413,7 +418,7 @@ class Scheduler(PollScheduler):
26966 if not (isinstance(task, Package) and \
26967 task.operation == "merge"):
26968 continue
26969 - if 'interactive' in task.metadata.properties:
26970 + if 'interactive' in task.properties:
26971 interactive_tasks.append(task)
26972 return interactive_tasks
26973
26974 @@ -658,10 +663,11 @@ class Scheduler(PollScheduler):
26975 if value and value.strip():
26976 continue
26977 msg = _("%(var)s is not set... "
26978 - "Are you missing the '%(configroot)setc/make.profile' symlink? "
26979 + "Are you missing the '%(configroot)s%(profile_path)s' symlink? "
26980 "Is the symlink correct? "
26981 "Is your portage tree complete?") % \
26982 - {"var": var, "configroot": settings["PORTAGE_CONFIGROOT"]}
26983 + {"var": var, "configroot": settings["PORTAGE_CONFIGROOT"],
26984 + "profile_path": portage.const.PROFILE_PATH}
26985
26986 out = portage.output.EOutput()
26987 for line in textwrap.wrap(msg, 70):
26988 @@ -721,7 +727,6 @@ class Scheduler(PollScheduler):
26989 return
26990
26991 if self._parallel_fetch:
26992 - self._status_msg("Starting parallel fetch")
26993
26994 prefetchers = self._prefetchers
26995
26996 @@ -753,7 +758,8 @@ class Scheduler(PollScheduler):
26997 prefetcher = EbuildFetcher(background=True,
26998 config_pool=self._ConfigPool(pkg.root,
26999 self._allocate_config, self._deallocate_config),
27000 - fetchonly=1, logfile=self._fetch_log,
27001 + fetchonly=1, fetchall=self._build_opts.fetch_all_uri,
27002 + logfile=self._fetch_log,
27003 pkg=pkg, prefetch=True, scheduler=self._sched_iface)
27004
27005 elif pkg.type_name == "binary" and \
27006 @@ -774,10 +780,10 @@ class Scheduler(PollScheduler):
27007
27008 failures = 0
27009
27010 - # Use a local PollScheduler instance here, since we don't
27011 + # Use a local EventLoop instance here, since we don't
27012 # want tasks here to trigger the usual Scheduler callbacks
27013 # that handle job scheduling and status display.
27014 - sched_iface = PollScheduler().sched_iface
27015 + sched_iface = SchedulerInterface(EventLoop(main=False))
27016
27017 for x in self._mergelist:
27018 if not isinstance(x, Package):
27019 @@ -786,10 +792,10 @@ class Scheduler(PollScheduler):
27020 if x.operation == "uninstall":
27021 continue
27022
27023 - if x.metadata["EAPI"] in ("0", "1", "2", "3"):
27024 + if x.eapi in ("0", "1", "2", "3"):
27025 continue
27026
27027 - if "pretend" not in x.metadata.defined_phases:
27028 + if "pretend" not in x.defined_phases:
27029 continue
27030
27031 out_str =">>> Running pre-merge checks for " + colorize("INFORM", x.cpv) + "\n"
27032 @@ -808,7 +814,7 @@ class Scheduler(PollScheduler):
27033 build_dir_path = os.path.join(
27034 os.path.realpath(settings["PORTAGE_TMPDIR"]),
27035 "portage", x.category, x.pf)
27036 - existing_buildir = os.path.isdir(build_dir_path)
27037 + existing_builddir = os.path.isdir(build_dir_path)
27038 settings["PORTAGE_BUILDDIR"] = build_dir_path
27039 build_dir = EbuildBuildDir(scheduler=sched_iface,
27040 settings=settings)
27041 @@ -819,7 +825,7 @@ class Scheduler(PollScheduler):
27042
27043 # Clean up the existing build dir, in case pkg_pretend
27044 # checks for available space (bug #390711).
27045 - if existing_buildir:
27046 + if existing_builddir:
27047 if x.built:
27048 tree = "bintree"
27049 infloc = os.path.join(build_dir_path, "build-info")
27050 @@ -908,13 +914,18 @@ class Scheduler(PollScheduler):
27051 failures += 1
27052 portage.elog.elog_process(x.cpv, settings)
27053 finally:
27054 - if current_task is not None and current_task.isAlive():
27055 - current_task.cancel()
27056 - current_task.wait()
27057 - clean_phase = EbuildPhase(background=False,
27058 - phase='clean', scheduler=sched_iface, settings=settings)
27059 - clean_phase.start()
27060 - clean_phase.wait()
27061 +
27062 + if current_task is not None:
27063 + if current_task.isAlive():
27064 + current_task.cancel()
27065 + current_task.wait()
27066 + if current_task.returncode == os.EX_OK:
27067 + clean_phase = EbuildPhase(background=False,
27068 + phase='clean', scheduler=sched_iface,
27069 + settings=settings)
27070 + clean_phase.start()
27071 + clean_phase.wait()
27072 +
27073 build_dir.unlock()
27074
27075 if failures:
27076 @@ -1004,6 +1015,8 @@ class Scheduler(PollScheduler):
27077
27078 earlier_sigint_handler = signal.signal(signal.SIGINT, sighandler)
27079 earlier_sigterm_handler = signal.signal(signal.SIGTERM, sighandler)
27080 + earlier_sigcont_handler = \
27081 + signal.signal(signal.SIGCONT, self._sigcont_handler)
27082
27083 try:
27084 rval = self._merge()
27085 @@ -1017,6 +1030,10 @@ class Scheduler(PollScheduler):
27086 signal.signal(signal.SIGTERM, earlier_sigterm_handler)
27087 else:
27088 signal.signal(signal.SIGTERM, signal.SIG_DFL)
27089 + if earlier_sigcont_handler is not None:
27090 + signal.signal(signal.SIGCONT, earlier_sigcont_handler)
27091 + else:
27092 + signal.signal(signal.SIGCONT, signal.SIG_DFL)
27093
27094 if received_signal:
27095 sys.exit(received_signal[0])
27096 @@ -1063,7 +1080,8 @@ class Scheduler(PollScheduler):
27097 printer = portage.output.EOutput()
27098 background = self._background
27099 failure_log_shown = False
27100 - if background and len(self._failed_pkgs_all) == 1:
27101 + if background and len(self._failed_pkgs_all) == 1 and \
27102 + self.myopts.get('--quiet-fail', 'n') != 'y':
27103 # If only one package failed then just show its
27104 # whole log for easy viewing.
27105 failed_pkg = self._failed_pkgs_all[-1]
27106 @@ -1142,9 +1160,9 @@ class Scheduler(PollScheduler):
27107 printer.eerror(line)
27108 printer.eerror("")
27109 for failed_pkg in self._failed_pkgs_all:
27110 - # Use _unicode_decode() to force unicode format string so
27111 + # Use unicode_literals to force unicode format string so
27112 # that Package.__unicode__() is called in python2.
27113 - msg = _unicode_decode(" %s") % (failed_pkg.pkg,)
27114 + msg = " %s" % (failed_pkg.pkg,)
27115 log_path = self._locate_failure_log(failed_pkg)
27116 if log_path is not None:
27117 msg += ", Log file:"
27118 @@ -1341,6 +1359,38 @@ class Scheduler(PollScheduler):
27119 blocker_db = self._blocker_db[pkg.root]
27120 blocker_db.discardBlocker(pkg)
27121
27122 + def _main_loop(self):
27123 + term_check_id = self._event_loop.idle_add(self._termination_check)
27124 + loadavg_check_id = None
27125 + if self._max_load is not None and \
27126 + self._loadavg_latency is not None and \
27127 + (self._max_jobs is True or self._max_jobs > 1):
27128 + # We have to schedule periodically, in case the load
27129 + # average has changed since the last call.
27130 + loadavg_check_id = self._event_loop.timeout_add(
27131 + self._loadavg_latency, self._schedule)
27132 +
27133 + try:
27134 + # Populate initial event sources. Unless we're scheduling
27135 + # based on load average, we only need to do this once
27136 + # here, since it can be called during the loop from within
27137 + # event handlers.
27138 + self._schedule()
27139 +
27140 + # Loop while there are jobs to be scheduled.
27141 + while self._keep_scheduling():
27142 + self._event_loop.iteration()
27143 +
27144 + # Clean shutdown of previously scheduled jobs. In the
27145 + # case of termination, this allows for basic cleanup
27146 + # such as flushing of buffered output to logs.
27147 + while self._is_work_scheduled():
27148 + self._event_loop.iteration()
27149 + finally:
27150 + self._event_loop.source_remove(term_check_id)
27151 + if loadavg_check_id is not None:
27152 + self._event_loop.source_remove(loadavg_check_id)
27153 +
27154 def _merge(self):
27155
27156 if self._opts_no_background.intersection(self.myopts):
27157 @@ -1351,8 +1401,10 @@ class Scheduler(PollScheduler):
27158 failed_pkgs = self._failed_pkgs
27159 portage.locks._quiet = self._background
27160 portage.elog.add_listener(self._elog_listener)
27161 - display_timeout_id = self.sched_iface.timeout_add(
27162 - self._max_display_latency, self._status_display.display)
27163 + display_timeout_id = None
27164 + if self._status_display._isatty and not self._status_display.quiet:
27165 + display_timeout_id = self._event_loop.timeout_add(
27166 + self._max_display_latency, self._status_display.display)
27167 rval = os.EX_OK
27168
27169 try:
27170 @@ -1361,7 +1413,8 @@ class Scheduler(PollScheduler):
27171 self._main_loop_cleanup()
27172 portage.locks._quiet = False
27173 portage.elog.remove_listener(self._elog_listener)
27174 - self.sched_iface.source_remove(display_timeout_id)
27175 + if display_timeout_id is not None:
27176 + self._event_loop.source_remove(display_timeout_id)
27177 if failed_pkgs:
27178 rval = failed_pkgs[-1].returncode
27179
27180 @@ -1493,12 +1546,15 @@ class Scheduler(PollScheduler):
27181 self._config_pool[settings['EROOT']].append(settings)
27182
27183 def _keep_scheduling(self):
27184 - return bool(not self._terminated_tasks and self._pkg_queue and \
27185 + return bool(not self._terminated.is_set() and self._pkg_queue and \
27186 not (self._failed_pkgs and not self._build_opts.fetchonly))
27187
27188 def _is_work_scheduled(self):
27189 return bool(self._running_tasks)
27190
27191 + def _running_job_count(self):
27192 + return self._jobs
27193 +
27194 def _schedule_tasks(self):
27195
27196 while True:
27197 @@ -1539,6 +1595,9 @@ class Scheduler(PollScheduler):
27198 not self._task_queues.merge)):
27199 break
27200
27201 + def _sigcont_handler(self, signum, frame):
27202 + self._sigcont_time = time.time()
27203 +
27204 def _job_delay(self):
27205 """
27206 @rtype: bool
27207 @@ -1549,14 +1608,53 @@ class Scheduler(PollScheduler):
27208
27209 current_time = time.time()
27210
27211 - delay = self._job_delay_factor * self._jobs ** self._job_delay_exp
27212 + if self._sigcont_time is not None:
27213 +
27214 + elapsed_seconds = current_time - self._sigcont_time
27215 + # elapsed_seconds < 0 means the system clock has been adjusted
27216 + if elapsed_seconds > 0 and \
27217 + elapsed_seconds < self._sigcont_delay:
27218 +
27219 + if self._job_delay_timeout_id is not None:
27220 + self._event_loop.source_remove(
27221 + self._job_delay_timeout_id)
27222 +
27223 + self._job_delay_timeout_id = self._event_loop.timeout_add(
27224 + 1000 * (self._sigcont_delay - elapsed_seconds),
27225 + self._schedule_once)
27226 + return True
27227 +
27228 + # Only set this to None after the delay has expired,
27229 + # since this method may be called again before the
27230 + # delay has expired.
27231 + self._sigcont_time = None
27232 +
27233 + try:
27234 + avg1, avg5, avg15 = getloadavg()
27235 + except OSError:
27236 + return False
27237 +
27238 + delay = self._job_delay_max * avg1 / self._max_load
27239 if delay > self._job_delay_max:
27240 delay = self._job_delay_max
27241 - if (current_time - self._previous_job_start_time) < delay:
27242 + elapsed_seconds = current_time - self._previous_job_start_time
27243 + # elapsed_seconds < 0 means the system clock has been adjusted
27244 + if elapsed_seconds > 0 and elapsed_seconds < delay:
27245 +
27246 + if self._job_delay_timeout_id is not None:
27247 + self._event_loop.source_remove(
27248 + self._job_delay_timeout_id)
27249 +
27250 + self._job_delay_timeout_id = self._event_loop.timeout_add(
27251 + 1000 * (delay - elapsed_seconds), self._schedule_once)
27252 return True
27253
27254 return False
27255
27256 + def _schedule_once(self):
27257 + self._schedule()
27258 + return False
27259 +
27260 def _schedule_tasks_imp(self):
27261 """
27262 @rtype: bool
27263 @@ -1738,7 +1836,7 @@ class Scheduler(PollScheduler):
27264 # scope
27265 e = exc
27266 mydepgraph = e.depgraph
27267 - dropped_tasks = set()
27268 + dropped_tasks = {}
27269
27270 if e is not None:
27271 def unsatisfied_resume_dep_msg():
27272 @@ -1775,11 +1873,7 @@ class Scheduler(PollScheduler):
27273 return False
27274
27275 if success and self._show_list():
27276 - mylist = mydepgraph.altlist()
27277 - if mylist:
27278 - if "--tree" in self.myopts:
27279 - mylist.reverse()
27280 - mydepgraph.display(mylist, favorites=self._favorites)
27281 + mydepgraph.display(mydepgraph.altlist(), favorites=self._favorites)
27282
27283 if not success:
27284 self._post_mod_echo_msgs.append(mydepgraph.display_problems)
27285 @@ -1788,7 +1882,7 @@ class Scheduler(PollScheduler):
27286 self._init_graph(mydepgraph.schedulerGraph())
27287
27288 msg_width = 75
27289 - for task in dropped_tasks:
27290 + for task, atoms in dropped_tasks.items():
27291 if not (isinstance(task, Package) and task.operation == "merge"):
27292 continue
27293 pkg = task
27294 @@ -1796,7 +1890,10 @@ class Scheduler(PollScheduler):
27295 " %s" % (pkg.cpv,)
27296 if pkg.root_config.settings["ROOT"] != "/":
27297 msg += " for %s" % (pkg.root,)
27298 - msg += " dropped due to unsatisfied dependency."
27299 + if not atoms:
27300 + msg += " dropped because it is masked or unavailable"
27301 + else:
27302 + msg += " dropped because it requires %s" % ", ".join(atoms)
27303 for line in textwrap.wrap(msg, msg_width):
27304 eerror(line, phase="other", key=pkg.cpv)
27305 settings = self.pkgsettings[pkg.root]
27306 @@ -1841,11 +1938,21 @@ class Scheduler(PollScheduler):
27307 root_config = pkg.root_config
27308 world_set = root_config.sets["selected"]
27309 world_locked = False
27310 - if hasattr(world_set, "lock"):
27311 - world_set.lock()
27312 - world_locked = True
27313 + atom = None
27314 +
27315 + if pkg.operation != "uninstall":
27316 + # Do this before acquiring the lock, since it queries the
27317 + # portdbapi which can call the global event loop, triggering
27318 + # a concurrent call to this method or something else that
27319 + # needs an exclusive (non-reentrant) lock on the world file.
27320 + atom = create_world_atom(pkg, args_set, root_config)
27321
27322 try:
27323 +
27324 + if hasattr(world_set, "lock"):
27325 + world_set.lock()
27326 + world_locked = True
27327 +
27328 if hasattr(world_set, "load"):
27329 world_set.load() # maybe it's changed on disk
27330
27331 @@ -1857,8 +1964,7 @@ class Scheduler(PollScheduler):
27332 for s in pkg.root_config.setconfig.active:
27333 world_set.remove(SETPREFIX+s)
27334 else:
27335 - atom = create_world_atom(pkg, args_set, root_config)
27336 - if atom:
27337 + if atom is not None:
27338 if hasattr(world_set, "add"):
27339 self._status_msg(('Recording %s in "world" ' + \
27340 'favorites file...') % atom)
27341
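
The rewritten _job_delay() above throttles new job starts by the one-minute load average: delay = _job_delay_max * avg1 / max_load, capped at _job_delay_max (now 5 seconds), with the wait serviced by an event-loop timeout rather than by polling. A quick worked example of the formula (numbers are illustrative):

    def job_delay(avg1, max_load, job_delay_max=5):
        # Scale the pause between job starts with the current load,
        # but never wait longer than job_delay_max seconds.
        return min(job_delay_max * avg1 / max_load, job_delay_max)

    print(job_delay(1.0, 4))   # -> 1.25 s on a lightly loaded machine
    print(job_delay(8.0, 4))   # -> 5 s (capped) when the load is high
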
27342 diff --git a/pym/_emerge/SpawnProcess.py b/pym/_emerge/SpawnProcess.py
27343 index 9fbc964..15d3dc5 100644
27344 --- a/pym/_emerge/SpawnProcess.py
27345 +++ b/pym/_emerge/SpawnProcess.py
27346 @@ -1,17 +1,23 @@
27347 -# Copyright 1999-2011 Gentoo Foundation
27348 +# Copyright 2008-2013 Gentoo Foundation
27349 # Distributed under the terms of the GNU General Public License v2
27350
27351 -from _emerge.SubProcess import SubProcess
27352 +try:
27353 + import fcntl
27354 +except ImportError:
27355 + # http://bugs.jython.org/issue1074
27356 + fcntl = None
27357 +
27358 +import errno
27359 +import logging
27360 +import signal
27361 import sys
27362 -from portage.cache.mappings import slot_dict_class
27363 +
27364 +from _emerge.SubProcess import SubProcess
27365 import portage
27366 -from portage import _encodings
27367 -from portage import _unicode_encode
27368 from portage import os
27369 from portage.const import BASH_BINARY
27370 -import fcntl
27371 -import errno
27372 -import gzip
27373 +from portage.util import writemsg_level
27374 +from portage.util._async.PipeLogger import PipeLogger
27375
27376 class SpawnProcess(SubProcess):
27377
27378 @@ -23,31 +29,27 @@ class SpawnProcess(SubProcess):
27379
27380 _spawn_kwarg_names = ("env", "opt_name", "fd_pipes",
27381 "uid", "gid", "groups", "umask", "logfile",
27382 - "path_lookup", "pre_exec")
27383 + "path_lookup", "pre_exec", "close_fds", "cgroup",
27384 + "unshare_ipc", "unshare_net")
27385
27386 __slots__ = ("args",) + \
27387 - _spawn_kwarg_names + ("_log_file_real", "_selinux_type",)
27388 -
27389 - _file_names = ("log", "process", "stdout")
27390 - _files_dict = slot_dict_class(_file_names, prefix="")
27391 + _spawn_kwarg_names + ("_pipe_logger", "_selinux_type",)
27392
27393 def _start(self):
27394
27395 if self.fd_pipes is None:
27396 self.fd_pipes = {}
27397 + else:
27398 + self.fd_pipes = self.fd_pipes.copy()
27399 fd_pipes = self.fd_pipes
27400
27401 - self._files = self._files_dict()
27402 - files = self._files
27403 -
27404 master_fd, slave_fd = self._pipe(fd_pipes)
27405 - fcntl.fcntl(master_fd, fcntl.F_SETFL,
27406 - fcntl.fcntl(master_fd, fcntl.F_GETFL) | os.O_NONBLOCK)
27407 - files.process = master_fd
27408
27409 - logfile = None
27410 - if self._can_log(slave_fd):
27411 - logfile = self.logfile
27412 + can_log = self._can_log(slave_fd)
27413 + if can_log:
27414 + log_file_path = self.logfile
27415 + else:
27416 + log_file_path = None
27417
27418 null_input = None
27419 if not self.background or 0 in fd_pipes:
27420 @@ -62,48 +64,34 @@ class SpawnProcess(SubProcess):
27421 null_input = os.open('/dev/null', os.O_RDWR)
27422 fd_pipes[0] = null_input
27423
27424 - fd_pipes.setdefault(0, sys.stdin.fileno())
27425 - fd_pipes.setdefault(1, sys.stdout.fileno())
27426 - fd_pipes.setdefault(2, sys.stderr.fileno())
27427 + fd_pipes.setdefault(0, portage._get_stdin().fileno())
27428 + fd_pipes.setdefault(1, sys.__stdout__.fileno())
27429 + fd_pipes.setdefault(2, sys.__stderr__.fileno())
27430
27431 # flush any pending output
27432 + stdout_filenos = (sys.__stdout__.fileno(), sys.__stderr__.fileno())
27433 for fd in fd_pipes.values():
27434 - if fd == sys.stdout.fileno():
27435 - sys.stdout.flush()
27436 - if fd == sys.stderr.fileno():
27437 - sys.stderr.flush()
27438 + if fd in stdout_filenos:
27439 + sys.__stdout__.flush()
27440 + sys.__stderr__.flush()
27441 + break
27442
27443 - if logfile is not None:
27444 + fd_pipes_orig = fd_pipes.copy()
27445
27446 - fd_pipes_orig = fd_pipes.copy()
27447 + if log_file_path is not None or self.background:
27448 fd_pipes[1] = slave_fd
27449 fd_pipes[2] = slave_fd
27450
27451 - files.log = open(_unicode_encode(logfile,
27452 - encoding=_encodings['fs'], errors='strict'), mode='ab')
27453 - if logfile.endswith('.gz'):
27454 - self._log_file_real = files.log
27455 - files.log = gzip.GzipFile(filename='', mode='ab',
27456 - fileobj=files.log)
27457 -
27458 - portage.util.apply_secpass_permissions(logfile,
27459 - uid=portage.portage_uid, gid=portage.portage_gid,
27460 - mode=0o660)
27461 -
27462 - if not self.background:
27463 - files.stdout = os.dup(fd_pipes_orig[1])
27464 -
27465 - output_handler = self._output_handler
27466 -
27467 else:
27468 -
27469 - # Create a dummy pipe so the scheduler can monitor
27470 - # the process from inside a poll() loop.
27471 - fd_pipes[self._dummy_pipe_fd] = slave_fd
27472 - if self.background:
27473 - fd_pipes[1] = slave_fd
27474 - fd_pipes[2] = slave_fd
27475 - output_handler = self._dummy_handler
27476 + # Create a dummy pipe that PipeLogger uses to efficiently
27477 + # monitor for process exit by listening for the EOF event.
27478 + # Re-use of the allocated fd number for the key in fd_pipes
27479 + # guarantees that the keys will not collide for similarly
27480 + # allocated pipes which are used by callers such as
27481 + # FileDigester and MergeProcess. See the _setup_pipes
27482 + # docstring for more benefits of this allocation approach.
27483 + self._dummy_pipe_fd = slave_fd
27484 + fd_pipes[slave_fd] = slave_fd
27485
27486 kwargs = {}
27487 for k in self._spawn_kwarg_names:
27488 @@ -115,10 +103,6 @@ class SpawnProcess(SubProcess):
27489 kwargs["returnpid"] = True
27490 kwargs.pop("logfile", None)
27491
27492 - self._reg_id = self.scheduler.register(files.process,
27493 - self._registered_events, output_handler)
27494 - self._registered = True
27495 -
27496 retval = self._spawn(self.args, **kwargs)
27497
27498 os.close(slave_fd)
27499 @@ -129,11 +113,32 @@ class SpawnProcess(SubProcess):
27500 # spawn failed
27501 self._unregister()
27502 self._set_returncode((self.pid, retval))
27503 - self.wait()
27504 + self._async_wait()
27505 return
27506
27507 self.pid = retval[0]
27508 - portage.process.spawned_pids.remove(self.pid)
27509 +
27510 + stdout_fd = None
27511 + if can_log and not self.background:
27512 + stdout_fd = os.dup(fd_pipes_orig[1])
27513 + # FD_CLOEXEC is enabled by default in Python >=3.4.
27514 + if sys.hexversion < 0x3040000 and fcntl is not None:
27515 + try:
27516 + fcntl.FD_CLOEXEC
27517 + except AttributeError:
27518 + pass
27519 + else:
27520 + fcntl.fcntl(stdout_fd, fcntl.F_SETFD,
27521 + fcntl.fcntl(stdout_fd,
27522 + fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
27523 +
27524 + self._pipe_logger = PipeLogger(background=self.background,
27525 + scheduler=self.scheduler, input_fd=master_fd,
27526 + log_file_path=log_file_path,
27527 + stdout_fd=stdout_fd)
27528 + self._pipe_logger.addExitListener(self._pipe_logger_exit)
27529 + self._pipe_logger.start()
27530 + self._registered = True
27531
27532 def _can_log(self, slave_fd):
27533 return True
27534 @@ -157,92 +162,56 @@ class SpawnProcess(SubProcess):
27535
27536 return spawn_func(args, **kwargs)
27537
27538 - def _output_handler(self, fd, event):
27539 -
27540 - files = self._files
27541 - while True:
27542 - buf = self._read_buf(fd, event)
27543 -
27544 - if buf is None:
27545 - # not a POLLIN event, EAGAIN, etc...
27546 - break
27547 -
27548 - if not buf:
27549 - # EOF
27550 - self._unregister()
27551 - self.wait()
27552 - break
27553 -
27554 - else:
27555 - if not self.background:
27556 - write_successful = False
27557 - failures = 0
27558 - while True:
27559 - try:
27560 - if not write_successful:
27561 - os.write(files.stdout, buf)
27562 - write_successful = True
27563 - break
27564 - except OSError as e:
27565 - if e.errno != errno.EAGAIN:
27566 - raise
27567 - del e
27568 - failures += 1
27569 - if failures > 50:
27570 - # Avoid a potentially infinite loop. In
27571 - # most cases, the failure count is zero
27572 - # and it's unlikely to exceed 1.
27573 - raise
27574 -
27575 - # This means that a subprocess has put an inherited
27576 - # stdio file descriptor (typically stdin) into
27577 - # O_NONBLOCK mode. This is not acceptable (see bug
27578 - # #264435), so revert it. We need to use a loop
27579 - # here since there's a race condition due to
27580 - # parallel processes being able to change the
27581 - # flags on the inherited file descriptor.
27582 - # TODO: When possible, avoid having child processes
27583 - # inherit stdio file descriptors from portage
27584 - # (maybe it can't be avoided with
27585 - # PROPERTIES=interactive).
27586 - fcntl.fcntl(files.stdout, fcntl.F_SETFL,
27587 - fcntl.fcntl(files.stdout,
27588 - fcntl.F_GETFL) ^ os.O_NONBLOCK)
27589 -
27590 - files.log.write(buf)
27591 - files.log.flush()
27592 -
27593 - self._unregister_if_appropriate(event)
27594 -
27595 - return True
27596 -
27597 - def _dummy_handler(self, fd, event):
27598 - """
27599 - This method is mainly interested in detecting EOF, since
27600 - the only purpose of the pipe is to allow the scheduler to
27601 - monitor the process from inside a poll() loop.
27602 - """
27603 -
27604 - while True:
27605 - buf = self._read_buf(fd, event)
27606 -
27607 - if buf is None:
27608 - # not a POLLIN event, EAGAIN, etc...
27609 - break
27610 -
27611 - if not buf:
27612 - # EOF
27613 - self._unregister()
27614 - self.wait()
27615 - break
27616 -
27617 - self._unregister_if_appropriate(event)
27618 -
27619 - return True
27620 -
27621 - def _unregister(self):
27622 - super(SpawnProcess, self)._unregister()
27623 - if self._log_file_real is not None:
27624 - # Avoid "ResourceWarning: unclosed file" since python 3.2.
27625 - self._log_file_real.close()
27626 - self._log_file_real = None
27627 + def _pipe_logger_exit(self, pipe_logger):
27628 + self._pipe_logger = None
27629 + self._unregister()
27630 + self.wait()
27631 +
27632 + def _waitpid_loop(self):
27633 + SubProcess._waitpid_loop(self)
27634 +
27635 + pipe_logger = self._pipe_logger
27636 + if pipe_logger is not None:
27637 + self._pipe_logger = None
27638 + pipe_logger.removeExitListener(self._pipe_logger_exit)
27639 + pipe_logger.cancel()
27640 + pipe_logger.wait()
27641 +
27642 + def _set_returncode(self, wait_retval):
27643 + SubProcess._set_returncode(self, wait_retval)
27644 +
27645 + if self.cgroup:
27646 + def get_pids(cgroup):
27647 + try:
27648 + with open(os.path.join(cgroup, 'cgroup.procs'), 'r') as f:
27649 + return [int(p) for p in f.read().split()]
27650 + except OSError:
27651 + # cgroup removed already?
27652 + return []
27653 +
27654 + def kill_all(pids, sig):
27655 + for p in pids:
27656 + try:
27657 + os.kill(p, sig)
27658 + except OSError as e:
27659 + if e.errno == errno.EPERM:
27660 + # Reported with hardened kernel (bug #358211).
27661 + writemsg_level(
27662 + "!!! kill: (%i) - Operation not permitted\n" %
27663 + (p,), level=logging.ERROR,
27664 + noiselevel=-1)
27665 + elif e.errno != errno.ESRCH:
27666 + raise
27667 +
27668 + # step 1: kill all orphans
27669 + pids = get_pids(self.cgroup)
27670 + if pids:
27671 + kill_all(pids, signal.SIGKILL)
27672 +
27673 + # step 2: remove the cgroup
27674 + try:
27675 + os.rmdir(self.cgroup)
27676 + except OSError:
27677 + # it may be removed already, or busy
27678 + # we can't do anything good about it
27679 + pass
27680
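
The cgroup cleanup added to _set_returncode() above boils down to a small pattern: read the surviving PIDs from cgroup.procs, SIGKILL them, then remove the now-empty cgroup directory. A minimal standalone sketch of that pattern, independent of Portage's classes (the cgroup path and error reporting are placeholders):

    import errno
    import os
    import signal

    def cleanup_cgroup(cgroup_path):
        """Kill any processes left in a cgroup, then remove the cgroup dir.

        Sketch only: assumes cgroup_path points at a cgroup whose
        'cgroup.procs' file lists member PIDs, one per line.
        """
        try:
            with open(os.path.join(cgroup_path, 'cgroup.procs')) as f:
                pids = [int(p) for p in f.read().split()]
        except OSError:
            pids = []  # cgroup may already be gone

        for pid in pids:
            try:
                os.kill(pid, signal.SIGKILL)
            except OSError as e:
                # EPERM/ESRCH are tolerated, anything else is unexpected
                if e.errno not in (errno.EPERM, errno.ESRCH):
                    raise

        try:
            os.rmdir(cgroup_path)  # only succeeds once the cgroup is empty
        except OSError:
            pass  # already removed, or still busy; nothing useful to do
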
27681 diff --git a/pym/_emerge/SubProcess.py b/pym/_emerge/SubProcess.py
27682 index 76b313f..13d9382 100644
27683 --- a/pym/_emerge/SubProcess.py
27684 +++ b/pym/_emerge/SubProcess.py
27685 @@ -1,7 +1,10 @@
27686 -# Copyright 1999-2012 Gentoo Foundation
27687 +# Copyright 1999-2013 Gentoo Foundation
27688 # Distributed under the terms of the GNU General Public License v2
27689
27690 +import logging
27691 +
27692 from portage import os
27693 +from portage.util import writemsg_level
27694 from _emerge.AbstractPollTask import AbstractPollTask
27695 import signal
27696 import errno
27697 @@ -9,12 +12,7 @@ import errno
27698 class SubProcess(AbstractPollTask):
27699
27700 __slots__ = ("pid",) + \
27701 - ("_files", "_reg_id")
27702 -
27703 - # A file descriptor is required for the scheduler to monitor changes from
27704 - # inside a poll() loop. When logging is not enabled, create a pipe just to
27705 - # serve this purpose alone.
27706 - _dummy_pipe_fd = 9
27707 + ("_dummy_pipe_fd", "_files", "_reg_id")
27708
27709 # This is how much time we allow for waitpid to succeed after
27710 # we've sent a kill signal to our subprocess.
27711 @@ -50,7 +48,13 @@ class SubProcess(AbstractPollTask):
27712 try:
27713 os.kill(self.pid, signal.SIGTERM)
27714 except OSError as e:
27715 - if e.errno != errno.ESRCH:
27716 + if e.errno == errno.EPERM:
27717 + # Reported with hardened kernel (bug #358211).
27718 + writemsg_level(
27719 + "!!! kill: (%i) - Operation not permitted\n" %
27720 + (self.pid,), level=logging.ERROR,
27721 + noiselevel=-1)
27722 + elif e.errno != errno.ESRCH:
27723 raise
27724
27725 def isAlive(self):
27726 @@ -69,7 +73,13 @@ class SubProcess(AbstractPollTask):
27727 try:
27728 os.kill(self.pid, signal.SIGKILL)
27729 except OSError as e:
27730 - if e.errno != errno.ESRCH:
27731 + if e.errno == errno.EPERM:
27732 + # Reported with hardened kernel (bug #358211).
27733 + writemsg_level(
27734 + "!!! kill: (%i) - Operation not permitted\n" %
27735 + (self.pid,), level=logging.ERROR,
27736 + noiselevel=-1)
27737 + elif e.errno != errno.ESRCH:
27738 raise
27739 del e
27740 self._wait_loop(timeout=self._cancel_timeout)
27741 @@ -116,7 +126,7 @@ class SubProcess(AbstractPollTask):
27742 self._registered = False
27743
27744 if self._reg_id is not None:
27745 - self.scheduler.unregister(self._reg_id)
27746 + self.scheduler.source_remove(self._reg_id)
27747 self._reg_id = None
27748
27749 if self._files is not None:
27750
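
Both kill sites in SubProcess now tolerate EPERM (seen with hardened kernels, bug #358211) in addition to ESRCH. A hedged helper showing the same tolerance, with the logging call simplified to plain stderr output rather than writemsg_level():

    import errno
    import os
    import signal
    import sys

    def kill_tolerant(pid, sig=signal.SIGTERM):
        """Send a signal, ignoring 'no such process' and warning on EPERM.

        Illustrative only; Portage itself reports EPERM via writemsg_level().
        """
        try:
            os.kill(pid, sig)
        except OSError as e:
            if e.errno == errno.EPERM:
                # Reported with hardened kernel (bug #358211).
                sys.stderr.write(
                    "!!! kill: (%i) - Operation not permitted\n" % pid)
            elif e.errno != errno.ESRCH:
                raise
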
27751 diff --git a/pym/_emerge/Task.py b/pym/_emerge/Task.py
27752 index 40f5066..250d458 100644
27753 --- a/pym/_emerge/Task.py
27754 +++ b/pym/_emerge/Task.py
27755 @@ -1,4 +1,4 @@
27756 -# Copyright 1999-2012 Gentoo Foundation
27757 +# Copyright 1999-2013 Gentoo Foundation
27758 # Distributed under the terms of the GNU General Public License v2
27759
27760 from portage.util.SlotObject import SlotObject
27761 @@ -41,3 +41,10 @@ class Task(SlotObject):
27762 strings.
27763 """
27764 return "(%s)" % ", ".join(("'%s'" % x for x in self._hash_key))
27765 +
27766 + def __repr__(self):
27767 + if self._hash_key is None:
27768 + # triggered by python-trace
27769 + return SlotObject.__repr__(self)
27770 + return "<%s (%s)>" % (self.__class__.__name__,
27771 + ", ".join(("'%s'" % x for x in self._hash_key)))
27772
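
The new Task.__repr__() reuses the _hash_key tuple that already drives equality and hashing, falling back to the base class repr when the key is unset (as happens under python-trace). A self-contained toy sketch of the same idea; the example key is made up for illustration:

    class HashKeyed(object):
        """Toy stand-in for Task: identity and repr come from one tuple."""

        def __init__(self, hash_key=None):
            self._hash_key = hash_key

        def __repr__(self):
            if self._hash_key is None:
                # nothing meaningful to show yet
                return object.__repr__(self)
            return "<%s (%s)>" % (self.__class__.__name__,
                ", ".join("'%s'" % x for x in self._hash_key))

    # >>> repr(HashKeyed(("ebuild", "/", "dev-lang/python-2.7", "merge")))
    # "<HashKeyed ('ebuild', '/', 'dev-lang/python-2.7', 'merge')>"
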
27773 diff --git a/pym/_emerge/TaskScheduler.py b/pym/_emerge/TaskScheduler.py
27774 deleted file mode 100644
27775 index 583bfe3..0000000
27776 --- a/pym/_emerge/TaskScheduler.py
27777 +++ /dev/null
27778 @@ -1,26 +0,0 @@
27779 -# Copyright 1999-2012 Gentoo Foundation
27780 -# Distributed under the terms of the GNU General Public License v2
27781 -
27782 -from _emerge.QueueScheduler import QueueScheduler
27783 -from _emerge.SequentialTaskQueue import SequentialTaskQueue
27784 -
27785 -class TaskScheduler(object):
27786 -
27787 - """
27788 - A simple way to handle scheduling of AsynchrousTask instances. Simply
27789 - add tasks and call run(). The run() method returns when no tasks remain.
27790 - """
27791 -
27792 - def __init__(self, main=True, max_jobs=None, max_load=None):
27793 - self._queue = SequentialTaskQueue(max_jobs=max_jobs)
27794 - self._scheduler = QueueScheduler(main=main,
27795 - max_jobs=max_jobs, max_load=max_load)
27796 - self.sched_iface = self._scheduler.sched_iface
27797 - self.run = self._scheduler.run
27798 - self.clear = self._scheduler.clear
27799 - self.wait = self._queue.wait
27800 - self._scheduler.add(self._queue)
27801 -
27802 - def add(self, task):
27803 - self._queue.add(task)
27804 -
27805
27806 diff --git a/pym/_emerge/UnmergeDepPriority.py b/pym/_emerge/UnmergeDepPriority.py
27807 index 4316600..ec44a67 100644
27808 --- a/pym/_emerge/UnmergeDepPriority.py
27809 +++ b/pym/_emerge/UnmergeDepPriority.py
27810 @@ -1,4 +1,4 @@
27811 -# Copyright 1999-2011 Gentoo Foundation
27812 +# Copyright 1999-2013 Gentoo Foundation
27813 # Distributed under the terms of the GNU General Public License v2
27814
27815 from _emerge.AbstractDepPriority import AbstractDepPriority
27816 @@ -7,15 +7,16 @@ class UnmergeDepPriority(AbstractDepPriority):
27817 """
27818 Combination of properties Priority Category
27819
27820 - runtime 0 HARD
27821 - runtime_post -1 HARD
27822 - buildtime -2 SOFT
27823 - (none of the above) -2 SOFT
27824 + runtime_slot_op 0 HARD
27825 + runtime -1 HARD
27826 + runtime_post -2 HARD
27827 + buildtime -3 SOFT
27828 + (none of the above) -3 SOFT
27829 """
27830
27831 MAX = 0
27832 - SOFT = -2
27833 - MIN = -2
27834 + SOFT = -3
27835 + MIN = -3
27836
27837 def __init__(self, **kwargs):
27838 AbstractDepPriority.__init__(self, **kwargs)
27839 @@ -23,17 +24,21 @@ class UnmergeDepPriority(AbstractDepPriority):
27840 self.optional = True
27841
27842 def __int__(self):
27843 - if self.runtime:
27844 + if self.runtime_slot_op:
27845 return 0
27846 - if self.runtime_post:
27847 + if self.runtime:
27848 return -1
27849 - if self.buildtime:
27850 + if self.runtime_post:
27851 return -2
27852 - return -2
27853 + if self.buildtime:
27854 + return -3
27855 + return -3
27856
27857 def __str__(self):
27858 if self.ignored:
27859 return "ignored"
27860 + if self.runtime_slot_op:
27861 + return "hard slot op"
27862 myvalue = self.__int__()
27863 if myvalue > self.SOFT:
27864 return "hard"
27865
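
The widened priority scale shifts every existing value down by one so that slot-operator runtime deps sort above plain runtime deps; the docstring table above is the authoritative mapping. A quick sketch of how the integer ordering behaves (flag names mirror the ones used in the class, everything else is illustrative):

    def unmerge_priority(runtime_slot_op=False, runtime=False,
            runtime_post=False, buildtime=False):
        """Return the ordering integer, per the docstring table above."""
        if runtime_slot_op:
            return 0    # HARD
        if runtime:
            return -1   # HARD
        if runtime_post:
            return -2   # HARD
        if buildtime:
            return -3   # SOFT
        return -3       # SOFT

    # Highest (most binding) first:
    # >>> sorted([unmerge_priority(buildtime=True),
    # ...         unmerge_priority(runtime_slot_op=True),
    # ...         unmerge_priority(runtime=True)], reverse=True)
    # [0, -1, -3]
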
27866 diff --git a/pym/_emerge/UseFlagDisplay.py b/pym/_emerge/UseFlagDisplay.py
27867 index 3daca19..f460474 100644
27868 --- a/pym/_emerge/UseFlagDisplay.py
27869 +++ b/pym/_emerge/UseFlagDisplay.py
27870 @@ -1,10 +1,12 @@
27871 -# Copyright 1999-2011 Gentoo Foundation
27872 +# Copyright 1999-2013 Gentoo Foundation
27873 # Distributed under the terms of the GNU General Public License v2
27874
27875 +from __future__ import unicode_literals
27876 +
27877 from itertools import chain
27878 import sys
27879
27880 -from portage import _encodings, _unicode_decode, _unicode_encode
27881 +from portage import _encodings, _unicode_encode
27882 from portage.output import red
27883 from portage.util import cmp_sort_key
27884 from portage.output import blue
27885 @@ -114,9 +116,9 @@ def pkg_use_display(pkg, opts, modified_use=None):
27886 flags.sort(key=UseFlagDisplay.sort_combined)
27887 else:
27888 flags.sort(key=UseFlagDisplay.sort_separated)
27889 - # Use _unicode_decode() to force unicode format string so
27890 + # Use unicode_literals to force unicode format string so
27891 # that UseFlagDisplay.__unicode__() is called in python2.
27892 flag_displays.append('%s="%s"' % (varname,
27893 - ' '.join(_unicode_decode("%s") % (f,) for f in flags)))
27894 + ' '.join("%s" % (f,) for f in flags)))
27895
27896 return ' '.join(flag_displays)
27897
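
The UseFlagDisplay change relies on a Python 2 detail: '%s' % obj calls obj.__str__() for a byte-string format but obj.__unicode__() for a unicode one, so importing unicode_literals makes the plain literal do what _unicode_decode("%s") did before. A small illustration of that behaviour under stated assumptions (Python 2 semantics; under Python 3 both paths call __str__):

    # -*- coding: utf-8 -*-
    from __future__ import unicode_literals

    class Flag(object):
        def __init__(self, name):
            self.name = name

        def __str__(self):
            # Python 2: called for byte-string formats; Python 3: always
            return self.name.encode('utf-8') if str is bytes else self.name

        def __unicode__(self):
            # Python 2 only: called for unicode format strings
            return self.name

    # With unicode_literals in effect, "%s" is already a unicode literal,
    # so Python 2 picks __unicode__() and non-ASCII flag names survive.
    print("%s" % (Flag("alsa"),))
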
27898 diff --git a/pym/_emerge/actions.py b/pym/_emerge/actions.py
27899 index 9a023a8..2a1354b 100644
27900 --- a/pym/_emerge/actions.py
27901 +++ b/pym/_emerge/actions.py
27902 @@ -1,7 +1,7 @@
27903 -# Copyright 1999-2012 Gentoo Foundation
27904 +# Copyright 1999-2014 Gentoo Foundation
27905 # Distributed under the terms of the GNU General Public License v2
27906
27907 -from __future__ import print_function
27908 +from __future__ import print_function, unicode_literals
27909
27910 import errno
27911 import logging
27912 @@ -18,27 +18,35 @@ import sys
27913 import tempfile
27914 import textwrap
27915 import time
27916 +import warnings
27917 from itertools import chain
27918
27919 import portage
27920 portage.proxy.lazyimport.lazyimport(globals(),
27921 + 'portage.dbapi._similar_name_search:similar_name_search',
27922 + 'portage.debug',
27923 'portage.news:count_unread_news,display_news_notifications',
27924 + 'portage.util._get_vm_info:get_vm_info',
27925 + '_emerge.chk_updated_cfg_files:chk_updated_cfg_files',
27926 + '_emerge.help:help@emerge_help',
27927 + '_emerge.post_emerge:display_news_notification,post_emerge',
27928 + '_emerge.stdout_spinner:stdout_spinner',
27929 )
27930
27931 from portage.localization import _
27932 from portage import os
27933 from portage import shutil
27934 -from portage import eapi_is_supported, _unicode_decode
27935 +from portage import eapi_is_supported, _encodings, _unicode_decode
27936 from portage.cache.cache_errors import CacheError
27937 -from portage.const import GLOBAL_CONFIG_PATH
27938 -from portage.const import _ENABLE_DYN_LINK_MAP
27939 +from portage.const import GLOBAL_CONFIG_PATH, VCS_DIRS, _DEPCLEAN_LIB_CHECK_DEFAULT
27940 +from portage.const import SUPPORTED_BINPKG_FORMATS, TIMESTAMP_FORMAT
27941 from portage.dbapi.dep_expand import dep_expand
27942 from portage.dbapi._expand_new_virt import expand_new_virt
27943 from portage.dep import Atom
27944 from portage.eclass_cache import hashed_path
27945 -from portage.exception import InvalidAtom, InvalidData
27946 +from portage.exception import InvalidAtom, InvalidData, ParseError
27947 from portage.output import blue, bold, colorize, create_color_func, darkgreen, \
27948 - red, yellow
27949 + red, xtermTitle, xtermTitleReset, yellow
27950 good = create_color_func("GOOD")
27951 bad = create_color_func("BAD")
27952 warn = create_color_func("WARN")
27953 @@ -46,9 +54,13 @@ from portage.package.ebuild._ipc.QueryCommand import QueryCommand
27954 from portage.package.ebuild.doebuild import _check_temp_dir
27955 from portage._sets import load_default_config, SETPREFIX
27956 from portage._sets.base import InternalPackageSet
27957 -from portage.util import cmp_sort_key, writemsg, \
27958 +from portage.util import cmp_sort_key, writemsg, varexpand, \
27959 writemsg_level, writemsg_stdout
27960 from portage.util.digraph import digraph
27961 +from portage.util.SlotObject import SlotObject
27962 +from portage.util._async.run_main_scheduler import run_main_scheduler
27963 +from portage.util._async.SchedulerInterface import SchedulerInterface
27964 +from portage.util._eventloop.global_event_loop import global_event_loop
27965 from portage._global_updates import _global_updates
27966
27967 from _emerge.clear_caches import clear_caches
27968 @@ -277,8 +289,14 @@ def action_build(settings, trees, mtimedb,
27969 "dropped due to\n" + \
27970 "!!! masking or unsatisfied dependencies:\n\n",
27971 noiselevel=-1)
27972 - for task in dropped_tasks:
27973 - portage.writemsg(" " + str(task) + "\n", noiselevel=-1)
27974 + for task, atoms in dropped_tasks.items():
27975 + if not atoms:
27976 + writemsg(" %s is masked or unavailable\n" %
27977 + (task,), noiselevel=-1)
27978 + else:
27979 + writemsg(" %s requires %s\n" %
27980 + (task, ", ".join(atoms)), noiselevel=-1)
27981 +
27982 portage.writemsg("\n", noiselevel=-1)
27983 del dropped_tasks
27984 else:
27985 @@ -309,6 +327,7 @@ def action_build(settings, trees, mtimedb,
27986 mydepgraph.display_problems()
27987 return 1
27988
27989 + mergecount = None
27990 if "--pretend" not in myopts and \
27991 ("--ask" in myopts or "--tree" in myopts or \
27992 "--verbose" in myopts) and \
27993 @@ -320,7 +339,7 @@ def action_build(settings, trees, mtimedb,
27994 return os.EX_OK
27995 favorites = mtimedb["resume"]["favorites"]
27996 retval = mydepgraph.display(
27997 - mydepgraph.altlist(reversed=tree),
27998 + mydepgraph.altlist(),
27999 favorites=favorites)
28000 mydepgraph.display_problems()
28001 mergelist_shown = True
28002 @@ -329,7 +348,7 @@ def action_build(settings, trees, mtimedb,
28003 prompt="Would you like to resume merging these packages?"
28004 else:
28005 retval = mydepgraph.display(
28006 - mydepgraph.altlist(reversed=("--tree" in myopts)),
28007 + mydepgraph.altlist(),
28008 favorites=favorites)
28009 mydepgraph.display_problems()
28010 mergelist_shown = True
28011 @@ -340,6 +359,7 @@ def action_build(settings, trees, mtimedb,
28012 if isinstance(x, Package) and x.operation == "merge":
28013 mergecount += 1
28014
28015 + prompt = None
28016 if mergecount==0:
28017 sets = trees[settings['EROOT']]['root_config'].sets
28018 world_candidates = None
28019 @@ -352,14 +372,11 @@ def action_build(settings, trees, mtimedb,
28020 world_candidates = [x for x in favorites \
28021 if not (x.startswith(SETPREFIX) and \
28022 not sets[x[1:]].world_candidate)]
28023 +
28024 if "selective" in myparams and \
28025 not oneshot and world_candidates:
28026 - print()
28027 - for x in world_candidates:
28028 - print(" %s %s" % (good("*"), x))
28029 - prompt="Would you like to add these packages to your world favorites?"
28030 - elif settings["AUTOCLEAN"] and "yes"==settings["AUTOCLEAN"]:
28031 - prompt="Nothing to merge; would you like to auto-clean packages?"
28032 + # Prompt later, inside saveNomergeFavorites.
28033 + prompt = None
28034 else:
28035 print()
28036 print("Nothing to merge; quitting.")
28037 @@ -370,13 +387,15 @@ def action_build(settings, trees, mtimedb,
28038 else:
28039 prompt="Would you like to merge these packages?"
28040 print()
28041 - if "--ask" in myopts and userquery(prompt, enter_invalid) == "No":
28042 + if prompt is not None and "--ask" in myopts and \
28043 + userquery(prompt, enter_invalid) == "No":
28044 print()
28045 print("Quitting.")
28046 print()
28047 return 128 + signal.SIGINT
28048 # Don't ask again (e.g. when auto-cleaning packages after merge)
28049 - myopts.pop("--ask", None)
28050 + if mergecount != 0:
28051 + myopts.pop("--ask", None)
28052
28053 if ("--pretend" in myopts) and not ("--fetchonly" in myopts or "--fetch-all-uri" in myopts):
28054 if ("--resume" in myopts):
28055 @@ -386,7 +405,7 @@ def action_build(settings, trees, mtimedb,
28056 return os.EX_OK
28057 favorites = mtimedb["resume"]["favorites"]
28058 retval = mydepgraph.display(
28059 - mydepgraph.altlist(reversed=tree),
28060 + mydepgraph.altlist(),
28061 favorites=favorites)
28062 mydepgraph.display_problems()
28063 mergelist_shown = True
28064 @@ -394,39 +413,14 @@ def action_build(settings, trees, mtimedb,
28065 return retval
28066 else:
28067 retval = mydepgraph.display(
28068 - mydepgraph.altlist(reversed=("--tree" in myopts)),
28069 + mydepgraph.altlist(),
28070 favorites=favorites)
28071 mydepgraph.display_problems()
28072 mergelist_shown = True
28073 if retval != os.EX_OK:
28074 return retval
28075 - if "--buildpkgonly" in myopts:
28076 - graph_copy = mydepgraph._dynamic_config.digraph.copy()
28077 - removed_nodes = set()
28078 - for node in graph_copy:
28079 - if not isinstance(node, Package) or \
28080 - node.operation == "nomerge":
28081 - removed_nodes.add(node)
28082 - graph_copy.difference_update(removed_nodes)
28083 - if not graph_copy.hasallzeros(ignore_priority = \
28084 - DepPrioritySatisfiedRange.ignore_medium):
28085 - print("\n!!! --buildpkgonly requires all dependencies to be merged.")
28086 - print("!!! You have to merge the dependencies before you can build this package.\n")
28087 - return 1
28088 +
28089 else:
28090 - if "--buildpkgonly" in myopts:
28091 - graph_copy = mydepgraph._dynamic_config.digraph.copy()
28092 - removed_nodes = set()
28093 - for node in graph_copy:
28094 - if not isinstance(node, Package) or \
28095 - node.operation == "nomerge":
28096 - removed_nodes.add(node)
28097 - graph_copy.difference_update(removed_nodes)
28098 - if not graph_copy.hasallzeros(ignore_priority = \
28099 - DepPrioritySatisfiedRange.ignore_medium):
28100 - print("\n!!! --buildpkgonly requires all dependencies to be merged.")
28101 - print("!!! Cannot merge requested packages. Merge deps and try again.\n")
28102 - return 1
28103
28104 if not mergelist_shown:
28105 # If we haven't already shown the merge list above, at
28106 @@ -446,25 +440,29 @@ def action_build(settings, trees, mtimedb,
28107
28108 mydepgraph.saveNomergeFavorites()
28109
28110 - mergetask = Scheduler(settings, trees, mtimedb, myopts,
28111 - spinner, favorites=favorites,
28112 - graph_config=mydepgraph.schedulerGraph())
28113 -
28114 - del mydepgraph
28115 - clear_caches(trees)
28116 -
28117 - retval = mergetask.merge()
28118 -
28119 - if retval == os.EX_OK and not (buildpkgonly or fetchonly or pretend):
28120 - if "yes" == settings.get("AUTOCLEAN"):
28121 - portage.writemsg_stdout(">>> Auto-cleaning packages...\n")
28122 - unmerge(trees[settings['EROOT']]['root_config'],
28123 - myopts, "clean", [],
28124 - ldpath_mtimes, autoclean=1)
28125 - else:
28126 - portage.writemsg_stdout(colorize("WARN", "WARNING:")
28127 - + " AUTOCLEAN is disabled. This can cause serious"
28128 - + " problems due to overlapping packages.\n")
28129 + if mergecount == 0:
28130 + retval = os.EX_OK
28131 + else:
28132 + mergetask = Scheduler(settings, trees, mtimedb, myopts,
28133 + spinner, favorites=favorites,
28134 + graph_config=mydepgraph.schedulerGraph())
28135 +
28136 + del mydepgraph
28137 + clear_caches(trees)
28138 +
28139 + retval = mergetask.merge()
28140 +
28141 + if retval == os.EX_OK and \
28142 + not (buildpkgonly or fetchonly or pretend):
28143 + if "yes" == settings.get("AUTOCLEAN"):
28144 + portage.writemsg_stdout(">>> Auto-cleaning packages...\n")
28145 + unmerge(trees[settings['EROOT']]['root_config'],
28146 + myopts, "clean", [],
28147 + ldpath_mtimes, autoclean=1)
28148 + else:
28149 + portage.writemsg_stdout(colorize("WARN", "WARNING:")
28150 + + " AUTOCLEAN is disabled. This can cause serious"
28151 + + " problems due to overlapping packages.\n")
28152
28153 return retval
28154
28155 @@ -544,7 +542,8 @@ def action_depclean(settings, trees, ldpath_mtimes,
28156 # specific packages.
28157
28158 msg = []
28159 - if not _ENABLE_DYN_LINK_MAP:
28160 + if "preserve-libs" not in settings.features and \
28161 + not myopts.get("--depclean-lib-check", _DEPCLEAN_LIB_CHECK_DEFAULT) != "n":
28162 msg.append("Depclean may break link level dependencies. Thus, it is\n")
28163 msg.append("recommended to use a tool such as " + good("`revdep-rebuild`") + " (from\n")
28164 msg.append("app-portage/gentoolkit) in order to detect such breakage.\n")
28165 @@ -610,11 +609,17 @@ def action_depclean(settings, trees, ldpath_mtimes,
28166 if not cleanlist and "--quiet" in myopts:
28167 return rval
28168
28169 + set_atoms = {}
28170 + for k in ("system", "selected"):
28171 + try:
28172 + set_atoms[k] = root_config.setconfig.getSetAtoms(k)
28173 + except portage.exception.PackageSetNotFound:
28174 + # A nested set could not be resolved, so ignore nested sets.
28175 + set_atoms[k] = root_config.sets[k].getAtoms()
28176 +
28177 print("Packages installed: " + str(len(vardb.cpv_all())))
28178 - print("Packages in world: " + \
28179 - str(len(root_config.sets["selected"].getAtoms())))
28180 - print("Packages in system: " + \
28181 - str(len(root_config.sets["system"].getAtoms())))
28182 + print("Packages in world: %d" % len(set_atoms["selected"]))
28183 + print("Packages in system: %d" % len(set_atoms["system"]))
28184 print("Required packages: "+str(req_pkg_count))
28185 if "--pretend" in myopts:
28186 print("Number to remove: "+str(len(cleanlist)))
28187 @@ -647,13 +652,21 @@ def calc_depclean(settings, trees, ldpath_mtimes,
28188 required_sets[protected_set_name] = protected_set
28189 system_set = psets["system"]
28190
28191 - if not system_set or not selected_set:
28192 + set_atoms = {}
28193 + for k in ("system", "selected"):
28194 + try:
28195 + set_atoms[k] = root_config.setconfig.getSetAtoms(k)
28196 + except portage.exception.PackageSetNotFound:
28197 + # A nested set could not be resolved, so ignore nested sets.
28198 + set_atoms[k] = root_config.sets[k].getAtoms()
28199 +
28200 + if not set_atoms["system"] or not set_atoms["selected"]:
28201
28202 - if not system_set:
28203 + if not set_atoms["system"]:
28204 writemsg_level("!!! You have no system list.\n",
28205 level=logging.ERROR, noiselevel=-1)
28206
28207 - if not selected_set:
28208 + if not set_atoms["selected"]:
28209 writemsg_level("!!! You have no world file.\n",
28210 level=logging.WARNING, noiselevel=-1)
28211
28212 @@ -697,7 +710,7 @@ def calc_depclean(settings, trees, ldpath_mtimes,
28213 continue
28214 except portage.exception.InvalidDependString as e:
28215 show_invalid_depstring_notice(pkg,
28216 - pkg.metadata["PROVIDE"], str(e))
28217 + pkg._metadata["PROVIDE"], _unicode(e))
28218 del e
28219 protected_set.add("=" + pkg.cpv)
28220 continue
28221 @@ -751,7 +764,7 @@ def calc_depclean(settings, trees, ldpath_mtimes,
28222 continue
28223 except portage.exception.InvalidDependString as e:
28224 show_invalid_depstring_notice(pkg,
28225 - pkg.metadata["PROVIDE"], str(e))
28226 + pkg._metadata["PROVIDE"], _unicode(e))
28227 del e
28228 protected_set.add("=" + pkg.cpv)
28229 continue
28230 @@ -769,7 +782,7 @@ def calc_depclean(settings, trees, ldpath_mtimes,
28231 required_sets['__excluded__'].add("=" + pkg.cpv)
28232 except portage.exception.InvalidDependString as e:
28233 show_invalid_depstring_notice(pkg,
28234 - pkg.metadata["PROVIDE"], str(e))
28235 + pkg._metadata["PROVIDE"], _unicode(e))
28236 del e
28237 required_sets['__excluded__'].add("=" + pkg.cpv)
28238
28239 @@ -805,7 +818,12 @@ def calc_depclean(settings, trees, ldpath_mtimes,
28240 msg.append("the following required packages not being installed:")
28241 msg.append("")
28242 for atom, parent in unresolvable:
28243 - msg.append(" %s pulled in by:" % (atom,))
28244 + if atom != atom.unevaluated_atom and \
28245 + vardb.match(_unicode(atom)):
28246 + msg.append(" %s (%s) pulled in by:" %
28247 + (atom.unevaluated_atom, atom))
28248 + else:
28249 + msg.append(" %s pulled in by:" % (atom,))
28250 msg.append(" %s" % (parent,))
28251 msg.append("")
28252 msg.extend(textwrap.wrap(
28253 @@ -848,15 +866,27 @@ def calc_depclean(settings, trees, ldpath_mtimes,
28254 required_pkgs_total += 1
28255
28256 def show_parents(child_node):
28257 - parent_nodes = graph.parent_nodes(child_node)
28258 - if not parent_nodes:
28259 + parent_atoms = \
28260 + resolver._dynamic_config._parent_atoms.get(child_node, [])
28261 +
28262 + # Never display the special internal protected_set.
28263 + parent_atoms = [parent_atom for parent_atom in parent_atoms
28264 + if not (isinstance(parent_atom[0], SetArg) and
28265 + parent_atom[0].name == protected_set_name)]
28266 +
28267 + if not parent_atoms:
28268 # With --prune, the highest version can be pulled in without any
28269 # real parent since all installed packages are pulled in. In that
28270 # case there's nothing to show here.
28271 return
28272 + parent_atom_dict = {}
28273 + for parent, atom in parent_atoms:
28274 + parent_atom_dict.setdefault(parent, []).append(atom)
28275 +
28276 parent_strs = []
28277 - for node in parent_nodes:
28278 - parent_strs.append(str(getattr(node, "cpv", node)))
28279 + for parent, atoms in parent_atom_dict.items():
28280 + parent_strs.append("%s requires %s" %
28281 + (getattr(parent, "cpv", parent), ", ".join(atoms)))
28282 parent_strs.sort()
28283 msg = []
28284 msg.append(" %s pulled in by:\n" % (child_node.cpv,))
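
show_parents() now reports parent atoms instead of raw graph parents, grouping the atoms per parent so each line reads "<parent> requires <atom, atom>". The grouping itself is just a dict-of-lists, roughly as sketched below (both values are rendered with str() here, whereas Portage prefers parent.cpv):

    def format_parents(parent_atoms):
        """Group (parent, atom) pairs and render one sorted line per parent."""
        by_parent = {}
        for parent, atom in parent_atoms:
            by_parent.setdefault(parent, []).append(atom)

        lines = []
        for parent, atoms in by_parent.items():
            lines.append("%s requires %s" % (getattr(parent, "cpv", parent),
                ", ".join(str(a) for a in atoms)))
        return sorted(lines)
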
28285 @@ -881,12 +911,6 @@ def calc_depclean(settings, trees, ldpath_mtimes,
28286 graph.debug_print()
28287 writemsg("\n", noiselevel=-1)
28288
28289 - # Never display the special internal protected_set.
28290 - for node in graph:
28291 - if isinstance(node, SetArg) and node.name == protected_set_name:
28292 - graph.remove(node)
28293 - break
28294 -
28295 pkgs_to_remove = []
28296
28297 if action == "depclean":
28298 @@ -939,10 +963,19 @@ def calc_depclean(settings, trees, ldpath_mtimes,
28299 cleanlist = create_cleanlist()
28300 clean_set = set(cleanlist)
28301
28302 - if cleanlist and \
28303 - real_vardb._linkmap is not None and \
28304 - myopts.get("--depclean-lib-check") != "n" and \
28305 - "preserve-libs" not in settings.features:
28306 + depclean_lib_check = cleanlist and real_vardb._linkmap is not None and \
28307 + myopts.get("--depclean-lib-check", _DEPCLEAN_LIB_CHECK_DEFAULT) != "n"
28308 + preserve_libs = "preserve-libs" in settings.features
28309 + preserve_libs_restrict = False
28310 +
28311 + if depclean_lib_check and preserve_libs:
28312 + for pkg in cleanlist:
28313 + if "preserve-libs" in pkg.restrict:
28314 + preserve_libs_restrict = True
28315 + break
28316 +
28317 + if depclean_lib_check and \
28318 + (preserve_libs_restrict or not preserve_libs):
28319
28320 # Check if any of these packages are the sole providers of libraries
28321 # with consumers that have not been selected for removal. If so, these
28322 @@ -955,6 +988,13 @@ def calc_depclean(settings, trees, ldpath_mtimes,
28323 writemsg_level(">>> Checking for lib consumers...\n")
28324
28325 for pkg in cleanlist:
28326 +
28327 + if preserve_libs and "preserve-libs" not in pkg.restrict:
28328 + # Any needed libraries will be preserved
28329 + # when this package is unmerged, so there's
28330 + # no need to account for it here.
28331 + continue
28332 +
28333 pkg_dblink = real_vardb._dblink(pkg.cpv)
28334 consumers = {}
28335
28336 @@ -1109,7 +1149,8 @@ def calc_depclean(settings, trees, ldpath_mtimes,
28337 "installed", root_config, installed=True)
28338 if not resolver._add_pkg(pkg,
28339 Dependency(parent=consumer_pkg,
28340 - priority=UnmergeDepPriority(runtime=True),
28341 + priority=UnmergeDepPriority(runtime=True,
28342 + runtime_slot_op=True),
28343 root=pkg.root)):
28344 resolver.display_problems()
28345 return 1, [], False, 0
28346 @@ -1146,30 +1187,30 @@ def calc_depclean(settings, trees, ldpath_mtimes,
28347 graph = digraph()
28348 del cleanlist[:]
28349
28350 - dep_keys = ["DEPEND", "RDEPEND", "PDEPEND"]
28351 runtime = UnmergeDepPriority(runtime=True)
28352 runtime_post = UnmergeDepPriority(runtime_post=True)
28353 buildtime = UnmergeDepPriority(buildtime=True)
28354 priority_map = {
28355 "RDEPEND": runtime,
28356 "PDEPEND": runtime_post,
28357 + "HDEPEND": buildtime,
28358 "DEPEND": buildtime,
28359 }
28360
28361 for node in clean_set:
28362 graph.add(node, None)
28363 - for dep_type in dep_keys:
28364 - depstr = node.metadata[dep_type]
28365 + for dep_type in Package._dep_keys:
28366 + depstr = node._metadata[dep_type]
28367 if not depstr:
28368 continue
28369 priority = priority_map[dep_type]
28370
28371 if debug:
28372 - writemsg_level(_unicode_decode("\nParent: %s\n") \
28373 + writemsg_level("\nParent: %s\n"
28374 % (node,), noiselevel=-1, level=logging.DEBUG)
28375 - writemsg_level(_unicode_decode( "Depstring: %s\n") \
28376 + writemsg_level( "Depstring: %s\n"
28377 % (depstr,), noiselevel=-1, level=logging.DEBUG)
28378 - writemsg_level(_unicode_decode( "Priority: %s\n") \
28379 + writemsg_level( "Priority: %s\n"
28380 % (priority,), noiselevel=-1, level=logging.DEBUG)
28381
28382 try:
28383 @@ -1183,7 +1224,7 @@ def calc_depclean(settings, trees, ldpath_mtimes,
28384
28385 if debug:
28386 writemsg_level("Candidates: [%s]\n" % \
28387 - ', '.join(_unicode_decode("'%s'") % (x,) for x in atoms),
28388 + ', '.join("'%s'" % (x,) for x in atoms),
28389 noiselevel=-1, level=logging.DEBUG)
28390
28391 for atom in atoms:
28392 @@ -1197,7 +1238,15 @@ def calc_depclean(settings, trees, ldpath_mtimes,
28393 continue
28394 for child_node in matches:
28395 if child_node in clean_set:
28396 - graph.add(child_node, node, priority=priority)
28397 +
28398 + mypriority = priority.copy()
28399 + if atom.slot_operator_built:
28400 + if mypriority.buildtime:
28401 + mypriority.buildtime_slot_op = True
28402 + if mypriority.runtime:
28403 + mypriority.runtime_slot_op = True
28404 +
28405 + graph.add(child_node, node, priority=mypriority)
28406
28407 if debug:
28408 writemsg_level("\nunmerge digraph:\n\n",
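
When an atom was built with a slot operator, the unmerge-graph edge is promoted to the corresponding *_slot_op priority by copying the base priority first, so the shared runtime/buildtime objects are never mutated. In isolation the pattern looks like this (copy() and the flag names follow the AbstractDepPriority convention; the rest is a stand-in):

    import copy

    class DepPriority(object):
        """Minimal stand-in carrying only the flags used below."""

        def __init__(self, runtime=False, buildtime=False):
            self.runtime = runtime
            self.buildtime = buildtime
            self.runtime_slot_op = False
            self.buildtime_slot_op = False

        def copy(self):
            return copy.copy(self)

    def edge_priority(base_priority, slot_operator_built):
        """Return the priority to attach to one unmerge-graph edge."""
        if not slot_operator_built:
            return base_priority
        mypriority = base_priority.copy()   # never mutate the shared object
        if mypriority.buildtime:
            mypriority.buildtime_slot_op = True
        if mypriority.runtime:
            mypriority.runtime_slot_op = True
        return mypriority
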
28409 @@ -1277,11 +1326,8 @@ def action_deselect(settings, trees, opts, atoms):
28410 allow_repo=True, allow_wildcard=True))
28411
28412 for cpv in vardb.match(atom):
28413 - slot, = vardb.aux_get(cpv, ["SLOT"])
28414 - if not slot:
28415 - slot = "0"
28416 - expanded_atoms.add(Atom("%s:%s" % \
28417 - (portage.cpv_getkey(cpv), slot)))
28418 + pkg = vardb._pkg_str(cpv, None)
28419 + expanded_atoms.add(Atom("%s:%s" % (pkg.cp, pkg.slot)))
28420
28421 discard_atoms = set()
28422 for atom in world_set:
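
action_deselect now builds "cp:slot" atoms from the vardb's _pkg_str objects instead of re-querying SLOT via aux_get. The equivalent transformation, written against a generic list of (cp, slot) pairs rather than Portage's private API:

    def cp_slot_atoms(installed):
        """Turn (cp, slot) pairs into 'cp:slot' atom strings.

        'installed' stands in for vardb._pkg_str() results, which expose
        .cp and .slot; an empty slot is treated as the default "0" here,
        mirroring the removed aux_get() code path.
        """
        atoms = set()
        for cp, slot in installed:
            atoms.add("%s:%s" % (cp, slot or "0"))
        return atoms

    # >>> sorted(cp_slot_atoms([("dev-lang/python", "2.7"),
    # ...                       ("app-editors/nano", "")]))
    # ['app-editors/nano:0', 'dev-lang/python:2.7']
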
28423 @@ -1352,10 +1398,90 @@ class _info_pkgs_ver(object):
28424
28425 def action_info(settings, trees, myopts, myfiles):
28426
28427 + # See if we can find any packages installed matching the strings
28428 + # passed on the command line
28429 + mypkgs = []
28430 + eroot = settings['EROOT']
28431 + vardb = trees[eroot]["vartree"].dbapi
28432 + portdb = trees[eroot]['porttree'].dbapi
28433 + bindb = trees[eroot]["bintree"].dbapi
28434 + for x in myfiles:
28435 + any_match = False
28436 + cp_exists = bool(vardb.match(x.cp))
28437 + installed_match = vardb.match(x)
28438 + for installed in installed_match:
28439 + mypkgs.append((installed, "installed"))
28440 + any_match = True
28441 +
28442 + if any_match:
28443 + continue
28444 +
28445 + for db, pkg_type in ((portdb, "ebuild"), (bindb, "binary")):
28446 + if pkg_type == "binary" and "--usepkg" not in myopts:
28447 + continue
28448 +
28449 + # Use match instead of cp_list, to account for old-style virtuals.
28450 + if not cp_exists and db.match(x.cp):
28451 + cp_exists = True
28452 + # Search for masked packages too.
28453 + if not cp_exists and hasattr(db, "xmatch") and \
28454 + db.xmatch("match-all", x.cp):
28455 + cp_exists = True
28456 +
28457 + matches = db.match(x)
28458 + matches.reverse()
28459 + for match in matches:
28460 + if pkg_type == "binary":
28461 + if db.bintree.isremote(match):
28462 + continue
28463 + auxkeys = ["EAPI", "DEFINED_PHASES"]
28464 + metadata = dict(zip(auxkeys, db.aux_get(match, auxkeys)))
28465 + if metadata["EAPI"] not in ("0", "1", "2", "3") and \
28466 + "info" in metadata["DEFINED_PHASES"].split():
28467 + mypkgs.append((match, pkg_type))
28468 + break
28469 +
28470 + if not cp_exists:
28471 + xinfo = '"%s"' % x.unevaluated_atom
28472 + # Discard null/ from failed cpv_expand category expansion.
28473 + xinfo = xinfo.replace("null/", "")
28474 + if settings["ROOT"] != "/":
28475 + xinfo = "%s for %s" % (xinfo, eroot)
28476 + writemsg("\nemerge: there are no ebuilds to satisfy %s.\n" %
28477 + colorize("INFORM", xinfo), noiselevel=-1)
28478 +
28479 + if myopts.get("--misspell-suggestions", "y") != "n":
28480 +
28481 + writemsg("\nemerge: searching for similar names..."
28482 + , noiselevel=-1)
28483 +
28484 + dbs = [vardb]
28485 + #if "--usepkgonly" not in myopts:
28486 + dbs.append(portdb)
28487 + if "--usepkg" in myopts:
28488 + dbs.append(bindb)
28489 +
28490 + matches = similar_name_search(dbs, x)
28491 +
28492 + if len(matches) == 1:
28493 + writemsg("\nemerge: Maybe you meant " + matches[0] + "?\n"
28494 + , noiselevel=-1)
28495 + elif len(matches) > 1:
28496 + writemsg(
28497 + "\nemerge: Maybe you meant any of these: %s?\n" % \
28498 + (", ".join(matches),), noiselevel=-1)
28499 + else:
28500 + # Generally, this would only happen if
28501 + # all dbapis are empty.
28502 + writemsg(" nothing similar found.\n"
28503 + , noiselevel=-1)
28504 +
28505 + return 1
28506 +
28507 output_buffer = []
28508 append = output_buffer.append
28509 root_config = trees[settings['EROOT']]['root_config']
28510 - running_eroot = trees._running_eroot
28511 + chost = settings.get("CHOST")
28512
28513 append(getportageversion(settings["PORTDIR"], None,
28514 settings.profile_path, settings["CHOST"],
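
When none of the configured databases know the requested category/package, action_info() now falls back to a "Maybe you meant ..." search via the lazily imported similar_name_search(). The same user experience can be approximated with difflib over the union of known cp names; this is only an approximation of what the real helper does:

    import difflib

    def suggest_similar(dbs, wanted_cp, cutoff=0.8, limit=3):
        """Collect cp names from several dbapi-like objects and fuzzy-match.

        Each db is assumed to provide cp_all() returning 'cat/pkg' strings;
        Portage's real similar_name_search() also weights category matches.
        """
        candidates = set()
        for db in dbs:
            candidates.update(db.cp_all())
        return difflib.get_close_matches(wanted_cp, sorted(candidates),
            n=limit, cutoff=cutoff)

    # matches = suggest_similar([vardb, portdb], "dev-lang/pyton")
    # -> likely ["dev-lang/python"]
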
28515 @@ -1369,6 +1495,18 @@ def action_info(settings, trees, myopts, myfiles):
28516 append(header_width * "=")
28517 append("System uname: %s" % (platform.platform(aliased=1),))
28518
28519 + vm_info = get_vm_info()
28520 + if "ram.total" in vm_info:
28521 + line = "%-9s %10d total" % ("KiB Mem:", vm_info["ram.total"] / 1024)
28522 + if "ram.free" in vm_info:
28523 + line += ",%10d free" % (vm_info["ram.free"] / 1024,)
28524 + append(line)
28525 + if "swap.total" in vm_info:
28526 + line = "%-9s %10d total" % ("KiB Swap:", vm_info["swap.total"] / 1024)
28527 + if "swap.free" in vm_info:
28528 + line += ",%10d free" % (vm_info["swap.free"] / 1024,)
28529 + append(line)
28530 +
28531 lastSync = portage.grabfile(os.path.join(
28532 settings["PORTDIR"], "metadata", "timestamp.chk"))
28533 if lastSync:
28534 @@ -1377,6 +1515,23 @@ def action_info(settings, trees, myopts, myfiles):
28535 lastSync = "Unknown"
28536 append("Timestamp of tree: %s" % (lastSync,))
28537
28538 + ld_names = []
28539 + if chost:
28540 + ld_names.append(chost + "-ld")
28541 + ld_names.append("ld")
28542 + for name in ld_names:
28543 + try:
28544 + proc = subprocess.Popen([name, "--version"],
28545 + stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
28546 + except OSError:
28547 + pass
28548 + else:
28549 + output = _unicode_decode(proc.communicate()[0]).splitlines()
28550 + proc.wait()
28551 + if proc.wait() == os.EX_OK and output:
28552 + append("ld %s" % (output[0]))
28553 + break
28554 +
28555 try:
28556 proc = subprocess.Popen(["distcc", "--version"],
28557 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
28558 @@ -1413,7 +1568,6 @@ def action_info(settings, trees, myopts, myfiles):
28559 "sys-devel/binutils", "sys-devel/libtool", "dev-lang/python"]
28560 myvars += portage.util.grabfile(settings["PORTDIR"]+"/profiles/info_pkgs")
28561 atoms = []
28562 - vardb = trees[running_eroot]['vartree'].dbapi
28563 for x in myvars:
28564 try:
28565 x = Atom(x)
28566 @@ -1426,7 +1580,6 @@ def action_info(settings, trees, myopts, myfiles):
28567
28568 myvars = sorted(set(atoms))
28569
28570 - portdb = trees[running_eroot]['porttree'].dbapi
28571 main_repo = portdb.getRepositoryName(portdb.porttree_root)
28572 cp_map = {}
28573 cp_max_len = 0
28574 @@ -1493,7 +1646,7 @@ def action_info(settings, trees, myopts, myfiles):
28575 'PORTDIR_OVERLAY', 'PORTAGE_BUNZIP2_COMMAND',
28576 'PORTAGE_BZIP2_COMMAND',
28577 'USE', 'CHOST', 'CFLAGS', 'CXXFLAGS',
28578 - 'ACCEPT_KEYWORDS', 'ACCEPT_LICENSE', 'SYNC', 'FEATURES',
28579 + 'ACCEPT_KEYWORDS', 'ACCEPT_LICENSE', 'FEATURES',
28580 'EMERGE_DEFAULT_OPTS']
28581
28582 myvars.extend(portage.util.grabfile(settings["PORTDIR"]+"/profiles/info_vars"))
28583 @@ -1539,40 +1692,7 @@ def action_info(settings, trees, myopts, myfiles):
28584 append("")
28585 writemsg_stdout("\n".join(output_buffer),
28586 noiselevel=-1)
28587 -
28588 - # See if we can find any packages installed matching the strings
28589 - # passed on the command line
28590 - mypkgs = []
28591 - eroot = settings['EROOT']
28592 - vardb = trees[eroot]["vartree"].dbapi
28593 - portdb = trees[eroot]['porttree'].dbapi
28594 - bindb = trees[eroot]["bintree"].dbapi
28595 - for x in myfiles:
28596 - match_found = False
28597 - installed_match = vardb.match(x)
28598 - for installed in installed_match:
28599 - mypkgs.append((installed, "installed"))
28600 - match_found = True
28601 -
28602 - if match_found:
28603 - continue
28604 -
28605 - for db, pkg_type in ((portdb, "ebuild"), (bindb, "binary")):
28606 - if pkg_type == "binary" and "--usepkg" not in myopts:
28607 - continue
28608 -
28609 - matches = db.match(x)
28610 - matches.reverse()
28611 - for match in matches:
28612 - if pkg_type == "binary":
28613 - if db.bintree.isremote(match):
28614 - continue
28615 - auxkeys = ["EAPI", "DEFINED_PHASES"]
28616 - metadata = dict(zip(auxkeys, db.aux_get(match, auxkeys)))
28617 - if metadata["EAPI"] not in ("0", "1", "2", "3") and \
28618 - "info" in metadata["DEFINED_PHASES"].split():
28619 - mypkgs.append((match, pkg_type))
28620 - break
28621 + del output_buffer[:]
28622
28623 # If some packages were found...
28624 if mypkgs:
28625 @@ -1586,11 +1706,15 @@ def action_info(settings, trees, myopts, myfiles):
28626 # Loop through each package
28627 # Only print settings if they differ from global settings
28628 header_title = "Package Settings"
28629 - print(header_width * "=")
28630 - print(header_title.rjust(int(header_width/2 + len(header_title)/2)))
28631 - print(header_width * "=")
28632 - from portage.output import EOutput
28633 - out = EOutput()
28634 + append(header_width * "=")
28635 + append(header_title.rjust(int(header_width/2 + len(header_title)/2)))
28636 + append(header_width * "=")
28637 + append("")
28638 + writemsg_stdout("\n".join(output_buffer),
28639 + noiselevel=-1)
28640 + del output_buffer[:]
28641 +
28642 + out = portage.output.EOutput()
28643 for mypkg in mypkgs:
28644 cpv = mypkg[0]
28645 pkg_type = mypkg[1]
28646 @@ -1608,28 +1732,32 @@ def action_info(settings, trees, myopts, myfiles):
28647 root_config=root_config, type_name=pkg_type)
28648
28649 if pkg_type == "installed":
28650 - print("\n%s was built with the following:" % \
28651 + append("\n%s was built with the following:" % \
28652 colorize("INFORM", str(pkg.cpv)))
28653 elif pkg_type == "ebuild":
28654 - print("\n%s would be build with the following:" % \
28655 + append("\n%s would be build with the following:" % \
28656 colorize("INFORM", str(pkg.cpv)))
28657 elif pkg_type == "binary":
28658 - print("\n%s (non-installed binary) was built with the following:" % \
28659 + append("\n%s (non-installed binary) was built with the following:" % \
28660 colorize("INFORM", str(pkg.cpv)))
28661
28662 - writemsg_stdout('%s\n' % pkg_use_display(pkg, myopts),
28663 - noiselevel=-1)
28664 + append('%s' % pkg_use_display(pkg, myopts))
28665 if pkg_type == "installed":
28666 for myvar in mydesiredvars:
28667 if metadata[myvar].split() != settings.get(myvar, '').split():
28668 - print("%s=\"%s\"" % (myvar, metadata[myvar]))
28669 - print()
28670 + append("%s=\"%s\"" % (myvar, metadata[myvar]))
28671 + append("")
28672 + append("")
28673 + writemsg_stdout("\n".join(output_buffer),
28674 + noiselevel=-1)
28675 + del output_buffer[:]
28676
28677 if metadata['DEFINED_PHASES']:
28678 if 'info' not in metadata['DEFINED_PHASES'].split():
28679 continue
28680
28681 - print(">>> Attempting to run pkg_info() for '%s'" % pkg.cpv)
28682 + writemsg_stdout(">>> Attempting to run pkg_info() for '%s'\n"
28683 + % pkg.cpv, noiselevel=-1)
28684
28685 if pkg_type == "installed":
28686 ebuildpath = vardb.findname(pkg.cpv)
28687 @@ -1856,6 +1984,7 @@ def action_metadata(settings, portdb, myopts, porttrees=None):
28688 print()
28689 signal.signal(signal.SIGWINCH, signal.SIG_DFL)
28690
28691 + portdb.flush_cache()
28692 sys.stdout.flush()
28693 os.umask(old_umask)
28694
28695 @@ -1865,35 +1994,12 @@ def action_regen(settings, portdb, max_jobs, max_load):
28696 #regenerate cache entries
28697 sys.stdout.flush()
28698
28699 - regen = MetadataRegen(portdb, max_jobs=max_jobs, max_load=max_load)
28700 - received_signal = []
28701 -
28702 - def emergeexitsig(signum, frame):
28703 - signal.signal(signal.SIGINT, signal.SIG_IGN)
28704 - signal.signal(signal.SIGTERM, signal.SIG_IGN)
28705 - portage.util.writemsg("\n\nExiting on signal %(signal)s\n" % \
28706 - {"signal":signum})
28707 - regen.terminate()
28708 - received_signal.append(128 + signum)
28709 -
28710 - earlier_sigint_handler = signal.signal(signal.SIGINT, emergeexitsig)
28711 - earlier_sigterm_handler = signal.signal(signal.SIGTERM, emergeexitsig)
28712 + regen = MetadataRegen(portdb, max_jobs=max_jobs,
28713 + max_load=max_load, main=True)
28714
28715 - try:
28716 - regen.run()
28717 - finally:
28718 - # Restore previous handlers
28719 - if earlier_sigint_handler is not None:
28720 - signal.signal(signal.SIGINT, earlier_sigint_handler)
28721 - else:
28722 - signal.signal(signal.SIGINT, signal.SIG_DFL)
28723 - if earlier_sigterm_handler is not None:
28724 - signal.signal(signal.SIGTERM, earlier_sigterm_handler)
28725 - else:
28726 - signal.signal(signal.SIGTERM, signal.SIG_DFL)
28727 -
28728 - if received_signal:
28729 - sys.exit(received_signal[0])
28730 + signum = run_main_scheduler(regen)
28731 + if signum is not None:
28732 + sys.exit(128 + signum)
28733
28734 portage.writemsg_stdout("done!\n")
28735 return regen.returncode
28736 @@ -1914,37 +2020,110 @@ def action_search(root_config, myopts, myfiles, spinner):
28737 sys.exit(1)
28738 searchinstance.output()
28739
28740 -def action_sync(settings, trees, mtimedb, myopts, myaction):
28741 +def action_sync(emerge_config, trees=DeprecationWarning,
28742 + mtimedb=DeprecationWarning, opts=DeprecationWarning,
28743 + action=DeprecationWarning):
28744 +
28745 + if not isinstance(emerge_config, _emerge_config):
28746 + warnings.warn("_emerge.actions.action_sync() now expects "
28747 + "an _emerge_config instance as the first parameter",
28748 + DeprecationWarning, stacklevel=2)
28749 + emerge_config = load_emerge_config(
28750 + action=action, args=[], trees=trees, opts=opts)
28751 +
28752 + xterm_titles = "notitles" not in \
28753 + emerge_config.target_config.settings.features
28754 + emergelog(xterm_titles, " === sync")
28755 +
28756 + selected_repos = []
28757 + unknown_repo_names = []
28758 + missing_sync_type = []
28759 + if emerge_config.args:
28760 + for repo_name in emerge_config.args:
28761 + try:
28762 + repo = emerge_config.target_config.settings.repositories[repo_name]
28763 + except KeyError:
28764 + unknown_repo_names.append(repo_name)
28765 + else:
28766 + selected_repos.append(repo)
28767 + if repo.sync_type is None:
28768 + missing_sync_type.append(repo)
28769 +
28770 + if unknown_repo_names:
28771 + writemsg_level("!!! %s\n" % _("Unknown repo(s): %s") %
28772 + " ".join(unknown_repo_names),
28773 + level=logging.ERROR, noiselevel=-1)
28774 +
28775 + if missing_sync_type:
28776 + writemsg_level("!!! %s\n" %
28777 + _("Missing sync-type for repo(s): %s") %
28778 + " ".join(repo.name for repo in missing_sync_type),
28779 + level=logging.ERROR, noiselevel=-1)
28780 +
28781 + if unknown_repo_names or missing_sync_type:
28782 + return 1
28783 +
28784 + else:
28785 + selected_repos.extend(emerge_config.target_config.settings.repositories)
28786 +
28787 + for repo in selected_repos:
28788 + if repo.sync_type is not None:
28789 + returncode = _sync_repo(emerge_config, repo)
28790 + if returncode != os.EX_OK:
28791 + return returncode
28792 +
28793 + # Reload the whole config from scratch.
28794 + portage._sync_mode = False
28795 + load_emerge_config(emerge_config=emerge_config)
28796 + adjust_configs(emerge_config.opts, emerge_config.trees)
28797 +
28798 + if emerge_config.opts.get('--package-moves') != 'n' and \
28799 + _global_updates(emerge_config.trees,
28800 + emerge_config.target_config.mtimedb["updates"],
28801 + quiet=("--quiet" in emerge_config.opts)):
28802 + emerge_config.target_config.mtimedb.commit()
28803 + # Reload the whole config from scratch.
28804 + load_emerge_config(emerge_config=emerge_config)
28805 + adjust_configs(emerge_config.opts, emerge_config.trees)
28806 +
28807 + mybestpv = emerge_config.target_config.trees['porttree'].dbapi.xmatch(
28808 + "bestmatch-visible", portage.const.PORTAGE_PACKAGE_ATOM)
28809 + mypvs = portage.best(
28810 + emerge_config.target_config.trees['vartree'].dbapi.match(
28811 + portage.const.PORTAGE_PACKAGE_ATOM))
28812 +
28813 + chk_updated_cfg_files(emerge_config.target_config.root,
28814 + portage.util.shlex_split(
28815 + emerge_config.target_config.settings.get("CONFIG_PROTECT", "")))
28816 +
28817 + if mybestpv != mypvs and "--quiet" not in emerge_config.opts:
28818 + print()
28819 + print(warn(" * ")+bold("An update to portage is available.")+" It is _highly_ recommended")
28820 + print(warn(" * ")+"that you update portage now, before any other packages are updated.")
28821 + print()
28822 + print(warn(" * ")+"To update portage, run 'emerge --oneshot portage' now.")
28823 + print()
28824 +
28825 + display_news_notification(emerge_config.target_config, emerge_config.opts)
28826 + return os.EX_OK
28827 +
28828 +def _sync_repo(emerge_config, repo):
28829 + settings, trees, mtimedb = emerge_config
28830 + myopts = emerge_config.opts
28831 enter_invalid = '--ask-enter-invalid' in myopts
28832 xterm_titles = "notitles" not in settings.features
28833 - emergelog(xterm_titles, " === sync")
28834 - portdb = trees[settings['EROOT']]['porttree'].dbapi
28835 - myportdir = portdb.porttree_root
28836 - if not myportdir:
28837 - myportdir = settings.get('PORTDIR', '')
28838 - if myportdir and myportdir.strip():
28839 - myportdir = os.path.realpath(myportdir)
28840 - else:
28841 - myportdir = None
28842 + msg = ">>> Synchronization of repository '%s' located in '%s'..." % (repo.name, repo.location)
28843 + emergelog(xterm_titles, msg)
28844 + writemsg_level(msg + "\n")
28845 out = portage.output.EOutput()
28846 - global_config_path = GLOBAL_CONFIG_PATH
28847 - if settings['EPREFIX']:
28848 - global_config_path = os.path.join(settings['EPREFIX'],
28849 - GLOBAL_CONFIG_PATH.lstrip(os.sep))
28850 - if not myportdir:
28851 - sys.stderr.write("!!! PORTDIR is undefined. " + \
28852 - "Is %s/make.globals missing?\n" % global_config_path)
28853 - sys.exit(1)
28854 - if myportdir[-1]=="/":
28855 - myportdir=myportdir[:-1]
28856 try:
28857 - st = os.stat(myportdir)
28858 + st = os.stat(repo.location)
28859 except OSError:
28860 st = None
28861 if st is None:
28862 - print(">>>",myportdir,"not found, creating it.")
28863 - portage.util.ensure_dirs(myportdir, mode=0o755)
28864 - st = os.stat(myportdir)
28865 + print(">>> '%s' not found, creating it." % repo.location)
28866 + portage.util.ensure_dirs(repo.location, mode=0o755)
28867 + st = os.stat(repo.location)
28868
28869 usersync_uid = None
28870 spawn_kwargs = {}
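
action_sync() keeps its old five-argument signature alive by defaulting the legacy parameters to DeprecationWarning and rebuilding an emerge_config when a caller still passes them; syncing then loops over the selected repos and dispatches on repo.sync_type. The signature-migration trick is generic and worth spelling out; in this sketch load_config and the config class are placeholders, not Portage API:

    import warnings

    class _Config(object):
        """Placeholder for the new aggregated config object."""

    def load_config(action=None, args=None, trees=None, opts=None):
        """Placeholder for rebuilding a config from legacy arguments."""
        return _Config()

    def action_sync(config, trees=DeprecationWarning,
            mtimedb=DeprecationWarning, opts=DeprecationWarning,
            action=DeprecationWarning):
        """New-style entry point that still tolerates the old call shape."""
        if not isinstance(config, _Config):
            # Old callers passed (settings, trees, mtimedb, opts, action).
            warnings.warn("action_sync() now expects a config object "
                "as the first parameter", DeprecationWarning, stacklevel=2)
            config = load_config(action=action, args=[],
                trees=trees, opts=opts)
        # ... proceed with the new-style config ...
        return 0
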
28871 @@ -1977,59 +2156,51 @@ def action_sync(settings, trees, mtimedb, myopts, myaction):
28872 if rval != os.EX_OK:
28873 return rval
28874
28875 - syncuri = settings.get("SYNC", "").strip()
28876 - if not syncuri:
28877 - writemsg_level("!!! SYNC is undefined. " + \
28878 - "Is %s/make.globals missing?\n" % global_config_path,
28879 - noiselevel=-1, level=logging.ERROR)
28880 - return 1
28881 + syncuri = repo.sync_uri
28882
28883 - vcs_dirs = frozenset([".git", ".svn", "CVS", ".hg"])
28884 - vcs_dirs = vcs_dirs.intersection(os.listdir(myportdir))
28885 + vcs_dirs = frozenset(VCS_DIRS)
28886 + vcs_dirs = vcs_dirs.intersection(os.listdir(repo.location))
28887
28888 os.umask(0o022)
28889 dosyncuri = syncuri
28890 updatecache_flg = False
28891 - git = False
28892 - if myaction == "metadata":
28893 - print("skipping sync")
28894 - updatecache_flg = True
28895 - elif ".git" in vcs_dirs:
28896 + if repo.sync_type == "git":
28897 # Update existing git repository, and ignore the syncuri. We are
28898 # going to trust the user and assume that the user is in the branch
28899 # that he/she wants updated. We'll let the user manage branches with
28900 # git directly.
28901 if portage.process.find_binary("git") is None:
28902 msg = ["Command not found: git",
28903 - "Type \"emerge dev-util/git\" to enable git support."]
28904 + "Type \"emerge %s\" to enable git support." % portage.const.GIT_PACKAGE_ATOM]
28905 for l in msg:
28906 writemsg_level("!!! %s\n" % l,
28907 level=logging.ERROR, noiselevel=-1)
28908 return 1
28909 - msg = ">>> Starting git pull in %s..." % myportdir
28910 + msg = ">>> Starting git pull in %s..." % repo.location
28911 emergelog(xterm_titles, msg )
28912 writemsg_level(msg + "\n")
28913 exitcode = portage.process.spawn_bash("cd %s ; git pull" % \
28914 - (portage._shell_quote(myportdir),), **spawn_kwargs)
28915 + (portage._shell_quote(repo.location),),
28916 + **portage._native_kwargs(spawn_kwargs))
28917 if exitcode != os.EX_OK:
28918 - msg = "!!! git pull error in %s." % myportdir
28919 + msg = "!!! git pull error in %s." % repo.location
28920 emergelog(xterm_titles, msg)
28921 writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
28922 return exitcode
28923 - msg = ">>> Git pull in %s successful" % myportdir
28924 + msg = ">>> Git pull in %s successful" % repo.location
28925 emergelog(xterm_titles, msg)
28926 writemsg_level(msg + "\n")
28927 - git = True
28928 - elif syncuri[:8]=="rsync://" or syncuri[:6]=="ssh://":
28929 + elif repo.sync_type == "rsync":
28930 for vcs_dir in vcs_dirs:
28931 writemsg_level(("!!! %s appears to be under revision " + \
28932 "control (contains %s).\n!!! Aborting rsync sync.\n") % \
28933 - (myportdir, vcs_dir), level=logging.ERROR, noiselevel=-1)
28934 + (repo.location, vcs_dir), level=logging.ERROR, noiselevel=-1)
28935 return 1
28936 - if not os.path.exists("/usr/bin/rsync"):
28937 + rsync_binary = portage.process.find_binary("rsync")
28938 + if rsync_binary is None:
28939 print("!!! /usr/bin/rsync does not exist, so rsync support is disabled.")
28940 - print("!!! Type \"emerge net-misc/rsync\" to enable rsync support.")
28941 - sys.exit(1)
28942 + print("!!! Type \"emerge %s\" to enable rsync support." % portage.const.RSYNC_PACKAGE_ATOM)
28943 + return os.EX_UNAVAILABLE
28944 mytimeout=180
28945
28946 rsync_opts = []
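A side note on the spawn calls rewritten in the hunk above: spawn_kwargs is now filtered through portage._native_kwargs() because, with unicode_literals in effect, the dictionary keys are unicode strings and some Python 2 call paths reject non-native keyword names ("keywords must be strings"). A minimal sketch of the idea, with a hypothetical stand-in for spawn() (the real helper lives in portage and may differ in detail):

    import sys

    def _native_kwargs(kwargs):
        # On Python 2, coerce unicode keyword names back to native str so
        # that ** expansion is always accepted; on Python 3 this is a no-op.
        if sys.hexversion >= 0x3000000:
            return kwargs
        return dict((str(k), v) for k, v in kwargs.items())

    def fake_spawn(cmd, uid=None, gid=None):
        # Hypothetical stand-in for portage.process.spawn().
        return (cmd, uid, gid)

    spawn_kwargs = {u'uid': 250, u'gid': 250}    # illustrative values
    fake_spawn(['git', 'pull'], **_native_kwargs(spawn_kwargs))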
28947 @@ -2041,6 +2212,7 @@ def action_sync(settings, trees, mtimedb, myopts, myaction):
28948 "--safe-links", # Ignore links outside of tree
28949 "--perms", # Preserve permissions
28950 		"--times", # Preserve mod times
28951 + "--omit-dir-times",
28952 "--compress", # Compress the data transmitted
28953 "--force", # Force deletion on non-empty dirs
28954 "--whole-file", # Don't do block transfers, only entire files
28955 @@ -2103,14 +2275,14 @@ def action_sync(settings, trees, mtimedb, myopts, myaction):
28956
28957 # Real local timestamp file.
28958 servertimestampfile = os.path.join(
28959 - myportdir, "metadata", "timestamp.chk")
28960 + repo.location, "metadata", "timestamp.chk")
28961
28962 content = portage.util.grabfile(servertimestampfile)
28963 mytimestamp = 0
28964 if content:
28965 try:
28966 mytimestamp = time.mktime(time.strptime(content[0],
28967 - "%a, %d %b %Y %H:%M:%S +0000"))
28968 + TIMESTAMP_FORMAT))
28969 except (OverflowError, ValueError):
28970 pass
28971 del content
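The TIMESTAMP_FORMAT constant substituted above appears to carry the same RFC-2822-style stamp the removed literal spelled out, so the parse of metadata/timestamp.chk is unchanged. A small self-contained sketch (the helper name is made up for illustration):

    import time

    TIMESTAMP_FORMAT = "%a, %d %b %Y %H:%M:%S +0000"

    def parse_timestamp_chk(line):
        # Return epoch seconds for one line of metadata/timestamp.chk,
        # or 0 if it cannot be parsed, mirroring the try/except above.
        try:
            return time.mktime(time.strptime(line.strip(), TIMESTAMP_FORMAT))
        except (OverflowError, ValueError):
            return 0

    parse_timestamp_chk("Wed, 26 Mar 2014 13:30:01 +0000")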
28972 @@ -2134,9 +2306,12 @@ def action_sync(settings, trees, mtimedb, myopts, myaction):
28973 r"(rsync|ssh)://([^:/]+@)?(\[[:\da-fA-F]*\]|[^:/]*)(:[0-9]+)?",
28974 syncuri, maxsplit=4)[1:5]
28975 except ValueError:
28976 - writemsg_level("!!! SYNC is invalid: %s\n" % syncuri,
28977 + writemsg_level("!!! sync-uri is invalid: %s\n" % syncuri,
28978 noiselevel=-1, level=logging.ERROR)
28979 return 1
28980 +
28981 + ssh_opts = settings.get("PORTAGE_SSH_OPTS")
28982 +
28983 if port is None:
28984 port=""
28985 if user_name is None:
28986 @@ -2252,7 +2427,10 @@ def action_sync(settings, trees, mtimedb, myopts, myaction):
28987 if mytimestamp != 0 and "--quiet" not in myopts:
28988 print(">>> Checking server timestamp ...")
28989
28990 - rsynccommand = ["/usr/bin/rsync"] + rsync_opts + extra_rsync_opts
28991 + rsynccommand = [rsync_binary] + rsync_opts + extra_rsync_opts
28992 +
28993 + if proto == 'ssh' and ssh_opts:
28994 + rsynccommand.append("--rsh=ssh " + ssh_opts)
28995
28996 if "--debug" in myopts:
28997 print(rsynccommand)
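The two added lines above splice PORTAGE_SSH_OPTS into the rsync command when the sync URI uses the ssh protocol. A sketch with made-up option values, showing only how the --rsh flag is composed:

    ssh_opts = "-p 2222 -o BatchMode=yes"        # e.g. from PORTAGE_SSH_OPTS
    rsynccommand = ["rsync", "--recursive", "--links"]

    proto = "ssh"
    if proto == "ssh" and ssh_opts:
        # rsync treats the --rsh value as a remote-shell command line, so
        # the extra options ride along with the ssh invocation.
        rsynccommand.append("--rsh=ssh " + ssh_opts)

    # rsynccommand[-1] == "--rsh=ssh -p 2222 -o BatchMode=yes"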
28998 @@ -2298,7 +2476,8 @@ def action_sync(settings, trees, mtimedb, myopts, myaction):
28999 rsync_initial_timeout)
29000
29001 mypids.extend(portage.process.spawn(
29002 - mycommand, returnpid=True, **spawn_kwargs))
29003 + mycommand, returnpid=True,
29004 + **portage._native_kwargs(spawn_kwargs)))
29005 exitcode = os.waitpid(mypids[0], 0)[1]
29006 if usersync_uid is not None:
29007 portage.util.apply_permissions(tmpservertimestampfile,
29008 @@ -2328,12 +2507,11 @@ def action_sync(settings, trees, mtimedb, myopts, myaction):
29009 exitcode = (exitcode & 0xff) << 8
29010 else:
29011 exitcode = exitcode >> 8
29012 - if mypids:
29013 - portage.process.spawned_pids.remove(mypids[0])
29014 +
29015 if content:
29016 try:
29017 servertimestamp = time.mktime(time.strptime(
29018 - content[0], "%a, %d %b %Y %H:%M:%S +0000"))
29019 + content[0], TIMESTAMP_FORMAT))
29020 except (OverflowError, ValueError):
29021 pass
29022 del mycommand, mypids, content
29023 @@ -2349,7 +2527,7 @@ def action_sync(settings, trees, mtimedb, myopts, myaction):
29024 print(">>> In order to force sync, remove '%s'." % servertimestampfile)
29025 print(">>>")
29026 print()
29027 - sys.exit(0)
29028 + return os.EX_OK
29029 elif (servertimestamp != 0) and (servertimestamp < mytimestamp):
29030 emergelog(xterm_titles,
29031 ">>> Server out of date: %s" % dosyncuri)
29032 @@ -2363,8 +2541,33 @@ def action_sync(settings, trees, mtimedb, myopts, myaction):
29033 exitcode = SERVER_OUT_OF_DATE
29034 elif (servertimestamp == 0) or (servertimestamp > mytimestamp):
29035 # actual sync
29036 - mycommand = rsynccommand + [dosyncuri+"/", myportdir]
29037 - exitcode = portage.process.spawn(mycommand, **spawn_kwargs)
29038 + mycommand = rsynccommand + [dosyncuri+"/", repo.location]
29039 + exitcode = None
29040 + try:
29041 + exitcode = portage.process.spawn(mycommand,
29042 + **portage._native_kwargs(spawn_kwargs))
29043 + finally:
29044 + if exitcode is None:
29045 + # interrupted
29046 + exitcode = 128 + signal.SIGINT
29047 +
29048 + # 0 Success
29049 + # 1 Syntax or usage error
29050 + # 2 Protocol incompatibility
29051 + # 5 Error starting client-server protocol
29052 + # 35 Timeout waiting for daemon connection
29053 + if exitcode not in (0, 1, 2, 5, 35):
29054 + # If the exit code is not among those listed above,
29055 + # then we may have a partial/inconsistent sync
29056 + # state, so our previously read timestamp as well
29057 + # as the corresponding file can no longer be
29058 + # trusted.
29059 + mytimestamp = 0
29060 + try:
29061 + os.unlink(servertimestampfile)
29062 + except OSError:
29063 + pass
29064 +
29065 if exitcode in [0,1,3,4,11,14,20,21]:
29066 break
29067 elif exitcode in [1,3,4,11,14,20,21]:
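To restate the new error handling in one place: only a small set of rsync exit codes leaves the previously read timestamp trustworthy; anything else (including an interrupt, mapped to 128 + SIGINT) may indicate a partial transfer, so the cached timestamp file is discarded. A condensed, hypothetical helper capturing that rule:

    import os
    import signal

    # Exit codes after which the local timestamp can still be trusted:
    # success plus the failure modes enumerated in the comments above.
    TRUSTED_RSYNC_EXITCODES = frozenset([0, 1, 2, 5, 35])

    def note_sync_result(exitcode, servertimestampfile):
        if exitcode is None:
            exitcode = 128 + signal.SIGINT   # spawn was interrupted
        if exitcode not in TRUSTED_RSYNC_EXITCODES:
            # Possibly partial/inconsistent sync state: drop the stamp.
            try:
                os.unlink(servertimestampfile)
            except OSError:
                pass
        return exitcode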
29068 @@ -2390,23 +2593,23 @@ def action_sync(settings, trees, mtimedb, myopts, myaction):
29069 if (exitcode==0):
29070 emergelog(xterm_titles, "=== Sync completed with %s" % dosyncuri)
29071 elif exitcode == SERVER_OUT_OF_DATE:
29072 - sys.exit(1)
29073 + return 1
29074 elif exitcode == EXCEEDED_MAX_RETRIES:
29075 sys.stderr.write(
29076 ">>> Exceeded PORTAGE_RSYNC_RETRIES: %s\n" % maxretries)
29077 - sys.exit(1)
29078 + return 1
29079 elif (exitcode>0):
29080 msg = []
29081 if exitcode==1:
29082 msg.append("Rsync has reported that there is a syntax error. Please ensure")
29083 - msg.append("that your SYNC statement is proper.")
29084 - msg.append("SYNC=" + settings["SYNC"])
29085 + msg.append("that sync-uri attribute for repository '%s' is proper." % repo.name)
29086 + msg.append("sync-uri: '%s'" % repo.sync_uri)
29087 elif exitcode==11:
29088 msg.append("Rsync has reported that there is a File IO error. Normally")
29089 msg.append("this means your disk is full, but can be caused by corruption")
29090 - msg.append("on the filesystem that contains PORTDIR. Please investigate")
29091 + msg.append("on the filesystem that contains repository '%s'. Please investigate" % repo.name)
29092 msg.append("and try again after the problem has been fixed.")
29093 - msg.append("PORTDIR=" + settings["PORTDIR"])
29094 + msg.append("Location of repository: '%s'" % repo.location)
29095 elif exitcode==20:
29096 msg.append("Rsync was killed before it finished.")
29097 else:
29098 @@ -2417,115 +2620,76 @@ def action_sync(settings, trees, mtimedb, myopts, myaction):
29099 msg.append("(and possibly your system's filesystem) configuration.")
29100 for line in msg:
29101 out.eerror(line)
29102 - sys.exit(exitcode)
29103 - elif syncuri[:6]=="cvs://":
29104 + return exitcode
29105 + elif repo.sync_type == "cvs":
29106 if not os.path.exists("/usr/bin/cvs"):
29107 print("!!! /usr/bin/cvs does not exist, so CVS support is disabled.")
29108 - print("!!! Type \"emerge dev-vcs/cvs\" to enable CVS support.")
29109 - sys.exit(1)
29110 - cvsroot=syncuri[6:]
29111 - cvsdir=os.path.dirname(myportdir)
29112 - if not os.path.exists(myportdir+"/CVS"):
29113 + print("!!! Type \"emerge %s\" to enable CVS support." % portage.const.CVS_PACKAGE_ATOM)
29114 + return os.EX_UNAVAILABLE
29115 + cvs_root = syncuri
29116 + if cvs_root.startswith("cvs://"):
29117 + cvs_root = cvs_root[6:]
29118 + if not os.path.exists(os.path.join(repo.location, "CVS")):
29119 #initial checkout
29120 print(">>> Starting initial cvs checkout with "+syncuri+"...")
29121 - if os.path.exists(cvsdir+"/gentoo-x86"):
29122 - print("!!! existing",cvsdir+"/gentoo-x86 directory; exiting.")
29123 - sys.exit(1)
29124 try:
29125 - os.rmdir(myportdir)
29126 + os.rmdir(repo.location)
29127 except OSError as e:
29128 if e.errno != errno.ENOENT:
29129 sys.stderr.write(
29130 - "!!! existing '%s' directory; exiting.\n" % myportdir)
29131 - sys.exit(1)
29132 + "!!! existing '%s' directory; exiting.\n" % repo.location)
29133 + return 1
29134 del e
29135 if portage.process.spawn_bash(
29136 - "cd %s; exec cvs -z0 -d %s co -P gentoo-x86" % \
29137 - (portage._shell_quote(cvsdir), portage._shell_quote(cvsroot)),
29138 - **spawn_kwargs) != os.EX_OK:
29139 + "cd %s; exec cvs -z0 -d %s co -P -d %s %s" %
29140 + (portage._shell_quote(os.path.dirname(repo.location)), portage._shell_quote(cvs_root),
29141 + portage._shell_quote(os.path.basename(repo.location)), portage._shell_quote(repo.sync_cvs_repo)),
29142 + **portage._native_kwargs(spawn_kwargs)) != os.EX_OK:
29143 print("!!! cvs checkout error; exiting.")
29144 - sys.exit(1)
29145 - os.rename(os.path.join(cvsdir, "gentoo-x86"), myportdir)
29146 + return 1
29147 else:
29148 #cvs update
29149 print(">>> Starting cvs update with "+syncuri+"...")
29150 retval = portage.process.spawn_bash(
29151 "cd %s; exec cvs -z0 -q update -dP" % \
29152 - (portage._shell_quote(myportdir),), **spawn_kwargs)
29153 + (portage._shell_quote(repo.location),),
29154 + **portage._native_kwargs(spawn_kwargs))
29155 if retval != os.EX_OK:
29156 writemsg_level("!!! cvs update error; exiting.\n",
29157 noiselevel=-1, level=logging.ERROR)
29158 - sys.exit(retval)
29159 + return retval
29160 dosyncuri = syncuri
29161 - else:
29162 - writemsg_level("!!! Unrecognized protocol: SYNC='%s'\n" % (syncuri,),
29163 - noiselevel=-1, level=logging.ERROR)
29164 - return 1
29165
29166 # Reload the whole config from scratch.
29167 - settings, trees, mtimedb = load_emerge_config(trees=trees)
29168 - adjust_configs(myopts, trees)
29169 - root_config = trees[settings['EROOT']]['root_config']
29170 + settings, trees, mtimedb = load_emerge_config(emerge_config=emerge_config)
29171 + adjust_configs(emerge_config.opts, emerge_config.trees)
29172 portdb = trees[settings['EROOT']]['porttree'].dbapi
29173
29174 - if git:
29175 + if repo.sync_type == "git":
29176 # NOTE: Do this after reloading the config, in case
29177 # it did not exist prior to sync, so that the config
29178 # and portdb properly account for its existence.
29179 - exitcode = git_sync_timestamps(portdb, myportdir)
29180 + exitcode = git_sync_timestamps(portdb, repo.location)
29181 if exitcode == os.EX_OK:
29182 updatecache_flg = True
29183
29184 - if updatecache_flg and \
29185 - myaction != "metadata" and \
29186 - "metadata-transfer" not in settings.features:
29187 + if updatecache_flg and "metadata-transfer" not in settings.features:
29188 updatecache_flg = False
29189
29190 if updatecache_flg and \
29191 - os.path.exists(os.path.join(myportdir, 'metadata', 'cache')):
29192 + os.path.exists(os.path.join(repo.location, 'metadata', 'cache')):
29193
29194 - # Only update cache for myportdir since that's
29195 + # Only update cache for repo.location since that's
29196 # the only one that's been synced here.
29197 - action_metadata(settings, portdb, myopts, porttrees=[myportdir])
29198 -
29199 - if myopts.get('--package-moves') != 'n' and \
29200 - _global_updates(trees, mtimedb["updates"], quiet=("--quiet" in myopts)):
29201 - mtimedb.commit()
29202 - # Reload the whole config from scratch.
29203 - settings, trees, mtimedb = load_emerge_config(trees=trees)
29204 - adjust_configs(myopts, trees)
29205 - portdb = trees[settings['EROOT']]['porttree'].dbapi
29206 - root_config = trees[settings['EROOT']]['root_config']
29207 -
29208 - mybestpv = portdb.xmatch("bestmatch-visible",
29209 - portage.const.PORTAGE_PACKAGE_ATOM)
29210 - mypvs = portage.best(
29211 - trees[settings['EROOT']]['vartree'].dbapi.match(
29212 - portage.const.PORTAGE_PACKAGE_ATOM))
29213 -
29214 - chk_updated_cfg_files(settings["EROOT"],
29215 - portage.util.shlex_split(settings.get("CONFIG_PROTECT", "")))
29216 -
29217 - if myaction != "metadata":
29218 - postsync = os.path.join(settings["PORTAGE_CONFIGROOT"],
29219 - portage.USER_CONFIG_PATH, "bin", "post_sync")
29220 - if os.access(postsync, os.X_OK):
29221 - retval = portage.process.spawn(
29222 - [postsync, dosyncuri], env=settings.environ())
29223 - if retval != os.EX_OK:
29224 - writemsg_level(
29225 - " %s spawn failed of %s\n" % (bad("*"), postsync,),
29226 - level=logging.ERROR, noiselevel=-1)
29227 + action_metadata(settings, portdb, myopts, porttrees=[repo.location])
29228
29229 - if(mybestpv != mypvs) and not "--quiet" in myopts:
29230 - print()
29231 - print(warn(" * ")+bold("An update to portage is available.")+" It is _highly_ recommended")
29232 - print(warn(" * ")+"that you update portage now, before any other packages are updated.")
29233 - print()
29234 - print(warn(" * ")+"To update portage, run 'emerge portage' now.")
29235 - print()
29236 + postsync = os.path.join(settings["PORTAGE_CONFIGROOT"], portage.USER_CONFIG_PATH, "bin", "post_sync")
29237 + if os.access(postsync, os.X_OK):
29238 + retval = portage.process.spawn([postsync, dosyncuri], env=settings.environ())
29239 + if retval != os.EX_OK:
29240 + writemsg_level(" %s spawn failed of %s\n" % (bad("*"), postsync,),
29241 + level=logging.ERROR, noiselevel=-1)
29242
29243 - display_news_notification(root_config, myopts)
29244 return os.EX_OK
29245
29246 def action_uninstall(settings, trees, ldpath_mtimes,
29247 @@ -2647,13 +2811,8 @@ def action_uninstall(settings, trees, ldpath_mtimes,
29248
29249 if owners:
29250 for cpv in owners:
29251 - slot = vardb.aux_get(cpv, ['SLOT'])[0]
29252 - if not slot:
29253 - # portage now masks packages with missing slot, but it's
29254 - # possible that one was installed by an older version
29255 - atom = portage.cpv_getkey(cpv)
29256 - else:
29257 - atom = '%s:%s' % (portage.cpv_getkey(cpv), slot)
29258 + pkg = vardb._pkg_str(cpv, None)
29259 + atom = '%s:%s' % (pkg.cp, pkg.slot)
29260 valid_atoms.append(portage.dep.Atom(atom))
29261 else:
29262 writemsg_level(("!!! '%s' is not claimed " + \
29263 @@ -2677,20 +2836,20 @@ def action_uninstall(settings, trees, ldpath_mtimes,
29264 if action == 'deselect':
29265 return action_deselect(settings, trees, opts, valid_atoms)
29266
29267 - # Create a Scheduler for calls to unmerge(), in order to cause
29268 - # redirection of ebuild phase output to logs as required for
29269 - # options such as --quiet.
29270 - sched = Scheduler(settings, trees, None, opts,
29271 - spinner, uninstall_only=True)
29272 - sched._background = sched._background_mode()
29273 - sched._status_display.quiet = True
29274 -
29275 - if sched._background:
29276 - sched.settings.unlock()
29277 - sched.settings["PORTAGE_BACKGROUND"] = "1"
29278 - sched.settings.backup_changes("PORTAGE_BACKGROUND")
29279 - sched.settings.lock()
29280 - sched.pkgsettings[eroot] = portage.config(clone=sched.settings)
29281 + # Use the same logic as the Scheduler class to trigger redirection
29282 + # of ebuild pkg_prerm/postrm phase output to logs as appropriate
29283 + # for options such as --jobs, --quiet and --quiet-build.
29284 + max_jobs = opts.get("--jobs", 1)
29285 + background = (max_jobs is True or max_jobs > 1 or
29286 + "--quiet" in opts or opts.get("--quiet-build") == "y")
29287 + sched_iface = SchedulerInterface(global_event_loop(),
29288 + is_background=lambda: background)
29289 +
29290 + if background:
29291 + settings.unlock()
29292 + settings["PORTAGE_BACKGROUND"] = "1"
29293 + settings.backup_changes("PORTAGE_BACKGROUND")
29294 + settings.lock()
29295
29296 if action in ('clean', 'unmerge') or \
29297 (action == 'prune' and "--nodeps" in opts):
29298 @@ -2698,10 +2857,11 @@ def action_uninstall(settings, trees, ldpath_mtimes,
29299 ordered = action == 'unmerge'
29300 rval = unmerge(trees[settings['EROOT']]['root_config'], opts, action,
29301 valid_atoms, ldpath_mtimes, ordered=ordered,
29302 - scheduler=sched._sched_iface)
29303 + scheduler=sched_iface)
29304 else:
29305 rval = action_depclean(settings, trees, ldpath_mtimes,
29306 - opts, action, valid_atoms, spinner, scheduler=sched._sched_iface)
29307 + opts, action, valid_atoms, spinner,
29308 + scheduler=sched_iface)
29309
29310 return rval
29311
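The Scheduler instance formerly built here only to obtain its _sched_iface is gone; action_uninstall() now derives background mode directly from the options and hands a SchedulerInterface to unmerge()/action_depclean(). The decision rule, restated on a plain dict for illustration:

    def wants_background(opts):
        # Same test as the SchedulerInterface setup above.
        max_jobs = opts.get("--jobs", 1)
        return (max_jobs is True or max_jobs > 1 or
            "--quiet" in opts or opts.get("--quiet-build") == "y")

    wants_background({})                        # False: one job, normal output
    wants_background({"--jobs": 4})             # True:  parallel builds
    wants_background({"--quiet": True})         # True
    wants_background({"--quiet-build": "y"})    # True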
29312 @@ -2807,6 +2967,10 @@ def adjust_config(myopts, settings):
29313 settings["NOCOLOR"] = "true"
29314 settings.backup_changes("NOCOLOR")
29315
29316 + if "--pkg-format" in myopts:
29317 + settings["PORTAGE_BINPKG_FORMAT"] = myopts["--pkg-format"]
29318 + settings.backup_changes("PORTAGE_BINPKG_FORMAT")
29319 +
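The new block maps a command-line --pkg-format value onto PORTAGE_BINPKG_FORMAT for the current run; "tar" below is only an example value and has to be one of portage.const.SUPPORTED_BINPKG_FORMATS, which run_action() checks later:

    myopts = {"--pkg-format": "tar"}     # illustrative
    settings = {}

    if "--pkg-format" in myopts:
        settings["PORTAGE_BINPKG_FORMAT"] = myopts["--pkg-format"]

    # settings["PORTAGE_BINPKG_FORMAT"] == "tar"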
29320 def display_missing_pkg_set(root_config, set_name):
29321
29322 msg = []
29323 @@ -3030,61 +3194,53 @@ def git_sync_timestamps(portdb, portdir):
29324
29325 return os.EX_OK
29326
29327 -def load_emerge_config(trees=None):
29328 +class _emerge_config(SlotObject):
29329 +
29330 + __slots__ = ('action', 'args', 'opts',
29331 + 'running_config', 'target_config', 'trees')
29332 +
29333 + # Support unpack as tuple, for load_emerge_config backward compatibility.
29334 + def __iter__(self):
29335 + yield self.target_config.settings
29336 + yield self.trees
29337 + yield self.target_config.mtimedb
29338 +
29339 + def __getitem__(self, index):
29340 + return list(self)[index]
29341 +
29342 + def __len__(self):
29343 + return 3
29344 +
29345 +def load_emerge_config(emerge_config=None, **kargs):
29346 +
29347 + if emerge_config is None:
29348 + emerge_config = _emerge_config(**kargs)
29349 +
29350 kwargs = {}
29351 - for k, envvar in (("config_root", "PORTAGE_CONFIGROOT"), ("target_root", "ROOT")):
29352 + for k, envvar in (("config_root", "PORTAGE_CONFIGROOT"), ("target_root", "ROOT"),
29353 + ("eprefix", "EPREFIX")):
29354 v = os.environ.get(envvar, None)
29355 if v and v.strip():
29356 kwargs[k] = v
29357 - trees = portage.create_trees(trees=trees, **kwargs)
29358 + emerge_config.trees = portage.create_trees(trees=emerge_config.trees,
29359 + **portage._native_kwargs(kwargs))
29360
29361 - for root_trees in trees.values():
29362 + for root_trees in emerge_config.trees.values():
29363 settings = root_trees["vartree"].settings
29364 settings._init_dirs()
29365 setconfig = load_default_config(settings, root_trees)
29366 root_trees["root_config"] = RootConfig(settings, root_trees, setconfig)
29367
29368 - settings = trees[trees._target_eroot]['vartree'].settings
29369 - mtimedbfile = os.path.join(settings['EROOT'], portage.CACHE_PATH, "mtimedb")
29370 - mtimedb = portage.MtimeDB(mtimedbfile)
29371 - QueryCommand._db = trees
29372 - return settings, trees, mtimedb
29373 -
29374 -def chk_updated_cfg_files(eroot, config_protect):
29375 - target_root = eroot
29376 - result = list(
29377 - portage.util.find_updated_config_files(target_root, config_protect))
29378 -
29379 - for x in result:
29380 - writemsg_level("\n %s " % (colorize("WARN", "* " + _("IMPORTANT:"))),
29381 - level=logging.INFO, noiselevel=-1)
29382 - if not x[1]: # it's a protected file
29383 - writemsg_level( _("config file '%s' needs updating.\n") % x[0],
29384 - level=logging.INFO, noiselevel=-1)
29385 - else: # it's a protected dir
29386 - if len(x[1]) == 1:
29387 - head, tail = os.path.split(x[1][0])
29388 - tail = tail[len("._cfg0000_"):]
29389 - fpath = os.path.join(head, tail)
29390 - writemsg_level(_("config file '%s' needs updating.\n") % fpath,
29391 - level=logging.INFO, noiselevel=-1)
29392 - else:
29393 - writemsg_level( _("%d config files in '%s' need updating.\n") % \
29394 - (len(x[1]), x[0]), level=logging.INFO, noiselevel=-1)
29395 -
29396 - if result:
29397 - print(" "+yellow("*")+ " See the "+colorize("INFORM", _("CONFIGURATION FILES"))\
29398 - + " " + _("section of the") + " " + bold("emerge"))
29399 - print(" "+yellow("*")+ " " + _("man page to learn how to update config files."))
29400 -
29401 + target_eroot = emerge_config.trees._target_eroot
29402 + emerge_config.target_config = \
29403 + emerge_config.trees[target_eroot]['root_config']
29404 + emerge_config.target_config.mtimedb = portage.MtimeDB(
29405 + os.path.join(target_eroot, portage.CACHE_PATH, "mtimedb"))
29406 + emerge_config.running_config = emerge_config.trees[
29407 + emerge_config.trees._running_eroot]['root_config']
29408 + QueryCommand._db = emerge_config.trees
29409
29410 -def display_news_notification(root_config, myopts):
29411 - if "news" not in root_config.settings.features:
29412 - return
29413 - portdb = root_config.trees["porttree"].dbapi
29414 - vardb = root_config.trees["vartree"].dbapi
29415 - news_counts = count_unread_news(portdb, vardb)
29416 - display_news_notifications(news_counts)
29417 + return emerge_config
29418
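Because _emerge_config implements __iter__, __getitem__ and __len__, both calling conventions for load_emerge_config() keep working, which is the point of the compatibility comment above:

    # New style: one object with named fields.
    emerge_config = load_emerge_config()
    settings = emerge_config.target_config.settings

    # Old style: the object unpacks as the familiar 3-tuple.
    settings, trees, mtimedb = load_emerge_config()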
29419 def getgccversion(chost):
29420 """
29421 @@ -3140,3 +3296,771 @@ def getgccversion(chost):
29422
29423 portage.writemsg(gcc_not_found_error, noiselevel=-1)
29424 return "[unavailable]"
29425 +
29426 +# Warn about features that may confuse users and
29427 +# lead them to report invalid bugs.
29428 +_emerge_features_warn = frozenset(['keeptemp', 'keepwork'])
29429 +
29430 +def validate_ebuild_environment(trees):
29431 + features_warn = set()
29432 + for myroot in trees:
29433 + settings = trees[myroot]["vartree"].settings
29434 + settings.validate()
29435 + features_warn.update(
29436 + _emerge_features_warn.intersection(settings.features))
29437 +
29438 + if features_warn:
29439 + msg = "WARNING: The FEATURES variable contains one " + \
29440 + "or more values that should be disabled under " + \
29441 + "normal circumstances: %s" % " ".join(features_warn)
29442 + out = portage.output.EOutput()
29443 + for line in textwrap.wrap(msg, 65):
29444 + out.ewarn(line)
29445 +
29446 +def check_procfs():
29447 + procfs_path = '/proc'
29448 + if platform.system() not in ("Linux",) or \
29449 + os.path.ismount(procfs_path):
29450 + return os.EX_OK
29451 + msg = "It seems that %s is not mounted. You have been warned." % procfs_path
29452 + writemsg_level("".join("!!! %s\n" % l for l in textwrap.wrap(msg, 70)),
29453 + level=logging.ERROR, noiselevel=-1)
29454 + return 1
29455 +
29456 +def config_protect_check(trees):
29457 + for root, root_trees in trees.items():
29458 + settings = root_trees["root_config"].settings
29459 + if not settings.get("CONFIG_PROTECT"):
29460 + msg = "!!! CONFIG_PROTECT is empty"
29461 + if settings["ROOT"] != "/":
29462 + msg += " for '%s'" % root
29463 + msg += "\n"
29464 + writemsg_level(msg, level=logging.WARN, noiselevel=-1)
29465 +
29466 +def apply_priorities(settings):
29467 + ionice(settings)
29468 + nice(settings)
29469 +
29470 +def nice(settings):
29471 + try:
29472 + os.nice(int(settings.get("PORTAGE_NICENESS", "0")))
29473 + except (OSError, ValueError) as e:
29474 + out = portage.output.EOutput()
29475 + out.eerror("Failed to change nice value to '%s'" % \
29476 + settings["PORTAGE_NICENESS"])
29477 + out.eerror("%s\n" % str(e))
29478 +
29479 +def ionice(settings):
29480 +
29481 + ionice_cmd = settings.get("PORTAGE_IONICE_COMMAND")
29482 + if ionice_cmd:
29483 + ionice_cmd = portage.util.shlex_split(ionice_cmd)
29484 + if not ionice_cmd:
29485 + return
29486 +
29487 + variables = {"PID" : str(os.getpid())}
29488 + cmd = [varexpand(x, mydict=variables) for x in ionice_cmd]
29489 +
29490 + try:
29491 + rval = portage.process.spawn(cmd, env=os.environ)
29492 + except portage.exception.CommandNotFound:
29493 + # The OS kernel probably doesn't support ionice,
29494 + # so return silently.
29495 + return
29496 +
29497 + if rval != os.EX_OK:
29498 + out = portage.output.EOutput()
29499 + out.eerror("PORTAGE_IONICE_COMMAND returned %d" % (rval,))
29500 + out.eerror("See the make.conf(5) man page for PORTAGE_IONICE_COMMAND usage instructions.")
29501 +
29502 +def setconfig_fallback(root_config):
29503 + setconfig = root_config.setconfig
29504 + setconfig._create_default_config()
29505 + setconfig._parse(update=True)
29506 + root_config.sets = setconfig.getSets()
29507 +
29508 +def get_missing_sets(root_config):
29509 + # emerge requires existence of "world", "selected", and "system"
29510 + missing_sets = []
29511 +
29512 + for s in ("selected", "system", "world",):
29513 + if s not in root_config.sets:
29514 + missing_sets.append(s)
29515 +
29516 + return missing_sets
29517 +
29518 +def missing_sets_warning(root_config, missing_sets):
29519 + if len(missing_sets) > 2:
29520 + missing_sets_str = ", ".join('"%s"' % s for s in missing_sets[:-1])
29521 + missing_sets_str += ', and "%s"' % missing_sets[-1]
29522 + elif len(missing_sets) == 2:
29523 + missing_sets_str = '"%s" and "%s"' % tuple(missing_sets)
29524 + else:
29525 + missing_sets_str = '"%s"' % missing_sets[-1]
29526 + msg = ["emerge: incomplete set configuration, " + \
29527 + "missing set(s): %s" % missing_sets_str]
29528 + if root_config.sets:
29529 + msg.append(" sets defined: %s" % ", ".join(root_config.sets))
29530 + global_config_path = portage.const.GLOBAL_CONFIG_PATH
29531 + if portage.const.EPREFIX:
29532 + global_config_path = os.path.join(portage.const.EPREFIX,
29533 + portage.const.GLOBAL_CONFIG_PATH.lstrip(os.sep))
29534 + msg.append(" This usually means that '%s'" % \
29535 + (os.path.join(global_config_path, "sets/portage.conf"),))
29536 + msg.append(" is missing or corrupt.")
29537 + msg.append(" Falling back to default world and system set configuration!!!")
29538 + for line in msg:
29539 + writemsg_level(line + "\n", level=logging.ERROR, noiselevel=-1)
29540 +
29541 +def ensure_required_sets(trees):
29542 + warning_shown = False
29543 + for root_trees in trees.values():
29544 + missing_sets = get_missing_sets(root_trees["root_config"])
29545 + if missing_sets and not warning_shown:
29546 + warning_shown = True
29547 + missing_sets_warning(root_trees["root_config"], missing_sets)
29548 + if missing_sets:
29549 + setconfig_fallback(root_trees["root_config"])
29550 +
29551 +def expand_set_arguments(myfiles, myaction, root_config):
29552 + retval = os.EX_OK
29553 + setconfig = root_config.setconfig
29554 +
29555 + sets = setconfig.getSets()
29556 +
29557 + # In order to know exactly which atoms/sets should be added to the
29558 + # world file, the depgraph performs set expansion later. It will get
29559 + # confused about where the atoms came from if it's not allowed to
29560 + # expand them itself.
29561 + do_not_expand = myaction is None
29562 + newargs = []
29563 + for a in myfiles:
29564 + if a in ("system", "world"):
29565 + newargs.append(SETPREFIX+a)
29566 + else:
29567 + newargs.append(a)
29568 + myfiles = newargs
29569 + del newargs
29570 + newargs = []
29571 +
29572 + # separators for set arguments
29573 + ARG_START = "{"
29574 + ARG_END = "}"
29575 +
29576 + for i in range(0, len(myfiles)):
29577 + if myfiles[i].startswith(SETPREFIX):
29578 + start = 0
29579 + end = 0
29580 + x = myfiles[i][len(SETPREFIX):]
29581 + newset = ""
29582 + while x:
29583 + start = x.find(ARG_START)
29584 + end = x.find(ARG_END)
29585 + if start > 0 and start < end:
29586 + namepart = x[:start]
29587 + argpart = x[start+1:end]
29588 +
29589 + # TODO: implement proper quoting
29590 + args = argpart.split(",")
29591 + options = {}
29592 + for a in args:
29593 + if "=" in a:
29594 + k, v = a.split("=", 1)
29595 + options[k] = v
29596 + else:
29597 + options[a] = "True"
29598 + setconfig.update(namepart, options)
29599 + newset += (x[:start-len(namepart)]+namepart)
29600 + x = x[end+len(ARG_END):]
29601 + else:
29602 + newset += x
29603 + x = ""
29604 + myfiles[i] = SETPREFIX+newset
29605 +
29606 + sets = setconfig.getSets()
29607 +
29608 + # display errors that occurred while loading the SetConfig instance
29609 + for e in setconfig.errors:
29610 + print(colorize("BAD", "Error during set creation: %s" % e))
29611 +
29612 + unmerge_actions = ("unmerge", "prune", "clean", "depclean")
29613 +
29614 + for a in myfiles:
29615 + if a.startswith(SETPREFIX):
29616 + s = a[len(SETPREFIX):]
29617 + if s not in sets:
29618 + display_missing_pkg_set(root_config, s)
29619 + return (None, 1)
29620 + if s == "installed":
29621 + msg = ("The @installed set is deprecated and will soon be "
29622 + "removed. Please refer to bug #387059 for details.")
29623 + out = portage.output.EOutput()
29624 + for line in textwrap.wrap(msg, 50):
29625 + out.ewarn(line)
29626 + setconfig.active.append(s)
29627 +
29628 + if do_not_expand:
29629 + # Loading sets can be slow, so skip it here, in order
29630 + # to allow the depgraph to indicate progress with the
29631 + # spinner while sets are loading (bug #461412).
29632 + newargs.append(a)
29633 + continue
29634 +
29635 + try:
29636 + set_atoms = setconfig.getSetAtoms(s)
29637 + except portage.exception.PackageSetNotFound as e:
29638 + writemsg_level(("emerge: the given set '%s' " + \
29639 + "contains a non-existent set named '%s'.\n") % \
29640 + (s, e), level=logging.ERROR, noiselevel=-1)
29641 + if s in ('world', 'selected') and \
29642 + SETPREFIX + e.value in sets['selected']:
29643 + writemsg_level(("Use `emerge --deselect %s%s` to "
29644 + "remove this set from world_sets.\n") %
29645 + (SETPREFIX, e,), level=logging.ERROR,
29646 + noiselevel=-1)
29647 + return (None, 1)
29648 + if myaction in unmerge_actions and \
29649 + not sets[s].supportsOperation("unmerge"):
29650 + writemsg_level("emerge: the given set '%s' does " % s + \
29651 + "not support unmerge operations\n",
29652 + level=logging.ERROR, noiselevel=-1)
29653 + retval = 1
29654 + elif not set_atoms:
29655 + writemsg_level("emerge: '%s' is an empty set\n" % s,
29656 + level=logging.INFO, noiselevel=-1)
29657 + else:
29658 + newargs.extend(set_atoms)
29659 + for error_msg in sets[s].errors:
29660 + writemsg_level("%s\n" % (error_msg,),
29661 + level=logging.ERROR, noiselevel=-1)
29662 + else:
29663 + newargs.append(a)
29664 + return (newargs, retval)
29665 +
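For reference, the brace syntax handled by the loop above lets a set argument carry options that are forwarded to setconfig.update() before the braces are stripped. A standalone sketch of the parse, using a hypothetical set name and options:

    SETPREFIX = "@"
    arg = "@someset{foo=bar,baz}"      # set name and options are made up

    x = arg[len(SETPREFIX):]           # "someset{foo=bar,baz}"
    name, rest = x.split("{", 1)
    argpart = rest[:rest.index("}")]

    options = {}
    for a in argpart.split(","):       # no quoting support yet (see the TODO)
        if "=" in a:
            k, v = a.split("=", 1)
            options[k] = v
        else:
            options[a] = "True"

    # options == {"foo": "bar", "baz": "True"}; the real loop then calls
    # setconfig.update(name, options) and rewrites the argument as "@someset".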
29666 +def repo_name_check(trees):
29667 + missing_repo_names = set()
29668 + for root_trees in trees.values():
29669 + porttree = root_trees.get("porttree")
29670 + if porttree:
29671 + portdb = porttree.dbapi
29672 + missing_repo_names.update(portdb.getMissingRepoNames())
29673 +
29674 + # Skip warnings about missing repo_name entries for
29675 + # /usr/local/portage (see bug #248603).
29676 + try:
29677 + missing_repo_names.remove('/usr/local/portage')
29678 + except KeyError:
29679 + pass
29680 +
29681 + if missing_repo_names:
29682 + msg = []
29683 + msg.append("WARNING: One or more repositories " + \
29684 + "have missing repo_name entries:")
29685 + msg.append("")
29686 + for p in missing_repo_names:
29687 + msg.append("\t%s/profiles/repo_name" % (p,))
29688 + msg.append("")
29689 + msg.extend(textwrap.wrap("NOTE: Each repo_name entry " + \
29690 + "should be a plain text file containing a unique " + \
29691 + "name for the repository on the first line.", 70))
29692 + msg.append("\n")
29693 + writemsg_level("".join("%s\n" % l for l in msg),
29694 + level=logging.WARNING, noiselevel=-1)
29695 +
29696 + return bool(missing_repo_names)
29697 +
29698 +def repo_name_duplicate_check(trees):
29699 + ignored_repos = {}
29700 + for root, root_trees in trees.items():
29701 + if 'porttree' in root_trees:
29702 + portdb = root_trees['porttree'].dbapi
29703 + if portdb.settings.get('PORTAGE_REPO_DUPLICATE_WARN') != '0':
29704 + for repo_name, paths in portdb.getIgnoredRepos():
29705 + k = (root, repo_name, portdb.getRepositoryPath(repo_name))
29706 + ignored_repos.setdefault(k, []).extend(paths)
29707 +
29708 + if ignored_repos:
29709 + msg = []
29710 + msg.append('WARNING: One or more repositories ' + \
29711 + 'have been ignored due to duplicate')
29712 + msg.append(' profiles/repo_name entries:')
29713 + msg.append('')
29714 + for k in sorted(ignored_repos):
29715 + msg.append(' %s overrides' % ", ".join(k))
29716 + for path in ignored_repos[k]:
29717 + msg.append(' %s' % (path,))
29718 + msg.append('')
29719 + msg.extend(' ' + x for x in textwrap.wrap(
29720 + "All profiles/repo_name entries must be unique in order " + \
29721 + "to avoid having duplicates ignored. " + \
29722 + "Set PORTAGE_REPO_DUPLICATE_WARN=\"0\" in " + \
29723 + "/etc/portage/make.conf if you would like to disable this warning."))
29724 + msg.append("\n")
29725 + writemsg_level(''.join('%s\n' % l for l in msg),
29726 + level=logging.WARNING, noiselevel=-1)
29727 +
29728 + return bool(ignored_repos)
29729 +
29730 +def run_action(emerge_config):
29731 +
29732 + # skip global updates prior to sync, since it's called after sync
29733 + if emerge_config.action not in ('help', 'info', 'sync', 'version') and \
29734 + emerge_config.opts.get('--package-moves') != 'n' and \
29735 + _global_updates(emerge_config.trees,
29736 + emerge_config.target_config.mtimedb["updates"],
29737 + quiet=("--quiet" in emerge_config.opts)):
29738 + emerge_config.target_config.mtimedb.commit()
29739 + # Reload the whole config from scratch.
29740 + load_emerge_config(emerge_config=emerge_config)
29741 +
29742 + xterm_titles = "notitles" not in \
29743 + emerge_config.target_config.settings.features
29744 + if xterm_titles:
29745 + xtermTitle("emerge")
29746 +
29747 + if "--digest" in emerge_config.opts:
29748 + os.environ["FEATURES"] = os.environ.get("FEATURES","") + " digest"
29749 + # Reload the whole config from scratch so that the portdbapi internal
29750 + # config is updated with new FEATURES.
29751 + load_emerge_config(emerge_config=emerge_config)
29752 +
29753 + # NOTE: adjust_configs() can map options to FEATURES, so any relevant
29754 + # options adjustments should be made prior to calling adjust_configs().
29755 + if "--buildpkgonly" in emerge_config.opts:
29756 + emerge_config.opts["--buildpkg"] = True
29757 +
29758 + if "getbinpkg" in emerge_config.target_config.settings.features:
29759 + emerge_config.opts["--getbinpkg"] = True
29760 +
29761 + if "--getbinpkgonly" in emerge_config.opts:
29762 + emerge_config.opts["--getbinpkg"] = True
29763 +
29764 + if "--getbinpkgonly" in emerge_config.opts:
29765 + emerge_config.opts["--usepkgonly"] = True
29766 +
29767 + if "--getbinpkg" in emerge_config.opts:
29768 + emerge_config.opts["--usepkg"] = True
29769 +
29770 + if "--usepkgonly" in emerge_config.opts:
29771 + emerge_config.opts["--usepkg"] = True
29772 +
29773 + if "--buildpkgonly" in emerge_config.opts:
29774 + # --buildpkgonly will not merge anything, so
29775 + # it cancels all binary package options.
29776 + for opt in ("--getbinpkg", "--getbinpkgonly",
29777 + "--usepkg", "--usepkgonly"):
29778 + emerge_config.opts.pop(opt, None)
29779 +
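The option juggling above boils down to a chain of implications between the binary-package options. A condensed restatement on a plain dict (the FEATURES=getbinpkg branch is left out here):

    def resolve_binpkg_opts(opts):
        if "--buildpkgonly" in opts:
            opts["--buildpkg"] = True
        if "--getbinpkgonly" in opts:
            opts["--getbinpkg"] = True
            opts["--usepkgonly"] = True
        if "--getbinpkg" in opts:
            opts["--usepkg"] = True
        if "--usepkgonly" in opts:
            opts["--usepkg"] = True
        if "--buildpkgonly" in opts:
            # Nothing is merged, so binary-package options are moot.
            for opt in ("--getbinpkg", "--getbinpkgonly",
                    "--usepkg", "--usepkgonly"):
                opts.pop(opt, None)
        return opts

    resolve_binpkg_opts({"--getbinpkgonly": True})
    # -> {'--getbinpkgonly': True, '--getbinpkg': True,
    #     '--usepkgonly': True, '--usepkg': True}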
29780 + adjust_configs(emerge_config.opts, emerge_config.trees)
29781 + apply_priorities(emerge_config.target_config.settings)
29782 +
29783 + for fmt in emerge_config.target_config.settings["PORTAGE_BINPKG_FORMAT"].split():
29784 + if not fmt in portage.const.SUPPORTED_BINPKG_FORMATS:
29785 + if "--pkg-format" in emerge_config.opts:
29786 + problematic="--pkg-format"
29787 + else:
29788 + problematic="PORTAGE_BINPKG_FORMAT"
29789 +
29790 + writemsg_level(("emerge: %s is not set correctly. Format " + \
29791 + "'%s' is not supported.\n") % (problematic, fmt),
29792 + level=logging.ERROR, noiselevel=-1)
29793 + return 1
29794 +
29795 + if emerge_config.action == 'version':
29796 + writemsg_stdout(getportageversion(
29797 + emerge_config.target_config.settings["PORTDIR"],
29798 + None,
29799 + emerge_config.target_config.settings.profile_path,
29800 + emerge_config.target_config.settings["CHOST"],
29801 + emerge_config.target_config.trees['vartree'].dbapi) + '\n',
29802 + noiselevel=-1)
29803 + return 0
29804 + elif emerge_config.action == 'help':
29805 + emerge_help()
29806 + return 0
29807 +
29808 + spinner = stdout_spinner()
29809 + if "candy" in emerge_config.target_config.settings.features:
29810 + spinner.update = spinner.update_scroll
29811 +
29812 + if "--quiet" not in emerge_config.opts:
29813 + portage.deprecated_profile_check(
29814 + settings=emerge_config.target_config.settings)
29815 + repo_name_check(emerge_config.trees)
29816 + repo_name_duplicate_check(emerge_config.trees)
29817 + config_protect_check(emerge_config.trees)
29818 + check_procfs()
29819 +
29820 + for mytrees in emerge_config.trees.values():
29821 + mydb = mytrees["porttree"].dbapi
29822 + # Freeze the portdbapi for performance (memoize all xmatch results).
29823 + mydb.freeze()
29824 +
29825 + if emerge_config.action in ('search', None) and \
29826 + "--usepkg" in emerge_config.opts:
29827 + # Populate the bintree with current --getbinpkg setting.
29828 + # This needs to happen before expand_set_arguments(), in case
29829 + # any sets use the bintree.
29830 + try:
29831 + mytrees["bintree"].populate(
29832 + getbinpkgs="--getbinpkg" in emerge_config.opts)
29833 + except ParseError as e:
29834 + writemsg("\n\n!!!%s.\nSee make.conf(5) for more info.\n"
29835 + % e, noiselevel=-1)
29836 + return 1
29837 +
29838 + del mytrees, mydb
29839 +
29840 + for x in emerge_config.args:
29841 + if x.endswith((".ebuild", ".tbz2")) and \
29842 + os.path.exists(os.path.abspath(x)):
29843 + print(colorize("BAD", "\n*** emerging by path is broken "
29844 + "and may not always work!!!\n"))
29845 + break
29846 +
29847 + if emerge_config.action == "list-sets":
29848 + writemsg_stdout("".join("%s\n" % s for s in
29849 + sorted(emerge_config.target_config.sets)))
29850 + return os.EX_OK
29851 + elif emerge_config.action == "check-news":
29852 + news_counts = count_unread_news(
29853 + emerge_config.target_config.trees["porttree"].dbapi,
29854 + emerge_config.target_config.trees["vartree"].dbapi)
29855 + if any(news_counts.values()):
29856 + display_news_notifications(news_counts)
29857 + elif "--quiet" not in emerge_config.opts:
29858 + print("", colorize("GOOD", "*"), "No news items were found.")
29859 + return os.EX_OK
29860 +
29861 + ensure_required_sets(emerge_config.trees)
29862 +
29863 + if emerge_config.action is None and \
29864 + "--resume" in emerge_config.opts and emerge_config.args:
29865 + writemsg("emerge: unexpected argument(s) for --resume: %s\n" %
29866 + " ".join(emerge_config.args), noiselevel=-1)
29867 + return 1
29868 +
29869 + # only expand sets for actions taking package arguments
29870 + oldargs = emerge_config.args[:]
29871 + if emerge_config.action in ("clean", "config", "depclean",
29872 + "info", "prune", "unmerge", None):
29873 + newargs, retval = expand_set_arguments(
29874 + emerge_config.args, emerge_config.action,
29875 + emerge_config.target_config)
29876 + if retval != os.EX_OK:
29877 + return retval
29878 +
29879 + # Need to handle empty sets specially, otherwise emerge will react
29880 + # with the help message for empty argument lists
29881 + if oldargs and not newargs:
29882 + print("emerge: no targets left after set expansion")
29883 + return 0
29884 +
29885 + emerge_config.args = newargs
29886 +
29887 + if "--tree" in emerge_config.opts and \
29888 + "--columns" in emerge_config.opts:
29889 + print("emerge: can't specify both of \"--tree\" and \"--columns\".")
29890 + return 1
29891 +
29892 + if '--emptytree' in emerge_config.opts and \
29893 + '--noreplace' in emerge_config.opts:
29894 + writemsg_level("emerge: can't specify both of " + \
29895 + "\"--emptytree\" and \"--noreplace\".\n",
29896 + level=logging.ERROR, noiselevel=-1)
29897 + return 1
29898 +
29899 + if ("--quiet" in emerge_config.opts):
29900 + spinner.update = spinner.update_quiet
29901 + portage.util.noiselimit = -1
29902 +
29903 + if "--fetch-all-uri" in emerge_config.opts:
29904 + emerge_config.opts["--fetchonly"] = True
29905 +
29906 + if "--skipfirst" in emerge_config.opts and \
29907 + "--resume" not in emerge_config.opts:
29908 + emerge_config.opts["--resume"] = True
29909 +
29910 + # Allow -p to remove --ask
29911 + if "--pretend" in emerge_config.opts:
29912 + emerge_config.opts.pop("--ask", None)
29913 +
29914 + # forbid --ask when not in a terminal
29915 + # note: this breaks `emerge --ask | tee logfile`, but that doesn't work anyway.
29916 + if ("--ask" in emerge_config.opts) and (not sys.stdin.isatty()):
29917 + portage.writemsg("!!! \"--ask\" should only be used in a terminal. Exiting.\n",
29918 + noiselevel=-1)
29919 + return 1
29920 +
29921 + if emerge_config.target_config.settings.get("PORTAGE_DEBUG", "") == "1":
29922 + spinner.update = spinner.update_quiet
29923 + portage.util.noiselimit = 0
29924 + if "python-trace" in emerge_config.target_config.settings.features:
29925 + portage.debug.set_trace(True)
29926 +
29927 + if not ("--quiet" in emerge_config.opts):
29928 + if '--nospinner' in emerge_config.opts or \
29929 + emerge_config.target_config.settings.get('TERM') == 'dumb' or \
29930 + not sys.stdout.isatty():
29931 + spinner.update = spinner.update_basic
29932 +
29933 + if "--debug" in emerge_config.opts:
29934 + print("myaction", emerge_config.action)
29935 + print("myopts", emerge_config.opts)
29936 +
29937 + if not emerge_config.action and not emerge_config.args and \
29938 + "--resume" not in emerge_config.opts:
29939 + emerge_help()
29940 + return 1
29941 +
29942 + pretend = "--pretend" in emerge_config.opts
29943 + fetchonly = "--fetchonly" in emerge_config.opts or \
29944 + "--fetch-all-uri" in emerge_config.opts
29945 + buildpkgonly = "--buildpkgonly" in emerge_config.opts
29946 +
29947 + # check if root user is the current user for the actions where emerge needs this
29948 + if portage.data.secpass < 2:
29949 + # We've already allowed "--version" and "--help" above.
29950 + if "--pretend" not in emerge_config.opts and \
29951 + emerge_config.action not in ("search", "info"):
29952 + need_superuser = emerge_config.action in ('clean', 'depclean',
29953 + 'deselect', 'prune', 'unmerge') or not \
29954 + (fetchonly or \
29955 + (buildpkgonly and portage.data.secpass >= 1) or \
29956 + emerge_config.action in ("metadata", "regen", "sync"))
29957 + if portage.data.secpass < 1 or \
29958 + need_superuser:
29959 + if need_superuser:
29960 + access_desc = "superuser"
29961 + else:
29962 + access_desc = "portage group"
29963 + # Always show portage_group_warning() when only portage group
29964 + # access is required but the user is not in the portage group.
29965 + if "--ask" in emerge_config.opts:
29966 + writemsg_stdout("This action requires %s access...\n" % \
29967 + (access_desc,), noiselevel=-1)
29968 + if portage.data.secpass < 1 and not need_superuser:
29969 + portage.data.portage_group_warning()
29970 + if userquery("Would you like to add --pretend to options?",
29971 + "--ask-enter-invalid" in emerge_config.opts) == "No":
29972 + return 128 + signal.SIGINT
29973 + emerge_config.opts["--pretend"] = True
29974 + emerge_config.opts.pop("--ask")
29975 + else:
29976 + sys.stderr.write(("emerge: %s access is required\n") \
29977 + % access_desc)
29978 + if portage.data.secpass < 1 and not need_superuser:
29979 + portage.data.portage_group_warning()
29980 + return 1
29981 +
29982 + # Disable emergelog for everything except build or unmerge operations.
29983 + # This helps minimize parallel emerge.log entries that can confuse log
29984 + # parsers like genlop.
29985 + disable_emergelog = False
29986 + for x in ("--pretend", "--fetchonly", "--fetch-all-uri"):
29987 + if x in emerge_config.opts:
29988 + disable_emergelog = True
29989 + break
29990 + if disable_emergelog:
29991 + pass
29992 + elif emerge_config.action in ("search", "info"):
29993 + disable_emergelog = True
29994 + elif portage.data.secpass < 1:
29995 + disable_emergelog = True
29996 +
29997 + import _emerge.emergelog
29998 + _emerge.emergelog._disable = disable_emergelog
29999 +
30000 + if not disable_emergelog:
30001 + emerge_log_dir = \
30002 + emerge_config.target_config.settings.get('EMERGE_LOG_DIR')
30003 + if emerge_log_dir:
30004 + try:
30005 + # At least the parent needs to exist for the lock file.
30006 + portage.util.ensure_dirs(emerge_log_dir)
30007 + except portage.exception.PortageException as e:
30008 + writemsg_level("!!! Error creating directory for " + \
30009 + "EMERGE_LOG_DIR='%s':\n!!! %s\n" % \
30010 + (emerge_log_dir, e),
30011 + noiselevel=-1, level=logging.ERROR)
30012 + portage.util.ensure_dirs(_emerge.emergelog._emerge_log_dir)
30013 + else:
30014 + _emerge.emergelog._emerge_log_dir = emerge_log_dir
30015 + else:
30016 + _emerge.emergelog._emerge_log_dir = os.path.join(os.sep,
30017 + portage.const.EPREFIX.lstrip(os.sep), "var", "log")
30018 + portage.util.ensure_dirs(_emerge.emergelog._emerge_log_dir)
30019 +
30020 + if not "--pretend" in emerge_config.opts:
30021 + time_fmt = "%b %d, %Y %H:%M:%S"
30022 + if sys.hexversion < 0x3000000:
30023 + time_fmt = portage._unicode_encode(time_fmt)
30024 + time_str = time.strftime(time_fmt, time.localtime(time.time()))
30025 + # Avoid potential UnicodeDecodeError in Python 2, since strftime
30026 + # returns bytes in Python 2, and %b may contain non-ascii chars.
30027 + time_str = _unicode_decode(time_str,
30028 + encoding=_encodings['content'], errors='replace')
30029 + emergelog(xterm_titles, "Started emerge on: %s" % time_str)
30030 + myelogstr=""
30031 + if emerge_config.opts:
30032 + opt_list = []
30033 + for opt, arg in emerge_config.opts.items():
30034 + if arg is True:
30035 + opt_list.append(opt)
30036 + elif isinstance(arg, list):
30037 + # arguments like --exclude that use 'append' action
30038 + for x in arg:
30039 + opt_list.append("%s=%s" % (opt, x))
30040 + else:
30041 + opt_list.append("%s=%s" % (opt, arg))
30042 + myelogstr=" ".join(opt_list)
30043 + if emerge_config.action:
30044 + myelogstr += " --" + emerge_config.action
30045 + if oldargs:
30046 + myelogstr += " " + " ".join(oldargs)
30047 + emergelog(xterm_titles, " *** emerge " + myelogstr)
30048 +
30049 + oldargs = None
30050 +
30051 + def emergeexitsig(signum, frame):
30052 + signal.signal(signal.SIGTERM, signal.SIG_IGN)
30053 + portage.util.writemsg(
30054 + "\n\nExiting on signal %(signal)s\n" % {"signal":signum})
30055 + sys.exit(128 + signum)
30056 +
30057 + signal.signal(signal.SIGTERM, emergeexitsig)
30058 +
30059 + def emergeexit():
30060 + 		"""This gets our final log message in before we quit."""
30061 + if "--pretend" not in emerge_config.opts:
30062 + emergelog(xterm_titles, " *** terminating.")
30063 + if xterm_titles:
30064 + xtermTitleReset()
30065 + portage.atexit_register(emergeexit)
30066 +
30067 + if emerge_config.action in ("config", "metadata", "regen", "sync"):
30068 + if "--pretend" in emerge_config.opts:
30069 + sys.stderr.write(("emerge: The '%s' action does " + \
30070 + "not support '--pretend'.\n") % emerge_config.action)
30071 + return 1
30072 +
30073 + if "sync" == emerge_config.action:
30074 + return action_sync(emerge_config)
30075 + elif "metadata" == emerge_config.action:
30076 + action_metadata(emerge_config.target_config.settings,
30077 + emerge_config.target_config.trees['porttree'].dbapi,
30078 + emerge_config.opts)
30079 + elif emerge_config.action=="regen":
30080 + validate_ebuild_environment(emerge_config.trees)
30081 + return action_regen(emerge_config.target_config.settings,
30082 + emerge_config.target_config.trees['porttree'].dbapi,
30083 + emerge_config.opts.get("--jobs"),
30084 + emerge_config.opts.get("--load-average"))
30085 + # HELP action
30086 + elif "config" == emerge_config.action:
30087 + validate_ebuild_environment(emerge_config.trees)
30088 + action_config(emerge_config.target_config.settings,
30089 + emerge_config.trees, emerge_config.opts, emerge_config.args)
30090 +
30091 + # SEARCH action
30092 + elif "search" == emerge_config.action:
30093 + validate_ebuild_environment(emerge_config.trees)
30094 + action_search(emerge_config.target_config,
30095 + emerge_config.opts, emerge_config.args, spinner)
30096 +
30097 + elif emerge_config.action in \
30098 + ('clean', 'depclean', 'deselect', 'prune', 'unmerge'):
30099 + validate_ebuild_environment(emerge_config.trees)
30100 + rval = action_uninstall(emerge_config.target_config.settings,
30101 + emerge_config.trees, emerge_config.target_config.mtimedb["ldpath"],
30102 + emerge_config.opts, emerge_config.action,
30103 + emerge_config.args, spinner)
30104 + if not (emerge_config.action == 'deselect' or
30105 + buildpkgonly or fetchonly or pretend):
30106 + post_emerge(emerge_config.action, emerge_config.opts,
30107 + emerge_config.args, emerge_config.target_config.root,
30108 + emerge_config.trees, emerge_config.target_config.mtimedb, rval)
30109 + return rval
30110 +
30111 + elif emerge_config.action == 'info':
30112 +
30113 + # Ensure atoms are valid before calling unmerge().
30114 + vardb = emerge_config.target_config.trees['vartree'].dbapi
30115 + portdb = emerge_config.target_config.trees['porttree'].dbapi
30116 + bindb = emerge_config.target_config.trees['bintree'].dbapi
30117 + valid_atoms = []
30118 + for x in emerge_config.args:
30119 + if is_valid_package_atom(x, allow_repo=True):
30120 + try:
30121 + #look at the installed files first, if there is no match
30122 + #look at the ebuilds, since EAPI 4 allows running pkg_info
30123 + #on non-installed packages
30124 + valid_atom = dep_expand(x, mydb=vardb)
30125 + if valid_atom.cp.split("/")[0] == "null":
30126 + valid_atom = dep_expand(x, mydb=portdb)
30127 +
30128 + if valid_atom.cp.split("/")[0] == "null" and \
30129 + "--usepkg" in emerge_config.opts:
30130 + valid_atom = dep_expand(x, mydb=bindb)
30131 +
30132 + valid_atoms.append(valid_atom)
30133 +
30134 + except portage.exception.AmbiguousPackageName as e:
30135 + msg = "The short ebuild name \"" + x + \
30136 + "\" is ambiguous. Please specify " + \
30137 + "one of the following " + \
30138 + "fully-qualified ebuild names instead:"
30139 + for line in textwrap.wrap(msg, 70):
30140 + writemsg_level("!!! %s\n" % (line,),
30141 + level=logging.ERROR, noiselevel=-1)
30142 + for i in e.args[0]:
30143 + writemsg_level(" %s\n" % colorize("INFORM", i),
30144 + level=logging.ERROR, noiselevel=-1)
30145 + writemsg_level("\n", level=logging.ERROR, noiselevel=-1)
30146 + return 1
30147 + continue
30148 + msg = []
30149 + msg.append("'%s' is not a valid package atom." % (x,))
30150 + msg.append("Please check ebuild(5) for full details.")
30151 + writemsg_level("".join("!!! %s\n" % line for line in msg),
30152 + level=logging.ERROR, noiselevel=-1)
30153 + return 1
30154 +
30155 + return action_info(emerge_config.target_config.settings,
30156 + emerge_config.trees, emerge_config.opts, valid_atoms)
30157 +
30158 + # "update", "system", or just process files:
30159 + else:
30160 + validate_ebuild_environment(emerge_config.trees)
30161 +
30162 + for x in emerge_config.args:
30163 + if x.startswith(SETPREFIX) or \
30164 + is_valid_package_atom(x, allow_repo=True):
30165 + continue
30166 + if x[:1] == os.sep:
30167 + continue
30168 + try:
30169 + os.lstat(x)
30170 + continue
30171 + except OSError:
30172 + pass
30173 + msg = []
30174 + msg.append("'%s' is not a valid package atom." % (x,))
30175 + msg.append("Please check ebuild(5) for full details.")
30176 + writemsg_level("".join("!!! %s\n" % line for line in msg),
30177 + level=logging.ERROR, noiselevel=-1)
30178 + return 1
30179 +
30180 + # GLEP 42 says to display news *after* an emerge --pretend
30181 + if "--pretend" not in emerge_config.opts:
30182 + display_news_notification(
30183 + emerge_config.target_config, emerge_config.opts)
30184 + retval = action_build(emerge_config.target_config.settings,
30185 + emerge_config.trees, emerge_config.target_config.mtimedb,
30186 + emerge_config.opts, emerge_config.action,
30187 + emerge_config.args, spinner)
30188 + post_emerge(emerge_config.action, emerge_config.opts,
30189 + emerge_config.args, emerge_config.target_config.root,
30190 + emerge_config.trees, emerge_config.target_config.mtimedb, retval)
30191 +
30192 + return retval
30193
30194 diff --git a/pym/_emerge/chk_updated_cfg_files.py b/pym/_emerge/chk_updated_cfg_files.py
30195 new file mode 100644
30196 index 0000000..9f2ab6f
30197 --- /dev/null
30198 +++ b/pym/_emerge/chk_updated_cfg_files.py
30199 @@ -0,0 +1,42 @@
30200 +# Copyright 1999-2012 Gentoo Foundation
30201 +# Distributed under the terms of the GNU General Public License v2
30202 +
30203 +from __future__ import print_function
30204 +
30205 +import logging
30206 +
30207 +import portage
30208 +from portage import os
30209 +from portage.localization import _
30210 +from portage.output import bold, colorize, yellow
30211 +from portage.util import writemsg_level
30212 +
30213 +def chk_updated_cfg_files(eroot, config_protect):
30214 + target_root = eroot
30215 + result = list(
30216 + portage.util.find_updated_config_files(target_root, config_protect))
30217 +
30218 + for x in result:
30219 + writemsg_level("\n %s " % (colorize("WARN", "* " + _("IMPORTANT:"))),
30220 + level=logging.INFO, noiselevel=-1)
30221 + if not x[1]: # it's a protected file
30222 + writemsg_level( _("config file '%s' needs updating.\n") % x[0],
30223 + level=logging.INFO, noiselevel=-1)
30224 + else: # it's a protected dir
30225 + if len(x[1]) == 1:
30226 + head, tail = os.path.split(x[1][0])
30227 + tail = tail[len("._cfg0000_"):]
30228 + fpath = os.path.join(head, tail)
30229 + writemsg_level(_("config file '%s' needs updating.\n") % fpath,
30230 + level=logging.INFO, noiselevel=-1)
30231 + else:
30232 + writemsg_level(
30233 + _("%d config files in '%s' need updating.\n") % \
30234 + (len(x[1]), x[0]), level=logging.INFO, noiselevel=-1)
30235 +
30236 + if result:
30237 + print(" " + yellow("*") + " See the " +
30238 + colorize("INFORM", _("CONFIGURATION FILES")) +
30239 + " " + _("section of the") + " " + bold("emerge"))
30240 + print(" " + yellow("*") + " " +
30241 + _("man page to learn how to update config files."))
30242
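The single-file branch of the new module recovers a protected file's name by stripping the fixed-width ._cfg####_ prefix from the pending update file. A tiny sketch with a hypothetical path:

    import os

    def strip_cfg_prefix(update_path):
        head, tail = os.path.split(update_path)
        tail = tail[len("._cfg0000_"):]   # the counter is fixed-width
        return os.path.join(head, tail)

    strip_cfg_prefix("/etc/._cfg0001_resolv.conf")   # -> "/etc/resolv.conf"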
30243 diff --git a/pym/_emerge/clear_caches.py b/pym/_emerge/clear_caches.py
30244 index 7b7c5ec..513df62 100644
30245 --- a/pym/_emerge/clear_caches.py
30246 +++ b/pym/_emerge/clear_caches.py
30247 @@ -1,8 +1,7 @@
30248 -# Copyright 1999-2010 Gentoo Foundation
30249 +# Copyright 1999-2013 Gentoo Foundation
30250 # Distributed under the terms of the GNU General Public License v2
30251
30252 import gc
30253 -from portage.util.listdir import dircache
30254
30255 def clear_caches(trees):
30256 for d in trees.values():
30257 @@ -15,5 +14,4 @@ def clear_caches(trees):
30258 pass
30259 else:
30260 d["vartree"].dbapi._linkmap._clear_cache()
30261 - dircache.clear()
30262 gc.collect()
30263
30264 diff --git a/pym/_emerge/countdown.py b/pym/_emerge/countdown.py
30265 index 5abdc8a..62e3c8d 100644
30266 --- a/pym/_emerge/countdown.py
30267 +++ b/pym/_emerge/countdown.py
30268 @@ -1,4 +1,4 @@
30269 -# Copyright 1999-2009 Gentoo Foundation
30270 +# Copyright 1999-2013 Gentoo Foundation
30271 # Distributed under the terms of the GNU General Public License v2
30272
30273 from __future__ import print_function
30274 @@ -8,15 +8,15 @@ import time
30275
30276 from portage.output import colorize
30277
30278 -def countdown(secs=5, doing="Starting"):
30279 +
30280 +def countdown(secs=5, doing='Starting'):
30281 if secs:
30282 - print(">>> Waiting",secs,"seconds before starting...")
30283 - print(">>> (Control-C to abort)...\n"+doing+" in: ", end=' ')
30284 - ticks=list(range(secs))
30285 - ticks.reverse()
30286 - for sec in ticks:
30287 - sys.stdout.write(colorize("UNMERGE_WARN", str(sec+1)+" "))
30288 + print(
30289 + '>>> Waiting %s seconds before starting...\n'
30290 + '>>> (Control-C to abort)...\n'
30291 + '%s in:' % (secs, doing), end='')
30292 + for sec in range(secs, 0, -1):
30293 + sys.stdout.write(colorize('UNMERGE_WARN', ' %i' % sec))
30294 sys.stdout.flush()
30295 time.sleep(1)
30296 print()
30297 -
30298
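The countdown() signature is unchanged by the cleanup, so callers keep working as before; for example:

    from _emerge.countdown import countdown

    # Prints the two-line waiting banner, then "Unmerging in: 3 2 1"
    # with a one-second pause per tick.
    countdown(3, "Unmerging")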
30299 diff --git a/pym/_emerge/create_depgraph_params.py b/pym/_emerge/create_depgraph_params.py
30300 index 2838e93..225b792 100644
30301 --- a/pym/_emerge/create_depgraph_params.py
30302 +++ b/pym/_emerge/create_depgraph_params.py
30303 @@ -1,4 +1,4 @@
30304 -# Copyright 1999-2011 Gentoo Foundation
30305 +# Copyright 1999-2014 Gentoo Foundation
30306 # Distributed under the terms of the GNU General Public License v2
30307
30308 import logging
30309 @@ -15,11 +15,11 @@ def create_depgraph_params(myopts, myaction):
30310 # complete: completely account for all known dependencies
30311 # remove: build graph for use in removing packages
30312 # rebuilt_binaries: replace installed packages with rebuilt binaries
30313 - # rebuild_if_new_slot_abi: rebuild or reinstall packages when
30314 - # SLOT/ABI := operator dependencies can be satisfied by a newer
30315 - # SLOT/ABI, so that older packages slots will become eligible for
30316 + # rebuild_if_new_slot: rebuild or reinstall packages when
30317 + # slot/sub-slot := operator dependencies can be satisfied by a newer
30318 + # slot/sub-slot, so that older packages slots will become eligible for
30319 # removal by the --depclean action as soon as possible
30320 - # ignore_built_slot_abi_deps: ignore the SLOT/ABI := operator parts
30321 + # ignore_built_slot_operator_deps: ignore the slot/sub-slot := operator parts
30322 	# of dependencies that have been recorded when packages were built
30323 myparams = {"recurse" : True}
30324
30325 @@ -27,9 +27,9 @@ def create_depgraph_params(myopts, myaction):
30326 if bdeps is not None:
30327 myparams["bdeps"] = bdeps
30328
30329 - ignore_built_slot_abi_deps = myopts.get("--ignore-built-slot-abi-deps")
30330 - if ignore_built_slot_abi_deps is not None:
30331 - myparams["ignore_built_slot_abi_deps"] = ignore_built_slot_abi_deps
30332 + ignore_built_slot_operator_deps = myopts.get("--ignore-built-slot-operator-deps")
30333 + if ignore_built_slot_operator_deps is not None:
30334 + myparams["ignore_built_slot_operator_deps"] = ignore_built_slot_operator_deps
30335
30336 dynamic_deps = myopts.get("--dynamic-deps")
30337 if dynamic_deps is not None:
30338 @@ -41,11 +41,12 @@ def create_depgraph_params(myopts, myaction):
30339 myparams["selective"] = True
30340 return myparams
30341
30342 - rebuild_if_new_slot_abi = myopts.get('--rebuild-if-new-slot-abi')
30343 - if rebuild_if_new_slot_abi is not None:
30344 - myparams['rebuild_if_new_slot_abi'] = rebuild_if_new_slot_abi
30345 + rebuild_if_new_slot = myopts.get('--rebuild-if-new-slot')
30346 + if rebuild_if_new_slot is not None:
30347 + myparams['rebuild_if_new_slot'] = rebuild_if_new_slot
30348
30349 if "--update" in myopts or \
30350 + "--newrepo" in myopts or \
30351 "--newuse" in myopts or \
30352 "--reinstall" in myopts or \
30353 "--noreplace" in myopts or \
30354
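
The net effect of this hunk is a straight rename of the option/parameter pair plus the extra --newrepo trigger; the underlying pattern is simple pass-through, where an emerge option is copied into the depgraph parameter dict only if the user actually set it. A minimal sketch of that pattern (option names taken from the hunk, everything else simplified):

def create_depgraph_params_sketch(myopts):
    # Only options explicitly present in myopts are propagated, so the
    # depgraph can tell "unset" apart from "set to a default value".
    myparams = {"recurse": True}
    for opt, param in (
            ("--ignore-built-slot-operator-deps", "ignore_built_slot_operator_deps"),
            ("--rebuild-if-new-slot", "rebuild_if_new_slot"),
            ("--dynamic-deps", "dynamic_deps")):
        value = myopts.get(opt)
        if value is not None:
            myparams[param] = value
    return myparams

print(create_depgraph_params_sketch({"--rebuild-if-new-slot": "y"}))
# {'recurse': True, 'rebuild_if_new_slot': 'y'}
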
30355 diff --git a/pym/_emerge/create_world_atom.py b/pym/_emerge/create_world_atom.py
30356 index 35fb7c4..ac994cc 100644
30357 --- a/pym/_emerge/create_world_atom.py
30358 +++ b/pym/_emerge/create_world_atom.py
30359 @@ -1,7 +1,15 @@
30360 -# Copyright 1999-2011 Gentoo Foundation
30361 +# Copyright 1999-2012 Gentoo Foundation
30362 # Distributed under the terms of the GNU General Public License v2
30363
30364 +import sys
30365 +
30366 from portage.dep import _repo_separator
30367 +from portage.exception import InvalidData
30368 +
30369 +if sys.hexversion >= 0x3000000:
30370 + _unicode = str
30371 +else:
30372 + _unicode = unicode
30373
30374 def create_world_atom(pkg, args_set, root_config):
30375 """Create a new atom for the world file if one does not exist. If the
30376 @@ -35,16 +43,15 @@ def create_world_atom(pkg, args_set, root_config):
30377 for cpv in portdb.match(cp):
30378 for repo in repos:
30379 try:
30380 - available_slots.add(portdb.aux_get(cpv, ["SLOT"],
30381 - myrepo=repo)[0])
30382 - except KeyError:
30383 + available_slots.add(portdb._pkg_str(_unicode(cpv), repo).slot)
30384 + except (KeyError, InvalidData):
30385 pass
30386
30387 slotted = len(available_slots) > 1 or \
30388 (len(available_slots) == 1 and "0" not in available_slots)
30389 if not slotted:
30390 # check the vdb in case this is multislot
30391 - available_slots = set(vardb.aux_get(cpv, ["SLOT"])[0] \
30392 + available_slots = set(vardb._pkg_str(cpv, None).slot \
30393 for cpv in vardb.match(cp))
30394 slotted = len(available_slots) > 1 or \
30395 (len(available_slots) == 1 and "0" not in available_slots)
30396 @@ -83,14 +90,14 @@ def create_world_atom(pkg, args_set, root_config):
30397 matched_slots = set()
30398 if mydb is vardb:
30399 for cpv in matches:
30400 - matched_slots.add(mydb.aux_get(cpv, ["SLOT"])[0])
30401 + matched_slots.add(mydb._pkg_str(cpv, None).slot)
30402 else:
30403 for cpv in matches:
30404 for repo in repos:
30405 try:
30406 - matched_slots.add(portdb.aux_get(cpv, ["SLOT"],
30407 - myrepo=repo)[0])
30408 - except KeyError:
30409 + matched_slots.add(
30410 + portdb._pkg_str(_unicode(cpv), repo).slot)
30411 + except (KeyError, InvalidData):
30412 pass
30413
30414 if len(matched_slots) == 1:
30415
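
Both hunks only change how the SLOT values are collected (_pkg_str instead of aux_get, with InvalidData now tolerated); the 'slotted' decision itself is unchanged and easy to state on its own: a package counts as slotted when its visible versions span more than one SLOT, or when the single SLOT in use is not the default "0". A toy illustration with plain data in place of the dbapi lookups:

def is_slotted(available_slots):
    # Mirrors the test in create_world_atom(): multiple slots, or a single
    # non-default slot, means the world-file atom may need slot information.
    return len(available_slots) > 1 or \
        (len(available_slots) == 1 and "0" not in available_slots)

print(is_slotted({"0"}))       # False - only the default slot exists
print(is_slotted({"2", "3"}))  # True  - several slots are available
print(is_slotted({"1.5"}))     # True  - a single, non-default slot
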
30416 diff --git a/pym/_emerge/depgraph.py b/pym/_emerge/depgraph.py
30417 index 0f3bc93..abb70a7 100644
30418 --- a/pym/_emerge/depgraph.py
30419 +++ b/pym/_emerge/depgraph.py
30420 @@ -1,34 +1,38 @@
30421 -# Copyright 1999-2012 Gentoo Foundation
30422 +# Copyright 1999-2014 Gentoo Foundation
30423 # Distributed under the terms of the GNU General Public License v2
30424
30425 -from __future__ import print_function
30426 +from __future__ import print_function, unicode_literals
30427
30428 -import difflib
30429 +import collections
30430 import errno
30431 import io
30432 import logging
30433 import stat
30434 import sys
30435 import textwrap
30436 +import warnings
30437 from collections import deque
30438 from itertools import chain
30439
30440 import portage
30441 from portage import os, OrderedDict
30442 from portage import _unicode_decode, _unicode_encode, _encodings
30443 -from portage.const import PORTAGE_PACKAGE_ATOM, USER_CONFIG_PATH
30444 +from portage.const import PORTAGE_PACKAGE_ATOM, USER_CONFIG_PATH, VCS_DIRS
30445 from portage.dbapi import dbapi
30446 from portage.dbapi.dep_expand import dep_expand
30447 +from portage.dbapi._similar_name_search import similar_name_search
30448 from portage.dep import Atom, best_match_to_list, extract_affecting_use, \
30449 check_required_use, human_readable_required_use, match_from_list, \
30450 _repo_separator
30451 -from portage.dep._slot_abi import ignore_built_slot_abi_deps
30452 -from portage.eapi import eapi_has_strong_blocks, eapi_has_required_use
30453 -from portage.exception import (InvalidAtom, InvalidDependString,
30454 +from portage.dep._slot_operator import ignore_built_slot_operator_deps
30455 +from portage.eapi import eapi_has_strong_blocks, eapi_has_required_use, \
30456 + _get_eapi_attrs
30457 +from portage.exception import (InvalidAtom, InvalidData, InvalidDependString,
30458 PackageNotFound, PortageException)
30459 from portage.output import colorize, create_color_func, \
30460 darkgreen, green
30461 bad = create_color_func("BAD")
30462 +from portage.package.ebuild.config import _get_feature_flags
30463 from portage.package.ebuild.getmaskingstatus import \
30464 _getmaskingstatus, _MaskReason
30465 from portage._sets import SETPREFIX
30466 @@ -38,13 +42,16 @@ from portage.util import cmp_sort_key, writemsg, writemsg_stdout
30467 from portage.util import ensure_dirs
30468 from portage.util import writemsg_level, write_atomic
30469 from portage.util.digraph import digraph
30470 -from portage.util.listdir import _ignorecvs_dirs
30471 +from portage.util._async.TaskScheduler import TaskScheduler
30472 +from portage.util._eventloop.EventLoop import EventLoop
30473 +from portage.util._eventloop.global_event_loop import global_event_loop
30474 from portage.versions import catpkgsplit
30475
30476 from _emerge.AtomArg import AtomArg
30477 from _emerge.Blocker import Blocker
30478 from _emerge.BlockerCache import BlockerCache
30479 from _emerge.BlockerDepPriority import BlockerDepPriority
30480 +from .chk_updated_cfg_files import chk_updated_cfg_files
30481 from _emerge.countdown import countdown
30482 from _emerge.create_world_atom import create_world_atom
30483 from _emerge.Dependency import Dependency
30484 @@ -52,6 +59,7 @@ from _emerge.DependencyArg import DependencyArg
30485 from _emerge.DepPriority import DepPriority
30486 from _emerge.DepPriorityNormalRange import DepPriorityNormalRange
30487 from _emerge.DepPrioritySatisfiedRange import DepPrioritySatisfiedRange
30488 +from _emerge.EbuildMetadataPhase import EbuildMetadataPhase
30489 from _emerge.FakeVartree import FakeVartree
30490 from _emerge._find_deep_system_runtime_deps import _find_deep_system_runtime_deps
30491 from _emerge.is_valid_package_atom import insert_category_into_atom, \
30492 @@ -68,9 +76,10 @@ from _emerge.UseFlagDisplay import pkg_use_display
30493 from _emerge.userquery import userquery
30494
30495 from _emerge.resolver.backtracking import Backtracker, BacktrackParameter
30496 +from _emerge.resolver.package_tracker import PackageTracker, PackageTrackerDbapiWrapper
30497 from _emerge.resolver.slot_collision import slot_conflict_handler
30498 from _emerge.resolver.circular_dependency import circular_dependency_handler
30499 -from _emerge.resolver.output import Display
30500 +from _emerge.resolver.output import Display, format_unmatched_atom
30501
30502 if sys.hexversion >= 0x3000000:
30503 basestring = str
30504 @@ -115,8 +124,8 @@ class _frozen_depgraph_config(object):
30505 self._pkg_cache = {}
30506 self._highest_license_masked = {}
30507 dynamic_deps = myopts.get("--dynamic-deps", "y") != "n"
30508 - ignore_built_slot_abi_deps = myopts.get(
30509 - "--ignore-built-slot-abi-deps", "n") == "y"
30510 + ignore_built_slot_operator_deps = myopts.get(
30511 + "--ignore-built-slot-operator-deps", "n") == "y"
30512 for myroot in trees:
30513 self.trees[myroot] = {}
30514 # Create a RootConfig instance that references
30515 @@ -132,7 +141,7 @@ class _frozen_depgraph_config(object):
30516 pkg_cache=self._pkg_cache,
30517 pkg_root_config=self.roots[myroot],
30518 dynamic_deps=dynamic_deps,
30519 - ignore_built_slot_abi_deps=ignore_built_slot_abi_deps)
30520 + ignore_built_slot_operator_deps=ignore_built_slot_operator_deps)
30521 self.pkgsettings[myroot] = portage.config(
30522 clone=self.trees[myroot]["vartree"].settings)
30523
30524 @@ -267,13 +276,12 @@ class _rebuild_config(object):
30525 return True
30526 elif (parent.installed and
30527 root_slot not in self.reinstall_list):
30528 - inst_build_time = parent.metadata.get("BUILD_TIME")
30529 try:
30530 bin_build_time, = bindb.aux_get(parent.cpv,
30531 ["BUILD_TIME"])
30532 except KeyError:
30533 continue
30534 - if bin_build_time != inst_build_time:
30535 + if bin_build_time != _unicode(parent.build_time):
30536 # 2) Remote binary package is valid, and local package
30537 # is not up to date. Force reinstall.
30538 reinstall = True
30539 @@ -335,11 +343,8 @@ class _dynamic_depgraph_config(object):
30540 self.myparams = myparams.copy()
30541 self._vdb_loaded = False
30542 self._allow_backtracking = allow_backtracking
30543 - # Maps slot atom to package for each Package added to the graph.
30544 - self._slot_pkg_map = {}
30545 # Maps nodes to the reasons they were selected for reinstallation.
30546 self._reinstall_nodes = {}
30547 - self.mydbapi = {}
30548 # Contains a filtered view of preferred packages that are selected
30549 # from available repositories.
30550 self._filtered_trees = {}
30551 @@ -374,14 +379,6 @@ class _dynamic_depgraph_config(object):
30552 # This is used to check if we have accounted for blockers
30553 # relevant to a package.
30554 self._traversed_pkg_deps = set()
30555 - # This should be ordered such that the backtracker will
30556 - # attempt to solve conflicts which occurred earlier first,
30557 - # since an earlier conflict can be the cause of a conflict
30558 - # which occurs later.
30559 - self._slot_collision_info = OrderedDict()
30560 - # Slot collision nodes are not allowed to block other packages since
30561 - # blocker validation is only able to account for one package per slot.
30562 - self._slot_collision_nodes = set()
30563 self._parent_atoms = {}
30564 self._slot_conflict_handler = None
30565 self._circular_dependency_handler = None
30566 @@ -412,28 +409,31 @@ class _dynamic_depgraph_config(object):
30567 self._needed_license_changes = backtrack_parameters.needed_license_changes
30568 self._needed_use_config_changes = backtrack_parameters.needed_use_config_changes
30569 self._runtime_pkg_mask = backtrack_parameters.runtime_pkg_mask
30570 - self._slot_abi_replace_installed = backtrack_parameters.slot_abi_replace_installed
30571 + self._slot_operator_replace_installed = backtrack_parameters.slot_operator_replace_installed
30572 + self._prune_rebuilds = backtrack_parameters.prune_rebuilds
30573 self._need_restart = False
30574 # For conditions that always require user intervention, such as
30575 # unsatisfied REQUIRED_USE (currently has no autounmask support).
30576 self._skip_restart = False
30577 self._backtrack_infos = {}
30578
30579 + self._buildpkgonly_deps_unsatisfied = False
30580 self._autounmask = depgraph._frozen_config.myopts.get('--autounmask') != 'n'
30581 self._success_without_autounmask = False
30582 self._traverse_ignored_deps = False
30583 self._complete_mode = False
30584 - self._slot_abi_deps = {}
30585 + self._slot_operator_deps = {}
30586 + self._package_tracker = PackageTracker()
30587 + # Track missed updates caused by solved conflicts.
30588 + self._conflict_missed_update = collections.defaultdict(dict)
30589
30590 for myroot in depgraph._frozen_config.trees:
30591 self.sets[myroot] = _depgraph_sets()
30592 - self._slot_pkg_map[myroot] = {}
30593 vardb = depgraph._frozen_config.trees[myroot]["vartree"].dbapi
30594 # This dbapi instance will model the state that the vdb will
30595 # have after new packages have been installed.
30596 - fakedb = PackageVirtualDbapi(vardb.settings)
30597 + fakedb = PackageTrackerDbapiWrapper(myroot, self._package_tracker)
30598
30599 - self.mydbapi[myroot] = fakedb
30600 def graph_tree():
30601 pass
30602 graph_tree.dbapi = fakedb
30603 @@ -446,6 +446,7 @@ class _dynamic_depgraph_config(object):
30604 self._graph_trees[myroot]["vartree"] = graph_tree
30605 self._graph_trees[myroot]["graph_db"] = graph_tree.dbapi
30606 self._graph_trees[myroot]["graph"] = self.digraph
30607 + self._graph_trees[myroot]["want_update_pkg"] = depgraph._want_update_pkg
30608 def filtered_tree():
30609 pass
30610 filtered_tree.dbapi = _dep_check_composite_db(depgraph, myroot)
30611 @@ -472,6 +473,7 @@ class _dynamic_depgraph_config(object):
30612 self._filtered_trees[myroot]["graph"] = self.digraph
30613 self._filtered_trees[myroot]["vartree"] = \
30614 depgraph._frozen_config.trees[myroot]["vartree"]
30615 + self._filtered_trees[myroot]["want_update_pkg"] = depgraph._want_update_pkg
30616
30617 dbs = []
30618 # (db, pkg_type, built, installed, db_keys)
30619 @@ -502,8 +504,6 @@ class depgraph(object):
30620
30621 pkg_tree_map = RootConfig.pkg_tree_map
30622
30623 - _dep_keys = ["DEPEND", "RDEPEND", "PDEPEND"]
30624 -
30625 def __init__(self, settings, trees, myopts, myparams, spinner,
30626 frozen_config=None, backtrack_parameters=BacktrackParameter(), allow_backtracking=False):
30627 if frozen_config is None:
30628 @@ -517,6 +517,9 @@ class depgraph(object):
30629 self._select_atoms = self._select_atoms_highest_available
30630 self._select_package = self._select_pkg_highest_available
30631
30632 + self._event_loop = (portage._internal_caller and
30633 + global_event_loop() or EventLoop(main=False))
30634 +
30635 def _load_vdb(self):
30636 """
30637 Load installed package metadata if appropriate. This used to be called
30638 @@ -535,10 +538,6 @@ class depgraph(object):
30639 preload_installed_pkgs = \
30640 "--nodeps" not in self._frozen_config.myopts
30641
30642 - if self._frozen_config.myopts.get("--root-deps") is not None and \
30643 - myroot != self._frozen_config.target_root:
30644 - continue
30645 -
30646 fake_vartree = self._frozen_config.trees[myroot]["vartree"]
30647 if not fake_vartree.dbapi:
30648 # This needs to be called for the first depgraph, but not for
30649 @@ -552,24 +551,157 @@ class depgraph(object):
30650
30651 if preload_installed_pkgs:
30652 vardb = fake_vartree.dbapi
30653 - fakedb = self._dynamic_config._graph_trees[
30654 - myroot]["vartree"].dbapi
30655
30656 - for pkg in vardb:
30657 - self._spinner_update()
30658 - if dynamic_deps:
30659 - # This causes FakeVartree to update the
30660 - # Package instance dependencies via
30661 - # PackageVirtualDbapi.aux_update()
30662 - vardb.aux_get(pkg.cpv, [])
30663 - fakedb.cpv_inject(pkg)
30664 + if not dynamic_deps:
30665 + for pkg in vardb:
30666 + self._dynamic_config._package_tracker.add_installed_pkg(pkg)
30667 + else:
30668 + max_jobs = self._frozen_config.myopts.get("--jobs")
30669 + max_load = self._frozen_config.myopts.get("--load-average")
30670 + scheduler = TaskScheduler(
30671 + self._dynamic_deps_preload(fake_vartree),
30672 + max_jobs=max_jobs,
30673 + max_load=max_load,
30674 + event_loop=fake_vartree._portdb._event_loop)
30675 + scheduler.start()
30676 + scheduler.wait()
30677
30678 self._dynamic_config._vdb_loaded = True
30679
30680 + def _dynamic_deps_preload(self, fake_vartree):
30681 + portdb = fake_vartree._portdb
30682 + for pkg in fake_vartree.dbapi:
30683 + self._spinner_update()
30684 + self._dynamic_config._package_tracker.add_installed_pkg(pkg)
30685 + ebuild_path, repo_path = \
30686 + portdb.findname2(pkg.cpv, myrepo=pkg.repo)
30687 + if ebuild_path is None:
30688 + fake_vartree.dynamic_deps_preload(pkg, None)
30689 + continue
30690 + metadata, ebuild_hash = portdb._pull_valid_cache(
30691 + pkg.cpv, ebuild_path, repo_path)
30692 + if metadata is not None:
30693 + fake_vartree.dynamic_deps_preload(pkg, metadata)
30694 + else:
30695 + proc = EbuildMetadataPhase(cpv=pkg.cpv,
30696 + ebuild_hash=ebuild_hash,
30697 + portdb=portdb, repo_path=repo_path,
30698 + settings=portdb.doebuild_settings)
30699 + proc.addExitListener(
30700 + self._dynamic_deps_proc_exit(pkg, fake_vartree))
30701 + yield proc
30702 +
30703 + class _dynamic_deps_proc_exit(object):
30704 +
30705 + __slots__ = ('_pkg', '_fake_vartree')
30706 +
30707 + def __init__(self, pkg, fake_vartree):
30708 + self._pkg = pkg
30709 + self._fake_vartree = fake_vartree
30710 +
30711 + def __call__(self, proc):
30712 + metadata = None
30713 + if proc.returncode == os.EX_OK:
30714 + metadata = proc.metadata
30715 + self._fake_vartree.dynamic_deps_preload(self._pkg, metadata)
30716 +
30717 def _spinner_update(self):
30718 if self._frozen_config.spinner:
30719 self._frozen_config.spinner.update()
30720
30721 + def _compute_abi_rebuild_info(self):
30722 + """
30723 + Fill self._forced_rebuilds with packages that cause rebuilds.
30724 + """
30725 +
30726 + debug = "--debug" in self._frozen_config.myopts
30727 +
30728 + # Get all atoms that might have caused a forced rebuild.
30729 + atoms = {}
30730 + for s in self._dynamic_config._initial_arg_list:
30731 + if s.force_reinstall:
30732 + root = s.root_config.root
30733 + atoms.setdefault(root, set()).update(s.pset)
30734 +
30735 + if debug:
30736 + writemsg_level("forced reinstall atoms:\n",
30737 + level=logging.DEBUG, noiselevel=-1)
30738 +
30739 + for root in atoms:
30740 + writemsg_level(" root: %s\n" % root,
30741 + level=logging.DEBUG, noiselevel=-1)
30742 + for atom in atoms[root]:
30743 + writemsg_level(" atom: %s\n" % atom,
30744 + level=logging.DEBUG, noiselevel=-1)
30745 + writemsg_level("\n\n",
30746 + level=logging.DEBUG, noiselevel=-1)
30747 +
30748 + # Go through all slot operator deps and check if one of these deps
30749 + # has a parent that is matched by one of the atoms from above.
30750 + forced_rebuilds = {}
30751 + for (root, slot_atom), deps in self._dynamic_config._slot_operator_deps.items():
30752 + rebuild_atoms = atoms.get(root, set())
30753 +
30754 + for dep in deps:
30755 + if getattr(dep.parent, "installed", False) or dep.child.installed or \
30756 + dep.parent.slot_atom not in rebuild_atoms:
30757 + continue
30758 +
30759 + # Make sure the child's slot/subslot has changed. If it hasn't,
30760 + # then another child has forced this rebuild.
30761 + installed_pkg = self._select_pkg_from_installed(root, dep.child.slot_atom)[0]
30762 + if installed_pkg and installed_pkg.slot == dep.child.slot and \
30763 + installed_pkg.sub_slot == dep.child.sub_slot:
30764 + continue
30765 +
30766 + # The child has forced a rebuild of the parent
30767 + forced_rebuilds.setdefault(root, {}).setdefault(dep.child, set()).add(dep.parent)
30768 +
30769 + if debug:
30770 + writemsg_level("slot operator dependencies:\n",
30771 + level=logging.DEBUG, noiselevel=-1)
30772 +
30773 + for (root, slot_atom), deps in self._dynamic_config._slot_operator_deps.items():
30774 + writemsg_level(" (%s, %s)\n" % \
30775 + (root, slot_atom), level=logging.DEBUG, noiselevel=-1)
30776 + for dep in deps:
30777 + writemsg_level(" parent: %s\n" % dep.parent, level=logging.DEBUG, noiselevel=-1)
30778 + writemsg_level(" child: %s (%s)\n" % (dep.child, dep.priority), level=logging.DEBUG, noiselevel=-1)
30779 +
30780 + writemsg_level("\n\n",
30781 + level=logging.DEBUG, noiselevel=-1)
30782 +
30783 +
30784 + writemsg_level("forced rebuilds:\n",
30785 + level=logging.DEBUG, noiselevel=-1)
30786 +
30787 + for root in forced_rebuilds:
30788 + writemsg_level(" root: %s\n" % root,
30789 + level=logging.DEBUG, noiselevel=-1)
30790 + for child in forced_rebuilds[root]:
30791 + writemsg_level(" child: %s\n" % child,
30792 + level=logging.DEBUG, noiselevel=-1)
30793 + for parent in forced_rebuilds[root][child]:
30794 + writemsg_level(" parent: %s\n" % parent,
30795 + level=logging.DEBUG, noiselevel=-1)
30796 + writemsg_level("\n\n",
30797 + level=logging.DEBUG, noiselevel=-1)
30798 +
30799 + self._forced_rebuilds = forced_rebuilds
30800 +
30801 + def _show_abi_rebuild_info(self):
30802 +
30803 + if not self._forced_rebuilds:
30804 + return
30805 +
30806 + writemsg_stdout("\nThe following packages are causing rebuilds:\n\n", noiselevel=-1)
30807 +
30808 + for root in self._forced_rebuilds:
30809 + for child in self._forced_rebuilds[root]:
30810 + writemsg_stdout(" %s causes rebuilds for:\n" % (child,), noiselevel=-1)
30811 + for parent in self._forced_rebuilds[root][child]:
30812 + writemsg_stdout(" %s\n" % (parent,), noiselevel=-1)
30813 +
30814 def _show_ignored_binaries(self):
30815 """
30816 Show binaries that have been ignored because their USE didn't
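
The preload rework in the hunk above follows a producer/consumer shape: a generator yields one metadata-regeneration job per installed package that misses the ebuild cache, a scheduler runs those jobs under the --jobs/--load-average limits, and each job's exit listener feeds the result back into the fake vartree. A rough standalone sketch of that shape built on the standard library (concurrent.futures stands in for portage's TaskScheduler/EbuildMetadataPhase, and the cache/regeneration helpers below are invented for the example):

from concurrent.futures import ThreadPoolExecutor

def cached_metadata(cpv):
    # Stand-in for portdb._pull_valid_cache(): pretend only "a/one" is cached.
    return {"SLOT": "0"} if cpv == "a/one" else None

def regenerate_metadata(cpv):
    # Stand-in for EbuildMetadataPhase: the expensive regeneration path.
    return {"SLOT": "0", "regenerated": cpv}

def preload(installed_cpvs, max_jobs=2):
    results = {}
    with ThreadPoolExecutor(max_workers=max_jobs) as pool:
        for cpv in installed_cpvs:
            meta = cached_metadata(cpv)
            if meta is not None:
                results[cpv] = meta  # cache hit, no job scheduled
            else:
                future = pool.submit(regenerate_metadata, cpv)
                # The done-callback plays the role of proc.addExitListener().
                future.add_done_callback(
                    lambda f, cpv=cpv: results.__setitem__(cpv, f.result()))
    return results

print(preload(["a/one", "b/two", "c/three"]))
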
30817 @@ -583,26 +715,23 @@ class depgraph(object):
30818
30819 for pkg in list(self._dynamic_config.ignored_binaries):
30820
30821 - selected_pkg = self._dynamic_config.mydbapi[pkg.root
30822 - ].match_pkgs(pkg.slot_atom)
30823 + selected_pkg = list()
30824
30825 - if not selected_pkg:
30826 - continue
30827 + for selected_pkg in self._dynamic_config._package_tracker.match(
30828 + pkg.root, pkg.slot_atom):
30829
30830 - selected_pkg = selected_pkg[-1]
30831 - if selected_pkg > pkg:
30832 - self._dynamic_config.ignored_binaries.pop(pkg)
30833 - continue
30834 + if selected_pkg > pkg:
30835 + self._dynamic_config.ignored_binaries.pop(pkg)
30836 + break
30837
30838 - if selected_pkg.installed and \
30839 - selected_pkg.cpv == pkg.cpv and \
30840 - selected_pkg.metadata.get('BUILD_TIME') == \
30841 - pkg.metadata.get('BUILD_TIME'):
30842 - # We don't care about ignored binaries when an
30843 - # identical installed instance is selected to
30844 - # fill the slot.
30845 - self._dynamic_config.ignored_binaries.pop(pkg)
30846 - continue
30847 + if selected_pkg.installed and \
30848 + selected_pkg.cpv == pkg.cpv and \
30849 + selected_pkg.build_time == pkg.build_time:
30850 + # We don't care about ignored binaries when an
30851 + # identical installed instance is selected to
30852 + # fill the slot.
30853 + self._dynamic_config.ignored_binaries.pop(pkg)
30854 + break
30855
30856 if not self._dynamic_config.ignored_binaries:
30857 return
30858 @@ -613,11 +742,17 @@ class depgraph(object):
30859 "due to non matching USE:\n\n", noiselevel=-1)
30860
30861 for pkg, flags in self._dynamic_config.ignored_binaries.items():
30862 - writemsg(" =%s" % pkg.cpv, noiselevel=-1)
30863 + flag_display = []
30864 + for flag in sorted(flags):
30865 + if flag not in pkg.use.enabled:
30866 + flag = "-" + flag
30867 + flag_display.append(flag)
30868 + flag_display = " ".join(flag_display)
30869 + # The user can paste this line into package.use
30870 + writemsg(" =%s %s" % (pkg.cpv, flag_display), noiselevel=-1)
30871 if pkg.root_config.settings["ROOT"] != "/":
30872 - writemsg(" for %s" % (pkg.root,), noiselevel=-1)
30873 - writemsg("\n use flag(s): %s\n" % ", ".join(sorted(flags)),
30874 - noiselevel=-1)
30875 + writemsg(" # for %s" % (pkg.root,), noiselevel=-1)
30876 + writemsg("\n", noiselevel=-1)
30877
30878 msg = [
30879 "",
30880 @@ -631,31 +766,44 @@ class depgraph(object):
30881 line = colorize("INFORM", line)
30882 writemsg(line + "\n", noiselevel=-1)
30883
30884 - def _show_missed_update(self):
30885 + def _get_missed_updates(self):
30886
30887 # In order to minimize noise, show only the highest
30888 # missed update from each SLOT.
30889 missed_updates = {}
30890 for pkg, mask_reasons in \
30891 - self._dynamic_config._runtime_pkg_mask.items():
30892 + chain(self._dynamic_config._runtime_pkg_mask.items(),
30893 + self._dynamic_config._conflict_missed_update.items()):
30894 if pkg.installed:
30895 # Exclude installed here since we only
30896 # want to show available updates.
30897 continue
30898 - chosen_pkg = self._dynamic_config.mydbapi[pkg.root
30899 - ].match_pkgs(pkg.slot_atom)
30900 - if not chosen_pkg or chosen_pkg[-1] >= pkg:
30901 - continue
30902 - k = (pkg.root, pkg.slot_atom)
30903 - if k in missed_updates:
30904 - other_pkg, mask_type, parent_atoms = missed_updates[k]
30905 - if other_pkg > pkg:
30906 - continue
30907 - for mask_type, parent_atoms in mask_reasons.items():
30908 - if not parent_atoms:
30909 - continue
30910 - missed_updates[k] = (pkg, mask_type, parent_atoms)
30911 - break
30912 + missed_update = True
30913 + any_selected = False
30914 + for chosen_pkg in self._dynamic_config._package_tracker.match(
30915 + pkg.root, pkg.slot_atom):
30916 + any_selected = True
30917 + if chosen_pkg > pkg or (not chosen_pkg.installed and \
30918 + chosen_pkg.version == pkg.version):
30919 + missed_update = False
30920 + break
30921 + if any_selected and missed_update:
30922 + k = (pkg.root, pkg.slot_atom)
30923 + if k in missed_updates:
30924 + other_pkg, mask_type, parent_atoms = missed_updates[k]
30925 + if other_pkg > pkg:
30926 + continue
30927 + for mask_type, parent_atoms in mask_reasons.items():
30928 + if not parent_atoms:
30929 + continue
30930 + missed_updates[k] = (pkg, mask_type, parent_atoms)
30931 + break
30932 +
30933 + return missed_updates
30934 +
30935 + def _show_missed_update(self):
30936 +
30937 + missed_updates = self._get_missed_updates()
30938
30939 if not missed_updates:
30940 return
30941 @@ -726,7 +874,7 @@ class depgraph(object):
30942
30943 self._show_merge_list()
30944 msg = []
30945 - msg.append("\nWARNING: One or more updates have been " + \
30946 + msg.append("\nWARNING: One or more updates/rebuilds have been " + \
30947 "skipped due to a dependency conflict:\n\n")
30948
30949 indent = " "
30950 @@ -736,22 +884,29 @@ class depgraph(object):
30951 msg.append(" for %s" % (pkg.root,))
30952 msg.append("\n\n")
30953
30954 - for parent, atom in parent_atoms:
30955 - msg.append(indent)
30956 - msg.append(str(pkg))
30957 + msg.append(indent)
30958 + msg.append(str(pkg))
30959 + msg.append(" conflicts with\n")
30960
30961 - msg.append(" conflicts with\n")
30962 - msg.append(2*indent)
30963 + for parent, atom in parent_atoms:
30964 if isinstance(parent,
30965 (PackageArg, AtomArg)):
30966 # For PackageArg and AtomArg types, it's
30967 # redundant to display the atom attribute.
30968 + msg.append(2*indent)
30969 msg.append(str(parent))
30970 + msg.append("\n")
30971 else:
30972 # Display the specific atom from SetArg or
30973 # Package types.
30974 - msg.append("%s required by %s" % (atom, parent))
30975 - msg.append("\n")
30976 + atom, marker = format_unmatched_atom(
30977 + pkg, atom, self._pkg_use_enabled)
30978 +
30979 + msg.append(2*indent)
30980 + msg.append("%s required by %s\n" % (atom, parent))
30981 + msg.append(2*indent)
30982 + msg.append(marker)
30983 + msg.append("\n")
30984 msg.append("\n")
30985
30986 writemsg("".join(msg), noiselevel=-1)
30987 @@ -764,7 +919,7 @@ class depgraph(object):
30988 cases.
30989 """
30990
30991 - if not self._dynamic_config._slot_collision_info:
30992 + if not any(self._dynamic_config._package_tracker.slot_conflicts()):
30993 return
30994
30995 self._show_merge_list()
30996 @@ -774,7 +929,7 @@ class depgraph(object):
30997
30998 conflict = handler.get_conflict()
30999 writemsg(conflict, noiselevel=-1)
31000 -
31001 +
31002 explanation = handler.get_explanation()
31003 if explanation:
31004 writemsg(explanation, noiselevel=-1)
31005 @@ -813,6 +968,239 @@ class depgraph(object):
31006 writemsg(line + '\n', noiselevel=-1)
31007 writemsg('\n', noiselevel=-1)
31008
31009 + def _solve_non_slot_operator_slot_conflicts(self):
31010 + """
31011 + This function solves slot conflicts which can
31012 + be solved by simply choosing one of the conflicting
31013 + packages and removing all the other ones.
31014 + It is able to solve somewhat more complex cases where
31015 + conflicts can only be solved simultaneously.
31016 + """
31017 + debug = "--debug" in self._frozen_config.myopts
31018 +
31019 + # List all conflicts. Ignore those that involve slot operator rebuilds
31020 + # as the logic there needs special slot conflict behavior which isn't
31021 + # provided by this function.
31022 + conflicts = []
31023 + for conflict in self._dynamic_config._package_tracker.slot_conflicts():
31024 + slot_key = conflict.root, conflict.atom
31025 + if slot_key not in self._dynamic_config._slot_operator_replace_installed:
31026 + conflicts.append(conflict)
31027 +
31028 + if not conflicts:
31029 + return
31030 +
31031 + # Get a set of all conflicting packages.
31032 + conflict_pkgs = set()
31033 + for conflict in conflicts:
31034 + conflict_pkgs.update(conflict)
31035 +
31036 + # Get the list of other packages which are only
31037 + # required by conflict packages.
31038 + indirect_conflict_candidates = set()
31039 + for pkg in conflict_pkgs:
31040 + indirect_conflict_candidates.update(self._dynamic_config.digraph.child_nodes(pkg))
31041 + indirect_conflict_candidates.difference_update(conflict_pkgs)
31042 +
31043 + indirect_conflict_pkgs = set()
31044 + while indirect_conflict_candidates:
31045 + pkg = indirect_conflict_candidates.pop()
31046 +
31047 + only_conflict_parents = True
31048 + for parent, atom in self._dynamic_config._parent_atoms.get(pkg, []):
31049 + if parent not in conflict_pkgs and parent not in indirect_conflict_pkgs:
31050 + only_conflict_parents = False
31051 + break
31052 + if not only_conflict_parents:
31053 + continue
31054 +
31055 + indirect_conflict_pkgs.add(pkg)
31056 + for child in self._dynamic_config.digraph.child_nodes(pkg):
31057 + if child in conflict_pkgs or child in indirect_conflict_pkgs:
31058 + continue
31059 + indirect_conflict_candidates.add(child)
31060 +
31061 + # Create a graph containing the conflict packages
31062 + # and a special 'non_conflict_node' that represents
31063 + # all non-conflict packages.
31064 + conflict_graph = digraph()
31065 +
31066 + non_conflict_node = "(non-conflict package)"
31067 + conflict_graph.add(non_conflict_node, None)
31068 +
31069 + for pkg in chain(conflict_pkgs, indirect_conflict_pkgs):
31070 + conflict_graph.add(pkg, None)
31071 +
31072 + # Add parent->child edges for each conflict package.
31073 + # Parents, which aren't conflict packages are represented
31074 + # by 'non_conflict_node'.
31075 + # If several conflicting packages are matched, but not all,
31076 + # add a tuple with the matched packages to the graph.
31077 + class or_tuple(tuple):
31078 + """
31079 + Helper class for debug printing.
31080 + """
31081 + def __str__(self):
31082 + return "(%s)" % ",".join(str(pkg) for pkg in self)
31083 +
31084 + for conflict in conflicts:
31085 + all_parent_atoms = set()
31086 + for pkg in conflict:
31087 + all_parent_atoms.update(
31088 + self._dynamic_config._parent_atoms.get(pkg, []))
31089 +
31090 + for parent, atom in all_parent_atoms:
31091 + is_arg_parent = isinstance(parent, AtomArg)
31092 +
31093 + if parent not in conflict_pkgs and \
31094 + parent not in indirect_conflict_pkgs:
31095 + parent = non_conflict_node
31096 +
31097 + atom_set = InternalPackageSet(
31098 + initial_atoms=(atom,), allow_repo=True)
31099 +
31100 + matched = []
31101 + for pkg in conflict:
31102 + if atom_set.findAtomForPackage(pkg, \
31103 + modified_use=self._pkg_use_enabled(pkg)) and \
31104 + not (is_arg_parent and pkg.installed):
31105 + matched.append(pkg)
31106 + if len(matched) == len(conflict):
31107 + # All packages match.
31108 + continue
31109 + elif len(matched) == 1:
31110 + conflict_graph.add(matched[0], parent)
31111 + else:
31112 + # More than one package matched, but not all.
31113 + conflict_graph.add(or_tuple(matched), parent)
31114 +
31115 + for pkg in indirect_conflict_pkgs:
31116 + for parent, atom in self._dynamic_config._parent_atoms.get(pkg, []):
31117 + if parent not in conflict_pkgs and \
31118 + parent not in indirect_conflict_pkgs:
31119 + parent = non_conflict_node
31120 + conflict_graph.add(pkg, parent)
31121 +
31122 + if debug:
31123 + writemsg_level(
31124 + "\n!!! Slot conflict graph:\n",
31125 + level=logging.DEBUG, noiselevel=-1)
31126 + conflict_graph.debug_print()
31127 +
31128 + # Now select required packages. Collect them in the
31129 + # 'forced' set.
31130 + forced = set([non_conflict_node])
31131 + unexplored = set([non_conflict_node])
31132 + # or_tuples get special handling. We first explore
31133 + # all packages in the hope of having forced one of
31134 + # the packages in the tuple. This way we don't have
31135 + # to choose one.
31136 + unexplored_tuples = set()
31137 +
31138 + while unexplored:
31139 + # Handle all unexplored packages.
31140 + while unexplored:
31141 + node = unexplored.pop()
31142 + for child in conflict_graph.child_nodes(node):
31143 + if child in forced:
31144 + continue
31145 + forced.add(child)
31146 + if isinstance(child, Package):
31147 + unexplored.add(child)
31148 + else:
31149 + unexplored_tuples.add(child)
31150 +
31151 + # Now handle unexplored or_tuples. Move on with packages
31152 + # once we had to choose one.
31153 + while unexplored_tuples:
31154 + nodes = unexplored_tuples.pop()
31155 + if any(node in forced for node in nodes):
31156 + # At least one of the packages in the
31157 + # tuple is already forced, which means the
31158 + # dependency represented by this tuple
31159 + # is satisfied.
31160 + continue
31161 +
31162 + # We now have to choose one of the packages in the tuple.
31163 + # In theory one could solve more conflicts if we were
31164 + # able to try different choices here, but that has lots
31165 + # of other problems. For now choose the package that was
31166 + # pulled first, as this should be the most desirable choice
31167 + # (otherwise it wouldn't have been the first one).
31168 + forced.add(nodes[0])
31169 + unexplored.add(nodes[0])
31170 + break
31171 +
31172 + # Remove 'non_conflict_node' and or_tuples from 'forced'.
31173 + forced = set(pkg for pkg in forced if isinstance(pkg, Package))
31174 + non_forced = set(pkg for pkg in conflict_pkgs if pkg not in forced)
31175 +
31176 + if debug:
31177 + writemsg_level(
31178 + "\n!!! Slot conflict solution:\n",
31179 + level=logging.DEBUG, noiselevel=-1)
31180 + for conflict in conflicts:
31181 + writemsg_level(
31182 + " Conflict: (%s, %s)\n" % (conflict.root, conflict.atom),
31183 + level=logging.DEBUG, noiselevel=-1)
31184 + for pkg in conflict:
31185 + if pkg in forced:
31186 + writemsg_level(
31187 + " keep: %s\n" % pkg,
31188 + level=logging.DEBUG, noiselevel=-1)
31189 + else:
31190 + writemsg_level(
31191 + " remove: %s\n" % pkg,
31192 + level=logging.DEBUG, noiselevel=-1)
31193 +
31194 + broken_packages = set()
31195 + for pkg in non_forced:
31196 + for parent, atom in self._dynamic_config._parent_atoms.get(pkg, []):
31197 + if isinstance(parent, Package) and parent not in non_forced:
31198 + # Non-forcing set args are expected to be a parent of all
31199 + # packages in the conflict.
31200 + broken_packages.add(parent)
31201 + self._remove_pkg(pkg)
31202 +
31203 + # Process the dependencies of chosen conflict packages
31204 + # again to properly account for blockers.
31205 + broken_packages.update(forced)
31206 +
31207 + # Filter out broken packages which have been removed during
31208 + # recursive removal in self._remove_pkg.
31209 + broken_packages = list(pkg for pkg in broken_packages if pkg in broken_packages \
31210 + if self._dynamic_config._package_tracker.contains(pkg, installed=False))
31211 +
31212 + self._dynamic_config._dep_stack.extend(broken_packages)
31213 +
31214 + if broken_packages:
31215 + # Process dependencies. This cannot fail because we just ensured that
31216 + # the remaining packages satisfy all dependencies.
31217 + self._create_graph()
31218 +
31219 + # Record missed updates.
31220 + for conflict in conflicts:
31221 + if not any(pkg in non_forced for pkg in conflict):
31222 + continue
31223 + for pkg in conflict:
31224 + if pkg not in non_forced:
31225 + continue
31226 +
31227 + for other in conflict:
31228 + if other is pkg:
31229 + continue
31230 +
31231 + for parent, atom in self._dynamic_config._parent_atoms.get(other, []):
31232 + atom_set = InternalPackageSet(
31233 + initial_atoms=(atom,), allow_repo=True)
31234 + if not atom_set.findAtomForPackage(pkg,
31235 + modified_use=self._pkg_use_enabled(pkg)):
31236 + self._dynamic_config._conflict_missed_update[pkg].setdefault(
31237 + "slot conflict", set())
31238 + self._dynamic_config._conflict_missed_update[pkg]["slot conflict"].add(
31239 + (parent, atom))
31240 +
31241 +
31242 def _process_slot_conflicts(self):
31243 """
31244 If there are any slot conflicts and backtracking is enabled,
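
The core of _solve_non_slot_operator_slot_conflicts() above is the reachability sweep over the conflict graph: everything reachable from the synthetic non-conflict node becomes 'forced', and tuple nodes (an atom matched by several, but not all, of the conflicting packages) are only resolved once no plain package is left to explore, at which point the first-pulled member wins. A small standalone sketch of that sweep over a plain adjacency mapping (graph construction and portage types omitted; the node names are arbitrary):

def sweep(children):
    # children maps each node to the nodes selected by its dependency atoms.
    # Tuple nodes mean "any one of these packages would satisfy the atom".
    forced = {"non-conflict"}
    unexplored = {"non-conflict"}
    unexplored_tuples = set()

    while unexplored:
        while unexplored:
            node = unexplored.pop()
            for child in children.get(node, ()):
                if child in forced:
                    continue
                forced.add(child)
                if isinstance(child, tuple):
                    unexplored_tuples.add(child)
                else:
                    unexplored.add(child)
        while unexplored_tuples:
            nodes = unexplored_tuples.pop()
            if any(node in forced for node in nodes):
                continue          # already satisfied by some forced member
            forced.add(nodes[0])  # otherwise keep the first-pulled package
            unexplored.add(nodes[0])
            break                 # resume exploring plain packages

    return set(node for node in forced
        if not isinstance(node, tuple) and node != "non-conflict")

kept = sweep({
    "non-conflict": ["app-1.0", ("lib-1.0", "lib-2.0")],
    "app-1.0": ["lib-2.0"],
})
print(sorted(kept))  # ['app-1.0', 'lib-2.0']; lib-1.0 would be dropped
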
31245 @@ -820,16 +1208,21 @@ class depgraph(object):
31246 is called, so that all relevant reverse dependencies are
31247 available for use in backtracking decisions.
31248 """
31249 - for (slot_atom, root), slot_nodes in \
31250 - self._dynamic_config._slot_collision_info.items():
31251 - self._process_slot_conflict(root, slot_atom, slot_nodes)
31252
31253 - def _process_slot_conflict(self, root, slot_atom, slot_nodes):
31254 + self._solve_non_slot_operator_slot_conflicts()
31255 +
31256 + for conflict in self._dynamic_config._package_tracker.slot_conflicts():
31257 + self._process_slot_conflict(conflict)
31258 +
31259 + def _process_slot_conflict(self, conflict):
31260 """
31261 Process slot conflict data to identify specific atoms which
31262 lead to conflict. These atoms only match a subset of the
31263 packages that have been pulled into a given slot.
31264 """
31265 + root = conflict.root
31266 + slot_atom = conflict.atom
31267 + slot_nodes = conflict.pkgs
31268
31269 debug = "--debug" in self._frozen_config.myopts
31270
31271 @@ -897,21 +1290,13 @@ class depgraph(object):
31272 all_parents, conflict_pkgs):
31273
31274 debug = "--debug" in self._frozen_config.myopts
31275 - existing_node = self._dynamic_config._slot_pkg_map[root][slot_atom]
31276 + existing_node = next(self._dynamic_config._package_tracker.match(
31277 + root, slot_atom, installed=False))
31278 + # In order to avoid a missed update, first mask lower versions
31279 + # that conflict with higher versions (the backtracker visits
31280 + # these in reverse order).
31281 + conflict_pkgs.sort(reverse=True)
31282 backtrack_data = []
31283 - # The ordering of backtrack_data can make
31284 - # a difference here, because both mask actions may lead
31285 - # to valid, but different, solutions and the one with
31286 - # 'existing_node' masked is usually the better one. Because
31287 - # of that, we choose an order such that
31288 - # the backtracker will first explore the choice with
31289 - # existing_node masked. The backtracker reverses the
31290 - # order, so the order it uses is the reverse of the
31291 - # order shown here. See bug #339606.
31292 - if existing_node in conflict_pkgs and \
31293 - existing_node is not conflict_pkgs[-1]:
31294 - conflict_pkgs.remove(existing_node)
31295 - conflict_pkgs.append(existing_node)
31296 for to_be_masked in conflict_pkgs:
31297 # For missed update messages, find out which
31298 # atoms matched to_be_selected that did not
31299 @@ -922,19 +1307,6 @@ class depgraph(object):
31300 if parent_atom not in parent_atoms)
31301 backtrack_data.append((to_be_masked, conflict_atoms))
31302
31303 - if len(backtrack_data) > 1:
31304 - # NOTE: Generally, we prefer to mask the higher
31305 - # version since this solves common cases in which a
31306 - # lower version is needed so that all dependencies
31307 - # will be satisfied (bug #337178). However, if
31308 - # existing_node happens to be installed then we
31309 - # mask that since this is a common case that is
31310 - # triggered when --update is not enabled.
31311 - if existing_node.installed:
31312 - pass
31313 - elif any(pkg > existing_node for pkg in conflict_pkgs):
31314 - backtrack_data.reverse()
31315 -
31316 to_be_masked = backtrack_data[-1][0]
31317
31318 self._dynamic_config._backtrack_infos.setdefault(
31319 @@ -956,7 +1328,7 @@ class depgraph(object):
31320
31321 def _slot_conflict_backtrack_abi(self, pkg, slot_nodes, conflict_atoms):
31322 """
31323 - If one or more conflict atoms have a SLOT/ABI dep that can be resolved
31324 + If one or more conflict atoms have a slot/sub-slot dep that can be resolved
31325 by rebuilding the parent package, then schedule the rebuild via
31326 backtracking, and return True. Otherwise, return False.
31327 """
31328 @@ -964,7 +1336,7 @@ class depgraph(object):
31329 found_update = False
31330 for parent_atom, conflict_pkgs in conflict_atoms.items():
31331 parent, atom = parent_atom
31332 - if atom.slot_abi_op != "=" or not parent.built:
31333 + if atom.slot_operator != "=" or not parent.built:
31334 continue
31335
31336 if pkg not in conflict_pkgs:
31337 @@ -977,13 +1349,96 @@ class depgraph(object):
31338 dep = Dependency(atom=atom, child=other_pkg,
31339 parent=parent, root=pkg.root)
31340
31341 - if self._slot_abi_update_probe(dep):
31342 - self._slot_abi_update_backtrack(dep)
31343 + new_dep = \
31344 + self._slot_operator_update_probe_slot_conflict(dep)
31345 + if new_dep is not None:
31346 + self._slot_operator_update_backtrack(dep,
31347 + new_dep=new_dep)
31348 found_update = True
31349
31350 return found_update
31351
31352 - def _slot_abi_update_backtrack(self, dep, new_child_slot=None):
31353 + def _slot_change_probe(self, dep):
31354 + """
31355 + @rtype: bool
31356 + @return: True if dep.child should be rebuilt due to a change
31357 + in sub-slot (without revbump, as in bug #456208).
31358 + """
31359 + if not (isinstance(dep.parent, Package) and \
31360 + not dep.parent.built and dep.child.built):
31361 + return None
31362 +
31363 + root_config = self._frozen_config.roots[dep.root]
31364 + matches = []
31365 + try:
31366 + matches.append(self._pkg(dep.child.cpv, "ebuild",
31367 + root_config, myrepo=dep.child.repo))
31368 + except PackageNotFound:
31369 + pass
31370 +
31371 + for unbuilt_child in chain(matches,
31372 + self._iter_match_pkgs(root_config, "ebuild",
31373 + Atom("=%s" % (dep.child.cpv,)))):
31374 + if unbuilt_child in self._dynamic_config._runtime_pkg_mask:
31375 + continue
31376 + if self._frozen_config.excluded_pkgs.findAtomForPackage(
31377 + unbuilt_child,
31378 + modified_use=self._pkg_use_enabled(unbuilt_child)):
31379 + continue
31380 + if not self._pkg_visibility_check(unbuilt_child):
31381 + continue
31382 + break
31383 + else:
31384 + return None
31385 +
31386 + if unbuilt_child.slot == dep.child.slot and \
31387 + unbuilt_child.sub_slot == dep.child.sub_slot:
31388 + return None
31389 +
31390 + return unbuilt_child
31391 +
31392 + def _slot_change_backtrack(self, dep, new_child_slot):
31393 + child = dep.child
31394 + if "--debug" in self._frozen_config.myopts:
31395 + msg = []
31396 + msg.append("")
31397 + msg.append("")
31398 + msg.append("backtracking due to slot/sub-slot change:")
31399 + msg.append(" child package: %s" % child)
31400 + msg.append(" child slot: %s/%s" %
31401 + (child.slot, child.sub_slot))
31402 + msg.append(" new child: %s" % new_child_slot)
31403 + msg.append(" new child slot: %s/%s" %
31404 + (new_child_slot.slot, new_child_slot.sub_slot))
31405 + msg.append(" parent package: %s" % dep.parent)
31406 + msg.append(" atom: %s" % dep.atom)
31407 + msg.append("")
31408 + writemsg_level("\n".join(msg),
31409 + noiselevel=-1, level=logging.DEBUG)
31410 + backtrack_infos = self._dynamic_config._backtrack_infos
31411 + config = backtrack_infos.setdefault("config", {})
31412 +
31413 + # mask unwanted binary packages if necessary
31414 + masks = {}
31415 + if not child.installed:
31416 + masks.setdefault(dep.child, {})["slot_operator_mask_built"] = None
31417 + if masks:
31418 + config.setdefault("slot_operator_mask_built", {}).update(masks)
31419 +
31420 + # trigger replacement of installed packages if necessary
31421 + reinstalls = set()
31422 + if child.installed:
31423 + replacement_atom = self._replace_installed_atom(child)
31424 + if replacement_atom is not None:
31425 + reinstalls.add((child.root, replacement_atom))
31426 + if reinstalls:
31427 + config.setdefault("slot_operator_replace_installed",
31428 + set()).update(reinstalls)
31429 +
31430 + self._dynamic_config._need_restart = True
31431 +
31432 + def _slot_operator_update_backtrack(self, dep, new_child_slot=None,
31433 + new_dep=None):
31434 if new_child_slot is None:
31435 child = dep.child
31436 else:
31437 @@ -997,6 +1452,8 @@ class depgraph(object):
31438 if new_child_slot is not None:
31439 msg.append(" new child slot package: %s" % new_child_slot)
31440 msg.append(" parent package: %s" % dep.parent)
31441 + if new_dep is not None:
31442 + msg.append(" new parent pkg: %s" % new_dep.parent)
31443 msg.append(" atom: %s" % dep.atom)
31444 msg.append("")
31445 writemsg_level("\n".join(msg),
31446 @@ -1008,28 +1465,54 @@ class depgraph(object):
31447 abi_masks = {}
31448 if new_child_slot is None:
31449 if not child.installed:
31450 - abi_masks.setdefault(child, {})["slot_abi_mask_built"] = None
31451 + abi_masks.setdefault(child, {})["slot_operator_mask_built"] = None
31452 if not dep.parent.installed:
31453 - abi_masks.setdefault(dep.parent, {})["slot_abi_mask_built"] = None
31454 + abi_masks.setdefault(dep.parent, {})["slot_operator_mask_built"] = None
31455 if abi_masks:
31456 - config.setdefault("slot_abi_mask_built", {}).update(abi_masks)
31457 + config.setdefault("slot_operator_mask_built", {}).update(abi_masks)
31458
31459 # trigger replacement of installed packages if necessary
31460 abi_reinstalls = set()
31461 if dep.parent.installed:
31462 - abi_reinstalls.add((dep.parent.root, dep.parent.slot_atom))
31463 + if new_dep is not None:
31464 + replacement_atom = new_dep.parent.slot_atom
31465 + else:
31466 + replacement_atom = self._replace_installed_atom(dep.parent)
31467 + if replacement_atom is not None:
31468 + abi_reinstalls.add((dep.parent.root, replacement_atom))
31469 if new_child_slot is None and child.installed:
31470 - abi_reinstalls.add((child.root, child.slot_atom))
31471 + replacement_atom = self._replace_installed_atom(child)
31472 + if replacement_atom is not None:
31473 + abi_reinstalls.add((child.root, replacement_atom))
31474 if abi_reinstalls:
31475 - config.setdefault("slot_abi_replace_installed",
31476 + config.setdefault("slot_operator_replace_installed",
31477 set()).update(abi_reinstalls)
31478
31479 self._dynamic_config._need_restart = True
31480
31481 - def _slot_abi_update_probe(self, dep, new_child_slot=False):
31482 + def _slot_operator_update_probe_slot_conflict(self, dep):
31483 + new_dep = self._slot_operator_update_probe(dep, slot_conflict=True)
31484 +
31485 + if new_dep is not None:
31486 + return new_dep
31487 +
31488 + if self._dynamic_config._autounmask is True:
31489 +
31490 + for autounmask_level in self._autounmask_levels():
31491 +
31492 + new_dep = self._slot_operator_update_probe(dep,
31493 + slot_conflict=True, autounmask_level=autounmask_level)
31494 +
31495 + if new_dep is not None:
31496 + return new_dep
31497 +
31498 + return None
31499 +
31500 + def _slot_operator_update_probe(self, dep, new_child_slot=False,
31501 + slot_conflict=False, autounmask_level=None):
31502 """
31503 - SLOT/ABI := operators tend to prevent updates from getting pulled in,
31504 - since installed packages pull in packages with the SLOT/ABI that they
31505 + slot/sub-slot := operators tend to prevent updates from getting pulled in,
31506 + since installed packages pull in packages with the slot/sub-slot that they
31507 were built against. Detect this case so that we can schedule rebuilds
31508 and reinstalls when appropriate.
31509 NOTE: This function only searches for updates that involve upgrades
31510 @@ -1048,20 +1531,70 @@ class depgraph(object):
31511 return None
31512
31513 debug = "--debug" in self._frozen_config.myopts
31514 + selective = "selective" in self._dynamic_config.myparams
31515 want_downgrade = None
31516
31517 + def check_reverse_dependencies(existing_pkg, candidate_pkg):
31518 + """
31519 + Check if candidate_pkg satisfies all of existing_pkg's non-
31520 + slot operator parents.
31521 + """
31522 + for parent, atom in self._dynamic_config._parent_atoms.get(existing_pkg, []):
31523 + if atom.slot_operator == "=" and parent.built:
31524 + continue
31525 +
31526 + atom_set = InternalPackageSet(initial_atoms=(atom,),
31527 + allow_repo=True)
31528 + if not atom_set.findAtomForPackage(candidate_pkg,
31529 + modified_use=self._pkg_use_enabled(candidate_pkg)):
31530 + return False
31531 + return True
31532 +
31533 +
31534 for replacement_parent in self._iter_similar_available(dep.parent,
31535 - dep.parent.slot_atom):
31536 + dep.parent.slot_atom, autounmask_level=autounmask_level):
31537
31538 - for atom in replacement_parent.validated_atoms:
31539 - if not atom.slot_abi_op == "=" or \
31540 - atom.blocker or \
31541 + if not check_reverse_dependencies(dep.parent, replacement_parent):
31542 + continue
31543 +
31544 + selected_atoms = None
31545 +
31546 + atoms = set()
31547 + invalid_metadata = False
31548 + for dep_key in ("DEPEND", "HDEPEND", "RDEPEND", "PDEPEND"):
31549 + dep_string = replacement_parent._metadata[dep_key]
31550 + if not dep_string:
31551 + continue
31552 +
31553 + try:
31554 + dep_string = portage.dep.use_reduce(dep_string,
31555 + uselist=self._pkg_use_enabled(replacement_parent),
31556 + is_valid_flag=replacement_parent.iuse.is_valid_flag,
31557 + flat=True, token_class=Atom,
31558 + eapi=replacement_parent.eapi)
31559 + except portage.exception.InvalidDependString:
31560 + invalid_metadata = True
31561 + break
31562 +
31563 + atoms.update(token for token in dep_string if isinstance(token, Atom))
31564 +
31565 + if invalid_metadata:
31566 + continue
31567 +
31568 + # List of list of child,atom pairs for each atom.
31569 + replacement_candidates = []
31570 + # Set of all packages all atoms can agree on.
31571 + all_candidate_pkgs = None
31572 +
31573 + for atom in atoms:
31574 + if atom.blocker or \
31575 atom.cp != dep.atom.cp:
31576 continue
31577
31578 # Discard USE deps, we're only searching for an approximate
31579 # pattern, and dealing with USE states is too complex for
31580 # this purpose.
31581 + unevaluated_atom = atom.unevaluated_atom
31582 atom = atom.without_use
31583
31584 if replacement_parent.built and \
31585 @@ -1071,11 +1604,13 @@ class depgraph(object):
31586 # parent and search for another.
31587 break
31588
31589 + candidate_pkg_atoms = []
31590 + candidate_pkgs = []
31591 for pkg in self._iter_similar_available(
31592 dep.child, atom):
31593 if pkg.slot == dep.child.slot and \
31594 - pkg.slot_abi == dep.child.slot_abi:
31595 - # If SLOT/ABI is identical, then there's
31596 + pkg.sub_slot == dep.child.sub_slot:
31597 + # If slot/sub-slot is identical, then there's
31598 # no point in updating.
31599 continue
31600 if new_child_slot:
31601 @@ -1093,39 +1628,192 @@ class depgraph(object):
31602 want_downgrade = self._downgrade_probe(dep.child)
31603 # be careful not to trigger a rebuild when
31604 # the only version available with a
31605 - # different slot_abi is an older version
31606 + # different slot_operator is an older version
31607 if not want_downgrade:
31608 continue
31609
31610 + insignificant = False
31611 + if not slot_conflict and \
31612 + selective and \
31613 + dep.parent.installed and \
31614 + dep.child.installed and \
31615 + dep.parent >= replacement_parent and \
31616 + dep.child.cpv == pkg.cpv:
31617 + # This can happen if the child's sub-slot changed
31618 + # without a revision bump. The sub-slot change is
31619 + # considered insignificant until one of its parent
31620 + # packages needs to be rebuilt (which may trigger a
31621 + # slot conflict).
31622 + insignificant = True
31623 +
31624 + if not insignificant:
31625 + # Evaluate USE conditionals and || deps, in order
31626 + # to see if this atom is really desirable, since
31627 + # otherwise we may trigger an undesirable rebuild
31628 + # as in bug #460304.
31629 + if selected_atoms is None:
31630 + selected_atoms = self._select_atoms_probe(
31631 + dep.child.root, replacement_parent)
31632 + if unevaluated_atom not in selected_atoms:
31633 + continue
31634 +
31635 + if not insignificant and \
31636 + check_reverse_dependencies(dep.child, pkg):
31637 +
31638 + candidate_pkg_atoms.append((pkg, unevaluated_atom))
31639 + candidate_pkgs.append(pkg)
31640 + replacement_candidates.append(candidate_pkg_atoms)
31641 + if all_candidate_pkgs is None:
31642 + all_candidate_pkgs = set(candidate_pkgs)
31643 + else:
31644 + all_candidate_pkgs.intersection_update(candidate_pkgs)
31645 +
31646 + if not all_candidate_pkgs:
31647 + # If the atoms that connect parent and child can't agree on
31648 + # any replacement child, we can't do anything.
31649 + continue
31650 +
31651 + # Now select one of the pkgs as replacement. This is as easy as
31652 + # selecting the highest version.
31653 + # The more complicated part is to choose an atom for the
31654 + # new Dependency object. Choose the one which ranked the selected
31655 + # parent highest.
31656 + selected = None
31657 + for candidate_pkg_atoms in replacement_candidates:
31658 + for i, (pkg, atom) in enumerate(candidate_pkg_atoms):
31659 + if pkg not in all_candidate_pkgs:
31660 + continue
31661 + if selected is None or \
31662 + selected[0] < pkg or \
31663 + (selected[0] is pkg and i < selected[2]):
31664 + selected = (pkg, atom, i)
31665 +
31666 + if debug:
31667 + msg = []
31668 + msg.append("")
31669 + msg.append("")
31670 + msg.append("slot_operator_update_probe:")
31671 + msg.append(" existing child package: %s" % dep.child)
31672 + msg.append(" existing parent package: %s" % dep.parent)
31673 + msg.append(" new child package: %s" % selected[0])
31674 + msg.append(" new parent package: %s" % replacement_parent)
31675 + msg.append("")
31676 + writemsg_level("\n".join(msg),
31677 + noiselevel=-1, level=logging.DEBUG)
31678 +
31679 + return Dependency(parent=replacement_parent,
31680 + child=selected[0], atom=selected[1])
31681 +
31682 + if debug:
31683 + msg = []
31684 + msg.append("")
31685 + msg.append("")
31686 + msg.append("slot_operator_update_probe:")
31687 + msg.append(" existing child package: %s" % dep.child)
31688 + msg.append(" existing parent package: %s" % dep.parent)
31689 + msg.append(" new child package: %s" % None)
31690 + msg.append(" new parent package: %s" % None)
31691 + msg.append("")
31692 + writemsg_level("\n".join(msg),
31693 + noiselevel=-1, level=logging.DEBUG)
31694 +
31695 + return None
31696 +
31697 + def _slot_operator_unsatisfied_probe(self, dep):
31698 +
31699 + if dep.parent.installed and \
31700 + self._frozen_config.excluded_pkgs.findAtomForPackage(dep.parent,
31701 + modified_use=self._pkg_use_enabled(dep.parent)):
31702 + return False
31703 +
31704 + debug = "--debug" in self._frozen_config.myopts
31705 +
31706 + for replacement_parent in self._iter_similar_available(dep.parent,
31707 + dep.parent.slot_atom):
31708 +
31709 + for atom in replacement_parent.validated_atoms:
31710 + if not atom.slot_operator == "=" or \
31711 + atom.blocker or \
31712 + atom.cp != dep.atom.cp:
31713 + continue
31714 +
31715 + # Discard USE deps, we're only searching for an approximate
31716 + # pattern, and dealing with USE states is too complex for
31717 + # this purpose.
31718 + atom = atom.without_use
31719 +
31720 + pkg, existing_node = self._select_package(dep.root, atom,
31721 + onlydeps=dep.onlydeps)
31722 +
31723 + if pkg is not None:
31724 +
31725 if debug:
31726 msg = []
31727 msg.append("")
31728 msg.append("")
31729 - msg.append("slot_abi_update_probe:")
31730 - msg.append(" existing child package: %s" % dep.child)
31731 + msg.append("slot_operator_unsatisfied_probe:")
31732 msg.append(" existing parent package: %s" % dep.parent)
31733 - msg.append(" new child package: %s" % pkg)
31734 + msg.append(" existing parent atom: %s" % dep.atom)
31735 msg.append(" new parent package: %s" % replacement_parent)
31736 + msg.append(" new child package: %s" % pkg)
31737 msg.append("")
31738 writemsg_level("\n".join(msg),
31739 noiselevel=-1, level=logging.DEBUG)
31740
31741 - return pkg
31742 + return True
31743
31744 if debug:
31745 msg = []
31746 msg.append("")
31747 msg.append("")
31748 - msg.append("slot_abi_update_probe:")
31749 - msg.append(" existing child package: %s" % dep.child)
31750 + msg.append("slot_operator_unsatisfied_probe:")
31751 msg.append(" existing parent package: %s" % dep.parent)
31752 - msg.append(" new child package: %s" % None)
31753 + msg.append(" existing parent atom: %s" % dep.atom)
31754 msg.append(" new parent package: %s" % None)
31755 + msg.append(" new child package: %s" % None)
31756 msg.append("")
31757 writemsg_level("\n".join(msg),
31758 noiselevel=-1, level=logging.DEBUG)
31759
31760 - return None
31761 + return False
31762 +
31763 + def _slot_operator_unsatisfied_backtrack(self, dep):
31764 +
31765 + parent = dep.parent
31766 +
31767 + if "--debug" in self._frozen_config.myopts:
31768 + msg = []
31769 + msg.append("")
31770 + msg.append("")
31771 + msg.append("backtracking due to unsatisfied "
31772 + "built slot-operator dep:")
31773 + msg.append(" parent package: %s" % parent)
31774 + msg.append(" atom: %s" % dep.atom)
31775 + msg.append("")
31776 + writemsg_level("\n".join(msg),
31777 + noiselevel=-1, level=logging.DEBUG)
31778 +
31779 + backtrack_infos = self._dynamic_config._backtrack_infos
31780 + config = backtrack_infos.setdefault("config", {})
31781 +
31782 + # mask unwanted binary packages if necessary
31783 + masks = {}
31784 + if not parent.installed:
31785 + masks.setdefault(parent, {})["slot_operator_mask_built"] = None
31786 + if masks:
31787 + config.setdefault("slot_operator_mask_built", {}).update(masks)
31788 +
31789 + # trigger replacement of installed packages if necessary
31790 + reinstalls = set()
31791 + if parent.installed:
31792 + replacement_atom = self._replace_installed_atom(parent)
31793 + if replacement_atom is not None:
31794 + reinstalls.add((parent.root, replacement_atom))
31795 + if reinstalls:
31796 + config.setdefault("slot_operator_replace_installed",
31797 + set()).update(reinstalls)
31798 +
31799 + self._dynamic_config._need_restart = True
31800
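
A minimal sketch (not part of the patch) of the shape of the backtracking info assembled above: a nested dict under the "config" key, with built packages to mask and installed packages to replace collected separately. Plain strings stand in for the real Package/Atom objects, and the mask value is simplified to None.

    def record_slot_operator_backtrack(backtrack_infos, parent, installed,
                                       replacement_atom, root="/"):
        config = backtrack_infos.setdefault("config", {})
        if not installed:
            # Mask the unwanted built package so the next run avoids it.
            config.setdefault("slot_operator_mask_built", {})[parent] = None
        elif replacement_atom is not None:
            # Schedule replacement of the installed package.
            config.setdefault("slot_operator_replace_installed",
                              set()).add((root, replacement_atom))

    infos = {}
    record_slot_operator_backtrack(infos, "app-misc/foo-1", False, None)
    record_slot_operator_backtrack(infos, "app-misc/bar-2", True, "app-misc/bar:0")
    print(sorted(infos["config"]))
    # ['slot_operator_mask_built', 'slot_operator_replace_installed']
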
31801 def _downgrade_probe(self, pkg):
31802 """
31803 @@ -1142,7 +1830,19 @@ class depgraph(object):
31804
31805 return available_pkg is not None
31806
31807 - def _iter_similar_available(self, graph_pkg, atom):
31808 + def _select_atoms_probe(self, root, pkg):
31809 + selected_atoms = []
31810 + use = self._pkg_use_enabled(pkg)
31811 + for k in pkg._dep_keys:
31812 + v = pkg._metadata.get(k)
31813 + if not v:
31814 + continue
31815 + selected_atoms.extend(self._select_atoms(
31816 + root, v, myuse=use, parent=pkg)[pkg])
31817 + return frozenset(x.unevaluated_atom for
31818 + x in selected_atoms)
31819 +
31820 + def _iter_similar_available(self, graph_pkg, atom, autounmask_level=None):
31821 """
31822 Given a package that's in the graph, do a rough check to
31823 see if a similar package is available to install. The given
31824 @@ -1166,49 +1866,91 @@ class depgraph(object):
31825 if self._frozen_config.excluded_pkgs.findAtomForPackage(pkg,
31826 modified_use=self._pkg_use_enabled(pkg)):
31827 continue
31828 - if not self._pkg_visibility_check(pkg):
31829 - continue
31830 if pkg.built:
31831 if self._equiv_binary_installed(pkg):
31832 continue
31833 if not (not use_ebuild_visibility and
31834 (usepkgonly or useoldpkg_atoms.findAtomForPackage(
31835 pkg, modified_use=self._pkg_use_enabled(pkg)))) and \
31836 - not self._equiv_ebuild_visible(pkg):
31837 + not self._equiv_ebuild_visible(pkg,
31838 + autounmask_level=autounmask_level):
31839 continue
31840 + if not self._pkg_visibility_check(pkg,
31841 + autounmask_level=autounmask_level):
31842 + continue
31843 yield pkg
31844
31845 - def _slot_abi_trigger_reinstalls(self):
31846 + def _replace_installed_atom(self, inst_pkg):
31847 + """
31848 + Given an installed package, generate an atom suitable for
31849 + slot_operator_replace_installed backtracking info. The replacement
31850 + SLOT may differ from the installed SLOT, so first search by cpv.
31851 + """
31852 + built_pkgs = []
31853 + for pkg in self._iter_similar_available(inst_pkg,
31854 + Atom("=%s" % inst_pkg.cpv)):
31855 + if not pkg.built:
31856 + return pkg.slot_atom
31857 + elif not pkg.installed:
31858 + # avoid using SLOT from a built instance
31859 + built_pkgs.append(pkg)
31860 +
31861 + for pkg in self._iter_similar_available(inst_pkg, inst_pkg.slot_atom):
31862 + if not pkg.built:
31863 + return pkg.slot_atom
31864 + elif not pkg.installed:
31865 + # avoid using SLOT from a built instance
31866 + built_pkgs.append(pkg)
31867 +
31868 + if built_pkgs:
31869 + best_version = None
31870 + for pkg in built_pkgs:
31871 + if best_version is None or pkg > best_version:
31872 + best_version = pkg
31873 + return best_version.slot_atom
31874 +
31875 + return None
31876 +
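
The selection order in _replace_installed_atom, restated as a standalone sketch (not part of the patch): prefer the SLOT of a matching ebuild, first among exact-version matches and then among same-slot matches, and only fall back to the highest matching built package. Candidates are plain tuples here rather than Package objects.

    def pick_replacement_slot(candidates_by_cpv, candidates_by_slot):
        """Each candidate is (slot_atom, built, installed, version), already
        filtered down to 'similar available' packages."""
        built = []
        for group in (candidates_by_cpv, candidates_by_slot):
            for slot_atom, is_built, is_installed, version in group:
                if not is_built:
                    return slot_atom        # an ebuild: trust its SLOT directly
                if not is_installed:
                    built.append((version, slot_atom))
        if built:
            return max(built)[1]            # highest built version as a fallback
        return None

    print(pick_replacement_slot(
        [("dev-libs/foo:2", True, False, 2)],      # a binary package, same cpv
        [("dev-libs/foo:1", False, False, 1)]))    # an ebuild, same slot -> 'dev-libs/foo:1'
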
31877 + def _slot_operator_trigger_reinstalls(self):
31878 """
31879 - Search for packages with slot-abi deps on older slots, and schedule
31880 + Search for packages with slot-operator deps on older slots, and schedule
31881 rebuilds if they can link to a newer slot that's in the graph.
31882 """
31883
31884 - rebuild_if_new_slot_abi = self._dynamic_config.myparams.get(
31885 - "rebuild_if_new_slot_abi", "y") == "y"
31886 + rebuild_if_new_slot = self._dynamic_config.myparams.get(
31887 + "rebuild_if_new_slot", "y") == "y"
31888
31889 - for slot_key, slot_info in self._dynamic_config._slot_abi_deps.items():
31890 + for slot_key, slot_info in self._dynamic_config._slot_operator_deps.items():
31891
31892 for dep in slot_info:
31893 - if not (dep.child.built and dep.parent and
31894 +
31895 + atom = dep.atom
31896 + if atom.slot_operator is None:
31897 + continue
31898 +
31899 + if not atom.slot_operator_built:
31900 + new_child_slot = self._slot_change_probe(dep)
31901 + if new_child_slot is not None:
31902 + self._slot_change_backtrack(dep, new_child_slot)
31903 + continue
31904 +
31905 + if not (dep.parent and
31906 isinstance(dep.parent, Package) and dep.parent.built):
31907 continue
31908
31909 # Check for slot update first, since we don't want to
31910 # trigger reinstall of the child package when a newer
31911 # slot will be used instead.
31912 - if rebuild_if_new_slot_abi:
31913 - new_child = self._slot_abi_update_probe(dep,
31914 + if rebuild_if_new_slot:
31915 + new_dep = self._slot_operator_update_probe(dep,
31916 new_child_slot=True)
31917 - if new_child:
31918 - self._slot_abi_update_backtrack(dep,
31919 - new_child_slot=new_child)
31920 - break
31921 + if new_dep is not None:
31922 + self._slot_operator_update_backtrack(dep,
31923 + new_child_slot=new_dep.child)
31924
31925 if dep.want_update:
31926 - if self._slot_abi_update_probe(dep):
31927 - self._slot_abi_update_backtrack(dep)
31928 - break
31929 + if self._slot_operator_update_probe(dep):
31930 + self._slot_operator_update_backtrack(dep)
31931
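
A compressed sketch (not part of the patch) of the dispatch in _slot_operator_trigger_reinstalls: each recorded slot-operator dep with a built parent first gets a chance to move to a newer child slot, and only then a plain rebuild probe. The probe/backtrack callables are placeholders for the methods defined above, and deps are plain dicts.

    def schedule_rebuilds(slot_operator_deps, update_probe, update_backtrack,
                          rebuild_if_new_slot=True):
        for deps in slot_operator_deps.values():
            for dep in deps:
                if rebuild_if_new_slot:
                    new_dep = update_probe(dep, new_child_slot=True)
                    if new_dep is not None:
                        update_backtrack(dep, new_child_slot=new_dep)
                if dep.get("want_update") and update_probe(dep):
                    update_backtrack(dep)

    calls = []
    schedule_rebuilds(
        {("/", "x11-libs/gtk+:3"): [{"want_update": True}]},
        update_probe=lambda dep, new_child_slot=False: None if new_child_slot else True,
        update_backtrack=lambda dep, new_child_slot=None: calls.append(dep))
    print(calls)  # the single dep gets a rebuild scheduled
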
31932 def _reinstall_for_flags(self, pkg, forced_flags,
31933 orig_use, orig_iuse, cur_use, cur_iuse):
31934 @@ -1222,18 +1964,22 @@ class depgraph(object):
31935 in ("y", "auto"))
31936 newuse = "--newuse" in self._frozen_config.myopts
31937 changed_use = "changed-use" == self._frozen_config.myopts.get("--reinstall")
31938 + feature_flags = _get_feature_flags(
31939 + _get_eapi_attrs(pkg.eapi))
31940
31941 if newuse or (binpkg_respect_use and not changed_use):
31942 flags = set(orig_iuse.symmetric_difference(
31943 cur_iuse).difference(forced_flags))
31944 flags.update(orig_iuse.intersection(orig_use).symmetric_difference(
31945 cur_iuse.intersection(cur_use)))
31946 + flags.difference_update(feature_flags)
31947 if flags:
31948 return flags
31949
31950 elif changed_use or binpkg_respect_use:
31951 - flags = orig_iuse.intersection(orig_use).symmetric_difference(
31952 - cur_iuse.intersection(cur_use))
31953 + flags = set(orig_iuse.intersection(orig_use).symmetric_difference(
31954 + cur_iuse.intersection(cur_use)))
31955 + flags.difference_update(feature_flags)
31956 if flags:
31957 return flags
31958 return None
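
A worked example (not from the patch) of the flag arithmetic in _reinstall_for_flags: the flags that changed between the old and the candidate build are the symmetric difference of the IUSE sets plus the symmetric difference of the enabled subsets, minus forced flags and the EAPI feature flags filtered out above.

    orig_iuse, cur_iuse = {"ssl", "gtk"}, {"ssl", "gtk", "qt5"}
    orig_use,  cur_use  = {"ssl"},        {"ssl", "qt5"}
    forced_flags  = {"ssl"}     # profile-forced, never a reason to rebuild
    feature_flags = set()       # EAPI feature flags would be subtracted here too

    flags = set(orig_iuse.symmetric_difference(cur_iuse).difference(forced_flags))
    flags.update(orig_iuse.intersection(orig_use).symmetric_difference(
        cur_iuse.intersection(cur_use)))
    flags.difference_update(feature_flags)
    print(flags)  # {'qt5'}: only the newly introduced, newly enabled flag counts
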
31959 @@ -1319,11 +2065,16 @@ class depgraph(object):
31960 buildpkgonly = "--buildpkgonly" in self._frozen_config.myopts
31961 nodeps = "--nodeps" in self._frozen_config.myopts
31962 if dep.blocker:
31963 +
31964 + # Slot collision nodes are not allowed to block other packages since
31965 + # blocker validation is only able to account for one package per slot.
31966 + is_slot_conflict_parent = any(dep.parent in conflict.pkgs[1:] for conflict in \
31967 + self._dynamic_config._package_tracker.slot_conflicts())
31968 if not buildpkgonly and \
31969 not nodeps and \
31970 not dep.collapsed_priority.ignored and \
31971 not dep.collapsed_priority.optional and \
31972 - dep.parent not in self._dynamic_config._slot_collision_nodes:
31973 + not is_slot_conflict_parent:
31974 if dep.parent.onlydeps:
31975 # It's safe to ignore blockers if the
31976 # parent is an --onlydeps node.
31977 @@ -1331,7 +2082,7 @@ class depgraph(object):
31978 # The blocker applies to the root where
31979 # the parent is or will be installed.
31980 blocker = Blocker(atom=dep.atom,
31981 - eapi=dep.parent.metadata["EAPI"],
31982 + eapi=dep.parent.eapi,
31983 priority=dep.priority, root=dep.parent.root)
31984 self._dynamic_config._blocker_parents.add(blocker, dep.parent)
31985 return 1
31986 @@ -1343,8 +2094,8 @@ class depgraph(object):
31987 # The caller has selected a specific package
31988 # via self._minimize_packages().
31989 dep_pkg = dep.child
31990 - existing_node = self._dynamic_config._slot_pkg_map[
31991 - dep.root].get(dep_pkg.slot_atom)
31992 + existing_node = next(self._dynamic_config._package_tracker.match(
31993 + dep.root, dep_pkg.slot_atom, installed=False), None)
31994
31995 if not dep_pkg:
31996 if (dep.collapsed_priority.optional or
31997 @@ -1368,9 +2119,17 @@ class depgraph(object):
31998 (dep.parent,
31999 self._dynamic_config._runtime_pkg_mask[
32000 dep.parent]), noiselevel=-1)
32001 - elif not self.need_restart():
32002 + elif dep.atom.slot_operator_built and \
32003 + self._slot_operator_unsatisfied_probe(dep):
32004 + self._slot_operator_unsatisfied_backtrack(dep)
32005 + return 1
32006 + else:
32007 # Do not backtrack if only USE have to be changed in
32008 - # order to satisfy the dependency.
32009 + # order to satisfy the dependency. Note that when
32010 + # want_restart_for_use_change sets the need_restart
32011 + # flag, it causes _select_pkg_highest_available to
32012 + # return None, and eventually we come through here
32013 + # and skip the "missing dependency" backtracking path.
32014 dep_pkg, existing_node = \
32015 self._select_package(dep.root, dep.atom.without_use,
32016 onlydeps=dep.onlydeps)
32017 @@ -1401,7 +2160,9 @@ class depgraph(object):
32018 return 1
32019
32020 def _check_slot_conflict(self, pkg, atom):
32021 - existing_node = self._dynamic_config._slot_pkg_map[pkg.root].get(pkg.slot_atom)
32022 + existing_node = next(self._dynamic_config._package_tracker.match(
32023 + pkg.root, pkg.slot_atom, installed=False), None)
32024 +
32025 matches = None
32026 if existing_node:
32027 matches = pkg.cpv == existing_node.cpv
32028 @@ -1477,12 +2238,13 @@ class depgraph(object):
32029 # package selection, since we want to prompt the user
32030 # for USE adjustment rather than have REQUIRED_USE
32031 # affect package selection and || dep choices.
32032 - if not pkg.built and pkg.metadata.get("REQUIRED_USE") and \
32033 - eapi_has_required_use(pkg.metadata["EAPI"]):
32034 + if not pkg.built and pkg._metadata.get("REQUIRED_USE") and \
32035 + eapi_has_required_use(pkg.eapi):
32036 required_use_is_sat = check_required_use(
32037 - pkg.metadata["REQUIRED_USE"],
32038 + pkg._metadata["REQUIRED_USE"],
32039 self._pkg_use_enabled(pkg),
32040 - pkg.iuse.is_valid_flag)
32041 + pkg.iuse.is_valid_flag,
32042 + eapi=pkg.eapi)
32043 if not required_use_is_sat:
32044 if dep.atom is not None and dep.parent is not None:
32045 self._add_parent_atom(pkg, (dep.parent, dep.atom))
32046 @@ -1505,30 +2267,29 @@ class depgraph(object):
32047
32048 existing_node, existing_node_matches = \
32049 self._check_slot_conflict(pkg, dep.atom)
32050 - slot_collision = False
32051 if existing_node:
32052 if existing_node_matches:
32053 # The existing node can be reused.
32054 - if arg_atoms:
32055 - for parent_atom in arg_atoms:
32056 - parent, atom = parent_atom
32057 - self._dynamic_config.digraph.add(existing_node, parent,
32058 - priority=priority)
32059 - self._add_parent_atom(existing_node, parent_atom)
32060 - # If a direct circular dependency is not an unsatisfied
32061 - # buildtime dependency then drop it here since otherwise
32062 - # it can skew the merge order calculation in an unwanted
32063 - # way.
32064 - if existing_node != myparent or \
32065 - (priority.buildtime and not priority.satisfied):
32066 - self._dynamic_config.digraph.addnode(existing_node, myparent,
32067 - priority=priority)
32068 - if dep.atom is not None and dep.parent is not None:
32069 - self._add_parent_atom(existing_node,
32070 - (dep.parent, dep.atom))
32071 - return 1
32072 + if pkg != existing_node:
32073 + pkg = existing_node
32074 + previously_added = True
32075 + try:
32076 + arg_atoms = list(self._iter_atoms_for_pkg(pkg))
32077 + except InvalidDependString as e:
32078 + if not pkg.installed:
32079 + # should have been masked before
32080 + # it was selected
32081 + raise
32082 +
32083 + if debug:
32084 + writemsg_level(
32085 + "%s%s %s\n" % ("Re-used Child:".ljust(15),
32086 + pkg, pkg_use_display(pkg,
32087 + self._frozen_config.myopts,
32088 + modified_use=self._pkg_use_enabled(pkg))),
32089 + level=logging.DEBUG, noiselevel=-1)
32090 +
32091 else:
32092 - self._add_slot_conflict(pkg)
32093 if debug:
32094 writemsg_level(
32095 "%s%s %s\n" % ("Slot Conflict:".ljust(15),
32096 @@ -1537,23 +2298,8 @@ class depgraph(object):
32097 modified_use=self._pkg_use_enabled(existing_node))),
32098 level=logging.DEBUG, noiselevel=-1)
32099
32100 - slot_collision = True
32101 -
32102 - if slot_collision:
32103 - # Now add this node to the graph so that self.display()
32104 - # can show use flags and --tree portage.output. This node is
32105 - # only being partially added to the graph. It must not be
32106 - # allowed to interfere with the other nodes that have been
32107 - # added. Do not overwrite data for existing nodes in
32108 - # self._dynamic_config.mydbapi since that data will be used for blocker
32109 - # validation.
32110 - # Even though the graph is now invalid, continue to process
32111 - # dependencies so that things like --fetchonly can still
32112 - # function despite collisions.
32113 - pass
32114 - elif not previously_added:
32115 - self._dynamic_config._slot_pkg_map[pkg.root][pkg.slot_atom] = pkg
32116 - self._dynamic_config.mydbapi[pkg.root].cpv_inject(pkg)
32117 + if not previously_added:
32118 + self._dynamic_config._package_tracker.add_pkg(pkg)
32119 self._dynamic_config._filtered_trees[pkg.root]["porttree"].dbapi._clear_cache()
32120 self._dynamic_config._highest_pkg_cache.clear()
32121 self._check_masks(pkg)
32122 @@ -1563,11 +2309,11 @@ class depgraph(object):
32123 # doesn't already. Any pre-existing providers will be preferred
32124 # over this one.
32125 try:
32126 - pkgsettings.setinst(pkg.cpv, pkg.metadata)
32127 + pkgsettings.setinst(pkg.cpv, pkg._metadata)
32128 # For consistency, also update the global virtuals.
32129 settings = self._frozen_config.roots[pkg.root].settings
32130 settings.unlock()
32131 - settings.setinst(pkg.cpv, pkg.metadata)
32132 + settings.setinst(pkg.cpv, pkg._metadata)
32133 settings.lock()
32134 except portage.exception.InvalidDependString:
32135 if not pkg.installed:
32136 @@ -1577,12 +2323,19 @@ class depgraph(object):
32137 if arg_atoms:
32138 self._dynamic_config._set_nodes.add(pkg)
32139
32140 - # Do this even when addme is False (--onlydeps) so that the
32141 + # Do this even for onlydeps, so that the
32142 # parent/child relationship is always known in case
32143 # self._show_slot_collision_notice() needs to be called later.
32144 - self._dynamic_config.digraph.add(pkg, myparent, priority=priority)
32145 - if dep.atom is not None and dep.parent is not None:
32146 - self._add_parent_atom(pkg, (dep.parent, dep.atom))
32147 + # If a direct circular dependency is not an unsatisfied
32148 + # buildtime dependency then drop it here since otherwise
32149 + # it can skew the merge order calculation in an unwanted
32150 + # way.
32151 + if pkg != dep.parent or \
32152 + (priority.buildtime and not priority.satisfied):
32153 + self._dynamic_config.digraph.add(pkg,
32154 + dep.parent, priority=priority)
32155 + if dep.atom is not None and dep.parent is not None:
32156 + self._add_parent_atom(pkg, (dep.parent, dep.atom))
32157
32158 if arg_atoms:
32159 for parent_atom in arg_atoms:
32160 @@ -1612,9 +2365,9 @@ class depgraph(object):
32161 not (deep is not True and depth > deep))
32162
32163 dep.child = pkg
32164 - if (not pkg.onlydeps and pkg.built and
32165 - dep.atom and dep.atom.slot_abi_built):
32166 - self._add_slot_abi_dep(dep)
32167 + if (not pkg.onlydeps and
32168 + dep.atom and dep.atom.slot_operator is not None):
32169 + self._add_slot_operator_dep(dep)
32170
32171 recurse = deep is True or depth + 1 <= deep
32172 dep_stack = self._dynamic_config._dep_stack
32173 @@ -1629,6 +2382,64 @@ class depgraph(object):
32174 dep_stack.append(pkg)
32175 return 1
32176
32177 +
32178 + def _remove_pkg(self, pkg):
32179 + """
32180 + Remove a package and all its then parentless digraph
32181 + children from all depgraph datastructures.
32182 + """
32183 + debug = "--debug" in self._frozen_config.myopts
32184 + if debug:
32185 + writemsg_level(
32186 + "Removing package: %s\n" % pkg,
32187 + level=logging.DEBUG, noiselevel=-1)
32188 +
32189 + try:
32190 + children = [child for child in self._dynamic_config.digraph.child_nodes(pkg) \
32191 + if child is not pkg]
32192 + self._dynamic_config.digraph.remove(pkg)
32193 + except KeyError:
32194 + children = []
32195 +
32196 + self._dynamic_config._package_tracker.discard_pkg(pkg)
32197 +
32198 + self._dynamic_config._parent_atoms.pop(pkg, None)
32199 + self._dynamic_config._set_nodes.discard(pkg)
32200 +
32201 + for child in children:
32202 + try:
32203 + self._dynamic_config._parent_atoms[child] = set((parent, atom) \
32204 + for (parent, atom) in self._dynamic_config._parent_atoms[child] \
32205 + if parent is not pkg)
32206 + except KeyError:
32207 + pass
32208 +
32209 + # Remove slot operator dependencies.
32210 + slot_key = (pkg.root, pkg.slot_atom)
32211 + if slot_key in self._dynamic_config._slot_operator_deps:
32212 + self._dynamic_config._slot_operator_deps[slot_key] = \
32213 + [dep for dep in self._dynamic_config._slot_operator_deps[slot_key] \
32214 + if dep.child is not pkg]
32215 + if not self._dynamic_config._slot_operator_deps[slot_key]:
32216 + del self._dynamic_config._slot_operator_deps[slot_key]
32217 +
32218 + # Remove blockers.
32219 + self._dynamic_config._blocker_parents.discard(pkg)
32220 + self._dynamic_config._irrelevant_blockers.discard(pkg)
32221 + self._dynamic_config._unsolvable_blockers.discard(pkg)
32222 + self._dynamic_config._blocked_pkgs.discard(pkg)
32223 + self._dynamic_config._blocked_world_pkgs.pop(pkg, None)
32224 +
32225 + for child in children:
32226 + if child in self._dynamic_config.digraph and \
32227 + not self._dynamic_config.digraph.parent_nodes(child):
32228 + self._remove_pkg(child)
32229 +
32230 + # Clear caches.
32231 + self._dynamic_config._filtered_trees[pkg.root]["porttree"].dbapi._clear_cache()
32232 + self._dynamic_config._highest_pkg_cache.clear()
32233 +
32234 +
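
The removal cascade in _remove_pkg, reduced to its graph core in a standalone sketch (not part of the patch): drop a node, then recursively drop any child left without parents. Plain parent/child dicts stand in for the depgraph's digraph and tracker structures.

    def remove_node(node, parents, children):
        """parents/children map each node to a set of neighbor nodes."""
        for child in children.pop(node, set()):
            parents[child].discard(node)
            if not parents[child]:
                # The child became parentless; it only existed for this node.
                remove_node(child, parents, children)
        for parent in parents.pop(node, set()):
            children[parent].discard(node)

    parents = {"app": set(), "libA": {"app"}, "libB": {"app", "other"}, "other": set()}
    children = {"app": {"libA", "libB"}, "other": {"libB"}, "libA": set(), "libB": set()}
    remove_node("app", parents, children)
    print(sorted(parents))  # ['libB', 'other']: libA went away with app, libB survives
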
32235 def _check_masks(self, pkg):
32236
32237 slot_key = (pkg.root, pkg.slot_atom)
32238 @@ -1647,33 +2458,23 @@ class depgraph(object):
32239 self._dynamic_config._parent_atoms[pkg] = parent_atoms
32240 parent_atoms.add(parent_atom)
32241
32242 - def _add_slot_abi_dep(self, dep):
32243 + def _add_slot_operator_dep(self, dep):
32244 slot_key = (dep.root, dep.child.slot_atom)
32245 - slot_info = self._dynamic_config._slot_abi_deps.get(slot_key)
32246 + slot_info = self._dynamic_config._slot_operator_deps.get(slot_key)
32247 if slot_info is None:
32248 slot_info = []
32249 - self._dynamic_config._slot_abi_deps[slot_key] = slot_info
32250 + self._dynamic_config._slot_operator_deps[slot_key] = slot_info
32251 slot_info.append(dep)
32252
32253 - def _add_slot_conflict(self, pkg):
32254 - self._dynamic_config._slot_collision_nodes.add(pkg)
32255 - slot_key = (pkg.slot_atom, pkg.root)
32256 - slot_nodes = self._dynamic_config._slot_collision_info.get(slot_key)
32257 - if slot_nodes is None:
32258 - slot_nodes = set()
32259 - slot_nodes.add(self._dynamic_config._slot_pkg_map[pkg.root][pkg.slot_atom])
32260 - self._dynamic_config._slot_collision_info[slot_key] = slot_nodes
32261 - slot_nodes.add(pkg)
32262 -
32263 def _add_pkg_deps(self, pkg, allow_unsatisfied=False):
32264
32265 myroot = pkg.root
32266 - metadata = pkg.metadata
32267 + metadata = pkg._metadata
32268 removal_action = "remove" in self._dynamic_config.myparams
32269 + eapi_attrs = _get_eapi_attrs(pkg.eapi)
32270
32271 edepend={}
32272 - depkeys = ["DEPEND","RDEPEND","PDEPEND"]
32273 - for k in depkeys:
32274 + for k in Package._dep_keys:
32275 edepend[k] = metadata[k]
32276
32277 if not pkg.built and \
32278 @@ -1700,31 +2501,44 @@ class depgraph(object):
32279 # Removal actions never traverse ignored buildtime
32280 # dependencies, so it's safe to discard them early.
32281 edepend["DEPEND"] = ""
32282 + edepend["HDEPEND"] = ""
32283 ignore_build_time_deps = True
32284
32285 + ignore_depend_deps = ignore_build_time_deps
32286 + ignore_hdepend_deps = ignore_build_time_deps
32287 +
32288 if removal_action:
32289 depend_root = myroot
32290 else:
32291 - depend_root = self._frozen_config._running_root.root
32292 - root_deps = self._frozen_config.myopts.get("--root-deps")
32293 - if root_deps is not None:
32294 - if root_deps is True:
32295 - depend_root = myroot
32296 - elif root_deps == "rdeps":
32297 - ignore_build_time_deps = True
32298 + if eapi_attrs.hdepend:
32299 + depend_root = myroot
32300 + else:
32301 + depend_root = self._frozen_config._running_root.root
32302 + root_deps = self._frozen_config.myopts.get("--root-deps")
32303 + if root_deps is not None:
32304 + if root_deps is True:
32305 + depend_root = myroot
32306 + elif root_deps == "rdeps":
32307 + ignore_depend_deps = True
32308
32309 # If rebuild mode is not enabled, it's safe to discard ignored
32310 # build-time dependencies. If you want these deps to be traversed
32311 # in "complete" mode then you need to specify --with-bdeps=y.
32312 - if ignore_build_time_deps and \
32313 - not self._rebuild.rebuild:
32314 - edepend["DEPEND"] = ""
32315 + if not self._rebuild.rebuild:
32316 + if ignore_depend_deps:
32317 + edepend["DEPEND"] = ""
32318 + if ignore_hdepend_deps:
32319 + edepend["HDEPEND"] = ""
32320
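
How the build-time dependency root is chosen above, condensed into a standalone helper (not part of the patch): with an hdepend-aware EAPI, DEPEND applies to the target root and HDEPEND to the host; otherwise DEPEND goes to the host root unless --root-deps redirects it or --root-deps=rdeps drops it.

    def build_dep_roots(target_root, host_root, eapi_has_hdepend, root_deps=None):
        """Return (depend_root, ignore_depend) the same way the block above
        does for non-removal actions."""
        if eapi_has_hdepend:
            return target_root, False          # HDEPEND covers the host side
        depend_root, ignore_depend = host_root, False
        if root_deps is True:
            depend_root = target_root          # --root-deps
        elif root_deps == "rdeps":
            ignore_depend = True               # --root-deps=rdeps
        return depend_root, ignore_depend

    print(build_dep_roots("/target/", "/", False))           # ('/', False)
    print(build_dep_roots("/target/", "/", False, "rdeps"))  # ('/', True)
    print(build_dep_roots("/target/", "/", True))            # ('/target/', False)
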
32321 deps = (
32322 (depend_root, edepend["DEPEND"],
32323 self._priority(buildtime=True,
32324 - optional=(pkg.built or ignore_build_time_deps),
32325 - ignored=ignore_build_time_deps)),
32326 + optional=(pkg.built or ignore_depend_deps),
32327 + ignored=ignore_depend_deps)),
32328 + (self._frozen_config._running_root.root, edepend["HDEPEND"],
32329 + self._priority(buildtime=True,
32330 + optional=(pkg.built or ignore_hdepend_deps),
32331 + ignored=ignore_hdepend_deps)),
32332 (myroot, edepend["RDEPEND"],
32333 self._priority(runtime=True)),
32334 (myroot, edepend["PDEPEND"],
32335 @@ -1749,7 +2563,7 @@ class depgraph(object):
32336 uselist=self._pkg_use_enabled(pkg),
32337 is_valid_flag=pkg.iuse.is_valid_flag,
32338 opconvert=True, token_class=Atom,
32339 - eapi=pkg.metadata['EAPI'])
32340 + eapi=pkg.eapi)
32341 except portage.exception.InvalidDependString as e:
32342 if not pkg.installed:
32343 # should have been masked before it was selected
32344 @@ -1763,7 +2577,7 @@ class depgraph(object):
32345 dep_string = portage.dep.use_reduce(dep_string,
32346 uselist=self._pkg_use_enabled(pkg),
32347 opconvert=True, token_class=Atom,
32348 - eapi=pkg.metadata['EAPI'])
32349 + eapi=pkg.eapi)
32350 except portage.exception.InvalidDependString as e:
32351 self._dynamic_config._masked_installed.add(pkg)
32352 del e
32353 @@ -1806,6 +2620,37 @@ class depgraph(object):
32354 finally:
32355 self._dynamic_config._autounmask = _autounmask_backup
32356
32357 + def _ignore_dependency(self, atom, pkg, child, dep, mypriority, recurse_satisfied):
32358 + """
32359 + In some cases, dep_check will return deps that shouldn't
32360 + be processed any further, so they are identified and
32361 + discarded here. Try to discard as few as possible since
32362 + discarded dependencies reduce the amount of information
32363 + available for optimization of merge order.
32364 + Don't ignore dependencies if pkg has a slot operator dependency on the child
32365 + and the child has changed slot/sub_slot.
32366 + """
32367 + if not mypriority.satisfied:
32368 + return False
32369 + slot_operator_rebuild = False
32370 + if atom.slot_operator == '=' and \
32371 + (pkg.root, pkg.slot_atom) in self._dynamic_config._slot_operator_replace_installed and \
32372 + mypriority.satisfied is not child and \
32373 + mypriority.satisfied.installed and \
32374 + child and \
32375 + not child.installed and \
32376 + (child.slot != mypriority.satisfied.slot or child.sub_slot != mypriority.satisfied.sub_slot):
32377 + slot_operator_rebuild = True
32378 +
32379 + return not atom.blocker and \
32380 + not recurse_satisfied and \
32381 + mypriority.satisfied.visible and \
32382 + dep.child is not None and \
32383 + not dep.child.installed and \
32384 + not any(self._dynamic_config._package_tracker.match(
32385 + dep.child.root, dep.child.slot_atom, installed=False)) and \
32386 + not slot_operator_rebuild
32387 +
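
A boiled-down restatement (not from the patch) of the _ignore_dependency predicate just defined, with a plain dict in place of the Dependency object: a dep already satisfied by a visible installed package is dropped only when keeping it would merely pull a brand-new child into the graph, and never when it is a blocker, when satisfied deps are being recursed anyway, or when a slot-operator rebuild is at stake.

    def should_ignore(dep):
        if not dep["satisfied_by_installed"]:
            return False
        return (not dep["blocker"]
            and not dep["recurse_satisfied"]        # complete-graph modes keep it
            and dep["satisfier_visible"]
            and dep["child_selected"]               # a concrete child was chosen...
            and not dep["child_installed"]          # ...that is not installed...
            and not dep["child_already_in_graph"]   # ...nor already in the graph
            and not dep["slot_operator_rebuild"])   # never hide a needed := rebuild

    print(should_ignore({"satisfied_by_installed": True, "blocker": False,
        "recurse_satisfied": False, "satisfier_visible": True,
        "child_selected": True, "child_installed": False,
        "child_already_in_graph": False, "slot_operator_rebuild": False}))  # True
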
32388 def _wrapped_add_pkg_dep_string(self, pkg, dep_root, dep_priority,
32389 dep_string, allow_unsatisfied):
32390 depth = pkg.depth + 1
32391 @@ -1864,6 +2709,13 @@ class depgraph(object):
32392
32393 mypriority = dep_priority.copy()
32394 if not atom.blocker:
32395 +
32396 + if atom.slot_operator == "=":
32397 + if mypriority.buildtime:
32398 + mypriority.buildtime_slot_op = True
32399 + if mypriority.runtime:
32400 + mypriority.runtime_slot_op = True
32401 +
32402 inst_pkgs = [inst_pkg for inst_pkg in
32403 reversed(vardb.match_pkgs(atom))
32404 if not reinstall_atoms.findAtomForPackage(inst_pkg,
32405 @@ -1883,19 +2735,12 @@ class depgraph(object):
32406 priority=mypriority, root=dep_root)
32407
32408 # In some cases, dep_check will return deps that shouldn't
32409 - # be proccessed any further, so they are identified and
32410 + # be processed any further, so they are identified and
32411 # discarded here. Try to discard as few as possible since
32412 # discarded dependencies reduce the amount of information
32413 # available for optimization of merge order.
32414 ignored = False
32415 - if not atom.blocker and \
32416 - not recurse_satisfied and \
32417 - mypriority.satisfied and \
32418 - mypriority.satisfied.visible and \
32419 - dep.child is not None and \
32420 - not dep.child.installed and \
32421 - self._dynamic_config._slot_pkg_map[dep.child.root].get(
32422 - dep.child.slot_atom) is None:
32423 + if self._ignore_dependency(atom, pkg, child, dep, mypriority, recurse_satisfied):
32424 myarg = None
32425 try:
32426 myarg = next(self._iter_atoms_for_pkg(dep.child), None)
32427 @@ -1998,14 +2843,7 @@ class depgraph(object):
32428 collapsed_parent=pkg, collapsed_priority=dep_priority)
32429
32430 ignored = False
32431 - if not atom.blocker and \
32432 - not recurse_satisfied and \
32433 - mypriority.satisfied and \
32434 - mypriority.satisfied.visible and \
32435 - dep.child is not None and \
32436 - not dep.child.installed and \
32437 - self._dynamic_config._slot_pkg_map[dep.child.root].get(
32438 - dep.child.slot_atom) is None:
32439 + if self._ignore_dependency(atom, pkg, child, dep, mypriority, recurse_satisfied):
32440 myarg = None
32441 try:
32442 myarg = next(self._iter_atoms_for_pkg(dep.child), None)
32443 @@ -2053,7 +2891,7 @@ class depgraph(object):
32444 yield (atom, None)
32445 continue
32446 dep_pkg, existing_node = self._select_package(
32447 - root_config.root, atom)
32448 + root_config.root, atom, parent=parent)
32449 if dep_pkg is None:
32450 yield (atom, None)
32451 continue
32452 @@ -2105,12 +2943,12 @@ class depgraph(object):
32453 # Yield ~, =*, < and <= atoms first, since those are more likely to
32454 # cause slot conflicts, and we want those atoms to be displayed
32455 # in the resulting slot conflict message (see bug #291142).
32456 - # Give similar treatment to SLOT/ABI atoms.
32457 + # Give similar treatment to slot/sub-slot atoms.
32458 conflict_atoms = []
32459 normal_atoms = []
32460 abi_atoms = []
32461 for atom in cp_atoms:
32462 - if atom.slot_abi_built:
32463 + if atom.slot_operator_built:
32464 abi_atoms.append(atom)
32465 continue
32466 conflict = False
32467 @@ -2135,7 +2973,7 @@ class depgraph(object):
32468 def _queue_disjunctive_deps(self, pkg, dep_root, dep_priority, dep_struct):
32469 """
32470 Queue disjunctive (virtual and ||) deps in self._dynamic_config._dep_disjunctive_stack.
32471 - Yields non-disjunctive deps. Raises InvalidDependString when
32472 + Yields non-disjunctive deps. Raises InvalidDependString when
32473 necessary.
32474 """
32475 for x in dep_struct:
32476 @@ -2242,9 +3080,24 @@ class depgraph(object):
32477 continue
32478 yield arg, atom
32479
32480 - def select_files(self, myfiles):
32481 + def select_files(self, args):
32482 + # Use the global event loop for spinner progress
32483 + # indication during file owner lookups (bug #461412).
32484 + spinner_id = None
32485 + try:
32486 + spinner = self._frozen_config.spinner
32487 + if spinner is not None and \
32488 + spinner.update is not spinner.update_quiet:
32489 + spinner_id = self._event_loop.idle_add(
32490 + self._frozen_config.spinner.update)
32491 + return self._select_files(args)
32492 + finally:
32493 + if spinner_id is not None:
32494 + self._event_loop.source_remove(spinner_id)
32495 +
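
The wrapper above registers the spinner as an idle callback for the duration of _select_files and always unregisters it afterwards. The same register/try/finally/unregister shape against a throwaway loop object (a sketch, not the Portage event-loop API):

    class TinyLoop:
        """Minimal stand-in with idle_add/source_remove-style bookkeeping."""
        def __init__(self):
            self._sources, self._next_id = {}, 0
        def idle_add(self, callback):
            self._next_id += 1
            self._sources[self._next_id] = callback
            return self._next_id
        def source_remove(self, source_id):
            return self._sources.pop(source_id, None) is not None

    def with_progress(loop, tick, work):
        source_id = None
        try:
            source_id = loop.idle_add(tick)
            return work()
        finally:
            if source_id is not None:
                loop.source_remove(source_id)

    loop = TinyLoop()
    print(with_progress(loop, lambda: None, lambda: "done"), loop._sources)  # done {}
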
32496 + def _select_files(self, myfiles):
32497 """Given a list of .tbz2s, .ebuilds sets, and deps, populate
32498 - self._dynamic_config._initial_arg_list and call self._resolve to create the
32499 + self._dynamic_config._initial_arg_list and call self._resolve to create the
32500 appropriate depgraph and return a favorite list."""
32501 self._load_vdb()
32502 debug = "--debug" in self._frozen_config.myopts
32503 @@ -2277,8 +3130,18 @@ class depgraph(object):
32504 writemsg("!!! Please ensure the tbz2 exists as specified.\n\n", noiselevel=-1)
32505 return 0, myfavorites
32506 mytbz2=portage.xpak.tbz2(x)
32507 - mykey=mytbz2.getelements("CATEGORY")[0]+"/"+os.path.splitext(os.path.basename(x))[0]
32508 - if os.path.realpath(x) != \
32509 + mykey = None
32510 + cat = mytbz2.getfile("CATEGORY")
32511 + if cat is not None:
32512 + cat = _unicode_decode(cat.strip(),
32513 + encoding=_encodings['repo.content'])
32514 + mykey = cat + "/" + os.path.basename(x)[:-5]
32515 +
32516 + if mykey is None:
32517 + writemsg(colorize("BAD", "\n*** Package is missing CATEGORY metadata: %s.\n\n" % x), noiselevel=-1)
32518 + self._dynamic_config._skip_restart = True
32519 + return 0, myfavorites
32520 + elif os.path.realpath(x) != \
32521 os.path.realpath(bindb.bintree.getname(mykey)):
32522 writemsg(colorize("BAD", "\n*** You need to adjust PKGDIR to emerge this package.\n\n"), noiselevel=-1)
32523 self._dynamic_config._skip_restart = True
32524 @@ -2293,15 +3156,16 @@ class depgraph(object):
32525 pkgdir = os.path.dirname(ebuild_path)
32526 tree_root = os.path.dirname(os.path.dirname(pkgdir))
32527 cp = pkgdir[len(tree_root)+1:]
32528 - e = portage.exception.PackageNotFound(
32529 - ("%s is not in a valid portage tree " + \
32530 - "hierarchy or does not exist") % x)
32531 + error_msg = ("\n\n!!! '%s' is not in a valid portage tree "
32532 + "hierarchy or does not exist\n") % x
32533 if not portage.isvalidatom(cp):
32534 - raise e
32535 + writemsg(error_msg, noiselevel=-1)
32536 + return 0, myfavorites
32537 cat = portage.catsplit(cp)[0]
32538 mykey = cat + "/" + os.path.basename(ebuild_path[:-7])
32539 if not portage.isvalidatom("="+mykey):
32540 - raise e
32541 + writemsg(error_msg, noiselevel=-1)
32542 + return 0, myfavorites
32543 ebuild_path = portdb.findname(mykey)
32544 if ebuild_path:
32545 if ebuild_path != os.path.join(os.path.realpath(tree_root),
32546 @@ -2317,8 +3181,8 @@ class depgraph(object):
32547 countdown(int(self._frozen_config.settings["EMERGE_WARNING_DELAY"]),
32548 "Continuing...")
32549 else:
32550 - raise portage.exception.PackageNotFound(
32551 - "%s is not in a valid portage tree hierarchy or does not exist" % x)
32552 + writemsg(error_msg, noiselevel=-1)
32553 + return 0, myfavorites
32554 pkg = self._pkg(mykey, "ebuild", root_config,
32555 onlydeps=onlydeps, myrepo=portdb.getRepositoryName(
32556 os.path.dirname(os.path.dirname(os.path.dirname(ebuild_path)))))
32557 @@ -2351,6 +3215,30 @@ class depgraph(object):
32558 raise portage.exception.PackageSetNotFound(s)
32559 if s in depgraph_sets.sets:
32560 continue
32561 +
32562 + try:
32563 + set_atoms = root_config.setconfig.getSetAtoms(s)
32564 + except portage.exception.PackageSetNotFound as e:
32565 + writemsg_level("\n\n", level=logging.ERROR,
32566 + noiselevel=-1)
32567 + for pset in list(depgraph_sets.sets.values()) + [sets[s]]:
32568 + for error_msg in pset.errors:
32569 + writemsg_level("%s\n" % (error_msg,),
32570 + level=logging.ERROR, noiselevel=-1)
32571 +
32572 + writemsg_level(("emerge: the given set '%s' "
32573 + "contains a non-existent set named '%s'.\n") % \
32574 + (s, e), level=logging.ERROR, noiselevel=-1)
32575 + if s in ('world', 'selected') and \
32576 + SETPREFIX + e.value in sets['selected']:
32577 + writemsg_level(("Use `emerge --deselect %s%s` to "
32578 + "remove this set from world_sets.\n") %
32579 + (SETPREFIX, e,), level=logging.ERROR,
32580 + noiselevel=-1)
32581 + writemsg_level("\n", level=logging.ERROR,
32582 + noiselevel=-1)
32583 + return False, myfavorites
32584 +
32585 pset = sets[s]
32586 depgraph_sets.sets[s] = pset
32587 args.append(SetArg(arg=x, pset=pset,
32588 @@ -2370,7 +3258,7 @@ class depgraph(object):
32589 # came from, if any.
32590 # 2) It takes away freedom from the resolver to choose other
32591 # possible expansions when necessary.
32592 - if "/" in x:
32593 + if "/" in x.split(":")[0]:
32594 args.append(AtomArg(arg=x, atom=Atom(x, allow_repo=True),
32595 root_config=root_config))
32596 continue
32597 @@ -2471,13 +3359,8 @@ class depgraph(object):
32598 return 0, []
32599
32600 for cpv in owners:
32601 - slot = vardb.aux_get(cpv, ["SLOT"])[0]
32602 - if not slot:
32603 - # portage now masks packages with missing slot, but it's
32604 - # possible that one was installed by an older version
32605 - atom = Atom(portage.cpv_getkey(cpv))
32606 - else:
32607 - atom = Atom("%s:%s" % (portage.cpv_getkey(cpv), slot))
32608 + pkg = vardb._pkg_str(cpv, None)
32609 + atom = Atom("%s:%s" % (pkg.cp, pkg.slot))
32610 args.append(AtomArg(arg=atom, atom=atom,
32611 root_config=root_config))
32612
32613 @@ -2542,7 +3425,7 @@ class depgraph(object):
32614 # Order needs to be preserved since a feature of --nodeps
32615 # is to allow the user to force a specific merge order.
32616 self._dynamic_config._initial_arg_list = args[:]
32617 -
32618 +
32619 return self._resolve(myfavorites)
32620
32621 def _gen_reinstall_sets(self):
32622 @@ -2552,8 +3435,8 @@ class depgraph(object):
32623 atom_list.append((root, '__auto_rebuild__', atom))
32624 for root, atom in self._rebuild.reinstall_list:
32625 atom_list.append((root, '__auto_reinstall__', atom))
32626 - for root, atom in self._dynamic_config._slot_abi_replace_installed:
32627 - atom_list.append((root, '__auto_slot_abi_replace_installed__', atom))
32628 + for root, atom in self._dynamic_config._slot_operator_replace_installed:
32629 + atom_list.append((root, '__auto_slot_operator_replace_installed__', atom))
32630
32631 set_dict = {}
32632 for root, set_name, atom in atom_list:
32633 @@ -2572,8 +3455,8 @@ class depgraph(object):
32634 root_config=self._frozen_config.roots[root])
32635
32636 def _resolve(self, myfavorites):
32637 - """Given self._dynamic_config._initial_arg_list, pull in the root nodes,
32638 - call self._creategraph to process their deps and return
32639 + """Given self._dynamic_config._initial_arg_list, pull in the root nodes,
32640 + call self._creategraph to process their deps and return
32641 a favorite list."""
32642 debug = "--debug" in self._frozen_config.myopts
32643 onlydeps = "--onlydeps" in self._frozen_config.myopts
32644 @@ -2624,6 +3507,16 @@ class depgraph(object):
32645 if pprovided_match:
32646 continue
32647
32648 + excluded = False
32649 + for any_match in self._iter_match_pkgs_any(
32650 + self._frozen_config.roots[myroot], atom):
32651 + if self._frozen_config.excluded_pkgs.findAtomForPackage(
32652 + any_match, modified_use=self._pkg_use_enabled(any_match)):
32653 + excluded = True
32654 + break
32655 + if excluded:
32656 + continue
32657 +
32658 if not (isinstance(arg, SetArg) and \
32659 arg.name in ("selected", "system", "world")):
32660 self._dynamic_config._unsatisfied_deps_for_display.append(
32661 @@ -2692,7 +3585,8 @@ class depgraph(object):
32662 except self._unknown_internal_error:
32663 return False, myfavorites
32664
32665 - if (self._dynamic_config._slot_collision_info and
32666 + have_slot_conflict = any(self._dynamic_config._package_tracker.slot_conflicts())
32667 + if (have_slot_conflict and
32668 not self._accept_blocker_conflicts()) or \
32669 (self._dynamic_config._allow_backtracking and
32670 "slot conflict" in self._dynamic_config._backtrack_infos):
32671 @@ -2707,11 +3601,47 @@ class depgraph(object):
32672 return False, myfavorites
32673
32674 if "config" in self._dynamic_config._backtrack_infos and \
32675 - ("slot_abi_mask_built" in self._dynamic_config._backtrack_infos["config"] or
32676 - "slot_abi_replace_installed" in self._dynamic_config._backtrack_infos["config"]) and \
32677 + ("slot_operator_mask_built" in self._dynamic_config._backtrack_infos["config"] or
32678 + "slot_operator_replace_installed" in self._dynamic_config._backtrack_infos["config"]) and \
32679 self.need_restart():
32680 return False, myfavorites
32681
32682 + if not self._dynamic_config._prune_rebuilds and \
32683 + self._dynamic_config._slot_operator_replace_installed and \
32684 + self._get_missed_updates():
32685 + # When there are missed updates, we might have triggered
32686 + # some unnecessary rebuilds (see bug #439688). So, prune
32687 + # all the rebuilds and backtrack with the problematic
32688 + # updates masked. The next backtrack run should pull in
32689 + # any rebuilds that are really needed, and this
32690 + # prune_rebuilds path should never be entered more than
32691 + # once in a series of backtracking nodes (in order to
32692 + # avoid a backtracking loop).
32693 + backtrack_infos = self._dynamic_config._backtrack_infos
32694 + config = backtrack_infos.setdefault("config", {})
32695 + config["prune_rebuilds"] = True
32696 + self._dynamic_config._need_restart = True
32697 + return False, myfavorites
32698 +
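
The prune_rebuilds path above is deliberately one-shot: the flag rides along in the backtrack config so a later run can drop the unnecessary rebuilds without the resolver ping-ponging between states. A minimal sketch (not from the patch) of that single-use guard, with dicts standing in for the dynamic config and backtrack info:

    def maybe_prune_rebuilds(state, backtrack_infos, missed_updates, scheduled_rebuilds):
        """Request one prune-and-restart pass at most once per backtrack series."""
        if state.get("prune_rebuilds") or not (missed_updates and scheduled_rebuilds):
            return False
        backtrack_infos.setdefault("config", {})["prune_rebuilds"] = True
        state["need_restart"] = True
        return True

    state, infos = {}, {}
    print(maybe_prune_rebuilds(state, infos, True, True))   # True: restart requested
    state["prune_rebuilds"] = True                          # the next run starts pruned
    print(maybe_prune_rebuilds(state, infos, True, True))   # False: it never loops
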
32699 + if self.need_restart():
32700 + # want_restart_for_use_change triggers this
32701 + return False, myfavorites
32702 +
32703 + if "--fetchonly" not in self._frozen_config.myopts and \
32704 + "--buildpkgonly" in self._frozen_config.myopts:
32705 + graph_copy = self._dynamic_config.digraph.copy()
32706 + removed_nodes = set()
32707 + for node in graph_copy:
32708 + if not isinstance(node, Package) or \
32709 + node.operation == "nomerge":
32710 + removed_nodes.add(node)
32711 + graph_copy.difference_update(removed_nodes)
32712 + if not graph_copy.hasallzeros(ignore_priority = \
32713 + DepPrioritySatisfiedRange.ignore_medium):
32714 + self._dynamic_config._buildpkgonly_deps_unsatisfied = True
32715 + self._dynamic_config._skip_restart = True
32716 + return False, myfavorites
32717 +
32718 # Any failures except those due to autounmask *alone* should return
32719 # before this point, since the success_without_autounmask flag that's
32720 # set below is reserved for cases where there are *zero* other
32721 @@ -2773,8 +3703,8 @@ class depgraph(object):
32722 if refs is None:
32723 refs = []
32724 atom_arg_map[atom_key] = refs
32725 - if arg not in refs:
32726 - refs.append(arg)
32727 + if arg not in refs:
32728 + refs.append(arg)
32729
32730 for root in self._dynamic_config.sets:
32731 depgraph_sets = self._dynamic_config.sets[root]
32732 @@ -2804,14 +3734,15 @@ class depgraph(object):
32733 slots = set()
32734 for cpv in vardb.match(atom):
32735 # don't mix new virtuals with old virtuals
32736 - if portage.cpv_getkey(cpv) == highest_pkg.cp:
32737 - slots.add(vardb.aux_get(cpv, ["SLOT"])[0])
32738 + pkg = vardb._pkg_str(cpv, None)
32739 + if pkg.cp == highest_pkg.cp:
32740 + slots.add(pkg.slot)
32741
32742 - slots.add(highest_pkg.metadata["SLOT"])
32743 + slots.add(highest_pkg.slot)
32744 if len(slots) == 1:
32745 return []
32746 greedy_pkgs = []
32747 - slots.remove(highest_pkg.metadata["SLOT"])
32748 + slots.remove(highest_pkg.slot)
32749 while slots:
32750 slot = slots.pop()
32751 slot_atom = portage.dep.Atom("%s:%s" % (highest_pkg.cp, slot))
32752 @@ -2825,9 +3756,9 @@ class depgraph(object):
32753 return [pkg.slot_atom for pkg in greedy_pkgs]
32754
32755 blockers = {}
32756 - blocker_dep_keys = ["DEPEND", "PDEPEND", "RDEPEND"]
32757 + blocker_dep_keys = Package._dep_keys
32758 for pkg in greedy_pkgs + [highest_pkg]:
32759 - dep_str = " ".join(pkg.metadata[k] for k in blocker_dep_keys)
32760 + dep_str = " ".join(pkg._metadata[k] for k in blocker_dep_keys)
32761 try:
32762 selected_atoms = self._select_atoms(
32763 pkg.root, dep_str, self._pkg_use_enabled(pkg),
32764 @@ -2879,7 +3810,8 @@ class depgraph(object):
32765 not been scheduled for replacement.
32766 """
32767 kwargs["trees"] = self._dynamic_config._graph_trees
32768 - return self._select_atoms_highest_available(*pargs, **kwargs)
32769 + return self._select_atoms_highest_available(*pargs,
32770 + **portage._native_kwargs(kwargs))
32771
32772 def _select_atoms_highest_available(self, root, depstring,
32773 myuse=None, parent=None, strict=True, trees=None, priority=None):
32774 @@ -2890,7 +3822,7 @@ class depgraph(object):
32775 eapi = None
32776 is_valid_flag = None
32777 if parent is not None:
32778 - eapi = parent.metadata['EAPI']
32779 + eapi = parent.eapi
32780 if not parent.installed:
32781 is_valid_flag = parent.iuse.is_valid_flag
32782 depstring = portage.dep.use_reduce(depstring,
32783 @@ -2898,9 +3830,9 @@ class depgraph(object):
32784 is_valid_flag=is_valid_flag, eapi=eapi)
32785
32786 if (self._dynamic_config.myparams.get(
32787 - "ignore_built_slot_abi_deps", "n") == "y" and
32788 + "ignore_built_slot_operator_deps", "n") == "y" and
32789 parent and parent.built):
32790 - ignore_built_slot_abi_deps(depstring)
32791 + ignore_built_slot_operator_deps(depstring)
32792
32793 pkgsettings = self._frozen_config.pkgsettings[root]
32794 if trees is None:
32795 @@ -3005,35 +3937,37 @@ class depgraph(object):
32796 def _expand_virt_from_graph(self, root, atom):
32797 if not isinstance(atom, Atom):
32798 atom = Atom(atom)
32799 - graphdb = self._dynamic_config.mydbapi[root]
32800 - match = graphdb.match_pkgs(atom)
32801 - if not match:
32802 - yield atom
32803 - return
32804 - pkg = match[-1]
32805 - if not pkg.cpv.startswith("virtual/"):
32806 - yield atom
32807 - return
32808 - try:
32809 - rdepend = self._select_atoms_from_graph(
32810 - pkg.root, pkg.metadata.get("RDEPEND", ""),
32811 - myuse=self._pkg_use_enabled(pkg),
32812 - parent=pkg, strict=False)
32813 - except InvalidDependString as e:
32814 - writemsg_level("!!! Invalid RDEPEND in " + \
32815 - "'%svar/db/pkg/%s/RDEPEND': %s\n" % \
32816 - (pkg.root, pkg.cpv, e),
32817 - noiselevel=-1, level=logging.ERROR)
32818 +
32819 + if not atom.cp.startswith("virtual/"):
32820 yield atom
32821 return
32822
32823 - for atoms in rdepend.values():
32824 - for atom in atoms:
32825 - if hasattr(atom, "_orig_atom"):
32826 - # Ignore virtual atoms since we're only
32827 - # interested in expanding the real atoms.
32828 - continue
32829 - yield atom
32830 + any_match = False
32831 + for pkg in self._dynamic_config._package_tracker.match(root, atom):
32832 + try:
32833 + rdepend = self._select_atoms_from_graph(
32834 + pkg.root, pkg._metadata.get("RDEPEND", ""),
32835 + myuse=self._pkg_use_enabled(pkg),
32836 + parent=pkg, strict=False)
32837 + except InvalidDependString as e:
32838 + writemsg_level("!!! Invalid RDEPEND in " + \
32839 + "'%svar/db/pkg/%s/RDEPEND': %s\n" % \
32840 + (pkg.root, pkg.cpv, e),
32841 + noiselevel=-1, level=logging.ERROR)
32842 + continue
32843 +
32844 + for atoms in rdepend.values():
32845 + for atom in atoms:
32846 + if hasattr(atom, "_orig_atom"):
32847 + # Ignore virtual atoms since we're only
32848 + # interested in expanding the real atoms.
32849 + continue
32850 + yield atom
32851 +
32852 + any_match = True
32853 +
32854 + if not any_match:
32855 + yield atom
32856
32857 def _virt_deps_visible(self, pkg, ignore_use=False):
32858 """
32859 @@ -3044,7 +3978,7 @@ class depgraph(object):
32860 """
32861 try:
32862 rdepend = self._select_atoms(
32863 - pkg.root, pkg.metadata.get("RDEPEND", ""),
32864 + pkg.root, pkg._metadata.get("RDEPEND", ""),
32865 myuse=self._pkg_use_enabled(pkg),
32866 parent=pkg, priority=self._priority(runtime=True))
32867 except InvalidDependString as e:
32868 @@ -3083,19 +4017,29 @@ class depgraph(object):
32869 child = None
32870 all_parents = self._dynamic_config._parent_atoms
32871 graph = self._dynamic_config.digraph
32872 + verbose_main_repo_display = "--verbose-main-repo-display" in \
32873 + self._frozen_config.myopts
32874 +
32875 + def format_pkg(pkg):
32876 + pkg_name = "%s" % (pkg.cpv,)
32877 + if verbose_main_repo_display or pkg.repo != \
32878 + pkg.root_config.settings.repositories.mainRepo().name:
32879 + pkg_name += _repo_separator + pkg.repo
32880 + return pkg_name
32881
32882 if target_atom is not None and isinstance(node, Package):
32883 affecting_use = set()
32884 - for dep_str in "DEPEND", "RDEPEND", "PDEPEND":
32885 + for dep_str in Package._dep_keys:
32886 try:
32887 affecting_use.update(extract_affecting_use(
32888 - node.metadata[dep_str], target_atom,
32889 - eapi=node.metadata["EAPI"]))
32890 + node._metadata[dep_str], target_atom,
32891 + eapi=node.eapi))
32892 except InvalidDependString:
32893 if not node.installed:
32894 raise
32895 affecting_use.difference_update(node.use.mask, node.use.force)
32896 - pkg_name = _unicode_decode("%s") % (node.cpv,)
32897 + pkg_name = format_pkg(node)
32898 +
32899 if affecting_use:
32900 usedep = []
32901 for flag in affecting_use:
32902 @@ -3150,7 +4094,7 @@ class depgraph(object):
32903 node_type = "set"
32904 else:
32905 node_type = "argument"
32906 - dep_chain.append((_unicode_decode("%s") % (node,), node_type))
32907 + dep_chain.append(("%s" % (node,), node_type))
32908
32909 elif node is not start_node:
32910 for ppkg, patom in all_parents[child]:
32911 @@ -3167,23 +4111,23 @@ class depgraph(object):
32912 if priorities is None:
32913 # This edge comes from _parent_atoms and was not added to
32914 # the graph, and _parent_atoms does not contain priorities.
32915 - dep_strings.add(node.metadata["DEPEND"])
32916 - dep_strings.add(node.metadata["RDEPEND"])
32917 - dep_strings.add(node.metadata["PDEPEND"])
32918 + for k in Package._dep_keys:
32919 + dep_strings.add(node._metadata[k])
32920 else:
32921 for priority in priorities:
32922 if priority.buildtime:
32923 - dep_strings.add(node.metadata["DEPEND"])
32924 + for k in Package._buildtime_keys:
32925 + dep_strings.add(node._metadata[k])
32926 if priority.runtime:
32927 - dep_strings.add(node.metadata["RDEPEND"])
32928 + dep_strings.add(node._metadata["RDEPEND"])
32929 if priority.runtime_post:
32930 - dep_strings.add(node.metadata["PDEPEND"])
32931 + dep_strings.add(node._metadata["PDEPEND"])
32932
32933 affecting_use = set()
32934 for dep_str in dep_strings:
32935 try:
32936 affecting_use.update(extract_affecting_use(
32937 - dep_str, atom, eapi=node.metadata["EAPI"]))
32938 + dep_str, atom, eapi=node.eapi))
32939 except InvalidDependString:
32940 if not node.installed:
32941 raise
32942 @@ -3192,7 +4136,7 @@ class depgraph(object):
32943 affecting_use.difference_update(node.use.mask, \
32944 node.use.force)
32945
32946 - pkg_name = _unicode_decode("%s") % (node.cpv,)
32947 + pkg_name = format_pkg(node)
32948 if affecting_use:
32949 usedep = []
32950 for flag in affecting_use:
32951 @@ -3244,8 +4188,7 @@ class depgraph(object):
32952 if self._dynamic_config.digraph.parent_nodes(parent_arg):
32953 selected_parent = parent_arg
32954 else:
32955 - dep_chain.append(
32956 - (_unicode_decode("%s") % (parent_arg,), "argument"))
32957 + dep_chain.append(("%s" % (parent_arg,), "argument"))
32958 selected_parent = None
32959
32960 node = selected_parent
32961 @@ -3260,7 +4203,7 @@ class depgraph(object):
32962 else:
32963 display_list.append("required by %s" % node)
32964
32965 - msg = "#" + ", ".join(display_list) + "\n"
32966 + msg = "# " + "\n# ".join(display_list) + "\n"
32967 return msg
32968
32969
32970 @@ -3281,7 +4224,7 @@ class depgraph(object):
32971 if arg:
32972 xinfo='"%s"' % arg
32973 if isinstance(myparent, AtomArg):
32974 - xinfo = _unicode_decode('"%s"') % (myparent,)
32975 + xinfo = '"%s"' % (myparent,)
32976 # Discard null/ from failed cpv_expand category expansion.
32977 xinfo = xinfo.replace("null/", "")
32978 if root != self._frozen_config._running_root.root:
32979 @@ -3326,9 +4269,9 @@ class depgraph(object):
32980 repo = metadata.get('repository')
32981 pkg = self._pkg(cpv, pkg_type, root_config,
32982 installed=installed, myrepo=repo)
32983 - # pkg.metadata contains calculated USE for ebuilds,
32984 + # pkg._metadata contains calculated USE for ebuilds,
32985 # required later for getMissingLicenses.
32986 - metadata = pkg.metadata
32987 + metadata = pkg._metadata
32988 if pkg.invalid:
32989 # Avoid doing any operations with packages that
32990 # have invalid metadata. It would be unsafe at
32991 @@ -3367,12 +4310,13 @@ class depgraph(object):
32992 raise
32993 if not mreasons and \
32994 not pkg.built and \
32995 - pkg.metadata.get("REQUIRED_USE") and \
32996 - eapi_has_required_use(pkg.metadata["EAPI"]):
32997 + pkg._metadata.get("REQUIRED_USE") and \
32998 + eapi_has_required_use(pkg.eapi):
32999 if not check_required_use(
33000 - pkg.metadata["REQUIRED_USE"],
33001 + pkg._metadata["REQUIRED_USE"],
33002 self._pkg_use_enabled(pkg),
33003 - pkg.iuse.is_valid_flag):
33004 + pkg.iuse.is_valid_flag,
33005 + eapi=pkg.eapi):
33006 required_use_unsatisfied.append(pkg)
33007 continue
33008 root_slot = (pkg.root, pkg.slot_atom)
33009 @@ -3422,7 +4366,7 @@ class depgraph(object):
33010 continue
33011
33012 missing_use_adjustable.add(pkg)
33013 - required_use = pkg.metadata.get("REQUIRED_USE")
33014 + required_use = pkg._metadata.get("REQUIRED_USE")
33015 required_use_warning = ""
33016 if required_use:
33017 old_use = self._pkg_use_enabled(pkg)
33018 @@ -3431,8 +4375,10 @@ class depgraph(object):
33019 new_use.add(flag)
33020 for flag in need_disable:
33021 new_use.discard(flag)
33022 - if check_required_use(required_use, old_use, pkg.iuse.is_valid_flag) and \
33023 - not check_required_use(required_use, new_use, pkg.iuse.is_valid_flag):
33024 + if check_required_use(required_use, old_use,
33025 + pkg.iuse.is_valid_flag, eapi=pkg.eapi) \
33026 + and not check_required_use(required_use, new_use,
33027 + pkg.iuse.is_valid_flag, eapi=pkg.eapi):
33028 required_use_warning = ", this change violates use flag constraints " + \
33029 "defined by %s: '%s'" % (pkg.cpv, human_readable_required_use(required_use))
33030
33031 @@ -3470,7 +4416,7 @@ class depgraph(object):
33032 if any(x in untouchable_flags for x in involved_flags):
33033 continue
33034
33035 - required_use = myparent.metadata.get("REQUIRED_USE")
33036 + required_use = myparent._metadata.get("REQUIRED_USE")
33037 required_use_warning = ""
33038 if required_use:
33039 old_use = self._pkg_use_enabled(myparent)
33040 @@ -3480,8 +4426,12 @@ class depgraph(object):
33041 new_use.discard(flag)
33042 else:
33043 new_use.add(flag)
33044 - if check_required_use(required_use, old_use, myparent.iuse.is_valid_flag) and \
33045 - not check_required_use(required_use, new_use, myparent.iuse.is_valid_flag):
33046 + if check_required_use(required_use, old_use,
33047 + myparent.iuse.is_valid_flag,
33048 + eapi=myparent.eapi) and \
33049 + not check_required_use(required_use, new_use,
33050 + myparent.iuse.is_valid_flag,
33051 + eapi=myparent.eapi):
33052 required_use_warning = ", this change violates use flag constraints " + \
33053 "defined by %s: '%s'" % (myparent.cpv, \
33054 human_readable_required_use(required_use))
33055 @@ -3568,14 +4518,15 @@ class depgraph(object):
33056 writemsg("\n The following REQUIRED_USE flag constraints " + \
33057 "are unsatisfied:\n", noiselevel=-1)
33058 reduced_noise = check_required_use(
33059 - pkg.metadata["REQUIRED_USE"],
33060 + pkg._metadata["REQUIRED_USE"],
33061 self._pkg_use_enabled(pkg),
33062 - pkg.iuse.is_valid_flag).tounicode()
33063 + pkg.iuse.is_valid_flag,
33064 + eapi=pkg.eapi).tounicode()
33065 writemsg(" %s\n" % \
33066 human_readable_required_use(reduced_noise),
33067 noiselevel=-1)
33068 normalized_required_use = \
33069 - " ".join(pkg.metadata["REQUIRED_USE"].split())
33070 + " ".join(pkg._metadata["REQUIRED_USE"].split())
33071 if reduced_noise != normalized_required_use:
33072 writemsg("\n The above constraints " + \
33073 "are a subset of the following complete expression:\n",
33074 @@ -3620,57 +4571,17 @@ class depgraph(object):
33075 not cp_exists and \
33076 self._frozen_config.myopts.get(
33077 "--misspell-suggestions", "y") != "n":
33078 - cp = myparent.atom.cp.lower()
33079 - cat, pkg = portage.catsplit(cp)
33080 - if cat == "null":
33081 - cat = None
33082
33083 writemsg("\nemerge: searching for similar names..."
33084 , noiselevel=-1)
33085
33086 - all_cp = set()
33087 - all_cp.update(vardb.cp_all())
33088 + dbs = [vardb]
33089 if "--usepkgonly" not in self._frozen_config.myopts:
33090 - all_cp.update(portdb.cp_all())
33091 + dbs.append(portdb)
33092 if "--usepkg" in self._frozen_config.myopts:
33093 - all_cp.update(bindb.cp_all())
33094 - # discard dir containing no ebuilds
33095 - all_cp.discard(cp)
33096 -
33097 - orig_cp_map = {}
33098 - for cp_orig in all_cp:
33099 - orig_cp_map.setdefault(cp_orig.lower(), []).append(cp_orig)
33100 - all_cp = set(orig_cp_map)
33101 -
33102 - if cat:
33103 - matches = difflib.get_close_matches(cp, all_cp)
33104 - else:
33105 - pkg_to_cp = {}
33106 - for other_cp in list(all_cp):
33107 - other_pkg = portage.catsplit(other_cp)[1]
33108 - if other_pkg == pkg:
33109 - # Check for non-identical package that
33110 - # differs only by upper/lower case.
33111 - identical = True
33112 - for cp_orig in orig_cp_map[other_cp]:
33113 - if portage.catsplit(cp_orig)[1] != \
33114 - portage.catsplit(atom.cp)[1]:
33115 - identical = False
33116 - break
33117 - if identical:
33118 - # discard dir containing no ebuilds
33119 - all_cp.discard(other_cp)
33120 - continue
33121 - pkg_to_cp.setdefault(other_pkg, set()).add(other_cp)
33122 - pkg_matches = difflib.get_close_matches(pkg, pkg_to_cp)
33123 - matches = []
33124 - for pkg_match in pkg_matches:
33125 - matches.extend(pkg_to_cp[pkg_match])
33126 + dbs.append(bindb)
33127
33128 - matches_orig_case = []
33129 - for cp in matches:
33130 - matches_orig_case.extend(orig_cp_map[cp])
33131 - matches = matches_orig_case
33132 + matches = similar_name_search(dbs, atom)
33133
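
The inline fuzzy matching removed above now lives in similar_name_search(); the underlying idea is difflib.get_close_matches over the known category/package names. A small standalone example (not from the patch) of that idea:

    import difflib

    known_cp = ["dev-lang/python", "dev-libs/openssl", "app-editors/vim",
                "www-client/firefox"]

    def suggest(misspelled, candidates, limit=3):
        # Compare lower-cased names so case typos still match.
        lowered = {cp.lower(): cp for cp in candidates}
        hits = difflib.get_close_matches(misspelled.lower(), list(lowered), limit)
        return [lowered[hit] for hit in hits]

    print(suggest("dev-lang/pyhton", known_cp))  # ['dev-lang/python']
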
33134 if len(matches) == 1:
33135 writemsg("\nemerge: Maybe you meant " + matches[0] + "?\n"
33136 @@ -3691,8 +4602,7 @@ class depgraph(object):
33137 dep_chain = self._get_dep_chain(myparent, atom)
33138 for node, node_type in dep_chain:
33139 msg.append('(dependency required by "%s" [%s])' % \
33140 - (colorize('INFORM', _unicode_decode("%s") % \
33141 - (node)), node_type))
33142 + (colorize('INFORM', "%s" % (node)), node_type))
33143
33144 if msg:
33145 writemsg("\n".join(msg), noiselevel=-1)
33146 @@ -3770,7 +4680,8 @@ class depgraph(object):
33147 # the newly built package still won't have the expected slot.
33148 # Therefore, assume that such SLOT dependencies are already
33149 # satisfied rather than forcing a rebuild.
33150 - if not matched_something and installed and atom.slot is not None:
33151 + if not matched_something and installed and \
33152 + atom.slot is not None and not atom.slot_operator_built:
33153
33154 if "remove" in self._dynamic_config.myparams:
33155 # We need to search the portdbapi, which is not in our
33156 @@ -3794,11 +4705,11 @@ class depgraph(object):
33157 for other_db, other_type, other_built, \
33158 other_installed, other_keys in dbs:
33159 try:
33160 - if atom.slot == \
33161 - other_db.aux_get(cpv, ["SLOT"])[0]:
33162 + if portage.dep._match_slot(atom,
33163 + other_db._pkg_str(_unicode(cpv), None)):
33164 slot_available = True
33165 break
33166 - except KeyError:
33167 + except (KeyError, InvalidData):
33168 pass
33169 if not slot_available:
33170 continue
33171 @@ -3810,12 +4721,12 @@ class depgraph(object):
33172 yield inst_pkg
33173 return
33174
33175 - def _select_pkg_highest_available(self, root, atom, onlydeps=False):
33176 + def _select_pkg_highest_available(self, root, atom, onlydeps=False, parent=None):
33177 cache_key = (root, atom, atom.unevaluated_atom, onlydeps, self._dynamic_config._autounmask)
33178 ret = self._dynamic_config._highest_pkg_cache.get(cache_key)
33179 if ret is not None:
33180 return ret
33181 - ret = self._select_pkg_highest_available_imp(root, atom, onlydeps=onlydeps)
33182 + ret = self._select_pkg_highest_available_imp(root, atom, onlydeps=onlydeps, parent=parent)
33183 self._dynamic_config._highest_pkg_cache[cache_key] = ret
33184 pkg, existing = ret
33185 if pkg is not None:
33186 @@ -3847,6 +4758,36 @@ class depgraph(object):
33187
33188 return not arg
33189
33190 + def _want_update_pkg(self, parent, pkg):
33191 +
33192 + if self._frozen_config.excluded_pkgs.findAtomForPackage(pkg,
33193 + modified_use=self._pkg_use_enabled(pkg)):
33194 + return False
33195 +
33196 + arg_atoms = None
33197 + try:
33198 + arg_atoms = list(self._iter_atoms_for_pkg(pkg))
33199 + except InvalidDependString:
33200 + if not pkg.installed:
33201 + # should have been masked before it was selected
33202 + raise
33203 +
33204 + depth = parent.depth or 0
33205 + depth += 1
33206 +
33207 + if arg_atoms:
33208 + for arg, atom in arg_atoms:
33209 + if arg.reset_depth:
33210 + depth = 0
33211 + break
33212 +
33213 + deep = self._dynamic_config.myparams.get("deep", 0)
33214 + update = "--update" in self._frozen_config.myopts
33215 +
33216 + return (not self._dynamic_config._complete_mode and
33217 + (arg_atoms or update) and
33218 + not (deep is not True and depth > deep))
33219 +
33220 def _equiv_ebuild_visible(self, pkg, autounmask_level=None):
33221 try:
33222 pkg_eb = self._pkg(
33223 @@ -3867,7 +4808,7 @@ class depgraph(object):
33224 return True
33225
33226 def _equiv_binary_installed(self, pkg):
33227 - build_time = pkg.metadata.get('BUILD_TIME')
33228 + build_time = pkg.build_time
33229 if not build_time:
33230 return False
33231
33232 @@ -3877,7 +4818,7 @@ class depgraph(object):
33233 except PackageNotFound:
33234 return False
33235
33236 - return build_time == inst_pkg.metadata.get('BUILD_TIME')
33237 + return build_time == inst_pkg.build_time
33238
33239 class _AutounmaskLevel(object):
33240 __slots__ = ("allow_use_changes", "allow_unstable_keywords", "allow_license_changes", \
33241 @@ -3898,8 +4839,9 @@ class depgraph(object):
33242 1. USE + license
33243 2. USE + ~arch + license
33244 3. USE + ~arch + license + missing keywords
33245 - 4. USE + ~arch + license + masks
33246 - 5. USE + ~arch + license + missing keywords + masks
33247 + 4. USE + license + masks
33248 + 5. USE + ~arch + license + masks
33249 + 6. USE + ~arch + license + missing keywords + masks
33250
33251 Some thoughts:
33252 * Do least invasive changes first.
33253 @@ -3919,15 +4861,25 @@ class depgraph(object):
33254 autounmask_level.allow_license_changes = True
33255 yield autounmask_level
33256
33257 - for only_use_changes in (False,):
33258 + autounmask_level.allow_unstable_keywords = True
33259 + yield autounmask_level
33260 +
33261 + if not autounmask_keep_masks:
33262
33263 - autounmask_level.allow_unstable_keywords = (not only_use_changes)
33264 - autounmask_level.allow_license_changes = (not only_use_changes)
33265 + autounmask_level.allow_missing_keywords = True
33266 + yield autounmask_level
33267
33268 - for missing_keyword, unmask in ((False,False), (True, False), (False, True), (True, True)):
33269 + # 4. USE + license + masks
33270 + # Try to respect keywords while discarding
33271 + # package.mask (see bug #463394).
33272 + autounmask_level.allow_unstable_keywords = False
33273 + autounmask_level.allow_missing_keywords = False
33274 + autounmask_level.allow_unmasks = True
33275 + yield autounmask_level
33276
33277 - if (only_use_changes or autounmask_keep_masks) and (missing_keyword or unmask):
33278 - break
33279 + autounmask_level.allow_unstable_keywords = True
33280 +
33281 + for missing_keyword, unmask in ((False, True), (True, True)):
33282
33283 autounmask_level.allow_missing_keywords = missing_keyword
33284 autounmask_level.allow_unmasks = unmask
33285 @@ -3935,33 +4887,42 @@ class depgraph(object):
33286 yield autounmask_level
33287
33288
33289 - def _select_pkg_highest_available_imp(self, root, atom, onlydeps=False):
33290 - pkg, existing = self._wrapped_select_pkg_highest_available_imp(root, atom, onlydeps=onlydeps)
33291 + def _select_pkg_highest_available_imp(self, root, atom, onlydeps=False, parent=None):
33292 + pkg, existing = self._wrapped_select_pkg_highest_available_imp(
33293 + root, atom, onlydeps=onlydeps, parent=parent)
33294
33295 default_selection = (pkg, existing)
33296
33297 - def reset_pkg(pkg):
33298 + if self._dynamic_config._autounmask is True:
33299 if pkg is not None and \
33300 pkg.installed and \
33301 not self._want_installed_pkg(pkg):
33302 pkg = None
33303
33304 - if self._dynamic_config._autounmask is True:
33305 - reset_pkg(pkg)
33306 + # Temporarily reset _need_restart state, in order to
33307 + # avoid interference as reported in bug #459832.
33308 + earlier_need_restart = self._dynamic_config._need_restart
33309 + self._dynamic_config._need_restart = False
33310 + try:
33311 + for autounmask_level in self._autounmask_levels():
33312 + if pkg is not None:
33313 + break
33314
33315 - for autounmask_level in self._autounmask_levels():
33316 - if pkg is not None:
33317 - break
33318 + pkg, existing = \
33319 + self._wrapped_select_pkg_highest_available_imp(
33320 + root, atom, onlydeps=onlydeps,
33321 + autounmask_level=autounmask_level, parent=parent)
33322
33323 - pkg, existing = \
33324 - self._wrapped_select_pkg_highest_available_imp(
33325 - root, atom, onlydeps=onlydeps,
33326 - autounmask_level=autounmask_level)
33327 + if pkg is not None and \
33328 + pkg.installed and \
33329 + not self._want_installed_pkg(pkg):
33330 + pkg = None
33331
33332 - reset_pkg(pkg)
33333 -
33334 - if self._dynamic_config._need_restart:
33335 - return None, None
33336 + if self._dynamic_config._need_restart:
33337 + return None, None
33338 + finally:
33339 + if earlier_need_restart:
33340 + self._dynamic_config._need_restart = True
33341
33342 if pkg is None:
33343 # This ensures that we can fall back to an installed package
33344 @@ -4091,25 +5052,29 @@ class depgraph(object):
33345 new_changes = {}
33346
33347 for flag, state in target_use.items():
33348 + real_flag = pkg.iuse.get_real_flag(flag)
33349 + if real_flag is None:
33350 + # Triggered by use-dep defaults.
33351 + continue
33352 if state:
33353 - if flag not in old_use:
33354 - if new_changes.get(flag) == False:
33355 + if real_flag not in old_use:
33356 + if new_changes.get(real_flag) == False:
33357 return old_use
33358 - new_changes[flag] = True
33359 + new_changes[real_flag] = True
33360 new_use.add(flag)
33361 else:
33362 - if flag in old_use:
33363 - if new_changes.get(flag) == True:
33364 + if real_flag in old_use:
33365 + if new_changes.get(real_flag) == True:
33366 return old_use
33367 - new_changes[flag] = False
33368 + new_changes[real_flag] = False
33369 new_use.update(old_use.difference(target_use))
33370
33371 def want_restart_for_use_change(pkg, new_use):
33372 if pkg not in self._dynamic_config.digraph.nodes:
33373 return False
33374
33375 - for key in "DEPEND", "RDEPEND", "PDEPEND", "LICENSE":
33376 - dep = pkg.metadata[key]
33377 + for key in Package._dep_keys + ("LICENSE",):
33378 + dep = pkg._metadata[key]
33379 old_val = set(portage.dep.use_reduce(dep, pkg.use.enabled, is_valid_flag=pkg.iuse.is_valid_flag, flat=True))
33380 new_val = set(portage.dep.use_reduce(dep, new_use, is_valid_flag=pkg.iuse.is_valid_flag, flat=True))
33381
33382 @@ -4132,9 +5097,11 @@ class depgraph(object):
33383
33384 if new_changes != old_changes:
33385 #Don't do the change if it violates REQUIRED_USE.
33386 - required_use = pkg.metadata.get("REQUIRED_USE")
33387 - if required_use and check_required_use(required_use, old_use, pkg.iuse.is_valid_flag) and \
33388 - not check_required_use(required_use, new_use, pkg.iuse.is_valid_flag):
33389 + required_use = pkg._metadata.get("REQUIRED_USE")
33390 + if required_use and check_required_use(required_use, old_use,
33391 + pkg.iuse.is_valid_flag, eapi=pkg.eapi) and \
33392 + not check_required_use(required_use, new_use,
33393 + pkg.iuse.is_valid_flag, eapi=pkg.eapi):
33394 return old_use
33395
33396 if any(x in pkg.use.mask for x in new_changes) or \
33397 @@ -4150,14 +5117,13 @@ class depgraph(object):
33398 self._dynamic_config._need_restart = True
33399 return new_use
33400
33401 - def _wrapped_select_pkg_highest_available_imp(self, root, atom, onlydeps=False, autounmask_level=None):
33402 + def _wrapped_select_pkg_highest_available_imp(self, root, atom, onlydeps=False, autounmask_level=None, parent=None):
33403 root_config = self._frozen_config.roots[root]
33404 pkgsettings = self._frozen_config.pkgsettings[root]
33405 dbs = self._dynamic_config._filtered_trees[root]["dbs"]
33406 vardb = self._frozen_config.roots[root].trees["vartree"].dbapi
33407 # List of acceptable packages, ordered by type preference.
33408 matched_packages = []
33409 - matched_pkgs_ignore_use = []
33410 highest_version = None
33411 if not isinstance(atom, portage.dep.Atom):
33412 atom = portage.dep.Atom(atom)
33413 @@ -4209,7 +5175,7 @@ class depgraph(object):
33414 # Ignore USE deps for the initial match since we want to
33415 # ensure that updates aren't missed solely due to the user's
33416 # USE configuration.
33417 - for pkg in self._iter_match_pkgs(root_config, pkg_type, atom.without_use,
33418 + for pkg in self._iter_match_pkgs(root_config, pkg_type, atom.without_use,
33419 onlydeps=onlydeps):
33420 if pkg.cp != atom_cp and have_new_virt:
33421 # pull in a new-style virtual instead
33422 @@ -4295,8 +5261,8 @@ class depgraph(object):
33423 for selected_pkg in matched_packages:
33424 if selected_pkg.type_name == "binary" and \
33425 selected_pkg.cpv == pkg.cpv and \
33426 - selected_pkg.metadata.get('BUILD_TIME') == \
33427 - pkg.metadata.get('BUILD_TIME'):
33428 + selected_pkg.build_time == \
33429 + pkg.build_time:
33430 identical_binary = True
33431 break
33432
33433 @@ -4339,7 +5305,6 @@ class depgraph(object):
33434
33435 if atom.use:
33436
33437 - matched_pkgs_ignore_use.append(pkg)
33438 if autounmask_level and autounmask_level.allow_use_changes and not pkg.built:
33439 target_use = {}
33440 for flag in atom.use.enabled:
33441 @@ -4352,8 +5317,11 @@ class depgraph(object):
33442
33443 use_match = True
33444 can_adjust_use = not pkg.built
33445 - missing_enabled = atom.use.missing_enabled.difference(pkg.iuse.all)
33446 - missing_disabled = atom.use.missing_disabled.difference(pkg.iuse.all)
33447 + is_valid_flag = pkg.iuse.is_valid_flag
33448 + missing_enabled = frozenset(x for x in
33449 + atom.use.missing_enabled if not is_valid_flag(x))
33450 + missing_disabled = frozenset(x for x in
33451 + atom.use.missing_disabled if not is_valid_flag(x))
33452
33453 if atom.use.enabled:
33454 if any(x in atom.use.enabled for x in missing_disabled):
33455 @@ -4406,7 +5374,9 @@ class depgraph(object):
33456 # will always end with a break statement below
33457 # this point.
33458 if find_existing_node:
33459 - e_pkg = self._dynamic_config._slot_pkg_map[root].get(pkg.slot_atom)
33460 + e_pkg = next(self._dynamic_config._package_tracker.match(
33461 + root, pkg.slot_atom, installed=False), None)
33462 +
33463 if not e_pkg:
33464 break
33465
33466 @@ -4427,50 +5397,56 @@ class depgraph(object):
33467 break
33468 # Compare built package to current config and
33469 # reject the built package if necessary.
33470 - if built and not useoldpkg and (not installed or matched_pkgs_ignore_use) and \
33471 - ("--newuse" in self._frozen_config.myopts or \
33472 - "--reinstall" in self._frozen_config.myopts or \
33473 - (not installed and self._dynamic_config.myparams.get(
33474 - "binpkg_respect_use") in ("y", "auto"))):
33475 - iuses = pkg.iuse.all
33476 - old_use = self._pkg_use_enabled(pkg)
33477 - if myeb:
33478 - pkgsettings.setcpv(myeb)
33479 - else:
33480 - pkgsettings.setcpv(pkg)
33481 - now_use = pkgsettings["PORTAGE_USE"].split()
33482 - forced_flags = set()
33483 - forced_flags.update(pkgsettings.useforce)
33484 - forced_flags.update(pkgsettings.usemask)
33485 - cur_iuse = iuses
33486 - if myeb and not usepkgonly and not useoldpkg:
33487 - cur_iuse = myeb.iuse.all
33488 - reinstall_for_flags = self._reinstall_for_flags(pkg,
33489 - forced_flags, old_use, iuses, now_use, cur_iuse)
33490 - if reinstall_for_flags:
33491 - if not pkg.installed:
33492 - self._dynamic_config.ignored_binaries.setdefault(pkg, set()).update(reinstall_for_flags)
33493 + reinstall_use = ("--newuse" in self._frozen_config.myopts or \
33494 + "--reinstall" in self._frozen_config.myopts)
33495 + respect_use = self._dynamic_config.myparams.get("binpkg_respect_use") in ("y", "auto")
33496 + if built and not useoldpkg and \
33497 + (not installed or matched_packages) and \
33498 + not (installed and
33499 + self._frozen_config.excluded_pkgs.findAtomForPackage(pkg,
33500 + modified_use=self._pkg_use_enabled(pkg))):
33501 + if myeb and "--newrepo" in self._frozen_config.myopts and myeb.repo != pkg.repo:
33502 break
33503 + elif reinstall_use or (not installed and respect_use):
33504 + iuses = pkg.iuse.all
33505 + old_use = self._pkg_use_enabled(pkg)
33506 + if myeb:
33507 + pkgsettings.setcpv(myeb)
33508 + else:
33509 + pkgsettings.setcpv(pkg)
33510 + now_use = pkgsettings["PORTAGE_USE"].split()
33511 + forced_flags = set()
33512 + forced_flags.update(pkgsettings.useforce)
33513 + forced_flags.update(pkgsettings.usemask)
33514 + cur_iuse = iuses
33515 + if myeb and not usepkgonly and not useoldpkg:
33516 + cur_iuse = myeb.iuse.all
33517 + reinstall_for_flags = self._reinstall_for_flags(pkg,
33518 + forced_flags, old_use, iuses, now_use, cur_iuse)
33519 + if reinstall_for_flags:
33520 + if not pkg.installed:
33521 + self._dynamic_config.ignored_binaries.setdefault(pkg, set()).update(reinstall_for_flags)
33522 + break
33523 # Compare current config to installed package
33524 # and do not reinstall if possible.
33525 - if not installed and not useoldpkg and \
33526 - ("--newuse" in self._frozen_config.myopts or \
33527 - "--reinstall" in self._frozen_config.myopts) and \
33528 - cpv in vardb.match(atom):
33529 - forced_flags = set()
33530 - forced_flags.update(pkg.use.force)
33531 - forced_flags.update(pkg.use.mask)
33532 + if not installed and not useoldpkg and cpv in vardb.match(atom):
33533 inst_pkg = vardb.match_pkgs('=' + pkg.cpv)[0]
33534 - old_use = inst_pkg.use.enabled
33535 - old_iuse = inst_pkg.iuse.all
33536 - cur_use = self._pkg_use_enabled(pkg)
33537 - cur_iuse = pkg.iuse.all
33538 - reinstall_for_flags = \
33539 - self._reinstall_for_flags(pkg,
33540 - forced_flags, old_use, old_iuse,
33541 - cur_use, cur_iuse)
33542 - if reinstall_for_flags:
33543 + if "--newrepo" in self._frozen_config.myopts and pkg.repo != inst_pkg.repo:
33544 reinstall = True
33545 + elif reinstall_use:
33546 + forced_flags = set()
33547 + forced_flags.update(pkg.use.force)
33548 + forced_flags.update(pkg.use.mask)
33549 + old_use = inst_pkg.use.enabled
33550 + old_iuse = inst_pkg.iuse.all
33551 + cur_use = self._pkg_use_enabled(pkg)
33552 + cur_iuse = pkg.iuse.all
33553 + reinstall_for_flags = \
33554 + self._reinstall_for_flags(pkg,
33555 + forced_flags, old_use, old_iuse,
33556 + cur_use, cur_iuse)
33557 + if reinstall_for_flags:
33558 + reinstall = True
33559 if reinstall_atoms.findAtomForPackage(pkg, \
33560 modified_use=self._pkg_use_enabled(pkg)):
33561 reinstall = True
33562 @@ -4512,6 +5488,26 @@ class depgraph(object):
33563 return existing_node, existing_node
33564
33565 if len(matched_packages) > 1:
33566 + if parent is not None and \
33567 + (parent.root, parent.slot_atom) in self._dynamic_config._slot_operator_replace_installed:
33568 + # We're forcing a rebuild of the parent because we missed an
33569 + # update due to a slot operator dep.
33570 + if atom.slot_operator == "=" and atom.sub_slot is None:
33571 + # This one is a slot operator dep. Exclude the installed packages if a newer non-installed
33572 + # pkg exists.
33573 + highest_installed = None
33574 + for pkg in matched_packages:
33575 + if pkg.installed:
33576 + if highest_installed is None or pkg.version > highest_installed.version:
33577 + highest_installed = pkg
33578 +
33579 + if highest_installed:
33580 + non_installed = [pkg for pkg in matched_packages \
33581 + if not pkg.installed and pkg.version > highest_installed.version]
33582 +
33583 + if non_installed:
33584 + matched_packages = non_installed
33585 +
33586 if rebuilt_binaries:
33587 inst_pkg = None
33588 built_pkg = None
33589 @@ -4529,15 +5525,8 @@ class depgraph(object):
33590 # non-empty, in order to avoid cases like
33591 # bug #306659 where BUILD_TIME fields are missing
33592 # in local and/or remote Packages file.
33593 - try:
33594 - built_timestamp = int(built_pkg.metadata['BUILD_TIME'])
33595 - except (KeyError, ValueError):
33596 - built_timestamp = 0
33597 -
33598 - try:
33599 - installed_timestamp = int(inst_pkg.metadata['BUILD_TIME'])
33600 - except (KeyError, ValueError):
33601 - installed_timestamp = 0
33602 + built_timestamp = built_pkg.build_time
33603 + installed_timestamp = inst_pkg.build_time
33604
33605 if unbuilt_pkg is not None and unbuilt_pkg > built_pkg:
33606 pass
33607 @@ -4584,7 +5573,7 @@ class depgraph(object):
33608 # ordered by type preference ("ebuild" type is the last resort)
33609 return matched_packages[-1], existing_node
33610
33611 - def _select_pkg_from_graph(self, root, atom, onlydeps=False):
33612 + def _select_pkg_from_graph(self, root, atom, onlydeps=False, parent=None):
33613 """
33614 Select packages that have already been added to the graph or
33615 those that are installed and have not been scheduled for
33616 @@ -4594,11 +5583,18 @@ class depgraph(object):
33617 matches = graph_db.match_pkgs(atom)
33618 if not matches:
33619 return None, None
33620 - pkg = matches[-1] # highest match
33621 - in_graph = self._dynamic_config._slot_pkg_map[root].get(pkg.slot_atom)
33622 - return pkg, in_graph
33623
33624 - def _select_pkg_from_installed(self, root, atom, onlydeps=False):
33625 + # There may be multiple matches, and they may
33626 + # conflict with each other, so choose the highest
33627 + # version that has already been added to the graph.
33628 + for pkg in reversed(matches):
33629 + if pkg in self._dynamic_config.digraph:
33630 + return pkg, pkg
33631 +
33632 + # Fall back to installed packages
33633 + return self._select_pkg_from_installed(root, atom, onlydeps=onlydeps, parent=parent)
33634 +
33635 + def _select_pkg_from_installed(self, root, atom, onlydeps=False, parent=None):
33636 """
33637 Select packages that are installed.
33638 """
33639 @@ -4621,8 +5617,18 @@ class depgraph(object):
33640 unmasked = [pkg for pkg in matches if not pkg.masks]
33641 if unmasked:
33642 matches = unmasked
33643 + if len(matches) > 1:
33644 + # Now account for packages for which existing
33645 + # ebuilds are masked or unavailable (bug #445506).
33646 + unmasked = [pkg for pkg in matches if
33647 + self._equiv_ebuild_visible(pkg)]
33648 + if unmasked:
33649 + matches = unmasked
33650 +
33651 pkg = matches[-1] # highest match
33652 - in_graph = self._dynamic_config._slot_pkg_map[root].get(pkg.slot_atom)
33653 + in_graph = next(self._dynamic_config._package_tracker.match(
33654 + root, pkg.slot_atom, installed=False), None)
33655 +
33656 return pkg, in_graph
33657
33658 def _complete_graph(self, required_sets=None):
33659 @@ -4649,9 +5655,9 @@ class depgraph(object):
33660 "complete_if_new_use", "y") == "y"
33661 complete_if_new_ver = self._dynamic_config.myparams.get(
33662 "complete_if_new_ver", "y") == "y"
33663 - rebuild_if_new_slot_abi = self._dynamic_config.myparams.get(
33664 - "rebuild_if_new_slot_abi", "y") == "y"
33665 - complete_if_new_slot = rebuild_if_new_slot_abi
33666 + rebuild_if_new_slot = self._dynamic_config.myparams.get(
33667 + "rebuild_if_new_slot", "y") == "y"
33668 + complete_if_new_slot = rebuild_if_new_slot
33669
33670 if "complete" not in self._dynamic_config.myparams and \
33671 (complete_if_new_use or
33672 @@ -4670,10 +5676,16 @@ class depgraph(object):
33673 inst_pkg = vardb.match_pkgs(node.slot_atom)
33674 if inst_pkg and inst_pkg[0].cp == node.cp:
33675 inst_pkg = inst_pkg[0]
33676 - if complete_if_new_ver and \
33677 - (inst_pkg < node or node < inst_pkg):
33678 - version_change = True
33679 - break
33680 + if complete_if_new_ver:
33681 + if inst_pkg < node or node < inst_pkg:
33682 + version_change = True
33683 + break
33684 + elif not (inst_pkg.slot == node.slot and
33685 + inst_pkg.sub_slot == node.sub_slot):
33686 + # slot/sub-slot change without revbump gets
33687 + # similar treatment to a version change
33688 + version_change = True
33689 + break
33690
33691 # Intersect enabled USE with IUSE, in order to
33692 # ignore forced USE from implicit IUSE flags, since
33693 @@ -4689,7 +5701,8 @@ class depgraph(object):
33694 if complete_if_new_slot:
33695 cp_list = vardb.match_pkgs(Atom(node.cp))
33696 if (cp_list and cp_list[0].cp == node.cp and
33697 - not any(node.slot == pkg.slot for pkg in cp_list)):
33698 + not any(node.slot == pkg.slot and
33699 + node.sub_slot == pkg.sub_slot for pkg in cp_list)):
33700 version_change = True
33701 break
33702
33703 @@ -4795,7 +5808,7 @@ class depgraph(object):
33704 return 0
33705 return 1
33706
33707 - def _pkg(self, cpv, type_name, root_config, installed=False,
33708 + def _pkg(self, cpv, type_name, root_config, installed=False,
33709 onlydeps=False, myrepo = None):
33710 """
33711 Get a package instance from the cache, or create a new
33712 @@ -4813,10 +5826,14 @@ class depgraph(object):
33713 installed=installed, onlydeps=onlydeps))
33714 if pkg is None and onlydeps and not installed:
33715 # Maybe it already got pulled in as a "merge" node.
33716 - pkg = self._dynamic_config.mydbapi[root_config.root].get(
33717 - Package._gen_hash_key(cpv=cpv, type_name=type_name,
33718 - repo_name=myrepo, root_config=root_config,
33719 - installed=installed, onlydeps=False))
33720 + for candidate in self._dynamic_config._package_tracker.match(
33721 + root_config.root, Atom("="+cpv)):
33722 + if candidate.type_name == type_name and \
33723 + candidate.repo_name == myrepo and \
33724 + candidate.root_config is root_config and \
33725 + candidate.installed == installed and \
33726 + not candidate.onlydeps:
33727 + pkg = candidate
33728
33729 if pkg is None:
33730 tree_type = self.pkg_tree_map[type_name]
33731 @@ -4866,7 +5883,7 @@ class depgraph(object):
33732 # For installed packages, always ignore blockers from DEPEND since
33733 # only runtime dependencies should be relevant for packages that
33734 # are already built.
33735 - dep_keys = ["RDEPEND", "PDEPEND"]
33736 + dep_keys = Package._runtime_keys
33737 for myroot in self._frozen_config.trees:
33738
33739 if self._frozen_config.myopts.get("--root-deps") is not None and \
33740 @@ -4876,7 +5893,8 @@ class depgraph(object):
33741 vardb = self._frozen_config.trees[myroot]["vartree"].dbapi
33742 pkgsettings = self._frozen_config.pkgsettings[myroot]
33743 root_config = self._frozen_config.roots[myroot]
33744 - final_db = self._dynamic_config.mydbapi[myroot]
33745 + final_db = PackageTrackerDbapiWrapper(
33746 + myroot, self._dynamic_config._package_tracker)
33747
33748 blocker_cache = BlockerCache(myroot, vardb)
33749 stale_cache = set(blocker_cache)
33750 @@ -4893,7 +5911,7 @@ class depgraph(object):
33751 # the merge process or by --depclean. Always warn about
33752 # packages masked by license, since the user likely wants
33753 # to adjust ACCEPT_LICENSE.
33754 - if pkg in final_db:
33755 + if pkg in self._dynamic_config._package_tracker:
33756 if not self._pkg_visibility_check(pkg,
33757 trust_graph=False) and \
33758 (pkg_in_graph or 'LICENSE' in pkg.masks):
33759 @@ -4928,7 +5946,7 @@ class depgraph(object):
33760 self._spinner_update()
33761 blocker_data = blocker_cache.get(cpv)
33762 if blocker_data is not None and \
33763 - blocker_data.counter != long(pkg.metadata["COUNTER"]):
33764 + blocker_data.counter != pkg.counter:
33765 blocker_data = None
33766
33767 # If blocker data from the graph is available, use
33768 @@ -4945,9 +5963,8 @@ class depgraph(object):
33769 blockers is not None:
33770 # Re-use the blockers from the graph.
33771 blocker_atoms = sorted(blockers)
33772 - counter = long(pkg.metadata["COUNTER"])
33773 blocker_data = \
33774 - blocker_cache.BlockerData(counter, blocker_atoms)
33775 + blocker_cache.BlockerData(pkg.counter, blocker_atoms)
33776 blocker_cache[pkg.cpv] = blocker_data
33777 continue
33778
33779 @@ -4972,13 +5989,14 @@ class depgraph(object):
33780 # matches (this can happen if an atom lacks a
33781 # category).
33782 show_invalid_depstring_notice(
33783 - pkg, depstr, _unicode_decode("%s") % (e,))
33784 + pkg, depstr, "%s" % (e,))
33785 del e
33786 raise
33787 if not success:
33788 - replacement_pkg = final_db.match_pkgs(pkg.slot_atom)
33789 - if replacement_pkg and \
33790 - replacement_pkg[0].operation == "merge":
33791 + replacement_pkgs = self._dynamic_config._package_tracker.match(
33792 + myroot, pkg.slot_atom)
33793 + if any(replacement_pkg[0].operation == "merge" for \
33794 + replacement_pkg in replacement_pkgs):
33795 # This package is being replaced anyway, so
33796 # ignore invalid dependencies so as not to
33797 # annoy the user too much (otherwise they'd be
33798 @@ -4989,22 +6007,20 @@ class depgraph(object):
33799 blocker_atoms = [myatom for myatom in atoms \
33800 if myatom.blocker]
33801 blocker_atoms.sort()
33802 - counter = long(pkg.metadata["COUNTER"])
33803 blocker_cache[cpv] = \
33804 - blocker_cache.BlockerData(counter, blocker_atoms)
33805 + blocker_cache.BlockerData(pkg.counter, blocker_atoms)
33806 if blocker_atoms:
33807 try:
33808 for atom in blocker_atoms:
33809 blocker = Blocker(atom=atom,
33810 - eapi=pkg.metadata["EAPI"],
33811 + eapi=pkg.eapi,
33812 priority=self._priority(runtime=True),
33813 root=myroot)
33814 self._dynamic_config._blocker_parents.add(blocker, pkg)
33815 except portage.exception.InvalidAtom as e:
33816 depstr = " ".join(vardb.aux_get(pkg.cpv, dep_keys))
33817 show_invalid_depstring_notice(
33818 - pkg, depstr,
33819 - _unicode_decode("Invalid Atom: %s") % (e,))
33820 + pkg, depstr, "Invalid Atom: %s" % (e,))
33821 return False
33822 for cpv in stale_cache:
33823 del blocker_cache[cpv]
33824 @@ -5025,8 +6041,7 @@ class depgraph(object):
33825 virtuals = root_config.settings.getvirtuals()
33826 myroot = blocker.root
33827 initial_db = self._frozen_config.trees[myroot]["vartree"].dbapi
33828 - final_db = self._dynamic_config.mydbapi[myroot]
33829 -
33830 +
33831 provider_virtual = False
33832 if blocker.cp in virtuals and \
33833 not self._have_new_virt(blocker.root, blocker.cp):
33834 @@ -5053,7 +6068,7 @@ class depgraph(object):
33835
33836 blocked_final = set()
33837 for atom in atoms:
33838 - for pkg in final_db.match_pkgs(atom):
33839 + for pkg in self._dynamic_config._package_tracker.match(myroot, atom):
33840 if atom_set.findAtomForPackage(pkg, modified_use=self._pkg_use_enabled(pkg)):
33841 blocked_final.add(pkg)
33842
33843 @@ -5137,7 +6152,7 @@ class depgraph(object):
33844 for inst_pkg, inst_task in depends_on_order:
33845 uninst_task = Package(built=inst_pkg.built,
33846 cpv=inst_pkg.cpv, installed=inst_pkg.installed,
33847 - metadata=inst_pkg.metadata,
33848 + metadata=inst_pkg._metadata,
33849 operation="uninstall",
33850 root_config=inst_pkg.root_config,
33851 type_name=inst_pkg.type_name)
33852 @@ -5203,7 +6218,12 @@ class depgraph(object):
33853
33854 mygraph.order.sort(key=cmp_sort_key(cmp_merge_preference))
33855
33856 - def altlist(self, reversed=False):
33857 + def altlist(self, reversed=DeprecationWarning):
33858 +
33859 + if reversed is not DeprecationWarning:
33860 + warnings.warn("The reversed parameter of "
33861 + "_emerge.depgraph.depgraph.altlist() is deprecated",
33862 + DeprecationWarning, stacklevel=2)
33863
33864 while self._dynamic_config._serialized_tasks_cache is None:
33865 self._resolve_conflicts()
33866 @@ -5213,9 +6233,13 @@ class depgraph(object):
33867 except self._serialize_tasks_retry:
33868 pass
33869
33870 - retlist = self._dynamic_config._serialized_tasks_cache[:]
33871 - if reversed:
33872 + retlist = self._dynamic_config._serialized_tasks_cache
33873 + if reversed is not DeprecationWarning and reversed:
33874 + # TODO: remove the "reversed" parameter (builtin name collision)
33875 + retlist = list(retlist)
33876 retlist.reverse()
33877 + retlist = tuple(retlist)
33878 +
33879 return retlist
33880
33881 def _implicit_libc_deps(self, mergelist, graph):
33882 @@ -5226,19 +6250,15 @@ class depgraph(object):
33883 libc_pkgs = {}
33884 implicit_libc_roots = (self._frozen_config._running_root.root,)
33885 for root in implicit_libc_roots:
33886 - graphdb = self._dynamic_config.mydbapi[root]
33887 vardb = self._frozen_config.trees[root]["vartree"].dbapi
33888 for atom in self._expand_virt_from_graph(root,
33889 portage.const.LIBC_PACKAGE_ATOM):
33890 if atom.blocker:
33891 continue
33892 - match = graphdb.match_pkgs(atom)
33893 - if not match:
33894 - continue
33895 - pkg = match[-1]
33896 - if pkg.operation == "merge" and \
33897 - not vardb.cpv_exists(pkg.cpv):
33898 - libc_pkgs.setdefault(pkg.root, set()).add(pkg)
33899 + for pkg in self._dynamic_config._package_tracker.match(root, atom):
33900 + if pkg.operation == "merge" and \
33901 + not vardb.cpv_exists(pkg.cpv):
33902 + libc_pkgs.setdefault(pkg.root, set()).add(pkg)
33903
33904 if not libc_pkgs:
33905 return
33906 @@ -5326,7 +6346,7 @@ class depgraph(object):
33907
33908 if "complete" not in self._dynamic_config.myparams and \
33909 self._dynamic_config._allow_backtracking and \
33910 - self._dynamic_config._slot_collision_nodes and \
33911 + any(self._dynamic_config._package_tracker.slot_conflicts()) and \
33912 not self._accept_blocker_conflicts():
33913 self._dynamic_config.myparams["complete"] = True
33914
33915 @@ -5335,10 +6355,13 @@ class depgraph(object):
33916
33917 self._process_slot_conflicts()
33918
33919 - self._slot_abi_trigger_reinstalls()
33920 + if self._dynamic_config._allow_backtracking:
33921 + self._slot_operator_trigger_reinstalls()
33922
33923 if not self._validate_blockers():
33924 - self._dynamic_config._skip_restart = True
33925 + # Blockers don't trigger the _skip_restart flag, since
33926 + # backtracking may solve blockers when it solves slot
33927 + # conflicts (or by blind luck).
33928 raise self._unknown_internal_error()
33929
33930 def _serialize_tasks(self):
33931 @@ -5436,8 +6459,8 @@ class depgraph(object):
33932 initial_atoms=[PORTAGE_PACKAGE_ATOM])
33933 running_portage = self._frozen_config.trees[running_root]["vartree"].dbapi.match_pkgs(
33934 PORTAGE_PACKAGE_ATOM)
33935 - replacement_portage = self._dynamic_config.mydbapi[running_root].match_pkgs(
33936 - PORTAGE_PACKAGE_ATOM)
33937 + replacement_portage = list(self._dynamic_config._package_tracker.match(
33938 + running_root, Atom(PORTAGE_PACKAGE_ATOM)))
33939
33940 if running_portage:
33941 running_portage = running_portage[0]
33942 @@ -5455,7 +6478,7 @@ class depgraph(object):
33943 if running_portage is not None:
33944 try:
33945 portage_rdepend = self._select_atoms_highest_available(
33946 - running_root, running_portage.metadata["RDEPEND"],
33947 + running_root, running_portage._metadata["RDEPEND"],
33948 myuse=self._pkg_use_enabled(running_portage),
33949 parent=running_portage, strict=False)
33950 except portage.exception.InvalidDependString as e:
33951 @@ -5474,18 +6497,15 @@ class depgraph(object):
33952 for root in implicit_libc_roots:
33953 libc_pkgs = set()
33954 vardb = self._frozen_config.trees[root]["vartree"].dbapi
33955 - graphdb = self._dynamic_config.mydbapi[root]
33956 for atom in self._expand_virt_from_graph(root,
33957 portage.const.LIBC_PACKAGE_ATOM):
33958 if atom.blocker:
33959 continue
33960 - match = graphdb.match_pkgs(atom)
33961 - if not match:
33962 - continue
33963 - pkg = match[-1]
33964 - if pkg.operation == "merge" and \
33965 - not vardb.cpv_exists(pkg.cpv):
33966 - libc_pkgs.add(pkg)
33967 +
33968 + for pkg in self._dynamic_config._package_tracker.match(root, atom):
33969 + if pkg.operation == "merge" and \
33970 + not vardb.cpv_exists(pkg.cpv):
33971 + libc_pkgs.add(pkg)
33972
33973 if libc_pkgs:
33974 # If there's also an os-headers upgrade, we need to
33975 @@ -5494,13 +6514,11 @@ class depgraph(object):
33976 portage.const.OS_HEADERS_PACKAGE_ATOM):
33977 if atom.blocker:
33978 continue
33979 - match = graphdb.match_pkgs(atom)
33980 - if not match:
33981 - continue
33982 - pkg = match[-1]
33983 - if pkg.operation == "merge" and \
33984 - not vardb.cpv_exists(pkg.cpv):
33985 - asap_nodes.append(pkg)
33986 +
33987 + for pkg in self._dynamic_config._package_tracker.match(root, atom):
33988 + if pkg.operation == "merge" and \
33989 + not vardb.cpv_exists(pkg.cpv):
33990 + asap_nodes.append(pkg)
33991
33992 asap_nodes.extend(libc_pkgs)
33993
33994 @@ -5803,8 +6821,7 @@ class depgraph(object):
33995 other_version = None
33996 for pkg in vardb.match_pkgs(atom):
33997 if pkg.cpv == task.cpv and \
33998 - pkg.metadata["COUNTER"] == \
33999 - task.metadata["COUNTER"]:
34000 + pkg.counter == task.counter:
34001 continue
34002 other_version = pkg
34003 break
34004 @@ -5843,13 +6860,12 @@ class depgraph(object):
34005 # For packages in the world set, go ahead and uninstall
34006 # when necessary, as long as the atom will be satisfied
34007 # in the final state.
34008 - graph_db = self._dynamic_config.mydbapi[task.root]
34009 skip = False
34010 try:
34011 for atom in root_config.sets[
34012 "selected"].iterAtomsForPackage(task):
34013 satisfied = False
34014 - for pkg in graph_db.match_pkgs(atom):
34015 + for pkg in self._dynamic_config._package_tracker.match(task.root, atom):
34016 if pkg == inst_pkg:
34017 continue
34018 satisfied = True
34019 @@ -5931,12 +6947,11 @@ class depgraph(object):
34020 # node unnecessary (due to occupying the same SLOT),
34021 # and we want to avoid executing a separate uninstall
34022 # task in that case.
34023 - slot_node = self._dynamic_config.mydbapi[uninst_task.root
34024 - ].match_pkgs(uninst_task.slot_atom)
34025 - if slot_node and \
34026 - slot_node[0].operation == "merge":
34027 - mygraph.add(slot_node[0], uninst_task,
34028 - priority=BlockerDepPriority.instance)
34029 + for slot_node in self._dynamic_config._package_tracker.match(
34030 + uninst_task.root, uninst_task.slot_atom):
34031 + if slot_node.operation == "merge":
34032 + mygraph.add(slot_node, uninst_task,
34033 + priority=BlockerDepPriority.instance)
34034
34035 # Reset the state variables for leaf node selection and
34036 # continue trying to select leaf nodes.
34037 @@ -6011,7 +7026,7 @@ class depgraph(object):
34038 inst_pkg = inst_pkg[0]
34039 uninst_task = Package(built=inst_pkg.built,
34040 cpv=inst_pkg.cpv, installed=inst_pkg.installed,
34041 - metadata=inst_pkg.metadata,
34042 + metadata=inst_pkg._metadata,
34043 operation="uninstall",
34044 root_config=inst_pkg.root_config,
34045 type_name=inst_pkg.type_name)
34046 @@ -6083,17 +7098,22 @@ class depgraph(object):
34047 for blocker in unsolvable_blockers:
34048 retlist.append(blocker)
34049
34050 + retlist = tuple(retlist)
34051 +
34052 if unsolvable_blockers and \
34053 not self._accept_blocker_conflicts():
34054 self._dynamic_config._unsatisfied_blockers_for_display = unsolvable_blockers
34055 - self._dynamic_config._serialized_tasks_cache = retlist[:]
34056 + self._dynamic_config._serialized_tasks_cache = retlist
34057 self._dynamic_config._scheduler_graph = scheduler_graph
34058 - self._dynamic_config._skip_restart = True
34059 + # Blockers don't trigger the _skip_restart flag, since
34060 + # backtracking may solve blockers when it solves slot
34061 + # conflicts (or by blind luck).
34062 raise self._unknown_internal_error()
34063
34064 - if self._dynamic_config._slot_collision_info and \
34065 + have_slot_conflict = any(self._dynamic_config._package_tracker.slot_conflicts())
34066 + if have_slot_conflict and \
34067 not self._accept_blocker_conflicts():
34068 - self._dynamic_config._serialized_tasks_cache = retlist[:]
34069 + self._dynamic_config._serialized_tasks_cache = retlist
34070 self._dynamic_config._scheduler_graph = scheduler_graph
34071 raise self._unknown_internal_error()
34072
34073 @@ -6147,13 +7167,8 @@ class depgraph(object):
34074 def _show_merge_list(self):
34075 if self._dynamic_config._serialized_tasks_cache is not None and \
34076 not (self._dynamic_config._displayed_list is not None and \
34077 - (self._dynamic_config._displayed_list == self._dynamic_config._serialized_tasks_cache or \
34078 - self._dynamic_config._displayed_list == \
34079 - list(reversed(self._dynamic_config._serialized_tasks_cache)))):
34080 - display_list = self._dynamic_config._serialized_tasks_cache[:]
34081 - if "--tree" in self._frozen_config.myopts:
34082 - display_list.reverse()
34083 - self.display(display_list)
34084 + self._dynamic_config._displayed_list is self._dynamic_config._serialized_tasks_cache):
34085 + self.display(self._dynamic_config._serialized_tasks_cache)
34086
34087 def _show_unsatisfied_blockers(self, blockers):
34088 self._show_merge_list()
34089 @@ -6175,6 +7190,18 @@ class depgraph(object):
34090 for blocker in blockers:
34091 for pkg in chain(self._dynamic_config._blocked_pkgs.child_nodes(blocker), \
34092 self._dynamic_config._blocker_parents.parent_nodes(blocker)):
34093 +
34094 + is_slot_conflict_pkg = False
34095 + for conflict in self._dynamic_config._package_tracker.slot_conflicts():
34096 + if conflict.root == pkg.root and conflict.atom == pkg.slot_atom:
34097 + is_slot_conflict_pkg = True
34098 + break
34099 + if is_slot_conflict_pkg:
34100 + # The slot conflict display has better noise reduction
34101 + # than the unsatisfied blockers display, so skip
34102 + # unsatisfied blockers display for packages involved
34103 + # directly in slot conflicts (see bug #385391).
34104 + continue
34105 parent_atoms = self._dynamic_config._parent_atoms.get(pkg)
34106 if not parent_atoms:
34107 atom = self._dynamic_config._blocked_world_pkgs.get(pkg)
34108 @@ -6232,7 +7259,14 @@ class depgraph(object):
34109 else:
34110 # Display the specific atom from SetArg or
34111 # Package types.
34112 - msg.append("%s required by %s" % (atom, parent))
34113 + if atom != atom.unevaluated_atom:
34114 + # Show the unevaluated atom, since it can reveal
34115 + # issues with conditional use-flags missing
34116 + # from IUSE.
34117 + msg.append("%s (%s) required by %s" %
34118 + (atom.unevaluated_atom, atom, parent))
34119 + else:
34120 + msg.append("%s required by %s" % (atom, parent))
34121 msg.append("\n")
34122
34123 msg.append("\n")
34124 @@ -6248,6 +7282,10 @@ class depgraph(object):
34125 # redundantly displaying this exact same merge list
34126 # again via _show_merge_list().
34127 self._dynamic_config._displayed_list = mylist
34128 +
34129 + if "--tree" in self._frozen_config.myopts:
34130 + mylist = tuple(reversed(mylist))
34131 +
34132 display = Display()
34133
34134 return display(self, mylist, favorites, verbosity)
34135 @@ -6320,7 +7358,7 @@ class depgraph(object):
34136 if is_latest:
34137 unstable_keyword_msg[root].append(">=%s %s\n" % (pkg.cpv, keyword))
34138 elif is_latest_in_slot:
34139 - unstable_keyword_msg[root].append(">=%s:%s %s\n" % (pkg.cpv, pkg.metadata["SLOT"], keyword))
34140 + unstable_keyword_msg[root].append(">=%s:%s %s\n" % (pkg.cpv, pkg.slot, keyword))
34141 else:
34142 unstable_keyword_msg[root].append("=%s %s\n" % (pkg.cpv, keyword))
34143 else:
34144 @@ -6343,7 +7381,7 @@ class depgraph(object):
34145 keyword = reason.unmask_hint.value
34146
34147 comment, filename = portage.getmaskingreason(
34148 - pkg.cpv, metadata=pkg.metadata,
34149 + pkg.cpv, metadata=pkg._metadata,
34150 settings=pkgsettings,
34151 portdb=pkg.root_config.trees["porttree"].dbapi,
34152 return_location=True)
34153 @@ -6360,7 +7398,7 @@ class depgraph(object):
34154 if is_latest:
34155 p_mask_change_msg[root].append(">=%s\n" % pkg.cpv)
34156 elif is_latest_in_slot:
34157 - p_mask_change_msg[root].append(">=%s:%s\n" % (pkg.cpv, pkg.metadata["SLOT"]))
34158 + p_mask_change_msg[root].append(">=%s:%s\n" % (pkg.cpv, pkg.slot))
34159 else:
34160 p_mask_change_msg[root].append("=%s\n" % pkg.cpv)
34161 else:
34162 @@ -6385,7 +7423,7 @@ class depgraph(object):
34163 if is_latest:
34164 use_changes_msg[root].append(">=%s %s\n" % (pkg.cpv, " ".join(adjustments)))
34165 elif is_latest_in_slot:
34166 - use_changes_msg[root].append(">=%s:%s %s\n" % (pkg.cpv, pkg.metadata["SLOT"], " ".join(adjustments)))
34167 + use_changes_msg[root].append(">=%s:%s %s\n" % (pkg.cpv, pkg.slot, " ".join(adjustments)))
34168 else:
34169 use_changes_msg[root].append("=%s %s\n" % (pkg.cpv, " ".join(adjustments)))
34170
34171 @@ -6402,7 +7440,7 @@ class depgraph(object):
34172 if is_latest:
34173 license_msg[root].append(">=%s %s\n" % (pkg.cpv, " ".join(sorted(missing_licenses))))
34174 elif is_latest_in_slot:
34175 - license_msg[root].append(">=%s:%s %s\n" % (pkg.cpv, pkg.metadata["SLOT"], " ".join(sorted(missing_licenses))))
34176 + license_msg[root].append(">=%s:%s %s\n" % (pkg.cpv, pkg.slot, " ".join(sorted(missing_licenses))))
34177 else:
34178 license_msg[root].append("=%s %s\n" % (pkg.cpv, " ".join(sorted(missing_licenses))))
34179
34180 @@ -6442,7 +7480,7 @@ class depgraph(object):
34181 if stat.S_ISREG(st.st_mode):
34182 last_file_path = p
34183 elif stat.S_ISDIR(st.st_mode):
34184 - if os.path.basename(p) in _ignorecvs_dirs:
34185 + if os.path.basename(p) in VCS_DIRS:
34186 continue
34187 try:
34188 contents = os.listdir(p)
34189 @@ -6511,24 +7549,25 @@ class depgraph(object):
34190 if len(roots) > 1:
34191 writemsg("\nFor %s:\n" % abs_user_config, noiselevel=-1)
34192
34193 + def _writemsg(reason, file):
34194 + writemsg(('\nThe following %s are necessary to proceed:\n'
34195 + ' (see "%s" in the portage(5) man page for more details)\n')
34196 + % (colorize('BAD', reason), file), noiselevel=-1)
34197 +
34198 if root in unstable_keyword_msg:
34199 - writemsg("\nThe following " + colorize("BAD", "keyword changes") + \
34200 - " are necessary to proceed:\n", noiselevel=-1)
34201 + _writemsg('keyword changes', 'package.accept_keywords')
34202 writemsg(format_msg(unstable_keyword_msg[root]), noiselevel=-1)
34203
34204 if root in p_mask_change_msg:
34205 - writemsg("\nThe following " + colorize("BAD", "mask changes") + \
34206 - " are necessary to proceed:\n", noiselevel=-1)
34207 + _writemsg('mask changes', 'package.unmask')
34208 writemsg(format_msg(p_mask_change_msg[root]), noiselevel=-1)
34209
34210 if root in use_changes_msg:
34211 - writemsg("\nThe following " + colorize("BAD", "USE changes") + \
34212 - " are necessary to proceed:\n", noiselevel=-1)
34213 + _writemsg('USE changes', 'package.use')
34214 writemsg(format_msg(use_changes_msg[root]), noiselevel=-1)
34215
34216 if root in license_msg:
34217 - writemsg("\nThe following " + colorize("BAD", "license changes") + \
34218 - " are necessary to proceed:\n", noiselevel=-1)
34219 + _writemsg('license changes', 'package.license')
34220 writemsg(format_msg(license_msg[root]), noiselevel=-1)
34221
34222 protect_obj = {}
34223 @@ -6542,11 +7581,12 @@ class depgraph(object):
34224 def write_changes(root, changes, file_to_write_to):
34225 file_contents = None
34226 try:
34227 - file_contents = io.open(
34228 + with io.open(
34229 _unicode_encode(file_to_write_to,
34230 encoding=_encodings['fs'], errors='strict'),
34231 mode='r', encoding=_encodings['content'],
34232 - errors='replace').readlines()
34233 + errors='replace') as f:
34234 + file_contents = f.readlines()
34235 except IOError as e:
34236 if e.errno == errno.ENOENT:
34237 file_contents = []
34238 @@ -6612,10 +7652,16 @@ class depgraph(object):
34239 noiselevel=-1)
34240 writemsg("".join(problems), noiselevel=-1)
34241 elif write_to_file and roots:
34242 - writemsg("\nAutounmask changes successfully written. Remember to run dispatch-conf.\n", \
34243 + writemsg("\nAutounmask changes successfully written.\n",
34244 noiselevel=-1)
34245 + for root in roots:
34246 + chk_updated_cfg_files(root,
34247 + [os.path.join(os.sep, USER_CONFIG_PATH)])
34248 elif not pretend and not autounmask_write and roots:
34249 - writemsg("\nUse --autounmask-write to write changes to config files (honoring CONFIG_PROTECT).\n", \
34250 + writemsg("\nUse --autounmask-write to write changes to config files (honoring\n"
34251 + "CONFIG_PROTECT). Carefully examine the list of proposed changes,\n"
34252 + "paying special attention to mask or keyword changes that may expose\n"
34253 + "experimental or unstable packages.\n",
34254 noiselevel=-1)
34255
34256
34257 @@ -6632,21 +7678,35 @@ class depgraph(object):
34258 self._show_circular_deps(
34259 self._dynamic_config._circular_deps_for_display)
34260
34261 - # The slot conflict display has better noise reduction than
34262 - # the unsatisfied blockers display, so skip unsatisfied blockers
34263 - # display if there are slot conflicts (see bug #385391).
34264 - if self._dynamic_config._slot_collision_info:
34265 + unresolved_conflicts = False
34266 + have_slot_conflict = any(self._dynamic_config._package_tracker.slot_conflicts())
34267 + if have_slot_conflict:
34268 + unresolved_conflicts = True
34269 self._show_slot_collision_notice()
34270 - elif self._dynamic_config._unsatisfied_blockers_for_display is not None:
34271 + if self._dynamic_config._unsatisfied_blockers_for_display is not None:
34272 + unresolved_conflicts = True
34273 self._show_unsatisfied_blockers(
34274 self._dynamic_config._unsatisfied_blockers_for_display)
34275 - else:
34276 +
34277 + # Only show missed updates if there are no unresolved conflicts,
34278 + # since they may be irrelevant after the conflicts are solved.
34279 + if not unresolved_conflicts:
34280 self._show_missed_update()
34281
34282 + if self._frozen_config.myopts.get("--verbose-slot-rebuilds", 'y') != 'n':
34283 + self._compute_abi_rebuild_info()
34284 + self._show_abi_rebuild_info()
34285 +
34286 self._show_ignored_binaries()
34287
34288 self._display_autounmask()
34289
34290 + for depgraph_sets in self._dynamic_config.sets.values():
34291 + for pset in depgraph_sets.sets.values():
34292 + for error_msg in pset.errors:
34293 + writemsg_level("%s\n" % (error_msg,),
34294 + level=logging.ERROR, noiselevel=-1)
34295 +
34296 # TODO: Add generic support for "set problem" handlers so that
34297 # the below warnings aren't special cases for world only.
34298
34299 @@ -6722,7 +7782,7 @@ class depgraph(object):
34300 pkgsettings = self._frozen_config.pkgsettings[pkg.root]
34301 mreasons = get_masking_status(pkg, pkgsettings, root_config, use=self._pkg_use_enabled(pkg))
34302 masked_packages.append((root_config, pkgsettings,
34303 - pkg.cpv, pkg.repo, pkg.metadata, mreasons))
34304 + pkg.cpv, pkg.repo, pkg._metadata, mreasons))
34305 if masked_packages:
34306 writemsg("\n" + colorize("BAD", "!!!") + \
34307 " The following updates are masked by LICENSE changes:\n",
34308 @@ -6737,7 +7797,7 @@ class depgraph(object):
34309 pkgsettings = self._frozen_config.pkgsettings[pkg.root]
34310 mreasons = get_masking_status(pkg, pkgsettings, root_config, use=self._pkg_use_enabled)
34311 masked_packages.append((root_config, pkgsettings,
34312 - pkg.cpv, pkg.repo, pkg.metadata, mreasons))
34313 + pkg.cpv, pkg.repo, pkg._metadata, mreasons))
34314 if masked_packages:
34315 writemsg("\n" + colorize("BAD", "!!!") + \
34316 " The following installed packages are masked:\n",
34317 @@ -6747,7 +7807,15 @@ class depgraph(object):
34318 writemsg("\n", noiselevel=-1)
34319
34320 for pargs, kwargs in self._dynamic_config._unsatisfied_deps_for_display:
34321 - self._show_unsatisfied_dep(*pargs, **kwargs)
34322 + self._show_unsatisfied_dep(*pargs,
34323 + **portage._native_kwargs(kwargs))
34324 +
34325 + if self._dynamic_config._buildpkgonly_deps_unsatisfied:
34326 + self._show_merge_list()
34327 + writemsg("\n!!! --buildpkgonly requires all "
34328 + "dependencies to be merged.\n", noiselevel=-1)
34329 + writemsg("!!! Cannot merge requested packages. "
34330 + "Merge deps and try again.\n\n", noiselevel=-1)
34331
34332 def saveNomergeFavorites(self):
34333 """Find atoms in favorites that are not in the mergelist and add them
34334 @@ -6808,16 +7876,31 @@ class depgraph(object):
34335 all_added.append(SETPREFIX + k)
34336 all_added.extend(added_favorites)
34337 all_added.sort()
34338 - for a in all_added:
34339 - if a.startswith(SETPREFIX):
34340 - filename = "world_sets"
34341 - else:
34342 - filename = "world"
34343 - writemsg_stdout(
34344 - ">>> Recording %s in \"%s\" favorites file...\n" %
34345 - (colorize("INFORM", _unicode(a)), filename), noiselevel=-1)
34346 if all_added:
34347 - world_set.update(all_added)
34348 + skip = False
34349 + if "--ask" in self._frozen_config.myopts:
34350 + writemsg_stdout("\n", noiselevel=-1)
34351 + for a in all_added:
34352 + writemsg_stdout(" %s %s\n" % (colorize("GOOD", "*"), a),
34353 + noiselevel=-1)
34354 + writemsg_stdout("\n", noiselevel=-1)
34355 + prompt = "Would you like to add these packages to your world " \
34356 + "favorites?"
34357 + enter_invalid = '--ask-enter-invalid' in \
34358 + self._frozen_config.myopts
34359 + if userquery(prompt, enter_invalid) == "No":
34360 + skip = True
34361 +
34362 + if not skip:
34363 + for a in all_added:
34364 + if a.startswith(SETPREFIX):
34365 + filename = "world_sets"
34366 + else:
34367 + filename = "world"
34368 + writemsg_stdout(
34369 + ">>> Recording %s in \"%s\" favorites file...\n" %
34370 + (colorize("INFORM", _unicode(a)), filename), noiselevel=-1)
34371 + world_set.update(all_added)
34372
34373 if world_locked:
34374 world_set.unlock()
34375 @@ -6844,7 +7927,6 @@ class depgraph(object):
34376 else:
34377 args = []
34378
34379 - fakedb = self._dynamic_config.mydbapi
34380 serialized_tasks = []
34381 masked_tasks = []
34382 for x in mergelist:
34383 @@ -6902,7 +7984,7 @@ class depgraph(object):
34384 self._dynamic_config._unsatisfied_deps_for_display.append(
34385 ((pkg.root, "="+pkg.cpv), {"myparent":None}))
34386
34387 - fakedb[myroot].cpv_inject(pkg)
34388 + self._dynamic_config._package_tracker.add_pkg(pkg)
34389 serialized_tasks.append(pkg)
34390 self._spinner_update()
34391
34392 @@ -7092,14 +8174,15 @@ class depgraph(object):
34393 try:
34394 for pargs, kwargs in self._dynamic_config._unsatisfied_deps_for_display:
34395 self._show_unsatisfied_dep(
34396 - *pargs, check_autounmask_breakage=True, **kwargs)
34397 + *pargs, check_autounmask_breakage=True,
34398 + **portage._native_kwargs(kwargs))
34399 except self._autounmask_breakage:
34400 return True
34401 return False
34402
34403 def get_backtrack_infos(self):
34404 return self._dynamic_config._backtrack_infos
34405 -
34406 +
34407
34408 class _dep_check_composite_db(dbapi):
34409 """
34410 @@ -7214,8 +8297,9 @@ class _dep_check_composite_db(dbapi):
34411 elif not self._depgraph._equiv_ebuild_visible(pkg):
34412 return False
34413
34414 - in_graph = self._depgraph._dynamic_config._slot_pkg_map[
34415 - self._root].get(pkg.slot_atom)
34416 + in_graph = next(self._depgraph._dynamic_config._package_tracker.match(
34417 + self._root, pkg.slot_atom, installed=False), None)
34418 +
34419 if in_graph is None:
34420 # Mask choices for packages which are not the highest visible
34421 # version within their slot (since they usually trigger slot
34422 @@ -7234,7 +8318,7 @@ class _dep_check_composite_db(dbapi):
34423 return True
34424
34425 def aux_get(self, cpv, wants):
34426 - metadata = self._cpv_pkg_map[cpv].metadata
34427 + metadata = self._cpv_pkg_map[cpv]._metadata
34428 return [metadata.get(x, "") for x in wants]
34429
34430 def match_pkgs(self, atom):
34431 @@ -7308,14 +8392,14 @@ def _spinner_stop(spinner):
34432
34433 portage.writemsg_stdout("... done!\n")
34434
34435 -def backtrack_depgraph(settings, trees, myopts, myparams,
34436 +def backtrack_depgraph(settings, trees, myopts, myparams,
34437 myaction, myfiles, spinner):
34438 """
34439 Raises PackageSetNotFound if myfiles contains a missing package set.
34440 """
34441 _spinner_start(spinner, myopts)
34442 try:
34443 - return _backtrack_depgraph(settings, trees, myopts, myparams,
34444 + return _backtrack_depgraph(settings, trees, myopts, myparams,
34445 myaction, myfiles, spinner)
34446 finally:
34447 _spinner_stop(spinner)
34448 @@ -7412,7 +8496,7 @@ def _resume_depgraph(settings, trees, mtimedb, myopts, myparams, spinner):
34449 skip_masked = True
34450 skip_unsatisfied = True
34451 mergelist = mtimedb["resume"]["mergelist"]
34452 - dropped_tasks = set()
34453 + dropped_tasks = {}
34454 frozen_config = _frozen_depgraph_config(settings, trees,
34455 myopts, spinner)
34456 while True:
34457 @@ -7426,12 +8510,21 @@ def _resume_depgraph(settings, trees, mtimedb, myopts, myparams, spinner):
34458 raise
34459
34460 graph = mydepgraph._dynamic_config.digraph
34461 - unsatisfied_parents = dict((dep.parent, dep.parent) \
34462 - for dep in e.value)
34463 + unsatisfied_parents = {}
34464 traversed_nodes = set()
34465 - unsatisfied_stack = list(unsatisfied_parents)
34466 + unsatisfied_stack = [(dep.parent, dep.atom) for dep in e.value]
34467 while unsatisfied_stack:
34468 - pkg = unsatisfied_stack.pop()
34469 + pkg, atom = unsatisfied_stack.pop()
34470 + if atom is not None and \
34471 + mydepgraph._select_pkg_from_installed(
34472 + pkg.root, atom)[0] is not None:
34473 + continue
34474 + atoms = unsatisfied_parents.get(pkg)
34475 + if atoms is None:
34476 + atoms = []
34477 + unsatisfied_parents[pkg] = atoms
34478 + if atom is not None:
34479 + atoms.append(atom)
34480 if pkg in traversed_nodes:
34481 continue
34482 traversed_nodes.add(pkg)
34483 @@ -7440,7 +8533,8 @@ def _resume_depgraph(settings, trees, mtimedb, myopts, myparams, spinner):
34484 # package scheduled for merge, removing this
34485 # package may cause the parent package's
34486 # dependency to become unsatisfied.
34487 - for parent_node in graph.parent_nodes(pkg):
34488 + for parent_node, atom in \
34489 + mydepgraph._dynamic_config._parent_atoms.get(pkg, []):
34490 if not isinstance(parent_node, Package) \
34491 or parent_node.operation not in ("merge", "nomerge"):
34492 continue
34493 @@ -7448,8 +8542,7 @@ def _resume_depgraph(settings, trees, mtimedb, myopts, myparams, spinner):
34494 # ensure that a package with an unsatisfied dependency
34495 # won't get pulled in, even indirectly via a soft
34496 # dependency.
34497 - unsatisfied_parents[parent_node] = parent_node
34498 - unsatisfied_stack.append(parent_node)
34499 + unsatisfied_stack.append((parent_node, atom))
34500
34501 unsatisfied_tuples = frozenset(tuple(parent_node)
34502 for parent_node in unsatisfied_parents
34503 @@ -7470,8 +8563,8 @@ def _resume_depgraph(settings, trees, mtimedb, myopts, myparams, spinner):
34504 # Exclude installed packages that have been removed from the graph due
34505 # to failure to build/install runtime dependencies after the dependent
34506 # package has already been installed.
34507 - dropped_tasks.update(pkg for pkg in \
34508 - unsatisfied_parents if pkg.operation != "nomerge")
34509 + dropped_tasks.update((pkg, atoms) for pkg, atoms in \
34510 + unsatisfied_parents.items() if pkg.operation != "nomerge")
34511
34512 del e, graph, traversed_nodes, \
34513 unsatisfied_parents, unsatisfied_stack
34514 @@ -7557,9 +8650,11 @@ def show_masked_packages(masked_packages):
34515 shown_comments.add(comment)
34516 portdb = root_config.trees["porttree"].dbapi
34517 for l in missing_licenses:
34518 - l_path = portdb.findLicensePath(l)
34519 if l in shown_licenses:
34520 continue
34521 + l_path = portdb.findLicensePath(l)
34522 + if l_path is None:
34523 + continue
34524 msg = ("A copy of the '%s' license" + \
34525 " is located at '%s'.\n\n") % (l, l_path)
34526 writemsg(msg, noiselevel=-1)
34527 @@ -7586,9 +8681,9 @@ def _get_masking_status(pkg, pkgsettings, root_config, myrepo=None, use=None):
34528 portdb=root_config.trees["porttree"].dbapi, myrepo=myrepo)
34529
34530 if not pkg.installed:
34531 - if not pkgsettings._accept_chost(pkg.cpv, pkg.metadata):
34532 + if not pkgsettings._accept_chost(pkg.cpv, pkg._metadata):
34533 mreasons.append(_MaskReason("CHOST", "CHOST: %s" % \
34534 - pkg.metadata["CHOST"]))
34535 + pkg._metadata["CHOST"]))
34536
34537 if pkg.invalid:
34538 for msgs in pkg.invalid.values():
34539 @@ -7596,7 +8691,7 @@ def _get_masking_status(pkg, pkgsettings, root_config, myrepo=None, use=None):
34540 mreasons.append(
34541 _MaskReason("invalid", "invalid: %s" % (msg,)))
34542
34543 - if not pkg.metadata["SLOT"]:
34544 + if not pkg._metadata["SLOT"]:
34545 mreasons.append(
34546 _MaskReason("invalid", "SLOT: undefined"))
34547
34548
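[Editorial sketch, not part of the patch] The altlist() hunk above deprecates its "reversed" parameter by reusing the DeprecationWarning class itself as a "not passed" sentinel, so legacy callers keep working but see a warning pointing at their own call site. A minimal, self-contained sketch of that idiom follows; the function body and task names here are illustrative only, not Portage code.

import warnings

def altlist(reversed=DeprecationWarning):
    # DeprecationWarning doubles as a "parameter was not passed" sentinel.
    if reversed is not DeprecationWarning:
        warnings.warn("the 'reversed' parameter of altlist() is deprecated",
            DeprecationWarning, stacklevel=2)
    tasks = ("pkg-a", "pkg-b", "pkg-c")  # placeholder task list
    if reversed is not DeprecationWarning and reversed:
        # the parameter shadows the builtin reversed(), hence the slice
        tasks = tasks[::-1]
    return tasks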
34549 diff --git a/pym/_emerge/emergelog.py b/pym/_emerge/emergelog.py
34550 index b1b093f..aea94f7 100644
34551 --- a/pym/_emerge/emergelog.py
34552 +++ b/pym/_emerge/emergelog.py
34553 @@ -1,7 +1,7 @@
34554 -# Copyright 1999-2011 Gentoo Foundation
34555 +# Copyright 1999-2013 Gentoo Foundation
34556 # Distributed under the terms of the GNU General Public License v2
34557
34558 -from __future__ import print_function
34559 +from __future__ import unicode_literals
34560
34561 import io
34562 import sys
34563 @@ -20,10 +20,6 @@ from portage.output import xtermTitle
34564 _disable = True
34565 _emerge_log_dir = '/var/log'
34566
34567 -# Coerce to unicode, in order to prevent TypeError when writing
34568 -# raw bytes to TextIOWrapper with python2.
34569 -_log_fmt = _unicode_decode("%.0f: %s\n")
34570 -
34571 def emergelog(xterm_titles, mystr, short_msg=None):
34572
34573 if _disable:
34574 @@ -51,10 +47,10 @@ def emergelog(xterm_titles, mystr, short_msg=None):
34575 mode=0o660)
34576 mylock = portage.locks.lockfile(file_path)
34577 try:
34578 - mylogfile.write(_log_fmt % (time.time(), mystr))
34579 + mylogfile.write("%.0f: %s\n" % (time.time(), mystr))
34580 mylogfile.close()
34581 finally:
34582 portage.locks.unlockfile(mylock)
34583 except (IOError,OSError,portage.exception.PortageException) as e:
34584 if secpass >= 1:
34585 - print("emergelog():",e, file=sys.stderr)
34586 + portage.util.writemsg("emergelog(): %s\n" % (e,), noiselevel=-1)
34587
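With "from __future__ import unicode_literals" in effect, the "%.0f: %s\n" format string above is already a text object on Python 2, so the module-level _unicode_decode() coercion it replaces is redundant; the failure path also now goes through portage.util.writemsg() instead of a bare print to stderr. A rough sketch of the logging half of that change outside Portage (the log path is an illustrative example, not emerge's real default):

# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import io
import time

def append_log_entry(log_path, message):
    # Under unicode_literals this format string is unicode even on Python 2,
    # so it can be written to a text-mode stream without an explicit decode.
    with io.open(log_path, mode="a", encoding="utf_8") as log_file:
        log_file.write("%.0f: %s\n" % (time.time(), message))

append_log_entry("/tmp/example-emerge.log", "Started emerge on: example")
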
34588 diff --git a/pym/_emerge/getloadavg.py b/pym/_emerge/getloadavg.py
34589 index e9babf1..6a2794f 100644
34590 --- a/pym/_emerge/getloadavg.py
34591 +++ b/pym/_emerge/getloadavg.py
34592 @@ -1,4 +1,4 @@
34593 -# Copyright 1999-2009 Gentoo Foundation
34594 +# Copyright 1999-2013 Gentoo Foundation
34595 # Distributed under the terms of the GNU General Public License v2
34596
34597 from portage import os
34598 @@ -11,7 +11,8 @@ if getloadavg is None:
34599 Raises OSError if the load average was unobtainable.
34600 """
34601 try:
34602 - loadavg_str = open('/proc/loadavg').readline()
34603 + with open('/proc/loadavg') as f:
34604 + loadavg_str = f.readline()
34605 except IOError:
34606 # getloadavg() is only supposed to raise OSError, so convert
34607 raise OSError('unknown')
34608
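The getloadavg() fallback above now reads /proc/loadavg inside a with block, so the file descriptor is closed even if readline() throws. A self-contained version of that fallback, assuming the usual Linux /proc/loadavg layout:

import os

def getloadavg():
    """Return the 1, 5 and 15 minute load averages as a tuple of floats.

    Raises OSError if the values cannot be read, like os.getloadavg().
    """
    try:
        with open('/proc/loadavg') as f:
            loadavg_str = f.readline()
    except IOError:
        # getloadavg() is only supposed to raise OSError, so convert.
        raise OSError('unknown')
    try:
        return tuple(float(x) for x in loadavg_str.split()[:3])
    except ValueError:
        raise OSError('unknown')

if os.path.exists('/proc/loadavg'):
    print(getloadavg())
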
34609 diff --git a/pym/_emerge/help.py b/pym/_emerge/help.py
34610 index a1dbb37..8e241a8 100644
34611 --- a/pym/_emerge/help.py
34612 +++ b/pym/_emerge/help.py
34613 @@ -1,4 +1,4 @@
34614 -# Copyright 1999-2011 Gentoo Foundation
34615 +# Copyright 1999-2014 Gentoo Foundation
34616 # Distributed under the terms of the GNU General Public License v2
34617
34618 from __future__ import print_function
34619 @@ -9,15 +9,15 @@ def help():
34620 print(bold("emerge:")+" the other white meat (command-line interface to the Portage system)")
34621 print(bold("Usage:"))
34622 print(" "+turquoise("emerge")+" [ "+green("options")+" ] [ "+green("action")+" ] [ "+turquoise("ebuild")+" | "+turquoise("tbz2")+" | "+turquoise("file")+" | "+turquoise("@set")+" | "+turquoise("atom")+" ] [ ... ]")
34623 - print(" "+turquoise("emerge")+" [ "+green("options")+" ] [ "+green("action")+" ] < "+turquoise("system")+" | "+turquoise("world")+" >")
34624 + print(" "+turquoise("emerge")+" [ "+green("options")+" ] [ "+green("action")+" ] < "+turquoise("@system")+" | "+turquoise("@world")+" >")
34625 print(" "+turquoise("emerge")+" < "+turquoise("--sync")+" | "+turquoise("--metadata")+" | "+turquoise("--info")+" >")
34626 print(" "+turquoise("emerge")+" "+turquoise("--resume")+" [ "+green("--pretend")+" | "+green("--ask")+" | "+green("--skipfirst")+" ]")
34627 - print(" "+turquoise("emerge")+" "+turquoise("--help")+" [ "+green("--verbose")+" ] ")
34628 - print(bold("Options:")+" "+green("-")+"["+green("abBcCdDefgGhjkKlnNoOpPqrsStuvV")+"]")
34629 + print(" "+turquoise("emerge")+" "+turquoise("--help"))
34630 + print(bold("Options:")+" "+green("-")+"["+green("abBcCdDefgGhjkKlnNoOpPqrsStuvVw")+"]")
34631 print(" [ " + green("--color")+" < " + turquoise("y") + " | "+ turquoise("n")+" > ] [ "+green("--columns")+" ]")
34632 print(" [ "+green("--complete-graph")+" ] [ "+green("--deep")+" ]")
34633 print(" [ "+green("--jobs") + " " + turquoise("JOBS")+" ] [ "+green("--keep-going")+" ] [ " + green("--load-average")+" " + turquoise("LOAD") + " ]")
34634 - print(" [ "+green("--newuse")+" ] [ "+green("--noconfmem")+" ] [ "+green("--nospinner")+" ]")
34635 + print(" [ "+green("--newrepo")+" ] [ "+green("--newuse")+" ] [ "+green("--noconfmem")+" ] [ "+green("--nospinner")+" ]")
34636 print(" [ "+green("--oneshot")+" ] [ "+green("--onlydeps")+" ] [ "+ green("--quiet-build")+" [ " + turquoise("y") + " | "+ turquoise("n")+" ] ]")
34637 print(" [ "+green("--reinstall ")+turquoise("changed-use")+" ] [ " + green("--with-bdeps")+" < " + turquoise("y") + " | "+ turquoise("n")+" > ]")
34638 print(bold("Actions:")+" [ "+green("--depclean")+" | "+green("--list-sets")+" | "+green("--search")+" | "+green("--sync")+" | "+green("--version")+" ]")
34639
34640 diff --git a/pym/_emerge/is_valid_package_atom.py b/pym/_emerge/is_valid_package_atom.py
34641 index 7cb2a5b..112afc1 100644
34642 --- a/pym/_emerge/is_valid_package_atom.py
34643 +++ b/pym/_emerge/is_valid_package_atom.py
34644 @@ -1,11 +1,12 @@
34645 -# Copyright 1999-2011 Gentoo Foundation
34646 +# Copyright 1999-2013 Gentoo Foundation
34647 # Distributed under the terms of the GNU General Public License v2
34648
34649 import re
34650 from portage.dep import isvalidatom
34651
34652 def insert_category_into_atom(atom, category):
34653 - alphanum = re.search(r'\w', atom)
34654 + # Handle '*' character for "extended syntax" wildcard support.
34655 + alphanum = re.search(r'[\*\w]', atom, re.UNICODE)
34656 if alphanum:
34657 ret = atom[:alphanum.start()] + "%s/" % category + \
34658 atom[alphanum.start():]
34659 @@ -14,7 +15,7 @@ def insert_category_into_atom(atom, category):
34660 return ret
34661
34662 def is_valid_package_atom(x, allow_repo=False):
34663 - if "/" not in x:
34664 + if "/" not in x.split(":")[0]:
34665 x2 = insert_category_into_atom(x, 'cat')
34666 if x2 != None:
34667 x = x2
34668
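Two small behaviour changes above are easy to miss: the search pattern now treats '*' like an alphanumeric, so extended-syntax wildcard atoms also get a category prepended, and the '/' test only looks at the text before the first ':', so a slot/sub-slot suffix such as 'foo:2/2.30' no longer suppresses category insertion. A quick isolated illustration of both checks (insert_category and needs_category are simplified stand-ins for the real helpers):

import re

def insert_category(atom, category):
    # '*' is accepted alongside \w so extended-syntax wildcards are handled.
    alphanum = re.search(r'[\*\w]', atom, re.UNICODE)
    if alphanum:
        return atom[:alphanum.start()] + category + "/" + atom[alphanum.start():]
    return atom

def needs_category(atom):
    # Only the part before the first ':' counts; 'foo:2/2.30' still needs
    # a category even though its slot specification contains a '/'.
    return "/" not in atom.split(":")[0]

for example in ("foo", "foo:2/2.30", "cat/foo", ">=foo-1.0", "*"):
    if needs_category(example):
        print("%-12s -> %s" % (example, insert_category(example, "cat")))
    else:
        print("%-12s -> unchanged" % example)
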
34669 diff --git a/pym/_emerge/main.py b/pym/_emerge/main.py
34670 index f19994c..cfe1332 100644
34671 --- a/pym/_emerge/main.py
34672 +++ b/pym/_emerge/main.py
34673 @@ -1,53 +1,24 @@
34674 -# Copyright 1999-2012 Gentoo Foundation
34675 +# Copyright 1999-2014 Gentoo Foundation
34676 # Distributed under the terms of the GNU General Public License v2
34677
34678 from __future__ import print_function
34679
34680 -import logging
34681 -import signal
34682 -import stat
34683 -import subprocess
34684 -import sys
34685 -import textwrap
34686 import platform
34687 +import sys
34688 +
34689 import portage
34690 portage.proxy.lazyimport.lazyimport(globals(),
34691 - 'portage.news:count_unread_news,display_news_notifications',
34692 - 'portage.emaint.modules.logs.logs:CleanLogs',
34693 + 'logging',
34694 + 'portage.dep:Atom',
34695 + 'portage.util:writemsg_level',
34696 + 'textwrap',
34697 + '_emerge.actions:load_emerge_config,run_action,' + \
34698 + 'validate_ebuild_environment',
34699 + '_emerge.help:help@emerge_help',
34700 + '_emerge.is_valid_package_atom:insert_category_into_atom'
34701 )
34702 from portage import os
34703 -from portage import _encodings
34704 -from portage import _unicode_decode
34705 -import _emerge.help
34706 -import portage.xpak, errno, re, time
34707 -from portage.output import colorize, xtermTitle, xtermTitleReset
34708 -from portage.output import create_color_func
34709 -good = create_color_func("GOOD")
34710 -bad = create_color_func("BAD")
34711 -
34712 -from portage.const import _ENABLE_DYN_LINK_MAP
34713 -import portage.elog
34714 -import portage.util
34715 -import portage.locks
34716 -import portage.exception
34717 -from portage.data import secpass
34718 -from portage.dbapi.dep_expand import dep_expand
34719 -from portage.util import normalize_path as normpath
34720 -from portage.util import (shlex_split, varexpand,
34721 - writemsg_level, writemsg_stdout)
34722 -from portage._sets import SETPREFIX
34723 -from portage._global_updates import _global_updates
34724 -
34725 -from _emerge.actions import action_config, action_sync, action_metadata, \
34726 - action_regen, action_search, action_uninstall, action_info, action_build, \
34727 - adjust_configs, chk_updated_cfg_files, display_missing_pkg_set, \
34728 - display_news_notification, getportageversion, load_emerge_config
34729 -import _emerge
34730 -from _emerge.emergelog import emergelog
34731 -from _emerge._flush_elog_mod_echo import _flush_elog_mod_echo
34732 -from _emerge.is_valid_package_atom import is_valid_package_atom
34733 -from _emerge.stdout_spinner import stdout_spinner
34734 -from _emerge.userquery import userquery
34735 +from portage.util._argparse import ArgumentParser
34736
34737 if sys.hexversion >= 0x3000000:
34738 long = int
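The import section above replaces most of main.py's eager module-level imports with a single portage.proxy.lazyimport.lazyimport(globals(), ...) call, whose 'module', 'module:attr' and 'module:attr@alias' entries are only resolved on first use, trimming emerge start-up time. A much-simplified sketch of the underlying idea using importlib (this is not Portage's proxy implementation, and it only handles whole modules):

import importlib

class _LazyModule(object):
    """Placeholder that imports the real module on first attribute access."""

    def __init__(self, name):
        self._name = name
        self._module = None

    def __getattr__(self, attr):
        if self._module is None:
            self._module = importlib.import_module(self._name)
        return getattr(self._module, attr)

def lazyimport(scope, *module_names):
    # Register a placeholder per name; nothing is actually imported yet.
    for name in module_names:
        scope[name] = _LazyModule(name)

lazyimport(globals(), "textwrap", "logging")
# textwrap is imported here, on this first attribute access:
print(textwrap.fill("lazy imports defer module loading until first use", 40))
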
34739 @@ -61,9 +32,11 @@ options=[
34740 "--debug",
34741 "--digest",
34742 "--emptytree",
34743 +"--verbose-conflicts",
34744 "--fetchonly", "--fetch-all-uri",
34745 "--ignore-default-opts",
34746 "--noconfmem",
34747 +"--newrepo",
34748 "--newuse",
34749 "--nodeps", "--noreplace",
34750 "--nospinner", "--oneshot",
34751 @@ -76,7 +49,6 @@ options=[
34752 "--tree",
34753 "--unordered-display",
34754 "--update",
34755 -"--verbose",
34756 "--verbose-main-repo-display",
34757 ]
34758
34759 @@ -97,7 +69,7 @@ shortmapping={
34760 "s":"--search", "S":"--searchdesc",
34761 "t":"--tree",
34762 "u":"--update",
34763 -"v":"--verbose", "V":"--version"
34764 +"V":"--version"
34765 }
34766
34767 COWSAY_MOO = """
34768 @@ -109,331 +81,12 @@ COWSAY_MOO = """
34769 -----------------------
34770 \ ^__^
34771 \ (oo)\_______
34772 - (__)\ )\/\
34773 + (__)\ )\/\\
34774 ||----w |
34775 || ||
34776
34777 """
34778
34779 -def chk_updated_info_files(root, infodirs, prev_mtimes, retval):
34780 -
34781 - if os.path.exists("/usr/bin/install-info"):
34782 - out = portage.output.EOutput()
34783 - regen_infodirs=[]
34784 - for z in infodirs:
34785 - if z=='':
34786 - continue
34787 - inforoot=normpath(root+z)
34788 - if os.path.isdir(inforoot) and \
34789 - not [x for x in os.listdir(inforoot) \
34790 - if x.startswith('.keepinfodir')]:
34791 - infomtime = os.stat(inforoot)[stat.ST_MTIME]
34792 - if inforoot not in prev_mtimes or \
34793 - prev_mtimes[inforoot] != infomtime:
34794 - regen_infodirs.append(inforoot)
34795 -
34796 - if not regen_infodirs:
34797 - portage.writemsg_stdout("\n")
34798 - if portage.util.noiselimit >= 0:
34799 - out.einfo("GNU info directory index is up-to-date.")
34800 - else:
34801 - portage.writemsg_stdout("\n")
34802 - if portage.util.noiselimit >= 0:
34803 - out.einfo("Regenerating GNU info directory index...")
34804 -
34805 - dir_extensions = ("", ".gz", ".bz2")
34806 - icount=0
34807 - badcount=0
34808 - errmsg = ""
34809 - for inforoot in regen_infodirs:
34810 - if inforoot=='':
34811 - continue
34812 -
34813 - if not os.path.isdir(inforoot) or \
34814 - not os.access(inforoot, os.W_OK):
34815 - continue
34816 -
34817 - file_list = os.listdir(inforoot)
34818 - file_list.sort()
34819 - dir_file = os.path.join(inforoot, "dir")
34820 - moved_old_dir = False
34821 - processed_count = 0
34822 - for x in file_list:
34823 - if x.startswith(".") or \
34824 - os.path.isdir(os.path.join(inforoot, x)):
34825 - continue
34826 - if x.startswith("dir"):
34827 - skip = False
34828 - for ext in dir_extensions:
34829 - if x == "dir" + ext or \
34830 - x == "dir" + ext + ".old":
34831 - skip = True
34832 - break
34833 - if skip:
34834 - continue
34835 - if processed_count == 0:
34836 - for ext in dir_extensions:
34837 - try:
34838 - os.rename(dir_file + ext, dir_file + ext + ".old")
34839 - moved_old_dir = True
34840 - except EnvironmentError as e:
34841 - if e.errno != errno.ENOENT:
34842 - raise
34843 - del e
34844 - processed_count += 1
34845 - try:
34846 - proc = subprocess.Popen(
34847 - ['/usr/bin/install-info',
34848 - '--dir-file=%s' % os.path.join(inforoot, "dir"),
34849 - os.path.join(inforoot, x)],
34850 - env=dict(os.environ, LANG="C", LANGUAGE="C"),
34851 - stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
34852 - except OSError:
34853 - myso = None
34854 - else:
34855 - myso = _unicode_decode(
34856 - proc.communicate()[0]).rstrip("\n")
34857 - proc.wait()
34858 - existsstr="already exists, for file `"
34859 - if myso:
34860 - if re.search(existsstr,myso):
34861 - # Already exists... Don't increment the count for this.
34862 - pass
34863 - elif myso[:44]=="install-info: warning: no info dir entry in ":
34864 - # This info file doesn't contain a DIR-header: install-info produces this
34865 - # (harmless) warning (the --quiet switch doesn't seem to work).
34866 - # Don't increment the count for this.
34867 - pass
34868 - else:
34869 - badcount=badcount+1
34870 - errmsg += myso + "\n"
34871 - icount=icount+1
34872 -
34873 - if moved_old_dir and not os.path.exists(dir_file):
34874 - # We didn't generate a new dir file, so put the old file
34875 - # back where it was originally found.
34876 - for ext in dir_extensions:
34877 - try:
34878 - os.rename(dir_file + ext + ".old", dir_file + ext)
34879 - except EnvironmentError as e:
34880 - if e.errno != errno.ENOENT:
34881 - raise
34882 - del e
34883 -
34884 - # Clean dir.old cruft so that they don't prevent
34885 - # unmerge of otherwise empty directories.
34886 - for ext in dir_extensions:
34887 - try:
34888 - os.unlink(dir_file + ext + ".old")
34889 - except EnvironmentError as e:
34890 - if e.errno != errno.ENOENT:
34891 - raise
34892 - del e
34893 -
34894 - #update mtime so we can potentially avoid regenerating.
34895 - prev_mtimes[inforoot] = os.stat(inforoot)[stat.ST_MTIME]
34896 -
34897 - if badcount:
34898 - out.eerror("Processed %d info files; %d errors." % \
34899 - (icount, badcount))
34900 - writemsg_level(errmsg, level=logging.ERROR, noiselevel=-1)
34901 - else:
34902 - if icount > 0 and portage.util.noiselimit >= 0:
34903 - out.einfo("Processed %d info files." % (icount,))
34904 -
34905 -def display_preserved_libs(vardbapi, myopts):
34906 - MAX_DISPLAY = 3
34907 -
34908 - if vardbapi._linkmap is None or \
34909 - vardbapi._plib_registry is None:
34910 - # preserve-libs is entirely disabled
34911 - return
34912 -
34913 - # Explicitly load and prune the PreservedLibsRegistry in order
34914 - # to ensure that we do not display stale data.
34915 - vardbapi._plib_registry.load()
34916 -
34917 - if vardbapi._plib_registry.hasEntries():
34918 - if "--quiet" in myopts:
34919 - print()
34920 - print(colorize("WARN", "!!!") + " existing preserved libs found")
34921 - return
34922 - else:
34923 - print()
34924 - print(colorize("WARN", "!!!") + " existing preserved libs:")
34925 -
34926 - plibdata = vardbapi._plib_registry.getPreservedLibs()
34927 - linkmap = vardbapi._linkmap
34928 - consumer_map = {}
34929 - owners = {}
34930 -
34931 - try:
34932 - linkmap.rebuild()
34933 - except portage.exception.CommandNotFound as e:
34934 - writemsg_level("!!! Command Not Found: %s\n" % (e,),
34935 - level=logging.ERROR, noiselevel=-1)
34936 - del e
34937 - else:
34938 - search_for_owners = set()
34939 - for cpv in plibdata:
34940 - internal_plib_keys = set(linkmap._obj_key(f) \
34941 - for f in plibdata[cpv])
34942 - for f in plibdata[cpv]:
34943 - if f in consumer_map:
34944 - continue
34945 - consumers = []
34946 - for c in linkmap.findConsumers(f):
34947 - # Filter out any consumers that are also preserved libs
34948 - # belonging to the same package as the provider.
34949 - if linkmap._obj_key(c) not in internal_plib_keys:
34950 - consumers.append(c)
34951 - consumers.sort()
34952 - consumer_map[f] = consumers
34953 - search_for_owners.update(consumers[:MAX_DISPLAY+1])
34954 -
34955 - owners = {}
34956 - for f in search_for_owners:
34957 - owner_set = set()
34958 - for owner in linkmap.getOwners(f):
34959 - owner_dblink = vardbapi._dblink(owner)
34960 - if owner_dblink.exists():
34961 - owner_set.add(owner_dblink)
34962 - if owner_set:
34963 - owners[f] = owner_set
34964 -
34965 - for cpv in plibdata:
34966 - print(colorize("WARN", ">>>") + " package: %s" % cpv)
34967 - samefile_map = {}
34968 - for f in plibdata[cpv]:
34969 - obj_key = linkmap._obj_key(f)
34970 - alt_paths = samefile_map.get(obj_key)
34971 - if alt_paths is None:
34972 - alt_paths = set()
34973 - samefile_map[obj_key] = alt_paths
34974 - alt_paths.add(f)
34975 -
34976 - for alt_paths in samefile_map.values():
34977 - alt_paths = sorted(alt_paths)
34978 - for p in alt_paths:
34979 - print(colorize("WARN", " * ") + " - %s" % (p,))
34980 - f = alt_paths[0]
34981 - consumers = consumer_map.get(f, [])
34982 - for c in consumers[:MAX_DISPLAY]:
34983 - print(colorize("WARN", " * ") + " used by %s (%s)" % \
34984 - (c, ", ".join(x.mycpv for x in owners.get(c, []))))
34985 - if len(consumers) == MAX_DISPLAY + 1:
34986 - print(colorize("WARN", " * ") + " used by %s (%s)" % \
34987 - (consumers[MAX_DISPLAY], ", ".join(x.mycpv \
34988 - for x in owners.get(consumers[MAX_DISPLAY], []))))
34989 - elif len(consumers) > MAX_DISPLAY:
34990 - print(colorize("WARN", " * ") + " used by %d other files" % (len(consumers) - MAX_DISPLAY))
34991 - print("Use " + colorize("GOOD", "emerge @preserved-rebuild") + " to rebuild packages using these libraries")
34992 -
34993 -def post_emerge(myaction, myopts, myfiles,
34994 - target_root, trees, mtimedb, retval):
34995 - """
34996 - Misc. things to run at the end of a merge session.
34997 -
34998 - Update Info Files
34999 - Update Config Files
35000 - Update News Items
35001 - Commit mtimeDB
35002 - Display preserved libs warnings
35003 -
35004 - @param myaction: The action returned from parse_opts()
35005 - @type myaction: String
35006 - @param myopts: emerge options
35007 - @type myopts: dict
35008 - @param myfiles: emerge arguments
35009 - @type myfiles: list
35010 - @param target_root: The target EROOT for myaction
35011 - @type target_root: String
35012 - @param trees: A dictionary mapping each ROOT to it's package databases
35013 - @type trees: dict
35014 - @param mtimedb: The mtimeDB to store data needed across merge invocations
35015 - @type mtimedb: MtimeDB class instance
35016 - @param retval: Emerge's return value
35017 - @type retval: Int
35018 - """
35019 -
35020 - root_config = trees[target_root]["root_config"]
35021 - vardbapi = trees[target_root]['vartree'].dbapi
35022 - settings = vardbapi.settings
35023 - info_mtimes = mtimedb["info"]
35024 -
35025 - # Load the most current variables from ${ROOT}/etc/profile.env
35026 - settings.unlock()
35027 - settings.reload()
35028 - settings.regenerate()
35029 - settings.lock()
35030 -
35031 - config_protect = shlex_split(settings.get("CONFIG_PROTECT", ""))
35032 - infodirs = settings.get("INFOPATH","").split(":") + \
35033 - settings.get("INFODIR","").split(":")
35034 -
35035 - os.chdir("/")
35036 -
35037 - if retval == os.EX_OK:
35038 - exit_msg = " *** exiting successfully."
35039 - else:
35040 - exit_msg = " *** exiting unsuccessfully with status '%s'." % retval
35041 - emergelog("notitles" not in settings.features, exit_msg)
35042 -
35043 - _flush_elog_mod_echo()
35044 -
35045 - if not vardbapi._pkgs_changed:
35046 - # GLEP 42 says to display news *after* an emerge --pretend
35047 - if "--pretend" in myopts:
35048 - display_news_notification(root_config, myopts)
35049 - # If vdb state has not changed then there's nothing else to do.
35050 - return
35051 -
35052 - vdb_path = os.path.join(root_config.settings['EROOT'], portage.VDB_PATH)
35053 - portage.util.ensure_dirs(vdb_path)
35054 - vdb_lock = None
35055 - if os.access(vdb_path, os.W_OK) and not "--pretend" in myopts:
35056 - vardbapi.lock()
35057 - vdb_lock = True
35058 -
35059 - if vdb_lock:
35060 - try:
35061 - if "noinfo" not in settings.features:
35062 - chk_updated_info_files(target_root,
35063 - infodirs, info_mtimes, retval)
35064 - mtimedb.commit()
35065 - finally:
35066 - if vdb_lock:
35067 - vardbapi.unlock()
35068 -
35069 - display_preserved_libs(vardbapi, myopts)
35070 - chk_updated_cfg_files(settings['EROOT'], config_protect)
35071 -
35072 - display_news_notification(root_config, myopts)
35073 -
35074 - postemerge = os.path.join(settings["PORTAGE_CONFIGROOT"],
35075 - portage.USER_CONFIG_PATH, "bin", "post_emerge")
35076 - if os.access(postemerge, os.X_OK):
35077 - hook_retval = portage.process.spawn(
35078 - [postemerge], env=settings.environ())
35079 - if hook_retval != os.EX_OK:
35080 - writemsg_level(
35081 - " %s spawn failed of %s\n" % (bad("*"), postemerge,),
35082 - level=logging.ERROR, noiselevel=-1)
35083 -
35084 - clean_logs(settings)
35085 -
35086 - if "--quiet" not in myopts and \
35087 - myaction is None and "@world" in myfiles:
35088 - show_depclean_suggestion()
35089 -
35090 -def show_depclean_suggestion():
35091 - out = portage.output.EOutput()
35092 - msg = "After world updates, it is important to remove " + \
35093 - "obsolete packages with emerge --depclean. Refer " + \
35094 - "to `man emerge` for more information."
35095 - for line in textwrap.wrap(msg, 72):
35096 - out.ewarn(line)
35097 -
35098 def multiple_actions(action1, action2):
35099 sys.stderr.write("\n!!! Multiple actions requested... Please choose one only.\n")
35100 sys.stderr.write("!!! '%s' or '%s'\n\n" % (action1, action2))
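The one-character COWSAY_MOO fix near the top of this hunk doubles the trailing backslash because, in a regular (non-raw) string literal, a backslash immediately before a newline acts as a line continuation and swallows the line break, garbling the cow. A tiny demonstration of the difference:

broken = """line one ends with a backslash \
line two"""
fixed = """line one ends with a backslash \\
line two"""

print(repr(broken))  # the newline was swallowed by the line continuation
print(repr(fixed))   # a literal backslash and the newline both survive
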
35101 @@ -455,6 +108,16 @@ def insert_optional_args(args):
35102 return False
35103
35104 valid_integers = valid_integers()
35105 +
35106 + class valid_floats(object):
35107 + def __contains__(self, s):
35108 + try:
35109 + return float(s) >= 0
35110 + except (ValueError, OverflowError):
35111 + return False
35112 +
35113 + valid_floats = valid_floats()
35114 +
35115 y_or_n = ('y', 'n',)
35116
35117 new_args = []
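The valid_integers and valid_floats helpers above are tiny classes whose only job is to answer the in operator, which lets the default_arg_opts table mix literal tuples such as ('y', 'n') with open-ended numeric ranges. A trimmed-down illustration of that membership-object pattern (the table below is illustrative, not the real one):

class ValidFloats(object):
    """Behaves like a container holding every non-negative float literal."""

    def __contains__(self, s):
        try:
            return float(s) >= 0
        except (ValueError, OverflowError):
            return False

valid_floats = ValidFloats()
y_or_n = ('y', 'n')

# Literal tuples and open-ended numeric ranges can now share one lookup
# table and be tested uniformly with the in operator.
arg_choices = {'--load-average': valid_floats, '--keep-going': y_or_n}

for opt, value in (('--load-average', '3.5'),
                   ('--load-average', 'high'),
                   ('--keep-going', 'y')):
    print(opt, value, value in arg_choices[opt])
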
35118 @@ -468,6 +131,7 @@ def insert_optional_args(args):
35119 '--buildpkg' : y_or_n,
35120 '--complete-graph' : y_or_n,
35121 '--deep' : valid_integers,
35122 + '--depclean-lib-check' : y_or_n,
35123 '--deselect' : y_or_n,
35124 '--binpkg-respect-use' : y_or_n,
35125 '--fail-clean' : y_or_n,
35126 @@ -475,10 +139,12 @@ def insert_optional_args(args):
35127 '--getbinpkgonly' : y_or_n,
35128 '--jobs' : valid_integers,
35129 '--keep-going' : y_or_n,
35130 + '--load-average' : valid_floats,
35131 '--package-moves' : y_or_n,
35132 '--quiet' : y_or_n,
35133 '--quiet-build' : y_or_n,
35134 - '--rebuild-if-new-slot-abi': y_or_n,
35135 + '--quiet-fail' : y_or_n,
35136 + '--rebuild-if-new-slot': y_or_n,
35137 '--rebuild-if-new-rev' : y_or_n,
35138 '--rebuild-if-new-ver' : y_or_n,
35139 '--rebuild-if-unbuilt' : y_or_n,
35140 @@ -489,11 +155,10 @@ def insert_optional_args(args):
35141 "--use-ebuild-visibility": y_or_n,
35142 '--usepkg' : y_or_n,
35143 '--usepkgonly' : y_or_n,
35144 + '--verbose' : y_or_n,
35145 + '--verbose-slot-rebuilds': y_or_n,
35146 }
35147
35148 - if _ENABLE_DYN_LINK_MAP:
35149 - default_arg_opts['--depclean-lib-check'] = y_or_n
35150 -
35151 short_arg_opts = {
35152 'D' : valid_integers,
35153 'j' : valid_integers,
35154 @@ -509,6 +174,8 @@ def insert_optional_args(args):
35155 'k' : y_or_n,
35156 'K' : y_or_n,
35157 'q' : y_or_n,
35158 + 'v' : y_or_n,
35159 + 'w' : y_or_n,
35160 }
35161
35162 arg_stack = args[:]
35163 @@ -597,14 +264,17 @@ def _find_bad_atoms(atoms, less_strict=False):
35164 """
35165 bad_atoms = []
35166 for x in ' '.join(atoms).split():
35167 + atom = x
35168 + if "/" not in x.split(":")[0]:
35169 + x_cat = insert_category_into_atom(x, 'dummy-category')
35170 + if x_cat is not None:
35171 + atom = x_cat
35172 +
35173 bad_atom = False
35174 try:
35175 - atom = portage.dep.Atom(x, allow_wildcard=True, allow_repo=less_strict)
35176 + atom = Atom(atom, allow_wildcard=True, allow_repo=less_strict)
35177 except portage.exception.InvalidAtom:
35178 - try:
35179 - atom = portage.dep.Atom("*/"+x, allow_wildcard=True, allow_repo=less_strict)
35180 - except portage.exception.InvalidAtom:
35181 - bad_atom = True
35182 + bad_atom = True
35183
35184 if bad_atom or (atom.operator and not less_strict) or atom.blocker or atom.use:
35185 bad_atoms.append(x)
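Instead of parsing each argument twice (once as given and once with a '*/' category bolted on), _find_bad_atoms() above now prepends a dummy category up front via insert_category_into_atom() and parses the result exactly once. A rough standalone sketch of that control flow, with a deliberately loose regex standing in for portage.dep.Atom():

import re

# Very loose stand-in for portage.dep.Atom(); it only checks the basic
# "category/package" shape and is not Portage's real validation.
_ATOM_RE = re.compile(r'^(?:[<>]?=|[<>~])?[\w*][\w+.*-]*/[\w*][\w+.*-]*$')

class InvalidAtom(Exception):
    pass

def parse_atom(text):
    if not _ATOM_RE.match(text):
        raise InvalidAtom(text)
    return text

def find_bad_atoms(args):
    bad = []
    for x in ' '.join(args).split():
        atom = x
        # Normalize first: give category-less input a dummy category so it
        # is parsed exactly once, instead of retrying with a '*/' prefix.
        if "/" not in x.split(":")[0]:
            atom = "dummy-category/" + x
        try:
            parse_atom(atom)
        except InvalidAtom:
            bad.append(x)
    return bad

print(find_bad_atoms(["sys-apps/portage", "vim", "b@d!atom"]))
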
35186 @@ -632,31 +302,26 @@ def parse_opts(tmpcmdline, silent=False):
35187 "--ask": {
35188 "shortopt" : "-a",
35189 "help" : "prompt before performing any actions",
35190 - "type" : "choice",
35191 "choices" : true_y_or_n
35192 },
35193
35194 "--autounmask": {
35195 "help" : "automatically unmask packages",
35196 - "type" : "choice",
35197 "choices" : true_y_or_n
35198 },
35199
35200 "--autounmask-unrestricted-atoms": {
35201 "help" : "write autounmask changes with >= atoms if possible",
35202 - "type" : "choice",
35203 "choices" : true_y_or_n
35204 },
35205
35206 "--autounmask-keep-masks": {
35207 "help" : "don't add package.unmask entries",
35208 - "type" : "choice",
35209 "choices" : true_y_or_n
35210 },
35211
35212 "--autounmask-write": {
35213 "help" : "write changes made by --autounmask to disk",
35214 - "type" : "choice",
35215 "choices" : true_y_or_n
35216 },
35217
35218 @@ -665,6 +330,11 @@ def parse_opts(tmpcmdline, silent=False):
35219 "action":"store"
35220 },
35221
35222 + "--accept-restrict": {
35223 + "help":"temporarily override ACCEPT_RESTRICT",
35224 + "action":"store"
35225 + },
35226 +
35227 "--backtrack": {
35228
35229 "help" : "Specifies how many times to backtrack if dependency " + \
35230 @@ -676,7 +346,6 @@ def parse_opts(tmpcmdline, silent=False):
35231 "--buildpkg": {
35232 "shortopt" : "-b",
35233 "help" : "build binary packages",
35234 - "type" : "choice",
35235 "choices" : true_y_or_n
35236 },
35237
35238 @@ -694,25 +363,21 @@ def parse_opts(tmpcmdline, silent=False):
35239 },
35240 "--color": {
35241 "help":"enable or disable color output",
35242 - "type":"choice",
35243 "choices":("y", "n")
35244 },
35245
35246 "--complete-graph": {
35247 "help" : "completely account for all known dependencies",
35248 - "type" : "choice",
35249 "choices" : true_y_or_n
35250 },
35251
35252 "--complete-graph-if-new-use": {
35253 "help" : "trigger --complete-graph behavior if USE or IUSE will change for an installed package",
35254 - "type" : "choice",
35255 "choices" : y_or_n
35256 },
35257
35258 "--complete-graph-if-new-ver": {
35259 "help" : "trigger --complete-graph behavior if an installed package version will change (upgrade or downgrade)",
35260 - "type" : "choice",
35261 "choices" : y_or_n
35262 },
35263
35264 @@ -728,15 +393,18 @@ def parse_opts(tmpcmdline, silent=False):
35265 "action" : "store"
35266 },
35267
35268 + "--depclean-lib-check": {
35269 + "help" : "check for consumers of libraries before removing them",
35270 + "choices" : true_y_or_n
35271 + },
35272 +
35273 "--deselect": {
35274 "help" : "remove atoms/sets from the world file",
35275 - "type" : "choice",
35276 "choices" : true_y_or_n
35277 },
35278
35279 "--dynamic-deps": {
35280 "help": "substitute the dependencies of installed packages with the dependencies of unbuilt ebuilds",
35281 - "type": "choice",
35282 "choices": y_or_n
35283 },
35284
35285 @@ -750,17 +418,15 @@ def parse_opts(tmpcmdline, silent=False):
35286
35287 "--fail-clean": {
35288 "help" : "clean temp files after build failure",
35289 - "type" : "choice",
35290 "choices" : true_y_or_n
35291 },
35292
35293 - "--ignore-built-slot-abi-deps": {
35294 - "help": "Ignore the SLOT/ABI := operator parts of dependencies that have "
35295 + "--ignore-built-slot-operator-deps": {
35296 + "help": "Ignore the slot/sub-slot := operator parts of dependencies that have "
35297 "been recorded when packages where built. This option is intended "
35298 "only for debugging purposes, and it only affects built packages "
35299 - "that specify SLOT/ABI := operator dependencies using the "
35300 + "that specify slot/sub-slot := operator dependencies using the "
35301 "experimental \"4-slot-abi\" EAPI.",
35302 - "type": "choice",
35303 "choices": y_or_n
35304 },
35305
35306 @@ -776,7 +442,6 @@ def parse_opts(tmpcmdline, silent=False):
35307
35308 "--keep-going": {
35309 "help" : "continue as much as possible after an error",
35310 - "type" : "choice",
35311 "choices" : true_y_or_n
35312 },
35313
35314 @@ -791,18 +456,15 @@ def parse_opts(tmpcmdline, silent=False):
35315
35316 "--misspell-suggestions": {
35317 "help" : "enable package name misspell suggestions",
35318 - "type" : "choice",
35319 "choices" : ("y", "n")
35320 },
35321
35322 "--with-bdeps": {
35323 "help":"include unnecessary build time dependencies",
35324 - "type":"choice",
35325 "choices":("y", "n")
35326 },
35327 "--reinstall": {
35328 "help":"specify conditions to trigger package reinstallation",
35329 - "type":"choice",
35330 "choices":["changed-use"]
35331 },
35332
35333 @@ -817,21 +479,18 @@ def parse_opts(tmpcmdline, silent=False):
35334 "--binpkg-respect-use": {
35335 "help" : "discard binary packages if their use flags \
35336 don't match the current configuration",
35337 - "type" : "choice",
35338 "choices" : true_y_or_n
35339 },
35340
35341 "--getbinpkg": {
35342 "shortopt" : "-g",
35343 "help" : "fetch binary packages",
35344 - "type" : "choice",
35345 "choices" : true_y_or_n
35346 },
35347
35348 "--getbinpkgonly": {
35349 "shortopt" : "-G",
35350 "help" : "fetch binary packages only",
35351 - "type" : "choice",
35352 "choices" : true_y_or_n
35353 },
35354
35355 @@ -860,29 +519,40 @@ def parse_opts(tmpcmdline, silent=False):
35356
35357 "--package-moves": {
35358 "help" : "perform package moves when necessary",
35359 - "type" : "choice",
35360 "choices" : true_y_or_n
35361 },
35362
35363 + "--prefix": {
35364 + "help" : "specify the installation prefix",
35365 + "action" : "store"
35366 + },
35367 +
35368 + "--pkg-format": {
35369 + "help" : "format of result binary package",
35370 + "action" : "store",
35371 + },
35372 +
35373 "--quiet": {
35374 "shortopt" : "-q",
35375 "help" : "reduced or condensed output",
35376 - "type" : "choice",
35377 "choices" : true_y_or_n
35378 },
35379
35380 "--quiet-build": {
35381 "help" : "redirect build output to logs",
35382 - "type" : "choice",
35383 "choices" : true_y_or_n,
35384 },
35385
35386 - "--rebuild-if-new-slot-abi": {
35387 - "help" : ("Automatically rebuild or reinstall packages when SLOT/ABI := "
35388 + "--quiet-fail": {
35389 + "help" : "suppresses display of the build log on stdout",
35390 + "choices" : true_y_or_n,
35391 + },
35392 +
35393 + "--rebuild-if-new-slot": {
35394 + "help" : ("Automatically rebuild or reinstall packages when slot/sub-slot := "
35395 "operator dependencies can be satisfied by a newer slot, so that "
35396 "older packages slots will become eligible for removal by the "
35397 "--depclean action as soon as possible."),
35398 - "type" : "choice",
35399 "choices" : true_y_or_n
35400 },
35401
35402 @@ -891,7 +561,6 @@ def parse_opts(tmpcmdline, silent=False):
35403 "used at both build-time and run-time are built, " + \
35404 "if the dependency is not already installed with the " + \
35405 "same version and revision.",
35406 - "type" : "choice",
35407 "choices" : true_y_or_n
35408 },
35409
35410 @@ -900,24 +569,21 @@ def parse_opts(tmpcmdline, silent=False):
35411 "used at both build-time and run-time are built, " + \
35412 "if the dependency is not already installed with the " + \
35413 "same version. Revision numbers are ignored.",
35414 - "type" : "choice",
35415 "choices" : true_y_or_n
35416 },
35417
35418 "--rebuild-if-unbuilt": {
35419 "help" : "Rebuild packages when dependencies that are " + \
35420 "used at both build-time and run-time are built.",
35421 - "type" : "choice",
35422 "choices" : true_y_or_n
35423 },
35424
35425 "--rebuilt-binaries": {
35426 "help" : "replace installed packages with binary " + \
35427 "packages that have been rebuilt",
35428 - "type" : "choice",
35429 "choices" : true_y_or_n
35430 },
35431 -
35432 +
35433 "--rebuilt-binaries-timestamp": {
35434 "help" : "use only binaries that are newer than this " + \
35435 "timestamp for --rebuilt-binaries",
35436 @@ -931,26 +597,23 @@ def parse_opts(tmpcmdline, silent=False):
35437
35438 "--root-deps": {
35439 "help" : "modify interpretation of depedencies",
35440 - "type" : "choice",
35441 "choices" :("True", "rdeps")
35442 },
35443
35444 "--select": {
35445 + "shortopt" : "-w",
35446 "help" : "add specified packages to the world set " + \
35447 "(inverse of --oneshot)",
35448 - "type" : "choice",
35449 "choices" : true_y_or_n
35450 },
35451
35452 "--selective": {
35453 "help" : "identical to --noreplace",
35454 - "type" : "choice",
35455 "choices" : true_y_or_n
35456 },
35457
35458 "--use-ebuild-visibility": {
35459 "help" : "use unbuilt ebuild metadata for visibility checks on built packages",
35460 - "type" : "choice",
35461 "choices" : true_y_or_n
35462 },
35463
35464 @@ -964,42 +627,39 @@ def parse_opts(tmpcmdline, silent=False):
35465 "--usepkg": {
35466 "shortopt" : "-k",
35467 "help" : "use binary packages",
35468 - "type" : "choice",
35469 "choices" : true_y_or_n
35470 },
35471
35472 "--usepkgonly": {
35473 "shortopt" : "-K",
35474 "help" : "use only binary packages",
35475 - "type" : "choice",
35476 "choices" : true_y_or_n
35477 },
35478
35479 + "--verbose": {
35480 + "shortopt" : "-v",
35481 + "help" : "verbose output",
35482 + "choices" : true_y_or_n
35483 + },
35484 + "--verbose-slot-rebuilds": {
35485 + "help" : "verbose slot rebuild output",
35486 + "choices" : true_y_or_n
35487 + },
35488 }
35489
35490 - if _ENABLE_DYN_LINK_MAP:
35491 - argument_options["--depclean-lib-check"] = {
35492 - "help" : "check for consumers of libraries before removing them",
35493 - "type" : "choice",
35494 - "choices" : true_y_or_n
35495 - }
35496 -
35497 - from optparse import OptionParser
35498 - parser = OptionParser()
35499 - if parser.has_option("--help"):
35500 - parser.remove_option("--help")
35501 + parser = ArgumentParser(add_help=False)
35502
35503 for action_opt in actions:
35504 - parser.add_option("--" + action_opt, action="store_true",
35505 + parser.add_argument("--" + action_opt, action="store_true",
35506 dest=action_opt.replace("-", "_"), default=False)
35507 for myopt in options:
35508 - parser.add_option(myopt, action="store_true",
35509 + parser.add_argument(myopt, action="store_true",
35510 dest=myopt.lstrip("--").replace("-", "_"), default=False)
35511 for shortopt, longopt in shortmapping.items():
35512 - parser.add_option("-" + shortopt, action="store_true",
35513 + parser.add_argument("-" + shortopt, action="store_true",
35514 dest=longopt.lstrip("--").replace("-", "_"), default=False)
35515 for myalias, myopt in longopt_aliases.items():
35516 - parser.add_option(myalias, action="store_true",
35517 + parser.add_argument(myalias, action="store_true",
35518 dest=myopt.lstrip("--").replace("-", "_"), default=False)
35519
35520 for myopt, kwargs in argument_options.items():
35521 @@ -1007,12 +667,12 @@ def parse_opts(tmpcmdline, silent=False):
35522 args = [myopt]
35523 if shortopt is not None:
35524 args.append(shortopt)
35525 - parser.add_option(dest=myopt.lstrip("--").replace("-", "_"),
35526 + parser.add_argument(dest=myopt.lstrip("--").replace("-", "_"),
35527 *args, **kwargs)
35528
35529 tmpcmdline = insert_optional_args(tmpcmdline)
35530
35531 - myoptions, myargs = parser.parse_args(args=tmpcmdline)
35532 + myoptions, myargs = parser.parse_known_args(args=tmpcmdline)
35533
35534 if myoptions.ask in true_y:
35535 myoptions.ask = True
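Taken together, the hunks above migrate parse_opts() from optparse to portage.util._argparse.ArgumentParser: the 'type': 'choice' entries disappear because argparse infers choice handling from the choices keyword, add_option() becomes add_argument(), and parse_known_args() replaces parse_args() so that package arguments and other unrecognized tokens come back as leftovers instead of triggering an error. A compact sketch of that pattern using the standard-library argparse (the option names here are only illustrative):

import argparse

true_y_or_n = ("True", "y", "n")

parser = argparse.ArgumentParser(add_help=False)
# No explicit type="choice" is needed; choices= is enough for argparse.
parser.add_argument("--ask", "-a", dest="ask", choices=true_y_or_n)
parser.add_argument("--pretend", "-p", dest="pretend", action="store_true")

# parse_known_args() leaves package arguments (and anything else the parser
# does not recognize) in the second return value instead of erroring out.
myoptions, myargs = parser.parse_known_args(
    ["--ask", "y", "-p", "sys-apps/portage", "app-editors/vim"])
print(myoptions.ask, myoptions.pretend, myargs)
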
35536 @@ -1058,9 +718,8 @@ def parse_opts(tmpcmdline, silent=False):
35537 else:
35538 myoptions.complete_graph = None
35539
35540 - if _ENABLE_DYN_LINK_MAP:
35541 - if myoptions.depclean_lib_check in true_y:
35542 - myoptions.depclean_lib_check = True
35543 + if myoptions.depclean_lib_check in true_y:
35544 + myoptions.depclean_lib_check = True
35545
35546 if myoptions.exclude:
35547 bad_atoms = _find_bad_atoms(myoptions.exclude)
35548 @@ -1127,8 +786,11 @@ def parse_opts(tmpcmdline, silent=False):
35549 if myoptions.quiet_build in true_y:
35550 myoptions.quiet_build = 'y'
35551
35552 - if myoptions.rebuild_if_new_slot_abi in true_y:
35553 - myoptions.rebuild_if_new_slot_abi = 'y'
35554 + if myoptions.quiet_fail in true_y:
35555 + myoptions.quiet_fail = 'y'
35556 +
35557 + if myoptions.rebuild_if_new_slot in true_y:
35558 + myoptions.rebuild_if_new_slot = 'y'
35559
35560 if myoptions.rebuild_if_new_ver in true_y:
35561 myoptions.rebuild_if_new_ver = True
35562 @@ -1215,6 +877,9 @@ def parse_opts(tmpcmdline, silent=False):
35563
35564 myoptions.jobs = jobs
35565
35566 + if myoptions.load_average == "True":
35567 + myoptions.load_average = None
35568 +
35569 if myoptions.load_average:
35570 try:
35571 load_average = float(myoptions.load_average)
35572 @@ -1228,7 +893,7 @@ def parse_opts(tmpcmdline, silent=False):
35573 (myoptions.load_average,))
35574
35575 myoptions.load_average = load_average
35576 -
35577 +
35578 if myoptions.rebuilt_binaries_timestamp:
35579 try:
35580 rebuilt_binaries_timestamp = int(myoptions.rebuilt_binaries_timestamp)
35581 @@ -1259,6 +924,11 @@ def parse_opts(tmpcmdline, silent=False):
35582 else:
35583 myoptions.usepkgonly = None
35584
35585 + if myoptions.verbose in true_y:
35586 + myoptions.verbose = True
35587 + else:
35588 + myoptions.verbose = None
35589 +
35590 for myopt in options:
35591 v = getattr(myoptions, myopt.lstrip("--").replace("-", "_"))
35592 if v:
35593 @@ -1283,309 +953,10 @@ def parse_opts(tmpcmdline, silent=False):
35594 if myaction is None and myoptions.deselect is True:
35595 myaction = 'deselect'
35596
35597 - if myargs and isinstance(myargs[0], bytes):
35598 - for i in range(len(myargs)):
35599 - myargs[i] = portage._unicode_decode(myargs[i])
35600 -
35601 myfiles += myargs
35602
35603 return myaction, myopts, myfiles
35604
35605 -# Warn about features that may confuse users and
35606 -# lead them to report invalid bugs.
35607 -_emerge_features_warn = frozenset(['keeptemp', 'keepwork'])
35608 -
35609 -def validate_ebuild_environment(trees):
35610 - features_warn = set()
35611 - for myroot in trees:
35612 - settings = trees[myroot]["vartree"].settings
35613 - settings.validate()
35614 - features_warn.update(
35615 - _emerge_features_warn.intersection(settings.features))
35616 -
35617 - if features_warn:
35618 - msg = "WARNING: The FEATURES variable contains one " + \
35619 - "or more values that should be disabled under " + \
35620 - "normal circumstances: %s" % " ".join(features_warn)
35621 - out = portage.output.EOutput()
35622 - for line in textwrap.wrap(msg, 65):
35623 - out.ewarn(line)
35624 -
35625 -def apply_priorities(settings):
35626 - ionice(settings)
35627 - nice(settings)
35628 -
35629 -def nice(settings):
35630 - try:
35631 - os.nice(int(settings.get("PORTAGE_NICENESS", "0")))
35632 - except (OSError, ValueError) as e:
35633 - out = portage.output.EOutput()
35634 - out.eerror("Failed to change nice value to '%s'" % \
35635 - settings["PORTAGE_NICENESS"])
35636 - out.eerror("%s\n" % str(e))
35637 -
35638 -def ionice(settings):
35639 -
35640 - ionice_cmd = settings.get("PORTAGE_IONICE_COMMAND")
35641 - if ionice_cmd:
35642 - ionice_cmd = portage.util.shlex_split(ionice_cmd)
35643 - if not ionice_cmd:
35644 - return
35645 -
35646 - variables = {"PID" : str(os.getpid())}
35647 - cmd = [varexpand(x, mydict=variables) for x in ionice_cmd]
35648 -
35649 - try:
35650 - rval = portage.process.spawn(cmd, env=os.environ)
35651 - except portage.exception.CommandNotFound:
35652 - # The OS kernel probably doesn't support ionice,
35653 - # so return silently.
35654 - return
35655 -
35656 - if rval != os.EX_OK:
35657 - out = portage.output.EOutput()
35658 - out.eerror("PORTAGE_IONICE_COMMAND returned %d" % (rval,))
35659 - out.eerror("See the make.conf(5) man page for PORTAGE_IONICE_COMMAND usage instructions.")
35660 -
35661 -def clean_logs(settings):
35662 -
35663 - if "clean-logs" not in settings.features:
35664 - return
35665 -
35666 - logdir = settings.get("PORT_LOGDIR")
35667 - if logdir is None or not os.path.isdir(logdir):
35668 - return
35669 -
35670 - options = {
35671 - 'eerror': portage.output.EOutput().eerror,
35672 - # uncomment next line to output a succeeded message
35673 - #'einfo': portage.output.EOutput().einfo
35674 - }
35675 - cleanlogs = CleanLogs()
35676 - cleanlogs.clean(settings=settings, options=options)
35677 -
35678 -def setconfig_fallback(root_config):
35679 - setconfig = root_config.setconfig
35680 - setconfig._create_default_config()
35681 - setconfig._parse(update=True)
35682 - root_config.sets = setconfig.getSets()
35683 -
35684 -def get_missing_sets(root_config):
35685 - # emerge requires existence of "world", "selected", and "system"
35686 - missing_sets = []
35687 -
35688 - for s in ("selected", "system", "world",):
35689 - if s not in root_config.sets:
35690 - missing_sets.append(s)
35691 -
35692 - return missing_sets
35693 -
35694 -def missing_sets_warning(root_config, missing_sets):
35695 - if len(missing_sets) > 2:
35696 - missing_sets_str = ", ".join('"%s"' % s for s in missing_sets[:-1])
35697 - missing_sets_str += ', and "%s"' % missing_sets[-1]
35698 - elif len(missing_sets) == 2:
35699 - missing_sets_str = '"%s" and "%s"' % tuple(missing_sets)
35700 - else:
35701 - missing_sets_str = '"%s"' % missing_sets[-1]
35702 - msg = ["emerge: incomplete set configuration, " + \
35703 - "missing set(s): %s" % missing_sets_str]
35704 - if root_config.sets:
35705 - msg.append(" sets defined: %s" % ", ".join(root_config.sets))
35706 - global_config_path = portage.const.GLOBAL_CONFIG_PATH
35707 - if root_config.settings['EPREFIX']:
35708 - global_config_path = os.path.join(root_config.settings['EPREFIX'],
35709 - portage.const.GLOBAL_CONFIG_PATH.lstrip(os.sep))
35710 - msg.append(" This usually means that '%s'" % \
35711 - (os.path.join(global_config_path, "sets/portage.conf"),))
35712 - msg.append(" is missing or corrupt.")
35713 - msg.append(" Falling back to default world and system set configuration!!!")
35714 - for line in msg:
35715 - writemsg_level(line + "\n", level=logging.ERROR, noiselevel=-1)
35716 -
35717 -def ensure_required_sets(trees):
35718 - warning_shown = False
35719 - for root_trees in trees.values():
35720 - missing_sets = get_missing_sets(root_trees["root_config"])
35721 - if missing_sets and not warning_shown:
35722 - warning_shown = True
35723 - missing_sets_warning(root_trees["root_config"], missing_sets)
35724 - if missing_sets:
35725 - setconfig_fallback(root_trees["root_config"])
35726 -
35727 -def expand_set_arguments(myfiles, myaction, root_config):
35728 - retval = os.EX_OK
35729 - setconfig = root_config.setconfig
35730 -
35731 - sets = setconfig.getSets()
35732 -
35733 - # In order to know exactly which atoms/sets should be added to the
35734 - # world file, the depgraph performs set expansion later. It will get
35735 - # confused about where the atoms came from if it's not allowed to
35736 - # expand them itself.
35737 - do_not_expand = (None, )
35738 - newargs = []
35739 - for a in myfiles:
35740 - if a in ("system", "world"):
35741 - newargs.append(SETPREFIX+a)
35742 - else:
35743 - newargs.append(a)
35744 - myfiles = newargs
35745 - del newargs
35746 - newargs = []
35747 -
35748 - # separators for set arguments
35749 - ARG_START = "{"
35750 - ARG_END = "}"
35751 -
35752 - for i in range(0, len(myfiles)):
35753 - if myfiles[i].startswith(SETPREFIX):
35754 - start = 0
35755 - end = 0
35756 - x = myfiles[i][len(SETPREFIX):]
35757 - newset = ""
35758 - while x:
35759 - start = x.find(ARG_START)
35760 - end = x.find(ARG_END)
35761 - if start > 0 and start < end:
35762 - namepart = x[:start]
35763 - argpart = x[start+1:end]
35764 -
35765 - # TODO: implement proper quoting
35766 - args = argpart.split(",")
35767 - options = {}
35768 - for a in args:
35769 - if "=" in a:
35770 - k, v = a.split("=", 1)
35771 - options[k] = v
35772 - else:
35773 - options[a] = "True"
35774 - setconfig.update(namepart, options)
35775 - newset += (x[:start-len(namepart)]+namepart)
35776 - x = x[end+len(ARG_END):]
35777 - else:
35778 - newset += x
35779 - x = ""
35780 - myfiles[i] = SETPREFIX+newset
35781 -
35782 - sets = setconfig.getSets()
35783 -
35784 - # display errors that occurred while loading the SetConfig instance
35785 - for e in setconfig.errors:
35786 - print(colorize("BAD", "Error during set creation: %s" % e))
35787 -
35788 - unmerge_actions = ("unmerge", "prune", "clean", "depclean")
35789 -
35790 - for a in myfiles:
35791 - if a.startswith(SETPREFIX):
35792 - s = a[len(SETPREFIX):]
35793 - if s not in sets:
35794 - display_missing_pkg_set(root_config, s)
35795 - return (None, 1)
35796 - setconfig.active.append(s)
35797 - try:
35798 - set_atoms = setconfig.getSetAtoms(s)
35799 - except portage.exception.PackageSetNotFound as e:
35800 - writemsg_level(("emerge: the given set '%s' " + \
35801 - "contains a non-existent set named '%s'.\n") % \
35802 - (s, e), level=logging.ERROR, noiselevel=-1)
35803 - if s in ('world', 'selected') and \
35804 - SETPREFIX + e.value in sets['selected']:
35805 - writemsg_level(("Use `emerge --deselect %s%s` to "
35806 - "remove this set from world_sets.\n") %
35807 - (SETPREFIX, e,), level=logging.ERROR,
35808 - noiselevel=-1)
35809 - return (None, 1)
35810 - if myaction in unmerge_actions and \
35811 - not sets[s].supportsOperation("unmerge"):
35812 - sys.stderr.write("emerge: the given set '%s' does " % s + \
35813 - "not support unmerge operations\n")
35814 - retval = 1
35815 - elif not set_atoms:
35816 - print("emerge: '%s' is an empty set" % s)
35817 - elif myaction not in do_not_expand:
35818 - newargs.extend(set_atoms)
35819 - else:
35820 - newargs.append(SETPREFIX+s)
35821 - for e in sets[s].errors:
35822 - print(e)
35823 - else:
35824 - newargs.append(a)
35825 - return (newargs, retval)
35826 -
35827 -def repo_name_check(trees):
35828 - missing_repo_names = set()
35829 - for root_trees in trees.values():
35830 - porttree = root_trees.get("porttree")
35831 - if porttree:
35832 - portdb = porttree.dbapi
35833 - missing_repo_names.update(portdb.getMissingRepoNames())
35834 - if portdb.porttree_root in missing_repo_names and \
35835 - not os.path.exists(os.path.join(
35836 - portdb.porttree_root, "profiles")):
35837 - # This is normal if $PORTDIR happens to be empty,
35838 - # so don't warn about it.
35839 - missing_repo_names.remove(portdb.porttree_root)
35840 -
35841 - if missing_repo_names:
35842 - msg = []
35843 - msg.append("WARNING: One or more repositories " + \
35844 - "have missing repo_name entries:")
35845 - msg.append("")
35846 - for p in missing_repo_names:
35847 - msg.append("\t%s/profiles/repo_name" % (p,))
35848 - msg.append("")
35849 - msg.extend(textwrap.wrap("NOTE: Each repo_name entry " + \
35850 - "should be a plain text file containing a unique " + \
35851 - "name for the repository on the first line.", 70))
35852 - msg.append("\n")
35853 - writemsg_level("".join("%s\n" % l for l in msg),
35854 - level=logging.WARNING, noiselevel=-1)
35855 -
35856 - return bool(missing_repo_names)
35857 -
35858 -def repo_name_duplicate_check(trees):
35859 - ignored_repos = {}
35860 - for root, root_trees in trees.items():
35861 - if 'porttree' in root_trees:
35862 - portdb = root_trees['porttree'].dbapi
35863 - if portdb.settings.get('PORTAGE_REPO_DUPLICATE_WARN') != '0':
35864 - for repo_name, paths in portdb.getIgnoredRepos():
35865 - k = (root, repo_name, portdb.getRepositoryPath(repo_name))
35866 - ignored_repos.setdefault(k, []).extend(paths)
35867 -
35868 - if ignored_repos:
35869 - msg = []
35870 - msg.append('WARNING: One or more repositories ' + \
35871 - 'have been ignored due to duplicate')
35872 - msg.append(' profiles/repo_name entries:')
35873 - msg.append('')
35874 - for k in sorted(ignored_repos):
35875 - msg.append(' %s overrides' % ", ".join(k))
35876 - for path in ignored_repos[k]:
35877 - msg.append(' %s' % (path,))
35878 - msg.append('')
35879 - msg.extend(' ' + x for x in textwrap.wrap(
35880 - "All profiles/repo_name entries must be unique in order " + \
35881 - "to avoid having duplicates ignored. " + \
35882 - "Set PORTAGE_REPO_DUPLICATE_WARN=\"0\" in " + \
35883 - "/etc/make.conf if you would like to disable this warning."))
35884 - msg.append("\n")
35885 - writemsg_level(''.join('%s\n' % l for l in msg),
35886 - level=logging.WARNING, noiselevel=-1)
35887 -
35888 - return bool(ignored_repos)
35889 -
35890 -def config_protect_check(trees):
35891 - for root, root_trees in trees.items():
35892 - settings = root_trees["root_config"].settings
35893 - if not settings.get("CONFIG_PROTECT"):
35894 - msg = "!!! CONFIG_PROTECT is empty"
35895 - if settings["ROOT"] != "/":
35896 - msg += " for '%s'" % root
35897 - msg += "\n"
35898 - writemsg_level(msg, level=logging.WARN, noiselevel=-1)
35899 -
35900 def profile_check(trees, myaction):
35901 if myaction in ("help", "info", "search", "sync", "version"):
35902 return os.EX_OK
35903 @@ -1603,16 +974,6 @@ def profile_check(trees, myaction):
35904 return 1
35905 return os.EX_OK
35906
35907 -def check_procfs():
35908 - procfs_path = '/proc'
35909 - if platform.system() not in ("Linux",) or \
35910 - os.path.ismount(procfs_path):
35911 - return os.EX_OK
35912 - msg = "It seems that %s is not mounted. You have been warned." % procfs_path
35913 - writemsg_level("".join("!!! %s\n" % l for l in textwrap.wrap(msg, 70)),
35914 - level=logging.ERROR, noiselevel=-1)
35915 - return 1
35916 -
35917 def emerge_main(args=None):
35918 """
35919 @param args: command arguments (default: sys.argv[1:])
35920 @@ -1621,11 +982,12 @@ def emerge_main(args=None):
35921 if args is None:
35922 args = sys.argv[1:]
35923
35924 - portage._disable_legacy_globals()
35925 - portage.dep._internal_warnings = True
35926 + args = portage._decode_argv(args)
35927 +
35928 # Disable color until we're sure that it should be enabled (after
35929 # EMERGE_DEFAULT_OPTS has been parsed).
35930 portage.output.havecolor = 0
35931 +
35932 # This first pass is just for options that need to be known as early as
35933 # possible, such as --config-root. They will be parsed again later,
35934 # together with EMERGE_DEFAULT_OPTS (which may vary depending on the
35935 @@ -1637,428 +999,45 @@ def emerge_main(args=None):
35936 os.environ["PORTAGE_CONFIGROOT"] = myopts["--config-root"]
35937 if "--root" in myopts:
35938 os.environ["ROOT"] = myopts["--root"]
35939 + if "--prefix" in myopts:
35940 + os.environ["EPREFIX"] = myopts["--prefix"]
35941 if "--accept-properties" in myopts:
35942 os.environ["ACCEPT_PROPERTIES"] = myopts["--accept-properties"]
35943 + if "--accept-restrict" in myopts:
35944 + os.environ["ACCEPT_RESTRICT"] = myopts["--accept-restrict"]
35945 +
35946 + # optimize --help (no need to load config / EMERGE_DEFAULT_OPTS)
35947 + if myaction == "help":
35948 + emerge_help()
35949 + return os.EX_OK
35950 + elif myaction == "moo":
35951 + print(COWSAY_MOO % platform.system())
35952 + return os.EX_OK
35953
35954 # Portage needs to ensure a sane umask for the files it creates.
35955 os.umask(0o22)
35956 - settings, trees, mtimedb = load_emerge_config()
35957 - portdb = trees[settings['EROOT']]['porttree'].dbapi
35958 - rval = profile_check(trees, myaction)
35959 + if myaction == "sync":
35960 + portage._sync_mode = True
35961 + emerge_config = load_emerge_config(
35962 + action=myaction, args=myfiles, opts=myopts)
35963 + rval = profile_check(emerge_config.trees, emerge_config.action)
35964 if rval != os.EX_OK:
35965 return rval
35966
35967 tmpcmdline = []
35968 if "--ignore-default-opts" not in myopts:
35969 - tmpcmdline.extend(settings["EMERGE_DEFAULT_OPTS"].split())
35970 + tmpcmdline.extend(portage.util.shlex_split(
35971 + emerge_config.target_config.settings.get(
35972 + "EMERGE_DEFAULT_OPTS", "")))
35973 tmpcmdline.extend(args)
35974 - myaction, myopts, myfiles = parse_opts(tmpcmdline)
35975 -
35976 - # skip global updates prior to sync, since it's called after sync
35977 - if myaction not in ('help', 'info', 'sync', 'version') and \
35978 - myopts.get('--package-moves') != 'n' and \
35979 - _global_updates(trees, mtimedb["updates"], quiet=("--quiet" in myopts)):
35980 - mtimedb.commit()
35981 - # Reload the whole config from scratch.
35982 - settings, trees, mtimedb = load_emerge_config(trees=trees)
35983 - portdb = trees[settings['EROOT']]['porttree'].dbapi
35984 -
35985 - xterm_titles = "notitles" not in settings.features
35986 - if xterm_titles:
35987 - xtermTitle("emerge")
35988 -
35989 - if "--digest" in myopts:
35990 - os.environ["FEATURES"] = os.environ.get("FEATURES","") + " digest"
35991 - # Reload the whole config from scratch so that the portdbapi internal
35992 - # config is updated with new FEATURES.
35993 - settings, trees, mtimedb = load_emerge_config(trees=trees)
35994 - portdb = trees[settings['EROOT']]['porttree'].dbapi
35995 -
35996 - # NOTE: adjust_configs() can map options to FEATURES, so any relevant
35997 - # options adjustments should be made prior to calling adjust_configs().
35998 - if "--buildpkgonly" in myopts:
35999 - myopts["--buildpkg"] = True
36000 -
36001 - adjust_configs(myopts, trees)
36002 - apply_priorities(settings)
36003 -
36004 - if myaction == 'version':
36005 - writemsg_stdout(getportageversion(
36006 - settings["PORTDIR"], None,
36007 - settings.profile_path, settings["CHOST"],
36008 - trees[settings['EROOT']]['vartree'].dbapi) + '\n', noiselevel=-1)
36009 - return 0
36010 - elif myaction == 'help':
36011 - _emerge.help.help()
36012 - return 0
36013 -
36014 - spinner = stdout_spinner()
36015 - if "candy" in settings.features:
36016 - spinner.update = spinner.update_scroll
36017 -
36018 - if "--quiet" not in myopts:
36019 - portage.deprecated_profile_check(settings=settings)
36020 - if portage.const._ENABLE_REPO_NAME_WARN:
36021 - # Bug #248603 - Disable warnings about missing
36022 - # repo_name entries for stable branch.
36023 - repo_name_check(trees)
36024 - repo_name_duplicate_check(trees)
36025 - config_protect_check(trees)
36026 - check_procfs()
36027 -
36028 - if "getbinpkg" in settings.features:
36029 - myopts["--getbinpkg"] = True
36030 -
36031 - if "--getbinpkgonly" in myopts:
36032 - myopts["--getbinpkg"] = True
36033 -
36034 - if "--getbinpkgonly" in myopts:
36035 - myopts["--usepkgonly"] = True
36036 -
36037 - if "--getbinpkg" in myopts:
36038 - myopts["--usepkg"] = True
36039 -
36040 - if "--usepkgonly" in myopts:
36041 - myopts["--usepkg"] = True
36042 -
36043 - if "--buildpkgonly" in myopts:
36044 - # --buildpkgonly will not merge anything, so
36045 - # it cancels all binary package options.
36046 - for opt in ("--getbinpkg", "--getbinpkgonly",
36047 - "--usepkg", "--usepkgonly"):
36048 - myopts.pop(opt, None)
36049 -
36050 - for mytrees in trees.values():
36051 - mydb = mytrees["porttree"].dbapi
36052 - # Freeze the portdbapi for performance (memoize all xmatch results).
36053 - mydb.freeze()
36054 -
36055 - if myaction in ('search', None) and \
36056 - "--usepkg" in myopts:
36057 - # Populate the bintree with current --getbinpkg setting.
36058 - # This needs to happen before expand_set_arguments(), in case
36059 - # any sets use the bintree.
36060 - mytrees["bintree"].populate(
36061 - getbinpkgs="--getbinpkg" in myopts)
36062 -
36063 - del mytrees, mydb
36064 -
36065 - if "moo" in myfiles:
36066 - print(COWSAY_MOO % platform.system())
36067 - msg = ("The above `emerge moo` display is deprecated. "
36068 - "Please use `emerge --moo` instead.")
36069 - for line in textwrap.wrap(msg, 50):
36070 - print(" %s %s" % (colorize("WARN", "*"), line))
36071 -
36072 - for x in myfiles:
36073 - ext = os.path.splitext(x)[1]
36074 - if (ext == ".ebuild" or ext == ".tbz2") and os.path.exists(os.path.abspath(x)):
36075 - print(colorize("BAD", "\n*** emerging by path is broken and may not always work!!!\n"))
36076 - break
36077 -
36078 - root_config = trees[settings['EROOT']]['root_config']
36079 - if myaction == "moo":
36080 - print(COWSAY_MOO % platform.system())
36081 - return os.EX_OK
36082 - elif myaction == "list-sets":
36083 - writemsg_stdout("".join("%s\n" % s for s in sorted(root_config.sets)))
36084 - return os.EX_OK
36085 - elif myaction == "check-news":
36086 - news_counts = count_unread_news(
36087 - root_config.trees["porttree"].dbapi,
36088 - root_config.trees["vartree"].dbapi)
36089 - if any(news_counts.values()):
36090 - display_news_notifications(news_counts)
36091 - elif "--quiet" not in myopts:
36092 - print("", colorize("GOOD", "*"), "No news items were found.")
36093 - return os.EX_OK
36094 -
36095 - ensure_required_sets(trees)
36096 -
36097 - # only expand sets for actions taking package arguments
36098 - oldargs = myfiles[:]
36099 - if myaction in ("clean", "config", "depclean", "info", "prune", "unmerge", None):
36100 - myfiles, retval = expand_set_arguments(myfiles, myaction, root_config)
36101 - if retval != os.EX_OK:
36102 - return retval
36103 -
36104 - # Need to handle empty sets specially, otherwise emerge will react
36105 - # with the help message for empty argument lists
36106 - if oldargs and not myfiles:
36107 - print("emerge: no targets left after set expansion")
36108 - return 0
36109 -
36110 - if ("--tree" in myopts) and ("--columns" in myopts):
36111 - print("emerge: can't specify both of \"--tree\" and \"--columns\".")
36112 - return 1
36113 -
36114 - if '--emptytree' in myopts and '--noreplace' in myopts:
36115 - writemsg_level("emerge: can't specify both of " + \
36116 - "\"--emptytree\" and \"--noreplace\".\n",
36117 - level=logging.ERROR, noiselevel=-1)
36118 - return 1
36119 -
36120 - if ("--quiet" in myopts):
36121 - spinner.update = spinner.update_quiet
36122 - portage.util.noiselimit = -1
36123 -
36124 - if "--fetch-all-uri" in myopts:
36125 - myopts["--fetchonly"] = True
36126 -
36127 - if "--skipfirst" in myopts and "--resume" not in myopts:
36128 - myopts["--resume"] = True
36129 + emerge_config.action, emerge_config.opts, emerge_config.args = \
36130 + parse_opts(tmpcmdline)
36131
36132 - # Allow -p to remove --ask
36133 - if "--pretend" in myopts:
36134 - myopts.pop("--ask", None)
36135 -
36136 - # forbid --ask when not in a terminal
36137 - # note: this breaks `emerge --ask | tee logfile`, but that doesn't work anyway.
36138 - if ("--ask" in myopts) and (not sys.stdin.isatty()):
36139 - portage.writemsg("!!! \"--ask\" should only be used in a terminal. Exiting.\n",
36140 - noiselevel=-1)
36141 - return 1
36142 -
36143 - if settings.get("PORTAGE_DEBUG", "") == "1":
36144 - spinner.update = spinner.update_quiet
36145 - portage.util.noiselimit = 0
36146 - if "python-trace" in settings.features:
36147 - import portage.debug as portage_debug
36148 - portage_debug.set_trace(True)
36149 -
36150 - if not ("--quiet" in myopts):
36151 - if '--nospinner' in myopts or \
36152 - settings.get('TERM') == 'dumb' or \
36153 - not sys.stdout.isatty():
36154 - spinner.update = spinner.update_basic
36155 -
36156 - if "--debug" in myopts:
36157 - print("myaction", myaction)
36158 - print("myopts", myopts)
36159 -
36160 - if not myaction and not myfiles and "--resume" not in myopts:
36161 - _emerge.help.help()
36162 - return 1
36163 -
36164 - pretend = "--pretend" in myopts
36165 - fetchonly = "--fetchonly" in myopts or "--fetch-all-uri" in myopts
36166 - buildpkgonly = "--buildpkgonly" in myopts
36167 -
36168 - # check if root user is the current user for the actions where emerge needs this
36169 - if portage.secpass < 2:
36170 - # We've already allowed "--version" and "--help" above.
36171 - if "--pretend" not in myopts and myaction not in ("search","info"):
36172 - need_superuser = myaction in ('clean', 'depclean', 'deselect',
36173 - 'prune', 'unmerge') or not \
36174 - (fetchonly or \
36175 - (buildpkgonly and secpass >= 1) or \
36176 - myaction in ("metadata", "regen", "sync"))
36177 - if portage.secpass < 1 or \
36178 - need_superuser:
36179 - if need_superuser:
36180 - access_desc = "superuser"
36181 - else:
36182 - access_desc = "portage group"
36183 - # Always show portage_group_warning() when only portage group
36184 - # access is required but the user is not in the portage group.
36185 - from portage.data import portage_group_warning
36186 - if "--ask" in myopts:
36187 - writemsg_stdout("This action requires %s access...\n" % \
36188 - (access_desc,), noiselevel=-1)
36189 - if portage.secpass < 1 and not need_superuser:
36190 - portage_group_warning()
36191 - if userquery("Would you like to add --pretend to options?",
36192 - "--ask-enter-invalid" in myopts) == "No":
36193 - return 128 + signal.SIGINT
36194 - myopts["--pretend"] = True
36195 - del myopts["--ask"]
36196 - else:
36197 - sys.stderr.write(("emerge: %s access is required\n") \
36198 - % access_desc)
36199 - if portage.secpass < 1 and not need_superuser:
36200 - portage_group_warning()
36201 - return 1
36202 -
36203 - # Disable emergelog for everything except build or unmerge operations.
36204 - # This helps minimize parallel emerge.log entries that can confuse log
36205 - # parsers like genlop.
36206 - disable_emergelog = False
36207 - for x in ("--pretend", "--fetchonly", "--fetch-all-uri"):
36208 - if x in myopts:
36209 - disable_emergelog = True
36210 - break
36211 - if disable_emergelog:
36212 - pass
36213 - elif myaction in ("search", "info"):
36214 - disable_emergelog = True
36215 - elif portage.data.secpass < 1:
36216 - disable_emergelog = True
36217 -
36218 - _emerge.emergelog._disable = disable_emergelog
36219 -
36220 - if not disable_emergelog:
36221 - if 'EMERGE_LOG_DIR' in settings:
36222 - try:
36223 - # At least the parent needs to exist for the lock file.
36224 - portage.util.ensure_dirs(settings['EMERGE_LOG_DIR'])
36225 - except portage.exception.PortageException as e:
36226 - writemsg_level("!!! Error creating directory for " + \
36227 - "EMERGE_LOG_DIR='%s':\n!!! %s\n" % \
36228 - (settings['EMERGE_LOG_DIR'], e),
36229 - noiselevel=-1, level=logging.ERROR)
36230 - portage.util.ensure_dirs(_emerge.emergelog._emerge_log_dir)
36231 - else:
36232 - _emerge.emergelog._emerge_log_dir = settings["EMERGE_LOG_DIR"]
36233 - else:
36234 - _emerge.emergelog._emerge_log_dir = os.path.join(os.sep,
36235 - settings["EPREFIX"].lstrip(os.sep), "var", "log")
36236 - portage.util.ensure_dirs(_emerge.emergelog._emerge_log_dir)
36237 -
36238 - if not "--pretend" in myopts:
36239 - emergelog(xterm_titles, "Started emerge on: "+\
36240 - _unicode_decode(
36241 - time.strftime("%b %d, %Y %H:%M:%S", time.localtime()),
36242 - encoding=_encodings['content'], errors='replace'))
36243 - myelogstr=""
36244 - if myopts:
36245 - opt_list = []
36246 - for opt, arg in myopts.items():
36247 - if arg is True:
36248 - opt_list.append(opt)
36249 - elif isinstance(arg, list):
36250 - # arguments like --exclude that use 'append' action
36251 - for x in arg:
36252 - opt_list.append("%s=%s" % (opt, x))
36253 - else:
36254 - opt_list.append("%s=%s" % (opt, arg))
36255 - myelogstr=" ".join(opt_list)
36256 - if myaction:
36257 - myelogstr += " --" + myaction
36258 - if myfiles:
36259 - myelogstr += " " + " ".join(oldargs)
36260 - emergelog(xterm_titles, " *** emerge " + myelogstr)
36261 - del oldargs
36262 -
36263 - def emergeexitsig(signum, frame):
36264 - signal.signal(signal.SIGINT, signal.SIG_IGN)
36265 - signal.signal(signal.SIGTERM, signal.SIG_IGN)
36266 - portage.util.writemsg("\n\nExiting on signal %(signal)s\n" % {"signal":signum})
36267 - sys.exit(128 + signum)
36268 - signal.signal(signal.SIGINT, emergeexitsig)
36269 - signal.signal(signal.SIGTERM, emergeexitsig)
36270 -
36271 - def emergeexit():
36272 - """This gets out final log message in before we quit."""
36273 - if "--pretend" not in myopts:
36274 - emergelog(xterm_titles, " *** terminating.")
36275 - if xterm_titles:
36276 - xtermTitleReset()
36277 - portage.atexit_register(emergeexit)
36278 -
36279 - if myaction in ("config", "metadata", "regen", "sync"):
36280 - if "--pretend" in myopts:
36281 - sys.stderr.write(("emerge: The '%s' action does " + \
36282 - "not support '--pretend'.\n") % myaction)
36283 - return 1
36284 -
36285 - if "sync" == myaction:
36286 - return action_sync(settings, trees, mtimedb, myopts, myaction)
36287 - elif "metadata" == myaction:
36288 - action_metadata(settings, portdb, myopts)
36289 - elif myaction=="regen":
36290 - validate_ebuild_environment(trees)
36291 - return action_regen(settings, portdb, myopts.get("--jobs"),
36292 - myopts.get("--load-average"))
36293 - # HELP action
36294 - elif "config"==myaction:
36295 - validate_ebuild_environment(trees)
36296 - action_config(settings, trees, myopts, myfiles)
36297 -
36298 - # SEARCH action
36299 - elif "search"==myaction:
36300 - validate_ebuild_environment(trees)
36301 - action_search(trees[settings['EROOT']]['root_config'],
36302 - myopts, myfiles, spinner)
36303 -
36304 - elif myaction in ('clean', 'depclean', 'deselect', 'prune', 'unmerge'):
36305 - validate_ebuild_environment(trees)
36306 - rval = action_uninstall(settings, trees, mtimedb["ldpath"],
36307 - myopts, myaction, myfiles, spinner)
36308 - if not (myaction == 'deselect' or buildpkgonly or fetchonly or pretend):
36309 - post_emerge(myaction, myopts, myfiles, settings['EROOT'],
36310 - trees, mtimedb, rval)
36311 - return rval
36312 -
36313 - elif myaction == 'info':
36314 -
36315 - # Ensure atoms are valid before calling unmerge().
36316 - vardb = trees[settings['EROOT']]['vartree'].dbapi
36317 - portdb = trees[settings['EROOT']]['porttree'].dbapi
36318 - bindb = trees[settings['EROOT']]["bintree"].dbapi
36319 - valid_atoms = []
36320 - for x in myfiles:
36321 - if is_valid_package_atom(x, allow_repo=True):
36322 - try:
36323 - #look at the installed files first, if there is no match
36324 - #look at the ebuilds, since EAPI 4 allows running pkg_info
36325 - #on non-installed packages
36326 - valid_atom = dep_expand(x, mydb=vardb, settings=settings)
36327 - if valid_atom.cp.split("/")[0] == "null":
36328 - valid_atom = dep_expand(x, mydb=portdb, settings=settings)
36329 - if valid_atom.cp.split("/")[0] == "null" and "--usepkg" in myopts:
36330 - valid_atom = dep_expand(x, mydb=bindb, settings=settings)
36331 - valid_atoms.append(valid_atom)
36332 - except portage.exception.AmbiguousPackageName as e:
36333 - msg = "The short ebuild name \"" + x + \
36334 - "\" is ambiguous. Please specify " + \
36335 - "one of the following " + \
36336 - "fully-qualified ebuild names instead:"
36337 - for line in textwrap.wrap(msg, 70):
36338 - writemsg_level("!!! %s\n" % (line,),
36339 - level=logging.ERROR, noiselevel=-1)
36340 - for i in e.args[0]:
36341 - writemsg_level(" %s\n" % colorize("INFORM", i),
36342 - level=logging.ERROR, noiselevel=-1)
36343 - writemsg_level("\n", level=logging.ERROR, noiselevel=-1)
36344 - return 1
36345 - continue
36346 - msg = []
36347 - msg.append("'%s' is not a valid package atom." % (x,))
36348 - msg.append("Please check ebuild(5) for full details.")
36349 - writemsg_level("".join("!!! %s\n" % line for line in msg),
36350 - level=logging.ERROR, noiselevel=-1)
36351 - return 1
36352 -
36353 - return action_info(settings, trees, myopts, valid_atoms)
36354 -
36355 - # "update", "system", or just process files:
36356 - else:
36357 - validate_ebuild_environment(trees)
36358 -
36359 - for x in myfiles:
36360 - if x.startswith(SETPREFIX) or \
36361 - is_valid_package_atom(x, allow_repo=True):
36362 - continue
36363 - if x[:1] == os.sep:
36364 - continue
36365 - try:
36366 - os.lstat(x)
36367 + try:
36368 + return run_action(emerge_config)
36369 + finally:
36370 + # Call destructors for our portdbapi instances.
36371 + for x in emerge_config.trees.values():
36372 + if "porttree" in x.lazy_items:
36373 continue
36374 - except OSError:
36375 - pass
36376 - msg = []
36377 - msg.append("'%s' is not a valid package atom." % (x,))
36378 - msg.append("Please check ebuild(5) for full details.")
36379 - writemsg_level("".join("!!! %s\n" % line for line in msg),
36380 - level=logging.ERROR, noiselevel=-1)
36381 - return 1
36382 -
36383 - # GLEP 42 says to display news *after* an emerge --pretend
36384 - if "--pretend" not in myopts:
36385 - display_news_notification(root_config, myopts)
36386 - retval = action_build(settings, trees, mtimedb,
36387 - myopts, myaction, myfiles, spinner)
36388 - post_emerge(myaction, myopts, myfiles, settings['EROOT'],
36389 - trees, mtimedb, retval)
36390 -
36391 - return retval
36392 + x["porttree"].dbapi.close_caches()
36393
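The hunk above strips the remaining pre-action bookkeeping out of
emerge_main(): option post-processing, privilege checks, emergelog setup
and the per-action dispatch all move behind run_action(). A minimal
sketch of the resulting control flow, using only names visible in the
diff (emerge_config, tmpcmdline, parse_opts, run_action) and leaving out
how emerge_config itself is assembled earlier in the function:

    def emerge_main_flow(emerge_config, tmpcmdline):
        # The parsed action/opts/args now land on the shared config
        # object instead of the loose myaction/myopts/myfiles locals.
        emerge_config.action, emerge_config.opts, emerge_config.args = \
            parse_opts(tmpcmdline)
        try:
            # Everything that used to follow inline is delegated to
            # run_action().
            return run_action(emerge_config)
        finally:
            # Close portdbapi caches, skipping trees that were never
            # instantiated (still lazy).
            for root_trees in emerge_config.trees.values():
                if "porttree" in root_trees.lazy_items:
                    continue
                root_trees["porttree"].dbapi.close_caches()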
36394 diff --git a/pym/_emerge/post_emerge.py b/pym/_emerge/post_emerge.py
36395 new file mode 100644
36396 index 0000000..d5f1ba5
36397 --- /dev/null
36398 +++ b/pym/_emerge/post_emerge.py
36399 @@ -0,0 +1,165 @@
36400 +# Copyright 1999-2012 Gentoo Foundation
36401 +# Distributed under the terms of the GNU General Public License v2
36402 +
36403 +from __future__ import print_function
36404 +
36405 +import logging
36406 +import textwrap
36407 +
36408 +import portage
36409 +from portage import os
36410 +from portage.emaint.modules.logs.logs import CleanLogs
36411 +from portage.news import count_unread_news, display_news_notifications
36412 +from portage.output import colorize
36413 +from portage.util._dyn_libs.display_preserved_libs import \
36414 + display_preserved_libs
36415 +from portage.util._info_files import chk_updated_info_files
36416 +
36417 +from .chk_updated_cfg_files import chk_updated_cfg_files
36418 +from .emergelog import emergelog
36419 +from ._flush_elog_mod_echo import _flush_elog_mod_echo
36420 +
36421 +def clean_logs(settings):
36422 +
36423 + if "clean-logs" not in settings.features:
36424 + return
36425 +
36426 + logdir = settings.get("PORT_LOGDIR")
36427 + if logdir is None or not os.path.isdir(logdir):
36428 + return
36429 +
36430 + cleanlogs = CleanLogs()
36431 + errors = cleanlogs.clean(settings=settings)
36432 + if errors:
36433 + out = portage.output.EOutput()
36434 + for msg in errors:
36435 + out.eerror(msg)
36436 +
36437 +def display_news_notification(root_config, myopts):
36438 + if "news" not in root_config.settings.features:
36439 + return
36440 + portdb = root_config.trees["porttree"].dbapi
36441 + vardb = root_config.trees["vartree"].dbapi
36442 + news_counts = count_unread_news(portdb, vardb)
36443 + display_news_notifications(news_counts)
36444 +
36445 +def show_depclean_suggestion():
36446 + out = portage.output.EOutput()
36447 + msg = "After world updates, it is important to remove " + \
36448 + "obsolete packages with emerge --depclean. Refer " + \
36449 + "to `man emerge` for more information."
36450 + for line in textwrap.wrap(msg, 72):
36451 + out.ewarn(line)
36452 +
36453 +def post_emerge(myaction, myopts, myfiles,
36454 + target_root, trees, mtimedb, retval):
36455 + """
36456 + Misc. things to run at the end of a merge session.
36457 +
36458 + Update Info Files
36459 + Update Config Files
36460 + Update News Items
36461 + Commit mtimeDB
36462 + Display preserved libs warnings
36463 +
36464 + @param myaction: The action returned from parse_opts()
36465 + @type myaction: String
36466 + @param myopts: emerge options
36467 + @type myopts: dict
36468 + @param myfiles: emerge arguments
36469 + @type myfiles: list
36470 + @param target_root: The target EROOT for myaction
36471 + @type target_root: String
36472 + @param trees: A dictionary mapping each ROOT to its package databases
36473 + @type trees: dict
36474 + @param mtimedb: The mtimeDB to store data needed across merge invocations
36475 + @type mtimedb: MtimeDB class instance
36476 + @param retval: Emerge's return value
36477 + @type retval: Int
36478 + """
36479 +
36480 + root_config = trees[target_root]["root_config"]
36481 + vardbapi = trees[target_root]['vartree'].dbapi
36482 + settings = vardbapi.settings
36483 + info_mtimes = mtimedb["info"]
36484 +
36485 + # Load the most current variables from ${ROOT}/etc/profile.env
36486 + settings.unlock()
36487 + settings.reload()
36488 + settings.regenerate()
36489 + settings.lock()
36490 +
36491 + config_protect = portage.util.shlex_split(
36492 + settings.get("CONFIG_PROTECT", ""))
36493 + infodirs = settings.get("INFOPATH","").split(":") + \
36494 + settings.get("INFODIR","").split(":")
36495 +
36496 + os.chdir("/")
36497 +
36498 + if retval == os.EX_OK:
36499 + exit_msg = " *** exiting successfully."
36500 + else:
36501 + exit_msg = " *** exiting unsuccessfully with status '%s'." % retval
36502 + emergelog("notitles" not in settings.features, exit_msg)
36503 +
36504 + _flush_elog_mod_echo()
36505 +
36506 + if not vardbapi._pkgs_changed:
36507 + # GLEP 42 says to display news *after* an emerge --pretend
36508 + if "--pretend" in myopts:
36509 + display_news_notification(root_config, myopts)
36510 + # If vdb state has not changed then there's nothing else to do.
36511 + return
36512 +
36513 + vdb_path = os.path.join(root_config.settings['EROOT'], portage.VDB_PATH)
36514 + portage.util.ensure_dirs(vdb_path)
36515 + vdb_lock = None
36516 + if os.access(vdb_path, os.W_OK) and not "--pretend" in myopts:
36517 + vardbapi.lock()
36518 + vdb_lock = True
36519 +
36520 + if vdb_lock:
36521 + try:
36522 + if "noinfo" not in settings.features:
36523 + chk_updated_info_files(target_root,
36524 + infodirs, info_mtimes)
36525 + mtimedb.commit()
36526 + finally:
36527 + if vdb_lock:
36528 + vardbapi.unlock()
36529 +
36530 + # Explicitly load and prune the PreservedLibsRegistry in order
36531 + # to ensure that we do not display stale data.
36532 + vardbapi._plib_registry.load()
36533 +
36534 + if vardbapi._plib_registry.hasEntries():
36535 + if "--quiet" in myopts:
36536 + print()
36537 + print(colorize("WARN", "!!!") + " existing preserved libs found")
36538 + else:
36539 + print()
36540 + print(colorize("WARN", "!!!") + " existing preserved libs:")
36541 + display_preserved_libs(vardbapi)
36542 + print("Use " + colorize("GOOD", "emerge @preserved-rebuild") +
36543 + " to rebuild packages using these libraries")
36544 +
36545 + chk_updated_cfg_files(settings['EROOT'], config_protect)
36546 +
36547 + display_news_notification(root_config, myopts)
36548 +
36549 + postemerge = os.path.join(settings["PORTAGE_CONFIGROOT"],
36550 + portage.USER_CONFIG_PATH, "bin", "post_emerge")
36551 + if os.access(postemerge, os.X_OK):
36552 + hook_retval = portage.process.spawn(
36553 + [postemerge], env=settings.environ())
36554 + if hook_retval != os.EX_OK:
36555 + portage.util.writemsg_level(
36556 + " %s spawn failed of %s\n" %
36557 + (colorize("BAD", "*"), postemerge,),
36558 + level=logging.ERROR, noiselevel=-1)
36559 +
36560 + clean_logs(settings)
36561 +
36562 + if "--quiet" not in myopts and \
36563 + myaction is None and "@world" in myfiles:
36564 + show_depclean_suggestion()
36565
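The new pym/_emerge/post_emerge.py collects the end-of-merge
housekeeping (info files, CONFIG_PROTECT notices, news, preserved-libs
warnings, the optional user hook and log cleaning) into an importable
module. Roughly how a caller is expected to use it, following the
signature documented above; the import path is only inferred from the
new file location and is an assumption:

    # Import path inferred from pym/_emerge/post_emerge.py.
    from _emerge.post_emerge import post_emerge

    retval = action_build(settings, trees, mtimedb,
        myopts, myaction, myfiles, spinner)
    post_emerge(myaction, myopts, myfiles, settings['EROOT'],
        trees, mtimedb, retval)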
36566 diff --git a/pym/_emerge/resolver/backtracking.py b/pym/_emerge/resolver/backtracking.py
36567 index d8f49c6..c29b9d4 100644
36568 --- a/pym/_emerge/resolver/backtracking.py
36569 +++ b/pym/_emerge/resolver/backtracking.py
36570 @@ -1,4 +1,4 @@
36571 -# Copyright 2010-2011 Gentoo Foundation
36572 +# Copyright 2010-2012 Gentoo Foundation
36573 # Distributed under the terms of the GNU General Public License v2
36574
36575 import copy
36576 @@ -7,8 +7,8 @@ class BacktrackParameter(object):
36577
36578 __slots__ = (
36579 "needed_unstable_keywords", "runtime_pkg_mask", "needed_use_config_changes", "needed_license_changes",
36580 - "rebuild_list", "reinstall_list", "needed_p_mask_changes",
36581 - "slot_abi_replace_installed"
36582 + "prune_rebuilds", "rebuild_list", "reinstall_list", "needed_p_mask_changes",
36583 + "slot_operator_mask_built", "slot_operator_replace_installed"
36584 )
36585
36586 def __init__(self):
36587 @@ -19,7 +19,9 @@ class BacktrackParameter(object):
36588 self.needed_license_changes = {}
36589 self.rebuild_list = set()
36590 self.reinstall_list = set()
36591 - self.slot_abi_replace_installed = set()
36592 + self.slot_operator_replace_installed = set()
36593 + self.slot_operator_mask_built = set()
36594 + self.prune_rebuilds = False
36595
36596 def __deepcopy__(self, memo=None):
36597 if memo is None:
36598 @@ -35,7 +37,9 @@ class BacktrackParameter(object):
36599 result.needed_license_changes = copy.copy(self.needed_license_changes)
36600 result.rebuild_list = copy.copy(self.rebuild_list)
36601 result.reinstall_list = copy.copy(self.reinstall_list)
36602 - result.slot_abi_replace_installed = copy.copy(self.slot_abi_replace_installed)
36603 + result.slot_operator_replace_installed = copy.copy(self.slot_operator_replace_installed)
36604 + result.slot_operator_mask_built = self.slot_operator_mask_built.copy()
36605 + result.prune_rebuilds = self.prune_rebuilds
36606
36607 # runtime_pkg_mask contains nested dicts that must also be copied
36608 result.runtime_pkg_mask = {}
36609 @@ -52,7 +56,9 @@ class BacktrackParameter(object):
36610 self.needed_license_changes == other.needed_license_changes and \
36611 self.rebuild_list == other.rebuild_list and \
36612 self.reinstall_list == other.reinstall_list and \
36613 - self.slot_abi_replace_installed == other.slot_abi_replace_installed
36614 + self.slot_operator_replace_installed == other.slot_operator_replace_installed and \
36615 + self.slot_operator_mask_built == other.slot_operator_mask_built and \
36616 + self.prune_rebuilds == other.prune_rebuilds
36617
36618
36619 class _BacktrackNode(object):
36620 @@ -125,7 +131,7 @@ class Backtracker(object):
36621 for pkg, mask_info in runtime_pkg_mask.items():
36622
36623 if "missing dependency" in mask_info or \
36624 - "slot_abi_mask_built" in mask_info:
36625 + "slot_operator_mask_built" in mask_info:
36626 continue
36627
36628 entry_is_valid = False
36629 @@ -192,16 +198,28 @@ class Backtracker(object):
36630 para.needed_use_config_changes[pkg] = (new_use, new_changes)
36631 elif change == "slot_conflict_abi":
36632 new_node.terminal = False
36633 - elif change == "slot_abi_mask_built":
36634 + elif change == "slot_operator_mask_built":
36635 + para.slot_operator_mask_built.update(data)
36636 for pkg, mask_reasons in data.items():
36637 para.runtime_pkg_mask.setdefault(pkg,
36638 {}).update(mask_reasons)
36639 - elif change == "slot_abi_replace_installed":
36640 - para.slot_abi_replace_installed.update(data)
36641 + elif change == "slot_operator_replace_installed":
36642 + para.slot_operator_replace_installed.update(data)
36643 elif change == "rebuild_list":
36644 para.rebuild_list.update(data)
36645 elif change == "reinstall_list":
36646 para.reinstall_list.update(data)
36647 + elif change == "prune_rebuilds":
36648 + para.prune_rebuilds = True
36649 + para.slot_operator_replace_installed.clear()
36650 + for pkg in para.slot_operator_mask_built:
36651 + runtime_masks = para.runtime_pkg_mask.get(pkg)
36652 + if runtime_masks is None:
36653 + continue
36654 + runtime_masks.pop("slot_operator_mask_built", None)
36655 + if not runtime_masks:
36656 + para.runtime_pkg_mask.pop(pkg)
36657 + para.slot_operator_mask_built.clear()
36658
36659 self._add(new_node, explore=explore)
36660 self._current_node = new_node
36661
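Besides the slot_abi_* to slot_operator_* rename, the backtracking hunks
add a prune_rebuilds parameter that cancels queued slot-operator
rebuilds and drops the masks recorded for them. A self-contained sketch
of that data handling; the container class is a stand-in, and only the
attribute names and pruning steps come from the hunk above:

    class BacktrackParamsSketch(object):
        def __init__(self):
            self.runtime_pkg_mask = {}
            self.slot_operator_replace_installed = set()
            self.slot_operator_mask_built = set()
            self.prune_rebuilds = False

        def apply_prune_rebuilds(self):
            # Cancel queued slot-operator replacements...
            self.prune_rebuilds = True
            self.slot_operator_replace_installed.clear()
            # ...and remove the runtime masks that were added under
            # "slot_operator_mask_built", dropping now-empty entries.
            for pkg in self.slot_operator_mask_built:
                runtime_masks = self.runtime_pkg_mask.get(pkg)
                if runtime_masks is None:
                    continue
                runtime_masks.pop("slot_operator_mask_built", None)
                if not runtime_masks:
                    self.runtime_pkg_mask.pop(pkg)
            self.slot_operator_mask_built.clear()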
36662 diff --git a/pym/_emerge/resolver/circular_dependency.py b/pym/_emerge/resolver/circular_dependency.py
36663 index aca81fa..b710671 100644
36664 --- a/pym/_emerge/resolver/circular_dependency.py
36665 +++ b/pym/_emerge/resolver/circular_dependency.py
36666 @@ -1,7 +1,7 @@
36667 -# Copyright 2010-2011 Gentoo Foundation
36668 +# Copyright 2010-2013 Gentoo Foundation
36669 # Distributed under the terms of the GNU General Public License v2
36670
36671 -from __future__ import print_function
36672 +from __future__ import print_function, unicode_literals
36673
36674 from itertools import chain, product
36675 import logging
36676 @@ -11,6 +11,7 @@ from portage.exception import InvalidDependString
36677 from portage.output import colorize
36678 from portage.util import writemsg_level
36679 from _emerge.DepPrioritySatisfiedRange import DepPrioritySatisfiedRange
36680 +from _emerge.Package import Package
36681
36682 class circular_dependency_handler(object):
36683
36684 @@ -61,8 +62,7 @@ class circular_dependency_handler(object):
36685 node = nodes[0]
36686 display_order.append(node)
36687 tempgraph.remove(node)
36688 - display_order.reverse()
36689 - return display_order
36690 + return tuple(display_order)
36691
36692 def _prepare_circular_dep_message(self):
36693 """
36694 @@ -113,9 +113,10 @@ class circular_dependency_handler(object):
36695 parent_atoms = self.all_parent_atoms.get(pkg)
36696
36697 if priorities[-1].buildtime:
36698 - dep = parent.metadata["DEPEND"]
36699 + dep = " ".join(parent._metadata[k]
36700 + for k in Package._buildtime_keys)
36701 elif priorities[-1].runtime:
36702 - dep = parent.metadata["RDEPEND"]
36703 + dep = parent._metadata["RDEPEND"]
36704
36705 for ppkg, atom in parent_atoms:
36706 if ppkg == parent:
36707 @@ -125,7 +126,7 @@ class circular_dependency_handler(object):
36708
36709 try:
36710 affecting_use = extract_affecting_use(dep, parent_atom,
36711 - eapi=parent.metadata["EAPI"])
36712 + eapi=parent.eapi)
36713 except InvalidDependString:
36714 if not parent.installed:
36715 raise
36716 @@ -144,7 +145,8 @@ class circular_dependency_handler(object):
36717 #If any of the flags we're going to touch is in REQUIRED_USE, add all
36718 #other flags in REQUIRED_USE to affecting_use, to not lose any solution.
36719 required_use_flags = get_required_use_flags(
36720 - parent.metadata.get("REQUIRED_USE", ""))
36721 + parent._metadata.get("REQUIRED_USE", ""),
36722 + eapi=parent.eapi)
36723
36724 if affecting_use.intersection(required_use_flags):
36725 # TODO: Find out exactly which REQUIRED_USE flags are
36726 @@ -186,9 +188,11 @@ class circular_dependency_handler(object):
36727 parent_atom not in reduced_dep:
36728 #We found an assignment that removes the atom from 'dep'.
36729 #Make sure it doesn't conflict with REQUIRED_USE.
36730 - required_use = parent.metadata.get("REQUIRED_USE", "")
36731 + required_use = parent._metadata.get("REQUIRED_USE", "")
36732
36733 - if check_required_use(required_use, current_use, parent.iuse.is_valid_flag):
36734 + if check_required_use(required_use, current_use,
36735 + parent.iuse.is_valid_flag,
36736 + eapi=parent.eapi):
36737 use = self.depgraph._pkg_use_enabled(parent)
36738 solution = set()
36739 for flag, state in zip(affecting_use, use_state):
36740
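The circular-dependency handler now reads metadata through
Package._metadata and passes the parent's EAPI down to the dependency
helpers. A hedged, stand-alone example of the EAPI-aware REQUIRED_USE
check whose call shape appears in the hunk above; the flag names and
EAPI value are invented for illustration:

    from portage.dep import check_required_use

    required_use = "^^ ( gtk qt4 )"   # hypothetical REQUIRED_USE string
    enabled = frozenset(["gtk"])      # hypothetical enabled USE flags

    def is_valid_flag(flag):
        # Stand-in for pkg.iuse.is_valid_flag.
        return flag in ("gtk", "qt4")

    # Truthy when the constraint is satisfied for the given USE set.
    print(bool(check_required_use(required_use, enabled,
        is_valid_flag, eapi="5")))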
36741 diff --git a/pym/_emerge/resolver/output.py b/pym/_emerge/resolver/output.py
36742 index 61cfe9e..5f550be 100644
36743 --- a/pym/_emerge/resolver/output.py
36744 +++ b/pym/_emerge/resolver/output.py
36745 @@ -1,26 +1,31 @@
36746 -# Copyright 2010-2012 Gentoo Foundation
36747 +# Copyright 2010-2014 Gentoo Foundation
36748 # Distributed under the terms of the GNU General Public License v2
36749
36750 """Resolver output display operation.
36751 """
36752
36753 +from __future__ import unicode_literals
36754 +
36755 __all__ = (
36756 - "Display",
36757 + "Display", "format_unmatched_atom",
36758 )
36759
36760 import sys
36761
36762 +import portage
36763 from portage import os
36764 -from portage import _unicode_decode
36765 from portage.dbapi.dep_expand import dep_expand
36766 -from portage.dep import cpvequal, _repo_separator
36767 +from portage.dep import cpvequal, _repo_separator, _slot_separator
36768 +from portage.eapi import _get_eapi_attrs
36769 from portage.exception import InvalidDependString, SignatureException
36770 +from portage.package.ebuild.config import _get_feature_flags
36771 from portage.package.ebuild._spawn_nofetch import spawn_nofetch
36772 from portage.output import ( blue, colorize, create_color_func,
36773 - darkblue, darkgreen, green, nc_len, red, teal, turquoise, yellow )
36774 + darkblue, darkgreen, green, nc_len, teal)
36775 bad = create_color_func("BAD")
36776 +from portage._sets.base import InternalPackageSet
36777 from portage.util import writemsg_stdout
36778 -from portage.versions import best, catpkgsplit
36779 +from portage.versions import best, cpv_getversion
36780
36781 from _emerge.Blocker import Blocker
36782 from _emerge.create_world_atom import create_world_atom
36783 @@ -30,7 +35,9 @@ from _emerge.show_invalid_depstring_notice import show_invalid_depstring_notice
36784
36785 if sys.hexversion >= 0x3000000:
36786 basestring = str
36787 -
36788 + _unicode = str
36789 +else:
36790 + _unicode = unicode
36791
36792 class Display(object):
36793 """Formats and outputs the depgrah supplied it for merge/re-merge, etc.
36794 @@ -54,11 +61,6 @@ class Display(object):
36795 self.oldlp = None
36796 self.myfetchlist = None
36797 self.indent = ''
36798 - self.is_new = True
36799 - self.cur_use = None
36800 - self.cur_iuse = None
36801 - self.old_use = ''
36802 - self.old_iuse = ''
36803 self.use_expand = None
36804 self.use_expand_hidden = None
36805 self.pkgsettings = None
36806 @@ -68,93 +70,54 @@ class Display(object):
36807 self.blocker_style = None
36808
36809
36810 - def _blockers(self, pkg, fetch_symbol):
36811 - """Processes pkg for blockers and adds colorized strings to
36812 + def _blockers(self, blocker):
36813 + """Adds colorized strings to
36814 self.print_msg and self.blockers
36815
36816 - @param pkg: _emerge.Package.Package instance
36817 - @param fetch_symbol: string
36818 + @param blocker: _emerge.Blocker.Blocker instance
36819 @rtype: bool
36820 Modifies class globals: self.blocker_style, self.resolved,
36821 self.print_msg
36822 """
36823 - if pkg.satisfied:
36824 + if blocker.satisfied:
36825 self.blocker_style = "PKG_BLOCKER_SATISFIED"
36826 - addl = "%s %s " % (colorize(self.blocker_style, "b"),
36827 - fetch_symbol)
36828 + addl = "%s " % (colorize(self.blocker_style, "b"),)
36829 else:
36830 self.blocker_style = "PKG_BLOCKER"
36831 - addl = "%s %s " % (colorize(self.blocker_style, "B"),
36832 - fetch_symbol)
36833 + addl = "%s " % (colorize(self.blocker_style, "B"),)
36834 addl += self.empty_space_in_brackets()
36835 self.resolved = dep_expand(
36836 - str(pkg.atom).lstrip("!"), mydb=self.vardb,
36837 + _unicode(blocker.atom).lstrip("!"), mydb=self.vardb,
36838 settings=self.pkgsettings
36839 )
36840 if self.conf.columns and self.conf.quiet:
36841 - addl += " " + colorize(self.blocker_style, str(self.resolved))
36842 + addl += " " + colorize(self.blocker_style, _unicode(self.resolved))
36843 else:
36844 addl = "[%s %s] %s%s" % \
36845 (colorize(self.blocker_style, "blocks"),
36846 addl, self.indent,
36847 - colorize(self.blocker_style, str(self.resolved))
36848 + colorize(self.blocker_style, _unicode(self.resolved))
36849 )
36850 - block_parents = self.conf.blocker_parents.parent_nodes(pkg)
36851 - block_parents = set([pnode[2] for pnode in block_parents])
36852 + block_parents = self.conf.blocker_parents.parent_nodes(blocker)
36853 + block_parents = set(_unicode(pnode.cpv) for pnode in block_parents)
36854 block_parents = ", ".join(block_parents)
36855 - if self.resolved != pkg[2]:
36856 + if blocker.atom.blocker.overlap.forbid:
36857 + blocking_desc = "hard blocking"
36858 + else:
36859 + blocking_desc = "blocking"
36860 + if self.resolved != blocker.atom:
36861 addl += colorize(self.blocker_style,
36862 - " (\"%s\" is blocking %s)") % \
36863 - (str(pkg.atom).lstrip("!"), block_parents)
36864 + " (\"%s\" is %s %s)" %
36865 + (_unicode(blocker.atom).lstrip("!"),
36866 + blocking_desc, block_parents))
36867 else:
36868 addl += colorize(self.blocker_style,
36869 - " (is blocking %s)") % block_parents
36870 - if isinstance(pkg, Blocker) and pkg.satisfied:
36871 - if self.conf.columns:
36872 - return True
36873 - self.print_msg.append(addl)
36874 + " (is %s %s)" % (blocking_desc, block_parents))
36875 + if blocker.satisfied:
36876 + if not self.conf.columns:
36877 + self.print_msg.append(addl)
36878 else:
36879 self.blockers.append(addl)
36880 - return False
36881 -
36882 -
36883 - def _display_use(self, pkg, myoldbest, myinslotlist):
36884 - """ USE flag display
36885 -
36886 - @param pkg: _emerge.Package.Package instance
36887 - @param myoldbest: list of installed versions
36888 - @param myinslotlist: list of installed slots
36889 - Modifies class globals: self.forced_flags, self.cur_iuse,
36890 - self.old_iuse, self.old_use, self.use_expand
36891 - """
36892 -
36893 - self.forced_flags = set()
36894 - self.forced_flags.update(pkg.use.force)
36895 - self.forced_flags.update(pkg.use.mask)
36896 -
36897 - self.cur_use = [flag for flag in self.conf.pkg_use_enabled(pkg) \
36898 - if flag in pkg.iuse.all]
36899 - self.cur_iuse = sorted(pkg.iuse.all)
36900 -
36901 - if myoldbest and myinslotlist:
36902 - previous_cpv = myoldbest[0].cpv
36903 - else:
36904 - previous_cpv = pkg.cpv
36905 - if self.vardb.cpv_exists(previous_cpv):
36906 - previous_pkg = self.vardb.match_pkgs('=' + previous_cpv)[0]
36907 - self.old_iuse = sorted(previous_pkg.iuse.all)
36908 - self.old_use = previous_pkg.use.enabled
36909 - self.is_new = False
36910 - else:
36911 - self.old_iuse = []
36912 - self.old_use = []
36913 - self.is_new = True
36914 -
36915 - self.old_use = [flag for flag in self.old_use if flag in self.old_iuse]
36916 -
36917 - self.use_expand = pkg.use.expand
36918 - self.use_expand_hidden = pkg.use.expand_hidden
36919 - return
36920
36921 def include_mask_str(self):
36922 return self.conf.verbosity > 1
36923 @@ -219,13 +182,40 @@ class Display(object):
36924 return ret
36925
36926
36927 - def recheck_hidden(self, pkg):
36928 - """ Prevent USE_EXPAND_HIDDEN flags from being hidden if they
36929 - are the only thing that triggered reinstallation.
36930 + def _display_use(self, pkg, pkg_info):
36931 + """ USE flag display
36932
36933 @param pkg: _emerge.Package.Package instance
36934 - Modifies self.use_expand_hidden, self.use_expand, self.verboseadd
36935 + @param pkg_info: PkgInfo instance
36936 + Modifies self.use_expand_hidden, self.use_expand, self.verboseadd,
36937 + self.forced_flags
36938 """
36939 +
36940 + self.forced_flags = set()
36941 + self.forced_flags.update(pkg.use.force)
36942 + self.forced_flags.update(pkg.use.mask)
36943 +
36944 + cur_use = [flag for flag in self.conf.pkg_use_enabled(pkg) \
36945 + if flag in pkg.iuse.all]
36946 + cur_iuse = sorted(pkg.iuse.all)
36947 +
36948 + if pkg_info.previous_pkg is not None:
36949 + previous_pkg = pkg_info.previous_pkg
36950 + old_iuse = sorted(previous_pkg.iuse.all)
36951 + old_use = previous_pkg.use.enabled
36952 + is_new = False
36953 + else:
36954 + old_iuse = []
36955 + old_use = []
36956 + is_new = True
36957 +
36958 + old_use = [flag for flag in old_use if flag in old_iuse]
36959 +
36960 + self.use_expand = pkg.use.expand
36961 + self.use_expand_hidden = pkg.use.expand_hidden
36962 +
36963 + # Prevent USE_EXPAND_HIDDEN flags from being hidden if they
36964 + # are the only thing that triggered reinstallation.
36965 reinst_flags_map = {}
36966 reinstall_for_flags = self.conf.reinstall_nodes.get(pkg)
36967 reinst_expand_map = None
36968 @@ -246,13 +236,14 @@ class Display(object):
36969 reinst_expand_map)
36970
36971 cur_iuse_map, iuse_forced = \
36972 - self.map_to_use_expand(self.cur_iuse, forced_flags=True)
36973 - cur_use_map = self.map_to_use_expand(self.cur_use)
36974 - old_iuse_map = self.map_to_use_expand(self.old_iuse)
36975 - old_use_map = self.map_to_use_expand(self.old_use)
36976 + self.map_to_use_expand(cur_iuse, forced_flags=True)
36977 + cur_use_map = self.map_to_use_expand(cur_use)
36978 + old_iuse_map = self.map_to_use_expand(old_iuse)
36979 + old_use_map = self.map_to_use_expand(old_use)
36980
36981 use_expand = sorted(self.use_expand)
36982 use_expand.insert(0, "USE")
36983 + feature_flags = _get_feature_flags(_get_eapi_attrs(pkg.eapi))
36984
36985 for key in use_expand:
36986 if key in self.use_expand_hidden:
36987 @@ -260,7 +251,7 @@ class Display(object):
36988 self.verboseadd += _create_use_string(self.conf, key.upper(),
36989 cur_iuse_map[key], iuse_forced[key],
36990 cur_use_map[key], old_iuse_map[key],
36991 - old_use_map[key], self.is_new,
36992 + old_use_map[key], is_new, feature_flags,
36993 reinst_flags_map.get(key))
36994 return
36995
36996 @@ -318,13 +309,14 @@ class Display(object):
36997 kwargs["myrepo"] = pkg.repo
36998 myfilesdict = None
36999 try:
37000 - myfilesdict = db.getfetchsizes(pkg.cpv, **kwargs)
37001 + myfilesdict = db.getfetchsizes(pkg.cpv,
37002 + **portage._native_kwargs(kwargs))
37003 except InvalidDependString as e:
37004 # FIXME: validate SRC_URI earlier
37005 depstr, = db.aux_get(pkg.cpv,
37006 ["SRC_URI"], myrepo=pkg.repo)
37007 show_invalid_depstring_notice(
37008 - pkg, depstr, str(e))
37009 + pkg, depstr, _unicode(e))
37010 raise
37011 except SignatureException:
37012 # missing/invalid binary package SIZE signature
37013 @@ -343,15 +335,13 @@ class Display(object):
37014 if self.quiet_repo_display:
37015 # overlay verbose
37016 # assign index for a previous version in the same slot
37017 - slot_matches = self.vardb.match(pkg.slot_atom)
37018 - if slot_matches:
37019 - repo_name_prev = self.vardb.aux_get(slot_matches[0],
37020 - ["repository"])[0]
37021 + if pkg_info.previous_pkg is not None:
37022 + repo_name_prev = pkg_info.previous_pkg.repo
37023 else:
37024 repo_name_prev = None
37025
37026 # now use the data to generate output
37027 - if pkg.installed or not slot_matches:
37028 + if pkg.installed or pkg_info.previous_pkg is None:
37029 self.repoadd = self.conf.repo_display.repoStr(
37030 pkg_info.repo_path_real)
37031 else:
37032 @@ -370,58 +360,86 @@ class Display(object):
37033 repoadd_set.add(self.repoadd)
37034
37035
37036 - def convert_myoldbest(self, pkg, myoldbest):
37037 + def convert_myoldbest(self, pkg, pkg_info):
37038 """converts and colorizes a version list to a string
37039
37040 @param pkg: _emerge.Package.Package instance
37041 - @param myoldbest: list
37042 + @param pkg_info: dictionary
37043 @rtype string.
37044 """
37045 + myoldbest = pkg_info.oldbest_list
37046 # Convert myoldbest from a list to a string.
37047 myoldbest_str = ""
37048 if myoldbest:
37049 versions = []
37050 for pos, old_pkg in enumerate(myoldbest):
37051 - key = catpkgsplit(old_pkg.cpv)[2] + "-" + catpkgsplit(old_pkg.cpv)[3]
37052 + key = old_pkg.version
37053 if key[-3:] == "-r0":
37054 key = key[:-3]
37055 - if self.conf.verbosity == 3 and not self.quiet_repo_display and (self.verbose_main_repo_display or
37056 - any(x.repo != self.portdb.repositories.mainRepo().name for x in myoldbest + [pkg])):
37057 - key += _repo_separator + old_pkg.repo
37058 + if self.conf.verbosity == 3:
37059 + if pkg_info.attr_display.new_slot:
37060 + key += _slot_separator + old_pkg.slot
37061 + if old_pkg.slot != old_pkg.sub_slot:
37062 + key += "/" + old_pkg.sub_slot
37063 + elif any(x.slot + "/" + x.sub_slot != "0/0" for x in myoldbest + [pkg]):
37064 + key += _slot_separator + old_pkg.slot
37065 + if old_pkg.slot != old_pkg.sub_slot or \
37066 + old_pkg.slot == pkg.slot and old_pkg.sub_slot != pkg.sub_slot:
37067 + key += "/" + old_pkg.sub_slot
37068 + if not self.quiet_repo_display and (self.verbose_main_repo_display or
37069 + self.portdb.repositories.mainRepo() is None or
37070 + any(x.repo != self.portdb.repositories.mainRepo().name for x in myoldbest + [pkg])):
37071 + key += _repo_separator + old_pkg.repo
37072 versions.append(key)
37073 myoldbest_str = blue("["+", ".join(versions)+"]")
37074 return myoldbest_str
37075
37076 + def _append_slot(self, pkg_str, pkg, pkg_info):
37077 + """Potentially appends slot and subslot to package string.
37078
37079 - def set_interactive(self, pkg, ordered, addl):
37080 - """Increments counters.interactive if the pkg is to
37081 - be merged and it's metadata has interactive set True
37082 + @param pkg_str: string
37083 + @param pkg: _emerge.Package.Package instance
37084 + @param pkg_info: dictionary
37085 + @rtype string
37086 + """
37087 + if pkg_info.attr_display.new_slot:
37088 + pkg_str += _slot_separator + pkg_info.slot
37089 + if pkg_info.slot != pkg_info.sub_slot:
37090 + pkg_str += "/" + pkg_info.sub_slot
37091 + elif any(x.slot + "/" + x.sub_slot != "0/0" for x in pkg_info.oldbest_list + [pkg]):
37092 + pkg_str += _slot_separator + pkg_info.slot
37093 + if pkg_info.slot != pkg_info.sub_slot or \
37094 + any(x.slot == pkg_info.slot and x.sub_slot != pkg_info.sub_slot for x in pkg_info.oldbest_list):
37095 + pkg_str += "/" + pkg_info.sub_slot
37096 + return pkg_str
37097 +
37098 + def _append_repository(self, pkg_str, pkg, pkg_info):
37099 + """Potentially appends repository to package string.
37100
37101 + @param pkg_str: string
37102 @param pkg: _emerge.Package.Package instance
37103 - @param ordered: boolean
37104 - @param addl: already defined string to add to
37105 + @param pkg_info: dictionary
37106 + @rtype string
37107 """
37108 - if 'interactive' in pkg.metadata.properties and \
37109 - pkg.operation == 'merge':
37110 - addl = colorize("WARN", "I") + addl[1:]
37111 - if ordered:
37112 - self.counters.interactive += 1
37113 - return addl
37114 -
37115 - def _set_non_root_columns(self, addl, pkg_info, pkg):
37116 + if not self.quiet_repo_display and (self.verbose_main_repo_display or
37117 + self.portdb.repositories.mainRepo() is None or
37118 + any(x.repo != self.portdb.repositories.mainRepo().name for x in pkg_info.oldbest_list + [pkg])):
37119 + pkg_str += _repo_separator + pkg.repo
37120 + return pkg_str
37121 +
37122 + def _set_non_root_columns(self, pkg, pkg_info):
37123 """sets the indent level and formats the output
37124
37125 - @param addl: already defined string to add to
37126 - @param pkg_info: dictionary
37127 @param pkg: _emerge.Package.Package instance
37128 + @param pkg_info: dictionary
37129 @rtype string
37130 """
37131 ver_str = pkg_info.ver
37132 - if self.conf.verbosity == 3 and not self.quiet_repo_display and (self.verbose_main_repo_display or
37133 - any(x.repo != self.portdb.repositories.mainRepo().name for x in pkg_info.oldbest_list + [pkg])):
37134 - ver_str += _repo_separator + pkg.repo
37135 + if self.conf.verbosity == 3:
37136 + ver_str = self._append_slot(ver_str, pkg, pkg_info)
37137 + ver_str = self._append_repository(ver_str, pkg, pkg_info)
37138 if self.conf.quiet:
37139 - myprint = addl + " " + self.indent + \
37140 + myprint = _unicode(pkg_info.attr_display) + " " + self.indent + \
37141 self.pkgprint(pkg_info.cp, pkg_info)
37142 myprint = myprint+darkblue(" "+ver_str)+" "
37143 myprint = myprint+pkg_info.oldbest
37144 @@ -434,7 +452,8 @@ class Display(object):
37145 self.indent, self.pkgprint(pkg.cp, pkg_info))
37146 else:
37147 myprint = "[%s %s] %s%s" % \
37148 - (self.pkgprint(pkg.type_name, pkg_info), addl,
37149 + (self.pkgprint(pkg.type_name, pkg_info),
37150 + pkg_info.attr_display,
37151 self.indent, self.pkgprint(pkg.cp, pkg_info))
37152 if (self.newlp-nc_len(myprint)) > 0:
37153 myprint = myprint+(" "*(self.newlp-nc_len(myprint)))
37154 @@ -446,21 +465,20 @@ class Display(object):
37155 return myprint
37156
37157
37158 - def _set_root_columns(self, addl, pkg_info, pkg):
37159 + def _set_root_columns(self, pkg, pkg_info):
37160 """sets the indent level and formats the output
37161
37162 - @param addl: already defined string to add to
37163 - @param pkg_info: dictionary
37164 @param pkg: _emerge.Package.Package instance
37165 + @param pkg_info: dictionary
37166 @rtype string
37167 Modifies self.verboseadd
37168 """
37169 ver_str = pkg_info.ver
37170 - if self.conf.verbosity == 3 and not self.quiet_repo_display and (self.verbose_main_repo_display or
37171 - any(x.repo != self.portdb.repositories.mainRepo().name for x in pkg_info.oldbest_list + [pkg])):
37172 - ver_str += _repo_separator + pkg.repo
37173 + if self.conf.verbosity == 3:
37174 + ver_str = self._append_slot(ver_str, pkg, pkg_info)
37175 + ver_str = self._append_repository(ver_str, pkg, pkg_info)
37176 if self.conf.quiet:
37177 - myprint = addl + " " + self.indent + \
37178 + myprint = _unicode(pkg_info.attr_display) + " " + self.indent + \
37179 self.pkgprint(pkg_info.cp, pkg_info)
37180 myprint = myprint+" "+green(ver_str)+" "
37181 myprint = myprint+pkg_info.oldbest
37182 @@ -473,7 +491,8 @@ class Display(object):
37183 addl, self.indent, self.pkgprint(pkg.cp, pkg_info))
37184 else:
37185 myprint = "[%s %s] %s%s" % \
37186 - (self.pkgprint(pkg.type_name, pkg_info), addl,
37187 + (self.pkgprint(pkg.type_name, pkg_info),
37188 + pkg_info.attr_display,
37189 self.indent, self.pkgprint(pkg.cp, pkg_info))
37190 if (self.newlp-nc_len(myprint)) > 0:
37191 myprint = myprint+(" "*(self.newlp-nc_len(myprint)))
37192 @@ -484,18 +503,17 @@ class Display(object):
37193 return myprint
37194
37195
37196 - def _set_no_columns(self, pkg, pkg_info, addl):
37197 + def _set_no_columns(self, pkg, pkg_info):
37198 """prints pkg info without column indentation.
37199
37200 @param pkg: _emerge.Package.Package instance
37201 @param pkg_info: dictionary
37202 - @param addl: the current text to add for the next line to output
37203 @rtype the updated addl
37204 """
37205 pkg_str = pkg.cpv
37206 - if self.conf.verbosity == 3 and not self.quiet_repo_display and (self.verbose_main_repo_display or
37207 - any(x.repo != self.portdb.repositories.mainRepo().name for x in pkg_info.oldbest_list + [pkg])):
37208 - pkg_str += _repo_separator + pkg.repo
37209 + if self.conf.verbosity == 3:
37210 + pkg_str = self._append_slot(pkg_str, pkg, pkg_info)
37211 + pkg_str = self._append_repository(pkg_str, pkg, pkg_info)
37212 if not pkg_info.merge:
37213 addl = self.empty_space_in_brackets()
37214 myprint = "[%s%s] %s%s %s" % \
37215 @@ -506,46 +524,10 @@ class Display(object):
37216 else:
37217 myprint = "[%s %s] %s%s %s" % \
37218 (self.pkgprint(pkg.type_name, pkg_info),
37219 - addl, self.indent,
37220 + pkg_info.attr_display, self.indent,
37221 self.pkgprint(pkg_str, pkg_info), pkg_info.oldbest)
37222 return myprint
37223
37224 -
37225 - def _insert_slot(self, pkg, pkg_info, myinslotlist):
37226 - """Adds slot info to the message
37227 -
37228 - @return addl: formatted slot info
37229 - @return myoldbest: installed version list
37230 - Modifies self.counters.downgrades, self.counters.upgrades
37231 - """
37232 - addl = " " + pkg_info.fetch_symbol
37233 - if not cpvequal(pkg.cpv,
37234 - best([pkg.cpv] + [x.cpv for x in myinslotlist])):
37235 - # Downgrade in slot
37236 - addl += turquoise("U")+blue("D")
37237 - if pkg_info.ordered:
37238 - self.counters.downgrades += 1
37239 - else:
37240 - # Update in slot
37241 - addl += turquoise("U") + " "
37242 - if pkg_info.ordered:
37243 - self.counters.upgrades += 1
37244 - return addl
37245 -
37246 -
37247 - def _new_slot(self, pkg, pkg_info):
37248 - """New slot, mark it new.
37249 -
37250 - @return addl: formatted slot info
37251 - @return myoldbest: installed version list
37252 - Modifies self.counters.newslot
37253 - """
37254 - addl = " " + green("NS") + pkg_info.fetch_symbol + " "
37255 - if pkg_info.ordered:
37256 - self.counters.newslot += 1
37257 - return addl
37258 -
37259 -
37260 def print_messages(self, show_repos):
37261 """Performs the actual output printing of the pre-formatted
37262 messages
37263 @@ -581,9 +563,9 @@ class Display(object):
37264 """
37265 writemsg_stdout('\n%s\n' % (self.counters,), noiselevel=-1)
37266 if show_repos:
37267 - # Use _unicode_decode() to force unicode format string so
37268 + # Use unicode_literals to force unicode format string so
37269 # that RepoDisplay.__unicode__() is called in python2.
37270 - writemsg_stdout(_unicode_decode("%s") % (self.conf.repo_display,),
37271 + writemsg_stdout("%s" % (self.conf.repo_display,),
37272 noiselevel=-1)
37273 return
37274
37275 @@ -635,15 +617,18 @@ class Display(object):
37276 self.counters.restrict_fetch_satisfied
37277 """
37278 pkg_info = PkgInfo()
37279 + pkg_info.cp = pkg.cp
37280 + pkg_info.ver = self.get_ver_str(pkg)
37281 + pkg_info.slot = pkg.slot
37282 + pkg_info.sub_slot = pkg.sub_slot
37283 + pkg_info.repo_name = pkg.repo
37284 pkg_info.ordered = ordered
37285 - pkg_info.fetch_symbol = " "
37286 pkg_info.operation = pkg.operation
37287 pkg_info.merge = ordered and pkg_info.operation == "merge"
37288 if not pkg_info.merge and pkg_info.operation == "merge":
37289 pkg_info.operation = "nomerge"
37290 pkg_info.built = pkg.type_name != "ebuild"
37291 pkg_info.ebuild_path = None
37292 - pkg_info.repo_name = pkg.repo
37293 if ordered:
37294 if pkg_info.merge:
37295 if pkg.type_name == "binary":
37296 @@ -659,22 +644,30 @@ class Display(object):
37297 pkg_info.repo_path_real = os.path.dirname(os.path.dirname(
37298 os.path.dirname(pkg_info.ebuild_path)))
37299 else:
37300 - pkg_info.repo_path_real = \
37301 - self.portdb.getRepositoryPath(pkg.metadata["repository"])
37302 + pkg_info.repo_path_real = self.portdb.getRepositoryPath(pkg.repo)
37303 pkg_info.use = list(self.conf.pkg_use_enabled(pkg))
37304 if not pkg.built and pkg.operation == 'merge' and \
37305 - 'fetch' in pkg.metadata.restrict:
37306 + 'fetch' in pkg.restrict:
37307 if pkg_info.ordered:
37308 self.counters.restrict_fetch += 1
37309 + pkg_info.attr_display.fetch_restrict = True
37310 if not self.portdb.getfetchsizes(pkg.cpv,
37311 useflags=pkg_info.use, myrepo=pkg.repo):
37312 - pkg_info.fetch_symbol = green("f")
37313 + pkg_info.attr_display.fetch_restrict_satisfied = True
37314 if pkg_info.ordered:
37315 self.counters.restrict_fetch_satisfied += 1
37316 else:
37317 - pkg_info.fetch_symbol = red("F")
37318 if pkg_info.ebuild_path is not None:
37319 self.restrict_fetch_list[pkg] = pkg_info
37320 +
37321 + if self.vardb.cpv_exists(pkg.cpv):
37322 + # Do a cpv match first, in case the SLOT has changed.
37323 + pkg_info.previous_pkg = self.vardb.match_pkgs('=' + pkg.cpv)[0]
37324 + else:
37325 + slot_matches = self.vardb.match_pkgs(pkg.slot_atom)
37326 + if slot_matches:
37327 + pkg_info.previous_pkg = slot_matches[0]
37328 +
37329 return pkg_info
37330
37331
37332 @@ -685,15 +678,14 @@ class Display(object):
37333 @param pkg_info: dictionay
37334 Modifies self.changelogs
37335 """
37336 - inst_matches = self.vardb.match(pkg.slot_atom)
37337 - if inst_matches:
37338 + if pkg_info.previous_pkg is not None:
37339 ebuild_path_cl = pkg_info.ebuild_path
37340 if ebuild_path_cl is None:
37341 # binary package
37342 ebuild_path_cl = self.portdb.findname(pkg.cpv, myrepo=pkg.repo)
37343 if ebuild_path_cl is not None:
37344 self.changelogs.extend(_calc_changelog(
37345 - ebuild_path_cl, inst_matches[0], pkg.cpv))
37346 + ebuild_path_cl, pkg_info.previous_pkg, pkg.cpv))
37347 return
37348
37349
37350 @@ -733,12 +725,10 @@ class Display(object):
37351 @param pkg: _emerge.Package.Package instance
37352 @rtype string
37353 """
37354 - ver_str = list(catpkgsplit(pkg.cpv)[2:])
37355 - if ver_str[1] == "r0":
37356 - ver_str[1] = ""
37357 - else:
37358 - ver_str[1] = "-" + ver_str[1]
37359 - return ver_str[0]+ver_str[1]
37360 + ver_str = pkg.cpv.version
37361 + if ver_str.endswith("-r0"):
37362 + ver_str = ver_str[:-3]
37363 + return ver_str
37364
37365
37366 def _get_installed_best(self, pkg, pkg_info):
37367 @@ -757,9 +747,10 @@ class Display(object):
37368 myinslotlist = None
37369 installed_versions = self.vardb.match_pkgs(pkg.cp)
37370 if self.vardb.cpv_exists(pkg.cpv):
37371 - addl = " "+yellow("R")+pkg_info.fetch_symbol+" "
37372 - installed_version = self.vardb.match_pkgs(pkg.cpv)[0]
37373 - if not self.quiet_repo_display and installed_version.repo != pkg.repo:
37374 + pkg_info.attr_display.replace = True
37375 + installed_version = pkg_info.previous_pkg
37376 + if installed_version.slot != pkg.slot or installed_version.sub_slot != pkg.sub_slot or \
37377 + not self.quiet_repo_display and installed_version.repo != pkg.repo:
37378 myoldbest = [installed_version]
37379 if pkg_info.ordered:
37380 if pkg_info.merge:
37381 @@ -775,17 +766,31 @@ class Display(object):
37382 myinslotlist = None
37383 if myinslotlist:
37384 myoldbest = myinslotlist[:]
37385 - addl = self._insert_slot(pkg, pkg_info, myinslotlist)
37386 + if not cpvequal(pkg.cpv,
37387 + best([pkg.cpv] + [x.cpv for x in myinslotlist])):
37388 + # Downgrade in slot
37389 + pkg_info.attr_display.new_version = True
37390 + pkg_info.attr_display.downgrade = True
37391 + if pkg_info.ordered:
37392 + self.counters.downgrades += 1
37393 + else:
37394 + # Update in slot
37395 + pkg_info.attr_display.new_version = True
37396 + if pkg_info.ordered:
37397 + self.counters.upgrades += 1
37398 else:
37399 myoldbest = installed_versions
37400 - addl = self._new_slot(pkg, pkg_info)
37401 + pkg_info.attr_display.new = True
37402 + pkg_info.attr_display.new_slot = True
37403 + if pkg_info.ordered:
37404 + self.counters.newslot += 1
37405 if self.conf.changelog:
37406 self.do_changelog(pkg, pkg_info)
37407 else:
37408 - addl = " " + green("N") + " " + pkg_info.fetch_symbol + " "
37409 + pkg_info.attr_display.new = True
37410 if pkg_info.ordered:
37411 self.counters.new += 1
37412 - return addl, myoldbest, myinslotlist
37413 + return myoldbest, myinslotlist
37414
37415
37416 def __call__(self, depgraph, mylist, favorites=None, verbosity=None):
37417 @@ -806,7 +811,7 @@ class Display(object):
37418 # files to fetch list - avoids counting a same file twice
37419 # in size display (verbose mode)
37420 self.myfetchlist = set()
37421 -
37422 +
37423 self.quiet_repo_display = "--quiet-repo-display" in depgraph._frozen_config.myopts
37424 if self.quiet_repo_display:
37425 # Use this set to detect when all the "repoadd" strings are "[0]"
37426 @@ -824,47 +829,52 @@ class Display(object):
37427 self.indent = " " * depth
37428
37429 if isinstance(pkg, Blocker):
37430 - if self._blockers(pkg, fetch_symbol=" "):
37431 - continue
37432 + self._blockers(pkg)
37433 else:
37434 pkg_info = self.set_pkg_info(pkg, ordered)
37435 - addl, pkg_info.oldbest_list, myinslotlist = \
37436 + pkg_info.oldbest_list, myinslotlist = \
37437 self._get_installed_best(pkg, pkg_info)
37438 + if ordered and pkg_info.merge and \
37439 + not pkg_info.attr_display.new:
37440 + for arg, atom in depgraph._iter_atoms_for_pkg(pkg):
37441 + if arg.force_reinstall:
37442 + pkg_info.attr_display.force_reinstall = True
37443 + break
37444 +
37445 self.verboseadd = ""
37446 if self.quiet_repo_display:
37447 self.repoadd = None
37448 - self._display_use(pkg, pkg_info.oldbest_list, myinslotlist)
37449 - self.recheck_hidden(pkg)
37450 + self._display_use(pkg, pkg_info)
37451 if self.conf.verbosity == 3:
37452 if self.quiet_repo_display:
37453 self.verbose_size(pkg, repoadd_set, pkg_info)
37454 else:
37455 self.verbose_size(pkg, None, pkg_info)
37456
37457 - pkg_info.cp = pkg.cp
37458 - pkg_info.ver = self.get_ver_str(pkg)
37459 -
37460 self.oldlp = self.conf.columnwidth - 30
37461 self.newlp = self.oldlp - 30
37462 - pkg_info.oldbest = self.convert_myoldbest(pkg, pkg_info.oldbest_list)
37463 + pkg_info.oldbest = self.convert_myoldbest(pkg, pkg_info)
37464 pkg_info.system, pkg_info.world = \
37465 self.check_system_world(pkg)
37466 - addl = self.set_interactive(pkg, pkg_info.ordered, addl)
37467 + if 'interactive' in pkg.properties and \
37468 + pkg.operation == 'merge':
37469 + pkg_info.attr_display.interactive = True
37470 + if ordered:
37471 + self.counters.interactive += 1
37472
37473 if self.include_mask_str():
37474 - addl += self.gen_mask_str(pkg)
37475 + pkg_info.attr_display.mask = self.gen_mask_str(pkg)
37476
37477 if pkg.root_config.settings["ROOT"] != "/":
37478 if pkg_info.oldbest:
37479 pkg_info.oldbest += " "
37480 if self.conf.columns:
37481 - myprint = self._set_non_root_columns(
37482 - addl, pkg_info, pkg)
37483 + myprint = self._set_non_root_columns(pkg, pkg_info)
37484 else:
37485 pkg_str = pkg.cpv
37486 - if self.conf.verbosity == 3 and not self.quiet_repo_display and (self.verbose_main_repo_display or
37487 - any(x.repo != self.portdb.repositories.mainRepo().name for x in pkg_info.oldbest_list + [pkg])):
37488 - pkg_str += _repo_separator + pkg.repo
37489 + if self.conf.verbosity == 3:
37490 + pkg_str = self._append_slot(pkg_str, pkg, pkg_info)
37491 + pkg_str = self._append_repository(pkg_str, pkg, pkg_info)
37492 if not pkg_info.merge:
37493 addl = self.empty_space_in_brackets()
37494 myprint = "[%s%s] " % (
37495 @@ -873,17 +883,16 @@ class Display(object):
37496 )
37497 else:
37498 myprint = "[%s %s] " % (
37499 - self.pkgprint(pkg.type_name, pkg_info), addl)
37500 + self.pkgprint(pkg.type_name, pkg_info),
37501 + pkg_info.attr_display)
37502 myprint += self.indent + \
37503 self.pkgprint(pkg_str, pkg_info) + " " + \
37504 pkg_info.oldbest + darkgreen("to " + pkg.root)
37505 else:
37506 if self.conf.columns:
37507 - myprint = self._set_root_columns(
37508 - addl, pkg_info, pkg)
37509 + myprint = self._set_root_columns(pkg, pkg_info)
37510 else:
37511 - myprint = self._set_no_columns(
37512 - pkg, pkg_info, addl)
37513 + myprint = self._set_no_columns(pkg, pkg_info)
37514
37515 if self.conf.columns and pkg.operation == "uninstall":
37516 continue
37517 @@ -908,3 +917,105 @@ class Display(object):
37518 self.print_changelog()
37519
37520 return os.EX_OK
37521 +
37522 +
37523 +def format_unmatched_atom(pkg, atom, pkg_use_enabled):
37524 + """
37525 + Returns two strings. The first string contains the
37526 + 'atom' with parts of the atom colored, which 'pkg'
37527 + doesn't match. The second string has the same number
37528 + of characters as the first one, but consists of only
37529 + white space or ^. The ^ characters have the same position
37530 + as the colored parts of the first string.
37531 + """
37532 + # Things to check:
37533 + # 1. Version
37534 + # 2. cp
37535 + # 3. slot/sub_slot
37536 + # 4. repository
37537 + # 5. USE
37538 +
37539 + highlight = set()
37540 +
37541 + def perform_coloring():
37542 + atom_str = ""
37543 + marker_str = ""
37544 + for ii, x in enumerate(atom):
37545 + if ii in highlight:
37546 + atom_str += colorize("BAD", x)
37547 + marker_str += "^"
37548 + else:
37549 + atom_str += x
37550 + marker_str += " "
37551 + return atom_str, marker_str
37552 +
37553 + if atom.cp != pkg.cp:
37554 + # Highlight the cp part only.
37555 + ii = atom.find(atom.cp)
37556 + highlight.update(range(ii, ii + len(atom.cp)))
37557 + return perform_coloring()
37558 +
37559 + version_atom = atom.without_repo.without_slot.without_use
37560 + version_atom_set = InternalPackageSet(initial_atoms=(version_atom,))
37561 + highlight_version = not bool(version_atom_set.findAtomForPackage(pkg,
37562 + modified_use=pkg_use_enabled(pkg)))
37563 +
37564 + highlight_slot = False
37565 + if (atom.slot and atom.slot != pkg.slot) or \
37566 + (atom.sub_slot and atom.sub_slot != pkg.sub_slot):
37567 + highlight_slot = True
37568 +
37569 + if highlight_version:
37570 + op = atom.operator
37571 + ver = None
37572 + if atom.cp != atom.cpv:
37573 + ver = cpv_getversion(atom.cpv)
37574 +
37575 + if op == "=*":
37576 + op = "="
37577 + ver += "*"
37578 +
37579 + if op is not None:
37580 + highlight.update(range(len(op)))
37581 +
37582 + if ver is not None:
37583 + start = atom.rfind(ver)
37584 + end = start + len(ver)
37585 + highlight.update(range(start, end))
37586 +
37587 + if highlight_slot:
37588 + slot_str = ":" + atom.slot
37589 + if atom.sub_slot:
37590 + slot_str += "/" + atom.sub_slot
37591 + if atom.slot_operator:
37592 + slot_str += atom.slot_operator
37593 + start = atom.find(slot_str)
37594 + end = start + len(slot_str)
37595 + highlight.update(range(start, end))
37596 +
37597 + highlight_use = set()
37598 + if atom.use:
37599 + use_atom = "%s[%s]" % (atom.cp, str(atom.use))
37600 + use_atom_set = InternalPackageSet(initial_atoms=(use_atom,))
37601 + if not use_atom_set.findAtomForPackage(pkg, \
37602 + modified_use=pkg_use_enabled(pkg)):
37603 + missing_iuse = pkg.iuse.get_missing_iuse(
37604 + atom.unevaluated_atom.use.required)
37605 + if missing_iuse:
37606 + highlight_use = set(missing_iuse)
37607 + else:
37608 + #Use conditionals not met.
37609 + violated_atom = atom.violated_conditionals(
37610 + pkg_use_enabled(pkg), pkg.iuse.is_valid_flag)
37611 + if violated_atom.use is not None:
37612 + highlight_use = set(violated_atom.use.enabled.union(
37613 + violated_atom.use.disabled))
37614 +
37615 + if highlight_use:
37616 + ii = atom.find("[") + 1
37617 + for token in atom.use.tokens:
37618 + if token.lstrip("-!").rstrip("=?") in highlight_use:
37619 + highlight.update(range(ii, ii + len(token)))
37620 + ii += len(token) + 1
37621 +
37622 + return perform_coloring()
37623
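A minimal standalone sketch of the caret-marker technique that the new format_unmatched_atom() helper implements, for illustration only (it is not part of the patch): collect the character indices to flag, then emit a second line of equal length with '^' under each flagged position. Upper-casing stands in for colorize("BAD", ...):

    def mark_positions(text, highlight):
        marked, marker = [], []
        for i, ch in enumerate(text):
            if i in highlight:
                marked.append(ch.upper())  # stand-in for colorize("BAD", ch)
                marker.append("^")
            else:
                marked.append(ch)
                marker.append(" ")
        return "".join(marked), "".join(marker)

    atom = ">=dev-lang/python-3.3:3.3"
    # Pretend the 'dev-lang/python' part did not match.
    start = atom.find("dev-lang/python")
    text, marker = mark_positions(atom,
        set(range(start, start + len("dev-lang/python"))))
    print(text)    # the atom with the flagged part upper-cased
    print(marker)  # spaces with '^' characters under the flagged part
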
37624 diff --git a/pym/_emerge/resolver/output_helpers.py b/pym/_emerge/resolver/output_helpers.py
37625 index e751dd8..58b2694 100644
37626 --- a/pym/_emerge/resolver/output_helpers.py
37627 +++ b/pym/_emerge/resolver/output_helpers.py
37628 @@ -1,9 +1,12 @@
37629 -# Copyright 2010-2011 Gentoo Foundation
37630 +# Copyright 2010-2013 Gentoo Foundation
37631 # Distributed under the terms of the GNU General Public License v2
37632
37633 """Contains private support functions for the Display class
37634 in output.py
37635 """
37636 +
37637 +from __future__ import unicode_literals
37638 +
37639 __all__ = (
37640 )
37641
37642 @@ -15,9 +18,10 @@ from portage import os
37643 from portage import _encodings, _unicode_encode
37644 from portage._sets.base import InternalPackageSet
37645 from portage.output import (blue, bold, colorize, create_color_func,
37646 - green, red, teal, yellow)
37647 + green, red, teal, turquoise, yellow)
37648 bad = create_color_func("BAD")
37649 from portage.util import shlex_split, writemsg
37650 +from portage.util.SlotObject import SlotObject
37651 from portage.versions import catpkgsplit
37652
37653 from _emerge.Blocker import Blocker
37654 @@ -223,7 +227,7 @@ class _DisplayConfig(object):
37655 self.reinstall_nodes = dynamic_config._reinstall_nodes
37656 self.digraph = dynamic_config.digraph
37657 self.blocker_uninstalls = dynamic_config._blocker_uninstalls
37658 - self.slot_pkg_map = dynamic_config._slot_pkg_map
37659 + self.package_tracker = dynamic_config._package_tracker
37660 self.set_nodes = dynamic_config._set_nodes
37661
37662 self.pkg_use_enabled = depgraph._pkg_use_enabled
37663 @@ -245,10 +249,9 @@ def _format_size(mysize):
37664 mystr=mystr[:mycount]+","+mystr[mycount:]
37665 return mystr+" kB"
37666
37667 -
37668 def _create_use_string(conf, name, cur_iuse, iuse_forced, cur_use,
37669 old_iuse, old_use,
37670 - is_new, reinst_flags):
37671 + is_new, feature_flags, reinst_flags):
37672
37673 if not conf.print_use_string:
37674 return ""
37675 @@ -266,6 +269,7 @@ def _create_use_string(conf, name, cur_iuse, iuse_forced, cur_use,
37676 any_iuse = cur_iuse.union(old_iuse)
37677 any_iuse = list(any_iuse)
37678 any_iuse.sort()
37679 +
37680 for flag in any_iuse:
37681 flag_str = None
37682 isEnabled = False
37683 @@ -299,7 +303,9 @@ def _create_use_string(conf, name, cur_iuse, iuse_forced, cur_use,
37684 elif flag in old_use:
37685 flag_str = green("-" + flag) + "*"
37686 if flag_str:
37687 - if flag in iuse_forced:
37688 + if flag in feature_flags:
37689 + flag_str = "{" + flag_str + "}"
37690 + elif flag in iuse_forced:
37691 flag_str = "(" + flag_str + ")"
37692 if isEnabled:
37693 enabled.append(flag_str)
37694 @@ -364,8 +370,9 @@ def _tree_display(conf, mylist):
37695 # If the uninstall task did not need to be executed because
37696 # of an upgrade, display Blocker -> Upgrade edges since the
37697 # corresponding Blocker -> Uninstall edges will not be shown.
37698 - upgrade_node = \
37699 - conf.slot_pkg_map[uninstall.root].get(uninstall.slot_atom)
37700 + upgrade_node = next(conf.package_tracker.match(
37701 + uninstall.root, uninstall.slot_atom), None)
37702 +
37703 if upgrade_node is not None and \
37704 uninstall not in executed_uninstalls:
37705 for blocker in uninstall_parents:
37706 @@ -611,9 +618,10 @@ class PkgInfo(object):
37707 information about the pkg being printed.
37708 """
37709
37710 - __slots__ = ("built", "cp", "ebuild_path", "fetch_symbol", "merge",
37711 - "oldbest", "oldbest_list", "operation", "ordered",
37712 - "repo_name", "repo_path_real", "system", "use", "ver", "world")
37713 + __slots__ = ("attr_display", "built", "cp",
37714 + "ebuild_path", "fetch_symbol", "merge",
37715 + "oldbest", "oldbest_list", "operation", "ordered", "previous_pkg",
37716 + "repo_name", "repo_path_real", "slot", "sub_slot", "system", "use", "ver", "world")
37717
37718
37719 def __init__(self):
37720 @@ -626,9 +634,74 @@ class PkgInfo(object):
37721 self.oldbest_list = []
37722 self.operation = ''
37723 self.ordered = False
37724 + self.previous_pkg = None
37725 self.repo_path_real = ''
37726 self.repo_name = ''
37727 + self.slot = ''
37728 + self.sub_slot = ''
37729 self.system = False
37730 self.use = ''
37731 self.ver = ''
37732 self.world = False
37733 + self.attr_display = PkgAttrDisplay()
37734 +
37735 +class PkgAttrDisplay(SlotObject):
37736 +
37737 + __slots__ = ("downgrade", "fetch_restrict", "fetch_restrict_satisfied",
37738 + "force_reinstall",
37739 + "interactive", "mask", "new", "new_slot", "new_version", "replace")
37740 +
37741 + def __str__(self):
37742 + output = []
37743 +
37744 + if self.interactive:
37745 + output.append(colorize("WARN", "I"))
37746 + else:
37747 + output.append(" ")
37748 +
37749 + if self.new or self.force_reinstall:
37750 + if self.force_reinstall:
37751 + output.append(red("r"))
37752 + else:
37753 + output.append(green("N"))
37754 + else:
37755 + output.append(" ")
37756 +
37757 + if self.new_slot or self.replace:
37758 + if self.replace:
37759 + output.append(yellow("R"))
37760 + else:
37761 + output.append(green("S"))
37762 + else:
37763 + output.append(" ")
37764 +
37765 + if self.fetch_restrict or self.fetch_restrict_satisfied:
37766 + if self.fetch_restrict_satisfied:
37767 + output.append(green("f"))
37768 + else:
37769 + output.append(red("F"))
37770 + else:
37771 + output.append(" ")
37772 +
37773 + if self.new_version:
37774 + output.append(turquoise("U"))
37775 + else:
37776 + output.append(" ")
37777 +
37778 + if self.downgrade:
37779 + output.append(blue("D"))
37780 + else:
37781 + output.append(" ")
37782 +
37783 + if self.mask is not None:
37784 + output.append(self.mask)
37785 +
37786 + return "".join(output)
37787 +
37788 + if sys.hexversion < 0x3000000:
37789 +
37790 + __unicode__ = __str__
37791 +
37792 + def __str__(self):
37793 + return _unicode_encode(self.__unicode__(),
37794 + encoding=_encodings['content'])
37795
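For illustration only (not part of the patch), a hypothetical use of the new PkgAttrDisplay class. It derives from SlotObject, so, assuming SlotObject's usual keyword-argument constructor (unset slots default to None), its __str__ renders the familiar fixed-width emerge status column. This assumes a portage checkout with pym/ on PYTHONPATH:

    from _emerge.resolver.output_helpers import PkgAttrDisplay

    # Flags not passed here stay None and render as blank columns.
    attrs = PkgAttrDisplay(new=True, fetch_restrict_satisfied=True,
        new_version=True)
    print("[ebuild %s]" % (attrs,))  # e.g. "[ebuild  N fU ]" plus color escapes
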
37796 diff --git a/pym/_emerge/resolver/package_tracker.py b/pym/_emerge/resolver/package_tracker.py
37797 new file mode 100644
37798 index 0000000..5982750
37799 --- /dev/null
37800 +++ b/pym/_emerge/resolver/package_tracker.py
37801 @@ -0,0 +1,301 @@
37802 +# Copyright 2014 Gentoo Foundation
37803 +# Distributed under the terms of the GNU General Public License v2
37804 +
37805 +from __future__ import print_function
37806 +
37807 +import collections
37808 +
37809 +import portage
37810 +portage.proxy.lazyimport.lazyimport(globals(),
37811 + 'portage.dep:Atom,match_from_list',
37812 + 'portage.util:cmp_sort_key',
37813 + 'portage.versions:vercmp',
37814 +)
37815 +
37816 +_PackageConflict = collections.namedtuple("_PackageConflict", ["root", "pkgs", "atom", "description"])
37817 +
37818 +class PackageConflict(_PackageConflict):
37819 + """
37820 + Class to track the reason for a conflict and the conflicting packages.
37821 + """
37822 + def __iter__(self):
37823 + return iter(self.pkgs)
37824 +
37825 + def __contains__(self, pkg):
37826 + return pkg in self.pkgs
37827 +
37828 + def __len__(self):
37829 + return len(self.pkgs)
37830 +
37831 +
37832 +class PackageTracker(object):
37833 + """
37834 + This class tracks packages which are currently
37835 + installed and packages which have been pulled into
37836 + the dependency graph.
37837 +
37838 + It automatically tracks conflicts between packages.
37839 +
37840 + Possible conflicts:
37841 + 1) Packages that share the same SLOT.
37842 + 2) Packages with the same cpv.
37843 + Not yet implemented:
37844 + 3) Packages that block each other.
37845 + """
37846 +
37847 + def __init__(self):
37848 + # Mapping from package keys to lists of packages.
37849 + self._cp_pkg_map = collections.defaultdict(list)
37850 + self._cp_vdb_pkg_map = collections.defaultdict(list)
37851 + # List of package keys that may contain conflicts.
37852 + # The insertion order must be preserved.
37853 + self._multi_pkgs = []
37854 +
37855 + # Cache for result of conflicts().
37856 + self._conflicts_cache = None
37857 +
37858 + # Records, for each pulled package, which installed
37859 + # packages are replaced.
37860 + self._replacing = collections.defaultdict(list)
37861 + # Records which pulled packages replace each installed package.
37862 + self._replaced_by = collections.defaultdict(list)
37863 +
37864 + self._match_cache = collections.defaultdict(dict)
37865 +
37866 + def add_pkg(self, pkg):
37867 + """
37868 + Add a new package to the tracker. Records conflicts as necessary.
37869 + """
37870 + cp_key = pkg.root, pkg.cp
37871 +
37872 + if any(other is pkg for other in self._cp_pkg_map[cp_key]):
37873 + return
37874 +
37875 + self._cp_pkg_map[cp_key].append(pkg)
37876 +
37877 + if len(self._cp_pkg_map[cp_key]) > 1:
37878 + self._conflicts_cache = None
37879 + if len(self._cp_pkg_map[cp_key]) == 2:
37880 + self._multi_pkgs.append(cp_key)
37881 +
37882 + self._replacing[pkg] = []
37883 + for installed in self._cp_vdb_pkg_map.get(cp_key, []):
37884 + if installed.slot_atom == pkg.slot_atom or \
37885 + installed.cpv == pkg.cpv:
37886 + self._replacing[pkg].append(installed)
37887 + self._replaced_by[installed].append(pkg)
37888 +
37889 + self._match_cache.pop(cp_key, None)
37890 +
37891 + def add_installed_pkg(self, installed):
37892 + Add an installed package during vdb load. As long as
37893 + add_pkg hasn't been called with them, these packages are
37894 + only returned by match() when 'installed' is True.
37895 + been called with them. They are only returned by match_final.
37896 + """
37897 + cp_key = installed.root, installed.cp
37898 + if any(other is installed for other in self._cp_vdb_pkg_map[cp_key]):
37899 + return
37900 +
37901 + self._cp_vdb_pkg_map[cp_key].append(installed)
37902 +
37903 + for pkg in self._cp_pkg_map.get(cp_key, []):
37904 + if installed.slot_atom == pkg.slot_atom or \
37905 + installed.cpv == pkg.cpv:
37906 + self._replacing[pkg].append(installed)
37907 + self._replaced_by[installed].append(pkg)
37908 +
37909 + self._match_cache.pop(cp_key, None)
37910 +
37911 + def remove_pkg(self, pkg):
37912 + """
37913 + Removes the package from the tracker.
37914 + Raises KeyError if it isn't present.
37915 + """
37916 + cp_key = pkg.root, pkg.cp
37917 + try:
37918 + self._cp_pkg_map.get(cp_key, []).remove(pkg)
37919 + except ValueError:
37920 + raise KeyError(pkg)
37921 +
37922 + if self._cp_pkg_map[cp_key]:
37923 + self._conflicts_cache = None
37924 +
37925 + if not self._cp_pkg_map[cp_key]:
37926 + del self._cp_pkg_map[cp_key]
37927 + elif len(self._cp_pkg_map[cp_key]) == 1:
37928 + self._multi_pkgs = [other_cp_key for other_cp_key in self._multi_pkgs \
37929 + if other_cp_key != cp_key]
37930 +
37931 + for installed in self._replacing[pkg]:
37932 + self._replaced_by[installed].remove(pkg)
37933 + if not self._replaced_by[installed]:
37934 + del self._replaced_by[installed]
37935 + del self._replacing[pkg]
37936 +
37937 + self._match_cache.pop(cp_key, None)
37938 +
37939 + def discard_pkg(self, pkg):
37940 + """
37941 + Removes the package from the tracker.
37942 + Does not raise KeyError if it is not present.
37943 + """
37944 + try:
37945 + self.remove_pkg(pkg)
37946 + except KeyError:
37947 + pass
37948 +
37949 + def match(self, root, atom, installed=True):
37950 + """
37951 + Iterates over the packages matching 'atom'.
37952 + If 'installed' is True, installed non-replaced
37953 + packages may also be returned.
37954 + """
37955 + cp_key = root, atom.cp
37956 + cache_key = root, atom, installed
37957 + try:
37958 + return iter(self._match_cache.get(cp_key, {})[cache_key])
37959 + except KeyError:
37960 + pass
37961 +
37962 + candidates = self._cp_pkg_map.get(cp_key, [])[:]
37963 +
37964 + if installed:
37965 + for installed in self._cp_vdb_pkg_map.get(cp_key, []):
37966 + if installed not in self._replaced_by:
37967 + candidates.append(installed)
37968 +
37969 + ret = match_from_list(atom, candidates)
37970 + ret.sort(key=cmp_sort_key(lambda x, y: vercmp(x.version, y.version)))
37971 + self._match_cache[cp_key][cache_key] = ret
37972 +
37973 + return iter(ret)
37974 +
37975 + def conflicts(self):
37976 + """
37977 + Iterates over the currently existing conflicts.
37978 + """
37979 + if self._conflicts_cache is None:
37980 + self._conflicts_cache = []
37981 +
37982 + for cp_key in self._multi_pkgs:
37983 +
37984 + # Categorize packages according to cpv and slot.
37985 + slot_map = collections.defaultdict(list)
37986 + cpv_map = collections.defaultdict(list)
37987 + for pkg in self._cp_pkg_map[cp_key]:
37988 + slot_key = pkg.root, pkg.slot_atom
37989 + cpv_key = pkg.root, pkg.cpv
37990 + slot_map[slot_key].append(pkg)
37991 + cpv_map[cpv_key].append(pkg)
37992 +
37993 + # Slot conflicts.
37994 + for slot_key in slot_map:
37995 + slot_pkgs = slot_map[slot_key]
37996 + if len(slot_pkgs) > 1:
37997 + self._conflicts_cache.append(PackageConflict(
37998 + description = "slot conflict",
37999 + root = slot_key[0],
38000 + atom = slot_key[1],
38001 + pkgs = tuple(slot_pkgs),
38002 + ))
38003 +
38004 + # CPV conflicts.
38005 + for cpv_key in cpv_map:
38006 + cpv_pkgs = cpv_map[cpv_key]
38007 + if len(cpv_pkgs) > 1:
38008 + # Make sure this cpv conflict is not a slot conflict at the same time.
38009 + # Ignore it if it is.
38010 + slots = set(pkg.slot for pkg in cpv_pkgs)
38011 + if len(slots) > 1:
38012 + self._conflicts_cache.append(PackageConflict(
38013 + description = "cpv conflict",
38014 + root = cpv_key[0],
38015 + atom = cpv_key[1],
38016 + pkgs = tuple(cpv_pkgs),
38017 + ))
38018 +
38019 + return iter(self._conflicts_cache)
38020 +
38021 + def slot_conflicts(self):
38022 + """
38023 + Iterates over present slot conflicts.
38024 + This is only intended for consumers that haven't been
38025 + updated to deal with other kinds of conflicts.
38026 + This function should be removed once all consumers are updated.
38027 + """
38028 + return (conflict for conflict in self.conflicts() \
38029 + if conflict.description == "slot conflict")
38030 +
38031 + def all_pkgs(self, root):
38032 + """
38033 + Iterates over all packages for the given root
38034 + present in the tracker, including the installed
38035 + packages.
38036 + """
38037 + for cp_key in self._cp_pkg_map:
38038 + if cp_key[0] == root:
38039 + for pkg in self._cp_pkg_map[cp_key]:
38040 + yield pkg
38041 +
38042 + for cp_key in self._cp_vdb_pkg_map:
38043 + if cp_key[0] == root:
38044 + for installed in self._cp_vdb_pkg_map[cp_key]:
38045 + if installed not in self._replaced_by:
38046 + yield installed
38047 +
38048 + def contains(self, pkg, installed=True):
38049 + """
38050 + Checks if the package is in the tracker.
38051 + If 'installed' is True, returns True for
38052 + non-replaced installed packages.
38053 + """
38054 + cp_key = pkg.root, pkg.cp
38055 + for other in self._cp_pkg_map.get(cp_key, []):
38056 + if other is pkg:
38057 + return True
38058 +
38059 + if installed:
38060 + for installed in self._cp_vdb_pkg_map.get(cp_key, []):
38061 + if installed is pkg and \
38062 + installed not in self._replaced_by:
38063 + return True
38064 +
38065 + return False
38066 +
38067 + def __contains__(self, pkg):
38068 + """
38069 + Checks if the package is in the tracker.
38070 + Returns True for non-replaced installed packages.
38071 + """
38072 + return self.contains(pkg, installed=True)
38073 +
38074 +
38075 +class PackageTrackerDbapiWrapper(object):
38076 + """
38077 + A wrapper class that provides parts of the legacy
38078 + dbapi interface. Remove it once all consumers have
38079 + died.
38080 + """
38081 + def __init__(self, root, package_tracker):
38082 + self._root = root
38083 + self._package_tracker = package_tracker
38084 +
38085 + def cpv_inject(self, pkg):
38086 + self._package_tracker.add_pkg(pkg)
38087 +
38088 + def match_pkgs(self, atom):
38089 + if not isinstance(atom, Atom):
38090 + atom = Atom(atom)
38091 + ret = sorted(self._package_tracker.match(self._root, atom),
38092 + key=cmp_sort_key(lambda x, y: vercmp(x.version, y.version)))
38093 + return ret
38094 +
38095 + def __iter__(self):
38096 + return self._package_tracker.all_pkgs(self._root)
38097 +
38098 + def match(self, atom, use_cache=None):
38099 + return self.match_pkgs(atom)
38100 +
38101 + def cp_list(self, cp):
38102 + return self.match_pkgs(cp)
38103
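The conflicts() method above boils down to a grouping pass: bucket the packages of each contested cp by (root, slot) and by (root, cpv), then report every bucket holding more than one member. A standalone sketch of that technique, using plain tuples in place of portage Package objects (illustration only, not part of the patch):

    import collections

    def find_slot_conflicts(pkgs):
        """pkgs: iterable of (root, slot_atom, cpv) tuples."""
        slot_map = collections.defaultdict(list)
        for root, slot_atom, cpv in pkgs:
            slot_map[(root, slot_atom)].append(cpv)
        # Only buckets with more than one member are conflicts.
        return dict((key, group) for key, group in slot_map.items()
            if len(group) > 1)

    print(find_slot_conflicts([
        ("/", "dev-lang/python:2.7", "dev-lang/python-2.7.5"),
        ("/", "dev-lang/python:2.7", "dev-lang/python-2.7.6"),
        ("/", "dev-lang/python:3.3", "dev-lang/python-3.3.3"),
    ]))
    # {('/', 'dev-lang/python:2.7'):
    #     ['dev-lang/python-2.7.5', 'dev-lang/python-2.7.6']}
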
38104 diff --git a/pym/_emerge/resolver/slot_collision.py b/pym/_emerge/resolver/slot_collision.py
38105 index 783a648..baeab08 100644
38106 --- a/pym/_emerge/resolver/slot_collision.py
38107 +++ b/pym/_emerge/resolver/slot_collision.py
38108 @@ -1,10 +1,11 @@
38109 -# Copyright 2010-2012 Gentoo Foundation
38110 +# Copyright 2010-2014 Gentoo Foundation
38111 # Distributed under the terms of the GNU General Public License v2
38112
38113 -from __future__ import print_function
38114 +from __future__ import print_function, unicode_literals
38115
38116 import sys
38117
38118 +from portage import _encodings, _unicode_encode
38119 from _emerge.AtomArg import AtomArg
38120 from _emerge.Package import Package
38121 from _emerge.PackageArg import PackageArg
38122 @@ -88,10 +89,11 @@ class slot_conflict_handler(object):
38123 self.debug = "--debug" in self.myopts
38124 if self.debug:
38125 writemsg("Starting slot conflict handler\n", noiselevel=-1)
38126 - #slot_collision_info is a dict mapping (slot atom, root) to set
38127 - #of packages. The packages in the set all belong to the same
38128 - #slot.
38129 - self.slot_collision_info = depgraph._dynamic_config._slot_collision_info
38130 +
38131 + # List of tuples, where each tuple represents a slot conflict.
38132 + self.all_conflicts = []
38133 + for conflict in depgraph._dynamic_config._package_tracker.slot_conflicts():
38134 + self.all_conflicts.append((conflict.root, conflict.atom, conflict.pkgs))
38135
38136 #A dict mapping packages to pairs of parent package
38137 #and parent atom
38138 @@ -108,8 +110,7 @@ class slot_conflict_handler(object):
38139 all_conflict_atoms_by_slotatom = []
38140
38141 #fill conflict_pkgs, all_conflict_atoms_by_slotatom
38142 - for (atom, root), pkgs \
38143 - in self.slot_collision_info.items():
38144 + for root, atom, pkgs in self.all_conflicts:
38145 conflict_pkgs.append(list(pkgs))
38146 all_conflict_atoms_by_slotatom.append(set())
38147
38148 @@ -150,7 +151,7 @@ class slot_conflict_handler(object):
38149 if self.debug:
38150 writemsg("\nNew configuration:\n", noiselevel=-1)
38151 for pkg in config:
38152 - writemsg(" " + str(pkg) + "\n", noiselevel=-1)
38153 + writemsg(" %s\n" % (pkg,), noiselevel=-1)
38154 writemsg("\n", noiselevel=-1)
38155
38156 new_solutions = self._check_configuration(config, all_conflict_atoms_by_slotatom, conflict_nodes)
38157 @@ -225,10 +226,14 @@ class slot_conflict_handler(object):
38158 new_change = {}
38159 for pkg in solution:
38160 for flag, state in solution[pkg].items():
38161 + real_flag = pkg.iuse.get_real_flag(flag)
38162 + if real_flag is None:
38163 + # Triggered by use-dep defaults.
38164 + continue
38165 if state == "enabled" and flag not in _pkg_use_enabled(pkg):
38166 - new_change.setdefault(pkg, {})[flag] = True
38167 + new_change.setdefault(pkg, {})[real_flag] = True
38168 elif state == "disabled" and flag in _pkg_use_enabled(pkg):
38169 - new_change.setdefault(pkg, {})[flag] = False
38170 + new_change.setdefault(pkg, {})[real_flag] = False
38171 return new_change
38172
38173 def _prepare_conflict_msg_and_check_for_specificity(self):
38174 @@ -236,6 +241,7 @@ class slot_conflict_handler(object):
38175 Print all slot conflicts in a human readable way.
38176 """
38177 _pkg_use_enabled = self.depgraph._pkg_use_enabled
38178 + verboseconflicts = "--verbose-conflicts" in self.myopts
38179 msg = self.conflict_msg
38180 indent = " "
38181 msg.append("\n!!! Multiple package instances within a single " + \
38182 @@ -243,16 +249,15 @@ class slot_conflict_handler(object):
38183 msg.append("!!! into the dependency graph, resulting" + \
38184 " in a slot conflict:\n\n")
38185
38186 - for (slot_atom, root), pkgs \
38187 - in self.slot_collision_info.items():
38188 - msg.append(str(slot_atom))
38189 + for root, slot_atom, pkgs in self.all_conflicts:
38190 + msg.append("%s" % (slot_atom,))
38191 if root != self.depgraph._frozen_config._running_root.root:
38192 msg.append(" for %s" % (root,))
38193 msg.append("\n\n")
38194
38195 for pkg in pkgs:
38196 msg.append(indent)
38197 - msg.append(str(pkg))
38198 + msg.append("%s" % (pkg,))
38199 parent_atoms = self.all_parents.get(pkg)
38200 if parent_atoms:
38201 #Create a list of collision reasons and map them to sets
38202 @@ -268,12 +273,14 @@ class slot_conflict_handler(object):
38203 for ppkg, atom in parent_atoms:
38204 atom_set = InternalPackageSet(initial_atoms=(atom,))
38205 atom_without_use_set = InternalPackageSet(initial_atoms=(atom.without_use,))
38206 + atom_without_use_and_slot_set = InternalPackageSet(initial_atoms=(
38207 + atom.without_use.without_slot,))
38208
38209 for other_pkg in pkgs:
38210 if other_pkg == pkg:
38211 continue
38212
38213 - if not atom_without_use_set.findAtomForPackage(other_pkg, \
38214 + if not atom_without_use_and_slot_set.findAtomForPackage(other_pkg, \
38215 modified_use=_pkg_use_enabled(other_pkg)):
38216 if atom.operator is not None:
38217 # The version range does not match.
38218 @@ -290,9 +297,11 @@ class slot_conflict_handler(object):
38219 atoms.add((ppkg, atom, other_pkg))
38220 num_all_specific_atoms += 1
38221 collision_reasons[key] = atoms
38222 - else:
38223 - # The slot_abi does not match.
38224 - key = ("sub-slot", atom.slot_abi)
38225 +
38226 + elif not atom_without_use_set.findAtomForPackage(other_pkg, \
38227 + modified_use=_pkg_use_enabled(other_pkg)):
38228 + # The slot and/or sub_slot does not match.
38229 + key = ("slot", (atom.slot, atom.sub_slot, atom.slot_operator))
38230 atoms = collision_reasons.get(key, set())
38231 atoms.add((ppkg, atom, other_pkg))
38232 num_all_specific_atoms += 1
38233 @@ -312,11 +321,36 @@ class slot_conflict_handler(object):
38234 #Use conditionals not met.
38235 violated_atom = atom.violated_conditionals(_pkg_use_enabled(other_pkg), \
38236 other_pkg.iuse.is_valid_flag)
38237 + if violated_atom.use is None:
38238 + # Something like bug #453400 caused the
38239 + # above findAtomForPackage call to
38240 + # return None unexpectedly.
38241 + msg = ("\n\n!!! BUG: Detected "
38242 + "USE dep match inconsistency:\n"
38243 + "\tppkg: %s\n"
38244 + "\tviolated_atom: %s\n"
38245 + "\tatom: %s unevaluated: %s\n"
38246 + "\tother_pkg: %s IUSE: %s USE: %s\n" %
38247 + (ppkg,
38248 + violated_atom,
38249 + atom,
38250 + atom.unevaluated_atom,
38251 + other_pkg,
38252 + sorted(other_pkg.iuse.all),
38253 + sorted(_pkg_use_enabled(other_pkg))))
38254 + writemsg(msg, noiselevel=-2)
38255 + raise AssertionError(
38256 + 'BUG: USE dep match inconsistency')
38257 for flag in violated_atom.use.enabled.union(violated_atom.use.disabled):
38258 atoms = collision_reasons.get(("use", flag), set())
38259 atoms.add((ppkg, atom, other_pkg))
38260 collision_reasons[("use", flag)] = atoms
38261 num_all_specific_atoms += 1
38262 + elif isinstance(ppkg, AtomArg) and other_pkg.installed:
38263 + parent_atoms = collision_reasons.get(("AtomArg", None), set())
38264 + parent_atoms.add((ppkg, atom))
38265 + collision_reasons[("AtomArg", None)] = parent_atoms
38266 + num_all_specific_atoms += 1
38267
38268 msg.append(" pulled in by\n")
38269
38270 @@ -342,10 +376,16 @@ class slot_conflict_handler(object):
38271 best_matches[atom.cp] = (ppkg, atom)
38272 else:
38273 best_matches[atom.cp] = (ppkg, atom)
38274 - selected_for_display.update(best_matches.values())
38275 - elif type == "sub-slot":
38276 + if verboseconflicts:
38277 + selected_for_display.add((ppkg, atom))
38278 + if not verboseconflicts:
38279 + selected_for_display.update(
38280 + best_matches.values())
38281 + elif type == "slot":
38282 for ppkg, atom, other_pkg in parents:
38283 selected_for_display.add((ppkg, atom))
38284 + if not verboseconflicts:
38285 + break
38286 elif type == "use":
38287 #Prefer atoms with unconditional use deps over, because it's
38288 #not possible to change them on the parent, which means there
38289 @@ -387,21 +427,50 @@ class slot_conflict_handler(object):
38290 # If the list is long, people can simply
38291 # use a pager.
38292 selected_for_display.add((ppkg, atom))
38293 + elif type == "AtomArg":
38294 + for ppkg, atom in parents:
38295 + selected_for_display.add((ppkg, atom))
38296
38297 - def highlight_violations(atom, version, use=[]):
38298 + def highlight_violations(atom, version, use, slot_violated):
38299 """Colorize parts of an atom"""
38300 - atom_str = str(atom)
38301 + atom_str = "%s" % (atom,)
38302 + colored_idx = set()
38303 if version:
38304 op = atom.operator
38305 ver = None
38306 if atom.cp != atom.cpv:
38307 ver = cpv_getversion(atom.cpv)
38308 slot = atom.slot
38309 + sub_slot = atom.sub_slot
38310 + slot_operator = atom.slot_operator
38311
38312 if op == "=*":
38313 op = "="
38314 ver += "*"
38315
38316 + slot_str = ""
38317 + if slot:
38318 + slot_str = ":" + slot
38319 + if sub_slot:
38320 + slot_str += "/" + sub_slot
38321 + if slot_operator:
38322 + slot_str += slot_operator
38323 +
38324 + # Compute color_idx before adding the color codes
38325 + # as these change the indices of the letters.
38326 + if op is not None:
38327 + colored_idx.update(range(len(op)))
38328 +
38329 + if ver is not None:
38330 + start = atom_str.rfind(ver)
38331 + end = start + len(ver)
38332 + colored_idx.update(range(start, end))
38333 +
38334 + if slot_str:
38335 + ii = atom_str.find(slot_str)
38336 + colored_idx.update(range(ii, ii + len(slot_str)))
38337 +
38338 +
38339 if op is not None:
38340 atom_str = atom_str.replace(op, colorize("BAD", op), 1)
38341
38342 @@ -411,25 +480,48 @@ class slot_conflict_handler(object):
38343 atom_str = atom_str[:start] + \
38344 colorize("BAD", ver) + \
38345 atom_str[end:]
38346 +
38347 + if slot_str:
38348 + atom_str = atom_str.replace(slot_str, colorize("BAD", slot_str), 1)
38349 +
38350 + elif slot_violated:
38351 + slot = atom.slot
38352 + sub_slot = atom.sub_slot
38353 + slot_operator = atom.slot_operator
38354 +
38355 + slot_str = ""
38356 if slot:
38357 - atom_str = atom_str.replace(":" + slot, colorize("BAD", ":" + slot))
38358 + slot_str = ":" + slot
38359 + if sub_slot:
38360 + slot_str += "/" + sub_slot
38361 + if slot_operator:
38362 + slot_str += slot_operator
38363 +
38364 + if slot_str:
38365 + ii = atom_str.find(slot_str)
38366 + colored_idx.update(range(ii, ii + len(slot_str)))
38367 + atom_str = atom_str.replace(slot_str, colorize("BAD", slot_str), 1)
38368
38369 if use and atom.use.tokens:
38370 use_part_start = atom_str.find("[")
38371 use_part_end = atom_str.find("]")
38372
38373 new_tokens = []
38374 + # Compute start index in non-colored atom.
38375 + ii = str(atom).find("[") + 1
38376 for token in atom.use.tokens:
38377 if token.lstrip("-!").rstrip("=?") in use:
38378 new_tokens.append(colorize("BAD", token))
38379 + colored_idx.update(range(ii, ii + len(token)))
38380 else:
38381 new_tokens.append(token)
38382 + ii += 1 + len(token)
38383
38384 atom_str = atom_str[:use_part_start] \
38385 + "[%s]" % (",".join(new_tokens),) + \
38386 atom_str[use_part_end+1:]
38387
38388 - return atom_str
38389 + return atom_str, colored_idx
38390
38391 # Show unconditional use deps first, since those
38392 # are more problematic than the conditional kind.
38393 @@ -440,37 +532,49 @@ class slot_conflict_handler(object):
38394 ordered_list.append(parent_atom)
38395 for parent_atom in ordered_list:
38396 parent, atom = parent_atom
38397 - msg.append(2*indent)
38398 - if isinstance(parent,
38399 - (PackageArg, AtomArg)):
38400 - # For PackageArg and AtomArg types, it's
38401 + if isinstance(parent, PackageArg):
38402 + # For PackageArg it's
38403 # redundant to display the atom attribute.
38404 - msg.append(str(parent))
38405 + msg.append("%s\n" % (parent,))
38406 + elif isinstance(parent, AtomArg):
38407 + msg.append(2*indent)
38408 + msg.append("%s (Argument)\n" % (atom,))
38409 else:
38410 # Display the specific atom from SetArg or
38411 # Package types.
38412 version_violated = False
38413 - sub_slot_violated = False
38414 + slot_violated = False
38415 use = []
38416 for (type, sub_type), parents in collision_reasons.items():
38417 for x in parents:
38418 if parent == x[0] and atom == x[1]:
38419 if type == "version":
38420 version_violated = True
38421 - elif type == "sub-slot":
38422 - sub_slot_violated = True
38423 + elif type == "slot":
38424 + slot_violated = True
38425 elif type == "use":
38426 use.append(sub_type)
38427 break
38428
38429 - atom_str = highlight_violations(atom.unevaluated_atom, version_violated, use)
38430 + atom_str, colored_idx = highlight_violations(atom.unevaluated_atom,
38431 + version_violated, use, slot_violated)
38432
38433 - if version_violated or sub_slot_violated:
38434 + if version_violated or slot_violated:
38435 self.is_a_version_conflict = True
38436
38437 - msg.append("%s required by %s" % (atom_str, parent))
38438 - msg.append("\n")
38439 -
38440 + cur_line = "%s required by %s\n" % (atom_str, parent)
38441 + marker_line = ""
38442 + for ii in range(len(cur_line)):
38443 + if ii in colored_idx:
38444 + marker_line += "^"
38445 + else:
38446 + marker_line += " "
38447 + marker_line += "\n"
38448 + msg.append(2*indent)
38449 + msg.append(cur_line)
38450 + msg.append(2*indent)
38451 + msg.append(marker_line)
38452 +
38453 if not selected_for_display:
38454 msg.append(2*indent)
38455 msg.append("(no parents that aren't satisfied by other packages in this slot)\n")
38456 @@ -490,7 +594,6 @@ class slot_conflict_handler(object):
38457
38458 def get_explanation(self):
38459 msg = ""
38460 - _pkg_use_enabled = self.depgraph._pkg_use_enabled
38461
38462 if self.is_a_version_conflict:
38463 return None
38464 @@ -506,13 +609,13 @@ class slot_conflict_handler(object):
38465 return None
38466
38467 if len(solutions)==1:
38468 - if len(self.slot_collision_info)==1:
38469 + if len(self.all_conflicts) == 1:
38470 msg += "It might be possible to solve this slot collision\n"
38471 else:
38472 msg += "It might be possible to solve these slot collisions\n"
38473 msg += "by applying all of the following changes:\n"
38474 else:
38475 - if len(self.slot_collision_info)==1:
38476 + if len(self.all_conflicts) == 1:
38477 msg += "It might be possible to solve this slot collision\n"
38478 else:
38479 msg += "It might be possible to solve these slot collisions\n"
38480 @@ -553,8 +656,7 @@ class slot_conflict_handler(object):
38481 if not pkg.installed:
38482 continue
38483
38484 - for (atom, root), pkgs \
38485 - in self.slot_collision_info.items():
38486 + for root, atom, pkgs in self.all_conflicts:
38487 if pkg not in pkgs:
38488 continue
38489 for other_pkg in pkgs:
38490 @@ -563,7 +665,9 @@ class slot_conflict_handler(object):
38491 if pkg.iuse.all.symmetric_difference(other_pkg.iuse.all) \
38492 or _pkg_use_enabled(pkg).symmetric_difference(_pkg_use_enabled(other_pkg)):
38493 if self.debug:
38494 - writemsg(str(pkg) + " has pending USE changes. Rejecting configuration.\n", noiselevel=-1)
38495 + writemsg(("%s has pending USE changes. "
38496 + "Rejecting configuration.\n") % (pkg,),
38497 + noiselevel=-1)
38498 return False
38499
38500 #A list of dicts. Keeps one dict per slot conflict. [ { flag1: "enabled" }, { flag2: "disabled" } ]
38501 @@ -586,16 +690,18 @@ class slot_conflict_handler(object):
38502 if not i.findAtomForPackage(pkg, modified_use=_pkg_use_enabled(pkg)):
38503 #Version range does not match.
38504 if self.debug:
38505 - writemsg(str(pkg) + " does not satify all version requirements." + \
38506 - " Rejecting configuration.\n", noiselevel=-1)
38507 + writemsg(("%s does not satisfy all version "
38508 + "requirements. Rejecting configuration.\n") %
38509 + (pkg,), noiselevel=-1)
38510 return False
38511
38512 if not pkg.iuse.is_valid_flag(atom.unevaluated_atom.use.required):
38513 #Missing IUSE.
38514 #FIXME: This needs to support use dep defaults.
38515 if self.debug:
38516 - writemsg(str(pkg) + " misses needed flags from IUSE." + \
38517 - " Rejecting configuration.\n", noiselevel=-1)
38518 + writemsg(("%s misses needed flags from IUSE."
38519 + " Rejecting configuration.\n") % (pkg,),
38520 + noiselevel=-1)
38521 return False
38522
38523 if not isinstance(ppkg, Package) or ppkg.installed:
38524 @@ -620,8 +726,9 @@ class slot_conflict_handler(object):
38525 #We can't change USE of an installed package (only of an ebuild, but that is already
38526 #part of the conflict, isn't it?
38527 if self.debug:
38528 - writemsg(str(pkg) + ": installed package would need USE changes." + \
38529 - " Rejecting configuration.\n", noiselevel=-1)
38530 + writemsg(("%s: installed package would need USE"
38531 + " changes. Rejecting configuration.\n") % (pkg,),
38532 + noiselevel=-1)
38533 return False
38534
38535 #Compute the required USE changes. A flag can be forced to "enabled" or "disabled",
38536 @@ -675,7 +782,7 @@ class slot_conflict_handler(object):
38537 if self.debug:
38538 writemsg("All involved flags:\n", noiselevel=-1)
38539 for id, involved_flags in enumerate(all_involved_flags):
38540 - writemsg(" " + str(config[id]) + "\n", noiselevel=-1)
38541 + writemsg(" %s\n" % (config[id],), noiselevel=-1)
38542 for flag, state in involved_flags.items():
38543 writemsg(" " + flag + ": " + state + "\n", noiselevel=-1)
38544
38545 @@ -758,7 +865,7 @@ class slot_conflict_handler(object):
38546 inner_first = False
38547 else:
38548 msg += ", "
38549 - msg += flag + ": " + str(state)
38550 + msg += flag + ": %s" % (state,)
38551 msg += "}"
38552 msg += "]\n"
38553 writemsg(msg, noiselevel=-1)
38554 @@ -862,8 +969,9 @@ class slot_conflict_handler(object):
38555 #We managed to create a new problem with our changes.
38556 is_valid_solution = False
38557 if self.debug:
38558 - writemsg("new conflict introduced: " + str(pkg) + \
38559 - " does not match " + new_atom + " from " + str(ppkg) + "\n", noiselevel=-1)
38560 + writemsg(("new conflict introduced: %s"
38561 + " does not match %s from %s\n") %
38562 + (pkg, new_atom, ppkg), noiselevel=-1)
38563 break
38564
38565 if not is_valid_solution:
38566 @@ -871,7 +979,7 @@ class slot_conflict_handler(object):
38567
38568 #Make sure the changes don't violate REQUIRED_USE
38569 for pkg in required_changes:
38570 - required_use = pkg.metadata.get("REQUIRED_USE")
38571 + required_use = pkg._metadata.get("REQUIRED_USE")
38572 if not required_use:
38573 continue
38574
38575 @@ -950,8 +1058,16 @@ class _solution_candidate_generator(object):
38576 else:
38577 return self.value == other.value
38578 def __str__(self):
38579 - return str(self.value)
38580 -
38581 + return "%s" % (self.value,)
38582 +
38583 + if sys.hexversion < 0x3000000:
38584 +
38585 + __unicode__ = __str__
38586 +
38587 + def __str__(self):
38588 + return _unicode_encode(self.__unicode__(),
38589 + encoding=_encodings['content'], errors='backslashreplace')
38590 +
38591 def __init__(self, all_involved_flags):
38592 #A copy of all_involved_flags with all "cond" values
38593 #replaced by a _value_helper object.
38594
38595 diff --git a/pym/_emerge/search.py b/pym/_emerge/search.py
38596 index 5abc8a0..bd74fb7 100644
38597 --- a/pym/_emerge/search.py
38598 +++ b/pym/_emerge/search.py
38599 @@ -1,4 +1,4 @@
38600 -# Copyright 1999-2011 Gentoo Foundation
38601 +# Copyright 1999-2013 Gentoo Foundation
38602 # Distributed under the terms of the GNU General Public License v2
38603
38604 from __future__ import print_function
38605 @@ -69,7 +69,7 @@ class search(object):
38606 return db.aux_get(*args, **kwargs)
38607 except KeyError:
38608 pass
38609 - raise
38610 + raise KeyError(args[0])
38611
38612 def _findname(self, *args, **kwargs):
38613 for db in self._dbs:
38614
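The _aux_get change above matters because a bare "raise" outside an except block becomes a RuntimeError under Python 3, and under Python 2 it re-raises whatever exception was most recently handled, which may not be the KeyError the caller expects; raising KeyError(args[0]) names the requested cpv explicitly. A standalone sketch of the resulting fallback pattern, with the databases treated as opaque objects (illustration only, not part of the patch):

    def aux_get_any(dbs, cpv, keys):
        """Query each backend in order; raise KeyError(cpv) if none match."""
        for db in dbs:
            try:
                return db.aux_get(cpv, keys)
            except KeyError:
                pass
        raise KeyError(cpv)
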
38615 diff --git a/pym/_emerge/stdout_spinner.py b/pym/_emerge/stdout_spinner.py
38616 index 5ad31f0..670686a 100644
38617 --- a/pym/_emerge/stdout_spinner.py
38618 +++ b/pym/_emerge/stdout_spinner.py
38619 @@ -1,4 +1,4 @@
38620 -# Copyright 1999-2009 Gentoo Foundation
38621 +# Copyright 1999-2013 Gentoo Foundation
38622 # Distributed under the terms of the GNU General Public License v2
38623
38624 import platform
38625 @@ -53,17 +53,18 @@ class stdout_spinner(object):
38626 def update_basic(self):
38627 self.spinpos = (self.spinpos + 1) % 500
38628 if self._return_early():
38629 - return
38630 + return True
38631 if (self.spinpos % 100) == 0:
38632 if self.spinpos == 0:
38633 sys.stdout.write(". ")
38634 else:
38635 sys.stdout.write(".")
38636 sys.stdout.flush()
38637 + return True
38638
38639 def update_scroll(self):
38640 if self._return_early():
38641 - return
38642 + return True
38643 if(self.spinpos >= len(self.scroll_sequence)):
38644 sys.stdout.write(darkgreen(" \b\b\b" + self.scroll_sequence[
38645 len(self.scroll_sequence) - 1 - (self.spinpos % len(self.scroll_sequence))]))
38646 @@ -71,13 +72,15 @@ class stdout_spinner(object):
38647 sys.stdout.write(green("\b " + self.scroll_sequence[self.spinpos]))
38648 sys.stdout.flush()
38649 self.spinpos = (self.spinpos + 1) % (2 * len(self.scroll_sequence))
38650 + return True
38651
38652 def update_twirl(self):
38653 self.spinpos = (self.spinpos + 1) % len(self.twirl_sequence)
38654 if self._return_early():
38655 - return
38656 + return True
38657 sys.stdout.write("\b\b " + self.twirl_sequence[self.spinpos])
38658 sys.stdout.flush()
38659 + return True
38660
38661 def update_quiet(self):
38662 - return
38663 + return True
38664
38665 diff --git a/pym/_emerge/unmerge.py b/pym/_emerge/unmerge.py
38666 index b46b89c..b04f8f3 100644
38667 --- a/pym/_emerge/unmerge.py
38668 +++ b/pym/_emerge/unmerge.py
38669 @@ -1,4 +1,4 @@
38670 -# Copyright 1999-2012 Gentoo Foundation
38671 +# Copyright 1999-2014 Gentoo Foundation
38672 # Distributed under the terms of the GNU General Public License v2
38673
38674 from __future__ import print_function
38675 @@ -457,9 +457,6 @@ def _unmerge_display(root_config, myopts, unmerge_action,
38676 writemsg_level(colorize("WARN","!!! Unmerging it may " + \
38677 "be damaging to your system.\n\n"),
38678 level=logging.WARNING, noiselevel=-1)
38679 - if clean_delay and "--pretend" not in myopts and "--ask" not in myopts:
38680 - countdown(int(settings["EMERGE_WARNING_DELAY"]),
38681 - colorize("UNMERGE_WARN", "Press Ctrl-C to Stop"))
38682 if not quiet:
38683 writemsg_level("\n %s\n" % (bold(cp),), noiselevel=-1)
38684 else:
38685
38686 diff --git a/pym/portage/__init__.py b/pym/portage/__init__.py
38687 index 46bdc96..fdbc4a8 100644
38688 --- a/pym/portage/__init__.py
38689 +++ b/pym/portage/__init__.py
38690 @@ -1,8 +1,9 @@
38691 -# portage.py -- core Portage functionality
38692 -# Copyright 1998-2011 Gentoo Foundation
38693 +# Copyright 1998-2014 Gentoo Foundation
38694 # Distributed under the terms of the GNU General Public License v2
38695
38696 -VERSION="HEAD"
38697 +from __future__ import unicode_literals
38698 +
38699 +VERSION = "HEAD"
38700
38701 # ===========================================================================
38702 # START OF IMPORTS -- START OF IMPORTS -- START OF IMPORTS -- START OF IMPORT
38703 @@ -16,14 +17,6 @@ try:
38704 errno.ESTALE = -1
38705 import re
38706 import types
38707 -
38708 - # Try the commands module first, since this allows us to eliminate
38709 - # the subprocess module from the baseline imports under python2.
38710 - try:
38711 - from commands import getstatusoutput as subprocess_getstatusoutput
38712 - except ImportError:
38713 - from subprocess import getstatusoutput as subprocess_getstatusoutput
38714 -
38715 import platform
38716
38717 # Temporarily delete these imports, to ensure that only the
38718 @@ -41,7 +34,7 @@ except ImportError as e:
38719
38720 sys.stderr.write("!!! You might consider starting python with verbose flags to see what has\n")
38721 sys.stderr.write("!!! gone wrong. Here is the information we got for this exception:\n")
38722 - sys.stderr.write(" "+str(e)+"\n\n");
38723 + sys.stderr.write(" "+str(e)+"\n\n")
38724 raise
38725
38726 try:
38727 @@ -70,6 +63,7 @@ try:
38728 'match_from_list,match_to_list',
38729 'portage.dep.dep_check:dep_check,dep_eval,dep_wordreduce,dep_zapdeps',
38730 'portage.eclass_cache',
38731 + 'portage.elog',
38732 'portage.exception',
38733 'portage.getbinpkg',
38734 'portage.locks',
38735 @@ -114,6 +108,7 @@ try:
38736 'cpv_getkey@getCPFromCPV,endversion_keys,' + \
38737 'suffix_value@endversion,pkgcmp,pkgsplit,vercmp,ververify',
38738 'portage.xpak',
38739 + 'subprocess',
38740 'time',
38741 )
38742
38743 @@ -145,6 +140,7 @@ except ImportError as e:
38744 raise
38745
38746 if sys.hexversion >= 0x3000000:
38747 + # pylint: disable=W0622
38748 basestring = str
38749 long = int
38750
38751 @@ -178,6 +174,15 @@ _encodings = {
38752 }
38753
38754 if sys.hexversion >= 0x3000000:
38755 +
38756 + def _decode_argv(argv):
38757 + # With Python 3, the surrogateescape encoding error handler makes it
38758 + # possible to access the original argv bytes, which can be useful
38759 + # if their actual encoding does not match the filesystem encoding.
38760 + fs_encoding = sys.getfilesystemencoding()
38761 + return [_unicode_decode(x.encode(fs_encoding, 'surrogateescape'))
38762 + for x in argv]
38763 +
38764 def _unicode_encode(s, encoding=_encodings['content'], errors='backslashreplace'):
38765 if isinstance(s, str):
38766 s = s.encode(encoding, errors)
38767 @@ -187,7 +192,13 @@ if sys.hexversion >= 0x3000000:
38768 if isinstance(s, bytes):
38769 s = str(s, encoding=encoding, errors=errors)
38770 return s
38771 +
38772 + _native_string = _unicode_decode
38773 else:
38774 +
38775 + def _decode_argv(argv):
38776 + return [_unicode_decode(x) for x in argv]
38777 +
38778 def _unicode_encode(s, encoding=_encodings['content'], errors='backslashreplace'):
38779 if isinstance(s, unicode):
38780 s = s.encode(encoding, errors)
38781 @@ -198,6 +209,17 @@ else:
38782 s = unicode(s, encoding=encoding, errors=errors)
38783 return s
38784
38785 + _native_string = _unicode_encode
38786 +
38787 +if sys.hexversion >= 0x20605f0:
38788 + def _native_kwargs(kwargs):
38789 + return kwargs
38790 +else:
38791 + # Avoid "TypeError: keywords must be strings" issue triggered
38792 + # by unicode_literals: http://bugs.python.org/issue4978
38793 + def _native_kwargs(kwargs):
38794 + return dict((_native_string(k), v) for k, v in kwargs.iteritems())
38795 +
38796 class _unicode_func_wrapper(object):
38797 """
38798 Wraps a function, converts arguments from unicode to bytes,
38799 @@ -215,7 +237,7 @@ class _unicode_func_wrapper(object):
38800 self._func = func
38801 self._encoding = encoding
38802
38803 - def __call__(self, *args, **kwargs):
38804 + def _process_args(self, args, kwargs):
38805
38806 encoding = self._encoding
38807 wrapped_args = [_unicode_encode(x, encoding=encoding, errors='strict')
38808 @@ -227,6 +249,13 @@ class _unicode_func_wrapper(object):
38809 else:
38810 wrapped_kwargs = {}
38811
38812 + return (wrapped_args, wrapped_kwargs)
38813 +
38814 + def __call__(self, *args, **kwargs):
38815 +
38816 + encoding = self._encoding
38817 + wrapped_args, wrapped_kwargs = self._process_args(args, kwargs)
38818 +
38819 rval = self._func(*wrapped_args, **wrapped_kwargs)
38820
38821 # Don't use isinstance() since we don't want to convert subclasses
38822 @@ -294,12 +323,17 @@ class _unicode_module_wrapper(object):
38823 import os as _os
38824 _os_overrides = {
38825 id(_os.fdopen) : _os.fdopen,
38826 - id(_os.mkfifo) : _os.mkfifo,
38827 id(_os.popen) : _os.popen,
38828 id(_os.read) : _os.read,
38829 id(_os.system) : _os.system,
38830 }
38831
38832 +
38833 +try:
38834 + _os_overrides[id(_os.mkfifo)] = _os.mkfifo
38835 +except AttributeError:
38836 + pass # Jython
38837 +
38838 if hasattr(_os, 'statvfs'):
38839 _os_overrides[id(_os.statvfs)] = _os.statvfs
38840
38841 @@ -334,6 +368,25 @@ except (ImportError, OSError) as e:
38842 _python_interpreter = os.path.realpath(sys.executable)
38843 _bin_path = PORTAGE_BIN_PATH
38844 _pym_path = PORTAGE_PYM_PATH
38845 +_not_installed = os.path.isfile(os.path.join(PORTAGE_BASE_PATH, ".portage_not_installed"))
38846 +
38847 +# Api consumers included in portage should set this to True.
38848 +_internal_caller = False
38849 +
38850 +_sync_mode = False
38851 +
38852 +def _get_stdin():
38853 + """
38854 + Buggy code in python's multiprocessing/process.py closes sys.stdin
38855 + and reassigns it to open(os.devnull), but fails to update the
38856 + corresponding __stdin__ reference. So, detect that case and handle
38857 + it appropriately.
38858 + """
38859 + if not sys.__stdin__.closed:
38860 + return sys.__stdin__
38861 + return sys.stdin
38862 +
38863 +_shell_quote_re = re.compile(r"[\s><=*\\\"'$`]")
38864
38865 def _shell_quote(s):
38866 """
38867 @@ -341,6 +394,8 @@ def _shell_quote(s):
38868 escape any backslashes, double-quotes, dollar signs, or
38869 backquotes in the string.
38870 """
38871 + if _shell_quote_re.search(s) is None:
38872 + return s
38873 for letter in "\\\"$`":
38874 if letter in s:
38875 s = s.replace(letter, "\\" + letter)
38876 @@ -354,8 +409,27 @@ if platform.system() in ('FreeBSD',):
38877
38878 @classmethod
38879 def chflags(cls, path, flags, opts=""):
38880 - cmd = 'chflags %s %o %s' % (opts, flags, _shell_quote(path))
38881 - status, output = subprocess_getstatusoutput(cmd)
38882 + cmd = ['chflags']
38883 + if opts:
38884 + cmd.append(opts)
38885 + cmd.append('%o' % (flags,))
38886 + cmd.append(path)
38887 +
38888 + if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000:
38889 + # Python 3.1 _execvp throws TypeError for non-absolute executable
38890 + # path passed as bytes (see http://bugs.python.org/issue8513).
38891 + fullname = process.find_binary(cmd[0])
38892 + if fullname is None:
38893 + raise exception.CommandNotFound(cmd[0])
38894 + cmd[0] = fullname
38895 +
38896 + encoding = _encodings['fs']
38897 + cmd = [_unicode_encode(x, encoding=encoding, errors='strict')
38898 + for x in cmd]
38899 + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
38900 + stderr=subprocess.STDOUT)
38901 + output = proc.communicate()[0]
38902 + status = proc.wait()
38903 if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
38904 return
38905 # Try to generate an ENOENT error if appropriate.
38906 @@ -368,6 +442,7 @@ if platform.system() in ('FreeBSD',):
38907 raise portage.exception.CommandNotFound('chflags')
38908 # Now we're not sure exactly why it failed or what
38909 # the real errno was, so just report EPERM.
38910 + output = _unicode_decode(output, encoding=encoding)
38911 e = OSError(errno.EPERM, output)
38912 e.errno = errno.EPERM
38913 e.filename = path
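The chflags refactor above replaces the old getstatusoutput() call with an explicit subprocess.Popen invocation, which avoids going through a shell and still works under Python 3, where the commands module no longer exists. A standalone sketch of the same capture-status-and-output pattern (illustration only, not part of the patch; 'true' is just a convenient no-op POSIX command):

    import subprocess

    # Run a command without a shell and capture its exit status together
    # with its combined stdout/stderr.
    proc = subprocess.Popen(['true'], stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    output = proc.communicate()[0]
    status = proc.wait()
    print("%s %r" % (status, output))  # exit status 0, empty output
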
38914 @@ -396,20 +471,29 @@ def getcwd():
38915 getcwd()
38916
38917 def abssymlink(symlink, target=None):
38918 - "This reads symlinks, resolving the relative symlinks, and returning the absolute."
38919 + """
38920 + This reads symlinks, resolving the relative symlinks,
38921 + and returning the absolute.
38922 + @param symlink: path of symlink (must be absolute)
38923 + @param target: the target of the symlink (as returned
38924 + by readlink)
38925 + @rtype: str
38926 + @return: the absolute path of the symlink target
38927 + """
38928 if target is not None:
38929 mylink = target
38930 else:
38931 mylink = os.readlink(symlink)
38932 if mylink[0] != '/':
38933 - mydir=os.path.dirname(symlink)
38934 - mylink=mydir+"/"+mylink
38935 + mydir = os.path.dirname(symlink)
38936 + mylink = mydir + "/" + mylink
38937 return os.path.normpath(mylink)
38938
38939 _doebuild_manifest_exempt_depend = 0
38940
38941 -_testing_eapis = frozenset(["4-python", "4-slot-abi"])
38942 -_deprecated_eapis = frozenset(["4_pre1", "3_pre2", "3_pre1"])
38943 +_testing_eapis = frozenset(["4-python", "4-slot-abi", "5-progress", "5-hdepend"])
38944 +_deprecated_eapis = frozenset(["4_pre1", "3_pre2", "3_pre1", "5_pre1", "5_pre2"])
38945 +_supported_eapis = frozenset([str(x) for x in range(portage.const.EAPI + 1)] + list(_testing_eapis) + list(_deprecated_eapis))
38946
38947 def _eapi_is_deprecated(eapi):
38948 return eapi in _deprecated_eapis
38949 @@ -466,13 +550,13 @@ auxdbkeys = (
38950 'RESTRICT', 'HOMEPAGE', 'LICENSE', 'DESCRIPTION',
38951 'KEYWORDS', 'INHERITED', 'IUSE', 'REQUIRED_USE',
38952 'PDEPEND', 'PROVIDE', 'EAPI',
38953 - 'PROPERTIES', 'DEFINED_PHASES', 'UNUSED_05', 'UNUSED_04',
38954 + 'PROPERTIES', 'DEFINED_PHASES', 'HDEPEND', 'UNUSED_04',
38955 'UNUSED_03', 'UNUSED_02', 'UNUSED_01',
38956 )
38957 -auxdbkeylen=len(auxdbkeys)
38958 +auxdbkeylen = len(auxdbkeys)
38959
38960 def portageexit():
38961 - close_portdbapi_caches()
38962 + pass
38963
38964 class _trees_dict(dict):
38965 __slots__ = ('_running_eroot', '_target_eroot',)
38966 @@ -483,13 +567,6 @@ class _trees_dict(dict):
38967
38968 def create_trees(config_root=None, target_root=None, trees=None, env=None,
38969 eprefix=None):
38970 - if trees is not None:
38971 - # clean up any existing portdbapi instances
38972 - for myroot in trees:
38973 - portdb = trees[myroot]["porttree"].dbapi
38974 - portdb.close_caches()
38975 - portdbapi.portdbapi_instances.remove(portdb)
38976 - del trees[myroot]["porttree"], myroot, portdb
38977
38978 if trees is None:
38979 trees = _trees_dict()
38980 @@ -507,7 +584,7 @@ def create_trees(config_root=None, target_root=None, trees=None, env=None,
38981
38982 trees._target_eroot = settings['EROOT']
38983 myroots = [(settings['EROOT'], settings)]
38984 - if settings["ROOT"] == "/":
38985 + if settings["ROOT"] == "/" and settings["EPREFIX"] == const.EPREFIX:
38986 trees._running_eroot = trees._target_eroot
38987 else:
38988
38989 @@ -515,15 +592,15 @@ def create_trees(config_root=None, target_root=None, trees=None, env=None,
38990 # environment to apply to the config that's associated
38991 # with ROOT != "/", so pass a nearly empty dict for the env parameter.
38992 clean_env = {}
38993 - for k in ('PATH', 'PORTAGE_GRPNAME', 'PORTAGE_USERNAME',
38994 - 'SSH_AGENT_PID', 'SSH_AUTH_SOCK', 'TERM',
38995 + for k in ('PATH', 'PORTAGE_GRPNAME', 'PORTAGE_REPOSITORIES', 'PORTAGE_USERNAME',
38996 + 'PYTHONPATH', 'SSH_AGENT_PID', 'SSH_AUTH_SOCK', 'TERM',
38997 'ftp_proxy', 'http_proxy', 'no_proxy',
38998 '__PORTAGE_TEST_HARDLINK_LOCKS'):
38999 v = settings.get(k)
39000 if v is not None:
39001 clean_env[k] = v
39002 settings = config(config_root=None, target_root="/",
39003 - env=clean_env, eprefix=eprefix)
39004 + env=clean_env, eprefix=None)
39005 settings.lock()
39006 trees._running_eroot = settings['EROOT']
39007 myroots.append((settings['EROOT'], settings))
39008 @@ -547,11 +624,17 @@ if VERSION == 'HEAD':
39009 if VERSION is not self:
39010 return VERSION
39011 if os.path.isdir(os.path.join(PORTAGE_BASE_PATH, '.git')):
39012 - status, output = subprocess_getstatusoutput((
39013 - "cd %s ; git describe --tags || exit $? ; " + \
39014 + encoding = _encodings['fs']
39015 + cmd = [BASH_BINARY, "-c", ("cd %s ; git describe --tags || exit $? ; " + \
39016 "if [ -n \"`git diff-index --name-only --diff-filter=M HEAD`\" ] ; " + \
39017 "then echo modified ; git rev-list --format=%%ct -n 1 HEAD ; fi ; " + \
39018 - "exit 0") % _shell_quote(PORTAGE_BASE_PATH))
39019 + "exit 0") % _shell_quote(PORTAGE_BASE_PATH)]
39020 + cmd = [_unicode_encode(x, encoding=encoding, errors='strict')
39021 + for x in cmd]
39022 + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
39023 + stderr=subprocess.STDOUT)
39024 + output = _unicode_decode(proc.communicate()[0], encoding=encoding)
39025 + status = proc.wait()
39026 if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
39027 output_lines = output.splitlines()
39028 if output_lines:
39029 @@ -561,7 +644,7 @@ if VERSION == 'HEAD':
39030 patchlevel = False
39031 if len(version_split) > 1:
39032 patchlevel = True
39033 - VERSION = "%s_p%s" %(VERSION, version_split[1])
39034 + VERSION = "%s_p%s" % (VERSION, version_split[1])
39035 if len(output_lines) > 1 and output_lines[1] == 'modified':
39036 head_timestamp = None
39037 if len(output_lines) > 3:
39038 @@ -580,34 +663,17 @@ if VERSION == 'HEAD':
39039 return VERSION
39040 VERSION = _LazyVersion()
39041
39042 -if "_legacy_globals_constructed" in globals():
39043 - # The module has been reloaded, so perform any relevant cleanup
39044 - # and prevent memory leaks.
39045 - if "db" in _legacy_globals_constructed:
39046 - try:
39047 - db
39048 - except NameError:
39049 - pass
39050 - else:
39051 - if isinstance(db, dict) and db:
39052 - for _x in db.values():
39053 - try:
39054 - if "porttree" in _x.lazy_items:
39055 - continue
39056 - except (AttributeError, TypeError):
39057 - continue
39058 - try:
39059 - _x = _x["porttree"].dbapi
39060 - except (AttributeError, KeyError):
39061 - continue
39062 - if not isinstance(_x, portdbapi):
39063 - continue
39064 - _x.close_caches()
39065 - try:
39066 - portdbapi.portdbapi_instances.remove(_x)
39067 - except ValueError:
39068 - pass
39069 - del _x
39070 +_legacy_global_var_names = ("archlist", "db", "features",
39071 + "groups", "mtimedb", "mtimedbfile", "pkglines",
39072 + "portdb", "profiledir", "root", "selinux_enabled",
39073 + "settings", "thirdpartymirrors")
39074 +
39075 +def _reset_legacy_globals():
39076 +
39077 + global _legacy_globals_constructed
39078 + _legacy_globals_constructed = set()
39079 + for k in _legacy_global_var_names:
39080 + globals()[k] = _LegacyGlobalProxy(k)
39081
39082 class _LegacyGlobalProxy(proxy.objectproxy.ObjectProxy):
39083
39084 @@ -622,16 +688,7 @@ class _LegacyGlobalProxy(proxy.objectproxy.ObjectProxy):
39085 from portage._legacy_globals import _get_legacy_global
39086 return _get_legacy_global(name)
39087
39088 -_legacy_global_var_names = ("archlist", "db", "features",
39089 - "groups", "mtimedb", "mtimedbfile", "pkglines",
39090 - "portdb", "profiledir", "root", "selinux_enabled",
39091 - "settings", "thirdpartymirrors")
39092 -
39093 -for k in _legacy_global_var_names:
39094 - globals()[k] = _LegacyGlobalProxy(k)
39095 -del k
39096 -
39097 -_legacy_globals_constructed = set()
39098 +_reset_legacy_globals()
39099
39100 def _disable_legacy_globals():
39101 """
39102
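The new _decode_argv() helper relies on Python 3's surrogateescape error handler: sys.argv items arrive as str, but encoding them back with 'surrogateescape' recovers the original argv bytes even when they are not valid in the filesystem encoding. A Python 3-only sketch of that round trip (illustration only, not part of the patch):

    import sys

    def argv_bytes(argv):
        fs_encoding = sys.getfilesystemencoding()
        return [x.encode(fs_encoding, 'surrogateescape') for x in argv]

    # b'caf\xe9' is not valid UTF-8, yet it survives the round trip:
    arg = b'caf\xe9'.decode(sys.getfilesystemencoding(), 'surrogateescape')
    print(argv_bytes(['emerge', arg]))  # [b'emerge', b'caf\xe9']
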
39103 diff --git a/pym/portage/_emirrordist/Config.py b/pym/portage/_emirrordist/Config.py
39104 new file mode 100644
39105 index 0000000..db4bfeb
39106 --- /dev/null
39107 +++ b/pym/portage/_emirrordist/Config.py
39108 @@ -0,0 +1,132 @@
39109 +# Copyright 2013 Gentoo Foundation
39110 +# Distributed under the terms of the GNU General Public License v2
39111 +
39112 +import copy
39113 +import io
39114 +import logging
39115 +import shelve
39116 +import sys
39117 +import time
39118 +
39119 +import portage
39120 +from portage import os
39121 +from portage.util import grabdict, grablines
39122 +from portage.util._ShelveUnicodeWrapper import ShelveUnicodeWrapper
39123 +
39124 +class Config(object):
39125 + def __init__(self, options, portdb, event_loop):
39126 + self.options = options
39127 + self.portdb = portdb
39128 + self.event_loop = event_loop
39129 + self.added_byte_count = 0
39130 + self.added_file_count = 0
39131 + self.scheduled_deletion_count = 0
39132 + self.delete_count = 0
39133 + self.file_owners = {}
39134 + self.file_failures = {}
39135 + self.start_time = time.time()
39136 + self._open_files = []
39137 +
39138 + self.log_success = self._open_log('success', options.success_log, 'a')
39139 + self.log_failure = self._open_log('failure', options.failure_log, 'a')
39140 +
39141 + self.distfiles = None
39142 + if options.distfiles is not None:
39143 + self.distfiles = options.distfiles
39144 +
39145 + self.mirrors = copy.copy(portdb.settings.thirdpartymirrors())
39146 +
39147 + if options.mirror_overrides is not None:
39148 + self.mirrors.update(grabdict(options.mirror_overrides))
39149 +
39150 + if options.mirror_skip is not None:
39151 + for x in options.mirror_skip.split(","):
39152 + self.mirrors[x] = []
39153 +
39154 + self.whitelist = None
39155 + if options.whitelist_from is not None:
39156 + self.whitelist = set()
39157 + for filename in options.whitelist_from:
39158 + for line in grablines(filename):
39159 + line = line.strip()
39160 + if line and not line.startswith("#"):
39161 + self.whitelist.add(line)
39162 +
39163 + self.restrict_mirror_exemptions = None
39164 + if options.restrict_mirror_exemptions is not None:
39165 + self.restrict_mirror_exemptions = frozenset(
39166 + options.restrict_mirror_exemptions.split(","))
39167 +
39168 + self.recycle_db = None
39169 + if options.recycle_db is not None:
39170 + self.recycle_db = self._open_shelve(
39171 + options.recycle_db, 'recycle')
39172 +
39173 + self.distfiles_db = None
39174 + if options.distfiles_db is not None:
39175 + self.distfiles_db = self._open_shelve(
39176 + options.distfiles_db, 'distfiles')
39177 +
39178 + self.deletion_db = None
39179 + if options.deletion_db is not None:
39180 + self.deletion_db = self._open_shelve(
39181 + options.deletion_db, 'deletion')
39182 +
39183 + def _open_log(self, log_desc, log_path, mode):
39184 +
39185 + if log_path is None or self.options.dry_run:
39186 + log_func = logging.info
39187 + line_format = "%s: %%s" % log_desc
39188 + add_newline = False
39189 + if log_path is not None:
39190 + logging.warn(("dry-run: %s log "
39191 + "redirected to logging.info") % log_desc)
39192 + else:
39193 + self._open_files.append(io.open(log_path, mode=mode,
39194 + encoding='utf_8'))
39195 + line_format = "%s\n"
39196 + log_func = self._open_files[-1].write
39197 +
39198 + return self._LogFormatter(line_format, log_func)
39199 +
39200 + class _LogFormatter(object):
39201 +
39202 + __slots__ = ('_line_format', '_log_func')
39203 +
39204 + def __init__(self, line_format, log_func):
39205 + self._line_format = line_format
39206 + self._log_func = log_func
39207 +
39208 + def __call__(self, msg):
39209 + self._log_func(self._line_format % (msg,))
39210 +
39211 + def _open_shelve(self, db_file, db_desc):
39212 + if self.options.dry_run:
39213 + open_flag = "r"
39214 + else:
39215 + open_flag = "c"
39216 +
39217 + if self.options.dry_run and not os.path.exists(db_file):
39218 + db = {}
39219 + else:
39220 + db = shelve.open(db_file, flag=open_flag)
39221 + if sys.hexversion < 0x3000000:
39222 + db = ShelveUnicodeWrapper(db)
39223 +
39224 + if self.options.dry_run:
39225 + logging.warn("dry-run: %s db opened in readonly mode" % db_desc)
39226 + if not isinstance(db, dict):
39227 + volatile_db = dict((k, db[k]) for k in db)
39228 + db.close()
39229 + db = volatile_db
39230 + else:
39231 + self._open_files.append(db)
39232 +
39233 + return db
39234 +
39235 + def __enter__(self):
39236 + return self
39237 +
39238 + def __exit__(self, exc_type, exc_value, traceback):
39239 + while self._open_files:
39240 + self._open_files.pop().close()
39241
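Two details of Config worth calling out: it is a context manager, so every log file and shelve database it opened is closed on exit, and _open_shelve() keeps --dry-run side-effect free by opening existing databases read-only and copying them into a plain dict. A minimal sketch of the latter, assuming an ordinary shelve file on disk (open_db is a hypothetical helper, and the Python 2 ShelveUnicodeWrapper step is omitted):

    import os
    import shelve

    def open_db(db_file, dry_run=False):
        """Open a shelve db; in dry-run mode return a throwaway copy."""
        if dry_run and not os.path.exists(db_file):
            # Nothing on disk yet and we must not create anything.
            return {}
        db = shelve.open(db_file, flag="r" if dry_run else "c")
        if dry_run:
            # Snapshot into a volatile dict so writes never hit disk.
            volatile = dict((k, db[k]) for k in db)
            db.close()
            return volatile
        return db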
39242 diff --git a/pym/portage/_emirrordist/DeletionIterator.py b/pym/portage/_emirrordist/DeletionIterator.py
39243 new file mode 100644
39244 index 0000000..dff52c0
39245 --- /dev/null
39246 +++ b/pym/portage/_emirrordist/DeletionIterator.py
39247 @@ -0,0 +1,83 @@
39248 +# Copyright 2013 Gentoo Foundation
39249 +# Distributed under the terms of the GNU General Public License v2
39250 +
39251 +import logging
39252 +import stat
39253 +
39254 +from portage import os
39255 +from .DeletionTask import DeletionTask
39256 +
39257 +class DeletionIterator(object):
39258 +
39259 + def __init__(self, config):
39260 + self._config = config
39261 +
39262 + def __iter__(self):
39263 + distdir = self._config.options.distfiles
39264 + file_owners = self._config.file_owners
39265 + whitelist = self._config.whitelist
39266 + distfiles_local = self._config.options.distfiles_local
39267 + deletion_db = self._config.deletion_db
39268 + deletion_delay = self._config.options.deletion_delay
39269 + start_time = self._config.start_time
39270 + distfiles_set = set(os.listdir(self._config.options.distfiles))
39271 + for filename in distfiles_set:
39272 + try:
39273 + st = os.stat(os.path.join(distdir, filename))
39274 + except OSError as e:
39275 + logging.error("stat failed on '%s' in distfiles: %s\n" %
39276 + (filename, e))
39277 + continue
39278 + if not stat.S_ISREG(st.st_mode):
39279 + continue
39280 + elif filename in file_owners:
39281 + if deletion_db is not None:
39282 + try:
39283 + del deletion_db[filename]
39284 + except KeyError:
39285 + pass
39286 + elif whitelist is not None and filename in whitelist:
39287 + if deletion_db is not None:
39288 + try:
39289 + del deletion_db[filename]
39290 + except KeyError:
39291 + pass
39292 + elif distfiles_local is not None and \
39293 + os.path.exists(os.path.join(distfiles_local, filename)):
39294 + if deletion_db is not None:
39295 + try:
39296 + del deletion_db[filename]
39297 + except KeyError:
39298 + pass
39299 + else:
39300 + self._config.scheduled_deletion_count += 1
39301 +
39302 + if deletion_db is None or deletion_delay is None:
39303 +
39304 + yield DeletionTask(background=True,
39305 + distfile=filename,
39306 + config=self._config)
39307 +
39308 + else:
39309 + deletion_entry = deletion_db.get(filename)
39310 +
39311 + if deletion_entry is None:
39312 + logging.debug("add '%s' to deletion db" % filename)
39313 + deletion_db[filename] = start_time
39314 +
39315 + elif deletion_entry + deletion_delay <= start_time:
39316 +
39317 + yield DeletionTask(background=True,
39318 + distfile=filename,
39319 + config=self._config)
39320 +
39321 + if deletion_db is not None:
39322 + for filename in list(deletion_db):
39323 + if filename not in distfiles_set:
39324 + try:
39325 + del deletion_db[filename]
39326 + except KeyError:
39327 + pass
39328 + else:
39329 + logging.debug("drop '%s' from deletion db" %
39330 + filename)
39331
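The branch structure above boils down to a simple rule: a distfile that is no longer referenced (and is neither whitelisted nor present in distfiles-local) is first recorded in the deletion db with the current timestamp, and only turns into a DeletionTask once it has stayed unreferenced for --deletion-delay seconds. A condensed sketch of that bookkeeping, using a plain dict in place of the shelve-backed deletion_db (should_delete is a made-up helper, not part of emirrordist):

    import time

    def should_delete(filename, deletion_db, deletion_delay, now=None):
        """True once filename has been scheduled for deletion_delay seconds."""
        if now is None:
            now = time.time()
        scheduled = deletion_db.get(filename)
        if scheduled is None:
            deletion_db[filename] = now   # start the grace period
            return False
        return scheduled + deletion_delay <= now

    # Example with a two-week delay (made-up values):
    db = {}
    print(should_delete("foo-1.0.tar.gz", db, 14 * 24 * 60 * 60))  # False
    print(should_delete("foo-1.0.tar.gz", db, 14 * 24 * 60 * 60,
            now=db["foo-1.0.tar.gz"] + 15 * 24 * 60 * 60))         # True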
39332 diff --git a/pym/portage/_emirrordist/DeletionTask.py b/pym/portage/_emirrordist/DeletionTask.py
39333 new file mode 100644
39334 index 0000000..7d10957
39335 --- /dev/null
39336 +++ b/pym/portage/_emirrordist/DeletionTask.py
39337 @@ -0,0 +1,129 @@
39338 +# Copyright 2013 Gentoo Foundation
39339 +# Distributed under the terms of the GNU General Public License v2
39340 +
39341 +import errno
39342 +import logging
39343 +
39344 +from portage import os
39345 +from portage.util._async.FileCopier import FileCopier
39346 +from _emerge.CompositeTask import CompositeTask
39347 +
39348 +class DeletionTask(CompositeTask):
39349 +
39350 + __slots__ = ('distfile', 'config')
39351 +
39352 + def _start(self):
39353 +
39354 + distfile_path = os.path.join(
39355 + self.config.options.distfiles, self.distfile)
39356 +
39357 + if self.config.options.recycle_dir is not None:
39358 + distfile_path = os.path.join(self.config.options.distfiles, self.distfile)
39359 + recycle_path = os.path.join(
39360 + self.config.options.recycle_dir, self.distfile)
39361 + if self.config.options.dry_run:
39362 + logging.info(("dry-run: move '%s' from "
39363 + "distfiles to recycle") % self.distfile)
39364 + else:
39365 + logging.debug(("move '%s' from "
39366 + "distfiles to recycle") % self.distfile)
39367 + try:
39368 + os.rename(distfile_path, recycle_path)
39369 + except OSError as e:
39370 + if e.errno != errno.EXDEV:
39371 + logging.error(("rename %s from distfiles to "
39372 + "recycle failed: %s") % (self.distfile, e))
39373 + else:
39374 + self.returncode = os.EX_OK
39375 + self._async_wait()
39376 + return
39377 +
39378 + self._start_task(
39379 + FileCopier(src_path=distfile_path,
39380 + dest_path=recycle_path,
39381 + background=False),
39382 + self._recycle_copier_exit)
39383 + return
39384 +
39385 + success = True
39386 +
39387 + if self.config.options.dry_run:
39388 + logging.info(("dry-run: delete '%s' from "
39389 + "distfiles") % self.distfile)
39390 + else:
39391 + logging.debug(("delete '%s' from "
39392 + "distfiles") % self.distfile)
39393 + try:
39394 + os.unlink(distfile_path)
39395 + except OSError as e:
39396 + if e.errno not in (errno.ENOENT, errno.ESTALE):
39397 + logging.error("%s unlink failed in distfiles: %s" %
39398 + (self.distfile, e))
39399 + success = False
39400 +
39401 + if success:
39402 + self._success()
39403 + self.returncode = os.EX_OK
39404 + else:
39405 + self.returncode = 1
39406 +
39407 + self._async_wait()
39408 +
39409 + def _recycle_copier_exit(self, copier):
39410 +
39411 + self._assert_current(copier)
39412 + if self._was_cancelled():
39413 + self.wait()
39414 + return
39415 +
39416 + success = True
39417 + if copier.returncode == os.EX_OK:
39418 +
39419 + try:
39420 + os.unlink(copier.src_path)
39421 + except OSError as e:
39422 + if e.errno not in (errno.ENOENT, errno.ESTALE):
39423 + logging.error("%s unlink failed in distfiles: %s" %
39424 + (self.distfile, e))
39425 + success = False
39426 +
39427 + else:
39428 + logging.error(("%s copy from distfiles "
39429 + "to recycle failed: %s") % (self.distfile, e))
39430 + success = False
39431 +
39432 + if success:
39433 + self._success()
39434 + self.returncode = os.EX_OK
39435 + else:
39436 + self.returncode = 1
39437 +
39438 + self._current_task = None
39439 + self.wait()
39440 +
39441 + def _success(self):
39442 +
39443 + cpv = "unknown"
39444 + if self.config.distfiles_db is not None:
39445 + cpv = self.config.distfiles_db.get(self.distfile, cpv)
39446 +
39447 + self.config.delete_count += 1
39448 + self.config.log_success("%s\t%s\tremoved" % (cpv, self.distfile))
39449 +
39450 + if self.config.distfiles_db is not None:
39451 + try:
39452 + del self.config.distfiles_db[self.distfile]
39453 + except KeyError:
39454 + pass
39455 + else:
39456 + logging.debug(("drop '%s' from "
39457 + "distfiles db") % self.distfile)
39458 +
39459 + if self.config.deletion_db is not None:
39460 + try:
39461 + del self.config.deletion_db[self.distfile]
39462 + except KeyError:
39463 + pass
39464 + else:
39465 + logging.debug(("drop '%s' from "
39466 + "deletion db") % self.distfile)
39467
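When a recycle directory is configured, DeletionTask first tries a plain os.rename() into it and only falls back to the asynchronous FileCopier (followed by unlinking the source) when rename fails with EXDEV, i.e. when distfiles and the recycle dir live on different filesystems. The same idea in a compact, synchronous form (move_to_recycle is a hypothetical helper; the real task stays non-blocking by delegating the copy to FileCopier):

    import errno
    import os
    import shutil

    def move_to_recycle(src, dest):
        """Move src to dest, copying across filesystems if needed."""
        try:
            os.rename(src, dest)
            return
        except OSError as e:
            if e.errno != errno.EXDEV:
                raise
        # rename() cannot cross device boundaries: copy, then remove
        # the source, mirroring the FileCopier + unlink path above.
        shutil.copy2(src, dest)
        os.unlink(src)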
39468 diff --git a/pym/portage/_emirrordist/FetchIterator.py b/pym/portage/_emirrordist/FetchIterator.py
39469 new file mode 100644
39470 index 0000000..16a0b04
39471 --- /dev/null
39472 +++ b/pym/portage/_emirrordist/FetchIterator.py
39473 @@ -0,0 +1,147 @@
39474 +# Copyright 2013 Gentoo Foundation
39475 +# Distributed under the terms of the GNU General Public License v2
39476 +
39477 +from portage import os
39478 +from portage.checksum import (_apply_hash_filter,
39479 + _filter_unaccelarated_hashes, _hash_filter)
39480 +from portage.dep import use_reduce
39481 +from portage.exception import PortageException
39482 +from .FetchTask import FetchTask
39483 +
39484 +class FetchIterator(object):
39485 +
39486 + def __init__(self, config):
39487 + self._config = config
39488 + self._log_failure = config.log_failure
39489 +
39490 + def _iter_every_cp(self):
39491 + # List categories individually, in order to start yielding more quickly,
39492 + # and in order to reduce latency in case of a signal interrupt.
39493 + cp_all = self._config.portdb.cp_all
39494 + for category in sorted(self._config.portdb.categories):
39495 + for cp in cp_all(categories=(category,)):
39496 + yield cp
39497 +
39498 + def __iter__(self):
39499 +
39500 + portdb = self._config.portdb
39501 + get_repo_for_location = portdb.repositories.get_repo_for_location
39502 + file_owners = self._config.file_owners
39503 + file_failures = self._config.file_failures
39504 + restrict_mirror_exemptions = self._config.restrict_mirror_exemptions
39505 +
39506 + hash_filter = _hash_filter(
39507 + portdb.settings.get("PORTAGE_CHECKSUM_FILTER", ""))
39508 + if hash_filter.transparent:
39509 + hash_filter = None
39510 +
39511 + for cp in self._iter_every_cp():
39512 +
39513 + for tree in portdb.porttrees:
39514 +
39515 + # Reset state so the Manifest is pulled once
39516 + # for this cp / tree combination.
39517 + digests = None
39518 + repo_config = get_repo_for_location(tree)
39519 +
39520 + for cpv in portdb.cp_list(cp, mytree=tree):
39521 +
39522 + try:
39523 + restrict, = portdb.aux_get(cpv, ("RESTRICT",),
39524 + mytree=tree)
39525 + except (KeyError, PortageException) as e:
39526 + self._log_failure("%s\t\taux_get exception %s" %
39527 + (cpv, e))
39528 + continue
39529 +
39530 + # Here we use matchnone=True to ignore conditional parts
39531 + # of RESTRICT since they don't apply unconditionally.
39532 + # Assume such conditionals only apply on the client side.
39533 + try:
39534 + restrict = frozenset(use_reduce(restrict,
39535 + flat=True, matchnone=True))
39536 + except PortageException as e:
39537 + self._log_failure("%s\t\tuse_reduce exception %s" %
39538 + (cpv, e))
39539 + continue
39540 +
39541 + if "fetch" in restrict:
39542 + continue
39543 +
39544 + try:
39545 + uri_map = portdb.getFetchMap(cpv)
39546 + except PortageException as e:
39547 + self._log_failure("%s\t\tgetFetchMap exception %s" %
39548 + (cpv, e))
39549 + continue
39550 +
39551 + if not uri_map:
39552 + continue
39553 +
39554 + if "mirror" in restrict:
39555 + skip = False
39556 + if restrict_mirror_exemptions is not None:
39557 + new_uri_map = {}
39558 + for filename, uri_tuple in uri_map.items():
39559 + for uri in uri_tuple:
39560 + if uri[:9] == "mirror://":
39561 + i = uri.find("/", 9)
39562 + if i != -1 and uri[9:i].strip("/") in \
39563 + restrict_mirror_exemptions:
39564 + new_uri_map[filename] = uri_tuple
39565 + break
39566 + if new_uri_map:
39567 + uri_map = new_uri_map
39568 + else:
39569 + skip = True
39570 + else:
39571 + skip = True
39572 +
39573 + if skip:
39574 + continue
39575 +
39576 + # Parse Manifest for this cp if we haven't yet.
39577 + if digests is None:
39578 + try:
39579 + digests = repo_config.load_manifest(
39580 + os.path.join(repo_config.location, cp)
39581 + ).getTypeDigests("DIST")
39582 + except (EnvironmentError, PortageException) as e:
39583 + for filename in uri_map:
39584 + self._log_failure(
39585 + "%s\t%s\tManifest exception %s" %
39586 + (cpv, filename, e))
39587 + file_failures[filename] = cpv
39588 + continue
39589 +
39590 + if not digests:
39591 + for filename in uri_map:
39592 + self._log_failure("%s\t%s\tdigest entry missing" %
39593 + (cpv, filename))
39594 + file_failures[filename] = cpv
39595 + continue
39596 +
39597 + for filename, uri_tuple in uri_map.items():
39598 + file_digests = digests.get(filename)
39599 + if file_digests is None:
39600 + self._log_failure("%s\t%s\tdigest entry missing" %
39601 + (cpv, filename))
39602 + file_failures[filename] = cpv
39603 + continue
39604 + if filename in file_owners:
39605 + continue
39606 + file_owners[filename] = cpv
39607 +
39608 + file_digests = \
39609 + _filter_unaccelarated_hashes(file_digests)
39610 + if hash_filter is not None:
39611 + file_digests = _apply_hash_filter(
39612 + file_digests, hash_filter)
39613 +
39614 + yield FetchTask(cpv=cpv,
39615 + background=True,
39616 + digests=file_digests,
39617 + distfile=filename,
39618 + restrict=restrict,
39619 + uri_tuple=uri_tuple,
39620 + config=self._config)
39621
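For RESTRICT="mirror" packages, the loop above keeps a file only if at least one of its URIs points at a mirror named in --restrict-mirror-exemptions; everything else is skipped. The URI handling is compressed into inline string slicing, so here is the same check pulled out into standalone form (mirror_name and exempt_uris are illustrative names, not emirrordist functions):

    def mirror_name(uri):
        """Return the mirror name from a mirror:// URI, or None."""
        if not uri.startswith("mirror://"):
            return None
        i = uri.find("/", 9)
        if i == -1:
            return None
        return uri[9:i].strip("/")

    def exempt_uris(uri_map, exemptions):
        """Keep only files reachable through an exempted mirror."""
        result = {}
        for filename, uris in uri_map.items():
            if any(mirror_name(u) in exemptions for u in uris):
                result[filename] = uris
        return result

    # Made-up example data:
    uri_map = {"foo-1.0.tar.gz": ("mirror://gnu/foo/foo-1.0.tar.gz",)}
    print(exempt_uris(uri_map, frozenset(["gnu"])))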
39622 diff --git a/pym/portage/_emirrordist/FetchTask.py b/pym/portage/_emirrordist/FetchTask.py
39623 new file mode 100644
39624 index 0000000..66c41c1
39625 --- /dev/null
39626 +++ b/pym/portage/_emirrordist/FetchTask.py
39627 @@ -0,0 +1,629 @@
39628 +# Copyright 2013 Gentoo Foundation
39629 +# Distributed under the terms of the GNU General Public License v2
39630 +
39631 +import collections
39632 +import errno
39633 +import logging
39634 +import random
39635 +import stat
39636 +import subprocess
39637 +import sys
39638 +
39639 +import portage
39640 +from portage import _encodings, _unicode_encode
39641 +from portage import os
39642 +from portage.util._async.FileCopier import FileCopier
39643 +from portage.util._async.FileDigester import FileDigester
39644 +from portage.util._async.PipeLogger import PipeLogger
39645 +from portage.util._async.PopenProcess import PopenProcess
39646 +from _emerge.CompositeTask import CompositeTask
39647 +
39648 +default_hash_name = portage.const.MANIFEST2_REQUIRED_HASH
39649 +
39650 +# Use --no-check-certificate since Manifest digests should provide
39651 +# enough security, and certificates can be self-signed or otherwise untrusted.
39652 +default_fetchcommand = "wget -c -v -t 1 --passive-ftp --no-check-certificate --timeout=60 -O \"${DISTDIR}/${FILE}\" \"${URI}\""
39653 +
39654 +class FetchTask(CompositeTask):
39655 +
39656 + __slots__ = ('distfile', 'digests', 'config', 'cpv',
39657 + 'restrict', 'uri_tuple', '_current_mirror',
39658 + '_current_stat', '_fetch_tmp_dir_info', '_fetch_tmp_file',
39659 + '_fs_mirror_stack', '_mirror_stack',
39660 + '_previously_added',
39661 + '_primaryuri_stack', '_log_path', '_tried_uris')
39662 +
39663 + def _start(self):
39664 +
39665 + if self.config.options.fetch_log_dir is not None and \
39666 + not self.config.options.dry_run:
39667 + self._log_path = os.path.join(
39668 + self.config.options.fetch_log_dir,
39669 + self.distfile + '.log')
39670 +
39671 + self._previously_added = True
39672 + if self.config.distfiles_db is not None and \
39673 + self.distfile not in self.config.distfiles_db:
39674 + self._previously_added = False
39675 + self.config.distfiles_db[self.distfile] = self.cpv
39676 +
39677 + if not self._have_needed_digests():
39678 + msg = "incomplete digests: %s" % " ".join(self.digests)
39679 + self.scheduler.output(msg, background=self.background,
39680 + log_path=self._log_path)
39681 + self.config.log_failure("%s\t%s\t%s" %
39682 + (self.cpv, self.distfile, msg))
39683 + self.config.file_failures[self.distfile] = self.cpv
39684 + self.returncode = os.EX_OK
39685 + self._async_wait()
39686 + return
39687 +
39688 + distfile_path = os.path.join(
39689 + self.config.options.distfiles, self.distfile)
39690 +
39691 + st = None
39692 + size_ok = False
39693 + try:
39694 + st = os.stat(distfile_path)
39695 + except OSError as e:
39696 + if e.errno not in (errno.ENOENT, errno.ESTALE):
39697 + msg = "%s stat failed in %s: %s" % \
39698 + (self.distfile, "distfiles", e)
39699 + self.scheduler.output(msg + '\n', background=True,
39700 + log_path=self._log_path)
39701 + logging.error(msg)
39702 + else:
39703 + size_ok = st.st_size == self.digests["size"]
39704 +
39705 + if not size_ok:
39706 + if self.config.options.dry_run:
39707 + if st is not None:
39708 + logging.info(("dry-run: delete '%s' with "
39709 + "wrong size from distfiles") % (self.distfile,))
39710 + else:
39711 + # Do the unlink in order to ensure that the path is clear,
39712 + # even if stat raised ENOENT, since a broken symlink can
39713 + # trigger ENOENT.
39714 + if self._unlink_file(distfile_path, "distfiles"):
39715 + if st is not None:
39716 + logging.debug(("delete '%s' with "
39717 + "wrong size from distfiles") % (self.distfile,))
39718 + else:
39719 + self.config.log_failure("%s\t%s\t%s" %
39720 + (self.cpv, self.distfile, "unlink failed in distfiles"))
39721 + self.returncode = os.EX_OK
39722 + self._async_wait()
39723 + return
39724 +
39725 + if size_ok:
39726 + if self.config.options.verify_existing_digest:
39727 + self._start_task(
39728 + FileDigester(file_path=distfile_path,
39729 + hash_names=(self._select_hash(),),
39730 + background=self.background,
39731 + logfile=self._log_path), self._distfiles_digester_exit)
39732 + return
39733 +
39734 + self._success()
39735 + self.returncode = os.EX_OK
39736 + self._async_wait()
39737 + return
39738 +
39739 + self._start_fetch()
39740 +
39741 + def _success(self):
39742 + if not self._previously_added:
39743 + size = self.digests["size"]
39744 + self.config.added_byte_count += size
39745 + self.config.added_file_count += 1
39746 + self.config.log_success("%s\t%s\tadded %i bytes" %
39747 + (self.cpv, self.distfile, size))
39748 +
39749 + if self._log_path is not None:
39750 + if not self.config.options.dry_run:
39751 + try:
39752 + os.unlink(self._log_path)
39753 + except OSError:
39754 + pass
39755 +
39756 + if self.config.options.recycle_dir is not None:
39757 +
39758 + recycle_file = os.path.join(
39759 + self.config.options.recycle_dir, self.distfile)
39760 +
39761 + if self.config.options.dry_run:
39762 + if os.path.exists(recycle_file):
39763 + logging.info("dry-run: delete '%s' from recycle" %
39764 + (self.distfile,))
39765 + else:
39766 + try:
39767 + os.unlink(recycle_file)
39768 + except OSError:
39769 + pass
39770 + else:
39771 + logging.debug("delete '%s' from recycle" %
39772 + (self.distfile,))
39773 +
39774 + def _distfiles_digester_exit(self, digester):
39775 +
39776 + self._assert_current(digester)
39777 + if self._was_cancelled():
39778 + self.wait()
39779 + return
39780 +
39781 + if self._default_exit(digester) != os.EX_OK:
39782 + # IOError reading file in our main distfiles directory? This
39783 + # is a bad situation which normally does not occur, so
39784 + # skip this file and report it, in order to draw the
39785 + # administrator's attention.
39786 + msg = "%s distfiles digester failed unexpectedly" % \
39787 + (self.distfile,)
39788 + self.scheduler.output(msg + '\n', background=True,
39789 + log_path=self._log_path)
39790 + logging.error(msg)
39791 + self.config.log_failure("%s\t%s\t%s" %
39792 + (self.cpv, self.distfile, msg))
39793 + self.config.file_failures[self.distfile] = self.cpv
39794 + self.wait()
39795 + return
39796 +
39797 + wrong_digest = self._find_bad_digest(digester.digests)
39798 + if wrong_digest is None:
39799 + self._success()
39800 + self.returncode = os.EX_OK
39801 + self.wait()
39802 + return
39803 +
39804 + self._start_fetch()
39805 +
39806 + _mirror_info = collections.namedtuple('_mirror_info',
39807 + 'name location')
39808 +
39809 + def _start_fetch(self):
39810 +
39811 + self._previously_added = False
39812 + self._fs_mirror_stack = []
39813 + if self.config.options.distfiles_local is not None:
39814 + self._fs_mirror_stack.append(self._mirror_info(
39815 + 'distfiles-local', self.config.options.distfiles_local))
39816 + if self.config.options.recycle_dir is not None:
39817 + self._fs_mirror_stack.append(self._mirror_info(
39818 + 'recycle', self.config.options.recycle_dir))
39819 +
39820 + self._primaryuri_stack = []
39821 + self._mirror_stack = []
39822 + for uri in reversed(self.uri_tuple):
39823 + if uri.startswith('mirror://'):
39824 + self._mirror_stack.append(
39825 + self._mirror_iterator(uri, self.config.mirrors))
39826 + else:
39827 + self._primaryuri_stack.append(uri)
39828 +
39829 + self._tried_uris = set()
39830 + self._try_next_mirror()
39831 +
39832 + @staticmethod
39833 + def _mirror_iterator(uri, mirrors_dict):
39834 +
39835 + slash_index = uri.find("/", 9)
39836 + if slash_index != -1:
39837 + mirror_name = uri[9:slash_index].strip("/")
39838 + mirrors = mirrors_dict.get(mirror_name)
39839 + if not mirrors:
39840 + return
39841 + mirrors = list(mirrors)
39842 + while mirrors:
39843 + mirror = mirrors.pop(random.randint(0, len(mirrors) - 1))
39844 + yield mirror.rstrip("/") + "/" + uri[slash_index+1:]
39845 +
39846 + def _try_next_mirror(self):
39847 + if self._fs_mirror_stack:
39848 + self._fetch_fs(self._fs_mirror_stack.pop())
39849 + return
39850 + else:
39851 + uri = self._next_uri()
39852 + if uri is not None:
39853 + self._tried_uris.add(uri)
39854 + self._fetch_uri(uri)
39855 + return
39856 +
39857 + if self._tried_uris:
39858 + msg = "all uris failed"
39859 + else:
39860 + msg = "no fetchable uris"
39861 +
39862 + self.config.log_failure("%s\t%s\t%s" %
39863 + (self.cpv, self.distfile, msg))
39864 + self.config.file_failures[self.distfile] = self.cpv
39865 + self.returncode = os.EX_OK
39866 + self.wait()
39867 +
39868 + def _next_uri(self):
39869 + remaining_tries = self.config.options.tries - len(self._tried_uris)
39870 + if remaining_tries > 0:
39871 +
39872 + if remaining_tries <= self.config.options.tries / 2:
39873 + while self._primaryuri_stack:
39874 + uri = self._primaryuri_stack.pop()
39875 + if uri not in self._tried_uris:
39876 + return uri
39877 +
39878 + while self._mirror_stack:
39879 + uri = next(self._mirror_stack[-1], None)
39880 + if uri is None:
39881 + self._mirror_stack.pop()
39882 + else:
39883 + if uri not in self._tried_uris:
39884 + return uri
39885 +
39886 + while self._primaryuri_stack:
39887 + uri = self._primaryuri_stack.pop()
39888 + if uri not in self._tried_uris:
39889 + return uri
39890 +
39891 + return None
39892 +
39893 + def _fetch_fs(self, mirror_info):
39894 + file_path = os.path.join(mirror_info.location, self.distfile)
39895 +
39896 + st = None
39897 + size_ok = False
39898 + try:
39899 + st = os.stat(file_path)
39900 + except OSError as e:
39901 + if e.errno not in (errno.ENOENT, errno.ESTALE):
39902 + msg = "%s stat failed in %s: %s" % \
39903 + (self.distfile, mirror_info.name, e)
39904 + self.scheduler.output(msg + '\n', background=True,
39905 + log_path=self._log_path)
39906 + logging.error(msg)
39907 + else:
39908 + size_ok = st.st_size == self.digests["size"]
39909 + self._current_stat = st
39910 +
39911 + if size_ok:
39912 + self._current_mirror = mirror_info
39913 + self._start_task(
39914 + FileDigester(file_path=file_path,
39915 + hash_names=(self._select_hash(),),
39916 + background=self.background,
39917 + logfile=self._log_path),
39918 + self._fs_mirror_digester_exit)
39919 + else:
39920 + self._try_next_mirror()
39921 +
39922 + def _fs_mirror_digester_exit(self, digester):
39923 +
39924 + self._assert_current(digester)
39925 + if self._was_cancelled():
39926 + self.wait()
39927 + return
39928 +
39929 + current_mirror = self._current_mirror
39930 + if digester.returncode != os.EX_OK:
39931 + msg = "%s %s digester failed unexpectedly" % \
39932 + (self.distfile, current_mirror.name)
39933 + self.scheduler.output(msg + '\n', background=True,
39934 + log_path=self._log_path)
39935 + logging.error(msg)
39936 + else:
39937 + bad_digest = self._find_bad_digest(digester.digests)
39938 + if bad_digest is not None:
39939 + msg = "%s %s has bad %s digest: expected %s, got %s" % \
39940 + (self.distfile, current_mirror.name, bad_digest,
39941 + self.digests[bad_digest], digester.digests[bad_digest])
39942 + self.scheduler.output(msg + '\n', background=True,
39943 + log_path=self._log_path)
39944 + logging.error(msg)
39945 + elif self.config.options.dry_run:
39946 + # Report success without actually touching any files
39947 + if self._same_device(current_mirror.location,
39948 + self.config.options.distfiles):
39949 + logging.info(("dry-run: hardlink '%s' from %s "
39950 + "to distfiles") % (self.distfile, current_mirror.name))
39951 + else:
39952 + logging.info("dry-run: copy '%s' from %s to distfiles" %
39953 + (self.distfile, current_mirror.name))
39954 + self._success()
39955 + self.returncode = os.EX_OK
39956 + self.wait()
39957 + return
39958 + else:
39959 + src = os.path.join(current_mirror.location, self.distfile)
39960 + dest = os.path.join(self.config.options.distfiles, self.distfile)
39961 + if self._hardlink_atomic(src, dest,
39962 + "%s to %s" % (current_mirror.name, "distfiles")):
39963 + logging.debug("hardlink '%s' from %s to distfiles" %
39964 + (self.distfile, current_mirror.name))
39965 + self._success()
39966 + self.returncode = os.EX_OK
39967 + self.wait()
39968 + return
39969 + else:
39970 + self._start_task(
39971 + FileCopier(src_path=src, dest_path=dest,
39972 + background=(self.background and
39973 + self._log_path is not None),
39974 + logfile=self._log_path),
39975 + self._fs_mirror_copier_exit)
39976 + return
39977 +
39978 + self._try_next_mirror()
39979 +
39980 + def _fs_mirror_copier_exit(self, copier):
39981 +
39982 + self._assert_current(copier)
39983 + if self._was_cancelled():
39984 + self.wait()
39985 + return
39986 +
39987 + current_mirror = self._current_mirror
39988 + if copier.returncode != os.EX_OK:
39989 + msg = "%s %s copy failed unexpectedly" % \
39990 + (self.distfile, current_mirror.name)
39991 + self.scheduler.output(msg + '\n', background=True,
39992 + log_path=self._log_path)
39993 + logging.error(msg)
39994 + else:
39995 +
39996 + logging.debug("copy '%s' from %s to distfiles" %
39997 + (self.distfile, current_mirror.name))
39998 +
39999 + # Apply the timestamp from the source file, but
40000 + # just rely on umask for permissions.
40001 + try:
40002 + if sys.hexversion >= 0x3030000:
40003 + os.utime(copier.dest_path,
40004 + ns=(self._current_stat.st_mtime_ns,
40005 + self._current_stat.st_mtime_ns))
40006 + else:
40007 + os.utime(copier.dest_path,
40008 + (self._current_stat[stat.ST_MTIME],
40009 + self._current_stat[stat.ST_MTIME]))
40010 + except OSError as e:
40011 + msg = "%s %s utime failed unexpectedly: %s" % \
40012 + (self.distfile, current_mirror.name, e)
40013 + self.scheduler.output(msg + '\n', background=True,
40014 + log_path=self._log_path)
40015 + logging.error(msg)
40016 +
40017 + self._success()
40018 + self.returncode = os.EX_OK
40019 + self.wait()
40020 + return
40021 +
40022 + self._try_next_mirror()
40023 +
40024 + def _fetch_uri(self, uri):
40025 +
40026 + if self.config.options.dry_run:
40027 + # Simply report success.
40028 + logging.info("dry-run: fetch '%s' from '%s'" %
40029 + (self.distfile, uri))
40030 + self._success()
40031 + self.returncode = os.EX_OK
40032 + self.wait()
40033 + return
40034 +
40035 + if self.config.options.temp_dir:
40036 + self._fetch_tmp_dir_info = 'temp-dir'
40037 + distdir = self.config.options.temp_dir
40038 + else:
40039 + self._fetch_tmp_dir_info = 'distfiles'
40040 + distdir = self.config.options.distfiles
40041 +
40042 + tmp_basename = self.distfile + '._emirrordist_fetch_.%s' % os.getpid()
40043 +
40044 + variables = {
40045 + "DISTDIR": distdir,
40046 + "URI": uri,
40047 + "FILE": tmp_basename
40048 + }
40049 +
40050 + self._fetch_tmp_file = os.path.join(distdir, tmp_basename)
40051 +
40052 + try:
40053 + os.unlink(self._fetch_tmp_file)
40054 + except OSError:
40055 + pass
40056 +
40057 + args = portage.util.shlex_split(default_fetchcommand)
40058 + args = [portage.util.varexpand(x, mydict=variables)
40059 + for x in args]
40060 +
40061 + if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000 and \
40062 + not os.path.isabs(args[0]):
40063 + # Python 3.1 _execvp throws TypeError for non-absolute executable
40064 + # path passed as bytes (see http://bugs.python.org/issue8513).
40065 + fullname = portage.process.find_binary(args[0])
40066 + if fullname is None:
40067 + raise portage.exception.CommandNotFound(args[0])
40068 + args[0] = fullname
40069 +
40070 + args = [_unicode_encode(x,
40071 + encoding=_encodings['fs'], errors='strict') for x in args]
40072 +
40073 + null_fd = os.open(os.devnull, os.O_RDONLY)
40074 + fetcher = PopenProcess(background=self.background,
40075 + proc=subprocess.Popen(args, stdin=null_fd,
40076 + stdout=subprocess.PIPE, stderr=subprocess.STDOUT),
40077 + scheduler=self.scheduler)
40078 + os.close(null_fd)
40079 +
40080 + fetcher.pipe_reader = PipeLogger(background=self.background,
40081 + input_fd=fetcher.proc.stdout, log_file_path=self._log_path,
40082 + scheduler=self.scheduler)
40083 +
40084 + self._start_task(fetcher, self._fetcher_exit)
40085 +
40086 + def _fetcher_exit(self, fetcher):
40087 +
40088 + self._assert_current(fetcher)
40089 + if self._was_cancelled():
40090 + self.wait()
40091 + return
40092 +
40093 + if os.path.exists(self._fetch_tmp_file):
40094 + self._start_task(
40095 + FileDigester(file_path=self._fetch_tmp_file,
40096 + hash_names=(self._select_hash(),),
40097 + background=self.background,
40098 + logfile=self._log_path),
40099 + self._fetch_digester_exit)
40100 + else:
40101 + self._try_next_mirror()
40102 +
40103 + def _fetch_digester_exit(self, digester):
40104 +
40105 + self._assert_current(digester)
40106 + if self._was_cancelled():
40107 + self.wait()
40108 + return
40109 +
40110 + if digester.returncode != os.EX_OK:
40111 + msg = "%s %s digester failed unexpectedly" % \
40112 + (self.distfile, self._fetch_tmp_dir_info)
40113 + self.scheduler.output(msg + '\n', background=True,
40114 + log_path=self._log_path)
40115 + logging.error(msg)
40116 + else:
40117 + bad_digest = self._find_bad_digest(digester.digests)
40118 + if bad_digest is not None:
40119 + msg = "%s has bad %s digest: expected %s, got %s" % \
40120 + (self.distfile, bad_digest,
40121 + self.digests[bad_digest], digester.digests[bad_digest])
40122 + self.scheduler.output(msg + '\n', background=True,
40123 + log_path=self._log_path)
40124 + try:
40125 + os.unlink(self._fetch_tmp_file)
40126 + except OSError:
40127 + pass
40128 + else:
40129 + dest = os.path.join(self.config.options.distfiles, self.distfile)
40130 + try:
40131 + os.rename(self._fetch_tmp_file, dest)
40132 + except OSError:
40133 + self._start_task(
40134 + FileCopier(src_path=self._fetch_tmp_file,
40135 + dest_path=dest,
40136 + background=(self.background and
40137 + self._log_path is not None),
40138 + logfile=self._log_path),
40139 + self._fetch_copier_exit)
40140 + return
40141 + else:
40142 + self._success()
40143 + self.returncode = os.EX_OK
40144 + self.wait()
40145 + return
40146 +
40147 + self._try_next_mirror()
40148 +
40149 + def _fetch_copier_exit(self, copier):
40150 +
40151 + self._assert_current(copier)
40152 +
40153 + try:
40154 + os.unlink(self._fetch_tmp_file)
40155 + except OSError:
40156 + pass
40157 +
40158 + if self._was_cancelled():
40159 + self.wait()
40160 + return
40161 +
40162 + if copier.returncode == os.EX_OK:
40163 + self._success()
40164 + self.returncode = os.EX_OK
40165 + self.wait()
40166 + else:
40167 + # out of space?
40168 + msg = "%s %s copy failed unexpectedly" % \
40169 + (self.distfile, self._fetch_tmp_dir_info)
40170 + self.scheduler.output(msg + '\n', background=True,
40171 + log_path=self._log_path)
40172 + logging.error(msg)
40173 + self.config.log_failure("%s\t%s\t%s" %
40174 + (self.cpv, self.distfile, msg))
40175 + self.config.file_failures[self.distfile] = self.cpv
40176 + self.returncode = 1
40177 + self.wait()
40178 +
40179 + def _unlink_file(self, file_path, dir_info):
40180 + try:
40181 + os.unlink(file_path)
40182 + except OSError as e:
40183 + if e.errno not in (errno.ENOENT, errno.ESTALE):
40184 + msg = "unlink '%s' failed in %s: %s" % \
40185 + (self.distfile, dir_info, e)
40186 + self.scheduler.output(msg + '\n', background=True,
40187 + log_path=self._log_path)
40188 + logging.error(msg)
40189 + return False
40190 + return True
40191 +
40192 + def _have_needed_digests(self):
40193 + return "size" in self.digests and \
40194 + self._select_hash() is not None
40195 +
40196 + def _select_hash(self):
40197 + if default_hash_name in self.digests:
40198 + return default_hash_name
40199 + else:
40200 + for hash_name in self.digests:
40201 + if hash_name != "size" and \
40202 + hash_name in portage.checksum.hashfunc_map:
40203 + return hash_name
40204 +
40205 + return None
40206 +
40207 + def _find_bad_digest(self, digests):
40208 + for hash_name, hash_value in digests.items():
40209 + if self.digests[hash_name] != hash_value:
40210 + return hash_name
40211 + return None
40212 +
40213 + @staticmethod
40214 + def _same_device(path1, path2):
40215 + try:
40216 + st1 = os.stat(path1)
40217 + st2 = os.stat(path2)
40218 + except OSError:
40219 + return False
40220 + else:
40221 + return st1.st_dev == st2.st_dev
40222 +
40223 + def _hardlink_atomic(self, src, dest, dir_info):
40224 +
40225 + head, tail = os.path.split(dest)
40226 + hardlink_tmp = os.path.join(head, ".%s._mirrordist_hardlink_.%s" % \
40227 + (tail, os.getpid()))
40228 +
40229 + try:
40230 + try:
40231 + os.link(src, hardlink_tmp)
40232 + except OSError as e:
40233 + if e.errno != errno.EXDEV:
40234 + msg = "hardlink %s from %s failed: %s" % \
40235 + (self.distfile, dir_info, e)
40236 + self.scheduler.output(msg + '\n', background=True,
40237 + log_path=self._log_path)
40238 + logging.error(msg)
40239 + return False
40240 +
40241 + try:
40242 + os.rename(hardlink_tmp, dest)
40243 + except OSError as e:
40244 + msg = "hardlink rename '%s' from %s failed: %s" % \
40245 + (self.distfile, dir_info, e)
40246 + self.scheduler.output(msg + '\n', background=True,
40247 + log_path=self._log_path)
40248 + logging.error(msg)
40249 + return False
40250 + finally:
40251 + try:
40252 + os.unlink(hardlink_tmp)
40253 + except OSError:
40254 + pass
40255 +
40256 + return True
40257
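A FetchTask tries filesystem mirrors (distfiles-local, recycle) first, then network sources. Each mirror:// URI is expanded by _mirror_iterator() into concrete URLs, popping mirrors in random order so that load is spread across the mirror list. That generator, isolated into a standalone sketch with made-up mirror data (iter_mirror_uris is an illustrative name):

    import random

    def iter_mirror_uris(uri, mirrors_dict):
        """Yield concrete URLs for a mirror:// URI in random mirror order."""
        slash_index = uri.find("/", 9)
        if slash_index == -1:
            return
        name = uri[9:slash_index].strip("/")
        mirrors = list(mirrors_dict.get(name, []))
        while mirrors:
            mirror = mirrors.pop(random.randint(0, len(mirrors) - 1))
            yield mirror.rstrip("/") + "/" + uri[slash_index + 1:]

    # Example with fictitious mirror hosts:
    mirrors = {"gnu": ["http://mirror-a.example/gnu",
                       "http://mirror-b.example/gnu"]}
    for url in iter_mirror_uris("mirror://gnu/foo/foo-1.0.tar.gz", mirrors):
        print(url)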
40258 diff --git a/pym/portage/_emirrordist/MirrorDistTask.py b/pym/portage/_emirrordist/MirrorDistTask.py
40259 new file mode 100644
40260 index 0000000..571caa5
40261 --- /dev/null
40262 +++ b/pym/portage/_emirrordist/MirrorDistTask.py
40263 @@ -0,0 +1,219 @@
40264 +# Copyright 2013-2014 Gentoo Foundation
40265 +# Distributed under the terms of the GNU General Public License v2
40266 +
40267 +import errno
40268 +import logging
40269 +import sys
40270 +import time
40271 +
40272 +try:
40273 + import threading
40274 +except ImportError:
40275 + import dummy_threading as threading
40276 +
40277 +import portage
40278 +from portage import os
40279 +from portage.util._async.TaskScheduler import TaskScheduler
40280 +from _emerge.CompositeTask import CompositeTask
40281 +from .FetchIterator import FetchIterator
40282 +from .DeletionIterator import DeletionIterator
40283 +
40284 +if sys.hexversion >= 0x3000000:
40285 + # pylint: disable=W0622
40286 + long = int
40287 +
40288 +class MirrorDistTask(CompositeTask):
40289 +
40290 + __slots__ = ('_config', '_terminated', '_term_check_id')
40291 +
40292 + def __init__(self, config):
40293 + CompositeTask.__init__(self, scheduler=config.event_loop)
40294 + self._config = config
40295 + self._terminated = threading.Event()
40296 +
40297 + def _start(self):
40298 + self._term_check_id = self.scheduler.idle_add(self._termination_check)
40299 + fetch = TaskScheduler(iter(FetchIterator(self._config)),
40300 + max_jobs=self._config.options.jobs,
40301 + max_load=self._config.options.load_average,
40302 + event_loop=self._config.event_loop)
40303 + self._start_task(fetch, self._fetch_exit)
40304 +
40305 + def _fetch_exit(self, fetch):
40306 +
40307 + self._assert_current(fetch)
40308 + if self._was_cancelled():
40309 + self.wait()
40310 + return
40311 +
40312 + if self._config.options.delete:
40313 + deletion = TaskScheduler(iter(DeletionIterator(self._config)),
40314 + max_jobs=self._config.options.jobs,
40315 + max_load=self._config.options.load_average,
40316 + event_loop=self._config.event_loop)
40317 + self._start_task(deletion, self._deletion_exit)
40318 + return
40319 +
40320 + self._post_deletion()
40321 +
40322 + def _deletion_exit(self, deletion):
40323 +
40324 + self._assert_current(deletion)
40325 + if self._was_cancelled():
40326 + self.wait()
40327 + return
40328 +
40329 + self._post_deletion()
40330 +
40331 + def _post_deletion(self):
40332 +
40333 + if self._config.options.recycle_db is not None:
40334 + self._update_recycle_db()
40335 +
40336 + if self._config.options.scheduled_deletion_log is not None:
40337 + self._scheduled_deletion_log()
40338 +
40339 + self._summary()
40340 +
40341 + self.returncode = os.EX_OK
40342 + self._current_task = None
40343 + self.wait()
40344 +
40345 + def _update_recycle_db(self):
40346 +
40347 + start_time = self._config.start_time
40348 + recycle_dir = self._config.options.recycle_dir
40349 + recycle_db = self._config.recycle_db
40350 + r_deletion_delay = self._config.options.recycle_deletion_delay
40351 +
40352 + # Use a dict to optimize access.
40353 + recycle_db_cache = dict(recycle_db.items())
40354 +
40355 + for filename in os.listdir(recycle_dir):
40356 +
40357 + recycle_file = os.path.join(recycle_dir, filename)
40358 +
40359 + try:
40360 + st = os.stat(recycle_file)
40361 + except OSError as e:
40362 + if e.errno not in (errno.ENOENT, errno.ESTALE):
40363 + logging.error(("stat failed for '%s' in "
40364 + "recycle: %s") % (filename, e))
40365 + continue
40366 +
40367 + value = recycle_db_cache.pop(filename, None)
40368 + if value is None:
40369 + logging.debug(("add '%s' to "
40370 + "recycle db") % filename)
40371 + recycle_db[filename] = (st.st_size, start_time)
40372 + else:
40373 + r_size, r_time = value
40374 + if long(r_size) != st.st_size:
40375 + recycle_db[filename] = (st.st_size, start_time)
40376 + elif r_time + r_deletion_delay < start_time:
40377 + if self._config.options.dry_run:
40378 + logging.info(("dry-run: delete '%s' from "
40379 + "recycle") % filename)
40380 + logging.info(("drop '%s' from "
40381 + "recycle db") % filename)
40382 + else:
40383 + try:
40384 + os.unlink(recycle_file)
40385 + except OSError as e:
40386 + if e.errno not in (errno.ENOENT, errno.ESTALE):
40387 + logging.error(("delete '%s' from "
40388 + "recycle failed: %s") % (filename, e))
40389 + else:
40390 + logging.debug(("delete '%s' from "
40391 + "recycle") % filename)
40392 + try:
40393 + del recycle_db[filename]
40394 + except KeyError:
40395 + pass
40396 + else:
40397 + logging.debug(("drop '%s' from "
40398 + "recycle db") % filename)
40399 +
40400 + # Existing files were popped from recycle_db_cache,
40401 + # so any remaining entries are for files that no
40402 + # longer exist.
40403 + for filename in recycle_db_cache:
40404 + try:
40405 + del recycle_db[filename]
40406 + except KeyError:
40407 + pass
40408 + else:
40409 + logging.debug(("drop non-existent '%s' from "
40410 + "recycle db") % filename)
40411 +
40412 + def _scheduled_deletion_log(self):
40413 +
40414 + start_time = self._config.start_time
40415 + dry_run = self._config.options.dry_run
40416 + deletion_delay = self._config.options.deletion_delay
40417 + distfiles_db = self._config.distfiles_db
40418 +
40419 + date_map = {}
40420 + for filename, timestamp in self._config.deletion_db.items():
40421 + date = timestamp + deletion_delay
40422 + if date < start_time:
40423 + date = start_time
40424 + date = time.strftime("%Y-%m-%d", time.gmtime(date))
40425 + date_files = date_map.get(date)
40426 + if date_files is None:
40427 + date_files = []
40428 + date_map[date] = date_files
40429 + date_files.append(filename)
40430 +
40431 + if dry_run:
40432 + logging.warn(("dry-run: scheduled-deletions log "
40433 + "will be summarized via logging.info"))
40434 +
40435 + lines = []
40436 + for date in sorted(date_map):
40437 + date_files = date_map[date]
40438 + if dry_run:
40439 + logging.info(("dry-run: scheduled deletions for %s: %s files") %
40440 + (date, len(date_files)))
40441 + lines.append("%s\n" % date)
40442 + for filename in date_files:
40443 + cpv = "unknown"
40444 + if distfiles_db is not None:
40445 + cpv = distfiles_db.get(filename, cpv)
40446 + lines.append("\t%s\t%s\n" % (filename, cpv))
40447 +
40448 + if not dry_run:
40449 + portage.util.write_atomic(
40450 + self._config.options.scheduled_deletion_log,
40451 + "".join(lines))
40452 +
40453 + def _summary(self):
40454 + elapsed_time = time.time() - self._config.start_time
40455 + fail_count = len(self._config.file_failures)
40456 + delete_count = self._config.delete_count
40457 + scheduled_deletion_count = self._config.scheduled_deletion_count - delete_count
40458 + added_file_count = self._config.added_file_count
40459 + added_byte_count = self._config.added_byte_count
40460 +
40461 + logging.info("finished in %i seconds" % elapsed_time)
40462 + logging.info("failed to fetch %i files" % fail_count)
40463 + logging.info("deleted %i files" % delete_count)
40464 + logging.info("deletion of %i files scheduled" %
40465 + scheduled_deletion_count)
40466 + logging.info("added %i files" % added_file_count)
40467 + logging.info("added %i bytes total" % added_byte_count)
40468 +
40469 + def terminate(self):
40470 + self._terminated.set()
40471 +
40472 + def _termination_check(self):
40473 + if self._terminated.is_set():
40474 + self.cancel()
40475 + self.wait()
40476 + return True
40477 +
40478 + def _wait(self):
40479 + CompositeTask._wait(self)
40480 + if self._term_check_id is not None:
40481 + self.scheduler.source_remove(self._term_check_id)
40482 + self._term_check_id = None
40483
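MirrorDistTask is the top-level driver: it schedules a FetchIterator pass, optionally a DeletionIterator pass, then updates the recycle db, writes the scheduled-deletion log and prints a summary. Judging from the imports at the top of main.py, the pieces are presumably wired together roughly as follows (run_mirror is a hypothetical helper, and the exact behaviour of run_main_scheduler is assumed rather than quoted):

    # Rough sketch, not a verbatim excerpt of main.py.
    from portage.util._async.run_main_scheduler import run_main_scheduler
    from portage.util._async.SchedulerInterface import SchedulerInterface
    from portage.util._eventloop.global_event_loop import global_event_loop

    from portage._emirrordist.Config import Config
    from portage._emirrordist.MirrorDistTask import MirrorDistTask

    def run_mirror(options, portdb):
        # Config is a context manager, so its log files and shelve
        # databases are closed even if the run is interrupted.
        with Config(options, portdb,
                SchedulerInterface(global_event_loop())) as config:
            task = MirrorDistTask(config)
            # run_main_scheduler() is assumed to start the task, wait
            # for it, and translate SIGINT/SIGTERM into terminate().
            returncode = run_main_scheduler(task)
        return returncode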
40484 diff --git a/pym/portage/_emirrordist/__init__.py b/pym/portage/_emirrordist/__init__.py
40485 new file mode 100644
40486 index 0000000..6cde932
40487 --- /dev/null
40488 +++ b/pym/portage/_emirrordist/__init__.py
40489 @@ -0,0 +1,2 @@
40490 +# Copyright 2013 Gentoo Foundation
40491 +# Distributed under the terms of the GNU General Public License v2
40492
40493 diff --git a/pym/portage/_emirrordist/main.py b/pym/portage/_emirrordist/main.py
40494 new file mode 100644
40495 index 0000000..ce92c2a
40496 --- /dev/null
40497 +++ b/pym/portage/_emirrordist/main.py
40498 @@ -0,0 +1,463 @@
40499 +# Copyright 2013-2014 Gentoo Foundation
40500 +# Distributed under the terms of the GNU General Public License v2
40501 +
40502 +import logging
40503 +import sys
40504 +
40505 +import portage
40506 +from portage import os
40507 +from portage.util import normalize_path, writemsg_level, _recursive_file_list
40508 +from portage.util._argparse import ArgumentParser
40509 +from portage.util._async.run_main_scheduler import run_main_scheduler
40510 +from portage.util._async.SchedulerInterface import SchedulerInterface
40511 +from portage.util._eventloop.global_event_loop import global_event_loop
40512 +from .Config import Config
40513 +from .MirrorDistTask import MirrorDistTask
40514 +
40515 +if sys.hexversion >= 0x3000000:
40516 + # pylint: disable=W0622
40517 + long = int
40518 +
40519 +seconds_per_day = 24 * 60 * 60
40520 +
40521 +common_options = (
40522 + {
40523 + "longopt" : "--dry-run",
40524 + "help" : "perform a trial run with no changes made (usually combined "
40525 + "with --verbose)",
40526 + "action" : "store_true"
40527 + },
40528 + {
40529 + "longopt" : "--verbose",
40530 + "shortopt" : "-v",
40531 + "help" : "display extra information on stderr "
40532 + "(multiple occurences increase verbosity)",
40533 + "action" : "count",
40534 + "default" : 0,
40535 + },
40536 + {
40537 + "longopt" : "--ignore-default-opts",
40538 + "help" : "do not use the EMIRRORDIST_DEFAULT_OPTS environment variable",
40539 + "action" : "store_true"
40540 + },
40541 + {
40542 + "longopt" : "--distfiles",
40543 + "help" : "distfiles directory to use (required)",
40544 + "metavar" : "DIR"
40545 + },
40546 + {
40547 + "longopt" : "--jobs",
40548 + "shortopt" : "-j",
40549 + "help" : "number of concurrent jobs to run",
40550 + "type" : int
40551 + },
40552 + {
40553 + "longopt" : "--load-average",
40554 + "shortopt" : "-l",
40555 + "help" : "load average limit for spawning of new concurrent jobs",
40556 + "metavar" : "LOAD",
40557 + "type" : float
40558 + },
40559 + {
40560 + "longopt" : "--tries",
40561 + "help" : "maximum number of tries per file, 0 means unlimited (default is 10)",
40562 + "default" : 10,
40563 + "type" : int
40564 + },
40565 + {
40566 + "longopt" : "--repo",
40567 + "help" : "name of repo to operate on"
40568 + },
40569 + {
40570 + "longopt" : "--config-root",
40571 + "help" : "location of portage config files",
40572 + "metavar" : "DIR"
40573 + },
40574 + {
40575 + "longopt" : "--portdir",
40576 + "help" : "override the PORTDIR variable (deprecated in favor of --repositories-configuration)",
40577 + "metavar" : "DIR"
40578 + },
40579 + {
40580 + "longopt" : "--portdir-overlay",
40581 + "help" : "override the PORTDIR_OVERLAY variable (deprecated in favor of --repositories-configuration)"
40582 + },
40583 + {
40584 + "longopt" : "--repositories-configuration",
40585 + "help" : "override configuration of repositories (in format of repos.conf)"
40586 + },
40587 + {
40588 + "longopt" : "--strict-manifests",
40589 + "help" : "manually override \"strict\" FEATURES setting",
40590 + "choices" : ("y", "n"),
40591 + "metavar" : "<y|n>",
40592 + },
40593 + {
40594 + "longopt" : "--failure-log",
40595 + "help" : "log file for fetch failures, with tab-delimited "
40596 + "output, for reporting purposes",
40597 + "metavar" : "FILE"
40598 + },
40599 + {
40600 + "longopt" : "--success-log",
40601 + "help" : "log file for fetch successes, with tab-delimited "
40602 + "output, for reporting purposes",
40603 + "metavar" : "FILE"
40604 + },
40605 + {
40606 + "longopt" : "--scheduled-deletion-log",
40607 + "help" : "log file for scheduled deletions, with tab-delimited "
40608 + "output, for reporting purposes",
40609 + "metavar" : "FILE"
40610 + },
40611 + {
40612 + "longopt" : "--delete",
40613 + "help" : "enable deletion of unused distfiles",
40614 + "action" : "store_true"
40615 + },
40616 + {
40617 + "longopt" : "--deletion-db",
40618 + "help" : "database file used to track lifetime of files "
40619 + "scheduled for delayed deletion",
40620 + "metavar" : "FILE"
40621 + },
40622 + {
40623 + "longopt" : "--deletion-delay",
40624 + "help" : "delay time for deletion, measured in seconds",
40625 + "metavar" : "SECONDS"
40626 + },
40627 + {
40628 + "longopt" : "--temp-dir",
40629 + "help" : "temporary directory for downloads",
40630 + "metavar" : "DIR"
40631 + },
40632 + {
40633 + "longopt" : "--mirror-overrides",
40634 + "help" : "file holding a list of mirror overrides",
40635 + "metavar" : "FILE"
40636 + },
40637 + {
40638 + "longopt" : "--mirror-skip",
40639 + "help" : "comma delimited list of mirror targets to skip "
40640 + "when fetching"
40641 + },
40642 + {
40643 + "longopt" : "--restrict-mirror-exemptions",
40644 + "help" : "comma delimited list of mirror targets for which to "
40645 + "ignore RESTRICT=\"mirror\""
40646 + },
40647 + {
40648 + "longopt" : "--verify-existing-digest",
40649 + "help" : "use digest as a verification of whether existing "
40650 + "distfiles are valid",
40651 + "action" : "store_true"
40652 + },
40653 + {
40654 + "longopt" : "--distfiles-local",
40655 + "help" : "distfiles-local directory to use",
40656 + "metavar" : "DIR"
40657 + },
40658 + {
40659 + "longopt" : "--distfiles-db",
40660 + "help" : "database file used to track which ebuilds a "
40661 + "distfile belongs to",
40662 + "metavar" : "FILE"
40663 + },
40664 + {
40665 + "longopt" : "--recycle-dir",
40666 + "help" : "directory for extended retention of files that "
40667 + "are removed from distdir with the --delete option",
40668 + "metavar" : "DIR"
40669 + },
40670 + {
40671 + "longopt" : "--recycle-db",
40672 + "help" : "database file used to track lifetime of files "
40673 + "in recycle dir",
40674 + "metavar" : "FILE"
40675 + },
40676 + {
40677 + "longopt" : "--recycle-deletion-delay",
40678 + "help" : "delay time for deletion of unused files from "
40679 + "recycle dir, measured in seconds (defaults to "
40680 + "the equivalent of 60 days)",
40681 + "default" : 60 * seconds_per_day,
40682 + "metavar" : "SECONDS",
40683 + "type" : int
40684 + },
40685 + {
40686 + "longopt" : "--fetch-log-dir",
40687 + "help" : "directory for individual fetch logs",
40688 + "metavar" : "DIR"
40689 + },
40690 + {
40691 + "longopt" : "--whitelist-from",
40692 + "help" : "specifies a file containing a list of files to "
40693 + "whitelist, one per line, # prefixed lines ignored",
40694 + "action" : "append",
40695 + "metavar" : "FILE"
40696 + },
40697 +)
40698 +
40699 +def parse_args(args):
40700 + description = "emirrordist - a fetch tool for mirroring " \
40701 + "of package distfiles"
40702 + usage = "emirrordist [options] <action>"
40703 + parser = ArgumentParser(description=description, usage=usage)
40704 +
40705 + actions = parser.add_argument_group('Actions')
40706 + actions.add_argument("--version",
40707 + action="store_true",
40708 + help="display portage version and exit")
40709 + actions.add_argument("--mirror",
40710 + action="store_true",
40711 + help="mirror distfiles for the selected repository")
40712 +
40713 + common = parser.add_argument_group('Common options')
40714 + for opt_info in common_options:
40715 + opt_pargs = [opt_info["longopt"]]
40716 + if opt_info.get("shortopt"):
40717 + opt_pargs.append(opt_info["shortopt"])
40718 + opt_kwargs = {"help" : opt_info["help"]}
40719 + for k in ("action", "choices", "default", "metavar", "type"):
40720 + if k in opt_info:
40721 + opt_kwargs[k] = opt_info[k]
40722 + common.add_argument(*opt_pargs, **opt_kwargs)
40723 +
40724 + options, args = parser.parse_known_args(args)
40725 +
40726 + return (parser, options, args)
40727 +
40728 +def emirrordist_main(args):
40729 +
40730 + # The calling environment is ignored, so the program is
40731 + # completely controlled by commandline arguments.
40732 + env = {}
40733 +
40734 + if not sys.stdout.isatty():
40735 + portage.output.nocolor()
40736 + env['NOCOLOR'] = 'true'
40737 +
40738 + parser, options, args = parse_args(args)
40739 +
40740 + if options.version:
40741 + sys.stdout.write("Portage %s\n" % portage.VERSION)
40742 + return os.EX_OK
40743 +
40744 + config_root = options.config_root
40745 +
40746 + if options.portdir is not None:
40747 + writemsg_level("emirrordist: warning: --portdir option is deprecated in favor of --repositories-configuration option\n",
40748 + level=logging.WARNING, noiselevel=-1)
40749 + if options.portdir_overlay is not None:
40750 + writemsg_level("emirrordist: warning: --portdir-overlay option is deprecated in favor of --repositories-configuration option\n",
40751 + level=logging.WARNING, noiselevel=-1)
40752 +
40753 + if options.repositories_configuration is not None:
40754 + env['PORTAGE_REPOSITORIES'] = options.repositories_configuration
40755 + elif options.portdir_overlay is not None:
40756 + env['PORTDIR_OVERLAY'] = options.portdir_overlay
40757 +
40758 + if options.portdir is not None:
40759 + env['PORTDIR'] = options.portdir
40760 +
40761 + settings = portage.config(config_root=config_root,
40762 + local_config=False, env=env)
40763 +
40764 + default_opts = None
40765 + if not options.ignore_default_opts:
40766 + default_opts = settings.get('EMIRRORDIST_DEFAULT_OPTS', '').split()
40767 +
40768 + if default_opts:
40769 + parser, options, args = parse_args(default_opts + args)
40770 +
40771 + settings = portage.config(config_root=config_root,
40772 + local_config=False, env=env)
40773 +
40774 + if options.repo is None:
40775 + if len(settings.repositories.prepos) == 2:
40776 + for repo in settings.repositories:
40777 + if repo.name != "DEFAULT":
40778 + options.repo = repo.name
40779 + break
40780 +
40781 + if options.repo is None:
40782 + parser.error("--repo option is required")
40783 +
40784 + repo_path = settings.repositories.treemap.get(options.repo)
40785 + if repo_path is None:
40786 + parser.error("Unable to locate repository named '%s'" % (options.repo,))
40787 +
40788 + if options.jobs is not None:
40789 + options.jobs = int(options.jobs)
40790 +
40791 + if options.load_average is not None:
40792 + options.load_average = float(options.load_average)
40793 +
40794 + if options.failure_log is not None:
40795 + options.failure_log = normalize_path(
40796 + os.path.abspath(options.failure_log))
40797 +
40798 + parent_dir = os.path.dirname(options.failure_log)
40799 + if not (os.path.isdir(parent_dir) and
40800 + os.access(parent_dir, os.W_OK|os.X_OK)):
40801 + parser.error(("--failure-log '%s' parent is not a "
40802 + "writable directory") % options.failure_log)
40803 +
40804 + if options.success_log is not None:
40805 + options.success_log = normalize_path(
40806 + os.path.abspath(options.success_log))
40807 +
40808 + parent_dir = os.path.dirname(options.success_log)
40809 + if not (os.path.isdir(parent_dir) and
40810 + os.access(parent_dir, os.W_OK|os.X_OK)):
40811 + parser.error(("--success-log '%s' parent is not a "
40812 + "writable directory") % options.success_log)
40813 +
40814 + if options.scheduled_deletion_log is not None:
40815 + options.scheduled_deletion_log = normalize_path(
40816 + os.path.abspath(options.scheduled_deletion_log))
40817 +
40818 + parent_dir = os.path.dirname(options.scheduled_deletion_log)
40819 + if not (os.path.isdir(parent_dir) and
40820 + os.access(parent_dir, os.W_OK|os.X_OK)):
40821 + parser.error(("--scheduled-deletion-log '%s' parent is not a "
40822 + "writable directory") % options.scheduled_deletion_log)
40823 +
40824 + if options.deletion_db is None:
40825 + parser.error("--scheduled-deletion-log requires --deletion-db")
40826 +
40827 + if options.deletion_delay is not None:
40828 + options.deletion_delay = long(options.deletion_delay)
40829 + if options.deletion_db is None:
40830 + parser.error("--deletion-delay requires --deletion-db")
40831 +
40832 + if options.deletion_db is not None:
40833 + if options.deletion_delay is None:
40834 + parser.error("--deletion-db requires --deletion-delay")
40835 + options.deletion_db = normalize_path(
40836 + os.path.abspath(options.deletion_db))
40837 +
40838 + if options.temp_dir is not None:
40839 + options.temp_dir = normalize_path(
40840 + os.path.abspath(options.temp_dir))
40841 +
40842 + if not (os.path.isdir(options.temp_dir) and
40843 + os.access(options.temp_dir, os.W_OK|os.X_OK)):
40844 + parser.error(("--temp-dir '%s' is not a "
40845 + "writable directory") % options.temp_dir)
40846 +
40847 + if options.distfiles is not None:
40848 + options.distfiles = normalize_path(
40849 + os.path.abspath(options.distfiles))
40850 +
40851 + if not (os.path.isdir(options.distfiles) and
40852 + os.access(options.distfiles, os.W_OK|os.X_OK)):
40853 + parser.error(("--distfiles '%s' is not a "
40854 + "writable directory") % options.distfiles)
40855 + else:
40856 + parser.error("missing required --distfiles parameter")
40857 +
40858 + if options.mirror_overrides is not None:
40859 + options.mirror_overrides = normalize_path(
40860 + os.path.abspath(options.mirror_overrides))
40861 +
40862 + if not (os.access(options.mirror_overrides, os.R_OK) and
40863 + os.path.isfile(options.mirror_overrides)):
40864 + parser.error(
40865 + "--mirror-overrides-file '%s' is not a readable file" %
40866 + options.mirror_overrides)
40867 +
40868 + if options.distfiles_local is not None:
40869 + options.distfiles_local = normalize_path(
40870 + os.path.abspath(options.distfiles_local))
40871 +
40872 + if not (os.path.isdir(options.distfiles_local) and
40873 + os.access(options.distfiles_local, os.W_OK|os.X_OK)):
40874 + parser.error(("--distfiles-local '%s' is not a "
40875 + "writable directory") % options.distfiles_local)
40876 +
40877 + if options.distfiles_db is not None:
40878 + options.distfiles_db = normalize_path(
40879 + os.path.abspath(options.distfiles_db))
40880 +
40881 + if options.tries is not None:
40882 + options.tries = int(options.tries)
40883 +
40884 + if options.recycle_dir is not None:
40885 + options.recycle_dir = normalize_path(
40886 + os.path.abspath(options.recycle_dir))
40887 + if not (os.path.isdir(options.recycle_dir) and
40888 + os.access(options.recycle_dir, os.W_OK|os.X_OK)):
40889 + parser.error(("--recycle-dir '%s' is not a "
40890 + "writable directory") % options.recycle_dir)
40891 +
40892 + if options.recycle_db is not None:
40893 + if options.recycle_dir is None:
40894 + parser.error("--recycle-db requires "
40895 + "--recycle-dir to be specified")
40896 + options.recycle_db = normalize_path(
40897 + os.path.abspath(options.recycle_db))
40898 +
40899 + if options.recycle_deletion_delay is not None:
40900 + options.recycle_deletion_delay = \
40901 + long(options.recycle_deletion_delay)
40902 +
40903 + if options.fetch_log_dir is not None:
40904 + options.fetch_log_dir = normalize_path(
40905 + os.path.abspath(options.fetch_log_dir))
40906 +
40907 + if not (os.path.isdir(options.fetch_log_dir) and
40908 + os.access(options.fetch_log_dir, os.W_OK|os.X_OK)):
40909 + parser.error(("--fetch-log-dir '%s' is not a "
40910 + "writable directory") % options.fetch_log_dir)
40911 +
40912 + if options.whitelist_from:
40913 + normalized_paths = []
40914 + for x in options.whitelist_from:
40915 + path = normalize_path(os.path.abspath(x))
40916 + if not os.access(path, os.R_OK):
40917 + parser.error("--whitelist-from '%s' is not readable" % x)
40918 + if os.path.isfile(path):
40919 + normalized_paths.append(path)
40920 + elif os.path.isdir(path):
40921 + for file in _recursive_file_list(path):
40922 + if not os.access(file, os.R_OK):
40923 + parser.error("--whitelist-from '%s' directory contains not readable file '%s'" % (x, file))
40924 + normalized_paths.append(file)
40925 + else:
40926 + parser.error("--whitelist-from '%s' is not a regular file or a directory" % x)
40927 + options.whitelist_from = normalized_paths
40928 +
40929 + if options.strict_manifests is not None:
40930 + if options.strict_manifests == "y":
40931 + settings.features.add("strict")
40932 + else:
40933 + settings.features.discard("strict")
40934 +
40935 + settings.lock()
40936 +
40937 + portdb = portage.portdbapi(mysettings=settings)
40938 +
40939 + # Limit ebuilds to the specified repo.
40940 + portdb.porttrees = [repo_path]
40941 +
40942 + portage.util.initialize_logger()
40943 +
40944 + if options.verbose > 0:
40945 + l = logging.getLogger()
40946 + l.setLevel(l.getEffectiveLevel() - 10 * options.verbose)
40947 +
40948 + with Config(options, portdb,
40949 + SchedulerInterface(global_event_loop())) as config:
40950 +
40951 + if not options.mirror:
40952 + parser.error('No action specified')
40953 +
40954 + returncode = os.EX_OK
40955 +
40956 + if options.mirror:
40957 + signum = run_main_scheduler(MirrorDistTask(config))
40958 + if signum is not None:
40959 + sys.exit(128 + signum)
40960 +
40961 + return returncode
40962
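[Note] The emirrordist option handling above repeats one pattern for --temp-dir, --distfiles, --distfiles-local, --recycle-dir and --fetch-log-dir: normalize the path, then require a directory the user can write to and traverse. A minimal sketch of that pattern as a standalone helper; the helper name is illustrative and not part of the commit:

    import os
    from portage.util import normalize_path

    def _require_writable_dir(parser, opt_name, path):
        """Normalize 'path' and abort via parser.error() unless it is a
        writable, searchable directory."""
        path = normalize_path(os.path.abspath(path))
        if not (os.path.isdir(path) and os.access(path, os.W_OK | os.X_OK)):
            parser.error("%s '%s' is not a writable directory" % (opt_name, path))
        return path

    # e.g.: options.distfiles = _require_writable_dir(parser, "--distfiles", options.distfiles)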
40963 diff --git a/pym/portage/_global_updates.py b/pym/portage/_global_updates.py
40964 index c0f3df0..dde7268 100644
40965 --- a/pym/portage/_global_updates.py
40966 +++ b/pym/portage/_global_updates.py
40967 @@ -1,4 +1,4 @@
40968 -# Copyright 2010-2012 Gentoo Foundation
40969 +# Copyright 2010-2014 Gentoo Foundation
40970 # Distributed under the terms of the GNU General Public License v2
40971
40972 from __future__ import print_function
40973 @@ -46,12 +46,6 @@ def _do_global_updates(trees, prev_mtimes, quiet=False, if_mtime_changed=True):
40974 portdb = trees[root]["porttree"].dbapi
40975 vardb = trees[root]["vartree"].dbapi
40976 bindb = trees[root]["bintree"].dbapi
40977 - if not os.access(bindb.bintree.pkgdir, os.W_OK):
40978 - bindb = None
40979 - else:
40980 - # Call binarytree.populate(), since we want to make sure it's
40981 - # only populated with local packages here (getbinpkgs=0).
40982 - bindb.bintree.populate()
40983
40984 world_file = os.path.join(mysettings['EROOT'], WORLD_FILE)
40985 world_list = grabfile(world_file)
40986 @@ -92,14 +86,10 @@ def _do_global_updates(trees, prev_mtimes, quiet=False, if_mtime_changed=True):
40987 if not update_notice_printed:
40988 update_notice_printed = True
40989 writemsg_stdout("\n")
40990 - if quiet:
40991 - writemsg_stdout(colorize("GOOD",
40992 - _("Performing Global Updates\n")))
40993 - writemsg_stdout(_("(Could take a couple of minutes if you have a lot of binary packages.)\n"))
40994 - else:
40995 - writemsg_stdout(colorize("GOOD",
40996 - _("Performing Global Updates:\n")))
40997 - writemsg_stdout(_("(Could take a couple of minutes if you have a lot of binary packages.)\n"))
40998 + writemsg_stdout(colorize("GOOD",
40999 + _("Performing Global Updates\n")))
41000 + writemsg_stdout(_("(Could take a couple of minutes if you have a lot of binary packages.)\n"))
41001 + if not quiet:
41002 writemsg_stdout(_(" %s='update pass' %s='binary update' "
41003 "%s='/var/db update' %s='/var/db move'\n"
41004 " %s='/var/db SLOT move' %s='binary move' "
41005 @@ -120,63 +110,71 @@ def _do_global_updates(trees, prev_mtimes, quiet=False, if_mtime_changed=True):
41006 if myupd:
41007 retupd = True
41008
41009 + if retupd:
41010 + if os.access(bindb.bintree.pkgdir, os.W_OK):
41011 + # Call binarytree.populate(), since we want to make sure it's
41012 + # only populated with local packages here (getbinpkgs=0).
41013 + bindb.bintree.populate()
41014 + else:
41015 + bindb = None
41016 +
41017 master_repo = portdb.getRepositoryName(portdb.porttree_root)
41018 if master_repo in repo_map:
41019 repo_map['DEFAULT'] = repo_map[master_repo]
41020
41021 for repo_name, myupd in repo_map.items():
41022 - if repo_name == 'DEFAULT':
41023 - continue
41024 - if not myupd:
41025 - continue
41026 -
41027 - def repo_match(repository):
41028 - return repository == repo_name or \
41029 - (repo_name == master_repo and repository not in repo_map)
41030 -
41031 - def _world_repo_match(atoma, atomb):
41032 - """
41033 - Check whether to perform a world change from atoma to atomb.
41034 - If best vardb match for atoma comes from the same repository
41035 - as the update file, allow that. Additionally, if portdb still
41036 - can find a match for old atom name, warn about that.
41037 - """
41038 - matches = vardb.match(atoma)
41039 - if not matches:
41040 - matches = vardb.match(atomb)
41041 - if matches and \
41042 - repo_match(vardb.aux_get(best(matches), ['repository'])[0]):
41043 - if portdb.match(atoma):
41044 - world_warnings.add((atoma, atomb))
41045 - return True
41046 - else:
41047 - return False
41048 + if repo_name == 'DEFAULT':
41049 + continue
41050 + if not myupd:
41051 + continue
41052
41053 - for update_cmd in myupd:
41054 - for pos, atom in enumerate(world_list):
41055 - new_atom = update_dbentry(update_cmd, atom)
41056 - if atom != new_atom:
41057 - if _world_repo_match(atom, new_atom):
41058 - world_list[pos] = new_atom
41059 - world_modified = True
41060 -
41061 - for update_cmd in myupd:
41062 - if update_cmd[0] == "move":
41063 - moves = vardb.move_ent(update_cmd, repo_match=repo_match)
41064 + def repo_match(repository):
41065 + return repository == repo_name or \
41066 + (repo_name == master_repo and repository not in repo_map)
41067 +
41068 + def _world_repo_match(atoma, atomb):
41069 + """
41070 + Check whether to perform a world change from atoma to atomb.
41071 + If best vardb match for atoma comes from the same repository
41072 + as the update file, allow that. Additionally, if portdb still
41073 + can find a match for old atom name, warn about that.
41074 + """
41075 + matches = vardb.match(atoma)
41076 + if not matches:
41077 + matches = vardb.match(atomb)
41078 + if matches and \
41079 + repo_match(vardb.aux_get(best(matches), ['repository'])[0]):
41080 + if portdb.match(atoma):
41081 + world_warnings.add((atoma, atomb))
41082 + return True
41083 + else:
41084 + return False
41085 +
41086 + for update_cmd in myupd:
41087 + for pos, atom in enumerate(world_list):
41088 + new_atom = update_dbentry(update_cmd, atom)
41089 + if atom != new_atom:
41090 + if _world_repo_match(atom, new_atom):
41091 + world_list[pos] = new_atom
41092 + world_modified = True
41093 +
41094 + for update_cmd in myupd:
41095 + if update_cmd[0] == "move":
41096 + moves = vardb.move_ent(update_cmd, repo_match=repo_match)
41097 + if moves:
41098 + writemsg_stdout(moves * "@")
41099 + if bindb:
41100 + moves = bindb.move_ent(update_cmd, repo_match=repo_match)
41101 if moves:
41102 - writemsg_stdout(moves * "@")
41103 - if bindb:
41104 - moves = bindb.move_ent(update_cmd, repo_match=repo_match)
41105 - if moves:
41106 - writemsg_stdout(moves * "%")
41107 - elif update_cmd[0] == "slotmove":
41108 - moves = vardb.move_slot_ent(update_cmd, repo_match=repo_match)
41109 + writemsg_stdout(moves * "%")
41110 + elif update_cmd[0] == "slotmove":
41111 + moves = vardb.move_slot_ent(update_cmd, repo_match=repo_match)
41112 + if moves:
41113 + writemsg_stdout(moves * "s")
41114 + if bindb:
41115 + moves = bindb.move_slot_ent(update_cmd, repo_match=repo_match)
41116 if moves:
41117 - writemsg_stdout(moves * "s")
41118 - if bindb:
41119 - moves = bindb.move_slot_ent(update_cmd, repo_match=repo_match)
41120 - if moves:
41121 - writemsg_stdout(moves * "S")
41122 + writemsg_stdout(moves * "S")
41123
41124 if world_modified:
41125 world_list.sort()
41126 @@ -189,65 +187,65 @@ def _do_global_updates(trees, prev_mtimes, quiet=False, if_mtime_changed=True):
41127
41128 if retupd:
41129
41130 - def _config_repo_match(repo_name, atoma, atomb):
41131 - """
41132 - Check whether to perform a world change from atoma to atomb.
41133 - If best vardb match for atoma comes from the same repository
41134 - as the update file, allow that. Additionally, if portdb still
41135 - can find a match for old atom name, warn about that.
41136 - """
41137 - matches = vardb.match(atoma)
41138 + def _config_repo_match(repo_name, atoma, atomb):
41139 + """
41140 + Check whether to perform a world change from atoma to atomb.
41141 + If best vardb match for atoma comes from the same repository
41142 + as the update file, allow that. Additionally, if portdb still
41143 + can find a match for old atom name, warn about that.
41144 + """
41145 + matches = vardb.match(atoma)
41146 + if not matches:
41147 + matches = vardb.match(atomb)
41148 if not matches:
41149 - matches = vardb.match(atomb)
41150 - if not matches:
41151 - return False
41152 - repository = vardb.aux_get(best(matches), ['repository'])[0]
41153 - return repository == repo_name or \
41154 - (repo_name == master_repo and repository not in repo_map)
41155 -
41156 - update_config_files(root,
41157 - shlex_split(mysettings.get("CONFIG_PROTECT", "")),
41158 - shlex_split(mysettings.get("CONFIG_PROTECT_MASK", "")),
41159 - repo_map, match_callback=_config_repo_match)
41160 -
41161 - # The above global updates proceed quickly, so they
41162 - # are considered a single mtimedb transaction.
41163 - if timestamps:
41164 - # We do not update the mtime in the mtimedb
41165 - # until after _all_ of the above updates have
41166 - # been processed because the mtimedb will
41167 - # automatically commit when killed by ctrl C.
41168 - for mykey, mtime in timestamps.items():
41169 - prev_mtimes[mykey] = mtime
41170 -
41171 - do_upgrade_packagesmessage = False
41172 - # We gotta do the brute force updates for these now.
41173 - if True:
41174 - def onUpdate(maxval, curval):
41175 + return False
41176 + repository = vardb.aux_get(best(matches), ['repository'])[0]
41177 + return repository == repo_name or \
41178 + (repo_name == master_repo and repository not in repo_map)
41179 +
41180 + update_config_files(root,
41181 + shlex_split(mysettings.get("CONFIG_PROTECT", "")),
41182 + shlex_split(mysettings.get("CONFIG_PROTECT_MASK", "")),
41183 + repo_map, match_callback=_config_repo_match)
41184 +
41185 + # The above global updates proceed quickly, so they
41186 + # are considered a single mtimedb transaction.
41187 + if timestamps:
41188 + # We do not update the mtime in the mtimedb
41189 + # until after _all_ of the above updates have
41190 + # been processed because the mtimedb will
41191 + # automatically commit when killed by ctrl C.
41192 + for mykey, mtime in timestamps.items():
41193 + prev_mtimes[mykey] = mtime
41194 +
41195 + do_upgrade_packagesmessage = False
41196 + # We gotta do the brute force updates for these now.
41197 + if True:
41198 + def onUpdate(_maxval, curval):
41199 + if curval > 0:
41200 + writemsg_stdout("#")
41201 + if quiet:
41202 + onUpdate = None
41203 + vardb.update_ents(repo_map, onUpdate=onUpdate)
41204 + if bindb:
41205 + def onUpdate(_maxval, curval):
41206 if curval > 0:
41207 - writemsg_stdout("#")
41208 + writemsg_stdout("*")
41209 if quiet:
41210 onUpdate = None
41211 - vardb.update_ents(repo_map, onUpdate=onUpdate)
41212 - if bindb:
41213 - def onUpdate(maxval, curval):
41214 - if curval > 0:
41215 - writemsg_stdout("*")
41216 - if quiet:
41217 - onUpdate = None
41218 - bindb.update_ents(repo_map, onUpdate=onUpdate)
41219 - else:
41220 - do_upgrade_packagesmessage = 1
41221 -
41222 - # Update progress above is indicated by characters written to stdout so
41223 - # we print a couple new lines here to separate the progress output from
41224 - # what follows.
41225 - writemsg_stdout("\n\n")
41226 -
41227 - if do_upgrade_packagesmessage and bindb and \
41228 - bindb.cpv_all():
41229 - writemsg_stdout(_(" ** Skipping packages. Run 'fixpackages' or set it in FEATURES to fix the tbz2's in the packages directory.\n"))
41230 - writemsg_stdout(bold(_("Note: This can take a very long time.")))
41231 - writemsg_stdout("\n")
41232 + bindb.update_ents(repo_map, onUpdate=onUpdate)
41233 + else:
41234 + do_upgrade_packagesmessage = 1
41235 +
41236 + # Update progress above is indicated by characters written to stdout so
41237 + # we print a couple new lines here to separate the progress output from
41238 + # what follows.
41239 + writemsg_stdout("\n\n")
41240 +
41241 + if do_upgrade_packagesmessage and bindb and \
41242 + bindb.cpv_all():
41243 + writemsg_stdout(_(" ** Skipping packages. Run 'fixpackages' or set it in FEATURES to fix the tbz2's in the packages directory.\n"))
41244 + writemsg_stdout(bold(_("Note: This can take a very long time.")))
41245 + writemsg_stdout("\n")
41246
41247 return retupd
41248
41249 diff --git a/pym/portage/_legacy_globals.py b/pym/portage/_legacy_globals.py
41250 index abffa0e..bb9691a 100644
41251 --- a/pym/portage/_legacy_globals.py
41252 +++ b/pym/portage/_legacy_globals.py
41253 @@ -27,7 +27,8 @@ def _get_legacy_global(name):
41254 os.umask(0o22)
41255
41256 kwargs = {}
41257 - for k, envvar in (("config_root", "PORTAGE_CONFIGROOT"), ("target_root", "ROOT")):
41258 + for k, envvar in (("config_root", "PORTAGE_CONFIGROOT"),
41259 + ("target_root", "ROOT"), ("eprefix", "EPREFIX")):
41260 kwargs[k] = os.environ.get(envvar)
41261
41262 portage._initializing_globals = True
41263
41264 diff --git a/pym/portage/_selinux.py b/pym/portage/_selinux.py
41265 index 1737145..2a7194c 100644
41266 --- a/pym/portage/_selinux.py
41267 +++ b/pym/portage/_selinux.py
41268 @@ -1,4 +1,4 @@
41269 -# Copyright 1999-2011 Gentoo Foundation
41270 +# Copyright 1999-2014 Gentoo Foundation
41271 # Distributed under the terms of the GNU General Public License v2
41272
41273 # Don't use the unicode-wrapped os and shutil modules here since
41274 @@ -8,18 +8,18 @@ import shutil
41275
41276 import portage
41277 from portage import _encodings
41278 -from portage import _unicode_decode
41279 -from portage import _unicode_encode
41280 +from portage import _native_string, _unicode_decode
41281 from portage.localization import _
41282 portage.proxy.lazyimport.lazyimport(globals(),
41283 'selinux')
41284
41285 def copyfile(src, dest):
41286 - src = _unicode_encode(src, encoding=_encodings['fs'], errors='strict')
41287 - dest = _unicode_encode(dest, encoding=_encodings['fs'], errors='strict')
41288 + src = _native_string(src, encoding=_encodings['fs'], errors='strict')
41289 + dest = _native_string(dest, encoding=_encodings['fs'], errors='strict')
41290 (rc, ctx) = selinux.lgetfilecon(src)
41291 if rc < 0:
41292 - src = _unicode_decode(src, encoding=_encodings['fs'], errors='replace')
41293 + if sys.hexversion < 0x3000000:
41294 + src = _unicode_decode(src, encoding=_encodings['fs'], errors='replace')
41295 raise OSError(_("copyfile: Failed getting context of \"%s\".") % src)
41296
41297 setfscreate(ctx)
41298 @@ -39,12 +39,12 @@ def is_selinux_enabled():
41299 return selinux.is_selinux_enabled()
41300
41301 def mkdir(target, refdir):
41302 - target = _unicode_encode(target, encoding=_encodings['fs'], errors='strict')
41303 - refdir = _unicode_encode(refdir, encoding=_encodings['fs'], errors='strict')
41304 + target = _native_string(target, encoding=_encodings['fs'], errors='strict')
41305 + refdir = _native_string(refdir, encoding=_encodings['fs'], errors='strict')
41306 (rc, ctx) = selinux.getfilecon(refdir)
41307 if rc < 0:
41308 - refdir = _unicode_decode(refdir, encoding=_encodings['fs'],
41309 - errors='replace')
41310 + if sys.hexversion < 0x3000000:
41311 + refdir = _unicode_decode(refdir, encoding=_encodings['fs'], errors='replace')
41312 raise OSError(
41313 _("mkdir: Failed getting context of reference directory \"%s\".") \
41314 % refdir)
41315 @@ -56,16 +56,17 @@ def mkdir(target, refdir):
41316 setfscreate()
41317
41318 def rename(src, dest):
41319 - src = _unicode_encode(src, encoding=_encodings['fs'], errors='strict')
41320 - dest = _unicode_encode(dest, encoding=_encodings['fs'], errors='strict')
41321 + src = _native_string(src, encoding=_encodings['fs'], errors='strict')
41322 + dest = _native_string(dest, encoding=_encodings['fs'], errors='strict')
41323 (rc, ctx) = selinux.lgetfilecon(src)
41324 if rc < 0:
41325 - src = _unicode_decode(src, encoding=_encodings['fs'], errors='replace')
41326 + if sys.hexversion < 0x3000000:
41327 + src = _unicode_decode(src, encoding=_encodings['fs'], errors='replace')
41328 raise OSError(_("rename: Failed getting context of \"%s\".") % src)
41329
41330 setfscreate(ctx)
41331 try:
41332 - os.rename(src,dest)
41333 + os.rename(src, dest)
41334 finally:
41335 setfscreate()
41336
41337 @@ -75,10 +76,10 @@ def settype(newtype):
41338 return ":".join(ret)
41339
41340 def setexec(ctx="\n"):
41341 - ctx = _unicode_encode(ctx, encoding=_encodings['content'], errors='strict')
41342 + ctx = _native_string(ctx, encoding=_encodings['content'], errors='strict')
41343 if selinux.setexeccon(ctx) < 0:
41344 - ctx = _unicode_decode(ctx, encoding=_encodings['content'],
41345 - errors='replace')
41346 + if sys.hexversion < 0x3000000:
41347 + ctx = _unicode_decode(ctx, encoding=_encodings['content'], errors='replace')
41348 if selinux.security_getenforce() == 1:
41349 raise OSError(_("Failed setting exec() context \"%s\".") % ctx)
41350 else:
41351 @@ -87,11 +88,10 @@ def setexec(ctx="\n"):
41352 noiselevel=-1)
41353
41354 def setfscreate(ctx="\n"):
41355 - ctx = _unicode_encode(ctx,
41356 - encoding=_encodings['content'], errors='strict')
41357 + ctx = _native_string(ctx, encoding=_encodings['content'], errors='strict')
41358 if selinux.setfscreatecon(ctx) < 0:
41359 - ctx = _unicode_decode(ctx,
41360 - encoding=_encodings['content'], errors='replace')
41361 + if sys.hexversion < 0x3000000:
41362 + ctx = _unicode_decode(ctx, encoding=_encodings['content'], errors='replace')
41363 raise OSError(
41364 _("setfscreate: Failed setting fs create context \"%s\".") % ctx)
41365
41366 @@ -106,8 +106,7 @@ class spawn_wrapper(object):
41367
41368 def __init__(self, spawn_func, selinux_type):
41369 self._spawn_func = spawn_func
41370 - selinux_type = _unicode_encode(selinux_type,
41371 - encoding=_encodings['content'], errors='strict')
41372 + selinux_type = _native_string(selinux_type, encoding=_encodings['content'], errors='strict')
41373 self._con = settype(selinux_type)
41374
41375 def __call__(self, *args, **kwargs):
41376 @@ -123,13 +122,13 @@ class spawn_wrapper(object):
41377 return self._spawn_func(*args, **kwargs)
41378
41379 def symlink(target, link, reflnk):
41380 - target = _unicode_encode(target, encoding=_encodings['fs'], errors='strict')
41381 - link = _unicode_encode(link, encoding=_encodings['fs'], errors='strict')
41382 - reflnk = _unicode_encode(reflnk, encoding=_encodings['fs'], errors='strict')
41383 + target = _native_string(target, encoding=_encodings['fs'], errors='strict')
41384 + link = _native_string(link, encoding=_encodings['fs'], errors='strict')
41385 + reflnk = _native_string(reflnk, encoding=_encodings['fs'], errors='strict')
41386 (rc, ctx) = selinux.lgetfilecon(reflnk)
41387 if rc < 0:
41388 - reflnk = _unicode_decode(reflnk, encoding=_encodings['fs'],
41389 - errors='replace')
41390 + if sys.hexversion < 0x3000000:
41391 + reflnk = _unicode_decode(reflnk, encoding=_encodings['fs'], errors='replace')
41392 raise OSError(
41393 _("symlink: Failed getting context of reference symlink \"%s\".") \
41394 % reflnk)
41395
41396 diff --git a/pym/portage/_sets/__init__.py b/pym/portage/_sets/__init__.py
41397 index c3b590e..75d1df7 100644
41398 --- a/pym/portage/_sets/__init__.py
41399 +++ b/pym/portage/_sets/__init__.py
41400 @@ -17,6 +17,7 @@ try:
41401 from configparser import SafeConfigParser
41402 except ImportError:
41403 from ConfigParser import SafeConfigParser, NoOptionError, ParsingError
41404 +import portage
41405 from portage import os
41406 from portage import load_mod
41407 from portage import _unicode_decode
41408 @@ -124,6 +125,10 @@ class SetConfig(object):
41409 parser.add_section("system")
41410 parser.set("system", "class", "portage.sets.profiles.PackagesSystemSet")
41411
41412 + parser.remove_section("security")
41413 + parser.add_section("security")
41414 + parser.set("security", "class", "portage.sets.security.NewAffectedSet")
41415 +
41416 parser.remove_section("usersets")
41417 parser.add_section("usersets")
41418 parser.set("usersets", "class", "portage.sets.files.StaticFileSet")
41419 @@ -131,6 +136,27 @@ class SetConfig(object):
41420 parser.set("usersets", "directory", "%(PORTAGE_CONFIGROOT)setc/portage/sets")
41421 parser.set("usersets", "world-candidate", "true")
41422
41423 + parser.remove_section("live-rebuild")
41424 + parser.add_section("live-rebuild")
41425 + parser.set("live-rebuild", "class", "portage.sets.dbapi.VariableSet")
41426 + parser.set("live-rebuild", "variable", "INHERITED")
41427 + parser.set("live-rebuild", "includes", " ".join(sorted(portage.const.LIVE_ECLASSES)))
41428 +
41429 + parser.remove_section("module-rebuild")
41430 + parser.add_section("module-rebuild")
41431 + parser.set("module-rebuild", "class", "portage.sets.dbapi.OwnerSet")
41432 + parser.set("module-rebuild", "files", "/lib/modules")
41433 +
41434 + parser.remove_section("preserved-rebuild")
41435 + parser.add_section("preserved-rebuild")
41436 + parser.set("preserved-rebuild", "class", "portage.sets.libs.PreservedLibraryConsumerSet")
41437 +
41438 + parser.remove_section("x11-module-rebuild")
41439 + parser.add_section("x11-module-rebuild")
41440 + parser.set("x11-module-rebuild", "class", "portage.sets.dbapi.OwnerSet")
41441 + parser.set("x11-module-rebuild", "files", "/usr/lib/xorg/modules")
41442 + parser.set("x11-module-rebuild", "exclude-files", "/usr/bin/Xorg")
41443 +
41444 def update(self, setname, options):
41445 parser = self._parser
41446 self.errors = []
41447 @@ -270,8 +296,8 @@ def load_default_config(settings, trees):
41448 return SetConfig(None, settings, trees)
41449
41450 global_config_path = GLOBAL_CONFIG_PATH
41451 - if settings['EPREFIX']:
41452 - global_config_path = os.path.join(settings['EPREFIX'],
41453 + if portage.const.EPREFIX:
41454 + global_config_path = os.path.join(portage.const.EPREFIX,
41455 GLOBAL_CONFIG_PATH.lstrip(os.sep))
41456 def _getfiles():
41457 for path, dirs, files in os.walk(os.path.join(global_config_path, "sets")):
41458
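[Note] The hunk above hard-codes defaults for the security, live-rebuild, module-rebuild, preserved-rebuild and x11-module-rebuild sets, so they resolve even without sets.conf entries. A rough usage sketch, assuming an initialized Portage environment and the portage._sets API as shown in this file:

    import portage
    from portage._sets import load_default_config

    eroot = portage.settings['EROOT']
    set_config = load_default_config(portage.settings, portage.db[eroot])
    print(sorted(set_config.getSets()))               # now includes 'module-rebuild', 'live-rebuild', ...
    print(set_config.getSetAtoms('module-rebuild'))   # owners of files under /lib/modules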
41459 diff --git a/pym/portage/_sets/base.py b/pym/portage/_sets/base.py
41460 index c8d3ae4..ee20d36 100644
41461 --- a/pym/portage/_sets/base.py
41462 +++ b/pym/portage/_sets/base.py
41463 @@ -1,4 +1,4 @@
41464 -# Copyright 2007-2011 Gentoo Foundation
41465 +# Copyright 2007-2014 Gentoo Foundation
41466 # Distributed under the terms of the GNU General Public License v2
41467
41468 import sys
41469 @@ -7,6 +7,7 @@ from portage.exception import InvalidAtom
41470 from portage.versions import cpv_getkey
41471
41472 if sys.hexversion >= 0x3000000:
41473 + # pylint: disable=W0622
41474 basestring = str
41475
41476 OPERATIONS = ["merge", "unmerge"]
41477 @@ -126,7 +127,7 @@ class PackageSet(object):
41478
41479 if modified_use is not None and modified_use is not pkg.use.enabled:
41480 pkg = pkg.copy()
41481 - pkg.metadata["USE"] = " ".join(modified_use)
41482 + pkg._metadata["USE"] = " ".join(modified_use)
41483
41484 # Atoms matched via PROVIDE must be temporarily transformed since
41485 # match_from_list() only works correctly when atom.cp == pkg.cp.
41486 @@ -156,7 +157,7 @@ class PackageSet(object):
41487 for atom in atoms:
41488 if match_from_list(atom, cpv_slot_list):
41489 yield atom
41490 - provides = pkg.metadata['PROVIDE']
41491 + provides = pkg._metadata['PROVIDE']
41492 if not provides:
41493 return
41494 provides = provides.split()
41495
41496 diff --git a/pym/portage/_sets/dbapi.py b/pym/portage/_sets/dbapi.py
41497 index 4982a92..384fb3a 100644
41498 --- a/pym/portage/_sets/dbapi.py
41499 +++ b/pym/portage/_sets/dbapi.py
41500 @@ -26,8 +26,7 @@ class EverythingSet(PackageSet):
41501
41502 def load(self):
41503 myatoms = []
41504 - db_keys = ["SLOT"]
41505 - aux_get = self._db.aux_get
41506 + pkg_str = self._db._pkg_str
41507 cp_list = self._db.cp_list
41508
41509 for cp in self._db.cp_all():
41510 @@ -35,8 +34,8 @@ class EverythingSet(PackageSet):
41511 # NOTE: Create SLOT atoms even when there is only one
41512 # SLOT installed, in order to avoid the possibility
41513 # of unwanted upgrades as reported in bug #338959.
41514 - slot, = aux_get(cpv, db_keys)
41515 - atom = Atom("%s:%s" % (cp, slot))
41516 + pkg = pkg_str(cpv, None)
41517 + atom = Atom("%s:%s" % (pkg.cp, pkg.slot))
41518 if self._filter:
41519 if self._filter(atom):
41520 myatoms.append(atom)
41521 @@ -68,20 +67,19 @@ class OwnerSet(PackageSet):
41522 """
41523 rValue = set()
41524 vardb = self._db
41525 - aux_get = vardb.aux_get
41526 - aux_keys = ["SLOT"]
41527 + pkg_str = vardb._pkg_str
41528 if exclude_paths is None:
41529 for link, p in vardb._owners.iter_owners(paths):
41530 - slot, = aux_get(link.mycpv, aux_keys)
41531 - rValue.add("%s:%s" % (link.mycpv.cp, slot))
41532 + pkg = pkg_str(link.mycpv, None)
41533 + rValue.add("%s:%s" % (pkg.cp, pkg.slot))
41534 else:
41535 all_paths = set()
41536 all_paths.update(paths)
41537 all_paths.update(exclude_paths)
41538 exclude_atoms = set()
41539 for link, p in vardb._owners.iter_owners(all_paths):
41540 - slot, = aux_get(link.mycpv, aux_keys)
41541 - atom = "%s:%s" % (link.mycpv.cp, slot)
41542 + pkg = pkg_str(link.mycpv, None)
41543 + atom = "%s:%s" % (pkg.cp, pkg.slot)
41544 rValue.add(atom)
41545 if p in exclude_paths:
41546 exclude_atoms.add(atom)
41547 @@ -173,12 +171,11 @@ class DowngradeSet(PackageSet):
41548 xmatch = self._portdb.xmatch
41549 xmatch_level = "bestmatch-visible"
41550 cp_list = self._vardb.cp_list
41551 - aux_get = self._vardb.aux_get
41552 - aux_keys = ["SLOT"]
41553 + pkg_str = self._vardb._pkg_str
41554 for cp in self._vardb.cp_all():
41555 for cpv in cp_list(cp):
41556 - slot, = aux_get(cpv, aux_keys)
41557 - slot_atom = "%s:%s" % (cp, slot)
41558 + pkg = pkg_str(cpv, None)
41559 + slot_atom = "%s:%s" % (pkg.cp, pkg.slot)
41560 ebuild = xmatch(xmatch_level, slot_atom)
41561 if not ebuild:
41562 continue
41563 @@ -326,6 +323,7 @@ class CategorySet(PackageSet):
41564
41565 class AgeSet(EverythingSet):
41566 _operations = ["merge", "unmerge"]
41567 + _aux_keys = ('BUILD_TIME',)
41568
41569 def __init__(self, vardb, mode="older", age=7):
41570 super(AgeSet, self).__init__(vardb)
41571 @@ -335,8 +333,12 @@ class AgeSet(EverythingSet):
41572 def _filter(self, atom):
41573
41574 cpv = self._db.match(atom)[0]
41575 - path = self._db.getpath(cpv, filename="COUNTER")
41576 - age = (time.time() - os.stat(path).st_mtime) / (3600 * 24)
41577 + try:
41578 + date, = self._db.aux_get(cpv, self._aux_keys)
41579 + date = int(date)
41580 + except (KeyError, ValueError):
41581 + return bool(self._mode == "older")
41582 + age = (time.time() - date) / (3600 * 24)
41583 if ((self._mode == "older" and age <= self._age) \
41584 or (self._mode == "newer" and age >= self._age)):
41585 return False
41586 @@ -355,6 +357,83 @@ class AgeSet(EverythingSet):
41587
41588 singleBuilder = classmethod(singleBuilder)
41589
41590 +class DateSet(EverythingSet):
41591 + _operations = ["merge", "unmerge"]
41592 + _aux_keys = ('BUILD_TIME',)
41593 +
41594 + def __init__(self, vardb, date, mode="older"):
41595 + super(DateSet, self).__init__(vardb)
41596 + self._mode = mode
41597 + self._date = date
41598 +
41599 + def _filter(self, atom):
41600 +
41601 + cpv = self._db.match(atom)[0]
41602 + try:
41603 + date, = self._db.aux_get(cpv, self._aux_keys)
41604 + date = int(date)
41605 + except (KeyError, ValueError):
41606 + return bool(self._mode == "older")
41607 + # Make sure inequality is _strict_ to exclude tested package
41608 + if ((self._mode == "older" and date < self._date) \
41609 + or (self._mode == "newer" and date > self._date)):
41610 + return True
41611 + else:
41612 + return False
41613 +
41614 + def singleBuilder(cls, options, settings, trees):
41615 + vardbapi = trees["vartree"].dbapi
41616 + mode = options.get("mode", "older")
41617 + if str(mode).lower() not in ["newer", "older"]:
41618 + raise SetConfigError(_("invalid 'mode' value %s (use either 'newer' or 'older')") % mode)
41619 +
41620 + formats = []
41621 + if options.get("package") is not None:
41622 + formats.append("package")
41623 + if options.get("filestamp") is not None:
41624 + formats.append("filestamp")
41625 + if options.get("seconds") is not None:
41626 + formats.append("seconds")
41627 + if options.get("date") is not None:
41628 + formats.append("date")
41629 +
41630 + if not formats:
41631 + raise SetConfigError(_("none of these options specified: 'package', 'filestamp', 'seconds', 'date'"))
41632 + elif len(formats) > 1:
41633 + raise SetConfigError(_("no more than one of these options is allowed: 'package', 'filestamp', 'seconds', 'date'"))
41634 +
41635 + format = formats[0]
41636 +
41637 + if (format == "package"):
41638 + package = options.get("package")
41639 + try:
41640 + cpv = vardbapi.match(package)[0]
41641 + date, = vardbapi.aux_get(cpv, ('BUILD_TIME',))
41642 + date = int(date)
41643 + except (KeyError, ValueError):
41644 + raise SetConfigError(_("cannot determine installation date of package %s") % package)
41645 + elif (format == "filestamp"):
41646 + filestamp = options.get("filestamp")
41647 + try:
41648 + date = int(os.stat(filestamp).st_mtime)
41649 + except (OSError, ValueError):
41650 + raise SetConfigError(_("cannot determine 'filestamp' of '%s'") % filestamp)
41651 + elif (format == "seconds"):
41652 + try:
41653 + date = int(options.get("seconds"))
41654 + except ValueError:
41655 + raise SetConfigError(_("option 'seconds' must be an integer"))
41656 + else:
41657 + dateopt = options.get("date")
41658 + try:
41659 + dateformat = options.get("dateformat", "%x %X")
41660 + date = int(time.mktime(time.strptime(dateopt, dateformat)))
41661 + except ValueError:
41662 + raise SetConfigError(_("'date=%s' does not match 'dateformat=%s'") % (dateopt, dateformat))
41663 + return DateSet(vardb=vardbapi, date=date, mode=mode)
41664 +
41665 + singleBuilder = classmethod(singleBuilder)
41666 +
41667 class RebuiltBinaries(EverythingSet):
41668 _operations = ('merge',)
41669 _aux_keys = ('BUILD_TIME',)
41670
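[Note] The new DateSet builder above accepts exactly one of 'package', 'filestamp', 'seconds' or 'date' (plus an optional 'dateformat', defaulting to "%x %X") and compares each installed package's BUILD_TIME against the resulting Unix timestamp. A minimal sketch of the 'date' branch with illustrative values:

    import time

    dateopt = "01/01/2014 00:00:00"
    dateformat = "%m/%d/%Y %H:%M:%S"
    date = int(time.mktime(time.strptime(dateopt, dateformat)))
    # With mode="older", packages whose BUILD_TIME is strictly less than
    # 'date' match; with mode="newer", strictly greater (see DateSet._filter).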
41671 diff --git a/pym/portage/_sets/files.py b/pym/portage/_sets/files.py
41672 index b891ea4..2fb64de 100644
41673 --- a/pym/portage/_sets/files.py
41674 +++ b/pym/portage/_sets/files.py
41675 @@ -1,4 +1,4 @@
41676 -# Copyright 2007-2012 Gentoo Foundation
41677 +# Copyright 2007-2013 Gentoo Foundation
41678 # Distributed under the terms of the GNU General Public License v2
41679
41680 import errno
41681 @@ -86,8 +86,8 @@ class StaticFileSet(EditablePackageSet):
41682 for a in data:
41683 matches = self.dbapi.match(a)
41684 for cpv in matches:
41685 - atoms.append("%s:%s" % (cpv_getkey(cpv),
41686 - self.dbapi.aux_get(cpv, ["SLOT"])[0]))
41687 + pkg = self.dbapi._pkg_str(cpv, None)
41688 + atoms.append("%s:%s" % (pkg.cp, pkg.slot))
41689 # In addition to any installed slots, also try to pull
41690 # in the latest new slot that may be available.
41691 atoms.append(a)
41692 @@ -296,10 +296,14 @@ class WorldSelectedSet(EditablePackageSet):
41693 ensure_dirs(os.path.dirname(self._filename), gid=portage_gid, mode=0o2750, mask=0o2)
41694
41695 def lock(self):
41696 + if self._lock is not None:
41697 + raise AssertionError("already locked")
41698 self._ensure_dirs()
41699 self._lock = lockfile(self._filename, wantnewlockfile=1)
41700
41701 def unlock(self):
41702 + if self._lock is None:
41703 + raise AssertionError("not locked")
41704 unlockfile(self._lock)
41705 self._lock = None
41706
41707
41708 diff --git a/pym/portage/_sets/libs.py b/pym/portage/_sets/libs.py
41709 index 6c5babc..022e076 100644
41710 --- a/pym/portage/_sets/libs.py
41711 +++ b/pym/portage/_sets/libs.py
41712 @@ -1,12 +1,12 @@
41713 -# Copyright 2007-2011 Gentoo Foundation
41714 +# Copyright 2007-2013 Gentoo Foundation
41715 # Distributed under the terms of the GNU General Public License v2
41716
41717 from __future__ import print_function
41718
41719 +from portage.exception import InvalidData
41720 from portage.localization import _
41721 from portage._sets.base import PackageSet
41722 from portage._sets import get_boolean, SetConfigError
41723 -from portage.versions import cpv_getkey
41724 import portage
41725
41726 class LibraryConsumerSet(PackageSet):
41727 @@ -22,14 +22,14 @@ class LibraryConsumerSet(PackageSet):
41728 for p in paths:
41729 for cpv in self.dbapi._linkmap.getOwners(p):
41730 try:
41731 - slot, = self.dbapi.aux_get(cpv, ["SLOT"])
41732 - except KeyError:
41733 + pkg = self.dbapi._pkg_str(cpv, None)
41734 + except (KeyError, InvalidData):
41735 # This is expected for preserved libraries
41736 # of packages that have been uninstalled
41737 # without replacement.
41738 pass
41739 else:
41740 - rValue.add("%s:%s" % (cpv_getkey(cpv), slot))
41741 + rValue.add("%s:%s" % (pkg.cp, pkg.slot))
41742 return rValue
41743
41744 class LibraryFileConsumerSet(LibraryConsumerSet):
41745 @@ -49,7 +49,8 @@ class LibraryFileConsumerSet(LibraryConsumerSet):
41746 def load(self):
41747 consumers = set()
41748 for lib in self.files:
41749 - consumers.update(self.dbapi._linkmap.findConsumers(lib))
41750 + consumers.update(
41751 + self.dbapi._linkmap.findConsumers(lib, greedy=False))
41752
41753 if not consumers:
41754 return
41755 @@ -77,10 +78,10 @@ class PreservedLibraryConsumerSet(LibraryConsumerSet):
41756 for lib in libs:
41757 if self.debug:
41758 print(lib)
41759 - for x in sorted(self.dbapi._linkmap.findConsumers(lib)):
41760 + for x in sorted(self.dbapi._linkmap.findConsumers(lib, greedy=False)):
41761 print(" ", x)
41762 print("-"*40)
41763 - consumers.update(self.dbapi._linkmap.findConsumers(lib))
41764 + consumers.update(self.dbapi._linkmap.findConsumers(lib, greedy=False))
41765 # Don't rebuild packages just because they contain preserved
41766 # libs that happen to be consumers of other preserved libs.
41767 for libs in plib_dict.values():
41768
41769 diff --git a/pym/portage/_sets/security.py b/pym/portage/_sets/security.py
41770 index 7e856bc..f8dbef2 100644
41771 --- a/pym/portage/_sets/security.py
41772 +++ b/pym/portage/_sets/security.py
41773 @@ -44,8 +44,8 @@ class SecuritySet(PackageSet):
41774 mydict = {}
41775 for atom in atomlist[:]:
41776 cpv = self._portdbapi.xmatch("match-all", atom)[0]
41777 - slot = self._portdbapi.aux_get(cpv, ["SLOT"])[0]
41778 - cps = "%s:%s" % (cpv.cp, slot)
41779 + pkg = self._portdbapi._pkg_str(cpv, None)
41780 + cps = "%s:%s" % (pkg.cp, pkg.slot)
41781 if not cps in mydict:
41782 mydict[cps] = (atom, cpv)
41783 else:
41784
41785 diff --git a/pym/portage/cache/ebuild_xattr.py b/pym/portage/cache/ebuild_xattr.py
41786 index 0086e40..db6e177 100644
41787 --- a/pym/portage/cache/ebuild_xattr.py
41788 +++ b/pym/portage/cache/ebuild_xattr.py
41789 @@ -1,4 +1,4 @@
41790 -# -*- coding: UTF8 -*-
41791 +# -*- coding: utf-8 -*-
41792 # Copyright: 2009-2011 Gentoo Foundation
41793 # Author(s): Petteri Räty (betelgeuse@g.o)
41794 # License: GPL2
41795
41796 diff --git a/pym/portage/cache/flat_hash.py b/pym/portage/cache/flat_hash.py
41797 index 2eae9f6..5304296 100644
41798 --- a/pym/portage/cache/flat_hash.py
41799 +++ b/pym/portage/cache/flat_hash.py
41800 @@ -1,7 +1,9 @@
41801 -# Copyright: 2005-2011 Gentoo Foundation
41802 +# Copyright 2005-2014 Gentoo Foundation
41803 # Distributed under the terms of the GNU General Public License v2
41804 # Author(s): Brian Harring (ferringb@g.o)
41805
41806 +from __future__ import unicode_literals
41807 +
41808 from portage.cache import fs_template
41809 from portage.cache import cache_errors
41810 import errno
41811 @@ -11,16 +13,14 @@ import sys
41812 import os as _os
41813 from portage import os
41814 from portage import _encodings
41815 -from portage import _unicode_decode
41816 from portage import _unicode_encode
41817 +from portage.exception import InvalidData
41818 +from portage.versions import _pkg_str
41819
41820 if sys.hexversion >= 0x3000000:
41821 + # pylint: disable=W0622
41822 long = int
41823
41824 -# Coerce to unicode, in order to prevent TypeError when writing
41825 -# raw bytes to TextIOWrapper with python2.
41826 -_setitem_fmt = _unicode_decode("%s=%s\n")
41827 -
41828 class database(fs_template.FsBased):
41829
41830 autocommits = True
41831 @@ -40,11 +40,10 @@ class database(fs_template.FsBased):
41832 # Don't use os.path.join, for better performance.
41833 fp = self.location + _os.sep + cpv
41834 try:
41835 - myf = io.open(_unicode_encode(fp,
41836 + with io.open(_unicode_encode(fp,
41837 encoding=_encodings['fs'], errors='strict'),
41838 mode='r', encoding=_encodings['repo.content'],
41839 - errors='replace')
41840 - try:
41841 + errors='replace') as myf:
41842 lines = myf.read().split("\n")
41843 if not lines[-1]:
41844 lines.pop()
41845 @@ -54,8 +53,6 @@ class database(fs_template.FsBased):
41846 # that uses mtime mangling.
41847 d['_mtime_'] = _os.fstat(myf.fileno())[stat.ST_MTIME]
41848 return d
41849 - finally:
41850 - myf.close()
41851 except (IOError, OSError) as e:
41852 if e.errno != errno.ENOENT:
41853 raise cache_errors.CacheCorruption(cpv, e)
41854 @@ -94,7 +91,10 @@ class database(fs_template.FsBased):
41855 v = values.get(k)
41856 if not v:
41857 continue
41858 - myf.write(_setitem_fmt % (k, v))
41859 + # NOTE: This format string requires unicode_literals, so that
41860 + # k and v are coerced to unicode, in order to prevent TypeError
41861 + # when writing raw bytes to TextIOWrapper with Python 2.
41862 + myf.write("%s=%s\n" % (k, v))
41863 finally:
41864 myf.close()
41865 self._ensure_access(fp)
41866 @@ -135,8 +135,6 @@ class database(fs_template.FsBased):
41867 del e
41868 continue
41869 for l in dir_list:
41870 - if l.endswith(".cpickle"):
41871 - continue
41872 p = os.path.join(dir_path, l)
41873 try:
41874 st = os.lstat(p)
41875 @@ -151,7 +149,11 @@ class database(fs_template.FsBased):
41876 if depth < 1:
41877 dirs.append((depth+1, p))
41878 continue
41879 - yield p[len_base+1:]
41880 +
41881 + try:
41882 + yield _pkg_str(p[len_base+1:])
41883 + except InvalidData:
41884 + continue
41885
41886
41887 class md5_database(database):
41888
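[Note] The flat_hash change above drops the pre-decoded _setitem_fmt and relies on 'from __future__ import unicode_literals' instead, since Python 2's TextIOWrapper only accepts unicode text. A tiny illustration of the pitfall being avoided; the file name is illustrative:

    from __future__ import unicode_literals
    import io

    with io.open('/tmp/flat-hash-demo', mode='w', encoding='utf_8') as f:
        f.write("%s=%s\n" % ("EAPI", "5"))   # unicode literal: fine on Python 2 and 3
        # Without unicode_literals, Python 2 would pass a byte string here and
        # TextIOWrapper.write() would raise TypeError.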
41889 diff --git a/pym/portage/cache/flat_list.py b/pym/portage/cache/flat_list.py
41890 deleted file mode 100644
41891 index 7288307..0000000
41892 --- a/pym/portage/cache/flat_list.py
41893 +++ /dev/null
41894 @@ -1,134 +0,0 @@
41895 -# Copyright 2005-2011 Gentoo Foundation
41896 -# Distributed under the terms of the GNU General Public License v2
41897 -
41898 -from portage.cache import fs_template
41899 -from portage.cache import cache_errors
41900 -from portage import os
41901 -from portage import _encodings
41902 -from portage import _unicode_decode
41903 -from portage import _unicode_encode
41904 -import errno
41905 -import io
41906 -import stat
41907 -import sys
41908 -
41909 -if sys.hexversion >= 0x3000000:
41910 - long = int
41911 -
41912 -# Coerce to unicode, in order to prevent TypeError when writing
41913 -# raw bytes to TextIOWrapper with python2.
41914 -_setitem_fmt = _unicode_decode("%s\n")
41915 -
41916 -# store the current key order *here*.
41917 -class database(fs_template.FsBased):
41918 -
41919 - autocommits = True
41920 -
41921 - # do not screw with this ordering. _eclasses_ needs to be last
41922 - auxdbkey_order=('DEPEND', 'RDEPEND', 'SLOT', 'SRC_URI',
41923 - 'RESTRICT', 'HOMEPAGE', 'LICENSE', 'DESCRIPTION',
41924 - 'KEYWORDS', 'IUSE', 'REQUIRED_USE',
41925 - 'PDEPEND', 'PROVIDE', 'EAPI', 'PROPERTIES', 'DEFINED_PHASES')
41926 -
41927 - def __init__(self, *args, **config):
41928 - super(database,self).__init__(*args, **config)
41929 - self.location = os.path.join(self.location,
41930 - self.label.lstrip(os.path.sep).rstrip(os.path.sep))
41931 -
41932 - if len(self._known_keys) > len(self.auxdbkey_order) + 2:
41933 - raise Exception("less ordered keys then auxdbkeys")
41934 - if not os.path.exists(self.location):
41935 - self._ensure_dirs()
41936 -
41937 -
41938 - def _getitem(self, cpv):
41939 - d = {}
41940 - try:
41941 - myf = io.open(_unicode_encode(os.path.join(self.location, cpv),
41942 - encoding=_encodings['fs'], errors='strict'),
41943 - mode='r', encoding=_encodings['repo.content'],
41944 - errors='replace')
41945 - for k,v in zip(self.auxdbkey_order, myf):
41946 - d[k] = v.rstrip("\n")
41947 - except (OSError, IOError) as e:
41948 - if errno.ENOENT == e.errno:
41949 - raise KeyError(cpv)
41950 - raise cache_errors.CacheCorruption(cpv, e)
41951 -
41952 - try:
41953 - d["_mtime_"] = os.fstat(myf.fileno())[stat.ST_MTIME]
41954 - except OSError as e:
41955 - myf.close()
41956 - raise cache_errors.CacheCorruption(cpv, e)
41957 - myf.close()
41958 - return d
41959 -
41960 -
41961 - def _setitem(self, cpv, values):
41962 - s = cpv.rfind("/")
41963 - fp=os.path.join(self.location,cpv[:s],".update.%i.%s" % (os.getpid(), cpv[s+1:]))
41964 - try:
41965 - myf = io.open(_unicode_encode(fp,
41966 - encoding=_encodings['fs'], errors='strict'),
41967 - mode='w', encoding=_encodings['repo.content'],
41968 - errors='backslashreplace')
41969 - except (OSError, IOError) as e:
41970 - if errno.ENOENT == e.errno:
41971 - try:
41972 - self._ensure_dirs(cpv)
41973 - myf = io.open(_unicode_encode(fp,
41974 - encoding=_encodings['fs'], errors='strict'),
41975 - mode='w', encoding=_encodings['repo.content'],
41976 - errors='backslashreplace')
41977 - except (OSError, IOError) as e:
41978 - raise cache_errors.CacheCorruption(cpv, e)
41979 - else:
41980 - raise cache_errors.CacheCorruption(cpv, e)
41981 -
41982 -
41983 - for x in self.auxdbkey_order:
41984 - myf.write(_setitem_fmt % (values.get(x, ""),))
41985 -
41986 - myf.close()
41987 - self._ensure_access(fp, mtime=values["_mtime_"])
41988 - #update written. now we move it.
41989 - new_fp = os.path.join(self.location,cpv)
41990 - try:
41991 - os.rename(fp, new_fp)
41992 - except (OSError, IOError) as e:
41993 - os.remove(fp)
41994 - raise cache_errors.CacheCorruption(cpv, e)
41995 -
41996 -
41997 - def _delitem(self, cpv):
41998 - try:
41999 - os.remove(os.path.join(self.location,cpv))
42000 - except OSError as e:
42001 - if errno.ENOENT == e.errno:
42002 - raise KeyError(cpv)
42003 - else:
42004 - raise cache_errors.CacheCorruption(cpv, e)
42005 -
42006 -
42007 - def __contains__(self, cpv):
42008 - return os.path.exists(os.path.join(self.location, cpv))
42009 -
42010 -
42011 - def __iter__(self):
42012 - """generator for walking the dir struct"""
42013 - dirs = [self.location]
42014 - len_base = len(self.location)
42015 - while len(dirs):
42016 - for l in os.listdir(dirs[0]):
42017 - if l.endswith(".cpickle"):
42018 - continue
42019 - p = os.path.join(dirs[0],l)
42020 - st = os.lstat(p)
42021 - if stat.S_ISDIR(st.st_mode):
42022 - dirs.append(p)
42023 - continue
42024 - yield p[len_base+1:]
42025 - dirs.pop(0)
42026 -
42027 -
42028 - def commit(self): pass
42029
42030 diff --git a/pym/portage/cache/fs_template.py b/pym/portage/cache/fs_template.py
42031 index 8f0636e..de4fe4b 100644
42032 --- a/pym/portage/cache/fs_template.py
42033 +++ b/pym/portage/cache/fs_template.py
42034 @@ -1,4 +1,4 @@
42035 -# Copyright 2005-2012 Gentoo Foundation
42036 +# Copyright 2005-2014 Gentoo Foundation
42037 # Distributed under the terms of the GNU General Public License v2
42038 # Author(s): Brian Harring (ferringb@g.o)
42039
42040 @@ -15,6 +15,7 @@ lazyimport(globals(),
42041 del lazyimport
42042
42043 if sys.hexversion >= 0x3000000:
42044 + # pylint: disable=W0622
42045 long = int
42046
42047 class FsBased(template.database):
42048 @@ -25,7 +26,8 @@ class FsBased(template.database):
42049
42050 for x, y in (("gid", -1), ("perms", -1)):
42051 if x in config:
42052 - setattr(self, "_"+x, config[x])
42053 + # Since Python 3.4, chown requires int type (no proxies).
42054 + setattr(self, "_" + x, int(config[x]))
42055 del config[x]
42056 else:
42057 setattr(self, "_"+x, y)
42058
42059 diff --git a/pym/portage/cache/mappings.py b/pym/portage/cache/mappings.py
42060 index bc8ce9a..cd39a6e 100644
42061 --- a/pym/portage/cache/mappings.py
42062 +++ b/pym/portage/cache/mappings.py
42063 @@ -199,10 +199,10 @@ class OrderedDict(UserDict):
42064 return iter(self._order)
42065
42066 def __setitem__(self, key, item):
42067 - if key in self:
42068 - self._order.remove(key)
42069 + new_key = key not in self
42070 UserDict.__setitem__(self, key, item)
42071 - self._order.append(key)
42072 + if new_key:
42073 + self._order.append(key)
42074
42075 def __delitem__(self, key):
42076 UserDict.__delitem__(self, key)
42077
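[Note] The OrderedDict.__setitem__ change above makes re-assignment keep a key's original position instead of moving it to the end. Illustrative behaviour, assuming the class is importable as shown:

    from portage.cache.mappings import OrderedDict

    d = OrderedDict()
    d['a'] = 1
    d['b'] = 2
    d['a'] = 3
    print(list(d))   # previously ['b', 'a']; with this change ['a', 'b']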
42078 diff --git a/pym/portage/cache/metadata.py b/pym/portage/cache/metadata.py
42079 index 9d2c3a5..0c588bd 100644
42080 --- a/pym/portage/cache/metadata.py
42081 +++ b/pym/portage/cache/metadata.py
42082 @@ -1,4 +1,4 @@
42083 -# Copyright: 2005 Gentoo Foundation
42084 +# Copyright 2005-2014 Gentoo Foundation
42085 # Author(s): Brian Harring (ferringb@g.o)
42086 # License: GPL2
42087
42088 @@ -16,6 +16,7 @@ from portage.cache.template import reconstruct_eclasses
42089 from portage.cache.mappings import ProtectedDict
42090
42091 if sys.hexversion >= 0x3000000:
42092 + # pylint: disable=W0622
42093 basestring = str
42094 long = int
42095
42096 @@ -28,7 +29,8 @@ class database(flat_hash.database):
42097 auxdbkey_order=('DEPEND', 'RDEPEND', 'SLOT', 'SRC_URI',
42098 'RESTRICT', 'HOMEPAGE', 'LICENSE', 'DESCRIPTION',
42099 'KEYWORDS', 'INHERITED', 'IUSE', 'REQUIRED_USE',
42100 - 'PDEPEND', 'PROVIDE', 'EAPI', 'PROPERTIES', 'DEFINED_PHASES')
42101 + 'PDEPEND', 'PROVIDE', 'EAPI', 'PROPERTIES',
42102 + 'DEFINED_PHASES', 'HDEPEND')
42103
42104 autocommits = True
42105 serialize_eclasses = False
42106
42107 diff --git a/pym/portage/cache/sqlite.py b/pym/portage/cache/sqlite.py
42108 index a6a3e06..42a2399 100644
42109 --- a/pym/portage/cache/sqlite.py
42110 +++ b/pym/portage/cache/sqlite.py
42111 @@ -1,6 +1,8 @@
42112 -# Copyright 1999-2012 Gentoo Foundation
42113 +# Copyright 1999-2014 Gentoo Foundation
42114 # Distributed under the terms of the GNU General Public License v2
42115
42116 +from __future__ import unicode_literals
42117 +
42118 import re
42119 import sys
42120 from portage.cache import fs_template
42121 @@ -11,6 +13,7 @@ from portage.util import writemsg
42122 from portage.localization import _
42123
42124 if sys.hexversion >= 0x3000000:
42125 + # pylint: disable=W0622
42126 basestring = str
42127
42128 class database(fs_template.FsBased):
42129 @@ -21,7 +24,6 @@ class database(fs_template.FsBased):
42130 # to calculate the number of pages requested, according to the following
42131 # equation: cache_bytes = page_bytes * page_count
42132 cache_bytes = 1024 * 1024 * 10
42133 - _db_table = None
42134
42135 def __init__(self, *args, **config):
42136 super(database, self).__init__(*args, **config)
42137 @@ -29,6 +31,7 @@ class database(fs_template.FsBased):
42138 self._allowed_keys = ["_mtime_", "_eclasses_"]
42139 self._allowed_keys.extend(self._known_keys)
42140 self._allowed_keys.sort()
42141 + self._allowed_keys_set = frozenset(self._allowed_keys)
42142 self.location = os.path.join(self.location,
42143 self.label.lstrip(os.path.sep).rstrip(os.path.sep))
42144
42145 @@ -38,8 +41,8 @@ class database(fs_template.FsBased):
42146 config.setdefault("autocommit", self.autocommits)
42147 config.setdefault("cache_bytes", self.cache_bytes)
42148 config.setdefault("synchronous", self.synchronous)
42149 - # Timeout for throwing a "database is locked" exception (pysqlite
42150 - # default is 5.0 seconds).
42151 + # Set longer timeout for throwing a "database is locked" exception.
42152 + # Default timeout in sqlite3 module is 5.0 seconds.
42153 config.setdefault("timeout", 15)
42154 self._db_init_connection(config)
42155 self._db_init_structures()
42156 @@ -48,11 +51,8 @@ class database(fs_template.FsBased):
42157 # sqlite3 is optional with >=python-2.5
42158 try:
42159 import sqlite3 as db_module
42160 - except ImportError:
42161 - try:
42162 - from pysqlite2 import dbapi2 as db_module
42163 - except ImportError as e:
42164 - raise cache_errors.InitializationError(self.__class__, e)
42165 + except ImportError as e:
42166 + raise cache_errors.InitializationError(self.__class__, e)
42167
42168 self._db_module = db_module
42169 self._db_error = db_module.Error
42170 @@ -63,7 +63,6 @@ class database(fs_template.FsBased):
42171 # Avoid potential UnicodeEncodeError in python-2.x by
42172 # only calling str() when it's absolutely necessary.
42173 s = str(s)
42174 - # This is equivalent to the _quote function from pysqlite 1.1.
42175 return "'%s'" % s.replace("'", "''")
42176
42177 def _db_init_connection(self, config):
42178 @@ -93,9 +92,6 @@ class database(fs_template.FsBased):
42179 self._db_table["packages"]["table_name"] = mytable
42180 self._db_table["packages"]["package_id"] = "internal_db_package_id"
42181 self._db_table["packages"]["package_key"] = "portage_package_key"
42182 - self._db_table["packages"]["internal_columns"] = \
42183 - [self._db_table["packages"]["package_id"],
42184 - self._db_table["packages"]["package_key"]]
42185 create_statement = []
42186 create_statement.append("CREATE TABLE")
42187 create_statement.append(mytable)
42188 @@ -110,9 +106,6 @@ class database(fs_template.FsBased):
42189 create_statement.append(")")
42190
42191 self._db_table["packages"]["create"] = " ".join(create_statement)
42192 - self._db_table["packages"]["columns"] = \
42193 - self._db_table["packages"]["internal_columns"] + \
42194 - self._allowed_keys
42195
42196 cursor = self._db_cursor
42197 for k, v in self._db_table.items():
42198 @@ -211,13 +204,17 @@ class database(fs_template.FsBased):
42199 raise KeyError(cpv)
42200 else:
42201 raise cache_errors.CacheCorruption(cpv, "key is not unique")
42202 + result = result[0]
42203 d = {}
42204 - internal_columns = self._db_table["packages"]["internal_columns"]
42205 - column_index = -1
42206 - for k in self._db_table["packages"]["columns"]:
42207 - column_index +=1
42208 - if k not in internal_columns:
42209 - d[k] = result[0][column_index]
42210 + allowed_keys_set = self._allowed_keys_set
42211 + for column_index, column_info in enumerate(cursor.description):
42212 + k = column_info[0]
42213 + if k in allowed_keys_set:
42214 + v = result[column_index]
42215 + if v is None:
42216 + # This happens after a new empty column has been added.
42217 + v = ""
42218 + d[k] = v
42219
42220 return d
42221
42222
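[Note] The sqlite cache change above stops tracking a hard-coded column list and instead maps each result row through cursor.description, treating NULL (e.g. a freshly added column) as an empty string. The same pattern in isolation, with an illustrative table:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    cur = conn.cursor()
    cur.execute("CREATE TABLE pkg (portage_package_key TEXT, EAPI TEXT, SLOT TEXT)")
    cur.execute("INSERT INTO pkg (portage_package_key, EAPI) VALUES ('cat/pkg-1.0', '5')")
    cur.execute("SELECT * FROM pkg WHERE portage_package_key = 'cat/pkg-1.0'")
    row = cur.fetchone()
    d = dict((col[0], "" if val is None else val)
             for col, val in zip(cur.description, row))
    print(d)   # {'portage_package_key': 'cat/pkg-1.0', 'EAPI': '5', 'SLOT': ''}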
42223 diff --git a/pym/portage/cache/template.py b/pym/portage/cache/template.py
42224 index cf1e8ae..bc81b86 100644
42225 --- a/pym/portage/cache/template.py
42226 +++ b/pym/portage/cache/template.py
42227 @@ -1,6 +1,6 @@
42228 -# Copyright: 2005-2012 Gentoo Foundation
42229 +# Copyright 2005-2014 Gentoo Foundation
42230 +# Distributed under the terms of the GNU General Public License v2
42231 # Author(s): Brian Harring (ferringb@g.o)
42232 -# License: GPL2
42233
42234 from portage.cache import cache_errors
42235 from portage.cache.cache_errors import InvalidRestriction
42236 @@ -10,6 +10,7 @@ import warnings
42237 import operator
42238
42239 if sys.hexversion >= 0x3000000:
42240 + # pylint: disable=W0622
42241 _unicode = str
42242 basestring = str
42243 long = int
42244 @@ -164,7 +165,14 @@ class database(object):
42245
42246 def commit(self):
42247 if not self.autocommits:
42248 - raise NotImplementedError
42249 + raise NotImplementedError(self)
42250 +
42251 + def __del__(self):
42252 + # This used to be handled by an atexit hook that called
42253 + # close_portdbapi_caches() for all portdbapi instances, but that was
42254 + # prone to memory leaks for API consumers that needed to create/destroy
42255 + # many portdbapi instances. So, instead we rely on __del__.
42256 + self.sync()
42257
42258 def __contains__(self, cpv):
42259 """This method should always be overridden. It is provided only for
42260
42261 diff --git a/pym/portage/checksum.py b/pym/portage/checksum.py
42262 index daf4a0c..f24a90f 100644
42263 --- a/pym/portage/checksum.py
42264 +++ b/pym/portage/checksum.py
42265 @@ -1,15 +1,16 @@
42266 # checksum.py -- core Portage functionality
42267 -# Copyright 1998-2012 Gentoo Foundation
42268 +# Copyright 1998-2014 Gentoo Foundation
42269 # Distributed under the terms of the GNU General Public License v2
42270
42271 import portage
42272 -from portage.const import PRELINK_BINARY,HASHING_BLOCKSIZE
42273 +from portage.const import PRELINK_BINARY, HASHING_BLOCKSIZE
42274 from portage.localization import _
42275 from portage import os
42276 from portage import _encodings
42277 from portage import _unicode_encode
42278 import errno
42279 import stat
42280 +import subprocess
42281 import tempfile
42282
42283 #dict of all available hash functions
42284 @@ -48,16 +49,15 @@ class _generate_hash_function(object):
42285 @type filename: String
42286 @return: The hash and size of the data
42287 """
42288 - f = _open_file(filename)
42289 - blocksize = HASHING_BLOCKSIZE
42290 - data = f.read(blocksize)
42291 - size = 0
42292 - checksum = self._hashobject()
42293 - while data:
42294 - checksum.update(data)
42295 - size = size + len(data)
42296 + with _open_file(filename) as f:
42297 + blocksize = HASHING_BLOCKSIZE
42298 + size = 0
42299 + checksum = self._hashobject()
42300 data = f.read(blocksize)
42301 - f.close()
42302 + while data:
42303 + checksum.update(data)
42304 + size = size + len(data)
42305 + data = f.read(blocksize)
42306
42307 return (checksum.hexdigest(), size)
42308
42309 @@ -163,11 +163,16 @@ hashfunc_map["size"] = getsize
42310
42311 prelink_capable = False
42312 if os.path.exists(PRELINK_BINARY):
42313 - results = portage.subprocess_getstatusoutput(
42314 - "%s --version > /dev/null 2>&1" % (PRELINK_BINARY,))
42315 - if (results[0] >> 8) == 0:
42316 - prelink_capable=1
42317 - del results
42318 + cmd = [PRELINK_BINARY, "--version"]
42319 + cmd = [_unicode_encode(x, encoding=_encodings['fs'], errors='strict')
42320 + for x in cmd]
42321 + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
42322 + stderr=subprocess.STDOUT)
42323 + proc.communicate()
42324 + status = proc.wait()
42325 + if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
42326 + prelink_capable = 1
42327 + del cmd, proc, status
42328
42329 def is_prelinkable_elf(filename):
42330 f = _open_file(filename)
42331 @@ -217,6 +222,64 @@ def _filter_unaccelarated_hashes(digests):
42332
42333 return digests
42334
42335 +class _hash_filter(object):
42336 + """
42337 + Implements filtering for PORTAGE_CHECKSUM_FILTER.
42338 + """
42339 +
42340 + __slots__ = ('transparent', '_tokens',)
42341 +
42342 + def __init__(self, filter_str):
42343 + tokens = filter_str.upper().split()
42344 + if not tokens or tokens[-1] == "*":
42345 + del tokens[:]
42346 + self.transparent = not tokens
42347 + tokens.reverse()
42348 + self._tokens = tuple(tokens)
42349 +
42350 + def __call__(self, hash_name):
42351 + if self.transparent:
42352 + return True
42353 + matches = ("*", hash_name)
42354 + for token in self._tokens:
42355 + if token in matches:
42356 + return True
42357 + elif token[:1] == "-":
42358 + if token[1:] in matches:
42359 + return False
42360 + return False
42361 +
42362 +def _apply_hash_filter(digests, hash_filter):
42363 + """
42364 + Return a new dict containing the filtered digests, or the same
42365 + dict if no changes are necessary. This will always preserve
42366 + at least one digest, in order to ensure that they are not all
42367 + discarded.
42368 + @param digests: dictionary of digests
42369 + @type digests: dict
42370 + @param hash_filter: A callable that takes a single hash name
42371 + argument, and returns True if the hash is to be used or
42372 + False otherwise
42373 + @type hash_filter: callable
42374 + """
42375 +
42376 + verifiable_hash_types = set(digests).intersection(hashfunc_map)
42377 + verifiable_hash_types.discard("size")
42378 + modified = False
42379 + if len(verifiable_hash_types) > 1:
42380 + for k in list(verifiable_hash_types):
42381 + if not hash_filter(k):
42382 + modified = True
42383 + verifiable_hash_types.remove(k)
42384 + if len(verifiable_hash_types) == 1:
42385 + break
42386 +
42387 + if modified:
42388 + digests = dict((k, v) for (k, v) in digests.items()
42389 + if k == "size" or k in verifiable_hash_types)
42390 +
42391 + return digests
42392 +
42393 def verify_all(filename, mydict, calc_prelink=0, strict=0):
42394 """
42395 Verify all checksums against a file.
42396 @@ -275,9 +338,10 @@ def verify_all(filename, mydict, calc_prelink=0, strict=0):
42397 {"file" : filename, "type" : x})
42398 else:
42399 file_is_ok = False
42400 - reason = (("Failed on %s verification" % x), myhash,mydict[x])
42401 + reason = (("Failed on %s verification" % x), myhash, mydict[x])
42402 break
42403 - return file_is_ok,reason
42404 +
42405 + return file_is_ok, reason
42406
42407 def perform_checksum(filename, hashname="MD5", calc_prelink=0):
42408 """
42409
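As a pointer for readers following the new PORTAGE_CHECKSUM_FILTER support, the snippet below is a minimal sketch of how the added _hash_filter and _apply_hash_filter helpers behave. It assumes the patched portage.checksum module is importable; the filter string and digest values are illustrative only (they happen to be the empty-file digests).

    from portage.checksum import _hash_filter, _apply_hash_filter

    # "-*" drops every hash, then "SHA256" re-enables it; later tokens in
    # the filter string take precedence, as in _hash_filter.__call__ above.
    hash_filter = _hash_filter("-* SHA256")

    digests = {
        "size": 0,
        "MD5": "d41d8cd98f00b204e9800998ecf8427e",
        "SHA256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
    }

    print(hash_filter("SHA256"))  # True  -- explicitly re-enabled
    print(hash_filter("MD5"))     # False -- excluded by "-*"

    # "size" is always kept, and at least one verifiable hash survives.
    print(sorted(_apply_hash_filter(digests, hash_filter)))
    # ['SHA256', 'size']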
42410 diff --git a/pym/portage/const.py b/pym/portage/const.py
42411 index ceef5c5..1785bff 100644
42412 --- a/pym/portage/const.py
42413 +++ b/pym/portage/const.py
42414 @@ -1,7 +1,9 @@
42415 # portage: Constants
42416 -# Copyright 1998-2012 Gentoo Foundation
42417 +# Copyright 1998-2013 Gentoo Foundation
42418 # Distributed under the terms of the GNU General Public License v2
42419
42420 +from __future__ import unicode_literals
42421 +
42422 import os
42423
42424 # ===========================================================================
42425 @@ -27,8 +29,8 @@ import os
42426 # The variables in this file are grouped by config_root, target_root.
42427
42428 # variables used with config_root (these need to be relative)
42429 -MAKE_CONF_FILE = "etc/make.conf"
42430 USER_CONFIG_PATH = "etc/portage"
42431 +MAKE_CONF_FILE = USER_CONFIG_PATH + "/make.conf"
42432 MODULES_FILE_PATH = USER_CONFIG_PATH + "/modules"
42433 CUSTOM_PROFILE_PATH = USER_CONFIG_PATH + "/profile"
42434 USER_VIRTUALS_FILE = USER_CONFIG_PATH + "/virtuals"
42435 @@ -36,7 +38,7 @@ EBUILD_SH_ENV_FILE = USER_CONFIG_PATH + "/bashrc"
42436 EBUILD_SH_ENV_DIR = USER_CONFIG_PATH + "/env"
42437 CUSTOM_MIRRORS_FILE = USER_CONFIG_PATH + "/mirrors"
42438 COLOR_MAP_FILE = USER_CONFIG_PATH + "/color.map"
42439 -PROFILE_PATH = "etc/make.profile"
42440 +PROFILE_PATH = USER_CONFIG_PATH + "/make.profile"
42441 MAKE_DEFAULTS_FILE = PROFILE_PATH + "/make.defaults" # FIXME: not used
42442 DEPRECATED_PROFILE_FILE = PROFILE_PATH + "/deprecated"
42443
42444 @@ -56,7 +58,10 @@ DEPCACHE_PATH = "/var/cache/edb/dep"
42445 GLOBAL_CONFIG_PATH = "/usr/share/portage/config"
42446
42447 # these variables are not used with target_root or config_root
42448 -PORTAGE_BASE_PATH = os.path.join(os.sep, os.sep.join(__file__.split(os.sep)[:-3]))
42449 +# NOTE: Use realpath(__file__) so that python module symlinks in site-packages
42450 +# are followed back to the real location of the whole portage installation.
42451 +PORTAGE_BASE_PATH = os.path.join(os.sep, os.sep.join(os.path.realpath(
42452 + __file__.rstrip("co")).split(os.sep)[:-3]))
42453 PORTAGE_BIN_PATH = PORTAGE_BASE_PATH + "/bin"
42454 PORTAGE_PYM_PATH = PORTAGE_BASE_PATH + "/pym"
42455 LOCALE_DATA_PATH = PORTAGE_BASE_PATH + "/locale" # FIXME: not used
42456 @@ -75,40 +80,123 @@ REPO_NAME_LOC = "profiles" + "/" + REPO_NAME_FILE
42457 PORTAGE_PACKAGE_ATOM = "sys-apps/portage"
42458 LIBC_PACKAGE_ATOM = "virtual/libc"
42459 OS_HEADERS_PACKAGE_ATOM = "virtual/os-headers"
42460 +CVS_PACKAGE_ATOM = "dev-vcs/cvs"
42461 +GIT_PACKAGE_ATOM = "dev-vcs/git"
42462 +RSYNC_PACKAGE_ATOM = "net-misc/rsync"
42463
42464 -INCREMENTALS = ("USE", "USE_EXPAND", "USE_EXPAND_HIDDEN",
42465 - "FEATURES", "ACCEPT_KEYWORDS",
42466 - "CONFIG_PROTECT_MASK", "CONFIG_PROTECT",
42467 - "PRELINK_PATH", "PRELINK_PATH_MASK",
42468 - "PROFILE_ONLY_VARIABLES")
42469 -EBUILD_PHASES = ("pretend", "setup", "unpack", "prepare", "configure",
42470 - "compile", "test", "install",
42471 - "package", "preinst", "postinst","prerm", "postrm",
42472 - "nofetch", "config", "info", "other")
42473 +INCREMENTALS = (
42474 + "ACCEPT_KEYWORDS",
42475 + "CONFIG_PROTECT",
42476 + "CONFIG_PROTECT_MASK",
42477 + "FEATURES",
42478 + "IUSE_IMPLICIT",
42479 + "PRELINK_PATH",
42480 + "PRELINK_PATH_MASK",
42481 + "PROFILE_ONLY_VARIABLES",
42482 + "USE",
42483 + "USE_EXPAND",
42484 + "USE_EXPAND_HIDDEN",
42485 + "USE_EXPAND_IMPLICIT",
42486 + "USE_EXPAND_UNPREFIXED",
42487 +)
42488 +EBUILD_PHASES = (
42489 + "pretend",
42490 + "setup",
42491 + "unpack",
42492 + "prepare",
42493 + "configure",
42494 + "compile",
42495 + "test",
42496 + "install",
42497 + "package",
42498 + "preinst",
42499 + "postinst",
42500 + "prerm",
42501 + "postrm",
42502 + "nofetch",
42503 + "config",
42504 + "info",
42505 + "other",
42506 +)
42507 SUPPORTED_FEATURES = frozenset([
42508 - "assume-digests", "binpkg-logs", "buildpkg", "buildsyspkg", "candy",
42509 - "ccache", "chflags", "clean-logs",
42510 - "collision-protect", "compress-build-logs", "compressdebug",
42511 - "config-protect-if-modified",
42512 - "digest", "distcc", "distcc-pump", "distlocks",
42513 - "downgrade-backup", "ebuild-locks", "fakeroot",
42514 - "fail-clean", "force-mirror", "force-prefix", "getbinpkg",
42515 - "installsources", "keeptemp", "keepwork", "fixlafiles", "lmirror",
42516 - "metadata-transfer", "mirror", "multilib-strict", "news",
42517 - "noauto", "noclean", "nodoc", "noinfo", "noman",
42518 - "nostrip", "notitles", "parallel-fetch", "parallel-install",
42519 - "parse-eapi-ebuild-head",
42520 - "prelink-checksums",
42521 - "protect-owned", "python-trace", "sandbox",
42522 - "selinux", "sesandbox", "sfperms",
42523 - "sign", "skiprocheck", "split-elog", "split-log", "splitdebug",
42524 - "strict", "stricter", "suidctl", "test", "test-fail-continue",
42525 - "unknown-features-filter", "unknown-features-warn",
42526 - "unmerge-backup",
42527 - "unmerge-logs", "unmerge-orphans", "userfetch", "userpriv",
42528 - "usersandbox", "usersync", "webrsync-gpg", "xattr"])
42529 -
42530 -EAPI = 4
42531 + "assume-digests",
42532 + "binpkg-logs",
42533 + "buildpkg",
42534 + "buildsyspkg",
42535 + "candy",
42536 + "ccache",
42537 + "cgroup",
42538 + "chflags",
42539 + "clean-logs",
42540 + "collision-protect",
42541 + "compress-build-logs",
42542 + "compressdebug",
42543 + "compress-index",
42544 + "config-protect-if-modified",
42545 + "digest",
42546 + "distcc",
42547 + "distcc-pump",
42548 + "distlocks",
42549 + "downgrade-backup",
42550 + "ebuild-locks",
42551 + "fail-clean",
42552 + "fakeroot",
42553 + "fixlafiles",
42554 + "force-mirror",
42555 + "force-prefix",
42556 + "getbinpkg",
42557 + "installsources",
42558 + "ipc-sandbox",
42559 + "keeptemp",
42560 + "keepwork",
42561 + "lmirror",
42562 + "merge-sync",
42563 + "metadata-transfer",
42564 + "mirror",
42565 + "multilib-strict",
42566 + "network-sandbox",
42567 + "news",
42568 + "noauto",
42569 + "noclean",
42570 + "nodoc",
42571 + "noinfo",
42572 + "noman",
42573 + "nostrip",
42574 + "notitles",
42575 + "parallel-fetch",
42576 + "parallel-install",
42577 + "prelink-checksums",
42578 + "preserve-libs",
42579 + "protect-owned",
42580 + "python-trace",
42581 + "sandbox",
42582 + "selinux",
42583 + "sesandbox",
42584 + "sfperms",
42585 + "sign",
42586 + "skiprocheck",
42587 + "splitdebug",
42588 + "split-elog",
42589 + "split-log",
42590 + "strict",
42591 + "stricter",
42592 + "suidctl",
42593 + "test",
42594 + "test-fail-continue",
42595 + "unknown-features-filter",
42596 + "unknown-features-warn",
42597 + "unmerge-backup",
42598 + "unmerge-logs",
42599 + "unmerge-orphans",
42600 + "userfetch",
42601 + "userpriv",
42602 + "usersandbox",
42603 + "usersync",
42604 + "webrsync-gpg",
42605 + "xattr",
42606 +])
42607 +
42608 +EAPI = 5
42609
42610 HASHING_BLOCKSIZE = 32768
42611 MANIFEST1_HASH_FUNCTIONS = ("MD5", "SHA256", "RMD160")
42612 @@ -151,13 +239,35 @@ MANIFEST2_IDENTIFIERS = ("AUX", "MISC", "DIST", "EBUILD")
42613 # a config instance (since it's possible to contruct a config instance with
42614 # a different EPREFIX). Therefore, the EPREFIX constant should *NOT* be used
42615 # in the definition of any other constants within this file.
42616 -EPREFIX=""
42617 +EPREFIX = ""
42618
42619 # pick up EPREFIX from the environment if set
42620 if "PORTAGE_OVERRIDE_EPREFIX" in os.environ:
42621 EPREFIX = os.environ["PORTAGE_OVERRIDE_EPREFIX"]
42622 if EPREFIX:
42623 EPREFIX = os.path.normpath(EPREFIX)
42624 + if EPREFIX == os.sep:
42625 + EPREFIX = ""
42626 +
42627 +VCS_DIRS = ("CVS", "RCS", "SCCS", ".bzr", ".git", ".hg", ".svn")
42628 +
42629 +# List of known live eclasses. Keep it in sync with cnf/sets/portage.conf
42630 +LIVE_ECLASSES = frozenset([
42631 + "bzr",
42632 + "cvs",
42633 + "darcs",
42634 + "git",
42635 + "git-2",
42636 + "git-r3",
42637 + "mercurial",
42638 + "subversion",
42639 + "tla",
42640 +])
42641 +
42642 +SUPPORTED_BINPKG_FORMATS = ("tar", "rpm")
42643 +
42644 +# Time formats used in various places like metadata.chk.
42645 +TIMESTAMP_FORMAT = "%a, %d %b %Y %H:%M:%S +0000" # to be used with time.gmtime()
42646
42647 # ===========================================================================
42648 # END OF CONSTANTS -- END OF CONSTANTS -- END OF CONSTANTS -- END OF CONSTANT
42649 @@ -165,17 +275,5 @@ if "PORTAGE_OVERRIDE_EPREFIX" in os.environ:
42650
42651 # Private constants for use in conditional code in order to minimize the diff
42652 # between branches.
42653 -_ENABLE_DYN_LINK_MAP = True
42654 -_ENABLE_PRESERVE_LIBS = True
42655 -_ENABLE_REPO_NAME_WARN = True
42656 +_DEPCLEAN_LIB_CHECK_DEFAULT = True
42657 _ENABLE_SET_CONFIG = True
42658 -_ENABLE_INHERIT_CHECK = True
42659 -
42660 -
42661 -# The definitions above will differ between branches, so it's useful to have
42662 -# common lines of diff context here in order to avoid merge conflicts.
42663 -
42664 -if _ENABLE_PRESERVE_LIBS:
42665 - SUPPORTED_FEATURES = set(SUPPORTED_FEATURES)
42666 - SUPPORTED_FEATURES.add("preserve-libs")
42667 - SUPPORTED_FEATURES = frozenset(SUPPORTED_FEATURES)
42668
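A small aside on the constants added above: TIMESTAMP_FORMAT is documented as being paired with time.gmtime(). A minimal illustration, using only the standard library and repeating the constant's value verbatim:

    import time

    # Same value as the new portage.const.TIMESTAMP_FORMAT.
    TIMESTAMP_FORMAT = "%a, %d %b %Y %H:%M:%S +0000"

    print(time.strftime(TIMESTAMP_FORMAT, time.gmtime()))
    # e.g. "Wed, 26 Mar 2014 13:52:42 +0000"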
42669 diff --git a/pym/portage/cvstree.py b/pym/portage/cvstree.py
42670 index 3680ae4..4a3afae 100644
42671 --- a/pym/portage/cvstree.py
42672 +++ b/pym/portage/cvstree.py
42673 @@ -1,5 +1,5 @@
42674 # cvstree.py -- cvs tree utilities
42675 -# Copyright 1998-2011 Gentoo Foundation
42676 +# Copyright 1998-2014 Gentoo Foundation
42677 # Distributed under the terms of the GNU General Public License v2
42678
42679 from __future__ import print_function
42680 @@ -15,20 +15,20 @@ from portage import _encodings
42681 from portage import _unicode_encode
42682
42683 if sys.hexversion >= 0x3000000:
42684 + # pylint: disable=W0622
42685 long = int
42686
42687 # [D]/Name/Version/Date/Flags/Tags
42688
42689 def pathdata(entries, path):
42690 - """(entries,path)
42691 - Returns the data(dict) for a specific file/dir at the path specified."""
42692 - mysplit=path.split("/")
42693 - myentries=entries
42694 - mytarget=mysplit[-1]
42695 - mysplit=mysplit[:-1]
42696 + """Returns the data(dict) for a specific file/dir at the path specified."""
42697 + mysplit = path.split("/")
42698 + myentries = entries
42699 + mytarget = mysplit[-1]
42700 + mysplit = mysplit[:-1]
42701 for mys in mysplit:
42702 if mys in myentries["dirs"]:
42703 - myentries=myentries["dirs"][mys]
42704 + myentries = myentries["dirs"][mys]
42705 else:
42706 return None
42707 if mytarget in myentries["dirs"]:
42708 @@ -39,18 +39,17 @@ def pathdata(entries, path):
42709 return None
42710
42711 def fileat(entries, path):
42712 - return pathdata(entries,path)
42713 + return pathdata(entries, path)
42714
42715 def isadded(entries, path):
42716 - """(entries,path)
42717 - Returns true if the path exists and is added to the cvs tree."""
42718 - mytarget=pathdata(entries, path)
42719 + """Returns True if the path exists and is added to the cvs tree."""
42720 + mytarget = pathdata(entries, path)
42721 if mytarget:
42722 if "cvs" in mytarget["status"]:
42723 return 1
42724
42725 - basedir=os.path.dirname(path)
42726 - filename=os.path.basename(path)
42727 + basedir = os.path.dirname(path)
42728 + filename = os.path.basename(path)
42729
42730 try:
42731 myfile = io.open(
42732 @@ -59,234 +58,250 @@ def isadded(entries, path):
42733 mode='r', encoding=_encodings['content'], errors='strict')
42734 except IOError:
42735 return 0
42736 - mylines=myfile.readlines()
42737 + mylines = myfile.readlines()
42738 myfile.close()
42739
42740 - rep=re.compile("^\/"+re.escape(filename)+"\/");
42741 + rep = re.compile("^\/%s\/" % re.escape(filename))
42742 for x in mylines:
42743 if rep.search(x):
42744 return 1
42745
42746 return 0
42747
42748 -def findnew(entries,recursive=0,basedir=""):
42749 - """(entries,recursive=0,basedir="")
42750 - Recurses the entries tree to find all elements that have been added but
42751 +def findnew(entries, recursive=0, basedir=""):
42752 + """Recurses the entries tree to find all elements that have been added but
42753 have not yet been committed. Returns a list of paths, optionally prepended
42754 - with a basedir."""
42755 - if basedir and basedir[-1]!="/":
42756 - basedir=basedir+"/"
42757 - mylist=[]
42758 + with a basedir.
42759 + """
42760 + if basedir and basedir[-1] != "/":
42761 + basedir += "/"
42762 +
42763 + mylist = []
42764 for myfile in entries["files"]:
42765 if "cvs" in entries["files"][myfile]["status"]:
42766 if "0" == entries["files"][myfile]["revision"]:
42767 - mylist.append(basedir+myfile)
42768 + mylist.append(basedir + myfile)
42769 +
42770 if recursive:
42771 for mydir in entries["dirs"]:
42772 - mylist+=findnew(entries["dirs"][mydir],recursive,basedir+mydir)
42773 + mylist += findnew(entries["dirs"][mydir], recursive, basedir + mydir)
42774 +
42775 return mylist
42776
42777 def findoption(entries, pattern, recursive=0, basedir=""):
42778 - """(entries, pattern, recursive=0, basedir="")
42779 - Iterate over paths of cvs entries for which the pattern.search() method
42780 + """Iterate over paths of cvs entries for which the pattern.search() method
42781 finds a match. Returns a list of paths, optionally prepended with a
42782 - basedir."""
42783 + basedir.
42784 + """
42785 if not basedir.endswith("/"):
42786 basedir += "/"
42787 +
42788 for myfile, mydata in entries["files"].items():
42789 if "cvs" in mydata["status"]:
42790 if pattern.search(mydata["flags"]):
42791 - yield basedir+myfile
42792 + yield basedir + myfile
42793 +
42794 if recursive:
42795 for mydir, mydata in entries["dirs"].items():
42796 for x in findoption(mydata, pattern,
42797 - recursive, basedir+mydir):
42798 + recursive, basedir + mydir):
42799 yield x
42800
42801 -def findchanged(entries,recursive=0,basedir=""):
42802 - """(entries,recursive=0,basedir="")
42803 - Recurses the entries tree to find all elements that exist in the cvs tree
42804 +def findchanged(entries, recursive=0, basedir=""):
42805 + """Recurses the entries tree to find all elements that exist in the cvs tree
42806 and differ from the committed version. Returns a list of paths, optionally
42807 - prepended with a basedir."""
42808 - if basedir and basedir[-1]!="/":
42809 - basedir=basedir+"/"
42810 - mylist=[]
42811 + prepended with a basedir.
42812 + """
42813 + if basedir and basedir[-1] != "/":
42814 + basedir += "/"
42815 +
42816 + mylist = []
42817 for myfile in entries["files"]:
42818 if "cvs" in entries["files"][myfile]["status"]:
42819 if "current" not in entries["files"][myfile]["status"]:
42820 if "exists" in entries["files"][myfile]["status"]:
42821 - if entries["files"][myfile]["revision"]!="0":
42822 - mylist.append(basedir+myfile)
42823 + if entries["files"][myfile]["revision"] != "0":
42824 + mylist.append(basedir + myfile)
42825 +
42826 if recursive:
42827 for mydir in entries["dirs"]:
42828 - mylist+=findchanged(entries["dirs"][mydir],recursive,basedir+mydir)
42829 + mylist += findchanged(entries["dirs"][mydir], recursive, basedir + mydir)
42830 +
42831 return mylist
42832
42833 -def findmissing(entries,recursive=0,basedir=""):
42834 - """(entries,recursive=0,basedir="")
42835 - Recurses the entries tree to find all elements that are listed in the cvs
42836 +def findmissing(entries, recursive=0, basedir=""):
42837 + """Recurses the entries tree to find all elements that are listed in the cvs
42838 tree but do not exist on the filesystem. Returns a list of paths,
42839 - optionally prepended with a basedir."""
42840 - if basedir and basedir[-1]!="/":
42841 - basedir=basedir+"/"
42842 - mylist=[]
42843 + optionally prepended with a basedir.
42844 + """
42845 + if basedir and basedir[-1] != "/":
42846 + basedir += "/"
42847 +
42848 + mylist = []
42849 for myfile in entries["files"]:
42850 if "cvs" in entries["files"][myfile]["status"]:
42851 if "exists" not in entries["files"][myfile]["status"]:
42852 if "removed" not in entries["files"][myfile]["status"]:
42853 - mylist.append(basedir+myfile)
42854 + mylist.append(basedir + myfile)
42855 +
42856 if recursive:
42857 for mydir in entries["dirs"]:
42858 - mylist+=findmissing(entries["dirs"][mydir],recursive,basedir+mydir)
42859 + mylist += findmissing(entries["dirs"][mydir], recursive, basedir + mydir)
42860 +
42861 return mylist
42862
42863 -def findunadded(entries,recursive=0,basedir=""):
42864 - """(entries,recursive=0,basedir="")
42865 - Recurses the entries tree to find all elements that are in valid cvs
42866 +def findunadded(entries, recursive=0, basedir=""):
42867 + """Recurses the entries tree to find all elements that are in valid cvs
42868 directories but are not part of the cvs tree. Returns a list of paths,
42869 - optionally prepended with a basedir."""
42870 - if basedir and basedir[-1]!="/":
42871 - basedir=basedir+"/"
42872 - mylist=[]
42873 + optionally prepended with a basedir.
42874 + """
42875 + if basedir and basedir[-1] != "/":
42876 + basedir += "/"
42877
42878 - #ignore what cvs ignores.
42879 + # Ignore what cvs ignores.
42880 + mylist = []
42881 for myfile in entries["files"]:
42882 if "cvs" not in entries["files"][myfile]["status"]:
42883 - mylist.append(basedir+myfile)
42884 + mylist.append(basedir + myfile)
42885 +
42886 if recursive:
42887 for mydir in entries["dirs"]:
42888 - mylist+=findunadded(entries["dirs"][mydir],recursive,basedir+mydir)
42889 + mylist += findunadded(entries["dirs"][mydir], recursive, basedir + mydir)
42890 +
42891 return mylist
42892
42893 -def findremoved(entries,recursive=0,basedir=""):
42894 - """(entries,recursive=0,basedir="")
42895 - Recurses the entries tree to find all elements that are in flagged for cvs
42896 - deletions. Returns a list of paths, optionally prepended with a basedir."""
42897 - if basedir and basedir[-1]!="/":
42898 - basedir=basedir+"/"
42899 - mylist=[]
42900 +def findremoved(entries, recursive=0, basedir=""):
42901 + """Recurses the entries tree to find all elements that are in flagged for cvs
42902 + """Recurses the entries tree to find all elements that are flagged for cvs
42903 + """
42904 + if basedir and basedir[-1] != "/":
42905 + basedir += "/"
42906 +
42907 + mylist = []
42908 for myfile in entries["files"]:
42909 if "removed" in entries["files"][myfile]["status"]:
42910 - mylist.append(basedir+myfile)
42911 + mylist.append(basedir + myfile)
42912 +
42913 if recursive:
42914 for mydir in entries["dirs"]:
42915 - mylist+=findremoved(entries["dirs"][mydir],recursive,basedir+mydir)
42916 + mylist += findremoved(entries["dirs"][mydir], recursive, basedir + mydir)
42917 +
42918 return mylist
42919
42920 def findall(entries, recursive=0, basedir=""):
42921 - """(entries,recursive=0,basedir="")
42922 - Recurses the entries tree to find all new, changed, missing, and unadded
42923 - entities. Returns a 4 element list of lists as returned from each find*()."""
42924 -
42925 - if basedir and basedir[-1]!="/":
42926 - basedir=basedir+"/"
42927 - mynew = findnew(entries,recursive,basedir)
42928 - mychanged = findchanged(entries,recursive,basedir)
42929 - mymissing = findmissing(entries,recursive,basedir)
42930 - myunadded = findunadded(entries,recursive,basedir)
42931 - myremoved = findremoved(entries,recursive,basedir)
42932 + """Recurses the entries tree to find all new, changed, missing, and unadded
42933 + entities. Returns a 4 element list of lists as returned from each find*().
42934 + """
42935 + if basedir and basedir[-1] != "/":
42936 + basedir += "/"
42937 + mynew = findnew(entries, recursive, basedir)
42938 + mychanged = findchanged(entries, recursive, basedir)
42939 + mymissing = findmissing(entries, recursive, basedir)
42940 + myunadded = findunadded(entries, recursive, basedir)
42941 + myremoved = findremoved(entries, recursive, basedir)
42942 return [mynew, mychanged, mymissing, myunadded, myremoved]
42943
42944 ignore_list = re.compile("(^|/)(RCS(|LOG)|SCCS|CVS(|\.adm)|cvslog\..*|tags|TAGS|\.(make\.state|nse_depinfo)|.*~|(\.|)#.*|,.*|_$.*|.*\$|\.del-.*|.*\.(old|BAK|bak|orig|rej|a|olb|o|obj|so|exe|Z|elc|ln)|core)$")
42945 def apply_cvsignore_filter(list):
42946 - x=0
42947 + x = 0
42948 while x < len(list):
42949 if ignore_list.match(list[x].split("/")[-1]):
42950 list.pop(x)
42951 else:
42952 - x+=1
42953 + x += 1
42954 return list
42955
42956 -def getentries(mydir,recursive=0):
42957 - """(basedir,recursive=0)
42958 - Scans the given directory and returns a datadict of all the entries in
42959 - the directory separated as a dirs dict and a files dict."""
42960 - myfn=mydir+"/CVS/Entries"
42961 +def getentries(mydir, recursive=0):
42962 + """Scans the given directory and returns a datadict of all the entries in
42963 + the directory separated as a dirs dict and a files dict.
42964 + """
42965 + myfn = mydir + "/CVS/Entries"
42966 # entries=[dirs, files]
42967 - entries={"dirs":{},"files":{}}
42968 + entries = {"dirs":{}, "files":{}}
42969 if not os.path.exists(mydir):
42970 return entries
42971 try:
42972 myfile = io.open(_unicode_encode(myfn,
42973 encoding=_encodings['fs'], errors='strict'),
42974 mode='r', encoding=_encodings['content'], errors='strict')
42975 - mylines=myfile.readlines()
42976 + mylines = myfile.readlines()
42977 myfile.close()
42978 except SystemExit as e:
42979 raise
42980 except:
42981 - mylines=[]
42982 + mylines = []
42983 +
42984 for line in mylines:
42985 - if line and line[-1]=="\n":
42986 - line=line[:-1]
42987 + if line and line[-1] == "\n":
42988 + line = line[:-1]
42989 if not line:
42990 continue
42991 - if line=="D": # End of entries file
42992 + if line == "D": # End of entries file
42993 break
42994 - mysplit=line.split("/")
42995 - if len(mysplit)!=6:
42996 - print("Confused:",mysplit)
42997 + mysplit = line.split("/")
42998 + if len(mysplit) != 6:
42999 + print("Confused:", mysplit)
43000 continue
43001 - if mysplit[0]=="D":
43002 - entries["dirs"][mysplit[1]]={"dirs":{},"files":{},"status":[]}
43003 - entries["dirs"][mysplit[1]]["status"]=["cvs"]
43004 + if mysplit[0] == "D":
43005 + entries["dirs"][mysplit[1]] = {"dirs":{}, "files":{}, "status":[]}
43006 + entries["dirs"][mysplit[1]]["status"] = ["cvs"]
43007 if os.path.isdir(mydir+"/"+mysplit[1]):
43008 - entries["dirs"][mysplit[1]]["status"]+=["exists"]
43009 - entries["dirs"][mysplit[1]]["flags"]=mysplit[2:]
43010 + entries["dirs"][mysplit[1]]["status"] += ["exists"]
43011 + entries["dirs"][mysplit[1]]["flags"] = mysplit[2:]
43012 if recursive:
43013 - rentries=getentries(mydir+"/"+mysplit[1],recursive)
43014 - entries["dirs"][mysplit[1]]["dirs"]=rentries["dirs"]
43015 - entries["dirs"][mysplit[1]]["files"]=rentries["files"]
43016 + rentries = getentries(mydir + "/" + mysplit[1], recursive)
43017 + entries["dirs"][mysplit[1]]["dirs"] = rentries["dirs"]
43018 + entries["dirs"][mysplit[1]]["files"] = rentries["files"]
43019 else:
43020 # [D]/Name/revision/Date/Flags/Tags
43021 - entries["files"][mysplit[1]]={}
43022 - entries["files"][mysplit[1]]["revision"]=mysplit[2]
43023 - entries["files"][mysplit[1]]["date"]=mysplit[3]
43024 - entries["files"][mysplit[1]]["flags"]=mysplit[4]
43025 - entries["files"][mysplit[1]]["tags"]=mysplit[5]
43026 - entries["files"][mysplit[1]]["status"]=["cvs"]
43027 - if entries["files"][mysplit[1]]["revision"][0]=="-":
43028 - entries["files"][mysplit[1]]["status"]+=["removed"]
43029 + entries["files"][mysplit[1]] = {}
43030 + entries["files"][mysplit[1]]["revision"] = mysplit[2]
43031 + entries["files"][mysplit[1]]["date"] = mysplit[3]
43032 + entries["files"][mysplit[1]]["flags"] = mysplit[4]
43033 + entries["files"][mysplit[1]]["tags"] = mysplit[5]
43034 + entries["files"][mysplit[1]]["status"] = ["cvs"]
43035 + if entries["files"][mysplit[1]]["revision"][0] == "-":
43036 + entries["files"][mysplit[1]]["status"] += ["removed"]
43037
43038 for file in os.listdir(mydir):
43039 - if file=="CVS":
43040 + if file == "CVS":
43041 continue
43042 - if os.path.isdir(mydir+"/"+file):
43043 + if os.path.isdir(mydir + "/" + file):
43044 if file not in entries["dirs"]:
43045 if ignore_list.match(file) is not None:
43046 continue
43047 - entries["dirs"][file]={"dirs":{},"files":{}}
43048 + entries["dirs"][file] = {"dirs":{}, "files":{}}
43049 # It's normal for a directory to be unlisted in Entries
43050 # when checked out without -P (see bug #257660).
43051 - rentries=getentries(mydir+"/"+file,recursive)
43052 - entries["dirs"][file]["dirs"]=rentries["dirs"]
43053 - entries["dirs"][file]["files"]=rentries["files"]
43054 + rentries = getentries(mydir + "/" + file, recursive)
43055 + entries["dirs"][file]["dirs"] = rentries["dirs"]
43056 + entries["dirs"][file]["files"] = rentries["files"]
43057 if "status" in entries["dirs"][file]:
43058 if "exists" not in entries["dirs"][file]["status"]:
43059 - entries["dirs"][file]["status"]+=["exists"]
43060 + entries["dirs"][file]["status"] += ["exists"]
43061 else:
43062 - entries["dirs"][file]["status"]=["exists"]
43063 - elif os.path.isfile(mydir+"/"+file):
43064 + entries["dirs"][file]["status"] = ["exists"]
43065 + elif os.path.isfile(mydir + "/" + file):
43066 if file not in entries["files"]:
43067 if ignore_list.match(file) is not None:
43068 continue
43069 - entries["files"][file]={"revision":"","date":"","flags":"","tags":""}
43070 + entries["files"][file] = {"revision":"", "date":"", "flags":"", "tags":""}
43071 if "status" in entries["files"][file]:
43072 if "exists" not in entries["files"][file]["status"]:
43073 - entries["files"][file]["status"]+=["exists"]
43074 + entries["files"][file]["status"] += ["exists"]
43075 else:
43076 - entries["files"][file]["status"]=["exists"]
43077 + entries["files"][file]["status"] = ["exists"]
43078 try:
43079 - mystat=os.stat(mydir+"/"+file)
43080 + mystat = os.stat(mydir + "/" + file)
43081 mytime = time.asctime(time.gmtime(mystat[stat.ST_MTIME]))
43082 if "status" not in entries["files"][file]:
43083 - entries["files"][file]["status"]=[]
43084 - if mytime==entries["files"][file]["date"]:
43085 - entries["files"][file]["status"]+=["current"]
43086 + entries["files"][file]["status"] = []
43087 + if mytime == entries["files"][file]["date"]:
43088 + entries["files"][file]["status"] += ["current"]
43089 except SystemExit as e:
43090 raise
43091 except Exception as e:
43092 - print("failed to stat",file)
43093 + print("failed to stat", file)
43094 print(e)
43095 return
43096
43097 @@ -294,6 +309,7 @@ def getentries(mydir,recursive=0):
43098 pass
43099 else:
43100 print()
43101 - print("File of unknown type:",mydir+"/"+file)
43102 + print("File of unknown type:", mydir + "/" + file)
43103 print()
43104 +
43105 return entries
43106
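For context on the cvstree helpers being cleaned up above, a rough usage sketch follows; the checkout path is only a placeholder, and portage is assumed to be importable.

    import portage.cvstree as cvstree

    # Scan a CVS checkout recursively and summarize its working-tree state.
    entries = cvstree.getentries("/path/to/checkout", recursive=1)
    new, changed, missing, unadded, removed = cvstree.findall(
        entries, recursive=1)

    for label, paths in (("new", new), ("changed", changed),
            ("missing", missing), ("unadded", unadded), ("removed", removed)):
        print("%s: %d" % (label, len(paths)))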
43107 diff --git a/pym/portage/data.py b/pym/portage/data.py
43108 index c4d967a..54e3a8d 100644
43109 --- a/pym/portage/data.py
43110 +++ b/pym/portage/data.py
43111 @@ -1,17 +1,18 @@
43112 # data.py -- Calculated/Discovered Data Values
43113 -# Copyright 1998-2011 Gentoo Foundation
43114 +# Copyright 1998-2014 Gentoo Foundation
43115 # Distributed under the terms of the GNU General Public License v2
43116
43117 -import os, pwd, grp, platform
43118 +import os, pwd, grp, platform, sys
43119
43120 import portage
43121 portage.proxy.lazyimport.lazyimport(globals(),
43122 'portage.output:colorize',
43123 'portage.util:writemsg',
43124 + 'subprocess'
43125 )
43126 from portage.localization import _
43127
43128 -ostype=platform.system()
43129 +ostype = platform.system()
43130 userland = None
43131 if ostype == "DragonFly" or ostype.endswith("BSD"):
43132 userland = "BSD"
43133 @@ -22,10 +23,10 @@ lchown = getattr(os, "lchown", None)
43134
43135 if not lchown:
43136 if ostype == "Darwin":
43137 - def lchown(*pos_args, **key_args):
43138 + def lchown(*_args, **_kwargs):
43139 pass
43140 else:
43141 - def lchown(*pargs, **kwargs):
43142 + def lchown(*_args, **_kwargs):
43143 writemsg(colorize("BAD", "!!!") + _(
43144 " It seems that os.lchown does not"
43145 " exist. Please rebuild python.\n"), noiselevel=-1)
43146 @@ -58,11 +59,10 @@ def portage_group_warning():
43147 # If the "wheel" group does not exist then wheelgid falls back to 0.
43148 # If the "portage" group does not exist then portage_uid falls back to wheelgid.
43149
43150 -uid=os.getuid()
43151 -wheelgid=0
43152 -
43153 +uid = os.getuid()
43154 +wheelgid = 0
43155 try:
43156 - wheelgid=grp.getgrnam("wheel")[2]
43157 + wheelgid = grp.getgrnam("wheel")[2]
43158 except KeyError:
43159 pass
43160
43161 @@ -85,19 +85,27 @@ def _get_global(k):
43162 elif portage.const.EPREFIX:
43163 secpass = 2
43164 #Discover the uid and gid of the portage user/group
43165 + keyerror = False
43166 try:
43167 portage_uid = pwd.getpwnam(_get_global('_portage_username')).pw_uid
43168 - _portage_grpname = _get_global('_portage_grpname')
43169 - if platform.python_implementation() == 'PyPy':
43170 - # Somehow this prevents "TypeError: expected string" errors
43171 - # from grp.getgrnam() with PyPy 1.7
43172 - _portage_grpname = str(_portage_grpname)
43173 - portage_gid = grp.getgrnam(_portage_grpname).gr_gid
43174 - if secpass < 1 and portage_gid in os.getgroups():
43175 - secpass = 1
43176 except KeyError:
43177 + keyerror = True
43178 portage_uid = 0
43179 +
43180 + try:
43181 + portage_gid = grp.getgrnam(_get_global('_portage_grpname')).gr_gid
43182 + except KeyError:
43183 + keyerror = True
43184 portage_gid = 0
43185 +
43186 + if secpass < 1 and portage_gid in os.getgroups():
43187 + secpass = 1
43188 +
43189 + # Suppress this error message if both PORTAGE_GRPNAME and
43190 + # PORTAGE_USERNAME are set to "root", for things like
43191 + # Android (see bug #454060).
43192 + if keyerror and not (_get_global('_portage_username') == "root" and
43193 + _get_global('_portage_grpname') == "root"):
43194 writemsg(colorize("BAD",
43195 _("portage: 'portage' user or group missing.")) + "\n", noiselevel=-1)
43196 writemsg(_(
43197 @@ -129,10 +137,28 @@ def _get_global(k):
43198 # Get a list of group IDs for the portage user. Do not use
43199 # grp.getgrall() since it is known to trigger spurious
43200 # SIGPIPE problems with nss_ldap.
43201 - mystatus, myoutput = \
43202 - portage.subprocess_getstatusoutput("id -G %s" % _portage_username)
43203 - if mystatus == os.EX_OK:
43204 - for x in myoutput.split():
43205 + cmd = ["id", "-G", _portage_username]
43206 +
43207 + if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000:
43208 + # Python 3.1 _execvp throws TypeError for non-absolute executable
43209 + # path passed as bytes (see http://bugs.python.org/issue8513).
43210 + fullname = portage.process.find_binary(cmd[0])
43211 + if fullname is None:
43212 + globals()[k] = v
43213 + _initialized_globals.add(k)
43214 + return v
43215 + cmd[0] = fullname
43216 +
43217 + encoding = portage._encodings['content']
43218 + cmd = [portage._unicode_encode(x,
43219 + encoding=encoding, errors='strict') for x in cmd]
43220 + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
43221 + stderr=subprocess.STDOUT)
43222 + myoutput = proc.communicate()[0]
43223 + status = proc.wait()
43224 + if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
43225 + for x in portage._unicode_decode(myoutput,
43226 + encoding=encoding, errors='strict').split():
43227 try:
43228 v.append(int(x))
43229 except ValueError:
43230 @@ -213,10 +239,18 @@ def _init(settings):
43231 if '_portage_grpname' not in _initialized_globals and \
43232 '_portage_username' not in _initialized_globals:
43233
43234 + # Prevents "TypeError: expected string" errors
43235 + # from grp.getgrnam() with PyPy
43236 + native_string = platform.python_implementation() == 'PyPy'
43237 +
43238 v = settings.get('PORTAGE_GRPNAME', 'portage')
43239 + if native_string:
43240 + v = portage._native_string(v)
43241 globals()['_portage_grpname'] = v
43242 _initialized_globals.add('_portage_grpname')
43243
43244 v = settings.get('PORTAGE_USERNAME', 'portage')
43245 + if native_string:
43246 + v = portage._native_string(v)
43247 globals()['_portage_username'] = v
43248 _initialized_globals.add('_portage_username')
43249
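The data.py hunk above, like the earlier prelink check in checksum.py, replaces portage.subprocess_getstatusoutput with subprocess.Popen. A standalone sketch of that pattern, with the username hard-coded purely for illustration and the exit check mirroring the one used in the patch:

    import os
    import subprocess

    cmd = ["id", "-G", "portage"]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    output = proc.communicate()[0]
    status = proc.wait()

    gids = []
    if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
        # id -G prints space-separated numeric group IDs.
        for token in output.decode().split():
            try:
                gids.append(int(token))
            except ValueError:
                pass
    print(gids)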
43250 diff --git a/pym/portage/dbapi/_MergeProcess.py b/pym/portage/dbapi/_MergeProcess.py
43251 index b5f6a0b..956dbb9 100644
43252 --- a/pym/portage/dbapi/_MergeProcess.py
43253 +++ b/pym/portage/dbapi/_MergeProcess.py
43254 @@ -1,7 +1,8 @@
43255 -# Copyright 2010-2012 Gentoo Foundation
43256 +# Copyright 2010-2013 Gentoo Foundation
43257 # Distributed under the terms of the GNU General Public License v2
43258
43259 import io
43260 +import platform
43261 import signal
43262 import sys
43263 import traceback
43264 @@ -10,10 +11,11 @@ import errno
43265 import fcntl
43266 import portage
43267 from portage import os, _unicode_decode
43268 +from portage.util._ctypes import find_library
43269 import portage.elog.messages
43270 -from _emerge.SpawnProcess import SpawnProcess
43271 +from portage.util._async.ForkProcess import ForkProcess
43272
43273 -class MergeProcess(SpawnProcess):
43274 +class MergeProcess(ForkProcess):
43275 """
43276 Merge packages in a subprocess, so the Scheduler can run in the main
43277 thread while files are moved or copied asynchronously.
43278 @@ -40,11 +42,20 @@ class MergeProcess(SpawnProcess):
43279 settings.reset()
43280 settings.setcpv(cpv, mydb=self.mydbapi)
43281
43282 + # This caches the libc library lookup in the current
43283 + # process, so that it's only done once rather than
43284 + # for each child process.
43285 + if platform.system() == "Linux" and \
43286 + "merge-sync" in settings.features:
43287 + find_library("c")
43288 +
43289 # Inherit stdin by default, so that the pdb SIGUSR1
43290 # handler is usable for the subprocess.
43291 if self.fd_pipes is None:
43292 self.fd_pipes = {}
43293 - self.fd_pipes.setdefault(0, sys.stdin.fileno())
43294 + else:
43295 + self.fd_pipes = self.fd_pipes.copy()
43296 + self.fd_pipes.setdefault(0, portage._get_stdin().fileno())
43297
43298 super(MergeProcess, self)._start()
43299
43300 @@ -90,7 +101,7 @@ class MergeProcess(SpawnProcess):
43301 reporter(msg, phase=phase, key=key, out=out)
43302
43303 if event & self.scheduler.IO_HUP:
43304 - self.scheduler.unregister(self._elog_reg_id)
43305 + self.scheduler.source_remove(self._elog_reg_id)
43306 self._elog_reg_id = None
43307 os.close(self._elog_reader_fd)
43308 self._elog_reader_fd = None
43309 @@ -101,12 +112,24 @@ class MergeProcess(SpawnProcess):
43310 def _spawn(self, args, fd_pipes, **kwargs):
43311 """
43312 Fork a subprocess, apply local settings, and call
43313 - dblink.merge().
43314 + dblink.merge(). TODO: Share code with ForkProcess.
43315 """
43316
43317 elog_reader_fd, elog_writer_fd = os.pipe()
43318 +
43319 fcntl.fcntl(elog_reader_fd, fcntl.F_SETFL,
43320 fcntl.fcntl(elog_reader_fd, fcntl.F_GETFL) | os.O_NONBLOCK)
43321 +
43322 + # FD_CLOEXEC is enabled by default in Python >=3.4.
43323 + if sys.hexversion < 0x3040000:
43324 + try:
43325 + fcntl.FD_CLOEXEC
43326 + except AttributeError:
43327 + pass
43328 + else:
43329 + fcntl.fcntl(elog_reader_fd, fcntl.F_SETFD,
43330 + fcntl.fcntl(elog_reader_fd, fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
43331 +
43332 blockers = None
43333 if self.blockers is not None:
43334 # Query blockers in the main process, since closing
43335 @@ -116,10 +139,9 @@ class MergeProcess(SpawnProcess):
43336 blockers = self.blockers()
43337 mylink = portage.dblink(self.mycat, self.mypkg, settings=self.settings,
43338 treetype=self.treetype, vartree=self.vartree,
43339 - blockers=blockers, scheduler=self.scheduler,
43340 - pipe=elog_writer_fd)
43341 + blockers=blockers, pipe=elog_writer_fd)
43342 fd_pipes[elog_writer_fd] = elog_writer_fd
43343 - self._elog_reg_id = self.scheduler.register(elog_reader_fd,
43344 + self._elog_reg_id = self.scheduler.io_add_watch(elog_reader_fd,
43345 self._registered_events, self._elog_output_handler)
43346
43347 # If a concurrent emerge process tries to install a package
43348 @@ -133,88 +155,100 @@ class MergeProcess(SpawnProcess):
43349 if not self.unmerge:
43350 counter = self.vartree.dbapi.counter_tick()
43351
43352 - pid = os.fork()
43353 - if pid != 0:
43354 - if not isinstance(pid, int):
43355 - raise AssertionError(
43356 - "fork returned non-integer: %s" % (repr(pid),))
43357 -
43358 - os.close(elog_writer_fd)
43359 - self._elog_reader_fd = elog_reader_fd
43360 - self._buf = ""
43361 - self._elog_keys = set()
43362 -
43363 - # invalidate relevant vardbapi caches
43364 - if self.vartree.dbapi._categories is not None:
43365 - self.vartree.dbapi._categories = None
43366 - self.vartree.dbapi._pkgs_changed = True
43367 - self.vartree.dbapi._clear_pkg_cache(mylink)
43368 -
43369 - portage.process.spawned_pids.append(pid)
43370 - return [pid]
43371 -
43372 - os.close(elog_reader_fd)
43373 - portage.locks._close_fds()
43374 - # Disable close_fds since we don't exec (see _setup_pipes docstring).
43375 - portage.process._setup_pipes(fd_pipes, close_fds=False)
43376 -
43377 - # Use default signal handlers since the ones inherited
43378 - # from the parent process are irrelevant here.
43379 - signal.signal(signal.SIGINT, signal.SIG_DFL)
43380 - signal.signal(signal.SIGTERM, signal.SIG_DFL)
43381 -
43382 - portage.output.havecolor = self.settings.get('NOCOLOR') \
43383 - not in ('yes', 'true')
43384 -
43385 - # In this subprocess we want mylink._display_merge() to use
43386 - # stdout/stderr directly since they are pipes. This behavior
43387 - # is triggered when mylink._scheduler is None.
43388 - mylink._scheduler = None
43389 -
43390 - # Avoid wastful updates of the vdb cache.
43391 - self.vartree.dbapi._flush_cache_enabled = False
43392 -
43393 - # In this subprocess we don't want PORTAGE_BACKGROUND to
43394 - # suppress stdout/stderr output since they are pipes. We
43395 - # also don't want to open PORTAGE_LOG_FILE, since it will
43396 - # already be opened by the parent process, so we set the
43397 - # "subprocess" value for use in conditional logging code
43398 - # involving PORTAGE_LOG_FILE.
43399 - if not self.unmerge:
43400 - # unmerge phases have separate logs
43401 - if self.settings.get("PORTAGE_BACKGROUND") == "1":
43402 - self.settings["PORTAGE_BACKGROUND_UNMERGE"] = "1"
43403 - else:
43404 - self.settings["PORTAGE_BACKGROUND_UNMERGE"] = "0"
43405 - self.settings.backup_changes("PORTAGE_BACKGROUND_UNMERGE")
43406 - self.settings["PORTAGE_BACKGROUND"] = "subprocess"
43407 - self.settings.backup_changes("PORTAGE_BACKGROUND")
43408 -
43409 - rval = 1
43410 + parent_pid = os.getpid()
43411 + pid = None
43412 try:
43413 - if self.unmerge:
43414 - if not mylink.exists():
43415 - rval = os.EX_OK
43416 - elif mylink.unmerge(
43417 - ldpath_mtimes=self.prev_mtimes) == os.EX_OK:
43418 - mylink.lockdb()
43419 - try:
43420 - mylink.delete()
43421 - finally:
43422 - mylink.unlockdb()
43423 - rval = os.EX_OK
43424 - else:
43425 - rval = mylink.merge(self.pkgloc, self.infloc,
43426 - myebuild=self.myebuild, mydbapi=self.mydbapi,
43427 - prev_mtimes=self.prev_mtimes, counter=counter)
43428 - except SystemExit:
43429 - raise
43430 - except:
43431 - traceback.print_exc()
43432 + pid = os.fork()
43433 +
43434 + if pid != 0:
43435 + if not isinstance(pid, int):
43436 + raise AssertionError(
43437 + "fork returned non-integer: %s" % (repr(pid),))
43438 +
43439 + os.close(elog_writer_fd)
43440 + self._elog_reader_fd = elog_reader_fd
43441 + self._buf = ""
43442 + self._elog_keys = set()
43443 + # Discard messages which will be collected by the subprocess,
43444 + # in order to avoid duplicates (bug #446136).
43445 + portage.elog.messages.collect_messages(key=mylink.mycpv)
43446 +
43447 + # invalidate relevant vardbapi caches
43448 + if self.vartree.dbapi._categories is not None:
43449 + self.vartree.dbapi._categories = None
43450 + self.vartree.dbapi._pkgs_changed = True
43451 + self.vartree.dbapi._clear_pkg_cache(mylink)
43452 +
43453 + return [pid]
43454 +
43455 + os.close(elog_reader_fd)
43456 +
43457 + # Use default signal handlers in order to avoid problems
43458 + # killing subprocesses as reported in bug #353239.
43459 + signal.signal(signal.SIGINT, signal.SIG_DFL)
43460 + signal.signal(signal.SIGTERM, signal.SIG_DFL)
43461 +
43462 + portage.locks._close_fds()
43463 + # We don't exec, so use close_fds=False
43464 + # (see _setup_pipes docstring).
43465 + portage.process._setup_pipes(fd_pipes, close_fds=False)
43466 +
43467 + portage.output.havecolor = self.settings.get('NOCOLOR') \
43468 + not in ('yes', 'true')
43469 +
43470 + # Avoid wastful updates of the vdb cache.
43471 + self.vartree.dbapi._flush_cache_enabled = False
43472 +
43473 + # In this subprocess we don't want PORTAGE_BACKGROUND to
43474 + # suppress stdout/stderr output since they are pipes. We
43475 + # also don't want to open PORTAGE_LOG_FILE, since it will
43476 + # already be opened by the parent process, so we set the
43477 + # "subprocess" value for use in conditional logging code
43478 + # involving PORTAGE_LOG_FILE.
43479 + if not self.unmerge:
43480 + # unmerge phases have separate logs
43481 + if self.settings.get("PORTAGE_BACKGROUND") == "1":
43482 + self.settings["PORTAGE_BACKGROUND_UNMERGE"] = "1"
43483 + else:
43484 + self.settings["PORTAGE_BACKGROUND_UNMERGE"] = "0"
43485 + self.settings.backup_changes("PORTAGE_BACKGROUND_UNMERGE")
43486 + self.settings["PORTAGE_BACKGROUND"] = "subprocess"
43487 + self.settings.backup_changes("PORTAGE_BACKGROUND")
43488 +
43489 + rval = 1
43490 + try:
43491 + if self.unmerge:
43492 + if not mylink.exists():
43493 + rval = os.EX_OK
43494 + elif mylink.unmerge(
43495 + ldpath_mtimes=self.prev_mtimes) == os.EX_OK:
43496 + mylink.lockdb()
43497 + try:
43498 + mylink.delete()
43499 + finally:
43500 + mylink.unlockdb()
43501 + rval = os.EX_OK
43502 + else:
43503 + rval = mylink.merge(self.pkgloc, self.infloc,
43504 + myebuild=self.myebuild, mydbapi=self.mydbapi,
43505 + prev_mtimes=self.prev_mtimes, counter=counter)
43506 + except SystemExit:
43507 + raise
43508 + except:
43509 + traceback.print_exc()
43510 + # os._exit() skips stderr flush!
43511 + sys.stderr.flush()
43512 + finally:
43513 + os._exit(rval)
43514 +
43515 finally:
43516 - # Call os._exit() from finally block, in order to suppress any
43517 - # finally blocks from earlier in the call stack. See bug #345289.
43518 - os._exit(rval)
43519 + if pid == 0 or (pid is None and os.getpid() != parent_pid):
43520 + # Call os._exit() from a finally block in order
43521 + # to suppress any finally blocks from earlier
43522 + # in the call stack (see bug #345289). This
43523 + # finally block has to be setup before the fork
43524 + # in order to avoid a race condition.
43525 + os._exit(1)
43526
43527 def _unregister(self):
43528 """
43529 @@ -231,7 +265,7 @@ class MergeProcess(SpawnProcess):
43530
43531 self._unlock_vdb()
43532 if self._elog_reg_id is not None:
43533 - self.scheduler.unregister(self._elog_reg_id)
43534 + self.scheduler.source_remove(self._elog_reg_id)
43535 self._elog_reg_id = None
43536 if self._elog_reader_fd is not None:
43537 os.close(self._elog_reader_fd)
43538
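The pipe setup in the MergeProcess hunk above guards the FD_CLOEXEC call for older interpreters. Stripped of the MergeProcess context, it reduces to roughly the following, using only the standard library:

    import fcntl
    import os
    import sys

    reader_fd, writer_fd = os.pipe()

    # Non-blocking reads on the parent's end of the pipe.
    fcntl.fcntl(reader_fd, fcntl.F_SETFL,
        fcntl.fcntl(reader_fd, fcntl.F_GETFL) | os.O_NONBLOCK)

    # FD_CLOEXEC is enabled by default for os.pipe() in Python >= 3.4.
    if sys.hexversion < 0x3040000 and hasattr(fcntl, "FD_CLOEXEC"):
        fcntl.fcntl(reader_fd, fcntl.F_SETFD,
            fcntl.fcntl(reader_fd, fcntl.F_GETFD) | fcntl.FD_CLOEXEC)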
43539 diff --git a/pym/portage/dbapi/_SyncfsProcess.py b/pym/portage/dbapi/_SyncfsProcess.py
43540 new file mode 100644
43541 index 0000000..7518214
43542 --- /dev/null
43543 +++ b/pym/portage/dbapi/_SyncfsProcess.py
43544 @@ -0,0 +1,53 @@
43545 +# Copyright 2012 Gentoo Foundation
43546 +# Distributed under the terms of the GNU General Public License v2
43547 +
43548 +from portage import os
43549 +from portage.util._ctypes import find_library, LoadLibrary
43550 +from portage.util._async.ForkProcess import ForkProcess
43551 +
43552 +class SyncfsProcess(ForkProcess):
43553 + """
43554 + Isolate ctypes usage in a subprocess, in order to avoid
43555 + potential problems with stale cached libraries as
43556 + described in bug #448858, comment #14 (also see
43557 + http://bugs.python.org/issue14597).
43558 + """
43559 +
43560 + __slots__ = ('paths',)
43561 +
43562 + @staticmethod
43563 + def _get_syncfs():
43564 +
43565 + filename = find_library("c")
43566 + if filename is not None:
43567 + library = LoadLibrary(filename)
43568 + if library is not None:
43569 + try:
43570 + return library.syncfs
43571 + except AttributeError:
43572 + pass
43573 +
43574 + return None
43575 +
43576 + def _run(self):
43577 +
43578 + syncfs_failed = False
43579 + syncfs = self._get_syncfs()
43580 +
43581 + if syncfs is not None:
43582 + for path in self.paths:
43583 + try:
43584 + fd = os.open(path, os.O_RDONLY)
43585 + except OSError:
43586 + pass
43587 + else:
43588 + try:
43589 + if syncfs(fd) != 0:
43590 + # Happens with PyPy (bug #446610)
43591 + syncfs_failed = True
43592 + finally:
43593 + os.close(fd)
43594 +
43595 + if syncfs is None or syncfs_failed:
43596 + return 1
43597 + return os.EX_OK
43598
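The new SyncfsProcess isolates its ctypes usage in a forked child; outside of that machinery, the underlying libc lookup it performs looks roughly like this on Linux. The path is only an example, and syncfs(2) may simply be absent from older libcs.

    import ctypes
    import ctypes.util
    import os

    syncfs = None
    libc_name = ctypes.util.find_library("c")
    if libc_name is not None:
        libc = ctypes.CDLL(libc_name, use_errno=True)
        # Returns None if the symbol is missing from this libc.
        syncfs = getattr(libc, "syncfs", None)

    if syncfs is None:
        print("syncfs not available")
    else:
        fd = os.open("/tmp", os.O_RDONLY)
        try:
            if syncfs(fd) != 0:
                print("syncfs failed")
        finally:
            os.close(fd)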
43599 diff --git a/pym/portage/dbapi/__init__.py b/pym/portage/dbapi/__init__.py
43600 index b999fb5..a20a1e8 100644
43601 --- a/pym/portage/dbapi/__init__.py
43602 +++ b/pym/portage/dbapi/__init__.py
43603 @@ -1,6 +1,8 @@
43604 -# Copyright 1998-2012 Gentoo Foundation
43605 +# Copyright 1998-2013 Gentoo Foundation
43606 # Distributed under the terms of the GNU General Public License v2
43607
43608 +from __future__ import unicode_literals
43609 +
43610 __all__ = ["dbapi"]
43611
43612 import re
43613 @@ -16,16 +18,18 @@ portage.proxy.lazyimport.lazyimport(globals(),
43614
43615 from portage import os
43616 from portage import auxdbkeys
43617 +from portage.eapi import _get_eapi_attrs
43618 from portage.exception import InvalidData
43619 from portage.localization import _
43620 +from _emerge.Package import Package
43621
43622 class dbapi(object):
43623 - _category_re = re.compile(r'^\w[-.+\w]*$')
43624 + _category_re = re.compile(r'^\w[-.+\w]*$', re.UNICODE)
43625 _categories = None
43626 _use_mutable = False
43627 _known_keys = frozenset(x for x in auxdbkeys
43628 if not x.startswith("UNUSED_0"))
43629 - _pkg_str_aux_keys = ("EAPI", "SLOT", "repository")
43630 + _pkg_str_aux_keys = ("EAPI", "KEYWORDS", "SLOT", "repository")
43631
43632 def __init__(self):
43633 pass
43634 @@ -153,8 +157,7 @@ class dbapi(object):
43635 metadata = dict(zip(self._pkg_str_aux_keys,
43636 self.aux_get(cpv, self._pkg_str_aux_keys, myrepo=repo)))
43637
43638 - return _pkg_str(cpv, slot=metadata["SLOT"],
43639 - repo=metadata["repository"], eapi=metadata["EAPI"])
43640 + return _pkg_str(cpv, metadata=metadata, settings=self.settings)
43641
43642 def _iter_match_repo(self, atom, cpv_iter):
43643 for cpv in cpv_iter:
43644 @@ -182,7 +185,7 @@ class dbapi(object):
43645 2) Check enabled/disabled flag states.
43646 """
43647
43648 - aux_keys = ["IUSE", "SLOT", "USE", "repository"]
43649 + aux_keys = ["EAPI", "IUSE", "KEYWORDS", "SLOT", "USE", "repository"]
43650 for cpv in cpv_iter:
43651 try:
43652 metadata = dict(zip(aux_keys,
43653 @@ -190,17 +193,31 @@ class dbapi(object):
43654 except KeyError:
43655 continue
43656
43657 + try:
43658 + cpv.slot
43659 + except AttributeError:
43660 + try:
43661 + cpv = _pkg_str(cpv, metadata=metadata,
43662 + settings=self.settings)
43663 + except InvalidData:
43664 + continue
43665 +
43666 if not self._match_use(atom, cpv, metadata):
43667 continue
43668
43669 yield cpv
43670
43671 - def _match_use(self, atom, cpv, metadata):
43672 - iuse_implicit_match = self.settings._iuse_implicit_match
43673 - iuse = frozenset(x.lstrip('+-') for x in metadata["IUSE"].split())
43674 + def _match_use(self, atom, pkg, metadata):
43675 + eapi_attrs = _get_eapi_attrs(metadata["EAPI"])
43676 + if eapi_attrs.iuse_effective:
43677 + iuse_implicit_match = self.settings._iuse_effective_match
43678 + else:
43679 + iuse_implicit_match = self.settings._iuse_implicit_match
43680 + usealiases = self.settings._use_manager.getUseAliases(pkg)
43681 + iuse = Package._iuse(None, metadata["IUSE"].split(), iuse_implicit_match, usealiases, metadata["EAPI"])
43682
43683 for x in atom.unevaluated_atom.use.required:
43684 - if x not in iuse and not iuse_implicit_match(x):
43685 + if iuse.get_real_flag(x) is None:
43686 return False
43687
43688 if atom.use is None:
43689 @@ -210,44 +227,54 @@ class dbapi(object):
43690 # Use IUSE to validate USE settings for built packages,
43691 # in case the package manager that built this package
43692 # failed to do that for some reason (or in case of
43693 - # data corruption).
43694 - use = frozenset(x for x in metadata["USE"].split()
43695 - if x in iuse or iuse_implicit_match(x))
43696 - missing_enabled = atom.use.missing_enabled.difference(iuse)
43697 - missing_disabled = atom.use.missing_disabled.difference(iuse)
43698 -
43699 - if atom.use.enabled:
43700 - if any(x in atom.use.enabled for x in missing_disabled):
43701 + # data corruption). The enabled flags must be consistent
43702 + # with implicit IUSE, in order to avoid potential
43703 + # inconsistencies in USE dep matching (see bug #453400).
43704 + use = frozenset(x for x in metadata["USE"].split() if iuse.get_real_flag(x) is not None)
43705 + missing_enabled = frozenset(x for x in atom.use.missing_enabled if iuse.get_real_flag(x) is None)
43706 + missing_disabled = frozenset(x for x in atom.use.missing_disabled if iuse.get_real_flag(x) is None)
43707 + enabled = frozenset((iuse.get_real_flag(x) or x) for x in atom.use.enabled)
43708 + disabled = frozenset((iuse.get_real_flag(x) or x) for x in atom.use.disabled)
43709 +
43710 + if enabled:
43711 + if any(x in enabled for x in missing_disabled):
43712 return False
43713 - need_enabled = atom.use.enabled.difference(use)
43714 + need_enabled = enabled.difference(use)
43715 if need_enabled:
43716 if any(x not in missing_enabled for x in need_enabled):
43717 return False
43718
43719 - if atom.use.disabled:
43720 - if any(x in atom.use.disabled for x in missing_enabled):
43721 + if disabled:
43722 + if any(x in disabled for x in missing_enabled):
43723 return False
43724 - need_disabled = atom.use.disabled.intersection(use)
43725 + need_disabled = disabled.intersection(use)
43726 if need_disabled:
43727 if any(x not in missing_disabled for x in need_disabled):
43728 return False
43729
43730 elif not self.settings.local_config:
43731 # Check masked and forced flags for repoman.
43732 - if hasattr(cpv, 'slot'):
43733 - pkg = cpv
43734 - else:
43735 - pkg = _pkg_str(cpv, slot=metadata["SLOT"],
43736 - repo=metadata.get("repository"))
43737 - usemask = self.settings._getUseMask(pkg)
43738 + usemask = self.settings._getUseMask(pkg,
43739 + stable=self.settings._parent_stable)
43740 if any(x in usemask for x in atom.use.enabled):
43741 return False
43742
43743 - useforce = self.settings._getUseForce(pkg)
43744 + useforce = self.settings._getUseForce(pkg,
43745 + stable=self.settings._parent_stable)
43746 if any(x in useforce and x not in usemask
43747 for x in atom.use.disabled):
43748 return False
43749
43750 + # Check unsatisfied use-default deps
43751 + if atom.use.enabled:
43752 + missing_disabled = frozenset(x for x in atom.use.missing_disabled if iuse.get_real_flag(x) is None)
43753 + if any(x in atom.use.enabled for x in missing_disabled):
43754 + return False
43755 + if atom.use.disabled:
43756 + missing_enabled = frozenset(x for x in atom.use.missing_enabled if iuse.get_real_flag(x) is None)
43757 + if any(x in atom.use.disabled for x in missing_enabled):
43758 + return False
43759 +
43760 return True
43761
43762 def invalidentry(self, mypath):
43763 @@ -275,7 +302,8 @@ class dbapi(object):
43764 maxval = len(cpv_all)
43765 aux_get = self.aux_get
43766 aux_update = self.aux_update
43767 - meta_keys = ["DEPEND", "EAPI", "RDEPEND", "PDEPEND", "PROVIDE", 'repository']
43768 + update_keys = Package._dep_keys + ("PROVIDE",)
43769 + meta_keys = update_keys + self._pkg_str_aux_keys
43770 repo_dict = None
43771 if isinstance(updates, dict):
43772 repo_dict = updates
43773 @@ -284,14 +312,20 @@ class dbapi(object):
43774 if onProgress:
43775 onProgress(maxval, 0)
43776 for i, cpv in enumerate(cpv_all):
43777 - metadata = dict(zip(meta_keys, aux_get(cpv, meta_keys)))
43778 - eapi = metadata.pop('EAPI')
43779 - repo = metadata.pop('repository')
43780 + try:
43781 + metadata = dict(zip(meta_keys, aux_get(cpv, meta_keys)))
43782 + except KeyError:
43783 + continue
43784 + try:
43785 + pkg = _pkg_str(cpv, metadata=metadata, settings=self.settings)
43786 + except InvalidData:
43787 + continue
43788 + metadata = dict((k, metadata[k]) for k in update_keys)
43789 if repo_dict is None:
43790 updates_list = updates
43791 else:
43792 try:
43793 - updates_list = repo_dict[repo]
43794 + updates_list = repo_dict[pkg.repo]
43795 except KeyError:
43796 try:
43797 updates_list = repo_dict['DEFAULT']
43798 @@ -302,7 +336,7 @@ class dbapi(object):
43799 continue
43800
43801 metadata_updates = \
43802 - portage.update_dbentries(updates_list, metadata, eapi=eapi)
43803 + portage.update_dbentries(updates_list, metadata, parent=pkg)
43804 if metadata_updates:
43805 aux_update(cpv, metadata_updates)
43806 if onUpdate:
43807 @@ -343,9 +377,9 @@ class dbapi(object):
43808 continue
43809 moves += 1
43810 if "/" not in newslot and \
43811 - mycpv.slot_abi and \
43812 - mycpv.slot_abi not in (mycpv.slot, newslot):
43813 - newslot = "%s/%s" % (newslot, mycpv.slot_abi)
43814 + mycpv.sub_slot and \
43815 + mycpv.sub_slot not in (mycpv.slot, newslot):
43816 + newslot = "%s/%s" % (newslot, mycpv.sub_slot)
43817 mydata = {"SLOT": newslot+"\n"}
43818 self.aux_update(mycpv, mydata)
43819 return moves
43820
43821 diff --git a/pym/portage/dbapi/_expand_new_virt.py b/pym/portage/dbapi/_expand_new_virt.py
43822 index d379b4c..9aa603d 100644
43823 --- a/pym/portage/dbapi/_expand_new_virt.py
43824 +++ b/pym/portage/dbapi/_expand_new_virt.py
43825 @@ -1,8 +1,11 @@
43826 -# Copyright 2011 Gentoo Foundation
43827 +# Copyright 2011-2013 Gentoo Foundation
43828 # Distributed under the terms of the GNU General Public License v2
43829
43830 +from __future__ import unicode_literals
43831 +
43832 import portage
43833 from portage.dep import Atom, _get_useflag_re
43834 +from portage.eapi import _get_eapi_attrs
43835
43836 def expand_new_virt(vardb, atom):
43837 """
43838 @@ -44,6 +47,7 @@ def expand_new_virt(vardb, atom):
43839 yield atom
43840 continue
43841
43842 + eapi_attrs = _get_eapi_attrs(eapi)
43843 # Validate IUSE and USE, for early detection of vardb corruption.
43844 useflag_re = _get_useflag_re(eapi)
43845 valid_iuse = []
43846 @@ -54,7 +58,11 @@ def expand_new_virt(vardb, atom):
43847 valid_iuse.append(x)
43848 valid_iuse = frozenset(valid_iuse)
43849
43850 - iuse_implicit_match = vardb.settings._iuse_implicit_match
43851 + if eapi_attrs.iuse_effective:
43852 + iuse_implicit_match = vardb.settings._iuse_effective_match
43853 + else:
43854 + iuse_implicit_match = vardb.settings._iuse_implicit_match
43855 +
43856 valid_use = []
43857 for x in use.split():
43858 if x in valid_iuse or iuse_implicit_match(x):
43859
43860 diff --git a/pym/portage/dbapi/_similar_name_search.py b/pym/portage/dbapi/_similar_name_search.py
43861 new file mode 100644
43862 index 0000000..b6e4a1f
43863 --- /dev/null
43864 +++ b/pym/portage/dbapi/_similar_name_search.py
43865 @@ -0,0 +1,57 @@
43866 +# Copyright 2011-2012 Gentoo Foundation
43867 +# Distributed under the terms of the GNU General Public License v2
43868 +
43869 +import difflib
43870 +
43871 +from portage.versions import catsplit
43872 +
43873 +def similar_name_search(dbs, atom):
43874 +
43875 + cp_lower = atom.cp.lower()
43876 + cat, pkg = catsplit(cp_lower)
43877 + if cat == "null":
43878 + cat = None
43879 +
43880 + all_cp = set()
43881 + for db in dbs:
43882 + all_cp.update(db.cp_all())
43883 +
43884 + # discard dir containing no ebuilds
43885 + all_cp.discard(atom.cp)
43886 +
43887 + orig_cp_map = {}
43888 + for cp_orig in all_cp:
43889 + orig_cp_map.setdefault(cp_orig.lower(), []).append(cp_orig)
43890 + all_cp = set(orig_cp_map)
43891 +
43892 + if cat:
43893 + matches = difflib.get_close_matches(cp_lower, all_cp)
43894 + else:
43895 + pkg_to_cp = {}
43896 + for other_cp in list(all_cp):
43897 + other_pkg = catsplit(other_cp)[1]
43898 + if other_pkg == pkg:
43899 + # Check for non-identical package that
43900 + # differs only by upper/lower case.
43901 + identical = True
43902 + for cp_orig in orig_cp_map[other_cp]:
43903 + if catsplit(cp_orig)[1] != \
43904 + catsplit(atom.cp)[1]:
43905 + identical = False
43906 + break
43907 + if identical:
43908 + # discard dir containing no ebuilds
43909 + all_cp.discard(other_cp)
43910 + continue
43911 + pkg_to_cp.setdefault(other_pkg, set()).add(other_cp)
43912 +
43913 + pkg_matches = difflib.get_close_matches(pkg, pkg_to_cp)
43914 + matches = []
43915 + for pkg_match in pkg_matches:
43916 + matches.extend(pkg_to_cp[pkg_match])
43917 +
43918 + matches_orig_case = []
43919 + for cp in matches:
43920 + matches_orig_case.extend(orig_cp_map[cp])
43921 +
43922 + return matches_orig_case
43923
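As a side note (not part of the commit), the new similar_name_search module builds on difflib's fuzzy matching to suggest likely package names for a misspelled atom. A standalone sketch of the underlying idea, with made-up category/package names:

    import difflib

    # Candidate category/package names, lower-cased as the helper does.
    all_cp = ["dev-lang/python", "app-editors/vim", "sys-apps/portage"]

    # A misspelled name, as a user might type it on the command line.
    query = "dev-lang/pyton"

    # get_close_matches() returns up to n candidates above the cutoff ratio.
    print(difflib.get_close_matches(query, all_cp, n=3, cutoff=0.6))
    # -> ['dev-lang/python']
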
43924 diff --git a/pym/portage/dbapi/bintree.py b/pym/portage/dbapi/bintree.py
43925 index 9527b07..229ce3b 100644
43926 --- a/pym/portage/dbapi/bintree.py
43927 +++ b/pym/portage/dbapi/bintree.py
43928 @@ -1,11 +1,14 @@
43929 -# Copyright 1998-2012 Gentoo Foundation
43930 +# Copyright 1998-2014 Gentoo Foundation
43931 # Distributed under the terms of the GNU General Public License v2
43932
43933 +from __future__ import unicode_literals
43934 +
43935 __all__ = ["bindbapi", "binarytree"]
43936
43937 import portage
43938 portage.proxy.lazyimport.lazyimport(globals(),
43939 - 'portage.checksum:hashfunc_map,perform_multiple_checksums,verify_all',
43940 + 'portage.checksum:hashfunc_map,perform_multiple_checksums,' + \
43941 + 'verify_all,_apply_hash_filter,_hash_filter',
43942 'portage.dbapi.dep_expand:dep_expand',
43943 'portage.dep:dep_getkey,isjustname,isvalidatom,match_from_list',
43944 'portage.output:EOutput,colorize',
43945 @@ -24,7 +27,7 @@ from portage.const import CACHE_PATH
43946 from portage.dbapi.virtual import fakedbapi
43947 from portage.dep import Atom, use_reduce, paren_enclose
43948 from portage.exception import AlarmSignal, InvalidData, InvalidPackageName, \
43949 - PermissionDenied, PortageException
43950 + ParseError, PermissionDenied, PortageException
43951 from portage.localization import _
43952 from portage import _movefile
43953 from portage import os
43954 @@ -40,7 +43,9 @@ import subprocess
43955 import sys
43956 import tempfile
43957 import textwrap
43958 +import traceback
43959 import warnings
43960 +from gzip import GzipFile
43961 from itertools import chain
43962 try:
43963 from urllib.parse import urlparse
43964 @@ -48,12 +53,18 @@ except ImportError:
43965 from urlparse import urlparse
43966
43967 if sys.hexversion >= 0x3000000:
43968 + # pylint: disable=W0622
43969 _unicode = str
43970 basestring = str
43971 long = int
43972 else:
43973 _unicode = unicode
43974
43975 +class UseCachedCopyOfRemoteIndex(Exception):
43976 + # If the local copy is recent enough
43977 + # then fetching the remote index can be skipped.
43978 + pass
43979 +
43980 class bindbapi(fakedbapi):
43981 _known_keys = frozenset(list(fakedbapi._known_keys) + \
43982 ["CHOST", "repository", "USE"])
43983 @@ -65,9 +76,10 @@ class bindbapi(fakedbapi):
43984 self.cpdict={}
43985 # Selectively cache metadata in order to optimize dep matching.
43986 self._aux_cache_keys = set(
43987 - ["BUILD_TIME", "CHOST", "DEPEND", "EAPI", "IUSE", "KEYWORDS",
43988 + ["BUILD_TIME", "CHOST", "DEPEND", "EAPI",
43989 + "HDEPEND", "IUSE", "KEYWORDS",
43990 "LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE",
43991 - "RDEPEND", "repository", "RESTRICT", "SLOT", "USE", "DEFINED_PHASES",
43992 + "RDEPEND", "repository", "RESTRICT", "SLOT", "USE", "DEFINED_PHASES"
43993 ])
43994 self._aux_cache_slot_dict = slot_dict_class(self._aux_cache_keys)
43995 self._aux_cache = {}
43996 @@ -130,15 +142,15 @@ class bindbapi(fakedbapi):
43997 if myval:
43998 mydata[x] = " ".join(myval.split())
43999
44000 - if not mydata.setdefault('EAPI', _unicode_decode('0')):
44001 - mydata['EAPI'] = _unicode_decode('0')
44002 + if not mydata.setdefault('EAPI', '0'):
44003 + mydata['EAPI'] = '0'
44004
44005 if cache_me:
44006 aux_cache = self._aux_cache_slot_dict()
44007 for x in self._aux_cache_keys:
44008 - aux_cache[x] = mydata.get(x, _unicode_decode(''))
44009 + aux_cache[x] = mydata.get(x, '')
44010 self._aux_cache[mycpv] = aux_cache
44011 - return [mydata.get(x, _unicode_decode('')) for x in wants]
44012 + return [mydata.get(x, '') for x in wants]
44013
44014 def aux_update(self, cpv, values):
44015 if not self.bintree.populated:
44016 @@ -250,7 +262,7 @@ def _pkgindex_cpv_map_latest_build(pkgindex):
44017
44018 class binarytree(object):
44019 "this tree scans for a list of all packages available in PKGDIR"
44020 - def __init__(self, _unused=None, pkgdir=None,
44021 + def __init__(self, _unused=DeprecationWarning, pkgdir=None,
44022 virtual=DeprecationWarning, settings=None):
44023
44024 if pkgdir is None:
44025 @@ -259,11 +271,11 @@ class binarytree(object):
44026 if settings is None:
44027 raise TypeError("settings parameter is required")
44028
44029 - if _unused is not None and _unused != settings['ROOT']:
44030 - warnings.warn("The root parameter of the "
44031 + if _unused is not DeprecationWarning:
44032 + warnings.warn("The first parameter of the "
44033 "portage.dbapi.bintree.binarytree"
44034 - " constructor is now unused. Use "
44035 - "settings['ROOT'] instead.",
44036 + " constructor is now unused. Instead "
44037 + "settings['ROOT'] is used.",
44038 DeprecationWarning, stacklevel=2)
44039
44040 if virtual is not DeprecationWarning:
44041 @@ -295,22 +307,26 @@ class binarytree(object):
44042 self._pkgindex_keys.update(["CPV", "MTIME", "SIZE"])
44043 self._pkgindex_aux_keys = \
44044 ["BUILD_TIME", "CHOST", "DEPEND", "DESCRIPTION", "EAPI",
44045 - "IUSE", "KEYWORDS", "LICENSE", "PDEPEND", "PROPERTIES",
44046 - "PROVIDE", "RDEPEND", "repository", "SLOT", "USE", "DEFINED_PHASES",
44047 + "HDEPEND", "IUSE", "KEYWORDS", "LICENSE", "PDEPEND", "PROPERTIES",
44048 + "PROVIDE", "RESTRICT", "RDEPEND", "repository", "SLOT", "USE", "DEFINED_PHASES",
44049 "BASE_URI"]
44050 self._pkgindex_aux_keys = list(self._pkgindex_aux_keys)
44051 self._pkgindex_use_evaluated_keys = \
44052 - ("LICENSE", "RDEPEND", "DEPEND",
44053 - "PDEPEND", "PROPERTIES", "PROVIDE")
44054 + ("DEPEND", "HDEPEND", "LICENSE", "RDEPEND",
44055 + "PDEPEND", "PROPERTIES", "PROVIDE", "RESTRICT")
44056 self._pkgindex_header_keys = set([
44057 "ACCEPT_KEYWORDS", "ACCEPT_LICENSE",
44058 - "ACCEPT_PROPERTIES", "CBUILD",
44059 + "ACCEPT_PROPERTIES", "ACCEPT_RESTRICT", "CBUILD",
44060 "CONFIG_PROTECT", "CONFIG_PROTECT_MASK", "FEATURES",
44061 - "GENTOO_MIRRORS", "INSTALL_MASK", "SYNC", "USE"])
44062 + "GENTOO_MIRRORS", "INSTALL_MASK", "IUSE_IMPLICIT", "USE",
44063 + "USE_EXPAND", "USE_EXPAND_HIDDEN", "USE_EXPAND_IMPLICIT",
44064 + "USE_EXPAND_UNPREFIXED"])
44065 self._pkgindex_default_pkg_data = {
44066 "BUILD_TIME" : "",
44067 + "DEFINED_PHASES" : "",
44068 "DEPEND" : "",
44069 "EAPI" : "0",
44070 + "HDEPEND" : "",
44071 "IUSE" : "",
44072 "KEYWORDS": "",
44073 "LICENSE" : "",
44074 @@ -322,7 +338,6 @@ class binarytree(object):
44075 "RESTRICT": "",
44076 "SLOT" : "0",
44077 "USE" : "",
44078 - "DEFINED_PHASES" : "",
44079 }
44080 self._pkgindex_inherited_keys = ["CHOST", "repository"]
44081
44082 @@ -416,7 +431,7 @@ class binarytree(object):
44083 moves += 1
44084 mytbz2 = portage.xpak.tbz2(tbz2path)
44085 mydata = mytbz2.get_data()
44086 - updated_items = update_dbentries([mylist], mydata, eapi=mycpv.eapi)
44087 + updated_items = update_dbentries([mylist], mydata, parent=mycpv)
44088 mydata.update(updated_items)
44089 mydata[b'PF'] = \
44090 _unicode_encode(mynewpkg + "\n",
44091 @@ -552,6 +567,20 @@ class binarytree(object):
44092 if not os.path.isdir(path):
44093 raise
44094
44095 + def _file_permissions(self, path):
44096 + try:
44097 + pkgdir_st = os.stat(self.pkgdir)
44098 + except OSError:
44099 + pass
44100 + else:
44101 + pkgdir_gid = pkgdir_st.st_gid
44102 + pkgdir_grp_mode = 0o0060 & pkgdir_st.st_mode
44103 + try:
44104 + portage.util.apply_permissions(path, gid=pkgdir_gid,
44105 + mode=pkgdir_grp_mode, mask=0)
44106 + except PortageException:
44107 + pass
44108 +
44109 def _move_to_all(self, cpv):
44110 """If the file exists, move it. Whether or not it exists, update state
44111 for future getname() calls."""
44112 @@ -807,9 +836,7 @@ class binarytree(object):
44113 del pkgindex.packages[:]
44114 pkgindex.packages.extend(iter(metadata.values()))
44115 self._update_pkgindex_header(pkgindex.header)
44116 - f = atomic_ofstream(self._pkgindex_file)
44117 - pkgindex.write(f)
44118 - f.close()
44119 + self._pkgindex_write(pkgindex)
44120
44121 if getbinpkgs and not self.settings["PORTAGE_BINHOST"]:
44122 writemsg(_("!!! PORTAGE_BINHOST unset, but use is requested.\n"),
44123 @@ -852,6 +879,7 @@ class binarytree(object):
44124 if e.errno != errno.ENOENT:
44125 raise
44126 local_timestamp = pkgindex.header.get("TIMESTAMP", None)
44127 + remote_timestamp = None
44128 rmt_idx = self._new_pkgindex()
44129 proc = None
44130 tmp_filename = None
44131 @@ -860,41 +888,79 @@ class binarytree(object):
44132 # protocols and requires the base url to have a trailing
44133 # slash, so join manually...
44134 url = base_url.rstrip("/") + "/Packages"
44135 - try:
44136 - f = _urlopen(url)
44137 - except IOError:
44138 - path = parsed_url.path.rstrip("/") + "/Packages"
44139 + f = None
44140 +
44141 + # Don't use urlopen for https, since it doesn't support
44142 + # certificate/hostname verification (bug #469888).
44143 + if parsed_url.scheme not in ('https',):
44144 + try:
44145 + f = _urlopen(url, if_modified_since=local_timestamp)
44146 + if hasattr(f, 'headers') and f.headers.get('timestamp', ''):
44147 + remote_timestamp = f.headers.get('timestamp')
44148 + except IOError as err:
44149 + if hasattr(err, 'code') and err.code == 304: # not modified (since local_timestamp)
44150 + raise UseCachedCopyOfRemoteIndex()
44151 +
44152 + if parsed_url.scheme in ('ftp', 'http', 'https'):
44153 + # This protocol is supposedly supported by urlopen,
44154 + # so apparently there's a problem with the url
44155 + # or a bug in urlopen.
44156 + if self.settings.get("PORTAGE_DEBUG", "0") != "0":
44157 + traceback.print_exc()
44158
44159 - if parsed_url.scheme == 'sftp':
44160 - # The sftp command complains about 'Illegal seek' if
44161 - # we try to make it write to /dev/stdout, so use a
44162 - # temp file instead.
44163 - fd, tmp_filename = tempfile.mkstemp()
44164 - os.close(fd)
44165 - if port is not None:
44166 - port_args = ['-P', "%s" % (port,)]
44167 - proc = subprocess.Popen(['sftp'] + port_args + \
44168 - [user_passwd + host + ":" + path, tmp_filename])
44169 - if proc.wait() != os.EX_OK:
44170 raise
44171 - f = open(tmp_filename, 'rb')
44172 - elif parsed_url.scheme == 'ssh':
44173 + except ValueError:
44174 + raise ParseError("Invalid Portage BINHOST value '%s'"
44175 + % url.lstrip())
44176 +
44177 + if f is None:
44178 +
44179 + path = parsed_url.path.rstrip("/") + "/Packages"
44180 +
44181 + if parsed_url.scheme == 'ssh':
44182 + # Use a pipe so that we can terminate the download
44183 + # early if we detect that the TIMESTAMP header
44184 + # matches that of the cached Packages file.
44185 + ssh_args = ['ssh']
44186 if port is not None:
44187 - port_args = ['-p', "%s" % (port,)]
44188 - proc = subprocess.Popen(['ssh'] + port_args + \
44189 - [user_passwd + host, '--', 'cat', path],
44190 + ssh_args.append("-p%s" % (port,))
44191 + # NOTE: shlex evaluates embedded quotes
44192 + ssh_args.extend(portage.util.shlex_split(
44193 + self.settings.get("PORTAGE_SSH_OPTS", "")))
44194 + ssh_args.append(user_passwd + host)
44195 + ssh_args.append('--')
44196 + ssh_args.append('cat')
44197 + ssh_args.append(path)
44198 +
44199 + proc = subprocess.Popen(ssh_args,
44200 stdout=subprocess.PIPE)
44201 f = proc.stdout
44202 else:
44203 setting = 'FETCHCOMMAND_' + parsed_url.scheme.upper()
44204 fcmd = self.settings.get(setting)
44205 if not fcmd:
44206 - raise
44207 + fcmd = self.settings.get('FETCHCOMMAND')
44208 + if not fcmd:
44209 + raise EnvironmentError("FETCHCOMMAND is unset")
44210 +
44211 fd, tmp_filename = tempfile.mkstemp()
44212 tmp_dirname, tmp_basename = os.path.split(tmp_filename)
44213 os.close(fd)
44214 - success = portage.getbinpkg.file_get(url,
44215 - tmp_dirname, fcmd=fcmd, filename=tmp_basename)
44216 +
44217 + fcmd_vars = {
44218 + "DISTDIR": tmp_dirname,
44219 + "FILE": tmp_basename,
44220 + "URI": url
44221 + }
44222 +
44223 + for k in ("PORTAGE_SSH_OPTS",):
44224 + try:
44225 + fcmd_vars[k] = self.settings[k]
44226 + except KeyError:
44227 + pass
44228 +
44229 + success = portage.getbinpkg.file_get(
44230 + fcmd=fcmd, fcmd_vars=fcmd_vars)
44231 if not success:
44232 raise EnvironmentError("%s failed" % (setting,))
44233 f = open(tmp_filename, 'rb')
44234 @@ -903,7 +969,8 @@ class binarytree(object):
44235 _encodings['repo.content'], errors='replace')
44236 try:
44237 rmt_idx.readHeader(f_dec)
44238 - remote_timestamp = rmt_idx.header.get("TIMESTAMP", None)
44239 + if not remote_timestamp: # in case it had not been read from HTTP header
44240 + remote_timestamp = rmt_idx.header.get("TIMESTAMP", None)
44241 if not remote_timestamp:
44242 # no timestamp in the header, something's wrong
44243 pkgindex = None
44244 @@ -931,6 +998,12 @@ class binarytree(object):
44245 writemsg("\n\n!!! %s\n" % \
44246 _("Timed out while closing connection to binhost"),
44247 noiselevel=-1)
44248 + except UseCachedCopyOfRemoteIndex:
44249 + writemsg_stdout("\n")
44250 + writemsg_stdout(
44251 + colorize("GOOD", _("Local copy of remote index is up-to-date and will be used.")) + \
44252 + "\n")
44253 + rmt_idx = pkgindex
44254 except EnvironmentError as e:
44255 writemsg(_("\n\n!!! Error fetching binhost package" \
44256 " info from '%s'\n") % _hide_url_passwd(base_url))
44257 @@ -999,75 +1072,7 @@ class binarytree(object):
44258 # Local package instances override remote instances.
44259 for cpv in metadata:
44260 self._remotepkgs.pop(cpv, None)
44261 - continue
44262 - try:
44263 - chunk_size = long(self.settings["PORTAGE_BINHOST_CHUNKSIZE"])
44264 - if chunk_size < 8:
44265 - chunk_size = 8
44266 - except (ValueError, KeyError):
44267 - chunk_size = 3000
44268 - writemsg_stdout("\n")
44269 - writemsg_stdout(
44270 - colorize("GOOD", _("Fetching bininfo from ")) + \
44271 - _hide_url_passwd(base_url) + "\n")
44272 - remotepkgs = portage.getbinpkg.dir_get_metadata(
44273 - base_url, chunk_size=chunk_size)
44274 -
44275 - for mypkg, remote_metadata in remotepkgs.items():
44276 - mycat = remote_metadata.get("CATEGORY")
44277 - if mycat is None:
44278 - #old-style or corrupt package
44279 - writemsg(_("!!! Invalid remote binary package: %s\n") % mypkg,
44280 - noiselevel=-1)
44281 - continue
44282 - mycat = mycat.strip()
44283 - try:
44284 - fullpkg = _pkg_str(mycat+"/"+mypkg[:-5])
44285 - except InvalidData:
44286 - writemsg(_("!!! Invalid remote binary package: %s\n") % mypkg,
44287 - noiselevel=-1)
44288 - continue
44289 -
44290 - if fullpkg in metadata:
44291 - # When using this old protocol, comparison with the remote
44292 - # package isn't supported, so the local package is always
44293 - # preferred even if getbinpkgsonly is enabled.
44294 - continue
44295 -
44296 - if not self.dbapi._category_re.match(mycat):
44297 - writemsg(_("!!! Remote binary package has an " \
44298 - "unrecognized category: '%s'\n") % fullpkg,
44299 - noiselevel=-1)
44300 - writemsg(_("!!! '%s' has a category that is not" \
44301 - " listed in %setc/portage/categories\n") % \
44302 - (fullpkg, self.settings["PORTAGE_CONFIGROOT"]),
44303 - noiselevel=-1)
44304 - continue
44305 - mykey = portage.cpv_getkey(fullpkg)
44306 - try:
44307 - # invalid tbz2's can hurt things.
44308 - self.dbapi.cpv_inject(fullpkg)
44309 - for k, v in remote_metadata.items():
44310 - remote_metadata[k] = v.strip()
44311 - remote_metadata["BASE_URI"] = base_url
44312 -
44313 - # Eliminate metadata values with names that digestCheck
44314 - # uses, since they are not valid when using the old
44315 - # protocol. Typically this is needed for SIZE metadata
44316 - # which corresponds to the size of the unpacked files
44317 - # rather than the binpkg file size, triggering digest
44318 - # verification failures as reported in bug #303211.
44319 - remote_metadata.pop('SIZE', None)
44320 - for k in portage.checksum.hashfunc_map:
44321 - remote_metadata.pop(k, None)
44322 -
44323 - self._remotepkgs[fullpkg] = remote_metadata
44324 - except SystemExit as e:
44325 - raise
44326 - except:
44327 - writemsg(_("!!! Failed to inject remote binary package: %s\n") % fullpkg,
44328 - noiselevel=-1)
44329 - continue
44330 +
44331 self.populated=1
44332
44333 def inject(self, cpv, filename=None):
44334 @@ -1121,6 +1126,10 @@ class binarytree(object):
44335 if not samefile:
44336 self._ensure_dir(os.path.dirname(new_filename))
44337 _movefile(filename, new_filename, mysettings=self.settings)
44338 + full_path = new_filename
44339 +
44340 + self._file_permissions(full_path)
44341 +
44342 if self._all_directory and \
44343 self.getname(cpv).split(os.path.sep)[-2] == "All":
44344 self._create_symlink(cpv)
44345 @@ -1168,13 +1177,35 @@ class binarytree(object):
44346 pkgindex.packages.append(d)
44347
44348 self._update_pkgindex_header(pkgindex.header)
44349 - f = atomic_ofstream(os.path.join(self.pkgdir, "Packages"))
44350 - pkgindex.write(f)
44351 - f.close()
44352 + self._pkgindex_write(pkgindex)
44353 +
44354 finally:
44355 if pkgindex_lock:
44356 unlockfile(pkgindex_lock)
44357
44358 + def _pkgindex_write(self, pkgindex):
44359 + contents = codecs.getwriter(_encodings['repo.content'])(io.BytesIO())
44360 + pkgindex.write(contents)
44361 + contents = contents.getvalue()
44362 + atime = mtime = long(pkgindex.header["TIMESTAMP"])
44363 + output_files = [(atomic_ofstream(self._pkgindex_file, mode="wb"),
44364 + self._pkgindex_file, None)]
44365 +
44366 + if "compress-index" in self.settings.features:
44367 + gz_fname = self._pkgindex_file + ".gz"
44368 + fileobj = atomic_ofstream(gz_fname, mode="wb")
44369 + output_files.append((GzipFile(filename='', mode="wb",
44370 + fileobj=fileobj, mtime=mtime), gz_fname, fileobj))
44371 +
44372 + for f, fname, f_close in output_files:
44373 + f.write(contents)
44374 + f.close()
44375 + if f_close is not None:
44376 + f_close.close()
44377 + self._file_permissions(fname)
44378 + # some seconds might have elapsed since TIMESTAMP
44379 + os.utime(fname, (atime, mtime))
44380 +
44381 def _pkgindex_entry(self, cpv):
44382 """
44383 Performs checksums and evaluates USE flag conditionals.
44384 @@ -1234,6 +1265,16 @@ class binarytree(object):
44385 else:
44386 header.pop(k, None)
44387
44388 + # These values may be useful for using a binhost without
44389 + # having a local copy of the profile (bug #470006).
44390 + for k in self.settings.get("USE_EXPAND_IMPLICIT", "").split():
44391 + k = "USE_EXPAND_VALUES_" + k
44392 + v = self.settings.get(k)
44393 + if v:
44394 + header[k] = v
44395 + else:
44396 + header.pop(k, None)
44397 +
44398 def _pkgindex_version_supported(self, pkgindex):
44399 version = pkgindex.header.get("VERSION")
44400 if version:
44401 @@ -1246,11 +1287,6 @@ class binarytree(object):
44402
44403 def _eval_use_flags(self, cpv, metadata):
44404 use = frozenset(metadata["USE"].split())
44405 - raw_use = use
44406 - iuse = set(f.lstrip("-+") for f in metadata["IUSE"].split())
44407 - use = [f for f in use if f in iuse]
44408 - use.sort()
44409 - metadata["USE"] = " ".join(use)
44410 for k in self._pkgindex_use_evaluated_keys:
44411 if k.endswith('DEPEND'):
44412 token_class = Atom
44413 @@ -1259,7 +1295,7 @@ class binarytree(object):
44414
44415 try:
44416 deps = metadata[k]
44417 - deps = use_reduce(deps, uselist=raw_use, token_class=token_class)
44418 + deps = use_reduce(deps, uselist=use, token_class=token_class)
44419 deps = paren_enclose(deps)
44420 except portage.exception.InvalidDependString as e:
44421 writemsg("%s: %s\n" % (k, str(e)),
44422 @@ -1383,19 +1419,14 @@ class binarytree(object):
44423 f.close()
44424 return pkgindex
44425
44426 - def digestCheck(self, pkg):
44427 - """
44428 - Verify digests for the given package and raise DigestException
44429 - if verification fails.
44430 - @rtype: bool
44431 - @return: True if digests could be located, False otherwise.
44432 - """
44433 - cpv = pkg
44434 - if not isinstance(cpv, basestring):
44435 + def _get_digests(self, pkg):
44436 +
44437 + try:
44438 cpv = pkg.cpv
44439 - pkg = None
44440 + except AttributeError:
44441 + cpv = pkg
44442
44443 - pkg_path = self.getname(cpv)
44444 + digests = {}
44445 metadata = None
44446 if self._remotepkgs is None or cpv not in self._remotepkgs:
44447 for d in self._load_pkgindex().packages:
44448 @@ -1405,9 +1436,8 @@ class binarytree(object):
44449 else:
44450 metadata = self._remotepkgs[cpv]
44451 if metadata is None:
44452 - return False
44453 + return digests
44454
44455 - digests = {}
44456 for k in hashfunc_map:
44457 v = metadata.get(k)
44458 if not v:
44459 @@ -1421,9 +1451,31 @@ class binarytree(object):
44460 writemsg(_("!!! Malformed SIZE attribute in remote " \
44461 "metadata for '%s'\n") % cpv)
44462
44463 + return digests
44464 +
44465 + def digestCheck(self, pkg):
44466 + """
44467 + Verify digests for the given package and raise DigestException
44468 + if verification fails.
44469 + @rtype: bool
44470 + @return: True if digests could be located, False otherwise.
44471 + """
44472 +
44473 + digests = self._get_digests(pkg)
44474 +
44475 if not digests:
44476 return False
44477
44478 + try:
44479 + cpv = pkg.cpv
44480 + except AttributeError:
44481 + cpv = pkg
44482 +
44483 + pkg_path = self.getname(cpv)
44484 + hash_filter = _hash_filter(
44485 + self.settings.get("PORTAGE_CHECKSUM_FILTER", ""))
44486 + if not hash_filter.transparent:
44487 + digests = _apply_hash_filter(digests, hash_filter)
44488 eout = EOutput()
44489 eout.quiet = self.settings.get("PORTAGE_QUIET") == "1"
44490 ok, st = _check_distfile(pkg_path, digests, eout, show_errors=0)
44491 @@ -1439,9 +1491,7 @@ class binarytree(object):
44492 "Get a slot for a catpkg; assume it exists."
44493 myslot = ""
44494 try:
44495 - myslot = self.dbapi.aux_get(mycatpkg,["SLOT"])[0]
44496 - except SystemExit as e:
44497 - raise
44498 - except Exception as e:
44499 + myslot = self.dbapi._pkg_str(mycatpkg, None).slot
44500 + except KeyError:
44501 pass
44502 return myslot
44503
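For illustration only: the reworked binhost fetch above sends the local TIMESTAMP and treats HTTP 304 as "keep the cached Packages file"; the commit does this through portage's _urlopen wrapper and the UseCachedCopyOfRemoteIndex exception. A rough standard-library sketch of the same pattern, assuming the timestamp is already an HTTP-date string:

    try:
        from urllib.request import Request, urlopen
        from urllib.error import HTTPError
    except ImportError:  # Python 2
        from urllib2 import Request, urlopen, HTTPError

    def fetch_if_modified(url, local_timestamp=None):
        """Return the response body, or None if the server answers 304."""
        headers = {}
        if local_timestamp is not None:
            headers["If-Modified-Since"] = local_timestamp
        try:
            return urlopen(Request(url, headers=headers)).read()
        except HTTPError as e:
            if e.code == 304:
                # Not modified since local_timestamp: reuse the cached index.
                return None
            raise
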
44504 diff --git a/pym/portage/dbapi/cpv_expand.py b/pym/portage/dbapi/cpv_expand.py
44505 index 947194c..70ee782 100644
44506 --- a/pym/portage/dbapi/cpv_expand.py
44507 +++ b/pym/portage/dbapi/cpv_expand.py
44508 @@ -1,6 +1,8 @@
44509 -# Copyright 2010-2011 Gentoo Foundation
44510 +# Copyright 2010-2013 Gentoo Foundation
44511 # Distributed under the terms of the GNU General Public License v2
44512
44513 +from __future__ import unicode_literals
44514 +
44515 __all__ = ["cpv_expand"]
44516
44517 import portage
44518
44519 diff --git a/pym/portage/dbapi/dep_expand.py b/pym/portage/dbapi/dep_expand.py
44520 index ac8ccf4..3de5d8f 100644
44521 --- a/pym/portage/dbapi/dep_expand.py
44522 +++ b/pym/portage/dbapi/dep_expand.py
44523 @@ -1,6 +1,8 @@
44524 -# Copyright 2010 Gentoo Foundation
44525 +# Copyright 2010-2013 Gentoo Foundation
44526 # Distributed under the terms of the GNU General Public License v2
44527
44528 +from __future__ import unicode_literals
44529 +
44530 __all__ = ["dep_expand"]
44531
44532 import re
44533 @@ -23,7 +25,7 @@ def dep_expand(mydep, mydb=None, use_cache=1, settings=None):
44534 if mydep[0] == "*":
44535 mydep = mydep[1:]
44536 orig_dep = mydep
44537 - has_cat = '/' in orig_dep
44538 + has_cat = '/' in orig_dep.split(':')[0]
44539 if not has_cat:
44540 alphanum = re.search(r'\w', orig_dep)
44541 if alphanum:
44542
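Not part of the diff, but to illustrate the dep_expand change above: '/' is now looked for only in the part before the first ':', so a sub-slot in a user-supplied dep is no longer mistaken for a category separator. Roughly, with a hypothetical dep string:

    def has_category(dep):
        # Only the part before any slot specifier may contain "cat/pkg".
        return "/" in dep.split(":")[0]

    print(has_category("dev-libs/openssl"))  # True
    print(has_category("openssl:0/1.0.0"))   # False: that '/' is a sub-slot
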
44543 diff --git a/pym/portage/dbapi/porttree.py b/pym/portage/dbapi/porttree.py
44544 index 945c22c..590e3c5 100644
44545 --- a/pym/portage/dbapi/porttree.py
44546 +++ b/pym/portage/dbapi/porttree.py
44547 @@ -1,6 +1,8 @@
44548 -# Copyright 1998-2012 Gentoo Foundation
44549 +# Copyright 1998-2014 Gentoo Foundation
44550 # Distributed under the terms of the GNU General Public License v2
44551
44552 +from __future__ import unicode_literals
44553 +
44554 __all__ = [
44555 "close_portdbapi_caches", "FetchlistDict", "portagetree", "portdbapi"
44556 ]
44557 @@ -33,21 +35,75 @@ from portage import os
44558 from portage import _encodings
44559 from portage import _unicode_encode
44560 from portage import OrderedDict
44561 +from portage.util._eventloop.EventLoop import EventLoop
44562 +from portage.util._eventloop.global_event_loop import global_event_loop
44563 from _emerge.EbuildMetadataPhase import EbuildMetadataPhase
44564 -from _emerge.PollScheduler import PollScheduler
44565
44566 import os as _os
44567 import sys
44568 import traceback
44569 import warnings
44570
44571 +try:
44572 + from urllib.parse import urlparse
44573 +except ImportError:
44574 + from urlparse import urlparse
44575 +
44576 if sys.hexversion >= 0x3000000:
44577 + # pylint: disable=W0622
44578 basestring = str
44579 long = int
44580
44581 +def close_portdbapi_caches():
44582 + # The python interpreter does _not_ guarantee that destructors are
44583 + # called for objects that remain when the interpreter exits, so we
44584 + # use an atexit hook to call destructors for any global portdbapi
44585 + # instances that may have been constructed.
44586 + try:
44587 + portage._legacy_globals_constructed
44588 + except AttributeError:
44589 + pass
44590 + else:
44591 + if "db" in portage._legacy_globals_constructed:
44592 + try:
44593 + db = portage.db
44594 + except AttributeError:
44595 + pass
44596 + else:
44597 + if isinstance(db, dict):
44598 + for x in db.values():
44599 + try:
44600 + if "porttree" in x.lazy_items:
44601 + continue
44602 + except (AttributeError, TypeError):
44603 + continue
44604 + try:
44605 + x = x.pop("porttree").dbapi
44606 + except (AttributeError, KeyError):
44607 + continue
44608 + if not isinstance(x, portdbapi):
44609 + continue
44610 + x.close_caches()
44611 +
44612 +portage.process.atexit_register(close_portdbapi_caches)
44613 +
44614 +# It used to be necessary for API consumers to remove portdbapi instances
44615 +# from portdbapi_instances, in order to avoid having accumulated instances
44616 +# consume memory. Now, portdbapi_instances is just an empty dummy list, so
44617 +# for backward compatibility, ignore ValueError for removal on non-existent
44618 +# items.
44619 +class _dummy_list(list):
44620 + def remove(self, item):
44621 + # TODO: Trigger a DeprecationWarning here, after stable portage
44622 + # has dummy portdbapi_instances.
44623 + try:
44624 + list.remove(self, item)
44625 + except ValueError:
44626 + pass
44627 +
44628 class portdbapi(dbapi):
44629 """this tree will scan a portage directory located at root (passed to init)"""
44630 - portdbapi_instances = []
44631 + portdbapi_instances = _dummy_list()
44632 _use_mutable = True
44633
44634 @property
44635 @@ -56,23 +112,28 @@ class portdbapi(dbapi):
44636
44637 @property
44638 def porttree_root(self):
44639 + warnings.warn("portage.dbapi.porttree.portdbapi.porttree_root is deprecated in favor of portage.repository.config.RepoConfig.location "
44640 + "(available as repositories[repo_name].location attribute of instances of portage.dbapi.porttree.portdbapi class)",
44641 + DeprecationWarning, stacklevel=2)
44642 return self.settings.repositories.mainRepoLocation()
44643
44644 @property
44645 def eclassdb(self):
44646 + warnings.warn("portage.dbapi.porttree.portdbapi.eclassdb is deprecated in favor of portage.repository.config.RepoConfig.eclass_db "
44647 + "(available as repositories[repo_name].eclass_db attribute of instances of portage.dbapi.porttree.portdbapi class)",
44648 + DeprecationWarning, stacklevel=2)
44649 main_repo = self.repositories.mainRepo()
44650 if main_repo is None:
44651 return None
44652 return main_repo.eclass_db
44653
44654 - def __init__(self, _unused_param=None, mysettings=None):
44655 + def __init__(self, _unused_param=DeprecationWarning, mysettings=None):
44656 """
44657 @param _unused_param: deprecated, use mysettings['PORTDIR'] instead
44658 @type _unused_param: None
44659 @param mysettings: an immutable config instance
44660 @type mysettings: portage.config
44661 """
44662 - portdbapi.portdbapi_instances.append(self)
44663
44664 from portage import config
44665 if mysettings:
44666 @@ -81,7 +142,7 @@ class portdbapi(dbapi):
44667 from portage import settings
44668 self.settings = config(clone=settings)
44669
44670 - if _unused_param is not None:
44671 + if _unused_param is not DeprecationWarning:
44672 warnings.warn("The first parameter of the " + \
44673 "portage.dbapi.porttree.portdbapi" + \
44674 " constructor is unused since portage-2.1.8. " + \
44675 @@ -96,7 +157,6 @@ class portdbapi(dbapi):
44676 # this purpose because doebuild makes many changes to the config
44677 # instance that is passed in.
44678 self.doebuild_settings = config(clone=self.settings)
44679 - self._scheduler = PollScheduler().sched_iface
44680 self.depcachedir = os.path.realpath(self.settings.depcachedir)
44681
44682 if os.environ.get("SANDBOX_ON") == "1":
44683 @@ -153,10 +213,10 @@ class portdbapi(dbapi):
44684 # portage group.
44685 depcachedir_unshared = True
44686 else:
44687 - cache_kwargs.update({
44688 + cache_kwargs.update(portage._native_kwargs({
44689 'gid' : portage_gid,
44690 'perms' : 0o664
44691 - })
44692 + }))
44693
44694 # If secpass < 1, we don't want to write to the cache
44695 # since then we won't be able to apply group permissions
44696 @@ -187,13 +247,25 @@ class portdbapi(dbapi):
44697 self._pregen_auxdb[x] = cache
44698 # Selectively cache metadata in order to optimize dep matching.
44699 self._aux_cache_keys = set(
44700 - ["DEPEND", "EAPI", "INHERITED", "IUSE", "KEYWORDS", "LICENSE",
44701 + ["DEPEND", "EAPI", "HDEPEND",
44702 + "INHERITED", "IUSE", "KEYWORDS", "LICENSE",
44703 "PDEPEND", "PROPERTIES", "PROVIDE", "RDEPEND", "repository",
44704 "RESTRICT", "SLOT", "DEFINED_PHASES", "REQUIRED_USE"])
44705
44706 self._aux_cache = {}
44707 self._broken_ebuilds = set()
44708
44709 + @property
44710 + def _event_loop(self):
44711 + if portage._internal_caller:
44712 + # For internal portage usage, the global_event_loop is safe.
44713 + return global_event_loop()
44714 + else:
44715 + # For external API consumers, use a local EventLoop, since
44716 + # we don't want to assume that it's safe to override the
44717 + # global SIGCHLD handler.
44718 + return EventLoop(main=False)
44719 +
44720 def _create_pregen_cache(self, tree):
44721 conf = self.repositories.get_repo_for_location(tree)
44722 cache = conf.get_pregenerated_cache(
44723 @@ -203,6 +275,13 @@ class portdbapi(dbapi):
44724 cache.ec = self.repositories.get_repo_for_location(tree).eclass_db
44725 except AttributeError:
44726 pass
44727 +
44728 + if not cache.complete_eclass_entries:
44729 + warnings.warn(
44730 + ("Repository '%s' used deprecated 'pms' cache format. "
44731 + "Please migrate to 'md5-dict' format.") % (conf.name,),
44732 + DeprecationWarning)
44733 +
44734 return cache
44735
44736 def _init_cache_dirs(self):
44737 @@ -447,7 +526,7 @@ class portdbapi(dbapi):
44738
44739 proc = EbuildMetadataPhase(cpv=mycpv,
44740 ebuild_hash=ebuild_hash, portdb=self,
44741 - repo_path=mylocation, scheduler=self._scheduler,
44742 + repo_path=mylocation, scheduler=self._event_loop,
44743 settings=self.doebuild_settings)
44744
44745 proc.start()
44746 @@ -627,13 +706,14 @@ class portdbapi(dbapi):
44747 else:
44748 return 0
44749
44750 - def cp_all(self, categories=None, trees=None):
44751 + def cp_all(self, categories=None, trees=None, reverse=False):
44752 """
44753 This returns a list of all keys in our tree or trees
44754 @param categories: optional list of categories to search or
44755 defaults to self.settings.categories
44756 @param trees: optional list of trees to search the categories in or
44757 defaults to self.porttrees
44758 + @param reverse: reverse sort order (default is False)
44759 @rtype list of [cat/pkg,...]
44760 """
44761 d = {}
44762 @@ -652,7 +732,7 @@ class portdbapi(dbapi):
44763 continue
44764 d[atom.cp] = None
44765 l = list(d)
44766 - l.sort()
44767 + l.sort(reverse=reverse)
44768 return l
44769
44770 def cp_list(self, mycp, use_cache=1, mytree=None):
44771 @@ -827,8 +907,8 @@ class portdbapi(dbapi):
44772 continue
44773
44774 try:
44775 - pkg_str = _pkg_str(cpv, slot=metadata["SLOT"],
44776 - repo=metadata["repository"], eapi=metadata["EAPI"])
44777 + pkg_str = _pkg_str(cpv, metadata=metadata,
44778 + settings=self.settings)
44779 except InvalidData:
44780 continue
44781
44782 @@ -966,19 +1046,16 @@ class portdbapi(dbapi):
44783 return False
44784 if settings._getMissingProperties(cpv, metadata):
44785 return False
44786 + if settings._getMissingRestrict(cpv, metadata):
44787 + return False
44788 except InvalidDependString:
44789 return False
44790
44791 return True
44792
44793 -def close_portdbapi_caches():
44794 - for i in portdbapi.portdbapi_instances:
44795 - i.close_caches()
44796 -
44797 -portage.process.atexit_register(portage.portageexit)
44798 -
44799 class portagetree(object):
44800 - def __init__(self, root=None, virtual=DeprecationWarning, settings=None):
44801 + def __init__(self, root=DeprecationWarning, virtual=DeprecationWarning,
44802 + settings=None):
44803 """
44804 Constructor for a PortageTree
44805
44806 @@ -994,7 +1071,7 @@ class portagetree(object):
44807 settings = portage.settings
44808 self.settings = settings
44809
44810 - if root is not None and root != settings['ROOT']:
44811 + if root is not DeprecationWarning:
44812 warnings.warn("The root parameter of the " + \
44813 "portage.dbapi.porttree.portagetree" + \
44814 " constructor is now unused. Use " + \
44815 @@ -1062,10 +1139,8 @@ class portagetree(object):
44816 "Get a slot for a catpkg; assume it exists."
44817 myslot = ""
44818 try:
44819 - myslot = self.dbapi.aux_get(mycatpkg, ["SLOT"])[0]
44820 - except SystemExit:
44821 - raise
44822 - except Exception:
44823 + myslot = self.dbapi._pkg_str(mycatpkg, None).slot
44824 + except KeyError:
44825 pass
44826 return myslot
44827
44828 @@ -1137,9 +1212,18 @@ def _parse_uri_map(cpv, metadata, use=None):
44829
44830 uri_set = uri_map.get(distfile)
44831 if uri_set is None:
44832 - uri_set = set()
44833 + # Use OrderedDict to preserve order from SRC_URI
44834 + # while ensuring uniqueness.
44835 + uri_set = OrderedDict()
44836 uri_map[distfile] = uri_set
44837 - uri_set.add(uri)
44838 - uri = None
44839 +
44840 + # SRC_URI may contain a file name with no scheme, and in
44841 + # this case it does not belong in uri_set.
44842 + if urlparse(uri).scheme:
44843 + uri_set[uri] = True
44844 +
44845 + # Convert OrderedDicts to tuples.
44846 + for k, v in uri_map.items():
44847 + uri_map[k] = tuple(v)
44848
44849 return uri_map
44850
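For illustration only: _parse_uri_map above now keys an OrderedDict by URI, so SRC_URI order is preserved while duplicates are still dropped. A standalone sketch of that pattern with made-up URIs:

    from collections import OrderedDict

    uris = [
        "https://mirror.example/foo-1.0.tar.gz",
        "https://upstream.example/foo-1.0.tar.gz",
        "https://mirror.example/foo-1.0.tar.gz",  # duplicate
    ]

    # The keys of an OrderedDict act as an order-preserving set.
    uri_set = OrderedDict()
    for uri in uris:
        uri_set[uri] = True

    print(tuple(uri_set))
    # ('https://mirror.example/foo-1.0.tar.gz',
    #  'https://upstream.example/foo-1.0.tar.gz')
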
44851 diff --git a/pym/portage/dbapi/vartree.py b/pym/portage/dbapi/vartree.py
44852 index ea62f6b..6417a56 100644
44853 --- a/pym/portage/dbapi/vartree.py
44854 +++ b/pym/portage/dbapi/vartree.py
44855 @@ -1,6 +1,8 @@
44856 -# Copyright 1998-2012 Gentoo Foundation
44857 +# Copyright 1998-2014 Gentoo Foundation
44858 # Distributed under the terms of the GNU General Public License v2
44859
44860 +from __future__ import unicode_literals
44861 +
44862 __all__ = [
44863 "vardbapi", "vartree", "dblink"] + \
44864 ["write_contents", "tar_contents"]
44865 @@ -11,8 +13,9 @@ portage.proxy.lazyimport.lazyimport(globals(),
44866 'portage.data:portage_gid,portage_uid,secpass',
44867 'portage.dbapi.dep_expand:dep_expand',
44868 'portage.dbapi._MergeProcess:MergeProcess',
44869 + 'portage.dbapi._SyncfsProcess:SyncfsProcess',
44870 'portage.dep:dep_getkey,isjustname,isvalidatom,match_from_list,' + \
44871 - 'use_reduce,_get_slot_re',
44872 + 'use_reduce,_slot_separator,_repo_separator',
44873 'portage.eapi:_get_eapi_attrs',
44874 'portage.elog:collect_ebuild_messages,collect_messages,' + \
44875 'elog_process,_merge_logentries',
44876 @@ -22,7 +25,6 @@ portage.proxy.lazyimport.lazyimport(globals(),
44877 '_merge_unicode_error', '_spawn_phase',
44878 'portage.package.ebuild.prepare_build_dirs:prepare_build_dirs',
44879 'portage.package.ebuild._ipc.QueryCommand:QueryCommand',
44880 - 'portage.update:fixdbentries',
44881 'portage.util:apply_secpass_permissions,ConfigProtect,ensure_dirs,' + \
44882 'writemsg,writemsg_level,write_atomic,atomic_ofstream,writedict,' + \
44883 'grabdict,normalize_path,new_protect_filename',
44884 @@ -30,17 +32,20 @@ portage.proxy.lazyimport.lazyimport(globals(),
44885 'portage.util.env_update:env_update',
44886 'portage.util.listdir:dircache,listdir',
44887 'portage.util.movefile:movefile',
44888 + 'portage.util.writeable_check:get_ro_checker',
44889 'portage.util._dyn_libs.PreservedLibsRegistry:PreservedLibsRegistry',
44890 'portage.util._dyn_libs.LinkageMapELF:LinkageMapELF@LinkageMap',
44891 + 'portage.util._async.SchedulerInterface:SchedulerInterface',
44892 + 'portage.util._eventloop.EventLoop:EventLoop',
44893 + 'portage.util._eventloop.global_event_loop:global_event_loop',
44894 'portage.versions:best,catpkgsplit,catsplit,cpv_getkey,vercmp,' + \
44895 - '_pkgsplit@pkgsplit,_pkg_str',
44896 + '_get_slot_re,_pkgsplit@pkgsplit,_pkg_str,_unknown_repo',
44897 'subprocess',
44898 'tarfile',
44899 )
44900
44901 from portage.const import CACHE_PATH, CONFIG_MEMORY_FILE, \
44902 PORTAGE_PACKAGE_ATOM, PRIVATE_PATH, VDB_PATH
44903 -from portage.const import _ENABLE_DYN_LINK_MAP, _ENABLE_PRESERVE_LIBS
44904 from portage.dbapi import dbapi
44905 from portage.exception import CommandNotFound, \
44906 InvalidData, InvalidLocation, InvalidPackageName, \
44907 @@ -61,7 +66,6 @@ from portage import _unicode_encode
44908 from _emerge.EbuildBuildDir import EbuildBuildDir
44909 from _emerge.EbuildPhase import EbuildPhase
44910 from _emerge.emergelog import emergelog
44911 -from _emerge.PollScheduler import PollScheduler
44912 from _emerge.MiscFunctionsProcess import MiscFunctionsProcess
44913 from _emerge.SpawnProcess import SpawnProcess
44914
44915 @@ -73,6 +77,7 @@ import io
44916 from itertools import chain
44917 import logging
44918 import os as _os
44919 +import platform
44920 import pwd
44921 import re
44922 import stat
44923 @@ -88,6 +93,7 @@ except ImportError:
44924 import pickle
44925
44926 if sys.hexversion >= 0x3000000:
44927 + # pylint: disable=W0622
44928 basestring = str
44929 long = int
44930 _unicode = str
44931 @@ -111,7 +117,8 @@ class vardbapi(dbapi):
44932 _aux_cache_keys_re = re.compile(r'^NEEDED\..*$')
44933 _aux_multi_line_re = re.compile(r'^(CONTENTS|NEEDED\..*)$')
44934
44935 - def __init__(self, _unused_param=None, categories=None, settings=None, vartree=None):
44936 + def __init__(self, _unused_param=DeprecationWarning,
44937 + categories=None, settings=None, vartree=None):
44938 """
44939 The categories parameter is unused since the dbapi class
44940 now has a categories property that is generated from the
44941 @@ -141,11 +148,11 @@ class vardbapi(dbapi):
44942 settings = portage.settings
44943 self.settings = settings
44944
44945 - if _unused_param is not None and _unused_param != settings['ROOT']:
44946 + if _unused_param is not DeprecationWarning:
44947 warnings.warn("The first parameter of the "
44948 "portage.dbapi.vartree.vardbapi"
44949 - " constructor is now unused. Use "
44950 - "settings['ROOT'] instead.",
44951 + " constructor is now unused. Instead "
44952 + "settings['ROOT'] is used.",
44953 DeprecationWarning, stacklevel=2)
44954
44955 self._eroot = settings['EROOT']
44956 @@ -162,7 +169,7 @@ class vardbapi(dbapi):
44957 self.vartree = vartree
44958 self._aux_cache_keys = set(
44959 ["BUILD_TIME", "CHOST", "COUNTER", "DEPEND", "DESCRIPTION",
44960 - "EAPI", "HOMEPAGE", "IUSE", "KEYWORDS",
44961 + "EAPI", "HDEPEND", "HOMEPAGE", "IUSE", "KEYWORDS",
44962 "LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE", "RDEPEND",
44963 "repository", "RESTRICT" , "SLOT", "USE", "DEFINED_PHASES",
44964 ])
44965 @@ -172,15 +179,9 @@ class vardbapi(dbapi):
44966 self._counter_path = os.path.join(self._eroot,
44967 CACHE_PATH, "counter")
44968
44969 - self._plib_registry = None
44970 - if _ENABLE_PRESERVE_LIBS:
44971 - self._plib_registry = PreservedLibsRegistry(settings["ROOT"],
44972 - os.path.join(self._eroot, PRIVATE_PATH,
44973 - "preserved_libs_registry"))
44974 -
44975 - self._linkmap = None
44976 - if _ENABLE_DYN_LINK_MAP:
44977 - self._linkmap = LinkageMap(self)
44978 + self._plib_registry = PreservedLibsRegistry(settings["ROOT"],
44979 + os.path.join(self._eroot, PRIVATE_PATH, "preserved_libs_registry"))
44980 + self._linkmap = LinkageMap(self)
44981 self._owners = self._owners_db(self)
44982
44983 self._cached_counter = None
44984 @@ -368,7 +369,7 @@ class vardbapi(dbapi):
44985 del e
44986 write_atomic(os.path.join(newpath, "PF"), new_pf+"\n")
44987 write_atomic(os.path.join(newpath, "CATEGORY"), mynewcat+"\n")
44988 - fixdbentries([mylist], newpath, eapi=mycpv.eapi)
44989 +
44990 return moves
44991
44992 def cp_list(self, mycp, use_cache=1):
44993 @@ -376,7 +377,10 @@ class vardbapi(dbapi):
44994 if mysplit[0] == '*':
44995 mysplit[0] = mysplit[0][1:]
44996 try:
44997 - mystat = os.stat(self.getpath(mysplit[0])).st_mtime
44998 + if sys.hexversion >= 0x3030000:
44999 + mystat = os.stat(self.getpath(mysplit[0])).st_mtime_ns
45000 + else:
45001 + mystat = os.stat(self.getpath(mysplit[0])).st_mtime
45002 except OSError:
45003 mystat = 0
45004 if use_cache and mycp in self.cpcache:
45005 @@ -511,7 +515,10 @@ class vardbapi(dbapi):
45006 return list(self._iter_match(mydep,
45007 self.cp_list(mydep.cp, use_cache=use_cache)))
45008 try:
45009 - curmtime = os.stat(os.path.join(self._eroot, VDB_PATH, mycat)).st_mtime
45010 + if sys.hexversion >= 0x3030000:
45011 + curmtime = os.stat(os.path.join(self._eroot, VDB_PATH, mycat)).st_mtime_ns
45012 + else:
45013 + curmtime = os.stat(os.path.join(self._eroot, VDB_PATH, mycat)).st_mtime
45014 except (IOError, OSError):
45015 curmtime=0
45016
45017 @@ -566,31 +573,32 @@ class vardbapi(dbapi):
45018 def _aux_cache_init(self):
45019 aux_cache = None
45020 open_kwargs = {}
45021 - if sys.hexversion >= 0x3000000:
45022 + if sys.hexversion >= 0x3000000 and sys.hexversion < 0x3020000:
45023 # Buffered io triggers extreme performance issues in
45024 # Unpickler.load() (problem observed with python-3.0.1).
45025 # Unfortunately, performance is still poor relative to
45026 - # python-2.x, but buffering makes it much worse.
45027 + # python-2.x, but buffering makes it much worse (problem
45028 + # appears to be solved in Python >=3.2 at least).
45029 open_kwargs["buffering"] = 0
45030 try:
45031 - f = open(_unicode_encode(self._aux_cache_filename,
45032 + with open(_unicode_encode(self._aux_cache_filename,
45033 encoding=_encodings['fs'], errors='strict'),
45034 - mode='rb', **open_kwargs)
45035 - mypickle = pickle.Unpickler(f)
45036 - try:
45037 - mypickle.find_global = None
45038 - except AttributeError:
45039 - # TODO: If py3k, override Unpickler.find_class().
45040 - pass
45041 - aux_cache = mypickle.load()
45042 - f.close()
45043 - del f
45044 - except (AttributeError, EOFError, EnvironmentError, ValueError, pickle.UnpicklingError) as e:
45045 + mode='rb', **open_kwargs) as f:
45046 + mypickle = pickle.Unpickler(f)
45047 + try:
45048 + mypickle.find_global = None
45049 + except AttributeError:
45050 + # TODO: If py3k, override Unpickler.find_class().
45051 + pass
45052 + aux_cache = mypickle.load()
45053 + except (SystemExit, KeyboardInterrupt):
45054 + raise
45055 + except Exception as e:
45056 if isinstance(e, EnvironmentError) and \
45057 getattr(e, 'errno', None) in (errno.ENOENT, errno.EACCES):
45058 pass
45059 else:
45060 - writemsg(_unicode_decode(_("!!! Error loading '%s': %s\n")) % \
45061 + writemsg(_("!!! Error loading '%s': %s\n") % \
45062 (self._aux_cache_filename, e), noiselevel=-1)
45063 del e
45064
45065 @@ -710,7 +718,7 @@ class vardbapi(dbapi):
45066 if _get_slot_re(eapi_attrs).match(mydata['SLOT']) is None:
45067 # Empty or invalid slot triggers InvalidAtom exceptions when
45068 # generating slot atoms for packages, so translate it to '0' here.
45069 - mydata['SLOT'] = _unicode_decode('0')
45070 + mydata['SLOT'] = '0'
45071
45072 return [mydata[x] for x in wants]
45073
45074 @@ -735,21 +743,18 @@ class vardbapi(dbapi):
45075 results[x] = st[stat.ST_MTIME]
45076 continue
45077 try:
45078 - myf = io.open(
45079 + with io.open(
45080 _unicode_encode(os.path.join(mydir, x),
45081 encoding=_encodings['fs'], errors='strict'),
45082 mode='r', encoding=_encodings['repo.content'],
45083 - errors='replace')
45084 - try:
45085 - myd = myf.read()
45086 - finally:
45087 - myf.close()
45088 + errors='replace') as f:
45089 + myd = f.read()
45090 except IOError:
45091 if x not in self._aux_cache_keys and \
45092 self._aux_cache_keys_re.match(x) is None:
45093 env_keys.append(x)
45094 continue
45095 - myd = _unicode_decode('')
45096 + myd = ''
45097
45098 # Preserve \n for metadata that is known to
45099 # contain multiple lines.
45100 @@ -763,13 +768,13 @@ class vardbapi(dbapi):
45101 for k in env_keys:
45102 v = env_results.get(k)
45103 if v is None:
45104 - v = _unicode_decode('')
45105 + v = ''
45106 if self._aux_multi_line_re.match(k) is None:
45107 v = " ".join(v.split())
45108 results[k] = v
45109
45110 if results.get("EAPI") == "":
45111 - results[_unicode_decode("EAPI")] = _unicode_decode('0')
45112 + results["EAPI"] = '0'
45113
45114 return results
45115
45116 @@ -889,11 +894,17 @@ class vardbapi(dbapi):
45117 del myroot
45118 counter = -1
45119 try:
45120 - cfile = io.open(
45121 + with io.open(
45122 _unicode_encode(self._counter_path,
45123 encoding=_encodings['fs'], errors='strict'),
45124 mode='r', encoding=_encodings['repo.content'],
45125 - errors='replace')
45126 + errors='replace') as f:
45127 + try:
45128 + counter = long(f.readline().strip())
45129 + except (OverflowError, ValueError) as e:
45130 + writemsg(_("!!! COUNTER file is corrupt: '%s'\n") %
45131 + self._counter_path, noiselevel=-1)
45132 + writemsg("!!! %s\n" % (e,), noiselevel=-1)
45133 except EnvironmentError as e:
45134 # Silently allow ENOENT since files under
45135 # /var/cache/ are allowed to disappear.
45136 @@ -902,17 +913,6 @@ class vardbapi(dbapi):
45137 self._counter_path, noiselevel=-1)
45138 writemsg("!!! %s\n" % str(e), noiselevel=-1)
45139 del e
45140 - else:
45141 - try:
45142 - try:
45143 - counter = long(cfile.readline().strip())
45144 - finally:
45145 - cfile.close()
45146 - except (OverflowError, ValueError) as e:
45147 - writemsg(_("!!! COUNTER file is corrupt: '%s'\n") % \
45148 - self._counter_path, noiselevel=-1)
45149 - writemsg("!!! %s\n" % str(e), noiselevel=-1)
45150 - del e
45151
45152 if self._cached_counter == counter:
45153 max_counter = counter
45154 @@ -1004,16 +1004,31 @@ class vardbapi(dbapi):
45155 relative_filename = filename[root_len:]
45156 contents_key = pkg._match_contents(relative_filename)
45157 if contents_key:
45158 - del new_contents[contents_key]
45159 + # It's possible for two different paths to refer to the same
45160 + # contents_key, due to directory symlinks. Therefore, pass a
45161 + # default value to pop, in order to avoid a KeyError which
45162 + # could otherwise be triggered (see bug #454400).
45163 + new_contents.pop(contents_key, None)
45164 removed += 1
45165
45166 if removed:
45167 - self._bump_mtime(pkg.mycpv)
45168 - f = atomic_ofstream(os.path.join(pkg.dbdir, "CONTENTS"))
45169 - write_contents(new_contents, root, f)
45170 - f.close()
45171 - self._bump_mtime(pkg.mycpv)
45172 - pkg._clear_contents_cache()
45173 + self.writeContentsToContentsFile(pkg, new_contents)
45174 +
45175 + def writeContentsToContentsFile(self, pkg, new_contents):
45176 + """
45177 + @param pkg: package to write contents file for
45178 + @type pkg: dblink
45179 + @param new_contents: contents to write to CONTENTS file
45180 + @type new_contents: contents dictionary of the form
45181 + {u'/path/to/file' : (contents_attribute 1, ...), ...}
45182 + """
45183 + root = self.settings['ROOT']
45184 + self._bump_mtime(pkg.mycpv)
45185 + f = atomic_ofstream(os.path.join(pkg.dbdir, "CONTENTS"))
45186 + write_contents(new_contents, root, f)
45187 + f.close()
45188 + self._bump_mtime(pkg.mycpv)
45189 + pkg._clear_contents_cache()
45190
45191 class _owners_cache(object):
45192 """
45193 @@ -1258,18 +1273,35 @@ class vardbapi(dbapi):
45194 name = os.path.basename(path.rstrip(os.path.sep))
45195 path_info_list.append((path, name, is_basename))
45196
45197 + # Do work via the global event loop, so that it can be used
45198 + # for indication of progress during the search (bug #461412).
45199 + event_loop = (portage._internal_caller and
45200 + global_event_loop() or EventLoop(main=False))
45201 root = self._vardb._eroot
45202 - for cpv in self._vardb.cpv_all():
45203 - dblnk = self._vardb._dblink(cpv)
45204
45205 + def search_pkg(cpv):
45206 + dblnk = self._vardb._dblink(cpv)
45207 for path, name, is_basename in path_info_list:
45208 if is_basename:
45209 for p in dblnk.getcontents():
45210 if os.path.basename(p) == name:
45211 - yield dblnk, p[len(root):]
45212 + search_pkg.results.append((dblnk, p[len(root):]))
45213 else:
45214 if dblnk.isowner(path):
45215 - yield dblnk, path
45216 + search_pkg.results.append((dblnk, path))
45217 + search_pkg.complete = True
45218 + return False
45219 +
45220 + search_pkg.results = []
45221 +
45222 + for cpv in self._vardb.cpv_all():
45223 + del search_pkg.results[:]
45224 + search_pkg.complete = False
45225 + event_loop.idle_add(search_pkg, cpv)
45226 + while not search_pkg.complete:
45227 + event_loop.iteration()
45228 + for result in search_pkg.results:
45229 + yield result
45230
45231 class vartree(object):
45232 "this tree will scan a var/db/pkg database located at root (passed to init)"
45233 @@ -1390,7 +1422,7 @@ class vartree(object):
45234 def getslot(self, mycatpkg):
45235 "Get a slot for a catpkg; assume it exists."
45236 try:
45237 - return self.dbapi.aux_get(mycatpkg, ["SLOT"])[0]
45238 + return self.dbapi._pkg_str(mycatpkg, None).slot
45239 except KeyError:
45240 return ""
45241
45242 @@ -1483,11 +1515,16 @@ class dblink(object):
45243 self._contents_inodes = None
45244 self._contents_basenames = None
45245 self._linkmap_broken = False
45246 + self._device_path_map = {}
45247 self._hardlink_merge_map = {}
45248 self._hash_key = (self._eroot, self.mycpv)
45249 self._protect_obj = None
45250 self._pipe = pipe
45251
45252 + # When necessary, this attribute is modified for
45253 + # compliance with RESTRICT=preserve-libs.
45254 + self._preserve_libs = "preserve-libs" in mysettings.features
45255 +
45256 def __hash__(self):
45257 return hash(self._hash_key)
45258
45259 @@ -1530,7 +1567,11 @@ class dblink(object):
45260 """
45261 Remove this entry from the database
45262 """
45263 - if not os.path.exists(self.dbdir):
45264 + try:
45265 + os.lstat(self.dbdir)
45266 + except OSError as e:
45267 + if e.errno not in (errno.ENOENT, errno.ENOTDIR, errno.ESTALE):
45268 + raise
45269 return
45270
45271 # Check validity of self.dbdir before attempting to remove it.
45272 @@ -1547,6 +1588,14 @@ class dblink(object):
45273 pass
45274 self.vartree.dbapi._remove(self)
45275
45276 + # Use self.dbroot since we need an existing path for syncfs.
45277 + try:
45278 + self._merged_path(self.dbroot, os.lstat(self.dbroot))
45279 + except OSError:
45280 + pass
45281 +
45282 + self._post_merge_sync()
45283 +
45284 def clearcontents(self):
45285 """
45286 For a given db entry (self), erase the CONTENTS values.
45287 @@ -1572,18 +1621,18 @@ class dblink(object):
45288 return self.contentscache
45289 pkgfiles = {}
45290 try:
45291 - myc = io.open(_unicode_encode(contents_file,
45292 + with io.open(_unicode_encode(contents_file,
45293 encoding=_encodings['fs'], errors='strict'),
45294 mode='r', encoding=_encodings['repo.content'],
45295 - errors='replace')
45296 + errors='replace') as f:
45297 + mylines = f.readlines()
45298 except EnvironmentError as e:
45299 if e.errno != errno.ENOENT:
45300 raise
45301 del e
45302 self.contentscache = pkgfiles
45303 return pkgfiles
45304 - mylines = myc.readlines()
45305 - myc.close()
45306 +
45307 null_byte = "\0"
45308 normalize_needed = self._normalize_needed
45309 contents_re = self._contents_re
45310 @@ -1598,7 +1647,7 @@ class dblink(object):
45311 if myroot == os.path.sep:
45312 myroot = None
45313 # used to generate parent dir entries
45314 - dir_entry = (_unicode_decode("dir"),)
45315 + dir_entry = ("dir",)
45316 eroot_split_len = len(self.settings["EROOT"].split(os.sep)) - 1
45317 pos = 0
45318 errors = []
45319 @@ -1698,8 +1747,11 @@ class dblink(object):
45320 unmerge_preserve = \
45321 self._find_libs_to_preserve(unmerge=True)
45322 counter = self.vartree.dbapi.cpv_counter(self.mycpv)
45323 - plib_registry.unregister(self.mycpv,
45324 - self.settings["SLOT"], counter)
45325 + try:
45326 + slot = self.mycpv.slot
45327 + except AttributeError:
45328 + slot = _pkg_str(self.mycpv, slot=self.settings["SLOT"]).slot
45329 + plib_registry.unregister(self.mycpv, slot, counter)
45330 if unmerge_preserve:
45331 for path in sorted(unmerge_preserve):
45332 contents_key = self._match_contents(path)
45333 @@ -1709,7 +1761,7 @@ class dblink(object):
45334 self._display_merge(_(">>> needed %s %s\n") % \
45335 (obj_type, contents_key), noiselevel=-1)
45336 plib_registry.register(self.mycpv,
45337 - self.settings["SLOT"], counter, unmerge_preserve)
45338 + slot, counter, unmerge_preserve)
45339 # Remove the preserved files from our contents
45340 # so that they won't be unmerged.
45341 self.vartree.dbapi.removeFromContents(self,
45342 @@ -1779,7 +1831,8 @@ class dblink(object):
45343 if self._scheduler is None:
45344 # We create a scheduler instance and use it to
45345 # log unmerge output separately from merge output.
45346 - self._scheduler = PollScheduler().sched_iface
45347 + self._scheduler = SchedulerInterface(portage._internal_caller and
45348 + global_event_loop() or EventLoop(main=False))
45349 if self.settings.get("PORTAGE_BACKGROUND") == "subprocess":
45350 if self.settings.get("PORTAGE_BACKGROUND_UNMERGE") == "1":
45351 self.settings["PORTAGE_BACKGROUND"] = "1"
45352 @@ -1804,7 +1857,7 @@ class dblink(object):
45353 # done for this slot, so it shouldn't be repeated until the next
45354 # replacement or unmerge operation.
45355 if others_in_slot is None:
45356 - slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0]
45357 + slot = self.vartree.dbapi._pkg_str(self.mycpv, None).slot
45358 slot_matches = self.vartree.dbapi.match(
45359 "%s:%s" % (portage.cpv_getkey(self.mycpv), slot))
45360 others_in_slot = []
45361 @@ -1848,13 +1901,17 @@ class dblink(object):
45362 except UnsupportedAPIException as e:
45363 eapi_unsupported = e
45364
45365 + if self._preserve_libs and "preserve-libs" in \
45366 + self.settings["PORTAGE_RESTRICT"].split():
45367 + self._preserve_libs = False
45368 +
45369 builddir_lock = None
45370 scheduler = self._scheduler
45371 retval = os.EX_OK
45372 try:
45373 # Only create builddir_lock if the caller
45374 # has not already acquired the lock.
45375 - if "PORTAGE_BUILDIR_LOCKED" not in self.settings:
45376 + if "PORTAGE_BUILDDIR_LOCKED" not in self.settings:
45377 builddir_lock = EbuildBuildDir(
45378 scheduler=scheduler,
45379 settings=self.settings)
45380 @@ -1883,7 +1940,7 @@ class dblink(object):
45381 showMessage(_("!!! FAILED prerm: %s\n") % \
45382 os.path.join(self.dbdir, "EAPI"),
45383 level=logging.ERROR, noiselevel=-1)
45384 - showMessage(_unicode_decode("%s\n") % (eapi_unsupported,),
45385 + showMessage("%s\n" % (eapi_unsupported,),
45386 level=logging.ERROR, noiselevel=-1)
45387 elif os.path.isfile(myebuildpath):
45388 phase = EbuildPhase(background=background,
45389 @@ -2072,7 +2129,7 @@ class dblink(object):
45390
45391 if others_in_slot is None:
45392 others_in_slot = []
45393 - slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0]
45394 + slot = self.vartree.dbapi._pkg_str(self.mycpv, None).slot
45395 slot_matches = self.vartree.dbapi.match(
45396 "%s:%s" % (portage.cpv_getkey(self.mycpv), slot))
45397 for cur_cpv in slot_matches:
45398 @@ -2129,6 +2186,14 @@ class dblink(object):
45399 self._eerror("postrm",
45400 ["Could not chmod or unlink '%s': %s" % \
45401 (file_name, ose)])
45402 + else:
45403 +
45404 + # Even though the file no longer exists, we log it
45405 + # here so that _unmerge_dirs can see that we've
45406 + # removed a file from this device, and will record
45407 + # the parent directory for a syncfs call.
45408 + self._merged_path(file_name, lstatobj, exists=False)
45409 +
45410 finally:
45411 if bsd_chflags and pflags != 0:
45412 # Restore the parent flags we saved before unlinking
45413 @@ -2549,15 +2614,19 @@ class dblink(object):
45414 raise
45415 del e
45416 show_unmerge("!!!", "", "obj", child)
45417 +
45418 try:
45419 + parent_name = os.path.dirname(obj)
45420 + parent_stat = os.stat(parent_name)
45421 +
45422 if bsd_chflags:
45423 lstatobj = os.lstat(obj)
45424 if lstatobj.st_flags != 0:
45425 bsd_chflags.lchflags(obj, 0)
45426 - parent_name = os.path.dirname(obj)
45427 +
45428 # Use normal stat/chflags for the parent since we want to
45429 # follow any symlinks to the real parent directory.
45430 - pflags = os.stat(parent_name).st_flags
45431 + pflags = parent_stat.st_flags
45432 if pflags != 0:
45433 bsd_chflags.chflags(parent_name, 0)
45434 try:
45435 @@ -2566,13 +2635,34 @@ class dblink(object):
45436 if bsd_chflags and pflags != 0:
45437 # Restore the parent flags we saved before unlinking
45438 bsd_chflags.chflags(parent_name, pflags)
45439 +
45440 + # Record the parent directory for use in syncfs calls.
45441 + # Note that we use a realpath and a regular stat here, since
45442 + # we want to follow any symlinks back to the real device where
45443 + # the real parent directory resides.
45444 + self._merged_path(os.path.realpath(parent_name), parent_stat)
45445 +
45446 show_unmerge("<<<", "", "dir", obj)
45447 except EnvironmentError as e:
45448 if e.errno not in ignored_rmdir_errnos:
45449 raise
45450 if e.errno != errno.ENOENT:
45451 show_unmerge("---", unmerge_desc["!empty"], "dir", obj)
45452 - del e
45453 +
45454 + # Since we didn't remove this directory, record the directory
45455 + # itself for use in syncfs calls, if we have removed another
45456 + # file from the same device.
45457 + # Note that we use a realpath and a regular stat here, since
45458 + # we want to follow any symlinks back to the real device where
45459 + # the real directory resides.
45460 + try:
45461 + dir_stat = os.stat(obj)
45462 + except OSError:
45463 + pass
45464 + else:
45465 + if dir_stat.st_dev in self._device_path_map:
45466 + self._merged_path(os.path.realpath(obj), dir_stat)
45467 +
45468 else:
45469 # When a directory is successfully removed, there's
45470 # no need to protect symlinks that point to it.
45471 @@ -2799,7 +2889,7 @@ class dblink(object):
45472 self.vartree.dbapi._linkmap is None or \
45473 self.vartree.dbapi._plib_registry is None or \
45474 (not unmerge and self._installed_instance is None) or \
45475 - "preserve-libs" not in self.settings.features:
45476 + not self._preserve_libs:
45477 return set()
45478
45479 os = _os_merge
45480 @@ -3383,7 +3473,10 @@ class dblink(object):
45481 else:
45482 logdir = os.path.join(self.settings["T"], "logging")
45483 ebuild_logentries = collect_ebuild_messages(logdir)
45484 - py_logentries = collect_messages(key=cpv).get(cpv, {})
45485 + # phasefilter is irrelevant for the above collect_ebuild_messages
45486 + # call, since this package instance has a private logdir. However,
45487 + # it may be relevant for the following collect_messages call.
45488 + py_logentries = collect_messages(key=cpv, phasefilter=phasefilter).get(cpv, {})
45489 logentries = _merge_logentries(py_logentries, ebuild_logentries)
45490 funcnames = {
45491 "INFO": "einfo",
45492 @@ -3404,7 +3497,9 @@ class dblink(object):
45493 str_buffer.append(' '.join(fields))
45494 str_buffer.append('\n')
45495 if str_buffer:
45496 - os.write(self._pipe, _unicode_encode(''.join(str_buffer)))
45497 + str_buffer = _unicode_encode(''.join(str_buffer))
45498 + while str_buffer:
45499 + str_buffer = str_buffer[os.write(self._pipe, str_buffer):]
45500
45501 def _emerge_log(self, msg):
45502 emergelog(False, msg)
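
The loop added above guards against short writes: os.write() on a pipe may accept only part of the buffer, so the unwritten tail is retried until it is empty. The same pattern in isolation (illustrative helper, not Portage code):

import os

def write_all(fd, data):
    # os.write() returns how many bytes it took; keep writing the remainder.
    while data:
        data = data[os.write(fd, data):]

r, w = os.pipe()
write_all(w, b"elog output\n")
os.close(w)
print(os.read(r, 64))  # b'elog output\n'
os.close(r)
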
45503 @@ -3415,6 +3510,8 @@ class dblink(object):
45504
45505 This function does the following:
45506
45507 + calls get_ro_checker to retrieve a function for checking whether Portage
45508 + will write to a read-only filesystem, then runs it against the directory list
45509 calls self._preserve_libs if FEATURES=preserve-libs
45510 calls self._collision_protect if FEATURES=collision-protect
45511 calls doebuild(mydo=pkg_preinst)
45512 @@ -3462,6 +3559,7 @@ class dblink(object):
45513 level=logging.ERROR, noiselevel=-1)
45514 return 1
45515
45516 + is_binpkg = self.settings.get("EMERGE_FROM") == "binary"
45517 slot = ''
45518 for var_name in ('CHOST', 'SLOT'):
45519 if var_name == 'CHOST' and self.cat == 'virtual':
45520 @@ -3471,22 +3569,18 @@ class dblink(object):
45521 pass
45522 continue
45523
45524 - f = None
45525 try:
45526 - f = io.open(_unicode_encode(
45527 + with io.open(_unicode_encode(
45528 os.path.join(inforoot, var_name),
45529 encoding=_encodings['fs'], errors='strict'),
45530 mode='r', encoding=_encodings['repo.content'],
45531 - errors='replace')
45532 - val = f.readline().strip()
45533 + errors='replace') as f:
45534 + val = f.readline().strip()
45535 except EnvironmentError as e:
45536 if e.errno != errno.ENOENT:
45537 raise
45538 del e
45539 val = ''
45540 - finally:
45541 - if f is not None:
45542 - f.close()
45543
45544 if var_name == 'SLOT':
45545 slot = val
45546 @@ -3499,7 +3593,9 @@ class dblink(object):
45547 return 1
45548 write_atomic(os.path.join(inforoot, var_name), slot + '\n')
45549
45550 - if val != self.settings.get(var_name, ''):
45551 + # This check only applies when built from source, since
45552 + # inforoot values are written just after src_install.
45553 + if not is_binpkg and val != self.settings.get(var_name, ''):
45554 self._eqawarn('preinst',
45555 [_("QA Notice: Expected %(var_name)s='%(expected_value)s', got '%(actual_value)s'\n") % \
45556 {"var_name":var_name, "expected_value":self.settings.get(var_name, ''), "actual_value":val}])
45557 @@ -3517,27 +3613,40 @@ class dblink(object):
45558 cp = self.mysplit[0]
45559 slot_atom = "%s:%s" % (cp, slot)
45560
45561 - # filter any old-style virtual matches
45562 - slot_matches = [cpv for cpv in self.vartree.dbapi.match(slot_atom) \
45563 - if cpv_getkey(cpv) == cp]
45564 -
45565 - if self.mycpv not in slot_matches and \
45566 - self.vartree.dbapi.cpv_exists(self.mycpv):
45567 - # handle multislot or unapplied slotmove
45568 - slot_matches.append(self.mycpv)
45569 -
45570 - others_in_slot = []
45571 - from portage import config
45572 - for cur_cpv in slot_matches:
45573 - # Clone the config in case one of these has to be unmerged since
45574 - # we need it to have private ${T} etc... for things like elog.
45575 - settings_clone = config(clone=self.settings)
45576 - settings_clone.pop("PORTAGE_BUILDIR_LOCKED", None)
45577 - settings_clone.reset()
45578 - others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
45579 - settings=settings_clone,
45580 - vartree=self.vartree, treetype="vartree",
45581 - scheduler=self._scheduler, pipe=self._pipe))
45582 + self.lockdb()
45583 + try:
45584 + # filter any old-style virtual matches
45585 + slot_matches = [cpv for cpv in self.vartree.dbapi.match(slot_atom)
45586 + if cpv_getkey(cpv) == cp]
45587 +
45588 + if self.mycpv not in slot_matches and \
45589 + self.vartree.dbapi.cpv_exists(self.mycpv):
45590 + # handle multislot or unapplied slotmove
45591 + slot_matches.append(self.mycpv)
45592 +
45593 + others_in_slot = []
45594 + for cur_cpv in slot_matches:
45595 + # Clone the config in case one of these has to be unmerged,
45596 + # since we need it to have private ${T} etc... for things
45597 + # like elog.
45598 + settings_clone = portage.config(clone=self.settings)
45599 + settings_clone.pop("PORTAGE_BUILDDIR_LOCKED", None)
45600 + settings_clone.setcpv(cur_cpv, mydb=self.vartree.dbapi)
45601 + if self._preserve_libs and "preserve-libs" in \
45602 + settings_clone["PORTAGE_RESTRICT"].split():
45603 + self._preserve_libs = False
45604 + others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
45605 + settings=settings_clone,
45606 + vartree=self.vartree, treetype="vartree",
45607 + scheduler=self._scheduler, pipe=self._pipe))
45608 + finally:
45609 + self.unlockdb()
45610 +
45611 + # If any instance has RESTRICT=preserve-libs, then
45612 + # restrict it for all instances.
45613 + if not self._preserve_libs:
45614 + for dblnk in others_in_slot:
45615 + dblnk._preserve_libs = False
45616
45617 retval = self._security_check(others_in_slot)
45618 if retval:
45619 @@ -3579,8 +3688,9 @@ class dblink(object):
45620 unicode_error = False
45621 eagain_error = False
45622
45623 - myfilelist = []
45624 - mylinklist = []
45625 + filelist = []
45626 + dirlist = []
45627 + linklist = []
45628 paths_with_newlines = []
45629 def onerror(e):
45630 raise
45631 @@ -3612,6 +3722,9 @@ class dblink(object):
45632 unicode_errors.append(new_parent[ed_len:])
45633 break
45634
45635 + relative_path = parent[srcroot_len:]
45636 + dirlist.append(os.path.join("/", relative_path))
45637 +
45638 for fname in files:
45639 try:
45640 fname = _unicode_decode(fname,
45641 @@ -3641,12 +3754,19 @@ class dblink(object):
45642
45643 file_mode = os.lstat(fpath).st_mode
45644 if stat.S_ISREG(file_mode):
45645 - myfilelist.append(relative_path)
45646 + filelist.append(relative_path)
45647 elif stat.S_ISLNK(file_mode):
45648 # Note: os.walk puts symlinks to directories in the "dirs"
45649 # list and it does not traverse them since that could lead
45650 # to an infinite recursion loop.
45651 - mylinklist.append(relative_path)
45652 + linklist.append(relative_path)
45653 +
45654 + myto = _unicode_decode(
45655 + _os.readlink(_unicode_encode(fpath,
45656 + encoding=_encodings['merge'], errors='strict')),
45657 + encoding=_encodings['merge'], errors='replace')
45658 + if line_ending_re.search(myto) is not None:
45659 + paths_with_newlines.append(relative_path)
45660
45661 if unicode_error:
45662 break
45663 @@ -3674,7 +3794,7 @@ class dblink(object):
45664 # If there are no files to merge, and an installed package in the same
45665 # slot has files, it probably means that something went wrong.
45666 if self.settings.get("PORTAGE_PACKAGE_EMPTY_ABORT") == "1" and \
45667 - not myfilelist and not mylinklist and others_in_slot:
45668 + not filelist and not linklist and others_in_slot:
45669 installed_files = None
45670 for other_dblink in others_in_slot:
45671 installed_files = other_dblink.getcontents()
45672 @@ -3699,7 +3819,7 @@ class dblink(object):
45673 _("Manually run `emerge --unmerge =%s` if you "
45674 "really want to remove the above files. Set "
45675 "PORTAGE_PACKAGE_EMPTY_ABORT=\"0\" in "
45676 - "/etc/make.conf if you do not want to "
45677 + "/etc/portage/make.conf if you do not want to "
45678 "abort in cases like this.") % other_dblink.mycpv,
45679 wrap_width))
45680 eerror(msg)
45681 @@ -3717,13 +3837,38 @@ class dblink(object):
45682 for other in others_in_slot])
45683 prepare_build_dirs(settings=self.settings, cleanup=cleanup)
45684
45685 + # Check for read-only filesystems.
45686 + ro_checker = get_ro_checker()
45687 + rofilesystems = ro_checker(dirlist)
45688 +
45689 + if rofilesystems:
45690 + msg = _("One or more files installed by this package are "
45691 + "set to be installed to read-only filesystems. "
45692 + "Please mount the following filesystems as read-write "
45693 + "and retry.")
45694 + msg = textwrap.wrap(msg, 70)
45695 + msg.append("")
45696 + for f in rofilesystems:
45697 + msg.append("\t%s" % os.path.join(destroot,
45698 + f.lstrip(os.path.sep)))
45699 + msg.append("")
45700 + self._elog("eerror", "preinst", msg)
45701 +
45702 + msg = _("Package '%s' NOT merged due to read-only file systems.") % \
45703 + self.settings.mycpv
45704 + msg += _(" If necessary, refer to your elog "
45705 + "messages for the whole content of the above message.")
45706 + msg = textwrap.wrap(msg, 70)
45707 + eerror(msg)
45708 + return 1
45709 +
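
The block above aborts the merge when any target directory resolves to a read-only filesystem, using the new get_ro_checker() helper. The sketch below only approximates that idea with statvfs(); it is not the implementation Portage ships.

import os

ST_RDONLY = getattr(os, "ST_RDONLY", 1)  # POSIX read-only mount flag

def readonly_filesystems(dirlist, eroot="/"):
    ro = set()
    for d in dirlist:
        path = os.path.join(eroot, d.lstrip(os.sep))
        # Climb to the nearest existing ancestor before probing its mount.
        while not os.path.exists(path):
            parent = os.path.dirname(path)
            if parent == path:
                break
            path = parent
        if os.statvfs(path).f_flag & ST_RDONLY:
            ro.add(d)
    return ro

print(readonly_filesystems(["/usr/bin", "/var/tmp"]))  # usually set()
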
45710 # check for package collisions
45711 blockers = self._blockers
45712 if blockers is None:
45713 blockers = []
45714 collisions, symlink_collisions, plib_collisions = \
45715 self._collision_protect(srcroot, destroot,
45716 - others_in_slot + blockers, myfilelist, mylinklist)
45717 + others_in_slot + blockers, filelist, linklist)
45718
45719 if symlink_collisions:
45720 # Symlink collisions need to be distinguished from other types
45721 @@ -3765,7 +3910,9 @@ class dblink(object):
45722 " enough information to determine if a real problem"
45723 " exists. Please do NOT file a bug report at"
45724 " http://bugs.gentoo.org unless you report exactly which"
45725 - " two packages install the same file(s). Once again,"
45726 + " two packages install the same file(s). See"
45727 + " http://wiki.gentoo.org/wiki/Knowledge_Base:Blockers"
45728 + " for tips on how to solve the problem. And once again,"
45729 " please do NOT file a bug report unless you have"
45730 " completely understood the above message.")
45731
45732 @@ -3800,17 +3947,28 @@ class dblink(object):
45733 # get_owners is slow for large numbers of files, so
45734 # don't look them all up.
45735 collisions = collisions[:20]
45736 +
45737 + pkg_info_strs = {}
45738 self.lockdb()
45739 try:
45740 owners = self.vartree.dbapi._owners.get_owners(collisions)
45741 self.vartree.dbapi.flush_cache()
45742 +
45743 + for pkg in owners:
45744 + pkg = self.vartree.dbapi._pkg_str(pkg.mycpv, None)
45745 + pkg_info_str = "%s%s%s" % (pkg,
45746 + _slot_separator, pkg.slot)
45747 + if pkg.repo != _unknown_repo:
45748 + pkg_info_str += "%s%s" % (_repo_separator,
45749 + pkg.repo)
45750 + pkg_info_strs[pkg] = pkg_info_str
45751 +
45752 finally:
45753 self.unlockdb()
45754
45755 for pkg, owned_files in owners.items():
45756 - cpv = pkg.mycpv
45757 msg = []
45758 - msg.append("%s" % cpv)
45759 + msg.append(pkg_info_strs[pkg.mycpv])
45760 for f in sorted(owned_files):
45761 msg.append("\t%s" % os.path.join(destroot,
45762 f.lstrip(os.path.sep)))
45763 @@ -3901,12 +4059,11 @@ class dblink(object):
45764 # write local package counter for recording
45765 if counter is None:
45766 counter = self.vartree.dbapi.counter_tick(mycpv=self.mycpv)
45767 - f = io.open(_unicode_encode(os.path.join(self.dbtmpdir, 'COUNTER'),
45768 + with io.open(_unicode_encode(os.path.join(self.dbtmpdir, 'COUNTER'),
45769 encoding=_encodings['fs'], errors='strict'),
45770 mode='w', encoding=_encodings['repo.content'],
45771 - errors='backslashreplace')
45772 - f.write(_unicode_decode(str(counter)))
45773 - f.close()
45774 + errors='backslashreplace') as f:
45775 + f.write("%s" % counter)
45776
45777 self.updateprotect()
45778
45779 @@ -4031,6 +4188,7 @@ class dblink(object):
45780 try:
45781 self.delete()
45782 _movefile(self.dbtmpdir, self.dbpkgdir, mysettings=self.settings)
45783 + self._merged_path(self.dbpkgdir, os.lstat(self.dbpkgdir))
45784 finally:
45785 self.unlockdb()
45786
45787 @@ -4075,9 +4233,9 @@ class dblink(object):
45788 self.vartree.dbapi.lock()
45789 try:
45790 try:
45791 - slot, counter = self.vartree.dbapi.aux_get(
45792 - cpv, ["SLOT", "COUNTER"])
45793 - except KeyError:
45794 + slot = self.vartree.dbapi._pkg_str(cpv, None).slot
45795 + counter = self.vartree.dbapi.cpv_counter(cpv)
45796 + except (KeyError, InvalidData):
45797 pass
45798 else:
45799 has_vdb_entry = True
45800 @@ -4146,6 +4304,7 @@ class dblink(object):
45801 # For gcc upgrades, preserved libs have to be removed after the
45802 # the library path has been updated.
45803 self._prune_plib_registry()
45804 + self._post_merge_sync()
45805
45806 return os.EX_OK
45807
45808 @@ -4161,7 +4320,7 @@ class dblink(object):
45809 x = -1
45810 while True:
45811 x += 1
45812 - backup_p = p + '.backup.' + str(x).rjust(4, '0')
45813 + backup_p = '%s.backup.%04d' % (p, x)
45814 try:
45815 os.lstat(backup_p)
45816 except OSError:
45817 @@ -4262,8 +4421,9 @@ class dblink(object):
45818 @type stufftomerge: String or List
45819 @param cfgfiledict: { File:mtime } mapping for config_protected files
45820 @type cfgfiledict: Dictionary
45821 - @param thismtime: The current time (typically long(time.time())
45822 - @type thismtime: Long
45823 + @param thismtime: None or new mtime for merged files (expressed in seconds
45824 + in Python <3.3 and nanoseconds in Python >=3.3)
45825 + @type thismtime: None or Int
45826 @rtype: None or Boolean
45827 @return:
45828 1. True on failure
45829 @@ -4288,18 +4448,18 @@ class dblink(object):
45830 # this is supposed to merge a list of files. There will be 2 forms of argument passing.
45831 if isinstance(stufftomerge, basestring):
45832 #A directory is specified. Figure out protection paths, listdir() it and process it.
45833 - mergelist = os.listdir(join(srcroot, stufftomerge))
45834 - offset = stufftomerge
45835 + mergelist = [join(stufftomerge, child) for child in \
45836 + os.listdir(join(srcroot, stufftomerge))]
45837 else:
45838 - mergelist = stufftomerge
45839 - offset = ""
45840 + mergelist = stufftomerge[:]
45841
45842 - for i, x in enumerate(mergelist):
45843 + while mergelist:
45844
45845 - mysrc = join(srcroot, offset, x)
45846 - mydest = join(destroot, offset, x)
45847 + relative_path = mergelist.pop()
45848 + mysrc = join(srcroot, relative_path)
45849 + mydest = join(destroot, relative_path)
45850 # myrealdest is mydest without the $ROOT prefix (makes a difference if ROOT!="/")
45851 - myrealdest = join(sep, offset, x)
45852 + myrealdest = join(sep, relative_path)
45853 # stat file once, test using S_* macros many times (faster that way)
45854 mystat = os.lstat(mysrc)
45855 mymode = mystat[stat.ST_MODE]
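
The hunk above turns mergeme()'s per-directory recursion into a single worklist of srcroot-relative paths; a later hunk pushes directory children back onto mergelist instead of recursing. The traversal pattern in isolation (illustrative only; the real mergeme() also moves files and handles symlinks, config protection and ownership):

import os
import stat

def walk_relative(srcroot):
    # Yield every path under srcroot, relative to it, without recursion.
    pending = os.listdir(srcroot)
    while pending:
        relative_path = pending.pop()
        full_path = os.path.join(srcroot, relative_path)
        yield relative_path
        if stat.S_ISDIR(os.lstat(full_path).st_mode):
            pending.extend(os.path.join(relative_path, child)
                for child in os.listdir(full_path))

# Example (directory is illustrative):
# for rel in walk_relative("/etc/portage"):
#     print(rel)
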
45856 @@ -4394,9 +4554,26 @@ class dblink(object):
45857 mymtime = movefile(mysrc, mydest, newmtime=thismtime,
45858 sstat=mystat, mysettings=self.settings,
45859 encoding=_encodings['merge'])
45860 +
45861 + try:
45862 + self._merged_path(mydest, os.lstat(mydest))
45863 + except OSError:
45864 + pass
45865 +
45866 if mymtime != None:
45867 + # Use lexists, since if the target happens to be a broken
45868 + # symlink then that should trigger an independent warning.
45869 + if not (os.path.lexists(myrealto) or
45870 + os.path.lexists(join(srcroot, myabsto))):
45871 + self._eqawarn('preinst',
45872 + [_("QA Notice: Symbolic link /%s points to /%s which does not exist.")
45873 + % (relative_path, myabsto)])
45874 +
45875 showMessage(">>> %s -> %s\n" % (mydest, myto))
45876 - outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime)+"\n")
45877 + if sys.hexversion >= 0x3030000:
45878 + outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime // 1000000000)+"\n")
45879 + else:
45880 + outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime)+"\n")
45881 else:
45882 showMessage(_("!!! Failed to move file.\n"),
45883 level=logging.ERROR, noiselevel=-1)
45884 @@ -4490,11 +4667,17 @@ class dblink(object):
45885 os.chmod(mydest, mystat[0])
45886 os.chown(mydest, mystat[4], mystat[5])
45887 showMessage(">>> %s/\n" % mydest)
45888 +
45889 + try:
45890 + self._merged_path(mydest, os.lstat(mydest))
45891 + except OSError:
45892 + pass
45893 +
45894 outfile.write("dir "+myrealdest+"\n")
45895 # recurse and merge this directory
45896 - if self.mergeme(srcroot, destroot, outfile, secondhand,
45897 - join(offset, x), cfgfiledict, thismtime):
45898 - return 1
45899 + mergelist.extend(join(relative_path, child) for child in
45900 + os.listdir(join(srcroot, relative_path)))
45901 +
45902 elif stat.S_ISREG(mymode):
45903 # we are merging a regular file
45904 mymd5 = perform_md5(mysrc, calc_prelink=calc_prelink)
45905 @@ -4550,7 +4733,10 @@ class dblink(object):
45906 cfgprot = cfgfiledict["IGNORE"]
45907 if not moveme:
45908 zing = "---"
45909 - mymtime = mystat[stat.ST_MTIME]
45910 + if sys.hexversion >= 0x3030000:
45911 + mymtime = mystat.st_mtime_ns
45912 + else:
45913 + mymtime = mystat[stat.ST_MTIME]
45914 else:
45915 moveme = 1
45916 cfgprot = 1
45917 @@ -4586,8 +4772,16 @@ class dblink(object):
45918 hardlink_candidates.append(mydest)
45919 zing = ">>>"
45920
45921 + try:
45922 + self._merged_path(mydest, os.lstat(mydest))
45923 + except OSError:
45924 + pass
45925 +
45926 if mymtime != None:
45927 - outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime)+"\n")
45928 + if sys.hexversion >= 0x3030000:
45929 + outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime // 1000000000)+"\n")
45930 + else:
45931 + outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime)+"\n")
45932 showMessage("%s %s\n" % (zing,mydest))
45933 else:
45934 # we are merging a fifo or device node
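
Several hunks above move the mtime bookkeeping to integer nanoseconds on Python >= 3.3 (st_mtime_ns) and divide by 1000000000 when writing CONTENTS, so the recorded value stays in whole seconds on every interpreter. The same conversion in isolation:

import os
import stat
import sys

def contents_mtime(path):
    """Return the mtime as it would be recorded in CONTENTS (whole seconds)."""
    st = os.lstat(path)
    if sys.hexversion >= 0x3030000:
        return st.st_mtime_ns // 1000000000   # nanoseconds -> seconds
    return st[stat.ST_MTIME]                  # already whole seconds

# print(contents_mtime("/etc/hostname"))  # path is illustrative
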
45935 @@ -4598,6 +4792,12 @@ class dblink(object):
45936 sstat=mystat, mysettings=self.settings,
45937 encoding=_encodings['merge']) is not None:
45938 zing = ">>>"
45939 +
45940 + try:
45941 + self._merged_path(mydest, os.lstat(mydest))
45942 + except OSError:
45943 + pass
45944 +
45945 else:
45946 return 1
45947 if stat.S_ISFIFO(mymode):
45948 @@ -4606,6 +4806,52 @@ class dblink(object):
45949 outfile.write("dev %s\n" % myrealdest)
45950 showMessage(zing + " " + mydest + "\n")
45951
45952 + def _merged_path(self, path, lstatobj, exists=True):
45953 + previous_path = self._device_path_map.get(lstatobj.st_dev)
45954 + if previous_path is None or previous_path is False or \
45955 + (exists and len(path) < len(previous_path)):
45956 + if exists:
45957 + self._device_path_map[lstatobj.st_dev] = path
45958 + else:
45959 + # This entry is used to indicate that we've unmerged
45960 + # a file from this device, and later, this entry is
45961 + # replaced by a parent directory.
45962 + self._device_path_map[lstatobj.st_dev] = False
45963 +
45964 + def _post_merge_sync(self):
45965 + """
45966 + Call this after merge or unmerge, in order to sync relevant files to
45967 + disk and avoid data-loss in the event of a power failure. This method
45968 + does nothing if FEATURES=merge-sync is disabled.
45969 + """
45970 + if not self._device_path_map or \
45971 + "merge-sync" not in self.settings.features:
45972 + return
45973 +
45974 + returncode = None
45975 + if platform.system() == "Linux":
45976 +
45977 + paths = []
45978 + for path in self._device_path_map.values():
45979 + if path is not False:
45980 + paths.append(path)
45981 + paths = tuple(paths)
45982 +
45983 + proc = SyncfsProcess(paths=paths,
45984 + scheduler=(self._scheduler or
45985 + portage._internal_caller and global_event_loop() or
45986 + EventLoop(main=False)))
45987 + proc.start()
45988 + returncode = proc.wait()
45989 +
45990 + if returncode is None or returncode != os.EX_OK:
45991 + try:
45992 + proc = subprocess.Popen(["sync"])
45993 + except EnvironmentError:
45994 + pass
45995 + else:
45996 + proc.wait()
45997 +
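
The two methods added above implement FEATURES=merge-sync: _merged_path() records one representative path per device touched by the merge or unmerge (False marks a device where only removals have been seen so far), and _post_merge_sync() then flushes those filesystems, falling back to a plain "sync" call when the syncfs helper is unavailable or fails. A condensed standalone sketch of that bookkeeping follows; it is not the real class, which drives SyncfsProcess through Portage's event loop.

import os
import subprocess

class MergeSyncSketch(object):
    def __init__(self):
        self._device_path_map = {}   # st_dev -> shortest merged path, or False

    def merged_path(self, path, lstatobj, exists=True):
        previous = self._device_path_map.get(lstatobj.st_dev)
        if previous is None or previous is False or \
            (exists and len(path) < len(previous)):
            self._device_path_map[lstatobj.st_dev] = path if exists else False

    def post_merge_sync(self):
        paths = [p for p in self._device_path_map.values() if p is not False]
        if not paths:
            return
        # The real code hands these paths to SyncfsProcess (one syncfs(2) per
        # filesystem); a bare "sync" is the portable fallback shown above.
        try:
            subprocess.call(["sync"])
        except EnvironmentError:
            pass

sketch = MergeSyncSketch()
sketch.merged_path("/usr/bin/foo", os.lstat("/usr/bin"))  # paths illustrative
sketch.post_merge_sync()
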
45998 def merge(self, mergeroot, inforoot, myroot=None, myebuild=None, cleanup=0,
45999 mydbapi=None, prev_mtimes=None, counter=None):
46000 """
46001 @@ -4618,7 +4864,8 @@ class dblink(object):
46002 self.lockdb()
46003 self.vartree.dbapi._bump_mtime(self.mycpv)
46004 if self._scheduler is None:
46005 - self._scheduler = PollScheduler().sched_iface
46006 + self._scheduler = SchedulerInterface(portage._internal_caller and
46007 + global_event_loop() or EventLoop(main=False))
46008 try:
46009 retval = self.treewalk(mergeroot, myroot, inforoot, myebuild,
46010 cleanup=cleanup, mydbapi=mydbapi, prev_mtimes=prev_mtimes,
46011 @@ -4669,11 +4916,12 @@ class dblink(object):
46012 "returns contents of a file with whitespace converted to spaces"
46013 if not os.path.exists(self.dbdir+"/"+name):
46014 return ""
46015 - mydata = io.open(
46016 + with io.open(
46017 _unicode_encode(os.path.join(self.dbdir, name),
46018 encoding=_encodings['fs'], errors='strict'),
46019 mode='r', encoding=_encodings['repo.content'], errors='replace'
46020 - ).read().split()
46021 + ) as f:
46022 + mydata = f.read().split()
46023 return " ".join(mydata)
46024
46025 def copyfile(self,fname):
46026 @@ -4682,10 +4930,11 @@ class dblink(object):
46027 def getfile(self,fname):
46028 if not os.path.exists(self.dbdir+"/"+fname):
46029 return ""
46030 - return io.open(_unicode_encode(os.path.join(self.dbdir, fname),
46031 + with io.open(_unicode_encode(os.path.join(self.dbdir, fname),
46032 encoding=_encodings['fs'], errors='strict'),
46033 mode='r', encoding=_encodings['repo.content'], errors='replace'
46034 - ).read()
46035 + ) as f:
46036 + return f.read()
46037
46038 def setfile(self,fname,data):
46039 kwargs = {}
46040 @@ -4694,16 +4943,18 @@ class dblink(object):
46041 else:
46042 kwargs['mode'] = 'w'
46043 kwargs['encoding'] = _encodings['repo.content']
46044 - write_atomic(os.path.join(self.dbdir, fname), data, **kwargs)
46045 + write_atomic(os.path.join(self.dbdir, fname), data,
46046 + **portage._native_kwargs(kwargs))
46047
46048 def getelements(self,ename):
46049 if not os.path.exists(self.dbdir+"/"+ename):
46050 return []
46051 - mylines = io.open(_unicode_encode(
46052 + with io.open(_unicode_encode(
46053 os.path.join(self.dbdir, ename),
46054 encoding=_encodings['fs'], errors='strict'),
46055 mode='r', encoding=_encodings['repo.content'], errors='replace'
46056 - ).readlines()
46057 + ) as f:
46058 + mylines = f.readlines()
46059 myreturn = []
46060 for x in mylines:
46061 for y in x[:-1].split():
46062 @@ -4711,14 +4962,13 @@ class dblink(object):
46063 return myreturn
46064
46065 def setelements(self,mylist,ename):
46066 - myelement = io.open(_unicode_encode(
46067 + with io.open(_unicode_encode(
46068 os.path.join(self.dbdir, ename),
46069 encoding=_encodings['fs'], errors='strict'),
46070 mode='w', encoding=_encodings['repo.content'],
46071 - errors='backslashreplace')
46072 - for x in mylist:
46073 - myelement.write(_unicode_decode(x+"\n"))
46074 - myelement.close()
46075 + errors='backslashreplace') as f:
46076 + for x in mylist:
46077 + f.write("%s\n" % x)
46078
46079 def isregular(self):
46080 "Is this a regular package (does it have a CATEGORY file? A dblink can be virtual *and* regular)"
46081 @@ -4787,7 +5037,7 @@ class dblink(object):
46082 def merge(mycat, mypkg, pkgloc, infloc,
46083 myroot=None, settings=None, myebuild=None,
46084 mytree=None, mydbapi=None, vartree=None, prev_mtimes=None, blockers=None,
46085 - scheduler=None):
46086 + scheduler=None, fd_pipes=None):
46087 """
46088 @param myroot: ignored, settings['EROOT'] is used instead
46089 """
46090 @@ -4802,10 +5052,12 @@ def merge(mycat, mypkg, pkgloc, infloc,
46091 merge_task = MergeProcess(
46092 mycat=mycat, mypkg=mypkg, settings=settings,
46093 treetype=mytree, vartree=vartree,
46094 - scheduler=(scheduler or PollScheduler().sched_iface),
46095 + scheduler=(scheduler or portage._internal_caller and
46096 + global_event_loop() or EventLoop(main=False)),
46097 background=background, blockers=blockers, pkgloc=pkgloc,
46098 infloc=infloc, myebuild=myebuild, mydbapi=mydbapi,
46099 - prev_mtimes=prev_mtimes, logfile=settings.get('PORTAGE_LOG_FILE'))
46100 + prev_mtimes=prev_mtimes, logfile=settings.get('PORTAGE_LOG_FILE'),
46101 + fd_pipes=fd_pipes)
46102 merge_task.start()
46103 retcode = merge_task.wait()
46104 return retcode
46105 @@ -4985,13 +5237,11 @@ def tar_contents(contents, root, tar, protect=None, onProgress=None):
46106 tar.addfile(tarinfo, f)
46107 f.close()
46108 else:
46109 - f = open(_unicode_encode(path,
46110 + with open(_unicode_encode(path,
46111 encoding=encoding,
46112 - errors='strict'), 'rb')
46113 - try:
46114 + errors='strict'), 'rb') as f:
46115 tar.addfile(tarinfo, f)
46116 - finally:
46117 - f.close()
46118 +
46119 else:
46120 tar.addfile(tarinfo)
46121 if onProgress:
46122
46123 diff --git a/pym/portage/dbapi/virtual.py b/pym/portage/dbapi/virtual.py
46124 index 213708c..ba9745c 100644
46125 --- a/pym/portage/dbapi/virtual.py
46126 +++ b/pym/portage/dbapi/virtual.py
46127 @@ -1,6 +1,7 @@
46128 -# Copyright 1998-2012 Gentoo Foundation
46129 +# Copyright 1998-2013 Gentoo Foundation
46130 # Distributed under the terms of the GNU General Public License v2
46131
46132 +from __future__ import unicode_literals
46133
46134 from portage.dbapi import dbapi
46135 from portage.dbapi.dep_expand import dep_expand
46136 @@ -89,8 +90,8 @@ class fakedbapi(dbapi):
46137 if metadata is None:
46138 mycpv = _pkg_str(mycpv)
46139 else:
46140 - mycpv = _pkg_str(mycpv, slot=metadata.get('SLOT'),
46141 - repo=metadata.get('repository'), eapi=metadata.get('EAPI'))
46142 + mycpv = _pkg_str(mycpv, metadata=metadata,
46143 + settings=self.settings)
46144
46145 mycp = mycpv.cp
46146 try:
46147
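
For readers unfamiliar with _pkg_str: it is a str subclass that carries package metadata as attributes, which is why fakedbapi can keep passing the same object around as a plain cpv string while later code reads .slot, .repo or .eapi from it. A toy illustration of the idea (not the real portage.versions._pkg_str, whose constructor validates the cpv and does much more):

class PkgStr(str):
    """Toy stand-in for _pkg_str: a cpv string with metadata attributes."""
    def __new__(cls, cpv, metadata=None):
        self = str.__new__(cls, cpv)
        metadata = metadata or {}
        self.slot = metadata.get("SLOT")
        self.repo = metadata.get("repository")
        self.eapi = metadata.get("EAPI")
        return self

pkg = PkgStr("dev-lang/python-2.7.5", metadata={"SLOT": "2.7", "EAPI": "5"})
print("%s slot=%s" % (pkg, pkg.slot))   # dev-lang/python-2.7.5 slot=2.7
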
46148 diff --git a/pym/portage/debug.py b/pym/portage/debug.py
46149 index ebf1a13..d5a8cfb 100644
46150 --- a/pym/portage/debug.py
46151 +++ b/pym/portage/debug.py
46152 @@ -1,4 +1,4 @@
46153 -# Copyright 1999-2012 Gentoo Foundation
46154 +# Copyright 1999-2014 Gentoo Foundation
46155 # Distributed under the terms of the GNU General Public License v2
46156
46157 import os
46158 @@ -38,7 +38,7 @@ class trace_handler(object):
46159 self.max_repr_length = 200
46160
46161 def event_handler(self, *args):
46162 - frame, event, arg = args
46163 + frame, event, _arg = args
46164 if "line" == event:
46165 if self.show_local_lines:
46166 self.trace_line(*args)
46167 @@ -56,7 +56,7 @@ class trace_handler(object):
46168 self.arg_repr(frame, event, arg),
46169 self.locals_repr(frame, event, arg)))
46170
46171 - def arg_repr(self, frame, event, arg):
46172 + def arg_repr(self, _frame, event, arg):
46173 my_repr = None
46174 if "return" == event:
46175 my_repr = repr(arg)
46176 @@ -71,7 +71,7 @@ class trace_handler(object):
46177
46178 return ""
46179
46180 - def trace_line(self, frame, event, arg):
46181 + def trace_line(self, frame, _event, _arg):
46182 writemsg("%s line=%d\n" % (self.trim_filename(frame.f_code.co_filename), frame.f_lineno))
46183
46184 def ignore_filename(self, filename):
46185 @@ -81,7 +81,7 @@ class trace_handler(object):
46186 return True
46187 return False
46188
46189 - def locals_repr(self, frame, event, arg):
46190 + def locals_repr(self, frame, _event, _arg):
46191 """Create a representation of the locals dict that is suitable for
46192 tracing output."""
46193
46194
46195 diff --git a/pym/portage/dep/__init__.py b/pym/portage/dep/__init__.py
46196 index e547deb..c457df0 100644
46197 --- a/pym/portage/dep/__init__.py
46198 +++ b/pym/portage/dep/__init__.py
46199 @@ -1,7 +1,9 @@
46200 # deps.py -- Portage dependency resolution functions
46201 -# Copyright 2003-2012 Gentoo Foundation
46202 +# Copyright 2003-2014 Gentoo Foundation
46203 # Distributed under the terms of the GNU General Public License v2
46204
46205 +from __future__ import unicode_literals
46206 +
46207 __all__ = [
46208 'Atom', 'best_match_to_list', 'cpvequal',
46209 'dep_getcpv', 'dep_getkey', 'dep_getslot',
46210 @@ -27,26 +29,21 @@ from portage.eapi import _get_eapi_attrs
46211 from portage.exception import InvalidAtom, InvalidData, InvalidDependString
46212 from portage.localization import _
46213 from portage.versions import catpkgsplit, catsplit, \
46214 - vercmp, ververify, _cp, _cpv, _pkg_str, _unknown_repo
46215 + vercmp, ververify, _cp, _cpv, _pkg_str, _slot, _unknown_repo, _vr
46216 import portage.cache.mappings
46217
46218 if sys.hexversion >= 0x3000000:
46219 + # pylint: disable=W0622
46220 basestring = str
46221 _unicode = str
46222 else:
46223 _unicode = unicode
46224
46225 -# Api consumers included in portage should set this to True.
46226 -# Once the relevant api changes are in a portage release with
46227 -# stable keywords, make these warnings unconditional.
46228 -_internal_warnings = False
46229 -
46230 # \w is [a-zA-Z0-9_]
46231
46232 # PMS 3.1.3: A slot name may contain any of the characters [A-Za-z0-9+_.-].
46233 # It must not begin with a hyphen or a dot.
46234 _slot_separator = ":"
46235 -_slot = r'([\w+][\w+.-]*)'
46236 # loosely match SLOT, which may have an optional ABI part
46237 _slot_loose = r'([\w+./*=-]+)'
46238
46239 @@ -55,51 +52,34 @@ _op = r'([=~]|[><]=?)'
46240
46241 _repo_separator = "::"
46242 _repo_name = r'[\w][\w-]*'
46243 +_repo_name_re = re.compile('^' + _repo_name + '$', re.UNICODE)
46244 _repo = r'(?:' + _repo_separator + '(' + _repo_name + ')' + ')?'
46245
46246 _extended_cat = r'[\w+*][\w+.*-]*'
46247
46248 -_slot_re_cache = {}
46249 -
46250 -def _get_slot_re(eapi_attrs):
46251 - cache_key = eapi_attrs.slot_abi
46252 - slot_re = _slot_re_cache.get(cache_key)
46253 - if slot_re is not None:
46254 - return slot_re
46255 -
46256 - if eapi_attrs.slot_abi:
46257 - slot_re = _slot + r'(/' + _slot + r'=?)?'
46258 - else:
46259 - slot_re = _slot
46260 -
46261 - slot_re = re.compile('^' + slot_re + '$', re.VERBOSE)
46262 -
46263 - _slot_re_cache[cache_key] = slot_re
46264 - return slot_re
46265 -
46266 _slot_dep_re_cache = {}
46267
46268 def _get_slot_dep_re(eapi_attrs):
46269 - cache_key = eapi_attrs.slot_abi
46270 + cache_key = eapi_attrs.slot_operator
46271 slot_re = _slot_dep_re_cache.get(cache_key)
46272 if slot_re is not None:
46273 return slot_re
46274
46275 - if eapi_attrs.slot_abi:
46276 + if eapi_attrs.slot_operator:
46277 slot_re = _slot + r'?(\*|=|/' + _slot + r'=?)?'
46278 else:
46279 slot_re = _slot
46280
46281 - slot_re = re.compile('^' + slot_re + '$', re.VERBOSE)
46282 + slot_re = re.compile('^' + slot_re + '$', re.VERBOSE | re.UNICODE)
46283
46284 _slot_dep_re_cache[cache_key] = slot_re
46285 return slot_re
46286
46287 def _match_slot(atom, pkg):
46288 if pkg.slot == atom.slot:
46289 - if not atom.slot_abi:
46290 + if not atom.sub_slot:
46291 return True
46292 - elif atom.slot_abi == pkg.slot_abi:
46293 + elif atom.sub_slot == pkg.sub_slot:
46294 return True
46295 return False
46296
46297 @@ -123,7 +103,7 @@ def _get_atom_re(eapi_attrs):
46298 '(?P<star>=' + cpv_re + r'\*)|' +
46299 '(?P<simple>' + cp_re + '))' +
46300 '(' + _slot_separator + _slot_loose + ')?' +
46301 - _repo + ')(' + _use + ')?$', re.VERBOSE)
46302 + _repo + ')(' + _use + ')?$', re.VERBOSE | re.UNICODE)
46303
46304 _atom_re_cache[cache_key] = atom_re
46305 return atom_re
46306 @@ -142,10 +122,10 @@ def _get_atom_wildcard_re(eapi_attrs):
46307 pkg_re = r'[\w+*][\w+*-]*?'
46308
46309 atom_re = re.compile(r'((?P<simple>(' +
46310 - _extended_cat + r')/(' + pkg_re + r'))' + \
46311 - '|(?P<star>=((' + _extended_cat + r')/(' + pkg_re + r'))-(?P<version>\*\d+\*)))' + \
46312 + _extended_cat + r')/(' + pkg_re + r'(-' + _vr + ')?))' + \
46313 + '|(?P<star>=((' + _extended_cat + r')/(' + pkg_re + r'))-(?P<version>\*\w+\*)))' + \
46314 '(:(?P<slot>' + _slot_loose + r'))?(' +
46315 - _repo_separator + r'(?P<repo>' + _repo_name + r'))?$')
46316 + _repo_separator + r'(?P<repo>' + _repo_name + r'))?$', re.UNICODE)
46317
46318 _atom_wildcard_re_cache[cache_key] = atom_re
46319 return atom_re
46320 @@ -259,7 +239,7 @@ def strip_empty(myarr):
46321 ('portage.dep.strip_empty',), DeprecationWarning, stacklevel=2)
46322 return [x for x in myarr if x]
46323
46324 -def paren_reduce(mystr):
46325 +def paren_reduce(mystr, _deprecation_warn=True):
46326 """
46327 Take a string and convert all paren enclosed entities into sublists and
46328 split the list elements by spaces. All redundant brackets are removed.
46329 @@ -273,7 +253,7 @@ def paren_reduce(mystr):
46330 @rtype: Array
46331 @return: The reduced string in an array
46332 """
46333 - if _internal_warnings:
46334 + if portage._internal_caller and _deprecation_warn:
46335 warnings.warn(_("%s is deprecated and will be removed without replacement.") % \
46336 ('portage.dep.paren_reduce',), DeprecationWarning, stacklevel=2)
46337 mysplit = mystr.split()
46338 @@ -365,7 +345,7 @@ class paren_normalize(list):
46339 """Take a dependency structure as returned by paren_reduce or use_reduce
46340 and generate an equivalent structure that has no redundant lists."""
46341 def __init__(self, src):
46342 - if _internal_warnings:
46343 + if portage._internal_caller:
46344 warnings.warn(_("%s is deprecated and will be removed without replacement.") % \
46345 ('portage.dep.paren_normalize',), DeprecationWarning, stacklevel=2)
46346 list.__init__(self)
46347 @@ -461,7 +441,7 @@ def use_reduce(depstr, uselist=[], masklist=[], matchall=False, excludeall=[], i
46348 @return: The use reduced depend array
46349 """
46350 if isinstance(depstr, list):
46351 - if _internal_warnings:
46352 + if portage._internal_caller:
46353 warnings.warn(_("Passing paren_reduced dep arrays to %s is deprecated. " + \
46354 "Pass the original dep string instead.") % \
46355 ('portage.dep.use_reduce',), DeprecationWarning, stacklevel=2)
46356 @@ -762,7 +742,7 @@ def dep_opconvert(deplist):
46357 @return:
46358 The new list with the new ordering
46359 """
46360 - if _internal_warnings:
46361 + if portage._internal_caller:
46362 warnings.warn(_("%s is deprecated. Use %s with the opconvert parameter set to True instead.") % \
46363 ('portage.dep.dep_opconvert', 'portage.dep.use_reduce'), DeprecationWarning, stacklevel=2)
46364
46365 @@ -793,7 +773,7 @@ def flatten(mylist):
46366 @rtype: List
46367 @return: A single list containing only non-list elements.
46368 """
46369 - if _internal_warnings:
46370 + if portage._internal_caller:
46371 warnings.warn(_("%s is deprecated and will be removed without replacement.") % \
46372 ('portage.dep.flatten',), DeprecationWarning, stacklevel=2)
46373
46374 @@ -1233,11 +1213,14 @@ class Atom(_unicode):
46375 if allow_repo is None:
46376 allow_repo = True
46377
46378 + blocker_prefix = ""
46379 if "!" == s[:1]:
46380 blocker = self._blocker(forbid_overlap=("!" == s[1:2]))
46381 if blocker.overlap.forbid:
46382 + blocker_prefix = s[:2]
46383 s = s[2:]
46384 else:
46385 + blocker_prefix = s[:1]
46386 s = s[1:]
46387 else:
46388 blocker = False
46389 @@ -1261,6 +1244,8 @@ class Atom(_unicode):
46390 else:
46391 op = None
46392 cpv = cp = m.group('simple')
46393 + if m.group(atom_re.groupindex['simple'] + 3) is not None:
46394 + raise InvalidAtom(self)
46395 if cpv.find("**") != -1:
46396 raise InvalidAtom(self)
46397 slot = m.group('slot')
46398 @@ -1311,32 +1296,34 @@ class Atom(_unicode):
46399 self.__dict__['repo'] = repo
46400 if slot is None:
46401 self.__dict__['slot'] = None
46402 - self.__dict__['slot_abi'] = None
46403 - self.__dict__['slot_abi_op'] = None
46404 + self.__dict__['sub_slot'] = None
46405 + self.__dict__['slot_operator'] = None
46406 else:
46407 slot_re = _get_slot_dep_re(eapi_attrs)
46408 slot_match = slot_re.match(slot)
46409 if slot_match is None:
46410 raise InvalidAtom(self)
46411 - if eapi_attrs.slot_abi:
46412 + if eapi_attrs.slot_operator:
46413 self.__dict__['slot'] = slot_match.group(1)
46414 - slot_abi = slot_match.group(2)
46415 - if slot_abi is not None:
46416 - slot_abi = slot_abi.lstrip("/")
46417 - if slot_abi in ("*", "="):
46418 - self.__dict__['slot_abi'] = None
46419 - self.__dict__['slot_abi_op'] = slot_abi
46420 + sub_slot = slot_match.group(2)
46421 + if sub_slot is not None:
46422 + sub_slot = sub_slot.lstrip("/")
46423 + if sub_slot in ("*", "="):
46424 + self.__dict__['sub_slot'] = None
46425 + self.__dict__['slot_operator'] = sub_slot
46426 else:
46427 - slot_abi_op = None
46428 - if slot_abi is not None and slot_abi[-1:] == "=":
46429 - slot_abi_op = slot_abi[-1:]
46430 - slot_abi = slot_abi[:-1]
46431 - self.__dict__['slot_abi'] = slot_abi
46432 - self.__dict__['slot_abi_op'] = slot_abi_op
46433 + slot_operator = None
46434 + if sub_slot is not None and sub_slot[-1:] == "=":
46435 + slot_operator = sub_slot[-1:]
46436 + sub_slot = sub_slot[:-1]
46437 + self.__dict__['sub_slot'] = sub_slot
46438 + self.__dict__['slot_operator'] = slot_operator
46439 + if self.slot is not None and self.slot_operator == "*":
46440 + raise InvalidAtom(self)
46441 else:
46442 self.__dict__['slot'] = slot
46443 - self.__dict__['slot_abi'] = None
46444 - self.__dict__['slot_abi_op'] = None
46445 + self.__dict__['sub_slot'] = None
46446 + self.__dict__['slot_operator'] = None
46447 self.__dict__['operator'] = op
46448 self.__dict__['extended_syntax'] = extended_syntax
46449
46450 @@ -1348,15 +1335,18 @@ class Atom(_unicode):
46451 use = _use
46452 else:
46453 use = _use_dep(use_str[1:-1].split(","), eapi_attrs)
46454 - without_use = Atom(m.group('without_use'), allow_repo=allow_repo)
46455 + without_use = Atom(blocker_prefix + m.group('without_use'),
46456 + allow_repo=allow_repo)
46457 else:
46458 use = None
46459 if unevaluated_atom is not None and \
46460 unevaluated_atom.use is not None:
46461 # unevaluated_atom.use is used for IUSE checks when matching
46462 # packages, so it must not propagate to without_use
46463 - without_use = Atom(s, allow_wildcard=allow_wildcard,
46464 - allow_repo=allow_repo)
46465 + without_use = Atom(_unicode(self),
46466 + allow_wildcard=allow_wildcard,
46467 + allow_repo=allow_repo,
46468 + eapi=eapi)
46469 else:
46470 without_use = self
46471
46472 @@ -1410,13 +1400,13 @@ class Atom(_unicode):
46473 % (eapi, self), category='EAPI.incompatible')
46474
46475 @property
46476 - def slot_abi_built(self):
46477 + def slot_operator_built(self):
46478 """
46479 - Returns True if slot_abi_op == "=" and slot_abi is not None.
46480 + Returns True if slot_operator == "=" and sub_slot is not None.
46481 NOTE: foo/bar:2= is unbuilt and returns False, whereas foo/bar:2/2=
46482 is built and returns True.
46483 """
46484 - return self.slot_abi_op == "=" and self.slot_abi is not None
46485 + return self.slot_operator == "=" and self.sub_slot is not None
46486
46487 @property
46488 def without_repo(self):
46489 @@ -1427,7 +1417,7 @@ class Atom(_unicode):
46490
46491 @property
46492 def without_slot(self):
46493 - if self.slot is None and self.slot_abi_op is None:
46494 + if self.slot is None and self.slot_operator is None:
46495 return self
46496 atom = remove_slot(self)
46497 if self.repo is not None:
46498 @@ -1439,14 +1429,14 @@ class Atom(_unicode):
46499
46500 def with_repo(self, repo):
46501 atom = remove_slot(self)
46502 - if self.slot is not None or self.slot_abi_op is not None:
46503 + if self.slot is not None or self.slot_operator is not None:
46504 atom += _slot_separator
46505 if self.slot is not None:
46506 atom += self.slot
46507 - if self.slot_abi is not None:
46508 - atom += "/%s" % self.slot_abi
46509 - if self.slot_abi_op is not None:
46510 - atom += self.slot_abi_op
46511 + if self.sub_slot is not None:
46512 + atom += "/%s" % self.sub_slot
46513 + if self.slot_operator is not None:
46514 + atom += self.slot_operator
46515 atom += _repo_separator + repo
46516 if self.use is not None:
46517 atom += _unicode(self.use)
46518 @@ -1506,14 +1496,14 @@ class Atom(_unicode):
46519 if not (self.use and self.use.conditional):
46520 return self
46521 atom = remove_slot(self)
46522 - if self.slot is not None or self.slot_abi_op is not None:
46523 + if self.slot is not None or self.slot_operator is not None:
46524 atom += _slot_separator
46525 if self.slot is not None:
46526 atom += self.slot
46527 - if self.slot_abi is not None:
46528 - atom += "/%s" % self.slot_abi
46529 - if self.slot_abi_op is not None:
46530 - atom += self.slot_abi_op
46531 + if self.sub_slot is not None:
46532 + atom += "/%s" % self.sub_slot
46533 + if self.slot_operator is not None:
46534 + atom += self.slot_operator
46535 use_dep = self.use.evaluate_conditionals(use)
46536 atom += _unicode(use_dep)
46537 return Atom(atom, unevaluated_atom=self, allow_repo=(self.repo is not None), _use=use_dep)
46538 @@ -1534,14 +1524,14 @@ class Atom(_unicode):
46539 if not self.use:
46540 return self
46541 atom = remove_slot(self)
46542 - if self.slot is not None or self.slot_abi_op is not None:
46543 + if self.slot is not None or self.slot_operator is not None:
46544 atom += _slot_separator
46545 if self.slot is not None:
46546 atom += self.slot
46547 - if self.slot_abi is not None:
46548 - atom += "/%s" % self.slot_abi
46549 - if self.slot_abi_op is not None:
46550 - atom += self.slot_abi_op
46551 + if self.sub_slot is not None:
46552 + atom += "/%s" % self.sub_slot
46553 + if self.slot_operator is not None:
46554 + atom += self.slot_operator
46555 use_dep = self.use.violated_conditionals(other_use, is_valid_flag, parent_use)
46556 atom += _unicode(use_dep)
46557 return Atom(atom, unevaluated_atom=self, allow_repo=(self.repo is not None), _use=use_dep)
46558 @@ -1550,14 +1540,14 @@ class Atom(_unicode):
46559 if not (self.use and self.use.conditional):
46560 return self
46561 atom = remove_slot(self)
46562 - if self.slot is not None or self.slot_abi_op is not None:
46563 + if self.slot is not None or self.slot_operator is not None:
46564 atom += _slot_separator
46565 if self.slot is not None:
46566 atom += self.slot
46567 - if self.slot_abi is not None:
46568 - atom += "/%s" % self.slot_abi
46569 - if self.slot_abi_op is not None:
46570 - atom += self.slot_abi_op
46571 + if self.sub_slot is not None:
46572 + atom += "/%s" % self.sub_slot
46573 + if self.slot_operator is not None:
46574 + atom += self.slot_operator
46575 use_dep = self.use._eval_qa_conditionals(use_mask, use_force)
46576 atom += _unicode(use_dep)
46577 return Atom(atom, unevaluated_atom=self, allow_repo=(self.repo is not None), _use=use_dep)
46578 @@ -1583,7 +1573,7 @@ def extended_cp_match(extended_cp, other_cp):
46579 extended_cp_re = _extended_cp_re_cache.get(extended_cp)
46580 if extended_cp_re is None:
46581 extended_cp_re = re.compile("^" + re.escape(extended_cp).replace(
46582 - r'\*', '[^/]*') + "$")
46583 + r'\*', '[^/]*') + "$", re.UNICODE)
46584 _extended_cp_re_cache[extended_cp] = extended_cp_re
46585 return extended_cp_re.match(other_cp) is not None
46586
46587 @@ -2132,7 +2122,7 @@ def match_from_list(mydep, candidate_list):
46588
46589 candidate_list = mylist
46590 mylist = []
46591 - # Currently, only \*\d+\* is supported.
46592 + # Currently, only \*\w+\* is supported.
46593 ver = mydep.version[1:-1]
46594
46595 for x in candidate_list:
46596 @@ -2172,19 +2162,40 @@ def match_from_list(mydep, candidate_list):
46597 # XXX: Nasty special casing for leading zeros
46598 # Required as =* is a literal prefix match, so can't
46599 # use vercmp
46600 - mysplit = catpkgsplit(mycpv)
46601 - myver = mysplit[2].lstrip("0")
46602 + myver = mycpv_cps[2].lstrip("0")
46603 if not myver or not myver[0].isdigit():
46604 myver = "0"+myver
46605 - mycpv_cmp = mysplit[0]+"/"+mysplit[1]+"-"+myver
46606 + if myver == mycpv_cps[2]:
46607 + mycpv_cmp = mycpv
46608 + else:
46609 + # Use replace to preserve the revision part if it exists
46610 + # (mycpv_cps[3] can't be trusted because in contains r0
46611 + # (mycpv_cps[3] can't be trusted because it contains r0
46612 + mycpv_cmp = mycpv.replace(
46613 + mydep.cp + "-" + mycpv_cps[2],
46614 + mydep.cp + "-" + myver, 1)
46615 for x in candidate_list:
46616 - xs = getattr(x, "cpv_split", None)
46617 - if xs is None:
46618 - xs = catpkgsplit(remove_slot(x))
46619 + try:
46620 + x.cp
46621 + except AttributeError:
46622 + try:
46623 + pkg = _pkg_str(remove_slot(x))
46624 + except InvalidData:
46625 + continue
46626 + else:
46627 + pkg = x
46628 +
46629 + xs = pkg.cpv_split
46630 myver = xs[2].lstrip("0")
46631 if not myver or not myver[0].isdigit():
46632 myver = "0"+myver
46633 - xcpv = xs[0]+"/"+xs[1]+"-"+myver
46634 + if myver == xs[2]:
46635 + xcpv = pkg.cpv
46636 + else:
46637 + # Use replace to preserve the revision part if it exists.
46638 + xcpv = pkg.cpv.replace(
46639 + pkg.cp + "-" + xs[2],
46640 + pkg.cp + "-" + myver, 1)
46641 if xcpv.startswith(mycpv_cmp):
46642 mylist.append(x)
46643
46644 @@ -2277,9 +2288,11 @@ def match_from_list(mydep, candidate_list):
46645 continue
46646
46647 if mydep.use:
46648 -
46649 - missing_enabled = mydep.use.missing_enabled.difference(x.iuse.all)
46650 - missing_disabled = mydep.use.missing_disabled.difference(x.iuse.all)
46651 + is_valid_flag = x.iuse.is_valid_flag
46652 + missing_enabled = frozenset(flag for flag in
46653 + mydep.use.missing_enabled if not is_valid_flag(flag))
46654 + missing_disabled = frozenset(flag for flag in
46655 + mydep.use.missing_disabled if not is_valid_flag(flag))
46656
46657 if mydep.use.enabled:
46658 if any(f in mydep.use.enabled for f in missing_disabled):
46659 @@ -2314,9 +2327,9 @@ def match_from_list(mydep, candidate_list):
46660 return mylist
46661
46662 def human_readable_required_use(required_use):
46663 - return required_use.replace("^^", "exactly-one-of").replace("||", "any-of")
46664 + return required_use.replace("^^", "exactly-one-of").replace("||", "any-of").replace("??", "at-most-one-of")
46665
46666 -def get_required_use_flags(required_use):
46667 +def get_required_use_flags(required_use, eapi=None):
46668 """
46669 Returns a set of use flags that are used in the given REQUIRED_USE string
46670
46671 @@ -2326,6 +2339,12 @@ def get_required_use_flags(required_use):
46672 @return: Set of use flags that are used in the given REQUIRED_USE string
46673 """
46674
46675 + eapi_attrs = _get_eapi_attrs(eapi)
46676 + if eapi_attrs.required_use_at_most_one_of:
46677 + valid_operators = ("||", "^^", "??")
46678 + else:
46679 + valid_operators = ("||", "^^")
46680 +
46681 mysplit = required_use.split()
46682 level = 0
46683 stack = [[]]
46684 @@ -2354,7 +2373,7 @@ def get_required_use_flags(required_use):
46685 l = stack.pop()
46686 ignore = False
46687 if stack[level]:
46688 - if stack[level][-1] in ("||", "^^") or \
46689 + if stack[level][-1] in valid_operators or \
46690 (not isinstance(stack[level][-1], bool) and \
46691 stack[level][-1][-1] == "?"):
46692 ignore = True
46693 @@ -2366,15 +2385,14 @@ def get_required_use_flags(required_use):
46694 else:
46695 raise InvalidDependString(
46696 _("malformed syntax: '%s'") % required_use)
46697 - elif token in ("||", "^^"):
46698 + elif token in valid_operators:
46699 if need_bracket:
46700 raise InvalidDependString(
46701 _("malformed syntax: '%s'") % required_use)
46702 need_bracket = True
46703 stack[level].append(token)
46704 else:
46705 - if need_bracket or "(" in token or ")" in token or \
46706 - "|" in token or "^" in token:
46707 + if need_bracket:
46708 raise InvalidDependString(
46709 _("malformed syntax: '%s'") % required_use)
46710
46711 @@ -2429,7 +2447,7 @@ class _RequiredUseBranch(object):
46712 complex_nesting = False
46713 node = self
46714 while node != None and not complex_nesting:
46715 - if node._operator in ("||", "^^"):
46716 + if node._operator in ("||", "^^", "??"):
46717 complex_nesting = True
46718 else:
46719 node = node._parent
46720 @@ -2450,7 +2468,7 @@ class _RequiredUseBranch(object):
46721 if sys.hexversion < 0x3000000:
46722 __nonzero__ = __bool__
46723
46724 -def check_required_use(required_use, use, iuse_match):
46725 +def check_required_use(required_use, use, iuse_match, eapi=None):
46726 """
46727 Checks if the use flags listed in 'use' satisfy all
46728 constraints specified in 'constraints'.
46729 @@ -2466,6 +2484,12 @@ def check_required_use(required_use, use, iuse_match):
46730 @return: Indicates if REQUIRED_USE constraints are satisfied
46731 """
46732
46733 + eapi_attrs = _get_eapi_attrs(eapi)
46734 + if eapi_attrs.required_use_at_most_one_of:
46735 + valid_operators = ("||", "^^", "??")
46736 + else:
46737 + valid_operators = ("||", "^^")
46738 +
46739 def is_active(token):
46740 if token.startswith("!"):
46741 flag = token[1:]
46742 @@ -2475,6 +2499,11 @@ def check_required_use(required_use, use, iuse_match):
46743 is_negated = False
46744
46745 if not flag or not iuse_match(flag):
46746 + if not eapi_attrs.required_use_at_most_one_of and flag == "?":
46747 + msg = _("Operator '??' is not supported with EAPI '%s'") \
46748 + % (eapi,)
46749 + e = InvalidData(msg, category='EAPI.incompatible')
46750 + raise InvalidDependString(msg, errors=(e,))
46751 msg = _("USE flag '%s' is not in IUSE") \
46752 % (flag,)
46753 e = InvalidData(msg, category='IUSE.missing')
46754 @@ -2492,6 +2521,8 @@ def check_required_use(required_use, use, iuse_match):
46755 return (True in argument)
46756 elif operator == "^^":
46757 return (argument.count(True) == 1)
46758 + elif operator == "??":
46759 + return (argument.count(True) <= 1)
46760 elif operator[-1] == "?":
46761 return (False not in argument)
46762
46763 @@ -2521,7 +2552,7 @@ def check_required_use(required_use, use, iuse_match):
46764 l = stack.pop()
46765 op = None
46766 if stack[level]:
46767 - if stack[level][-1] in ("||", "^^"):
46768 + if stack[level][-1] in valid_operators:
46769 op = stack[level].pop()
46770 satisfied = is_satisfied(op, l)
46771 stack[level].append(satisfied)
46772 @@ -2550,7 +2581,7 @@ def check_required_use(required_use, use, iuse_match):
46773 stack[level].append(satisfied)
46774
46775 if len(node._children) <= 1 or \
46776 - node._parent._operator not in ("||", "^^"):
46777 + node._parent._operator not in valid_operators:
46778 last_node = node._parent._children.pop()
46779 if last_node is not node:
46780 raise AssertionError(
46781 @@ -2566,7 +2597,7 @@ def check_required_use(required_use, use, iuse_match):
46782 raise AssertionError(
46783 "node is not last child of parent")
46784
46785 - elif len(node._children) == 1 and op in ("||", "^^"):
46786 + elif len(node._children) == 1 and op in valid_operators:
46787 last_node = node._parent._children.pop()
46788 if last_node is not node:
46789 raise AssertionError(
46790 @@ -2576,7 +2607,7 @@ def check_required_use(required_use, use, iuse_match):
46791 node._children[0]._parent = node._parent
46792 node = node._children[0]
46793 if node._operator is None and \
46794 - node._parent._operator not in ("||", "^^"):
46795 + node._parent._operator not in valid_operators:
46796 last_node = node._parent._children.pop()
46797 if last_node is not node:
46798 raise AssertionError(
46799 @@ -2590,7 +2621,7 @@ def check_required_use(required_use, use, iuse_match):
46800 else:
46801 raise InvalidDependString(
46802 _("malformed syntax: '%s'") % required_use)
46803 - elif token in ("||", "^^"):
46804 + elif token in valid_operators:
46805 if need_bracket:
46806 raise InvalidDependString(
46807 _("malformed syntax: '%s'") % required_use)
46808 @@ -2600,8 +2631,7 @@ def check_required_use(required_use, use, iuse_match):
46809 node._children.append(child)
46810 node = child
46811 else:
46812 - if need_bracket or "(" in token or ")" in token or \
46813 - "|" in token or "^" in token:
46814 + if need_bracket:
46815 raise InvalidDependString(
46816 _("malformed syntax: '%s'") % required_use)
46817
46818 @@ -2629,16 +2659,16 @@ def extract_affecting_use(mystr, atom, eapi=None):
46819 that decide if the given atom is in effect.
46820
46821 Example usage:
46822 - >>> extract_use_cond('sasl? ( dev-libs/cyrus-sasl ) \
46823 + >>> extract_affecting_use('sasl? ( dev-libs/cyrus-sasl ) \
46824 !minimal? ( cxx? ( dev-libs/cyrus-sasl ) )', 'dev-libs/cyrus-sasl')
46825 - (['sasl', 'minimal', 'cxx'])
46826 + {'cxx', 'minimal', 'sasl'}
46827
46828 - @param dep: The dependency string
46829 + @param mystr: The dependency string
46830 @type mystr: String
46831 @param atom: The atom to get into effect
46832 @type atom: String
46833 - @rtype: Tuple of two lists of strings
46834 - @return: List of use flags that need to be enabled, List of use flag that need to be disabled
46835 + @rtype: Set of strings
46836 + @return: Set of use flags affecting given atom
46837 """
46838 useflag_re = _get_useflag_re(eapi)
46839 mysplit = mystr.split()
46840 @@ -2744,3 +2774,48 @@ def extract_affecting_use(mystr, atom, eapi=None):
46841 _("malformed syntax: '%s'") % mystr)
46842
46843 return affecting_use
46844 +
46845 +def extract_unpack_dependencies(src_uri, unpackers):
46846 + """
46847 + Return unpack dependencies string for given SRC_URI string.
46848 +
46849 + @param src_uri: SRC_URI string
46850 + @type src_uri: String
46851 + @param unpackers: Dictionary mapping archive suffixes to dependency strings
46852 + @type unpackers: Dictionary
46853 + @rtype: String
46854 + @return: Dependency string specifying packages required to unpack archives.
46855 + """
46856 + src_uri = src_uri.split()
46857 +
46858 + depend = []
46859 + for i in range(len(src_uri)):
46860 + if src_uri[i][-1] == "?" or src_uri[i] in ("(", ")"):
46861 + depend.append(src_uri[i])
46862 + elif (i+1 < len(src_uri) and src_uri[i+1] == "->") or src_uri[i] == "->":
46863 + continue
46864 + else:
46865 + for suffix in sorted(unpackers, key=lambda x: len(x), reverse=True):
46866 + suffix = suffix.lower()
46867 + if src_uri[i].lower().endswith(suffix):
46868 + depend.append(unpackers[suffix])
46869 + break
46870 +
46871 + while True:
46872 + cleaned_depend = depend[:]
46873 + for i in range(len(cleaned_depend)):
46874 + if cleaned_depend[i] is None:
46875 + continue
46876 + elif cleaned_depend[i] == "(" and cleaned_depend[i+1] == ")":
46877 + cleaned_depend[i] = None
46878 + cleaned_depend[i+1] = None
46879 + elif cleaned_depend[i][-1] == "?" and cleaned_depend[i+1] == "(" and cleaned_depend[i+2] == ")":
46880 + cleaned_depend[i] = None
46881 + cleaned_depend[i+1] = None
46882 + cleaned_depend[i+2] = None
46883 + if depend == cleaned_depend:
46884 + break
46885 + else:
46886 + depend = [x for x in cleaned_depend if x is not None]
46887 +
46888 + return " ".join(depend)
46889
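For readers tracing the new extract_unpack_dependencies() helper above, a minimal usage sketch. The suffix-to-dependency mapping is hypothetical (real mappings come from repository configuration for EAPIs with automatic unpack dependencies), and the import assumes this portage branch is on the Python path:

from portage.dep import extract_unpack_dependencies

# Hypothetical mapping of archive suffixes to unpacker dependencies.
unpackers = {
	".tar.bz2": "app-arch/bzip2",
	".zip": "app-arch/unzip",
}

src_uri = "mirror://foo/foo-1.0.tar.bz2 doc? ( mirror://foo/foo-doc.zip )"

# USE conditionals and parentheses are kept, each archive token is matched
# against the longest suffix, and empty groups are pruned afterwards:
print(extract_unpack_dependencies(src_uri, unpackers))
# -> app-arch/bzip2 doc? ( app-arch/unzip )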
46890 diff --git a/pym/portage/dep/_slot_abi.py b/pym/portage/dep/_slot_operator.py
46891 similarity index 58%
46892 rename from pym/portage/dep/_slot_abi.py
46893 rename to pym/portage/dep/_slot_operator.py
46894 index 7c36e52..7b64444 100644
46895 --- a/pym/portage/dep/_slot_abi.py
46896 +++ b/pym/portage/dep/_slot_operator.py
46897 @@ -1,59 +1,64 @@
46898 -# Copyright 2012 Gentoo Foundation
46899 +# Copyright 2012-2013 Gentoo Foundation
46900 # Distributed under the terms of the GNU General Public License v2
46901
46902 +from __future__ import unicode_literals
46903 +
46904 from portage.dep import Atom, paren_enclose, use_reduce
46905 +from portage.eapi import _get_eapi_attrs
46906 from portage.exception import InvalidData
46907 +from _emerge.Package import Package
46908
46909 -_dep_keys = ('DEPEND', 'PDEPEND', 'RDEPEND')
46910 -_runtime_keys = ('PDEPEND', 'RDEPEND')
46911 -
46912 -def find_built_slot_abi_atoms(pkg):
46913 +def find_built_slot_operator_atoms(pkg):
46914 atoms = {}
46915 - for k in _dep_keys:
46916 - atom_list = list(_find_built_slot_abi_op(use_reduce(pkg.metadata[k],
46917 - uselist=pkg.use.enabled, eapi=pkg.metadata['EAPI'],
46918 + for k in Package._dep_keys:
46919 + atom_list = list(_find_built_slot_operator(use_reduce(pkg._metadata[k],
46920 + uselist=pkg.use.enabled, eapi=pkg.eapi,
46921 token_class=Atom)))
46922 if atom_list:
46923 atoms[k] = atom_list
46924 return atoms
46925
46926 -def _find_built_slot_abi_op(dep_struct):
46927 +def _find_built_slot_operator(dep_struct):
46928 for x in dep_struct:
46929 if isinstance(x, list):
46930 - for atom in _find_built_slot_abi_op(x):
46931 + for atom in _find_built_slot_operator(x):
46932 yield atom
46933 - elif isinstance(x, Atom) and x.slot_abi_built:
46934 + elif isinstance(x, Atom) and x.slot_operator_built:
46935 yield x
46936
46937 -def ignore_built_slot_abi_deps(dep_struct):
46938 +def ignore_built_slot_operator_deps(dep_struct):
46939 for i, x in enumerate(dep_struct):
46940 if isinstance(x, list):
46941 - ignore_built_slot_abi_deps(x)
46942 - elif isinstance(x, Atom) and x.slot_abi_built:
46943 + ignore_built_slot_operator_deps(x)
46944 + elif isinstance(x, Atom) and x.slot_operator_built:
46945 # There's no way of knowing here whether the SLOT
46946 - # part of the SLOT/ABI pair should be kept, so we
46947 + # part of the slot/sub-slot pair should be kept, so we
46948 # ignore both parts.
46949 dep_struct[i] = x.without_slot
46950
46951 -def evaluate_slot_abi_equal_deps(settings, use, trees):
46952 +def evaluate_slot_operator_equal_deps(settings, use, trees):
46953
46954 metadata = settings.configdict['pkg']
46955 eapi = metadata['EAPI']
46956 + eapi_attrs = _get_eapi_attrs(eapi)
46957 running_vardb = trees[trees._running_eroot]["vartree"].dbapi
46958 target_vardb = trees[trees._target_eroot]["vartree"].dbapi
46959 vardbs = [target_vardb]
46960 deps = {}
46961 - for k in _dep_keys:
46962 + for k in Package._dep_keys:
46963 deps[k] = use_reduce(metadata[k],
46964 uselist=use, eapi=eapi, token_class=Atom)
46965
46966 - for k in _runtime_keys:
46967 + for k in Package._runtime_keys:
46968 _eval_deps(deps[k], vardbs)
46969
46970 - if running_vardb is not target_vardb:
46971 - vardbs.append(running_vardb)
46972 -
46973 - _eval_deps(deps["DEPEND"], vardbs)
46974 + if eapi_attrs.hdepend:
46975 + _eval_deps(deps["HDEPEND"], [running_vardb])
46976 + _eval_deps(deps["DEPEND"], [target_vardb])
46977 + else:
46978 + if running_vardb is not target_vardb:
46979 + vardbs.append(running_vardb)
46980 + _eval_deps(deps["DEPEND"], vardbs)
46981
46982 result = {}
46983 for k, v in deps.items():
46984 @@ -65,7 +70,7 @@ def _eval_deps(dep_struct, vardbs):
46985 for i, x in enumerate(dep_struct):
46986 if isinstance(x, list):
46987 _eval_deps(x, vardbs)
46988 - elif isinstance(x, Atom) and x.slot_abi_op == "=":
46989 + elif isinstance(x, Atom) and x.slot_operator == "=":
46990 for vardb in vardbs:
46991 best_version = vardb.match(x)
46992 if best_version:
46993 @@ -77,7 +82,7 @@ def _eval_deps(dep_struct, vardbs):
46994 pass
46995 else:
46996 slot_part = "%s/%s=" % \
46997 - (best_version.slot, best_version.slot_abi)
46998 + (best_version.slot, best_version.sub_slot)
46999 x = x.with_slot(slot_part)
47000 dep_struct[i] = x
47001 break
47002
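As context for the slot_abi to slot_operator rename above, a small sketch of what evaluate_slot_operator_equal_deps() ultimately does to a "=" slot-operator atom once an installed instance is found. The package name and slot/sub-slot values are made up for illustration:

from portage.dep import Atom

atom = Atom("dev-libs/foo:=", eapi="5")

# If the matching installed package has SLOT="0/1.2", _eval_deps()
# rewrites the dependency so the built package records the exact
# slot/sub-slot it was built against:
pinned = atom.with_slot("0/1.2=")
print(pinned)  # dev-libs/foo:0/1.2=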
47003 diff --git a/pym/portage/dep/dep_check.py b/pym/portage/dep/dep_check.py
47004 index d575ab3..b5ace3d 100644
47005 --- a/pym/portage/dep/dep_check.py
47006 +++ b/pym/portage/dep/dep_check.py
47007 @@ -1,16 +1,19 @@
47008 -# Copyright 2010-2012 Gentoo Foundation
47009 +# Copyright 2010-2013 Gentoo Foundation
47010 # Distributed under the terms of the GNU General Public License v2
47011
47012 +from __future__ import unicode_literals
47013 +
47014 __all__ = ['dep_check', 'dep_eval', 'dep_wordreduce', 'dep_zapdeps']
47015
47016 import logging
47017 +import operator
47018
47019 import portage
47020 -from portage import _unicode_decode
47021 from portage.dep import Atom, match_from_list, use_reduce
47022 from portage.exception import InvalidDependString, ParseError
47023 from portage.localization import _
47024 from portage.util import writemsg, writemsg_level
47025 +from portage.util.SlotObject import SlotObject
47026 from portage.versions import vercmp, _pkg_str
47027
47028 def _expand_new_virtuals(mysplit, edebug, mydbapi, mysettings, myroot="/",
47029 @@ -160,7 +163,7 @@ def _expand_new_virtuals(mysplit, edebug, mydbapi, mysettings, myroot="/",
47030 # According to GLEP 37, RDEPEND is the only dependency
47031 # type that is valid for new-style virtuals. Repoman
47032 # should enforce this.
47033 - depstring = pkg.metadata['RDEPEND']
47034 + depstring = pkg._metadata['RDEPEND']
47035 pkg_kwargs = kwargs.copy()
47036 pkg_kwargs["myuse"] = pkg_use_enabled(pkg)
47037 if edebug:
47038 @@ -183,7 +186,7 @@ def _expand_new_virtuals(mysplit, edebug, mydbapi, mysettings, myroot="/",
47039 del mytrees["virt_parent"]
47040
47041 if not mycheck[0]:
47042 - raise ParseError(_unicode_decode("%s: %s '%s'") % \
47043 + raise ParseError("%s: %s '%s'" % \
47044 (pkg, mycheck[1], depstring))
47045
47046 # pull in the new-style virtual
47047 @@ -254,6 +257,10 @@ def dep_eval(deplist):
47048 return 0
47049 return 1
47050
47051 +class _dep_choice(SlotObject):
47052 + __slots__ = ('atoms', 'slot_map', 'cp_map', 'all_available',
47053 + 'all_installed_slots')
47054 +
47055 def dep_zapdeps(unreduced, reduced, myroot, use_binaries=0, trees=None):
47056 """
47057 Takes an unreduced and reduced deplist and removes satisfied dependencies.
47058 @@ -316,6 +323,7 @@ def dep_zapdeps(unreduced, reduced, myroot, use_binaries=0, trees=None):
47059 priority = trees[myroot].get("priority")
47060 graph_db = trees[myroot].get("graph_db")
47061 graph = trees[myroot].get("graph")
47062 + want_update_pkg = trees[myroot].get("want_update_pkg")
47063 vardb = None
47064 if "vartree" in trees[myroot]:
47065 vardb = trees[myroot]["vartree"].dbapi
47066 @@ -324,6 +332,13 @@ def dep_zapdeps(unreduced, reduced, myroot, use_binaries=0, trees=None):
47067 else:
47068 mydbapi = trees[myroot]["porttree"].dbapi
47069
47070 + try:
47071 + mydbapi_match_pkgs = mydbapi.match_pkgs
47072 + except AttributeError:
47073 + def mydbapi_match_pkgs(atom):
47074 + return [mydbapi._pkg_str(cpv, atom.repo)
47075 + for cpv in mydbapi.match(atom)]
47076 +
47077 # Sort the deps into installed, not installed but already
47078 # in the graph and other, not installed and not in the graph
47079 # and other, with values of [[required_atom], availablility]
47079 # and other, with values of [[required_atom], availability]
47080 @@ -347,24 +362,17 @@ def dep_zapdeps(unreduced, reduced, myroot, use_binaries=0, trees=None):
47081 continue
47082 # Ignore USE dependencies here since we don't want USE
47083 # settings to adversely affect || preference evaluation.
47084 - avail_pkg = mydbapi.match(atom.without_use)
47085 + avail_pkg = mydbapi_match_pkgs(atom.without_use)
47086 if avail_pkg:
47087 avail_pkg = avail_pkg[-1] # highest (ascending order)
47088 - try:
47089 - slot = avail_pkg.slot
47090 - except AttributeError:
47091 - eapi, slot, repo = mydbapi.aux_get(avail_pkg,
47092 - ["EAPI", "SLOT", "repository"])
47093 - avail_pkg = _pkg_str(avail_pkg, eapi=eapi,
47094 - slot=slot, repo=repo)
47095 - avail_slot = Atom("%s:%s" % (atom.cp, slot))
47096 + avail_slot = Atom("%s:%s" % (atom.cp, avail_pkg.slot))
47097 if not avail_pkg:
47098 all_available = False
47099 all_use_satisfied = False
47100 break
47101
47102 if atom.use:
47103 - avail_pkg_use = mydbapi.match(atom)
47104 + avail_pkg_use = mydbapi_match_pkgs(atom)
47105 if not avail_pkg_use:
47106 all_use_satisfied = False
47107 else:
47108 @@ -372,13 +380,7 @@ def dep_zapdeps(unreduced, reduced, myroot, use_binaries=0, trees=None):
47109 avail_pkg_use = avail_pkg_use[-1]
47110 if avail_pkg_use != avail_pkg:
47111 avail_pkg = avail_pkg_use
47112 - try:
47113 - slot = avail_pkg.slot
47114 - except AttributeError:
47115 - eapi, slot, repo = mydbapi.aux_get(avail_pkg,
47116 - ["EAPI", "SLOT", "repository"])
47117 - avail_pkg = _pkg_str(avail_pkg,
47118 - eapi=eapi, slot=slot, repo=repo)
47119 + avail_slot = Atom("%s:%s" % (atom.cp, avail_pkg.slot))
47120
47121 slot_map[avail_slot] = avail_pkg
47122 highest_cpv = cp_map.get(avail_pkg.cp)
47123 @@ -386,7 +388,9 @@ def dep_zapdeps(unreduced, reduced, myroot, use_binaries=0, trees=None):
47124 vercmp(avail_pkg.version, highest_cpv.version) > 0:
47125 cp_map[avail_pkg.cp] = avail_pkg
47126
47127 - this_choice = (atoms, slot_map, cp_map, all_available)
47128 + this_choice = _dep_choice(atoms=atoms, slot_map=slot_map,
47129 + cp_map=cp_map, all_available=all_available,
47130 + all_installed_slots=False)
47131 if all_available:
47132 # The "all installed" criterion is not version or slot specific.
47133 # If any version of a package is already in the graph then we
47134 @@ -407,6 +411,7 @@ def dep_zapdeps(unreduced, reduced, myroot, use_binaries=0, trees=None):
47135 not slot_atom.startswith("virtual/"):
47136 all_installed_slots = False
47137 break
47138 + this_choice.all_installed_slots = all_installed_slots
47139 if graph_db is None:
47140 if all_use_satisfied:
47141 if all_installed:
47142 @@ -468,8 +473,27 @@ def dep_zapdeps(unreduced, reduced, myroot, use_binaries=0, trees=None):
47143 elif all_installed:
47144 if all_installed_slots:
47145 preferred_installed.append(this_choice)
47146 - else:
47147 + elif parent is None or want_update_pkg is None:
47148 preferred_any_slot.append(this_choice)
47149 + else:
47150 + # When appropriate, prefer a slot that is not
47151 + # installed yet for bug #478188.
47152 + want_update = True
47153 + for slot_atom, avail_pkg in slot_map.items():
47154 + if avail_pkg in graph:
47155 + continue
47156 + # New-style virtuals have zero cost to install.
47157 + if slot_atom.startswith("virtual/") or \
47158 + vardb.match(slot_atom):
47159 + continue
47160 + if not want_update_pkg(parent, avail_pkg):
47161 + want_update = False
47162 + break
47163 +
47164 + if want_update:
47165 + preferred_installed.append(this_choice)
47166 + else:
47167 + preferred_any_slot.append(this_choice)
47168 else:
47169 preferred_non_installed.append(this_choice)
47170 else:
47171 @@ -490,6 +514,7 @@ def dep_zapdeps(unreduced, reduced, myroot, use_binaries=0, trees=None):
47172 all_installed = False
47173
47174 if all_installed:
47175 + this_choice.all_installed_slots = True
47176 other_installed.append(this_choice)
47177 elif some_installed:
47178 other_installed_some.append(this_choice)
47179 @@ -506,22 +531,23 @@ def dep_zapdeps(unreduced, reduced, myroot, use_binaries=0, trees=None):
47180 for choices in choice_bins:
47181 if len(choices) < 2:
47182 continue
47183 + # Prefer choices with all_installed_slots for bug #480736.
47184 + choices.sort(key=operator.attrgetter('all_installed_slots'),
47185 + reverse=True)
47186 for choice_1 in choices[1:]:
47187 - atoms_1, slot_map_1, cp_map_1, all_available_1 = choice_1
47188 - cps = set(cp_map_1)
47189 + cps = set(choice_1.cp_map)
47190 for choice_2 in choices:
47191 if choice_1 is choice_2:
47192 # choice_1 will not be promoted, so move on
47193 break
47194 - atoms_2, slot_map_2, cp_map_2, all_available_2 = choice_2
47195 - intersecting_cps = cps.intersection(cp_map_2)
47196 + intersecting_cps = cps.intersection(choice_2.cp_map)
47197 if not intersecting_cps:
47198 continue
47199 has_upgrade = False
47200 has_downgrade = False
47201 for cp in intersecting_cps:
47202 - version_1 = cp_map_1[cp]
47203 - version_2 = cp_map_2[cp]
47204 + version_1 = choice_1.cp_map[cp]
47205 + version_2 = choice_2.cp_map[cp]
47206 difference = vercmp(version_1.version, version_2.version)
47207 if difference != 0:
47208 if difference > 0:
47209 @@ -538,9 +564,9 @@ def dep_zapdeps(unreduced, reduced, myroot, use_binaries=0, trees=None):
47210
47211 for allow_masked in (False, True):
47212 for choices in choice_bins:
47213 - for atoms, slot_map, cp_map, all_available in choices:
47214 - if all_available or allow_masked:
47215 - return atoms
47216 + for choice in choices:
47217 + if choice.all_available or allow_masked:
47218 + return choice.atoms
47219
47220 assert(False) # This point should not be reachable
47221
47222 @@ -575,18 +601,15 @@ def dep_check(depstring, mydbapi, mysettings, use="yes", mode=None, myuse=None,
47223
47224 mymasks = set()
47225 useforce = set()
47226 - useforce.add(mysettings["ARCH"])
47227 if use == "all":
47228 - # This masking/forcing is only for repoman. In other cases, relevant
47229 - # masking/forcing should have already been applied via
47230 - # config.regenerate(). Also, binary or installed packages may have
47231 - # been built with flags that are now masked, and it would be
47232 - # inconsistent to mask them now. Additionally, myuse may consist of
47233 - # flags from a parent package that is being merged to a $ROOT that is
47234 - # different from the one that mysettings represents.
47235 + # This is only for repoman, in order to constrain the use_reduce
47236 + # matchall behavior to account for profile use.mask/force. The
47237 + # ARCH/archlist code here may be redundant, since the profile
47238 + # really should be handling ARCH masking/forcing itself.
47239 mymasks.update(mysettings.usemask)
47240 mymasks.update(mysettings.archlist())
47241 mymasks.discard(mysettings["ARCH"])
47242 + useforce.add(mysettings["ARCH"])
47243 useforce.update(mysettings.useforce)
47244 useforce.difference_update(mymasks)
47245
47246 @@ -609,7 +632,7 @@ def dep_check(depstring, mydbapi, mysettings, use="yes", mode=None, myuse=None,
47247 # dependencies so that things like --depclean work as well as possible
47248 # in spite of partial invalidity.
47249 if not current_parent.installed:
47250 - eapi = current_parent.metadata['EAPI']
47251 + eapi = current_parent.eapi
47252
47253 if isinstance(depstring, list):
47254 mysplit = depstring
47255 @@ -619,7 +642,7 @@ def dep_check(depstring, mydbapi, mysettings, use="yes", mode=None, myuse=None,
47256 masklist=mymasks, matchall=(use=="all"), excludeall=useforce,
47257 opconvert=True, token_class=Atom, eapi=eapi)
47258 except InvalidDependString as e:
47259 - return [0, _unicode_decode("%s") % (e,)]
47260 + return [0, "%s" % (e,)]
47261
47262 if mysplit == []:
47263 #dependencies were reduced to nothing
47264 @@ -633,10 +656,10 @@ def dep_check(depstring, mydbapi, mysettings, use="yes", mode=None, myuse=None,
47265 use_force=useforce, use_mask=mymasks, use_cache=use_cache,
47266 use_binaries=use_binaries, myroot=myroot, trees=trees)
47267 except ParseError as e:
47268 - return [0, _unicode_decode("%s") % (e,)]
47269 + return [0, "%s" % (e,)]
47270
47271 - mysplit2=mysplit[:]
47272 - mysplit2=dep_wordreduce(mysplit2,mysettings,mydbapi,mode,use_cache=use_cache)
47273 + mysplit2 = dep_wordreduce(mysplit,
47274 + mysettings, mydbapi, mode, use_cache=use_cache)
47275 if mysplit2 is None:
47276 return [0, _("Invalid token")]
47277
47278
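The tuple-to-SlotObject change in dep_zapdeps() above is the pattern worth noting: choice records gain named fields, and the new all_installed_slots flag drives a stable preference sort (bug #480736). A stripped-down sketch with hypothetical data:

import operator
from portage.util.SlotObject import SlotObject

class _choice(SlotObject):
	# Hypothetical stand-in for _dep_choice, with only the fields
	# needed to show the sort.
	__slots__ = ("atoms", "all_installed_slots")

choices = [
	_choice(atoms=["dev-libs/b"], all_installed_slots=False),
	_choice(atoms=["dev-libs/a"], all_installed_slots=True),
]

# Stable sort: choices whose slots are all installed float to the front.
choices.sort(key=operator.attrgetter("all_installed_slots"), reverse=True)
print([c.atoms for c in choices])  # [['dev-libs/a'], ['dev-libs/b']]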
47279 diff --git a/pym/portage/dispatch_conf.py b/pym/portage/dispatch_conf.py
47280 index 4c68dfc..f975ccd 100644
47281 --- a/pym/portage/dispatch_conf.py
47282 +++ b/pym/portage/dispatch_conf.py
47283 @@ -1,5 +1,5 @@
47284 # archive_conf.py -- functionality common to archive-conf and dispatch-conf
47285 -# Copyright 2003-2012 Gentoo Foundation
47286 +# Copyright 2003-2014 Gentoo Foundation
47287 # Distributed under the terms of the GNU General Public License v2
47288
47289
47290 @@ -24,175 +24,187 @@ RCS_MERGE = "rcsmerge -p -r" + RCS_BRANCH + " '%s' > '%s'"
47291 DIFF3_MERGE = "diff3 -mE '%s' '%s' '%s' > '%s'"
47292
47293 def diffstatusoutput(cmd, file1, file2):
47294 - """
47295 - Execute the string cmd in a shell with getstatusoutput() and return a
47296 - 2-tuple (status, output).
47297 - """
47298 - # Use Popen to emulate getstatusoutput(), since getstatusoutput() may
47299 - # raise a UnicodeDecodeError which makes the output inaccessible.
47300 - args = shlex_split(cmd % (file1, file2))
47301 - if sys.hexversion < 0x3000000 or sys.hexversion >= 0x3020000:
47302 - # Python 3.1 does not support bytes in Popen args.
47303 - args = [portage._unicode_encode(x, errors='strict') for x in args]
47304 - proc = subprocess.Popen(args,
47305 - stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
47306 - output = portage._unicode_decode(proc.communicate()[0])
47307 - if output and output[-1] == "\n":
47308 - # getstatusoutput strips one newline
47309 - output = output[:-1]
47310 - return (proc.wait(), output)
47311 + """
47312 + Execute the string cmd in a shell with getstatusoutput() and return a
47313 + 2-tuple (status, output).
47314 + """
47315 + # Use Popen to emulate getstatusoutput(), since getstatusoutput() may
47316 + # raise a UnicodeDecodeError which makes the output inaccessible.
47317 + args = shlex_split(cmd % (file1, file2))
47318 +
47319 + if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000 and \
47320 + not os.path.isabs(args[0]):
47321 + # Python 3.1 _execvp throws TypeError for non-absolute executable
47322 + # path passed as bytes (see http://bugs.python.org/issue8513).
47323 + fullname = portage.process.find_binary(args[0])
47324 + if fullname is None:
47325 + raise portage.exception.CommandNotFound(args[0])
47326 + args[0] = fullname
47327 +
47328 + args = [portage._unicode_encode(x, errors='strict') for x in args]
47329 + proc = subprocess.Popen(args,
47330 + stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
47331 + output = portage._unicode_decode(proc.communicate()[0])
47332 + if output and output[-1] == "\n":
47333 + # getstatusoutput strips one newline
47334 + output = output[:-1]
47335 + return (proc.wait(), output)
47336
47337 def read_config(mandatory_opts):
47338 - eprefix = portage.const.EPREFIX
47339 - config_path = os.path.join(eprefix or os.sep, "etc/dispatch-conf.conf")
47340 - loader = KeyValuePairFileLoader(config_path, None)
47341 - opts, errors = loader.load()
47342 - if not opts:
47343 - print(_('dispatch-conf: Error reading /etc/dispatch-conf.conf; fatal'), file=sys.stderr)
47344 - sys.exit(1)
47345 + eprefix = portage.settings["EPREFIX"]
47346 + if portage._not_installed:
47347 + config_path = os.path.join(portage.PORTAGE_BASE_PATH, "cnf", "dispatch-conf.conf")
47348 + else:
47349 + config_path = os.path.join(eprefix or os.sep, "etc/dispatch-conf.conf")
47350 + loader = KeyValuePairFileLoader(config_path, None)
47351 + opts, _errors = loader.load()
47352 + if not opts:
47353 + print(_('dispatch-conf: Error reading /etc/dispatch-conf.conf; fatal'), file=sys.stderr)
47354 + sys.exit(1)
47355
47356 # Handle quote removal here, since KeyValuePairFileLoader doesn't do that.
47357 - quotes = "\"'"
47358 - for k, v in opts.items():
47359 - if v[:1] in quotes and v[:1] == v[-1:]:
47360 - opts[k] = v[1:-1]
47361 -
47362 - for key in mandatory_opts:
47363 - if key not in opts:
47364 - if key == "merge":
47365 - opts["merge"] = "sdiff --suppress-common-lines --output='%s' '%s' '%s'"
47366 - else:
47367 - print(_('dispatch-conf: Missing option "%s" in /etc/dispatch-conf.conf; fatal') % (key,), file=sys.stderr)
47368 -
47369 - # archive-dir supports ${EPREFIX} expansion, in order to avoid hardcoding
47370 - variables = {"EPREFIX": eprefix}
47371 - opts['archive-dir'] = varexpand(opts['archive-dir'], mydict=variables)
47372 -
47373 - if not os.path.exists(opts['archive-dir']):
47374 - os.mkdir(opts['archive-dir'])
47375 - # Use restrictive permissions by default, in order to protect
47376 - # against vulnerabilities (like bug #315603 involving rcs).
47377 - os.chmod(opts['archive-dir'], 0o700)
47378 - elif not os.path.isdir(opts['archive-dir']):
47379 - print(_('dispatch-conf: Config archive dir [%s] must exist; fatal') % (opts['archive-dir'],), file=sys.stderr)
47380 - sys.exit(1)
47381 -
47382 - return opts
47383 + quotes = "\"'"
47384 + for k, v in opts.items():
47385 + if v[:1] in quotes and v[:1] == v[-1:]:
47386 + opts[k] = v[1:-1]
47387 +
47388 + for key in mandatory_opts:
47389 + if key not in opts:
47390 + if key == "merge":
47391 + opts["merge"] = "sdiff --suppress-common-lines --output='%s' '%s' '%s'"
47392 + else:
47393 + print(_('dispatch-conf: Missing option "%s" in /etc/dispatch-conf.conf; fatal') % (key,), file=sys.stderr)
47394 +
47395 + # archive-dir supports ${EPREFIX} expansion, in order to avoid hardcoding
47396 + variables = {"EPREFIX": eprefix}
47397 + opts['archive-dir'] = varexpand(opts['archive-dir'], mydict=variables)
47398 +
47399 + if not os.path.exists(opts['archive-dir']):
47400 + os.mkdir(opts['archive-dir'])
47401 + # Use restrictive permissions by default, in order to protect
47402 + # against vulnerabilities (like bug #315603 involving rcs).
47403 + os.chmod(opts['archive-dir'], 0o700)
47404 + elif not os.path.isdir(opts['archive-dir']):
47405 + print(_('dispatch-conf: Config archive dir [%s] must exist; fatal') % (opts['archive-dir'],), file=sys.stderr)
47406 + sys.exit(1)
47407 +
47408 + return opts
47409
47410
47411 def rcs_archive(archive, curconf, newconf, mrgconf):
47412 - """Archive existing config in rcs (on trunk). Then, if mrgconf is
47413 - specified and an old branch version exists, merge the user's changes
47414 - and the distributed changes and put the result into mrgconf. Lastly,
47415 - if newconf was specified, leave it in the archive dir with a .dist.new
47416 - suffix along with the last 1.1.1 branch version with a .dist suffix."""
47417 -
47418 - try:
47419 - os.makedirs(os.path.dirname(archive))
47420 - except OSError:
47421 - pass
47422 -
47423 - if os.path.isfile(curconf):
47424 - try:
47425 - shutil.copy2(curconf, archive)
47426 - except(IOError, os.error) as why:
47427 - print(_('dispatch-conf: Error copying %(curconf)s to %(archive)s: %(reason)s; fatal') % \
47428 - {"curconf": curconf, "archive": archive, "reason": str(why)}, file=sys.stderr)
47429 -
47430 - if os.path.exists(archive + ',v'):
47431 - os.system(RCS_LOCK + ' ' + archive)
47432 - os.system(RCS_PUT + ' ' + archive)
47433 -
47434 - ret = 0
47435 - if newconf != '':
47436 - os.system(RCS_GET + ' -r' + RCS_BRANCH + ' ' + archive)
47437 - has_branch = os.path.exists(archive)
47438 - if has_branch:
47439 - os.rename(archive, archive + '.dist')
47440 -
47441 - try:
47442 - shutil.copy2(newconf, archive)
47443 - except(IOError, os.error) as why:
47444 - print(_('dispatch-conf: Error copying %(newconf)s to %(archive)s: %(reason)s; fatal') % \
47445 - {"newconf": newconf, "archive": archive, "reason": str(why)}, file=sys.stderr)
47446 -
47447 - if has_branch:
47448 - if mrgconf != '':
47449 - # This puts the results of the merge into mrgconf.
47450 - ret = os.system(RCS_MERGE % (archive, mrgconf))
47451 - mystat = os.lstat(newconf)
47452 - os.chmod(mrgconf, mystat.st_mode)
47453 - os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
47454 - os.rename(archive, archive + '.dist.new')
47455 - return ret
47456 + """Archive existing config in rcs (on trunk). Then, if mrgconf is
47457 + specified and an old branch version exists, merge the user's changes
47458 + and the distributed changes and put the result into mrgconf. Lastly,
47459 + if newconf was specified, leave it in the archive dir with a .dist.new
47460 + suffix along with the last 1.1.1 branch version with a .dist suffix."""
47461 +
47462 + try:
47463 + os.makedirs(os.path.dirname(archive))
47464 + except OSError:
47465 + pass
47466 +
47467 + if os.path.isfile(curconf):
47468 + try:
47469 + shutil.copy2(curconf, archive)
47470 + except(IOError, os.error) as why:
47471 + print(_('dispatch-conf: Error copying %(curconf)s to %(archive)s: %(reason)s; fatal') % \
47472 + {"curconf": curconf, "archive": archive, "reason": str(why)}, file=sys.stderr)
47473 +
47474 + if os.path.exists(archive + ',v'):
47475 + os.system(RCS_LOCK + ' ' + archive)
47476 + os.system(RCS_PUT + ' ' + archive)
47477 +
47478 + ret = 0
47479 + if newconf != '':
47480 + os.system(RCS_GET + ' -r' + RCS_BRANCH + ' ' + archive)
47481 + has_branch = os.path.exists(archive)
47482 + if has_branch:
47483 + os.rename(archive, archive + '.dist')
47484 +
47485 + try:
47486 + shutil.copy2(newconf, archive)
47487 + except(IOError, os.error) as why:
47488 + print(_('dispatch-conf: Error copying %(newconf)s to %(archive)s: %(reason)s; fatal') % \
47489 + {"newconf": newconf, "archive": archive, "reason": str(why)}, file=sys.stderr)
47490 +
47491 + if has_branch:
47492 + if mrgconf != '':
47493 + # This puts the results of the merge into mrgconf.
47494 + ret = os.system(RCS_MERGE % (archive, mrgconf))
47495 + mystat = os.lstat(newconf)
47496 + os.chmod(mrgconf, mystat.st_mode)
47497 + os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
47498 + os.rename(archive, archive + '.dist.new')
47499 +
47500 + return ret
47501
47502
47503 def file_archive(archive, curconf, newconf, mrgconf):
47504 - """Archive existing config to the archive-dir, bumping old versions
47505 - out of the way into .# versions (log-rotate style). Then, if mrgconf
47506 - was specified and there is a .dist version, merge the user's changes
47507 - and the distributed changes and put the result into mrgconf. Lastly,
47508 - if newconf was specified, archive it as a .dist.new version (which
47509 - gets moved to the .dist version at the end of the processing)."""
47510 -
47511 - try:
47512 - os.makedirs(os.path.dirname(archive))
47513 - except OSError:
47514 - pass
47515 -
47516 - # Archive the current config file if it isn't already saved
47517 - if os.path.exists(archive) \
47518 - and len(diffstatusoutput("diff -aq '%s' '%s'", curconf, archive)[1]) != 0:
47519 - suf = 1
47520 - while suf < 9 and os.path.exists(archive + '.' + str(suf)):
47521 - suf += 1
47522 -
47523 - while suf > 1:
47524 - os.rename(archive + '.' + str(suf-1), archive + '.' + str(suf))
47525 - suf -= 1
47526 -
47527 - os.rename(archive, archive + '.1')
47528 -
47529 - if os.path.isfile(curconf):
47530 - try:
47531 - shutil.copy2(curconf, archive)
47532 - except(IOError, os.error) as why:
47533 - print(_('dispatch-conf: Error copying %(curconf)s to %(archive)s: %(reason)s; fatal') % \
47534 - {"curconf": curconf, "archive": archive, "reason": str(why)}, file=sys.stderr)
47535 -
47536 - if newconf != '':
47537 - # Save off new config file in the archive dir with .dist.new suffix
47538 - try:
47539 - shutil.copy2(newconf, archive + '.dist.new')
47540 - except(IOError, os.error) as why:
47541 - print(_('dispatch-conf: Error copying %(newconf)s to %(archive)s: %(reason)s; fatal') % \
47542 - {"newconf": newconf, "archive": archive + '.dist.new', "reason": str(why)}, file=sys.stderr)
47543 -
47544 - ret = 0
47545 - if mrgconf != '' and os.path.exists(archive + '.dist'):
47546 - # This puts the results of the merge into mrgconf.
47547 - ret = os.system(DIFF3_MERGE % (curconf, archive + '.dist', newconf, mrgconf))
47548 - mystat = os.lstat(newconf)
47549 - os.chmod(mrgconf, mystat.st_mode)
47550 - os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
47551 -
47552 - return ret
47553 + """Archive existing config to the archive-dir, bumping old versions
47554 + out of the way into .# versions (log-rotate style). Then, if mrgconf
47555 + was specified and there is a .dist version, merge the user's changes
47556 + and the distributed changes and put the result into mrgconf. Lastly,
47557 + if newconf was specified, archive it as a .dist.new version (which
47558 + gets moved to the .dist version at the end of the processing)."""
47559 +
47560 + try:
47561 + os.makedirs(os.path.dirname(archive))
47562 + except OSError:
47563 + pass
47564 +
47565 + # Archive the current config file if it isn't already saved
47566 + if (os.path.exists(archive) and
47567 + len(diffstatusoutput("diff -aq '%s' '%s'", curconf, archive)[1]) != 0):
47568 + suf = 1
47569 + while suf < 9 and os.path.exists(archive + '.' + str(suf)):
47570 + suf += 1
47571 +
47572 + while suf > 1:
47573 + os.rename(archive + '.' + str(suf-1), archive + '.' + str(suf))
47574 + suf -= 1
47575 +
47576 + os.rename(archive, archive + '.1')
47577 +
47578 + if os.path.isfile(curconf):
47579 + try:
47580 + shutil.copy2(curconf, archive)
47581 + except(IOError, os.error) as why:
47582 + print(_('dispatch-conf: Error copying %(curconf)s to %(archive)s: %(reason)s; fatal') % \
47583 + {"curconf": curconf, "archive": archive, "reason": str(why)}, file=sys.stderr)
47584 +
47585 + if newconf != '':
47586 + # Save off new config file in the archive dir with .dist.new suffix
47587 + try:
47588 + shutil.copy2(newconf, archive + '.dist.new')
47589 + except(IOError, os.error) as why:
47590 + print(_('dispatch-conf: Error copying %(newconf)s to %(archive)s: %(reason)s; fatal') % \
47591 + {"newconf": newconf, "archive": archive + '.dist.new', "reason": str(why)}, file=sys.stderr)
47592 +
47593 + ret = 0
47594 + if mrgconf != '' and os.path.exists(archive + '.dist'):
47595 + # This puts the results of the merge into mrgconf.
47596 + ret = os.system(DIFF3_MERGE % (curconf, archive + '.dist', newconf, mrgconf))
47597 + mystat = os.lstat(newconf)
47598 + os.chmod(mrgconf, mystat.st_mode)
47599 + os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
47600 +
47601 + return ret
47602
47603
47604 def rcs_archive_post_process(archive):
47605 - """Check in the archive file with the .dist.new suffix on the branch
47606 - and remove the one with the .dist suffix."""
47607 - os.rename(archive + '.dist.new', archive)
47608 - if os.path.exists(archive + '.dist'):
47609 - # Commit the last-distributed version onto the branch.
47610 - os.system(RCS_LOCK + RCS_BRANCH + ' ' + archive)
47611 - os.system(RCS_PUT + ' -r' + RCS_BRANCH + ' ' + archive)
47612 - os.unlink(archive + '.dist')
47613 - else:
47614 - # Forcefully commit the last-distributed version onto the branch.
47615 - os.system(RCS_PUT + ' -f -r' + RCS_BRANCH + ' ' + archive)
47616 + """Check in the archive file with the .dist.new suffix on the branch
47617 + and remove the one with the .dist suffix."""
47618 + os.rename(archive + '.dist.new', archive)
47619 + if os.path.exists(archive + '.dist'):
47620 + # Commit the last-distributed version onto the branch.
47621 + os.system(RCS_LOCK + RCS_BRANCH + ' ' + archive)
47622 + os.system(RCS_PUT + ' -r' + RCS_BRANCH + ' ' + archive)
47623 + os.unlink(archive + '.dist')
47624 + else:
47625 + # Forcefully commit the last-distributed version onto the branch.
47626 + os.system(RCS_PUT + ' -f -r' + RCS_BRANCH + ' ' + archive)
47627
47628
47629 def file_archive_post_process(archive):
47630 - """Rename the archive file with the .dist.new suffix to a .dist suffix"""
47631 - os.rename(archive + '.dist.new', archive + '.dist')
47632 + """Rename the archive file with the .dist.new suffix to a .dist suffix"""
47633 + os.rename(archive + '.dist.new', archive + '.dist')
47634
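Most of the dispatch_conf.py hunk above is an indentation cleanup, but the log-rotate style bumping inside file_archive() is easy to miss in the noise. A standalone sketch of that rotation, using a hypothetical helper name and paths:

import os

def _rotate(archive, keep=9):
	# Shift archive -> archive.1, archive.1 -> archive.2, and so on,
	# keeping at most `keep` numbered copies (the oldest is overwritten).
	suf = 1
	while suf < keep and os.path.exists("%s.%d" % (archive, suf)):
		suf += 1
	while suf > 1:
		os.rename("%s.%d" % (archive, suf - 1), "%s.%d" % (archive, suf))
		suf -= 1
	os.rename(archive, archive + ".1")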
47635 diff --git a/pym/portage/eapi.py b/pym/portage/eapi.py
47636 index 8b03f83..4f77910 100644
47637 --- a/pym/portage/eapi.py
47638 +++ b/pym/portage/eapi.py
47639 @@ -3,14 +3,19 @@
47640
47641 import collections
47642
47643 +from portage import eapi_is_supported
47644 +
47645 def eapi_has_iuse_defaults(eapi):
47646 return eapi != "0"
47647
47648 +def eapi_has_iuse_effective(eapi):
47649 + return eapi not in ("0", "1", "2", "3", "4", "4-python", "4-slot-abi")
47650 +
47651 def eapi_has_slot_deps(eapi):
47652 return eapi != "0"
47653
47654 -def eapi_has_slot_abi(eapi):
47655 - return eapi in ("4-slot-abi",)
47656 +def eapi_has_slot_operator(eapi):
47657 + return eapi not in ("0", "1", "2", "3", "4", "4-python")
47658
47659 def eapi_has_src_uri_arrows(eapi):
47660 return eapi not in ("0", "1")
47661 @@ -39,8 +44,11 @@ def eapi_exports_merge_type(eapi):
47662 def eapi_exports_replace_vars(eapi):
47663 return eapi not in ("0", "1", "2", "3")
47664
47665 +def eapi_exports_EBUILD_PHASE_FUNC(eapi):
47666 + return eapi not in ("0", "1", "2", "3", "4", "4-python", "4-slot-abi")
47667 +
47668 def eapi_exports_REPOSITORY(eapi):
47669 - return eapi in ("4-python",)
47670 + return eapi in ("4-python", "5-progress")
47671
47672 def eapi_has_pkg_pretend(eapi):
47673 return eapi not in ("0", "1", "2", "3")
47674 @@ -54,21 +62,44 @@ def eapi_has_dosed_dohard(eapi):
47675 def eapi_has_required_use(eapi):
47676 return eapi not in ("0", "1", "2", "3")
47677
47678 +def eapi_has_required_use_at_most_one_of(eapi):
47679 + return eapi not in ("0", "1", "2", "3", "4", "4-python", "4-slot-abi")
47680 +
47681 def eapi_has_use_dep_defaults(eapi):
47682 return eapi not in ("0", "1", "2", "3")
47683
47684 def eapi_has_repo_deps(eapi):
47685 - return eapi in ("4-python",)
47686 + return eapi in ("4-python", "5-progress")
47687
47688 def eapi_allows_dots_in_PN(eapi):
47689 - return eapi in ("4-python",)
47690 + return eapi in ("4-python", "5-progress")
47691
47692 def eapi_allows_dots_in_use_flags(eapi):
47693 - return eapi in ("4-python",)
47694 + return eapi in ("4-python", "5-progress")
47695 +
47696 +def eapi_supports_stable_use_forcing_and_masking(eapi):
47697 + return eapi not in ("0", "1", "2", "3", "4", "4-python", "4-slot-abi")
47698 +
47699 +def eapi_allows_directories_on_profile_level_and_repository_level(eapi):
47700 + return eapi in ("4-python", "5-progress")
47701 +
47702 +def eapi_has_use_aliases(eapi):
47703 + return eapi in ("4-python", "5-progress")
47704 +
47705 +def eapi_has_automatic_unpack_dependencies(eapi):
47706 + return eapi in ("5-progress",)
47707 +
47708 +def eapi_has_hdepend(eapi):
47709 + return eapi in ("5-hdepend",)
47710 +
47711 +def eapi_has_targetroot(eapi):
47712 + return eapi in ("5-hdepend",)
47713
47714 _eapi_attrs = collections.namedtuple('_eapi_attrs',
47715 - 'dots_in_PN dots_in_use_flags iuse_defaults '
47716 - 'repo_deps required_use slot_abi slot_deps '
47717 + 'dots_in_PN dots_in_use_flags exports_EBUILD_PHASE_FUNC '
47718 + 'feature_flag_test feature_flag_targetroot '
47719 + 'hdepend iuse_defaults iuse_effective '
47720 + 'repo_deps required_use required_use_at_most_one_of slot_operator slot_deps '
47721 'src_uri_arrows strong_blocks use_deps use_dep_defaults')
47722
47723 _eapi_attrs_cache = {}
47724 @@ -77,24 +108,37 @@ def _get_eapi_attrs(eapi):
47725 """
47726 When eapi is None then validation is not as strict, since we want the
47727 same to work for multiple EAPIs that may have slightly different rules.
47728 + An unsupported eapi is handled the same as when eapi is None, which may
47729 + be helpful for handling of corrupt EAPI metadata in essential functions
47730 + such as pkgsplit.
47731 """
47732 eapi_attrs = _eapi_attrs_cache.get(eapi)
47733 if eapi_attrs is not None:
47734 return eapi_attrs
47735
47736 + orig_eapi = eapi
47737 + if eapi is not None and not eapi_is_supported(eapi):
47738 + eapi = None
47739 +
47740 eapi_attrs = _eapi_attrs(
47741 dots_in_PN = (eapi is None or eapi_allows_dots_in_PN(eapi)),
47742 dots_in_use_flags = (eapi is None or eapi_allows_dots_in_use_flags(eapi)),
47743 + exports_EBUILD_PHASE_FUNC = (eapi is None or eapi_exports_EBUILD_PHASE_FUNC(eapi)),
47744 + feature_flag_test = True,
47745 + feature_flag_targetroot = (eapi is not None and eapi_has_targetroot(eapi)),
47746 + hdepend = (eapi is not None and eapi_has_hdepend(eapi)),
47747 iuse_defaults = (eapi is None or eapi_has_iuse_defaults(eapi)),
47748 + iuse_effective = (eapi is not None and eapi_has_iuse_effective(eapi)),
47749 repo_deps = (eapi is None or eapi_has_repo_deps(eapi)),
47750 required_use = (eapi is None or eapi_has_required_use(eapi)),
47751 + required_use_at_most_one_of = (eapi is None or eapi_has_required_use_at_most_one_of(eapi)),
47752 slot_deps = (eapi is None or eapi_has_slot_deps(eapi)),
47753 - slot_abi = (eapi is None or eapi_has_slot_abi(eapi)),
47754 + slot_operator = (eapi is None or eapi_has_slot_operator(eapi)),
47755 src_uri_arrows = (eapi is None or eapi_has_src_uri_arrows(eapi)),
47756 strong_blocks = (eapi is None or eapi_has_strong_blocks(eapi)),
47757 use_deps = (eapi is None or eapi_has_use_deps(eapi)),
47758 use_dep_defaults = (eapi is None or eapi_has_use_dep_defaults(eapi))
47759 )
47760
47761 - _eapi_attrs_cache[eapi] = eapi_attrs
47762 + _eapi_attrs_cache[orig_eapi] = eapi_attrs
47763 return eapi_attrs
47764
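The eapi.py changes above keep growing the per-EAPI predicate list, so the shape of the pattern is worth seeing in isolation: feature predicates collapsed into a cached namedtuple that callers query as attributes instead of comparing EAPI strings everywhere. The two flags below are only a sample, not the full attribute set:

import collections

_attrs = collections.namedtuple("_attrs",
	"slot_operator required_use_at_most_one_of")
_cache = {}

def get_attrs(eapi):
	attrs = _cache.get(eapi)
	if attrs is None:
		attrs = _attrs(
			slot_operator=(eapi is None or eapi not in
				("0", "1", "2", "3", "4", "4-python")),
			required_use_at_most_one_of=(eapi is None or eapi not in
				("0", "1", "2", "3", "4", "4-python", "4-slot-abi")),
		)
		_cache[eapi] = attrs
	return attrs

print(get_attrs("5").slot_operator)  # True
print(get_attrs("4").required_use_at_most_one_of)  # False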
47765 diff --git a/pym/portage/eclass_cache.py b/pym/portage/eclass_cache.py
47766 index cb2cf8a..2988d25 100644
47767 --- a/pym/portage/eclass_cache.py
47768 +++ b/pym/portage/eclass_cache.py
47769 @@ -1,19 +1,24 @@
47770 -# Copyright 2005-2011 Gentoo Foundation
47771 +# Copyright 2005-2014 Gentoo Foundation
47772 # Distributed under the terms of the GNU General Public License v2
47773 # Author(s): Nicholas Carpaski (carpaski@g.o), Brian Harring (ferringb@g.o)
47774
47775 +from __future__ import unicode_literals
47776 +
47777 __all__ = ["cache"]
47778
47779 import stat
47780 import sys
47781 import operator
47782 +import warnings
47783 from portage.util import normalize_path
47784 import errno
47785 from portage.exception import FileNotFound, PermissionDenied
47786 from portage import os
47787 from portage import checksum
47788 +from portage import _shell_quote
47789
47790 if sys.hexversion >= 0x3000000:
47791 + # pylint: disable=W0622
47792 long = int
47793
47794
47795 @@ -56,17 +61,20 @@ class cache(object):
47796 """
47797 Maintains the cache information about eclasses used in ebuild.
47798 """
47799 - def __init__(self, porttree_root, overlays=[]):
47800 + def __init__(self, porttree_root, overlays=None):
47801 + if overlays is not None:
47802 + warnings.warn("overlays parameter of portage.eclass_cache.cache constructor is deprecated and no longer used",
47803 + DeprecationWarning, stacklevel=2)
47804
47805 self.eclasses = {} # {"Name": hashed_path}
47806 self._eclass_locations = {}
47807 + self._eclass_locations_str = None
47808
47809 # screw with the porttree ordering, w/out having bash inherit match it, and I'll hurt you.
47810 # ~harring
47811 if porttree_root:
47812 self.porttree_root = porttree_root
47813 - self.porttrees = [self.porttree_root] + overlays
47814 - self.porttrees = tuple(map(normalize_path, self.porttrees))
47815 + self.porttrees = (normalize_path(self.porttree_root),)
47816 self._master_eclass_root = os.path.join(self.porttrees[0], "eclass")
47817 self.update_eclasses()
47818 else:
47819 @@ -98,6 +106,7 @@ class cache(object):
47820 self.porttrees = self.porttrees + other.porttrees
47821 self.eclasses.update(other.eclasses)
47822 self._eclass_locations.update(other._eclass_locations)
47823 + self._eclass_locations_str = None
47824
47825 def update_eclasses(self):
47826 self.eclasses = {}
47827 @@ -124,7 +133,7 @@ class cache(object):
47828 mtime = obj.mtime
47829 except FileNotFound:
47830 continue
47831 - ys=y[:-eclass_len]
47832 + ys = y[:-eclass_len]
47833 if x == self._master_eclass_root:
47834 master_eclasses[ys] = mtime
47835 self.eclasses[ys] = obj
47836 @@ -169,3 +178,10 @@ class cache(object):
47837 ec_dict[x] = self.eclasses[x]
47838
47839 return ec_dict
47840 +
47841 + @property
47842 + def eclass_locations_string(self):
47843 + if self._eclass_locations_str is None:
47844 + self._eclass_locations_str = " ".join(_shell_quote(x)
47845 + for x in reversed(self.porttrees))
47846 + return self._eclass_locations_str
47847
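The new eclass_locations_string property above is a lazily built string that append() invalidates. A condensed sketch of the same idea, with class and attribute names shortened for illustration:

from portage import _shell_quote

class _locations(object):
	def __init__(self, porttrees):
		self.porttrees = tuple(porttrees)
		self._cached = None

	def append(self, other_trees):
		self.porttrees = self.porttrees + tuple(other_trees)
		self._cached = None  # invalidate the cached string

	@property
	def locations_string(self):
		if self._cached is None:
			# Join shell-quoted paths, mirroring the reversed()
			# ordering used by the property added above.
			self._cached = " ".join(_shell_quote(x)
				for x in reversed(self.porttrees))
		return self._cached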
47848 diff --git a/pym/portage/elog/__init__.py b/pym/portage/elog/__init__.py
47849 index 33dac17..cc08612 100644
47850 --- a/pym/portage/elog/__init__.py
47851 +++ b/pym/portage/elog/__init__.py
47852 @@ -1,9 +1,10 @@
47853 # elog/__init__.py - elog core functions
47854 -# Copyright 2006-2011 Gentoo Foundation
47855 +# Copyright 2006-2014 Gentoo Foundation
47856 # Distributed under the terms of the GNU General Public License v2
47857
47858 import sys
47859 if sys.hexversion >= 0x3000000:
47860 + # pylint: disable=W0622
47861 basestring = str
47862
47863 import portage
47864
47865 diff --git a/pym/portage/elog/mod_echo.py b/pym/portage/elog/mod_echo.py
47866 index 59117be..f9cc537 100644
47867 --- a/pym/portage/elog/mod_echo.py
47868 +++ b/pym/portage/elog/mod_echo.py
47869 @@ -1,5 +1,5 @@
47870 # elog/mod_echo.py - elog dispatch module
47871 -# Copyright 2007 Gentoo Foundation
47872 +# Copyright 2007-2014 Gentoo Foundation
47873 # Distributed under the terms of the GNU General Public License v2
47874
47875 from __future__ import print_function
47876 @@ -10,6 +10,7 @@ from portage.const import EBUILD_PHASES
47877 from portage.localization import _
47878
47879 if sys.hexversion >= 0x3000000:
47880 + # pylint: disable=W0622
47881 basestring = str
47882
47883 _items = []
47884
47885 diff --git a/pym/portage/elog/mod_save.py b/pym/portage/elog/mod_save.py
47886 index c69f4a3..7b1cd46 100644
47887 --- a/pym/portage/elog/mod_save.py
47888 +++ b/pym/portage/elog/mod_save.py
47889 @@ -1,7 +1,8 @@
47890 # elog/mod_save.py - elog dispatch module
47891 -# Copyright 2006-2011 Gentoo Foundation
47892 +# Copyright 2006-2012 Gentoo Foundation
47893 # Distributed under the terms of the GNU General Public License v2
47894
47895 +import errno
47896 import io
47897 import time
47898 import portage
47899 @@ -47,11 +48,22 @@ def process(mysettings, key, logentries, fulltext):
47900 elogfilename = os.path.join(log_subdir, cat + ':' + elogfilename)
47901 _ensure_log_subdirs(logdir, log_subdir)
47902
47903 - elogfile = io.open(_unicode_encode(elogfilename,
47904 - encoding=_encodings['fs'], errors='strict'),
47905 - mode='w', encoding=_encodings['content'], errors='backslashreplace')
47906 - elogfile.write(_unicode_decode(fulltext))
47907 - elogfile.close()
47908 + try:
47909 + with io.open(_unicode_encode(elogfilename,
47910 + encoding=_encodings['fs'], errors='strict'), mode='w',
47911 + encoding=_encodings['content'],
47912 + errors='backslashreplace') as elogfile:
47913 + elogfile.write(_unicode_decode(fulltext))
47914 + except IOError as e:
47915 + func_call = "open('%s', 'w')" % elogfilename
47916 + if e.errno == errno.EACCES:
47917 + raise portage.exception.PermissionDenied(func_call)
47918 + elif e.errno == errno.EPERM:
47919 + raise portage.exception.OperationNotPermitted(func_call)
47920 + elif e.errno == errno.EROFS:
47921 + raise portage.exception.ReadOnlyFileSystem(func_call)
47922 + else:
47923 + raise
47924
47925 # Copy group permission bits from parent directory.
47926 elogdir_st = os.stat(log_subdir)
47927
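The elog change above swaps a bare io.open() for a with-block that translates common errno values into portage's own exceptions. A reusable sketch of that translation; the helper name is hypothetical, while the exception classes are the real ones from portage.exception:

import errno
import io

from portage.exception import (PermissionDenied, OperationNotPermitted,
	ReadOnlyFileSystem)

def _open_log(path, mode):
	try:
		return io.open(path, mode=mode, encoding="utf_8",
			errors="backslashreplace")
	except IOError as e:
		func_call = "open('%s', '%s')" % (path, mode)
		if e.errno == errno.EACCES:
			raise PermissionDenied(func_call)
		elif e.errno == errno.EPERM:
			raise OperationNotPermitted(func_call)
		elif e.errno == errno.EROFS:
			raise ReadOnlyFileSystem(func_call)
		raise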
47928 diff --git a/pym/portage/elog/mod_save_summary.py b/pym/portage/elog/mod_save_summary.py
47929 index 347f66e..786f894 100644
47930 --- a/pym/portage/elog/mod_save_summary.py
47931 +++ b/pym/portage/elog/mod_save_summary.py
47932 @@ -1,8 +1,12 @@
47933 # elog/mod_save_summary.py - elog dispatch module
47934 -# Copyright 2006-2011 Gentoo Foundation
47935 +# Copyright 2006-2013 Gentoo Foundation
47936 # Distributed under the terms of the GNU General Public License v2
47937
47938 +from __future__ import unicode_literals
47939 +
47940 +import errno
47941 import io
47942 +import sys
47943 import time
47944 import portage
47945 from portage import os
47946 @@ -37,9 +41,21 @@ def process(mysettings, key, logentries, fulltext):
47947
47948 # TODO: Locking
47949 elogfilename = elogdir+"/summary.log"
47950 - elogfile = io.open(_unicode_encode(elogfilename,
47951 - encoding=_encodings['fs'], errors='strict'),
47952 - mode='a', encoding=_encodings['content'], errors='backslashreplace')
47953 + try:
47954 + elogfile = io.open(_unicode_encode(elogfilename,
47955 + encoding=_encodings['fs'], errors='strict'),
47956 + mode='a', encoding=_encodings['content'],
47957 + errors='backslashreplace')
47958 + except IOError as e:
47959 + func_call = "open('%s', 'a')" % elogfilename
47960 + if e.errno == errno.EACCES:
47961 + raise portage.exception.PermissionDenied(func_call)
47962 + elif e.errno == errno.EPERM:
47963 + raise portage.exception.OperationNotPermitted(func_call)
47964 + elif e.errno == errno.EROFS:
47965 + raise portage.exception.ReadOnlyFileSystem(func_call)
47966 + else:
47967 + raise
47968
47969 # Copy group permission bits from parent directory.
47970 elogdir_st = os.stat(elogdir)
47971 @@ -58,17 +74,19 @@ def process(mysettings, key, logentries, fulltext):
47972 apply_permissions(elogfilename, uid=logfile_uid, gid=elogdir_gid,
47973 mode=elogdir_grp_mode, mask=0)
47974
47975 - time_str = time.strftime("%Y-%m-%d %H:%M:%S %Z",
47976 - time.localtime(time.time()))
47977 - # Avoid potential UnicodeDecodeError later.
47978 + time_fmt = "%Y-%m-%d %H:%M:%S %Z"
47979 + if sys.hexversion < 0x3000000:
47980 + time_fmt = _unicode_encode(time_fmt)
47981 + time_str = time.strftime(time_fmt, time.localtime(time.time()))
47982 + # Avoid potential UnicodeDecodeError in Python 2, since strftime
47983 + # returns bytes in Python 2, and %Z may contain non-ascii chars.
47984 time_str = _unicode_decode(time_str,
47985 encoding=_encodings['content'], errors='replace')
47986 - elogfile.write(_unicode_decode(
47987 - _(">>> Messages generated by process " +
47988 + elogfile.write(_(">>> Messages generated by process "
47989 "%(pid)d on %(time)s for package %(pkg)s:\n\n") %
47990 - {"pid": os.getpid(), "time": time_str, "pkg": key}))
47991 + {"pid": os.getpid(), "time": time_str, "pkg": key})
47992 elogfile.write(_unicode_decode(fulltext))
47993 - elogfile.write(_unicode_decode("\n"))
47994 + elogfile.write("\n")
47995 elogfile.close()
47996
47997 return elogfilename
47998
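The summary.log writer above also adjusts its timestamp handling: under Python 2, time.strftime() returns bytes and %Z can expand to non-ASCII in some locales, so the result is decoded defensively. A minimal standalone sketch of that behavior (encoding choice assumed to be UTF-8 here):

import sys
import time

time_str = time.strftime("%Y-%m-%d %H:%M:%S %Z", time.localtime())
if sys.hexversion < 0x3000000 and isinstance(time_str, bytes):
	# Python 2: decode with 'replace' so an unusual locale cannot raise
	# UnicodeDecodeError later when the string is written to the log.
	time_str = time_str.decode("utf_8", "replace")
print(time_str)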
47999 diff --git a/pym/portage/elog/mod_syslog.py b/pym/portage/elog/mod_syslog.py
48000 index c8bf441..8b26ffa 100644
48001 --- a/pym/portage/elog/mod_syslog.py
48002 +++ b/pym/portage/elog/mod_syslog.py
48003 @@ -1,5 +1,5 @@
48004 # elog/mod_syslog.py - elog dispatch module
48005 -# Copyright 2006-2011 Gentoo Foundation
48006 +# Copyright 2006-2014 Gentoo Foundation
48007 # Distributed under the terms of the GNU General Public License v2
48008
48009 import sys
48010 @@ -8,12 +8,13 @@ from portage.const import EBUILD_PHASES
48011 from portage import _encodings
48012
48013 if sys.hexversion >= 0x3000000:
48014 + # pylint: disable=W0622
48015 basestring = str
48016
48017 _pri = {
48018 - "INFO" : syslog.LOG_INFO,
48019 - "WARN" : syslog.LOG_WARNING,
48020 - "ERROR" : syslog.LOG_ERR,
48021 + "INFO" : syslog.LOG_INFO,
48022 + "WARN" : syslog.LOG_WARNING,
48023 + "ERROR" : syslog.LOG_ERR,
48024 "LOG" : syslog.LOG_NOTICE,
48025 "QA" : syslog.LOG_WARNING
48026 }
48027 @@ -23,14 +24,14 @@ def process(mysettings, key, logentries, fulltext):
48028 for phase in EBUILD_PHASES:
48029 if not phase in logentries:
48030 continue
48031 - for msgtype,msgcontent in logentries[phase]:
48032 + for msgtype, msgcontent in logentries[phase]:
48033 if isinstance(msgcontent, basestring):
48034 msgcontent = [msgcontent]
48035 for line in msgcontent:
48036 line = "%s: %s: %s" % (key, phase, line)
48037 if sys.hexversion < 0x3000000 and not isinstance(line, bytes):
48038 # Avoid TypeError from syslog.syslog()
48039 - line = line.encode(_encodings['content'],
48040 + line = line.encode(_encodings['content'],
48041 'backslashreplace')
48042 syslog.syslog(_pri[msgtype], line.rstrip("\n"))
48043 syslog.closelog()
48044
48045 diff --git a/pym/portage/emaint/__init__.py b/pym/portage/emaint/__init__.py
48046 index 5e0ae70..48bc6e2 100644
48047 --- a/pym/portage/emaint/__init__.py
48048 +++ b/pym/portage/emaint/__init__.py
48049 @@ -1,7 +1,5 @@
48050 # Copyright 2005-2012 Gentoo Foundation
48051 # Distributed under the terms of the GNU General Public License v2
48052
48053 -"""'The emaint program provides checks and maintenance
48054 -on a gentoo system.
48055 +"""System health checks and maintenance utilities.
48056 """
48057 -
48058
48059 diff --git a/pym/portage/emaint/defaults.py b/pym/portage/emaint/defaults.py
48060 index d9d83ff..30f36af 100644
48061 --- a/pym/portage/emaint/defaults.py
48062 +++ b/pym/portage/emaint/defaults.py
48063 @@ -1,18 +1,25 @@
48064 -# Copyright 2005-2012 Gentoo Foundation
48065 +# Copyright 2005-2013 Gentoo Foundation
48066 # Distributed under the terms of the GNU General Public License v2
48067
48068 # parser option data
48069 CHECK = {"short": "-c", "long": "--check",
48070 "help": "Check for problems (a default option for most modules)",
48071 'status': "Checking %s for problems",
48072 + 'action': 'store_true',
48073 'func': 'check'
48074 }
48075
48076 FIX = {"short": "-f", "long": "--fix",
48077 "help": "Attempt to fix problems (a default option for most modules)",
48078 'status': "Attempting to fix %s",
48079 + 'action': 'store_true',
48080 'func': 'fix'
48081 }
48082
48083 +VERSION = {"long": "--version",
48084 + "help": "show program's version number and exit",
48085 + 'action': 'store_true',
48086 + }
48087 +
48088 # parser options
48089 -DEFAULT_OPTIONS = {'check': CHECK, 'fix': FIX}
48090 +DEFAULT_OPTIONS = {'check': CHECK, 'fix': FIX, 'version': VERSION}
48091
48092 diff --git a/pym/portage/emaint/main.py b/pym/portage/emaint/main.py
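The defaults above now carry an explicit 'action' key because the argparse-based parser in main.py passes these dicts straight through to add_argument(). For illustration, a hypothetical module-supplied option following the same schema (not part of this commit):

# Hypothetical emaint module option, shown only to illustrate the schema
# consumed by OptionItem (short/long/help/status/action/func).
PURGE = {
	"short": "-p", "long": "--purge",
	"help": "Remove stale entries (example)",
	"status": "Purging %s",
	"action": "store_true",
	"func": "purge",
}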
48093 index dbc5f18..6a17027 100644
48094 --- a/pym/portage/emaint/main.py
48095 +++ b/pym/portage/emaint/main.py
48096 @@ -1,4 +1,4 @@
48097 -# Copyright 2005-2012 Gentoo Foundation
48098 +# Copyright 2005-2014 Gentoo Foundation
48099 # Distributed under the terms of the GNU General Public License v2
48100
48101 from __future__ import print_function
48102 @@ -6,61 +6,59 @@ from __future__ import print_function
48103
48104 import sys
48105 import textwrap
48106 -from optparse import OptionParser, OptionValueError
48107 -
48108
48109 import portage
48110 from portage import os
48111 from portage.emaint.module import Modules
48112 from portage.emaint.progress import ProgressBar
48113 from portage.emaint.defaults import DEFAULT_OPTIONS
48114 +from portage.util._argparse import ArgumentParser
48115
48116 class OptionItem(object):
48117 - """class to hold module OptionParser options data
48118 + """class to hold module ArgumentParser options data
48119 """
48120
48121 - def __init__(self, opt, parser):
48122 + def __init__(self, opt):
48123 """
48124 @type opt: dictionary
48125 @param opt: options parser options
48126 """
48127 - self.parser = parser
48128 - self.short = opt['short']
48129 - self.long = opt['long']
48130 - self.help = opt['help']
48131 - self.status = opt['status']
48132 - self.func = opt['func']
48133 - self.action = opt.get('action', "callback")
48134 - self.type = opt.get('type', None)
48135 - self.dest = opt.get('dest', None)
48136 - self.callback = opt.get('callback', self._exclusive)
48137 - self.callback_kwargs = opt.get('callback_kwargs', {"var":"action"})
48138 -
48139 -
48140 - def _exclusive(self, option, *args, **kw):
48141 - """Generic check for the 2 default options
48142 - """
48143 - var = kw.get("var", None)
48144 - if var is None:
48145 - raise ValueError("var not specified to exclusive()")
48146 - if getattr(self.parser, var, ""):
48147 - raise OptionValueError("%s and %s are exclusive options"
48148 - % (getattr(self.parser, var), option))
48149 - setattr(self.parser, var, str(option))
48150 -
48151 - def check_action(self, action):
48152 - """Checks if 'action' is the same as this option
48153 -
48154 - @type action: string
48155 - @param action: the action to compare
48156 - @rtype: boolean
48157 - """
48158 - if action == self.action:
48159 - return True
48160 - elif action == '/'.join([self.short, self.long]):
48161 - return True
48162 - return False
48163 -
48164 + self.short = opt.get('short')
48165 + self.long = opt.get('long')
48166 + # '-' are not allowed in python identifiers
48167 + # so store the sanitized target variable name
48168 + self.target = self.long[2:].replace('-','_')
48169 + self.help = opt.get('help')
48170 + self.status = opt.get('status')
48171 + self.func = opt.get('func')
48172 + self.action = opt.get('action')
48173 + self.type = opt.get('type')
48174 + self.dest = opt.get('dest')
48175 +
48176 + @property
48177 + def pargs(self):
48178 + pargs = []
48179 + if self.short is not None:
48180 + pargs.append(self.short)
48181 + if self.long is not None:
48182 + pargs.append(self.long)
48183 + return pargs
48184 +
48185 + @property
48186 + def kwargs(self):
48187 + # Support for keyword arguments varies depending on the action,
48188 + # so only pass in the keywords that are needed, in order
48189 + # to avoid a TypeError.
48190 + kwargs = {}
48191 + if self.help is not None:
48192 + kwargs['help'] = self.help
48193 + if self.action is not None:
48194 + kwargs['action'] = self.action
48195 + if self.type is not None:
48196 + kwargs['type'] = self.type
48197 + if self.dest is not None:
48198 + kwargs['dest'] = self.dest
48199 + return kwargs
48200
48201 def usage(module_controller):
48202 _usage = "usage: emaint [options] COMMAND"
48203 @@ -91,15 +89,14 @@ def module_opts(module_controller, module):
48204 opts = DEFAULT_OPTIONS
48205 for opt in sorted(opts):
48206 optd = opts[opt]
48207 - opto = " %s, %s" %(optd['short'], optd['long'])
48208 - _usage += '%s %s\n' % (opto.ljust(15),optd['help'])
48209 + opto = " %s, %s" % (optd['short'], optd['long'])
48210 + _usage += '%s %s\n' % (opto.ljust(15), optd['help'])
48211 _usage += '\n'
48212 return _usage
48213
48214
48215 class TaskHandler(object):
48216 - """Handles the running of the tasks it is given
48217 - """
48218 + """Handles the running of the tasks it is given"""
48219
48220 def __init__(self, show_progress_bar=True, verbose=True, callback=None):
48221 self.show_progress_bar = show_progress_bar
48222 @@ -108,14 +105,13 @@ class TaskHandler(object):
48223 self.isatty = os.environ.get('TERM') != 'dumb' and sys.stdout.isatty()
48224 self.progress_bar = ProgressBar(self.isatty, title="Emaint", max_desc_length=27)
48225
48226 -
48227 def run_tasks(self, tasks, func, status=None, verbose=True, options=None):
48228 """Runs the module tasks"""
48229 if tasks is None or func is None:
48230 return
48231 for task in tasks:
48232 inst = task()
48233 - show_progress = self.show_progress_bar
48234 + show_progress = self.show_progress_bar and self.isatty
48235 # check if the function is capable of progressbar
48236 # and possibly override it off
48237 if show_progress and hasattr(inst, 'can_progressbar'):
48238 @@ -133,7 +129,7 @@ class TaskHandler(object):
48239 'options': options.copy()
48240 }
48241 result = getattr(inst, func)(**kwargs)
48242 - if self.isatty and show_progress:
48243 + if show_progress:
48244 # make sure the final progress is displayed
48245 self.progress_bar.display()
48246 print()
48247 @@ -160,59 +156,68 @@ def emaint_main(myargv):
48248 module_names.insert(0, "all")
48249
48250
48251 - parser = OptionParser(usage=usage(module_controller), version=portage.VERSION)
48252 + parser = ArgumentParser(usage=usage(module_controller))
48253 # add default options
48254 parser_options = []
48255 for opt in DEFAULT_OPTIONS:
48256 - parser_options.append(OptionItem(DEFAULT_OPTIONS[opt], parser))
48257 + parser_options.append(OptionItem(DEFAULT_OPTIONS[opt]))
48258 for mod in module_names[1:]:
48259 desc = module_controller.get_func_descriptions(mod)
48260 if desc:
48261 for opt in desc:
48262 - parser_options.append(OptionItem(desc[opt], parser))
48263 + parser_options.append(OptionItem(desc[opt]))
48264 for opt in parser_options:
48265 - parser.add_option(opt.short, opt.long, help=opt.help, action=opt.action,
48266 - type=opt.type, dest=opt.dest,
48267 - callback=opt.callback, callback_kwargs=opt.callback_kwargs)
48268 + parser.add_argument(*opt.pargs, **opt.kwargs)
48269
48270 - parser.action = None
48271 + options, args = parser.parse_known_args(args=myargv)
48272 +
48273 + if options.version:
48274 + print(portage.VERSION)
48275 + return os.EX_OK
48276
48277 - (options, args) = parser.parse_args(args=myargv)
48278 - #print('options', options, '\nargs', args, '\naction', parser.action)
48279 if len(args) != 1:
48280 parser.error("Incorrect number of arguments")
48281 if args[0] not in module_names:
48282 parser.error("%s target is not a known target" % args[0])
48283
48284 - if parser.action:
48285 - action = parser.action
48286 - else:
48287 - action = "-c/--check"
48288 - long_action = action.split('/')[1].lstrip('-')
48289 - #print("DEBUG: action = ", action, long_action)
48290 + check_opt = None
48291 + func = status = long_action = None
48292 + for opt in parser_options:
48293 + if opt.long == '--check':
48294 + # Default action
48295 + check_opt = opt
48296 + if opt.status and getattr(options, opt.target, False):
48297 + if long_action is not None:
48298 + parser.error("--%s and %s are exclusive options" %
48299 + (long_action, opt.long))
48300 + status = opt.status
48301 + func = opt.func
48302 + long_action = opt.long.lstrip('-')
48303 +
48304 + if long_action is None:
48305 + #print("DEBUG: long_action is None: setting to 'check'")
48306 + long_action = 'check'
48307 + func = check_opt.func
48308 + status = check_opt.status
48309
48310 if args[0] == "all":
48311 tasks = []
48312 for m in module_names[1:]:
48313 - #print("DEBUG: module: %s, functions: " %(m, str(module_controller.get_functions(m))))
48314 - if long_action in module_controller.get_functions(m):
48315 + #print("DEBUG: module: %s, functions: " % (m, str(module_controller.get_functions(m))))
48316 + if func in module_controller.get_functions(m):
48317 tasks.append(module_controller.get_class(m))
48318 - elif long_action in module_controller.get_functions(args[0]):
48319 + elif func in module_controller.get_functions(args[0]):
48320 tasks = [module_controller.get_class(args[0] )]
48321 else:
48322 - print("\nERROR: module '%s' does not have option '%s'\n" %(args[0], action))
48323 - print(module_opts(module_controller, args[0]))
48324 + portage.util.writemsg(
48325 + "\nERROR: module '%s' does not have option '--%s'\n\n" %
48326 + (args[0], long_action), noiselevel=-1)
48327 + portage.util.writemsg(module_opts(module_controller, args[0]),
48328 + noiselevel=-1)
48329 sys.exit(1)
48330 - func = status = None
48331 - for opt in parser_options:
48332 - if opt.check_action(action):
48333 - status = opt.status
48334 - func = opt.func
48335 - break
48336
48337 # need to pass the parser options dict to the modules
48338 # so they are available if needed.
48339 task_opts = options.__dict__
48340 taskmaster = TaskHandler(callback=print_results)
48341 taskmaster.run_tasks(tasks, func, status, options=task_opts)
48342 -
48343
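A minimal sketch of how the reworked OptionItem above is meant to feed ArgumentParser, using a stripped-down option dict rather than the real DEFAULT_OPTIONS; the names here are illustrative only. The point of pargs/kwargs is that add_argument() receives just the keywords that were actually set, so an action that rejects a given keyword never raises a TypeError.

from argparse import ArgumentParser

class OptionItem(object):
	# simplified version of the wrapper introduced in the hunk above
	def __init__(self, opt):
		self.short = opt.get('short')
		self.long = opt.get('long')
		# '-' is not allowed in python identifiers,
		# so store the sanitized target variable name
		self.target = self.long[2:].replace('-', '_')
		self.help = opt.get('help')
		self.action = opt.get('action')
		self.type = opt.get('type')
		self.dest = opt.get('dest')

	@property
	def pargs(self):
		return [a for a in (self.short, self.long) if a is not None]

	@property
	def kwargs(self):
		# only forward keywords that are set, to avoid a TypeError
		return dict((k, getattr(self, k))
			for k in ('help', 'action', 'type', 'dest')
			if getattr(self, k) is not None)

# hypothetical option dict, shaped like a func_desc entry
check_opt = {'short': '-c', 'long': '--check',
	'help': 'Check for problems (default)', 'action': 'store_true'}

parser = ArgumentParser()
item = OptionItem(check_opt)
parser.add_argument(*item.pargs, **item.kwargs)
options = parser.parse_args(['--check'])
print(getattr(options, item.target))  # True
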
48344 diff --git a/pym/portage/emaint/module.py b/pym/portage/emaint/module.py
48345 index 64b0c64..bf7d25f 100644
48346 --- a/pym/portage/emaint/module.py
48347 +++ b/pym/portage/emaint/module.py
48348 @@ -1,4 +1,4 @@
48349 -# Copyright 2005-2012 Gentoo Foundation
48350 +# Copyright 2005-2014 Gentoo Foundation
48351 # Distributed under the terms of the GNU General Public License v2
48352
48353
48354 @@ -37,10 +37,10 @@ class Module(object):
48355 self.valid = False
48356 try:
48357 mod_name = ".".join([self._namepath, self.name])
48358 - self._module = __import__(mod_name, [],[], ["not empty"])
48359 + self._module = __import__(mod_name, [], [], ["not empty"])
48360 self.valid = True
48361 except ImportError as e:
48362 - print("MODULE; failed import", mod_name, " error was:",e)
48363 + print("MODULE; failed import", mod_name, " error was:", e)
48364 return False
48365 self.module_spec = self._module.module_spec
48366 for submodule in self.module_spec['provides']:
48367 @@ -61,7 +61,7 @@ class Module(object):
48368 module = kid['instance']
48369 else:
48370 try:
48371 - module = __import__(kid['module_name'], [],[], ["not empty"])
48372 + module = __import__(kid['module_name'], [], [], ["not empty"])
48373 kid['instance'] = module
48374 kid['is_imported'] = True
48375 except ImportError:
48376
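For context on the __import__(mod_name, [], [], ["not empty"]) calls cleaned up above: the non-empty fromlist is what makes __import__ return the leaf submodule instead of the top-level package. A short sketch of the equivalent, which importlib expresses more directly (os.path is just a convenient stand-in module):

import importlib

# __import__("a.b", fromlist=["not empty"]) returns module a.b;
# a bare __import__("a.b") would return the top-level package a.
mod = __import__("os.path", [], [], ["not empty"])
same = importlib.import_module("os.path")
assert mod is same
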
48377 diff --git a/pym/portage/emaint/modules/__init__.py b/pym/portage/emaint/modules/__init__.py
48378 index 35674e3..f67197d 100644
48379 --- a/pym/portage/emaint/modules/__init__.py
48380 +++ b/pym/portage/emaint/modules/__init__.py
48381 @@ -1,7 +1,5 @@
48382 # Copyright 2005-2012 Gentoo Foundation
48383 # Distributed under the terms of the GNU General Public License v2
48384
48385 -"""'The emaint program plug-in module provides an automatic method
48386 -of adding/removing modules to perform checks and maintenance
48387 -on a gentoo system.
48388 +"""Plug-in modules for system health checks and maintenance.
48389 """
48390
48391 diff --git a/pym/portage/emaint/modules/binhost/__init__.py b/pym/portage/emaint/modules/binhost/__init__.py
48392 index 1a61af4..c60e8bc 100644
48393 --- a/pym/portage/emaint/modules/binhost/__init__.py
48394 +++ b/pym/portage/emaint/modules/binhost/__init__.py
48395 @@ -1,20 +1,18 @@
48396 # Copyright 2005-2012 Gentoo Foundation
48397 # Distributed under the terms of the GNU General Public License v2
48398
48399 -"""'The emaint program module provides checks and maintenancefor:
48400 - Scanning, checking and fixing problems in the world file.
48401 +"""Scan and generate metadata indexes for binary packages.
48402 """
48403
48404
48405 module_spec = {
48406 'name': 'binhost',
48407 - 'description': "Provides functions to scan, check and " + \
48408 - "Generate a metadata index for binary packages",
48409 + 'description': __doc__,
48410 'provides':{
48411 'module1': {
48412 'name': "binhost",
48413 'class': "BinhostHandler",
48414 - 'description': "Generate a metadata index for binary packages",
48415 + 'description': __doc__,
48416 'functions': ['check', 'fix'],
48417 'func_desc': {}
48418 }
48419
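The switch to 'description': __doc__ above keeps the one-line module docstring as the single source of the description text. A tiny sketch of the idea, with a hypothetical plug-in module layout:

"""Scan and generate metadata indexes for binary packages."""

module_spec = {
	'name': 'binhost',
	'description': __doc__,  # reuse the docstring defined above
}

assert module_spec['description'].startswith("Scan and generate")
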
48420 diff --git a/pym/portage/emaint/modules/binhost/binhost.py b/pym/portage/emaint/modules/binhost/binhost.py
48421 index b540d76..1138a8c 100644
48422 --- a/pym/portage/emaint/modules/binhost/binhost.py
48423 +++ b/pym/portage/emaint/modules/binhost/binhost.py
48424 @@ -1,4 +1,4 @@
48425 -# Copyright 2005-2012 Gentoo Foundation
48426 +# Copyright 2005-2014 Gentoo Foundation
48427 # Distributed under the terms of the GNU General Public License v2
48428
48429 import errno
48430 @@ -9,7 +9,9 @@ from portage import os
48431 from portage.util import writemsg
48432
48433 import sys
48434 +
48435 if sys.hexversion >= 0x3000000:
48436 + # pylint: disable=W0622
48437 long = int
48438
48439 class BinhostHandler(object):
48440 @@ -151,12 +153,8 @@ class BinhostHandler(object):
48441
48442 del pkgindex.packages[:]
48443 pkgindex.packages.extend(metadata.values())
48444 - from portage.util import atomic_ofstream
48445 - f = atomic_ofstream(self._pkgindex_file)
48446 - try:
48447 - self._pkgindex.write(f)
48448 - finally:
48449 - f.close()
48450 + bintree._pkgindex_write(self._pkgindex)
48451 +
48452 finally:
48453 locks.unlockfile(pkgindex_lock)
48454
48455
48456 diff --git a/pym/portage/emaint/modules/config/__init__.py b/pym/portage/emaint/modules/config/__init__.py
48457 index 22abb07..f0585b3 100644
48458 --- a/pym/portage/emaint/modules/config/__init__.py
48459 +++ b/pym/portage/emaint/modules/config/__init__.py
48460 @@ -1,20 +1,18 @@
48461 # Copyright 2005-2012 Gentoo Foundation
48462 # Distributed under the terms of the GNU General Public License v2
48463
48464 -"""'This emaint module provides checks and maintenance for:
48465 -Cleaning the emerge config tracker list
48466 +"""Check and clean the config tracker list for uninstalled packages.
48467 """
48468
48469
48470 module_spec = {
48471 'name': 'config',
48472 - 'description': "Provides functions to scan, check for and fix no " +\
48473 - "longer installed config files in emerge's tracker file",
48474 + 'description': __doc__,
48475 'provides':{
48476 'module1': {
48477 'name': "cleanconfmem",
48478 'class': "CleanConfig",
48479 - 'description': "Discard no longer installed config tracker entries",
48480 + 'description': __doc__,
48481 'functions': ['check', 'fix'],
48482 'func_desc': {}
48483 }
48484
48485 diff --git a/pym/portage/emaint/modules/config/config.py b/pym/portage/emaint/modules/config/config.py
48486 index a80d87d..dad024b 100644
48487 --- a/pym/portage/emaint/modules/config/config.py
48488 +++ b/pym/portage/emaint/modules/config/config.py
48489 @@ -4,14 +4,14 @@
48490 import portage
48491 from portage import os
48492 from portage.const import PRIVATE_PATH
48493 -from portage.checksum import perform_md5
48494 -
48495 +from portage.util import grabdict, writedict
48496
48497 class CleanConfig(object):
48498
48499 short_desc = "Discard any no longer installed configs from emerge's tracker list"
48500
48501 def __init__(self):
48502 + self._root = portage.settings["ROOT"]
48503 self.target = os.path.join(portage.settings["EROOT"], PRIVATE_PATH, 'config')
48504
48505 def name():
48506 @@ -19,70 +19,55 @@ class CleanConfig(object):
48507 name = staticmethod(name)
48508
48509 def load_configlist(self):
48510 -
48511 - configs = {}
48512 - with open(self.target, 'r') as configfile:
48513 - lines = configfile.readlines()
48514 - for line in lines:
48515 - ls = line.split()
48516 - configs[ls[0]] = ls[1]
48517 - return configs
48518 + return grabdict(self.target)
48519
48520 def check(self, **kwargs):
48521 onProgress = kwargs.get('onProgress', None)
48522 configs = self.load_configlist()
48523 messages = []
48524 - chksums = []
48525 maxval = len(configs)
48526 if onProgress:
48527 onProgress(maxval, 0)
48528 i = 0
48529 keys = sorted(configs)
48530 for config in keys:
48531 - if os.path.exists(config):
48532 - md5sumactual = perform_md5(config)
48533 - if md5sumactual != configs[config]:
48534 - chksums.append(" %s" % config)
48535 - else:
48536 + if not os.path.exists(config):
48537 messages.append(" %s" % config)
48538 if onProgress:
48539 onProgress(maxval, i+1)
48540 i += 1
48541 - return self._format_output(messages, chksums)
48542 + return self._format_output(messages)
48543
48544 def fix(self, **kwargs):
48545 onProgress = kwargs.get('onProgress', None)
48546 configs = self.load_configlist()
48547 messages = []
48548 - chksums = []
48549 maxval = len(configs)
48550 if onProgress:
48551 onProgress(maxval, 0)
48552 i = 0
48553 - keys = sorted(configs)
48554 - for config in keys:
48555 - if os.path.exists(config):
48556 - md5sumactual = perform_md5(config)
48557 - if md5sumactual != configs[config]:
48558 - chksums.append(" %s" % config)
48559 - configs.pop(config)
48560 +
48561 + root = self._root
48562 + if root == "/":
48563 + root = None
48564 + modified = False
48565 + for config in sorted(configs):
48566 + if root is None:
48567 + full_path = config
48568 else:
48569 - configs.pop(config)
48570 - messages.append(" %s" % config)
48571 + full_path = os.path.join(root, config.lstrip(os.sep))
48572 + if not os.path.exists(full_path):
48573 + modified = True
48574 + configs.pop(config)
48575 + messages.append(" %s" % config)
48576 if onProgress:
48577 onProgress(maxval, i+1)
48578 i += 1
48579 - lines = []
48580 - keys = sorted(configs)
48581 - for key in keys:
48582 - line = ' '.join([key, configs[key]])
48583 - lines.append(line)
48584 - lines.append('')
48585 - with open(self.target, 'w') as configfile:
48586 - configfile.write('\n'.join(lines))
48587 - return self._format_output(messages, chksums, True)
48588 + if modified:
48589 + writedict(configs, self.target)
48590 + return self._format_output(messages, True)
48591
48592 - def _format_output(self, messages=[], chksums=[], cleaned=False):
48593 + def _format_output(self, messages=[], cleaned=False):
48594 output = []
48595 if messages:
48596 output.append('Not Installed:')
48597 @@ -91,11 +76,4 @@ class CleanConfig(object):
48598 if cleaned:
48599 tot += ' ...Cleaned'
48600 output.append(tot % len(messages))
48601 - if chksums:
48602 - output.append('\nChecksums did not match:')
48603 - output += chksums
48604 - tot = '------------------------------------\n Total %i Checksums did not match'
48605 - if cleaned:
48606 - tot += ' ...Cleaned'
48607 - output.append(tot % len(chksums))
48608 return output
48609
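The rewritten fix() above checks each tracked path under ROOT instead of trusting the stored absolute path, and only rewrites the tracker when an entry was actually dropped. A standalone sketch of that path handling, with a plain dict standing in for grabdict()/writedict() and illustrative paths:

import os

def full_path_under_root(config, root):
	# tracker entries are absolute paths, interpreted relative to ROOT
	if root in (None, "/"):
		return config
	return os.path.join(root, config.lstrip(os.sep))

configs = {"/etc/foo.conf": ["d41d8cd9"], "/etc/gone.conf": ["d41d8cd9"]}
root = "/"  # portage.settings["ROOT"] in the real module
modified = False
for config in sorted(configs):
	if not os.path.exists(full_path_under_root(config, root)):
		configs.pop(config)  # drop entries whose files no longer exist
		modified = True

if modified:
	# the real code calls writedict(configs, self.target) here
	print("tracker would be rewritten with %d entries" % len(configs))
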
48610 diff --git a/pym/portage/emaint/modules/logs/__init__.py b/pym/portage/emaint/modules/logs/__init__.py
48611 index 005b608..0407efe 100644
48612 --- a/pym/portage/emaint/modules/logs/__init__.py
48613 +++ b/pym/portage/emaint/modules/logs/__init__.py
48614 @@ -1,38 +1,34 @@
48615 -# Copyright 2005-2012 Gentoo Foundation
48616 +# Copyright 2005-2013 Gentoo Foundation
48617 # Distributed under the terms of the GNU General Public License v2
48618
48619 -"""'This emaint module provides checks and maintenance for:
48620 -Cleaning the PORT_LOGDIR logs
48621 +"""Check and clean old logs in the PORT_LOGDIR.
48622 """
48623
48624
48625 module_spec = {
48626 'name': 'logs',
48627 - 'description': "Provides functions to scan, check and clean old logs " +\
48628 - "in the PORT_LOGDIR",
48629 + 'description': __doc__,
48630 'provides':{
48631 'module1': {
48632 'name': "logs",
48633 'class': "CleanLogs",
48634 - 'description': "Clean out old logs from the PORT_LOGDIR",
48635 + 'description': __doc__,
48636 'functions': ['check','clean'],
48637 'func_desc': {
48638 'clean': {
48639 "short": "-C", "long": "--clean",
48640 "help": "Cleans out logs more than 7 days old (cleanlogs only)" + \
48641 - " modulke-options: -t, -p",
48642 + " module-options: -t, -p",
48643 'status': "Cleaning %s",
48644 - 'func': 'clean'
48645 + 'action': 'store_true',
48646 + 'func': 'clean',
48647 },
48648 'time': {
48649 "short": "-t", "long": "--time",
48650 "help": "(cleanlogs only): -t, --time Delete logs older than NUM of days",
48651 'status': "",
48652 - 'action': 'store',
48653 - 'type': 'int',
48654 + 'type': int,
48655 'dest': 'NUM',
48656 - 'callback': None,
48657 - 'callback_kwargs': None,
48658 'func': 'clean'
48659 },
48660 'pretend': {
48661 @@ -41,8 +37,6 @@ module_spec = {
48662 'status': "",
48663 'action': 'store_true',
48664 'dest': 'pretend',
48665 - 'callback': None,
48666 - 'callback_kwargs': None,
48667 'func': 'clean'
48668 }
48669 }
48670
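With the callback keys gone and 'time' switching from the string 'int' to the int callable, the func_desc entries above map directly onto argparse. A hedged sketch of what emaint effectively builds from the 'time' entry, with the parser wiring simplified to a single option:

from argparse import ArgumentParser

time_opt = {
	"short": "-t", "long": "--time",
	"help": "(cleanlogs only): -t, --time Delete logs older than NUM of days",
	"type": int,  # argparse wants the callable, not the string 'int'
	"dest": "NUM",
}

parser = ArgumentParser()
parser.add_argument(time_opt["short"], time_opt["long"],
	help=time_opt["help"], type=time_opt["type"], dest=time_opt["dest"])

options = parser.parse_args(["--time", "14"])
print(options.NUM)  # 14, already an int
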
48671 diff --git a/pym/portage/emaint/modules/logs/logs.py b/pym/portage/emaint/modules/logs/logs.py
48672 index 32c8508..fe65cf5 100644
48673 --- a/pym/portage/emaint/modules/logs/logs.py
48674 +++ b/pym/portage/emaint/modules/logs/logs.py
48675 @@ -39,11 +39,10 @@ class CleanLogs(object):
48676 options: dict:
48677 'NUM': int: number of days
48678 'pretend': boolean
48679 - 'eerror': defaults to None, optional output module to output errors.
48680 - 'einfo': defaults to None, optional output module to output info msgs.
48681 """
48682 messages = []
48683 num_of_days = None
48684 + pretend = False
48685 if kwargs:
48686 # convuluted, I know, but portage.settings does not exist in
48687 # kwargs.get() when called from _emerge.main.clean_logs()
48688 @@ -54,8 +53,6 @@ class CleanLogs(object):
48689 if options:
48690 num_of_days = options.get('NUM', None)
48691 pretend = options.get('pretend', False)
48692 - eerror = options.get('eerror', None)
48693 - einfo = options.get('einfo', None)
48694
48695 clean_cmd = settings.get("PORT_LOGDIR_CLEAN")
48696 if clean_cmd:
48697 @@ -75,7 +72,7 @@ class CleanLogs(object):
48698 if not clean_cmd:
48699 return []
48700 rval = self._clean_logs(clean_cmd, settings)
48701 - messages += self._convert_errors(rval, eerror, einfo)
48702 + messages += self._convert_errors(rval)
48703 return messages
48704
48705
48706 @@ -96,19 +93,11 @@ class CleanLogs(object):
48707
48708
48709 @staticmethod
48710 - def _convert_errors(rval, eerror=None, einfo=None):
48711 + def _convert_errors(rval):
48712 msg = []
48713 if rval != os.EX_OK:
48714 msg.append("PORT_LOGDIR_CLEAN command returned %s"
48715 % ("%d" % rval if rval else "None"))
48716 msg.append("See the make.conf(5) man page for "
48717 "PORT_LOGDIR_CLEAN usage instructions.")
48718 - if eerror:
48719 - for m in msg:
48720 - eerror(m)
48721 - else:
48722 - msg.append("PORT_LOGDIR_CLEAN command succeeded")
48723 - if einfo:
48724 - for m in msg:
48725 - einfo(m)
48726 return msg
48727
48728 diff --git a/pym/portage/emaint/modules/move/__init__.py b/pym/portage/emaint/modules/move/__init__.py
48729 index 5399440..d31d7b3 100644
48730 --- a/pym/portage/emaint/modules/move/__init__.py
48731 +++ b/pym/portage/emaint/modules/move/__init__.py
48732 @@ -1,21 +1,18 @@
48733 # Copyright 2005-2012 Gentoo Foundation
48734 # Distributed under the terms of the GNU General Public License v2
48735
48736 -"""'This emaint module provides checks and maintenance for:
48737 - 1) "Performing package move updates for installed packages",
48738 - 2)"Perform package move updates for binary packages"
48739 +"""Perform package move updates for installed and binary packages.
48740 """
48741
48742
48743 module_spec = {
48744 'name': 'move',
48745 - 'description': "Provides functions to check for and move packages " +\
48746 - "either installed or binary packages stored on this system",
48747 + 'description': __doc__,
48748 'provides':{
48749 'module1': {
48750 'name': "moveinst",
48751 'class': "MoveInstalled",
48752 - 'description': "Perform package move updates for installed packages",
48753 + 'description': __doc__,
48754 'options': ['check', 'fix'],
48755 'functions': ['check', 'fix'],
48756 'func_desc': {
48757
48758 diff --git a/pym/portage/emaint/modules/move/move.py b/pym/portage/emaint/modules/move/move.py
48759 index 018e6ca..ef674d4 100644
48760 --- a/pym/portage/emaint/modules/move/move.py
48761 +++ b/pym/portage/emaint/modules/move/move.py
48762 @@ -3,14 +3,16 @@
48763
48764 import portage
48765 from portage import os
48766 -
48767 +from portage.exception import InvalidData
48768 +from _emerge.Package import Package
48769 +from portage.versions import _pkg_str
48770
48771 class MoveHandler(object):
48772
48773 def __init__(self, tree, porttree):
48774 self._tree = tree
48775 self._portdb = porttree.dbapi
48776 - self._update_keys = ["DEPEND", "RDEPEND", "PDEPEND", "PROVIDE"]
48777 + self._update_keys = Package._dep_keys + ("PROVIDE",)
48778 self._master_repo = \
48779 self._portdb.getRepositoryName(self._portdb.porttree_root)
48780
48781 @@ -48,6 +50,8 @@ class MoveHandler(object):
48782 # progress bar is updated in indeterminate mode.
48783 match = self._tree.dbapi.match
48784 aux_get = self._tree.dbapi.aux_get
48785 + pkg_str = self._tree.dbapi._pkg_str
48786 + settings = self._tree.dbapi.settings
48787 if onProgress:
48788 onProgress(0, 0)
48789 for repo, updates in allupdates.items():
48790 @@ -65,13 +69,21 @@ class MoveHandler(object):
48791 if update_cmd[0] == "move":
48792 origcp, newcp = update_cmd[1:]
48793 for cpv in match(origcp):
48794 - if repo_match(aux_get(cpv, ["repository"])[0]):
48795 + try:
48796 + cpv = pkg_str(cpv, origcp.repo)
48797 + except (KeyError, InvalidData):
48798 + continue
48799 + if repo_match(cpv.repo):
48800 errors.append("'%s' moved to '%s'" % (cpv, newcp))
48801 elif update_cmd[0] == "slotmove":
48802 pkg, origslot, newslot = update_cmd[1:]
48803 - for cpv in match(pkg):
48804 - slot, prepo = aux_get(cpv, ["SLOT", "repository"])
48805 - if slot == origslot and repo_match(prepo):
48806 + atom = pkg.with_slot(origslot)
48807 + for cpv in match(atom):
48808 + try:
48809 + cpv = pkg_str(cpv, atom.repo)
48810 + except (KeyError, InvalidData):
48811 + continue
48812 + if repo_match(cpv.repo):
48813 errors.append("'%s' slot moved from '%s' to '%s'" % \
48814 (cpv, origslot, newslot))
48815 if onProgress:
48816 @@ -82,15 +94,21 @@ class MoveHandler(object):
48817 cpv_all = self._tree.dbapi.cpv_all()
48818 cpv_all.sort()
48819 maxval = len(cpv_all)
48820 - meta_keys = self._update_keys + ['repository', 'EAPI']
48821 + meta_keys = self._update_keys + self._portdb._pkg_str_aux_keys
48822 if onProgress:
48823 onProgress(maxval, 0)
48824 for i, cpv in enumerate(cpv_all):
48825 - metadata = dict(zip(meta_keys, aux_get(cpv, meta_keys)))
48826 - eapi = metadata.pop('EAPI')
48827 - repository = metadata.pop('repository')
48828 try:
48829 - updates = allupdates[repository]
48830 + metadata = dict(zip(meta_keys, aux_get(cpv, meta_keys)))
48831 + except KeyError:
48832 + continue
48833 + try:
48834 + pkg = _pkg_str(cpv, metadata=metadata, settings=settings)
48835 + except InvalidData:
48836 + continue
48837 + metadata = dict((k, metadata[k]) for k in self._update_keys)
48838 + try:
48839 + updates = allupdates[pkg.repo]
48840 except KeyError:
48841 try:
48842 updates = allupdates['DEFAULT']
48843 @@ -99,7 +117,7 @@ class MoveHandler(object):
48844 if not updates:
48845 continue
48846 metadata_updates = \
48847 - portage.update_dbentries(updates, metadata, eapi=eapi)
48848 + portage.update_dbentries(updates, metadata, parent=pkg)
48849 if metadata_updates:
48850 errors.append("'%s' has outdated metadata" % cpv)
48851 if onProgress:
48852
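The move.py hunks above wrap each matched cpv in a _pkg_str and simply skip entries that cannot be parsed or that live in a different repository. A schematic, portage-free sketch of that filter-and-enrich loop; InvalidData, pkg_str and repo_match below are stand-ins for the real portage objects:

class InvalidData(Exception):
	"""Stand-in for portage.exception.InvalidData."""

def pkg_str(cpv, repo):
	# stand-in: the real dbapi._pkg_str attaches .repo, .slot, etc.
	if "/" not in cpv:
		raise InvalidData(cpv)
	class _P(str):
		pass
	pkg = _P(cpv)
	pkg.repo = repo or "gentoo"
	return pkg

def repo_match(repo):
	return repo == "gentoo"

errors = []
for cpv in ["app-misc/foo-1.0", "badentry"]:
	try:
		pkg = pkg_str(cpv, None)
	except (KeyError, InvalidData):
		continue  # unparsable or vanished entry: skip rather than crash
	if repo_match(pkg.repo):
		errors.append("'%s' moved to 'app-misc/bar'" % pkg)

print(errors)
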
48853 diff --git a/pym/portage/emaint/modules/resume/__init__.py b/pym/portage/emaint/modules/resume/__init__.py
48854 index 60cffe9..965e8f9 100644
48855 --- a/pym/portage/emaint/modules/resume/__init__.py
48856 +++ b/pym/portage/emaint/modules/resume/__init__.py
48857 @@ -1,15 +1,13 @@
48858 # Copyright 2005-2012 Gentoo Foundation
48859 # Distributed under the terms of the GNU General Public License v2
48860
48861 -"""'This emaint module provides checks and maintenance for:
48862 -Cleaning the "emerge --resume" lists
48863 +"""Check and fix problems in the resume and/or resume_backup files.
48864 """
48865
48866
48867 module_spec = {
48868 'name': 'resume',
48869 - 'description': "Provides functions to scan, check and fix problems " +\
48870 - "in the resume and/or resume_backup files",
48871 + 'description': __doc__,
48872 'provides':{
48873 'module1': {
48874 'name': "cleanresume",
48875
48876 diff --git a/pym/portage/emaint/modules/world/__init__.py b/pym/portage/emaint/modules/world/__init__.py
48877 index 103b5c5..3f62270 100644
48878 --- a/pym/portage/emaint/modules/world/__init__.py
48879 +++ b/pym/portage/emaint/modules/world/__init__.py
48880 @@ -1,20 +1,18 @@
48881 # Copyright 2005-2012 Gentoo Foundation
48882 # Distributed under the terms of the GNU General Public License v2
48883
48884 -"""'This emaint module provides checks and maintenance for:
48885 -Fixing problems with the "world" file.
48886 +"""Check and fix problems in the world file.
48887 """
48888
48889
48890 module_spec = {
48891 'name': 'world',
48892 - 'description': "Provides functions to scan, " +
48893 - "check and fix problems in the world file",
48894 + 'description': __doc__,
48895 'provides':{
48896 'module1':{
48897 'name': "world",
48898 'class': "WorldHandler",
48899 - 'description': "Fix problems in the world file",
48900 + 'description': __doc__,
48901 'functions': ['check', 'fix'],
48902 'func_desc': {}
48903 }
48904
48905 diff --git a/pym/portage/env/loaders.py b/pym/portage/env/loaders.py
48906 index 372bc12..f869884 100644
48907 --- a/pym/portage/env/loaders.py
48908 +++ b/pym/portage/env/loaders.py
48909 @@ -1,10 +1,14 @@
48910 # config.py -- Portage Config
48911 -# Copyright 2007-2011 Gentoo Foundation
48912 +# Copyright 2007-2013 Gentoo Foundation
48913 # Distributed under the terms of the GNU General Public License v2
48914
48915 import errno
48916 import io
48917 import stat
48918 +import portage
48919 +portage.proxy.lazyimport.lazyimport(globals(),
48920 + 'portage.util:writemsg',
48921 +)
48922 from portage import os
48923 from portage import _encodings
48924 from portage import _unicode_decode
48925 @@ -149,17 +153,21 @@ class FileLoader(DataLoader):
48926 func = self.lineParser
48927 for fn in RecursiveFileLoader(self.fname):
48928 try:
48929 - f = io.open(_unicode_encode(fn,
48930 + with io.open(_unicode_encode(fn,
48931 encoding=_encodings['fs'], errors='strict'), mode='r',
48932 - encoding=_encodings['content'], errors='replace')
48933 + encoding=_encodings['content'], errors='replace') as f:
48934 + lines = f.readlines()
48935 except EnvironmentError as e:
48936 - if e.errno not in (errno.ENOENT, errno.ESTALE):
48937 + if e.errno == errno.EACCES:
48938 + writemsg(_("Permission denied: '%s'\n") % fn, noiselevel=-1)
48939 + del e
48940 + elif e.errno in (errno.ENOENT, errno.ESTALE):
48941 + del e
48942 + else:
48943 raise
48944 - del e
48945 - continue
48946 - for line_num, line in enumerate(f):
48947 - func(line, line_num, data, errors)
48948 - f.close()
48949 + else:
48950 + for line_num, line in enumerate(lines):
48951 + func(line, line_num, data, errors)
48952 return (data, errors)
48953
48954 def lineParser(self, line, line_num, data, errors):
48955
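The loaders.py hunk above moves to a with-block and treats EACCES as a logged warning while still silently skipping ENOENT/ESTALE. A generic sketch of that errno triage, without portage's encoding helpers (the path is illustrative):

import errno
import io
import sys

def read_lines(fn):
	try:
		with io.open(fn, mode='r', encoding='utf_8', errors='replace') as f:
			return f.readlines()
	except EnvironmentError as e:
		if e.errno == errno.EACCES:
			sys.stderr.write("Permission denied: '%s'\n" % fn)
		elif e.errno in (errno.ENOENT, errno.ESTALE):
			pass  # missing or stale files are simply skipped
		else:
			raise  # anything else is a real error
		return []

print(len(read_lines("/nonexistent/example")))  # 0
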
48956 diff --git a/pym/portage/exception.py b/pym/portage/exception.py
48957 index 5ccd750..6fa5447 100644
48958 --- a/pym/portage/exception.py
48959 +++ b/pym/portage/exception.py
48960 @@ -1,4 +1,4 @@
48961 -# Copyright 1998-2011 Gentoo Foundation
48962 +# Copyright 1998-2014 Gentoo Foundation
48963 # Distributed under the terms of the GNU General Public License v2
48964
48965 import signal
48966 @@ -7,30 +7,40 @@ from portage import _encodings, _unicode_encode, _unicode_decode
48967 from portage.localization import _
48968
48969 if sys.hexversion >= 0x3000000:
48970 + # pylint: disable=W0622
48971 basestring = str
48972
48973 class PortageException(Exception):
48974 """General superclass for portage exceptions"""
48975 - def __init__(self,value):
48976 - self.value = value[:]
48977 - if isinstance(self.value, basestring):
48978 - self.value = _unicode_decode(self.value,
48979 - encoding=_encodings['content'], errors='replace')
48980 + if sys.hexversion >= 0x3000000:
48981 + def __init__(self, value):
48982 + self.value = value[:]
48983
48984 - def __str__(self):
48985 - if isinstance(self.value, basestring):
48986 - return self.value
48987 - else:
48988 - return _unicode_decode(repr(self.value),
48989 - encoding=_encodings['content'], errors='replace')
48990 -
48991 - if sys.hexversion < 0x3000000:
48992 -
48993 - __unicode__ = __str__
48994 + def __str__(self):
48995 + if isinstance(self.value, str):
48996 + return self.value
48997 + else:
48998 + return repr(self.value)
48999 + else:
49000 + def __init__(self, value):
49001 + self.value = value[:]
49002 + if isinstance(self.value, basestring):
49003 + self.value = _unicode_decode(self.value,
49004 + encoding=_encodings['content'], errors='replace')
49005 +
49006 + def __unicode__(self):
49007 + if isinstance(self.value, unicode):
49008 + return self.value
49009 + else:
49010 + return _unicode_decode(repr(self.value),
49011 + encoding=_encodings['content'], errors='replace')
49012
49013 def __str__(self):
49014 - return _unicode_encode(self.__unicode__(),
49015 - encoding=_encodings['content'], errors='backslashreplace')
49016 + if isinstance(self.value, unicode):
49017 + return _unicode_encode(self.value,
49018 + encoding=_encodings['content'], errors='backslashreplace')
49019 + else:
49020 + return repr(self.value)
49021
49022 class CorruptionError(PortageException):
49023 """Corruption indication"""
49024 @@ -75,20 +85,20 @@ class DirectoryNotFound(InvalidLocation):
49025 """A directory was not found when it was expected to exist"""
49026
49027 class OperationNotPermitted(PortageException):
49028 - from errno import EPERM as errno
49029 """An operation was not permitted operating system"""
49030 + from errno import EPERM as errno
49031
49032 class OperationNotSupported(PortageException):
49033 - from errno import EOPNOTSUPP as errno
49034 """Operation not supported"""
49035 + from errno import EOPNOTSUPP as errno
49036
49037 class PermissionDenied(PortageException):
49038 - from errno import EACCES as errno
49039 """Permission denied"""
49040 + from errno import EACCES as errno
49041
49042 class TryAgain(PortageException):
49043 - from errno import EAGAIN as errno
49044 """Try again"""
49045 + from errno import EAGAIN as errno
49046
49047 class TimeoutException(PortageException):
49048 """Operation timed out"""
49049
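The exception.py rework above defines __init__/__str__ once per interpreter generation instead of branching inside a single method. A minimal sketch of that version-gated class body pattern; the class and message are stand-ins:

import sys

class ExampleError(Exception):
	if sys.hexversion >= 0x3000000:
		def __str__(self):
			value = self.args[0] if self.args else ''
			return value if isinstance(value, str) else repr(value)
	else:
		def __unicode__(self):
			value = self.args[0] if self.args else ''
			return value if isinstance(value, unicode) else repr(value)

		def __str__(self):
			return self.__unicode__().encode('utf-8', 'backslashreplace')

print(str(ExampleError("boom")))  # boom
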
49050 diff --git a/pym/portage/getbinpkg.py b/pym/portage/getbinpkg.py
49051 index 212f788..14dc149 100644
49052 --- a/pym/portage/getbinpkg.py
49053 +++ b/pym/portage/getbinpkg.py
49054 @@ -1,7 +1,9 @@
49055 # getbinpkg.py -- Portage binary-package helper functions
49056 -# Copyright 2003-2012 Gentoo Foundation
49057 +# Copyright 2003-2014 Gentoo Foundation
49058 # Distributed under the terms of the GNU General Public License v2
49059
49060 +from __future__ import unicode_literals
49061 +
49062 from portage.output import colorize
49063 from portage.cache.mappings import slot_dict_class
49064 from portage.localization import _
49065 @@ -18,6 +20,7 @@ import socket
49066 import time
49067 import tempfile
49068 import base64
49069 +import warnings
49070
49071 _all_errors = [NotImplementedError, ValueError, socket.error]
49072
49073 @@ -39,7 +42,7 @@ except ImportError:
49074 try:
49075 import ftplib
49076 except ImportError as e:
49077 - sys.stderr.write(colorize("BAD","!!! CANNOT IMPORT FTPLIB: ")+str(e)+"\n")
49078 + sys.stderr.write(colorize("BAD", "!!! CANNOT IMPORT FTPLIB: ") + str(e) + "\n")
49079 else:
49080 _all_errors.extend(ftplib.all_errors)
49081
49082 @@ -55,24 +58,28 @@ try:
49083 from httplib import ResponseNotReady as http_client_ResponseNotReady
49084 from httplib import error as http_client_error
49085 except ImportError as e:
49086 - sys.stderr.write(colorize("BAD","!!! CANNOT IMPORT HTTP.CLIENT: ")+str(e)+"\n")
49087 + sys.stderr.write(colorize("BAD", "!!! CANNOT IMPORT HTTP.CLIENT: ") + str(e) + "\n")
49088 else:
49089 _all_errors.append(http_client_error)
49090
49091 _all_errors = tuple(_all_errors)
49092
49093 if sys.hexversion >= 0x3000000:
49094 + # pylint: disable=W0622
49095 long = int
49096
49097 def make_metadata_dict(data):
49098 - myid,myglob = data
49099 +
49100 + warnings.warn("portage.getbinpkg.make_metadata_dict() is deprecated",
49101 + DeprecationWarning, stacklevel=2)
49102 +
49103 + myid, _myglob = data
49104
49105 mydict = {}
49106 for k_bytes in portage.xpak.getindex_mem(myid):
49107 k = _unicode_decode(k_bytes,
49108 encoding=_encodings['repo.content'], errors='replace')
49109 - if k not in _all_metadata_keys and \
49110 - k != "CATEGORY":
49111 + if k not in _all_metadata_keys and k != "CATEGORY":
49112 continue
49113 v = _unicode_decode(portage.xpak.getitem(data, k_bytes),
49114 encoding=_encodings['repo.content'], errors='replace')
49115 @@ -84,13 +91,17 @@ class ParseLinks(html_parser_HTMLParser):
49116 """Parser class that overrides HTMLParser to grab all anchors from an html
49117 page and provide suffix and prefix limitors"""
49118 def __init__(self):
49119 +
49120 + warnings.warn("portage.getbinpkg.ParseLinks is deprecated",
49121 + DeprecationWarning, stacklevel=2)
49122 +
49123 self.PL_anchors = []
49124 html_parser_HTMLParser.__init__(self)
49125
49126 def get_anchors(self):
49127 return self.PL_anchors
49128
49129 - def get_anchors_by_prefix(self,prefix):
49130 + def get_anchors_by_prefix(self, prefix):
49131 newlist = []
49132 for x in self.PL_anchors:
49133 if x.startswith(prefix):
49134 @@ -98,7 +109,7 @@ class ParseLinks(html_parser_HTMLParser):
49135 newlist.append(x[:])
49136 return newlist
49137
49138 - def get_anchors_by_suffix(self,suffix):
49139 + def get_anchors_by_suffix(self, suffix):
49140 newlist = []
49141 for x in self.PL_anchors:
49142 if x.endswith(suffix):
49143 @@ -106,10 +117,10 @@ class ParseLinks(html_parser_HTMLParser):
49144 newlist.append(x[:])
49145 return newlist
49146
49147 - def handle_endtag(self,tag):
49148 + def handle_endtag(self, tag):
49149 pass
49150
49151 - def handle_starttag(self,tag,attrs):
49152 + def handle_starttag(self, tag, attrs):
49153 if tag == "a":
49154 for x in attrs:
49155 if x[0] == 'href':
49156 @@ -117,16 +128,19 @@ class ParseLinks(html_parser_HTMLParser):
49157 self.PL_anchors.append(urllib_parse_unquote(x[1]))
49158
49159
49160 -def create_conn(baseurl,conn=None):
49161 - """(baseurl,conn) --- Takes a protocol://site:port/address url, and an
49162 +def create_conn(baseurl, conn=None):
49163 + """Takes a protocol://site:port/address url, and an
49164 optional connection. If connection is already active, it is passed on.
49165 baseurl is reduced to address and is returned in tuple (conn,address)"""
49166
49167 - parts = baseurl.split("://",1)
49168 + warnings.warn("portage.getbinpkg.create_conn() is deprecated",
49169 + DeprecationWarning, stacklevel=2)
49170 +
49171 + parts = baseurl.split("://", 1)
49172 if len(parts) != 2:
49173 raise ValueError(_("Provided URI does not "
49174 "contain protocol identifier. '%s'") % baseurl)
49175 - protocol,url_parts = parts
49176 + protocol, url_parts = parts
49177 del parts
49178
49179 url_parts = url_parts.split("/")
49180 @@ -137,7 +151,7 @@ def create_conn(baseurl,conn=None):
49181 address = "/"+"/".join(url_parts[1:])
49182 del url_parts
49183
49184 - userpass_host = host.split("@",1)
49185 + userpass_host = host.split("@", 1)
49186 if len(userpass_host) == 1:
49187 host = userpass_host[0]
49188 userpass = ["anonymous"]
49189 @@ -196,10 +210,10 @@ def create_conn(baseurl,conn=None):
49190 host = host[:-1]
49191 conn = ftplib.FTP(host)
49192 if password:
49193 - conn.login(username,password)
49194 + conn.login(username, password)
49195 else:
49196 sys.stderr.write(colorize("WARN",
49197 - _(" * No password provided for username"))+" '%s'" % \
49198 + _(" * No password provided for username")) + " '%s'" % \
49199 (username,) + "\n\n")
49200 conn.login(username)
49201 conn.set_pasv(passive)
49202 @@ -216,11 +230,15 @@ def create_conn(baseurl,conn=None):
49203 else:
49204 raise NotImplementedError(_("%s is not a supported protocol.") % protocol)
49205
49206 - return (conn,protocol,address, http_params, http_headers)
49207 + return (conn, protocol, address, http_params, http_headers)
49208
49209 def make_ftp_request(conn, address, rest=None, dest=None):
49210 - """(conn,address,rest) --- uses the conn object to request the data
49211 + """Uses the |conn| object to request the data
49212 from address and issuing a rest if it is passed."""
49213 +
49214 + warnings.warn("portage.getbinpkg.make_ftp_request() is deprecated",
49215 + DeprecationWarning, stacklevel=2)
49216 +
49217 try:
49218
49219 if dest:
49220 @@ -235,9 +253,9 @@ def make_ftp_request(conn, address, rest=None, dest=None):
49221 rest = 0
49222
49223 if rest != None:
49224 - mysocket = conn.transfercmd("RETR "+str(address), rest)
49225 + mysocket = conn.transfercmd("RETR %s" % str(address), rest)
49226 else:
49227 - mysocket = conn.transfercmd("RETR "+str(address))
49228 + mysocket = conn.transfercmd("RETR %s" % str(address))
49229
49230 mydata = ""
49231 while 1:
49232 @@ -259,28 +277,31 @@ def make_ftp_request(conn, address, rest=None, dest=None):
49233 conn.voidresp()
49234 conn.voidcmd("TYPE A")
49235
49236 - return mydata,not (fsize==data_size),""
49237 + return mydata, (fsize != data_size), ""
49238
49239 except ValueError as e:
49240 - return None,int(str(e)[:4]),str(e)
49241 + return None, int(str(e)[:4]), str(e)
49242
49243
49244 -def make_http_request(conn, address, params={}, headers={}, dest=None):
49245 - """(conn,address,params,headers) --- uses the conn object to request
49246 +def make_http_request(conn, address, _params={}, headers={}, dest=None):
49247 + """Uses the |conn| object to request
49248 the data from address, performing Location forwarding and using the
49249 optional params and headers."""
49250
49251 + warnings.warn("portage.getbinpkg.make_http_request() is deprecated",
49252 + DeprecationWarning, stacklevel=2)
49253 +
49254 rc = 0
49255 response = None
49256 while (rc == 0) or (rc == 301) or (rc == 302):
49257 try:
49258 - if (rc != 0):
49259 - conn,ignore,ignore,ignore,ignore = create_conn(address)
49260 + if rc != 0:
49261 + conn = create_conn(address)[0]
49262 conn.request("GET", address, body=None, headers=headers)
49263 except SystemExit as e:
49264 raise
49265 except Exception as e:
49266 - return None,None,"Server request failed: "+str(e)
49267 + return None, None, "Server request failed: %s" % str(e)
49268 response = conn.getresponse()
49269 rc = response.status
49270
49271 @@ -289,7 +310,7 @@ def make_http_request(conn, address, params={}, headers={}, dest=None):
49272 ignored_data = response.read()
49273 del ignored_data
49274 for x in str(response.msg).split("\n"):
49275 - parts = x.split(": ",1)
49276 + parts = x.split(": ", 1)
49277 if parts[0] == "Location":
49278 if (rc == 301):
49279 sys.stderr.write(colorize("BAD",
49280 @@ -302,16 +323,20 @@ def make_http_request(conn, address, params={}, headers={}, dest=None):
49281 break
49282
49283 if (rc != 200) and (rc != 206):
49284 - return None,rc,"Server did not respond successfully ("+str(response.status)+": "+str(response.reason)+")"
49285 + return None, rc, "Server did not respond successfully (%s: %s)" % (str(response.status), str(response.reason))
49286
49287 if dest:
49288 dest.write(response.read())
49289 - return "",0,""
49290 + return "", 0, ""
49291
49292 - return response.read(),0,""
49293 + return response.read(), 0, ""
49294
49295
49296 def match_in_array(array, prefix="", suffix="", match_both=1, allow_overlap=0):
49297 +
49298 + warnings.warn("portage.getbinpkg.match_in_array() is deprecated",
49299 + DeprecationWarning, stacklevel=2)
49300 +
49301 myarray = []
49302
49303 if not (prefix and suffix):
49304 @@ -344,20 +369,22 @@ def match_in_array(array, prefix="", suffix="", match_both=1, allow_overlap=0):
49305 continue # Doesn't match.
49306
49307 return myarray
49308 -
49309
49310
49311 -def dir_get_list(baseurl,conn=None):
49312 - """(baseurl[,connection]) -- Takes a base url to connect to and read from.
49313 +def dir_get_list(baseurl, conn=None):
49314 + """Takes a base url to connect to and read from.
49315 URI should be in the form <proto>://<site>[:port]<path>
49316 Connection is used for persistent connection instances."""
49317
49318 + warnings.warn("portage.getbinpkg.dir_get_list() is deprecated",
49319 + DeprecationWarning, stacklevel=2)
49320 +
49321 if not conn:
49322 keepconnection = 0
49323 else:
49324 keepconnection = 1
49325
49326 - conn,protocol,address,params,headers = create_conn(baseurl, conn)
49327 + conn, protocol, address, params, headers = create_conn(baseurl, conn)
49328
49329 listing = None
49330 if protocol in ["http","https"]:
49331 @@ -365,7 +392,7 @@ def dir_get_list(baseurl,conn=None):
49332 # http servers can return a 400 error here
49333 # if the address doesn't end with a slash.
49334 address += "/"
49335 - page,rc,msg = make_http_request(conn,address,params,headers)
49336 + page, rc, msg = make_http_request(conn, address, params, headers)
49337
49338 if page:
49339 parser = ParseLinks()
49340 @@ -395,23 +422,26 @@ def dir_get_list(baseurl,conn=None):
49341
49342 return listing
49343
49344 -def file_get_metadata(baseurl,conn=None, chunk_size=3000):
49345 - """(baseurl[,connection]) -- Takes a base url to connect to and read from.
49346 +def file_get_metadata(baseurl, conn=None, chunk_size=3000):
49347 + """Takes a base url to connect to and read from.
49348 URI should be in the form <proto>://<site>[:port]<path>
49349 Connection is used for persistent connection instances."""
49350
49351 + warnings.warn("portage.getbinpkg.file_get_metadata() is deprecated",
49352 + DeprecationWarning, stacklevel=2)
49353 +
49354 if not conn:
49355 keepconnection = 0
49356 else:
49357 keepconnection = 1
49358
49359 - conn,protocol,address,params,headers = create_conn(baseurl, conn)
49360 + conn, protocol, address, params, headers = create_conn(baseurl, conn)
49361
49362 if protocol in ["http","https"]:
49363 - headers["Range"] = "bytes=-"+str(chunk_size)
49364 - data,rc,msg = make_http_request(conn, address, params, headers)
49365 + headers["Range"] = "bytes=-%s" % str(chunk_size)
49366 + data, _x, _x = make_http_request(conn, address, params, headers)
49367 elif protocol in ["ftp"]:
49368 - data,rc,msg = make_ftp_request(conn, address, -chunk_size)
49369 + data, _x, _x = make_ftp_request(conn, address, -chunk_size)
49370 elif protocol == "sftp":
49371 f = conn.open(address)
49372 try:
49373 @@ -424,21 +454,21 @@ def file_get_metadata(baseurl,conn=None, chunk_size=3000):
49374
49375 if data:
49376 xpaksize = portage.xpak.decodeint(data[-8:-4])
49377 - if (xpaksize+8) > chunk_size:
49378 - myid = file_get_metadata(baseurl, conn, (xpaksize+8))
49379 + if (xpaksize + 8) > chunk_size:
49380 + myid = file_get_metadata(baseurl, conn, xpaksize + 8)
49381 if not keepconnection:
49382 conn.close()
49383 return myid
49384 else:
49385 - xpak_data = data[len(data)-(xpaksize+8):-8]
49386 + xpak_data = data[len(data) - (xpaksize + 8):-8]
49387 del data
49388
49389 myid = portage.xpak.xsplit_mem(xpak_data)
49390 if not myid:
49391 - myid = None,None
49392 + myid = None, None
49393 del xpak_data
49394 else:
49395 - myid = None,None
49396 + myid = None, None
49397
49398 if not keepconnection:
49399 conn.close()
49400 @@ -446,53 +476,79 @@ def file_get_metadata(baseurl,conn=None, chunk_size=3000):
49401 return myid
49402
49403
49404 -def file_get(baseurl,dest,conn=None,fcmd=None,filename=None):
49405 - """(baseurl,dest,fcmd=) -- Takes a base url to connect to and read from.
49406 +def file_get(baseurl=None, dest=None, conn=None, fcmd=None, filename=None,
49407 + fcmd_vars=None):
49408 + """Takes a base url to connect to and read from.
49409 URI should be in the form <proto>://[user[:pass]@]<site>[:port]<path>"""
49410
49411 if not fcmd:
49412 - return file_get_lib(baseurl,dest,conn)
49413 - if not filename:
49414 - filename = os.path.basename(baseurl)
49415 -
49416 - variables = {
49417 - "DISTDIR": dest,
49418 - "URI": baseurl,
49419 - "FILE": filename
49420 - }
49421 +
49422 + warnings.warn("Use of portage.getbinpkg.file_get() without the fcmd "
49423 + "parameter is deprecated", DeprecationWarning, stacklevel=2)
49424 +
49425 + return file_get_lib(baseurl, dest, conn)
49426 +
49427 + variables = {}
49428 +
49429 + if fcmd_vars is not None:
49430 + variables.update(fcmd_vars)
49431 +
49432 + if "DISTDIR" not in variables:
49433 + if dest is None:
49434 + raise portage.exception.MissingParameter(
49435 + _("%s is missing required '%s' key") %
49436 + ("fcmd_vars", "DISTDIR"))
49437 + variables["DISTDIR"] = dest
49438 +
49439 + if "URI" not in variables:
49440 + if baseurl is None:
49441 + raise portage.exception.MissingParameter(
49442 + _("%s is missing required '%s' key") %
49443 + ("fcmd_vars", "URI"))
49444 + variables["URI"] = baseurl
49445 +
49446 + if "FILE" not in variables:
49447 + if filename is None:
49448 + filename = os.path.basename(variables["URI"])
49449 + variables["FILE"] = filename
49450
49451 from portage.util import varexpand
49452 from portage.process import spawn
49453 myfetch = portage.util.shlex_split(fcmd)
49454 myfetch = [varexpand(x, mydict=variables) for x in myfetch]
49455 - fd_pipes= {
49456 - 0:sys.stdin.fileno(),
49457 - 1:sys.stdout.fileno(),
49458 - 2:sys.stdout.fileno()
49459 + fd_pipes = {
49460 + 0: portage._get_stdin().fileno(),
49461 + 1: sys.__stdout__.fileno(),
49462 + 2: sys.__stdout__.fileno()
49463 }
49464 + sys.__stdout__.flush()
49465 + sys.__stderr__.flush()
49466 retval = spawn(myfetch, env=os.environ.copy(), fd_pipes=fd_pipes)
49467 if retval != os.EX_OK:
49468 sys.stderr.write(_("Fetcher exited with a failure condition.\n"))
49469 return 0
49470 return 1
49471
49472 -def file_get_lib(baseurl,dest,conn=None):
49473 - """(baseurl[,connection]) -- Takes a base url to connect to and read from.
49474 +def file_get_lib(baseurl, dest, conn=None):
49475 + """Takes a base url to connect to and read from.
49476 URI should be in the form <proto>://<site>[:port]<path>
49477 Connection is used for persistent connection instances."""
49478
49479 + warnings.warn("portage.getbinpkg.file_get_lib() is deprecated",
49480 + DeprecationWarning, stacklevel=2)
49481 +
49482 if not conn:
49483 keepconnection = 0
49484 else:
49485 keepconnection = 1
49486
49487 - conn,protocol,address,params,headers = create_conn(baseurl, conn)
49488 + conn, protocol, address, params, headers = create_conn(baseurl, conn)
49489
49490 - sys.stderr.write("Fetching '"+str(os.path.basename(address)+"'\n"))
49491 - if protocol in ["http","https"]:
49492 - data,rc,msg = make_http_request(conn, address, params, headers, dest=dest)
49493 + sys.stderr.write("Fetching '" + str(os.path.basename(address)) + "'\n")
49494 + if protocol in ["http", "https"]:
49495 + data, rc, _msg = make_http_request(conn, address, params, headers, dest=dest)
49496 elif protocol in ["ftp"]:
49497 - data,rc,msg = make_ftp_request(conn, address, dest=dest)
49498 + data, rc, _msg = make_ftp_request(conn, address, dest=dest)
49499 elif protocol == "sftp":
49500 rc = 0
49501 try:
49502 @@ -522,8 +578,10 @@ def file_get_lib(baseurl,dest,conn=None):
49503
49504
49505 def dir_get_metadata(baseurl, conn=None, chunk_size=3000, verbose=1, usingcache=1, makepickle=None):
49506 - """(baseurl,conn,chunk_size,verbose) --
49507 - """
49508 +
49509 + warnings.warn("portage.getbinpkg.dir_get_metadata() is deprecated",
49510 + DeprecationWarning, stacklevel=2)
49511 +
49512 if not conn:
49513 keepconnection = 0
49514 else:
49515 @@ -536,7 +594,7 @@ def dir_get_metadata(baseurl, conn=None, chunk_size=3000, verbose=1, usingcache=
49516 makepickle = "/var/cache/edb/metadata.idx.most_recent"
49517
49518 try:
49519 - conn, protocol, address, params, headers = create_conn(baseurl, conn)
49520 + conn = create_conn(baseurl, conn)[0]
49521 except _all_errors as e:
49522 # ftplib.FTP(host) can raise errors like this:
49523 # socket.error: (111, 'Connection refused')
49524 @@ -557,18 +615,20 @@ def dir_get_metadata(baseurl, conn=None, chunk_size=3000, verbose=1, usingcache=
49525 out.write(_("Loaded metadata pickle.\n"))
49526 out.flush()
49527 metadatafile.close()
49528 - except (AttributeError, EOFError, EnvironmentError, ValueError, pickle.UnpicklingError):
49529 + except (SystemExit, KeyboardInterrupt):
49530 + raise
49531 + except Exception:
49532 metadata = {}
49533 if baseurl not in metadata:
49534 - metadata[baseurl]={}
49535 + metadata[baseurl] = {}
49536 if "indexname" not in metadata[baseurl]:
49537 - metadata[baseurl]["indexname"]=""
49538 + metadata[baseurl]["indexname"] = ""
49539 if "timestamp" not in metadata[baseurl]:
49540 - metadata[baseurl]["timestamp"]=0
49541 + metadata[baseurl]["timestamp"] = 0
49542 if "unmodified" not in metadata[baseurl]:
49543 - metadata[baseurl]["unmodified"]=0
49544 + metadata[baseurl]["unmodified"] = 0
49545 if "data" not in metadata[baseurl]:
49546 - metadata[baseurl]["data"]={}
49547 + metadata[baseurl]["data"] = {}
49548
49549 if not os.access(cache_path, os.W_OK):
49550 sys.stderr.write(_("!!! Unable to write binary metadata to disk!\n"))
49551 @@ -594,36 +654,36 @@ def dir_get_metadata(baseurl, conn=None, chunk_size=3000, verbose=1, usingcache=
49552 for mfile in metalist:
49553 if usingcache and \
49554 ((metadata[baseurl]["indexname"] != mfile) or \
49555 - (metadata[baseurl]["timestamp"] < int(time.time()-(60*60*24)))):
49556 + (metadata[baseurl]["timestamp"] < int(time.time() - (60 * 60 * 24)))):
49557 # Try to download new cache until we succeed on one.
49558 - data=""
49559 - for trynum in [1,2,3]:
49560 + data = ""
49561 + for trynum in [1, 2, 3]:
49562 mytempfile = tempfile.TemporaryFile()
49563 try:
49564 - file_get(baseurl+"/"+mfile, mytempfile, conn)
49565 + file_get(baseurl + "/" + mfile, mytempfile, conn)
49566 if mytempfile.tell() > len(data):
49567 mytempfile.seek(0)
49568 data = mytempfile.read()
49569 except ValueError as e:
49570 - sys.stderr.write("--- "+str(e)+"\n")
49571 + sys.stderr.write("--- %s\n" % str(e))
49572 if trynum < 3:
49573 sys.stderr.write(_("Retrying...\n"))
49574 sys.stderr.flush()
49575 mytempfile.close()
49576 continue
49577 - if match_in_array([mfile],suffix=".gz"):
49578 + if match_in_array([mfile], suffix=".gz"):
49579 out.write("gzip'd\n")
49580 out.flush()
49581 try:
49582 import gzip
49583 mytempfile.seek(0)
49584 - gzindex = gzip.GzipFile(mfile[:-3],'rb',9,mytempfile)
49585 + gzindex = gzip.GzipFile(mfile[:-3], 'rb', 9, mytempfile)
49586 data = gzindex.read()
49587 except SystemExit as e:
49588 raise
49589 except Exception as e:
49590 mytempfile.close()
49591 - sys.stderr.write(_("!!! Failed to use gzip: ")+str(e)+"\n")
49592 + sys.stderr.write(_("!!! Failed to use gzip: ") + str(e) + "\n")
49593 sys.stderr.flush()
49594 mytempfile.close()
49595 try:
49596 @@ -638,8 +698,8 @@ def dir_get_metadata(baseurl, conn=None, chunk_size=3000, verbose=1, usingcache=
49597 except SystemExit as e:
49598 raise
49599 except Exception as e:
49600 - sys.stderr.write(_("!!! Failed to read data from index: ")+str(mfile)+"\n")
49601 - sys.stderr.write("!!! "+str(e)+"\n")
49602 + sys.stderr.write(_("!!! Failed to read data from index: ") + str(mfile) + "\n")
49603 + sys.stderr.write("!!! %s" % str(e))
49604 sys.stderr.flush()
49605 try:
49606 metadatafile = open(_unicode_encode(metadatafilename,
49607 @@ -650,7 +710,7 @@ def dir_get_metadata(baseurl, conn=None, chunk_size=3000, verbose=1, usingcache=
49608 raise
49609 except Exception as e:
49610 sys.stderr.write(_("!!! Failed to write binary metadata to disk!\n"))
49611 - sys.stderr.write("!!! "+str(e)+"\n")
49612 + sys.stderr.write("!!! %s\n" % str(e))
49613 sys.stderr.flush()
49614 break
49615 # We may have metadata... now we run through the tbz2 list and check.
49616 @@ -670,8 +730,8 @@ def dir_get_metadata(baseurl, conn=None, chunk_size=3000, verbose=1, usingcache=
49617 self.display()
49618 def display(self):
49619 self.out.write("\r"+colorize("WARN",
49620 - _("cache miss: '")+str(self.misses)+"'") + \
49621 - " --- "+colorize("GOOD", _("cache hit: '")+str(self.hits)+"'"))
49622 + _("cache miss: '") + str(self.misses) + "'") + \
49623 + " --- " + colorize("GOOD", _("cache hit: '") + str(self.hits) + "'"))
49624 self.out.flush()
49625
49626 cache_stats = CacheStats(out)
49627 @@ -688,7 +748,7 @@ def dir_get_metadata(baseurl, conn=None, chunk_size=3000, verbose=1, usingcache=
49628 cache_stats.update()
49629 metadata[baseurl]["modified"] = 1
49630 myid = None
49631 - for retry in range(3):
49632 + for _x in range(3):
49633 try:
49634 myid = file_get_metadata(
49635 "/".join((baseurl.rstrip("/"), x.lstrip("/"))),
49636 @@ -699,22 +759,20 @@ def dir_get_metadata(baseurl, conn=None, chunk_size=3000, verbose=1, usingcache=
49637 # make_http_request(). The docstring for this error in
49638 # httplib.py says "Presumably, the server closed the
49639 # connection before sending a valid response".
49640 - conn, protocol, address, params, headers = create_conn(
49641 - baseurl)
49642 + conn = create_conn(baseurl)[0]
49643 except http_client_ResponseNotReady:
49644 # With some http servers this error is known to be thrown
49645 # from conn.getresponse() in make_http_request() when the
49646 # remote file does not have appropriate read permissions.
49647 # Maybe it's possible to recover from this exception in
49648 # cases though, so retry.
49649 - conn, protocol, address, params, headers = create_conn(
49650 - baseurl)
49651 + conn = create_conn(baseurl)[0]
49652
49653 if myid and myid[0]:
49654 metadata[baseurl]["data"][x] = make_metadata_dict(myid)
49655 elif verbose:
49656 sys.stderr.write(colorize("BAD",
49657 - _("!!! Failed to retrieve metadata on: "))+str(x)+"\n")
49658 + _("!!! Failed to retrieve metadata on: ")) + str(x) + "\n")
49659 sys.stderr.flush()
49660 else:
49661 cache_stats.hits += 1
49662 @@ -861,7 +919,6 @@ class PackageIndex(object):
49663 for metadata in sorted(self.packages,
49664 key=portage.util.cmp_sort_key(_cmp_cpv)):
49665 metadata = metadata.copy()
49666 - cpv = metadata["CPV"]
49667 if self._inherited_keys:
49668 for k in self._inherited_keys:
49669 v = self.header.get(k)
49670
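Most of the legacy getbinpkg helpers above now emit a DeprecationWarning with stacklevel=2, so the warning points at the caller rather than at getbinpkg itself. A quick sketch of that pattern and of how to surface the warning during testing; legacy_helper() is a made-up example, not a portage function:

import warnings

def legacy_helper():
	warnings.warn("legacy_helper() is deprecated",
		DeprecationWarning, stacklevel=2)
	return 42

# DeprecationWarning is ignored by default in normal runs,
# so opt in explicitly when auditing call sites.
with warnings.catch_warnings(record=True) as caught:
	warnings.simplefilter("always")
	legacy_helper()
	print(caught[0].category.__name__)  # DeprecationWarning
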
49671 diff --git a/pym/portage/glsa.py b/pym/portage/glsa.py
49672 index 1857695..834572a 100644
49673 --- a/pym/portage/glsa.py
49674 +++ b/pym/portage/glsa.py
49675 @@ -1,7 +1,7 @@
49676 -# Copyright 2003-2012 Gentoo Foundation
49677 +# Copyright 2003-2014 Gentoo Foundation
49678 # Distributed under the terms of the GNU General Public License v2
49679
49680 -from __future__ import absolute_import
49681 +from __future__ import absolute_import, unicode_literals
49682
49683 import io
49684 import sys
49685 @@ -9,23 +9,27 @@ try:
49686 from urllib.request import urlopen as urllib_request_urlopen
49687 except ImportError:
49688 from urllib import urlopen as urllib_request_urlopen
49689 +import codecs
49690 import re
49691 +import operator
49692 import xml.dom.minidom
49693 +from io import StringIO
49694 +from functools import reduce
49695
49696 import portage
49697 from portage import os
49698 from portage import _encodings
49699 from portage import _unicode_decode
49700 from portage import _unicode_encode
49701 -from portage.versions import pkgsplit, vercmp, best
49702 +from portage.versions import pkgsplit, vercmp
49703 from portage.util import grabfile
49704 -from portage.const import CACHE_PATH
49705 +from portage.const import PRIVATE_PATH
49706 from portage.localization import _
49707 from portage.dep import _slot_separator
49708
49709 # Note: the space for rgt and rlt is important !!
49710 # FIXME: use slot deps instead, requires GLSA format versioning
49711 -opMapping = {"le": "<=", "lt": "<", "eq": "=", "gt": ">", "ge": ">=",
49712 +opMapping = {"le": "<=", "lt": "<", "eq": "=", "gt": ">", "ge": ">=",
49713 "rge": ">=~", "rle": "<=~", "rgt": " >~", "rlt": " <~"}
49714 NEWLINE_ESCAPE = "!;\\n" # some random string to mark newlines that should be preserved
49715 SPACE_ESCAPE = "!;_" # some random string to mark spaces that should be preserved
49716 @@ -39,22 +43,22 @@ def get_applied_glsas(settings):
49717 @rtype: list
49718 @return: list of glsa IDs
49719 """
49720 - return grabfile(os.path.join(settings["EROOT"], CACHE_PATH, "glsa"))
49721 + return grabfile(os.path.join(settings["EROOT"], PRIVATE_PATH, "glsa_injected"))
49722
49723
49724 # TODO: use the textwrap module instead
49725 def wrap(text, width, caption=""):
49726 """
49727 Wraps the given text at column I{width}, optionally indenting
49728 - it so that no text is under I{caption}. It's possible to encode
49729 + it so that no text is under I{caption}. It's possible to encode
49730 hard linebreaks in I{text} with L{NEWLINE_ESCAPE}.
49731 -
49732 +
49733 @type text: String
49734 @param text: the text to be wrapped
49735 @type width: Integer
49736 @param width: the column at which the text should be wrapped
49737 @type caption: String
49738 - @param caption: this string is inserted at the beginning of the
49739 + @param caption: this string is inserted at the beginning of the
49740 return value and the paragraph is indented up to
49741 C{len(caption)}.
49742 @rtype: String
49743 @@ -65,7 +69,7 @@ def wrap(text, width, caption=""):
49744 text = text.replace(2*NEWLINE_ESCAPE, NEWLINE_ESCAPE+" "+NEWLINE_ESCAPE)
49745 words = text.split()
49746 indentLevel = len(caption)+1
49747 -
49748 +
49749 for w in words:
49750 if line != "" and line[-1] == "\n":
49751 rValue += line
49752 @@ -94,10 +98,10 @@ def get_glsa_list(myconfig):
49753 Returns a list of all available GLSAs in the given repository
49754 by comparing the filelist there with the pattern described in
49755 the config.
49756 -
49757 +
49758 @type myconfig: portage.config
49759 @param myconfig: Portage settings instance
49760 -
49761 +
49762 @rtype: List of Strings
49763 @return: a list of GLSA IDs in this repository
49764 """
49765 @@ -113,10 +117,10 @@ def get_glsa_list(myconfig):
49766 dirlist = os.listdir(repository)
49767 prefix = "glsa-"
49768 suffix = ".xml"
49769 -
49770 +
49771 for f in dirlist:
49772 try:
49773 - if f[:len(prefix)] == prefix:
49774 + if f[:len(prefix)] == prefix and f[-1*len(suffix):] == suffix:
49775 rValue.append(f[len(prefix):-1*len(suffix)])
49776 except IndexError:
49777 pass
49778 @@ -125,22 +129,20 @@ def get_glsa_list(myconfig):
49779 def getListElements(listnode):
49780 """
49781 Get all <li> elements for a given <ol> or <ul> node.
49782 -
49783 +
49784 @type listnode: xml.dom.Node
49785 @param listnode: <ul> or <ol> list to get the elements for
49786 @rtype: List of Strings
49787 @return: a list that contains the value of the <li> elements
49788 """
49789 - rValue = []
49790 if not listnode.nodeName in ["ul", "ol"]:
49791 raise GlsaFormatException("Invalid function call: listnode is not <ul> or <ol>")
49792 - for li in listnode.childNodes:
49793 - if li.nodeType != xml.dom.Node.ELEMENT_NODE:
49794 - continue
49795 - rValue.append(getText(li, format="strip"))
49796 + rValue = [getText(li, format="strip") \
49797 + for li in listnode.childNodes \
49798 + if li.nodeType == xml.dom.Node.ELEMENT_NODE]
49799 return rValue
49800
49801 -def getText(node, format):
49802 +def getText(node, format, textfd = None):
49803 """
49804 This is the main parser function. It takes a node and traverses
49805 recursive over the subnodes, getting the text of each (and the
49806 @@ -148,7 +150,7 @@ def getText(node, format):
49807 parameter the text might be formatted by adding/removing newlines,
49808 tabs and spaces. This function is only useful for the GLSA DTD,
49809 it's not applicable for other DTDs.
49810 -
49811 +
49812 @type node: xml.dom.Node
49813 @param node: the root node to start with the parsing
49814 @type format: String
49815 @@ -158,45 +160,54 @@ def getText(node, format):
49816 replaces multiple spaces with one space.
49817 I{xml} does some more formatting, depending on the
49818 type of the encountered nodes.
49819 + @type textfd: writable file-like object
49820 + @param textfd: the file-like object to write the output to
49821 @rtype: String
49822 @return: the (formatted) content of the node and its subnodes
49823 +		or None if a textfd was passed in
49824 """
49825 - rValue = ""
49826 + if not textfd:
49827 + textfd = StringIO()
49828 + returnNone = False
49829 + else:
49830 + returnNone = True
49831 if format in ["strip", "keep"]:
49832 if node.nodeName in ["uri", "mail"]:
49833 - rValue += node.childNodes[0].data+": "+node.getAttribute("link")
49834 + textfd.write(node.childNodes[0].data+": "+node.getAttribute("link"))
49835 else:
49836 for subnode in node.childNodes:
49837 if subnode.nodeName == "#text":
49838 - rValue += subnode.data
49839 + textfd.write(subnode.data)
49840 else:
49841 - rValue += getText(subnode, format)
49842 - else:
49843 + getText(subnode, format, textfd)
49844 + else: # format = "xml"
49845 for subnode in node.childNodes:
49846 if subnode.nodeName == "p":
49847 for p_subnode in subnode.childNodes:
49848 if p_subnode.nodeName == "#text":
49849 - rValue += p_subnode.data.strip()
49850 + textfd.write(p_subnode.data.strip())
49851 elif p_subnode.nodeName in ["uri", "mail"]:
49852 - rValue += p_subnode.childNodes[0].data
49853 - rValue += " ( "+p_subnode.getAttribute("link")+" )"
49854 - rValue += NEWLINE_ESCAPE
49855 + textfd.write(p_subnode.childNodes[0].data)
49856 + textfd.write(" ( "+p_subnode.getAttribute("link")+" )")
49857 + textfd.write(NEWLINE_ESCAPE)
49858 elif subnode.nodeName == "ul":
49859 for li in getListElements(subnode):
49860 - rValue += "-"+SPACE_ESCAPE+li+NEWLINE_ESCAPE+" "
49861 + textfd.write("-"+SPACE_ESCAPE+li+NEWLINE_ESCAPE+" ")
49862 elif subnode.nodeName == "ol":
49863 i = 0
49864 for li in getListElements(subnode):
49865 i = i+1
49866 - rValue += str(i)+"."+SPACE_ESCAPE+li+NEWLINE_ESCAPE+" "
49867 + textfd.write(str(i)+"."+SPACE_ESCAPE+li+NEWLINE_ESCAPE+" ")
49868 elif subnode.nodeName == "code":
49869 - rValue += getText(subnode, format="keep").replace("\n", NEWLINE_ESCAPE)
49870 - if rValue[-1*len(NEWLINE_ESCAPE):] != NEWLINE_ESCAPE:
49871 - rValue += NEWLINE_ESCAPE
49872 + textfd.write(getText(subnode, format="keep").lstrip().replace("\n", NEWLINE_ESCAPE))
49873 + textfd.write(NEWLINE_ESCAPE)
49874 elif subnode.nodeName == "#text":
49875 - rValue += subnode.data
49876 + textfd.write(subnode.data)
49877 else:
49878 raise GlsaFormatException(_("Invalid Tag found: "), subnode.nodeName)
49879 + if returnNone:
49880 + return None
49881 + rValue = textfd.getvalue()
49882 if format == "strip":
49883 rValue = rValue.strip(" \n\t")
49884 rValue = re.sub("[\s]{2,}", " ", rValue)
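The rewrite above changes getText() from building up a return string to writing into a file-like object, allocating a StringIO only for the outermost call and returning None when the caller supplied its own sink. A minimal sketch of that pattern, using an invented Node stand-in rather than xml.dom nodes:

    # Sketch of the "write into textfd, allocate StringIO only at the top" pattern.
    from io import StringIO

    class Node:
        def __init__(self, text="", children=()):
            self.text = text
            self.children = list(children)

    def get_text(node, textfd=None):
        return_none = textfd is not None      # caller owns the sink: return None
        if textfd is None:
            textfd = StringIO()               # outermost call owns the buffer
        textfd.write(node.text)
        for child in node.children:
            get_text(child, textfd)           # recursive calls reuse the same sink
        return None if return_none else textfd.getvalue()

    tree = Node("a", [Node("b"), Node("c", [Node("d")])])
    print(get_text(tree))  # -> "abcd"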
49885 @@ -206,7 +217,7 @@ def getMultiTagsText(rootnode, tagname, format):
49886 """
49887 Returns a list with the text of all subnodes of type I{tagname}
49888 under I{rootnode} (which itself is not parsed) using the given I{format}.
49889 -
49890 +
49891 @type rootnode: xml.dom.Node
49892 @param rootnode: the node to search for I{tagname}
49893 @type tagname: String
49894 @@ -216,16 +227,15 @@ def getMultiTagsText(rootnode, tagname, format):
49895 @rtype: List of Strings
49896 @return: a list containing the text of all I{tagname} childnodes
49897 """
49898 - rValue = []
49899 - for e in rootnode.getElementsByTagName(tagname):
49900 - rValue.append(getText(e, format))
49901 + rValue = [getText(e, format) \
49902 + for e in rootnode.getElementsByTagName(tagname)]
49903 return rValue
49904
49905 def makeAtom(pkgname, versionNode):
49906 """
49907 - creates from the given package name and information in the
49908 + creates from the given package name and information in the
49909 I{versionNode} a (syntactical) valid portage atom.
49910 -
49911 +
49912 @type pkgname: String
49913 @param pkgname: the name of the package for this atom
49914 @type versionNode: xml.dom.Node
49915 @@ -248,9 +258,9 @@ def makeAtom(pkgname, versionNode):
49916
49917 def makeVersion(versionNode):
49918 """
49919 - creates from the information in the I{versionNode} a
49920 + creates from the information in the I{versionNode} a
49921 version string (format <op><version>).
49922 -
49923 +
49924 @type versionNode: xml.dom.Node
49925 @param versionNode: a <vulnerable> or <unaffected> Node that
49926 contains the version information for this atom
49927 @@ -270,17 +280,17 @@ def makeVersion(versionNode):
49928
49929 def match(atom, dbapi, match_type="default"):
49930 """
49931 - wrapper that calls revisionMatch() or portage.dbapi.dbapi.match() depending on
49932 + wrapper that calls revisionMatch() or portage.dbapi.dbapi.match() depending on
49933 the given atom.
49934 -
49935 +
49936 @type atom: string
49937 @param atom: a <~ or >~ atom or a normal portage atom that contains the atom to match against
49938 @type dbapi: portage.dbapi.dbapi
49939 @param dbapi: one of the portage databases to use as information source
49940 @type match_type: string
49941 - @param match_type: if != "default" passed as first argument to dbapi.xmatch
49942 + @param match_type: if != "default" passed as first argument to dbapi.xmatch
49943 to apply the wanted visibility filters
49944 -
49945 +
49946 @rtype: list of strings
49947 @return: a list with the matching versions
49948 """
49949 @@ -296,15 +306,15 @@ def revisionMatch(revisionAtom, dbapi, match_type="default"):
49950 handler for the special >~, >=~, <=~ and <~ atoms that are supposed to behave
49951 as > and < except that they are limited to the same version, the range only
49952 applies to the revision part.
49953 -
49954 +
49955 @type revisionAtom: string
49956 @param revisionAtom: a <~ or >~ atom that contains the atom to match against
49957 @type dbapi: portage.dbapi.dbapi
49958 @param dbapi: one of the portage databases to use as information source
49959 @type match_type: string
49960 - @param match_type: if != "default" passed as first argument to portdb.xmatch
49961 + @param match_type: if != "default" passed as first argument to portdb.xmatch
49962 to apply the wanted visibility filters
49963 -
49964 +
49965 @rtype: list of strings
49966 @return: a list with the matching versions
49967 """
49968 @@ -325,18 +335,19 @@ def revisionMatch(revisionAtom, dbapi, match_type="default"):
49969 if eval(r1+" "+revisionAtom[0:2]+" "+r2):
49970 rValue.append(v)
49971 return rValue
49972 -
49973 +
49974
49975 def getMinUpgrade(vulnerableList, unaffectedList, portdbapi, vardbapi, minimize=True):
49976 """
49977 Checks if the systemstate is matching an atom in
49978 I{vulnerableList} and returns string describing
49979 - the lowest version for the package that matches an atom in
49980 + the lowest version for the package that matches an atom in
49981 I{unaffectedList} and is greater than the currently installed
49982 - version or None if the system is not affected. Both
49983 - I{vulnerableList} and I{unaffectedList} should have the
49984 +		version. It returns an empty list if the system is affected but
49985 +		no upgrade is possible, and None if the system is not affected.
49986 + Both I{vulnerableList} and I{unaffectedList} should have the
49987 same base package.
49988 -
49989 +
49990 @type vulnerableList: List of Strings
49991 @param vulnerableList: atoms matching vulnerable package versions
49992 @type unaffectedList: List of Strings
49993 @@ -347,46 +358,51 @@ def getMinUpgrade(vulnerableList, unaffectedList, portdbapi, vardbapi, minimize=
49994 @param vardbapi: Installed package repository
49995 @type minimize: Boolean
49996 @param minimize: True for a least-change upgrade, False for emerge-like algorithm
49997 -
49998 +
49999 @rtype: String | None
50000 @return: the lowest unaffected version that is greater than
50001 the installed version.
50002 - """
50003 - rValue = None
50004 - v_installed = []
50005 - u_installed = []
50006 - for v in vulnerableList:
50007 - v_installed += match(v, vardbapi)
50008 + """
50009 + rValue = ""
50010 + v_installed = reduce(operator.add, [match(v, vardbapi) for v in vulnerableList], [])
50011 + u_installed = reduce(operator.add, [match(u, vardbapi) for u in unaffectedList], [])
50012
50013 - for u in unaffectedList:
50014 - u_installed += match(u, vardbapi)
50015 -
50016 - install_unaffected = True
50017 - for i in v_installed:
50018 - if i not in u_installed:
50019 - install_unaffected = False
50020 + # remove all unaffected atoms from vulnerable list
50021 + v_installed = list(set(v_installed).difference(set(u_installed)))
50022
50023 - if install_unaffected:
50024 - return rValue
50025 -
50026 + if not v_installed:
50027 + return None
50028 +
50029 +	# this list holds each vulnerable atom paired with its upgrade atom
50030 + vuln_update = []
50031 + avail_updates = set()
50032 for u in unaffectedList:
50033 - mylist = match(u, portdbapi, match_type="match-all")
50034 - for c in mylist:
50035 - i = best(v_installed)
50036 - if vercmp(c.version, i.version) > 0 \
50037 - and (rValue == None \
50038 - or not match("="+rValue, portdbapi) \
50039 - or (minimize ^ (vercmp(c.version, rValue.version) > 0)) \
50040 - and match("="+c, portdbapi)) \
50041 - and portdbapi.aux_get(c, ["SLOT"]) == vardbapi.aux_get(best(v_installed), ["SLOT"]):
50042 - rValue = c
50043 - return rValue
50044 + # TODO: This had match_type="match-all" before. I don't think it should
50045 + # since we disregarded masked items later anyway (match(=rValue, "porttree"))
50046 + avail_updates.update(match(u, portdbapi))
50047 + # if an atom is already installed, we should not consider it for upgrades
50048 + avail_updates.difference_update(u_installed)
50049 +
50050 + for vuln in v_installed:
50051 + update = ""
50052 + for c in avail_updates:
50053 + c_pv = portage.catpkgsplit(c)
50054 + if vercmp(c.version, vuln.version) > 0 \
50055 + and (update == "" \
50056 + or (minimize ^ (vercmp(c.version, update.version) > 0))) \
50057 + and portdbapi._pkg_str(c, None).slot == vardbapi._pkg_str(vuln, None).slot:
50058 + update = c_pv[0]+"/"+c_pv[1]+"-"+c_pv[2]
50059 + if c_pv[3] != "r0": # we don't like -r0 for display
50060 + update += "-"+c_pv[3]
50061 + vuln_update.append([vuln, update])
50062 +
50063 + return vuln_update
50064
50065 def format_date(datestr):
50066 """
50067 Takes a date (announced, revised) date from a GLSA and formats
50068 it as readable text (i.e. "January 1, 2008").
50069 -
50070 +
50071 @type date: String
50072 @param date: the date string to reformat
50073 @rtype: String
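The reworked getMinUpgrade() above pairs every installed vulnerable version with the lowest (minimize=True) or highest unaffected candidate in the same slot, leaving the update empty when nothing newer is available. A simplified, self-contained sketch of that selection, using plain version tuples and invented data in place of portage atoms and vercmp():

    # Simplified re-implementation of the candidate selection; tuples stand in
    # for portage versions, and the sample data is invented.
    def min_upgrade(vulnerable_installed, available, minimize=True):
        table = []
        for vuln in vulnerable_installed:
            update = None
            for cand in available:
                if cand <= vuln:
                    continue                  # must be strictly newer than the vulnerable version
                if update is None or (minimize ^ (cand > update)):
                    update = cand             # keep the lowest (minimize) or highest candidate
            table.append((vuln, update))      # update stays None if no upgrade exists
        return table

    print(min_upgrade([(1, 0)], [(1, 1), (1, 2), (2, 0)]))           # [((1, 0), (1, 1))]
    print(min_upgrade([(1, 0)], [(1, 1), (2, 0)], minimize=False))   # [((1, 0), (2, 0))]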
50074 @@ -396,16 +412,16 @@ def format_date(datestr):
50075 splitdate = datestr.split("-", 2)
50076 if len(splitdate) != 3:
50077 return datestr
50078 -
50079 +
50080 # This cannot raise an error as we use () instead of []
50081 splitdate = (int(x) for x in splitdate)
50082 -
50083 +
50084 from datetime import date
50085 try:
50086 d = date(*splitdate)
50087 except ValueError:
50088 return datestr
50089 -
50090 +
50091 # TODO We could format to local date format '%x' here?
50092 return _unicode_decode(d.strftime("%B %d, %Y"),
50093 encoding=_encodings['content'], errors='replace')
50094 @@ -417,7 +433,7 @@ class GlsaTypeException(Exception):
50095
50096 class GlsaFormatException(Exception):
50097 pass
50098 -
50099 +
50100 class GlsaArgumentException(Exception):
50101 pass
50102
50103 @@ -429,9 +445,9 @@ class Glsa:
50104 """
50105 def __init__(self, myid, myconfig, vardbapi, portdbapi):
50106 """
50107 - Simple constructor to set the ID, store the config and gets the
50108 + Simple constructor to set the ID, store the config and gets the
50109 XML data by calling C{self.read()}.
50110 -
50111 +
50112 @type myid: String
50113 @param myid: String describing the id for the GLSA object (standard
50114 GLSAs have an ID of the form YYYYMM-nn) or an existing
50115 @@ -461,7 +477,7 @@ class Glsa:
50116 """
50117 Here we build the filename from the config and the ID and pass
50118 it to urllib to fetch it from the filesystem or a remote server.
50119 -
50120 +
50121 @rtype: None
50122 @return: None
50123 """
50124 @@ -473,15 +489,21 @@ class Glsa:
50125 myurl = "file://"+self.nr
50126 else:
50127 myurl = repository + "glsa-%s.xml" % str(self.nr)
50128 - self.parse(urllib_request_urlopen(myurl))
50129 +
50130 + f = urllib_request_urlopen(myurl)
50131 + try:
50132 + self.parse(f)
50133 + finally:
50134 + f.close()
50135 +
50136 return None
50137
50138 def parse(self, myfile):
50139 """
50140 - This method parses the XML file and sets up the internal data
50141 + This method parses the XML file and sets up the internal data
50142 structures by calling the different helper functions in this
50143 module.
50144 -
50145 +
50146 @type myfile: String
50147 @param myfile: Filename to grab the XML data from
50148 @rtype: None
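The read() change above makes sure the handle returned by urlopen is closed even when parsing raises. The same try/finally shape in isolation (the parse callback and URL are whatever the caller provides):

    # The try/finally shape introduced above, shown standalone.
    try:
        from urllib.request import urlopen   # Python 3
    except ImportError:
        from urllib2 import urlopen          # Python 2

    def fetch_and_parse(url, parse):
        f = urlopen(url)
        try:
            return parse(f)    # parse may raise; the handle still gets closed
        finally:
            f.close()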
50149 @@ -504,27 +526,27 @@ class Glsa:
50150 self.title = getText(myroot.getElementsByTagName("title")[0], format="strip")
50151 self.synopsis = getText(myroot.getElementsByTagName("synopsis")[0], format="strip")
50152 self.announced = format_date(getText(myroot.getElementsByTagName("announced")[0], format="strip"))
50153 -
50154 - count = 1
50155 +
50156 # Support both formats of revised:
50157 # <revised>December 30, 2007: 02</revised>
50158 # <revised count="2">2007-12-30</revised>
50159 revisedEl = myroot.getElementsByTagName("revised")[0]
50160 self.revised = getText(revisedEl, format="strip")
50161 - if ((sys.hexversion >= 0x3000000 and "count" in revisedEl.attributes) or
50162 - (sys.hexversion < 0x3000000 and revisedEl.attributes.has_key("count"))):
50163 - count = revisedEl.getAttribute("count")
50164 - elif (self.revised.find(":") >= 0):
50165 - (self.revised, count) = self.revised.split(":")
50166 -
50167 + count = revisedEl.attributes.get("count")
50168 + if count is None:
50169 + if self.revised.find(":") >= 0:
50170 + (self.revised, count) = self.revised.split(":")
50171 + else:
50172 + count = 1
50173 +
50174 self.revised = format_date(self.revised)
50175 -
50176 +
50177 try:
50178 self.count = int(count)
50179 except ValueError:
50180 # TODO should this raise a GlsaFormatException?
50181 self.count = 1
50182 -
50183 +
50184 # now the optional and 0-n toplevel, #PCDATA tags and references
50185 try:
50186 self.access = getText(myroot.getElementsByTagName("access")[0], format="strip")
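The parse() hunk above drops the hexversion-dependent has_key() branch and reads the optional count attribute directly, falling back to the older "Month DD, YYYY: NN" form. A standalone sketch of the two supported <revised> formats, using minidom and example strings:

    # Standalone sketch of the two <revised> formats; the input strings are examples.
    from xml.dom.minidom import parseString

    def revised_info(xml_text):
        el = parseString(xml_text).documentElement
        revised = el.firstChild.data.strip()
        count = el.attributes.get("count")    # None unless count="..." is present
        if count is not None:
            count = count.value
        elif ":" in revised:
            revised, count = revised.split(":")
        else:
            count = 1
        return revised, int(count)

    print(revised_info('<revised count="2">2007-12-30</revised>'))    # ('2007-12-30', 2)
    print(revised_info('<revised>December 30, 2007: 02</revised>'))   # ('December 30, 2007', 2)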
50187 @@ -532,7 +554,7 @@ class Glsa:
50188 self.access = ""
50189 self.bugs = getMultiTagsText(myroot, "bug", format="strip")
50190 self.references = getMultiTagsText(myroot.getElementsByTagName("references")[0], "uri", format="keep")
50191 -
50192 +
50193 # and now the formatted text elements
50194 self.description = getText(myroot.getElementsByTagName("description")[0], format="xml")
50195 self.workaround = getText(myroot.getElementsByTagName("workaround")[0], format="xml")
50196 @@ -542,7 +564,7 @@ class Glsa:
50197 try:
50198 self.background = getText(myroot.getElementsByTagName("background")[0], format="xml")
50199 except IndexError:
50200 - self.background = ""
50201 + self.background = ""
50202
50203 # finally the interesting tags (product, affected, package)
50204 self.glsatype = myroot.getElementsByTagName("product")[0].getAttribute("type")
50205 @@ -572,16 +594,18 @@ class Glsa:
50206 self.services = self.affected.getElementsByTagName("service")
50207 return None
50208
50209 - def dump(self, outstream=sys.stdout):
50210 + def dump(self, outstream=sys.stdout, encoding="utf-8"):
50211 """
50212 - Dumps a plaintext representation of this GLSA to I{outfile} or
50213 + Dumps a plaintext representation of this GLSA to I{outfile} or
50214 B{stdout} if it is ommitted. You can specify an alternate
50215 - I{encoding} if needed (default is latin1).
50216 -
50217 + I{encoding} if needed (default is utf-8).
50218 +
50219 @type outstream: File
50220 @param outfile: Stream that should be used for writing
50221 (defaults to sys.stdout)
50222 """
50223 + outstream = getattr(outstream, "buffer", outstream)
50224 + outstream = codecs.getwriter(encoding)(outstream)
50225 width = 76
50226 outstream.write(("GLSA %s: \n%s" % (self.nr, self.title)).center(width)+"\n")
50227 outstream.write((width*"=")+"\n")
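dump() above now wraps the output stream with a codecs writer so unicode text is encoded explicitly (utf-8 by default), using the binary .buffer attribute when the stream has one (Python 3 text streams). A minimal sketch of that wrapping; sys.stdout is only an example target:

    # Minimal sketch of the stream wrapping done in dump().
    import codecs
    import sys

    def wrapped(outstream=sys.stdout, encoding="utf-8"):
        # Python 3 text streams expose a binary .buffer; otherwise use the stream itself.
        outstream = getattr(outstream, "buffer", outstream)
        return codecs.getwriter(encoding)(outstream)

    wrapped().write(u"GLSA example \u2013 utf-8 safe\n")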
50228 @@ -606,30 +630,24 @@ class Glsa:
50229 pass
50230 if len(self.bugs) > 0:
50231 outstream.write(_("\nRelated bugs: "))
50232 - for i in range(0, len(self.bugs)):
50233 - outstream.write(self.bugs[i])
50234 - if i < len(self.bugs)-1:
50235 - outstream.write(", ")
50236 - else:
50237 - outstream.write("\n")
50238 + outstream.write(", ".join(self.bugs))
50239 + outstream.write("\n")
50240 if self.background:
50241 outstream.write("\n"+wrap(self.background, width, caption=_("Background: ")))
50242 outstream.write("\n"+wrap(self.description, width, caption=_("Description: ")))
50243 outstream.write("\n"+wrap(self.impact_text, width, caption=_("Impact: ")))
50244 outstream.write("\n"+wrap(self.workaround, width, caption=_("Workaround: ")))
50245 outstream.write("\n"+wrap(self.resolution, width, caption=_("Resolution: ")))
50246 - myreferences = ""
50247 - for r in self.references:
50248 - myreferences += (r.replace(" ", SPACE_ESCAPE)+NEWLINE_ESCAPE+" ")
50249 + myreferences = " ".join(r.replace(" ", SPACE_ESCAPE)+NEWLINE_ESCAPE for r in self.references)
50250 outstream.write("\n"+wrap(myreferences, width, caption=_("References: ")))
50251 outstream.write("\n")
50252 -
50253 +
50254 def isVulnerable(self):
50255 """
50256 Tests if the system is affected by this GLSA by checking if any
50257 vulnerable package versions are installed. Also checks for affected
50258 architectures.
50259 -
50260 +
50261 @rtype: Boolean
50262 @return: True if the system is affected, False if not
50263 """
50264 @@ -641,56 +659,67 @@ class Glsa:
50265 for v in path["vul_atoms"]:
50266 rValue = rValue \
50267 or (len(match(v, self.vardbapi)) > 0 \
50268 - and getMinUpgrade(path["vul_atoms"], path["unaff_atoms"], \
50269 + and None != getMinUpgrade(path["vul_atoms"], path["unaff_atoms"], \
50270 self.portdbapi, self.vardbapi))
50271 return rValue
50272 -
50273 - def isApplied(self):
50274 +
50275 + def isInjected(self):
50276 """
50277 - Looks if the GLSA IDis in the GLSA checkfile to check if this
50278 - GLSA was already applied.
50279 -
50280 +		Checks whether the GLSA ID is listed in the GLSA checkfile,
50281 +		i.e. whether this GLSA should be considered applied.
50282 +
50283 @rtype: Boolean
50284 - @return: True if the GLSA was applied, False if not
50285 +		@return: True if the GLSA is in the inject file, False if not
50286 """
50287 + if not os.access(os.path.join(self.config["EROOT"],
50288 + PRIVATE_PATH, "glsa_injected"), os.R_OK):
50289 + return False
50290 return (self.nr in get_applied_glsas(self.config))
50291
50292 def inject(self):
50293 """
50294 Puts the ID of this GLSA into the GLSA checkfile, so it won't
50295 - show up on future checks. Should be called after a GLSA is
50296 + show up on future checks. Should be called after a GLSA is
50297 applied or on explicit user request.
50298
50299 @rtype: None
50300 @return: None
50301 """
50302 - if not self.isApplied():
50303 + if not self.isInjected():
50304 checkfile = io.open(
50305 _unicode_encode(os.path.join(self.config["EROOT"],
50306 - CACHE_PATH, "glsa"),
50307 - encoding=_encodings['fs'], errors='strict'),
50308 + PRIVATE_PATH, "glsa_injected"),
50309 + encoding=_encodings['fs'], errors='strict'),
50310 mode='a+', encoding=_encodings['content'], errors='strict')
50311 checkfile.write(_unicode_decode(self.nr + "\n"))
50312 checkfile.close()
50313 return None
50314 -
50315 +
50316 def getMergeList(self, least_change=True):
50317 """
50318 Returns the list of package-versions that have to be merged to
50319 - apply this GLSA properly. The versions are as low as possible
50320 + apply this GLSA properly. The versions are as low as possible
50321 while avoiding downgrades (see L{getMinUpgrade}).
50322 -
50323 +
50324 @type least_change: Boolean
50325 @param least_change: True if the smallest possible upgrade should be selected,
50326 False for an emerge-like algorithm
50327 @rtype: List of Strings
50328 @return: list of package-versions that have to be merged
50329 """
50330 - rValue = []
50331 - for pkg in self.packages:
50332 + return list(set(update for (vuln, update) in self.getAffectionTable(least_change) if update))
50333 +
50334 + def getAffectionTable(self, least_change=True):
50335 + """
50336 +		Builds and returns a list that pairs each installed atom
50337 +		affected by this GLSA with the atom that is its minimal
50338 +		(or emerge-like) upgrade, as computed by L{getMinUpgrade}.
50339 + """
50340 + systemAffection = []
50341 + for pkg in self.packages.keys():
50342 for path in self.packages[pkg]:
50343 - update = getMinUpgrade(path["vul_atoms"], path["unaff_atoms"], \
50344 + update = getMinUpgrade(path["vul_atoms"], path["unaff_atoms"],
50345 self.portdbapi, self.vardbapi, minimize=least_change)
50346 if update:
50347 - rValue.append(update)
50348 - return rValue
50349 + systemAffection.extend(update)
50350 + return systemAffection
50351
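With the new getAffectionTable(), getMergeList() above reduces to taking the unique, non-empty upgrade targets out of the (vulnerable, update) pairs. In isolation, with invented sample data:

    # Deriving the merge list from (vulnerable, update) pairs; sample data is invented.
    def merge_list(affection_table):
        # Keep each upgrade target once, dropping entries with no available upgrade.
        return sorted(set(update for (vuln, update) in affection_table if update))

    table = [("cat/pkg-1.0", "cat/pkg-1.2"),
             ("cat/pkg-1.1", "cat/pkg-1.2"),
             ("cat/other-2.0", "")]
    print(merge_list(table))   # -> ['cat/pkg-1.2']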
50352 diff --git a/pym/portage/localization.py b/pym/portage/localization.py
50353 index d16c4b1..b54835a 100644
50354 --- a/pym/portage/localization.py
50355 +++ b/pym/portage/localization.py
50356 @@ -1,12 +1,18 @@
50357 # localization.py -- Code to manage/help portage localization.
50358 -# Copyright 2004 Gentoo Foundation
50359 +# Copyright 2004-2014 Gentoo Foundation
50360 # Distributed under the terms of the GNU General Public License v2
50361
50362 +from portage import _unicode_decode
50363
50364 # We define this to make the transition easier for us.
50365 def _(mystr):
50366 - return mystr
50367 -
50368 + """
50369 + Always returns unicode, regardless of the input type. This is
50370 + helpful for avoiding UnicodeDecodeError from __str__() with
50371 + Python 2, by ensuring that string format operations invoke
50372 + __unicode__() instead of __str__().
50373 + """
50374 + return _unicode_decode(mystr)
50375
50376 def localization_example():
50377 # Dict references allow translators to rearrange word order.
50378 @@ -15,6 +21,7 @@ def localization_example():
50379
50380 a_value = "value.of.a"
50381 b_value = 123
50382 - c_value = [1,2,3,4]
50383 - print(_("A: %(a)s -- B: %(b)s -- C: %(c)s") % {"a":a_value,"b":b_value,"c":c_value})
50384 + c_value = [1, 2, 3, 4]
50385 + print(_("A: %(a)s -- B: %(b)s -- C: %(c)s") %
50386 + {"a": a_value, "b": b_value, "c": c_value})
50387
50388
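The localization change above makes _() always return text, so that "%s" formatting under Python 2 goes through __unicode__() instead of __str__(). A standalone equivalent of that decode step; the utf-8 default is an assumption for the example:

    # Standalone version of the "always return text" helper.
    def _(mystr, encoding="utf-8"):
        # Decode byte strings; pass real text through unchanged.
        if isinstance(mystr, bytes):
            return mystr.decode(encoding, "replace")
        return mystr

    print(_(b"A: %(a)s") % {"a": "value.of.a"})   # -> A: value.of.a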
50389 diff --git a/pym/portage/locks.py b/pym/portage/locks.py
50390 index 59fbc6e..0789f89 100644
50391 --- a/pym/portage/locks.py
50392 +++ b/pym/portage/locks.py
50393 @@ -1,5 +1,5 @@
50394 # portage: Lock management code
50395 -# Copyright 2004-2012 Gentoo Foundation
50396 +# Copyright 2004-2014 Gentoo Foundation
50397 # Distributed under the terms of the GNU General Public License v2
50398
50399 __all__ = ["lockdir", "unlockdir", "lockfile", "unlockfile", \
50400 @@ -17,11 +17,11 @@ import portage
50401 from portage import os, _encodings, _unicode_decode
50402 from portage.exception import DirectoryNotFound, FileNotFound, \
50403 InvalidData, TryAgain, OperationNotPermitted, PermissionDenied
50404 -from portage.data import portage_gid
50405 from portage.util import writemsg
50406 from portage.localization import _
50407
50408 if sys.hexversion >= 0x3000000:
50409 + # pylint: disable=W0622
50410 basestring = str
50411
50412 HARDLINK_FD = -2
50413 @@ -64,6 +64,9 @@ def lockfile(mypath, wantnewlockfile=0, unlinkfile=0,
50414 if not mypath:
50415 raise InvalidData(_("Empty path given"))
50416
50417 + # Since Python 3.4, chown requires int type (no proxies).
50418 + portage_gid = int(portage.data.portage_gid)
50419 +
50420 # Support for file object or integer file descriptor parameters is
50421 # deprecated due to ambiguity in whether or not it's safe to close
50422 # the file descriptor, making it prone to "Bad file descriptor" errors
50423 @@ -148,7 +151,7 @@ def lockfile(mypath, wantnewlockfile=0, unlinkfile=0,
50424 except IOError as e:
50425 if not hasattr(e, "errno"):
50426 raise
50427 - if e.errno in (errno.EACCES, errno.EAGAIN):
50428 + if e.errno in (errno.EACCES, errno.EAGAIN, errno.ENOLCK):
50429 # resource temp unavailable; eg, someone beat us to the lock.
50430 if flags & os.O_NONBLOCK:
50431 os.close(myfd)
50432 @@ -163,19 +166,43 @@ def lockfile(mypath, wantnewlockfile=0, unlinkfile=0,
50433 if isinstance(mypath, int):
50434 waiting_msg = _("waiting for lock on fd %i") % myfd
50435 else:
50436 - waiting_msg = _("waiting for lock on %s\n") % lockfilename
50437 + waiting_msg = _("waiting for lock on %s") % lockfilename
50438 if out is not None:
50439 out.ebegin(waiting_msg)
50440 # try for the exclusive lock now.
50441 - try:
50442 - locking_method(myfd, fcntl.LOCK_EX)
50443 - except EnvironmentError as e:
50444 - if out is not None:
50445 - out.eend(1, str(e))
50446 - raise
50447 + enolock_msg_shown = False
50448 + while True:
50449 + try:
50450 + locking_method(myfd, fcntl.LOCK_EX)
50451 + except EnvironmentError as e:
50452 + if e.errno == errno.ENOLCK:
50453 + # This is known to occur on Solaris NFS (see
50454 + # bug #462694). Assume that the error is due
50455 + # to temporary exhaustion of record locks,
50456 + # and loop until one becomes available.
50457 + if not enolock_msg_shown:
50458 + enolock_msg_shown = True
50459 + if isinstance(mypath, int):
50460 + context_desc = _("Error while waiting "
50461 + "to lock fd %i") % myfd
50462 + else:
50463 + context_desc = _("Error while waiting "
50464 + "to lock '%s'") % lockfilename
50465 + writemsg("\n!!! %s: %s\n" % (context_desc, e),
50466 + noiselevel=-1)
50467 +
50468 + time.sleep(_HARDLINK_POLL_LATENCY)
50469 + continue
50470 +
50471 + if out is not None:
50472 + out.eend(1, str(e))
50473 + raise
50474 + else:
50475 + break
50476 +
50477 if out is not None:
50478 out.eend(os.EX_OK)
50479 - elif e.errno in (errno.ENOSYS, errno.ENOLCK):
50480 + elif e.errno in (errno.ENOSYS,):
50481 # We're not allowed to lock on this FS.
50482 if not isinstance(lockfilename, int):
50483 # If a file object was passed in, it's not safe
50484 @@ -207,10 +234,21 @@ def lockfile(mypath, wantnewlockfile=0, unlinkfile=0,
50485 waiting_msg=waiting_msg, flags=flags)
50486
50487 if myfd != HARDLINK_FD:
50488 +
50489 + # FD_CLOEXEC is enabled by default in Python >=3.4.
50490 + if sys.hexversion < 0x3040000:
50491 + try:
50492 + fcntl.FD_CLOEXEC
50493 + except AttributeError:
50494 + pass
50495 + else:
50496 + fcntl.fcntl(myfd, fcntl.F_SETFD,
50497 + fcntl.fcntl(myfd, fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
50498 +
50499 _open_fds.add(myfd)
50500
50501 - writemsg(str((lockfilename,myfd,unlinkfile))+"\n",1)
50502 - return (lockfilename,myfd,unlinkfile,locking_method)
50503 + writemsg(str((lockfilename, myfd, unlinkfile)) + "\n", 1)
50504 + return (lockfilename, myfd, unlinkfile, locking_method)
50505
50506 def _fstat_nlink(fd):
50507 """
50508 @@ -232,10 +270,10 @@ def unlockfile(mytuple):
50509
50510 #XXX: Compatability hack.
50511 if len(mytuple) == 3:
50512 - lockfilename,myfd,unlinkfile = mytuple
50513 + lockfilename, myfd, unlinkfile = mytuple
50514 locking_method = fcntl.flock
50515 elif len(mytuple) == 4:
50516 - lockfilename,myfd,unlinkfile,locking_method = mytuple
50517 + lockfilename, myfd, unlinkfile, locking_method = mytuple
50518 else:
50519 raise InvalidData
50520
50521 @@ -246,7 +284,7 @@ def unlockfile(mytuple):
50522 # myfd may be None here due to myfd = mypath in lockfile()
50523 if isinstance(lockfilename, basestring) and \
50524 not os.path.exists(lockfilename):
50525 - writemsg(_("lockfile does not exist '%s'\n") % lockfilename,1)
50526 + writemsg(_("lockfile does not exist '%s'\n") % lockfilename, 1)
50527 if myfd is not None:
50528 os.close(myfd)
50529 _open_fds.remove(myfd)
50530 @@ -254,9 +292,9 @@ def unlockfile(mytuple):
50531
50532 try:
50533 if myfd is None:
50534 - myfd = os.open(lockfilename, os.O_WRONLY,0o660)
50535 + myfd = os.open(lockfilename, os.O_WRONLY, 0o660)
50536 unlinkfile = 1
50537 - locking_method(myfd,fcntl.LOCK_UN)
50538 + locking_method(myfd, fcntl.LOCK_UN)
50539 except OSError:
50540 if isinstance(lockfilename, basestring):
50541 os.close(myfd)
50542 @@ -271,14 +309,14 @@ def unlockfile(mytuple):
50543 # commenting until it is proved necessary.
50544 #time.sleep(0.0001)
50545 if unlinkfile:
50546 - locking_method(myfd,fcntl.LOCK_EX|fcntl.LOCK_NB)
50547 + locking_method(myfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
50548 # We won the lock, so there isn't competition for it.
50549 # We can safely delete the file.
50550 writemsg(_("Got the lockfile...\n"), 1)
50551 if _fstat_nlink(myfd) == 1:
50552 os.unlink(lockfilename)
50553 writemsg(_("Unlinked lockfile...\n"), 1)
50554 - locking_method(myfd,fcntl.LOCK_UN)
50555 + locking_method(myfd, fcntl.LOCK_UN)
50556 else:
50557 writemsg(_("lockfile does not exist '%s'\n") % lockfilename, 1)
50558 os.close(myfd)
50559 @@ -288,7 +326,7 @@ def unlockfile(mytuple):
50560 raise
50561 except Exception as e:
50562 writemsg(_("Failed to get lock... someone took it.\n"), 1)
50563 - writemsg(str(e)+"\n",1)
50564 + writemsg(str(e) + "\n", 1)
50565
50566 # why test lockfilename? because we may have been handed an
50567 # fd originally, and the caller might not like having their
50568 @@ -300,14 +338,12 @@ def unlockfile(mytuple):
50569 return True
50570
50571
50572 -
50573 -
50574 def hardlock_name(path):
50575 base, tail = os.path.split(path)
50576 return os.path.join(base, ".%s.hardlock-%s-%s" %
50577 (tail, os.uname()[1], os.getpid()))
50578
50579 -def hardlink_is_mine(link,lock):
50580 +def hardlink_is_mine(link, lock):
50581 try:
50582 lock_st = os.stat(lock)
50583 if lock_st.st_nlink == 2:
50584 @@ -339,6 +375,9 @@ def hardlink_lockfile(lockfilename, max_wait=DeprecationWarning,
50585 preexisting = os.path.exists(lockfilename)
50586 myhardlock = hardlock_name(lockfilename)
50587
50588 + # Since Python 3.4, chown requires int type (no proxies).
50589 + portage_gid = int(portage.data.portage_gid)
50590 +
50591 # myhardlock must not exist prior to our link() call, and we can
50592 # safely unlink it since its file name is unique to our PID
50593 try:
50594 @@ -456,7 +495,6 @@ def unhardlink_lockfile(lockfilename, unlinkfile=True):
50595 pass
50596
50597 def hardlock_cleanup(path, remove_all_locks=False):
50598 - mypid = str(os.getpid())
50599 myhost = os.uname()[1]
50600 mydl = os.listdir(path)
50601
50602 @@ -465,7 +503,7 @@ def hardlock_cleanup(path, remove_all_locks=False):
50603
50604 mylist = {}
50605 for x in mydl:
50606 - if os.path.isfile(path+"/"+x):
50607 + if os.path.isfile(path + "/" + x):
50608 parts = x.split(".hardlock-")
50609 if len(parts) == 2:
50610 filename = parts[0][1:]
50611 @@ -482,17 +520,17 @@ def hardlock_cleanup(path, remove_all_locks=False):
50612 mycount += 1
50613
50614
50615 - results.append(_("Found %(count)s locks") % {"count":mycount})
50616 + results.append(_("Found %(count)s locks") % {"count": mycount})
50617
50618 for x in mylist:
50619 if myhost in mylist[x] or remove_all_locks:
50620 - mylockname = hardlock_name(path+"/"+x)
50621 - if hardlink_is_mine(mylockname, path+"/"+x) or \
50622 - not os.path.exists(path+"/"+x) or \
50623 + mylockname = hardlock_name(path + "/" + x)
50624 + if hardlink_is_mine(mylockname, path + "/" + x) or \
50625 + not os.path.exists(path + "/" + x) or \
50626 remove_all_locks:
50627 for y in mylist[x]:
50628 for z in mylist[x][y]:
50629 - filename = path+"/."+x+".hardlock-"+y+"-"+z
50630 + filename = path + "/." + x + ".hardlock-" + y + "-" + z
50631 if filename == mylockname:
50632 continue
50633 try:
50634 @@ -502,8 +540,8 @@ def hardlock_cleanup(path, remove_all_locks=False):
50635 except OSError:
50636 pass
50637 try:
50638 - os.unlink(path+"/"+x)
50639 - results.append(_("Unlinked: ") + path+"/"+x)
50640 + os.unlink(path + "/" + x)
50641 + results.append(_("Unlinked: ") + path + "/" + x)
50642 os.unlink(mylockname)
50643 results.append(_("Unlinked: ") + mylockname)
50644 except OSError:
50645
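Two of the locks.py changes above are easy to show on their own: marking lock descriptors close-on-exec on Python < 3.4, and retrying the exclusive lock while the kernel reports ENOLCK (temporary record-lock exhaustion, seen on Solaris NFS per bug #462694). A POSIX-only sketch, with an arbitrary poll interval:

    # Sketch of the FD_CLOEXEC and ENOLCK-retry behaviour added above (POSIX only).
    import errno
    import fcntl
    import sys
    import time

    def set_cloexec(fd):
        # Python >= 3.4 creates descriptors close-on-exec by default; older versions need this.
        if sys.hexversion < 0x3040000 and hasattr(fcntl, "FD_CLOEXEC"):
            flags = fcntl.fcntl(fd, fcntl.F_GETFD)
            fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC)

    def lock_exclusive(fd, poll_interval=3):
        # Keep retrying while record locks are temporarily exhausted.
        while True:
            try:
                fcntl.lockf(fd, fcntl.LOCK_EX)
                return
            except EnvironmentError as e:
                if e.errno != errno.ENOLCK:
                    raise
                time.sleep(poll_interval)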
50646 diff --git a/pym/portage/mail.py b/pym/portage/mail.py
50647 index 3fcadd2..723da04 100644
50648 --- a/pym/portage/mail.py
50649 +++ b/pym/portage/mail.py
50650 @@ -1,4 +1,4 @@
50651 -# Copyright 1998-2011 Gentoo Foundation
50652 +# Copyright 1998-2014 Gentoo Foundation
50653 # Distributed under the terms of the GNU General Public License v2
50654
50655 # Since python ebuilds remove the 'email' module when USE=build
50656 @@ -21,6 +21,7 @@ from portage.localization import _
50657 import portage
50658
50659 if sys.hexversion >= 0x3000000:
50660 + # pylint: disable=W0622
50661 basestring = str
50662
50663 def _force_ascii_if_necessary(s):
50664 @@ -117,13 +118,13 @@ def send_mail(mysettings, message):
50665 if "@" in mymailuri:
50666 myauthdata, myconndata = mymailuri.rsplit("@", 1)
50667 try:
50668 - mymailuser,mymailpasswd = myauthdata.split(":")
50669 + mymailuser, mymailpasswd = myauthdata.split(":")
50670 except ValueError:
50671 print(_("!!! invalid SMTP AUTH configuration, trying unauthenticated ..."))
50672 else:
50673 myconndata = mymailuri
50674 if ":" in myconndata:
50675 - mymailhost,mymailport = myconndata.split(":")
50676 + mymailhost, mymailport = myconndata.split(":")
50677 else:
50678 mymailhost = myconndata
50679 else:
50680
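The mail.py hunk only adjusts spacing, but the code it touches splits the configured mail URI into an optional user:password part and a host[:port] part. A standalone parse of that layout; the sample URI is invented:

    # Standalone parse of a "user:pass@host:port" mail URI; the sample value is invented.
    def parse_mailuri(mymailuri):
        mymailuser = mymailpasswd = mymailport = None
        if "@" in mymailuri:
            myauthdata, myconndata = mymailuri.rsplit("@", 1)
            try:
                mymailuser, mymailpasswd = myauthdata.split(":")
            except ValueError:
                pass  # the real code warns and falls back to unauthenticated SMTP
        else:
            myconndata = mymailuri
        if ":" in myconndata:
            mymailhost, mymailport = myconndata.split(":")
        else:
            mymailhost = myconndata
        return mymailuser, mymailpasswd, mymailhost, mymailport

    print(parse_mailuri("user:secret@smtp.example.org:587"))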
50681 diff --git a/pym/portage/manifest.py b/pym/portage/manifest.py
50682 index a04b717..3936b9a 100644
50683 --- a/pym/portage/manifest.py
50684 +++ b/pym/portage/manifest.py
50685 @@ -1,15 +1,19 @@
50686 -# Copyright 1999-2012 Gentoo Foundation
50687 +# Copyright 1999-2014 Gentoo Foundation
50688 # Distributed under the terms of the GNU General Public License v2
50689
50690 +from __future__ import unicode_literals
50691 +
50692 import errno
50693 import io
50694 import re
50695 +import sys
50696 import warnings
50697
50698 import portage
50699 portage.proxy.lazyimport.lazyimport(globals(),
50700 'portage.checksum:hashfunc_map,perform_multiple_checksums,' + \
50701 - 'verify_all,_filter_unaccelarated_hashes',
50702 + 'verify_all,_apply_hash_filter,_filter_unaccelarated_hashes',
50703 + 'portage.repository.config:_find_invalid_path_char',
50704 'portage.util:write_atomic',
50705 )
50706
50707 @@ -24,8 +28,16 @@ from portage.const import (MANIFEST1_HASH_FUNCTIONS, MANIFEST2_HASH_DEFAULTS,
50708 MANIFEST2_HASH_FUNCTIONS, MANIFEST2_IDENTIFIERS, MANIFEST2_REQUIRED_HASH)
50709 from portage.localization import _
50710
50711 -# Characters prohibited by repoman's file.name check.
50712 -_prohibited_filename_chars_re = re.compile(r'[^a-zA-Z0-9._\-+:]')
50713 +_manifest_re = re.compile(
50714 + r'^(' + '|'.join(MANIFEST2_IDENTIFIERS) + r') (.*)( \d+( \S+ \S+)+)$',
50715 + re.UNICODE)
50716 +
50717 +if sys.hexversion >= 0x3000000:
50718 + # pylint: disable=W0622
50719 + _unicode = str
50720 + basestring = str
50721 +else:
50722 + _unicode = unicode
50723
50724 class FileNotInManifestException(PortageException):
50725 pass
50726 @@ -38,15 +50,10 @@ def manifest2AuxfileFilter(filename):
50727 for x in mysplit:
50728 if x[:1] == '.':
50729 return False
50730 - if _prohibited_filename_chars_re.search(x) is not None:
50731 - return False
50732 return not filename[:7] == 'digest-'
50733
50734 def manifest2MiscfileFilter(filename):
50735 - filename = filename.strip(os.sep)
50736 - if _prohibited_filename_chars_re.search(filename) is not None:
50737 - return False
50738 - return not (filename in ["CVS", ".svn", "files", "Manifest"] or filename.endswith(".ebuild"))
50739 + return not (filename == "Manifest" or filename.endswith(".ebuild"))
50740
50741 def guessManifestFileType(filename):
50742 """ Perform a best effort guess of which type the given filename is, avoid using this if possible """
50743 @@ -67,18 +74,17 @@ def guessThinManifestFileType(filename):
50744 return None
50745 return "DIST"
50746
50747 -def parseManifest2(mysplit):
50748 +def parseManifest2(line):
50749 + if not isinstance(line, basestring):
50750 + line = ' '.join(line)
50751 myentry = None
50752 - if len(mysplit) > 4 and mysplit[0] in MANIFEST2_IDENTIFIERS:
50753 - mytype = mysplit[0]
50754 - myname = mysplit[1]
50755 - try:
50756 - mysize = int(mysplit[2])
50757 - except ValueError:
50758 - return None
50759 - myhashes = dict(zip(mysplit[3::2], mysplit[4::2]))
50760 - myhashes["size"] = mysize
50761 - myentry = Manifest2Entry(type=mytype, name=myname, hashes=myhashes)
50762 + match = _manifest_re.match(line)
50763 + if match is not None:
50764 + tokens = match.group(3).split()
50765 + hashes = dict(zip(tokens[1::2], tokens[2::2]))
50766 + hashes["size"] = int(tokens[0])
50767 + myentry = Manifest2Entry(type=match.group(1),
50768 + name=match.group(2), hashes=hashes)
50769 return myentry
50770
50771 class ManifestEntry(object):
50772 @@ -108,11 +114,20 @@ class Manifest2Entry(ManifestEntry):
50773 def __ne__(self, other):
50774 return not self.__eq__(other)
50775
50776 + if sys.hexversion < 0x3000000:
50777 +
50778 + __unicode__ = __str__
50779 +
50780 + def __str__(self):
50781 + return _unicode_encode(self.__unicode__(),
50782 + encoding=_encodings['repo.content'], errors='strict')
50783 +
50784 class Manifest(object):
50785 parsers = (parseManifest2,)
50786 - def __init__(self, pkgdir, distdir, fetchlist_dict=None,
50787 + def __init__(self, pkgdir, distdir=None, fetchlist_dict=None,
50788 manifest1_compat=DeprecationWarning, from_scratch=False, thin=False,
50789 - allow_missing=False, allow_create=True, hashes=None):
50790 + allow_missing=False, allow_create=True, hashes=None,
50791 + find_invalid_path_char=None):
50792 """ Create new Manifest instance for package in pkgdir.
50793 Do not parse Manifest file if from_scratch == True (only for internal use)
50794 The fetchlist_dict parameter is required only for generation of
50795 @@ -125,6 +140,9 @@ class Manifest(object):
50796 "portage.manifest.Manifest constructor is deprecated.",
50797 DeprecationWarning, stacklevel=2)
50798
50799 + if find_invalid_path_char is None:
50800 + find_invalid_path_char = _find_invalid_path_char
50801 + self._find_invalid_path_char = find_invalid_path_char
50802 self.pkgdir = _unicode_decode(pkgdir).rstrip(os.sep) + os.sep
50803 self.fhashdict = {}
50804 self.hashes = set()
50805 @@ -173,13 +191,12 @@ class Manifest(object):
50806 """Parse a manifest. If myhashdict is given then data will be added too it.
50807 Otherwise, a new dict will be created and returned."""
50808 try:
50809 - fd = io.open(_unicode_encode(file_path,
50810 + with io.open(_unicode_encode(file_path,
50811 encoding=_encodings['fs'], errors='strict'), mode='r',
50812 - encoding=_encodings['repo.content'], errors='replace')
50813 - if myhashdict is None:
50814 - myhashdict = {}
50815 - self._parseDigests(fd, myhashdict=myhashdict, **kwargs)
50816 - fd.close()
50817 + encoding=_encodings['repo.content'], errors='replace') as f:
50818 + if myhashdict is None:
50819 + myhashdict = {}
50820 + self._parseDigests(f, myhashdict=myhashdict, **kwargs)
50821 return myhashdict
50822 except (OSError, IOError) as e:
50823 if e.errno == errno.ENOENT:
50824 @@ -198,9 +215,8 @@ class Manifest(object):
50825 """Parse manifest lines and return a list of manifest entries."""
50826 for myline in mylines:
50827 myentry = None
50828 - mysplit = myline.split()
50829 for parser in self.parsers:
50830 - myentry = parser(mysplit)
50831 + myentry = parser(myline)
50832 if myentry is not None:
50833 yield myentry
50834 break # go to the next line
50835 @@ -255,9 +271,12 @@ class Manifest(object):
50836 (MANIFEST2_REQUIRED_HASH, t, f))
50837
50838 def write(self, sign=False, force=False):
50839 - """ Write Manifest instance to disk, optionally signing it """
50840 + """ Write Manifest instance to disk, optionally signing it. Returns
50841 + True if the Manifest is actually written, and False if the write
50842 + is skipped due to existing Manifest being identical."""
50843 + rval = False
50844 if not self.allow_create:
50845 - return
50846 + return rval
50847 self.checkIntegrity()
50848 try:
50849 myentries = list(self._createManifestEntries())
50850 @@ -289,7 +308,8 @@ class Manifest(object):
50851 # thin manifests with no DIST entries, myentries is
50852 # non-empty for all currently known use cases.
50853 write_atomic(self.getFullname(), "".join("%s\n" %
50854 - str(myentry) for myentry in myentries))
50855 + _unicode(myentry) for myentry in myentries))
50856 + rval = True
50857 else:
50858 # With thin manifest, there's no need to have
50859 # a Manifest file if there are no DIST entries.
50860 @@ -298,6 +318,7 @@ class Manifest(object):
50861 except OSError as e:
50862 if e.errno != errno.ENOENT:
50863 raise
50864 + rval = True
50865
50866 if sign:
50867 self.sign()
50868 @@ -305,6 +326,7 @@ class Manifest(object):
50869 if e.errno == errno.EACCES:
50870 raise PermissionDenied(str(e))
50871 raise
50872 + return rval
50873
50874 def sign(self):
50875 """ Sign the Manifest """
50876 @@ -363,10 +385,11 @@ class Manifest(object):
50877 distfilehashes = self.fhashdict["DIST"]
50878 else:
50879 distfilehashes = {}
50880 - self.__init__(self.pkgdir, self.distdir,
50881 + self.__init__(self.pkgdir, distdir=self.distdir,
50882 fetchlist_dict=self.fetchlist_dict, from_scratch=True,
50883 thin=self.thin, allow_missing=self.allow_missing,
50884 - allow_create=self.allow_create, hashes=self.hashes)
50885 + allow_create=self.allow_create, hashes=self.hashes,
50886 + find_invalid_path_char=self._find_invalid_path_char)
50887 pn = os.path.basename(self.pkgdir.rstrip(os.path.sep))
50888 cat = self._pkgdir_category()
50889
50890 @@ -461,7 +484,8 @@ class Manifest(object):
50891 if pf is not None:
50892 mytype = "EBUILD"
50893 cpvlist.append(pf)
50894 - elif manifest2MiscfileFilter(f):
50895 + elif self._find_invalid_path_char(f) == -1 and \
50896 + manifest2MiscfileFilter(f):
50897 mytype = "MISC"
50898 else:
50899 continue
50900 @@ -480,7 +504,8 @@ class Manifest(object):
50901 full_path = os.path.join(parentdir, f)
50902 recursive_files.append(full_path[cut_len:])
50903 for f in recursive_files:
50904 - if not manifest2AuxfileFilter(f):
50905 + if self._find_invalid_path_char(f) != -1 or \
50906 + not manifest2AuxfileFilter(f):
50907 continue
50908 self.fhashdict["AUX"][f] = perform_multiple_checksums(
50909 os.path.join(self.pkgdir, "files", f.lstrip(os.sep)), self.hashes)
50910 @@ -502,14 +527,17 @@ class Manifest(object):
50911 for t in MANIFEST2_IDENTIFIERS:
50912 self.checkTypeHashes(t, ignoreMissingFiles=ignoreMissingFiles)
50913
50914 - def checkTypeHashes(self, idtype, ignoreMissingFiles=False):
50915 + def checkTypeHashes(self, idtype, ignoreMissingFiles=False, hash_filter=None):
50916 for f in self.fhashdict[idtype]:
50917 - self.checkFileHashes(idtype, f, ignoreMissing=ignoreMissingFiles)
50918 + self.checkFileHashes(idtype, f, ignoreMissing=ignoreMissingFiles,
50919 + hash_filter=hash_filter)
50920
50921 - def checkFileHashes(self, ftype, fname, ignoreMissing=False):
50922 + def checkFileHashes(self, ftype, fname, ignoreMissing=False, hash_filter=None):
50923 + digests = _filter_unaccelarated_hashes(self.fhashdict[ftype][fname])
50924 + if hash_filter is not None:
50925 + digests = _apply_hash_filter(digests, hash_filter)
50926 try:
50927 - ok, reason = verify_all(self._getAbsname(ftype, fname),
50928 - _filter_unaccelarated_hashes(self.fhashdict[ftype][fname]))
50929 + ok, reason = verify_all(self._getAbsname(ftype, fname), digests)
50930 if not ok:
50931 raise DigestException(tuple([self._getAbsname(ftype, fname)]+list(reason)))
50932 return ok, reason
50933
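parseManifest2() above now matches whole lines with one regular expression instead of splitting on whitespace, which keeps distfile names containing spaces intact. A standalone version of the same pattern; the identifier tuple mirrors the diff and the entry dict is simplified:

    # Standalone version of the regex-based Manifest line parser shown above.
    import re

    MANIFEST2_IDENTIFIERS = ("AUX", "MISC", "EBUILD", "DIST")
    _manifest_re = re.compile(
        r'^(' + '|'.join(MANIFEST2_IDENTIFIERS) + r') (.*)( \d+( \S+ \S+)+)$',
        re.UNICODE)

    def parse_manifest_line(line):
        match = _manifest_re.match(line)
        if match is None:
            return None
        tokens = match.group(3).split()
        hashes = dict(zip(tokens[1::2], tokens[2::2]))
        hashes["size"] = int(tokens[0])
        return {"type": match.group(1), "name": match.group(2), "hashes": hashes}

    print(parse_manifest_line(
        "DIST file with spaces-1.0.tar.gz 12 SHA256 abc SHA512 def"))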
50934 diff --git a/pym/portage/news.py b/pym/portage/news.py
50935 index bbd9325..408fb5c 100644
50936 --- a/pym/portage/news.py
50937 +++ b/pym/portage/news.py
50938 @@ -1,8 +1,8 @@
50939 # portage: news management code
50940 -# Copyright 2006-2011 Gentoo Foundation
50941 +# Copyright 2006-2013 Gentoo Foundation
50942 # Distributed under the terms of the GNU General Public License v2
50943
50944 -from __future__ import print_function
50945 +from __future__ import print_function, unicode_literals
50946
50947 __all__ = ["NewsManager", "NewsItem", "DisplayRestriction",
50948 "DisplayProfileRestriction", "DisplayKeywordRestriction",
50949 @@ -13,6 +13,7 @@ import io
50950 import logging
50951 import os as _os
50952 import re
50953 +import portage
50954 from portage import OrderedDict
50955 from portage import os
50956 from portage import _encodings
50957 @@ -241,7 +242,8 @@ class NewsItem(object):
50958 for values in self.restrictions.values():
50959 any_match = False
50960 for restriction in values:
50961 - if restriction.checkRestriction(**kwargs):
50962 + if restriction.checkRestriction(
50963 + **portage._native_kwargs(kwargs)):
50964 any_match = True
50965 if not any_match:
50966 all_match = False
50967 @@ -388,7 +390,7 @@ def count_unread_news(portdb, vardb, repos=None, update=True):
50968 # NOTE: The NewsManager typically handles permission errors by
50969 # returning silently, so PermissionDenied won't necessarily be
50970 # raised even if we do trigger a permission error above.
50971 - msg = _unicode_decode("Permission denied: '%s'\n") % (e,)
50972 + msg = "Permission denied: '%s'\n" % (e,)
50973 if msg in permission_msgs:
50974 pass
50975 else:
50976
50977 diff --git a/pym/portage/output.py b/pym/portage/output.py
50978 index e44375e..cd660ac 100644
50979 --- a/pym/portage/output.py
50980 +++ b/pym/portage/output.py
50981 @@ -1,4 +1,4 @@
50982 -# Copyright 1998-2011 Gentoo Foundation
50983 +# Copyright 1998-2014 Gentoo Foundation
50984 # Distributed under the terms of the GNU General Public License v2
50985
50986 __docformat__ = "epytext"
50987 @@ -24,8 +24,8 @@ from portage.exception import CommandNotFound, FileNotFound, \
50988 ParseError, PermissionDenied, PortageException
50989 from portage.localization import _
50990
50991 -havecolor=1
50992 -dotitles=1
50993 +havecolor = 1
50994 +dotitles = 1
50995
50996 _styles = {}
50997 """Maps style class to tuple of attribute names."""
50998 @@ -164,15 +164,12 @@ def _parse_color_map(config_root='/', onerror=None):
50999 token = token[1:-1]
51000 return token
51001
51002 - f = None
51003 try:
51004 - f = io.open(_unicode_encode(myfile,
51005 + with io.open(_unicode_encode(myfile,
51006 encoding=_encodings['fs'], errors='strict'),
51007 - mode='r', encoding=_encodings['content'], errors='replace')
51008 - lineno = 0
51009 - for line in f:
51010 - lineno += 1
51011 -
51012 + mode='r', encoding=_encodings['content'], errors='replace') as f:
51013 + lines = f.readlines()
51014 + for lineno, line in enumerate(lines):
51015 commenter_pos = line.find("#")
51016 line = line[:commenter_pos].strip()
51017
51018 @@ -230,9 +227,6 @@ def _parse_color_map(config_root='/', onerror=None):
51019 elif e.errno == errno.EACCES:
51020 raise PermissionDenied(myfile)
51021 raise
51022 - finally:
51023 - if f is not None:
51024 - f.close()
51025
51026 def nc_len(mystr):
51027 tmp = re.sub(esc_seq + "^m]+m", "", mystr);
51028 @@ -245,7 +239,7 @@ _max_xtermTitle_len = 253
51029 def xtermTitle(mystr, raw=False):
51030 global _disable_xtermTitle
51031 if _disable_xtermTitle is None:
51032 - _disable_xtermTitle = not (sys.stderr.isatty() and \
51033 + _disable_xtermTitle = not (sys.__stderr__.isatty() and \
51034 'TERM' in os.environ and \
51035 _legal_terms_re.match(os.environ['TERM']) is not None)
51036
51037 @@ -278,15 +272,18 @@ def xtermTitleReset():
51038 if dotitles and \
51039 'TERM' in os.environ and \
51040 _legal_terms_re.match(os.environ['TERM']) is not None and \
51041 - sys.stderr.isatty():
51042 + sys.__stderr__.isatty():
51043 from portage.process import find_binary, spawn
51044 shell = os.environ.get("SHELL")
51045 if not shell or not os.access(shell, os.EX_OK):
51046 shell = find_binary("sh")
51047 if shell:
51048 spawn([shell, "-c", prompt_command], env=os.environ,
51049 - fd_pipes={0:sys.stdin.fileno(),1:sys.stderr.fileno(),
51050 - 2:sys.stderr.fileno()})
51051 + fd_pipes={
51052 + 0: portage._get_stdin().fileno(),
51053 + 1: sys.__stderr__.fileno(),
51054 + 2: sys.__stderr__.fileno()
51055 + })
51056 else:
51057 os.system(prompt_command)
51058 return
51059 @@ -302,12 +299,12 @@ def xtermTitleReset():
51060
51061 def notitles():
51062 "turn off title setting"
51063 - dotitles=0
51064 + dotitles = 0
51065
51066 def nocolor():
51067 "turn off colorization"
51068 global havecolor
51069 - havecolor=0
51070 + havecolor = 0
51071
51072 def resetColor():
51073 return codes["reset"]
51074 @@ -344,9 +341,11 @@ def colorize(color_key, text):
51075 else:
51076 return text
51077
51078 -compat_functions_colors = ["bold","white","teal","turquoise","darkteal",
51079 - "fuchsia","purple","blue","darkblue","green","darkgreen","yellow",
51080 - "brown","darkyellow","red","darkred"]
51081 +compat_functions_colors = [
51082 + "bold", "white", "teal", "turquoise", "darkteal",
51083 + "fuchsia", "purple", "blue", "darkblue", "green", "darkgreen", "yellow",
51084 + "brown", "darkyellow", "red", "darkred",
51085 +]
51086
51087 class create_color_func(object):
51088 __slots__ = ("_color_key",)
51089
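output.py above switches the tty checks from sys.stderr to sys.__stderr__, so title handling still behaves when sys.stderr has been replaced for capture or logging. A small sketch of that gate; the TERM regex here is a simplified stand-in for portage's _legal_terms_re:

    # Sketch of the tty/TERM gate used before emitting xterm title escapes.
    import os
    import re
    import sys

    _legal_terms_re = re.compile(r'^(xterm|screen|tmux|rxvt)')

    def titles_supported():
        # sys.__stderr__ is the original stream, unaffected by later reassignment.
        return (sys.__stderr__ is not None
                and sys.__stderr__.isatty()
                and _legal_terms_re.match(os.environ.get('TERM', '')) is not None)

    print(titles_supported())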
51090 diff --git a/pym/portage/package/ebuild/_config/KeywordsManager.py b/pym/portage/package/ebuild/_config/KeywordsManager.py
51091 index 0c613ce..af606f1 100644
51092 --- a/pym/portage/package/ebuild/_config/KeywordsManager.py
51093 +++ b/pym/portage/package/ebuild/_config/KeywordsManager.py
51094 @@ -11,7 +11,7 @@ from portage.dep import ExtendedAtomDict, _repo_separator, _slot_separator
51095 from portage.localization import _
51096 from portage.package.ebuild._config.helper import ordered_by_atom_specificity
51097 from portage.util import grabdict_package, stack_lists, writemsg
51098 -from portage.versions import cpv_getkey, _pkg_str
51099 +from portage.versions import _pkg_str
51100
51101 class KeywordsManager(object):
51102 """Manager class to handle keywords processing and validation"""
51103 @@ -77,7 +77,9 @@ class KeywordsManager(object):
51104
51105
51106 def getKeywords(self, cpv, slot, keywords, repo):
51107 - if not hasattr(cpv, 'slot'):
51108 + try:
51109 + cpv.slot
51110 + except AttributeError:
51111 pkg = _pkg_str(cpv, slot=slot, repo=repo)
51112 else:
51113 pkg = cpv
51114 @@ -91,6 +93,47 @@ class KeywordsManager(object):
51115 keywords.extend(pkg_keywords)
51116 return stack_lists(keywords, incremental=True)
51117
51118 + def isStable(self, pkg, global_accept_keywords, backuped_accept_keywords):
51119 + mygroups = self.getKeywords(pkg, None, pkg._metadata["KEYWORDS"], None)
51120 + pgroups = global_accept_keywords.split()
51121 +
51122 + unmaskgroups = self.getPKeywords(pkg, None, None,
51123 + global_accept_keywords)
51124 + pgroups.extend(unmaskgroups)
51125 +
51126 + egroups = backuped_accept_keywords.split()
51127 +
51128 + if unmaskgroups or egroups:
51129 + pgroups = self._getEgroups(egroups, pgroups)
51130 + else:
51131 + pgroups = set(pgroups)
51132 +
51133 + if self._getMissingKeywords(pkg, pgroups, mygroups):
51134 + return False
51135 +
51136 + if pkg.cpv._settings.local_config:
51137 + # If replacing all keywords with unstable variants would mask the
51138 + # package, then it's considered stable.
51139 + unstable = []
51140 + for kw in mygroups:
51141 + if kw[:1] != "~":
51142 + kw = "~" + kw
51143 + unstable.append(kw)
51144 +
51145 + return bool(self._getMissingKeywords(pkg, pgroups, set(unstable)))
51146 + else:
51147 + # For repoman, if the package has an effective stable keyword that
51148 + # intersects with the effective ACCEPT_KEYWORDS for the current
51149 + # profile, then consider it stable.
51150 + for kw in pgroups:
51151 + if kw[:1] != "~":
51152 + if kw in mygroups or '*' in mygroups:
51153 + return True
51154 + if kw == '*':
51155 + for x in mygroups:
51156 + if x[:1] != "~":
51157 + return True
51158 + return False
51159
51160 def getMissingKeywords(self,
51161 cpv,
51162 @@ -237,7 +280,7 @@ class KeywordsManager(object):
51163 if not mygroups:
51164 # If KEYWORDS is empty then we still have to return something
51165 # in order to distinguish from the case of "none missing".
51166 - mygroups.append("**")
51167 + mygroups = ["**"]
51168 missing = mygroups
51169 return missing
51170
51171 @@ -261,9 +304,11 @@ class KeywordsManager(object):
51172 """
51173
51174 pgroups = global_accept_keywords.split()
51175 - if not hasattr(cpv, 'slot'):
51176 + try:
51177 + cpv.slot
51178 + except AttributeError:
51179 cpv = _pkg_str(cpv, slot=slot, repo=repo)
51180 - cp = cpv_getkey(cpv)
51181 + cp = cpv.cp
51182
51183 unmaskgroups = []
51184 if self._p_accept_keywords:
51185 @@ -288,4 +333,3 @@ class KeywordsManager(object):
51186 for x in pkg_accept_keywords:
51187 unmaskgroups.extend(x)
51188 return unmaskgroups
51189 -
51190
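The new isStable() above decides stability by swapping every keyword for its unstable (~) variant and asking whether that would leave the package masked; if it would, an accepted stable keyword must have been doing the work. A toy version of that idea, with plain sets of keywords as invented sample data in place of portage's keyword managers:

    # Toy version of the "swap keywords for ~variants and re-check masking" idea.
    def missing_keywords(accepted, keywords):
        # Masked when none of the package's keywords is accepted.
        return not (set(keywords) & set(accepted))

    def is_stable(accepted, keywords):
        if missing_keywords(accepted, keywords):
            return False                  # not even visible with its real keywords
        unstable = ["~" + kw.lstrip("~") for kw in keywords]
        # If the all-unstable variant would be masked, a stable keyword made it visible.
        return missing_keywords(accepted, unstable)

    print(is_stable({"amd64"}, ["amd64", "~x86"]))      # True
    print(is_stable({"amd64", "~amd64"}, ["~amd64"]))   # False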
51191 diff --git a/pym/portage/package/ebuild/_config/LocationsManager.py b/pym/portage/package/ebuild/_config/LocationsManager.py
51192 index f7a1177..4427f1d 100644
51193 --- a/pym/portage/package/ebuild/_config/LocationsManager.py
51194 +++ b/pym/portage/package/ebuild/_config/LocationsManager.py
51195 @@ -1,6 +1,8 @@
51196 -# Copyright 2010-2011 Gentoo Foundation
51197 +# Copyright 2010-2014 Gentoo Foundation
51198 # Distributed under the terms of the GNU General Public License v2
51199
51200 +from __future__ import unicode_literals
51201 +
51202 __all__ = (
51203 'LocationsManager',
51204 )
51205 @@ -13,10 +15,12 @@ import portage
51206 from portage import os, eapi_is_supported, _encodings, _unicode_encode
51207 from portage.const import CUSTOM_PROFILE_PATH, GLOBAL_CONFIG_PATH, \
51208 PROFILE_PATH, USER_CONFIG_PATH
51209 +from portage.eapi import eapi_allows_directories_on_profile_level_and_repository_level
51210 from portage.exception import DirectoryNotFound, ParseError
51211 from portage.localization import _
51212 from portage.util import ensure_dirs, grabfile, \
51213 normalize_path, shlex_split, writemsg
51214 +from portage.util._path import exists_raise_eaccess, isdir_raise_eaccess
51215 from portage.repository.config import parse_layout_conf, \
51216 _portage1_profiles_allow_directories
51217
51218 @@ -27,7 +31,7 @@ _PORTAGE1_DIRECTORIES = frozenset([
51219 'use.mask', 'use.force'])
51220
51221 _profile_node = collections.namedtuple('_profile_node',
51222 - 'location portage1_directories')
51223 + 'location portage1_directories user_config')
51224
51225 _allow_parent_colon = frozenset(
51226 ["portage-2"])
51227 @@ -45,9 +49,13 @@ class LocationsManager(object):
51228
51229 if self.eprefix is None:
51230 self.eprefix = portage.const.EPREFIX
51231 + elif self.eprefix:
51232 + self.eprefix = normalize_path(self.eprefix)
51233 + if self.eprefix == os.sep:
51234 + self.eprefix = ""
51235
51236 if self.config_root is None:
51237 - self.config_root = self.eprefix + os.sep
51238 + self.config_root = portage.const.EPREFIX + os.sep
51239
51240 self.config_root = normalize_path(os.path.abspath(
51241 self.config_root)).rstrip(os.path.sep) + os.path.sep
51242 @@ -72,14 +80,26 @@ class LocationsManager(object):
51243 known_repos = tuple(known_repos)
51244
51245 if self.config_profile_path is None:
51246 + deprecated_profile_path = os.path.join(
51247 + self.config_root, 'etc', 'make.profile')
51248 self.config_profile_path = \
51249 os.path.join(self.config_root, PROFILE_PATH)
51250 - if os.path.isdir(self.config_profile_path):
51251 + if isdir_raise_eaccess(self.config_profile_path):
51252 self.profile_path = self.config_profile_path
51253 + if isdir_raise_eaccess(deprecated_profile_path) and not \
51254 + os.path.samefile(self.profile_path,
51255 + deprecated_profile_path):
51256 + # Don't warn if they refer to the same path, since
51257 + # that can be used for backward compatibility with
51258 + # old software.
51259 + writemsg("!!! %s\n" %
51260 + _("Found 2 make.profile dirs: "
51261 + "using '%s', ignoring '%s'") %
51262 + (self.profile_path, deprecated_profile_path),
51263 + noiselevel=-1)
51264 else:
51265 - self.config_profile_path = \
51266 - os.path.join(self.abs_user_config, 'make.profile')
51267 - if os.path.isdir(self.config_profile_path):
51268 + self.config_profile_path = deprecated_profile_path
51269 + if isdir_raise_eaccess(self.config_profile_path):
51270 self.profile_path = self.config_profile_path
51271 else:
51272 self.profile_path = None
51273 @@ -99,9 +119,9 @@ class LocationsManager(object):
51274 self._addProfile(os.path.realpath(self.profile_path),
51275 repositories, known_repos)
51276 except ParseError as e:
51277 - writemsg(_("!!! Unable to parse profile: '%s'\n") % \
51278 - self.profile_path, noiselevel=-1)
51279 - writemsg("!!! ParseError: %s\n" % str(e), noiselevel=-1)
51280 + if not portage._sync_mode:
51281 + writemsg(_("!!! Unable to parse profile: '%s'\n") % self.profile_path, noiselevel=-1)
51282 + writemsg("!!! ParseError: %s\n" % str(e), noiselevel=-1)
51283 self.profiles = []
51284 self.profiles_complex = []
51285
51286 @@ -111,14 +131,15 @@ class LocationsManager(object):
51287 if os.path.exists(custom_prof):
51288 self.user_profile_dir = custom_prof
51289 self.profiles.append(custom_prof)
51290 - self.profiles_complex.append(_profile_node(custom_prof, True))
51291 + self.profiles_complex.append(
51292 + _profile_node(custom_prof, True, True))
51293 del custom_prof
51294
51295 self.profiles = tuple(self.profiles)
51296 self.profiles_complex = tuple(self.profiles_complex)
51297
51298 def _check_var_directory(self, varname, var):
51299 - if not os.path.isdir(var):
51300 + if not isdir_raise_eaccess(var):
51301 writemsg(_("!!! Error: %s='%s' is not a directory. "
51302 "Please correct this.\n") % (varname, var),
51303 noiselevel=-1)
51304 @@ -130,33 +151,9 @@ class LocationsManager(object):
51305 allow_parent_colon = True
51306 repo_loc = None
51307 compat_mode = False
51308 - intersecting_repos = [x for x in known_repos if current_abs_path.startswith(x[0])]
51309 - if intersecting_repos:
51310 - # protect against nested repositories. Insane configuration, but the longest
51311 - # path will be the correct one.
51312 - repo_loc, layout_data = max(intersecting_repos, key=lambda x:len(x[0]))
51313 - allow_directories = any(x in _portage1_profiles_allow_directories
51314 - for x in layout_data['profile-formats'])
51315 - compat_mode = layout_data['profile-formats'] == ('portage-1-compat',)
51316 - allow_parent_colon = any(x in _allow_parent_colon
51317 - for x in layout_data['profile-formats'])
51318
51319 - if compat_mode:
51320 - offenders = _PORTAGE1_DIRECTORIES.intersection(os.listdir(currentPath))
51321 - offenders = sorted(x for x in offenders
51322 - if os.path.isdir(os.path.join(currentPath, x)))
51323 - if offenders:
51324 - warnings.warn(_("Profile '%(profile_path)s' in repository "
51325 - "'%(repo_name)s' is implicitly using 'portage-1' profile format, but "
51326 - "the repository profiles are not marked as that format. This will break "
51327 - "in the future. Please either convert the following paths "
51328 - "to files, or add\nprofile-formats = portage-1\nto the "
51329 - "repositories layout.conf. Files: '%(files)s'\n")
51330 - % dict(profile_path=currentPath, repo_name=repo_loc,
51331 - files=', '.join(offenders)))
51332 -
51333 - parentsFile = os.path.join(currentPath, "parent")
51334 eapi_file = os.path.join(currentPath, "eapi")
51335 + eapi = "0"
51336 f = None
51337 try:
51338 f = io.open(_unicode_encode(eapi_file,
51339 @@ -174,7 +171,38 @@ class LocationsManager(object):
51340 finally:
51341 if f is not None:
51342 f.close()
51343 - if os.path.exists(parentsFile):
51344 +
51345 + intersecting_repos = [x for x in known_repos if current_abs_path.startswith(x[0])]
51346 + if intersecting_repos:
51347 + # protect against nested repositories. Insane configuration, but the longest
51348 + # path will be the correct one.
51349 + repo_loc, layout_data = max(intersecting_repos, key=lambda x:len(x[0]))
51350 + allow_directories = eapi_allows_directories_on_profile_level_and_repository_level(eapi) or \
51351 + any(x in _portage1_profiles_allow_directories for x in layout_data['profile-formats'])
51352 + compat_mode = not eapi_allows_directories_on_profile_level_and_repository_level(eapi) and \
51353 + layout_data['profile-formats'] == ('portage-1-compat',)
51354 + allow_parent_colon = any(x in _allow_parent_colon
51355 + for x in layout_data['profile-formats'])
51356 +
51357 + if compat_mode:
51358 + offenders = _PORTAGE1_DIRECTORIES.intersection(os.listdir(currentPath))
51359 + offenders = sorted(x for x in offenders
51360 + if os.path.isdir(os.path.join(currentPath, x)))
51361 + if offenders:
51362 + warnings.warn(_(
51363 + "\nThe selected profile is implicitly using the 'portage-1' format:\n"
51364 + "\tprofile = %(profile_path)s\n"
51365 + "But this repository is not using that format:\n"
51366 + "\trepo = %(repo_name)s\n"
51367 + "This will break in the future. Please convert these dirs to files:\n"
51368 + "\t%(files)s\n"
51369 + "Or, add this line to the repository's layout.conf:\n"
51370 + "\tprofile-formats = portage-1")
51371 + % dict(profile_path=currentPath, repo_name=repo_loc,
51372 + files='\n\t'.join(offenders)))
51373 +
51374 + parentsFile = os.path.join(currentPath, "parent")
51375 + if exists_raise_eaccess(parentsFile):
51376 parents = grabfile(parentsFile)
51377 if not parents:
51378 raise ParseError(
51379 @@ -196,7 +224,7 @@ class LocationsManager(object):
51380 # of the current repo, so realpath it.
51381 parentPath = os.path.realpath(parentPath)
51382
51383 - if os.path.exists(parentPath):
51384 + if exists_raise_eaccess(parentPath):
51385 self._addProfile(parentPath, repositories, known_repos)
51386 else:
51387 raise ParseError(
51388 @@ -205,7 +233,7 @@ class LocationsManager(object):
51389
51390 self.profiles.append(currentPath)
51391 self.profiles_complex.append(
51392 - _profile_node(currentPath, allow_directories))
51393 + _profile_node(currentPath, allow_directories, False))
51394
51395 def _expand_parent_colon(self, parentsFile, parentPath,
51396 repo_loc, repositories):
51397 @@ -253,29 +281,10 @@ class LocationsManager(object):
51398
51399 self.eroot = self.target_root.rstrip(os.sep) + self.eprefix + os.sep
51400
51401 - # make.globals should not be relative to config_root
51402 - # because it only contains constants. However, if EPREFIX
51403 - # is set then there are two possible scenarios:
51404 - # 1) If $ROOT == "/" then make.globals should be
51405 - # relative to EPREFIX.
51406 - # 2) If $ROOT != "/" then the correct location of
51407 - # make.globals needs to be specified in the constructor
51408 - # parameters, since it's a property of the host system
51409 - # (and the current config represents the target system).
51410 self.global_config_path = GLOBAL_CONFIG_PATH
51411 - if self.eprefix:
51412 - if self.target_root == "/":
51413 - # case (1) above
51414 - self.global_config_path = os.path.join(self.eprefix,
51415 - GLOBAL_CONFIG_PATH.lstrip(os.sep))
51416 - else:
51417 - # case (2) above
51418 - # For now, just assume make.globals is relative
51419 - # to EPREFIX.
51420 - # TODO: Pass in more info to the constructor,
51421 - # so we know the host system configuration.
51422 - self.global_config_path = os.path.join(self.eprefix,
51423 - GLOBAL_CONFIG_PATH.lstrip(os.sep))
51424 + if portage.const.EPREFIX:
51425 + self.global_config_path = os.path.join(portage.const.EPREFIX,
51426 + GLOBAL_CONFIG_PATH.lstrip(os.sep))
51427
51428 def set_port_dirs(self, portdir, portdir_overlay):
51429 self.portdir = portdir
51430 @@ -287,7 +296,7 @@ class LocationsManager(object):
51431 for ov in shlex_split(self.portdir_overlay):
51432 ov = normalize_path(ov)
51433 profiles_dir = os.path.join(ov, "profiles")
51434 - if os.path.isdir(profiles_dir):
51435 + if isdir_raise_eaccess(profiles_dir):
51436 self.overlay_profiles.append(profiles_dir)
51437
51438 self.profile_locations = [os.path.join(portdir, "profiles")] + self.overlay_profiles
51439
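The LocationsManager rework prefers the EPREFIX-aware PROFILE_PATH and falls back to the historical /etc/make.profile, warning when both exist and name different directories. A rough standalone illustration of that precedence; the relative paths and message text are simplified, and plain os.path.isdir stands in for the new isdir_raise_eaccess helper:

import os


def resolve_profile(config_root,
                    preferred_rel="etc/portage/make.profile",
                    deprecated_rel="etc/make.profile"):
    """Return the profile directory to use, or None if neither exists."""
    preferred = os.path.join(config_root, preferred_rel)
    deprecated = os.path.join(config_root, deprecated_rel)
    if os.path.isdir(preferred):
        if os.path.isdir(deprecated) and \
                not os.path.samefile(preferred, deprecated):
            print("!!! Found 2 make.profile dirs: using '%s', ignoring '%s'"
                  % (preferred, deprecated))
        return preferred
    if os.path.isdir(deprecated):
        return deprecated
    return None


print(resolve_profile("/"))

The hunk that hoists the profile's eapi file read (defaulting to "0") above the repository-format check serves a related ordering concern: the EAPI must be known before deciding whether directories are allowed at that profile level.
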
51440 diff --git a/pym/portage/package/ebuild/_config/MaskManager.py b/pym/portage/package/ebuild/_config/MaskManager.py
51441 index bce1152..0f060c9 100644
51442 --- a/pym/portage/package/ebuild/_config/MaskManager.py
51443 +++ b/pym/portage/package/ebuild/_config/MaskManager.py
51444 @@ -1,4 +1,4 @@
51445 -# Copyright 2010-2011 Gentoo Foundation
51446 +# Copyright 2010-2014 Gentoo Foundation
51447 # Distributed under the terms of the GNU General Public License v2
51448
51449 __all__ = (
51450 @@ -8,11 +8,10 @@ __all__ = (
51451 import warnings
51452
51453 from portage import os
51454 -from portage.dep import ExtendedAtomDict, match_from_list, _repo_separator, _slot_separator
51455 +from portage.dep import ExtendedAtomDict, match_from_list
51456 from portage.localization import _
51457 from portage.util import append_repo, grabfile_package, stack_lists, writemsg
51458 -from portage.versions import cpv_getkey
51459 -from _emerge.Package import Package
51460 +from portage.versions import _pkg_str
51461
51462 class MaskManager(object):
51463
51464 @@ -47,7 +46,7 @@ class MaskManager(object):
51465 "the repository profiles are not marked as that format. This will break "
51466 "in the future. Please either convert the following paths "
51467 "to files, or add\nprofile-formats = portage-1\nto the "
51468 - "repositories layout.conf.\n")
51469 + "repository's layout.conf.\n")
51470 % dict(repo_name=repo_config.name))
51471
51472 return pmask_cache[loc]
51473 @@ -185,12 +184,15 @@ class MaskManager(object):
51474 @return: A matching atom string or None if one is not found.
51475 """
51476
51477 - cp = cpv_getkey(cpv)
51478 - mask_atoms = self._pmaskdict.get(cp)
51479 + try:
51480 + cpv.slot
51481 + except AttributeError:
51482 + pkg = _pkg_str(cpv, slot=slot, repo=repo)
51483 + else:
51484 + pkg = cpv
51485 +
51486 + mask_atoms = self._pmaskdict.get(pkg.cp)
51487 if mask_atoms:
51488 - pkg = "".join((cpv, _slot_separator, slot))
51489 - if repo and repo != Package.UNKNOWN_REPO:
51490 - pkg = "".join((pkg, _repo_separator, repo))
51491 pkg_list = [pkg]
51492 for x in mask_atoms:
51493 if not match_from_list(x, pkg_list):
51494 @@ -219,8 +221,15 @@ class MaskManager(object):
51495 @return: A matching atom string or None if one is not found.
51496 """
51497
51498 - cp = cpv_getkey(cpv)
51499 - return self._getMaskAtom(cpv, slot, repo, self._punmaskdict.get(cp))
51500 + try:
51501 + cpv.slot
51502 + except AttributeError:
51503 + pkg = _pkg_str(cpv, slot=slot, repo=repo)
51504 + else:
51505 + pkg = cpv
51506 +
51507 + return self._getMaskAtom(pkg, slot, repo,
51508 + self._punmaskdict.get(pkg.cp))
51509
51510
51511 def getRawMaskAtom(self, cpv, slot, repo):
51512
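With slot and repo carried as attributes of _pkg_str, _getMaskAtom() no longer has to concatenate cpv, slot and repo into a single match string before consulting the mask dictionary. A simplified, self-contained sketch of the lookup flow, with atom_matches() as a toy predicate standing in for portage's match_from_list():

from collections import namedtuple

Pkg = namedtuple("Pkg", "cpv cp slot repo")


def atom_matches(atom, pkg):
    # Toy predicate: treat the atom as a bare category/package name.
    # The real match_from_list() understands full dependency-atom syntax.
    return atom == pkg.cp


def get_mask_atom(pmaskdict, pkg):
    """Return the first mask atom that matches pkg, or None."""
    for atom in pmaskdict.get(pkg.cp, ()):
        if atom_matches(atom, pkg):
            return atom
    return None


pmask = {"dev-lang/python": ["dev-lang/python"]}
pkg = Pkg("dev-lang/python-2.7.3", "dev-lang/python", "2.7", "gentoo")
print(get_mask_atom(pmask, pkg))  # dev-lang/python
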
51513 diff --git a/pym/portage/package/ebuild/_config/UseManager.py b/pym/portage/package/ebuild/_config/UseManager.py
51514 index e1ec7f4..1c8c60e 100644
51515 --- a/pym/portage/package/ebuild/_config/UseManager.py
51516 +++ b/pym/portage/package/ebuild/_config/UseManager.py
51517 @@ -1,4 +1,4 @@
51518 -# Copyright 2010-2012 Gentoo Foundation
51519 +# Copyright 2010-2014 Gentoo Foundation
51520 # Distributed under the terms of the GNU General Public License v2
51521
51522 __all__ = (
51523 @@ -7,36 +7,49 @@ __all__ = (
51524
51525 from _emerge.Package import Package
51526 from portage import os
51527 -from portage.dep import dep_getrepo, dep_getslot, ExtendedAtomDict, remove_slot, _get_useflag_re
51528 +from portage.dep import Atom, dep_getrepo, dep_getslot, ExtendedAtomDict, remove_slot, _get_useflag_re, _repo_separator
51529 +from portage.eapi import eapi_has_use_aliases, eapi_supports_stable_use_forcing_and_masking
51530 +from portage.exception import InvalidAtom
51531 from portage.localization import _
51532 -from portage.util import grabfile, grabdict_package, read_corresponding_eapi_file, stack_lists, writemsg
51533 -from portage.versions import cpv_getkey, _pkg_str
51534 +from portage.util import grabfile, grabdict, grabdict_package, read_corresponding_eapi_file, stack_lists, writemsg
51535 +from portage.versions import _pkg_str
51536
51537 from portage.package.ebuild._config.helper import ordered_by_atom_specificity
51538
51539 class UseManager(object):
51540
51541 - def __init__(self, repositories, profiles, abs_user_config, user_config=True):
51542 + def __init__(self, repositories, profiles, abs_user_config, is_stable,
51543 + user_config=True):
51544 # file variable
51545 #--------------------------------
51546 # repositories
51547 #--------------------------------
51548 # use.mask _repo_usemask_dict
51549 + # use.stable.mask _repo_usestablemask_dict
51550 # use.force _repo_useforce_dict
51551 + # use.stable.force _repo_usestableforce_dict
51552 + # use.aliases _repo_usealiases_dict
51553 # package.use.mask _repo_pusemask_dict
51554 + # package.use.stable.mask _repo_pusestablemask_dict
51555 # package.use.force _repo_puseforce_dict
51556 + # package.use.stable.force _repo_pusestableforce_dict
51557 + # package.use.aliases _repo_pusealiases_dict
51558 #--------------------------------
51559 # profiles
51560 #--------------------------------
51561 # use.mask _usemask_list
51562 + # use.stable.mask _usestablemask_list
51563 # use.force _useforce_list
51564 + # use.stable.force _usestableforce_list
51565 # package.use.mask _pusemask_list
51566 + # package.use.stable.mask _pusestablemask_list
51567 # package.use _pkgprofileuse
51568 # package.use.force _puseforce_list
51569 + # package.use.stable.force _pusestableforce_list
51570 #--------------------------------
51571 # user config
51572 #--------------------------------
51573 - # package.use _pusedict
51574 + # package.use _pusedict
51575
51576 # Dynamic variables tracked by the config class
51577 #--------------------------------
51578 @@ -49,26 +62,61 @@ class UseManager(object):
51579 #--------------------------------
51580 # puse
51581
51582 + self._user_config = user_config
51583 + self._is_stable = is_stable
51584 self._repo_usemask_dict = self._parse_repository_files_to_dict_of_tuples("use.mask", repositories)
51585 + self._repo_usestablemask_dict = \
51586 + self._parse_repository_files_to_dict_of_tuples("use.stable.mask",
51587 + repositories, eapi_filter=eapi_supports_stable_use_forcing_and_masking)
51588 self._repo_useforce_dict = self._parse_repository_files_to_dict_of_tuples("use.force", repositories)
51589 + self._repo_usestableforce_dict = \
51590 + self._parse_repository_files_to_dict_of_tuples("use.stable.force",
51591 + repositories, eapi_filter=eapi_supports_stable_use_forcing_and_masking)
51592 self._repo_pusemask_dict = self._parse_repository_files_to_dict_of_dicts("package.use.mask", repositories)
51593 + self._repo_pusestablemask_dict = \
51594 + self._parse_repository_files_to_dict_of_dicts("package.use.stable.mask",
51595 + repositories, eapi_filter=eapi_supports_stable_use_forcing_and_masking)
51596 self._repo_puseforce_dict = self._parse_repository_files_to_dict_of_dicts("package.use.force", repositories)
51597 + self._repo_pusestableforce_dict = \
51598 + self._parse_repository_files_to_dict_of_dicts("package.use.stable.force",
51599 + repositories, eapi_filter=eapi_supports_stable_use_forcing_and_masking)
51600 self._repo_puse_dict = self._parse_repository_files_to_dict_of_dicts("package.use", repositories)
51601
51602 self._usemask_list = self._parse_profile_files_to_tuple_of_tuples("use.mask", profiles)
51603 + self._usestablemask_list = \
51604 + self._parse_profile_files_to_tuple_of_tuples("use.stable.mask",
51605 + profiles, eapi_filter=eapi_supports_stable_use_forcing_and_masking)
51606 self._useforce_list = self._parse_profile_files_to_tuple_of_tuples("use.force", profiles)
51607 + self._usestableforce_list = \
51608 + self._parse_profile_files_to_tuple_of_tuples("use.stable.force",
51609 + profiles, eapi_filter=eapi_supports_stable_use_forcing_and_masking)
51610 self._pusemask_list = self._parse_profile_files_to_tuple_of_dicts("package.use.mask", profiles)
51611 + self._pusestablemask_list = \
51612 + self._parse_profile_files_to_tuple_of_dicts("package.use.stable.mask",
51613 + profiles, eapi_filter=eapi_supports_stable_use_forcing_and_masking)
51614 self._pkgprofileuse = self._parse_profile_files_to_tuple_of_dicts("package.use", profiles, juststrings=True)
51615 self._puseforce_list = self._parse_profile_files_to_tuple_of_dicts("package.use.force", profiles)
51616 + self._pusestableforce_list = \
51617 + self._parse_profile_files_to_tuple_of_dicts("package.use.stable.force",
51618 + profiles, eapi_filter=eapi_supports_stable_use_forcing_and_masking)
51619
51620 self._pusedict = self._parse_user_files_to_extatomdict("package.use", abs_user_config, user_config)
51621
51622 + self._repo_usealiases_dict = self._parse_repository_usealiases(repositories)
51623 + self._repo_pusealiases_dict = self._parse_repository_packageusealiases(repositories)
51624 +
51625 self.repositories = repositories
51626 -
51627 - def _parse_file_to_tuple(self, file_name, recursive=True):
51628 +
51629 + def _parse_file_to_tuple(self, file_name, recursive=True, eapi_filter=None):
51630 ret = []
51631 lines = grabfile(file_name, recursive=recursive)
51632 eapi = read_corresponding_eapi_file(file_name)
51633 + if eapi_filter is not None and not eapi_filter(eapi):
51634 + if lines:
51635 + writemsg(_("--- EAPI '%s' does not support '%s': '%s'\n") %
51636 + (eapi, os.path.basename(file_name), file_name),
51637 + noiselevel=-1)
51638 + return ()
51639 useflag_re = _get_useflag_re(eapi)
51640 for prefixed_useflag in lines:
51641 if prefixed_useflag[:1] == "-":
51642 @@ -82,11 +130,26 @@ class UseManager(object):
51643 ret.append(prefixed_useflag)
51644 return tuple(ret)
51645
51646 - def _parse_file_to_dict(self, file_name, juststrings=False, recursive=True):
51647 + def _parse_file_to_dict(self, file_name, juststrings=False, recursive=True,
51648 + eapi_filter=None, user_config=False):
51649 ret = {}
51650 location_dict = {}
51651 - file_dict = grabdict_package(file_name, recursive=recursive, verify_eapi=True)
51652 - eapi = read_corresponding_eapi_file(file_name)
51653 + eapi = read_corresponding_eapi_file(file_name, default=None)
51654 + if eapi is None and not user_config:
51655 + eapi = "0"
51656 + if eapi is None:
51657 + ret = ExtendedAtomDict(dict)
51658 + else:
51659 + ret = {}
51660 + file_dict = grabdict_package(file_name, recursive=recursive,
51661 + allow_wildcard=(eapi is None), allow_repo=(eapi is None),
51662 + verify_eapi=(eapi is not None))
51663 + if eapi is not None and eapi_filter is not None and not eapi_filter(eapi):
51664 + if file_dict:
51665 + writemsg(_("--- EAPI '%s' does not support '%s': '%s'\n") %
51666 + (eapi, os.path.basename(file_name), file_name),
51667 + noiselevel=-1)
51668 + return ret
51669 useflag_re = _get_useflag_re(eapi)
51670 for k, v in file_dict.items():
51671 useflags = []
51672 @@ -119,31 +182,116 @@ class UseManager(object):
51673
51674 return ret
51675
51676 - def _parse_repository_files_to_dict_of_tuples(self, file_name, repositories):
51677 + def _parse_repository_files_to_dict_of_tuples(self, file_name, repositories, eapi_filter=None):
51678 ret = {}
51679 for repo in repositories.repos_with_profiles():
51680 - ret[repo.name] = self._parse_file_to_tuple(os.path.join(repo.location, "profiles", file_name))
51681 + ret[repo.name] = self._parse_file_to_tuple(os.path.join(repo.location, "profiles", file_name), eapi_filter=eapi_filter)
51682 return ret
51683
51684 - def _parse_repository_files_to_dict_of_dicts(self, file_name, repositories):
51685 + def _parse_repository_files_to_dict_of_dicts(self, file_name, repositories, eapi_filter=None):
51686 ret = {}
51687 for repo in repositories.repos_with_profiles():
51688 - ret[repo.name] = self._parse_file_to_dict(os.path.join(repo.location, "profiles", file_name))
51689 + ret[repo.name] = self._parse_file_to_dict(os.path.join(repo.location, "profiles", file_name), eapi_filter=eapi_filter)
51690 return ret
51691
51692 - def _parse_profile_files_to_tuple_of_tuples(self, file_name, locations):
51693 + def _parse_profile_files_to_tuple_of_tuples(self, file_name, locations,
51694 + eapi_filter=None):
51695 return tuple(self._parse_file_to_tuple(
51696 os.path.join(profile.location, file_name),
51697 - recursive=profile.portage1_directories)
51698 + recursive=profile.portage1_directories, eapi_filter=eapi_filter)
51699 for profile in locations)
51700
51701 - def _parse_profile_files_to_tuple_of_dicts(self, file_name, locations, juststrings=False):
51702 + def _parse_profile_files_to_tuple_of_dicts(self, file_name, locations,
51703 + juststrings=False, eapi_filter=None):
51704 return tuple(self._parse_file_to_dict(
51705 os.path.join(profile.location, file_name), juststrings,
51706 - recursive=profile.portage1_directories)
51707 + recursive=profile.portage1_directories, eapi_filter=eapi_filter,
51708 + user_config=profile.user_config)
51709 for profile in locations)
51710
51711 - def getUseMask(self, pkg=None):
51712 + def _parse_repository_usealiases(self, repositories):
51713 + ret = {}
51714 + for repo in repositories.repos_with_profiles():
51715 + file_name = os.path.join(repo.location, "profiles", "use.aliases")
51716 + eapi = read_corresponding_eapi_file(file_name)
51717 + useflag_re = _get_useflag_re(eapi)
51718 + raw_file_dict = grabdict(file_name, recursive=True)
51719 + file_dict = {}
51720 + for real_flag, aliases in raw_file_dict.items():
51721 + if useflag_re.match(real_flag) is None:
51722 + writemsg(_("--- Invalid real USE flag in '%s': '%s'\n") % (file_name, real_flag), noiselevel=-1)
51723 + else:
51724 + for alias in aliases:
51725 + if useflag_re.match(alias) is None:
51726 + writemsg(_("--- Invalid USE flag alias for '%s' real USE flag in '%s': '%s'\n") %
51727 + (real_flag, file_name, alias), noiselevel=-1)
51728 + else:
51729 + if any(alias in v for k, v in file_dict.items() if k != real_flag):
51730 + writemsg(_("--- Duplicated USE flag alias in '%s': '%s'\n") %
51731 + (file_name, alias), noiselevel=-1)
51732 + else:
51733 + file_dict.setdefault(real_flag, []).append(alias)
51734 + ret[repo.name] = file_dict
51735 + return ret
51736 +
51737 + def _parse_repository_packageusealiases(self, repositories):
51738 + ret = {}
51739 + for repo in repositories.repos_with_profiles():
51740 + file_name = os.path.join(repo.location, "profiles", "package.use.aliases")
51741 + eapi = read_corresponding_eapi_file(file_name)
51742 + useflag_re = _get_useflag_re(eapi)
51743 + lines = grabfile(file_name, recursive=True)
51744 + file_dict = {}
51745 + for line in lines:
51746 + elements = line.split()
51747 + atom = elements[0]
51748 + try:
51749 + atom = Atom(atom, eapi=eapi)
51750 + except InvalidAtom:
51751 + writemsg(_("--- Invalid atom in '%s': '%s'\n") % (file_name, atom))
51752 + continue
51753 + if len(elements) == 1:
51754 + writemsg(_("--- Missing real USE flag for '%s' in '%s'\n") % (atom, file_name), noiselevel=-1)
51755 + continue
51756 + real_flag = elements[1]
51757 + if useflag_re.match(real_flag) is None:
51758 + writemsg(_("--- Invalid real USE flag for '%s' in '%s': '%s'\n") % (atom, file_name, real_flag), noiselevel=-1)
51759 + else:
51760 + for alias in elements[2:]:
51761 + if useflag_re.match(alias) is None:
51762 + writemsg(_("--- Invalid USE flag alias for '%s' real USE flag for '%s' in '%s': '%s'\n") %
51763 + (real_flag, atom, file_name, alias), noiselevel=-1)
51764 + else:
51765 + # Duplicated USE flag aliases in entries for different atoms
51766 + # matching the same package version are detected in getUseAliases().
51767 + if any(alias in v for k, v in file_dict.get(atom.cp, {}).get(atom, {}).items() if k != real_flag):
51768 + writemsg(_("--- Duplicated USE flag alias for '%s' in '%s': '%s'\n") %
51769 + (atom, file_name, alias), noiselevel=-1)
51770 + else:
51771 + file_dict.setdefault(atom.cp, {}).setdefault(atom, {}).setdefault(real_flag, []).append(alias)
51772 + ret[repo.name] = file_dict
51773 + return ret
51774 +
51775 + def _isStable(self, pkg):
51776 + if self._user_config:
51777 + try:
51778 + return pkg.stable
51779 + except AttributeError:
51780 + # KEYWORDS is unavailable (prior to "depend" phase)
51781 + return False
51782 +
51783 + try:
51784 + pkg._metadata
51785 + except AttributeError:
51786 + # KEYWORDS is unavailable (prior to "depend" phase)
51787 + return False
51788 +
51789 + # Since repoman uses different config instances for
51790 + # different profiles, we have to be careful to do the
51791 + # stable check against the correct profile here.
51792 + return self._is_stable(pkg)
51793 +
51794 + def getUseMask(self, pkg=None, stable=None):
51795 if pkg is None:
51796 return frozenset(stack_lists(
51797 self._usemask_list, incremental=True))
51798 @@ -155,7 +303,12 @@ class UseManager(object):
51799 repo = dep_getrepo(pkg)
51800 pkg = _pkg_str(remove_slot(pkg), slot=slot, repo=repo)
51801 cp = pkg.cp
51802 +
51803 + if stable is None:
51804 + stable = self._isStable(pkg)
51805 +
51806 usemask = []
51807 +
51808 if hasattr(pkg, "repo") and pkg.repo != Package.UNKNOWN_REPO:
51809 repos = []
51810 try:
51811 @@ -166,30 +319,56 @@ class UseManager(object):
51812 repos.append(pkg.repo)
51813 for repo in repos:
51814 usemask.append(self._repo_usemask_dict.get(repo, {}))
51815 + if stable:
51816 + usemask.append(self._repo_usestablemask_dict.get(repo, {}))
51817 cpdict = self._repo_pusemask_dict.get(repo, {}).get(cp)
51818 if cpdict:
51819 pkg_usemask = ordered_by_atom_specificity(cpdict, pkg)
51820 if pkg_usemask:
51821 usemask.extend(pkg_usemask)
51822 + if stable:
51823 + cpdict = self._repo_pusestablemask_dict.get(repo, {}).get(cp)
51824 + if cpdict:
51825 + pkg_usemask = ordered_by_atom_specificity(cpdict, pkg)
51826 + if pkg_usemask:
51827 + usemask.extend(pkg_usemask)
51828 +
51829 for i, pusemask_dict in enumerate(self._pusemask_list):
51830 if self._usemask_list[i]:
51831 usemask.append(self._usemask_list[i])
51832 + if stable and self._usestablemask_list[i]:
51833 + usemask.append(self._usestablemask_list[i])
51834 cpdict = pusemask_dict.get(cp)
51835 if cpdict:
51836 pkg_usemask = ordered_by_atom_specificity(cpdict, pkg)
51837 if pkg_usemask:
51838 usemask.extend(pkg_usemask)
51839 + if stable:
51840 + cpdict = self._pusestablemask_list[i].get(cp)
51841 + if cpdict:
51842 + pkg_usemask = ordered_by_atom_specificity(cpdict, pkg)
51843 + if pkg_usemask:
51844 + usemask.extend(pkg_usemask)
51845 +
51846 return frozenset(stack_lists(usemask, incremental=True))
51847
51848 - def getUseForce(self, pkg=None):
51849 + def getUseForce(self, pkg=None, stable=None):
51850 if pkg is None:
51851 return frozenset(stack_lists(
51852 self._useforce_list, incremental=True))
51853
51854 cp = getattr(pkg, "cp", None)
51855 if cp is None:
51856 - cp = cpv_getkey(remove_slot(pkg))
51857 + slot = dep_getslot(pkg)
51858 + repo = dep_getrepo(pkg)
51859 + pkg = _pkg_str(remove_slot(pkg), slot=slot, repo=repo)
51860 + cp = pkg.cp
51861 +
51862 + if stable is None:
51863 + stable = self._isStable(pkg)
51864 +
51865 useforce = []
51866 +
51867 if hasattr(pkg, "repo") and pkg.repo != Package.UNKNOWN_REPO:
51868 repos = []
51869 try:
51870 @@ -200,25 +379,90 @@ class UseManager(object):
51871 repos.append(pkg.repo)
51872 for repo in repos:
51873 useforce.append(self._repo_useforce_dict.get(repo, {}))
51874 + if stable:
51875 + useforce.append(self._repo_usestableforce_dict.get(repo, {}))
51876 cpdict = self._repo_puseforce_dict.get(repo, {}).get(cp)
51877 if cpdict:
51878 pkg_useforce = ordered_by_atom_specificity(cpdict, pkg)
51879 if pkg_useforce:
51880 useforce.extend(pkg_useforce)
51881 + if stable:
51882 + cpdict = self._repo_pusestableforce_dict.get(repo, {}).get(cp)
51883 + if cpdict:
51884 + pkg_useforce = ordered_by_atom_specificity(cpdict, pkg)
51885 + if pkg_useforce:
51886 + useforce.extend(pkg_useforce)
51887 +
51888 for i, puseforce_dict in enumerate(self._puseforce_list):
51889 if self._useforce_list[i]:
51890 useforce.append(self._useforce_list[i])
51891 + if stable and self._usestableforce_list[i]:
51892 + useforce.append(self._usestableforce_list[i])
51893 cpdict = puseforce_dict.get(cp)
51894 if cpdict:
51895 pkg_useforce = ordered_by_atom_specificity(cpdict, pkg)
51896 if pkg_useforce:
51897 useforce.extend(pkg_useforce)
51898 + if stable:
51899 + cpdict = self._pusestableforce_list[i].get(cp)
51900 + if cpdict:
51901 + pkg_useforce = ordered_by_atom_specificity(cpdict, pkg)
51902 + if pkg_useforce:
51903 + useforce.extend(pkg_useforce)
51904 +
51905 return frozenset(stack_lists(useforce, incremental=True))
51906
51907 + def getUseAliases(self, pkg):
51908 + if hasattr(pkg, "eapi") and not eapi_has_use_aliases(pkg.eapi):
51909 + return {}
51910 +
51911 + cp = getattr(pkg, "cp", None)
51912 + if cp is None:
51913 + slot = dep_getslot(pkg)
51914 + repo = dep_getrepo(pkg)
51915 + pkg = _pkg_str(remove_slot(pkg), slot=slot, repo=repo)
51916 + cp = pkg.cp
51917 +
51918 + usealiases = {}
51919 +
51920 + if hasattr(pkg, "repo") and pkg.repo != Package.UNKNOWN_REPO:
51921 + repos = []
51922 + try:
51923 + repos.extend(repo.name for repo in
51924 + self.repositories[pkg.repo].masters)
51925 + except KeyError:
51926 + pass
51927 + repos.append(pkg.repo)
51928 + for repo in repos:
51929 + usealiases_dict = self._repo_usealiases_dict.get(repo, {})
51930 + for real_flag, aliases in usealiases_dict.items():
51931 + for alias in aliases:
51932 + if any(alias in v for k, v in usealiases.items() if k != real_flag):
51933 + writemsg(_("--- Duplicated USE flag alias for '%s%s%s': '%s'\n") %
51934 + (pkg.cpv, _repo_separator, pkg.repo, alias), noiselevel=-1)
51935 + else:
51936 + usealiases.setdefault(real_flag, []).append(alias)
51937 + cp_usealiases_dict = self._repo_pusealiases_dict.get(repo, {}).get(cp)
51938 + if cp_usealiases_dict:
51939 + usealiases_dict_list = ordered_by_atom_specificity(cp_usealiases_dict, pkg)
51940 + for usealiases_dict in usealiases_dict_list:
51941 + for real_flag, aliases in usealiases_dict.items():
51942 + for alias in aliases:
51943 + if any(alias in v for k, v in usealiases.items() if k != real_flag):
51944 + writemsg(_("--- Duplicated USE flag alias for '%s%s%s': '%s'\n") %
51945 + (pkg.cpv, _repo_separator, pkg.repo, alias), noiselevel=-1)
51946 + else:
51947 + usealiases.setdefault(real_flag, []).append(alias)
51948 +
51949 + return usealiases
51950 +
51951 def getPUSE(self, pkg):
51952 cp = getattr(pkg, "cp", None)
51953 if cp is None:
51954 - cp = cpv_getkey(remove_slot(pkg))
51955 + slot = dep_getslot(pkg)
51956 + repo = dep_getrepo(pkg)
51957 + pkg = _pkg_str(remove_slot(pkg), slot=slot, repo=repo)
51958 + cp = pkg.cp
51959 ret = ""
51960 cpdict = self._pusedict.get(cp)
51961 if cpdict:
51962
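The UseManager changes thread an optional eapi_filter through every file parser so that the new use.stable.* and package.use.stable.* files are honoured only where the governing EAPI supports stable masking and forcing; otherwise the parser warns and returns an empty container. A condensed sketch of that gating pattern, in which eapi_supports_stable and the flag syntax check are illustrative rather than portage's exact API:

import re

_flag_re = re.compile(r"^-?[A-Za-z0-9][A-Za-z0-9+_@-]*$")  # rough USE-flag shape


def eapi_supports_stable(eapi):
    # Illustrative check: profile-level stable masking arrived with EAPI 5.
    try:
        return int(eapi) >= 5
    except ValueError:
        return False


def parse_use_file(lines, eapi, eapi_filter=None, file_name="use.stable.mask"):
    """Return a tuple of flags, or () when the EAPI does not allow the file."""
    if eapi_filter is not None and not eapi_filter(eapi):
        if lines:
            print("--- EAPI '%s' does not support '%s'" % (eapi, file_name))
        return ()
    return tuple(flag for flag in lines if _flag_re.match(flag))


print(parse_use_file(["doc", "-static"], "5", eapi_filter=eapi_supports_stable))
# ('doc', '-static')
print(parse_use_file(["doc"], "4", eapi_filter=eapi_supports_stable))
# prints the warning, then ()

The new use.aliases and package.use.aliases parsers apply the same defensive style, validating each real flag and alias against _get_useflag_re(eapi) and refusing duplicate aliases before recording them.
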
51963 diff --git a/pym/portage/package/ebuild/_config/special_env_vars.py b/pym/portage/package/ebuild/_config/special_env_vars.py
51964 index 6ed6d05..74fedd6 100644
51965 --- a/pym/portage/package/ebuild/_config/special_env_vars.py
51966 +++ b/pym/portage/package/ebuild/_config/special_env_vars.py
51967 @@ -1,6 +1,8 @@
51968 -# Copyright 2010-2012 Gentoo Foundation
51969 +# Copyright 2010-2013 Gentoo Foundation
51970 # Distributed under the terms of the GNU General Public License v2
51971
51972 +from __future__ import unicode_literals
51973 +
51974 __all__ = (
51975 'case_insensitive_vars', 'default_globals', 'env_blacklist', \
51976 'environ_filter', 'environ_whitelist', 'environ_whitelist_re',
51977 @@ -13,14 +15,17 @@ import re
51978 # configuration files.
51979 env_blacklist = frozenset((
51980 "A", "AA", "CATEGORY", "DEPEND", "DESCRIPTION", "EAPI",
51981 - "EBUILD_FORCE_TEST", "EBUILD_PHASE", "EBUILD_SKIP_MANIFEST",
51982 + "EBUILD_FORCE_TEST", "EBUILD_PHASE",
51983 + "EBUILD_PHASE_FUNC", "EBUILD_SKIP_MANIFEST",
51984 "ED", "EMERGE_FROM", "EPREFIX", "EROOT",
51985 - "GREP_OPTIONS", "HOMEPAGE", "INHERITED", "IUSE",
51986 + "GREP_OPTIONS", "HDEPEND", "HOMEPAGE",
51987 + "INHERITED", "IUSE", "IUSE_EFFECTIVE",
51988 "KEYWORDS", "LICENSE", "MERGE_TYPE",
51989 "PDEPEND", "PF", "PKGUSE", "PORTAGE_BACKGROUND",
51990 - "PORTAGE_BACKGROUND_UNMERGE", "PORTAGE_BUILDIR_LOCKED",
51991 - "PORTAGE_BUILT_USE", "PORTAGE_CONFIGROOT", "PORTAGE_IUSE",
51992 - "PORTAGE_NONFATAL", "PORTAGE_REPO_NAME",
51993 + "PORTAGE_BACKGROUND_UNMERGE", "PORTAGE_BUILDDIR_LOCKED",
51994 + "PORTAGE_BUILT_USE", "PORTAGE_CONFIGROOT",
51995 + "PORTAGE_INTERNAL_CALLER", "PORTAGE_IUSE",
51996 + "PORTAGE_NONFATAL", "PORTAGE_PIPE_FD", "PORTAGE_REPO_NAME",
51997 "PORTAGE_USE", "PROPERTIES", "PROVIDE", "RDEPEND", "REPOSITORY",
51998 "RESTRICT", "ROOT", "SLOT", "SRC_URI"
51999 ))
52000 @@ -39,7 +44,7 @@ environ_whitelist += [
52001 "ACCEPT_LICENSE", "BASH_ENV", "BUILD_PREFIX", "COLUMNS", "D",
52002 "DISTDIR", "DOC_SYMLINKS_DIR", "EAPI", "EBUILD",
52003 "EBUILD_FORCE_TEST",
52004 - "EBUILD_PHASE", "ECLASSDIR", "ECLASS_DEPTH", "ED",
52005 + "EBUILD_PHASE", "EBUILD_PHASE_FUNC", "ECLASSDIR", "ECLASS_DEPTH", "ED",
52006 "EMERGE_FROM", "EPREFIX", "EROOT",
52007 "FEATURES", "FILESDIR", "HOME", "MERGE_TYPE", "NOCOLOR", "PATH",
52008 "PKGDIR",
52009 @@ -49,7 +54,8 @@ environ_whitelist += [
52010 "PORTAGE_BINPKG_FILE", "PORTAGE_BINPKG_TAR_OPTS",
52011 "PORTAGE_BINPKG_TMPFILE",
52012 "PORTAGE_BIN_PATH",
52013 - "PORTAGE_BUILDDIR", "PORTAGE_BUNZIP2_COMMAND", "PORTAGE_BZIP2_COMMAND",
52014 + "PORTAGE_BUILDDIR", "PORTAGE_BUILD_GROUP", "PORTAGE_BUILD_USER",
52015 + "PORTAGE_BUNZIP2_COMMAND", "PORTAGE_BZIP2_COMMAND",
52016 "PORTAGE_COLORMAP", "PORTAGE_COMPRESS",
52017 "PORTAGE_COMPRESS_EXCLUDE_SUFFIXES",
52018 "PORTAGE_CONFIGROOT", "PORTAGE_DEBUG", "PORTAGE_DEPCACHEDIR",
52019 @@ -58,14 +64,16 @@ environ_whitelist += [
52020 "PORTAGE_DOHTML_WARN_ON_SKIPPED_FILES",
52021 "PORTAGE_EBUILD_EXIT_FILE", "PORTAGE_FEATURES",
52022 "PORTAGE_GID", "PORTAGE_GRPNAME",
52023 + "PORTAGE_INTERNAL_CALLER",
52024 "PORTAGE_INST_GID", "PORTAGE_INST_UID",
52025 - "PORTAGE_IPC_DAEMON", "PORTAGE_IUSE",
52026 - "PORTAGE_LOG_FILE", "PORTAGE_OVERRIDE_EPREFIX",
52027 - "PORTAGE_PYM_PATH", "PORTAGE_PYTHON", "PORTAGE_QUIET",
52028 - "PORTAGE_REPO_NAME", "PORTAGE_RESTRICT",
52029 + "PORTAGE_IPC_DAEMON", "PORTAGE_IUSE", "PORTAGE_ECLASS_LOCATIONS",
52030 + "PORTAGE_LOG_FILE", "PORTAGE_OVERRIDE_EPREFIX", "PORTAGE_PIPE_FD",
52031 + "PORTAGE_PYM_PATH", "PORTAGE_PYTHON",
52032 + "PORTAGE_PYTHONPATH", "PORTAGE_QUIET",
52033 + "PORTAGE_REPO_NAME", "PORTAGE_REPOSITORIES", "PORTAGE_RESTRICT",
52034 "PORTAGE_SIGPIPE_STATUS",
52035 "PORTAGE_TMPDIR", "PORTAGE_UPDATE_ENV", "PORTAGE_USERNAME",
52036 - "PORTAGE_VERBOSE", "PORTAGE_WORKDIR_MODE",
52037 + "PORTAGE_VERBOSE", "PORTAGE_WORKDIR_MODE", "PORTAGE_XATTR_EXCLUDE",
52038 "PORTDIR", "PORTDIR_OVERLAY", "PREROOTPATH", "PROFILE_PATHS",
52039 "REPLACING_VERSIONS", "REPLACED_BY_VERSION",
52040 "ROOT", "ROOTPATH", "T", "TMP", "TMPDIR",
52041 @@ -137,9 +145,11 @@ environ_filter += [
52042
52043 # portage config variables and variables set directly by portage
52044 environ_filter += [
52045 - "ACCEPT_CHOSTS", "ACCEPT_KEYWORDS", "ACCEPT_PROPERTIES", "AUTOCLEAN",
52046 + "ACCEPT_CHOSTS", "ACCEPT_KEYWORDS", "ACCEPT_PROPERTIES",
52047 + "ACCEPT_RESTRICT", "AUTOCLEAN",
52048 "CLEAN_DELAY", "COLLISION_IGNORE",
52049 "CONFIG_PROTECT", "CONFIG_PROTECT_MASK",
52050 + "DCO_SIGNED_OFF_BY",
52051 "EGENCACHE_DEFAULT_OPTS", "EMERGE_DEFAULT_OPTS",
52052 "EMERGE_LOG_DIR",
52053 "EMERGE_WARNING_DELAY",
52054 @@ -148,8 +158,9 @@ environ_filter += [
52055 "FETCHCOMMAND_RSYNC", "FETCHCOMMAND_SFTP",
52056 "GENTOO_MIRRORS", "NOCONFMEM", "O",
52057 "PORTAGE_BACKGROUND", "PORTAGE_BACKGROUND_UNMERGE",
52058 - "PORTAGE_BINHOST",
52059 - "PORTAGE_BINHOST_CHUNKSIZE", "PORTAGE_BUILDIR_LOCKED",
52060 + "PORTAGE_BINHOST", "PORTAGE_BINPKG_FORMAT",
52061 + "PORTAGE_BUILDDIR_LOCKED",
52062 + "PORTAGE_CHECKSUM_FILTER",
52063 "PORTAGE_ELOG_CLASSES",
52064 "PORTAGE_ELOG_MAILFROM", "PORTAGE_ELOG_MAILSUBJECT",
52065 "PORTAGE_ELOG_MAILURI", "PORTAGE_ELOG_SYSTEM",
52066 @@ -161,13 +172,20 @@ environ_filter += [
52067 "PORTAGE_REPO_DUPLICATE_WARN",
52068 "PORTAGE_RO_DISTDIRS",
52069 "PORTAGE_RSYNC_EXTRA_OPTS", "PORTAGE_RSYNC_OPTS",
52070 - "PORTAGE_RSYNC_RETRIES", "PORTAGE_SYNC_STALE",
52071 - "PORTAGE_USE", "PORT_LOGDIR", "PORT_LOGDIR_CLEAN",
52072 + "PORTAGE_RSYNC_RETRIES", "PORTAGE_SSH_OPTS", "PORTAGE_SYNC_STALE",
52073 + "PORTAGE_USE",
52074 + "PORT_LOGDIR", "PORT_LOGDIR_CLEAN",
52075 "QUICKPKG_DEFAULT_OPTS", "REPOMAN_DEFAULT_OPTS",
52076 "RESUMECOMMAND", "RESUMECOMMAND_FTP",
52077 "RESUMECOMMAND_HTTP", "RESUMECOMMAND_HTTPS",
52078 "RESUMECOMMAND_RSYNC", "RESUMECOMMAND_SFTP",
52079 - "SYNC", "UNINSTALL_IGNORE", "USE_EXPAND_HIDDEN", "USE_ORDER",
52080 + "UNINSTALL_IGNORE", "USE_EXPAND_HIDDEN", "USE_ORDER",
52081 + "__PORTAGE_HELPER"
52082 +]
52083 +
52084 +# No longer supported variables
52085 +environ_filter += [
52086 + "SYNC"
52087 ]
52088
52089 environ_filter = frozenset(environ_filter)
52090
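These frozensets are consumed by the config and doebuild machinery: roughly, blacklisted names are rejected from configuration files, whitelisted names are allowed through to the ebuild environment, and filtered names (now including the retired SYNC variable) are stripped from it. A toy demonstration of that kind of set-based filtering; the application logic here is schematic, not portage's actual code path:

environ_filter = frozenset({"SYNC", "EMERGE_DEFAULT_OPTS", "GENTOO_MIRRORS"})
environ_whitelist = frozenset({"PATH", "HOME", "DISTDIR", "PORTAGE_TMPDIR"})


def build_phase_env(os_environ):
    """Keep whitelisted names, drop filtered ones, pass everything else."""
    return {
        key: value
        for key, value in os_environ.items()
        if key in environ_whitelist or key not in environ_filter
    }


env = build_phase_env({"PATH": "/usr/bin", "SYNC": "rsync://example", "LANG": "C"})
print(sorted(env))  # ['LANG', 'PATH']
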
52091 diff --git a/pym/portage/package/ebuild/_config/unpack_dependencies.py b/pym/portage/package/ebuild/_config/unpack_dependencies.py
52092 new file mode 100644
52093 index 0000000..1375189
52094 --- /dev/null
52095 +++ b/pym/portage/package/ebuild/_config/unpack_dependencies.py
52096 @@ -0,0 +1,38 @@
52097 +# Copyright 2012 Gentoo Foundation
52098 +# Distributed under the terms of the GNU General Public License v2
52099 +
52100 +from portage import os, _supported_eapis
52101 +from portage.dep import use_reduce
52102 +from portage.eapi import eapi_has_automatic_unpack_dependencies
52103 +from portage.exception import InvalidDependString
52104 +from portage.localization import _
52105 +from portage.util import grabfile, writemsg
52106 +
52107 +def load_unpack_dependencies_configuration(repositories):
52108 + repo_dict = {}
52109 + for repo in repositories.repos_with_profiles():
52110 + for eapi in _supported_eapis:
52111 + if eapi_has_automatic_unpack_dependencies(eapi):
52112 + file_name = os.path.join(repo.location, "profiles", "unpack_dependencies", eapi)
52113 + lines = grabfile(file_name, recursive=True)
52114 + for line in lines:
52115 + elements = line.split()
52116 + suffix = elements[0].lower()
52117 + if len(elements) == 1:
52118 + writemsg(_("--- Missing unpack dependencies for '%s' suffix in '%s'\n") % (suffix, file_name))
52119 + depend = " ".join(elements[1:])
52120 + try:
52121 + use_reduce(depend, eapi=eapi)
52122 + except InvalidDependString as e:
52123 + writemsg(_("--- Invalid unpack dependencies for '%s' suffix in '%s': '%s'\n" % (suffix, file_name, e)))
52124 + else:
52125 + repo_dict.setdefault(repo.name, {}).setdefault(eapi, {})[suffix] = depend
52126 +
52127 + ret = {}
52128 + for repo in repositories.repos_with_profiles():
52129 + for repo_name in [x.name for x in repo.masters] + [repo.name]:
52130 + for eapi in repo_dict.get(repo_name, {}):
52131 + for suffix, depend in repo_dict.get(repo_name, {}).get(eapi, {}).items():
52132 + ret.setdefault(repo.name, {}).setdefault(eapi, {})[suffix] = depend
52133 +
52134 + return ret
52135
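The new unpack_dependencies loader reads one file per supported EAPI under profiles/unpack_dependencies/, where each line maps an archive suffix to the dependency string required to unpack it, and then folds master-repository entries into every inheriting repository. A small worked example of the per-line parsing step; the file contents below are hypothetical:

def parse_unpack_dependencies(lines):
    """Map lowercase archive suffixes to their unpack-time dependencies."""
    result = {}
    for line in lines:
        elements = line.split()
        if len(elements) < 2:
            continue  # the real loader warns about the missing dependency here
        result[elements[0].lower()] = " ".join(elements[1:])
    return result


sample = [
    "7Z app-arch/p7zip",
    "RAR || ( app-arch/rar app-arch/unrar )",
]
print(parse_unpack_dependencies(sample))
# {'7z': 'app-arch/p7zip', 'rar': '|| ( app-arch/rar app-arch/unrar )'}
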
52136 diff --git a/pym/portage/package/ebuild/_ipc/QueryCommand.py b/pym/portage/package/ebuild/_ipc/QueryCommand.py
52137 index d200fe8..351c956 100644
52138 --- a/pym/portage/package/ebuild/_ipc/QueryCommand.py
52139 +++ b/pym/portage/package/ebuild/_ipc/QueryCommand.py
52140 @@ -1,12 +1,13 @@
52141 -# Copyright 2010-2012 Gentoo Foundation
52142 +# Copyright 2010-2013 Gentoo Foundation
52143 # Distributed under the terms of the GNU General Public License v2
52144
52145 +from __future__ import unicode_literals
52146 +
52147 import io
52148
52149 import portage
52150 from portage import os
52151 -from portage import _unicode_decode
52152 -from portage.dep import Atom
52153 +from portage.dep import Atom, _repo_name_re
52154 from portage.eapi import eapi_has_repo_deps
52155 from portage.elog import messages as elog_messages
52156 from portage.exception import InvalidAtom
52157 @@ -36,40 +37,47 @@ class QueryCommand(IpcCommand):
52158 @return: tuple of (stdout, stderr, returncode)
52159 """
52160
52161 - cmd, root, atom_str = argv
52162 -
52163 - eapi = self.settings.get('EAPI')
52164 - allow_repo = eapi_has_repo_deps(eapi)
52165 - try:
52166 - atom = Atom(atom_str, allow_repo=allow_repo)
52167 - except InvalidAtom:
52168 - return ('', 'invalid atom: %s\n' % atom_str, 2)
52169 + # Python 3:
52170 + # cmd, root, *args = argv
52171 + cmd = argv[0]
52172 + root = argv[1]
52173 + args = argv[2:]
52174
52175 warnings = []
52176 - try:
52177 - atom = Atom(atom_str, allow_repo=allow_repo, eapi=eapi)
52178 - except InvalidAtom as e:
52179 - warnings.append(_unicode_decode("QA Notice: %s: %s") % (cmd, e))
52180 -
52181 - use = self.settings.get('PORTAGE_BUILT_USE')
52182 - if use is None:
52183 - use = self.settings['PORTAGE_USE']
52184 -
52185 - use = frozenset(use.split())
52186 - atom = atom.evaluate_conditionals(use)
52187 + warnings_str = ''
52188
52189 db = self.get_db()
52190 -
52191 - warnings_str = ''
52192 - if warnings:
52193 - warnings_str = self._elog('eqawarn', warnings)
52194 + eapi = self.settings.get('EAPI')
52195
52196 root = normalize_path(root).rstrip(os.path.sep) + os.path.sep
52197 if root not in db:
52198 - return ('', 'invalid ROOT: %s\n' % root, 2)
52199 + return ('', '%s: Invalid ROOT: %s\n' % (cmd, root), 3)
52200
52201 + portdb = db[root]["porttree"].dbapi
52202 vardb = db[root]["vartree"].dbapi
52203
52204 + if cmd in ('best_version', 'has_version'):
52205 + allow_repo = eapi_has_repo_deps(eapi)
52206 + try:
52207 + atom = Atom(args[0], allow_repo=allow_repo)
52208 + except InvalidAtom:
52209 + return ('', '%s: Invalid atom: %s\n' % (cmd, args[0]), 2)
52210 +
52211 + try:
52212 + atom = Atom(args[0], allow_repo=allow_repo, eapi=eapi)
52213 + except InvalidAtom as e:
52214 + warnings.append("QA Notice: %s: %s" % (cmd, e))
52215 +
52216 + use = self.settings.get('PORTAGE_BUILT_USE')
52217 + if use is None:
52218 + use = self.settings['PORTAGE_USE']
52219 +
52220 + use = frozenset(use.split())
52221 + atom = atom.evaluate_conditionals(use)
52222 +
52223 + if warnings:
52224 + warnings_str = self._elog('eqawarn', warnings)
52225 +
52226 if cmd == 'has_version':
52227 if vardb.match(atom):
52228 returncode = 0
52229 @@ -79,8 +87,35 @@ class QueryCommand(IpcCommand):
52230 elif cmd == 'best_version':
52231 m = best(vardb.match(atom))
52232 return ('%s\n' % m, warnings_str, 0)
52233 + elif cmd in ('master_repositories', 'repository_path', 'available_eclasses', 'eclass_path', 'license_path'):
52234 + repo = _repo_name_re.match(args[0])
52235 + if repo is None:
52236 + return ('', '%s: Invalid repository: %s\n' % (cmd, args[0]), 2)
52237 + try:
52238 + repo = portdb.repositories[args[0]]
52239 + except KeyError:
52240 + return ('', warnings_str, 1)
52241 +
52242 + if cmd == 'master_repositories':
52243 + return ('%s\n' % ' '.join(x.name for x in repo.masters), warnings_str, 0)
52244 + elif cmd == 'repository_path':
52245 + return ('%s\n' % repo.location, warnings_str, 0)
52246 + elif cmd == 'available_eclasses':
52247 + return ('%s\n' % ' '.join(sorted(repo.eclass_db.eclasses)), warnings_str, 0)
52248 + elif cmd == 'eclass_path':
52249 + try:
52250 + eclass = repo.eclass_db.eclasses[args[1]]
52251 + except KeyError:
52252 + return ('', warnings_str, 1)
52253 + return ('%s\n' % eclass.location, warnings_str, 0)
52254 + elif cmd == 'license_path':
52255 + paths = reversed([os.path.join(x.location, 'licenses', args[1]) for x in list(repo.masters) + [repo]])
52256 + for path in paths:
52257 + if os.path.exists(path):
52258 + return ('%s\n' % path, warnings_str, 0)
52259 + return ('', warnings_str, 1)
52260 else:
52261 - return ('', 'invalid command: %s\n' % cmd, 2)
52262 + return ('', 'Invalid command: %s\n' % cmd, 3)
52263
52264 def _elog(self, elog_funcname, lines):
52265 """
52266
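QueryCommand now splits its arguments into cmd, root and a variable tail, answers every request with an (stdout, stderr, returncode) tuple, and reserves distinct return codes: 2 for invalid input, 3 for a bad ROOT or unknown command, 1 for a lookup that found nothing. A stripped-down dispatcher in the same shape; the command subset and the repository data are invented for the example:

REPOS = {"gentoo": {"location": "/usr/portage", "masters": []}}


def query(argv):
    """Return (stdout, stderr, returncode), like the reworked QueryCommand."""
    cmd, root, args = argv[0], argv[1], argv[2:]
    if root != "/":
        return ("", "%s: Invalid ROOT: %s\n" % (cmd, root), 3)
    if cmd in ("repository_path", "master_repositories"):
        repo = REPOS.get(args[0])
        if repo is None:
            return ("", "", 1)  # valid request, but nothing to report
        if cmd == "repository_path":
            return ("%s\n" % repo["location"], "", 0)
        return ("%s\n" % " ".join(repo["masters"]), "", 0)
    return ("", "Invalid command: %s\n" % cmd, 3)


print(query(["repository_path", "/", "gentoo"]))
# ('/usr/portage\n', '', 0)
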
52267 diff --git a/pym/portage/package/ebuild/_eapi_invalid.py b/pym/portage/package/ebuild/_metadata_invalid.py
52268 similarity index 67%
52269 rename from pym/portage/package/ebuild/_eapi_invalid.py
52270 rename to pym/portage/package/ebuild/_metadata_invalid.py
52271 index d23677d..bcf1f7f 100644
52272 --- a/pym/portage/package/ebuild/_eapi_invalid.py
52273 +++ b/pym/portage/package/ebuild/_metadata_invalid.py
52274 @@ -28,19 +28,6 @@ def eapi_invalid(self, cpv, repo_name, settings,
52275 "assignment on line: %s") %
52276 (eapi_var, eapi_lineno))
52277
52278 - if 'parse-eapi-ebuild-head' in settings.features:
52279 - msg.extend(textwrap.wrap(("NOTE: This error will soon"
52280 - " become unconditionally fatal in a future version of Portage,"
52281 - " but at this time, it can by made non-fatal by setting"
52282 - " FEATURES=-parse-eapi-ebuild-head in"
52283 - " make.conf."), 70))
52284 - else:
52285 - msg.extend(textwrap.wrap(("NOTE: This error will soon"
52286 - " become unconditionally fatal in a future version of Portage."
52287 - " At the earliest opportunity, please enable"
52288 - " FEATURES=parse-eapi-ebuild-head in make.conf in order to"
52289 - " make this error fatal."), 70))
52290 -
52291 if portage.data.secpass >= 2:
52292 # TODO: improve elog permission error handling (bug #416231)
52293 for line in msg:
52294
52295 diff --git a/pym/portage/package/ebuild/_parallel_manifest/ManifestProcess.py b/pym/portage/package/ebuild/_parallel_manifest/ManifestProcess.py
52296 new file mode 100644
52297 index 0000000..44e2576
52298 --- /dev/null
52299 +++ b/pym/portage/package/ebuild/_parallel_manifest/ManifestProcess.py
52300 @@ -0,0 +1,43 @@
52301 +# Copyright 2012 Gentoo Foundation
52302 +# Distributed under the terms of the GNU General Public License v2
52303 +
52304 +import portage
52305 +from portage import os
52306 +from portage.exception import (FileNotFound,
52307 + PermissionDenied, PortagePackageException)
52308 +from portage.localization import _
52309 +from portage.util._async.ForkProcess import ForkProcess
52310 +
52311 +class ManifestProcess(ForkProcess):
52312 +
52313 + __slots__ = ("cp", "distdir", "fetchlist_dict", "repo_config")
52314 +
52315 + MODIFIED = 16
52316 +
52317 + def _run(self):
52318 + mf = self.repo_config.load_manifest(
52319 + os.path.join(self.repo_config.location, self.cp),
52320 + self.distdir, fetchlist_dict=self.fetchlist_dict)
52321 +
52322 + try:
52323 + mf.create(assumeDistHashesAlways=True)
52324 + except FileNotFound as e:
52325 + portage.writemsg(_("!!! File %s doesn't exist, can't update "
52326 + "Manifest\n") % e, noiselevel=-1)
52327 + return 1
52328 +
52329 + except PortagePackageException as e:
52330 + portage.writemsg(("!!! %s\n") % (e,), noiselevel=-1)
52331 + return 1
52332 +
52333 + try:
52334 + modified = mf.write(sign=False)
52335 + except PermissionDenied as e:
52336 + portage.writemsg("!!! %s: %s\n" % (_("Permission Denied"), e,),
52337 + noiselevel=-1)
52338 + return 1
52339 + else:
52340 + if modified:
52341 + return self.MODIFIED
52342 + else:
52343 + return os.EX_OK
52344
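ManifestProcess runs the Manifest rewrite in a forked child and encodes the outcome in its exit status: os.EX_OK when the file was already current, a private MODIFIED value of 16 when it was rewritten, and 1 on failure, which lets the parent ManifestTask decide whether a fresh signature is needed. A tiny illustration of that convention, with write_manifest as a fake worker:

import os

MODIFIED = 16  # same convention as ManifestProcess.MODIFIED


def write_manifest(changed):
    """Fake worker standing in for Manifest.create() + Manifest.write()."""
    return changed


def run(changed):
    try:
        modified = write_manifest(changed)
    except OSError:
        return 1
    return MODIFIED if modified else os.EX_OK


for outcome in (False, True):
    code = run(outcome)
    print(code, "re-sign" if code == MODIFIED else "keep existing signature")
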
52345 diff --git a/pym/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py b/pym/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py
52346 new file mode 100644
52347 index 0000000..38ac482
52348 --- /dev/null
52349 +++ b/pym/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py
52350 @@ -0,0 +1,93 @@
52351 +# Copyright 2012-2013 Gentoo Foundation
52352 +# Distributed under the terms of the GNU General Public License v2
52353 +
52354 +import portage
52355 +from portage import os
52356 +from portage.dep import _repo_separator
52357 +from portage.exception import InvalidDependString
52358 +from portage.localization import _
52359 +from portage.util._async.AsyncScheduler import AsyncScheduler
52360 +from .ManifestTask import ManifestTask
52361 +
52362 +class ManifestScheduler(AsyncScheduler):
52363 +
52364 + def __init__(self, portdb, cp_iter=None,
52365 + gpg_cmd=None, gpg_vars=None, force_sign_key=None, **kwargs):
52366 +
52367 + AsyncScheduler.__init__(self, **kwargs)
52368 +
52369 + self._portdb = portdb
52370 +
52371 + if cp_iter is None:
52372 + cp_iter = self._iter_every_cp()
52373 + self._cp_iter = cp_iter
52374 + self._gpg_cmd = gpg_cmd
52375 + self._gpg_vars = gpg_vars
52376 + self._force_sign_key = force_sign_key
52377 + self._task_iter = self._iter_tasks()
52378 +
52379 + def _next_task(self):
52380 + return next(self._task_iter)
52381 +
52382 + def _iter_every_cp(self):
52383 + # List categories individually, in order to start yielding quicker,
52384 + # and in order to reduce latency in case of a signal interrupt.
52385 + cp_all = self._portdb.cp_all
52386 + for category in sorted(self._portdb.categories):
52387 + for cp in cp_all(categories=(category,)):
52388 + yield cp
52389 +
52390 + def _iter_tasks(self):
52391 + portdb = self._portdb
52392 + distdir = portdb.settings["DISTDIR"]
52393 + disabled_repos = set()
52394 +
52395 + for cp in self._cp_iter:
52396 + if self._terminated.is_set():
52397 + break
52398 + # We iterate over portdb.porttrees, since it's common to
52399 + # tweak this attribute in order to adjust repo selection.
52400 + for mytree in portdb.porttrees:
52401 + if self._terminated.is_set():
52402 + break
52403 + repo_config = portdb.repositories.get_repo_for_location(mytree)
52404 + if not repo_config.create_manifest:
52405 + if repo_config.name not in disabled_repos:
52406 + disabled_repos.add(repo_config.name)
52407 + portage.writemsg(
52408 + _(">>> Skipping creating Manifest for %s%s%s; "
52409 + "repository is configured to not use them\n") %
52410 + (cp, _repo_separator, repo_config.name),
52411 + noiselevel=-1)
52412 + continue
52413 + cpv_list = portdb.cp_list(cp, mytree=[repo_config.location])
52414 + if not cpv_list:
52415 + continue
52416 + fetchlist_dict = {}
52417 + try:
52418 + for cpv in cpv_list:
52419 + fetchlist_dict[cpv] = \
52420 + list(portdb.getFetchMap(cpv, mytree=mytree))
52421 + except InvalidDependString as e:
52422 + portage.writemsg(
52423 + _("!!! %s%s%s: SRC_URI: %s\n") %
52424 + (cp, _repo_separator, repo_config.name, e),
52425 + noiselevel=-1)
52426 + self._error_count += 1
52427 + continue
52428 +
52429 + yield ManifestTask(cp=cp, distdir=distdir,
52430 + fetchlist_dict=fetchlist_dict, repo_config=repo_config,
52431 + gpg_cmd=self._gpg_cmd, gpg_vars=self._gpg_vars,
52432 + force_sign_key=self._force_sign_key)
52433 +
52434 + def _task_exit(self, task):
52435 +
52436 + if task.returncode != os.EX_OK:
52437 + if not self._terminated_tasks:
52438 + portage.writemsg(
52439 + "Error processing %s%s%s, continuing...\n" %
52440 + (task.cp, _repo_separator, task.repo_config.name),
52441 + noiselevel=-1)
52442 +
52443 + AsyncScheduler._task_exit(self, task)
52444
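The scheduler's _iter_every_cp() walks categories one at a time instead of materialising the full package list up front, so the first tasks are yielded quickly and a termination request is noticed between categories. The same lazy-iteration idea in isolation, with made-up category data:

TREE = {
    "app-arch": ["app-arch/tar", "app-arch/xz-utils"],
    "dev-lang": ["dev-lang/python"],
}


def iter_every_cp(tree, terminated=lambda: False):
    """Yield category/package names lazily, category by category."""
    for category in sorted(tree):
        if terminated():
            return  # react to a stop request without scanning further
        for cp in tree[category]:
            yield cp


print(list(iter_every_cp(TREE)))
# ['app-arch/tar', 'app-arch/xz-utils', 'dev-lang/python']
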
52445 diff --git a/pym/portage/package/ebuild/_parallel_manifest/ManifestTask.py b/pym/portage/package/ebuild/_parallel_manifest/ManifestTask.py
52446 new file mode 100644
52447 index 0000000..0ee2b91
52448 --- /dev/null
52449 +++ b/pym/portage/package/ebuild/_parallel_manifest/ManifestTask.py
52450 @@ -0,0 +1,186 @@
52451 +# Copyright 2012-2013 Gentoo Foundation
52452 +# Distributed under the terms of the GNU General Public License v2
52453 +
52454 +import errno
52455 +import re
52456 +import subprocess
52457 +
52458 +from portage import os
52459 +from portage import _unicode_encode, _encodings
52460 +from portage.const import MANIFEST2_IDENTIFIERS
52461 +from portage.util import (atomic_ofstream, grablines,
52462 + shlex_split, varexpand, writemsg)
52463 +from portage.util._async.PipeLogger import PipeLogger
52464 +from portage.util._async.PopenProcess import PopenProcess
52465 +from _emerge.CompositeTask import CompositeTask
52466 +from _emerge.PipeReader import PipeReader
52467 +from .ManifestProcess import ManifestProcess
52468 +
52469 +class ManifestTask(CompositeTask):
52470 +
52471 + __slots__ = ("cp", "distdir", "fetchlist_dict", "gpg_cmd",
52472 + "gpg_vars", "repo_config", "force_sign_key", "_manifest_path")
52473 +
52474 + _PGP_HEADER = b"BEGIN PGP SIGNED MESSAGE"
52475 + _manifest_line_re = re.compile(r'^(%s) ' % "|".join(MANIFEST2_IDENTIFIERS))
52476 + _gpg_key_id_re = re.compile(r'^[0-9A-F]*$')
52477 + _gpg_key_id_lengths = (8, 16, 24, 32, 40)
52478 +
52479 + def _start(self):
52480 + self._manifest_path = os.path.join(self.repo_config.location,
52481 + self.cp, "Manifest")
52482 + manifest_proc = ManifestProcess(cp=self.cp, distdir=self.distdir,
52483 + fetchlist_dict=self.fetchlist_dict, repo_config=self.repo_config,
52484 + scheduler=self.scheduler)
52485 + self._start_task(manifest_proc, self._manifest_proc_exit)
52486 +
52487 + def _manifest_proc_exit(self, manifest_proc):
52488 + self._assert_current(manifest_proc)
52489 + if manifest_proc.returncode not in (os.EX_OK, manifest_proc.MODIFIED):
52490 + self.returncode = manifest_proc.returncode
52491 + self._current_task = None
52492 + self.wait()
52493 + return
52494 +
52495 + modified = manifest_proc.returncode == manifest_proc.MODIFIED
52496 + sign = self.gpg_cmd is not None
52497 +
52498 + if not modified and sign:
52499 + sign = self._need_signature()
52500 + if not sign and self.force_sign_key is not None \
52501 + and os.path.exists(self._manifest_path):
52502 + self._check_sig_key()
52503 + return
52504 +
52505 + if not sign or not os.path.exists(self._manifest_path):
52506 + self.returncode = os.EX_OK
52507 + self._current_task = None
52508 + self.wait()
52509 + return
52510 +
52511 + self._start_gpg_proc()
52512 +
52513 + def _check_sig_key(self):
52514 + null_fd = os.open('/dev/null', os.O_RDONLY)
52515 + popen_proc = PopenProcess(proc=subprocess.Popen(
52516 + ["gpg", "--verify", self._manifest_path],
52517 + stdin=null_fd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT),
52518 + pipe_reader=PipeReader())
52519 + os.close(null_fd)
52520 + popen_proc.pipe_reader.input_files = {
52521 + "producer" : popen_proc.proc.stdout}
52522 + self._start_task(popen_proc, self._check_sig_key_exit)
52523 +
52524 + @staticmethod
52525 + def _parse_gpg_key(output):
52526 + """
52527 + Returns the first token which appears to represent a gpg key
52528 + id, or None if there is no such token.
52529 + """
52530 + regex = ManifestTask._gpg_key_id_re
52531 + lengths = ManifestTask._gpg_key_id_lengths
52532 + for token in output.split():
52533 + m = regex.match(token)
52534 + if m is not None and len(m.group(0)) in lengths:
52535 + return m.group(0)
52536 + return None
52537 +
52538 + @staticmethod
52539 + def _normalize_gpg_key(key_str):
52540 + """
52541 + Strips leading "0x" and trailing "!", and converts to uppercase
52542 + (intended to be the same format as that in gpg --verify output).
52543 + """
52544 + key_str = key_str.upper()
52545 + if key_str.startswith("0X"):
52546 + key_str = key_str[2:]
52547 + key_str = key_str.rstrip("!")
52548 + return key_str
52549 +
52550 + def _check_sig_key_exit(self, proc):
52551 + self._assert_current(proc)
52552 +
52553 + parsed_key = self._parse_gpg_key(
52554 + proc.pipe_reader.getvalue().decode('utf_8', 'replace'))
52555 + if parsed_key is not None and \
52556 + self._normalize_gpg_key(parsed_key) == \
52557 + self._normalize_gpg_key(self.force_sign_key):
52558 + self.returncode = os.EX_OK
52559 + self._current_task = None
52560 + self.wait()
52561 + return
52562 +
52563 + if self._was_cancelled():
52564 + self.wait()
52565 + return
52566 +
52567 + self._strip_sig(self._manifest_path)
52568 + self._start_gpg_proc()
52569 +
52570 + @staticmethod
52571 + def _strip_sig(manifest_path):
52572 + """
52573 + Strip an existing signature from a Manifest file.
52574 + """
52575 + line_re = ManifestTask._manifest_line_re
52576 + lines = grablines(manifest_path)
52577 + f = None
52578 + try:
52579 + f = atomic_ofstream(manifest_path)
52580 + for line in lines:
52581 + if line_re.match(line) is not None:
52582 + f.write(line)
52583 + f.close()
52584 + f = None
52585 + finally:
52586 + if f is not None:
52587 + f.abort()
52588 +
52589 + def _start_gpg_proc(self):
52590 + gpg_vars = self.gpg_vars
52591 + if gpg_vars is None:
52592 + gpg_vars = {}
52593 + else:
52594 + gpg_vars = gpg_vars.copy()
52595 + gpg_vars["FILE"] = self._manifest_path
52596 + gpg_cmd = varexpand(self.gpg_cmd, mydict=gpg_vars)
52597 + gpg_cmd = shlex_split(gpg_cmd)
52598 + gpg_proc = PopenProcess(proc=subprocess.Popen(gpg_cmd,
52599 + stdout=subprocess.PIPE, stderr=subprocess.STDOUT))
52600 + # PipeLogger echoes output and efficiently monitors for process
52601 + # exit by listening for the stdout EOF event.
52602 + gpg_proc.pipe_reader = PipeLogger(background=self.background,
52603 + input_fd=gpg_proc.proc.stdout, scheduler=self.scheduler)
52604 + self._start_task(gpg_proc, self._gpg_proc_exit)
52605 +
52606 + def _gpg_proc_exit(self, gpg_proc):
52607 + if self._default_exit(gpg_proc) != os.EX_OK:
52608 + self.wait()
52609 + return
52610 +
52611 + rename_args = (self._manifest_path + ".asc", self._manifest_path)
52612 + try:
52613 + os.rename(*rename_args)
52614 + except OSError as e:
52615 + writemsg("!!! rename('%s', '%s'): %s\n" % rename_args + (e,),
52616 + noiselevel=-1)
52617 + try:
52618 + os.unlink(self._manifest_path + ".asc")
52619 + except OSError:
52620 + pass
52621 + self.returncode = 1
52622 + else:
52623 + self.returncode = os.EX_OK
52624 +
52625 + self._current_task = None
52626 + self.wait()
52627 +
52628 + def _need_signature(self):
52629 + try:
52630 + with open(_unicode_encode(self._manifest_path,
52631 + encoding=_encodings['fs'], errors='strict'), 'rb') as f:
52632 + return self._PGP_HEADER not in f.readline()
52633 + except IOError as e:
52634 + if e.errno in (errno.ENOENT, errno.ESTALE):
52635 + return False
52636 + raise
52637
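The signature check above re-signs the Manifest only when the key id reported by "gpg --verify" differs, after normalization, from the configured force_sign_key. A minimal standalone sketch of that comparison, assuming a plain hex-token regex and the usual 8/16/24/32/40 character key-id lengths (the real _gpg_key_id_re and _gpg_key_id_lengths class attributes live elsewhere in ManifestTask.py and are not shown in this hunk):

import re

# Assumed stand-ins for ManifestTask._gpg_key_id_re and
# ManifestTask._gpg_key_id_lengths, which this hunk does not show.
_KEY_ID_RE = re.compile(r'[0-9A-Fa-f]+$')
_KEY_ID_LENGTHS = frozenset([8, 16, 24, 32, 40])

def parse_gpg_key(output):
    """Return the first token of gpg output that looks like a key id."""
    for token in output.split():
        m = _KEY_ID_RE.match(token)
        if m is not None and len(m.group(0)) in _KEY_ID_LENGTHS:
            return m.group(0)
    return None

def normalize_gpg_key(key_str):
    """Strip a leading "0x" and a trailing "!", then upper-case."""
    key_str = key_str.upper()
    if key_str.startswith("0X"):
        key_str = key_str[2:]
    return key_str.rstrip("!")

# The user-supplied key and the id scraped from gpg output compare
# equal after normalization, so no re-signing would be triggered.
reported = parse_gpg_key("gpg: using RSA key ID ABCD1234")
assert normalize_gpg_key("0xabcd1234!") == normalize_gpg_key(reported)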
52638 diff --git a/pym/portage/package/ebuild/_parallel_manifest/__init__.py b/pym/portage/package/ebuild/_parallel_manifest/__init__.py
52639 new file mode 100644
52640 index 0000000..418ad86
52641 --- /dev/null
52642 +++ b/pym/portage/package/ebuild/_parallel_manifest/__init__.py
52643 @@ -0,0 +1,2 @@
52644 +# Copyright 2012 Gentoo Foundation
52645 +# Distributed under the terms of the GNU General Public License v2
52646
52647 diff --git a/pym/portage/package/ebuild/_spawn_nofetch.py b/pym/portage/package/ebuild/_spawn_nofetch.py
52648 index 94f8c79..0fc53c8 100644
52649 --- a/pym/portage/package/ebuild/_spawn_nofetch.py
52650 +++ b/pym/portage/package/ebuild/_spawn_nofetch.py
52651 @@ -1,8 +1,9 @@
52652 -# Copyright 2010-2011 Gentoo Foundation
52653 +# Copyright 2010-2013 Gentoo Foundation
52654 # Distributed under the terms of the GNU General Public License v2
52655
52656 import tempfile
52657
52658 +import portage
52659 from portage import os
52660 from portage import shutil
52661 from portage.const import EBUILD_PHASES
52662 @@ -10,10 +11,12 @@ from portage.elog import elog_process
52663 from portage.package.ebuild.config import config
52664 from portage.package.ebuild.doebuild import doebuild_environment
52665 from portage.package.ebuild.prepare_build_dirs import prepare_build_dirs
52666 +from portage.util._async.SchedulerInterface import SchedulerInterface
52667 +from portage.util._eventloop.EventLoop import EventLoop
52668 +from portage.util._eventloop.global_event_loop import global_event_loop
52669 from _emerge.EbuildPhase import EbuildPhase
52670 -from _emerge.PollScheduler import PollScheduler
52671
52672 -def spawn_nofetch(portdb, ebuild_path, settings=None):
52673 +def spawn_nofetch(portdb, ebuild_path, settings=None, fd_pipes=None):
52674 """
52675 This spawns pkg_nofetch if appropriate. The settings parameter
52676 is useful only if setcpv has already been called in order
52677 @@ -47,7 +50,7 @@ def spawn_nofetch(portdb, ebuild_path, settings=None):
52678 settings = config(clone=settings)
52679
52680 if 'PORTAGE_PARALLEL_FETCHONLY' in settings:
52681 - return
52682 + return os.EX_OK
52683
52684 # We must create our private PORTAGE_TMPDIR before calling
52685 # doebuild_environment(), since lots of variables such
52686 @@ -59,7 +62,7 @@ def spawn_nofetch(portdb, ebuild_path, settings=None):
52687 settings['PORTAGE_TMPDIR'] = private_tmpdir
52688 settings.backup_changes('PORTAGE_TMPDIR')
52689 # private temp dir was just created, so it's not locked yet
52690 - settings.pop('PORTAGE_BUILDIR_LOCKED', None)
52691 + settings.pop('PORTAGE_BUILDDIR_LOCKED', None)
52692
52693 try:
52694 doebuild_environment(ebuild_path, 'nofetch',
52695 @@ -73,14 +76,18 @@ def spawn_nofetch(portdb, ebuild_path, settings=None):
52696
52697 if 'fetch' not in restrict and \
52698 'nofetch' not in defined_phases:
52699 - return
52700 + return os.EX_OK
52701
52702 prepare_build_dirs(settings=settings)
52703 ebuild_phase = EbuildPhase(background=False,
52704 - phase='nofetch', scheduler=PollScheduler().sched_iface,
52705 - settings=settings)
52706 + phase='nofetch',
52707 + scheduler=SchedulerInterface(portage._internal_caller and
52708 + global_event_loop() or EventLoop(main=False)),
52709 + fd_pipes=fd_pipes, settings=settings)
52710 ebuild_phase.start()
52711 ebuild_phase.wait()
52712 elog_process(settings.mycpv, settings)
52713 finally:
52714 shutil.rmtree(private_tmpdir)
52715 +
52716 + return ebuild_phase.returncode
52717
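With this hunk spawn_nofetch() schedules the pkg_nofetch phase on a SchedulerInterface backed by the global event loop (or a private EventLoop for external API consumers) and reports an exit status instead of returning None; the not-applicable cases now return os.EX_OK. A rough sketch of how a caller might consume that status, where the cpv below is only a placeholder:

import portage
from portage.package.ebuild._spawn_nofetch import spawn_nofetch

portdb = portage.portdb
# Placeholder package; any ebuild with RESTRICT=fetch would do.
ebuild_path = portdb.findname("sys-apps/example-1.0")

if ebuild_path is not None:
    rc = spawn_nofetch(portdb, ebuild_path)
    if rc != portage.os.EX_OK:
        # A non-zero status now means pkg_nofetch itself failed,
        # not merely that the phase was skipped.
        print("pkg_nofetch exited with status %s" % rc)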
52718 diff --git a/pym/portage/package/ebuild/config.py b/pym/portage/package/ebuild/config.py
52719 index 2fa799f..e104501 100644
52720 --- a/pym/portage/package/ebuild/config.py
52721 +++ b/pym/portage/package/ebuild/config.py
52722 @@ -1,6 +1,8 @@
52723 -# Copyright 2010-2012 Gentoo Foundation
52724 +# Copyright 2010-2014 Gentoo Foundation
52725 # Distributed under the terms of the GNU General Public License v2
52726
52727 +from __future__ import unicode_literals
52728 +
52729 __all__ = [
52730 'autouse', 'best_from_dict', 'check_config_instance', 'config',
52731 ]
52732 @@ -19,20 +21,21 @@ from _emerge.Package import Package
52733 import portage
52734 portage.proxy.lazyimport.lazyimport(globals(),
52735 'portage.data:portage_gid',
52736 + 'portage.dbapi.vartree:vartree',
52737 + 'portage.package.ebuild.doebuild:_phase_func_map',
52738 )
52739 from portage import bsd_chflags, \
52740 load_mod, os, selinux, _unicode_decode
52741 from portage.const import CACHE_PATH, \
52742 DEPCACHE_PATH, INCREMENTALS, MAKE_CONF_FILE, \
52743 - MODULES_FILE_PATH, \
52744 + MODULES_FILE_PATH, PORTAGE_BASE_PATH, \
52745 PRIVATE_PATH, PROFILE_PATH, USER_CONFIG_PATH, \
52746 USER_VIRTUALS_FILE
52747 from portage.dbapi import dbapi
52748 from portage.dbapi.porttree import portdbapi
52749 -from portage.dbapi.vartree import vartree
52750 from portage.dep import Atom, isvalidatom, match_from_list, use_reduce, _repo_separator, _slot_separator
52751 from portage.eapi import eapi_exports_AA, eapi_exports_merge_type, \
52752 - eapi_supports_prefix, eapi_exports_replace_vars
52753 + eapi_supports_prefix, eapi_exports_replace_vars, _get_eapi_attrs
52754 from portage.env.loaders import KeyValuePairFileLoader
52755 from portage.exception import InvalidDependString, PortageException
52756 from portage.localization import _
52757 @@ -42,7 +45,8 @@ from portage.repository.config import load_repository_config
52758 from portage.util import ensure_dirs, getconfig, grabdict, \
52759 grabdict_package, grabfile, grabfile_package, LazyItemsDict, \
52760 normalize_path, shlex_split, stack_dictlist, stack_dicts, stack_lists, \
52761 - writemsg, writemsg_level
52762 + writemsg, writemsg_level, _eapi_cache
52763 +from portage.util._path import exists_raise_eaccess, isdir_raise_eaccess
52764 from portage.versions import catpkgsplit, catsplit, cpv_getkey, _pkg_str
52765
52766 from portage.package.ebuild._config import special_env_vars
52767 @@ -55,10 +59,30 @@ from portage.package.ebuild._config.LocationsManager import LocationsManager
52768 from portage.package.ebuild._config.MaskManager import MaskManager
52769 from portage.package.ebuild._config.VirtualsManager import VirtualsManager
52770 from portage.package.ebuild._config.helper import ordered_by_atom_specificity, prune_incremental
52771 +from portage.package.ebuild._config.unpack_dependencies import load_unpack_dependencies_configuration
52772
52773 if sys.hexversion >= 0x3000000:
52774 + # pylint: disable=W0622
52775 basestring = str
52776
52777 +_feature_flags_cache = {}
52778 +
52779 +def _get_feature_flags(eapi_attrs):
52780 + cache_key = (eapi_attrs.feature_flag_test, eapi_attrs.feature_flag_targetroot)
52781 + flags = _feature_flags_cache.get(cache_key)
52782 + if flags is not None:
52783 + return flags
52784 +
52785 + flags = []
52786 + if eapi_attrs.feature_flag_test:
52787 + flags.append("test")
52788 + if eapi_attrs.feature_flag_targetroot:
52789 + flags.append("targetroot")
52790 +
52791 + flags = frozenset(flags)
52792 + _feature_flags_cache[cache_key] = flags
52793 + return flags
52794 +
52795 def autouse(myvartree, use_cache=1, mysettings=None):
52796 warnings.warn("portage.autouse() is deprecated",
52797 DeprecationWarning, stacklevel=2)
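_get_feature_flags() memoizes one frozenset per (test, targetroot) combination so that repeated setcpv() calls reuse the same object instead of rebuilding it. The same keyed-memoization pattern in isolation, with a namedtuple standing in for the record returned by _get_eapi_attrs():

from collections import namedtuple

# Stand-in for the eapi attribute record; only the two feature-flag
# fields matter for this sketch.
EapiAttrs = namedtuple("EapiAttrs",
    ["feature_flag_test", "feature_flag_targetroot"])

_cache = {}

def feature_flags(eapi_attrs):
    key = (eapi_attrs.feature_flag_test,
        eapi_attrs.feature_flag_targetroot)
    flags = _cache.get(key)
    if flags is None:
        flags = frozenset(name for name, enabled in
            (("test", eapi_attrs.feature_flag_test),
            ("targetroot", eapi_attrs.feature_flag_targetroot))
            if enabled)
        _cache[key] = flags
    return flags

attrs = EapiAttrs(feature_flag_test=True, feature_flag_targetroot=False)
assert feature_flags(attrs) == frozenset(["test"])
# The second call returns the identical cached object.
assert feature_flags(attrs) is feature_flags(attrs)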
52798 @@ -123,9 +147,9 @@ class config(object):
52799 """
52800
52801 _constant_keys = frozenset(['PORTAGE_BIN_PATH', 'PORTAGE_GID',
52802 - 'PORTAGE_PYM_PATH'])
52803 + 'PORTAGE_PYM_PATH', 'PORTAGE_PYTHONPATH'])
52804
52805 - _setcpv_aux_keys = ('DEFINED_PHASES', 'DEPEND', 'EAPI',
52806 + _setcpv_aux_keys = ('DEFINED_PHASES', 'DEPEND', 'EAPI', 'HDEPEND',
52807 'INHERITED', 'IUSE', 'REQUIRED_USE', 'KEYWORDS', 'LICENSE', 'PDEPEND',
52808 'PROPERTIES', 'PROVIDE', 'RDEPEND', 'SLOT',
52809 'repository', 'RESTRICT', 'LICENSE',)
52810 @@ -146,7 +170,7 @@ class config(object):
52811 def __init__(self, clone=None, mycpv=None, config_profile_path=None,
52812 config_incrementals=None, config_root=None, target_root=None,
52813 eprefix=None, local_config=True, env=None,
52814 - _unmatched_removal=False):
52815 + _unmatched_removal=False, repositories=None):
52816 """
52817 @param clone: If provided, init will use deepcopy to copy by value the instance.
52818 @type clone: Instance of config class.
52819 @@ -160,7 +184,8 @@ class config(object):
52820 @type config_incrementals: List
52821 @param config_root: path to read local config from (defaults to "/", see PORTAGE_CONFIGROOT)
52822 @type config_root: String
52823 - @param target_root: __init__ override of $ROOT env variable.
52824 + @param target_root: the target root, which typically corresponds to the
52825 + value of the $ROOT env variable (default is /)
52826 @type target_root: String
52827 @param eprefix: set the EPREFIX variable (default is portage.const.EPREFIX)
52828 @type eprefix: String
52829 @@ -173,8 +198,14 @@ class config(object):
52830 @param _unmatched_removal: Enabled by repoman when the
52831 --unmatched-removal option is given.
52832 @type _unmatched_removal: Boolean
52833 + @param repositories: Configuration of repositories.
52834 + Defaults to portage.repository.config.load_repository_config().
52835 + @type repositories: Instance of portage.repository.config.RepoConfigLoader class.
52836 """
52837
52838 + # This is important when config is reloaded after emerge --sync.
52839 + _eapi_cache.clear()
52840 +
52841 # When initializing the global portage.settings instance, avoid
52842 # raising exceptions whenever possible since exceptions thrown
52843 # from 'import portage' or 'import portage.exceptions' statements
52844 @@ -192,8 +223,10 @@ class config(object):
52845 self.uvlist = []
52846 self._accept_chost_re = None
52847 self._accept_properties = None
52848 + self._accept_restrict = None
52849 self._features_overrides = []
52850 self._make_defaults = None
52851 + self._parent_stable = None
52852
52853 # _unknown_features records unknown features that
52854 # have triggered warning messages, and ensures that
52855 @@ -215,6 +248,8 @@ class config(object):
52856 self.profiles = clone.profiles
52857 self.packages = clone.packages
52858 self.repositories = clone.repositories
52859 + self.unpack_dependencies = clone.unpack_dependencies
52860 + self._iuse_effective = clone._iuse_effective
52861 self._iuse_implicit_match = clone._iuse_implicit_match
52862 self._non_user_variables = clone._non_user_variables
52863 self._env_d_blacklist = clone._env_d_blacklist
52864 @@ -277,6 +312,8 @@ class config(object):
52865
52866 self._accept_properties = copy.deepcopy(clone._accept_properties)
52867 self._ppropertiesdict = copy.deepcopy(clone._ppropertiesdict)
52868 + self._accept_restrict = copy.deepcopy(clone._accept_restrict)
52869 + self._paccept_restrict = copy.deepcopy(clone._paccept_restrict)
52870 self._penvdict = copy.deepcopy(clone._penvdict)
52871 self._expand_map = copy.deepcopy(clone._expand_map)
52872
52873 @@ -294,15 +331,30 @@ class config(object):
52874 eprefix = locations_manager.eprefix
52875 config_root = locations_manager.config_root
52876 abs_user_config = locations_manager.abs_user_config
52877 + make_conf_paths = [
52878 + os.path.join(config_root, 'etc', 'make.conf'),
52879 + os.path.join(config_root, MAKE_CONF_FILE)
52880 + ]
52881 + try:
52882 + if os.path.samefile(*make_conf_paths):
52883 + make_conf_paths.pop()
52884 + except OSError:
52885 + pass
52886
52887 - make_conf = getconfig(
52888 - os.path.join(config_root, MAKE_CONF_FILE),
52889 - tolerant=tolerant, allow_sourcing=True) or {}
52890 -
52891 - make_conf.update(getconfig(
52892 - os.path.join(abs_user_config, 'make.conf'),
52893 - tolerant=tolerant, allow_sourcing=True,
52894 - expand=make_conf) or {})
52895 + make_conf_count = 0
52896 + make_conf = {}
52897 + for x in make_conf_paths:
52898 + mygcfg = getconfig(x,
52899 + tolerant=tolerant, allow_sourcing=True,
52900 + expand=make_conf, recursive=True)
52901 + if mygcfg is not None:
52902 + make_conf.update(mygcfg)
52903 + make_conf_count += 1
52904 +
52905 + if make_conf_count == 2:
52906 + writemsg("!!! %s\n" %
52907 + _("Found 2 make.conf files, using both '%s' and '%s'") %
52908 + tuple(make_conf_paths), noiselevel=-1)
52909
52910 # Allow ROOT setting to come from make.conf if it's not overridden
52911 # by the constructor argument (from the calling environment).
52912 @@ -335,8 +387,23 @@ class config(object):
52913 # Allow make.globals to set default paths relative to ${EPREFIX}.
52914 expand_map["EPREFIX"] = eprefix
52915
52916 - make_globals = getconfig(os.path.join(
52917 - self.global_config_path, 'make.globals'),
52918 + if portage._not_installed:
52919 + make_globals_path = os.path.join(PORTAGE_BASE_PATH, "cnf", "make.globals")
52920 + else:
52921 + make_globals_path = os.path.join(self.global_config_path, "make.globals")
52922 + old_make_globals = os.path.join(config_root, "etc", "make.globals")
52923 + if os.path.isfile(old_make_globals) and \
52924 + not os.path.samefile(make_globals_path, old_make_globals):
52925 + # Don't warn if they refer to the same path, since
52926 + # that can be used for backward compatibility with
52927 + # old software.
52928 + writemsg("!!! %s\n" %
52929 + _("Found obsolete make.globals file: "
52930 + "'%s', (using '%s' instead)") %
52931 + (old_make_globals, make_globals_path),
52932 + noiselevel=-1)
52933 +
52934 + make_globals = getconfig(make_globals_path,
52935 tolerant=tolerant, expand=expand_map)
52936 if make_globals is None:
52937 make_globals = {}
52938 @@ -426,6 +493,7 @@ class config(object):
52939 known_repos = []
52940 portdir = ""
52941 portdir_overlay = ""
52942 + portdir_sync = None
52943 for confs in [make_globals, make_conf, self.configdict["env"]]:
52944 v = confs.get("PORTDIR")
52945 if v is not None:
52946 @@ -435,12 +503,52 @@ class config(object):
52947 if v is not None:
52948 portdir_overlay = v
52949 known_repos.extend(shlex_split(v))
52950 + v = confs.get("SYNC")
52951 + if v is not None:
52952 + portdir_sync = v
52953 +
52954 known_repos = frozenset(known_repos)
52955 self["PORTDIR"] = portdir
52956 self["PORTDIR_OVERLAY"] = portdir_overlay
52957 + if portdir_sync:
52958 + self["SYNC"] = portdir_sync
52959 self.lookuplist = [self.configdict["env"]]
52960 - self.repositories = load_repository_config(self)
52961 + if repositories is None:
52962 + self.repositories = load_repository_config(self)
52963 + else:
52964 + self.repositories = repositories
52965 +
52966 + self['PORTAGE_REPOSITORIES'] = self.repositories.config_string()
52967 + self.backup_changes('PORTAGE_REPOSITORIES')
52968 +
52969 + #filling PORTDIR and PORTDIR_OVERLAY variable for compatibility
52970 + main_repo = self.repositories.mainRepo()
52971 + if main_repo is not None:
52972 + self["PORTDIR"] = main_repo.user_location
52973 + self.backup_changes("PORTDIR")
52974 + expand_map["PORTDIR"] = self["PORTDIR"]
52975
52976 + # repoman controls PORTDIR_OVERLAY via the environment, so no
52977 + # special cases are needed here.
52978 + portdir_overlay = list(self.repositories.repoUserLocationList())
52979 + if portdir_overlay and portdir_overlay[0] == self["PORTDIR"]:
52980 + portdir_overlay = portdir_overlay[1:]
52981 +
52982 + new_ov = []
52983 + if portdir_overlay:
52984 + for ov in portdir_overlay:
52985 + ov = normalize_path(ov)
52986 + if isdir_raise_eaccess(ov) or portage._sync_mode:
52987 + new_ov.append(portage._shell_quote(ov))
52988 + else:
52989 + writemsg(_("!!! Invalid PORTDIR_OVERLAY"
52990 + " (not a dir): '%s'\n") % ov, noiselevel=-1)
52991 +
52992 + self["PORTDIR_OVERLAY"] = " ".join(new_ov)
52993 + self.backup_changes("PORTDIR_OVERLAY")
52994 + expand_map["PORTDIR_OVERLAY"] = self["PORTDIR_OVERLAY"]
52995 +
52996 + locations_manager.set_port_dirs(self["PORTDIR"], self["PORTDIR_OVERLAY"])
52997 locations_manager.load_profiles(self.repositories, known_repos)
52998
52999 profiles_complex = locations_manager.profiles_complex
53000 @@ -460,12 +568,13 @@ class config(object):
53001 x = Atom(x.lstrip('*'))
53002 self.prevmaskdict.setdefault(x.cp, []).append(x)
53003
53004 + self.unpack_dependencies = load_unpack_dependencies_configuration(self.repositories)
53005
53006 mygcfg = {}
53007 - if self.profiles:
53008 - mygcfg_dlists = [getconfig(os.path.join(x, "make.defaults"),
53009 - tolerant=tolerant, expand=expand_map)
53010 - for x in self.profiles]
53011 + if profiles_complex:
53012 + mygcfg_dlists = [getconfig(os.path.join(x.location, "make.defaults"),
53013 + tolerant=tolerant, expand=expand_map, recursive=x.portage1_directories)
53014 + for x in profiles_complex]
53015 self._make_defaults = mygcfg_dlists
53016 mygcfg = stack_dicts(mygcfg_dlists,
53017 incrementals=self.incrementals)
53018 @@ -474,15 +583,11 @@ class config(object):
53019 self.configlist.append(mygcfg)
53020 self.configdict["defaults"]=self.configlist[-1]
53021
53022 - mygcfg = getconfig(
53023 - os.path.join(config_root, MAKE_CONF_FILE),
53024 - tolerant=tolerant, allow_sourcing=True,
53025 - expand=expand_map) or {}
53026 -
53027 - mygcfg.update(getconfig(
53028 - os.path.join(abs_user_config, 'make.conf'),
53029 - tolerant=tolerant, allow_sourcing=True,
53030 - expand=expand_map) or {})
53031 + mygcfg = {}
53032 + for x in make_conf_paths:
53033 + mygcfg.update(getconfig(x,
53034 + tolerant=tolerant, allow_sourcing=True,
53035 + expand=expand_map, recursive=True) or {})
53036
53037 # Don't allow the user to override certain variables in make.conf
53038 profile_only_variables = self.configdict["defaults"].get(
53039 @@ -535,54 +640,25 @@ class config(object):
53040 self.backup_changes("PORTAGE_CONFIGROOT")
53041 self["ROOT"] = target_root
53042 self.backup_changes("ROOT")
53043 -
53044 - # The PORTAGE_OVERRIDE_EPREFIX variable propagates the EPREFIX
53045 - # of this config instance to any portage commands or API
53046 - # consumers running in subprocesses.
53047 self["EPREFIX"] = eprefix
53048 self.backup_changes("EPREFIX")
53049 - self["PORTAGE_OVERRIDE_EPREFIX"] = eprefix
53050 - self.backup_changes("PORTAGE_OVERRIDE_EPREFIX")
53051 self["EROOT"] = eroot
53052 self.backup_changes("EROOT")
53053
53054 + # The prefix of the running portage instance is used in the
53055 + # ebuild environment to implement the --host-root option for
53056 + # best_version and has_version.
53057 + self["PORTAGE_OVERRIDE_EPREFIX"] = portage.const.EPREFIX
53058 + self.backup_changes("PORTAGE_OVERRIDE_EPREFIX")
53059 +
53060 self._ppropertiesdict = portage.dep.ExtendedAtomDict(dict)
53061 + self._paccept_restrict = portage.dep.ExtendedAtomDict(dict)
53062 self._penvdict = portage.dep.ExtendedAtomDict(dict)
53063
53064 - #filling PORTDIR and PORTDIR_OVERLAY variable for compatibility
53065 - main_repo = self.repositories.mainRepo()
53066 - if main_repo is not None:
53067 - self["PORTDIR"] = main_repo.user_location
53068 - self.backup_changes("PORTDIR")
53069 -
53070 - # repoman controls PORTDIR_OVERLAY via the environment, so no
53071 - # special cases are needed here.
53072 - portdir_overlay = list(self.repositories.repoUserLocationList())
53073 - if portdir_overlay and portdir_overlay[0] == self["PORTDIR"]:
53074 - portdir_overlay = portdir_overlay[1:]
53075 -
53076 - new_ov = []
53077 - if portdir_overlay:
53078 - shell_quote_re = re.compile(r"[\s\\\"'$`]")
53079 - for ov in portdir_overlay:
53080 - ov = normalize_path(ov)
53081 - if os.path.isdir(ov):
53082 - if shell_quote_re.search(ov) is not None:
53083 - ov = portage._shell_quote(ov)
53084 - new_ov.append(ov)
53085 - else:
53086 - writemsg(_("!!! Invalid PORTDIR_OVERLAY"
53087 - " (not a dir): '%s'\n") % ov, noiselevel=-1)
53088 -
53089 - self["PORTDIR_OVERLAY"] = " ".join(new_ov)
53090 - self.backup_changes("PORTDIR_OVERLAY")
53091 -
53092 - locations_manager.set_port_dirs(self["PORTDIR"], self["PORTDIR_OVERLAY"])
53093 -
53094 self._repo_make_defaults = {}
53095 for repo in self.repositories.repos_with_profiles():
53096 d = getconfig(os.path.join(repo.location, "profiles", "make.defaults"),
53097 - tolerant=tolerant, expand=self.configdict["globals"].copy()) or {}
53098 + tolerant=tolerant, expand=self.configdict["globals"].copy(), recursive=repo.portage1_profiles) or {}
53099 if d:
53100 for k in chain(self._env_blacklist,
53101 profile_only_variables, self._global_only_vars):
53102 @@ -590,7 +666,8 @@ class config(object):
53103 self._repo_make_defaults[repo.name] = d
53104
53105 #Read all USE related files from profiles and optionally from user config.
53106 - self._use_manager = UseManager(self.repositories, profiles_complex, abs_user_config, user_config=local_config)
53107 + self._use_manager = UseManager(self.repositories, profiles_complex,
53108 + abs_user_config, self._isStable, user_config=local_config)
53109 #Initialize all USE related variables we track ourselves.
53110 self.usemask = self._use_manager.getUseMask()
53111 self.useforce = self._use_manager.getUseForce()
53112 @@ -620,6 +697,20 @@ class config(object):
53113 for k, v in propdict.items():
53114 self._ppropertiesdict.setdefault(k.cp, {})[k] = v
53115
53116 + # package.accept_restrict
53117 + d = grabdict_package(os.path.join(
53118 + abs_user_config, "package.accept_restrict"),
53119 + recursive=True, allow_wildcard=True,
53120 + allow_repo=True, verify_eapi=False)
53121 + v = d.pop("*/*", None)
53122 + if v is not None:
53123 + if "ACCEPT_RESTRICT" in self.configdict["conf"]:
53124 + self.configdict["conf"]["ACCEPT_RESTRICT"] += " " + " ".join(v)
53125 + else:
53126 + self.configdict["conf"]["ACCEPT_RESTRICT"] = " ".join(v)
53127 + for k, v in d.items():
53128 + self._paccept_restrict.setdefault(k.cp, {})[k] = v
53129 +
53130 #package.env
53131 penvdict = grabdict_package(os.path.join(
53132 abs_user_config, "package.env"), recursive=1, allow_wildcard=True, \
53133 @@ -706,21 +797,9 @@ class config(object):
53134 self.backupenv["USE_ORDER"] = "env:pkg:conf:defaults:pkginternal:repo:env.d"
53135
53136 self.depcachedir = DEPCACHE_PATH
53137 - if eprefix:
53138 - # See comments about make.globals and EPREFIX
53139 - # above. DEPCACHE_PATH is similar.
53140 - if target_root == "/":
53141 - # case (1) above
53142 - self.depcachedir = os.path.join(eprefix,
53143 - DEPCACHE_PATH.lstrip(os.sep))
53144 - else:
53145 - # case (2) above
53146 - # For now, just assume DEPCACHE_PATH is relative
53147 - # to EPREFIX.
53148 - # TODO: Pass in more info to the constructor,
53149 - # so we know the host system configuration.
53150 - self.depcachedir = os.path.join(eprefix,
53151 - DEPCACHE_PATH.lstrip(os.sep))
53152 + if portage.const.EPREFIX:
53153 + self.depcachedir = os.path.join(portage.const.EPREFIX,
53154 + DEPCACHE_PATH.lstrip(os.sep))
53155
53156 if self.get("PORTAGE_DEPCACHEDIR", None):
53157 self.depcachedir = self["PORTAGE_DEPCACHEDIR"]
53158 @@ -787,12 +866,17 @@ class config(object):
53159 self[var] = default_val
53160 self.backup_changes(var)
53161
53162 + if portage._internal_caller:
53163 + self["PORTAGE_INTERNAL_CALLER"] = "1"
53164 + self.backup_changes("PORTAGE_INTERNAL_CALLER")
53165 +
53166 # initialize self.features
53167 self.regenerate()
53168
53169 if bsd_chflags:
53170 self.features.add('chflags')
53171
53172 + self._iuse_effective = self._calc_iuse_effective()
53173 self._iuse_implicit_match = _iuse_implicit_match_cache(self)
53174
53175 self._validate_commands()
53176 @@ -802,11 +886,6 @@ class config(object):
53177 self[k] = self[k].lower()
53178 self.backup_changes(k)
53179
53180 - if main_repo is not None and not main_repo.sync:
53181 - main_repo_sync = self.get("SYNC")
53182 - if main_repo_sync:
53183 - main_repo.sync = main_repo_sync
53184 -
53185 # The first constructed config object initializes these modules,
53186 # and subsequent calls to the _init() functions have no effect.
53187 portage.output._init(config_root=self['PORTAGE_CONFIGROOT'])
53188 @@ -949,13 +1028,23 @@ class config(object):
53189 writemsg(_("!!! INVALID ACCEPT_KEYWORDS: %s\n") % str(group),
53190 noiselevel=-1)
53191
53192 - profile_broken = not self.profile_path or \
53193 - not os.path.exists(os.path.join(self.profile_path, "parent")) and \
53194 - os.path.exists(os.path.join(self["PORTDIR"], "profiles"))
53195 + profile_broken = False
53196 +
53197 + if not self.profile_path:
53198 + profile_broken = True
53199 + else:
53200 + # If any one of these files exists, then
53201 + # the profile is considered valid.
53202 + for x in ("make.defaults", "parent",
53203 + "packages", "use.force", "use.mask"):
53204 + if exists_raise_eaccess(os.path.join(self.profile_path, x)):
53205 + break
53206 + else:
53207 + profile_broken = True
53208
53209 - if profile_broken:
53210 + if profile_broken and not portage._sync_mode:
53211 abs_profile_path = None
53212 - for x in (PROFILE_PATH, 'etc/portage/make.profile'):
53213 + for x in (PROFILE_PATH, 'etc/make.profile'):
53214 x = os.path.join(self["PORTAGE_CONFIGROOT"], x)
53215 try:
53216 os.lstat(x)
53217 @@ -1121,8 +1210,11 @@ class config(object):
53218 the previously calculated USE settings.
53219 """
53220
53221 - def __init__(self, use, usemask, iuse_implicit,
53222 + def __init__(self, settings, unfiltered_use,
53223 + use, usemask, iuse_implicit,
53224 use_expand_split, use_expand_dict):
53225 + self._settings = settings
53226 + self._unfiltered_use = unfiltered_use
53227 self._use = use
53228 self._usemask = usemask
53229 self._iuse_implicit = iuse_implicit
53230 @@ -1177,13 +1269,32 @@ class config(object):
53231 # Don't export empty USE_EXPAND vars unless the user config
53232 # exports them as empty. This is required for vars such as
53233 # LINGUAS, where unset and empty have different meanings.
53234 + # The special '*' token is understood by ebuild.sh, which
53235 + # will unset the variable so that things like LINGUAS work
53236 + # properly (see bug #459350).
53237 if has_wildcard:
53238 - # ebuild.sh will see this and unset the variable so
53239 - # that things like LINGUAS work properly
53240 value = '*'
53241 else:
53242 if has_iuse:
53243 - value = ''
53244 + already_set = False
53245 + # Skip the first 'env' configdict, in order to
53246 + # avoid infinite recursion here, since that dict's
53247 + # __getitem__ calls the current __getitem__.
53248 + for d in self._settings.lookuplist[1:]:
53249 + if key in d:
53250 + already_set = True
53251 + break
53252 +
53253 + if not already_set:
53254 + for x in self._unfiltered_use:
53255 + if x[:prefix_len] == prefix:
53256 + already_set = True
53257 + break
53258 +
53259 + if already_set:
53260 + value = ''
53261 + else:
53262 + value = '*'
53263 else:
53264 # It's not in IUSE, so just allow the variable content
53265 # to pass through if it is defined somewhere. This
53266 @@ -1219,7 +1330,7 @@ class config(object):
53267 if not isinstance(mycpv, basestring):
53268 pkg = mycpv
53269 mycpv = pkg.cpv
53270 - mydb = pkg.metadata
53271 + mydb = pkg._metadata
53272 explicit_iuse = pkg.iuse.all
53273 args_hash = (mycpv, id(pkg))
53274 if pkg.built:
53275 @@ -1240,6 +1351,7 @@ class config(object):
53276 iuse = ""
53277 pkg_configdict = self.configdict["pkg"]
53278 previous_iuse = pkg_configdict.get("IUSE")
53279 + previous_iuse_effective = pkg_configdict.get("IUSE_EFFECTIVE")
53280 previous_features = pkg_configdict.get("FEATURES")
53281
53282 aux_keys = self._setcpv_aux_keys
53283 @@ -1251,6 +1363,7 @@ class config(object):
53284 pkg_configdict["CATEGORY"] = cat
53285 pkg_configdict["PF"] = pf
53286 repository = None
53287 + eapi = None
53288 if mydb:
53289 if not hasattr(mydb, "aux_get"):
53290 for k in aux_keys:
53291 @@ -1277,14 +1390,16 @@ class config(object):
53292 # Empty USE means this dbapi instance does not contain
53293 # built packages.
53294 built_use = None
53295 + eapi = pkg_configdict['EAPI']
53296
53297 repository = pkg_configdict.pop("repository", None)
53298 if repository is not None:
53299 pkg_configdict["PORTAGE_REPO_NAME"] = repository
53300 - slot = pkg_configdict["SLOT"]
53301 iuse = pkg_configdict["IUSE"]
53302 if pkg is None:
53303 - cpv_slot = _pkg_str(self.mycpv, slot=slot, repo=repository)
53304 + self.mycpv = _pkg_str(self.mycpv, metadata=pkg_configdict,
53305 + settings=self)
53306 + cpv_slot = self.mycpv
53307 else:
53308 cpv_slot = pkg
53309 pkginternaluse = []
53310 @@ -1294,6 +1409,9 @@ class config(object):
53311 elif x.startswith("-"):
53312 pkginternaluse.append(x)
53313 pkginternaluse = " ".join(pkginternaluse)
53314 +
53315 + eapi_attrs = _get_eapi_attrs(eapi)
53316 +
53317 if pkginternaluse != self.configdict["pkginternal"].get("USE", ""):
53318 self.configdict["pkginternal"]["USE"] = pkginternaluse
53319 has_changed = True
53320 @@ -1424,30 +1542,70 @@ class config(object):
53321
53322 # If reset() has not been called, it's safe to return
53323 # early if IUSE has not changed.
53324 - if not has_changed and previous_iuse == iuse:
53325 + if not has_changed and previous_iuse == iuse and \
53326 + (previous_iuse_effective is not None == eapi_attrs.iuse_effective):
53327 return
53328
53329 # Filter out USE flags that aren't part of IUSE. This has to
53330 # be done for every setcpv() call since practically every
53331 # package has different IUSE.
53332 use = set(self["USE"].split())
53333 + unfiltered_use = frozenset(use)
53334 if explicit_iuse is None:
53335 explicit_iuse = frozenset(x.lstrip("+-") for x in iuse.split())
53336 - iuse_implicit_match = self._iuse_implicit_match
53337 - portage_iuse = self._get_implicit_iuse()
53338 - portage_iuse.update(explicit_iuse)
53339 +
53340 + if eapi_attrs.iuse_effective:
53341 + iuse_implicit_match = self._iuse_effective_match
53342 + portage_iuse = set(self._iuse_effective)
53343 + portage_iuse.update(explicit_iuse)
53344 + self.configdict["pkg"]["IUSE_EFFECTIVE"] = \
53345 + " ".join(sorted(portage_iuse))
53346 + else:
53347 + iuse_implicit_match = self._iuse_implicit_match
53348 + portage_iuse = self._get_implicit_iuse()
53349 + portage_iuse.update(explicit_iuse)
53350
53351 # PORTAGE_IUSE is not always needed so it's lazily evaluated.
53352 self.configdict["env"].addLazySingleton(
53353 "PORTAGE_IUSE", _lazy_iuse_regex, portage_iuse)
53354
53355 - ebuild_force_test = self.get("EBUILD_FORCE_TEST") == "1"
53356 + if pkg is None:
53357 + raw_restrict = pkg_configdict.get("RESTRICT")
53358 + else:
53359 + raw_restrict = pkg._raw_metadata["RESTRICT"]
53360 +
53361 + restrict_test = False
53362 + if raw_restrict:
53363 + try:
53364 + if built_use is not None:
53365 + restrict = use_reduce(raw_restrict,
53366 + uselist=built_use, flat=True)
53367 + else:
53368 + # Use matchnone=True to ignore USE conditional parts
53369 + # of RESTRICT, since we want to know whether to mask
53370 + # the "test" flag _before_ we know the USE values
53371 + # that would be needed to evaluate the USE
53372 + # conditionals (see bug #273272).
53373 + restrict = use_reduce(raw_restrict,
53374 + matchnone=True, flat=True)
53375 + except PortageException:
53376 + pass
53377 + else:
53378 + restrict_test = "test" in restrict
53379 +
53380 + ebuild_force_test = not restrict_test and \
53381 + self.get("EBUILD_FORCE_TEST") == "1"
53382 +
53383 if ebuild_force_test and \
53384 not hasattr(self, "_ebuild_force_test_msg_shown"):
53385 self._ebuild_force_test_msg_shown = True
53386 writemsg(_("Forcing test.\n"), noiselevel=-1)
53387 - if "test" in self.features:
53388 - if "test" in self.usemask and not ebuild_force_test:
53389 +
53390 + if "test" in explicit_iuse or iuse_implicit_match("test"):
53391 + if "test" not in self.features:
53392 + use.discard("test")
53393 + elif restrict_test or \
53394 + ("test" in self.usemask and not ebuild_force_test):
53395 # "test" is in IUSE and USE=test is masked, so execution
53396 # of src_test() probably is not reliable. Therefore,
53397 # temporarily disable FEATURES=test just for this package.
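The block above decides whether FEATURES=test may be honoured for a package: built packages evaluate RESTRICT against their recorded USE, while ebuilds call use_reduce() with matchnone=True so USE-conditional parts of RESTRICT are ignored, since the USE flags needed to evaluate them are not known yet (bug #273272). A small illustration of that call with made-up RESTRICT strings:

from portage.dep import use_reduce

# Unconditional tokens always survive matchnone=True ...
assert use_reduce("fetch test", matchnone=True, flat=True) == \
    ["fetch", "test"]

# ... while USE-conditional groups are dropped entirely, so a purely
# conditional "test" restriction does not mask FEATURES=test here.
assert use_reduce("foo? ( test )", matchnone=True, flat=True) == []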
53398 @@ -1460,6 +1618,13 @@ class config(object):
53399 self.usemask = \
53400 frozenset(x for x in self.usemask if x != "test")
53401
53402 + if eapi_attrs.feature_flag_targetroot and \
53403 + ("targetroot" in explicit_iuse or iuse_implicit_match("targetroot")):
53404 + if self["ROOT"] != "/":
53405 + use.add("targetroot")
53406 + else:
53407 + use.discard("targetroot")
53408 +
53409 # Allow _* flags from USE_EXPAND wildcards to pass through here.
53410 use.difference_update([x for x in use \
53411 if (x not in explicit_iuse and \
53412 @@ -1470,7 +1635,8 @@ class config(object):
53413 # comparison instead of startswith().
53414 use_expand_split = set(x.lower() for \
53415 x in self.get('USE_EXPAND', '').split())
53416 - lazy_use_expand = self._lazy_use_expand(use, self.usemask,
53417 + lazy_use_expand = self._lazy_use_expand(
53418 + self, unfiltered_use, use, self.usemask,
53419 portage_iuse, use_expand_split, self._use_expand_dict)
53420
53421 use_expand_iuses = {}
53422 @@ -1500,6 +1666,14 @@ class config(object):
53423 self.configdict['env'].addLazySingleton(k,
53424 lazy_use_expand.__getitem__, k)
53425
53426 + for k in self.get("USE_EXPAND_UNPREFIXED", "").split():
53427 + var_split = self.get(k, '').split()
53428 + var_split = [ x for x in var_split if x in use ]
53429 + if var_split:
53430 + self.configlist[-1][k] = ' '.join(var_split)
53431 + elif k in self:
53432 + self.configlist[-1][k] = ''
53433 +
53434 # Filtered for the ebuild environment. Store this in a separate
53435 # attribute since we still want to be able to see global USE
53436 # settings for things like emerge --info.
53437 @@ -1507,6 +1681,10 @@ class config(object):
53438 self.configdict["env"]["PORTAGE_USE"] = \
53439 " ".join(sorted(x for x in use if x[-2:] != '_*'))
53440
53441 + # Clear the eapi cache here rather than in the constructor, since
53442 + # setcpv triggers lazy instantiation of things like _use_manager.
53443 + _eapi_cache.clear()
53444 +
53445 def _grab_pkg_env(self, penv, container, protected_keys=None):
53446 if protected_keys is None:
53447 protected_keys = ()
53448 @@ -1540,9 +1718,42 @@ class config(object):
53449 else:
53450 container[k] = v
53451
53452 + def _iuse_effective_match(self, flag):
53453 + return flag in self._iuse_effective
53454 +
53455 + def _calc_iuse_effective(self):
53456 + """
53457 + Beginning with EAPI 5, IUSE_EFFECTIVE is defined by PMS.
53458 + """
53459 + iuse_effective = []
53460 + iuse_effective.extend(self.get("IUSE_IMPLICIT", "").split())
53461 +
53462 + # USE_EXPAND_IMPLICIT should contain things like ARCH, ELIBC,
53463 + # KERNEL, and USERLAND.
53464 + use_expand_implicit = frozenset(
53465 + self.get("USE_EXPAND_IMPLICIT", "").split())
53466 +
53467 + # USE_EXPAND_UNPREFIXED should contain at least ARCH, and
53468 + # USE_EXPAND_VALUES_ARCH should contain all valid ARCH flags.
53469 + for v in self.get("USE_EXPAND_UNPREFIXED", "").split():
53470 + if v not in use_expand_implicit:
53471 + continue
53472 + iuse_effective.extend(
53473 + self.get("USE_EXPAND_VALUES_" + v, "").split())
53474 +
53475 + use_expand = frozenset(self.get("USE_EXPAND", "").split())
53476 + for v in use_expand_implicit:
53477 + if v not in use_expand:
53478 + continue
53479 + lower_v = v.lower()
53480 + for x in self.get("USE_EXPAND_VALUES_" + v, "").split():
53481 + iuse_effective.append(lower_v + "_" + x)
53482 +
53483 + return frozenset(iuse_effective)
53484 +
53485 def _get_implicit_iuse(self):
53486 """
53487 - Some flags are considered to
53488 + Prior to EAPI 5, these flags are considered to
53489 be implicit members of IUSE:
53490 * Flags derived from ARCH
53491 * Flags derived from USE_EXPAND_HIDDEN variables
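Beginning with EAPI 5 the implicit part of IUSE is assembled from profile variables instead of the older heuristics: IUSE_IMPLICIT contributes flags directly, unprefixed USE_EXPAND variables (typically ARCH) contribute their USE_EXPAND_VALUES_* verbatim, and prefixed ones contribute lower-cased prefixed flags. A standalone sketch of the same derivation over a plain dict, using illustrative values rather than a real profile:

def calc_iuse_effective(settings):
    """Mirror of config._calc_iuse_effective() for a plain dict."""
    iuse_effective = settings.get("IUSE_IMPLICIT", "").split()

    use_expand_implicit = frozenset(
        settings.get("USE_EXPAND_IMPLICIT", "").split())
    use_expand = frozenset(settings.get("USE_EXPAND", "").split())

    for v in settings.get("USE_EXPAND_UNPREFIXED", "").split():
        if v in use_expand_implicit:
            iuse_effective.extend(
                settings.get("USE_EXPAND_VALUES_" + v, "").split())

    for v in use_expand_implicit:
        if v in use_expand:
            lower_v = v.lower()
            iuse_effective.extend(lower_v + "_" + x
                for x in settings.get("USE_EXPAND_VALUES_" + v, "").split())

    return frozenset(iuse_effective)

profile = {
    "IUSE_IMPLICIT": "prefix",
    "USE_EXPAND_UNPREFIXED": "ARCH",
    "USE_EXPAND_IMPLICIT": "ARCH ELIBC",
    "USE_EXPAND": "ELIBC LINGUAS",
    "USE_EXPAND_VALUES_ARCH": "amd64 x86",
    "USE_EXPAND_VALUES_ELIBC": "glibc uclibc",
}
assert calc_iuse_effective(profile) == frozenset(
    ["prefix", "amd64", "x86", "elibc_glibc", "elibc_uclibc"])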
53492 @@ -1579,11 +1790,11 @@ class config(object):
53493
53494 return iuse_implicit
53495
53496 - def _getUseMask(self, pkg):
53497 - return self._use_manager.getUseMask(pkg)
53498 + def _getUseMask(self, pkg, stable=None):
53499 + return self._use_manager.getUseMask(pkg, stable=stable)
53500
53501 - def _getUseForce(self, pkg):
53502 - return self._use_manager.getUseForce(pkg)
53503 + def _getUseForce(self, pkg, stable=None):
53504 + return self._use_manager.getUseForce(pkg, stable=stable)
53505
53506 def _getMaskAtom(self, cpv, metadata):
53507 """
53508 @@ -1648,6 +1859,11 @@ class config(object):
53509 return x
53510 return None
53511
53512 + def _isStable(self, pkg):
53513 + return self._keywords_manager.isStable(pkg,
53514 + self.get("ACCEPT_KEYWORDS", ""),
53515 + self.configdict["backupenv"].get("ACCEPT_KEYWORDS", ""))
53516 +
53517 def _getKeywords(self, cpv, metadata):
53518 return self._keywords_manager.getKeywords(cpv, metadata["SLOT"], \
53519 metadata.get("KEYWORDS", ""), metadata.get("repository"))
53520 @@ -1736,9 +1952,10 @@ class config(object):
53521 @return: A list of properties that have not been accepted.
53522 """
53523 accept_properties = self._accept_properties
53524 - if not hasattr(cpv, 'slot'):
53525 - cpv = _pkg_str(cpv, slot=metadata["SLOT"],
53526 - repo=metadata.get("repository"))
53527 + try:
53528 + cpv.slot
53529 + except AttributeError:
53530 + cpv = _pkg_str(cpv, metadata=metadata, settings=self)
53531 cp = cpv_getkey(cpv)
53532 cpdict = self._ppropertiesdict.get(cp)
53533 if cpdict:
53534 @@ -1750,7 +1967,6 @@ class config(object):
53535
53536 properties_str = metadata.get("PROPERTIES", "")
53537 properties = set(use_reduce(properties_str, matchall=1, flat=True))
53538 - properties.discard('||')
53539
53540 acceptable_properties = set()
53541 for x in accept_properties:
53542 @@ -1768,40 +1984,58 @@ class config(object):
53543 else:
53544 use = []
53545
53546 - properties_struct = use_reduce(properties_str, uselist=use, opconvert=True)
53547 - return self._getMaskedProperties(properties_struct, acceptable_properties)
53548 -
53549 - def _getMaskedProperties(self, properties_struct, acceptable_properties):
53550 - if not properties_struct:
53551 - return []
53552 - if properties_struct[0] == "||":
53553 - ret = []
53554 - for element in properties_struct[1:]:
53555 - if isinstance(element, list):
53556 - if element:
53557 - tmp = self._getMaskedProperties(
53558 - element, acceptable_properties)
53559 - if not tmp:
53560 - return []
53561 - ret.extend(tmp)
53562 - else:
53563 - if element in acceptable_properties:
53564 - return[]
53565 - ret.append(element)
53566 - # Return all masked properties, since we don't know which combination
53567 - # (if any) the user will decide to unmask
53568 - return ret
53569 -
53570 - ret = []
53571 - for element in properties_struct:
53572 - if isinstance(element, list):
53573 - if element:
53574 - ret.extend(self._getMaskedProperties(element,
53575 - acceptable_properties))
53576 + return [x for x in use_reduce(properties_str, uselist=use, flat=True)
53577 + if x not in acceptable_properties]
53578 +
53579 + def _getMissingRestrict(self, cpv, metadata):
53580 + """
53581 + Take a RESTRICT string and return a list of any tokens the user
53582 + may need to accept for the given package. The returned list will not
53583 + contain any tokens that have already been accepted. This method
53584 + can throw an InvalidDependString exception.
53585 +
53586 + @param cpv: The package name (for package.accept_restrict support)
53587 + @type cpv: String
53588 + @param metadata: A dictionary of raw package metadata
53589 + @type metadata: dict
53590 + @rtype: List
53591 + @return: A list of tokens that have not been accepted.
53592 + """
53593 + accept_restrict = self._accept_restrict
53594 + try:
53595 + cpv.slot
53596 + except AttributeError:
53597 + cpv = _pkg_str(cpv, metadata=metadata, settings=self)
53598 + cp = cpv_getkey(cpv)
53599 + cpdict = self._paccept_restrict.get(cp)
53600 + if cpdict:
53601 + paccept_restrict_list = ordered_by_atom_specificity(cpdict, cpv)
53602 + if paccept_restrict_list:
53603 + accept_restrict = list(self._accept_restrict)
53604 + for x in paccept_restrict_list:
53605 + accept_restrict.extend(x)
53606 +
53607 + restrict_str = metadata.get("RESTRICT", "")
53608 + all_restricts = set(use_reduce(restrict_str, matchall=1, flat=True))
53609 +
53610 + acceptable_restricts = set()
53611 + for x in accept_restrict:
53612 + if x == '*':
53613 + acceptable_restricts.update(all_restricts)
53614 + elif x == '-*':
53615 + acceptable_restricts.clear()
53616 + elif x[:1] == '-':
53617 + acceptable_restricts.discard(x[1:])
53618 else:
53619 - if element not in acceptable_properties:
53620 - ret.append(element)
53621 - return ret
53622 + acceptable_restricts.add(x)
53623 +
53624 + if "?" in restrict_str:
53625 + use = metadata["USE"].split()
53626 + else:
53627 + use = []
53628 +
53629 + return [x for x in use_reduce(restrict_str, uselist=use, flat=True)
53630 + if x not in acceptable_restricts]
53631
53632 def _accept_chost(self, cpv, metadata):
53633 """
53634 @@ -1940,6 +2174,18 @@ class config(object):
53635 # repoman will accept any property
53636 self._accept_properties = ('*',)
53637
53638 + if self.local_config:
53639 + mysplit = []
53640 + for curdb in mydbs:
53641 + mysplit.extend(curdb.get('ACCEPT_RESTRICT', '').split())
53642 + mysplit = prune_incremental(mysplit)
53643 + self.configlist[-1]['ACCEPT_RESTRICT'] = ' '.join(mysplit)
53644 + if tuple(mysplit) != self._accept_restrict:
53645 + self._accept_restrict = tuple(mysplit)
53646 + else:
53647 + # repoman will accept any property
53648 + self._accept_restrict = ('*',)
53649 +
53650 increment_lists = {}
53651 for k in myincrementals:
53652 incremental_list = []
53653 @@ -1994,6 +2240,8 @@ class config(object):
53654 if v is not None:
53655 use_expand_dict[k] = v
53656
53657 + use_expand_unprefixed = self.get("USE_EXPAND_UNPREFIXED", "").split()
53658 +
53659 # In order to best accomodate the long-standing practice of
53660 # setting default USE_EXPAND variables in the profile's
53661 # make.defaults, we translate these variables into their
53662 @@ -2007,6 +2255,12 @@ class config(object):
53663 continue
53664 use = cfg.get("USE", "")
53665 expand_use = []
53666 +
53667 + for k in use_expand_unprefixed:
53668 + v = cfg.get(k)
53669 + if v is not None:
53670 + expand_use.extend(v.split())
53671 +
53672 for k in use_expand_dict:
53673 v = cfg.get(k)
53674 if v is None:
53675 @@ -2044,6 +2298,17 @@ class config(object):
53676 iuse = [x.lstrip("+-") for x in iuse.split()]
53677 myflags = set()
53678 for curdb in self.uvlist:
53679 +
53680 + for k in use_expand_unprefixed:
53681 + v = curdb.get(k)
53682 + if v is None:
53683 + continue
53684 + for x in v.split():
53685 + if x[:1] == "-":
53686 + myflags.discard(x[1:])
53687 + else:
53688 + myflags.add(x)
53689 +
53690 cur_use_expand = [x for x in use_expand if x in curdb]
53691 mysplit = curdb.get("USE", "").split()
53692 if not mysplit and not cur_use_expand:
53693 @@ -2160,6 +2425,14 @@ class config(object):
53694 elif k in self:
53695 self.configlist[-1][k] = ''
53696
53697 + for k in use_expand_unprefixed:
53698 + var_split = self.get(k, '').split()
53699 + var_split = [ x for x in var_split if x in myflags ]
53700 + if var_split:
53701 + self.configlist[-1][k] = ' '.join(var_split)
53702 + elif k in self:
53703 + self.configlist[-1][k] = ''
53704 +
53705 @property
53706 def virts_p(self):
53707 warnings.warn("portage config.virts_p attribute " + \
53708 @@ -2220,8 +2493,22 @@ class config(object):
53709 elif mykey == "PORTAGE_PYM_PATH":
53710 return portage._pym_path
53711
53712 + elif mykey == "PORTAGE_PYTHONPATH":
53713 + value = [x for x in \
53714 + self.backupenv.get("PYTHONPATH", "").split(":") if x]
53715 + need_pym_path = True
53716 + if value:
53717 + try:
53718 + need_pym_path = not os.path.samefile(value[0],
53719 + portage._pym_path)
53720 + except OSError:
53721 + pass
53722 + if need_pym_path:
53723 + value.insert(0, portage._pym_path)
53724 + return ":".join(value)
53725 +
53726 elif mykey == "PORTAGE_GID":
53727 - return _unicode_decode(str(portage_gid))
53728 + return "%s" % portage_gid
53729
53730 for d in self.lookuplist:
53731 try:
53732 @@ -2308,6 +2595,7 @@ class config(object):
53733 environ_filter = self._environ_filter
53734
53735 eapi = self.get('EAPI')
53736 + eapi_attrs = _get_eapi_attrs(eapi)
53737 phase = self.get('EBUILD_PHASE')
53738 filter_calling_env = False
53739 if self.mycpv is not None and \
53740 @@ -2389,14 +2677,20 @@ class config(object):
53741 not eapi_exports_replace_vars(eapi):
53742 mydict.pop("REPLACED_BY_VERSION", None)
53743
53744 + if phase is not None and eapi_attrs.exports_EBUILD_PHASE_FUNC:
53745 + phase_func = _phase_func_map.get(phase)
53746 + if phase_func is not None:
53747 + mydict["EBUILD_PHASE_FUNC"] = phase_func
53748 +
53749 return mydict
53750
53751 def thirdpartymirrors(self):
53752 if getattr(self, "_thirdpartymirrors", None) is None:
53753 - profileroots = [os.path.join(self["PORTDIR"], "profiles")]
53754 - for x in shlex_split(self.get("PORTDIR_OVERLAY", "")):
53755 - profileroots.insert(0, os.path.join(x, "profiles"))
53756 - thirdparty_lists = [grabdict(os.path.join(x, "thirdpartymirrors")) for x in profileroots]
53757 + thirdparty_lists = []
53758 + for repo_name in reversed(self.repositories.prepos_order):
53759 + thirdparty_lists.append(grabdict(os.path.join(
53760 + self.repositories[repo_name].location,
53761 + "profiles", "thirdpartymirrors")))
53762 self._thirdpartymirrors = stack_dictlist(thirdparty_lists, incremental=True)
53763 return self._thirdpartymirrors
53764
53765
53766 diff --git a/pym/portage/package/ebuild/deprecated_profile_check.py b/pym/portage/package/ebuild/deprecated_profile_check.py
53767 index 3fab4da..fdb19b4 100644
53768 --- a/pym/portage/package/ebuild/deprecated_profile_check.py
53769 +++ b/pym/portage/package/ebuild/deprecated_profile_check.py
53770 @@ -1,10 +1,11 @@
53771 -# Copyright 2010-2011 Gentoo Foundation
53772 +# Copyright 2010-2013 Gentoo Foundation
53773 # Distributed under the terms of the GNU General Public License v2
53774
53775 __all__ = ['deprecated_profile_check']
53776
53777 import io
53778
53779 +import portage
53780 from portage import os, _encodings, _unicode_encode
53781 from portage.const import DEPRECATED_PROFILE_FILE
53782 from portage.localization import _
53783 @@ -12,16 +13,32 @@ from portage.output import colorize
53784 from portage.util import writemsg
53785
53786 def deprecated_profile_check(settings=None):
53787 - config_root = "/"
53788 + config_root = None
53789 + eprefix = None
53790 + deprecated_profile_file = None
53791 if settings is not None:
53792 config_root = settings["PORTAGE_CONFIGROOT"]
53793 - deprecated_profile_file = os.path.join(config_root,
53794 - DEPRECATED_PROFILE_FILE)
53795 - if not os.access(deprecated_profile_file, os.R_OK):
53796 - return False
53797 - dcontent = io.open(_unicode_encode(deprecated_profile_file,
53798 + eprefix = settings["EPREFIX"]
53799 + for x in reversed(settings.profiles):
53800 + deprecated_profile_file = os.path.join(x, "deprecated")
53801 + if os.access(deprecated_profile_file, os.R_OK):
53802 + break
53803 + else:
53804 + deprecated_profile_file = None
53805 +
53806 + if deprecated_profile_file is None:
53807 + deprecated_profile_file = os.path.join(config_root or "/",
53808 + DEPRECATED_PROFILE_FILE)
53809 + if not os.access(deprecated_profile_file, os.R_OK):
53810 + deprecated_profile_file = os.path.join(config_root or "/",
53811 + 'etc', 'make.profile', 'deprecated')
53812 + if not os.access(deprecated_profile_file, os.R_OK):
53813 + return
53814 +
53815 + with io.open(_unicode_encode(deprecated_profile_file,
53816 encoding=_encodings['fs'], errors='strict'),
53817 - mode='r', encoding=_encodings['content'], errors='replace').readlines()
53818 + mode='r', encoding=_encodings['content'], errors='replace') as f:
53819 + dcontent = f.readlines()
53820 writemsg(colorize("BAD", _("\n!!! Your current profile is "
53821 "deprecated and not supported anymore.")) + "\n", noiselevel=-1)
53822 writemsg(colorize("BAD", _("!!! Use eselect profile to update your "
53823 @@ -30,13 +47,37 @@ def deprecated_profile_check(settings=None):
53824 writemsg(colorize("BAD", _("!!! Please refer to the "
53825 "Gentoo Upgrading Guide.")) + "\n", noiselevel=-1)
53826 return True
53827 - newprofile = dcontent[0]
53828 + newprofile = dcontent[0].rstrip("\n")
53829 writemsg(colorize("BAD", _("!!! Please upgrade to the "
53830 - "following profile if possible:")) + "\n", noiselevel=-1)
53831 - writemsg(8*" " + colorize("GOOD", newprofile) + "\n", noiselevel=-1)
53832 + "following profile if possible:")) + "\n\n", noiselevel=-1)
53833 + writemsg(8*" " + colorize("GOOD", newprofile) + "\n\n", noiselevel=-1)
53834 if len(dcontent) > 1:
53835 writemsg(_("To upgrade do the following steps:\n"), noiselevel=-1)
53836 for myline in dcontent[1:]:
53837 writemsg(myline, noiselevel=-1)
53838 writemsg("\n\n", noiselevel=-1)
53839 + else:
53840 + writemsg(_("You may use the following command to upgrade:\n\n"), noiselevel=-1)
53841 + writemsg(8*" " + colorize("INFORM", 'eselect profile set ' +
53842 + newprofile) + "\n\n", noiselevel=-1)
53843 +
53844 + if settings is not None:
53845 + main_repo_loc = settings.repositories.mainRepoLocation()
53846 + new_profile_path = os.path.join(main_repo_loc,
53847 + "profiles", newprofile.rstrip("\n"))
53848 +
53849 + if os.path.isdir(new_profile_path):
53850 + new_config = portage.config(config_root=config_root,
53851 + config_profile_path=new_profile_path,
53852 + eprefix=eprefix)
53853 +
53854 + if not new_config.profiles:
53855 + writemsg("\n %s %s\n" % (colorize("WARN", "*"),
53856 + _("You must update portage before you "
53857 + "can migrate to the above profile.")), noiselevel=-1)
53858 + writemsg(" %s %s\n\n" % (colorize("WARN", "*"),
53859 + _("In order to update portage, "
53860 + "run 'emerge --oneshot portage'.")),
53861 + noiselevel=-1)
53862 +
53863 return True
53864
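deprecated_profile_check() now walks settings.profiles in reverse, so the most specific profile node is consulted first for a readable "deprecated" file, and it only falls back to the legacy /etc/make.profile locations when no node carries one. The for/else search pattern in isolation, with a made-up profile stack:

import os

# Illustrative stack; settings.profiles lists the inherited profile
# directories from least to most specific.
profiles = [
    "/usr/portage/profiles/base",
    "/usr/portage/profiles/default/linux/amd64/13.0",
]

for node in reversed(profiles):
    deprecated_file = os.path.join(node, "deprecated")
    if os.access(deprecated_file, os.R_OK):
        break
else:
    # No profile node is marked deprecated; fall back or give up.
    deprecated_file = None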
53865 diff --git a/pym/portage/package/ebuild/digestcheck.py b/pym/portage/package/ebuild/digestcheck.py
53866 index 8705639..e207ba8 100644
53867 --- a/pym/portage/package/ebuild/digestcheck.py
53868 +++ b/pym/portage/package/ebuild/digestcheck.py
53869 @@ -1,4 +1,4 @@
53870 -# Copyright 2010-2011 Gentoo Foundation
53871 +# Copyright 2010-2012 Gentoo Foundation
53872 # Distributed under the terms of the GNU General Public License v2
53873
53874 __all__ = ['digestcheck']
53875 @@ -6,6 +6,7 @@ __all__ = ['digestcheck']
53876 import warnings
53877
53878 from portage import os, _encodings, _unicode_decode
53879 +from portage.checksum import _hash_filter
53880 from portage.exception import DigestException, FileNotFound
53881 from portage.localization import _
53882 from portage.output import EOutput
53883 @@ -28,6 +29,9 @@ def digestcheck(myfiles, mysettings, strict=False, justmanifest=None, mf=None):
53884 if mysettings.get("EBUILD_SKIP_MANIFEST") == "1":
53885 return 1
53886 pkgdir = mysettings["O"]
53887 + hash_filter = _hash_filter(mysettings.get("PORTAGE_CHECKSUM_FILTER", ""))
53888 + if hash_filter.transparent:
53889 + hash_filter = None
53890 if mf is None:
53891 mf = mysettings.repositories.get_repo_for_location(
53892 os.path.dirname(os.path.dirname(pkgdir)))
53893 @@ -38,15 +42,16 @@ def digestcheck(myfiles, mysettings, strict=False, justmanifest=None, mf=None):
53894 if not mf.thin and strict and "PORTAGE_PARALLEL_FETCHONLY" not in mysettings:
53895 if mf.fhashdict.get("EBUILD"):
53896 eout.ebegin(_("checking ebuild checksums ;-)"))
53897 - mf.checkTypeHashes("EBUILD")
53898 + mf.checkTypeHashes("EBUILD", hash_filter=hash_filter)
53899 eout.eend(0)
53900 if mf.fhashdict.get("AUX"):
53901 eout.ebegin(_("checking auxfile checksums ;-)"))
53902 - mf.checkTypeHashes("AUX")
53903 + mf.checkTypeHashes("AUX", hash_filter=hash_filter)
53904 eout.eend(0)
53905 if mf.fhashdict.get("MISC"):
53906 eout.ebegin(_("checking miscfile checksums ;-)"))
53907 - mf.checkTypeHashes("MISC", ignoreMissingFiles=True)
53908 + mf.checkTypeHashes("MISC", ignoreMissingFiles=True,
53909 + hash_filter=hash_filter)
53910 eout.eend(0)
53911 for f in myfiles:
53912 eout.ebegin(_("checking %s ;-)") % f)
53913 @@ -58,7 +63,7 @@ def digestcheck(myfiles, mysettings, strict=False, justmanifest=None, mf=None):
53914 writemsg(_("\n!!! Missing digest for '%s'\n") % (f,),
53915 noiselevel=-1)
53916 return 0
53917 - mf.checkFileHashes(ftype, f)
53918 + mf.checkFileHashes(ftype, f, hash_filter=hash_filter)
53919 eout.eend(0)
53920 except FileNotFound as e:
53921 eout.eend(1)
53922
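digestcheck() now threads an optional hash filter, built from PORTAGE_CHECKSUM_FILTER, through every Manifest check and drops it when the filter is transparent, i.e. when it would accept every hash type anyway. The setup step as a rough sketch; the hash_filter keyword is the same one this diff passes to checkTypeHashes() and checkFileHashes():

from portage.checksum import _hash_filter

def make_hash_filter(mysettings):
    """Return a hash filter, or None when filtering would be a no-op."""
    hash_filter = _hash_filter(
        mysettings.get("PORTAGE_CHECKSUM_FILTER", ""))
    if hash_filter.transparent:
        # Passing None lets the Manifest code skip filtering entirely.
        hash_filter = None
    return hash_filter

# Usage inside digestcheck()-like code, mf being a Manifest instance:
#   mf.checkTypeHashes("EBUILD", hash_filter=make_hash_filter(mysettings))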
53923 diff --git a/pym/portage/package/ebuild/digestgen.py b/pym/portage/package/ebuild/digestgen.py
53924 index 6ad3397..95d02db 100644
53925 --- a/pym/portage/package/ebuild/digestgen.py
53926 +++ b/pym/portage/package/ebuild/digestgen.py
53927 @@ -112,67 +112,64 @@ def digestgen(myarchives=None, mysettings=None, myportdb=None):
53928 missing_files.append(myfile)
53929 continue
53930
53931 - if missing_files:
53932 - for myfile in missing_files:
53933 - uris = set()
53934 - all_restrict = set()
53935 - for cpv in distfiles_map[myfile]:
53936 - uris.update(myportdb.getFetchMap(
53937 - cpv, mytree=mytree)[myfile])
53938 - restrict = myportdb.aux_get(cpv, ['RESTRICT'],
53939 - mytree=mytree)[0]
53940 - # Here we ignore conditional parts of RESTRICT since
53941 - # they don't apply unconditionally. Assume such
53942 - # conditionals only apply on the client side where
53943 - # digestgen() does not need to be called.
53944 - all_restrict.update(use_reduce(restrict,
53945 - flat=True, matchnone=True))
53946 -
53947 - # fetch() uses CATEGORY and PF to display a message
53948 - # when fetch restriction is triggered.
53949 - cat, pf = catsplit(cpv)
53950 - mysettings["CATEGORY"] = cat
53951 - mysettings["PF"] = pf
53952 -
53953 - # fetch() uses PORTAGE_RESTRICT to control fetch
53954 - # restriction, which is only applied to files that
53955 - # are not fetchable via a mirror:// URI.
53956 - mysettings["PORTAGE_RESTRICT"] = " ".join(all_restrict)
53957 -
53958 - try:
53959 - st = os.stat(os.path.join(
53960 - mysettings["DISTDIR"],myfile))
53961 - except OSError:
53962 - st = None
53963 -
53964 - if not fetch({myfile : uris}, mysettings):
53965 - myebuild = os.path.join(mysettings["O"],
53966 - catsplit(cpv)[1] + ".ebuild")
53967 - spawn_nofetch(myportdb, myebuild)
53968 - writemsg(_("!!! Fetch failed for %s, can't update "
53969 - "Manifest\n") % myfile, noiselevel=-1)
53970 - if myfile in dist_hashes and \
53971 - st is not None and st.st_size > 0:
53972 - # stat result is obtained before calling fetch(),
53973 - # since fetch may rename the existing file if the
53974 - # digest does not match.
53975 - writemsg(_("!!! If you would like to "
53976 - "forcefully replace the existing "
53977 - "Manifest entry\n!!! for %s, use "
53978 - "the following command:\n") % myfile + \
53979 - "!!! " + colorize("INFORM",
53980 - "ebuild --force %s manifest" % \
53981 - os.path.basename(myebuild)) + "\n",
53982 - noiselevel=-1)
53983 - return 0
53984 + for myfile in missing_files:
53985 + uris = set()
53986 + all_restrict = set()
53987 + for cpv in distfiles_map[myfile]:
53988 + uris.update(myportdb.getFetchMap(
53989 + cpv, mytree=mytree)[myfile])
53990 + restrict = myportdb.aux_get(cpv, ['RESTRICT'], mytree=mytree)[0]
53991 + # Here we ignore conditional parts of RESTRICT since
53992 + # they don't apply unconditionally. Assume such
53993 + # conditionals only apply on the client side where
53994 + # digestgen() does not need to be called.
53995 + all_restrict.update(use_reduce(restrict,
53996 + flat=True, matchnone=True))
53997 +
53998 + # fetch() uses CATEGORY and PF to display a message
53999 + # when fetch restriction is triggered.
54000 + cat, pf = catsplit(cpv)
54001 + mysettings["CATEGORY"] = cat
54002 + mysettings["PF"] = pf
54003 +
54004 + # fetch() uses PORTAGE_RESTRICT to control fetch
54005 + # restriction, which is only applied to files that
54006 + # are not fetchable via a mirror:// URI.
54007 + mysettings["PORTAGE_RESTRICT"] = " ".join(all_restrict)
54008 +
54009 + try:
54010 + st = os.stat(os.path.join(mysettings["DISTDIR"], myfile))
54011 + except OSError:
54012 + st = None
54013 +
54014 + if not fetch({myfile : uris}, mysettings):
54015 + myebuild = os.path.join(mysettings["O"],
54016 + catsplit(cpv)[1] + ".ebuild")
54017 + spawn_nofetch(myportdb, myebuild)
54018 + writemsg(_("!!! Fetch failed for %s, can't update Manifest\n")
54019 + % myfile, noiselevel=-1)
54020 + if myfile in dist_hashes and \
54021 + st is not None and st.st_size > 0:
54022 + # stat result is obtained before calling fetch(),
54023 + # since fetch may rename the existing file if the
54024 + # digest does not match.
54025 + cmd = colorize("INFORM", "ebuild --force %s manifest" %
54026 + os.path.basename(myebuild))
54027 + writemsg((_(
54028 + "!!! If you would like to forcefully replace the existing Manifest entry\n"
54029 + "!!! for %s, use the following command:\n") % myfile) +
54030 + "!!! %s\n" % cmd,
54031 + noiselevel=-1)
54032 + return 0
54033 +
54034 writemsg_stdout(_(">>> Creating Manifest for %s\n") % mysettings["O"])
54035 try:
54036 mf.create(assumeDistHashesSometimes=True,
54037 assumeDistHashesAlways=(
54038 "assume-digests" in mysettings.features))
54039 except FileNotFound as e:
54040 - writemsg(_("!!! File %s doesn't exist, can't update "
54041 - "Manifest\n") % e, noiselevel=-1)
54042 + writemsg(_("!!! File %s doesn't exist, can't update Manifest\n")
54043 + % e, noiselevel=-1)
54044 return 0
54045 except PortagePackageException as e:
54046 writemsg(("!!! %s\n") % (e,), noiselevel=-1)
54047
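The digestgen() hunk above collects RESTRICT across every package that still needs a given distfile, and the use_reduce(..., flat=True, matchnone=True) call is what drops the USE-conditional parts so that only unconditional restrictions reach PORTAGE_RESTRICT. A minimal sketch of that call with an assumed RESTRICT value (illustration only, not part of the commit):

# Illustration: matchnone=True treats every USE conditional as false, so
# conditional RESTRICT tokens are dropped and only unconditional ones remain.
from portage.dep import use_reduce

restrict = "test? ( fetch ) mirror bindist"   # assumed example value
tokens = use_reduce(restrict, flat=True, matchnone=True)
print(tokens)   # expected: ['mirror', 'bindist']
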
54048 diff --git a/pym/portage/package/ebuild/doebuild.py b/pym/portage/package/ebuild/doebuild.py
54049 index 09062f9..01707ae 100644
54050 --- a/pym/portage/package/ebuild/doebuild.py
54051 +++ b/pym/portage/package/ebuild/doebuild.py
54052 @@ -1,14 +1,19 @@
54053 -# Copyright 2010-2012 Gentoo Foundation
54054 +# Copyright 2010-2013 Gentoo Foundation
54055 # Distributed under the terms of the GNU General Public License v2
54056
54057 +from __future__ import unicode_literals
54058 +
54059 __all__ = ['doebuild', 'doebuild_environment', 'spawn', 'spawnebuild']
54060
54061 +import grp
54062 import gzip
54063 import errno
54064 import io
54065 from itertools import chain
54066 import logging
54067 import os as _os
54068 +import platform
54069 +import pwd
54070 import re
54071 import signal
54072 import stat
54073 @@ -26,8 +31,12 @@ portage.proxy.lazyimport.lazyimport(globals(),
54074 'portage.package.ebuild.digestgen:digestgen',
54075 'portage.package.ebuild.fetch:fetch',
54076 'portage.package.ebuild._ipc.QueryCommand:QueryCommand',
54077 - 'portage.dep._slot_abi:evaluate_slot_abi_equal_deps',
54078 + 'portage.dep._slot_operator:evaluate_slot_operator_equal_deps',
54079 'portage.package.ebuild._spawn_nofetch:spawn_nofetch',
54080 + 'portage.util._desktop_entry:validate_desktop_entry',
54081 + 'portage.util._async.SchedulerInterface:SchedulerInterface',
54082 + 'portage.util._eventloop.EventLoop:EventLoop',
54083 + 'portage.util._eventloop.global_event_loop:global_event_loop',
54084 'portage.util.ExtractKernelVersion:ExtractKernelVersion'
54085 )
54086
54087 @@ -64,7 +73,6 @@ from _emerge.EbuildBuildDir import EbuildBuildDir
54088 from _emerge.EbuildPhase import EbuildPhase
54089 from _emerge.EbuildSpawnProcess import EbuildSpawnProcess
54090 from _emerge.Package import Package
54091 -from _emerge.PollScheduler import PollScheduler
54092 from _emerge.RootConfig import RootConfig
54093
54094 _unsandboxed_phases = frozenset([
54095 @@ -74,6 +82,40 @@ _unsandboxed_phases = frozenset([
54096 "prerm", "setup"
54097 ])
54098
54099 +# phases in which IPC with host is allowed
54100 +_ipc_phases = frozenset([
54101 + "setup", "pretend",
54102 + "preinst", "postinst", "prerm", "postrm",
54103 +])
54104 +
54105 +# phases in which networking access is allowed
54106 +_networked_phases = frozenset([
54107 + # for VCS fetching
54108 + "unpack",
54109 + # + for network-bound IPC
54110 +] + list(_ipc_phases))
54111 +
54112 +_phase_func_map = {
54113 + "config": "pkg_config",
54114 + "setup": "pkg_setup",
54115 + "nofetch": "pkg_nofetch",
54116 + "unpack": "src_unpack",
54117 + "prepare": "src_prepare",
54118 + "configure": "src_configure",
54119 + "compile": "src_compile",
54120 + "test": "src_test",
54121 + "install": "src_install",
54122 + "preinst": "pkg_preinst",
54123 + "postinst": "pkg_postinst",
54124 + "prerm": "pkg_prerm",
54125 + "postrm": "pkg_postrm",
54126 + "info": "pkg_info",
54127 + "pretend": "pkg_pretend",
54128 +}
54129 +
54130 +_vdb_use_conditional_keys = Package._dep_keys + \
54131 + ('LICENSE', 'PROPERTIES', 'PROVIDE', 'RESTRICT',)
54132 +
54133 def _doebuild_spawn(phase, settings, actionmap=None, **kwargs):
54134 """
54135 All proper ebuild phases which execute ebuild.sh are spawned
54136 @@ -83,8 +125,18 @@ def _doebuild_spawn(phase, settings, actionmap=None, **kwargs):
54137 if phase in _unsandboxed_phases:
54138 kwargs['free'] = True
54139
54140 + kwargs['ipc'] = 'ipc-sandbox' not in settings.features or \
54141 + phase in _ipc_phases
54142 + kwargs['networked'] = 'network-sandbox' not in settings.features or \
54143 + phase in _networked_phases
54144 +
54145 if phase == 'depend':
54146 kwargs['droppriv'] = 'userpriv' in settings.features
54147 + # It's not necessary to close_fds for this phase, since
54148 + # it should not spawn any daemons, and close_fds is
54149 + # best avoided since it can interact badly with some
54150 + # garbage collectors (see _setup_pipes docstring).
54151 + kwargs['close_fds'] = False
54152
54153 if actionmap is not None and phase in actionmap:
54154 kwargs.update(actionmap[phase]["args"])
54155 @@ -102,17 +154,24 @@ def _doebuild_spawn(phase, settings, actionmap=None, **kwargs):
54156
54157 settings['EBUILD_PHASE'] = phase
54158 try:
54159 - return spawn(cmd, settings, **kwargs)
54160 + return spawn(cmd, settings, **portage._native_kwargs(kwargs))
54161 finally:
54162 settings.pop('EBUILD_PHASE', None)
54163
54164 -def _spawn_phase(phase, settings, actionmap=None, **kwargs):
54165 - if kwargs.get('returnpid'):
54166 - return _doebuild_spawn(phase, settings, actionmap=actionmap, **kwargs)
54167 +def _spawn_phase(phase, settings, actionmap=None, returnpid=False,
54168 + logfile=None, **kwargs):
54169
54170 + if returnpid:
54171 + return _doebuild_spawn(phase, settings, actionmap=actionmap,
54172 + returnpid=returnpid, logfile=logfile, **kwargs)
54173 +
54174 + # The logfile argument is unused here, since EbuildPhase uses
54175 + # the PORTAGE_LOG_FILE variable if set.
54176 ebuild_phase = EbuildPhase(actionmap=actionmap, background=False,
54177 - phase=phase, scheduler=PollScheduler().sched_iface,
54178 - settings=settings)
54179 + phase=phase, scheduler=SchedulerInterface(portage._internal_caller and
54180 + global_event_loop() or EventLoop(main=False)),
54181 + settings=settings, **kwargs)
54182 +
54183 ebuild_phase.start()
54184 ebuild_phase.wait()
54185 return ebuild_phase.returncode
54186 @@ -125,19 +184,28 @@ def _doebuild_path(settings, eapi=None):
54187 # Note: PORTAGE_BIN_PATH may differ from the global constant
54188 # when portage is reinstalling itself.
54189 portage_bin_path = settings["PORTAGE_BIN_PATH"]
54190 - eprefix = settings["EPREFIX"]
54191 + eprefix = portage.const.EPREFIX
54192 prerootpath = [x for x in settings.get("PREROOTPATH", "").split(":") if x]
54193 rootpath = [x for x in settings.get("ROOTPATH", "").split(":") if x]
54194 + overrides = [x for x in settings.get(
54195 + "__PORTAGE_TEST_PATH_OVERRIDE", "").split(":") if x]
54196
54197 prefixes = []
54198 if eprefix:
54199 prefixes.append(eprefix)
54200 prefixes.append("/")
54201
54202 - path = []
54203 + path = overrides
54204 +
54205 + if "xattr" in settings.features:
54206 + path.append(os.path.join(portage_bin_path, "ebuild-helpers", "xattr"))
54207
54208 - if eapi not in (None, "0", "1", "2", "3"):
54209 - path.append(os.path.join(portage_bin_path, "ebuild-helpers", "4"))
54210 + if eprefix and uid != 0 and "fakeroot" not in settings.features:
54211 + path.append(os.path.join(portage_bin_path,
54212 + "ebuild-helpers", "unprivileged"))
54213 +
54214 + if settings.get("USERLAND", "GNU") != "GNU":
54215 + path.append(os.path.join(portage_bin_path, "ebuild-helpers", "bsd"))
54216
54217 path.append(os.path.join(portage_bin_path, "ebuild-helpers"))
54218 path.extend(prerootpath)
54219 @@ -256,10 +324,11 @@ def doebuild_environment(myebuild, mydo, myroot=None, settings=None,
54220 if hasattr(mydbapi, 'repositories'):
54221 repo = mydbapi.repositories.get_repo_for_location(mytree)
54222 mysettings['PORTDIR'] = repo.eclass_db.porttrees[0]
54223 - mysettings['PORTDIR_OVERLAY'] = ' '.join(repo.eclass_db.porttrees[1:])
54224 + mysettings['PORTAGE_ECLASS_LOCATIONS'] = repo.eclass_db.eclass_locations_string
54225 mysettings.configdict["pkg"]["PORTAGE_REPO_NAME"] = repo.name
54226
54227 mysettings["PORTDIR"] = os.path.realpath(mysettings["PORTDIR"])
54228 + mysettings.pop("PORTDIR_OVERLAY", None)
54229 mysettings["DISTDIR"] = os.path.realpath(mysettings["DISTDIR"])
54230 mysettings["RPMDIR"] = os.path.realpath(mysettings["RPMDIR"])
54231
54232 @@ -416,8 +485,8 @@ _doebuild_commands_without_builddir = (
54233 'fetch', 'fetchall', 'help', 'manifest'
54234 )
54235
54236 -def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54237 - fetchonly=0, cleanup=0, dbkey=None, use_cache=1, fetchall=0, tree=None,
54238 +def doebuild(myebuild, mydo, _unused=DeprecationWarning, settings=None, debug=0, listonly=0,
54239 + fetchonly=0, cleanup=0, dbkey=DeprecationWarning, use_cache=1, fetchall=0, tree=None,
54240 mydbapi=None, vartree=None, prev_mtimes=None,
54241 fd_pipes=None, returnpid=False):
54242 """
54243 @@ -480,10 +549,15 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54244 mysettings = settings
54245 myroot = settings['EROOT']
54246
54247 - if _unused is not None and _unused != mysettings['EROOT']:
54248 + if _unused is not DeprecationWarning:
54249 warnings.warn("The third parameter of the "
54250 - "portage.doebuild() is now unused. Use "
54251 - "settings['ROOT'] instead.",
54252 + "portage.doebuild() is deprecated. Instead "
54253 + "settings['EROOT'] is used.",
54254 + DeprecationWarning, stacklevel=2)
54255 +
54256 + if dbkey is not DeprecationWarning:
54257 + warnings.warn("portage.doebuild() called "
54258 + "with deprecated dbkey argument.",
54259 DeprecationWarning, stacklevel=2)
54260
54261 if not tree:
54262 @@ -520,7 +594,7 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54263 "fetch", "fetchall", "digest",
54264 "unpack", "prepare", "configure", "compile", "test",
54265 "install", "rpm", "qmerge", "merge",
54266 - "package","unmerge", "manifest"]
54267 + "package", "unmerge", "manifest", "nofetch"]
54268
54269 if mydo not in validcommands:
54270 validcommands.sort()
54271 @@ -534,8 +608,11 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54272 return 1
54273
54274 if returnpid and mydo != 'depend':
54275 - warnings.warn("portage.doebuild() called " + \
54276 - "with returnpid parameter enabled. This usage will " + \
54277 + # This case is not supported, since it bypasses the EbuildPhase class
54278 + # which implements important functionality (including post phase hooks
54279 + # and IPC for things like best/has_version and die).
54280 + warnings.warn("portage.doebuild() called "
54281 + "with returnpid parameter enabled. This usage will "
54282 "not be supported in the future.",
54283 DeprecationWarning, stacklevel=2)
54284
54285 @@ -543,9 +620,6 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54286 fetchall = 1
54287 mydo = "fetch"
54288
54289 - parallel_fetchonly = mydo in ("fetch", "fetchall") and \
54290 - "PORTAGE_PARALLEL_FETCHONLY" in mysettings
54291 -
54292 if mydo not in clean_phases and not os.path.exists(myebuild):
54293 writemsg("!!! doebuild: %s not found for %s\n" % (myebuild, mydo),
54294 noiselevel=-1)
54295 @@ -652,7 +726,7 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54296 # we can temporarily override PORTAGE_TMPDIR with a random temp dir
54297 # so that there's no need for locking and it can be used even if the
54298 # user isn't in the portage group.
54299 - if mydo in ("info",):
54300 + if not returnpid and mydo in ("info",):
54301 tmpdir = tempfile.mkdtemp()
54302 tmpdir_orig = mysettings["PORTAGE_TMPDIR"]
54303 mysettings["PORTAGE_TMPDIR"] = tmpdir
54304 @@ -663,9 +737,10 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54305 if mydo in clean_phases:
54306 builddir_lock = None
54307 if not returnpid and \
54308 - 'PORTAGE_BUILDIR_LOCKED' not in mysettings:
54309 + 'PORTAGE_BUILDDIR_LOCKED' not in mysettings:
54310 builddir_lock = EbuildBuildDir(
54311 - scheduler=PollScheduler().sched_iface,
54312 + scheduler=(portage._internal_caller and
54313 + global_event_loop() or EventLoop(main=False)),
54314 settings=mysettings)
54315 builddir_lock.lock()
54316 try:
54317 @@ -681,42 +756,7 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54318 if returnpid:
54319 return _spawn_phase(mydo, mysettings,
54320 fd_pipes=fd_pipes, returnpid=returnpid)
54321 - elif isinstance(dbkey, dict):
54322 - warnings.warn("portage.doebuild() called " + \
54323 - "with dict dbkey argument. This usage will " + \
54324 - "not be supported in the future.",
54325 - DeprecationWarning, stacklevel=2)
54326 - mysettings["dbkey"] = ""
54327 - pr, pw = os.pipe()
54328 - fd_pipes = {
54329 - 0:sys.stdin.fileno(),
54330 - 1:sys.stdout.fileno(),
54331 - 2:sys.stderr.fileno(),
54332 - 9:pw}
54333 - mypids = _spawn_phase(mydo, mysettings, returnpid=True,
54334 - fd_pipes=fd_pipes)
54335 - os.close(pw) # belongs exclusively to the child process now
54336 - f = os.fdopen(pr, 'rb', 0)
54337 - for k, v in zip(auxdbkeys,
54338 - (_unicode_decode(line).rstrip('\n') for line in f)):
54339 - dbkey[k] = v
54340 - f.close()
54341 - retval = os.waitpid(mypids[0], 0)[1]
54342 - portage.process.spawned_pids.remove(mypids[0])
54343 - # If it got a signal, return the signal that was sent, but
54344 - # shift in order to distinguish it from a return value. (just
54345 - # like portage.process.spawn() would do).
54346 - if retval & 0xff:
54347 - retval = (retval & 0xff) << 8
54348 - else:
54349 - # Otherwise, return its exit code.
54350 - retval = retval >> 8
54351 - if retval == os.EX_OK and len(dbkey) != len(auxdbkeys):
54352 - # Don't trust bash's returncode if the
54353 - # number of lines is incorrect.
54354 - retval = 1
54355 - return retval
54356 - elif dbkey:
54357 + elif dbkey and dbkey is not DeprecationWarning:
54358 mysettings["dbkey"] = dbkey
54359 else:
54360 mysettings["dbkey"] = \
54361 @@ -725,14 +765,25 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54362 return _spawn_phase(mydo, mysettings,
54363 fd_pipes=fd_pipes, returnpid=returnpid)
54364
54365 - # Validate dependency metadata here to ensure that ebuilds with invalid
54366 - # data are never installed via the ebuild command. Don't bother when
54367 - # returnpid == True since there's no need to do this every time emerge
54368 - # executes a phase.
54369 + elif mydo == "nofetch":
54370 +
54371 + if returnpid:
54372 + writemsg("!!! doebuild: %s\n" %
54373 + _("returnpid is not supported for phase '%s'\n" % mydo),
54374 + noiselevel=-1)
54375 +
54376 + return spawn_nofetch(mydbapi, myebuild, settings=mysettings,
54377 + fd_pipes=fd_pipes)
54378 +
54379 if tree == "porttree":
54380 - rval = _validate_deps(mysettings, myroot, mydo, mydbapi)
54381 - if rval != os.EX_OK:
54382 - return rval
54383 +
54384 + if not returnpid:
54385 + # Validate dependency metadata here to ensure that ebuilds with
54386 + # invalid data are never installed via the ebuild command. Skip
54387 + # this when returnpid is True (assume the caller handled it).
54388 + rval = _validate_deps(mysettings, myroot, mydo, mydbapi)
54389 + if rval != os.EX_OK:
54390 + return rval
54391
54392 else:
54393 # FEATURES=noauto only makes sense for porttree, and we don't want
54394 @@ -741,20 +792,25 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54395 if "noauto" in mysettings.features:
54396 mysettings.features.discard("noauto")
54397
54398 - # The info phase is special because it uses mkdtemp so and
54399 - # user (not necessarily in the portage group) can run it.
54400 - if mydo not in ('info',) and \
54401 + # If we are not using a private temp dir, then check access
54402 + # to the global temp dir.
54403 + if tmpdir is None and \
54404 mydo not in _doebuild_commands_without_builddir:
54405 rval = _check_temp_dir(mysettings)
54406 if rval != os.EX_OK:
54407 return rval
54408
54409 if mydo == "unmerge":
54410 + if returnpid:
54411 + writemsg("!!! doebuild: %s\n" %
54412 + _("returnpid is not supported for phase '%s'\n" % mydo),
54413 + noiselevel=-1)
54414 return unmerge(mysettings["CATEGORY"],
54415 mysettings["PF"], myroot, mysettings, vartree=vartree)
54416
54417 phases_to_run = set()
54418 - if "noauto" in mysettings.features or \
54419 + if returnpid or \
54420 + "noauto" in mysettings.features or \
54421 mydo not in actionmap_deps:
54422 phases_to_run.add(mydo)
54423 else:
54424 @@ -805,9 +861,10 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54425
54426 if newstuff:
54427 if builddir_lock is None and \
54428 - 'PORTAGE_BUILDIR_LOCKED' not in mysettings:
54429 + 'PORTAGE_BUILDDIR_LOCKED' not in mysettings:
54430 builddir_lock = EbuildBuildDir(
54431 - scheduler=PollScheduler().sched_iface,
54432 + scheduler=(portage._internal_caller and
54433 + global_event_loop() or EventLoop(main=False)),
54434 settings=mysettings)
54435 builddir_lock.lock()
54436 try:
54437 @@ -825,12 +882,12 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54438 # in order to satisfy the sane $PWD requirement (from bug #239560)
54439 # when pkg_nofetch is spawned.
54440 have_build_dirs = False
54441 - if not parallel_fetchonly and \
54442 - mydo not in ('digest', 'fetch', 'help', 'manifest'):
54443 + if mydo not in ('digest', 'fetch', 'help', 'manifest'):
54444 if not returnpid and \
54445 - 'PORTAGE_BUILDIR_LOCKED' not in mysettings:
54446 + 'PORTAGE_BUILDDIR_LOCKED' not in mysettings:
54447 builddir_lock = EbuildBuildDir(
54448 - scheduler=PollScheduler().sched_iface,
54449 + scheduler=(portage._internal_caller and
54450 + global_event_loop() or EventLoop(main=False)),
54451 settings=mysettings)
54452 builddir_lock.lock()
54453 mystatus = prepare_build_dirs(myroot, mysettings, cleanup)
54454 @@ -873,9 +930,8 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54455 else:
54456 vardb = vartree.dbapi
54457 cpv = mysettings.mycpv
54458 - cp = portage.versions.cpv_getkey(cpv)
54459 - slot = mysettings["SLOT"]
54460 - cpv_slot = cp + ":" + slot
54461 + cpv_slot = "%s%s%s" % \
54462 + (cpv.cp, portage.dep._slot_separator, cpv.slot)
54463 mysettings["REPLACING_VERSIONS"] = " ".join(
54464 set(portage.versions.cpv_getversion(match) \
54465 for match in vardb.match(cpv_slot) + \
54466 @@ -885,8 +941,16 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54467 # the sandbox -- and stop now.
54468 if mydo in ("config", "help", "info", "postinst",
54469 "preinst", "pretend", "postrm", "prerm"):
54470 - return _spawn_phase(mydo, mysettings,
54471 - fd_pipes=fd_pipes, logfile=logfile, returnpid=returnpid)
54472 + if mydo in ("preinst", "postinst"):
54473 + env_file = os.path.join(os.path.dirname(mysettings["EBUILD"]),
54474 + "environment.bz2")
54475 + if os.path.isfile(env_file):
54476 + mysettings["PORTAGE_UPDATE_ENV"] = env_file
54477 + try:
54478 + return _spawn_phase(mydo, mysettings,
54479 + fd_pipes=fd_pipes, logfile=logfile, returnpid=returnpid)
54480 + finally:
54481 + mysettings.pop("PORTAGE_UPDATE_ENV", None)
54482
54483 mycpv = "/".join((mysettings["CATEGORY"], mysettings["PF"]))
54484
54485 @@ -927,7 +991,8 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54486 if not fetch(fetchme, mysettings, listonly=listonly,
54487 fetchonly=fetchonly, allow_missing_digests=True,
54488 digests=dist_digests):
54489 - spawn_nofetch(mydbapi, myebuild, settings=mysettings)
54490 + spawn_nofetch(mydbapi, myebuild, settings=mysettings,
54491 + fd_pipes=fd_pipes)
54492 if listonly:
54493 # The convention for listonly mode is to report
54494 # success in any case, even though fetch() may
54495 @@ -959,11 +1024,7 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54496 mf = None
54497 _doebuild_manifest_cache = None
54498 return not digestgen(mysettings=mysettings, myportdb=mydbapi)
54499 - elif mydo != 'fetch' and \
54500 - "digest" in mysettings.features:
54501 - # Don't do this when called by emerge or when called just
54502 - # for fetch (especially parallel-fetch) since it's not needed
54503 - # and it can interfere with parallel tasks.
54504 + elif "digest" in mysettings.features:
54505 mf = None
54506 _doebuild_manifest_cache = None
54507 digestgen(mysettings=mysettings, myportdb=mydbapi)
54508 @@ -972,14 +1033,17 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54509 if mydo in ("digest", "manifest"):
54510 return 1
54511
54512 + if mydo == "fetch":
54513 + # Return after digestgen for FEATURES=digest support.
54514 + # Return before digestcheck, since fetch() already
54515 + # checked any relevant digests.
54516 + return 0
54517 +
54518 # See above comment about fetching only when needed
54519 if tree == 'porttree' and \
54520 not digestcheck(checkme, mysettings, "strict" in features, mf=mf):
54521 return 1
54522
54523 - if mydo == "fetch":
54524 - return 0
54525 -
54526 # remove PORTAGE_ACTUAL_DISTDIR once cvs/svn is supported via SRC_URI
54527 if tree == 'porttree' and \
54528 ((mydo != "setup" and "noauto" not in features) \
54529 @@ -995,7 +1059,9 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54530 if len(actionmap_deps.get(x, [])):
54531 actionmap[x]["dep"] = ' '.join(actionmap_deps[x])
54532
54533 - if mydo in actionmap:
54534 + regular_actionmap_phase = mydo in actionmap
54535 +
54536 + if regular_actionmap_phase:
54537 bintree = None
54538 if mydo == "package":
54539 # Make sure the package directory exists before executing
54540 @@ -1019,6 +1085,9 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54541 actionmap, mysettings, debug, logfile=logfile,
54542 fd_pipes=fd_pipes, returnpid=returnpid)
54543
54544 + if returnpid and isinstance(retval, list):
54545 + return retval
54546 +
54547 if retval == os.EX_OK:
54548 if mydo == "package" and bintree is not None:
54549 bintree.inject(mysettings.mycpv,
54550 @@ -1030,7 +1099,15 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54551 except OSError:
54552 pass
54553
54554 - elif mydo=="qmerge":
54555 + elif returnpid:
54556 + writemsg("!!! doebuild: %s\n" %
54557 + _("returnpid is not supported for phase '%s'\n" % mydo),
54558 + noiselevel=-1)
54559 +
54560 + if regular_actionmap_phase:
54561 + # handled above
54562 + pass
54563 + elif mydo == "qmerge":
54564 # check to ensure install was run. this *only* pops up when users
54565 # forget it and are using ebuild
54566 if not os.path.exists(
54567 @@ -1047,7 +1124,8 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54568 mysettings["CATEGORY"], mysettings["PF"], mysettings["D"],
54569 os.path.join(mysettings["PORTAGE_BUILDDIR"], "build-info"),
54570 myroot, mysettings, myebuild=mysettings["EBUILD"], mytree=tree,
54571 - mydbapi=mydbapi, vartree=vartree, prev_mtimes=prev_mtimes)
54572 + mydbapi=mydbapi, vartree=vartree, prev_mtimes=prev_mtimes,
54573 + fd_pipes=fd_pipes)
54574 elif mydo=="merge":
54575 retval = spawnebuild("install", actionmap, mysettings, debug,
54576 alwaysdep=1, logfile=logfile, fd_pipes=fd_pipes,
54577 @@ -1063,7 +1141,9 @@ def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,
54578 mysettings["D"], os.path.join(mysettings["PORTAGE_BUILDDIR"],
54579 "build-info"), myroot, mysettings,
54580 myebuild=mysettings["EBUILD"], mytree=tree, mydbapi=mydbapi,
54581 - vartree=vartree, prev_mtimes=prev_mtimes)
54582 + vartree=vartree, prev_mtimes=prev_mtimes,
54583 + fd_pipes=fd_pipes)
54584 +
54585 else:
54586 writemsg_stdout(_("!!! Unknown mydo: %s\n") % mydo, noiselevel=-1)
54587 return 1
54588 @@ -1163,7 +1243,9 @@ def _prepare_env_file(settings):
54589 """
54590
54591 env_extractor = BinpkgEnvExtractor(background=False,
54592 - scheduler=PollScheduler().sched_iface, settings=settings)
54593 + scheduler=(portage._internal_caller and
54594 + global_event_loop() or EventLoop(main=False)),
54595 + settings=settings)
54596
54597 if env_extractor.dest_env_exists():
54598 # There are lots of possible states when doebuild()
54599 @@ -1242,7 +1324,7 @@ def _spawn_actionmap(settings):
54600 misc_sh_binary = os.path.join(portage_bin_path,
54601 os.path.basename(MISC_SH_BINARY))
54602 ebuild_sh = _shell_quote(ebuild_sh_binary) + " %s"
54603 - misc_sh = _shell_quote(misc_sh_binary) + " dyn_%s"
54604 + misc_sh = _shell_quote(misc_sh_binary) + " __dyn_%s"
54605
54606 	# args are for the spawn function
54607 actionmap = {
54608 @@ -1298,10 +1380,10 @@ def _validate_deps(mysettings, myroot, mydo, mydbapi):
54609
54610 if not pkg.built and \
54611 mydo not in ("digest", "help", "manifest") and \
54612 - pkg.metadata["REQUIRED_USE"] and \
54613 - eapi_has_required_use(pkg.metadata["EAPI"]):
54614 - result = check_required_use(pkg.metadata["REQUIRED_USE"],
54615 - pkg.use.enabled, pkg.iuse.is_valid_flag)
54616 + pkg._metadata["REQUIRED_USE"] and \
54617 + eapi_has_required_use(pkg.eapi):
54618 + result = check_required_use(pkg._metadata["REQUIRED_USE"],
54619 + pkg.use.enabled, pkg.iuse.is_valid_flag, eapi=pkg.eapi)
54620 if not result:
54621 reduced_noise = result.tounicode()
54622 writemsg("\n %s\n" % _("The following REQUIRED_USE flag" + \
54623 @@ -1309,7 +1391,7 @@ def _validate_deps(mysettings, myroot, mydo, mydbapi):
54624 writemsg(" %s\n" % reduced_noise,
54625 noiselevel=-1)
54626 normalized_required_use = \
54627 - " ".join(pkg.metadata["REQUIRED_USE"].split())
54628 + " ".join(pkg._metadata["REQUIRED_USE"].split())
54629 if reduced_noise != normalized_required_use:
54630 writemsg("\n %s\n" % _("The above constraints " + \
54631 "are a subset of the following complete expression:"),
54632 @@ -1324,7 +1406,8 @@ def _validate_deps(mysettings, myroot, mydo, mydbapi):
54633
54634 # XXX This would be to replace getstatusoutput completely.
54635 # XXX Issue: cannot block execution. Deadlock condition.
54636 -def spawn(mystring, mysettings, debug=0, free=0, droppriv=0, sesandbox=0, fakeroot=0, **keywords):
54637 +def spawn(mystring, mysettings, debug=False, free=False, droppriv=False,
54638 + sesandbox=False, fakeroot=False, networked=True, ipc=True, **keywords):
54639 """
54640 Spawn a subprocess with extra portage-specific options.
54641 	Options include:
54642 @@ -1354,6 +1437,10 @@ def spawn(mystring, mysettings, debug=0, free=0, droppriv=0, sesandbox=0, fakero
54643 @type sesandbox: Boolean
54644 @param fakeroot: Run this command with faked root privileges
54645 @type fakeroot: Boolean
54646 + @param networked: Run this command with networking access enabled
54647 + @type networked: Boolean
54648 + @param ipc: Run this command with host IPC access enabled
54649 + @type ipc: Boolean
54650 @param keywords: Extra options encoded as a dict, to be passed to spawn
54651 @type keywords: Dictionary
54652 @rtype: Integer
54653 @@ -1366,29 +1453,90 @@ def spawn(mystring, mysettings, debug=0, free=0, droppriv=0, sesandbox=0, fakero
54654 fd_pipes = keywords.get("fd_pipes")
54655 if fd_pipes is None:
54656 fd_pipes = {
54657 - 0:sys.stdin.fileno(),
54658 - 1:sys.stdout.fileno(),
54659 - 2:sys.stderr.fileno(),
54660 + 0:portage._get_stdin().fileno(),
54661 + 1:sys.__stdout__.fileno(),
54662 + 2:sys.__stderr__.fileno(),
54663 }
54664 # In some cases the above print statements don't flush stdout, so
54665 # it needs to be flushed before allowing a child process to use it
54666 # so that output always shows in the correct order.
54667 - stdout_filenos = (sys.stdout.fileno(), sys.stderr.fileno())
54668 + stdout_filenos = (sys.__stdout__.fileno(), sys.__stderr__.fileno())
54669 for fd in fd_pipes.values():
54670 if fd in stdout_filenos:
54671 - sys.stdout.flush()
54672 - sys.stderr.flush()
54673 + sys.__stdout__.flush()
54674 + sys.__stderr__.flush()
54675 break
54676
54677 features = mysettings.features
54678 +
54679 + # Use Linux namespaces if available
54680 + if uid == 0 and platform.system() == 'Linux':
54681 + keywords['unshare_net'] = not networked
54682 + keywords['unshare_ipc'] = not ipc
54683 +
54684 # TODO: Enable fakeroot to be used together with droppriv. The
54685 # fake ownership/permissions will have to be converted to real
54686 # permissions in the merge phase.
54687 fakeroot = fakeroot and uid != 0 and portage.process.fakeroot_capable
54688 - if droppriv and uid == 0 and portage_gid and portage_uid and \
54689 - hasattr(os, "setgroups"):
54690 - keywords.update({"uid":portage_uid,"gid":portage_gid,
54691 - "groups":userpriv_groups,"umask":0o02})
54692 + portage_build_uid = os.getuid()
54693 + portage_build_gid = os.getgid()
54694 + if uid == 0 and portage_uid and portage_gid and hasattr(os, "setgroups"):
54695 + if droppriv:
54696 + keywords.update({
54697 + "uid": portage_uid,
54698 + "gid": portage_gid,
54699 + "groups": userpriv_groups,
54700 + "umask": 0o02
54701 + })
54702 +
54703 + # Adjust pty ownership so that subprocesses
54704 + # can directly access /dev/fd/{1,2}.
54705 + stdout_fd = fd_pipes.get(1)
54706 + if stdout_fd is not None:
54707 + try:
54708 + subprocess_tty = _os.ttyname(stdout_fd)
54709 + except OSError:
54710 + pass
54711 + else:
54712 + try:
54713 + parent_tty = _os.ttyname(sys.__stdout__.fileno())
54714 + except OSError:
54715 + parent_tty = None
54716 +
54717 + if subprocess_tty != parent_tty:
54718 + _os.chown(subprocess_tty,
54719 + int(portage_uid), int(portage_gid))
54720 +
54721 + if "userpriv" in features and "userpriv" not in mysettings["PORTAGE_RESTRICT"].split() and secpass >= 2:
54722 + # Since Python 3.4, getpwuid and getgrgid
54723 + # require int type (no proxies).
54724 + portage_build_uid = int(portage_uid)
54725 + portage_build_gid = int(portage_gid)
54726 +
54727 + if "PORTAGE_BUILD_USER" not in mysettings:
54728 + user = None
54729 + try:
54730 + user = pwd.getpwuid(portage_build_uid).pw_name
54731 + except KeyError:
54732 + if portage_build_uid == 0:
54733 + user = "root"
54734 + elif portage_build_uid == portage_uid:
54735 + user = portage.data._portage_username
54736 + if user is not None:
54737 + mysettings["PORTAGE_BUILD_USER"] = user
54738 +
54739 + if "PORTAGE_BUILD_GROUP" not in mysettings:
54740 + group = None
54741 + try:
54742 + group = grp.getgrgid(portage_build_gid).gr_name
54743 + except KeyError:
54744 + if portage_build_gid == 0:
54745 + group = "root"
54746 + elif portage_build_gid == portage_gid:
54747 + group = portage.data._portage_grpname
54748 + if group is not None:
54749 + mysettings["PORTAGE_BUILD_GROUP"] = group
54750 +
54751 if not free:
54752 free=((droppriv and "usersandbox" not in features) or \
54753 (not droppriv and "sandbox" not in features and \
54754 @@ -1419,12 +1567,15 @@ def spawn(mystring, mysettings, debug=0, free=0, droppriv=0, sesandbox=0, fakero
54755 mysettings["PORTAGE_SANDBOX_T"])
54756
54757 if keywords.get("returnpid"):
54758 - return spawn_func(mystring, env=mysettings.environ(), **keywords)
54759 + return spawn_func(mystring, env=mysettings.environ(),
54760 + **portage._native_kwargs(keywords))
54761
54762 proc = EbuildSpawnProcess(
54763 background=False, args=mystring,
54764 - scheduler=PollScheduler().sched_iface, spawn_func=spawn_func,
54765 - settings=mysettings, **keywords)
54766 + scheduler=SchedulerInterface(portage._internal_caller and
54767 + global_event_loop() or EventLoop(main=False)),
54768 + spawn_func=spawn_func,
54769 + settings=mysettings, **portage._native_kwargs(keywords))
54770
54771 proc.start()
54772 proc.wait()
54773 @@ -1436,8 +1587,8 @@ def spawnebuild(mydo, actionmap, mysettings, debug, alwaysdep=0,
54774 logfile=None, fd_pipes=None, returnpid=False):
54775
54776 if returnpid:
54777 - warnings.warn("portage.spawnebuild() called " + \
54778 - "with returnpid parameter enabled. This usage will " + \
54779 + warnings.warn("portage.spawnebuild() called "
54780 + "with returnpid parameter enabled. This usage will "
54781 "not be supported in the future.",
54782 DeprecationWarning, stacklevel=2)
54783
54784 @@ -1530,7 +1681,52 @@ def _check_build_log(mysettings, out=None):
54785
54786 configure_opts_warn = []
54787 configure_opts_warn_re = re.compile(
54788 - r'^configure: WARNING: [Uu]nrecognized options: ')
54789 + r'^configure: WARNING: [Uu]nrecognized options: (.*)')
54790 +
54791 + qa_configure_opts = ""
54792 + try:
54793 + with io.open(_unicode_encode(os.path.join(
54794 + mysettings["PORTAGE_BUILDDIR"],
54795 + "build-info", "QA_CONFIGURE_OPTIONS"),
54796 + encoding=_encodings['fs'], errors='strict'),
54797 + mode='r', encoding=_encodings['repo.content'],
54798 + errors='replace') as qa_configure_opts_f:
54799 + qa_configure_opts = qa_configure_opts_f.read()
54800 + except IOError as e:
54801 + if e.errno not in (errno.ENOENT, errno.ESTALE):
54802 + raise
54803 +
54804 + qa_configure_opts = qa_configure_opts.split()
54805 + if qa_configure_opts:
54806 + if len(qa_configure_opts) > 1:
54807 + qa_configure_opts = "|".join("(%s)" % x for x in qa_configure_opts)
54808 + qa_configure_opts = "^(%s)$" % qa_configure_opts
54809 + else:
54810 + qa_configure_opts = "^%s$" % qa_configure_opts[0]
54811 + qa_configure_opts = re.compile(qa_configure_opts)
54812 +
54813 + qa_am_maintainer_mode = []
54814 + try:
54815 + with io.open(_unicode_encode(os.path.join(
54816 + mysettings["PORTAGE_BUILDDIR"],
54817 + "build-info", "QA_AM_MAINTAINER_MODE"),
54818 + encoding=_encodings['fs'], errors='strict'),
54819 + mode='r', encoding=_encodings['repo.content'],
54820 + errors='replace') as qa_am_maintainer_mode_f:
54821 + qa_am_maintainer_mode = [x for x in
54822 + qa_am_maintainer_mode_f.read().splitlines() if x]
54823 + except IOError as e:
54824 + if e.errno not in (errno.ENOENT, errno.ESTALE):
54825 + raise
54826 +
54827 + if qa_am_maintainer_mode:
54828 + if len(qa_am_maintainer_mode) > 1:
54829 + qa_am_maintainer_mode = \
54830 + "|".join("(%s)" % x for x in qa_am_maintainer_mode)
54831 + qa_am_maintainer_mode = "^(%s)$" % qa_am_maintainer_mode
54832 + else:
54833 + qa_am_maintainer_mode = "^%s$" % qa_am_maintainer_mode[0]
54834 + qa_am_maintainer_mode = re.compile(qa_am_maintainer_mode)
54835
54836 # Exclude output from dev-libs/yaz-3.0.47 which looks like this:
54837 #
54838 @@ -1552,7 +1748,9 @@ def _check_build_log(mysettings, out=None):
54839 for line in f:
54840 line = _unicode_decode(line)
54841 if am_maintainer_mode_re.search(line) is not None and \
54842 - am_maintainer_mode_exclude_re.search(line) is None:
54843 + am_maintainer_mode_exclude_re.search(line) is None and \
54844 + (not qa_am_maintainer_mode or
54845 + qa_am_maintainer_mode.search(line) is None):
54846 am_maintainer_mode.append(line.rstrip("\n"))
54847
54848 if bash_command_not_found_re.match(line) is not None and \
54849 @@ -1562,8 +1760,11 @@ def _check_build_log(mysettings, out=None):
54850 if helper_missing_file_re.match(line) is not None:
54851 helper_missing_file.append(line.rstrip("\n"))
54852
54853 - if configure_opts_warn_re.match(line) is not None:
54854 - configure_opts_warn.append(line.rstrip("\n"))
54855 + m = configure_opts_warn_re.match(line)
54856 + if m is not None:
54857 + for x in m.group(1).split(", "):
54858 + if not qa_configure_opts or qa_configure_opts.match(x) is None:
54859 + configure_opts_warn.append(x)
54860
54861 if make_jobserver_re.match(line) is not None:
54862 make_jobserver.append(line.rstrip("\n"))
54863 @@ -1612,7 +1813,7 @@ def _check_build_log(mysettings, out=None):
54864 if configure_opts_warn:
54865 msg = [_("QA Notice: Unrecognized configure options:")]
54866 msg.append("")
54867 - msg.extend("\t" + line for line in configure_opts_warn)
54868 + msg.extend("\t%s" % x for x in configure_opts_warn)
54869 _eqawarn(msg)
54870
54871 if make_jobserver:
54872 @@ -1637,8 +1838,12 @@ def _post_src_install_write_metadata(settings):
54873
54874 build_info_dir = os.path.join(settings['PORTAGE_BUILDDIR'], 'build-info')
54875
54876 - for k in ('IUSE',):
54877 - v = settings.get(k)
54878 + metadata_keys = ['IUSE']
54879 + if eapi_attrs.iuse_effective:
54880 + metadata_keys.append('IUSE_EFFECTIVE')
54881 +
54882 + for k in metadata_keys:
54883 + v = settings.configdict['pkg'].get(k)
54884 if v is not None:
54885 write_atomic(os.path.join(build_info_dir, k), v + '\n')
54886
54887 @@ -1654,7 +1859,7 @@ def _post_src_install_write_metadata(settings):
54888 'BUILD_TIME'), encoding=_encodings['fs'], errors='strict'),
54889 mode='w', encoding=_encodings['repo.content'],
54890 errors='strict') as f:
54891 - f.write(_unicode_decode("%.0f\n" % (time.time(),)))
54892 + f.write("%.0f\n" % (time.time(),))
54893
54894 use = frozenset(settings['PORTAGE_USE'].split())
54895 for k in _vdb_use_conditional_keys:
54896 @@ -1668,7 +1873,7 @@ def _post_src_install_write_metadata(settings):
54897 continue
54898
54899 if k.endswith('DEPEND'):
54900 - if eapi_attrs.slot_abi:
54901 + if eapi_attrs.slot_operator:
54902 continue
54903 token_class = Atom
54904 else:
54905 @@ -1686,10 +1891,10 @@ def _post_src_install_write_metadata(settings):
54906 k), encoding=_encodings['fs'], errors='strict'),
54907 mode='w', encoding=_encodings['repo.content'],
54908 errors='strict') as f:
54909 - f.write(_unicode_decode(v + '\n'))
54910 + f.write('%s\n' % v)
54911
54912 - if eapi_attrs.slot_abi:
54913 - deps = evaluate_slot_abi_equal_deps(settings, use, QueryCommand.get_db())
54914 + if eapi_attrs.slot_operator:
54915 + deps = evaluate_slot_operator_equal_deps(settings, use, QueryCommand.get_db())
54916 for k, v in deps.items():
54917 filename = os.path.join(build_info_dir, k)
54918 if not v:
54919 @@ -1702,10 +1907,7 @@ def _post_src_install_write_metadata(settings):
54920 k), encoding=_encodings['fs'], errors='strict'),
54921 mode='w', encoding=_encodings['repo.content'],
54922 errors='strict') as f:
54923 - f.write(_unicode_decode(v + '\n'))
54924 -
54925 -_vdb_use_conditional_keys = ('DEPEND', 'LICENSE', 'PDEPEND',
54926 - 'PROPERTIES', 'PROVIDE', 'RDEPEND', 'RESTRICT',)
54927 + f.write('%s\n' % v)
54928
54929 def _preinst_bsdflags(mysettings):
54930 if bsd_chflags:
54931 @@ -1747,6 +1949,33 @@ def _post_src_install_uid_fix(mysettings, out):
54932 destdir = mysettings["D"]
54933 ed_len = len(mysettings["ED"])
54934 unicode_errors = []
54935 + desktop_file_validate = \
54936 + portage.process.find_binary("desktop-file-validate") is not None
54937 + xdg_dirs = mysettings.get('XDG_DATA_DIRS', '/usr/share').split(':')
54938 + xdg_dirs = tuple(os.path.join(i, "applications") + os.sep
54939 + for i in xdg_dirs if i)
54940 +
54941 + qa_desktop_file = ""
54942 + try:
54943 + with io.open(_unicode_encode(os.path.join(
54944 + mysettings["PORTAGE_BUILDDIR"],
54945 + "build-info", "QA_DESKTOP_FILE"),
54946 + encoding=_encodings['fs'], errors='strict'),
54947 + mode='r', encoding=_encodings['repo.content'],
54948 + errors='replace') as f:
54949 + qa_desktop_file = f.read()
54950 + except IOError as e:
54951 + if e.errno not in (errno.ENOENT, errno.ESTALE):
54952 + raise
54953 +
54954 + qa_desktop_file = qa_desktop_file.split()
54955 + if qa_desktop_file:
54956 + if len(qa_desktop_file) > 1:
54957 + qa_desktop_file = "|".join("(%s)" % x for x in qa_desktop_file)
54958 + qa_desktop_file = "^(%s)$" % qa_desktop_file
54959 + else:
54960 + qa_desktop_file = "^%s$" % qa_desktop_file[0]
54961 + qa_desktop_file = re.compile(qa_desktop_file)
54962
54963 while True:
54964
54965 @@ -1755,6 +1984,7 @@ def _post_src_install_uid_fix(mysettings, out):
54966 counted_inodes = set()
54967 fixlafiles_announced = False
54968 fixlafiles = "fixlafiles" in mysettings.features
54969 + desktopfile_errors = []
54970
54971 for parent, dirs, files in os.walk(destdir):
54972 try:
54973 @@ -1794,6 +2024,16 @@ def _post_src_install_uid_fix(mysettings, out):
54974 else:
54975 fpath = os.path.join(parent, fname)
54976
54977 + fpath_relative = fpath[ed_len - 1:]
54978 + if desktop_file_validate and fname.endswith(".desktop") and \
54979 + os.path.isfile(fpath) and \
54980 + fpath_relative.startswith(xdg_dirs) and \
54981 + not (qa_desktop_file and qa_desktop_file.match(fpath_relative.strip(os.sep)) is not None):
54982 +
54983 + desktop_validate = validate_desktop_entry(fpath)
54984 + if desktop_validate:
54985 + desktopfile_errors.extend(desktop_validate)
54986 +
54987 if fixlafiles and \
54988 fname.endswith(".la") and os.path.isfile(fpath):
54989 f = open(_unicode_encode(fpath,
54990 @@ -1860,6 +2100,11 @@ def _post_src_install_uid_fix(mysettings, out):
54991 if not unicode_error:
54992 break
54993
54994 + if desktopfile_errors:
54995 + for l in _merge_desktopfile_error(desktopfile_errors):
54996 + l = l.replace(mysettings["ED"], '/')
54997 + eqawarn(l, phase='install', key=mysettings.mycpv, out=out)
54998 +
54999 if unicode_errors:
55000 for l in _merge_unicode_error(unicode_errors):
55001 eqawarn(l, phase='install', key=mysettings.mycpv, out=out)
55002 @@ -1871,7 +2116,7 @@ def _post_src_install_uid_fix(mysettings, out):
55003 'SIZE'), encoding=_encodings['fs'], errors='strict'),
55004 mode='w', encoding=_encodings['repo.content'],
55005 errors='strict')
55006 - f.write(_unicode_decode(str(size) + '\n'))
55007 + f.write('%d\n' % size)
55008 f.close()
55009
55010 _reapply_bsdflags_to_image(mysettings)
55011 @@ -2022,6 +2267,20 @@ def _post_src_install_soname_symlinks(mysettings, out):
55012 for line in qa_msg:
55013 eqawarn(line, key=mysettings.mycpv, out=out)
55014
55015 +def _merge_desktopfile_error(errors):
55016 + lines = []
55017 +
55018 + msg = _("QA Notice: This package installs one or more .desktop files "
55019 + "that do not pass validation.")
55020 + lines.extend(wrap(msg, 72))
55021 +
55022 + lines.append("")
55023 + errors.sort()
55024 + lines.extend("\t" + x for x in errors)
55025 + lines.append("")
55026 +
55027 + return lines
55028 +
55029 def _merge_unicode_error(errors):
55030 lines = []
55031
55032 @@ -2078,11 +2337,6 @@ def _handle_self_update(settings, vardb):
55033 if settings["ROOT"] == "/" and \
55034 portage.dep.match_from_list(
55035 portage.const.PORTAGE_PACKAGE_ATOM, [cpv]):
55036 - inherited = frozenset(settings.get('INHERITED', '').split())
55037 - if not vardb.cpv_exists(cpv) or \
55038 - '9999' in cpv or \
55039 - 'git' in inherited or \
55040 - 'git-2' in inherited:
55041 - _prepare_self_update(settings)
55042 - return True
55043 + _prepare_self_update(settings)
55044 + return True
55045 return False
55046
55047 diff --git a/pym/portage/package/ebuild/fetch.py b/pym/portage/package/ebuild/fetch.py
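Among the doebuild.py changes above, the least obvious piece is the new ipc-sandbox/network-sandbox wiring: _doebuild_spawn() computes per-phase ipc and networked flags, and spawn() later maps them to unshare_ipc/unshare_net, but only when running as root on Linux. A self-contained sketch of that decision (the phase sets and boolean expressions mirror the patch; the helper function itself is hypothetical):

# Hypothetical helper; the data and logic are copied from the patch above.
_ipc_phases = {"setup", "pretend", "preinst", "postinst", "prerm", "postrm"}
_networked_phases = {"unpack"} | _ipc_phases   # unpack may fetch via VCS

def sandbox_flags(phase, features):
    """Return (ipc, networked) the way the patched _doebuild_spawn() would."""
    ipc = "ipc-sandbox" not in features or phase in _ipc_phases
    networked = "network-sandbox" not in features or phase in _networked_phases
    return ipc, networked

# With both features enabled, src_compile gets neither IPC nor network access;
# spawn() would then set unshare_ipc=True and unshare_net=True.
print(sandbox_flags("compile", {"ipc-sandbox", "network-sandbox"}))   # (False, False)
print(sandbox_flags("setup", {"ipc-sandbox", "network-sandbox"}))     # (True, True)
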
55048 index 60ed04d..2424ff3 100644
55049 --- a/pym/portage/package/ebuild/fetch.py
55050 +++ b/pym/portage/package/ebuild/fetch.py
55051 @@ -1,4 +1,4 @@
55052 -# Copyright 2010-2012 Gentoo Foundation
55053 +# Copyright 2010-2014 Gentoo Foundation
55054 # Distributed under the terms of the GNU General Public License v2
55055
55056 from __future__ import print_function
55057 @@ -14,6 +14,10 @@ import stat
55058 import sys
55059 import tempfile
55060
55061 +try:
55062 + from urllib.parse import urlparse
55063 +except ImportError:
55064 + from urlparse import urlparse
55065
55066 import portage
55067 portage.proxy.lazyimport.lazyimport(globals(),
55068 @@ -26,7 +30,7 @@ portage.proxy.lazyimport.lazyimport(globals(),
55069 from portage import OrderedDict, os, selinux, shutil, _encodings, \
55070 _shell_quote, _unicode_encode
55071 from portage.checksum import (hashfunc_map, perform_md5, verify_all,
55072 - _filter_unaccelarated_hashes)
55073 + _filter_unaccelarated_hashes, _hash_filter, _apply_hash_filter)
55074 from portage.const import BASH_BINARY, CUSTOM_MIRRORS_FILE, \
55075 GLOBAL_CONFIG_PATH
55076 from portage.data import portage_gid, portage_uid, secpass, userpriv_groups
55077 @@ -64,9 +68,9 @@ def _spawn_fetch(settings, args, **kwargs):
55078 if "fd_pipes" not in kwargs:
55079
55080 kwargs["fd_pipes"] = {
55081 - 0 : sys.stdin.fileno(),
55082 - 1 : sys.stdout.fileno(),
55083 - 2 : sys.stdout.fileno(),
55084 + 0 : portage._get_stdin().fileno(),
55085 + 1 : sys.__stdout__.fileno(),
55086 + 2 : sys.__stdout__.fileno(),
55087 }
55088
55089 if "userfetch" in settings.features and \
55090 @@ -185,7 +189,7 @@ def _check_digests(filename, digests, show_errors=1):
55091 return False
55092 return True
55093
55094 -def _check_distfile(filename, digests, eout, show_errors=1):
55095 +def _check_distfile(filename, digests, eout, show_errors=1, hash_filter=None):
55096 """
55097 @return a tuple of (match, stat_obj) where match is True if filename
55098 matches all given digests (if any) and stat_obj is a stat result, or
55099 @@ -212,6 +216,8 @@ def _check_distfile(filename, digests, eout, show_errors=1):
55100 return (False, st)
55101 else:
55102 digests = _filter_unaccelarated_hashes(digests)
55103 + if hash_filter is not None:
55104 + digests = _apply_hash_filter(digests, hash_filter)
55105 if _check_digests(filename, digests, show_errors=show_errors):
55106 eout.ebegin("%s %s ;-)" % (os.path.basename(filename),
55107 " ".join(sorted(digests))))
55108 @@ -341,7 +347,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
55109 _("!!! For fetching to a read-only filesystem, "
55110 "locking should be turned off.\n")), noiselevel=-1)
55111 writemsg(_("!!! This can be done by adding -distlocks to "
55112 - "FEATURES in /etc/make.conf\n"), noiselevel=-1)
55113 + "FEATURES in /etc/portage/make.conf\n"), noiselevel=-1)
55114 # use_locks = 0
55115
55116 # local mirrors are always added
55117 @@ -355,6 +361,9 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
55118 if try_mirrors:
55119 mymirrors += [x.rstrip("/") for x in mysettings["GENTOO_MIRRORS"].split() if x]
55120
55121 + hash_filter = _hash_filter(mysettings.get("PORTAGE_CHECKSUM_FILTER", ""))
55122 + if hash_filter.transparent:
55123 + hash_filter = None
55124 skip_manifest = mysettings.get("EBUILD_SKIP_MANIFEST") == "1"
55125 if skip_manifest:
55126 allow_missing_digests = True
55127 @@ -397,12 +406,16 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
55128 for myfile, uri_set in myuris.items():
55129 for myuri in uri_set:
55130 file_uri_tuples.append((myfile, myuri))
55131 + if not uri_set:
55132 + file_uri_tuples.append((myfile, None))
55133 else:
55134 for myuri in myuris:
55135 - file_uri_tuples.append((os.path.basename(myuri), myuri))
55136 + if urlparse(myuri).scheme:
55137 + file_uri_tuples.append((os.path.basename(myuri), myuri))
55138 + else:
55139 + file_uri_tuples.append((os.path.basename(myuri), None))
55140
55141 filedict = OrderedDict()
55142 - primaryuri_indexes={}
55143 primaryuri_dict = {}
55144 thirdpartymirror_uris = {}
55145 for myfile, myuri in file_uri_tuples:
55146 @@ -410,6 +423,8 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
55147 filedict[myfile]=[]
55148 for y in range(0,len(locations)):
55149 filedict[myfile].append(locations[y]+"/distfiles/"+myfile)
55150 + if myuri is None:
55151 + continue
55152 if myuri[:9]=="mirror://":
55153 eidx = myuri.find("/", 9)
55154 if eidx != -1:
55155 @@ -424,15 +439,15 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
55156
55157 # now try the official mirrors
55158 if mirrorname in thirdpartymirrors:
55159 - random.shuffle(thirdpartymirrors[mirrorname])
55160 -
55161 uris = [locmirr.rstrip("/") + "/" + path \
55162 for locmirr in thirdpartymirrors[mirrorname]]
55163 + random.shuffle(uris)
55164 filedict[myfile].extend(uris)
55165 thirdpartymirror_uris.setdefault(myfile, []).extend(uris)
55166
55167 - if not filedict[myfile]:
55168 - writemsg(_("No known mirror by the name: %s\n") % (mirrorname))
55169 + if mirrorname not in custommirrors and \
55170 + mirrorname not in thirdpartymirrors:
55171 + writemsg(_("!!! No known mirror by the name: %s\n") % (mirrorname))
55172 else:
55173 writemsg(_("Invalid mirror definition in SRC_URI:\n"), noiselevel=-1)
55174 writemsg(" %s\n" % (myuri), noiselevel=-1)
55175 @@ -440,26 +455,30 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
55176 if restrict_fetch or force_mirror:
55177 	# Only fetching from specific mirrors is allowed.
55178 continue
55179 - if "primaryuri" in restrict:
55180 - # Use the source site first.
55181 - if myfile in primaryuri_indexes:
55182 - primaryuri_indexes[myfile] += 1
55183 - else:
55184 - primaryuri_indexes[myfile] = 0
55185 - filedict[myfile].insert(primaryuri_indexes[myfile], myuri)
55186 - else:
55187 - filedict[myfile].append(myuri)
55188 primaryuris = primaryuri_dict.get(myfile)
55189 if primaryuris is None:
55190 primaryuris = []
55191 primaryuri_dict[myfile] = primaryuris
55192 primaryuris.append(myuri)
55193
55194 + # Order primaryuri_dict values to match that in SRC_URI.
55195 + for uris in primaryuri_dict.values():
55196 + uris.reverse()
55197 +
55198 # Prefer thirdpartymirrors over normal mirrors in cases when
55199 # the file does not yet exist on the normal mirrors.
55200 for myfile, uris in thirdpartymirror_uris.items():
55201 primaryuri_dict.setdefault(myfile, []).extend(uris)
55202
55203 + # Now merge primaryuri values into filedict (includes mirrors
55204 + # explicitly referenced in SRC_URI).
55205 + if "primaryuri" in restrict:
55206 + for myfile, uris in filedict.items():
55207 + filedict[myfile] = primaryuri_dict.get(myfile, []) + uris
55208 + else:
55209 + for myfile in filedict:
55210 + filedict[myfile] += primaryuri_dict.get(myfile, [])
55211 +
55212 can_fetch=True
55213
55214 if listonly:
55215 @@ -637,7 +656,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
55216 eout = EOutput()
55217 eout.quiet = mysettings.get("PORTAGE_QUIET") == "1"
55218 match, mystat = _check_distfile(
55219 - myfile_path, pruned_digests, eout)
55220 + myfile_path, pruned_digests, eout, hash_filter=hash_filter)
55221 if match:
55222 # Skip permission adjustment for symlinks, since we don't
55223 # want to modify anything outside of the primary DISTDIR,
55224 @@ -709,7 +728,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
55225 for x in ro_distdirs:
55226 filename = os.path.join(x, myfile)
55227 match, mystat = _check_distfile(
55228 - filename, pruned_digests, eout)
55229 + filename, pruned_digests, eout, hash_filter=hash_filter)
55230 if match:
55231 readonly_file = filename
55232 break
55233 @@ -734,7 +753,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
55234 "remaining space.\n"), noiselevel=-1)
55235 if userfetch:
55236 writemsg(_("!!! You may set FEATURES=\"-userfetch\""
55237 - " in /etc/make.conf in order to fetch with\n"
55238 + " in /etc/portage/make.conf in order to fetch with\n"
55239 "!!! superuser privileges.\n"), noiselevel=-1)
55240
55241 if fsmirrors and not os.path.exists(myfile_path) and has_space:
55242 @@ -796,6 +815,8 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
55243 continue
55244 else:
55245 digests = _filter_unaccelarated_hashes(mydigests[myfile])
55246 + if hash_filter is not None:
55247 + digests = _apply_hash_filter(digests, hash_filter)
55248 verified_ok, reason = verify_all(myfile_path, digests)
55249 if not verified_ok:
55250 writemsg(_("!!! Previously fetched"
55251 @@ -845,8 +866,8 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
55252 protocol = loc[0:loc.find("://")]
55253
55254 global_config_path = GLOBAL_CONFIG_PATH
55255 - if mysettings['EPREFIX']:
55256 - global_config_path = os.path.join(mysettings['EPREFIX'],
55257 + if portage.const.EPREFIX:
55258 + global_config_path = os.path.join(portage.const.EPREFIX,
55259 GLOBAL_CONFIG_PATH.lstrip(os.sep))
55260
55261 missing_file_param = False
55262 @@ -955,11 +976,16 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
55263 writemsg_stdout(_(">>> Downloading '%s'\n") % \
55264 _hide_url_passwd(loc))
55265 variables = {
55266 - "DISTDIR": mysettings["DISTDIR"],
55267 "URI": loc,
55268 "FILE": myfile
55269 }
55270
55271 + for k in ("DISTDIR", "PORTAGE_SSH_OPTS"):
55272 + try:
55273 + variables[k] = mysettings[k]
55274 + except KeyError:
55275 + pass
55276 +
55277 myfetch = shlex_split(locfetch)
55278 myfetch = [varexpand(x, mydict=variables) for x in myfetch]
55279 myret = -1
55280 @@ -1053,6 +1079,8 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
55281 # net connection. This way we have a chance to try to download
55282 # from another mirror...
55283 digests = _filter_unaccelarated_hashes(mydigests[myfile])
55284 + if hash_filter is not None:
55285 + digests = _apply_hash_filter(digests, hash_filter)
55286 verified_ok, reason = verify_all(myfile_path, digests)
55287 if not verified_ok:
55288 writemsg(_("!!! Fetched file: %s VERIFY FAILED!\n") % myfile,
55289
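The fetch.py hunks above drop the old primaryuri_indexes bookkeeping and merge the SRC_URI ("primary") URIs into filedict in a single pass: by default they are appended after the mirror URIs, while RESTRICT=primaryuri puts them in front. A toy run of that merge with assumed data:

# Assumed example data; the merge itself mirrors the patched fetch() logic.
filedict = {"foo-1.0.tar.gz": ["http://mirror.example/distfiles/foo-1.0.tar.gz"]}
primaryuri_dict = {"foo-1.0.tar.gz": ["http://upstream.example/foo-1.0.tar.gz"]}
restrict = ["primaryuri"]          # from the ebuild's RESTRICT

if "primaryuri" in restrict:
    # Upstream URIs are tried before the mirrors.
    for myfile, uris in filedict.items():
        filedict[myfile] = primaryuri_dict.get(myfile, []) + uris
else:
    # Default: mirrors first, upstream URIs as fallback.
    for myfile in filedict:
        filedict[myfile] += primaryuri_dict.get(myfile, [])

print(filedict["foo-1.0.tar.gz"][0])   # expected: the upstream URI
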
55290 diff --git a/pym/portage/package/ebuild/getmaskingreason.py b/pym/portage/package/ebuild/getmaskingreason.py
55291 index 8a88c2f..1e4ed21 100644
55292 --- a/pym/portage/package/ebuild/getmaskingreason.py
55293 +++ b/pym/portage/package/ebuild/getmaskingreason.py
55294 @@ -1,4 +1,4 @@
55295 -# Copyright 2010-2011 Gentoo Foundation
55296 +# Copyright 2010-2014 Gentoo Foundation
55297 # Distributed under the terms of the GNU General Public License v2
55298
55299 __all__ = ['getmaskingreason']
55300 @@ -6,13 +6,12 @@ __all__ = ['getmaskingreason']
55301 import portage
55302 from portage import os
55303 from portage.const import USER_CONFIG_PATH
55304 -from portage.dep import Atom, match_from_list, _slot_separator, _repo_separator
55305 +from portage.dep import Atom, match_from_list
55306 from portage.exception import InvalidAtom
55307 from portage.localization import _
55308 from portage.repository.config import _gen_valid_repo
55309 from portage.util import grablines, normalize_path
55310 -from portage.versions import catpkgsplit
55311 -from _emerge.Package import Package
55312 +from portage.versions import catpkgsplit, _pkg_str
55313
55314 def getmaskingreason(mycpv, metadata=None, settings=None,
55315 portdb=None, return_location=False, myrepo=None):
55316 @@ -60,23 +59,20 @@ def getmaskingreason(mycpv, metadata=None, settings=None,
55317
55318 # Sometimes we can't access SLOT or repository due to corruption.
55319 pkg = mycpv
55320 - if metadata is not None:
55321 - pkg = "".join((mycpv, _slot_separator, metadata["SLOT"]))
55322 - # At this point myrepo should be None, a valid name, or
55323 - # Package.UNKNOWN_REPO which we ignore.
55324 - if myrepo is not None and myrepo != Package.UNKNOWN_REPO:
55325 - pkg = "".join((pkg, _repo_separator, myrepo))
55326 + try:
55327 + pkg.slot
55328 + except AttributeError:
55329 + pkg = _pkg_str(mycpv, metadata=metadata, repo=myrepo)
55330 +
55331 cpv_slot_list = [pkg]
55332
55333 - mycp=mysplit[0]+"/"+mysplit[1]
55334 + mycp = pkg.cp
55335
55336 - # XXX- This is a temporary duplicate of code from the config constructor.
55337 - locations = [os.path.join(settings["PORTDIR"], "profiles")]
55338 + locations = []
55339 + if pkg.repo in settings.repositories:
55340 + for repo in settings.repositories[pkg.repo].masters + (settings.repositories[pkg.repo],):
55341 + locations.append(os.path.join(repo.location, "profiles"))
55342 locations.extend(settings.profiles)
55343 - for ov in settings["PORTDIR_OVERLAY"].split():
55344 - profdir = os.path.join(normalize_path(ov), "profiles")
55345 - if os.path.isdir(profdir):
55346 - locations.append(profdir)
55347 locations.append(os.path.join(settings["PORTAGE_CONFIGROOT"],
55348 USER_CONFIG_PATH))
55349 locations.reverse()
55350
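getmaskingreason() above now expects a _pkg_str (or Package) with .cp, .slot and .repo attributes and wraps a plain CPV string on demand, instead of assembling a cpv:slot::repo string by hand. A small sketch of that idiom with assumed metadata, using the same _pkg_str(metadata=...) form as the hunk:

# Assumed CPV and metadata; _pkg_str is called as in the hunk above.
from portage.versions import _pkg_str

pkg = "app-misc/example-1.0"                   # hypothetical package
metadata = {"SLOT": "0", "repository": "gentoo"}

try:
    pkg.slot                                   # already a _pkg_str/Package?
except AttributeError:
    pkg = _pkg_str(pkg, metadata=metadata)     # wrap the plain string

print(pkg.cp, pkg.slot)                        # expected: app-misc/example 0
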
55351 diff --git a/pym/portage/package/ebuild/getmaskingstatus.py b/pym/portage/package/ebuild/getmaskingstatus.py
55352 index 9bf605d..4b9e588 100644
55353 --- a/pym/portage/package/ebuild/getmaskingstatus.py
55354 +++ b/pym/portage/package/ebuild/getmaskingstatus.py
55355 @@ -1,17 +1,21 @@
55356 -# Copyright 2010-2012 Gentoo Foundation
55357 +# Copyright 2010-2014 Gentoo Foundation
55358 # Distributed under the terms of the GNU General Public License v2
55359
55360 +from __future__ import unicode_literals
55361 +
55362 __all__ = ['getmaskingstatus']
55363
55364 import sys
55365
55366 import portage
55367 from portage import eapi_is_supported, _eapi_is_deprecated
55368 +from portage.exception import InvalidDependString
55369 from portage.localization import _
55370 from portage.package.ebuild.config import config
55371 from portage.versions import catpkgsplit, _pkg_str
55372
55373 if sys.hexversion >= 0x3000000:
55374 + # pylint: disable=W0622
55375 basestring = str
55376
55377 class _UnmaskHint(object):
55378 @@ -48,7 +52,7 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
55379 # emerge passed in a Package instance
55380 pkg = mycpv
55381 mycpv = pkg.cpv
55382 - metadata = pkg.metadata
55383 + metadata = pkg._metadata
55384 installed = pkg.installed
55385
55386 if metadata is None:
55387 @@ -65,10 +69,11 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
55388 else:
55389 metadata["USE"] = ""
55390
55391 - if not hasattr(mycpv, 'slot'):
55392 + try:
55393 + mycpv.slot
55394 + except AttributeError:
55395 try:
55396 - mycpv = _pkg_str(mycpv, slot=metadata['SLOT'],
55397 - repo=metadata.get('repository'))
55398 + mycpv = _pkg_str(mycpv, metadata=metadata, settings=settings)
55399 except portage.exception.InvalidData:
55400 raise ValueError(_("invalid CPV: %s") % mycpv)
55401
55402 @@ -83,6 +88,7 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
55403 mygroups = settings._getKeywords(mycpv, metadata)
55404 licenses = metadata["LICENSE"]
55405 properties = metadata["PROPERTIES"]
55406 + restrict = metadata["RESTRICT"]
55407 if not eapi_is_supported(eapi):
55408 return [_MaskReason("EAPI", "EAPI %s" % eapi)]
55409 elif _eapi_is_deprecated(eapi) and not installed:
55410 @@ -122,6 +128,13 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
55411 if gp=="*":
55412 kmask=None
55413 break
55414 + elif gp == "~*":
55415 + for x in pgroups:
55416 + if x[:1] == "~":
55417 + kmask = None
55418 + break
55419 + if kmask is None:
55420 + break
55421 elif gp=="-"+myarch and myarch in pgroups:
55422 kmask="-"+myarch
55423 break
55424 @@ -161,6 +174,15 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
55425 except portage.exception.InvalidDependString as e:
55426 rValue.append(_MaskReason("invalid", "PROPERTIES: "+str(e)))
55427
55428 + try:
55429 + missing_restricts = settings._getMissingRestrict(mycpv, metadata)
55430 + if missing_restricts:
55431 + msg = list(missing_restricts)
55432 + msg.append("in RESTRICT")
55433 + rValue.append(_MaskReason("RESTRICT", " ".join(msg)))
55434 + except InvalidDependString as e:
55435 + rValue.append(_MaskReason("invalid", "RESTRICT: %s" % (e,)))
55436 +
55437 # Only show KEYWORDS masks for installed packages
55438 # if they're not masked for any other reason.
55439 if kmask and (not installed or not rValue):
55440
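The new "~*" branch above treats a package keyworded "~*" as visible whenever ACCEPT_KEYWORDS contains any testing entry, and the added RESTRICT block reports missing RESTRICT tokens the same way PROPERTIES is already handled. A stand-alone sketch of just the "~*" test (simplified; the full check lives inside _getmaskingstatus):

    def accepts_tilde_star(accept_keywords):
        # A package keyworded "~*" stops being KEYWORDS-masked as soon as
        # the user accepts at least one testing (~arch) keyword.
        return any(x[:1] == "~" for x in accept_keywords)

    print(accepts_tilde_star(["amd64", "~amd64"]))  # True  -> visible
    print(accepts_tilde_star(["amd64"]))            # False -> still masked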
55441 diff --git a/pym/portage/package/ebuild/prepare_build_dirs.py b/pym/portage/package/ebuild/prepare_build_dirs.py
55442 index b8fbdc5..6782160 100644
55443 --- a/pym/portage/package/ebuild/prepare_build_dirs.py
55444 +++ b/pym/portage/package/ebuild/prepare_build_dirs.py
55445 @@ -1,6 +1,8 @@
55446 -# Copyright 2010-2011 Gentoo Foundation
55447 +# Copyright 2010-2013 Gentoo Foundation
55448 # Distributed under the terms of the GNU General Public License v2
55449
55450 +from __future__ import unicode_literals
55451 +
55452 __all__ = ['prepare_build_dirs']
55453
55454 import errno
55455 @@ -338,12 +340,12 @@ def _prepare_workdir(mysettings):
55456 try:
55457 _ensure_log_subdirs(logdir, log_subdir)
55458 except PortageException as e:
55459 - writemsg(_unicode_decode("!!! %s\n") % (e,), noiselevel=-1)
55460 + writemsg("!!! %s\n" % (e,), noiselevel=-1)
55461
55462 if os.access(log_subdir, os.W_OK):
55463 logdir_subdir_ok = True
55464 else:
55465 - writemsg(_unicode_decode("!!! %s: %s\n") %
55466 + writemsg("!!! %s: %s\n" %
55467 (_("Permission Denied"), log_subdir), noiselevel=-1)
55468
55469 tmpdir_log_path = os.path.join(
55470
55471 diff --git a/pym/portage/process.py b/pym/portage/process.py
55472 index f3cec88..ba41ea8 100644
55473 --- a/pym/portage/process.py
55474 +++ b/pym/portage/process.py
55475 @@ -1,25 +1,30 @@
55476 # portage.py -- core Portage functionality
55477 -# Copyright 1998-2012 Gentoo Foundation
55478 +# Copyright 1998-2014 Gentoo Foundation
55479 # Distributed under the terms of the GNU General Public License v2
55480
55481
55482 import atexit
55483 import errno
55484 +import fcntl
55485 import platform
55486 import signal
55487 +import socket
55488 +import struct
55489 import sys
55490 import traceback
55491 +import os as _os
55492
55493 from portage import os
55494 from portage import _encodings
55495 from portage import _unicode_encode
55496 import portage
55497 portage.proxy.lazyimport.lazyimport(globals(),
55498 - 'portage.util:dump_traceback',
55499 + 'portage.util:dump_traceback,writemsg',
55500 )
55501
55502 from portage.const import BASH_BINARY, SANDBOX_BINARY, FAKEROOT_BINARY
55503 from portage.exception import CommandNotFound
55504 +from portage.util._ctypes import find_library, LoadLibrary, ctypes
55505
55506 try:
55507 import resource
55508 @@ -28,12 +33,35 @@ except ImportError:
55509 max_fd_limit = 256
55510
55511 if sys.hexversion >= 0x3000000:
55512 + # pylint: disable=W0622
55513 basestring = str
55514
55515 -if os.path.isdir("/proc/%i/fd" % os.getpid()):
55516 +# Support PEP 446 for Python >=3.4
55517 +try:
55518 + _set_inheritable = _os.set_inheritable
55519 +except AttributeError:
55520 + _set_inheritable = None
55521 +
55522 +try:
55523 + _FD_CLOEXEC = fcntl.FD_CLOEXEC
55524 +except AttributeError:
55525 + _FD_CLOEXEC = None
55526 +
55527 +# Prefer /proc/self/fd if available (/dev/fd
55528 +# doesn't work on solaris, see bug #474536).
55529 +for _fd_dir in ("/proc/self/fd", "/dev/fd"):
55530 + if os.path.isdir(_fd_dir):
55531 + break
55532 + else:
55533 + _fd_dir = None
55534 +
55535 +# /dev/fd does not work on FreeBSD, see bug #478446
55536 +if platform.system() in ('FreeBSD',) and _fd_dir == '/dev/fd':
55537 + _fd_dir = None
55538 +
55539 +if _fd_dir is not None:
55540 def get_open_fds():
55541 - return (int(fd) for fd in os.listdir("/proc/%i/fd" % os.getpid()) \
55542 - if fd.isdigit())
55543 + return (int(fd) for fd in os.listdir(_fd_dir) if fd.isdigit())
55544
55545 if platform.python_implementation() == 'PyPy':
55546 # EAGAIN observed with PyPy 1.8.
55547 @@ -46,6 +74,13 @@ if os.path.isdir("/proc/%i/fd" % os.getpid()):
55548 raise
55549 return range(max_fd_limit)
55550
55551 +elif os.path.isdir("/proc/%s/fd" % os.getpid()):
55552 + # In order for this function to work in forked subprocesses,
55553 + # os.getpid() must be called from inside the function.
55554 + def get_open_fds():
55555 + return (int(fd) for fd in os.listdir("/proc/%s/fd" % os.getpid())
55556 + if fd.isdigit())
55557 +
55558 else:
55559 def get_open_fds():
55560 return range(max_fd_limit)
55561 @@ -83,14 +118,14 @@ def spawn_bash(mycommand, debug=False, opt_name=None, **keywords):
55562 def spawn_sandbox(mycommand, opt_name=None, **keywords):
55563 if not sandbox_capable:
55564 return spawn_bash(mycommand, opt_name=opt_name, **keywords)
55565 - args=[SANDBOX_BINARY]
55566 + args = [SANDBOX_BINARY]
55567 if not opt_name:
55568 opt_name = os.path.basename(mycommand.split()[0])
55569 args.append(mycommand)
55570 return spawn(args, opt_name=opt_name, **keywords)
55571
55572 def spawn_fakeroot(mycommand, fakeroot_state=None, opt_name=None, **keywords):
55573 - args=[FAKEROOT_BINARY]
55574 + args = [FAKEROOT_BINARY]
55575 if not opt_name:
55576 opt_name = os.path.basename(mycommand.split()[0])
55577 if fakeroot_state:
55578 @@ -141,30 +176,28 @@ def run_exitfuncs():
55579
55580 atexit.register(run_exitfuncs)
55581
55582 -# We need to make sure that any processes spawned are killed off when
55583 -# we exit. spawn() takes care of adding and removing pids to this list
55584 -# as it creates and cleans up processes.
55585 -spawned_pids = []
55586 -def cleanup():
55587 - while spawned_pids:
55588 - pid = spawned_pids.pop()
55589 +# It used to be necessary for API consumers to remove pids from spawned_pids,
55590 +# since otherwise it would accumulate pids endlessly. Now, spawned_pids is
55591 +# just an empty dummy list, so for backward compatibility, ignore ValueError
55592 +# for removal on non-existent items.
55593 +class _dummy_list(list):
55594 + def remove(self, item):
55595 + # TODO: Trigger a DeprecationWarning here, after stable portage
55596 + # has dummy spawned_pids.
55597 try:
55598 - # With waitpid and WNOHANG, only check the
55599 - # first element of the tuple since the second
55600 - # element may vary (bug #337465).
55601 - if os.waitpid(pid, os.WNOHANG)[0] == 0:
55602 - os.kill(pid, signal.SIGTERM)
55603 - os.waitpid(pid, 0)
55604 - except OSError:
55605 - # This pid has been cleaned up outside
55606 - # of spawn().
55607 + list.remove(self, item)
55608 + except ValueError:
55609 pass
55610
55611 -atexit_register(cleanup)
55612 +spawned_pids = _dummy_list()
55613 +
55614 +def cleanup():
55615 + pass
55616
55617 def spawn(mycommand, env={}, opt_name=None, fd_pipes=None, returnpid=False,
55618 uid=None, gid=None, groups=None, umask=None, logfile=None,
55619 - path_lookup=True, pre_exec=None):
55620 + path_lookup=True, pre_exec=None, close_fds=True, unshare_net=False,
55621 + unshare_ipc=False, cgroup=None):
55622 """
55623 Spawns a given command.
55624
55625 @@ -175,6 +208,7 @@ def spawn(mycommand, env={}, opt_name=None, fd_pipes=None, returnpid=False,
55626 @param opt_name: an optional name for the spawn'd process (defaults to the binary name)
55627 @type opt_name: String
55628 @param fd_pipes: A dict of mapping for pipes, { '0': stdin, '1': stdout } for example
55629 + (default is {0:stdin, 1:stdout, 2:stderr})
55630 @type fd_pipes: Dictionary
55631 @param returnpid: Return the Process IDs for a successful spawn.
55632 NOTE: This requires the caller clean up all the PIDs, otherwise spawn will clean them.
55633 @@ -193,7 +227,16 @@ def spawn(mycommand, env={}, opt_name=None, fd_pipes=None, returnpid=False,
55634 @type path_lookup: Boolean
55635 @param pre_exec: A function to be called with no arguments just prior to the exec call.
55636 @type pre_exec: callable
55637 -
55638 + @param close_fds: If True, then close all file descriptors except those
55639 + referenced by fd_pipes (default is True).
55640 + @type close_fds: Boolean
55641 + @param unshare_net: If True, networking will be unshared from the spawned process
55642 + @type unshare_net: Boolean
55643 + @param unshare_ipc: If True, IPC will be unshared from the spawned process
55644 + @type unshare_ipc: Boolean
55645 + @param cgroup: CGroup path to bind the process to
55646 + @type cgroup: String
55647 +
55648 logfile requires stdout and stderr to be assigned to this process (ie not pointed
55649 somewhere else.)
55650
55651 @@ -226,9 +269,9 @@ def spawn(mycommand, env={}, opt_name=None, fd_pipes=None, returnpid=False,
55652 # default to propagating our stdin, stdout and stderr.
55653 if fd_pipes is None:
55654 fd_pipes = {
55655 - 0:sys.stdin.fileno(),
55656 - 1:sys.stdout.fileno(),
55657 - 2:sys.stderr.fileno(),
55658 + 0:portage._get_stdin().fileno(),
55659 + 1:sys.__stdout__.fileno(),
55660 + 2:sys.__stderr__.fileno(),
55661 }
55662
55663 # mypids will hold the pids of all processes created.
55664 @@ -256,21 +299,40 @@ def spawn(mycommand, env={}, opt_name=None, fd_pipes=None, returnpid=False,
55665 fd_pipes[1] = pw
55666 fd_pipes[2] = pw
55667
55668 - pid = os.fork()
55669 + # This caches the libc library lookup in the current
55670 + # process, so that it's only done once rather than
55671 + # for each child process.
55672 + if unshare_net or unshare_ipc:
55673 + find_library("c")
55674
55675 - if pid == 0:
55676 - try:
55677 - _exec(binary, mycommand, opt_name, fd_pipes,
55678 - env, gid, groups, uid, umask, pre_exec)
55679 - except SystemExit:
55680 - raise
55681 - except Exception as e:
55682 - # We need to catch _any_ exception so that it doesn't
55683 - # propagate out of this function and cause exiting
55684 - # with anything other than os._exit()
55685 - sys.stderr.write("%s:\n %s\n" % (e, " ".join(mycommand)))
55686 - traceback.print_exc()
55687 - sys.stderr.flush()
55688 + parent_pid = os.getpid()
55689 + pid = None
55690 + try:
55691 + pid = os.fork()
55692 +
55693 + if pid == 0:
55694 + try:
55695 + _exec(binary, mycommand, opt_name, fd_pipes,
55696 + env, gid, groups, uid, umask, pre_exec, close_fds,
55697 + unshare_net, unshare_ipc, cgroup)
55698 + except SystemExit:
55699 + raise
55700 + except Exception as e:
55701 + # We need to catch _any_ exception so that it doesn't
55702 + # propagate out of this function and cause exiting
55703 + # with anything other than os._exit()
55704 + writemsg("%s:\n %s\n" % (e, " ".join(mycommand)),
55705 + noiselevel=-1)
55706 + traceback.print_exc()
55707 + sys.stderr.flush()
55708 +
55709 + finally:
55710 + if pid == 0 or (pid is None and os.getpid() != parent_pid):
55711 + # Call os._exit() from a finally block in order
55712 + # to suppress any finally blocks from earlier
55713 + # in the call stack (see bug #345289). This
55714 + # finally block has to be setup before the fork
55715 + # in order to avoid a race condition.
55716 os._exit(1)
55717
55718 if not isinstance(pid, int):
55719 @@ -278,7 +340,6 @@ def spawn(mycommand, env={}, opt_name=None, fd_pipes=None, returnpid=False,
55720
55721 # Add the pid to our local and the global pid lists.
55722 mypids.append(pid)
55723 - spawned_pids.append(pid)
55724
55725 # If we started a tee process the write side of the pipe is no
55726 # longer needed, so close it.
55727 @@ -301,10 +362,6 @@ def spawn(mycommand, env={}, opt_name=None, fd_pipes=None, returnpid=False,
55728 # and wait for it.
55729 retval = os.waitpid(pid, 0)[1]
55730
55731 - # When it's done, we can remove it from the
55732 - # global pid list as well.
55733 - spawned_pids.remove(pid)
55734 -
55735 if retval:
55736 # If it failed, kill off anything else that
55737 # isn't dead yet.
55738 @@ -315,7 +372,6 @@ def spawn(mycommand, env={}, opt_name=None, fd_pipes=None, returnpid=False,
55739 if os.waitpid(pid, os.WNOHANG)[0] == 0:
55740 os.kill(pid, signal.SIGTERM)
55741 os.waitpid(pid, 0)
55742 - spawned_pids.remove(pid)
55743
55744 # If it got a signal, return the signal that was sent.
55745 if (retval & 0xff):
55746 @@ -328,7 +384,7 @@ def spawn(mycommand, env={}, opt_name=None, fd_pipes=None, returnpid=False,
55747 return 0
55748
55749 def _exec(binary, mycommand, opt_name, fd_pipes, env, gid, groups, uid, umask,
55750 - pre_exec):
55751 + pre_exec, close_fds, unshare_net, unshare_ipc, cgroup):
55752
55753 """
55754 Execute a given binary with options
55755 @@ -353,10 +409,16 @@ def _exec(binary, mycommand, opt_name, fd_pipes, env, gid, groups, uid, umask,
55756 @type umask: Integer
55757 @param pre_exec: A function to be called with no arguments just prior to the exec call.
55758 @type pre_exec: callable
55759 + @param unshare_net: If True, networking will be unshared from the spawned process
55760 + @type unshare_net: Boolean
55761 + @param unshare_ipc: If True, IPC will be unshared from the spawned process
55762 + @type unshare_ipc: Boolean
55763 + @param cgroup: CGroup path to bind the process to
55764 + @type cgroup: String
55765 @rtype: None
55766 @return: Never returns (calls os.execve)
55767 """
55768 -
55769 +
55770 # If the process we're creating hasn't been given a name
55771 # assign it the name of the executable.
55772 if not opt_name:
55773 @@ -371,6 +433,10 @@ def _exec(binary, mycommand, opt_name, fd_pipes, env, gid, groups, uid, umask,
55774 myargs = [opt_name]
55775 myargs.extend(mycommand[1:])
55776
55777 + # Avoid a potential UnicodeEncodeError from os.execve().
55778 + myargs = [_unicode_encode(x, encoding=_encodings['fs'],
55779 + errors='strict') for x in myargs]
55780 +
55781 # Use default signal handlers in order to avoid problems
55782 # killing subprocesses as reported in bug #353239.
55783 signal.signal(signal.SIGINT, signal.SIG_DFL)
55784 @@ -383,15 +449,63 @@ def _exec(binary, mycommand, opt_name, fd_pipes, env, gid, groups, uid, umask,
55785 # the parent process (see bug #289486).
55786 signal.signal(signal.SIGQUIT, signal.SIG_DFL)
55787
55788 - _setup_pipes(fd_pipes)
55789 + _setup_pipes(fd_pipes, close_fds=close_fds, inheritable=True)
55790 +
55791 + # Add to cgroup
55792 + # it's better to do it from the child since we can guarantee
55793 + # it is done before we start forking children
55794 + if cgroup:
55795 + with open(os.path.join(cgroup, 'cgroup.procs'), 'a') as f:
55796 + f.write('%d\n' % os.getpid())
55797 +
55798 + # Unshare (while still uid==0)
55799 + if unshare_net or unshare_ipc:
55800 + filename = find_library("c")
55801 + if filename is not None:
55802 + libc = LoadLibrary(filename)
55803 + if libc is not None:
55804 + CLONE_NEWIPC = 0x08000000
55805 + CLONE_NEWNET = 0x40000000
55806 +
55807 + flags = 0
55808 + if unshare_net:
55809 + flags |= CLONE_NEWNET
55810 + if unshare_ipc:
55811 + flags |= CLONE_NEWIPC
55812 +
55813 + try:
55814 + if libc.unshare(flags) != 0:
55815 + writemsg("Unable to unshare: %s\n" % (
55816 + errno.errorcode.get(ctypes.get_errno(), '?')),
55817 + noiselevel=-1)
55818 + else:
55819 + if unshare_net:
55820 + # 'up' the loopback
55821 + IFF_UP = 0x1
55822 + ifreq = struct.pack('16sh', b'lo', IFF_UP)
55823 + SIOCSIFFLAGS = 0x8914
55824 +
55825 + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
55826 + try:
55827 + fcntl.ioctl(sock, SIOCSIFFLAGS, ifreq)
55828 + except IOError as e:
55829 + writemsg("Unable to enable loopback interface: %s\n" % (
55830 + errno.errorcode.get(e.errno, '?')),
55831 + noiselevel=-1)
55832 + sock.close()
55833 + except AttributeError:
55834 + # unshare() not supported by libc
55835 + pass
55836
55837 # Set requested process permissions.
55838 if gid:
55839 - os.setgid(gid)
55840 + # Cast proxies to int, in case it matters.
55841 + os.setgid(int(gid))
55842 if groups:
55843 os.setgroups(groups)
55844 if uid:
55845 - os.setuid(uid)
55846 + # Cast proxies to int, in case it matters.
55847 + os.setuid(int(uid))
55848 if umask:
55849 os.umask(umask)
55850 if pre_exec:
55851 @@ -400,9 +514,16 @@ def _exec(binary, mycommand, opt_name, fd_pipes, env, gid, groups, uid, umask,
55852 # And switch to the new process.
55853 os.execve(binary, myargs, env)
55854
55855 -def _setup_pipes(fd_pipes, close_fds=True):
55856 +def _setup_pipes(fd_pipes, close_fds=True, inheritable=None):
55857 """Setup pipes for a forked process.
55858
55859 + Even when close_fds is False, file descriptors referenced as
55860 + values in fd_pipes are automatically closed if they do not also
55861 + occur as keys in fd_pipes. It is assumed that the caller will
55862 + explicitly add them to the fd_pipes keys if they are intended
55863 + to remain open. This allows for convenient elimination of
55864 + unnecessary duplicate file descriptors.
55865 +
55866 WARNING: When not followed by exec, the close_fds behavior
55867 can trigger interference from destructors that close file
55868 descriptors. This interference happens when the garbage
55869 @@ -413,22 +534,92 @@ def _setup_pipes(fd_pipes, close_fds=True):
55870 and also with CPython under some circumstances (as triggered
55871 by xmpppy in bug #374335). In order to close a safe subset of
55872 file descriptors, see portage.locks._close_fds().
55873 +
55874 + NOTE: When not followed by exec, even when close_fds is False,
55875 + it's still possible for dup2() calls to cause interference in a
55876 + way that's similar to the way that close_fds interferes (since
55877 + dup2() has to close the target fd if it happens to be open).
55878 + It's possible to avoid such interference by using allocated
55879 + file descriptors as the keys in fd_pipes. For example:
55880 +
55881 + pr, pw = os.pipe()
55882 + fd_pipes[pw] = pw
55883 +
55884 + By using the allocated pw file descriptor as the key in fd_pipes,
55885 + it's not necessary for dup2() to close a file descriptor (it
55886 + actually does nothing in this case), which avoids possible
55887 + interference.
55888 """
55889 - my_fds = {}
55890 +
55891 + reverse_map = {}
55892 # To protect from cases where direct assignment could
55893 - # clobber needed fds ({1:2, 2:1}) we first dupe the fds
55894 - # into unused fds.
55895 - for fd in fd_pipes:
55896 - my_fds[fd] = os.dup(fd_pipes[fd])
55897 - # Then assign them to what they should be.
55898 - for fd in my_fds:
55899 - os.dup2(my_fds[fd], fd)
55900 + # clobber needed fds ({1:2, 2:1}) we create a reverse map
55901 + # in order to know when it's necessary to create temporary
55902 + # backup copies with os.dup().
55903 + for newfd, oldfd in fd_pipes.items():
55904 + newfds = reverse_map.get(oldfd)
55905 + if newfds is None:
55906 + newfds = []
55907 + reverse_map[oldfd] = newfds
55908 + newfds.append(newfd)
55909 +
55910 + # Assign newfds via dup2(), making temporary backups when
55911 + # necessary, and closing oldfd if the caller has not
55912 + # explicitly requested for it to remain open by adding
55913 + # it to the keys of fd_pipes.
55914 + while reverse_map:
55915 +
55916 + oldfd, newfds = reverse_map.popitem()
55917 + old_fdflags = None
55918 +
55919 + for newfd in newfds:
55920 + if newfd in reverse_map:
55921 + # Make a temporary backup before re-assignment, assuming
55922 + # that backup_fd won't collide with a key in reverse_map
55923 + # (since all of the keys correspond to open file
55924 + # descriptors, and os.dup() only allocates a previously
55925 + # unused file descriptors).
55926 + backup_fd = os.dup(newfd)
55927 + reverse_map[backup_fd] = reverse_map.pop(newfd)
55928 +
55929 + if oldfd != newfd:
55930 + os.dup2(oldfd, newfd)
55931 + if _set_inheritable is not None:
55932 + # Don't do this unless _set_inheritable is available,
55933 + # since it's used below to ensure correct state, and
55934 + # otherwise /dev/null stdin fails to inherit (at least
55935 + # with Python versions from 3.1 to 3.3).
55936 + if old_fdflags is None:
55937 + old_fdflags = fcntl.fcntl(oldfd, fcntl.F_GETFD)
55938 + fcntl.fcntl(newfd, fcntl.F_SETFD, old_fdflags)
55939 +
55940 + if _set_inheritable is not None:
55941 +
55942 + inheritable_state = None
55943 + if not (old_fdflags is None or _FD_CLOEXEC is None):
55944 + inheritable_state = not bool(old_fdflags & _FD_CLOEXEC)
55945 +
55946 + if inheritable is not None:
55947 + if inheritable_state is not inheritable:
55948 + _set_inheritable(newfd, inheritable)
55949 +
55950 + elif newfd in (0, 1, 2):
55951 + if inheritable_state is not True:
55952 + _set_inheritable(newfd, True)
55953 +
55954 + if oldfd not in fd_pipes:
55955 + # If oldfd is not a key in fd_pipes, then it's safe
55956 + # to close now, since we've already made all of the
55957 + # requested duplicates. This also closes every
55958 + # backup_fd that may have been created on previous
55959 + # iterations of this loop.
55960 + os.close(oldfd)
55961
55962 if close_fds:
55963 # Then close _all_ fds that haven't been explicitly
55964 # requested to be kept open.
55965 for fd in get_open_fds():
55966 - if fd not in my_fds:
55967 + if fd not in fd_pipes:
55968 try:
55969 os.close(fd)
55970 except OSError:
55971 @@ -443,8 +634,16 @@ def find_binary(binary):
55972 @rtype: None or string
55973 @return: full path to binary or None if the binary could not be located.
55974 """
55975 - for path in os.environ.get("PATH", "").split(":"):
55976 - filename = "%s/%s" % (path, binary)
55977 - if os.access(filename, os.X_OK) and os.path.isfile(filename):
55978 + paths = os.environ.get("PATH", "")
55979 + if sys.hexversion >= 0x3000000 and isinstance(binary, bytes):
55980 + # return bytes when input is bytes
55981 + paths = paths.encode(sys.getfilesystemencoding(), 'surrogateescape')
55982 + paths = paths.split(b':')
55983 + else:
55984 + paths = paths.split(':')
55985 +
55986 + for path in paths:
55987 + filename = _os.path.join(path, binary)
55988 + if _os.access(filename, os.X_OK) and _os.path.isfile(filename):
55989 return filename
55990 return None
55991
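The spawn() changes above add close_fds, cgroup, unshare_net and unshare_ipc arguments; the namespace part reduces to a libc unshare(2) call followed by bringing loopback up inside the new network namespace. A stand-alone sketch of that sequence, assuming Linux and CAP_SYS_ADMIN; the constant values are the ones used in _exec() above, the function name unshare_net_ipc is hypothetical, and ctypes.CDLL is used directly instead of portage.util._ctypes:

    import ctypes, ctypes.util, errno, fcntl, socket, struct

    CLONE_NEWIPC = 0x08000000
    CLONE_NEWNET = 0x40000000
    IFF_UP = 0x1
    SIOCSIFFLAGS = 0x8914

    def unshare_net_ipc():
        # Load libc with use_errno=True so ctypes.get_errno() is meaningful.
        libc_name = ctypes.util.find_library("c")
        if libc_name is None:
            return False
        libc = ctypes.CDLL(libc_name, use_errno=True)
        try:
            rc = libc.unshare(CLONE_NEWNET | CLONE_NEWIPC)
        except AttributeError:
            # unshare() not exported by this libc
            return False
        if rc != 0:
            print("unshare failed: %s" %
                errno.errorcode.get(ctypes.get_errno(), "?"))
            return False
        # 'up' the loopback device in the fresh network namespace,
        # like the ioctl(SIOCSIFFLAGS) call in _exec() above.
        ifreq = struct.pack("16sh", b"lo", IFF_UP)
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
        try:
            fcntl.ioctl(sock, SIOCSIFFLAGS, ifreq)
        finally:
            sock.close()
        return True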
55992 diff --git a/pym/portage/proxy/lazyimport.py b/pym/portage/proxy/lazyimport.py
55993 index ad4a542..5aa7e50 100644
55994 --- a/pym/portage/proxy/lazyimport.py
55995 +++ b/pym/portage/proxy/lazyimport.py
55996 @@ -1,4 +1,4 @@
55997 -# Copyright 2009 Gentoo Foundation
55998 +# Copyright 2009-2014 Gentoo Foundation
55999 # Distributed under the terms of the GNU General Public License v2
56000
56001 __all__ = ['lazyimport']
56002 @@ -14,6 +14,7 @@ except ImportError:
56003 from portage.proxy.objectproxy import ObjectProxy
56004
56005 if sys.hexversion >= 0x3000000:
56006 + # pylint: disable=W0622
56007 basestring = str
56008
56009 _module_proxies = {}
56010 @@ -32,7 +33,7 @@ def _preload_portage_submodules():
56011 while True:
56012 remaining = False
56013 for name in list(_module_proxies):
56014 - if name.startswith('portage.'):
56015 + if name.startswith('portage.') or name.startswith('_emerge.'):
56016 if name in imported:
56017 continue
56018 imported.add(name)
56019
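_preload_portage_submodules() now also resolves proxies registered under the _emerge. namespace. For context, this is how lazyimport() is consumed elsewhere in this commit (compare the portage.util import in process.py above); a minimal usage sketch, assuming portage is importable:

    import portage

    # Register proxies instead of importing portage.util eagerly; the real
    # module is imported the first time one of these names is touched.
    portage.proxy.lazyimport.lazyimport(globals(),
        'portage.util:dump_traceback,writemsg',
    )

    writemsg("portage.util was imported lazily\n")  # triggers the real import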
56020 diff --git a/pym/portage/proxy/objectproxy.py b/pym/portage/proxy/objectproxy.py
56021 index 92b36d1..a755774 100644
56022 --- a/pym/portage/proxy/objectproxy.py
56023 +++ b/pym/portage/proxy/objectproxy.py
56024 @@ -1,4 +1,4 @@
56025 -# Copyright 2008-2009 Gentoo Foundation
56026 +# Copyright 2008-2012 Gentoo Foundation
56027 # Distributed under the terms of the GNU General Public License v2
56028
56029 import sys
56030 @@ -30,6 +30,13 @@ class ObjectProxy(object):
56031 result = object.__getattribute__(self, '_get_target')()
56032 return result(*args, **kwargs)
56033
56034 + def __enter__(self):
56035 + return object.__getattribute__(self, '_get_target')().__enter__()
56036 +
56037 + def __exit__(self, exc_type, exc_value, traceback):
56038 + return object.__getattribute__(self, '_get_target')().__exit__(
56039 + exc_type, exc_value, traceback)
56040 +
56041 def __setitem__(self, key, value):
56042 object.__getattribute__(self, '_get_target')()[key] = value
56043
56044
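The ObjectProxy additions forward __enter__/__exit__ so a proxied object can be used in a with statement. A minimal lazy-proxy sketch showing why the forwarding matters; _LazyProxy here is a simplified, hypothetical stand-in for ObjectProxy:

    import io

    class _LazyProxy(object):
        # Simplified stand-in: the target is created on first use and cached.
        def __init__(self, factory):
            object.__setattr__(self, "_factory", factory)
            object.__setattr__(self, "_target", None)

        def _get_target(self):
            target = object.__getattribute__(self, "_target")
            if target is None:
                target = object.__getattribute__(self, "_factory")()
                object.__setattr__(self, "_target", target)
            return target

        def __enter__(self):
            return object.__getattribute__(self, "_get_target")().__enter__()

        def __exit__(self, exc_type, exc_value, traceback):
            return object.__getattribute__(self, "_get_target")().__exit__(
                exc_type, exc_value, traceback)

    proxy = _LazyProxy(lambda: io.StringIO(u"[DEFAULT]\nmain-repo = gentoo\n"))
    with proxy as f:   # works only because __enter__/__exit__ are forwarded
        print(f.read())

Because the with statement looks these methods up on the type rather than the instance, __getattr__-style delegation alone is not enough, hence the explicit methods.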
56045 diff --git a/pym/portage/repository/config.py b/pym/portage/repository/config.py
56046 index 9b43f38..5e0d055 100644
56047 --- a/pym/portage/repository/config.py
56048 +++ b/pym/portage/repository/config.py
56049 @@ -1,6 +1,8 @@
56050 -# Copyright 2010-2012 Gentoo Foundation
56051 +# Copyright 2010-2014 Gentoo Foundation
56052 # Distributed under the terms of the GNU General Public License v2
56053
56054 +from __future__ import unicode_literals
56055 +
56056 import io
56057 import logging
56058 import warnings
56059 @@ -8,25 +10,35 @@ import sys
56060 import re
56061
56062 try:
56063 - from configparser import ParsingError
56064 + from configparser import Error as ConfigParserError
56065 if sys.hexversion >= 0x3020000:
56066 from configparser import ConfigParser as SafeConfigParser
56067 else:
56068 from configparser import SafeConfigParser
56069 except ImportError:
56070 - from ConfigParser import SafeConfigParser, ParsingError
56071 + from ConfigParser import SafeConfigParser, Error as ConfigParserError
56072 +import portage
56073 from portage import eclass_cache, os
56074 from portage.const import (MANIFEST2_HASH_FUNCTIONS, MANIFEST2_REQUIRED_HASH,
56075 - REPO_NAME_LOC, USER_CONFIG_PATH)
56076 + PORTAGE_BASE_PATH, REPO_NAME_LOC, USER_CONFIG_PATH)
56077 +from portage.eapi import eapi_allows_directories_on_profile_level_and_repository_level
56078 from portage.env.loaders import KeyValuePairFileLoader
56079 from portage.util import (normalize_path, read_corresponding_eapi_file, shlex_split,
56080 - stack_lists, writemsg, writemsg_level)
56081 + stack_lists, writemsg, writemsg_level, _recursive_file_list)
56082 +from portage.util._path import exists_raise_eaccess, isdir_raise_eaccess
56083 from portage.localization import _
56084 from portage import _unicode_decode
56085 from portage import _unicode_encode
56086 from portage import _encodings
56087 from portage import manifest
56088
56089 +if sys.hexversion >= 0x3000000:
56090 + # pylint: disable=W0622
56091 + basestring = str
56092 +
56093 +# Characters prohibited by repoman's file.name check.
56094 +_invalid_path_char_re = re.compile(r'[^a-zA-Z0-9._\-+:/]')
56095 +
56096 _valid_profile_formats = frozenset(
56097 ['pms', 'portage-1', 'portage-2'])
56098
56099 @@ -48,38 +60,76 @@ def _gen_valid_repo(name):
56100 name = None
56101 return name
56102
56103 +def _find_invalid_path_char(path, pos=0, endpos=None):
56104 + """
56105 + Returns the position of the first invalid character found in path,
56106 + or -1 if no invalid characters are found.
56107 + """
56108 + if endpos is None:
56109 + endpos = len(path)
56110 +
56111 + m = _invalid_path_char_re.search(path, pos=pos, endpos=endpos)
56112 + if m is not None:
56113 + return m.start()
56114 +
56115 + return -1
56116 +
56117 class RepoConfig(object):
56118 """Stores config of one repository"""
56119
56120 __slots__ = ('aliases', 'allow_missing_manifest', 'allow_provide_virtual',
56121 'cache_formats', 'create_manifest', 'disable_manifest', 'eapi',
56122 - 'eclass_db', 'eclass_locations', 'eclass_overrides', 'format', 'location',
56123 + 'eclass_db', 'eclass_locations', 'eclass_overrides',
56124 + 'find_invalid_path_char', 'force', 'format', 'local_config', 'location',
56125 'main_repo', 'manifest_hashes', 'masters', 'missing_repo_name',
56126 'name', 'portage1_profiles', 'portage1_profiles_compat', 'priority',
56127 - 'profile_formats', 'sign_commit', 'sign_manifest', 'sync',
56128 - 'thin_manifest', 'update_changelog', 'user_location')
56129 + 'profile_formats', 'sign_commit', 'sign_manifest', 'sync_cvs_repo',
56130 + 'sync_type', 'sync_uri', 'thin_manifest', 'update_changelog',
56131 + 'user_location', '_eapis_banned', '_eapis_deprecated', '_masters_orig')
56132
56133 - def __init__(self, name, repo_opts):
56134 + def __init__(self, name, repo_opts, local_config=True):
56135 """Build a RepoConfig with options in repo_opts
56136 Try to read repo_name in repository location, but if
56137 it is not found use variable name as repository name"""
56138 - aliases = repo_opts.get('aliases')
56139 - if aliases is not None:
56140 - aliases = tuple(aliases.split())
56141 +
56142 + force = repo_opts.get('force')
56143 + if force is not None:
56144 + force = tuple(force.split())
56145 + self.force = force
56146 + if force is None:
56147 + force = ()
56148 +
56149 + self.local_config = local_config
56150 +
56151 + if local_config or 'aliases' in force:
56152 + aliases = repo_opts.get('aliases')
56153 + if aliases is not None:
56154 + aliases = tuple(aliases.split())
56155 + else:
56156 + aliases = None
56157 +
56158 self.aliases = aliases
56159
56160 - eclass_overrides = repo_opts.get('eclass-overrides')
56161 - if eclass_overrides is not None:
56162 - eclass_overrides = tuple(eclass_overrides.split())
56163 + if local_config or 'eclass-overrides' in force:
56164 + eclass_overrides = repo_opts.get('eclass-overrides')
56165 + if eclass_overrides is not None:
56166 + eclass_overrides = tuple(eclass_overrides.split())
56167 + else:
56168 + eclass_overrides = None
56169 +
56170 self.eclass_overrides = eclass_overrides
56171 # Eclass databases and locations are computed later.
56172 self.eclass_db = None
56173 self.eclass_locations = None
56174
56175 - # Masters from repos.conf override layout.conf.
56176 - masters = repo_opts.get('masters')
56177 - if masters is not None:
56178 - masters = tuple(masters.split())
56179 + if local_config or 'masters' in force:
56180 + # Masters from repos.conf override layout.conf.
56181 + masters = repo_opts.get('masters')
56182 + if masters is not None:
56183 + masters = tuple(masters.split())
56184 + else:
56185 + masters = None
56186 +
56187 self.masters = masters
56188
56189 #The main-repo key makes only sense for the 'DEFAULT' section.
56190 @@ -93,11 +143,22 @@ class RepoConfig(object):
56191 priority = None
56192 self.priority = priority
56193
56194 - sync = repo_opts.get('sync')
56195 - if sync is not None:
56196 - sync = sync.strip()
56197 - self.sync = sync
56198 + sync_cvs_repo = repo_opts.get('sync-cvs-repo')
56199 + if sync_cvs_repo is not None:
56200 + sync_cvs_repo = sync_cvs_repo.strip()
56201 + self.sync_cvs_repo = sync_cvs_repo or None
56202 +
56203 + sync_type = repo_opts.get('sync-type')
56204 + if sync_type is not None:
56205 + sync_type = sync_type.strip()
56206 + self.sync_type = sync_type or None
56207
56208 + sync_uri = repo_opts.get('sync-uri')
56209 + if sync_uri is not None:
56210 + sync_uri = sync_uri.strip()
56211 + self.sync_uri = sync_uri or None
56212 +
56213 + # Not implemented.
56214 format = repo_opts.get('format')
56215 if format is not None:
56216 format = format.strip()
56217 @@ -106,7 +167,7 @@ class RepoConfig(object):
56218 location = repo_opts.get('location')
56219 self.user_location = location
56220 if location is not None and location.strip():
56221 - if os.path.isdir(location):
56222 + if os.path.isdir(location) or portage._sync_mode:
56223 location = os.path.realpath(location)
56224 else:
56225 location = None
56226 @@ -114,14 +175,23 @@ class RepoConfig(object):
56227
56228 eapi = None
56229 missing = True
56230 + self.name = name
56231 if self.location is not None:
56232 eapi = read_corresponding_eapi_file(os.path.join(self.location, REPO_NAME_LOC))
56233 - name, missing = self._read_valid_repo_name(self.location)
56234 - elif name == "DEFAULT":
56235 + self.name, missing = self._read_valid_repo_name(self.location)
56236 + if missing:
56237 + # The name from repos.conf has to be used here for
56238 + # things like emerge-webrsync to work when the repo
56239 + # is empty (bug #484950).
56240 + if name is not None:
56241 + self.name = name
56242 + if portage._sync_mode:
56243 + missing = False
56244 +
56245 + elif name == "DEFAULT":
56246 missing = False
56247
56248 self.eapi = eapi
56249 - self.name = name
56250 self.missing_repo_name = missing
56251 # sign_commit is disabled by default, since it requires Git >=1.7.9,
56252 # and key_id configured by `git config user.signingkey key_id`
56253 @@ -137,18 +207,20 @@ class RepoConfig(object):
56254 self.cache_formats = None
56255 self.portage1_profiles = True
56256 self.portage1_profiles_compat = False
56257 + self.find_invalid_path_char = _find_invalid_path_char
56258 + self._masters_orig = None
56259
56260 # Parse layout.conf.
56261 if self.location:
56262 - layout_filename = os.path.join(self.location, "metadata", "layout.conf")
56263 layout_data = parse_layout_conf(self.location, self.name)[0]
56264 + self._masters_orig = layout_data['masters']
56265
56266 # layout.conf masters may be overridden here if we have a masters
56267 # setting from the user's repos.conf
56268 if self.masters is None:
56269 self.masters = layout_data['masters']
56270
56271 - if layout_data['aliases']:
56272 + if (local_config or 'aliases' in force) and layout_data['aliases']:
56273 aliases = self.aliases
56274 if aliases is None:
56275 aliases = ()
56276 @@ -156,6 +228,12 @@ class RepoConfig(object):
56277 # them the ability to do incremental overrides
56278 self.aliases = layout_data['aliases'] + tuple(aliases)
56279
56280 + if layout_data['repo-name']:
56281 + # allow layout.conf to override repository name
56282 + # useful when having two copies of the same repo enabled
56283 + # to avoid modifying profiles/repo_name in one of them
56284 + self.name = layout_data['repo-name']
56285 +
56286 for value in ('allow-missing-manifest',
56287 'allow-provide-virtual', 'cache-formats',
56288 'create-manifest', 'disable-manifest', 'manifest-hashes',
56289 @@ -163,9 +241,19 @@ class RepoConfig(object):
56290 'sign-commit', 'sign-manifest', 'thin-manifest', 'update-changelog'):
56291 setattr(self, value.lower().replace("-", "_"), layout_data[value])
56292
56293 - self.portage1_profiles = any(x in _portage1_profiles_allow_directories
56294 - for x in layout_data['profile-formats'])
56295 - self.portage1_profiles_compat = layout_data['profile-formats'] == ('portage-1-compat',)
56296 + self.portage1_profiles = eapi_allows_directories_on_profile_level_and_repository_level(eapi) or \
56297 + any(x in _portage1_profiles_allow_directories for x in layout_data['profile-formats'])
56298 + self.portage1_profiles_compat = not eapi_allows_directories_on_profile_level_and_repository_level(eapi) and \
56299 + layout_data['profile-formats'] == ('portage-1-compat',)
56300 +
56301 + self._eapis_banned = frozenset(layout_data['eapis-banned'])
56302 + self._eapis_deprecated = frozenset(layout_data['eapis-deprecated'])
56303 +
56304 + def eapi_is_banned(self, eapi):
56305 + return eapi in self._eapis_banned
56306 +
56307 + def eapi_is_deprecated(self, eapi):
56308 + return eapi in self._eapis_deprecated
56309
56310 def iter_pregenerated_caches(self, auxdbkeys, readonly=True, force=False):
56311 """
56312 @@ -178,7 +266,11 @@ class RepoConfig(object):
56313 if not formats:
56314 if not force:
56315 return
56316 - formats = ('pms',)
56317 + # The default egencache format was 'pms' prior to portage-2.1.11.32
56318 + # (portage versions prior to portage-2.1.11.14 will NOT
56319 + # recognize md5-dict format unless it is explicitly listed in
56320 + # layout.conf).
56321 + formats = ('md5-dict',)
56322
56323 for fmt in formats:
56324 name = None
56325 @@ -209,7 +301,8 @@ class RepoConfig(object):
56326 kwds['hashes'] = self.manifest_hashes
56327 if self.disable_manifest:
56328 kwds['from_scratch'] = True
56329 - return manifest.Manifest(*args, **kwds)
56330 + kwds['find_invalid_path_char'] = self.find_invalid_path_char
56331 + return manifest.Manifest(*args, **portage._native_kwargs(kwds))
56332
56333 def update(self, new_repo):
56334 """Update repository with options in another RepoConfig"""
56335 @@ -272,8 +365,12 @@ class RepoConfig(object):
56336 repo_msg.append(indent + "format: " + self.format)
56337 if self.user_location:
56338 repo_msg.append(indent + "location: " + self.user_location)
56339 - if self.sync:
56340 - repo_msg.append(indent + "sync: " + self.sync)
56341 + if self.sync_cvs_repo:
56342 + repo_msg.append(indent + "sync-cvs-repo: " + self.sync_cvs_repo)
56343 + if self.sync_type:
56344 + repo_msg.append(indent + "sync-type: " + self.sync_type)
56345 + if self.sync_uri:
56346 + repo_msg.append(indent + "sync-uri: " + self.sync_uri)
56347 if self.masters:
56348 repo_msg.append(indent + "masters: " + " ".join(master.name for master in self.masters))
56349 if self.priority is not None:
56350 @@ -281,19 +378,19 @@ class RepoConfig(object):
56351 if self.aliases:
56352 repo_msg.append(indent + "aliases: " + " ".join(self.aliases))
56353 if self.eclass_overrides:
56354 - repo_msg.append(indent + "eclass_overrides: " + \
56355 + repo_msg.append(indent + "eclass-overrides: " + \
56356 " ".join(self.eclass_overrides))
56357 repo_msg.append("")
56358 return "\n".join(repo_msg)
56359
56360 def __repr__(self):
56361 - return "<portage.repository.config.RepoConfig(name='%s', location='%s')>" % (self.name, _unicode_decode(self.location))
56362 + return "<portage.repository.config.RepoConfig(name=%r, location=%r)>" % (self.name, _unicode_decode(self.location))
56363
56364 def __str__(self):
56365 d = {}
56366 for k in self.__slots__:
56367 d[k] = getattr(self, k, None)
56368 - return _unicode_decode("%s") % (d,)
56369 + return "%s" % (d,)
56370
56371 if sys.hexversion < 0x3000000:
56372
56373 @@ -306,11 +403,14 @@ class RepoConfigLoader(object):
56374 """Loads and store config of several repositories, loaded from PORTDIR_OVERLAY or repos.conf"""
56375
56376 @staticmethod
56377 - def _add_repositories(portdir, portdir_overlay, prepos, ignored_map, ignored_location_map):
56378 + def _add_repositories(portdir, portdir_overlay, prepos,
56379 + ignored_map, ignored_location_map, local_config, default_portdir):
56380 """Add overlays in PORTDIR_OVERLAY as repositories"""
56381 overlays = []
56382 + portdir_orig = None
56383 if portdir:
56384 portdir = normalize_path(portdir)
56385 + portdir_orig = portdir
56386 overlays.append(portdir)
56387 try:
56388 port_ov = [normalize_path(i) for i in shlex_split(portdir_overlay)]
56389 @@ -344,43 +444,57 @@ class RepoConfigLoader(object):
56390 #overlay priority is negative because we want them to be looked before any other repo
56391 base_priority = 0
56392 for ov in overlays:
56393 - if os.path.isdir(ov):
56394 + # Ignore missing directory for 'gentoo' so that
56395 + # first sync with emerge-webrsync is possible.
56396 + if isdir_raise_eaccess(ov) or \
56397 + (base_priority == 0 and ov is portdir):
56398 repo_opts = default_repo_opts.copy()
56399 repo_opts['location'] = ov
56400 - repo = RepoConfig(None, repo_opts)
56401 + repo = RepoConfig(None, repo_opts, local_config=local_config)
56402 # repos_conf_opts contains options from repos.conf
56403 repos_conf_opts = repos_conf.get(repo.name)
56404 if repos_conf_opts is not None:
56405 # Selectively copy only the attributes which
56406 # repos.conf is allowed to override.
56407 - for k in ('aliases', 'eclass_overrides', 'masters', 'priority'):
56408 + for k in ('aliases', 'eclass_overrides', 'force', 'masters',
56409 + 'priority', 'sync_cvs_repo', 'sync_type', 'sync_uri'):
56410 v = getattr(repos_conf_opts, k, None)
56411 if v is not None:
56412 setattr(repo, k, v)
56413
56414 if repo.name in prepos:
56415 + # Silently ignore when PORTDIR overrides the location
56416 + # setting from the default repos.conf (bug #478544).
56417 old_location = prepos[repo.name].location
56418 - if old_location is not None and old_location != repo.location:
56419 + if old_location is not None and \
56420 + old_location != repo.location and \
56421 + not (base_priority == 0 and
56422 + old_location == default_portdir):
56423 ignored_map.setdefault(repo.name, []).append(old_location)
56424 ignored_location_map[old_location] = repo.name
56425 if old_location == portdir:
56426 portdir = repo.user_location
56427
56428 - if ov == portdir and portdir not in port_ov:
56429 - repo.priority = -1000
56430 - elif repo.priority is None:
56431 - repo.priority = base_priority
56432 - base_priority += 1
56433 + if repo.priority is None:
56434 + if base_priority == 0 and ov == portdir_orig:
56435 + # If it's the original PORTDIR setting and it's not
56436 + # in PORTDIR_OVERLAY, then it will be assigned a
56437 + # special priority setting later.
56438 + pass
56439 + else:
56440 + repo.priority = base_priority
56441 + base_priority += 1
56442
56443 prepos[repo.name] = repo
56444 else:
56445 - writemsg(_("!!! Invalid PORTDIR_OVERLAY"
56446 - " (not a dir): '%s'\n") % ov, noiselevel=-1)
56447 +
56448 + if not portage._sync_mode:
56449 + writemsg(_("!!! Invalid PORTDIR_OVERLAY (not a dir): '%s'\n") % ov, noiselevel=-1)
56450
56451 return portdir
56452
56453 @staticmethod
56454 - def _parse(paths, prepos, ignored_map, ignored_location_map):
56455 + def _parse(paths, prepos, ignored_map, ignored_location_map, local_config, portdir):
56456 """Parse files in paths to load config"""
56457 parser = SafeConfigParser()
56458
56459 @@ -388,49 +502,78 @@ class RepoConfigLoader(object):
56460 try:
56461 # Python >=3.2
56462 read_file = parser.read_file
56463 + source_kwarg = 'source'
56464 except AttributeError:
56465 read_file = parser.readfp
56466 + source_kwarg = 'filename'
56467
56468 + recursive_paths = []
56469 for p in paths:
56470 - f = None
56471 - try:
56472 - f = io.open(_unicode_encode(p,
56473 - encoding=_encodings['fs'], errors='strict'),
56474 - mode='r', encoding=_encodings['repo.content'],
56475 - errors='replace')
56476 - except EnvironmentError:
56477 - pass
56478 + if isinstance(p, basestring):
56479 + recursive_paths.extend(_recursive_file_list(p))
56480 else:
56481 + recursive_paths.append(p)
56482 +
56483 + for p in recursive_paths:
56484 + if isinstance(p, basestring):
56485 + f = None
56486 try:
56487 - read_file(f)
56488 - except ParsingError as e:
56489 - writemsg(_unicode_decode(
56490 - _("!!! Error while reading repo config file: %s\n")
56491 - ) % e, noiselevel=-1)
56492 - finally:
56493 - if f is not None:
56494 - f.close()
56495 -
56496 - prepos['DEFAULT'] = RepoConfig("DEFAULT", parser.defaults())
56497 + f = io.open(_unicode_encode(p,
56498 + encoding=_encodings['fs'], errors='strict'),
56499 + mode='r', encoding=_encodings['repo.content'],
56500 + errors='replace')
56501 + except EnvironmentError:
56502 + pass
56503 + else:
56504 + # The 'source' keyword argument is needed since otherwise
56505 + # ConfigParser in Python <3.3.3 may throw a TypeError
56506 + # because it assumes that f.name is a native string rather
56507 + # than binary when constructing error messages.
56508 + kwargs = {source_kwarg: p}
56509 + read_file(f, **portage._native_kwargs(kwargs))
56510 + finally:
56511 + if f is not None:
56512 + f.close()
56513 + elif isinstance(p, io.StringIO):
56514 + kwargs = {source_kwarg: "<io.StringIO>"}
56515 + read_file(p, **portage._native_kwargs(kwargs))
56516 + else:
56517 + raise TypeError("Unsupported type %r of element %r of 'paths' argument" % (type(p), p))
56518 +
56519 + prepos['DEFAULT'] = RepoConfig("DEFAULT",
56520 + parser.defaults(), local_config=local_config)
56521 +
56522 for sname in parser.sections():
56523 optdict = {}
56524 for oname in parser.options(sname):
56525 optdict[oname] = parser.get(sname, oname)
56526
56527 - repo = RepoConfig(sname, optdict)
56528 - if repo.location and not os.path.exists(repo.location):
56529 - writemsg(_("!!! Invalid repos.conf entry '%s'"
56530 - " (not a dir): '%s'\n") % (sname, repo.location), noiselevel=-1)
56531 + repo = RepoConfig(sname, optdict, local_config=local_config)
56532 +
56533 + if repo.sync_type is not None and repo.sync_uri is None:
56534 + writemsg_level("!!! %s\n" % _("Repository '%s' has sync-type attribute, but is missing sync-uri attribute") %
56535 + sname, level=logging.ERROR, noiselevel=-1)
56536 continue
56537
56538 - if repo.name in prepos:
56539 - old_location = prepos[repo.name].location
56540 - if old_location is not None and repo.location is not None and old_location != repo.location:
56541 - ignored_map.setdefault(repo.name, []).append(old_location)
56542 - ignored_location_map[old_location] = repo.name
56543 - prepos[repo.name].update(repo)
56544 - else:
56545 - prepos[repo.name] = repo
56546 + if repo.sync_uri is not None and repo.sync_type is None:
56547 + writemsg_level("!!! %s\n" % _("Repository '%s' has sync-uri attribute, but is missing sync-type attribute") %
56548 + sname, level=logging.ERROR, noiselevel=-1)
56549 + continue
56550 +
56551 + if repo.sync_type not in (None, "cvs", "git", "rsync"):
56552 + writemsg_level("!!! %s\n" % _("Repository '%s' has sync-type attribute set to unsupported value: '%s'") %
56553 + (sname, repo.sync_type), level=logging.ERROR, noiselevel=-1)
56554 + continue
56555 +
56556 + if repo.sync_type == "cvs" and repo.sync_cvs_repo is None:
56557 + writemsg_level("!!! %s\n" % _("Repository '%s' has sync-type=cvs, but is missing sync-cvs-repo attribute") %
56558 + sname, level=logging.ERROR, noiselevel=-1)
56559 + continue
56560 +
56561 + # For backward compatibility with locations set via PORTDIR and
56562 + # PORTDIR_OVERLAY, delay validation of the location and repo.name
56563 + # until after PORTDIR and PORTDIR_OVERLAY have been processed.
56564 + prepos[sname] = repo
56565
56566 def __init__(self, paths, settings):
56567 """Load config from files in paths"""
56568 @@ -441,15 +584,42 @@ class RepoConfigLoader(object):
56569 ignored_map = {}
56570 ignored_location_map = {}
56571
56572 - portdir = settings.get('PORTDIR', '')
56573 - portdir_overlay = settings.get('PORTDIR_OVERLAY', '')
56574 + if "PORTAGE_REPOSITORIES" in settings:
56575 + portdir = ""
56576 + portdir_overlay = ""
56577 + portdir_sync = ""
56578 + else:
56579 + portdir = settings.get("PORTDIR", "")
56580 + portdir_overlay = settings.get("PORTDIR_OVERLAY", "")
56581 + portdir_sync = settings.get("SYNC", "")
56582
56583 - self._parse(paths, prepos, ignored_map, ignored_location_map)
56584 + try:
56585 + self._parse(paths, prepos, ignored_map,
56586 + ignored_location_map, settings.local_config,
56587 + portdir)
56588 + except ConfigParserError as e:
56589 + writemsg(
56590 + _("!!! Error while reading repo config file: %s\n") % e,
56591 + noiselevel=-1)
56592 + # The configparser state is unreliable (prone to quirky
56593 + # exceptions) after it has thrown an error, so use empty
56594 + # config and try to fall back to PORTDIR{,_OVERLAY}.
56595 + prepos.clear()
56596 + prepos['DEFAULT'] = RepoConfig('DEFAULT',
56597 + {}, local_config=settings.local_config)
56598 + location_map.clear()
56599 + treemap.clear()
56600 + ignored_map.clear()
56601 + ignored_location_map.clear()
56602 +
56603 + default_portdir = os.path.join(os.sep,
56604 + settings['EPREFIX'].lstrip(os.sep), 'usr', 'portage')
56605
56606 # If PORTDIR_OVERLAY contains a repo with the same repo_name as
56607 # PORTDIR, then PORTDIR is overridden.
56608 portdir = self._add_repositories(portdir, portdir_overlay, prepos,
56609 - ignored_map, ignored_location_map)
56610 + ignored_map, ignored_location_map, settings.local_config,
56611 + default_portdir)
56612 if portdir and portdir.strip():
56613 portdir = os.path.realpath(portdir)
56614
56615 @@ -460,9 +630,51 @@ class RepoConfigLoader(object):
56616 for repo in prepos.values()
56617 if repo.location is not None and repo.missing_repo_name)
56618
56619 - #Take aliases into account.
56620 - new_prepos = {}
56621 - for repo_name, repo in prepos.items():
56622 + # Do this before expanding aliases, so that location_map and
56623 + # treemap consistently map unaliased names whenever available.
56624 + for repo_name, repo in list(prepos.items()):
56625 + if repo.location is None:
56626 + if repo_name != 'DEFAULT':
56627 + # Skip this warning for repoman (bug #474578).
56628 + if settings.local_config and paths:
56629 + writemsg_level("!!! %s\n" % _("Section '%s' in repos.conf is missing location attribute") %
56630 + repo.name, level=logging.ERROR, noiselevel=-1)
56631 + del prepos[repo_name]
56632 + continue
56633 + else:
56634 + if not portage._sync_mode:
56635 + if not isdir_raise_eaccess(repo.location):
56636 + writemsg_level("!!! %s\n" % _("Section '%s' in repos.conf has location attribute set "
56637 + "to nonexistent directory: '%s'") %
56638 + (repo_name, repo.location), level=logging.ERROR, noiselevel=-1)
56639 +
56640 + # Ignore missing directory for 'gentoo' so that
56641 + # first sync with emerge-webrsync is possible.
56642 + if repo.name != 'gentoo':
56643 + del prepos[repo_name]
56644 + continue
56645 +
56646 + # After removing support for PORTDIR_OVERLAY, the following check can be:
56647 + # if repo.missing_repo_name:
56648 + if repo.missing_repo_name and repo.name != repo_name:
56649 + writemsg_level("!!! %s\n" % _("Section '%s' in repos.conf refers to repository "
56650 + "without repository name set in '%s'") %
56651 + (repo_name, os.path.join(repo.location, REPO_NAME_LOC)), level=logging.ERROR, noiselevel=-1)
56652 + del prepos[repo_name]
56653 + continue
56654 +
56655 + if repo.name != repo_name:
56656 + writemsg_level("!!! %s\n" % _("Section '%s' in repos.conf has name different "
56657 + "from repository name '%s' set inside repository") %
56658 + (repo_name, repo.name), level=logging.ERROR, noiselevel=-1)
56659 + del prepos[repo_name]
56660 + continue
56661 +
56662 + location_map[repo.location] = repo_name
56663 + treemap[repo_name] = repo.location
56664 +
56665 + # Add alias mappings, but never replace unaliased mappings.
56666 + for repo_name, repo in list(prepos.items()):
56667 names = set()
56668 names.add(repo_name)
56669 if repo.aliases:
56670 @@ -470,36 +682,55 @@ class RepoConfigLoader(object):
56671 names.update(aliases)
56672
56673 for name in names:
56674 - if name in new_prepos:
56675 + if name in prepos and prepos[name].location is not None:
56676 + if name == repo_name:
56677 + # unaliased names already handled earlier
56678 + continue
56679 writemsg_level(_("!!! Repository name or alias '%s', " + \
56680 "defined for repository '%s', overrides " + \
56681 "existing alias or repository.\n") % (name, repo_name), level=logging.WARNING, noiselevel=-1)
56682 - new_prepos[name] = repo
56683 - prepos = new_prepos
56684 + # Never replace an unaliased mapping with
56685 + # an aliased mapping.
56686 + continue
56687 + prepos[name] = repo
56688 + if repo.location is not None:
56689 + if repo.location not in location_map:
56690 + # Never replace an unaliased mapping with
56691 + # an aliased mapping.
56692 + location_map[repo.location] = name
56693 + treemap[name] = repo.location
56694 +
56695 + main_repo = prepos['DEFAULT'].main_repo
56696 + if main_repo is None or main_repo not in prepos:
56697 + #setting main_repo if it was not set in repos.conf
56698 + main_repo = location_map.get(portdir)
56699 + if main_repo is not None:
56700 + prepos['DEFAULT'].main_repo = main_repo
56701 + else:
56702 + prepos['DEFAULT'].main_repo = None
56703 + if portdir and not portage._sync_mode:
56704 + writemsg(_("!!! main-repo not set in DEFAULT and PORTDIR is empty.\n"), noiselevel=-1)
56705
56706 - for (name, r) in prepos.items():
56707 - if r.location is not None:
56708 - location_map[r.location] = name
56709 - treemap[name] = r.location
56710 + if main_repo is not None and prepos[main_repo].priority is None:
56711 + # This happens if main-repo has been set in repos.conf.
56712 + prepos[main_repo].priority = -1000
56713
56714 - # filter duplicates from aliases, by only including
56715 - # items where repo.name == key
56716 + # Backward compatible SYNC support for mirrorselect.
56717 + if portdir_sync and main_repo is not None:
56718 + if portdir_sync.startswith("rsync://"):
56719 + prepos[main_repo].sync_uri = portdir_sync
56720 + prepos[main_repo].sync_type = "rsync"
56721
56722 - prepos_order = sorted(prepos.items(), key=lambda r:r[1].priority or 0)
56723 + # Include repo.name in sort key, for predictable sorting
56724 + # even when priorities are equal.
56725 + prepos_order = sorted(prepos.items(),
56726 + key=lambda r:(r[1].priority or 0, r[1].name))
56727
56728 + # filter duplicates from aliases, by only including
56729 + # items where repo.name == key
56730 prepos_order = [repo.name for (key, repo) in prepos_order
56731 - if repo.name == key and repo.location is not None]
56732 -
56733 - if prepos['DEFAULT'].main_repo is None or \
56734 - prepos['DEFAULT'].main_repo not in prepos:
56735 - #setting main_repo if it was not set in repos.conf
56736 - if portdir in location_map:
56737 - prepos['DEFAULT'].main_repo = location_map[portdir]
56738 - elif portdir in ignored_location_map:
56739 - prepos['DEFAULT'].main_repo = ignored_location_map[portdir]
56740 - else:
56741 - prepos['DEFAULT'].main_repo = None
56742 - writemsg(_("!!! main-repo not set in DEFAULT and PORTDIR is empty. \n"), noiselevel=-1)
56743 + if repo.name == key and key != 'DEFAULT' and
56744 + repo.location is not None]
56745
56746 self.prepos = prepos
56747 self.prepos_order = prepos_order
56748 @@ -578,6 +809,18 @@ class RepoConfigLoader(object):
56749 eclass_db.append(tree_db)
56750 repo.eclass_db = eclass_db
56751
56752 + for repo_name, repo in prepos.items():
56753 + if repo_name == "DEFAULT":
56754 + continue
56755 +
56756 + if repo._masters_orig is None and self.mainRepo() and \
56757 + repo.name != self.mainRepo().name and not portage._sync_mode:
56758 + # TODO: Delete masters code in pym/portage/tests/resolver/ResolverPlayground.py when deleting this warning.
56759 + writemsg_level("!!! %s\n" % _("Repository '%s' is missing masters attribute in '%s'") %
56760 + (repo.name, os.path.join(repo.location, "metadata", "layout.conf")) +
56761 + "!!! %s\n" % _("Set 'masters = %s' in this file for future compatibility") %
56762 + self.mainRepo().name, level=logging.WARNING, noiselevel=-1)
56763 +
56764 self._prepos_changed = True
56765 self._repo_location_list = []
56766
56767 @@ -613,10 +856,10 @@ class RepoConfigLoader(object):
56768
56769 def mainRepo(self):
56770 """Returns the main repo"""
56771 - maid_repo = self.prepos['DEFAULT'].main_repo
56772 - if maid_repo is None:
56773 + main_repo = self.prepos['DEFAULT'].main_repo
56774 + if main_repo is None:
56775 return None
56776 - return self.prepos[maid_repo]
56777 + return self.prepos[main_repo]
56778
56779 def _check_locations(self):
56780 """Check if repositories location are correct and show a warning message if not"""
56781 @@ -625,7 +868,7 @@ class RepoConfigLoader(object):
56782 if r.location is None:
56783 writemsg(_("!!! Location not set for repository %s\n") % name, noiselevel=-1)
56784 else:
56785 - if not os.path.isdir(r.location):
56786 + if not isdir_raise_eaccess(r.location) and not portage._sync_mode:
56787 self.prepos_order.remove(name)
56788 writemsg(_("!!! Invalid Repository Location"
56789 " (not a dir): '%s'\n") % r.location, noiselevel=-1)
56790 @@ -650,19 +893,66 @@ class RepoConfigLoader(object):
56791 def get_repo_for_location(self, location):
56792 return self.prepos[self.get_name_for_location(location)]
56793
56794 + def __setitem__(self, repo_name, repo):
56795 + # self.prepos[repo_name] = repo
56796 + raise NotImplementedError
56797 +
56798 def __getitem__(self, repo_name):
56799 return self.prepos[repo_name]
56800
56801 + def __delitem__(self, repo_name):
56802 + if repo_name == self.prepos['DEFAULT'].main_repo:
56803 + self.prepos['DEFAULT'].main_repo = None
56804 + location = self.prepos[repo_name].location
56805 + del self.prepos[repo_name]
56806 + if repo_name in self.prepos_order:
56807 + self.prepos_order.remove(repo_name)
56808 + for k, v in self.location_map.copy().items():
56809 + if v == repo_name:
56810 + del self.location_map[k]
56811 + if repo_name in self.treemap:
56812 + del self.treemap[repo_name]
56813 + self._repo_location_list = tuple(x for x in self._repo_location_list if x != location)
56814 +
56815 def __iter__(self):
56816 for repo_name in self.prepos_order:
56817 yield self.prepos[repo_name]
56818
56819 -def load_repository_config(settings):
56820 - #~ repoconfigpaths = [os.path.join(settings.global_config_path, "repos.conf")]
56821 + def __contains__(self, repo_name):
56822 + return repo_name in self.prepos
56823 +
56824 + def config_string(self):
56825 + str_or_int_keys = ("format", "location", "main_repo", "priority", "sync_cvs_repo", "sync_type", "sync_uri")
56826 + str_tuple_keys = ("aliases", "eclass_overrides", "force")
56827 + repo_config_tuple_keys = ("masters",)
56828 + keys = str_or_int_keys + str_tuple_keys + repo_config_tuple_keys
56829 + config_string = ""
56830 + for repo_name, repo in sorted(self.prepos.items(), key=lambda x: (x[0] != "DEFAULT", x[0])):
56831 + config_string += "\n[%s]\n" % repo_name
56832 + for key in sorted(keys):
56833 + if key == "main_repo" and repo_name != "DEFAULT":
56834 + continue
56835 + if getattr(repo, key) is not None:
56836 + if key in str_or_int_keys:
56837 + config_string += "%s = %s\n" % (key.replace("_", "-"), getattr(repo, key))
56838 + elif key in str_tuple_keys:
56839 + config_string += "%s = %s\n" % (key.replace("_", "-"), " ".join(getattr(repo, key)))
56840 + elif key in repo_config_tuple_keys:
56841 + config_string += "%s = %s\n" % (key.replace("_", "-"), " ".join(x.name for x in getattr(repo, key)))
56842 + return config_string.lstrip("\n")
56843 +
56844 +def load_repository_config(settings, extra_files=None):
56845 repoconfigpaths = []
56846 - if settings.local_config:
56847 - repoconfigpaths.append(os.path.join(settings["PORTAGE_CONFIGROOT"],
56848 - USER_CONFIG_PATH, "repos.conf"))
56849 + if "PORTAGE_REPOSITORIES" in settings:
56850 + repoconfigpaths.append(io.StringIO(settings["PORTAGE_REPOSITORIES"]))
56851 + else:
56852 + if portage._not_installed:
56853 + repoconfigpaths.append(os.path.join(PORTAGE_BASE_PATH, "cnf", "repos.conf"))
56854 + else:
56855 + repoconfigpaths.append(os.path.join(settings.global_config_path, "repos.conf"))
56856 + repoconfigpaths.append(os.path.join(settings["PORTAGE_CONFIGROOT"], USER_CONFIG_PATH, "repos.conf"))
56857 + if extra_files:
56858 + repoconfigpaths.extend(extra_files)
56859 return RepoConfigLoader(repoconfigpaths, settings)
56860
56861 def _get_repo_name(repo_location, cached=None):
56862 @@ -696,6 +986,9 @@ def parse_layout_conf(repo_location, repo_name=None):
56863 data['allow-provide-virtual'] = \
56864 layout_data.get('allow-provide-virtuals', 'false').lower() == 'true'
56865
56866 + data['eapis-banned'] = tuple(layout_data.get('eapis-banned', '').split())
56867 + data['eapis-deprecated'] = tuple(layout_data.get('eapis-deprecated', '').split())
56868 +
56869 data['sign-commit'] = layout_data.get('sign-commits', 'false').lower() \
56870 == 'true'
56871
56872 @@ -705,6 +998,8 @@ def parse_layout_conf(repo_location, repo_name=None):
56873 data['thin-manifest'] = layout_data.get('thin-manifests', 'false').lower() \
56874 == 'true'
56875
56876 + data['repo-name'] = _gen_valid_repo(layout_data.get('repo-name', ''))
56877 +
56878 manifest_policy = layout_data.get('use-manifests', 'strict').lower()
56879 data['allow-missing-manifest'] = manifest_policy != 'strict'
56880 data['create-manifest'] = manifest_policy != 'false'
56881 @@ -713,9 +1008,18 @@ def parse_layout_conf(repo_location, repo_name=None):
56882 # for compatibility w/ PMS, fallback to pms; but also check if the
56883 # cache exists or not.
56884 cache_formats = layout_data.get('cache-formats', '').lower().split()
56885 - if not cache_formats and os.path.isdir(
56886 - os.path.join(repo_location, 'metadata', 'cache')):
56887 - cache_formats = ['pms']
56888 + if not cache_formats:
56889 + # Auto-detect cache formats, and prefer md5-cache if available.
56890 + # This behavior was deployed in portage-2.1.11.14, so that the
56891 + # default egencache format could eventually be changed to md5-dict
56892 + # in portage-2.1.11.32. WARNING: Versions prior to portage-2.1.11.14
56893 + # will NOT recognize md5-dict format unless it is explicitly
56894 + # listed in layout.conf.
56895 + cache_formats = []
56896 + if os.path.isdir(os.path.join(repo_location, 'metadata', 'md5-cache')):
56897 + cache_formats.append('md5-dict')
56898 + if os.path.isdir(os.path.join(repo_location, 'metadata', 'cache')):
56899 + cache_formats.append('pms')
56900 data['cache-formats'] = tuple(cache_formats)
56901
56902 manifest_hashes = layout_data.get('manifest-hashes')
56903 @@ -754,7 +1058,7 @@ def parse_layout_conf(repo_location, repo_name=None):
56904
56905 raw_formats = layout_data.get('profile-formats')
56906 if raw_formats is None:
56907 - if eapi in ('4-python',):
56908 + if eapi_allows_directories_on_profile_level_and_repository_level(eapi):
56909 raw_formats = ('portage-1',)
56910 else:
56911 raw_formats = ('portage-1-compat',)
56912
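For illustration, the hunks above give RepoConfigLoader a config_string() serializer and teach load_repository_config() to read the same repos.conf-style text from PORTAGE_REPOSITORIES (or from extra files). A minimal sketch of that format, separate from the patch itself, parsed with the stdlib configparser purely to show its shape (portage's own RepoConfigLoader does the real parsing); the repo name and path are made up:

    import io
    import textwrap
    import configparser  # Python 3 spelling; Python 2 ships it as ConfigParser

    PORTAGE_REPOSITORIES = textwrap.dedent("""\
        [DEFAULT]
        main-repo = test_repo

        [test_repo]
        location = /var/repositories/test_repo
        """)

    cfg = configparser.ConfigParser()
    cfg.read_file(io.StringIO(PORTAGE_REPOSITORIES))
    print(cfg.defaults()["main-repo"])       # test_repo
    print(cfg.get("test_repo", "location"))  # /var/repositories/test_repo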
56913 diff --git a/pym/portage/tests/__init__.py b/pym/portage/tests/__init__.py
56914 index 492ece4..84e732a 100644
56915 --- a/pym/portage/tests/__init__.py
56916 +++ b/pym/portage/tests/__init__.py
56917 @@ -1,5 +1,5 @@
56918 # tests/__init__.py -- Portage Unit Test functionality
56919 -# Copyright 2006-2011 Gentoo Foundation
56920 +# Copyright 2006-2013 Gentoo Foundation
56921 # Distributed under the terms of the GNU General Public License v2
56922
56923 from __future__ import print_function
56924 @@ -7,26 +7,40 @@ from __future__ import print_function
56925 import sys
56926 import time
56927 import unittest
56928 -from optparse import OptionParser, OptionValueError
56929
56930 try:
56931 from unittest.runner import _TextTestResult # new in python-2.7
56932 except ImportError:
56933 from unittest import _TextTestResult
56934
56935 +try:
56936 + # They added the skip framework to python-2.7.
56937 + # Drop this once we drop python-2.6 support.
56938 + unittest_skip_shims = False
56939 + from unittest import SkipTest # new in python-2.7
56940 +except ImportError:
56941 + unittest_skip_shims = True
56942 +
56943 +import portage
56944 from portage import os
56945 from portage import _encodings
56946 from portage import _unicode_decode
56947 +from portage.util._argparse import ArgumentParser
56948
56949 def main():
56950 suite = unittest.TestSuite()
56951 basedir = os.path.dirname(os.path.realpath(__file__))
56952
56953 usage = "usage: %s [options] [tests to run]" % os.path.basename(sys.argv[0])
56954 - parser = OptionParser(usage=usage)
56955 - parser.add_option("-l", "--list", help="list all tests",
56956 + parser = ArgumentParser(usage=usage)
56957 + parser.add_argument("-l", "--list", help="list all tests",
56958 action="store_true", dest="list_tests")
56959 - (options, args) = parser.parse_args(args=sys.argv)
56960 + options, args = parser.parse_known_args(args=sys.argv)
56961 +
56962 + if (os.environ.get('NOCOLOR') in ('yes', 'true') or
56963 + os.environ.get('TERM') == 'dumb' or
56964 + not sys.stdout.isatty()):
56965 + portage.output.nocolor()
56966
56967 if options.list_tests:
56968 testdir = os.path.dirname(sys.argv[0])
56969 @@ -70,15 +84,12 @@ def getTestFromCommandLine(args, base_path):
56970
56971 def getTestDirs(base_path):
56972 TEST_FILE = b'__test__'
56973 - svn_dirname = b'.svn'
56974 testDirs = []
56975
56976 # the os.walk help mentions relative paths as being quirky
56977 # I was tired of adding dirs to the list, so now we add __test__
56978 # to each dir we want tested.
56979 for root, dirs, files in os.walk(base_path):
56980 - if svn_dirname in dirs:
56981 - dirs.remove(svn_dirname)
56982 try:
56983 root = _unicode_decode(root,
56984 encoding=_encodings['fs'], errors='strict')
56985 @@ -93,7 +104,7 @@ def getTestDirs(base_path):
56986
56987 def getTestNames(path):
56988 files = os.listdir(path)
56989 - files = [ f[:-3] for f in files if f.startswith("test") and f.endswith(".py") ]
56990 + files = [f[:-3] for f in files if f.startswith("test") and f.endswith(".py")]
56991 files.sort()
56992 return files
56993
56994 @@ -134,14 +145,14 @@ class TextTestResult(_TextTestResult):
56995 self.portage_skipped = []
56996
56997 def addTodo(self, test, info):
56998 - self.todoed.append((test,info))
56999 + self.todoed.append((test, info))
57000 if self.showAll:
57001 self.stream.writeln("TODO")
57002 elif self.dots:
57003 self.stream.write(".")
57004
57005 def addPortageSkip(self, test, info):
57006 - self.portage_skipped.append((test,info))
57007 + self.portage_skipped.append((test, info))
57008 if self.showAll:
57009 self.stream.writeln("SKIP")
57010 elif self.dots:
57011 @@ -185,10 +196,14 @@ class TestCase(unittest.TestCase):
57012 except:
57013 result.addError(self, sys.exc_info())
57014 return
57015 +
57016 ok = False
57017 try:
57018 testMethod()
57019 ok = True
57020 + except SkipTest as e:
57021 + result.addPortageSkip(self, "%s: SKIP: %s" %
57022 + (testMethod, str(e)))
57023 except self.failureException:
57024 if self.portage_skip is not None:
57025 if self.portage_skip is True:
57026 @@ -197,13 +212,14 @@ class TestCase(unittest.TestCase):
57027 result.addPortageSkip(self, "%s: SKIP: %s" %
57028 (testMethod, self.portage_skip))
57029 elif self.todo:
57030 - result.addTodo(self,"%s: TODO" % testMethod)
57031 + result.addTodo(self, "%s: TODO" % testMethod)
57032 else:
57033 result.addFailure(self, sys.exc_info())
57034 except (KeyboardInterrupt, SystemExit):
57035 raise
57036 except:
57037 result.addError(self, sys.exc_info())
57038 +
57039 try:
57040 self.tearDown()
57041 except SystemExit:
57042 @@ -213,7 +229,8 @@ class TestCase(unittest.TestCase):
57043 except:
57044 result.addError(self, sys.exc_info())
57045 ok = False
57046 - if ok: result.addSuccess(self)
57047 + if ok:
57048 + result.addSuccess(self)
57049 finally:
57050 result.stopTest(self)
57051
57052 @@ -230,10 +247,48 @@ class TestCase(unittest.TestCase):
57053 except excClass:
57054 return
57055 else:
57056 - if hasattr(excClass,'__name__'): excName = excClass.__name__
57057 + if hasattr(excClass, '__name__'): excName = excClass.__name__
57058 else: excName = str(excClass)
57059 raise self.failureException("%s not raised: %s" % (excName, msg))
57060
57061 + def assertExists(self, path):
57062 + """Make sure |path| exists"""
57063 + if not os.path.exists(path):
57064 + msg = ['path is missing: %s' % (path,)]
57065 + while path != '/':
57066 + path = os.path.dirname(path)
57067 + if not path:
57068 + # If we're given something like "foo", abort once we get to "".
57069 + break
57070 + result = os.path.exists(path)
57071 + msg.append('\tos.path.exists(%s): %s' % (path, result))
57072 + if result:
57073 + msg.append('\tcontents: %r' % os.listdir(path))
57074 + break
57075 + raise self.failureException('\n'.join(msg))
57076 +
57077 + def assertNotExists(self, path):
57078 + """Make sure |path| does not exist"""
57079 + if os.path.exists(path):
57080 + raise self.failureException('path exists when it should not: %s' % path)
57081 +
57082 +if unittest_skip_shims:
57083 + # Shim code for <python-2.7.
57084 + class SkipTest(Exception):
57085 + """unittest.SkipTest shim for <python-2.7"""
57086 +
57087 + def skipTest(self, reason):
57088 + raise SkipTest(reason)
57089 + setattr(TestCase, 'skipTest', skipTest)
57090 +
57091 + def assertIn(self, member, container, msg=None):
57092 + self.assertTrue(member in container, msg=msg)
57093 + setattr(TestCase, 'assertIn', assertIn)
57094 +
57095 + def assertNotIn(self, member, container, msg=None):
57096 + self.assertFalse(member in container, msg=msg)
57097 + setattr(TestCase, 'assertNotIn', assertNotIn)
57098 +
57099 class TextTestRunner(unittest.TextTestRunner):
57100 """
57101 We subclass unittest.TextTestRunner to output SKIP for tests that fail but are skippable
57102 @@ -271,8 +326,8 @@ class TextTestRunner(unittest.TextTestRunner):
57103 self.stream.writeln("OK")
57104 return result
57105
57106 -test_cps = ['sys-apps/portage','virtual/portage']
57107 -test_versions = ['1.0', '1.0-r1','2.3_p4','1.0_alpha57']
57108 -test_slots = [ None, '1','gentoo-sources-2.6.17','spankywashere']
57109 -test_usedeps = ['foo','-bar', ('foo','bar'),
57110 - ('foo','-bar'), ('foo?', '!bar?') ]
57111 +test_cps = ['sys-apps/portage', 'virtual/portage']
57112 +test_versions = ['1.0', '1.0-r1', '2.3_p4', '1.0_alpha57']
57113 +test_slots = [None, '1', 'gentoo-sources-2.6.17', 'spankywashere']
57114 +test_usedeps = ['foo', '-bar', ('foo', 'bar'),
57115 + ('foo', '-bar'), ('foo?', '!bar?')]
57116
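One aside on the skip support added above: on interpreters that predate unittest.SkipTest (Python < 2.7) the patch installs its own SkipTest exception plus skipTest/assertIn/assertNotIn helpers on TestCase. A standalone sketch of that shim pattern, independent of portage (the probe imports SkipTest with "from unittest import SkipTest", since SkipTest is a class rather than a submodule):

    import unittest

    try:
        from unittest import SkipTest  # present since Python 2.7
        _need_shims = False
    except ImportError:
        _need_shims = True

        class SkipTest(Exception):
            """Stand-in for unittest.SkipTest on pre-2.7 interpreters."""

    class ShimmedTestCase(unittest.TestCase):
        if _need_shims:
            def skipTest(self, reason):
                raise SkipTest(reason)

            def assertIn(self, member, container, msg=None):
                self.assertTrue(member in container, msg=msg)

            def assertNotIn(self, member, container, msg=None):
                self.assertFalse(member in container, msg=msg)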
57117 diff --git a/pym/portage/tests/bin/setup_env.py b/pym/portage/tests/bin/setup_env.py
57118 index 1f8554e..9cc26df 100644
57119 --- a/pym/portage/tests/bin/setup_env.py
57120 +++ b/pym/portage/tests/bin/setup_env.py
57121 @@ -1,19 +1,17 @@
57122 # setup_env.py -- Make sure bin subdir has sane env for testing
57123 -# Copyright 2007-2011 Gentoo Foundation
57124 +# Copyright 2007-2013 Gentoo Foundation
57125 # Distributed under the terms of the GNU General Public License v2
57126
57127 import tempfile
57128
57129 from portage import os
57130 from portage import shutil
57131 +from portage.const import PORTAGE_BIN_PATH
57132 +from portage.const import PORTAGE_PYM_PATH
57133 from portage.tests import TestCase
57134 from portage.process import spawn
57135
57136 -basepath = os.path.join(os.path.dirname(os.path.dirname(
57137 - os.path.abspath(__file__))),
57138 - "..", "..", "..")
57139 -bindir = os.path.join(basepath, "bin")
57140 -pymdir = os.path.join(basepath, "pym")
57141 +bindir = PORTAGE_BIN_PATH
57142 basedir = None
57143 env = None
57144
57145 @@ -30,20 +28,20 @@ def binTestsInit():
57146 global basedir, env
57147 basedir = tempfile.mkdtemp()
57148 env = {}
57149 - env["EAPI"] = "0"
57150 - env["D"] = os.path.join(basedir, "image")
57151 - env["T"] = os.path.join(basedir, "temp")
57152 - env["S"] = os.path.join(basedir, "workdir")
57153 - env["PF"] = "portage-tests-0.09-r1"
57154 - env["PATH"] = bindir + ":" + os.environ["PATH"]
57155 - env["PORTAGE_BIN_PATH"] = bindir
57156 - env["PORTAGE_PYM_PATH"] = pymdir
57157 - env["PORTAGE_INST_UID"] = str(os.getuid())
57158 - env["PORTAGE_INST_GID"] = str(os.getgid())
57159 - env["DESTTREE"] = "/usr"
57160 - os.mkdir(env["D"])
57161 - os.mkdir(env["T"])
57162 - os.mkdir(env["S"])
57163 + env['EAPI'] = '0'
57164 + env['D'] = os.path.join(basedir, 'image')
57165 + env['T'] = os.path.join(basedir, 'temp')
57166 + env['S'] = os.path.join(basedir, 'workdir')
57167 + env['PF'] = 'portage-tests-0.09-r1'
57168 + env['PATH'] = bindir + ':' + os.environ['PATH']
57169 + env['PORTAGE_BIN_PATH'] = bindir
57170 + env['PORTAGE_PYM_PATH'] = PORTAGE_PYM_PATH
57171 + env['PORTAGE_INST_UID'] = str(os.getuid())
57172 + env['PORTAGE_INST_GID'] = str(os.getgid())
57173 + env['DESTTREE'] = '/usr'
57174 + os.mkdir(env['D'])
57175 + os.mkdir(env['T'])
57176 + os.mkdir(env['S'])
57177
57178 class BinTestCase(TestCase):
57179 def init(self):
57180 @@ -53,7 +51,7 @@ class BinTestCase(TestCase):
57181
57182 def _exists_in_D(path):
57183 # Note: do not use os.path.join() here, we assume D to end in /
57184 - return os.access(env["D"] + path, os.W_OK)
57185 + return os.access(env['D'] + path, os.W_OK)
57186 def exists_in_D(path):
57187 if not _exists_in_D(path):
57188 raise TestCase.failureException
57189 @@ -68,7 +66,7 @@ def portage_func(func, args, exit_status=0):
57190 f = open('/dev/null', 'wb')
57191 fd_pipes = {0:0,1:f.fileno(),2:f.fileno()}
57192 def pre_exec():
57193 - os.chdir(env["S"])
57194 + os.chdir(env['S'])
57195 spawn([func] + args.split(), env=env,
57196 fd_pipes=fd_pipes, pre_exec=pre_exec)
57197 f.close()
57198 @@ -80,10 +78,10 @@ def create_portage_wrapper(bin):
57199 return portage_func(*newargs)
57200 return derived_func
57201
57202 -for bin in os.listdir(os.path.join(bindir, "ebuild-helpers")):
57203 - if bin.startswith("do") or \
57204 - bin.startswith("new") or \
57205 - bin.startswith("prep") or \
57206 - bin in ["ecompress","ecompressdir","fowners","fperms"]:
57207 +for bin in os.listdir(os.path.join(bindir, 'ebuild-helpers')):
57208 + if bin.startswith('do') or \
57209 + bin.startswith('new') or \
57210 + bin.startswith('prep') or \
57211 + bin in ('ecompress', 'ecompressdir', 'fowners', 'fperms'):
57212 globals()[bin] = create_portage_wrapper(
57213 - os.path.join(bindir, "ebuild-helpers", bin))
57214 + os.path.join(bindir, 'ebuild-helpers', bin))
57215
57216 diff --git a/pym/portage/tests/dbapi/test_fakedbapi.py b/pym/portage/tests/dbapi/test_fakedbapi.py
57217 index e3843f0..7713563 100644
57218 --- a/pym/portage/tests/dbapi/test_fakedbapi.py
57219 +++ b/pym/portage/tests/dbapi/test_fakedbapi.py
57220 @@ -1,4 +1,4 @@
57221 -# Copyright 2011-2012 Gentoo Foundation
57222 +# Copyright 2011-2013 Gentoo Foundation
57223 # Distributed under the terms of the GNU General Public License v2
57224
57225 import tempfile
57226 @@ -42,10 +42,12 @@ class TestFakedbapi(TestCase):
57227
57228 tempdir = tempfile.mkdtemp()
57229 try:
57230 - portdir = os.path.join(tempdir, "usr/portage")
57231 - os.makedirs(portdir)
57232 + test_repo = os.path.join(tempdir, "var", "repositories", "test_repo")
57233 + os.makedirs(os.path.join(test_repo, "profiles"))
57234 + with open(os.path.join(test_repo, "profiles", "repo_name"), "w") as f:
57235 + f.write("test_repo")
57236 env = {
57237 - "PORTDIR": portdir,
57238 + "PORTAGE_REPOSITORIES": "[DEFAULT]\nmain-repo = test_repo\n[test_repo]\nlocation = %s" % test_repo
57239 }
57240 fakedb = fakedbapi(settings=config(config_profile_path="",
57241 env=env, eprefix=tempdir))
57242
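A condensed sketch of the throwaway-repository pattern the updated fakedbapi test uses: create a minimal repository on disk (profiles/repo_name is enough for it to be recognized) and hand portage an inline PORTAGE_REPOSITORIES string instead of a PORTDIR. Paths and the repo name here are illustrative:

    import os
    import tempfile

    def make_throwaway_repo(name="test_repo"):
        """Return (location, inline repos.conf text) for a minimal test repo."""
        tempdir = tempfile.mkdtemp()
        location = os.path.join(tempdir, "var", "repositories", name)
        os.makedirs(os.path.join(location, "profiles"))
        with open(os.path.join(location, "profiles", "repo_name"), "w") as f:
            f.write(name)
        inline_conf = "[DEFAULT]\nmain-repo = %s\n[%s]\nlocation = %s\n" % (
            name, name, location)
        return location, inline_conf

    # e.g. env = {"PORTAGE_REPOSITORIES": make_throwaway_repo()[1]}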
57243 diff --git a/pym/portage/tests/dbapi/test_portdb_cache.py b/pym/portage/tests/dbapi/test_portdb_cache.py
57244 new file mode 100644
57245 index 0000000..94af96e
57246 --- /dev/null
57247 +++ b/pym/portage/tests/dbapi/test_portdb_cache.py
57248 @@ -0,0 +1,183 @@
57249 +# Copyright 2012-2014 Gentoo Foundation
57250 +# Distributed under the terms of the GNU General Public License v2
57251 +
57252 +import subprocess
57253 +import sys
57254 +import textwrap
57255 +
57256 +import portage
57257 +from portage import os
57258 +from portage import _unicode_decode
57259 +from portage.const import (BASH_BINARY, PORTAGE_BIN_PATH,
57260 + PORTAGE_PYM_PATH, USER_CONFIG_PATH)
57261 +from portage.tests import TestCase
57262 +from portage.tests.resolver.ResolverPlayground import ResolverPlayground
57263 +from portage.util import ensure_dirs
57264 +
57265 +class PortdbCacheTestCase(TestCase):
57266 +
57267 + def testPortdbCache(self):
57268 + debug = False
57269 +
57270 + ebuilds = {
57271 + "dev-libs/A-1": {},
57272 + "dev-libs/A-2": {},
57273 + "sys-apps/B-1": {},
57274 + "sys-apps/B-2": {},
57275 + }
57276 +
57277 + playground = ResolverPlayground(ebuilds=ebuilds, debug=debug)
57278 + settings = playground.settings
57279 + eprefix = settings["EPREFIX"]
57280 + test_repo_location = settings.repositories["test_repo"].location
57281 + user_config_dir = os.path.join(eprefix, USER_CONFIG_PATH)
57282 + metadata_dir = os.path.join(test_repo_location, "metadata")
57283 + md5_cache_dir = os.path.join(metadata_dir, "md5-cache")
57284 + pms_cache_dir = os.path.join(metadata_dir, "cache")
57285 + layout_conf_path = os.path.join(metadata_dir, "layout.conf")
57286 +
57287 + portage_python = portage._python_interpreter
57288 + egencache_cmd = (portage_python, "-b", "-Wd",
57289 + os.path.join(PORTAGE_BIN_PATH, "egencache"),
57290 + "--repo", "test_repo",
57291 + "--repositories-configuration", settings.repositories.config_string())
57292 + python_cmd = (portage_python, "-b", "-Wd", "-c")
57293 +
57294 + test_commands = (
57295 + (lambda: not os.path.exists(pms_cache_dir),),
57296 + (lambda: not os.path.exists(md5_cache_dir),),
57297 + python_cmd + (textwrap.dedent("""
57298 + import os, sys, portage
57299 + if portage.portdb.porttree_root in portage.portdb._pregen_auxdb:
57300 + sys.exit(1)
57301 + """),),
57302 +
57303 + egencache_cmd + ("--update",),
57304 + (lambda: not os.path.exists(pms_cache_dir),),
57305 + (lambda: os.path.exists(md5_cache_dir),),
57306 + python_cmd + (textwrap.dedent("""
57307 + import os, sys, portage
57308 + if portage.portdb.porttree_root not in portage.portdb._pregen_auxdb:
57309 + sys.exit(1)
57310 + """),),
57311 + python_cmd + (textwrap.dedent("""
57312 + import os, sys, portage
57313 + from portage.cache.flat_hash import md5_database
57314 + if not isinstance(portage.portdb._pregen_auxdb[portage.portdb.porttree_root], md5_database):
57315 + sys.exit(1)
57316 + """),),
57317 +
57318 + (BASH_BINARY, "-c", "echo %s > %s" %
57319 + tuple(map(portage._shell_quote,
57320 + ("cache-formats = md5-dict pms", layout_conf_path,)))),
57321 + egencache_cmd + ("--update",),
57322 + (lambda: os.path.exists(md5_cache_dir),),
57323 + python_cmd + (textwrap.dedent("""
57324 + import os, sys, portage
57325 + if portage.portdb.porttree_root not in portage.portdb._pregen_auxdb:
57326 + sys.exit(1)
57327 + """),),
57328 + python_cmd + (textwrap.dedent("""
57329 + import os, sys, portage
57330 + from portage.cache.flat_hash import md5_database
57331 + if not isinstance(portage.portdb._pregen_auxdb[portage.portdb.porttree_root], md5_database):
57332 + sys.exit(1)
57333 + """),),
57334 +
57335 + # Disable DeprecationWarnings, since the pms format triggers them
57336 + # in portdbapi._create_pregen_cache().
57337 + (BASH_BINARY, "-c", "echo %s > %s" %
57338 + tuple(map(portage._shell_quote,
57339 + ("cache-formats = pms md5-dict", layout_conf_path,)))),
57340 + (portage_python, "-b", "-Wd", "-Wi::DeprecationWarning", "-c") + (textwrap.dedent("""
57341 + import os, sys, portage
57342 + if portage.portdb.porttree_root not in portage.portdb._pregen_auxdb:
57343 + sys.exit(1)
57344 + """),),
57345 + (portage_python, "-b", "-Wd", "-Wi::DeprecationWarning", "-c") + (textwrap.dedent("""
57346 + import os, sys, portage
57347 + from portage.cache.metadata import database as pms_database
57348 + if not isinstance(portage.portdb._pregen_auxdb[portage.portdb.porttree_root], pms_database):
57349 + sys.exit(1)
57350 + """),),
57351 +
57352 + # Test auto-detection and preference for md5-cache when both
57353 + # cache formats are available but layout.conf is absent.
57354 + (BASH_BINARY, "-c", "rm %s" % portage._shell_quote(layout_conf_path)),
57355 + python_cmd + (textwrap.dedent("""
57356 + import os, sys, portage
57357 + if portage.portdb.porttree_root not in portage.portdb._pregen_auxdb:
57358 + sys.exit(1)
57359 + """),),
57360 + python_cmd + (textwrap.dedent("""
57361 + import os, sys, portage
57362 + from portage.cache.flat_hash import md5_database
57363 + if not isinstance(portage.portdb._pregen_auxdb[portage.portdb.porttree_root], md5_database):
57364 + sys.exit(1)
57365 + """),),
57366 + )
57367 +
57368 + pythonpath = os.environ.get("PYTHONPATH")
57369 + if pythonpath is not None and not pythonpath.strip():
57370 + pythonpath = None
57371 + if pythonpath is not None and \
57372 + pythonpath.split(":")[0] == PORTAGE_PYM_PATH:
57373 + pass
57374 + else:
57375 + if pythonpath is None:
57376 + pythonpath = ""
57377 + else:
57378 + pythonpath = ":" + pythonpath
57379 + pythonpath = PORTAGE_PYM_PATH + pythonpath
57380 +
57381 + env = {
57382 + "PATH" : os.environ.get("PATH", ""),
57383 + "PORTAGE_OVERRIDE_EPREFIX" : eprefix,
57384 + "PORTAGE_PYTHON" : portage_python,
57385 + "PORTAGE_REPOSITORIES" : settings.repositories.config_string(),
57386 + "PYTHONPATH" : pythonpath,
57387 + }
57388 +
57389 + if "__PORTAGE_TEST_HARDLINK_LOCKS" in os.environ:
57390 + env["__PORTAGE_TEST_HARDLINK_LOCKS"] = \
57391 + os.environ["__PORTAGE_TEST_HARDLINK_LOCKS"]
57392 +
57393 + dirs = [user_config_dir]
57394 +
57395 + try:
57396 + for d in dirs:
57397 + ensure_dirs(d)
57398 +
57399 + if debug:
57400 + # The subprocess inherits both stdout and stderr, for
57401 + # debugging purposes.
57402 + stdout = None
57403 + else:
57404 + # The subprocess inherits stderr so that any warnings
57405 + # triggered by python -Wd will be visible.
57406 + stdout = subprocess.PIPE
57407 +
57408 + for i, args in enumerate(test_commands):
57409 +
57410 + if hasattr(args[0], '__call__'):
57411 + self.assertTrue(args[0](),
57412 + "callable at index %s failed" % (i,))
57413 + continue
57414 +
57415 + proc = subprocess.Popen(args,
57416 + env=env, stdout=stdout)
57417 +
57418 + if debug:
57419 + proc.wait()
57420 + else:
57421 + output = proc.stdout.readlines()
57422 + proc.wait()
57423 + proc.stdout.close()
57424 + if proc.returncode != os.EX_OK:
57425 + for line in output:
57426 + sys.stderr.write(_unicode_decode(line))
57427 +
57428 + self.assertEqual(os.EX_OK, proc.returncode,
57429 + "command %d failed with args %s" % (i, args,))
57430 + finally:
57431 + playground.cleanup()
57432
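The new test module above drives egencache and then checks which pregenerated cache backend portdbapi picks up. The rule it exercises is the auto-detection added to parse_layout_conf earlier in this diff: when layout.conf names no cache-formats, prefer metadata/md5-cache and fall back to the older PMS metadata/cache. A simplified standalone sketch of that rule (not portage's code; the example path is made up):

    import os

    def detect_cache_formats(repo_location, declared=""):
        """Pick cache formats when layout.conf does not declare any."""
        formats = declared.lower().split()
        if not formats:
            # Prefer md5-dict; list pms as well if the old cache also exists.
            if os.path.isdir(os.path.join(repo_location, "metadata", "md5-cache")):
                formats.append("md5-dict")
            if os.path.isdir(os.path.join(repo_location, "metadata", "cache")):
                formats.append("pms")
        return tuple(formats)

    # detect_cache_formats("/var/repositories/test_repo") -> ("md5-dict",)
    # once `egencache --update` has written metadata/md5-cache.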
57433 diff --git a/pym/portage/tests/dep/testAtom.py b/pym/portage/tests/dep/testAtom.py
57434 index f5a7d37..da58be2 100644
57435 --- a/pym/portage/tests/dep/testAtom.py
57436 +++ b/pym/portage/tests/dep/testAtom.py
57437 @@ -1,4 +1,4 @@
57438 -# Copyright 2006, 2010 Gentoo Foundation
57439 +# Copyright 2006-2012 Gentoo Foundation
57440 # Distributed under the terms of the GNU General Public License v2
57441
57442 from portage.tests import TestCase
57443 @@ -10,154 +10,157 @@ class TestAtom(TestCase):
57444 def testAtom(self):
57445
57446 tests = (
57447 - ( "=sys-apps/portage-2.1-r1:0[doc,a=,!b=,c?,!d?,-e]",
57448 - ('=', 'sys-apps/portage', '2.1-r1', '0', '[doc,a=,!b=,c?,!d?,-e]', None), False, False ),
57449 - ( "=sys-apps/portage-2.1-r1*:0[doc]",
57450 - ('=*', 'sys-apps/portage', '2.1-r1', '0', '[doc]', None), False, False ),
57451 - ( "sys-apps/portage:0[doc]",
57452 - (None, 'sys-apps/portage', None, '0', '[doc]', None), False, False ),
57453 - ( "sys-apps/portage:0[doc]",
57454 - (None, 'sys-apps/portage', None, '0', '[doc]', None), False, False ),
57455 - ( "*/*",
57456 - (None, '*/*', None, None, None, None), True, False ),
57457 - ( "=*/*-*9999*",
57458 - ('=*', '*/*', '*9999*', None, None, None), True, False ),
57459 - ( "=*/*-*9999*:0::repo_name",
57460 - ('=*', '*/*', '*9999*', '0', None, 'repo_name'), True, True ),
57461 - ( "sys-apps/*",
57462 - (None, 'sys-apps/*', None, None, None, None), True, False ),
57463 - ( "*/portage",
57464 - (None, '*/portage', None, None, None, None), True, False ),
57465 - ( "s*s-*/portage:1",
57466 - (None, 's*s-*/portage', None, '1', None, None), True, False ),
57467 - ( "*/po*ge:2",
57468 - (None, '*/po*ge', None, '2', None, None), True, False ),
57469 - ( "!dev-libs/A",
57470 - (None, 'dev-libs/A', None, None, None, None), True, True ),
57471 - ( "!!dev-libs/A",
57472 - (None, 'dev-libs/A', None, None, None, None), True, True ),
57473 - ( "!!dev-libs/A",
57474 - (None, 'dev-libs/A', None, None, None, None), True, True ),
57475 - ( "dev-libs/A[foo(+)]",
57476 - (None, 'dev-libs/A', None, None, "[foo(+)]", None), True, True ),
57477 - ( "dev-libs/A[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]",
57478 - (None, 'dev-libs/A', None, None, "[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", None), True, True ),
57479 - ( "dev-libs/A:2[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]",
57480 - (None, 'dev-libs/A', None, "2", "[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", None), True, True ),
57481 -
57482 - ( "=sys-apps/portage-2.1-r1:0::repo_name[doc,a=,!b=,c?,!d?,-e]",
57483 - ('=', 'sys-apps/portage', '2.1-r1', '0', '[doc,a=,!b=,c?,!d?,-e]', 'repo_name'), False, True ),
57484 - ( "=sys-apps/portage-2.1-r1*:0::repo_name[doc]",
57485 - ('=*', 'sys-apps/portage', '2.1-r1', '0', '[doc]', 'repo_name'), False, True ),
57486 - ( "sys-apps/portage:0::repo_name[doc]",
57487 - (None, 'sys-apps/portage', None, '0', '[doc]', 'repo_name'), False, True ),
57488 -
57489 - ( "*/*::repo_name",
57490 - (None, '*/*', None, None, None, 'repo_name'), True, True ),
57491 - ( "sys-apps/*::repo_name",
57492 - (None, 'sys-apps/*', None, None, None, 'repo_name'), True, True ),
57493 - ( "*/portage::repo_name",
57494 - (None, '*/portage', None, None, None, 'repo_name'), True, True ),
57495 - ( "s*s-*/portage:1::repo_name",
57496 - (None, 's*s-*/portage', None, '1', None, 'repo_name'), True, True ),
57497 + ("=sys-apps/portage-2.1-r1:0[doc,a=,!b=,c?,!d?,-e]",
57498 + ('=', 'sys-apps/portage', '2.1-r1', '0', '[doc,a=,!b=,c?,!d?,-e]', None), False, False),
57499 + ("=sys-apps/portage-2.1-r1*:0[doc]",
57500 + ('=*', 'sys-apps/portage', '2.1-r1', '0', '[doc]', None), False, False),
57501 + ("sys-apps/portage:0[doc]",
57502 + (None, 'sys-apps/portage', None, '0', '[doc]', None), False, False),
57503 + ("sys-apps/portage:0[doc]",
57504 + (None, 'sys-apps/portage', None, '0', '[doc]', None), False, False),
57505 + ("*/*",
57506 + (None, '*/*', None, None, None, None), True, False),
57507 + ("=*/*-*9999*",
57508 + ('=*', '*/*', '*9999*', None, None, None), True, False),
57509 + ("=*/*-*9999*:0::repo_name",
57510 + ('=*', '*/*', '*9999*', '0', None, 'repo_name'), True, True),
57511 + ("=*/*-*_beta*",
57512 + ('=*', '*/*', '*_beta*', None, None, None), True, False),
57513 + ("=*/*-*_beta*:0::repo_name",
57514 + ('=*', '*/*', '*_beta*', '0', None, 'repo_name'), True, True),
57515 + ("sys-apps/*",
57516 + (None, 'sys-apps/*', None, None, None, None), True, False),
57517 + ("*/portage",
57518 + (None, '*/portage', None, None, None, None), True, False),
57519 + ("s*s-*/portage:1",
57520 + (None, 's*s-*/portage', None, '1', None, None), True, False),
57521 + ("*/po*ge:2",
57522 + (None, '*/po*ge', None, '2', None, None), True, False),
57523 + ("!dev-libs/A",
57524 + (None, 'dev-libs/A', None, None, None, None), True, True),
57525 + ("!!dev-libs/A",
57526 + (None, 'dev-libs/A', None, None, None, None), True, True),
57527 + ("!!dev-libs/A",
57528 + (None, 'dev-libs/A', None, None, None, None), True, True),
57529 + ("dev-libs/A[foo(+)]",
57530 + (None, 'dev-libs/A', None, None, "[foo(+)]", None), True, True),
57531 + ("dev-libs/A[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]",
57532 + (None, 'dev-libs/A', None, None, "[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", None), True, True),
57533 + ("dev-libs/A:2[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]",
57534 + (None, 'dev-libs/A', None, "2", "[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", None), True, True),
57535 +
57536 + ("=sys-apps/portage-2.1-r1:0::repo_name[doc,a=,!b=,c?,!d?,-e]",
57537 + ('=', 'sys-apps/portage', '2.1-r1', '0', '[doc,a=,!b=,c?,!d?,-e]', 'repo_name'), False, True),
57538 + ("=sys-apps/portage-2.1-r1*:0::repo_name[doc]",
57539 + ('=*', 'sys-apps/portage', '2.1-r1', '0', '[doc]', 'repo_name'), False, True),
57540 + ("sys-apps/portage:0::repo_name[doc]",
57541 + (None, 'sys-apps/portage', None, '0', '[doc]', 'repo_name'), False, True),
57542 +
57543 + ("*/*::repo_name",
57544 + (None, '*/*', None, None, None, 'repo_name'), True, True),
57545 + ("sys-apps/*::repo_name",
57546 + (None, 'sys-apps/*', None, None, None, 'repo_name'), True, True),
57547 + ("*/portage::repo_name",
57548 + (None, '*/portage', None, None, None, 'repo_name'), True, True),
57549 + ("s*s-*/portage:1::repo_name",
57550 + (None, 's*s-*/portage', None, '1', None, 'repo_name'), True, True),
57551 )
57552 -
57553 +
57554 tests_xfail = (
57555 - ( Atom("sys-apps/portage"), False, False ),
57556 - ( "cat/pkg[a!]", False, False ),
57557 - ( "cat/pkg[!a]", False, False ),
57558 - ( "cat/pkg[!a!]", False, False ),
57559 - ( "cat/pkg[!a-]", False, False ),
57560 - ( "cat/pkg[-a=]", False, False ),
57561 - ( "cat/pkg[-a?]", False, False ),
57562 - ( "cat/pkg[-a!]", False, False ),
57563 - ( "cat/pkg[=a]", False, False ),
57564 - ( "cat/pkg[=a=]", False, False ),
57565 - ( "cat/pkg[=a?]", False, False ),
57566 - ( "cat/pkg[=a!]", False, False ),
57567 - ( "cat/pkg[=a-]", False, False ),
57568 - ( "cat/pkg[?a]", False, False ),
57569 - ( "cat/pkg[?a=]", False, False ),
57570 - ( "cat/pkg[?a?]", False, False ),
57571 - ( "cat/pkg[?a!]", False, False ),
57572 - ( "cat/pkg[?a-]", False, False ),
57573 - ( "sys-apps/portage[doc]:0", False, False ),
57574 - ( "*/*", False, False ),
57575 - ( "sys-apps/*", False, False ),
57576 - ( "*/portage", False, False ),
57577 - ( "*/**", True, False ),
57578 - ( "*/portage[use]", True, False ),
57579 - ( "cat/pkg[a()]", False, False ),
57580 - ( "cat/pkg[a(]", False, False ),
57581 - ( "cat/pkg[a)]", False, False ),
57582 - ( "cat/pkg[a(,b]", False, False ),
57583 - ( "cat/pkg[a),b]", False, False ),
57584 - ( "cat/pkg[a(*)]", False, False ),
57585 - ( "cat/pkg[a(*)]", True, False ),
57586 - ( "cat/pkg[a(+-)]", False, False ),
57587 - ( "cat/pkg[a()]", False, False ),
57588 - ( "cat/pkg[(+)a]", False, False ),
57589 - ( "cat/pkg[a=(+)]", False, False ),
57590 - ( "cat/pkg[!(+)a=]", False, False ),
57591 - ( "cat/pkg[!a=(+)]", False, False ),
57592 - ( "cat/pkg[a?(+)]", False, False ),
57593 - ( "cat/pkg[!a?(+)]", False, False ),
57594 - ( "cat/pkg[!(+)a?]", False, False ),
57595 - ( "cat/pkg[-(+)a]", False, False ),
57596 - ( "cat/pkg[a(+),-a]", False, False ),
57597 - ( "cat/pkg[a(-),-a]", False, False ),
57598 - ( "cat/pkg[-a,a(+)]", False, False ),
57599 - ( "cat/pkg[-a,a(-)]", False, False ),
57600 - ( "cat/pkg[-a(+),a(-)]", False, False ),
57601 - ( "cat/pkg[-a(-),a(+)]", False, False ),
57602 - ( "sys-apps/portage[doc]::repo_name", False, False ),
57603 - ( "sys-apps/portage:0[doc]::repo_name", False, False ),
57604 - ( "sys-apps/portage[doc]:0::repo_name", False, False ),
57605 - ( "=sys-apps/portage-2.1-r1:0::repo_name[doc,a=,!b=,c?,!d?,-e]", False, False ),
57606 - ( "=sys-apps/portage-2.1-r1*:0::repo_name[doc]", False, False ),
57607 - ( "sys-apps/portage:0::repo_name[doc]", False, False ),
57608 - ( "*/*::repo_name", True, False ),
57609 + (Atom("sys-apps/portage"), False, False),
57610 + ("cat/pkg[a!]", False, False),
57611 + ("cat/pkg[!a]", False, False),
57612 + ("cat/pkg[!a!]", False, False),
57613 + ("cat/pkg[!a-]", False, False),
57614 + ("cat/pkg[-a=]", False, False),
57615 + ("cat/pkg[-a?]", False, False),
57616 + ("cat/pkg[-a!]", False, False),
57617 + ("cat/pkg[=a]", False, False),
57618 + ("cat/pkg[=a=]", False, False),
57619 + ("cat/pkg[=a?]", False, False),
57620 + ("cat/pkg[=a!]", False, False),
57621 + ("cat/pkg[=a-]", False, False),
57622 + ("cat/pkg[?a]", False, False),
57623 + ("cat/pkg[?a=]", False, False),
57624 + ("cat/pkg[?a?]", False, False),
57625 + ("cat/pkg[?a!]", False, False),
57626 + ("cat/pkg[?a-]", False, False),
57627 + ("sys-apps/portage[doc]:0", False, False),
57628 + ("*/*", False, False),
57629 + ("sys-apps/*", False, False),
57630 + ("*/portage", False, False),
57631 + ("*/**", True, False),
57632 + ("*/portage[use]", True, False),
57633 + ("cat/pkg[a()]", False, False),
57634 + ("cat/pkg[a(]", False, False),
57635 + ("cat/pkg[a)]", False, False),
57636 + ("cat/pkg[a(,b]", False, False),
57637 + ("cat/pkg[a),b]", False, False),
57638 + ("cat/pkg[a(*)]", False, False),
57639 + ("cat/pkg[a(*)]", True, False),
57640 + ("cat/pkg[a(+-)]", False, False),
57641 + ("cat/pkg[a()]", False, False),
57642 + ("cat/pkg[(+)a]", False, False),
57643 + ("cat/pkg[a=(+)]", False, False),
57644 + ("cat/pkg[!(+)a=]", False, False),
57645 + ("cat/pkg[!a=(+)]", False, False),
57646 + ("cat/pkg[a?(+)]", False, False),
57647 + ("cat/pkg[!a?(+)]", False, False),
57648 + ("cat/pkg[!(+)a?]", False, False),
57649 + ("cat/pkg[-(+)a]", False, False),
57650 + ("cat/pkg[a(+),-a]", False, False),
57651 + ("cat/pkg[a(-),-a]", False, False),
57652 + ("cat/pkg[-a,a(+)]", False, False),
57653 + ("cat/pkg[-a,a(-)]", False, False),
57654 + ("cat/pkg[-a(+),a(-)]", False, False),
57655 + ("cat/pkg[-a(-),a(+)]", False, False),
57656 + ("sys-apps/portage[doc]::repo_name", False, False),
57657 + ("sys-apps/portage:0[doc]::repo_name", False, False),
57658 + ("sys-apps/portage[doc]:0::repo_name", False, False),
57659 + ("=sys-apps/portage-2.1-r1:0::repo_name[doc,a=,!b=,c?,!d?,-e]", False, False),
57660 + ("=sys-apps/portage-2.1-r1*:0::repo_name[doc]", False, False),
57661 + ("sys-apps/portage:0::repo_name[doc]", False, False),
57662 + ("*/*::repo_name", True, False),
57663 )
57664
57665 for atom, parts, allow_wildcard, allow_repo in tests:
57666 a = Atom(atom, allow_wildcard=allow_wildcard, allow_repo=allow_repo)
57667 op, cp, ver, slot, use, repo = parts
57668 - self.assertEqual( op, a.operator,
57669 - msg="Atom('%s').operator = %s == '%s'" % ( atom, a.operator, op ) )
57670 - self.assertEqual( cp, a.cp,
57671 - msg="Atom('%s').cp = %s == '%s'" % ( atom, a.cp, cp ) )
57672 + self.assertEqual(op, a.operator,
57673 + msg="Atom('%s').operator = %s == '%s'" % (atom, a.operator, op))
57674 + self.assertEqual(cp, a.cp,
57675 + msg="Atom('%s').cp = %s == '%s'" % (atom, a.cp, cp))
57676 if ver is not None:
57677 cpv = "%s-%s" % (cp, ver)
57678 else:
57679 cpv = cp
57680 - self.assertEqual( cpv, a.cpv,
57681 - msg="Atom('%s').cpv = %s == '%s'" % ( atom, a.cpv, cpv ) )
57682 - self.assertEqual( slot, a.slot,
57683 - msg="Atom('%s').slot = %s == '%s'" % ( atom, a.slot, slot ) )
57684 - self.assertEqual( repo, a.repo,
57685 - msg="Atom('%s').repo == %s == '%s'" % ( atom, a.repo, repo ) )
57686 + self.assertEqual(cpv, a.cpv,
57687 + msg="Atom('%s').cpv = %s == '%s'" % (atom, a.cpv, cpv))
57688 + self.assertEqual(slot, a.slot,
57689 + msg="Atom('%s').slot = %s == '%s'" % (atom, a.slot, slot))
57690 + self.assertEqual(repo, a.repo,
57691 + msg="Atom('%s').repo == %s == '%s'" % (atom, a.repo, repo))
57692
57693 if a.use:
57694 returned_use = str(a.use)
57695 else:
57696 returned_use = None
57697 - self.assertEqual( use, returned_use,
57698 - msg="Atom('%s').use = %s == '%s'" % ( atom, returned_use, use ) )
57699 + self.assertEqual(use, returned_use,
57700 + msg="Atom('%s').use = %s == '%s'" % (atom, returned_use, use))
57701
57702 for atom, allow_wildcard, allow_repo in tests_xfail:
57703 - self.assertRaisesMsg(atom, (InvalidAtom, TypeError), Atom, atom, \
57704 + self.assertRaisesMsg(atom, (InvalidAtom, TypeError), Atom, atom,
57705 allow_wildcard=allow_wildcard, allow_repo=allow_repo)
57706
57707 def testSlotAbiAtom(self):
57708 tests = (
57709 - ("virtual/ffmpeg:0/53", "4-slot-abi", {"slot": "0", "slot_abi": "53", "slot_abi_op": None}),
57710 - ("virtual/ffmpeg:0/53=", "4-slot-abi", {"slot": "0", "slot_abi": "53", "slot_abi_op": "="}),
57711 - ("virtual/ffmpeg:=", "4-slot-abi", {"slot": None, "slot_abi": None, "slot_abi_op": "="}),
57712 - ("virtual/ffmpeg:0=", "4-slot-abi", {"slot": "0", "slot_abi": None, "slot_abi_op": "="}),
57713 - ("virtual/ffmpeg:*", "4-slot-abi", {"slot": None, "slot_abi": None, "slot_abi_op": "*"}),
57714 - ("virtual/ffmpeg:0*", "4-slot-abi", {"slot": "0", "slot_abi": None, "slot_abi_op": "*"}),
57715 - ("virtual/ffmpeg:0", "4-slot-abi", {"slot": "0", "slot_abi": None, "slot_abi_op": None}),
57716 - ("virtual/ffmpeg", "4-slot-abi", {"slot": None, "slot_abi": None, "slot_abi_op": None}),
57717 + ("virtual/ffmpeg:0/53", "4-slot-abi", {"slot": "0", "sub_slot": "53", "slot_operator": None}),
57718 + ("virtual/ffmpeg:0/53=", "4-slot-abi", {"slot": "0", "sub_slot": "53", "slot_operator": "="}),
57719 + ("virtual/ffmpeg:=", "4-slot-abi", {"slot": None, "sub_slot": None, "slot_operator": "="}),
57720 + ("virtual/ffmpeg:0=", "4-slot-abi", {"slot": "0", "sub_slot": None, "slot_operator": "="}),
57721 + ("virtual/ffmpeg:*", "4-slot-abi", {"slot": None, "sub_slot": None, "slot_operator": "*"}),
57722 + ("virtual/ffmpeg:0", "4-slot-abi", {"slot": "0", "sub_slot": None, "slot_operator": None}),
57723 + ("virtual/ffmpeg", "4-slot-abi", {"slot": None, "sub_slot": None, "slot_operator": None}),
57724 )
57725
57726 for atom, eapi, parts in tests:
57727 @@ -165,7 +168,7 @@ class TestAtom(TestCase):
57728 for k, v in parts.items():
57729 self.assertEqual(v, getattr(a, k),
57730 msg="Atom('%s').%s = %s == '%s'" %
57731 - (atom, k, getattr(a, k), v ))
57732 + (atom, k, getattr(a, k), v))
57733
57734 def test_intersects(self):
57735 test_cases = (
57736 @@ -182,7 +185,7 @@ class TestAtom(TestCase):
57737 )
57738
57739 for atom, other, expected_result in test_cases:
57740 - self.assertEqual(Atom(atom).intersects(Atom(other)), expected_result, \
57741 + self.assertEqual(Atom(atom).intersects(Atom(other)), expected_result,
57742 "%s and %s should intersect: %s" % (atom, other, expected_result))
57743
57744 def test_violated_conditionals(self):
57745 @@ -276,7 +279,7 @@ class TestAtom(TestCase):
57746 for atom, other_use, iuse, parent_use in test_cases_xfail:
57747 a = Atom(atom)
57748 validator = use_flag_validator(iuse)
57749 - self.assertRaisesMsg(atom, InvalidAtom, \
57750 + self.assertRaisesMsg(atom, InvalidAtom,
57751 a.violated_conditionals, other_use, validator.is_valid_flag, parent_use)
57752
57753 def test_evaluate_conditionals(self):
57754 @@ -325,9 +328,9 @@ class TestAtom(TestCase):
57755 ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["d", "e", "f"], [], "dev-libs/A[a,b,-b,c,-c,-e,-f]"),
57756 ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", [], ["d", "e", "f"], "dev-libs/A[a,b,-b,c,-c,d,-f]"),
57757
57758 - ("dev-libs/A[a(-),b(+)=,!c(-)=,d(+)?,!e(-)?,-f(+)]", \
57759 + ("dev-libs/A[a(-),b(+)=,!c(-)=,d(+)?,!e(-)?,-f(+)]",
57760 ["a", "b", "c", "d", "e", "f"], [], "dev-libs/A[a(-),-b(+),c(-),-e(-),-f(+)]"),
57761 - ("dev-libs/A[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", \
57762 + ("dev-libs/A[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]",
57763 [], ["a", "b", "c", "d", "e", "f"], "dev-libs/A[a(+),b(-),-c(+),d(-),-f(-)]"),
57764 )
57765
57766
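The testAtom updates above follow the rename of the slot-ABI attributes to slot, sub_slot and slot_operator. For orientation, a simplified decomposition of the text after the ':' in such an atom, producing the same triples the test table expects (an illustrative stand-in, not portage's Atom parser):

    def split_slot_dep(slot_dep):
        """'0/53=' -> ('0', '53', '='); '=' -> (None, None, '='); '0' -> ('0', None, None)"""
        operator = None
        if slot_dep and slot_dep[-1] in ("=", "*"):
            operator = slot_dep[-1]
            slot_dep = slot_dep[:-1]
        slot = sub_slot = None
        if slot_dep:
            slot, _, sub = slot_dep.partition("/")
            sub_slot = sub or None
        return slot, sub_slot, operator

    assert split_slot_dep("0/53=") == ("0", "53", "=")
    assert split_slot_dep("=") == (None, None, "=")
    assert split_slot_dep("0*") == ("0", None, "*")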
57767 diff --git a/pym/portage/tests/dep/testCheckRequiredUse.py b/pym/portage/tests/dep/testCheckRequiredUse.py
57768 index 54791e0..63330b5 100644
57769 --- a/pym/portage/tests/dep/testCheckRequiredUse.py
57770 +++ b/pym/portage/tests/dep/testCheckRequiredUse.py
57771 @@ -1,4 +1,4 @@
57772 -# Copyright 2010-2011 Gentoo Foundation
57773 +# Copyright 2010-2012 Gentoo Foundation
57774 # Distributed under the terms of the GNU General Public License v2
57775
57776 from portage.tests import TestCase
57777 @@ -9,97 +9,106 @@ class TestCheckRequiredUse(TestCase):
57778
57779 def testCheckRequiredUse(self):
57780 test_cases = (
57781 - ( "|| ( a b )", [], ["a", "b"], False),
57782 - ( "|| ( a b )", ["a"], ["a", "b"], True),
57783 - ( "|| ( a b )", ["b"], ["a", "b"], True),
57784 - ( "|| ( a b )", ["a", "b"], ["a", "b"], True),
57785 -
57786 - ( "^^ ( a b )", [], ["a", "b"], False),
57787 - ( "^^ ( a b )", ["a"], ["a", "b"], True),
57788 - ( "^^ ( a b )", ["b"], ["a", "b"], True),
57789 - ( "^^ ( a b )", ["a", "b"], ["a", "b"], False),
57790 -
57791 - ( "^^ ( || ( a b ) c )", [], ["a", "b", "c"], False),
57792 - ( "^^ ( || ( a b ) c )", ["a"], ["a", "b", "c"], True),
57793 -
57794 - ( "^^ ( || ( ( a b ) ) ( c ) )", [], ["a", "b", "c"], False),
57795 - ( "( ^^ ( ( || ( ( a ) ( b ) ) ) ( ( c ) ) ) )", ["a"], ["a", "b", "c"], True),
57796 -
57797 - ( "a || ( b c )", ["a"], ["a", "b", "c"], False),
57798 - ( "|| ( b c ) a", ["a"], ["a", "b", "c"], False),
57799 -
57800 - ( "|| ( a b c )", ["a"], ["a", "b", "c"], True),
57801 - ( "|| ( a b c )", ["b"], ["a", "b", "c"], True),
57802 - ( "|| ( a b c )", ["c"], ["a", "b", "c"], True),
57803 -
57804 - ( "^^ ( a b c )", ["a"], ["a", "b", "c"], True),
57805 - ( "^^ ( a b c )", ["b"], ["a", "b", "c"], True),
57806 - ( "^^ ( a b c )", ["c"], ["a", "b", "c"], True),
57807 - ( "^^ ( a b c )", ["a", "b"], ["a", "b", "c"], False),
57808 - ( "^^ ( a b c )", ["b", "c"], ["a", "b", "c"], False),
57809 - ( "^^ ( a b c )", ["a", "c"], ["a", "b", "c"], False),
57810 - ( "^^ ( a b c )", ["a", "b", "c"], ["a", "b", "c"], False),
57811 -
57812 - ( "a? ( ^^ ( b c ) )", [], ["a", "b", "c"], True),
57813 - ( "a? ( ^^ ( b c ) )", ["a"], ["a", "b", "c"], False),
57814 - ( "a? ( ^^ ( b c ) )", ["b"], ["a", "b", "c"], True),
57815 - ( "a? ( ^^ ( b c ) )", ["c"], ["a", "b", "c"], True),
57816 - ( "a? ( ^^ ( b c ) )", ["a", "b"], ["a", "b", "c"], True),
57817 - ( "a? ( ^^ ( b c ) )", ["a", "b", "c"], ["a", "b", "c"], False),
57818 -
57819 - ( "^^ ( a? ( !b ) !c? ( d ) )", [], ["a", "b", "c", "d"], False),
57820 - ( "^^ ( a? ( !b ) !c? ( d ) )", ["a"], ["a", "b", "c", "d"], True),
57821 - ( "^^ ( a? ( !b ) !c? ( d ) )", ["c"], ["a", "b", "c", "d"], True),
57822 - ( "^^ ( a? ( !b ) !c? ( d ) )", ["a", "c"], ["a", "b", "c", "d"], True),
57823 - ( "^^ ( a? ( !b ) !c? ( d ) )", ["a", "b", "c"], ["a", "b", "c", "d"], False),
57824 - ( "^^ ( a? ( !b ) !c? ( d ) )", ["a", "b", "d"], ["a", "b", "c", "d"], True),
57825 - ( "^^ ( a? ( !b ) !c? ( d ) )", ["a", "b", "d"], ["a", "b", "c", "d"], True),
57826 - ( "^^ ( a? ( !b ) !c? ( d ) )", ["a", "d"], ["a", "b", "c", "d"], False),
57827 -
57828 - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", [], ["a", "b", "c"], False),
57829 - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", ["a"], ["a", "b", "c"], True),
57830 - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", ["b"], ["a", "b", "c"], True),
57831 - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", ["c"], ["a", "b", "c"], True),
57832 - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", ["a", "b"], ["a", "b", "c"], True),
57833 - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", ["a", "c"], ["a", "b", "c"], True),
57834 - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", ["b", "c"], ["a", "b", "c"], True),
57835 - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", ["a", "b", "c"], ["a", "b", "c"], False),
57836 -
57837 - ( "^^ ( || ( a b ) ^^ ( b c ) )", [], ["a", "b", "c"], False),
57838 - ( "^^ ( || ( a b ) ^^ ( b c ) )", ["a"], ["a", "b", "c"], True),
57839 - ( "^^ ( || ( a b ) ^^ ( b c ) )", ["b"], ["a", "b", "c"], False),
57840 - ( "^^ ( || ( a b ) ^^ ( b c ) )", ["c"], ["a", "b", "c"], True),
57841 - ( "^^ ( || ( a b ) ^^ ( b c ) )", ["a", "b"], ["a", "b", "c"], False),
57842 - ( "^^ ( || ( a b ) ^^ ( b c ) )", ["a", "c"], ["a", "b", "c"], False),
57843 - ( "^^ ( || ( a b ) ^^ ( b c ) )", ["b", "c"], ["a", "b", "c"], True),
57844 - ( "^^ ( || ( a b ) ^^ ( b c ) )", ["a", "b", "c"], ["a", "b", "c"], True),
57845 -
57846 - ( "|| ( ( a b ) c )", ["a", "b", "c"], ["a", "b", "c"], True),
57847 - ( "|| ( ( a b ) c )", ["b", "c"], ["a", "b", "c"], True),
57848 - ( "|| ( ( a b ) c )", ["a", "c"], ["a", "b", "c"], True),
57849 - ( "|| ( ( a b ) c )", ["a", "b"], ["a", "b", "c"], True),
57850 - ( "|| ( ( a b ) c )", ["a"], ["a", "b", "c"], False),
57851 - ( "|| ( ( a b ) c )", ["b"], ["a", "b", "c"], False),
57852 - ( "|| ( ( a b ) c )", ["c"], ["a", "b", "c"], True),
57853 - ( "|| ( ( a b ) c )", [], ["a", "b", "c"], False),
57854 -
57855 - ( "^^ ( ( a b ) c )", ["a", "b", "c"], ["a", "b", "c"], False),
57856 - ( "^^ ( ( a b ) c )", ["b", "c"], ["a", "b", "c"], True),
57857 - ( "^^ ( ( a b ) c )", ["a", "c"], ["a", "b", "c"], True),
57858 - ( "^^ ( ( a b ) c )", ["a", "b"], ["a", "b", "c"], True),
57859 - ( "^^ ( ( a b ) c )", ["a"], ["a", "b", "c"], False),
57860 - ( "^^ ( ( a b ) c )", ["b"], ["a", "b", "c"], False),
57861 - ( "^^ ( ( a b ) c )", ["c"], ["a", "b", "c"], True),
57862 - ( "^^ ( ( a b ) c )", [], ["a", "b", "c"], False),
57863 + ("|| ( a b )", [], ["a", "b"], False),
57864 + ("|| ( a b )", ["a"], ["a", "b"], True),
57865 + ("|| ( a b )", ["b"], ["a", "b"], True),
57866 + ("|| ( a b )", ["a", "b"], ["a", "b"], True),
57867 +
57868 + ("^^ ( a b )", [], ["a", "b"], False),
57869 + ("^^ ( a b )", ["a"], ["a", "b"], True),
57870 + ("^^ ( a b )", ["b"], ["a", "b"], True),
57871 + ("^^ ( a b )", ["a", "b"], ["a", "b"], False),
57872 + ("?? ( a b )", ["a", "b"], ["a", "b"], False),
57873 + ("?? ( a b )", ["a"], ["a", "b"], True),
57874 + ("?? ( a b )", ["b"], ["a", "b"], True),
57875 + ("?? ( a b )", [], ["a", "b"], True),
57876 + ("?? ( )", [], [], True),
57877 +
57878 + ("^^ ( || ( a b ) c )", [], ["a", "b", "c"], False),
57879 + ("^^ ( || ( a b ) c )", ["a"], ["a", "b", "c"], True),
57880 +
57881 + ("^^ ( || ( ( a b ) ) ( c ) )", [], ["a", "b", "c"], False),
57882 + ("( ^^ ( ( || ( ( a ) ( b ) ) ) ( ( c ) ) ) )", ["a"], ["a", "b", "c"], True),
57883 +
57884 + ("a || ( b c )", ["a"], ["a", "b", "c"], False),
57885 + ("|| ( b c ) a", ["a"], ["a", "b", "c"], False),
57886 +
57887 + ("|| ( a b c )", ["a"], ["a", "b", "c"], True),
57888 + ("|| ( a b c )", ["b"], ["a", "b", "c"], True),
57889 + ("|| ( a b c )", ["c"], ["a", "b", "c"], True),
57890 +
57891 + ("^^ ( a b c )", ["a"], ["a", "b", "c"], True),
57892 + ("^^ ( a b c )", ["b"], ["a", "b", "c"], True),
57893 + ("^^ ( a b c )", ["c"], ["a", "b", "c"], True),
57894 + ("^^ ( a b c )", ["a", "b"], ["a", "b", "c"], False),
57895 + ("^^ ( a b c )", ["b", "c"], ["a", "b", "c"], False),
57896 + ("^^ ( a b c )", ["a", "c"], ["a", "b", "c"], False),
57897 + ("^^ ( a b c )", ["a", "b", "c"], ["a", "b", "c"], False),
57898 +
57899 + ("a? ( ^^ ( b c ) )", [], ["a", "b", "c"], True),
57900 + ("a? ( ^^ ( b c ) )", ["a"], ["a", "b", "c"], False),
57901 + ("a? ( ^^ ( b c ) )", ["b"], ["a", "b", "c"], True),
57902 + ("a? ( ^^ ( b c ) )", ["c"], ["a", "b", "c"], True),
57903 + ("a? ( ^^ ( b c ) )", ["a", "b"], ["a", "b", "c"], True),
57904 + ("a? ( ^^ ( b c ) )", ["a", "b", "c"], ["a", "b", "c"], False),
57905 +
57906 + ("^^ ( a? ( !b ) !c? ( d ) )", [], ["a", "b", "c", "d"], False),
57907 + ("^^ ( a? ( !b ) !c? ( d ) )", ["a"], ["a", "b", "c", "d"], True),
57908 + ("^^ ( a? ( !b ) !c? ( d ) )", ["c"], ["a", "b", "c", "d"], True),
57909 + ("^^ ( a? ( !b ) !c? ( d ) )", ["a", "c"], ["a", "b", "c", "d"], True),
57910 + ("^^ ( a? ( !b ) !c? ( d ) )", ["a", "b", "c"], ["a", "b", "c", "d"], False),
57911 + ("^^ ( a? ( !b ) !c? ( d ) )", ["a", "b", "d"], ["a", "b", "c", "d"], True),
57912 + ("^^ ( a? ( !b ) !c? ( d ) )", ["a", "b", "d"], ["a", "b", "c", "d"], True),
57913 + ("^^ ( a? ( !b ) !c? ( d ) )", ["a", "d"], ["a", "b", "c", "d"], False),
57914 +
57915 + ("|| ( ^^ ( a b ) ^^ ( b c ) )", [], ["a", "b", "c"], False),
57916 + ("|| ( ^^ ( a b ) ^^ ( b c ) )", ["a"], ["a", "b", "c"], True),
57917 + ("|| ( ^^ ( a b ) ^^ ( b c ) )", ["b"], ["a", "b", "c"], True),
57918 + ("|| ( ^^ ( a b ) ^^ ( b c ) )", ["c"], ["a", "b", "c"], True),
57919 + ("|| ( ^^ ( a b ) ^^ ( b c ) )", ["a", "b"], ["a", "b", "c"], True),
57920 + ("|| ( ^^ ( a b ) ^^ ( b c ) )", ["a", "c"], ["a", "b", "c"], True),
57921 + ("|| ( ^^ ( a b ) ^^ ( b c ) )", ["b", "c"], ["a", "b", "c"], True),
57922 + ("|| ( ^^ ( a b ) ^^ ( b c ) )", ["a", "b", "c"], ["a", "b", "c"], False),
57923 +
57924 + ("^^ ( || ( a b ) ^^ ( b c ) )", [], ["a", "b", "c"], False),
57925 + ("^^ ( || ( a b ) ^^ ( b c ) )", ["a"], ["a", "b", "c"], True),
57926 + ("^^ ( || ( a b ) ^^ ( b c ) )", ["b"], ["a", "b", "c"], False),
57927 + ("^^ ( || ( a b ) ^^ ( b c ) )", ["c"], ["a", "b", "c"], True),
57928 + ("^^ ( || ( a b ) ^^ ( b c ) )", ["a", "b"], ["a", "b", "c"], False),
57929 + ("^^ ( || ( a b ) ^^ ( b c ) )", ["a", "c"], ["a", "b", "c"], False),
57930 + ("^^ ( || ( a b ) ^^ ( b c ) )", ["b", "c"], ["a", "b", "c"], True),
57931 + ("^^ ( || ( a b ) ^^ ( b c ) )", ["a", "b", "c"], ["a", "b", "c"], True),
57932 +
57933 + ("|| ( ( a b ) c )", ["a", "b", "c"], ["a", "b", "c"], True),
57934 + ("|| ( ( a b ) c )", ["b", "c"], ["a", "b", "c"], True),
57935 + ("|| ( ( a b ) c )", ["a", "c"], ["a", "b", "c"], True),
57936 + ("|| ( ( a b ) c )", ["a", "b"], ["a", "b", "c"], True),
57937 + ("|| ( ( a b ) c )", ["a"], ["a", "b", "c"], False),
57938 + ("|| ( ( a b ) c )", ["b"], ["a", "b", "c"], False),
57939 + ("|| ( ( a b ) c )", ["c"], ["a", "b", "c"], True),
57940 + ("|| ( ( a b ) c )", [], ["a", "b", "c"], False),
57941 +
57942 + ("^^ ( ( a b ) c )", ["a", "b", "c"], ["a", "b", "c"], False),
57943 + ("^^ ( ( a b ) c )", ["b", "c"], ["a", "b", "c"], True),
57944 + ("^^ ( ( a b ) c )", ["a", "c"], ["a", "b", "c"], True),
57945 + ("^^ ( ( a b ) c )", ["a", "b"], ["a", "b", "c"], True),
57946 + ("^^ ( ( a b ) c )", ["a"], ["a", "b", "c"], False),
57947 + ("^^ ( ( a b ) c )", ["b"], ["a", "b", "c"], False),
57948 + ("^^ ( ( a b ) c )", ["c"], ["a", "b", "c"], True),
57949 + ("^^ ( ( a b ) c )", [], ["a", "b", "c"], False),
57950 )
57951
57952 test_cases_xfail = (
57953 - ( "^^ ( || ( a b ) ^^ ( b c ) )", [], ["a", "b"]),
57954 - ( "^^ ( || ( a b ) ^^ ( b c )", [], ["a", "b", "c"]),
57955 - ( "^^( || ( a b ) ^^ ( b c ) )", [], ["a", "b", "c"]),
57956 - ( "^^ || ( a b ) ^^ ( b c )", [], ["a", "b", "c"]),
57957 - ( "^^ ( ( || ) ( a b ) ^^ ( b c ) )", [], ["a", "b", "c"]),
57958 - ( "^^ ( || ( a b ) ) ^^ ( b c ) )", [], ["a", "b", "c"]),
57959 + ("^^ ( || ( a b ) ^^ ( b c ) )", [], ["a", "b"]),
57960 + ("^^ ( || ( a b ) ^^ ( b c )", [], ["a", "b", "c"]),
57961 + ("^^( || ( a b ) ^^ ( b c ) )", [], ["a", "b", "c"]),
57962 + ("^^ || ( a b ) ^^ ( b c )", [], ["a", "b", "c"]),
57963 + ("^^ ( ( || ) ( a b ) ^^ ( b c ) )", [], ["a", "b", "c"]),
57964 + ("^^ ( || ( a b ) ) ^^ ( b c ) )", [], ["a", "b", "c"]),
57965 + )
57966 +
57967 + test_cases_xfail_eapi = (
57968 + ("?? ( a b )", [], ["a", "b"], "4"),
57969 )
57970
57971 for required_use, use, iuse, expected in test_cases:
57972 @@ -110,6 +119,11 @@ class TestCheckRequiredUse(TestCase):
57973 self.assertRaisesMsg(required_use + ", USE = " + " ".join(use), \
57974 InvalidDependString, check_required_use, required_use, use, iuse.__contains__)
57975
57976 + for required_use, use, iuse, eapi in test_cases_xfail_eapi:
57977 + self.assertRaisesMsg(required_use + ", USE = " + " ".join(use), \
57978 + InvalidDependString, check_required_use, required_use, use,
57979 + iuse.__contains__, eapi=eapi)
57980 +
57981 def testCheckRequiredUseFilterSatisfied(self):
57982 """
57983 Test filtering of satisfied parts of REQUIRED_USE,
57984
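The new REQUIRED_USE cases above cover the "??" (at-most-one-of) group, and the test_cases_xfail_eapi entry checks that it is rejected under EAPI 4, where the operator does not exist (it arrived with EAPI 5). A tiny sketch of the semantics the True/False columns encode:

    def at_most_one_of(flags, enabled):
        """A '?? ( ... )' group is satisfied when at most one flag is enabled."""
        return sum(1 for flag in flags if flag in enabled) <= 1

    assert at_most_one_of(["a", "b"], [])
    assert at_most_one_of(["a", "b"], ["a"])
    assert not at_most_one_of(["a", "b"], ["a", "b"])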
57985 diff --git a/pym/portage/tests/dep/testStandalone.py b/pym/portage/tests/dep/testStandalone.py
57986 index f03f2d5..88e3f39 100644
57987 --- a/pym/portage/tests/dep/testStandalone.py
57988 +++ b/pym/portage/tests/dep/testStandalone.py
57989 @@ -12,20 +12,20 @@ class TestStandalone(TestCase):
57990 def testCPVequal(self):
57991
57992 test_cases = (
57993 - ( "sys-apps/portage-2.1","sys-apps/portage-2.1", True ),
57994 - ( "sys-apps/portage-2.1","sys-apps/portage-2.0", False ),
57995 - ( "sys-apps/portage-2.1","sys-apps/portage-2.1-r1", False ),
57996 - ( "sys-apps/portage-2.1-r1","sys-apps/portage-2.1", False ),
57997 - ( "sys-apps/portage-2.1_alpha3","sys-apps/portage-2.1", False ),
57998 - ( "sys-apps/portage-2.1_alpha3_p6","sys-apps/portage-2.1_alpha3", False ),
57999 - ( "sys-apps/portage-2.1_alpha3","sys-apps/portage-2.1", False ),
58000 - ( "sys-apps/portage-2.1","sys-apps/X-2.1", False ),
58001 - ( "sys-apps/portage-2.1","portage-2.1", False ),
58002 + ("sys-apps/portage-2.1", "sys-apps/portage-2.1", True),
58003 + ("sys-apps/portage-2.1", "sys-apps/portage-2.0", False),
58004 + ("sys-apps/portage-2.1", "sys-apps/portage-2.1-r1", False),
58005 + ("sys-apps/portage-2.1-r1", "sys-apps/portage-2.1", False),
58006 + ("sys-apps/portage-2.1_alpha3", "sys-apps/portage-2.1", False),
58007 + ("sys-apps/portage-2.1_alpha3_p6", "sys-apps/portage-2.1_alpha3", False),
58008 + ("sys-apps/portage-2.1_alpha3", "sys-apps/portage-2.1", False),
58009 + ("sys-apps/portage-2.1", "sys-apps/X-2.1", False),
58010 + ("sys-apps/portage-2.1", "portage-2.1", False),
58011 )
58012 -
58013 +
58014 test_cases_xfail = (
58015 - ( "sys-apps/portage","sys-apps/portage" ),
58016 - ( "sys-apps/portage-2.1-6","sys-apps/portage-2.1-6" ),
58017 + ("sys-apps/portage", "sys-apps/portage"),
58018 + ("sys-apps/portage-2.1-6", "sys-apps/portage-2.1-6"),
58019 )
58020
58021 for cpv1, cpv2, expected_result in test_cases:
58022 @@ -33,5 +33,5 @@ class TestStandalone(TestCase):
58023 "cpvequal('%s', '%s') != %s" % (cpv1, cpv2, expected_result))
58024
58025 for cpv1, cpv2 in test_cases_xfail:
58026 - self.assertRaisesMsg("cpvequal("+cpv1+", "+cpv2+")", \
58027 + self.assertRaisesMsg("cpvequal(%s, %s)" % (cpv1, cpv2),
58028 PortageException, cpvequal, cpv1, cpv2)
58029
58030 diff --git a/pym/portage/tests/dep/test_best_match_to_list.py b/pym/portage/tests/dep/test_best_match_to_list.py
58031 index 8a14038..586c8bc 100644
58032 --- a/pym/portage/tests/dep/test_best_match_to_list.py
58033 +++ b/pym/portage/tests/dep/test_best_match_to_list.py
58034 @@ -1,5 +1,5 @@
58035 # test_best_match_to_list.py -- Portage Unit Testing Functionality
58036 -# Copyright 2010-2011 Gentoo Foundation
58037 +# Copyright 2010-2012 Gentoo Foundation
58038 # Distributed under the terms of the GNU General Public License v2
58039
58040 from itertools import permutations
58041 @@ -28,25 +28,29 @@ class Test_best_match_to_list(TestCase):
58042
58043 def testBest_match_to_list(self):
58044 tests = [
58045 - ("dev-libs/A-4", [Atom(">=dev-libs/A-3"), Atom(">=dev-libs/A-2")], \
58046 - [Atom(">=dev-libs/A-3"), Atom(">=dev-libs/A-2")], True),
58047 - ("dev-libs/A-4", [Atom("<=dev-libs/A-5"), Atom("<=dev-libs/A-6")], \
58048 - [Atom("<=dev-libs/A-5"), Atom("<=dev-libs/A-6")], True),
58049 - ("dev-libs/A-1", [Atom("dev-libs/A"), Atom("=dev-libs/A-1")], \
58050 - [Atom("=dev-libs/A-1"), Atom("dev-libs/A")], True),
58051 - ("dev-libs/A-1", [Atom("dev-libs/B"), Atom("=dev-libs/A-1:0")], \
58052 - [Atom("=dev-libs/A-1:0")], True),
58053 - ("dev-libs/A-1", [Atom("dev-libs/*", allow_wildcard=True), Atom("=dev-libs/A-1:0")], \
58054 - [Atom("=dev-libs/A-1:0"), Atom("dev-libs/*", allow_wildcard=True)], True),
58055 - ("dev-libs/A-4.9999-r1", [Atom("dev-libs/*", allow_wildcard=True), Atom("=*/*-*9999*", allow_wildcard=True)], \
58056 - [Atom("=*/*-*9999*", allow_wildcard=True), Atom("dev-libs/*", allow_wildcard=True)], True),
58057 - ("dev-libs/A-1:0", [Atom("dev-*/*", allow_wildcard=True), Atom("dev-*/*:0", allow_wildcard=True),\
58058 - Atom("dev-libs/A"), Atom("<=dev-libs/A-2"), Atom("dev-libs/A:0"), \
58059 - Atom("=dev-libs/A-1*"), Atom("~dev-libs/A-1"), Atom("=dev-libs/A-1")], \
58060 - [Atom("=dev-libs/A-1"), Atom("~dev-libs/A-1"), Atom("=dev-libs/A-1*"), \
58061 - Atom("dev-libs/A:0"), Atom("<=dev-libs/A-2"), Atom("dev-libs/A"), \
58062 - Atom("dev-*/*:0", allow_wildcard=True), Atom("dev-*/*", allow_wildcard=True)], False)
58063 - ]
58064 + ("dev-libs/A-4", [Atom(">=dev-libs/A-3"), Atom(">=dev-libs/A-2")],
58065 + [Atom(">=dev-libs/A-3"), Atom(">=dev-libs/A-2")], True),
58066 + ("dev-libs/A-4", [Atom("<=dev-libs/A-5"), Atom("<=dev-libs/A-6")],
58067 + [Atom("<=dev-libs/A-5"), Atom("<=dev-libs/A-6")], True),
58068 + ("dev-libs/A-1", [Atom("dev-libs/A"), Atom("=dev-libs/A-1")],
58069 + [Atom("=dev-libs/A-1"), Atom("dev-libs/A")], True),
58070 + ("dev-libs/A-1", [Atom("dev-libs/B"), Atom("=dev-libs/A-1:0")],
58071 + [Atom("=dev-libs/A-1:0")], True),
58072 + ("dev-libs/A-1", [Atom("dev-libs/*", allow_wildcard=True), Atom("=dev-libs/A-1:0")],
58073 + [Atom("=dev-libs/A-1:0"), Atom("dev-libs/*", allow_wildcard=True)], True),
58074 + ("dev-libs/A-4.9999-r1", [Atom("dev-libs/*", allow_wildcard=True), Atom("=*/*-*9999*", allow_wildcard=True)],
58075 + [Atom("=*/*-*9999*", allow_wildcard=True), Atom("dev-libs/*", allow_wildcard=True)], True),
58076 + ("dev-libs/A-4_beta-r1", [Atom("dev-libs/*", allow_wildcard=True), Atom("=*/*-*_beta*", allow_wildcard=True)],
58077 + [Atom("=*/*-*_beta*", allow_wildcard=True), Atom("dev-libs/*", allow_wildcard=True)], True),
58078 + ("dev-libs/A-4_beta1-r1", [Atom("dev-libs/*", allow_wildcard=True), Atom("=*/*-*_beta*", allow_wildcard=True)],
58079 + [Atom("=*/*-*_beta*", allow_wildcard=True), Atom("dev-libs/*", allow_wildcard=True)], True),
58080 + ("dev-libs/A-1:0", [Atom("dev-*/*", allow_wildcard=True), Atom("dev-*/*:0", allow_wildcard=True),
58081 + Atom("dev-libs/A"), Atom("<=dev-libs/A-2"), Atom("dev-libs/A:0"),
58082 + Atom("=dev-libs/A-1*"), Atom("~dev-libs/A-1"), Atom("=dev-libs/A-1")],
58083 + [Atom("=dev-libs/A-1"), Atom("~dev-libs/A-1"), Atom("=dev-libs/A-1*"),
58084 + Atom("dev-libs/A:0"), Atom("<=dev-libs/A-2"), Atom("dev-libs/A"),
58085 + Atom("dev-*/*:0", allow_wildcard=True), Atom("dev-*/*", allow_wildcard=True)], False)
58086 + ]
58087
58088 for pkg, atom_list, result, all_permutations in tests:
58089 if all_permutations:
58090
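The expected lists in this table are ordered from best match to worst, and the helper itself is expected to return the single most specific atom that matches the package. A brief illustration, assuming best_match_to_list and Atom come from portage.dep as the surrounding imports imply:

    from portage.dep import Atom, best_match_to_list

    atoms = [Atom("dev-libs/A"), Atom("=dev-libs/A-1")]
    # The exact-version atom is more specific than the bare cp atom, so it wins.
    assert str(best_match_to_list("dev-libs/A-1", atoms)) == "=dev-libs/A-1"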
58091 diff --git a/pym/portage/tests/dep/test_dep_getcpv.py b/pym/portage/tests/dep/test_dep_getcpv.py
58092 index 8a0a8aa..79c1514 100644
58093 --- a/pym/portage/tests/dep/test_dep_getcpv.py
58094 +++ b/pym/portage/tests/dep/test_dep_getcpv.py
58095 @@ -10,12 +10,14 @@ class DepGetCPV(TestCase):
58096 """
58097
58098 def testDepGetCPV(self):
58099 -
58100 - prefix_ops = ["<", ">", "=", "~", "<=",
58101 - ">=", "!=", "!<", "!>", "!~"]
58102
58103 - bad_prefix_ops = [ ">~", "<~", "~>", "~<" ]
58104 - postfix_ops = [ ("=", "*"), ]
58105 + prefix_ops = [
58106 + "<", ">", "=", "~", "<=",
58107 + ">=", "!=", "!<", "!>", "!~"
58108 + ]
58109 +
58110 + bad_prefix_ops = [">~", "<~", "~>", "~<"]
58111 + postfix_ops = [("=", "*"),]
58112
58113 cpvs = ["sys-apps/portage-2.1", "sys-apps/portage-2.1",
58114 "sys-apps/portage-2.1"]
58115 @@ -26,10 +28,10 @@ class DepGetCPV(TestCase):
58116 mycpv = prefix + cpv
58117 if slot:
58118 mycpv += slot
58119 - self.assertEqual( dep_getcpv( mycpv ), cpv )
58120 + self.assertEqual(dep_getcpv(mycpv), cpv)
58121
58122 for prefix, postfix in postfix_ops:
58123 mycpv = prefix + cpv + postfix
58124 if slot:
58125 mycpv += slot
58126 - self.assertEqual( dep_getcpv( mycpv ), cpv )
58127 + self.assertEqual(dep_getcpv(mycpv), cpv)
58128
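dep_getcpv is exercised here with every operator prefix plus the '=*' postfix form (the tests also append ':slot' suffixes, defined above this hunk). The behaviour being asserted, assuming the function is imported from portage.dep:

    from portage.dep import dep_getcpv

    # Operator prefixes and the trailing '*' of an '=*' atom are stripped away.
    assert dep_getcpv(">=sys-apps/portage-2.1") == "sys-apps/portage-2.1"
    assert dep_getcpv("=sys-apps/portage-2.1*") == "sys-apps/portage-2.1"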
58129 diff --git a/pym/portage/tests/dep/test_dep_getrepo.py b/pym/portage/tests/dep/test_dep_getrepo.py
58130 index 78ead8c..6c17d3c 100644
58131 --- a/pym/portage/tests/dep/test_dep_getrepo.py
58132 +++ b/pym/portage/tests/dep/test_dep_getrepo.py
58133 @@ -11,9 +11,9 @@ class DepGetRepo(TestCase):
58134 def testDepGetRepo(self):
58135
58136 repo_char = "::"
58137 - repos = ( "a", "repo-name", "repo_name", "repo123", None )
58138 + repos = ("a", "repo-name", "repo_name", "repo123", None)
58139 cpvs = ["sys-apps/portage"]
58140 - versions = ["2.1.1","2.1-r1", None]
58141 + versions = ["2.1.1", "2.1-r1", None]
58142 uses = ["[use]", None]
58143 for cpv in cpvs:
58144 for version in versions:
58145 @@ -26,4 +26,4 @@ class DepGetRepo(TestCase):
58146 pkg = pkg + repo_char + repo
58147 if use:
58148 pkg = pkg + use
58149 - self.assertEqual( dep_getrepo( pkg ), repo )
58150 + self.assertEqual(dep_getrepo(pkg), repo)
58151
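The repo test builds atoms with and without a '::repo' suffix and an optional USE block, and expects the bare repository name (or None) back. In sketch form, assuming dep_getrepo is imported from portage.dep:

    from portage.dep import dep_getrepo

    assert dep_getrepo("sys-apps/portage::repo-name[use]") == "repo-name"
    assert dep_getrepo("sys-apps/portage") is None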
58152 diff --git a/pym/portage/tests/dep/test_dep_getslot.py b/pym/portage/tests/dep/test_dep_getslot.py
58153 index 206cecc..8482864 100644
58154 --- a/pym/portage/tests/dep/test_dep_getslot.py
58155 +++ b/pym/portage/tests/dep/test_dep_getslot.py
58156 @@ -12,9 +12,9 @@ class DepGetSlot(TestCase):
58157 def testDepGetSlot(self):
58158
58159 slot_char = ":"
58160 - slots = ( "a", "1.2", "1", "IloveVapier", None )
58161 + slots = ("a", "1.2", "1", "IloveVapier", None)
58162 cpvs = ["sys-apps/portage"]
58163 - versions = ["2.1.1","2.1-r1"]
58164 + versions = ["2.1.1", "2.1-r1"]
58165 for cpv in cpvs:
58166 for version in versions:
58167 for slot in slots:
58168 @@ -22,7 +22,7 @@ class DepGetSlot(TestCase):
58169 if version:
58170 mycpv = '=' + mycpv + '-' + version
58171 if slot is not None:
58172 - self.assertEqual( dep_getslot(
58173 - mycpv + slot_char + slot ), slot )
58174 + self.assertEqual(dep_getslot(
58175 + mycpv + slot_char + slot), slot)
58176 else:
58177 - self.assertEqual( dep_getslot( mycpv ), slot )
58178 + self.assertEqual(dep_getslot(mycpv), slot)
58179
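dep_getslot is the mirror image: it returns whatever follows the ':' separator, or None when no slot was attached. A condensed version of the loop above, assuming the portage.dep import:

    from portage.dep import dep_getslot

    assert dep_getslot("=sys-apps/portage-2.1.1:1") == "1"
    assert dep_getslot("=sys-apps/portage-2.1.1") is None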
58180 diff --git a/pym/portage/tests/dep/test_dep_getusedeps.py b/pym/portage/tests/dep/test_dep_getusedeps.py
58181 index d2494f7..cd58eab 100644
58182 --- a/pym/portage/tests/dep/test_dep_getusedeps.py
58183 +++ b/pym/portage/tests/dep/test_dep_getusedeps.py
58184 @@ -24,12 +24,12 @@ class DepGetUseDeps(TestCase):
58185 cpv += ":" + slot
58186 if isinstance(use, tuple):
58187 cpv += "[%s]" % (",".join(use),)
58188 - self.assertEqual( dep_getusedeps(
58189 - cpv ), use )
58190 + self.assertEqual(dep_getusedeps(
58191 + cpv), use)
58192 else:
58193 if len(use):
58194 - self.assertEqual( dep_getusedeps(
58195 - cpv + "[" + use + "]" ), (use,) )
58196 + self.assertEqual(dep_getusedeps(
58197 + cpv + "[" + use + "]"), (use,))
58198 else:
58199 - self.assertEqual( dep_getusedeps(
58200 - cpv + "[" + use + "]" ), () )
58201 + self.assertEqual(dep_getusedeps(
58202 + cpv + "[" + use + "]"), ())
58203
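The USE-dependency test covers multi-flag, single-flag and empty bracket lists; the flags always come back as a tuple. The cpv below is illustrative (the real list sits above this hunk), and the import location is assumed to be portage.dep:

    from portage.dep import dep_getusedeps

    assert dep_getusedeps("dev-libs/A-1[foo,bar]") == ("foo", "bar")
    assert dep_getusedeps("dev-libs/A-1[foo]") == ("foo",)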
58204 diff --git a/pym/portage/tests/dep/test_get_operator.py b/pym/portage/tests/dep/test_get_operator.py
58205 index 4f9848f..5076e21 100644
58206 --- a/pym/portage/tests/dep/test_get_operator.py
58207 +++ b/pym/portage/tests/dep/test_get_operator.py
58208 @@ -10,24 +10,28 @@ class GetOperator(TestCase):
58209 def testGetOperator(self):
58210
58211 # get_operator does not validate operators
58212 - tests = [ ( "~", "~" ), ( "=", "=" ), ( ">", ">" ),
58213 - ( ">=", ">=" ), ( "<=", "<=" ),
58214 + tests = [
58215 + ("~", "~"),
58216 + ("=", "="),
58217 + (">", ">"),
58218 + (">=", ">="),
58219 + ("<=", "<="),
58220 ]
58221
58222 test_cpvs = ["sys-apps/portage-2.1"]
58223 - slots = [ None,"1","linux-2.5.6" ]
58224 + slots = [None, "1", "linux-2.5.6"]
58225 for cpv in test_cpvs:
58226 for test in tests:
58227 for slot in slots:
58228 atom = cpv[:]
58229 if slot:
58230 atom += ":" + slot
58231 - result = get_operator( test[0] + atom )
58232 - self.assertEqual( result, test[1],
58233 - msg="get_operator(%s) != %s" % (test[0] + atom, test[1]) )
58234 + result = get_operator(test[0] + atom)
58235 + self.assertEqual(result, test[1],
58236 + msg="get_operator(%s) != %s" % (test[0] + atom, test[1]))
58237
58238 - result = get_operator( "sys-apps/portage" )
58239 - self.assertEqual( result, None )
58240 + result = get_operator("sys-apps/portage")
58241 + self.assertEqual(result, None)
58242
58243 - result = get_operator( "=sys-apps/portage-2.1*" )
58244 - self.assertEqual( result , "=*" )
58245 + result = get_operator("=sys-apps/portage-2.1*")
58246 + self.assertEqual(result , "=*")
58247
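As the comment in the hunk notes, get_operator does not validate operators, it only extracts them; an operator-less atom yields None and the '=...*' form is reported as '=*'. The three cases the test pins down, assuming the portage.dep import:

    from portage.dep import get_operator

    assert get_operator(">=sys-apps/portage-2.1") == ">="
    assert get_operator("=sys-apps/portage-2.1*") == "=*"
    assert get_operator("sys-apps/portage") is None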
58248 diff --git a/pym/portage/tests/dep/test_get_required_use_flags.py b/pym/portage/tests/dep/test_get_required_use_flags.py
58249 index 06f8110..90e096c 100644
58250 --- a/pym/portage/tests/dep/test_get_required_use_flags.py
58251 +++ b/pym/portage/tests/dep/test_get_required_use_flags.py
58252 @@ -1,4 +1,4 @@
58253 -# Copyright 2010 Gentoo Foundation
58254 +# Copyright 2010-2012 Gentoo Foundation
58255 # Distributed under the terms of the GNU General Public License v2
58256
58257 from portage.tests import TestCase
58258 @@ -13,6 +13,8 @@ class TestCheckRequiredUse(TestCase):
58259
58260 ("|| ( a b c )", ["a", "b", "c"]),
58261 ("^^ ( a b c )", ["a", "b", "c"]),
58262 + ("?? ( a b c )", ["a", "b", "c"]),
58263 + ("?? ( )", []),
58264
58265 ("|| ( a b ^^ ( d e f ) )", ["a", "b", "d", "e", "f"]),
58266 ("^^ ( a b || ( d e f ) )", ["a", "b", "d", "e", "f"]),
58267
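Besides the copyright bump, this hunk extends the REQUIRED_USE coverage with the '??' (at-most-one-of) operator, including the degenerate empty group. The function is expected to collect every flag name referenced by the expression; a sketch using two of the pre-existing rows, assuming get_required_use_flags is imported from portage.dep and returns an iterable of flag names:

    from portage.dep import get_required_use_flags

    assert sorted(get_required_use_flags("^^ ( a b c )")) == ["a", "b", "c"]
    assert sorted(get_required_use_flags("|| ( a b ^^ ( d e f ) )")) == ["a", "b", "d", "e", "f"]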
58268 diff --git a/pym/portage/tests/dep/test_isjustname.py b/pym/portage/tests/dep/test_isjustname.py
58269 index c16fb54..9b95bcd 100644
58270 --- a/pym/portage/tests/dep/test_isjustname.py
58271 +++ b/pym/portage/tests/dep/test_isjustname.py
58272 @@ -9,16 +9,16 @@ class IsJustName(TestCase):
58273
58274 def testIsJustName(self):
58275
58276 - cats = ( "", "sys-apps/", "foo/", "virtual/" )
58277 - pkgs = ( "portage", "paludis", "pkgcore", "notARealPkg" )
58278 - vers = ( "", "-2.0-r3", "-1.0_pre2", "-3.1b" )
58279 + cats = ("", "sys-apps/", "foo/", "virtual/")
58280 + pkgs = ("portage", "paludis", "pkgcore", "notARealPkg")
58281 + vers = ("", "-2.0-r3", "-1.0_pre2", "-3.1b")
58282
58283 for pkg in pkgs:
58284 for cat in cats:
58285 for ver in vers:
58286 if len(ver):
58287 - self.assertFalse( isjustname( cat + pkg + ver ),
58288 - msg="isjustname(%s) is True!" % (cat + pkg + ver) )
58289 + self.assertFalse(isjustname(cat + pkg + ver),
58290 + msg="isjustname(%s) is True!" % (cat + pkg + ver))
58291 else:
58292 - self.assertTrue( isjustname( cat + pkg + ver ),
58293 - msg="isjustname(%s) is False!" % (cat + pkg + ver) )
58294 + self.assertTrue(isjustname(cat + pkg + ver),
58295 + msg="isjustname(%s) is False!" % (cat + pkg + ver))
58296
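isjustname simply distinguishes a bare category/package name from one carrying a version suffix, which is exactly what the nested loops assert. Reduced to two cases, assuming the portage.dep import:

    from portage.dep import isjustname

    assert isjustname("sys-apps/portage")
    assert not isjustname("sys-apps/portage-2.0-r3")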
58297 diff --git a/pym/portage/tests/dep/test_isvalidatom.py b/pym/portage/tests/dep/test_isvalidatom.py
58298 index abcec75..67ba603 100644
58299 --- a/pym/portage/tests/dep/test_isvalidatom.py
58300 +++ b/pym/portage/tests/dep/test_isvalidatom.py
58301 @@ -1,4 +1,4 @@
58302 -# Copyright 2006-2010 Gentoo Foundation
58303 +# Copyright 2006-2013 Gentoo Foundation
58304 # Distributed under the terms of the GNU General Public License v2
58305
58306 from portage.tests import TestCase
58307 @@ -26,7 +26,7 @@ class IsValidAtom(TestCase):
58308 IsValidAtomTestCase("~sys-apps/portage-2.1", True),
58309 IsValidAtomTestCase("sys-apps/portage:foo", True),
58310 IsValidAtomTestCase("sys-apps/portage-2.1:foo", False),
58311 - IsValidAtomTestCase( "sys-apps/portage-2.1:", False),
58312 + IsValidAtomTestCase("sys-apps/portage-2.1:", False),
58313 IsValidAtomTestCase("sys-apps/portage-2.1:", False),
58314 IsValidAtomTestCase("sys-apps/portage-2.1:[foo]", False),
58315 IsValidAtomTestCase("sys-apps/portage", True),
58316 @@ -141,8 +141,11 @@ class IsValidAtom(TestCase):
58317 IsValidAtomTestCase("virtual/ffmpeg:=", True),
58318 IsValidAtomTestCase("virtual/ffmpeg:0=", True),
58319 IsValidAtomTestCase("virtual/ffmpeg:*", True),
58320 - IsValidAtomTestCase("virtual/ffmpeg:0*", True),
58321 + IsValidAtomTestCase("virtual/ffmpeg:0*", False),
58322 IsValidAtomTestCase("virtual/ffmpeg:0", True),
58323 +
58324 + # Wildcard atoms
58325 + IsValidAtomTestCase("*/portage-2.1", False, allow_wildcard=True),
58326 )
58327
58328 for test_case in test_cases:
58329 @@ -150,6 +153,6 @@ class IsValidAtom(TestCase):
58330 atom_type = "valid"
58331 else:
58332 atom_type = "invalid"
58333 - self.assertEqual( bool(isvalidatom(test_case.atom, allow_wildcard=test_case.allow_wildcard, \
58334 + self.assertEqual(bool(isvalidatom(test_case.atom, allow_wildcard=test_case.allow_wildcard,
58335 allow_repo=test_case.allow_repo)), test_case.expected,
58336 - msg="isvalidatom(%s) != %s" % ( test_case.atom, test_case.expected ) )
58337 + msg="isvalidatom(%s) != %s" % (test_case.atom, test_case.expected))
58338
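Two behavioural changes hide in this otherwise cosmetic hunk: the partial-slot form ':0*' is now expected to be invalid (':*' and ':0' remain valid), and a wildcard atom that carries a version stays invalid even with allow_wildcard=True. Restated as assertions, assuming isvalidatom is imported from portage.dep and wrapping its return value in bool() as the test does:

    from portage.dep import isvalidatom

    assert bool(isvalidatom("~sys-apps/portage-2.1"))
    assert not isvalidatom("virtual/ffmpeg:0*")
    assert not isvalidatom("*/portage-2.1", allow_wildcard=True)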
58339 diff --git a/pym/portage/tests/dep/test_match_from_list.py b/pym/portage/tests/dep/test_match_from_list.py
58340 index d5d718f..75ac8fd 100644
58341 --- a/pym/portage/tests/dep/test_match_from_list.py
58342 +++ b/pym/portage/tests/dep/test_match_from_list.py
58343 @@ -1,4 +1,4 @@
58344 -# Copyright 2006-2012 Gentoo Foundation
58345 +# Copyright 2006-2014 Gentoo Foundation
58346 # Distributed under the terms of the GNU General Public License v2
58347
58348 import sys
58349 @@ -7,6 +7,7 @@ from portage.dep import Atom, match_from_list, _repo_separator
58350 from portage.versions import catpkgsplit, _pkg_str
58351
58352 if sys.hexversion >= 0x3000000:
58353 + # pylint: disable=W0622
58354 basestring = str
58355
58356 class Package(object):
58357 @@ -17,14 +18,14 @@ class Package(object):
58358 atom = Atom(atom, allow_repo=True)
58359 self.cp = atom.cp
58360 slot = atom.slot
58361 - if atom.slot_abi:
58362 - slot = "%s/%s" % (slot, atom.slot_abi)
58363 + if atom.sub_slot:
58364 + slot = "%s/%s" % (slot, atom.sub_slot)
58365 if not slot:
58366 slot = '0'
58367 self.cpv = _pkg_str(atom.cpv, slot=slot, repo=atom.repo)
58368 self.cpv_split = catpkgsplit(self.cpv)
58369 self.slot = self.cpv.slot
58370 - self.slot_abi = self.cpv.slot_abi
58371 + self.sub_slot = self.cpv.sub_slot
58372 self.repo = atom.repo
58373 if atom.use:
58374 self.use = self._use_class(atom.use.enabled)
58375 @@ -53,76 +54,79 @@ class Test_match_from_list(TestCase):
58376
58377 def testMatch_from_list(self):
58378 tests = (
58379 - ("=sys-apps/portage-45*", [], [] ),
58380 - ("=sys-apps/portage-45*", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ),
58381 - ("!=sys-apps/portage-45*", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ),
58382 - ("!!=sys-apps/portage-45*", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ),
58383 - ("=sys-apps/portage-045", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ),
58384 - ("=sys-apps/portage-045", ["sys-apps/portage-046"], [] ),
58385 - ("~sys-apps/portage-045", ["sys-apps/portage-045-r1"], ["sys-apps/portage-045-r1"] ),
58386 - ("~sys-apps/portage-045", ["sys-apps/portage-046-r1"], [] ),
58387 - ("<=sys-apps/portage-045", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ),
58388 - ("<=sys-apps/portage-045", ["sys-apps/portage-046"], [] ),
58389 - ("<sys-apps/portage-046", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ),
58390 - ("<sys-apps/portage-046", ["sys-apps/portage-046"], [] ),
58391 - (">=sys-apps/portage-045", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ),
58392 - (">=sys-apps/portage-047", ["sys-apps/portage-046-r1"], [] ),
58393 - (">sys-apps/portage-044", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ),
58394 - (">sys-apps/portage-047", ["sys-apps/portage-046-r1"], [] ),
58395 - ("sys-apps/portage:0", [Package("=sys-apps/portage-045:0")], ["sys-apps/portage-045"] ),
58396 - ("sys-apps/portage:0", [Package("=sys-apps/portage-045:1")], [] ),
58397 + ("=sys-apps/portage-45*", [], []),
58398 + ("=sys-apps/portage-45*", ["sys-apps/portage-045"], ["sys-apps/portage-045"]),
58399 + ("!=sys-apps/portage-45*", ["sys-apps/portage-045"], ["sys-apps/portage-045"]),
58400 + ("!!=sys-apps/portage-45*", ["sys-apps/portage-045"], ["sys-apps/portage-045"]),
58401 + ("=sys-apps/portage-045", ["sys-apps/portage-045"], ["sys-apps/portage-045"]),
58402 + ("=sys-apps/portage-045", ["sys-apps/portage-046"], []),
58403 + ("~sys-apps/portage-045", ["sys-apps/portage-045-r1"], ["sys-apps/portage-045-r1"]),
58404 + ("~sys-apps/portage-045", ["sys-apps/portage-046-r1"], []),
58405 + ("<=sys-apps/portage-045", ["sys-apps/portage-045"], ["sys-apps/portage-045"]),
58406 + ("<=sys-apps/portage-045", ["sys-apps/portage-046"], []),
58407 + ("<sys-apps/portage-046", ["sys-apps/portage-045"], ["sys-apps/portage-045"]),
58408 + ("<sys-apps/portage-046", ["sys-apps/portage-046"], []),
58409 + (">=sys-apps/portage-045", ["sys-apps/portage-045"], ["sys-apps/portage-045"]),
58410 + (">=sys-apps/portage-047", ["sys-apps/portage-046-r1"], []),
58411 + (">sys-apps/portage-044", ["sys-apps/portage-045"], ["sys-apps/portage-045"]),
58412 + (">sys-apps/portage-047", ["sys-apps/portage-046-r1"], []),
58413 + ("sys-apps/portage:0", [Package("=sys-apps/portage-045:0")], ["sys-apps/portage-045"]),
58414 + ("sys-apps/portage:0", [Package("=sys-apps/portage-045:1")], []),
58415 + ("=cat/pkg-1-r1*", ["cat/pkg-1_alpha1"], []),
58416 + ("=cat/pkg-1-r1*", ["cat/pkg-1-r11"], ["cat/pkg-1-r11"]),
58417 + ("=cat/pkg-1-r1*", ["cat/pkg-01-r11"], ["cat/pkg-01-r11"]),
58418 + ("=cat/pkg-01-r1*", ["cat/pkg-1-r11"], ["cat/pkg-1-r11"]),
58419 + ("=cat/pkg-01-r1*", ["cat/pkg-001-r11"], ["cat/pkg-001-r11"]),
58420 ("=sys-fs/udev-1*", ["sys-fs/udev-123"], ["sys-fs/udev-123"]),
58421 - ("=sys-fs/udev-4*", ["sys-fs/udev-456"], ["sys-fs/udev-456"] ),
58422 - ("*/*", ["sys-fs/udev-456"], ["sys-fs/udev-456"] ),
58423 - ("*/*:0", ["sys-fs/udev-456:0"], ["sys-fs/udev-456:0"] ),
58424 - ("*/*:1", ["sys-fs/udev-456:0"], [] ),
58425 - ("sys-fs/*", ["sys-fs/udev-456"], ["sys-fs/udev-456"] ),
58426 - ("*/udev", ["sys-fs/udev-456"], ["sys-fs/udev-456"] ),
58427 - ("=sys-apps/portage-2*", ["sys-apps/portage-2.1"], ["sys-apps/portage-2.1"] ),
58428 - ("=sys-apps/portage-2.1*", ["sys-apps/portage-2.1.2"], ["sys-apps/portage-2.1.2"] ),
58429 - ("dev-libs/*", ["sys-apps/portage-2.1.2"], [] ),
58430 - ("*/tar", ["sys-apps/portage-2.1.2"], [] ),
58431 - ("*/*", ["dev-libs/A-1", "dev-libs/B-1"], ["dev-libs/A-1", "dev-libs/B-1"] ),
58432 - ("dev-libs/*", ["dev-libs/A-1", "sci-libs/B-1"], ["dev-libs/A-1"] ),
58433 + ("=sys-fs/udev-4*", ["sys-fs/udev-456"], ["sys-fs/udev-456"]),
58434 + ("*/*", ["sys-fs/udev-456"], ["sys-fs/udev-456"]),
58435 + ("*/*:0", ["sys-fs/udev-456:0"], ["sys-fs/udev-456:0"]),
58436 + ("*/*:1", ["sys-fs/udev-456:0"], []),
58437 + ("sys-fs/*", ["sys-fs/udev-456"], ["sys-fs/udev-456"]),
58438 + ("*/udev", ["sys-fs/udev-456"], ["sys-fs/udev-456"]),
58439 + ("=sys-apps/portage-2*", ["sys-apps/portage-2.1"], ["sys-apps/portage-2.1"]),
58440 + ("=sys-apps/portage-2.1*", ["sys-apps/portage-2.1.2"], ["sys-apps/portage-2.1.2"]),
58441 + ("dev-libs/*", ["sys-apps/portage-2.1.2"], []),
58442 + ("*/tar", ["sys-apps/portage-2.1.2"], []),
58443 + ("*/*", ["dev-libs/A-1", "dev-libs/B-1"], ["dev-libs/A-1", "dev-libs/B-1"]),
58444 + ("dev-libs/*", ["dev-libs/A-1", "sci-libs/B-1"], ["dev-libs/A-1"]),
58445
58446 - ("dev-libs/A[foo]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2[-foo]")], ["dev-libs/A-1"] ),
58447 - ("dev-libs/A[-foo]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2[-foo]")], ["dev-libs/A-2"] ),
58448 - ("dev-libs/A[-foo]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2")], [] ),
58449 - ("dev-libs/A[foo,bar]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2[-foo]")], [] ),
58450 - ("dev-libs/A[foo,bar]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2[-foo,bar]")], [] ),
58451 - ("dev-libs/A[foo,bar]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2[foo,bar]")], ["dev-libs/A-2"] ),
58452 - ("dev-libs/A[foo,bar(+)]", [Package("=dev-libs/A-1[-foo]"), Package("=dev-libs/A-2[foo]")], ["dev-libs/A-2"] ),
58453 - ("dev-libs/A[foo,bar(-)]", [Package("=dev-libs/A-1[-foo]"), Package("=dev-libs/A-2[foo]")], [] ),
58454 - ("dev-libs/A[foo,-bar(-)]", [Package("=dev-libs/A-1[-foo,bar]"), Package("=dev-libs/A-2[foo]")], ["dev-libs/A-2"] ),
58455 + ("dev-libs/A[foo]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2[-foo]")], ["dev-libs/A-1"]),
58456 + ("dev-libs/A[-foo]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2[-foo]")], ["dev-libs/A-2"]),
58457 + ("dev-libs/A[-foo]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2")], []),
58458 + ("dev-libs/A[foo,bar]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2[-foo]")], []),
58459 + ("dev-libs/A[foo,bar]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2[-foo,bar]")], []),
58460 + ("dev-libs/A[foo,bar]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2[foo,bar]")], ["dev-libs/A-2"]),
58461 + ("dev-libs/A[foo,bar(+)]", [Package("=dev-libs/A-1[-foo]"), Package("=dev-libs/A-2[foo]")], ["dev-libs/A-2"]),
58462 + ("dev-libs/A[foo,bar(-)]", [Package("=dev-libs/A-1[-foo]"), Package("=dev-libs/A-2[foo]")], []),
58463 + ("dev-libs/A[foo,-bar(-)]", [Package("=dev-libs/A-1[-foo,bar]"), Package("=dev-libs/A-2[foo]")], ["dev-libs/A-2"]),
58464
58465 - ("dev-libs/A::repo1", [Package("=dev-libs/A-1::repo1"), Package("=dev-libs/A-1::repo2")], ["dev-libs/A-1::repo1"] ),
58466 - ("dev-libs/A::repo2", [Package("=dev-libs/A-1::repo1"), Package("=dev-libs/A-1::repo2")], ["dev-libs/A-1::repo2"] ),
58467 - ("dev-libs/A::repo2[foo]", [Package("=dev-libs/A-1::repo1[foo]"), Package("=dev-libs/A-1::repo2[-foo]")], [] ),
58468 - ("dev-libs/A::repo2[foo]", [Package("=dev-libs/A-1::repo1[-foo]"), Package("=dev-libs/A-1::repo2[foo]")], ["dev-libs/A-1::repo2"] ),
58469 - ("dev-libs/A:1::repo2[foo]", [Package("=dev-libs/A-1:1::repo1"), Package("=dev-libs/A-1:2::repo2")], [] ),
58470 - ("dev-libs/A:1::repo2[foo]", [Package("=dev-libs/A-1:2::repo1"), Package("=dev-libs/A-1:1::repo2[foo]")], ["dev-libs/A-1::repo2"] ),
58471 + ("dev-libs/A::repo1", [Package("=dev-libs/A-1::repo1"), Package("=dev-libs/A-1::repo2")], ["dev-libs/A-1::repo1"]),
58472 + ("dev-libs/A::repo2", [Package("=dev-libs/A-1::repo1"), Package("=dev-libs/A-1::repo2")], ["dev-libs/A-1::repo2"]),
58473 + ("dev-libs/A::repo2[foo]", [Package("=dev-libs/A-1::repo1[foo]"), Package("=dev-libs/A-1::repo2[-foo]")], []),
58474 + ("dev-libs/A::repo2[foo]", [Package("=dev-libs/A-1::repo1[-foo]"), Package("=dev-libs/A-1::repo2[foo]")], ["dev-libs/A-1::repo2"]),
58475 + ("dev-libs/A:1::repo2[foo]", [Package("=dev-libs/A-1:1::repo1"), Package("=dev-libs/A-1:2::repo2")], []),
58476 + ("dev-libs/A:1::repo2[foo]", [Package("=dev-libs/A-1:2::repo1"), Package("=dev-libs/A-1:1::repo2[foo]")], ["dev-libs/A-1::repo2"]),
58477
58478 - ("virtual/ffmpeg:0/53", [Package("=virtual/ffmpeg-0.10.3:0/53")], ["virtual/ffmpeg-0.10.3"] ),
58479 - ("virtual/ffmpeg:0/53=", [Package("=virtual/ffmpeg-0.10.3:0/53")], ["virtual/ffmpeg-0.10.3"] ),
58480 - ("virtual/ffmpeg:0/52", [Package("=virtual/ffmpeg-0.10.3:0/53")], [] ),
58481 - ("virtual/ffmpeg:=", [Package("=virtual/ffmpeg-0.10.3:0/53")], ["virtual/ffmpeg-0.10.3"] ),
58482 - ("virtual/ffmpeg:0=", [Package("=virtual/ffmpeg-0.10.3:0/53")], ["virtual/ffmpeg-0.10.3"] ),
58483 - ("virtual/ffmpeg:*", [Package("=virtual/ffmpeg-0.10.3:0/53")], ["virtual/ffmpeg-0.10.3"] ),
58484 - ("virtual/ffmpeg:0*", [Package("=virtual/ffmpeg-0.10.3:0/53")], ["virtual/ffmpeg-0.10.3"] ),
58485 - ("virtual/ffmpeg:0", [Package("=virtual/ffmpeg-0.10.3:0/53")], ["virtual/ffmpeg-0.10.3"] ),
58486 + ("virtual/ffmpeg:0/53", [Package("=virtual/ffmpeg-0.10.3:0/53")], ["virtual/ffmpeg-0.10.3"]),
58487 + ("virtual/ffmpeg:0/53=", [Package("=virtual/ffmpeg-0.10.3:0/53")], ["virtual/ffmpeg-0.10.3"]),
58488 + ("virtual/ffmpeg:0/52", [Package("=virtual/ffmpeg-0.10.3:0/53")], []),
58489 + ("virtual/ffmpeg:=", [Package("=virtual/ffmpeg-0.10.3:0/53")], ["virtual/ffmpeg-0.10.3"]),
58490 + ("virtual/ffmpeg:0=", [Package("=virtual/ffmpeg-0.10.3:0/53")], ["virtual/ffmpeg-0.10.3"]),
58491 + ("virtual/ffmpeg:*", [Package("=virtual/ffmpeg-0.10.3:0/53")], ["virtual/ffmpeg-0.10.3"]),
58492 + ("virtual/ffmpeg:0", [Package("=virtual/ffmpeg-0.10.3:0/53")], ["virtual/ffmpeg-0.10.3"]),
58493
58494 - ("sys-libs/db:4.8/4.8", [Package("=sys-libs/db-4.8.30:4.8")], ["sys-libs/db-4.8.30"] ),
58495 - ("sys-libs/db:4.8/4.8=", [Package("=sys-libs/db-4.8.30:4.8")], ["sys-libs/db-4.8.30"] ),
58496 - ("sys-libs/db:4.8=", [Package("=sys-libs/db-4.8.30:4.8")], ["sys-libs/db-4.8.30"] ),
58497 - ("sys-libs/db:4.8*", [Package("=sys-libs/db-4.8.30:4.8")], ["sys-libs/db-4.8.30"] ),
58498 - ("sys-libs/db:*", [Package("=sys-libs/db-4.8.30:4.8")], ["sys-libs/db-4.8.30"] ),
58499 - ("sys-libs/db:4.8/0", [Package("=sys-libs/db-4.8.30:4.8")], [] ),
58500 - ("sys-libs/db:4.8/0=", [Package("=sys-libs/db-4.8.30:4.8")], [] ),
58501 + ("sys-libs/db:4.8/4.8", [Package("=sys-libs/db-4.8.30:4.8")], ["sys-libs/db-4.8.30"]),
58502 + ("sys-libs/db:4.8/4.8=", [Package("=sys-libs/db-4.8.30:4.8")], ["sys-libs/db-4.8.30"]),
58503 + ("sys-libs/db:4.8=", [Package("=sys-libs/db-4.8.30:4.8")], ["sys-libs/db-4.8.30"]),
58504 + ("sys-libs/db:*", [Package("=sys-libs/db-4.8.30:4.8")], ["sys-libs/db-4.8.30"]),
58505 + ("sys-libs/db:4.8/0", [Package("=sys-libs/db-4.8.30:4.8")], []),
58506 + ("sys-libs/db:4.8/0=", [Package("=sys-libs/db-4.8.30:4.8")], []),
58507 )
58508
58509 for atom, cpv_list, expected_result in tests:
58510 result = []
58511 - for pkg in match_from_list( atom, cpv_list ):
58512 + for pkg in match_from_list(atom, cpv_list):
58513 if isinstance(pkg, Package):
58514 if pkg.repo:
58515 result.append(pkg.cpv + _repo_separator + pkg.repo)
58516 @@ -130,4 +134,4 @@ class Test_match_from_list(TestCase):
58517 result.append(pkg.cpv)
58518 else:
58519 result.append(pkg)
58520 - self.assertEqual( result, expected_result )
58521 + self.assertEqual(result, expected_result)
58522
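Beyond the reformat, this hunk renames slot_abi to sub_slot on both the Atom and _pkg_str sides, adds revision-wildcard rows such as "=cat/pkg-1-r1*", and drops the ':0*' and ':4.8*' rows in line with the isvalidatom change above. The plain-string behaviour the table asserts, using the match_from_list import shown in this file's header:

    from portage.dep import match_from_list

    # '=...-45*' matches the zero-padded version 045 ...
    assert match_from_list("=sys-apps/portage-45*", ["sys-apps/portage-045"]) == ["sys-apps/portage-045"]
    # ... and the new rows extend the same prefix rule to revisions: -r1* matches -r11.
    assert match_from_list("=cat/pkg-1-r1*", ["cat/pkg-1-r11"]) == ["cat/pkg-1-r11"]
    assert match_from_list("=cat/pkg-1-r1*", ["cat/pkg-1_alpha1"]) == []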
58523 diff --git a/pym/portage/tests/dep/test_paren_reduce.py b/pym/portage/tests/dep/test_paren_reduce.py
58524 index 9a147a0..3244652 100644
58525 --- a/pym/portage/tests/dep/test_paren_reduce.py
58526 +++ b/pym/portage/tests/dep/test_paren_reduce.py
58527 @@ -1,4 +1,4 @@
58528 -# Copyright 2010-2011 Gentoo Foundation
58529 +# Copyright 2010-2013 Gentoo Foundation
58530 # Distributed under the terms of the GNU General Public License v2
58531
58532 from portage.tests import TestCase
58533 @@ -10,30 +10,30 @@ class TestParenReduce(TestCase):
58534 def testParenReduce(self):
58535
58536 test_cases = (
58537 - ( "A", ["A"]),
58538 - ( "( A )", ["A"]),
58539 - ( "|| ( A B )", [ "||", ["A", "B"] ]),
58540 - ( "|| ( A || ( B C ) )", [ "||", ["A", "||", ["B", "C"]]]),
58541 - ( "|| ( A || ( B C D ) )", [ "||", ["A", "||", ["B", "C", "D"]] ]),
58542 - ( "|| ( A || ( B || ( C D ) E ) )", [ "||", ["A", "||", ["B", "||", ["C", "D"], "E"]] ]),
58543 - ( "a? ( A )", ["a?", ["A"]]),
58544 -
58545 - ( "( || ( ( ( A ) B ) ) )", ["A", "B"]),
58546 - ( "( || ( || ( ( A ) B ) ) )", [ "||", ["A", "B"] ]),
58547 - ( "|| ( A )", ["A"]),
58548 - ( "( || ( || ( || ( A ) foo? ( B ) ) ) )", [ "||", ["A", "foo?", ["B"] ]]),
58549 - ( "( || ( || ( bar? ( A ) || ( foo? ( B ) ) ) ) )", [ "||", ["bar?", ["A"], "foo?", ["B"] ]]),
58550 - ( "A || ( ) foo? ( ) B", ["A", "B"]),
58551 + ("A", ["A"]),
58552 + ("( A )", ["A"]),
58553 + ("|| ( A B )", ["||", ["A", "B"]]),
58554 + ("|| ( A || ( B C ) )", ["||", ["A", "||", ["B", "C"]]]),
58555 + ("|| ( A || ( B C D ) )", ["||", ["A", "||", ["B", "C", "D"]]]),
58556 + ("|| ( A || ( B || ( C D ) E ) )", ["||", ["A", "||", ["B", "||", ["C", "D"], "E"]]]),
58557 + ("a? ( A )", ["a?", ["A"]]),
58558
58559 - ( "|| ( A ) || ( B )", ["A", "B"]),
58560 - ( "foo? ( A ) foo? ( B )", ["foo?", ["A"], "foo?", ["B"]]),
58561 + ("( || ( ( ( A ) B ) ) )", ["A", "B"]),
58562 + ("( || ( || ( ( A ) B ) ) )", ["||", ["A", "B"]]),
58563 + ("|| ( A )", ["A"]),
58564 + ("( || ( || ( || ( A ) foo? ( B ) ) ) )", ["||", ["A", "foo?", ["B"]]]),
58565 + ("( || ( || ( bar? ( A ) || ( foo? ( B ) ) ) ) )", ["||", ["bar?", ["A"], "foo?", ["B"]]]),
58566 + ("A || ( ) foo? ( ) B", ["A", "B"]),
58567
58568 - ( "|| ( ( A B ) C )", [ "||", [ ["A", "B"], "C"] ]),
58569 - ( "|| ( ( A B ) ( C ) )", [ "||", [ ["A", "B"], "C"] ]),
58570 + ("|| ( A ) || ( B )", ["A", "B"]),
58571 + ("foo? ( A ) foo? ( B )", ["foo?", ["A"], "foo?", ["B"]]),
58572 +
58573 + ("|| ( ( A B ) C )", ["||", [["A", "B"], "C"]]),
58574 + ("|| ( ( A B ) ( C ) )", ["||", [["A", "B"], "C"]]),
58575 # test USE dep defaults for bug #354003
58576 - ( ">=dev-lang/php-5.2[pcre(+)]", [ ">=dev-lang/php-5.2[pcre(+)]" ]),
58577 + (">=dev-lang/php-5.2[pcre(+)]", [">=dev-lang/php-5.2[pcre(+)]"]),
58578 )
58579 -
58580 +
58581 test_cases_xfail = (
58582 "( A",
58583 "A )",
58584 @@ -47,20 +47,23 @@ class TestParenReduce(TestCase):
58585 "|| A B",
58586 "|| ( A B ) )",
58587 "|| || B C",
58588 -
58589 +
58590 "|| ( A B || )",
58591 -
58592 +
58593 "a? A",
58594 -
58595 - ( "( || ( || || ( A ) foo? ( B ) ) )"),
58596 - ( "( || ( || bar? ( A ) foo? ( B ) ) )"),
58597 +
58598 + "( || ( || || ( A ) foo? ( B ) ) )",
58599 + "( || ( || bar? ( A ) foo? ( B ) ) )",
58600 )
58601
58602 for dep_str, expected_result in test_cases:
58603 - self.assertEqual(paren_reduce(dep_str), expected_result,
58604 + self.assertEqual(paren_reduce(dep_str, _deprecation_warn=False),
58605 + expected_result,
58606 "input: '%s' result: %s != %s" % (dep_str,
58607 - paren_reduce(dep_str), expected_result))
58608 + paren_reduce(dep_str, _deprecation_warn=False),
58609 + expected_result))
58610
58611 for dep_str in test_cases_xfail:
58612 self.assertRaisesMsg(dep_str,
58613 - InvalidDependString, paren_reduce, dep_str)
58614 + InvalidDependString, paren_reduce, dep_str,
58615 + _deprecation_warn=False)
58616
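Here the assertions start passing _deprecation_warn=False (paren_reduce is deprecated in favour of use_reduce, so the suite silences the warning), and two xfail strings lose their redundant wrapping parentheses. One reduction from the table, using the same keyword the test now passes:

    from portage.dep import paren_reduce

    deps = "|| ( A || ( B C ) )"
    assert paren_reduce(deps, _deprecation_warn=False) == ["||", ["A", "||", ["B", "C"]]]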
58617 diff --git a/pym/portage/tests/dep/test_use_reduce.py b/pym/portage/tests/dep/test_use_reduce.py
58618 index 1618430..4f65567 100644
58619 --- a/pym/portage/tests/dep/test_use_reduce.py
58620 +++ b/pym/portage/tests/dep/test_use_reduce.py
58621 @@ -6,10 +6,10 @@ from portage.exception import InvalidDependString
58622 from portage.dep import Atom, use_reduce
58623
58624 class UseReduceTestCase(object):
58625 - def __init__(self, deparray, uselist=[], masklist=[], \
58626 - matchall=0, excludeall=[], is_src_uri=False, \
58627 - eapi="0", opconvert=False, flat=False, expected_result=None, \
58628 - is_valid_flag=None, token_class=None):
58629 + def __init__(self, deparray, uselist=[], masklist=[],
58630 + matchall=0, excludeall=[], is_src_uri=False,
58631 + eapi='0', opconvert=False, flat=False, expected_result=None,
58632 + is_valid_flag=None, token_class=None):
58633 self.deparray = deparray
58634 self.uselist = uselist
58635 self.masklist = masklist
58636 @@ -25,8 +25,8 @@ class UseReduceTestCase(object):
58637
58638 def run(self):
58639 try:
58640 - return use_reduce(self.deparray, self.uselist, self.masklist, \
58641 - self.matchall, self.excludeall, self.is_src_uri, self.eapi, \
58642 + return use_reduce(self.deparray, self.uselist, self.masklist,
58643 + self.matchall, self.excludeall, self.is_src_uri, self.eapi,
58644 self.opconvert, self.flat, self.is_valid_flag, self.token_class)
58645 except InvalidDependString as e:
58646 raise InvalidDependString("%s: %s" % (e, self.deparray))
58647 @@ -47,508 +47,507 @@ class UseReduce(TestCase):
58648 test_cases = (
58649 UseReduceTestCase(
58650 "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )",
58651 - uselist = ["a", "b", "c", "d"],
58652 - expected_result = ["A", "B"]
58653 + uselist=["a", "b", "c", "d"],
58654 + expected_result=["A", "B"]
58655 ),
58656 UseReduceTestCase(
58657 "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )",
58658 - uselist = ["a", "b", "c"],
58659 - expected_result = ["A", "B", "D"]
58660 + uselist=["a", "b", "c"],
58661 + expected_result=["A", "B", "D"]
58662 ),
58663 UseReduceTestCase(
58664 "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )",
58665 - uselist = ["b", "c"],
58666 - expected_result = ["B", "D"]
58667 + uselist=["b", "c"],
58668 + expected_result=["B", "D"]
58669 ),
58670
58671 UseReduceTestCase(
58672 "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )",
58673 - matchall = True,
58674 - expected_result = ["A", "B", "C", "D"]
58675 + matchall=True,
58676 + expected_result=["A", "B", "C", "D"]
58677 ),
58678 UseReduceTestCase(
58679 "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )",
58680 - masklist = ["a", "c"],
58681 - expected_result = ["C", "D"]
58682 + masklist=["a", "c"],
58683 + expected_result=["C", "D"]
58684 ),
58685 UseReduceTestCase(
58686 "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )",
58687 - matchall = True,
58688 - masklist = ["a", "c"],
58689 - expected_result = ["B", "C", "D"]
58690 + matchall=True,
58691 + masklist=["a", "c"],
58692 + expected_result=["B", "C", "D"]
58693 ),
58694 UseReduceTestCase(
58695 "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )",
58696 - uselist = ["a", "b"],
58697 - masklist = ["a", "c"],
58698 - expected_result = ["B", "C", "D"]
58699 + uselist=["a", "b"],
58700 + masklist=["a", "c"],
58701 + expected_result=["B", "C", "D"]
58702 ),
58703 UseReduceTestCase(
58704 "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )",
58705 - excludeall = ["a", "c"],
58706 - expected_result = ["D"]
58707 + excludeall=["a", "c"],
58708 + expected_result=["D"]
58709 ),
58710 UseReduceTestCase(
58711 "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )",
58712 - uselist = ["b"],
58713 - excludeall = ["a", "c"],
58714 - expected_result = ["B", "D"]
58715 + uselist=["b"],
58716 + excludeall=["a", "c"],
58717 + expected_result=["B", "D"]
58718 ),
58719 UseReduceTestCase(
58720 "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )",
58721 - matchall = True,
58722 - excludeall = ["a", "c"],
58723 - expected_result = ["A", "B", "D"]
58724 + matchall=True,
58725 + excludeall=["a", "c"],
58726 + expected_result=["A", "B", "D"]
58727 ),
58728 UseReduceTestCase(
58729 "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )",
58730 - matchall = True,
58731 - excludeall = ["a", "c"],
58732 - masklist = ["b"],
58733 - expected_result = ["A", "D"]
58734 + matchall=True,
58735 + excludeall=["a", "c"],
58736 + masklist=["b"],
58737 + expected_result=["A", "D"]
58738 ),
58739
58740 -
58741 UseReduceTestCase(
58742 "a? ( b? ( AB ) )",
58743 - uselist = ["a", "b"],
58744 - expected_result = ["AB"]
58745 + uselist=["a", "b"],
58746 + expected_result=["AB"]
58747 ),
58748 UseReduceTestCase(
58749 "a? ( b? ( AB ) C )",
58750 - uselist = ["a"],
58751 - expected_result = ["C"]
58752 + uselist=["a"],
58753 + expected_result=["C"]
58754 ),
58755 UseReduceTestCase(
58756 "a? ( b? ( || ( AB CD ) ) )",
58757 - uselist = ["a", "b"],
58758 - expected_result = ["||", ["AB", "CD"]]
58759 + uselist=["a", "b"],
58760 + expected_result=["||", ["AB", "CD"]]
58761 ),
58762 UseReduceTestCase(
58763 "|| ( || ( a? ( A ) b? ( B ) ) )",
58764 - uselist = ["a", "b"],
58765 - expected_result = ["||", ["A", "B"]]
58766 + uselist=["a", "b"],
58767 + expected_result=["||", ["A", "B"]]
58768 ),
58769 UseReduceTestCase(
58770 "|| ( || ( a? ( A ) b? ( B ) ) )",
58771 - uselist = ["a"],
58772 - expected_result = ["A"]
58773 + uselist=["a"],
58774 + expected_result=["A"]
58775 ),
58776 UseReduceTestCase(
58777 "|| ( || ( a? ( A ) b? ( B ) ) )",
58778 - uselist = [],
58779 - expected_result = []
58780 + uselist=[],
58781 + expected_result=[]
58782 ),
58783 UseReduceTestCase(
58784 "|| ( || ( a? ( || ( A c? ( C ) ) ) b? ( B ) ) )",
58785 - uselist = [],
58786 - expected_result = []
58787 + uselist=[],
58788 + expected_result=[]
58789 ),
58790 UseReduceTestCase(
58791 "|| ( || ( a? ( || ( A c? ( C ) ) ) b? ( B ) ) )",
58792 - uselist = ["a"],
58793 - expected_result = ["A"]
58794 + uselist=["a"],
58795 + expected_result=["A"]
58796 ),
58797 UseReduceTestCase(
58798 "|| ( || ( a? ( || ( A c? ( C ) ) ) b? ( B ) ) )",
58799 - uselist = ["b"],
58800 - expected_result = ["B"]
58801 + uselist=["b"],
58802 + expected_result=["B"]
58803 ),
58804 UseReduceTestCase(
58805 "|| ( || ( a? ( || ( A c? ( C ) ) ) b? ( B ) ) )",
58806 - uselist = ["c"],
58807 - expected_result = []
58808 + uselist=["c"],
58809 + expected_result=[]
58810 ),
58811 UseReduceTestCase(
58812 "|| ( || ( a? ( || ( A c? ( C ) ) ) b? ( B ) ) )",
58813 - uselist = ["a", "c"],
58814 - expected_result = ["||", [ "A", "C"]]
58815 + uselist=["a", "c"],
58816 + expected_result=["||", ["A", "C"]]
58817 ),
58818 -
58819 - #paren_reduce tests
58820 +
58821 + # paren_reduce tests
58822 UseReduceTestCase(
58823 "A",
58824 - expected_result = ["A"]),
58825 + expected_result=["A"]),
58826 UseReduceTestCase(
58827 "( A )",
58828 - expected_result = ["A"]),
58829 + expected_result=["A"]),
58830 UseReduceTestCase(
58831 "|| ( A B )",
58832 - expected_result = [ "||", ["A", "B"] ]),
58833 + expected_result=["||", ["A", "B"]]),
58834 UseReduceTestCase(
58835 "|| ( ( A B ) C )",
58836 - expected_result = [ "||", [ ["A", "B"], "C"] ]),
58837 + expected_result=["||", [["A", "B"], "C"]]),
58838 UseReduceTestCase(
58839 "|| ( ( A B ) ( C ) )",
58840 - expected_result = [ "||", [ ["A", "B"], "C"] ]),
58841 + expected_result=["||", [["A", "B"], "C"]]),
58842 UseReduceTestCase(
58843 "|| ( A || ( B C ) )",
58844 - expected_result = [ "||", ["A", "B", "C"]]),
58845 + expected_result=["||", ["A", "B", "C"]]),
58846 UseReduceTestCase(
58847 "|| ( A || ( B C D ) )",
58848 - expected_result = [ "||", ["A", "B", "C", "D"] ]),
58849 + expected_result=["||", ["A", "B", "C", "D"]]),
58850 UseReduceTestCase(
58851 "|| ( A || ( B || ( C D ) E ) )",
58852 - expected_result = [ "||", ["A", "B", "C", "D", "E"] ]),
58853 + expected_result=["||", ["A", "B", "C", "D", "E"]]),
58854 UseReduceTestCase(
58855 "( || ( ( ( A ) B ) ) )",
58856 - expected_result = ["A", "B"] ),
58857 + expected_result=["A", "B"]),
58858 UseReduceTestCase(
58859 "( || ( || ( ( A ) B ) ) )",
58860 - expected_result = [ "||", ["A", "B"] ]),
58861 + expected_result=["||", ["A", "B"]]),
58862 UseReduceTestCase(
58863 "( || ( || ( ( A ) B ) ) )",
58864 - expected_result = [ "||", ["A", "B"] ]),
58865 + expected_result=["||", ["A", "B"]]),
58866 UseReduceTestCase(
58867 "|| ( A )",
58868 - expected_result = ["A"]),
58869 + expected_result=["A"]),
58870 UseReduceTestCase(
58871 "( || ( || ( || ( A ) foo? ( B ) ) ) )",
58872 - expected_result = ["A"]),
58873 + expected_result=["A"]),
58874 UseReduceTestCase(
58875 "( || ( || ( || ( A ) foo? ( B ) ) ) )",
58876 - uselist = ["foo"],
58877 - expected_result = [ "||", ["A", "B"] ]),
58878 + uselist=["foo"],
58879 + expected_result=["||", ["A", "B"]]),
58880 UseReduceTestCase(
58881 "( || ( || ( bar? ( A ) || ( foo? ( B ) ) ) ) )",
58882 - expected_result = []),
58883 + expected_result=[]),
58884 UseReduceTestCase(
58885 "( || ( || ( bar? ( A ) || ( foo? ( B ) ) ) ) )",
58886 - uselist = ["foo", "bar"],
58887 - expected_result = [ "||", [ "A", "B" ] ]),
58888 + uselist=["foo", "bar"],
58889 + expected_result=["||", ["A", "B"]]),
58890 UseReduceTestCase(
58891 "A || ( bar? ( C ) ) foo? ( bar? ( C ) ) B",
58892 - expected_result = ["A", "B"]),
58893 + expected_result=["A", "B"]),
58894 UseReduceTestCase(
58895 "|| ( A ) || ( B )",
58896 - expected_result = ["A", "B"]),
58897 + expected_result=["A", "B"]),
58898 UseReduceTestCase(
58899 "foo? ( A ) foo? ( B )",
58900 - expected_result = []),
58901 + expected_result=[]),
58902 UseReduceTestCase(
58903 "foo? ( A ) foo? ( B )",
58904 - uselist = ["foo"],
58905 - expected_result = ["A", "B"]),
58906 + uselist=["foo"],
58907 + expected_result=["A", "B"]),
58908 UseReduceTestCase(
58909 "|| ( A B ) C",
58910 - expected_result = ['||', ['A', 'B'], 'C']),
58911 + expected_result=['||', ['A', 'B'], 'C']),
58912 UseReduceTestCase(
58913 "A || ( B C )",
58914 - expected_result = ['A', '||', ['B', 'C']]),
58915 + expected_result=['A', '||', ['B', 'C']]),
58916
58917 - #SRC_URI stuff
58918 + # SRC_URI stuff
58919 UseReduceTestCase(
58920 "http://foo/bar -> blah.tbz2",
58921 - is_src_uri = True,
58922 - eapi = EAPI_WITH_SRC_URI_ARROWS,
58923 - expected_result = ["http://foo/bar", "->", "blah.tbz2"]),
58924 + is_src_uri=True,
58925 + eapi=EAPI_WITH_SRC_URI_ARROWS,
58926 + expected_result=["http://foo/bar", "->", "blah.tbz2"]),
58927 UseReduceTestCase(
58928 "foo? ( http://foo/bar -> blah.tbz2 )",
58929 - uselist = [],
58930 - is_src_uri = True,
58931 - eapi = EAPI_WITH_SRC_URI_ARROWS,
58932 - expected_result = []),
58933 + uselist=[],
58934 + is_src_uri=True,
58935 + eapi=EAPI_WITH_SRC_URI_ARROWS,
58936 + expected_result=[]),
58937 UseReduceTestCase(
58938 "foo? ( http://foo/bar -> blah.tbz2 )",
58939 - uselist = ["foo"],
58940 - is_src_uri = True,
58941 - eapi = EAPI_WITH_SRC_URI_ARROWS,
58942 - expected_result = ["http://foo/bar", "->", "blah.tbz2"]),
58943 + uselist=["foo"],
58944 + is_src_uri=True,
58945 + eapi=EAPI_WITH_SRC_URI_ARROWS,
58946 + expected_result=["http://foo/bar", "->", "blah.tbz2"]),
58947 UseReduceTestCase(
58948 "http://foo/bar -> bar.tbz2 foo? ( ftp://foo/a )",
58949 - uselist = [],
58950 - is_src_uri = True,
58951 - eapi = EAPI_WITH_SRC_URI_ARROWS,
58952 - expected_result = ["http://foo/bar", "->", "bar.tbz2"]),
58953 + uselist=[],
58954 + is_src_uri=True,
58955 + eapi=EAPI_WITH_SRC_URI_ARROWS,
58956 + expected_result=["http://foo/bar", "->", "bar.tbz2"]),
58957 UseReduceTestCase(
58958 "http://foo/bar -> bar.tbz2 foo? ( ftp://foo/a )",
58959 - uselist = ["foo"],
58960 - is_src_uri = True,
58961 - eapi = EAPI_WITH_SRC_URI_ARROWS,
58962 - expected_result = ["http://foo/bar", "->", "bar.tbz2", "ftp://foo/a"]),
58963 + uselist=["foo"],
58964 + is_src_uri=True,
58965 + eapi=EAPI_WITH_SRC_URI_ARROWS,
58966 + expected_result=["http://foo/bar", "->", "bar.tbz2", "ftp://foo/a"]),
58967 UseReduceTestCase(
58968 "http://foo.com/foo http://foo/bar -> blah.tbz2",
58969 - uselist = ["foo"],
58970 - is_src_uri = True,
58971 - eapi = EAPI_WITH_SRC_URI_ARROWS,
58972 - expected_result = ["http://foo.com/foo", "http://foo/bar", "->", "blah.tbz2"]),
58973 + uselist=["foo"],
58974 + is_src_uri=True,
58975 + eapi=EAPI_WITH_SRC_URI_ARROWS,
58976 + expected_result=["http://foo.com/foo", "http://foo/bar", "->", "blah.tbz2"]),
58977
58978 - #opconvert tests
58979 + # opconvert tests
58980 UseReduceTestCase(
58981 "A",
58982 - opconvert = True,
58983 - expected_result = ["A"]),
58984 + opconvert=True,
58985 + expected_result=["A"]),
58986 UseReduceTestCase(
58987 "( A )",
58988 - opconvert = True,
58989 - expected_result = ["A"]),
58990 + opconvert=True,
58991 + expected_result=["A"]),
58992 UseReduceTestCase(
58993 "|| ( A B )",
58994 - opconvert = True,
58995 - expected_result = [['||', 'A', 'B']]),
58996 + opconvert=True,
58997 + expected_result=[['||', 'A', 'B']]),
58998 UseReduceTestCase(
58999 "|| ( ( A B ) C )",
59000 - opconvert = True,
59001 - expected_result = [['||', ['A', 'B'], 'C']]),
59002 + opconvert=True,
59003 + expected_result=[['||', ['A', 'B'], 'C']]),
59004 UseReduceTestCase(
59005 "|| ( A || ( B C ) )",
59006 - opconvert = True,
59007 - expected_result = [['||', 'A', 'B', 'C']]),
59008 + opconvert=True,
59009 + expected_result=[['||', 'A', 'B', 'C']]),
59010 UseReduceTestCase(
59011 "|| ( A || ( B C D ) )",
59012 - opconvert = True,
59013 - expected_result = [['||', 'A', 'B', 'C', 'D']]),
59014 + opconvert=True,
59015 + expected_result=[['||', 'A', 'B', 'C', 'D']]),
59016 UseReduceTestCase(
59017 "|| ( A || ( B || ( C D ) E ) )",
59018 - expected_result = [ "||", ["A", "B", "C", "D", "E"] ]),
59019 + expected_result=["||", ["A", "B", "C", "D", "E"]]),
59020 UseReduceTestCase(
59021 "( || ( ( ( A ) B ) ) )",
59022 - opconvert = True,
59023 - expected_result = [ "A", "B" ] ),
59024 + opconvert=True,
59025 + expected_result=['A', 'B']),
59026 UseReduceTestCase(
59027 "( || ( || ( ( A ) B ) ) )",
59028 - opconvert = True,
59029 - expected_result = [['||', 'A', 'B']]),
59030 + opconvert=True,
59031 + expected_result=[['||', 'A', 'B']]),
59032 UseReduceTestCase(
59033 "|| ( A B ) C",
59034 - opconvert = True,
59035 - expected_result = [['||', 'A', 'B'], 'C']),
59036 + opconvert=True,
59037 + expected_result=[['||', 'A', 'B'], 'C']),
59038 UseReduceTestCase(
59039 "A || ( B C )",
59040 - opconvert = True,
59041 - expected_result = ['A', ['||', 'B', 'C']]),
59042 + opconvert=True,
59043 + expected_result=['A', ['||', 'B', 'C']]),
59044 UseReduceTestCase(
59045 "A foo? ( || ( B || ( bar? ( || ( C D E ) ) !bar? ( F ) ) ) ) G",
59046 - uselist = ["foo", "bar"],
59047 - opconvert = True,
59048 - expected_result = ['A', ['||', 'B', 'C', 'D', 'E'], 'G']),
59049 + uselist=["foo", "bar"],
59050 + opconvert=True,
59051 + expected_result=['A', ['||', 'B', 'C', 'D', 'E'], 'G']),
59052 UseReduceTestCase(
59053 "A foo? ( || ( B || ( bar? ( || ( C D E ) ) !bar? ( F ) ) ) ) G",
59054 - uselist = ["foo", "bar"],
59055 - opconvert = False,
59056 - expected_result = ['A', '||', ['B', 'C', 'D', 'E'], 'G']),
59057 + uselist=["foo", "bar"],
59058 + opconvert=False,
59059 + expected_result=['A', '||', ['B', 'C', 'D', 'E'], 'G']),
59060
59061 UseReduceTestCase(
59062 "|| ( A )",
59063 - opconvert = True,
59064 - expected_result = ["A"]),
59065 + opconvert=True,
59066 + expected_result=["A"]),
59067 UseReduceTestCase(
59068 "( || ( || ( || ( A ) foo? ( B ) ) ) )",
59069 - expected_result = ["A"]),
59070 + expected_result=["A"]),
59071 UseReduceTestCase(
59072 "( || ( || ( || ( A ) foo? ( B ) ) ) )",
59073 - uselist = ["foo"],
59074 - opconvert = True,
59075 - expected_result = [['||', 'A', 'B']]),
59076 + uselist=["foo"],
59077 + opconvert=True,
59078 + expected_result=[['||', 'A', 'B']]),
59079 UseReduceTestCase(
59080 "( || ( || ( bar? ( A ) || ( foo? ( B ) ) ) ) )",
59081 - opconvert = True,
59082 - expected_result = []),
59083 + opconvert=True,
59084 + expected_result=[]),
59085 UseReduceTestCase(
59086 "( || ( || ( bar? ( A ) || ( foo? ( B ) ) ) ) )",
59087 - uselist = ["foo", "bar"],
59088 - opconvert = True,
59089 - expected_result = [['||', 'A', 'B']]),
59090 + uselist=["foo", "bar"],
59091 + opconvert=True,
59092 + expected_result=[['||', 'A', 'B']]),
59093 UseReduceTestCase(
59094 "A || ( bar? ( C ) ) foo? ( bar? ( C ) ) B",
59095 - opconvert = True,
59096 - expected_result = ["A", "B"]),
59097 + opconvert=True,
59098 + expected_result=["A", "B"]),
59099 UseReduceTestCase(
59100 "|| ( A ) || ( B )",
59101 - opconvert = True,
59102 - expected_result = ["A", "B"]),
59103 + opconvert=True,
59104 + expected_result=["A", "B"]),
59105 UseReduceTestCase(
59106 "foo? ( A ) foo? ( B )",
59107 - opconvert = True,
59108 - expected_result = []),
59109 + opconvert=True,
59110 + expected_result=[]),
59111 UseReduceTestCase(
59112 "foo? ( A ) foo? ( B )",
59113 - uselist = ["foo"],
59114 - opconvert = True,
59115 - expected_result = ["A", "B"]),
59116 + uselist=["foo"],
59117 + opconvert=True,
59118 + expected_result=["A", "B"]),
59119 UseReduceTestCase(
59120 "|| ( foo? ( || ( A B ) ) )",
59121 - uselist = ["foo"],
59122 - opconvert = True,
59123 - expected_result = [['||', 'A', 'B']]),
59124 + uselist=["foo"],
59125 + opconvert=True,
59126 + expected_result=[['||', 'A', 'B']]),
59127
59128 UseReduceTestCase(
59129 "|| ( ( A B ) foo? ( || ( C D ) ) )",
59130 - uselist = ["foo"],
59131 - opconvert = True,
59132 - expected_result = [['||', ['A', 'B'], 'C', 'D']]),
59133 + uselist=["foo"],
59134 + opconvert=True,
59135 + expected_result=[['||', ['A', 'B'], 'C', 'D']]),
59136
59137 UseReduceTestCase(
59138 "|| ( ( A B ) foo? ( || ( C D ) ) )",
59139 - uselist = ["foo"],
59140 - opconvert = False,
59141 - expected_result = ['||', [['A', 'B'], 'C', 'D']]),
59142 + uselist=["foo"],
59143 + opconvert=False,
59144 + expected_result=['||', [['A', 'B'], 'C', 'D']]),
59145
59146 UseReduceTestCase(
59147 "|| ( ( A B ) || ( C D ) )",
59148 - expected_result = ['||', [['A', 'B'], 'C', 'D']]),
59149 + expected_result=['||', [['A', 'B'], 'C', 'D']]),
59150
59151 UseReduceTestCase(
59152 "|| ( ( A B ) || ( C D || ( E ( F G ) || ( H ) ) ) )",
59153 - expected_result = ['||', [['A', 'B'], 'C', 'D', 'E', ['F', 'G'], 'H']]),
59154 + expected_result=['||', [['A', 'B'], 'C', 'D', 'E', ['F', 'G'], 'H']]),
59155
59156 UseReduceTestCase(
59157 "|| ( ( A B ) || ( C D || ( E ( F G ) || ( H ) ) ) )",
59158 - opconvert = True,
59159 - expected_result = [['||', ['A', 'B'], 'C', 'D', 'E', ['F', 'G'], 'H']]),
59160 + opconvert=True,
59161 + expected_result=[['||', ['A', 'B'], 'C', 'D', 'E', ['F', 'G'], 'H']]),
59162
59163 UseReduceTestCase(
59164 "|| ( foo? ( A B ) )",
59165 - uselist = ["foo"],
59166 - expected_result = ['A', 'B']),
59167 + uselist=["foo"],
59168 + expected_result=['A', 'B']),
59169
59170 UseReduceTestCase(
59171 "|| ( || ( foo? ( A B ) ) )",
59172 - uselist = ["foo"],
59173 - expected_result = ['A', 'B']),
59174 + uselist=["foo"],
59175 + expected_result=['A', 'B']),
59176
59177 UseReduceTestCase(
59178 "|| ( || ( || ( a? ( b? ( c? ( || ( || ( || ( d? ( e? ( f? ( A B ) ) ) ) ) ) ) ) ) ) ) )",
59179 - uselist = ["a", "b", "c", "d", "e", "f"],
59180 - expected_result = ['A', 'B']),
59181 + uselist=["a", "b", "c", "d", "e", "f"],
59182 + expected_result=['A', 'B']),
59183
59184 UseReduceTestCase(
59185 "|| ( || ( ( || ( a? ( ( b? ( c? ( || ( || ( || ( ( d? ( e? ( f? ( A B ) ) ) ) ) ) ) ) ) ) ) ) ) ) )",
59186 - uselist = ["a", "b", "c", "d", "e", "f"],
59187 - expected_result = ['A', 'B']),
59188 + uselist=["a", "b", "c", "d", "e", "f"],
59189 + expected_result=['A', 'B']),
59190
59191 UseReduceTestCase(
59192 "|| ( ( A ( || ( B ) ) ) )",
59193 - expected_result = ['A', 'B']),
59194 + expected_result=['A', 'B']),
59195
59196 UseReduceTestCase(
59197 "|| ( ( A B ) || ( foo? ( bar? ( ( C D || ( baz? ( E ) ( F G ) || ( H ) ) ) ) ) ) )",
59198 - uselist = ["foo", "bar", "baz"],
59199 - expected_result = ['||', [['A', 'B'], ['C', 'D', '||', ['E', ['F', 'G'], 'H']]]]),
59200 + uselist=["foo", "bar", "baz"],
59201 + expected_result=['||', [['A', 'B'], ['C', 'D', '||', ['E', ['F', 'G'], 'H']]]]),
59202
59203 UseReduceTestCase(
59204 "|| ( ( A B ) || ( foo? ( bar? ( ( C D || ( baz? ( E ) ( F G ) || ( H ) ) ) ) ) ) )",
59205 - uselist = ["foo", "bar", "baz"],
59206 - opconvert = True,
59207 - expected_result = [['||', ['A', 'B'], ['C', 'D', ['||', 'E', ['F', 'G'], 'H']]]]),
59208 + uselist=["foo", "bar", "baz"],
59209 + opconvert=True,
59210 + expected_result=[['||', ['A', 'B'], ['C', 'D', ['||', 'E', ['F', 'G'], 'H']]]]),
59211
59212 UseReduceTestCase(
59213 "|| ( foo? ( A B ) )",
59214 - uselist = ["foo"],
59215 + uselist=["foo"],
59216 opconvert=True,
59217 - expected_result = ['A', 'B']),
59218 + expected_result=['A', 'B']),
59219
59220 UseReduceTestCase(
59221 "|| ( || ( foo? ( A B ) ) )",
59222 - uselist = ["foo"],
59223 + uselist=["foo"],
59224 opconvert=True,
59225 - expected_result = ['A', 'B']),
59226 + expected_result=['A', 'B']),
59227
59228 UseReduceTestCase(
59229 "|| ( || ( || ( a? ( b? ( c? ( || ( || ( || ( d? ( e? ( f? ( A B ) ) ) ) ) ) ) ) ) ) ) )",
59230 - uselist = ["a", "b", "c", "d", "e", "f"],
59231 + uselist=["a", "b", "c", "d", "e", "f"],
59232 opconvert=True,
59233 - expected_result = ['A', 'B']),
59234 + expected_result=['A', 'B']),
59235
59236 - #flat test
59237 + # flat test
59238 UseReduceTestCase(
59239 "A",
59240 - flat = True,
59241 - expected_result = ["A"]),
59242 + flat=True,
59243 + expected_result=["A"]),
59244 UseReduceTestCase(
59245 "( A )",
59246 - flat = True,
59247 - expected_result = ["A"]),
59248 + flat=True,
59249 + expected_result=["A"]),
59250 UseReduceTestCase(
59251 "|| ( A B )",
59252 - flat = True,
59253 - expected_result = [ "||", "A", "B" ] ),
59254 + flat=True,
59255 + expected_result=["||", "A", "B"]),
59256 UseReduceTestCase(
59257 "|| ( A || ( B C ) )",
59258 - flat = True,
59259 - expected_result = [ "||", "A", "||", "B", "C" ]),
59260 + flat=True,
59261 + expected_result=["||", "A", "||", "B", "C"]),
59262 UseReduceTestCase(
59263 "|| ( A || ( B C D ) )",
59264 - flat = True,
59265 - expected_result = [ "||", "A", "||", "B", "C", "D" ]),
59266 + flat=True,
59267 + expected_result=["||", "A", "||", "B", "C", "D"]),
59268 UseReduceTestCase(
59269 "|| ( A || ( B || ( C D ) E ) )",
59270 - flat = True,
59271 - expected_result = [ "||", "A", "||", "B", "||", "C", "D", "E" ]),
59272 + flat=True,
59273 + expected_result=["||", "A", "||", "B", "||", "C", "D", "E"]),
59274 UseReduceTestCase(
59275 "( || ( ( ( A ) B ) ) )",
59276 - flat = True,
59277 - expected_result = [ "||", "A", "B"] ),
59278 + flat=True,
59279 + expected_result=["||", "A", "B"]),
59280 UseReduceTestCase(
59281 "( || ( || ( ( A ) B ) ) )",
59282 - flat = True,
59283 - expected_result = [ "||", "||", "A", "B" ]),
59284 + flat=True,
59285 + expected_result=["||", "||", "A", "B"]),
59286 UseReduceTestCase(
59287 "( || ( || ( ( A ) B ) ) )",
59288 - flat = True,
59289 - expected_result = [ "||", "||", "A", "B" ]),
59290 + flat=True,
59291 + expected_result=["||", "||", "A", "B"]),
59292 UseReduceTestCase(
59293 "|| ( A )",
59294 - flat = True,
59295 - expected_result = ["||", "A"]),
59296 + flat=True,
59297 + expected_result=["||", "A"]),
59298 UseReduceTestCase(
59299 "( || ( || ( || ( A ) foo? ( B ) ) ) )",
59300 - expected_result = ["A"]),
59301 + expected_result=["A"]),
59302 UseReduceTestCase(
59303 "( || ( || ( || ( A ) foo? ( B ) ) ) )",
59304 - uselist = ["foo"],
59305 - flat = True,
59306 - expected_result = [ "||", "||","||", "A", "B" ]),
59307 + uselist=["foo"],
59308 + flat=True,
59309 + expected_result=["||", "||", "||", "A", "B"]),
59310 UseReduceTestCase(
59311 "( || ( || ( bar? ( A ) || ( foo? ( B ) ) ) ) )",
59312 - flat = True,
59313 - expected_result = ["||", "||","||"]),
59314 + flat=True,
59315 + expected_result=["||", "||", "||"]),
59316 UseReduceTestCase(
59317 "( || ( || ( bar? ( A ) || ( foo? ( B ) ) ) ) )",
59318 - uselist = ["foo", "bar"],
59319 - flat = True,
59320 - expected_result = [ "||", "||", "A", "||", "B" ]),
59321 + uselist=["foo", "bar"],
59322 + flat=True,
59323 + expected_result=["||", "||", "A", "||", "B"]),
59324 UseReduceTestCase(
59325 "A || ( bar? ( C ) ) foo? ( bar? ( C ) ) B",
59326 - flat = True,
59327 - expected_result = ["A", "||", "B"]),
59328 + flat=True,
59329 + expected_result=["A", "||", "B"]),
59330 UseReduceTestCase(
59331 "|| ( A ) || ( B )",
59332 - flat = True,
59333 - expected_result = ["||", "A", "||", "B"]),
59334 + flat=True,
59335 + expected_result=["||", "A", "||", "B"]),
59336 UseReduceTestCase(
59337 "foo? ( A ) foo? ( B )",
59338 - flat = True,
59339 - expected_result = []),
59340 + flat=True,
59341 + expected_result=[]),
59342 UseReduceTestCase(
59343 "foo? ( A ) foo? ( B )",
59344 - uselist = ["foo"],
59345 - flat = True,
59346 - expected_result = ["A", "B"]),
59347 + uselist=["foo"],
59348 + flat=True,
59349 + expected_result=["A", "B"]),
59350
59351 - #use flag validation
59352 + # use flag validation
59353 UseReduceTestCase(
59354 "foo? ( A )",
59355 - uselist = ["foo"],
59356 - is_valid_flag = self.always_true,
59357 - expected_result = ["A"]),
59358 + uselist=["foo"],
59359 + is_valid_flag=self.always_true,
59360 + expected_result=["A"]),
59361 UseReduceTestCase(
59362 "foo? ( A )",
59363 - is_valid_flag = self.always_true,
59364 - expected_result = []),
59365 + is_valid_flag=self.always_true,
59366 + expected_result=[]),
59367
59368 - #token_class
59369 + # token_class
59370 UseReduceTestCase(
59371 "foo? ( dev-libs/A )",
59372 - uselist = ["foo"],
59373 + uselist=["foo"],
59374 token_class=Atom,
59375 - expected_result = ["dev-libs/A"]),
59376 + expected_result=["dev-libs/A"]),
59377 UseReduceTestCase(
59378 "foo? ( dev-libs/A )",
59379 token_class=Atom,
59380 - expected_result = []),
59381 + expected_result=[]),
59382 )
59383 -
59384 +
59385 test_cases_xfail = (
59386 UseReduceTestCase("? ( A )"),
59387 UseReduceTestCase("!? ( A )"),
59388 @@ -571,44 +570,44 @@ class UseReduce(TestCase):
59389 UseReduceTestCase("|| ( )"),
59390 UseReduceTestCase("foo? ( )"),
59391
59392 - #SRC_URI stuff
59393 - UseReduceTestCase("http://foo/bar -> blah.tbz2", is_src_uri = True, eapi = EAPI_WITHOUT_SRC_URI_ARROWS),
59394 - UseReduceTestCase("|| ( http://foo/bar -> blah.tbz2 )", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS),
59395 - UseReduceTestCase("http://foo/bar -> foo? ( ftp://foo/a )", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS),
59396 - UseReduceTestCase("http://foo/bar blah.tbz2 ->", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS),
59397 - UseReduceTestCase("-> http://foo/bar blah.tbz2 )", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS),
59398 - UseReduceTestCase("http://foo/bar ->", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS),
59399 - UseReduceTestCase("http://foo/bar -> foo? ( http://foo.com/foo )", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS),
59400 - UseReduceTestCase("foo? ( http://foo/bar -> ) blah.tbz2", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS),
59401 - UseReduceTestCase("http://foo/bar -> foo/blah.tbz2", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS),
59402 - UseReduceTestCase("http://foo/bar -> -> bar.tbz2 foo? ( ftp://foo/a )", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS),
59403 -
59404 - UseReduceTestCase("http://foo/bar -> bar.tbz2 foo? ( ftp://foo/a )", is_src_uri = False, eapi = EAPI_WITH_SRC_URI_ARROWS),
59405 + # SRC_URI stuff
59406 + UseReduceTestCase("http://foo/bar -> blah.tbz2", is_src_uri=True, eapi=EAPI_WITHOUT_SRC_URI_ARROWS),
59407 + UseReduceTestCase("|| ( http://foo/bar -> blah.tbz2 )", is_src_uri=True, eapi=EAPI_WITH_SRC_URI_ARROWS),
59408 + UseReduceTestCase("http://foo/bar -> foo? ( ftp://foo/a )", is_src_uri=True, eapi=EAPI_WITH_SRC_URI_ARROWS),
59409 + UseReduceTestCase("http://foo/bar blah.tbz2 ->", is_src_uri=True, eapi=EAPI_WITH_SRC_URI_ARROWS),
59410 + UseReduceTestCase("-> http://foo/bar blah.tbz2 )", is_src_uri=True, eapi=EAPI_WITH_SRC_URI_ARROWS),
59411 + UseReduceTestCase("http://foo/bar ->", is_src_uri=True, eapi=EAPI_WITH_SRC_URI_ARROWS),
59412 + UseReduceTestCase("http://foo/bar -> foo? ( http://foo.com/foo )", is_src_uri=True, eapi=EAPI_WITH_SRC_URI_ARROWS),
59413 + UseReduceTestCase("foo? ( http://foo/bar -> ) blah.tbz2", is_src_uri=True, eapi=EAPI_WITH_SRC_URI_ARROWS),
59414 + UseReduceTestCase("http://foo/bar -> foo/blah.tbz2", is_src_uri=True, eapi=EAPI_WITH_SRC_URI_ARROWS),
59415 + UseReduceTestCase("http://foo/bar -> -> bar.tbz2 foo? ( ftp://foo/a )", is_src_uri=True, eapi=EAPI_WITH_SRC_URI_ARROWS),
59416 +
59417 + UseReduceTestCase("http://foo/bar -> bar.tbz2 foo? ( ftp://foo/a )", is_src_uri=False, eapi=EAPI_WITH_SRC_URI_ARROWS),
59418
59419 UseReduceTestCase(
59420 "A",
59421 - opconvert = True,
59422 - flat = True),
59423 + opconvert=True,
59424 + flat=True),
59425
59426 - #use flag validation
59427 + # use flag validation
59428 UseReduceTestCase("1.0? ( A )"),
59429 UseReduceTestCase("!1.0? ( A )"),
59430 UseReduceTestCase("!? ( A )"),
59431 UseReduceTestCase("!?? ( A )"),
59432 UseReduceTestCase(
59433 "foo? ( A )",
59434 - is_valid_flag = self.always_false,
59435 + is_valid_flag=self.always_false,
59436 ),
59437 UseReduceTestCase(
59438 "foo? ( A )",
59439 - uselist = ["foo"],
59440 - is_valid_flag = self.always_false,
59441 + uselist=["foo"],
59442 + is_valid_flag=self.always_false,
59443 ),
59444
59445 - #token_class
59446 + # token_class
59447 UseReduceTestCase(
59448 "foo? ( A )",
59449 - uselist = ["foo"],
59450 + uselist=["foo"],
59451 token_class=Atom),
59452 UseReduceTestCase(
59453 "A(B",
59454
59455 diff --git a/pym/portage/tests/ebuild/test_config.py b/pym/portage/tests/ebuild/test_config.py
59456 index 63cb99d..08e0a5d 100644
59457 --- a/pym/portage/tests/ebuild/test_config.py
59458 +++ b/pym/portage/tests/ebuild/test_config.py
59459 @@ -1,4 +1,4 @@
59460 -# Copyright 2010-2012 Gentoo Foundation
59461 +# Copyright 2010-2014 Gentoo Foundation
59462 # Distributed under the terms of the GNU General Public License v2
59463
59464 import portage
59465 @@ -46,7 +46,7 @@ class ConfigTestCase(TestCase):
59466 settings.features.add('noclean')
59467 self.assertEqual('noclean' in settings['FEATURES'].split(), True)
59468 settings.regenerate()
59469 - self.assertEqual('noclean' in settings['FEATURES'].split(),True)
59470 + self.assertEqual('noclean' in settings['FEATURES'].split(), True)
59471
59472 settings.features.discard('noclean')
59473 self.assertEqual('noclean' in settings['FEATURES'].split(), False)
59474 @@ -56,7 +56,7 @@ class ConfigTestCase(TestCase):
59475 settings.features.add('noclean')
59476 self.assertEqual('noclean' in settings['FEATURES'].split(), True)
59477 settings.regenerate()
59478 - self.assertEqual('noclean' in settings['FEATURES'].split(),True)
59479 + self.assertEqual('noclean' in settings['FEATURES'].split(), True)
59480
59481 # before: ['noclean', '-noclean', 'noclean']
59482 settings.features._prune_overrides()
59483 @@ -92,7 +92,7 @@ class ConfigTestCase(TestCase):
59484 try:
59485 portage.util.noiselimit = -2
59486
59487 - license_group_locations = (os.path.join(playground.portdir, "profiles"),)
59488 + license_group_locations = (os.path.join(playground.settings.repositories["test_repo"].location, "profiles"),)
59489 pkg_license = os.path.join(playground.eroot, "etc", "portage")
59490
59491 lic_man = LicenseManager(license_group_locations, pkg_license)
59492 @@ -221,6 +221,7 @@ class ConfigTestCase(TestCase):
59493 "profile-formats = pms",
59494 "thin-manifests = true",
59495 "manifest-hashes = SHA256 SHA512 WHIRLPOOL",
59496 + "# use implicit masters"
59497 ),
59498 }
59499 }
59500 @@ -239,28 +240,30 @@ class ConfigTestCase(TestCase):
59501
59502 playground = ResolverPlayground(ebuilds=ebuilds,
59503 repo_configs=repo_configs, distfiles=distfiles)
59504 + settings = playground.settings
59505
59506 - new_repo_config = playground.settings.repositories.prepos['new_repo']
59507 + new_repo_config = settings.repositories["new_repo"]
59508 + old_repo_config = settings.repositories["old_repo"]
59509 self.assertTrue(len(new_repo_config.masters) > 0, "new_repo has no default master")
59510 - self.assertEqual(new_repo_config.masters[0].user_location, playground.portdir,
59511 - "new_repo default master is not PORTDIR")
59512 + self.assertEqual(new_repo_config.masters[0].user_location, playground.settings.repositories["test_repo"].location,
59513 + "new_repo default master is not test_repo")
59514 self.assertEqual(new_repo_config.thin_manifest, True,
59515 "new_repo_config.thin_manifest != True")
59516
59517 - new_manifest_file = os.path.join(playground.repo_dirs["new_repo"], "dev-libs", "A", "Manifest")
59518 - self.assertEqual(os.path.exists(new_manifest_file), False)
59519 + new_manifest_file = os.path.join(new_repo_config.location, "dev-libs", "A", "Manifest")
59520 + self.assertNotExists(new_manifest_file)
59521
59522 - new_manifest_file = os.path.join(playground.repo_dirs["new_repo"], "dev-libs", "B", "Manifest")
59523 + new_manifest_file = os.path.join(new_repo_config.location, "dev-libs", "B", "Manifest")
59524 f = open(new_manifest_file)
59525 self.assertEqual(len(list(f)), 1)
59526 f.close()
59527
59528 - new_manifest_file = os.path.join(playground.repo_dirs["new_repo"], "dev-libs", "C", "Manifest")
59529 + new_manifest_file = os.path.join(new_repo_config.location, "dev-libs", "C", "Manifest")
59530 f = open(new_manifest_file)
59531 self.assertEqual(len(list(f)), 2)
59532 f.close()
59533
59534 - old_manifest_file = os.path.join(playground.repo_dirs["old_repo"], "dev-libs", "A", "Manifest")
59535 + old_manifest_file = os.path.join(old_repo_config.location, "dev-libs", "A", "Manifest")
59536 f = open(old_manifest_file)
59537 self.assertEqual(len(list(f)), 1)
59538 f.close()
59539
59540 diff --git a/pym/portage/tests/ebuild/test_doebuild_fd_pipes.py b/pym/portage/tests/ebuild/test_doebuild_fd_pipes.py
59541 new file mode 100644
59542 index 0000000..61392dd
59543 --- /dev/null
59544 +++ b/pym/portage/tests/ebuild/test_doebuild_fd_pipes.py
59545 @@ -0,0 +1,137 @@
59546 +# Copyright 2013 Gentoo Foundation
59547 +# Distributed under the terms of the GNU General Public License v2
59548 +
59549 +import textwrap
59550 +
59551 +import portage
59552 +from portage import os
59553 +from portage.tests import TestCase
59554 +from portage.tests.resolver.ResolverPlayground import ResolverPlayground
59555 +from portage.package.ebuild._ipc.QueryCommand import QueryCommand
59556 +from portage.util._async.ForkProcess import ForkProcess
59557 +from portage.util._async.TaskScheduler import TaskScheduler
59558 +from portage.util._eventloop.global_event_loop import global_event_loop
59559 +from _emerge.Package import Package
59560 +from _emerge.PipeReader import PipeReader
59561 +
59562 +class DoebuildProcess(ForkProcess):
59563 +
59564 + __slots__ = ('doebuild_kwargs', 'doebuild_pargs')
59565 +
59566 + def _run(self):
59567 + return portage.doebuild(*self.doebuild_pargs, **self.doebuild_kwargs)
59568 +
59569 +class DoebuildFdPipesTestCase(TestCase):
59570 +
59571 + def testDoebuild(self):
59572 + """
59573 + Invoke portage.doebuild() with the fd_pipes parameter, and
59574 + check that the expected output appears in the pipe. This
59575 + functionality is not used by portage internally, but it is
59576 + supported for API consumers (see bug #475812).
59577 + """
59578 +
59579 + ebuild_body = textwrap.dedent("""
59580 + S=${WORKDIR}
59581 + pkg_info() { echo info ; }
59582 + pkg_nofetch() { echo nofetch ; }
59583 + pkg_pretend() { echo pretend ; }
59584 + pkg_setup() { echo setup ; }
59585 + src_unpack() { echo unpack ; }
59586 + src_prepare() { echo prepare ; }
59587 + src_configure() { echo configure ; }
59588 + src_compile() { echo compile ; }
59589 + src_test() { echo test ; }
59590 + src_install() { echo install ; }
59591 + """)
59592 +
59593 + ebuilds = {
59594 + 'app-misct/foo-1': {
59595 + 'EAPI' : '5',
59596 + "MISC_CONTENT": ebuild_body,
59597 + }
59598 + }
59599 +
59600 + # Override things that may be unavailable, or may have portability
59601 + # issues when running tests in exotic environments.
59602 + # prepstrip - bug #447810 (bash read builtin EINTR problem)
59603 + true_symlinks = ("find", "prepstrip", "sed", "scanelf")
59604 + true_binary = portage.process.find_binary("true")
59605 + self.assertEqual(true_binary is None, False,
59606 + "true command not found")
59607 +
59608 + playground = ResolverPlayground(ebuilds=ebuilds)
59609 + try:
59610 + QueryCommand._db = playground.trees
59611 + root_config = playground.trees[playground.eroot]['root_config']
59612 + portdb = root_config.trees["porttree"].dbapi
59613 + settings = portage.config(clone=playground.settings)
59614 + if "__PORTAGE_TEST_HARDLINK_LOCKS" in os.environ:
59615 + settings["__PORTAGE_TEST_HARDLINK_LOCKS"] = \
59616 + os.environ["__PORTAGE_TEST_HARDLINK_LOCKS"]
59617 + settings.backup_changes("__PORTAGE_TEST_HARDLINK_LOCKS")
59618 +
59619 + settings.features.add("noauto")
59620 + settings.features.add("test")
59621 + settings['PORTAGE_PYTHON'] = portage._python_interpreter
59622 + settings['PORTAGE_QUIET'] = "1"
59623 +
59624 + fake_bin = os.path.join(settings["EPREFIX"], "bin")
59625 + portage.util.ensure_dirs(fake_bin)
59626 + for x in true_symlinks:
59627 + os.symlink(true_binary, os.path.join(fake_bin, x))
59628 +
59629 + settings["__PORTAGE_TEST_PATH_OVERRIDE"] = fake_bin
59630 + settings.backup_changes("__PORTAGE_TEST_PATH_OVERRIDE")
59631 +
59632 + cpv = 'app-misct/foo-1'
59633 + metadata = dict(zip(Package.metadata_keys,
59634 + portdb.aux_get(cpv, Package.metadata_keys)))
59635 +
59636 + pkg = Package(built=False, cpv=cpv, installed=False,
59637 + metadata=metadata, root_config=root_config,
59638 + type_name='ebuild')
59639 + settings.setcpv(pkg)
59640 + ebuildpath = portdb.findname(cpv)
59641 + self.assertNotEqual(ebuildpath, None)
59642 +
59643 + for phase in ('info', 'nofetch',
59644 + 'pretend', 'setup', 'unpack', 'prepare', 'configure',
59645 + 'compile', 'test', 'install', 'qmerge', 'clean', 'merge'):
59646 +
59647 + pr, pw = os.pipe()
59648 +
59649 + producer = DoebuildProcess(doebuild_pargs=(ebuildpath, phase),
59650 + doebuild_kwargs={"settings" : settings,
59651 + "mydbapi": portdb, "tree": "porttree",
59652 + "vartree": root_config.trees["vartree"],
59653 + "fd_pipes": {1: pw, 2: pw},
59654 + "prev_mtimes": {}})
59655 +
59656 + consumer = PipeReader(
59657 + input_files={"producer" : pr})
59658 +
59659 + task_scheduler = TaskScheduler(iter([producer, consumer]),
59660 + max_jobs=2)
59661 +
59662 + try:
59663 + task_scheduler.start()
59664 + finally:
59665 + # PipeReader closes pr
59666 + os.close(pw)
59667 +
59668 + task_scheduler.wait()
59669 + output = portage._unicode_decode(
59670 + consumer.getvalue()).rstrip("\n")
59671 +
59672 + if task_scheduler.returncode != os.EX_OK:
59673 + portage.writemsg(output, noiselevel=-1)
59674 +
59675 + self.assertEqual(task_scheduler.returncode, os.EX_OK)
59676 +
59677 + if phase not in ('clean', 'merge', 'qmerge'):
59678 + self.assertEqual(phase, output)
59679 +
59680 + finally:
59681 + playground.cleanup()
59682 + QueryCommand._db = None
59683
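
[Not part of the commit above: a minimal sketch of what this new test exercises, calling portage.doebuild() with the fd_pipes parameter to capture phase output (bug #475812). The names ebuild_path, settings, portdb and vartree are assumed to come from an already configured environment such as the playground in the test; this synchronous read is only safe for small output.]

    import os
    import portage

    pr, pw = os.pipe()
    try:
        # Run a single phase and redirect its stdout/stderr into the pipe.
        rval = portage.doebuild(ebuild_path, "info",
            settings=settings, mydbapi=portdb, tree="porttree",
            vartree=vartree, fd_pipes={1: pw, 2: pw}, prev_mtimes={})
    finally:
        os.close(pw)
    # Read after the phase has finished; for larger output the test above
    # reads asynchronously with PipeReader to avoid filling the pipe buffer.
    with os.fdopen(pr, "rb") as f:
        output = f.read().decode("utf-8", "replace")
    print(rval, output)
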
59684 diff --git a/pym/portage/tests/ebuild/test_doebuild_spawn.py b/pym/portage/tests/ebuild/test_doebuild_spawn.py
59685 index 89e27a3..ae9a5c5 100644
59686 --- a/pym/portage/tests/ebuild/test_doebuild_spawn.py
59687 +++ b/pym/portage/tests/ebuild/test_doebuild_spawn.py
59688 @@ -1,18 +1,22 @@
59689 -# Copyright 2010-2011 Gentoo Foundation
59690 +# Copyright 2010-2012 Gentoo Foundation
59691 # Distributed under the terms of the GNU General Public License v2
59692
59693 +import textwrap
59694 +
59695 from portage import os
59696 from portage import _python_interpreter
59697 from portage import _shell_quote
59698 from portage.const import EBUILD_SH_BINARY
59699 from portage.package.ebuild.config import config
59700 from portage.package.ebuild.doebuild import spawn as doebuild_spawn
59701 +from portage.package.ebuild._spawn_nofetch import spawn_nofetch
59702 from portage.tests import TestCase
59703 from portage.tests.resolver.ResolverPlayground import ResolverPlayground
59704 +from portage.util._async.SchedulerInterface import SchedulerInterface
59705 +from portage.util._eventloop.global_event_loop import global_event_loop
59706 from _emerge.EbuildPhase import EbuildPhase
59707 from _emerge.MiscFunctionsProcess import MiscFunctionsProcess
59708 from _emerge.Package import Package
59709 -from _emerge.PollScheduler import PollScheduler
59710
59711 class DoebuildSpawnTestCase(TestCase):
59712 """
59713 @@ -23,25 +27,37 @@ class DoebuildSpawnTestCase(TestCase):
59714 """
59715
59716 def testDoebuildSpawn(self):
59717 - playground = ResolverPlayground()
59718 - try:
59719 - settings = config(clone=playground.settings)
59720 - if "__PORTAGE_TEST_HARDLINK_LOCKS" in os.environ:
59721 - settings["__PORTAGE_TEST_HARDLINK_LOCKS"] = \
59722 - os.environ["__PORTAGE_TEST_HARDLINK_LOCKS"]
59723 - settings.backup_changes("__PORTAGE_TEST_HARDLINK_LOCKS")
59724
59725 - cpv = 'sys-apps/portage-2.1'
59726 - metadata = {
59727 + ebuild_body = textwrap.dedent("""
59728 + pkg_nofetch() { : ; }
59729 + """)
59730 +
59731 + ebuilds = {
59732 + 'sys-apps/portage-2.1': {
59733 'EAPI' : '2',
59734 - 'INHERITED' : 'python eutils',
59735 'IUSE' : 'build doc epydoc python3 selinux',
59736 + 'KEYWORDS' : 'x86',
59737 'LICENSE' : 'GPL-2',
59738 - 'PROVIDE' : 'virtual/portage',
59739 'RDEPEND' : '>=app-shells/bash-3.2_p17 >=dev-lang/python-2.6',
59740 'SLOT' : '0',
59741 + "MISC_CONTENT": ebuild_body,
59742 }
59743 + }
59744 +
59745 + playground = ResolverPlayground(ebuilds=ebuilds)
59746 + try:
59747 root_config = playground.trees[playground.eroot]['root_config']
59748 + portdb = root_config.trees["porttree"].dbapi
59749 + settings = config(clone=playground.settings)
59750 + if "__PORTAGE_TEST_HARDLINK_LOCKS" in os.environ:
59751 + settings["__PORTAGE_TEST_HARDLINK_LOCKS"] = \
59752 + os.environ["__PORTAGE_TEST_HARDLINK_LOCKS"]
59753 + settings.backup_changes("__PORTAGE_TEST_HARDLINK_LOCKS")
59754 +
59755 + cpv = 'sys-apps/portage-2.1'
59756 + metadata = dict(zip(Package.metadata_keys,
59757 + portdb.aux_get(cpv, Package.metadata_keys)))
59758 +
59759 pkg = Package(built=False, cpv=cpv, installed=False,
59760 metadata=metadata, root_config=root_config,
59761 type_name='ebuild')
59762 @@ -57,7 +73,7 @@ class DoebuildSpawnTestCase(TestCase):
59763 # has been sourced already.
59764 open(os.path.join(settings['T'], 'environment'), 'wb').close()
59765
59766 - scheduler = PollScheduler().sched_iface
59767 + scheduler = SchedulerInterface(global_event_loop())
59768 for phase in ('_internal_test',):
59769
59770 # Test EbuildSpawnProcess by calling doebuild.spawn() with
59771 @@ -83,5 +99,7 @@ class DoebuildSpawnTestCase(TestCase):
59772 ebuild_phase.start()
59773 ebuild_phase.wait()
59774 self.assertEqual(ebuild_phase.returncode, os.EX_OK)
59775 +
59776 + spawn_nofetch(portdb, portdb.findname(cpv), settings=settings)
59777 finally:
59778 playground.cleanup()
59779
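
[Not part of the commit above: a minimal sketch of the scheduler change this hunk makes, where the object previously obtained from PollScheduler().sched_iface is now built directly on the global event loop. The EbuildPhase call in the comment is illustrative and assumes a prepared settings object.]

    from portage.util._async.SchedulerInterface import SchedulerInterface
    from portage.util._eventloop.global_event_loop import global_event_loop

    # Build the scheduler interface on the global event loop and hand it to
    # consumers that used to receive PollScheduler().sched_iface, e.g.
    # EbuildPhase(background=False, phase="clean",
    #             scheduler=scheduler, settings=settings)
    scheduler = SchedulerInterface(global_event_loop())
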
59780 diff --git a/pym/portage/tests/ebuild/test_ipc_daemon.py b/pym/portage/tests/ebuild/test_ipc_daemon.py
59781 index 0efab65..a871076 100644
59782 --- a/pym/portage/tests/ebuild/test_ipc_daemon.py
59783 +++ b/pym/portage/tests/ebuild/test_ipc_daemon.py
59784 @@ -1,4 +1,4 @@
59785 -# Copyright 2010-2011 Gentoo Foundation
59786 +# Copyright 2010-2012 Gentoo Foundation
59787 # Distributed under the terms of the GNU General Public License v2
59788
59789 import tempfile
59790 @@ -13,16 +13,28 @@ from portage.const import BASH_BINARY
59791 from portage.locks import hardlock_cleanup
59792 from portage.package.ebuild._ipc.ExitCommand import ExitCommand
59793 from portage.util import ensure_dirs
59794 +from portage.util._async.ForkProcess import ForkProcess
59795 +from portage.util._async.TaskScheduler import TaskScheduler
59796 +from portage.util._eventloop.global_event_loop import global_event_loop
59797 from _emerge.SpawnProcess import SpawnProcess
59798 from _emerge.EbuildBuildDir import EbuildBuildDir
59799 from _emerge.EbuildIpcDaemon import EbuildIpcDaemon
59800 -from _emerge.TaskScheduler import TaskScheduler
59801 +
59802 +class SleepProcess(ForkProcess):
59803 + """
59804 + Emulate the sleep command, in order to ensure a consistent
59805 + return code when it is killed by SIGTERM (see bug #437180).
59806 + """
59807 + __slots__ = ('seconds',)
59808 + def _run(self):
59809 + time.sleep(self.seconds)
59810
59811 class IpcDaemonTestCase(TestCase):
59812
59813 _SCHEDULE_TIMEOUT = 40000 # 40 seconds
59814
59815 def testIpcDaemon(self):
59816 + event_loop = global_event_loop()
59817 tmpdir = tempfile.mkdtemp()
59818 build_dir = None
59819 try:
59820 @@ -44,9 +56,8 @@ class IpcDaemonTestCase(TestCase):
59821 env["__PORTAGE_TEST_HARDLINK_LOCKS"] = \
59822 os.environ["__PORTAGE_TEST_HARDLINK_LOCKS"]
59823
59824 - task_scheduler = TaskScheduler(max_jobs=2)
59825 build_dir = EbuildBuildDir(
59826 - scheduler=task_scheduler.sched_iface,
59827 + scheduler=event_loop,
59828 settings=env)
59829 build_dir.lock()
59830 ensure_dirs(env['PORTAGE_BUILDDIR'])
59831 @@ -61,26 +72,23 @@ class IpcDaemonTestCase(TestCase):
59832 commands = {'exit' : exit_command}
59833 daemon = EbuildIpcDaemon(commands=commands,
59834 input_fifo=input_fifo,
59835 - output_fifo=output_fifo,
59836 - scheduler=task_scheduler.sched_iface)
59837 + output_fifo=output_fifo)
59838 proc = SpawnProcess(
59839 args=[BASH_BINARY, "-c",
59840 '"$PORTAGE_BIN_PATH"/ebuild-ipc exit %d' % exitcode],
59841 - env=env, scheduler=task_scheduler.sched_iface)
59842 + env=env)
59843 + task_scheduler = TaskScheduler(iter([daemon, proc]),
59844 + max_jobs=2, event_loop=event_loop)
59845
59846 self.received_command = False
59847 def exit_command_callback():
59848 self.received_command = True
59849 - task_scheduler.clear()
59850 - task_scheduler.wait()
59851 + task_scheduler.cancel()
59852
59853 exit_command.reply_hook = exit_command_callback
59854 start_time = time.time()
59855 - task_scheduler.add(daemon)
59856 - task_scheduler.add(proc)
59857 - task_scheduler.run(timeout=self._SCHEDULE_TIMEOUT)
59858 - task_scheduler.clear()
59859 - task_scheduler.wait()
59860 + self._run(event_loop, task_scheduler, self._SCHEDULE_TIMEOUT)
59861 +
59862 hardlock_cleanup(env['PORTAGE_BUILDDIR'],
59863 remove_all_locks=True)
59864
59865 @@ -91,8 +99,10 @@ class IpcDaemonTestCase(TestCase):
59866 self.assertEqual(daemon.isAlive(), False)
59867 self.assertEqual(exit_command.exitcode, exitcode)
59868
59869 - # Intentionally short timeout test for QueueScheduler.run()
59870 - sleep_time_s = 10 # 10.000 seconds
59871 + # Intentionally short timeout test for EventLoop/AsyncScheduler.
59872 + # Use a ridiculously long sleep_time_s in case the user's
59873 + # system is heavily loaded (see bug #436334).
59874 + sleep_time_s = 600 #600.000 seconds
59875 short_timeout_ms = 10 # 0.010 seconds
59876
59877 for i in range(3):
59878 @@ -100,25 +110,20 @@ class IpcDaemonTestCase(TestCase):
59879 commands = {'exit' : exit_command}
59880 daemon = EbuildIpcDaemon(commands=commands,
59881 input_fifo=input_fifo,
59882 - output_fifo=output_fifo,
59883 - scheduler=task_scheduler.sched_iface)
59884 - proc = SpawnProcess(
59885 - args=[BASH_BINARY, "-c", 'exec sleep %d' % sleep_time_s],
59886 - env=env, scheduler=task_scheduler.sched_iface)
59887 + output_fifo=output_fifo)
59888 + proc = SleepProcess(seconds=sleep_time_s)
59889 + task_scheduler = TaskScheduler(iter([daemon, proc]),
59890 + max_jobs=2, event_loop=event_loop)
59891
59892 self.received_command = False
59893 def exit_command_callback():
59894 self.received_command = True
59895 - task_scheduler.clear()
59896 - task_scheduler.wait()
59897 + task_scheduler.cancel()
59898
59899 exit_command.reply_hook = exit_command_callback
59900 start_time = time.time()
59901 - task_scheduler.add(daemon)
59902 - task_scheduler.add(proc)
59903 - task_scheduler.run(timeout=short_timeout_ms)
59904 - task_scheduler.clear()
59905 - task_scheduler.wait()
59906 + self._run(event_loop, task_scheduler, short_timeout_ms)
59907 +
59908 hardlock_cleanup(env['PORTAGE_BUILDDIR'],
59909 remove_all_locks=True)
59910
59911 @@ -133,3 +138,20 @@ class IpcDaemonTestCase(TestCase):
59912 if build_dir is not None:
59913 build_dir.unlock()
59914 shutil.rmtree(tmpdir)
59915 +
59916 + def _timeout_callback(self):
59917 + self._timed_out = True
59918 +
59919 + def _run(self, event_loop, task_scheduler, timeout):
59920 + self._timed_out = False
59921 + timeout_id = event_loop.timeout_add(timeout, self._timeout_callback)
59922 +
59923 + try:
59924 + task_scheduler.start()
59925 + while not self._timed_out and task_scheduler.poll() is None:
59926 + event_loop.iteration()
59927 + if self._timed_out:
59928 + task_scheduler.cancel()
59929 + task_scheduler.wait()
59930 + finally:
59931 + event_loop.source_remove(timeout_id)
59932
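
[Not part of the commit above: a minimal sketch of the event-loop driven TaskScheduler pattern that replaces the old QueueScheduler usage, mirroring the _run() helper added in this hunk. The names tasks and timeout_ms are placeholders; tasks stands for an iterable of jobs such as an EbuildIpcDaemon and a SpawnProcess.]

    from portage.util._async.TaskScheduler import TaskScheduler
    from portage.util._eventloop.global_event_loop import global_event_loop

    event_loop = global_event_loop()
    scheduler = TaskScheduler(iter(tasks), max_jobs=2, event_loop=event_loop)

    timed_out = []
    timeout_id = event_loop.timeout_add(timeout_ms,
        lambda: timed_out.append(True))
    try:
        scheduler.start()
        # Iterate the event loop until the jobs finish or the timer fires.
        while not timed_out and scheduler.poll() is None:
            event_loop.iteration()
        if timed_out:
            scheduler.cancel()
        scheduler.wait()
    finally:
        event_loop.source_remove(timeout_id)
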
59933 diff --git a/pym/portage/tests/ebuild/test_spawn.py b/pym/portage/tests/ebuild/test_spawn.py
59934 index fea4738..a38e109 100644
59935 --- a/pym/portage/tests/ebuild/test_spawn.py
59936 +++ b/pym/portage/tests/ebuild/test_spawn.py
59937 @@ -1,17 +1,18 @@
59938 -# Copyright 1998-2011 Gentoo Foundation
59939 +# Copyright 1998-2013 Gentoo Foundation
59940 # Distributed under the terms of the GNU General Public License v2
59941
59942 import errno
59943 import io
59944 import sys
59945 import tempfile
59946 +import portage
59947 from portage import os
59948 from portage import _encodings
59949 from portage import _unicode_encode
59950 from portage.const import BASH_BINARY
59951 from portage.tests import TestCase
59952 +from portage.util._eventloop.global_event_loop import global_event_loop
59953 from _emerge.SpawnProcess import SpawnProcess
59954 -from _emerge.PollScheduler import PollScheduler
59955
59956 class SpawnTestCase(TestCase):
59957
59958 @@ -22,12 +23,16 @@ class SpawnTestCase(TestCase):
59959 os.close(fd)
59960 null_fd = os.open('/dev/null', os.O_RDWR)
59961 test_string = 2 * "blah blah blah\n"
59962 - scheduler = PollScheduler().sched_iface
59963 proc = SpawnProcess(
59964 args=[BASH_BINARY, "-c",
59965 "echo -n '%s'" % test_string],
59966 - env={}, fd_pipes={0:sys.stdin.fileno(), 1:null_fd, 2:null_fd},
59967 - scheduler=scheduler,
59968 + env={},
59969 + fd_pipes={
59970 + 0: portage._get_stdin().fileno(),
59971 + 1: null_fd,
59972 + 2: null_fd
59973 + },
59974 + scheduler=global_event_loop(),
59975 logfile=logfile)
59976 proc.start()
59977 os.close(null_fd)
59978
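
[Not part of the commit above: a minimal sketch of the updated SpawnProcess usage shown in this hunk, scheduled on the global event loop and taking stdin from portage._get_stdin(). The log file path is an arbitrary assumption.]

    import os
    import portage
    from portage.const import BASH_BINARY
    from portage.util._eventloop.global_event_loop import global_event_loop
    from _emerge.SpawnProcess import SpawnProcess

    # Run a shell command, discarding stdout/stderr but teeing them to a log.
    null_fd = os.open("/dev/null", os.O_RDWR)
    proc = SpawnProcess(
        args=[BASH_BINARY, "-c", "echo hello"],
        env={},
        fd_pipes={
            0: portage._get_stdin().fileno(),
            1: null_fd,
            2: null_fd,
        },
        scheduler=global_event_loop(),
        logfile="/tmp/spawn.log")  # assumption: any writable path
    proc.start()
    os.close(null_fd)
    proc.wait()
    print(proc.returncode)
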
59979 diff --git a/pym/portage/tests/emerge/test_emerge_slot_abi.py b/pym/portage/tests/emerge/test_emerge_slot_abi.py
59980 index f18bd12..fd7ec0e 100644
59981 --- a/pym/portage/tests/emerge/test_emerge_slot_abi.py
59982 +++ b/pym/portage/tests/emerge/test_emerge_slot_abi.py
59983 @@ -1,4 +1,4 @@
59984 -# Copyright 2012 Gentoo Foundation
59985 +# Copyright 2012-2014 Gentoo Foundation
59986 # Distributed under the terms of the GNU General Public License v2
59987
59988 import subprocess
59989 @@ -64,15 +64,14 @@ class SlotAbiEmergeTestCase(TestCase):
59990 trees = playground.trees
59991 portdb = trees[eroot]["porttree"].dbapi
59992 vardb = trees[eroot]["vartree"].dbapi
59993 - portdir = settings["PORTDIR"]
59994 var_cache_edb = os.path.join(eprefix, "var", "cache", "edb")
59995 user_config_dir = os.path.join(eprefix, USER_CONFIG_PATH)
59996 package_mask_path = os.path.join(user_config_dir, "package.mask")
59997
59998 portage_python = portage._python_interpreter
59999 - ebuild_cmd = (portage_python, "-Wd",
60000 + ebuild_cmd = (portage_python, "-b", "-Wd",
60001 os.path.join(PORTAGE_BIN_PATH, "ebuild"))
60002 - emerge_cmd = (portage_python, "-Wd",
60003 + emerge_cmd = (portage_python, "-b", "-Wd",
60004 os.path.join(PORTAGE_BIN_PATH, "emerge"))
60005
60006 test_ebuild = portdb.findname("dev-libs/dbus-glib-0.98")
60007 @@ -94,25 +93,6 @@ class SlotAbiEmergeTestCase(TestCase):
60008 portage_tmpdir = os.path.join(eprefix, "var", "tmp", "portage")
60009 profile_path = settings.profile_path
60010
60011 - features = []
60012 - if not portage.process.sandbox_capable or \
60013 - os.environ.get("SANDBOX_ON") == "1":
60014 - features.append("-sandbox")
60015 -
60016 - make_conf = (
60017 - "FEATURES=\"%s\"\n" % (" ".join(features),),
60018 - "PORTDIR=\"%s\"\n" % (portdir,),
60019 - "PORTAGE_GRPNAME=\"%s\"\n" % (os.environ["PORTAGE_GRPNAME"],),
60020 - "PORTAGE_USERNAME=\"%s\"\n" % (os.environ["PORTAGE_USERNAME"],),
60021 - "PKGDIR=\"%s\"\n" % (pkgdir,),
60022 - "PORTAGE_INST_GID=%s\n" % (portage.data.portage_gid,),
60023 - "PORTAGE_INST_UID=%s\n" % (portage.data.portage_uid,),
60024 - "PORTAGE_TMPDIR=\"%s\"\n" % (portage_tmpdir,),
60025 - "CLEAN_DELAY=0\n",
60026 - "DISTDIR=\"%s\"\n" % (distdir,),
60027 - "EMERGE_WARNING_DELAY=0\n",
60028 - )
60029 -
60030 path = os.environ.get("PATH")
60031 if path is not None and not path.strip():
60032 path = None
60033 @@ -139,6 +119,7 @@ class SlotAbiEmergeTestCase(TestCase):
60034 "PORTAGE_OVERRIDE_EPREFIX" : eprefix,
60035 "PATH" : path,
60036 "PORTAGE_PYTHON" : portage_python,
60037 + "PORTAGE_REPOSITORIES" : settings.repositories.config_string(),
60038 "PYTHONPATH" : pythonpath,
60039 }
60040
60041 @@ -155,9 +136,6 @@ class SlotAbiEmergeTestCase(TestCase):
60042 try:
60043 for d in dirs:
60044 ensure_dirs(d)
60045 - with open(os.path.join(user_config_dir, "make.conf"), 'w') as f:
60046 - for line in make_conf:
60047 - f.write(line)
60048 for x in true_symlinks:
60049 os.symlink(true_binary, os.path.join(fake_bin, x))
60050 with open(os.path.join(var_cache_edb, "counter"), 'wb') as f:
60051
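
[Not part of the commit above: a minimal sketch of the configuration hand-off this hunk switches to, passing the repository layout to child commands through PORTAGE_REPOSITORIES instead of writing PORTDIR into a generated make.conf. The settings name is assumed to be the playground's portage.config, and running emerge this way presumes the rest of the test environment is in place.]

    import os
    import subprocess
    import portage
    from portage.const import PORTAGE_BIN_PATH

    env = dict(os.environ)
    # config_string() serializes the repos.conf-style configuration so that
    # child portage commands see the same repositories as the parent.
    env["PORTAGE_REPOSITORIES"] = settings.repositories.config_string()
    subprocess.check_call([portage._python_interpreter, "-b", "-Wd",
        os.path.join(PORTAGE_BIN_PATH, "emerge"), "--info"], env=env)
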
60052 diff --git a/pym/portage/tests/emerge/test_simple.py b/pym/portage/tests/emerge/test_simple.py
60053 index f87170a..bf0af8b 100644
60054 --- a/pym/portage/tests/emerge/test_simple.py
60055 +++ b/pym/portage/tests/emerge/test_simple.py
60056 @@ -1,4 +1,4 @@
60057 -# Copyright 2011-2012 Gentoo Foundation
60058 +# Copyright 2011-2014 Gentoo Foundation
60059 # Distributed under the terms of the GNU General Public License v2
60060
60061 import subprocess
60062 @@ -7,7 +7,8 @@ import sys
60063 import portage
60064 from portage import os
60065 from portage import _unicode_decode
60066 -from portage.const import PORTAGE_BIN_PATH, PORTAGE_PYM_PATH, USER_CONFIG_PATH
60067 +from portage.const import (BASH_BINARY, PORTAGE_BASE_PATH,
60068 + PORTAGE_BIN_PATH, PORTAGE_PYM_PATH, USER_CONFIG_PATH)
60069 from portage.process import find_binary
60070 from portage.tests import TestCase
60071 from portage.tests.resolver.ResolverPlayground import ResolverPlayground
60072 @@ -75,13 +76,21 @@ pkg_preinst() {
60073 else
60074 einfo "has_version does not detect an installed instance of $CATEGORY/$PN:$SLOT"
60075 fi
60076 + if [[ ${EPREFIX} != ${PORTAGE_OVERRIDE_EPREFIX} ]] ; then
60077 + if has_version --host-root $CATEGORY/$PN:$SLOT ; then
60078 + einfo "has_version --host-root detects an installed instance of $CATEGORY/$PN:$SLOT"
60079 + einfo "best_version --host-root reports that the installed instance is $(best_version $CATEGORY/$PN:$SLOT)"
60080 + else
60081 + einfo "has_version --host-root does not detect an installed instance of $CATEGORY/$PN:$SLOT"
60082 + fi
60083 + fi
60084 }
60085
60086 """
60087
60088 ebuilds = {
60089 "dev-libs/A-1": {
60090 - "EAPI" : "4",
60091 + "EAPI" : "5",
60092 "IUSE" : "+flag",
60093 "KEYWORDS": "x86",
60094 "LICENSE": "GPL-2",
60095 @@ -89,14 +98,14 @@ pkg_preinst() {
60096 "RDEPEND": "flag? ( dev-libs/B[flag] )",
60097 },
60098 "dev-libs/B-1": {
60099 - "EAPI" : "4",
60100 + "EAPI" : "5",
60101 "IUSE" : "+flag",
60102 "KEYWORDS": "x86",
60103 "LICENSE": "GPL-2",
60104 "MISC_CONTENT": install_something,
60105 },
60106 "virtual/foo-0": {
60107 - "EAPI" : "4",
60108 + "EAPI" : "5",
60109 "KEYWORDS": "x86",
60110 "LICENSE": "GPL-2",
60111 },
60112 @@ -104,7 +113,7 @@ pkg_preinst() {
60113
60114 installed = {
60115 "dev-libs/A-1": {
60116 - "EAPI" : "4",
60117 + "EAPI" : "5",
60118 "IUSE" : "+flag",
60119 "KEYWORDS": "x86",
60120 "LICENSE": "GPL-2",
60121 @@ -112,21 +121,21 @@ pkg_preinst() {
60122 "USE": "flag",
60123 },
60124 "dev-libs/B-1": {
60125 - "EAPI" : "4",
60126 + "EAPI" : "5",
60127 "IUSE" : "+flag",
60128 "KEYWORDS": "x86",
60129 "LICENSE": "GPL-2",
60130 "USE": "flag",
60131 },
60132 "dev-libs/depclean-me-1": {
60133 - "EAPI" : "4",
60134 + "EAPI" : "5",
60135 "IUSE" : "",
60136 "KEYWORDS": "x86",
60137 "LICENSE": "GPL-2",
60138 "USE": "",
60139 },
60140 "app-misc/depclean-me-1": {
60141 - "EAPI" : "4",
60142 + "EAPI" : "5",
60143 "IUSE" : "",
60144 "KEYWORDS": "x86",
60145 "LICENSE": "GPL-2",
60146 @@ -159,29 +168,35 @@ pkg_preinst() {
60147 eroot = settings["EROOT"]
60148 trees = playground.trees
60149 portdb = trees[eroot]["porttree"].dbapi
60150 - portdir = settings["PORTDIR"]
60151 + test_repo_location = settings.repositories["test_repo"].location
60152 var_cache_edb = os.path.join(eprefix, "var", "cache", "edb")
60153 cachedir = os.path.join(var_cache_edb, "dep")
60154 - cachedir_pregen = os.path.join(portdir, "metadata", "cache")
60155 + cachedir_pregen = os.path.join(test_repo_location, "metadata", "md5-cache")
60156
60157 portage_python = portage._python_interpreter
60158 - ebuild_cmd = (portage_python, "-Wd",
60159 + dispatch_conf_cmd = (portage_python, "-b", "-Wd",
60160 + os.path.join(PORTAGE_BIN_PATH, "dispatch-conf"))
60161 + ebuild_cmd = (portage_python, "-b", "-Wd",
60162 os.path.join(PORTAGE_BIN_PATH, "ebuild"))
60163 - egencache_cmd = (portage_python, "-Wd",
60164 - os.path.join(PORTAGE_BIN_PATH, "egencache"))
60165 - emerge_cmd = (portage_python, "-Wd",
60166 + egencache_cmd = (portage_python, "-b", "-Wd",
60167 + os.path.join(PORTAGE_BIN_PATH, "egencache"),
60168 + "--repo", "test_repo",
60169 + "--repositories-configuration", settings.repositories.config_string())
60170 + emerge_cmd = (portage_python, "-b", "-Wd",
60171 os.path.join(PORTAGE_BIN_PATH, "emerge"))
60172 - emaint_cmd = (portage_python, "-Wd",
60173 + emaint_cmd = (portage_python, "-b", "-Wd",
60174 os.path.join(PORTAGE_BIN_PATH, "emaint"))
60175 - env_update_cmd = (portage_python, "-Wd",
60176 + env_update_cmd = (portage_python, "-b", "-Wd",
60177 os.path.join(PORTAGE_BIN_PATH, "env-update"))
60178 - fixpackages_cmd = (portage_python, "-Wd",
60179 + etc_update_cmd = (BASH_BINARY,
60180 + os.path.join(PORTAGE_BIN_PATH, "etc-update"))
60181 + fixpackages_cmd = (portage_python, "-b", "-Wd",
60182 os.path.join(PORTAGE_BIN_PATH, "fixpackages"))
60183 - portageq_cmd = (portage_python, "-Wd",
60184 + portageq_cmd = (portage_python, "-b", "-Wd",
60185 os.path.join(PORTAGE_BIN_PATH, "portageq"))
60186 - quickpkg_cmd = (portage_python, "-Wd",
60187 + quickpkg_cmd = (portage_python, "-b", "-Wd",
60188 os.path.join(PORTAGE_BIN_PATH, "quickpkg"))
60189 - regenworld_cmd = (portage_python, "-Wd",
60190 + regenworld_cmd = (portage_python, "-b", "-Wd",
60191 os.path.join(PORTAGE_BIN_PATH, "regenworld"))
60192
60193 rm_binary = find_binary("rm")
60194 @@ -196,8 +211,14 @@ pkg_preinst() {
60195 test_ebuild = portdb.findname("dev-libs/A-1")
60196 self.assertFalse(test_ebuild is None)
60197
60198 + cross_prefix = os.path.join(eprefix, "cross_prefix")
60199 +
60200 test_commands = (
60201 env_update_cmd,
60202 + portageq_cmd + ("envvar", "-v", "CONFIG_PROTECT", "EROOT",
60203 + "PORTAGE_CONFIGROOT", "PORTAGE_TMPDIR", "USERLAND"),
60204 + etc_update_cmd,
60205 + dispatch_conf_cmd,
60206 emerge_cmd + ("--version",),
60207 emerge_cmd + ("--info",),
60208 emerge_cmd + ("--info", "--verbose"),
60209 @@ -210,7 +231,7 @@ pkg_preinst() {
60210 ({"FEATURES" : "metadata-transfer"},) + \
60211 emerge_cmd + ("--regen",),
60212 rm_cmd + ("-rf", cachedir),
60213 - ({"FEATURES" : "metadata-transfer parse-eapi-ebuild-head"},) + \
60214 + ({"FEATURES" : "metadata-transfer"},) + \
60215 emerge_cmd + ("--regen",),
60216 rm_cmd + ("-rf", cachedir),
60217 egencache_cmd + ("--update",) + tuple(egencache_extra_args),
60218 @@ -226,6 +247,7 @@ pkg_preinst() {
60219 ebuild_cmd + (test_ebuild, "manifest", "clean", "package", "merge"),
60220 emerge_cmd + ("--pretend", "--tree", "--complete-graph", "dev-libs/A"),
60221 emerge_cmd + ("-p", "dev-libs/B"),
60222 + emerge_cmd + ("-p", "--newrepo", "dev-libs/B"),
60223 emerge_cmd + ("-B", "dev-libs/B",),
60224 emerge_cmd + ("--oneshot", "--usepkg", "dev-libs/B",),
60225
60226 @@ -257,6 +279,24 @@ pkg_preinst() {
60227 emerge_cmd + ("-p", "--unmerge", "-q", eroot + "usr"),
60228 emerge_cmd + ("--unmerge", "--quiet", "dev-libs/A"),
60229 emerge_cmd + ("-C", "--quiet", "dev-libs/B"),
60230 +
60231 + # Test cross-prefix usage, including chpathtool for binpkgs.
60232 + ({"EPREFIX" : cross_prefix},) + \
60233 + emerge_cmd + ("--usepkgonly", "dev-libs/A"),
60234 + ({"EPREFIX" : cross_prefix},) + \
60235 + portageq_cmd + ("has_version", cross_prefix, "dev-libs/A"),
60236 + ({"EPREFIX" : cross_prefix},) + \
60237 + portageq_cmd + ("has_version", cross_prefix, "dev-libs/B"),
60238 + ({"EPREFIX" : cross_prefix},) + \
60239 + emerge_cmd + ("-C", "--quiet", "dev-libs/B"),
60240 + ({"EPREFIX" : cross_prefix},) + \
60241 + emerge_cmd + ("-C", "--quiet", "dev-libs/A"),
60242 + ({"EPREFIX" : cross_prefix},) + \
60243 + emerge_cmd + ("dev-libs/A",),
60244 + ({"EPREFIX" : cross_prefix},) + \
60245 + portageq_cmd + ("has_version", cross_prefix, "dev-libs/A"),
60246 + ({"EPREFIX" : cross_prefix},) + \
60247 + portageq_cmd + ("has_version", cross_prefix, "dev-libs/B"),
60248 )
60249
60250 distdir = playground.distdir
60251 @@ -266,20 +306,6 @@ pkg_preinst() {
60252 profile_path = settings.profile_path
60253 user_config_dir = os.path.join(os.sep, eprefix, USER_CONFIG_PATH)
60254
60255 - features = []
60256 - if not portage.process.sandbox_capable or \
60257 - os.environ.get("SANDBOX_ON") == "1":
60258 - features.append("-sandbox")
60259 -
60260 - # Since egencache ignores settings from the calling environment,
60261 - # configure it via make.conf.
60262 - make_conf = (
60263 - "FEATURES=\"%s\"\n" % (" ".join(features),),
60264 - "PORTDIR=\"%s\"\n" % (portdir,),
60265 - "PORTAGE_GRPNAME=\"%s\"\n" % (os.environ["PORTAGE_GRPNAME"],),
60266 - "PORTAGE_USERNAME=\"%s\"\n" % (os.environ["PORTAGE_USERNAME"],),
60267 - )
60268 -
60269 path = os.environ.get("PATH")
60270 if path is not None and not path.strip():
60271 path = None
60272 @@ -314,37 +340,43 @@ pkg_preinst() {
60273 "PORTAGE_INST_GID" : str(portage.data.portage_gid),
60274 "PORTAGE_INST_UID" : str(portage.data.portage_uid),
60275 "PORTAGE_PYTHON" : portage_python,
60276 + "PORTAGE_REPOSITORIES" : settings.repositories.config_string(),
60277 "PORTAGE_TMPDIR" : portage_tmpdir,
60278 "PYTHONPATH" : pythonpath,
60279 + "__PORTAGE_TEST_PATH_OVERRIDE" : fake_bin,
60280 }
60281
60282 if "__PORTAGE_TEST_HARDLINK_LOCKS" in os.environ:
60283 env["__PORTAGE_TEST_HARDLINK_LOCKS"] = \
60284 os.environ["__PORTAGE_TEST_HARDLINK_LOCKS"]
60285
60286 - updates_dir = os.path.join(portdir, "profiles", "updates")
60287 + updates_dir = os.path.join(test_repo_location, "profiles", "updates")
60288 dirs = [cachedir, cachedir_pregen, distdir, fake_bin,
60289 portage_tmpdir, updates_dir,
60290 user_config_dir, var_cache_edb]
60291 - true_symlinks = ["chown", "chgrp"]
60292 + etc_symlinks = ("dispatch-conf.conf", "etc-update.conf")
60293 + # Override things that may be unavailable, or may have portability
60294 + # issues when running tests in exotic environments.
60295 + # prepstrip - bug #447810 (bash read builtin EINTR problem)
60296 + true_symlinks = ["find", "prepstrip", "sed", "scanelf"]
60297 true_binary = find_binary("true")
60298 self.assertEqual(true_binary is None, False,
60299 "true command not found")
60300 try:
60301 for d in dirs:
60302 ensure_dirs(d)
60303 - with open(os.path.join(user_config_dir, "make.conf"), 'w') as f:
60304 - for line in make_conf:
60305 - f.write(line)
60306 for x in true_symlinks:
60307 os.symlink(true_binary, os.path.join(fake_bin, x))
60308 + for x in etc_symlinks:
60309 + os.symlink(os.path.join(PORTAGE_BASE_PATH, "cnf", x),
60310 + os.path.join(eprefix, "etc", x))
60311 with open(os.path.join(var_cache_edb, "counter"), 'wb') as f:
60312 f.write(b"100")
60313 # non-empty system set keeps --depclean quiet
60314 with open(os.path.join(profile_path, "packages"), 'w') as f:
60315 f.write("*dev-libs/token-system-pkg")
60316 for cp, xml_data in metadata_xml_files:
60317 - with open(os.path.join(portdir, cp, "metadata.xml"), 'w') as f:
60318 + with open(os.path.join(test_repo_location, cp, "metadata.xml"), 'w') as f:
60319 f.write(playground.metadata_xml_template % xml_data)
60320 with open(os.path.join(updates_dir, "1Q-2010"), 'w') as f:
60321 f.write("""
60322
60323 diff --git a/pym/portage/tests/env/config/test_PackageKeywordsFile.py b/pym/portage/tests/env/config/test_PackageKeywordsFile.py
60324 index f1e9e98..609c0fd 100644
60325 --- a/pym/portage/tests/env/config/test_PackageKeywordsFile.py
60326 +++ b/pym/portage/tests/env/config/test_PackageKeywordsFile.py
60327 @@ -11,7 +11,7 @@ class PackageKeywordsFileTestCase(TestCase):
60328
60329 cpv = ['sys-apps/portage']
60330 keywords = ['~x86', 'amd64', '-mips']
60331 -
60332 +
60333 def testPackageKeywordsFile(self):
60334 """
60335 A simple test to ensure the load works properly
60336 @@ -23,17 +23,17 @@ class PackageKeywordsFileTestCase(TestCase):
60337 f.load()
60338 i = 0
60339 for cpv, keyword in f.items():
60340 - self.assertEqual( cpv, self.cpv[i] )
60341 + self.assertEqual(cpv, self.cpv[i])
60342 [k for k in keyword if self.assertTrue(k in self.keywords)]
60343 i = i + 1
60344 finally:
60345 self.NukeFile()
60346 -
60347 +
60348 def BuildFile(self):
60349 fd, self.fname = mkstemp()
60350 f = os.fdopen(fd, 'w')
60351 for c in self.cpv:
60352 - f.write("%s %s\n" % (c,' '.join(self.keywords)))
60353 + f.write("%s %s\n" % (c, ' '.join(self.keywords)))
60354 f.close()
60355
60356 def NukeFile(self):
60357
60358 diff --git a/pym/portage/tests/env/config/test_PackageUseFile.py b/pym/portage/tests/env/config/test_PackageUseFile.py
60359 index 7a38067..b1a6ccb 100644
60360 --- a/pym/portage/tests/env/config/test_PackageUseFile.py
60361 +++ b/pym/portage/tests/env/config/test_PackageUseFile.py
60362 @@ -12,7 +12,7 @@ class PackageUseFileTestCase(TestCase):
60363
60364 cpv = 'sys-apps/portage'
60365 useflags = ['cdrom', 'far', 'boo', 'flag', 'blat']
60366 -
60367 +
60368 def testPackageUseFile(self):
60369 """
60370 A simple test to ensure the load works properly
60371 @@ -22,7 +22,7 @@ class PackageUseFileTestCase(TestCase):
60372 f = PackageUseFile(self.fname)
60373 f.load()
60374 for cpv, use in f.items():
60375 - self.assertEqual( cpv, self.cpv )
60376 + self.assertEqual(cpv, self.cpv)
60377 [flag for flag in use if self.assertTrue(flag in self.useflags)]
60378 finally:
60379 self.NukeFile()
60380 @@ -32,6 +32,6 @@ class PackageUseFileTestCase(TestCase):
60381 f = os.fdopen(fd, 'w')
60382 f.write("%s %s" % (self.cpv, ' '.join(self.useflags)))
60383 f.close()
60384 -
60385 +
60386 def NukeFile(self):
60387 os.unlink(self.fname)
60388
60389 diff --git a/pym/portage/tests/env/config/test_PortageModulesFile.py b/pym/portage/tests/env/config/test_PortageModulesFile.py
60390 index 2cd1a8a..05584a5 100644
60391 --- a/pym/portage/tests/env/config/test_PortageModulesFile.py
60392 +++ b/pym/portage/tests/env/config/test_PortageModulesFile.py
60393 @@ -8,14 +8,13 @@ from tempfile import mkstemp
60394
60395 class PortageModulesFileTestCase(TestCase):
60396
60397 - keys = ['foo.bar','baz','bob','extra_key']
60398 - invalid_keys = ['',""]
60399 - modules = ['spanky','zmedico','antarus','ricer','5','6']
60400 + keys = ['foo.bar', 'baz', 'bob', 'extra_key']
60401 + invalid_keys = ['', ""]
60402 + modules = ['spanky', 'zmedico', 'antarus', 'ricer', '5', '6']
60403
60404 def setUp(self):
60405 self.items = {}
60406 - for k, v in zip(self.keys + self.invalid_keys,
60407 - self.modules):
60408 + for k, v in zip(self.keys + self.invalid_keys, self.modules):
60409 self.items[k] = v
60410
60411 def testPortageModulesFile(self):
60412 @@ -32,7 +31,7 @@ class PortageModulesFileTestCase(TestCase):
60413 fd, self.fname = mkstemp()
60414 f = os.fdopen(fd, 'w')
60415 for k, v in self.items.items():
60416 - f.write('%s=%s\n' % (k,v))
60417 + f.write('%s=%s\n' % (k, v))
60418 f.close()
60419
60420 def NukeFile(self):
60421
60422 diff --git a/pym/portage/tests/glsa/__init__.py b/pym/portage/tests/glsa/__init__.py
60423 new file mode 100644
60424 index 0000000..6cde932
60425 --- /dev/null
60426 +++ b/pym/portage/tests/glsa/__init__.py
60427 @@ -0,0 +1,2 @@
60428 +# Copyright 2013 Gentoo Foundation
60429 +# Distributed under the terms of the GNU General Public License v2
60430
60431 diff --git a/pym/portage/tests/glsa/__test__ b/pym/portage/tests/glsa/__test__
60432 new file mode 100644
60433 index 0000000..e69de29
60434
60435 diff --git a/pym/portage/tests/glsa/test_security_set.py b/pym/portage/tests/glsa/test_security_set.py
60436 new file mode 100644
60437 index 0000000..edf5678
60438 --- /dev/null
60439 +++ b/pym/portage/tests/glsa/test_security_set.py
60440 @@ -0,0 +1,144 @@
60441 +# Copyright 2013 Gentoo Foundation
60442 +# Distributed under the terms of the GNU General Public License v2
60443 +
60444 +from __future__ import unicode_literals
60445 +
60446 +import io
60447 +
60448 +import portage
60449 +from portage import os, _encodings
60450 +from portage.tests import TestCase
60451 +from portage.tests.resolver.ResolverPlayground import (ResolverPlayground,
60452 + ResolverPlaygroundTestCase)
60453 +
60454 +class SecuritySetTestCase(TestCase):
60455 +
60456 + glsa_template = """\
60457 +<?xml version="1.0" encoding="UTF-8"?>
60458 +<?xml-stylesheet href="/xsl/glsa.xsl" type="text/xsl"?>
60459 +<?xml-stylesheet href="/xsl/guide.xsl" type="text/xsl"?>
60460 +<!DOCTYPE glsa SYSTEM "http://www.gentoo.org/dtd/glsa.dtd">
60461 +<glsa id="%(glsa_id)s">
60462 + <title>%(pkgname)s: Multiple vulnerabilities</title>
60463 + <synopsis>Multiple vulnerabilities have been found in %(pkgname)s.
60464 + </synopsis>
60465 + <product type="ebuild">%(pkgname)s</product>
60466 + <announced>January 18, 2013</announced>
60467 + <revised>January 18, 2013: 1</revised>
60468 + <bug>55555</bug>
60469 + <access>remote</access>
60470 + <affected>
60471 + <package name="%(cp)s" auto="yes" arch="*">
60472 + <unaffected range="ge">%(unaffected_version)s</unaffected>
60473 + <vulnerable range="lt">%(unaffected_version)s</vulnerable>
60474 + </package>
60475 + </affected>
60476 + <background>
60477 + <p>%(pkgname)s is software package.</p>
60478 + </background>
60479 + <description>
60480 + <p>Multiple vulnerabilities have been discovered in %(pkgname)s.
60481 + </p>
60482 + </description>
60483 + <impact type="normal">
60484 + <p>A remote attacker could exploit these vulnerabilities.</p>
60485 + </impact>
60486 + <workaround>
60487 + <p>There is no known workaround at this time.</p>
60488 + </workaround>
60489 + <resolution>
60490 + <p>All %(pkgname)s users should upgrade to the latest version:</p>
60491 + <code>
60492 + # emerge --sync
60493 + # emerge --ask --oneshot --verbose "&gt;=%(cp)s-%(unaffected_version)s"
60494 + </code>
60495 + </resolution>
60496 + <references>
60497 + </references>
60498 +</glsa>
60499 +"""
60500 +
60501 + def _must_skip(self):
60502 + try:
60503 + __import__("xml.etree.ElementTree")
60504 + __import__("xml.parsers.expat").parsers.expat.ExpatError
60505 + except (AttributeError, ImportError):
60506 + return "python is missing xml support"
60507 +
60508 + def testSecuritySet(self):
60509 +
60510 + skip_reason = self._must_skip()
60511 + if skip_reason:
60512 + self.portage_skip = skip_reason
60513 + self.assertFalse(True, skip_reason)
60514 + return
60515 +
60516 + ebuilds = {
60517 + "cat/A-vulnerable-2.2": {
60518 + "KEYWORDS": "x86"
60519 + },
60520 + "cat/B-not-vulnerable-4.5": {
60521 + "KEYWORDS": "x86"
60522 + },
60523 + }
60524 +
60525 + installed = {
60526 + "cat/A-vulnerable-2.1": {
60527 + "KEYWORDS": "x86"
60528 + },
60529 + "cat/B-not-vulnerable-4.4": {
60530 + "KEYWORDS": "x86"
60531 + },
60532 + }
60533 +
60534 + glsas = (
60535 + {
60536 + "glsa_id": "201301-01",
60537 + "pkgname": "A-vulnerable",
60538 + "cp": "cat/A-vulnerable",
60539 + "unaffected_version": "2.2"
60540 + },
60541 + {
60542 + "glsa_id": "201301-02",
60543 + "pkgname": "B-not-vulnerable",
60544 + "cp": "cat/B-not-vulnerable",
60545 + "unaffected_version": "4.4"
60546 + },
60547 + {
60548 + "glsa_id": "201301-03",
60549 + "pkgname": "NotInstalled",
60550 + "cp": "cat/NotInstalled",
60551 + "unaffected_version": "3.5"
60552 + },
60553 + )
60554 +
60555 + world = ["cat/A"]
60556 +
60557 + test_cases = (
60558 +
60559 + ResolverPlaygroundTestCase(
60560 + ["@security"],
60561 + options = {},
60562 + success = True,
60563 + mergelist = ["cat/A-vulnerable-2.2"]),
60564 + )
60565 +
60566 + playground = ResolverPlayground(ebuilds=ebuilds,
60567 + installed=installed, world=world, debug=False)
60568 +
60569 + try:
60570 +
60571 + portdb = playground.trees[playground.eroot]["porttree"].dbapi
60572 + glsa_dir = os.path.join(portdb.porttree_root, 'metadata', 'glsa')
60573 + portage.util.ensure_dirs(glsa_dir)
60574 + for glsa in glsas:
60575 + with io.open(os.path.join(glsa_dir,
60576 + 'glsa-' + glsa["glsa_id"] + '.xml'),
60577 + encoding=_encodings['repo.content'], mode='w') as f:
60578 + f.write(self.glsa_template % glsa)
60579 +
60580 + for test_case in test_cases:
60581 + playground.run_TestCase(test_case)
60582 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
60583 + finally:
60584 + playground.cleanup()
60585
60586 diff --git a/pym/portage/tests/lazyimport/test_lazy_import_portage_baseline.py b/pym/portage/tests/lazyimport/test_lazy_import_portage_baseline.py
60587 index c7ebbaf..080cf3f 100644
60588 --- a/pym/portage/tests/lazyimport/test_lazy_import_portage_baseline.py
60589 +++ b/pym/portage/tests/lazyimport/test_lazy_import_portage_baseline.py
60590 @@ -6,8 +6,8 @@ import portage
60591 from portage import os
60592 from portage.const import PORTAGE_PYM_PATH
60593 from portage.tests import TestCase
60594 +from portage.util._eventloop.global_event_loop import global_event_loop
60595
60596 -from _emerge.PollScheduler import PollScheduler
60597 from _emerge.PipeReader import PipeReader
60598 from _emerge.SpawnProcess import SpawnProcess
60599
60600 @@ -52,7 +52,7 @@ sys.stdout.write(" ".join(k for k in sys.modules
60601 # then the above PYTHONPATH override doesn't help.
60602 env['PORTAGE_PYM_PATH'] = PORTAGE_PYM_PATH
60603
60604 - scheduler = PollScheduler().sched_iface
60605 + scheduler = global_event_loop()
60606 master_fd, slave_fd = os.pipe()
60607 master_file = os.fdopen(master_fd, 'rb', 0)
60608 slave_file = os.fdopen(slave_fd, 'wb')
60609
60610 diff --git a/pym/portage/tests/lint/test_bash_syntax.py b/pym/portage/tests/lint/test_bash_syntax.py
60611 index aef8d74..fdbb6fe 100644
60612 --- a/pym/portage/tests/lint/test_bash_syntax.py
60613 +++ b/pym/portage/tests/lint/test_bash_syntax.py
60614 @@ -1,20 +1,26 @@
60615 -# Copyright 2010 Gentoo Foundation
60616 +# Copyright 2010-2013 Gentoo Foundation
60617 # Distributed under the terms of the GNU General Public License v2
60618
60619 +from itertools import chain
60620 import stat
60621 +import subprocess
60622 +import sys
60623
60624 -from portage.const import BASH_BINARY, PORTAGE_BIN_PATH
60625 +from portage.const import BASH_BINARY, PORTAGE_BASE_PATH, PORTAGE_BIN_PATH
60626 from portage.tests import TestCase
60627 from portage import os
60628 -from portage import subprocess_getstatusoutput
60629 from portage import _encodings
60630 -from portage import _shell_quote
60631 from portage import _unicode_decode, _unicode_encode
60632
60633 class BashSyntaxTestCase(TestCase):
60634
60635 def testBashSyntax(self):
60636 - for parent, dirs, files in os.walk(PORTAGE_BIN_PATH):
60637 + locations = [PORTAGE_BIN_PATH]
60638 + misc_dir = os.path.join(PORTAGE_BASE_PATH, "misc")
60639 + if os.path.isdir(misc_dir):
60640 + locations.append(misc_dir)
60641 + for parent, dirs, files in \
60642 + chain.from_iterable(os.walk(x) for x in locations):
60643 parent = _unicode_decode(parent,
60644 encoding=_encodings['fs'], errors='strict')
60645 for x in files:
60646 @@ -36,7 +42,13 @@ class BashSyntaxTestCase(TestCase):
60647 f.close()
60648 if line[:2] == '#!' and \
60649 'bash' in line:
60650 - cmd = "%s -n %s" % (_shell_quote(BASH_BINARY), _shell_quote(x))
60651 - status, output = subprocess_getstatusoutput(cmd)
60652 + cmd = [BASH_BINARY, "-n", x]
60653 + cmd = [_unicode_encode(x,
60654 + encoding=_encodings['fs'], errors='strict') for x in cmd]
60655 + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
60656 + stderr=subprocess.STDOUT)
60657 + output = _unicode_decode(proc.communicate()[0],
60658 + encoding=_encodings['fs'])
60659 + status = proc.wait()
60660 self.assertEqual(os.WIFEXITED(status) and \
60661 os.WEXITSTATUS(status) == os.EX_OK, True, msg=output)
60662
60663 diff --git a/pym/portage/tests/lint/test_compile_modules.py b/pym/portage/tests/lint/test_compile_modules.py
60664 index f90a666..ce7e3fb 100644
60665 --- a/pym/portage/tests/lint/test_compile_modules.py
60666 +++ b/pym/portage/tests/lint/test_compile_modules.py
60667 @@ -1,6 +1,7 @@
60668 -# Copyright 2009-2010 Gentoo Foundation
60669 +# Copyright 2009-2014 Gentoo Foundation
60670 # Distributed under the terms of the GNU General Public License v2
60671
60672 +import errno
60673 import itertools
60674 import stat
60675
60676 @@ -10,12 +11,10 @@ from portage import os
60677 from portage import _encodings
60678 from portage import _unicode_decode, _unicode_encode
60679
60680 -import py_compile
60681 -
60682 class CompileModulesTestCase(TestCase):
60683
60684 def testCompileModules(self):
60685 - for parent, dirs, files in itertools.chain(
60686 + for parent, _dirs, files in itertools.chain(
60687 os.walk(PORTAGE_BIN_PATH),
60688 os.walk(PORTAGE_PYM_PATH)):
60689 parent = _unicode_decode(parent,
60690 @@ -33,14 +32,21 @@ class CompileModulesTestCase(TestCase):
60691 if x[-3:] == '.py':
60692 do_compile = True
60693 else:
60694 - # Check for python shebang
60695 - f = open(_unicode_encode(x,
60696 - encoding=_encodings['fs'], errors='strict'), 'rb')
60697 - line = _unicode_decode(f.readline(),
60698 - encoding=_encodings['content'], errors='replace')
60699 - f.close()
60700 - if line[:2] == '#!' and \
60701 - 'python' in line:
60702 + # Check for python shebang.
60703 + try:
60704 + with open(_unicode_encode(x,
60705 + encoding=_encodings['fs'], errors='strict'), 'rb') as f:
60706 + line = _unicode_decode(f.readline(),
60707 + encoding=_encodings['content'], errors='replace')
60708 + except IOError as e:
60709 + # Some tests create files that are unreadable by the
60710 + # user (by design), so ignore EACCES issues.
60711 + if e.errno != errno.EACCES:
60712 + raise
60713 + continue
60714 + if line[:2] == '#!' and 'python' in line:
60715 do_compile = True
60716 if do_compile:
60717 - py_compile.compile(x, cfile='/dev/null', doraise=True)
60718 + with open(_unicode_encode(x,
60719 + encoding=_encodings['fs'], errors='strict'), 'rb') as f:
60720 + compile(f.read(), x, 'exec')
60721
60722 diff --git a/pym/portage/tests/lint/test_import_modules.py b/pym/portage/tests/lint/test_import_modules.py
60723 index 8d257c5..34261f4 100644
60724 --- a/pym/portage/tests/lint/test_import_modules.py
60725 +++ b/pym/portage/tests/lint/test_import_modules.py
60726 @@ -1,4 +1,4 @@
60727 -# Copyright 2011 Gentoo Foundation
60728 +# Copyright 2011-2012 Gentoo Foundation
60729 # Distributed under the terms of the GNU General Public License v2
60730
60731 from portage.const import PORTAGE_PYM_PATH
60732
60733 diff --git a/pym/portage/tests/locks/test_asynchronous_lock.py b/pym/portage/tests/locks/test_asynchronous_lock.py
60734 index 49dd10e..3a2ccfb 100644
60735 --- a/pym/portage/tests/locks/test_asynchronous_lock.py
60736 +++ b/pym/portage/tests/locks/test_asynchronous_lock.py
60737 @@ -7,13 +7,13 @@ import tempfile
60738 from portage import os
60739 from portage import shutil
60740 from portage.tests import TestCase
60741 +from portage.util._eventloop.global_event_loop import global_event_loop
60742 from _emerge.AsynchronousLock import AsynchronousLock
60743 -from _emerge.PollScheduler import PollScheduler
60744
60745 class AsynchronousLockTestCase(TestCase):
60746
60747 def _testAsynchronousLock(self):
60748 - scheduler = PollScheduler().sched_iface
60749 + scheduler = global_event_loop()
60750 tempdir = tempfile.mkdtemp()
60751 try:
60752 path = os.path.join(tempdir, 'lock_me')
60753 @@ -53,7 +53,7 @@ class AsynchronousLockTestCase(TestCase):
60754 os.environ["__PORTAGE_TEST_HARDLINK_LOCKS"] = prev_state
60755
60756 def _testAsynchronousLockWait(self):
60757 - scheduler = PollScheduler().sched_iface
60758 + scheduler = global_event_loop()
60759 tempdir = tempfile.mkdtemp()
60760 try:
60761 path = os.path.join(tempdir, 'lock_me')
60762 @@ -94,7 +94,7 @@ class AsynchronousLockTestCase(TestCase):
60763 os.environ["__PORTAGE_TEST_HARDLINK_LOCKS"] = prev_state
60764
60765 def _testAsynchronousLockWaitCancel(self):
60766 - scheduler = PollScheduler().sched_iface
60767 + scheduler = global_event_loop()
60768 tempdir = tempfile.mkdtemp()
60769 try:
60770 path = os.path.join(tempdir, 'lock_me')
60771 @@ -132,7 +132,7 @@ class AsynchronousLockTestCase(TestCase):
60772 os.environ["__PORTAGE_TEST_HARDLINK_LOCKS"] = prev_state
60773
60774 def _testAsynchronousLockWaitKill(self):
60775 - scheduler = PollScheduler().sched_iface
60776 + scheduler = global_event_loop()
60777 tempdir = tempfile.mkdtemp()
60778 try:
60779 path = os.path.join(tempdir, 'lock_me')
60780
60781 diff --git a/pym/portage/tests/process/test_PopenProcess.py b/pym/portage/tests/process/test_PopenProcess.py
60782 new file mode 100644
60783 index 0000000..88da0b3
60784 --- /dev/null
60785 +++ b/pym/portage/tests/process/test_PopenProcess.py
60786 @@ -0,0 +1,85 @@
60787 +# Copyright 2012-2013 Gentoo Foundation
60788 +# Distributed under the terms of the GNU General Public License v2
60789 +
60790 +import subprocess
60791 +import tempfile
60792 +
60793 +from portage import os
60794 +from portage.tests import TestCase
60795 +from portage.util._async.PipeLogger import PipeLogger
60796 +from portage.util._async.PopenProcess import PopenProcess
60797 +from portage.util._eventloop.global_event_loop import global_event_loop
60798 +from _emerge.PipeReader import PipeReader
60799 +
60800 +class PopenPipeTestCase(TestCase):
60801 + """
60802 + Test PopenProcess, which can be useful for Jython support, since it
60803 + uses the subprocess.Popen instead of os.fork().
60804 + """
60805 +
60806 + _echo_cmd = "echo -n '%s'"
60807 +
60808 + def _testPipeReader(self, test_string):
60809 + """
60810 + Use a poll loop to read data from a pipe and assert that
60811 + the data written to the pipe is identical to the data
60812 + read from the pipe.
60813 + """
60814 +
60815 + producer = PopenProcess(proc=subprocess.Popen(
60816 + ["bash", "-c", self._echo_cmd % test_string],
60817 + stdout=subprocess.PIPE, stderr=subprocess.STDOUT),
60818 + pipe_reader=PipeReader(), scheduler=global_event_loop())
60819 +
60820 + consumer = producer.pipe_reader
60821 + consumer.input_files = {"producer" : producer.proc.stdout}
60822 +
60823 + producer.start()
60824 + producer.wait()
60825 +
60826 + self.assertEqual(producer.returncode, os.EX_OK)
60827 + self.assertEqual(consumer.returncode, os.EX_OK)
60828 +
60829 + return consumer.getvalue().decode('ascii', 'replace')
60830 +
60831 + def _testPipeLogger(self, test_string):
60832 +
60833 + producer = PopenProcess(proc=subprocess.Popen(
60834 + ["bash", "-c", self._echo_cmd % test_string],
60835 + stdout=subprocess.PIPE, stderr=subprocess.STDOUT),
60836 + scheduler=global_event_loop())
60837 +
60838 + fd, log_file_path = tempfile.mkstemp()
60839 + try:
60840 +
60841 + consumer = PipeLogger(background=True,
60842 + input_fd=producer.proc.stdout,
60843 + log_file_path=log_file_path)
60844 +
60845 + producer.pipe_reader = consumer
60846 +
60847 + producer.start()
60848 + producer.wait()
60849 +
60850 + self.assertEqual(producer.returncode, os.EX_OK)
60851 + self.assertEqual(consumer.returncode, os.EX_OK)
60852 +
60853 + with open(log_file_path, 'rb') as f:
60854 + content = f.read()
60855 +
60856 + finally:
60857 + os.close(fd)
60858 + os.unlink(log_file_path)
60859 +
60860 + return content.decode('ascii', 'replace')
60861 +
60862 + def testPopenPipe(self):
60863 + for x in (1, 2, 5, 6, 7, 8, 2**5, 2**10, 2**12, 2**13, 2**14):
60864 + test_string = x * "a"
60865 + output = self._testPipeReader(test_string)
60866 + self.assertEqual(test_string, output,
60867 + "x = %s, len(output) = %s" % (x, len(output)))
60868 +
60869 + output = self._testPipeLogger(test_string)
60870 + self.assertEqual(test_string, output,
60871 + "x = %s, len(output) = %s" % (x, len(output)))
60872
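The new test is also a compact reference for the producer/consumer wiring itself: wrap an already-created subprocess.Popen in a PopenProcess, hand it a PipeReader for its stdout, then start() and wait() on the shared event loop. A hedged sketch of the same wiring as a standalone helper; the function name capture_output is illustrative and not part of the portage API:

# Illustrative helper, not portage API: run a command and collect its
# stdout through the event loop, exactly as _testPipeReader does above.
import subprocess
from portage.util._async.PopenProcess import PopenProcess
from portage.util._eventloop.global_event_loop import global_event_loop
from _emerge.PipeReader import PipeReader

def capture_output(argv):
	producer = PopenProcess(
		proc=subprocess.Popen(argv, stdout=subprocess.PIPE,
			stderr=subprocess.STDOUT),
		pipe_reader=PipeReader(), scheduler=global_event_loop())
	consumer = producer.pipe_reader
	consumer.input_files = {"producer": producer.proc.stdout}
	producer.start()
	producer.wait()
	return consumer.getvalue()  # bytes written by the child

# e.g. capture_output(["bash", "-c", "echo -n hello"]) would return b'hello'
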
60873 diff --git a/pym/portage/tests/process/test_PopenProcessBlockingIO.py b/pym/portage/tests/process/test_PopenProcessBlockingIO.py
60874 new file mode 100644
60875 index 0000000..9ee291a
60876 --- /dev/null
60877 +++ b/pym/portage/tests/process/test_PopenProcessBlockingIO.py
60878 @@ -0,0 +1,63 @@
60879 +# Copyright 2012 Gentoo Foundation
60880 +# Distributed under the terms of the GNU General Public License v2
60881 +
60882 +import subprocess
60883 +
60884 +try:
60885 + import threading
60886 +except ImportError:
60887 + # dummy_threading will not suffice
60888 + threading = None
60889 +
60890 +from portage import os
60891 +from portage.tests import TestCase
60892 +from portage.util._async.PopenProcess import PopenProcess
60893 +from portage.util._eventloop.global_event_loop import global_event_loop
60894 +from portage.util._async.PipeReaderBlockingIO import PipeReaderBlockingIO
60895 +
60896 +class PopenPipeBlockingIOTestCase(TestCase):
60897 + """
60898 + Test PopenProcess, which can be useful for Jython support:
60899 + * use subprocess.Popen since Jython does not support os.fork()
60900 + * use blocking IO with threads, since Jython does not support
60901 + fcntl non-blocking IO
60902 + """
60903 +
60904 + _echo_cmd = "echo -n '%s'"
60905 +
60906 + def _testPipeReader(self, test_string):
60907 + """
60908 + Use blocking IO in a thread to read data from a pipe and assert that
60909 + the data written to the pipe is identical to the data
60910 + read from the pipe.
60911 + """
60912 +
60913 + producer = PopenProcess(proc=subprocess.Popen(
60914 + ["bash", "-c", self._echo_cmd % test_string],
60915 + stdout=subprocess.PIPE, stderr=subprocess.STDOUT),
60916 + pipe_reader=PipeReaderBlockingIO(), scheduler=global_event_loop())
60917 +
60918 + consumer = producer.pipe_reader
60919 + consumer.input_files = {"producer" : producer.proc.stdout}
60920 +
60921 + producer.start()
60922 + producer.wait()
60923 +
60924 + self.assertEqual(producer.returncode, os.EX_OK)
60925 + self.assertEqual(consumer.returncode, os.EX_OK)
60926 +
60927 + return consumer.getvalue().decode('ascii', 'replace')
60928 +
60929 + def testPopenPipeBlockingIO(self):
60930 +
60931 + if threading is None:
60932 + skip_reason = "threading disabled"
60933 + self.portage_skip = skip_reason
60934 + self.assertFalse(True, skip_reason)
60935 + return
60936 +
60937 + for x in (1, 2, 5, 6, 7, 8, 2**5, 2**10, 2**12, 2**13, 2**14):
60938 + test_string = x * "a"
60939 + output = self._testPipeReader(test_string)
60940 + self.assertEqual(test_string, output,
60941 + "x = %s, len(output) = %s" % (x, len(output)))
60942
60943 diff --git a/pym/portage/tests/process/test_poll.py b/pym/portage/tests/process/test_poll.py
60944 index d6667b4..8c57c23 100644
60945 --- a/pym/portage/tests/process/test_poll.py
60946 +++ b/pym/portage/tests/process/test_poll.py
60947 @@ -1,12 +1,14 @@
60948 -# Copyright 1998-2011 Gentoo Foundation
60949 +# Copyright 1998-2013 Gentoo Foundation
60950 # Distributed under the terms of the GNU General Public License v2
60951
60952 +import subprocess
60953 +
60954 from portage import os
60955 from portage.tests import TestCase
60956 from portage.util._pty import _create_pty_or_pipe
60957 -from _emerge.TaskScheduler import TaskScheduler
60958 +from portage.util._async.PopenProcess import PopenProcess
60959 +from portage.util._eventloop.global_event_loop import global_event_loop
60960 from _emerge.PipeReader import PipeReader
60961 -from _emerge.SpawnProcess import SpawnProcess
60962
60963 class PipeReaderTestCase(TestCase):
60964
60965 @@ -36,26 +38,23 @@ class PipeReaderTestCase(TestCase):
60966 # WARNING: It is very important to use unbuffered mode here,
60967 # in order to avoid issue 5380 with python3.
60968 master_file = os.fdopen(master_fd, 'rb', 0)
60969 - slave_file = os.fdopen(slave_fd, 'wb', 0)
60970 - task_scheduler = TaskScheduler(max_jobs=2)
60971 - producer = SpawnProcess(
60972 - args=["bash", "-c", self._echo_cmd % test_string],
60973 - env=os.environ, fd_pipes={1:slave_fd},
60974 - scheduler=task_scheduler.sched_iface)
60975 - task_scheduler.add(producer)
60976 - slave_file.close()
60977 + scheduler = global_event_loop()
60978
60979 consumer = PipeReader(
60980 input_files={"producer" : master_file},
60981 - scheduler=task_scheduler.sched_iface, _use_array=self._use_array)
60982 + _use_array=self._use_array,
60983 + scheduler=scheduler)
60984 +
60985 + producer = PopenProcess(
60986 + pipe_reader=consumer,
60987 + proc=subprocess.Popen(["bash", "-c", self._echo_cmd % test_string],
60988 + stdout=slave_fd),
60989 + scheduler=scheduler)
60990
60991 - task_scheduler.add(consumer)
60992 + producer.start()
60993 + os.close(slave_fd)
60994 + producer.wait()
60995
60996 - # This will ensure that both tasks have exited, which
60997 - # is necessary to avoid "ResourceWarning: unclosed file"
60998 - # warnings since Python 3.2 (and also ensures that we
60999 - # don't leave any zombie child processes).
61000 - task_scheduler.run()
61001 self.assertEqual(producer.returncode, os.EX_OK)
61002 self.assertEqual(consumer.returncode, os.EX_OK)
61003
61004
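The rewrite above drops TaskScheduler and SpawnProcess in favor of a PopenProcess whose stdout is the pty or pipe slave. The ordering matters: the parent closes slave_fd only after producer.start(), so the PipeReader on the master side sees EOF exactly when the child exits. A compressed sketch of that descriptor lifecycle, using a plain os.pipe() rather than _create_pty_or_pipe for brevity:

# Sketch of the fd handling used in _testPipeReader above, with a plain pipe.
import subprocess
from portage import os
from portage.util._async.PopenProcess import PopenProcess
from portage.util._eventloop.global_event_loop import global_event_loop
from _emerge.PipeReader import PipeReader

master_fd, slave_fd = os.pipe()
master_file = os.fdopen(master_fd, 'rb', 0)  # unbuffered, as in the test
scheduler = global_event_loop()

consumer = PipeReader(input_files={"producer": master_file},
	scheduler=scheduler)
producer = PopenProcess(pipe_reader=consumer,
	proc=subprocess.Popen(["bash", "-c", "echo -n hello"], stdout=slave_fd),
	scheduler=scheduler)

producer.start()
os.close(slave_fd)  # drop the parent's copy so the reader can see EOF
producer.wait()     # reaps the child and drains the reader
assert consumer.getvalue() == b'hello'
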
61005 diff --git a/pym/portage/tests/repoman/test_echangelog.py b/pym/portage/tests/repoman/test_echangelog.py
61006 index 502aa72..1640be2 100644
61007 --- a/pym/portage/tests/repoman/test_echangelog.py
61008 +++ b/pym/portage/tests/repoman/test_echangelog.py
61009 @@ -1,13 +1,9 @@
61010 # Copyright 2012 Gentoo Foundation
61011 # Distributed under the terms of the GNU General Public License v2
61012
61013 -import datetime
61014 -import subprocess
61015 -import sys
61016 import tempfile
61017 import time
61018
61019 -import portage
61020 from portage import os
61021 from portage import shutil
61022 from portage.tests import TestCase
61023 @@ -35,7 +31,7 @@ class RepomanEchangelogTestCase(TestCase):
61024
61025 self.header_pkg = '# ChangeLog for %s/%s\n' % (self.cat, self.pkg)
61026 self.header_copyright = '# Copyright 1999-%s Gentoo Foundation; Distributed under the GPL v2\n' % \
61027 - datetime.datetime.now().year
61028 + time.strftime('%Y', time.gmtime())
61029 self.header_cvs = '# $Header: $\n'
61030
61031 self.changelog = os.path.join(self.pkgdir, 'ChangeLog')
61032
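Dropping datetime is not just an import cleanup: the expected copyright year now comes from the UTC clock via time.gmtime(), so the test cannot disagree with tooling that also stamps dates in UTC (only relevant right around a year boundary in non-UTC time zones). The two expressions compare as follows:

# Both yield the current year; the first follows the local clock, the second UTC.
import datetime
import time

local_year = datetime.datetime.now().year      # int, local time
utc_year = time.strftime('%Y', time.gmtime())  # str, UTC

# The test interpolates the value through '%s', so int versus str makes no
# difference to the generated header; only the clock source changes.
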
61033 diff --git a/pym/portage/tests/repoman/test_simple.py b/pym/portage/tests/repoman/test_simple.py
61034 index eab06d9..69eb36d 100644
61035 --- a/pym/portage/tests/repoman/test_simple.py
61036 +++ b/pym/portage/tests/repoman/test_simple.py
61037 @@ -1,4 +1,4 @@
61038 -# Copyright 2011 Gentoo Foundation
61039 +# Copyright 2011-2014 Gentoo Foundation
61040 # Distributed under the terms of the GNU General Public License v2
61041
61042 import subprocess
61043 @@ -76,9 +76,26 @@ class SimpleRepomanTestCase(TestCase):
61044
61045 profiles = (
61046 ("x86", "default/linux/x86/test_profile", "stable"),
61047 + ("x86", "default/linux/x86/test_dev", "dev"),
61048 + ("x86", "default/linux/x86/test_exp", "exp"),
61049 )
61050
61051 + profile = {
61052 + "eapi": ("5",),
61053 + "package.use.stable.mask": ("dev-libs/A flag",)
61054 + }
61055 +
61056 ebuilds = {
61057 + "dev-libs/A-0": {
61058 + "COPYRIGHT_HEADER" : copyright_header,
61059 + "DESCRIPTION" : "Desc goes here",
61060 + "EAPI" : "5",
61061 + "HOMEPAGE" : "http://example.com",
61062 + "IUSE" : "flag",
61063 + "KEYWORDS": "x86",
61064 + "LICENSE": "GPL-2",
61065 + "RDEPEND": "flag? ( dev-libs/B[flag] )",
61066 + },
61067 "dev-libs/A-1": {
61068 "COPYRIGHT_HEADER" : copyright_header,
61069 "DESCRIPTION" : "Desc goes here",
61070 @@ -98,6 +115,17 @@ class SimpleRepomanTestCase(TestCase):
61071 "KEYWORDS": "~x86",
61072 "LICENSE": "GPL-2",
61073 },
61074 + "dev-libs/C-0": {
61075 + "COPYRIGHT_HEADER" : copyright_header,
61076 + "DESCRIPTION" : "Desc goes here",
61077 + "EAPI" : "4",
61078 + "HOMEPAGE" : "http://example.com",
61079 + "IUSE" : "flag",
61080 + # must be unstable, since dev-libs/A[flag] is stable masked
61081 + "KEYWORDS": "~x86",
61082 + "LICENSE": "GPL-2",
61083 + "RDEPEND": "flag? ( dev-libs/A[flag] )",
61084 + },
61085 }
61086 licenses = ["GPL-2"]
61087 arch_list = ["x86"]
61088 @@ -107,7 +135,7 @@ class SimpleRepomanTestCase(TestCase):
61089 "dev-libs/A",
61090 {
61091 "herd" : "base-system",
61092 - "flags" : "<flag name='flag'>Description of how USE='flag' affects this package</flag>",
61093 + "flags" : "<flag name='flag' restrict='&gt;=dev-libs/A-0'>Description of how USE='flag' affects this package</flag>",
61094 },
61095 ),
61096 (
61097 @@ -117,6 +145,13 @@ class SimpleRepomanTestCase(TestCase):
61098 "flags" : "<flag name='flag'>Description of how USE='flag' affects this package</flag>",
61099 },
61100 ),
61101 + (
61102 + "dev-libs/C",
61103 + {
61104 + "herd" : "no-herd",
61105 + "flags" : "<flag name='flag'>Description of how USE='flag' affects this package</flag>",
61106 + },
61107 + ),
61108 )
61109
61110 use_desc = (
61111 @@ -124,18 +159,18 @@ class SimpleRepomanTestCase(TestCase):
61112 )
61113
61114 playground = ResolverPlayground(ebuilds=ebuilds,
61115 - repo_configs=repo_configs, debug=debug)
61116 + profile=profile, repo_configs=repo_configs, debug=debug)
61117 settings = playground.settings
61118 eprefix = settings["EPREFIX"]
61119 eroot = settings["EROOT"]
61120 portdb = playground.trees[playground.eroot]["porttree"].dbapi
61121 homedir = os.path.join(eroot, "home")
61122 distdir = os.path.join(eprefix, "distdir")
61123 - portdir = settings["PORTDIR"]
61124 - profiles_dir = os.path.join(portdir, "profiles")
61125 - license_dir = os.path.join(portdir, "licenses")
61126 + test_repo_location = settings.repositories["test_repo"].location
61127 + profiles_dir = os.path.join(test_repo_location, "profiles")
61128 + license_dir = os.path.join(test_repo_location, "licenses")
61129
61130 - repoman_cmd = (portage._python_interpreter, "-Wd",
61131 + repoman_cmd = (portage._python_interpreter, "-b", "-Wd",
61132 os.path.join(PORTAGE_BIN_PATH, "repoman"))
61133
61134 git_binary = find_binary("git")
61135 @@ -159,6 +194,7 @@ class SimpleRepomanTestCase(TestCase):
61136 ("", git_cmd + ("init-db",)),
61137 ("", git_cmd + ("add", ".")),
61138 ("", git_cmd + ("commit", "-a", "-m", "add whole repo")),
61139 + ("", repoman_cmd + ("full", "-d")),
61140 ("", cp_cmd + (test_ebuild, test_ebuild[:-8] + "2.ebuild")),
61141 ("", git_cmd + ("add", test_ebuild[:-8] + "2.ebuild")),
61142 ("", repoman_cmd + ("commit", "-m", "bump to version 2")),
61143 @@ -192,23 +228,35 @@ class SimpleRepomanTestCase(TestCase):
61144 "PATH" : os.environ["PATH"],
61145 "PORTAGE_GRPNAME" : os.environ["PORTAGE_GRPNAME"],
61146 "PORTAGE_USERNAME" : os.environ["PORTAGE_USERNAME"],
61147 - "PORTDIR" : portdir,
61148 + "PORTAGE_REPOSITORIES" : settings.repositories.config_string(),
61149 "PYTHONPATH" : pythonpath,
61150 }
61151
61152 if os.environ.get("SANDBOX_ON") == "1":
61153 # avoid problems from nested sandbox instances
61154 - env["FEATURES"] = "-sandbox"
61155 + env["FEATURES"] = "-sandbox -usersandbox"
61156
61157 dirs = [homedir, license_dir, profiles_dir, distdir]
61158 try:
61159 for d in dirs:
61160 ensure_dirs(d)
61161 - with open(os.path.join(portdir, "skel.ChangeLog"), 'w') as f:
61162 + with open(os.path.join(test_repo_location, "skel.ChangeLog"), 'w') as f:
61163 f.write(copyright_header)
61164 with open(os.path.join(profiles_dir, "profiles.desc"), 'w') as f:
61165 for x in profiles:
61166 f.write("%s %s %s\n" % x)
61167 +
61168 + # ResolverPlayground only created the first profile,
61169 + # so create the remaining ones.
61170 + for x in profiles[1:]:
61171 + sub_profile_dir = os.path.join(profiles_dir, x[1])
61172 + ensure_dirs(sub_profile_dir)
61173 + for config_file, lines in profile.items():
61174 + file_name = os.path.join(sub_profile_dir, config_file)
61175 + with open(file_name, "w") as f:
61176 + for line in lines:
61177 + f.write("%s\n" % line)
61178 +
61179 for x in licenses:
61180 open(os.path.join(license_dir, x), 'wb').close()
61181 with open(os.path.join(profiles_dir, "arch.list"), 'w') as f:
61182 @@ -218,12 +266,12 @@ class SimpleRepomanTestCase(TestCase):
61183 for k, v in use_desc:
61184 f.write("%s - %s\n" % (k, v))
61185 for cp, xml_data in metadata_xml_files:
61186 - with open(os.path.join(portdir, cp, "metadata.xml"), 'w') as f:
61187 + with open(os.path.join(test_repo_location, cp, "metadata.xml"), 'w') as f:
61188 f.write(playground.metadata_xml_template % xml_data)
61189 - # Use a symlink to portdir, in order to trigger bugs
61190 + # Use a symlink to test_repo, in order to trigger bugs
61191 # involving canonical vs. non-canonical paths.
61192 - portdir_symlink = os.path.join(eroot, "portdir_symlink")
61193 - os.symlink(portdir, portdir_symlink)
61194 + test_repo_symlink = os.path.join(eroot, "test_repo_symlink")
61195 + os.symlink(test_repo_location, test_repo_symlink)
61196 # repoman checks metadata.dtd for recent CTIME, so copy the file in
61197 # order to ensure that the CTIME is current
61198 shutil.copyfile(metadata_dtd, os.path.join(distdir, "metadata.dtd"))
61199 @@ -238,9 +286,8 @@ class SimpleRepomanTestCase(TestCase):
61200 stdout = subprocess.PIPE
61201
61202 for cwd in ("", "dev-libs", "dev-libs/A", "dev-libs/B"):
61203 - abs_cwd = os.path.join(portdir_symlink, cwd)
61204 - proc = subprocess.Popen([portage._python_interpreter, "-Wd",
61205 - os.path.join(PORTAGE_BIN_PATH, "repoman"), "full"],
61206 + abs_cwd = os.path.join(test_repo_symlink, cwd)
61207 + proc = subprocess.Popen(repoman_cmd + ("full",),
61208 cwd=abs_cwd, env=env, stdout=stdout)
61209
61210 if debug:
61211 @@ -258,7 +305,7 @@ class SimpleRepomanTestCase(TestCase):
61212
61213 if git_binary is not None:
61214 for cwd, cmd in git_test:
61215 - abs_cwd = os.path.join(portdir_symlink, cwd)
61216 + abs_cwd = os.path.join(test_repo_symlink, cwd)
61217 proc = subprocess.Popen(cmd,
61218 cwd=abs_cwd, env=env, stdout=stdout)
61219
61220
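With PORTDIR removed from the child environment, the repoman subprocesses spawned above locate the repository through PORTAGE_REPOSITORIES, a repos.conf-style blob serialized from the playground settings, plus the test_repo_symlink working directory; repoman_cmd also gains Python's -b switch, which emits warnings for bytes/str comparisons under Python 3. A short sketch of how that child environment is assembled, mirroring the dict built in the test:

# Sketch: the environment handed to the repoman subprocesses above.
# 'settings' is the playground's config object, 'pythonpath' as in the test.
from portage import os

def repoman_child_env(settings, pythonpath):
	env = {
		"PATH": os.environ["PATH"],
		"PORTAGE_REPOSITORIES": settings.repositories.config_string(),
		"PYTHONPATH": pythonpath,
	}
	if os.environ.get("SANDBOX_ON") == "1":
		# avoid problems from nested sandbox instances
		env["FEATURES"] = "-sandbox -usersandbox"
	return env
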
61221 diff --git a/pym/portage/tests/resolver/ResolverPlayground.py b/pym/portage/tests/resolver/ResolverPlayground.py
61222 index 0ac2097..077e271 100644
61223 --- a/pym/portage/tests/resolver/ResolverPlayground.py
61224 +++ b/pym/portage/tests/resolver/ResolverPlayground.py
61225 @@ -1,7 +1,8 @@
61226 -# Copyright 2010-2012 Gentoo Foundation
61227 +# Copyright 2010-2014 Gentoo Foundation
61228 # Distributed under the terms of the GNU General Public License v2
61229
61230 from itertools import permutations
61231 +import fnmatch
61232 import sys
61233 import tempfile
61234 import portage
61235 @@ -25,6 +26,7 @@ from _emerge.depgraph import backtrack_depgraph
61236 from _emerge.RootConfig import RootConfig
61237
61238 if sys.hexversion >= 0x3000000:
61239 + # pylint: disable=W0622
61240 basestring = str
61241
61242 class ResolverPlayground(object):
61243 @@ -34,9 +36,10 @@ class ResolverPlayground(object):
61244 its work.
61245 """
61246
61247 - config_files = frozenset(("package.accept_keywords", "package.use", "package.mask", "package.keywords", \
61248 - "package.unmask", "package.properties", "package.license", "use.mask", "use.force",
61249 - "layout.conf",))
61250 + config_files = frozenset(("eapi", "layout.conf", "make.conf", "package.accept_keywords",
61251 + "package.keywords", "package.license", "package.mask", "package.properties",
61252 + "package.unmask", "package.use", "package.use.aliases", "package.use.stable.mask",
61253 + "unpack_dependencies", "use.aliases", "use.force", "use.mask", "layout.conf"))
61254
61255 metadata_xml_template = """<?xml version="1.0" encoding="UTF-8"?>
61256 <!DOCTYPE pkgmetadata SYSTEM "http://www.gentoo.org/dtd/metadata.dtd">
61257 @@ -54,27 +57,32 @@ class ResolverPlayground(object):
61258 """
61259
61260 def __init__(self, ebuilds={}, binpkgs={}, installed={}, profile={}, repo_configs={}, \
61261 - user_config={}, sets={}, world=[], world_sets=[], distfiles={}, debug=False):
61262 + user_config={}, sets={}, world=[], world_sets=[], distfiles={},
61263 + targetroot=False, debug=False):
61264 """
61265 - ebuilds: cpv -> metadata mapping simulating available ebuilds.
61266 + ebuilds: cpv -> metadata mapping simulating available ebuilds.
61267 installed: cpv -> metadata mapping simulating installed packages.
61268 If a metadata key is missing, it gets a default value.
61269 profile: settings defined by the profile.
61270 """
61271 self.debug = debug
61272 self.eprefix = normalize_path(tempfile.mkdtemp())
61273 + portage.const.EPREFIX = self.eprefix.rstrip(os.sep)
61274 +
61275 self.eroot = self.eprefix + os.sep
61276 + if targetroot:
61277 + self.target_root = os.path.join(self.eroot, 'target_root')
61278 + else:
61279 + self.target_root = os.sep
61280 self.distdir = os.path.join(self.eroot, "var", "portage", "distfiles")
61281 self.pkgdir = os.path.join(self.eprefix, "pkgdir")
61282 - self.portdir = os.path.join(self.eroot, "usr/portage")
61283 self.vdbdir = os.path.join(self.eroot, "var/db/pkg")
61284 - os.makedirs(self.portdir)
61285 os.makedirs(self.vdbdir)
61286
61287 if not debug:
61288 portage.util.noiselimit = -2
61289
61290 - self.repo_dirs = {}
61291 + self._repositories = {}
61292 #Make sure the main repo is always created
61293 self._get_repo_dir("test_repo")
61294
61295 @@ -88,20 +96,19 @@ class ResolverPlayground(object):
61296 self.settings, self.trees = self._load_config()
61297
61298 self._create_ebuild_manifests(ebuilds)
61299 -
61300 +
61301 portage.util.noiselimit = 0
61302
61303 def _get_repo_dir(self, repo):
61304 """
61305 Create the repo directory if needed.
61306 """
61307 - if repo not in self.repo_dirs:
61308 + if repo not in self._repositories:
61309 if repo == "test_repo":
61310 - repo_path = self.portdir
61311 - else:
61312 - repo_path = os.path.join(self.eroot, "usr", "local", repo)
61313 + self._repositories["DEFAULT"] = {"main-repo": repo}
61314
61315 - self.repo_dirs[repo] = repo_path
61316 + repo_path = os.path.join(self.eroot, "var", "repositories", repo)
61317 + self._repositories[repo] = {"location": repo_path}
61318 profile_path = os.path.join(repo_path, "profiles")
61319
61320 try:
61321 @@ -110,11 +117,10 @@ class ResolverPlayground(object):
61322 pass
61323
61324 repo_name_file = os.path.join(profile_path, "repo_name")
61325 - f = open(repo_name_file, "w")
61326 - f.write("%s\n" % repo)
61327 - f.close()
61328 + with open(repo_name_file, "w") as f:
61329 + f.write("%s\n" % repo)
61330
61331 - return self.repo_dirs[repo]
61332 + return self._repositories[repo]["location"]
61333
61334 def _create_distfiles(self, distfiles):
61335 os.makedirs(self.distdir)
61336 @@ -131,24 +137,18 @@ class ResolverPlayground(object):
61337
61338 metadata = ebuilds[cpv].copy()
61339 copyright_header = metadata.pop("COPYRIGHT_HEADER", None)
61340 - desc = metadata.pop("DESCRIPTION", None)
61341 - eapi = metadata.pop("EAPI", 0)
61342 - lic = metadata.pop("LICENSE", "")
61343 - properties = metadata.pop("PROPERTIES", "")
61344 - slot = metadata.pop("SLOT", 0)
61345 - keywords = metadata.pop("KEYWORDS", "x86")
61346 - homepage = metadata.pop("HOMEPAGE", None)
61347 - src_uri = metadata.pop("SRC_URI", None)
61348 - iuse = metadata.pop("IUSE", "")
61349 - provide = metadata.pop("PROVIDE", None)
61350 - depend = metadata.pop("DEPEND", "")
61351 - rdepend = metadata.pop("RDEPEND", None)
61352 - pdepend = metadata.pop("PDEPEND", None)
61353 - required_use = metadata.pop("REQUIRED_USE", None)
61354 + eapi = metadata.pop("EAPI", "0")
61355 misc_content = metadata.pop("MISC_CONTENT", None)
61356 + metadata.setdefault("DEPEND", "")
61357 + metadata.setdefault("SLOT", "0")
61358 + metadata.setdefault("KEYWORDS", "x86")
61359 + metadata.setdefault("IUSE", "")
61360
61361 - if metadata:
61362 - raise ValueError("metadata of ebuild '%s' contains unknown keys: %s" % (cpv, metadata.keys()))
61363 + unknown_keys = set(metadata).difference(
61364 + portage.dbapi.dbapi._known_keys)
61365 + if unknown_keys:
61366 + raise ValueError("metadata of ebuild '%s' contains unknown keys: %s" %
61367 + (cpv, sorted(unknown_keys)))
61368
61369 repo_dir = self._get_repo_dir(repo)
61370 ebuild_dir = os.path.join(repo_dir, a.cp)
61371 @@ -158,33 +158,14 @@ class ResolverPlayground(object):
61372 except os.error:
61373 pass
61374
61375 - f = open(ebuild_path, "w")
61376 - if copyright_header is not None:
61377 - f.write(copyright_header)
61378 - f.write('EAPI="' + str(eapi) + '"\n')
61379 - if desc is not None:
61380 - f.write('DESCRIPTION="%s"\n' % desc)
61381 - if homepage is not None:
61382 - f.write('HOMEPAGE="%s"\n' % homepage)
61383 - if src_uri is not None:
61384 - f.write('SRC_URI="%s"\n' % src_uri)
61385 - f.write('LICENSE="' + str(lic) + '"\n')
61386 - f.write('PROPERTIES="' + str(properties) + '"\n')
61387 - f.write('SLOT="' + str(slot) + '"\n')
61388 - f.write('KEYWORDS="' + str(keywords) + '"\n')
61389 - f.write('IUSE="' + str(iuse) + '"\n')
61390 - if provide is not None:
61391 - f.write('PROVIDE="%s"\n' % provide)
61392 - f.write('DEPEND="' + str(depend) + '"\n')
61393 - if rdepend is not None:
61394 - f.write('RDEPEND="' + str(rdepend) + '"\n')
61395 - if pdepend is not None:
61396 - f.write('PDEPEND="' + str(pdepend) + '"\n')
61397 - if required_use is not None:
61398 - f.write('REQUIRED_USE="' + str(required_use) + '"\n')
61399 - if misc_content is not None:
61400 - f.write(misc_content)
61401 - f.close()
61402 + with open(ebuild_path, "w") as f:
61403 + if copyright_header is not None:
61404 + f.write(copyright_header)
61405 + f.write('EAPI="%s"\n' % eapi)
61406 + for k, v in metadata.items():
61407 + f.write('%s="%s"\n' % (k, v))
61408 + if misc_content is not None:
61409 + f.write(misc_content)
61410
61411 def _create_ebuild_manifests(self, ebuilds):
61412 tmpsettings = config(clone=self.settings)
61413 @@ -241,49 +222,25 @@ class ResolverPlayground(object):
61414 pass
61415
61416 metadata = installed[cpv].copy()
61417 - eapi = metadata.pop("EAPI", 0)
61418 - lic = metadata.pop("LICENSE", "")
61419 - properties = metadata.pop("PROPERTIES", "")
61420 - slot = metadata.pop("SLOT", 0)
61421 - build_time = metadata.pop("BUILD_TIME", "0")
61422 - keywords = metadata.pop("KEYWORDS", "~x86")
61423 - iuse = metadata.pop("IUSE", "")
61424 - use = metadata.pop("USE", "")
61425 - provide = metadata.pop("PROVIDE", None)
61426 - depend = metadata.pop("DEPEND", "")
61427 - rdepend = metadata.pop("RDEPEND", None)
61428 - pdepend = metadata.pop("PDEPEND", None)
61429 - required_use = metadata.pop("REQUIRED_USE", None)
61430 -
61431 - if metadata:
61432 - raise ValueError("metadata of installed '%s' contains unknown keys: %s" % (cpv, metadata.keys()))
61433 -
61434 - def write_key(key, value):
61435 - f = open(os.path.join(vdb_pkg_dir, key), "w")
61436 - f.write(str(value) + "\n")
61437 - f.close()
61438 -
61439 - write_key("EAPI", eapi)
61440 - write_key("BUILD_TIME", build_time)
61441 - write_key("COUNTER", "0")
61442 - write_key("LICENSE", lic)
61443 - write_key("PROPERTIES", properties)
61444 - write_key("SLOT", slot)
61445 - write_key("LICENSE", lic)
61446 - write_key("PROPERTIES", properties)
61447 - write_key("repository", repo)
61448 - write_key("KEYWORDS", keywords)
61449 - write_key("IUSE", iuse)
61450 - write_key("USE", use)
61451 - if provide is not None:
61452 - write_key("PROVIDE", provide)
61453 - write_key("DEPEND", depend)
61454 - if rdepend is not None:
61455 - write_key("RDEPEND", rdepend)
61456 - if pdepend is not None:
61457 - write_key("PDEPEND", pdepend)
61458 - if required_use is not None:
61459 - write_key("REQUIRED_USE", required_use)
61460 + metadata.setdefault("SLOT", "0")
61461 + metadata.setdefault("BUILD_TIME", "0")
61462 + metadata.setdefault("COUNTER", "0")
61463 + metadata.setdefault("KEYWORDS", "~x86")
61464 +
61465 + unknown_keys = set(metadata).difference(
61466 + portage.dbapi.dbapi._known_keys)
61467 + unknown_keys.discard("BUILD_TIME")
61468 + unknown_keys.discard("COUNTER")
61469 + unknown_keys.discard("repository")
61470 + unknown_keys.discard("USE")
61471 + if unknown_keys:
61472 + raise ValueError("metadata of installed '%s' contains unknown keys: %s" %
61473 + (cpv, sorted(unknown_keys)))
61474 +
61475 + metadata["repository"] = repo
61476 + for k, v in metadata.items():
61477 + with open(os.path.join(vdb_pkg_dir, k), "w") as f:
61478 + f.write("%s\n" % v)
61479
61480 def _create_profile(self, ebuilds, installed, profile, repo_configs, user_config, sets):
61481
61482 @@ -294,9 +251,12 @@ class ResolverPlayground(object):
61483 except os.error:
61484 pass
61485
61486 - for repo in self.repo_dirs:
61487 + for repo in self._repositories:
61488 + if repo == "DEFAULT":
61489 + continue
61490 +
61491 repo_dir = self._get_repo_dir(repo)
61492 - profile_dir = os.path.join(self._get_repo_dir(repo), "profiles")
61493 + profile_dir = os.path.join(repo_dir, "profiles")
61494 metadata_dir = os.path.join(repo_dir, "metadata")
61495 os.makedirs(metadata_dir)
61496
61497 @@ -310,60 +270,66 @@ class ResolverPlayground(object):
61498 categories.add(catsplit(cpv)[0])
61499
61500 categories_file = os.path.join(profile_dir, "categories")
61501 - f = open(categories_file, "w")
61502 - for cat in categories:
61503 - f.write(cat + "\n")
61504 - f.close()
61505 -
61506 + with open(categories_file, "w") as f:
61507 + for cat in categories:
61508 + f.write(cat + "\n")
61509 +
61510 #Create $REPO/profiles/license_groups
61511 license_file = os.path.join(profile_dir, "license_groups")
61512 - f = open(license_file, "w")
61513 - f.write("EULA TEST\n")
61514 - f.close()
61515 + with open(license_file, "w") as f:
61516 + f.write("EULA TEST\n")
61517
61518 - repo_config = repo_configs.get(repo)
61519 + repo_config = repo_configs.get(repo)
61520 if repo_config:
61521 for config_file, lines in repo_config.items():
61522 - if config_file not in self.config_files:
61523 + if config_file not in self.config_files and not any(fnmatch.fnmatch(config_file, os.path.join(x, "*")) for x in self.config_files):
61524 raise ValueError("Unknown config file: '%s'" % config_file)
61525
61526 if config_file in ("layout.conf",):
61527 file_name = os.path.join(repo_dir, "metadata", config_file)
61528 else:
61529 file_name = os.path.join(profile_dir, config_file)
61530 - f = open(file_name, "w")
61531 - for line in lines:
61532 - f.write("%s\n" % line)
61533 - f.close()
61534 + if "/" in config_file and not os.path.isdir(os.path.dirname(file_name)):
61535 + os.makedirs(os.path.dirname(file_name))
61536 + with open(file_name, "w") as f:
61537 + for line in lines:
61538 + f.write("%s\n" % line)
61539 + # Temporarily write an empty masters value until it becomes the default.
61540 + # TODO: Delete all references to "# use implicit masters" once the empty value becomes the default.
61541 + if config_file == "layout.conf" and not any(line.startswith(("masters =", "# use implicit masters")) for line in lines):
61542 + f.write("masters =\n")
61543
61544 #Create $profile_dir/eclass (we fail to digest the ebuilds if it's not there)
61545 os.makedirs(os.path.join(repo_dir, "eclass"))
61546
61547 + # Temporarily write an empty masters value until it becomes the default.
61548 + if not repo_config or "layout.conf" not in repo_config:
61549 + layout_conf_path = os.path.join(repo_dir, "metadata", "layout.conf")
61550 + with open(layout_conf_path, "w") as f:
61551 + f.write("masters =\n")
61552 +
61553 if repo == "test_repo":
61554 #Create a minimal profile in /usr/portage
61555 sub_profile_dir = os.path.join(profile_dir, "default", "linux", "x86", "test_profile")
61556 os.makedirs(sub_profile_dir)
61557
61558 - eapi_file = os.path.join(sub_profile_dir, "eapi")
61559 - f = open(eapi_file, "w")
61560 - f.write("0\n")
61561 - f.close()
61562 + if not (profile and "eapi" in profile):
61563 + eapi_file = os.path.join(sub_profile_dir, "eapi")
61564 + with open(eapi_file, "w") as f:
61565 + f.write("0\n")
61566
61567 make_defaults_file = os.path.join(sub_profile_dir, "make.defaults")
61568 - f = open(make_defaults_file, "w")
61569 - f.write("ARCH=\"x86\"\n")
61570 - f.write("ACCEPT_KEYWORDS=\"x86\"\n")
61571 - f.close()
61572 + with open(make_defaults_file, "w") as f:
61573 + f.write("ARCH=\"x86\"\n")
61574 + f.write("ACCEPT_KEYWORDS=\"x86\"\n")
61575
61576 use_force_file = os.path.join(sub_profile_dir, "use.force")
61577 - f = open(use_force_file, "w")
61578 - f.write("x86\n")
61579 - f.close()
61580 + with open(use_force_file, "w") as f:
61581 + f.write("x86\n")
61582
61583 parent_file = os.path.join(sub_profile_dir, "parent")
61584 - f = open(parent_file, "w")
61585 - f.write("..\n")
61586 - f.close()
61587 + with open(parent_file, "w") as f:
61588 + f.write("..\n")
61589
61590 if profile:
61591 for config_file, lines in profile.items():
61592 @@ -371,10 +337,9 @@ class ResolverPlayground(object):
61593 raise ValueError("Unknown config file: '%s'" % config_file)
61594
61595 file_name = os.path.join(sub_profile_dir, config_file)
61596 - f = open(file_name, "w")
61597 - for line in lines:
61598 - f.write("%s\n" % line)
61599 - f.close()
61600 + with open(file_name, "w") as f:
61601 + for line in lines:
61602 + f.write("%s\n" % line)
61603
61604 #Create profile symlink
61605 os.symlink(sub_profile_dir, os.path.join(user_config_dir, "make.profile"))
61606 @@ -400,24 +365,50 @@ class ResolverPlayground(object):
61607 with open(os.path.join(metadata_dir, "metadata.xml"), 'w') as f:
61608 f.write(herds_xml)
61609
61610 - # Write empty entries for each repository, in order to exercise
61611 - # RepoConfigLoader's repos.conf processing.
61612 - repos_conf_file = os.path.join(user_config_dir, "repos.conf")
61613 - f = open(repos_conf_file, "w")
61614 - for repo in sorted(self.repo_dirs.keys()):
61615 - f.write("[%s]\n" % repo)
61616 - f.write("\n")
61617 - f.close()
61618 + make_conf = {
61619 + "ACCEPT_KEYWORDS": "x86",
61620 + "CLEAN_DELAY": "0",
61621 + "DISTDIR" : self.distdir,
61622 + "EMERGE_WARNING_DELAY": "0",
61623 + "PKGDIR": self.pkgdir,
61624 + "PORTAGE_INST_GID": str(portage.data.portage_gid),
61625 + "PORTAGE_INST_UID": str(portage.data.portage_uid),
61626 + "PORTAGE_TMPDIR": os.path.join(self.eroot, 'var/tmp'),
61627 + }
61628 +
61629 + if os.environ.get("NOCOLOR"):
61630 + make_conf["NOCOLOR"] = os.environ["NOCOLOR"]
61631 +
61632 + # Pass along PORTAGE_USERNAME and PORTAGE_GRPNAME since they
61633 + # need to be inherited by ebuild subprocesses.
61634 + if 'PORTAGE_USERNAME' in os.environ:
61635 + make_conf['PORTAGE_USERNAME'] = os.environ['PORTAGE_USERNAME']
61636 + if 'PORTAGE_GRPNAME' in os.environ:
61637 + make_conf['PORTAGE_GRPNAME'] = os.environ['PORTAGE_GRPNAME']
61638 +
61639 + make_conf_lines = []
61640 + for k_v in make_conf.items():
61641 + make_conf_lines.append('%s="%s"' % k_v)
61642
61643 - for config_file, lines in user_config.items():
61644 + if "make.conf" in user_config:
61645 + make_conf_lines.extend(user_config["make.conf"])
61646 +
61647 + if not portage.process.sandbox_capable or \
61648 + os.environ.get("SANDBOX_ON") == "1":
61649 + # avoid problems from nested sandbox instances
61650 + make_conf_lines.append('FEATURES="${FEATURES} -sandbox -usersandbox"')
61651 +
61652 + configs = user_config.copy()
61653 + configs["make.conf"] = make_conf_lines
61654 +
61655 + for config_file, lines in configs.items():
61656 if config_file not in self.config_files:
61657 raise ValueError("Unknown config file: '%s'" % config_file)
61658
61659 file_name = os.path.join(user_config_dir, config_file)
61660 - f = open(file_name, "w")
61661 - for line in lines:
61662 - f.write("%s\n" % line)
61663 - f.close()
61664 + with open(file_name, "w") as f:
61665 + for line in lines:
61666 + f.write("%s\n" % line)
61667
61668 #Create /usr/share/portage/config/make.globals
61669 make_globals_path = os.path.join(self.eroot,
61670 @@ -428,7 +419,7 @@ class ResolverPlayground(object):
61671
61672 #Create /usr/share/portage/config/sets/portage.conf
61673 default_sets_conf_dir = os.path.join(self.eroot, "usr/share/portage/config/sets")
61674 -
61675 +
61676 try:
61677 os.makedirs(default_sets_conf_dir)
61678 except os.error:
61679 @@ -447,27 +438,9 @@ class ResolverPlayground(object):
61680
61681 for sets_file, lines in sets.items():
61682 file_name = os.path.join(set_config_dir, sets_file)
61683 - f = open(file_name, "w")
61684 - for line in lines:
61685 - f.write("%s\n" % line)
61686 - f.close()
61687 -
61688 - user_config_dir = os.path.join(self.eroot, "etc", "portage")
61689 -
61690 - try:
61691 - os.makedirs(user_config_dir)
61692 - except os.error:
61693 - pass
61694 -
61695 - for config_file, lines in user_config.items():
61696 - if config_file not in self.config_files:
61697 - raise ValueError("Unknown config file: '%s'" % config_file)
61698 -
61699 - file_name = os.path.join(user_config_dir, config_file)
61700 - f = open(file_name, "w")
61701 - for line in lines:
61702 - f.write("%s\n" % line)
61703 - f.close()
61704 + with open(file_name, "w") as f:
61705 + for line in lines:
61706 + f.write("%s\n" % line)
61707
61708 def _create_world(self, world, world_sets):
61709 #Create /var/lib/portage/world
61710 @@ -477,54 +450,34 @@ class ResolverPlayground(object):
61711 world_file = os.path.join(var_lib_portage, "world")
61712 world_set_file = os.path.join(var_lib_portage, "world_sets")
61713
61714 - f = open(world_file, "w")
61715 - for atom in world:
61716 - f.write("%s\n" % atom)
61717 - f.close()
61718 + with open(world_file, "w") as f:
61719 + for atom in world:
61720 + f.write("%s\n" % atom)
61721
61722 - f = open(world_set_file, "w")
61723 - for atom in world_sets:
61724 - f.write("%s\n" % atom)
61725 - f.close()
61726 + with open(world_set_file, "w") as f:
61727 + for atom in world_sets:
61728 + f.write("%s\n" % atom)
61729
61730 def _load_config(self):
61731 - portdir_overlay = []
61732 - for repo_name in sorted(self.repo_dirs):
61733 - path = self.repo_dirs[repo_name]
61734 - if path != self.portdir:
61735 - portdir_overlay.append(path)
61736 +
61737 + create_trees_kwargs = {}
61738 + if self.target_root != os.sep:
61739 + create_trees_kwargs["target_root"] = self.target_root
61740
61741 env = {
61742 - "ACCEPT_KEYWORDS": "x86",
61743 - "DISTDIR" : self.distdir,
61744 - "PKGDIR": self.pkgdir,
61745 - "PORTDIR": self.portdir,
61746 - "PORTDIR_OVERLAY": " ".join(portdir_overlay),
61747 - 'PORTAGE_TMPDIR' : os.path.join(self.eroot, 'var/tmp'),
61748 + "PORTAGE_REPOSITORIES": "\n".join("[%s]\n%s" % (repo_name, "\n".join("%s = %s" % (k, v) for k, v in repo_config.items())) for repo_name, repo_config in self._repositories.items())
61749 }
61750
61751 - if os.environ.get("NOCOLOR"):
61752 - env["NOCOLOR"] = os.environ["NOCOLOR"]
61753 -
61754 - if os.environ.get("SANDBOX_ON") == "1":
61755 - # avoid problems from nested sandbox instances
61756 - env["FEATURES"] = "-sandbox"
61757 + trees = portage.create_trees(env=env, eprefix=self.eprefix,
61758 + **create_trees_kwargs)
61759
61760 - # Pass along PORTAGE_USERNAME and PORTAGE_GRPNAME since they
61761 - # need to be inherited by ebuild subprocesses.
61762 - if 'PORTAGE_USERNAME' in os.environ:
61763 - env['PORTAGE_USERNAME'] = os.environ['PORTAGE_USERNAME']
61764 - if 'PORTAGE_GRPNAME' in os.environ:
61765 - env['PORTAGE_GRPNAME'] = os.environ['PORTAGE_GRPNAME']
61766 -
61767 - trees = portage.create_trees(env=env, eprefix=self.eprefix)
61768 for root, root_trees in trees.items():
61769 settings = root_trees["vartree"].settings
61770 settings._init_dirs()
61771 setconfig = load_default_config(settings, root_trees)
61772 root_trees["root_config"] = RootConfig(settings, root_trees, setconfig)
61773 -
61774 - return settings, trees
61775 +
61776 + return trees[trees._target_eroot]["vartree"].settings, trees
61777
61778 def run(self, atoms, options={}, action=None):
61779 options = options.copy()
61780 @@ -553,7 +506,7 @@ class ResolverPlayground(object):
61781 rval, cleanlist, ordered, req_pkg_count = \
61782 calc_depclean(self.settings, self.trees, None,
61783 options, action, InternalPackageSet(initial_atoms=atoms, allow_wildcard=True), None)
61784 - result = ResolverPlaygroundDepcleanResult( \
61785 + result = ResolverPlaygroundDepcleanResult(
61786 atoms, rval, cleanlist, ordered, req_pkg_count)
61787 else:
61788 params = create_depgraph_params(options, action)
61789 @@ -577,9 +530,9 @@ class ResolverPlayground(object):
61790 return
61791
61792 def cleanup(self):
61793 - portdb = self.trees[self.eroot]["porttree"].dbapi
61794 - portdb.close_caches()
61795 - portage.dbapi.porttree.portdbapi.portdbapi_instances.remove(portdb)
61796 + for eroot in self.trees:
61797 + portdb = self.trees[eroot]["porttree"].dbapi
61798 + portdb.close_caches()
61799 if self.debug:
61800 print("\nEROOT=%s" % self.eroot)
61801 else:
61802 @@ -742,13 +695,14 @@ class ResolverPlaygroundResult(object):
61803
61804 if self.depgraph._dynamic_config._serialized_tasks_cache is not None:
61805 self.mergelist = []
61806 + host_root = self.depgraph._frozen_config._running_root.root
61807 for x in self.depgraph._dynamic_config._serialized_tasks_cache:
61808 if isinstance(x, Blocker):
61809 self.mergelist.append(x.atom)
61810 else:
61811 repo_str = ""
61812 - if x.metadata["repository"] != "test_repo":
61813 - repo_str = _repo_separator + x.metadata["repository"]
61814 + if x.repo != "test_repo":
61815 + repo_str = _repo_separator + x.repo
61816 mergelist_str = x.cpv + repo_str
61817 if x.built:
61818 if x.operation == "merge":
61819 @@ -756,6 +710,8 @@ class ResolverPlaygroundResult(object):
61820 else:
61821 desc = x.operation
61822 mergelist_str = "[%s]%s" % (desc, mergelist_str)
61823 + if x.root != host_root:
61824 + mergelist_str += "{targetroot}"
61825 self.mergelist.append(mergelist_str)
61826
61827 if self.depgraph._dynamic_config._needed_use_config_changes:
61828 @@ -781,7 +737,7 @@ class ResolverPlaygroundResult(object):
61829 self.license_changes[pkg.cpv] = missing_licenses
61830
61831 if self.depgraph._dynamic_config._slot_conflict_handler is not None:
61832 - self.slot_collision_solutions = []
61833 + self.slot_collision_solutions = []
61834 handler = self.depgraph._dynamic_config._slot_conflict_handler
61835
61836 for change in handler.changes:
61837 @@ -793,7 +749,7 @@ class ResolverPlaygroundResult(object):
61838 if self.depgraph._dynamic_config._circular_dependency_handler is not None:
61839 handler = self.depgraph._dynamic_config._circular_dependency_handler
61840 sol = handler.solutions
61841 - self.circular_dependency_solutions = dict( zip([x.cpv for x in sol.keys()], sol.values()) )
61842 + self.circular_dependency_solutions = dict(zip([x.cpv for x in sol.keys()], sol.values()))
61843
61844 class ResolverPlaygroundDepcleanResult(object):
61845
61846
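The single env entry built in _load_config replaces the old PORTDIR and PORTDIR_OVERLAY variables: self._repositories is serialized into a repos.conf-style string and passed to portage.create_trees() as PORTAGE_REPOSITORIES. A worked example of what that join expression produces for the default playground; the tempdir path is illustrative and section order follows dict iteration order:

# Stand-alone reproduction of the "PORTAGE_REPOSITORIES" expression above.
repositories = {
	"DEFAULT": {"main-repo": "test_repo"},
	"test_repo": {"location": "/tmp/tmpXXXXXX/var/repositories/test_repo"},
}
portage_repositories = "\n".join(
	"[%s]\n%s" % (name, "\n".join("%s = %s" % kv for kv in config.items()))
	for name, config in repositories.items())

# One possible value of portage_repositories:
#   [DEFAULT]
#   main-repo = test_repo
#   [test_repo]
#   location = /tmp/tmpXXXXXX/var/repositories/test_repo
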
61847 diff --git a/pym/portage/tests/resolver/test_autounmask.py b/pym/portage/tests/resolver/test_autounmask.py
61848 index 6acac99..75fb368 100644
61849 --- a/pym/portage/tests/resolver/test_autounmask.py
61850 +++ b/pym/portage/tests/resolver/test_autounmask.py
61851 @@ -10,9 +10,9 @@ class AutounmaskTestCase(TestCase):
61852
61853 ebuilds = {
61854 #ebuilds to test use changes
61855 - "dev-libs/A-1": { "SLOT": 1, "DEPEND": "dev-libs/B[foo]", "EAPI": 2},
61856 - "dev-libs/A-2": { "SLOT": 2, "DEPEND": "dev-libs/B[bar]", "EAPI": 2},
61857 - "dev-libs/B-1": { "DEPEND": "foo? ( dev-libs/C ) bar? ( dev-libs/D )", "IUSE": "foo bar"},
61858 + "dev-libs/A-1": { "SLOT": 1, "DEPEND": "dev-libs/B[foo]", "EAPI": 2},
61859 + "dev-libs/A-2": { "SLOT": 2, "DEPEND": "dev-libs/B[bar]", "EAPI": 2},
61860 + "dev-libs/B-1": { "DEPEND": "foo? ( dev-libs/C ) bar? ( dev-libs/D )", "IUSE": "foo bar"},
61861 "dev-libs/C-1": {},
61862 "dev-libs/D-1": {},
61863
61864 @@ -56,10 +56,10 @@ class AutounmaskTestCase(TestCase):
61865 "dev-util/R-1": { "IUSE": "bar" },
61866
61867 #ebuilds to test interaction with REQUIRED_USE
61868 - "app-portage/A-1": { "DEPEND": "app-portage/B[foo]", "EAPI": 2 },
61869 - "app-portage/A-2": { "DEPEND": "app-portage/B[foo=]", "IUSE": "+foo", "REQUIRED_USE": "foo", "EAPI": "4" },
61870 + "app-portage/A-1": { "DEPEND": "app-portage/B[foo]", "EAPI": 2 },
61871 + "app-portage/A-2": { "DEPEND": "app-portage/B[foo=]", "IUSE": "+foo", "REQUIRED_USE": "foo", "EAPI": "4" },
61872
61873 - "app-portage/B-1": { "IUSE": "foo +bar", "REQUIRED_USE": "^^ ( foo bar )", "EAPI": "4" },
61874 + "app-portage/B-1": { "IUSE": "foo +bar", "REQUIRED_USE": "^^ ( foo bar )", "EAPI": "4" },
61875 "app-portage/C-1": { "IUSE": "+foo +bar", "REQUIRED_USE": "^^ ( foo bar )", "EAPI": "4" },
61876 }
61877
61878 @@ -69,183 +69,183 @@ class AutounmaskTestCase(TestCase):
61879
61880 ResolverPlaygroundTestCase(
61881 ["dev-libs/A:1"],
61882 - options = {"--autounmask": "n"},
61883 - success = False),
61884 + options={"--autounmask": "n"},
61885 + success=False),
61886 ResolverPlaygroundTestCase(
61887 ["dev-libs/A:1"],
61888 - options = {"--autounmask": True},
61889 - success = False,
61890 - mergelist = ["dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1"],
61891 - use_changes = { "dev-libs/B-1": {"foo": True} } ),
61892 + options={"--autounmask": True},
61893 + success=False,
61894 + mergelist=["dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1"],
61895 + use_changes={ "dev-libs/B-1": {"foo": True} }),
61896
61897 #Make sure we restart if needed.
61898 ResolverPlaygroundTestCase(
61899 ["dev-libs/A:1", "dev-libs/B"],
61900 - options = {"--autounmask": True},
61901 - all_permutations = True,
61902 - success = False,
61903 - mergelist = ["dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1"],
61904 - use_changes = { "dev-libs/B-1": {"foo": True} } ),
61905 + options={"--autounmask": True},
61906 + all_permutations=True,
61907 + success=False,
61908 + mergelist=["dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1"],
61909 + use_changes={ "dev-libs/B-1": {"foo": True} }),
61910 ResolverPlaygroundTestCase(
61911 ["dev-libs/A:1", "dev-libs/A:2", "dev-libs/B"],
61912 - options = {"--autounmask": True},
61913 - all_permutations = True,
61914 - success = False,
61915 - mergelist = ["dev-libs/D-1", "dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1", "dev-libs/A-2"],
61916 - ignore_mergelist_order = True,
61917 - use_changes = { "dev-libs/B-1": {"foo": True, "bar": True} } ),
61918 + options={"--autounmask": True},
61919 + all_permutations=True,
61920 + success=False,
61921 + mergelist=["dev-libs/D-1", "dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1", "dev-libs/A-2"],
61922 + ignore_mergelist_order=True,
61923 + use_changes={ "dev-libs/B-1": {"foo": True, "bar": True} }),
61924
61925 #Test keywording.
61926 #The simple case.
61927
61928 ResolverPlaygroundTestCase(
61929 ["app-misc/Z"],
61930 - options = {"--autounmask": "n"},
61931 - success = False),
61932 + options={"--autounmask": "n"},
61933 + success=False),
61934 ResolverPlaygroundTestCase(
61935 ["app-misc/Z"],
61936 - options = {"--autounmask": True},
61937 - success = False,
61938 - mergelist = ["app-misc/Y-1", "app-misc/Z-1"],
61939 - unstable_keywords = ["app-misc/Y-1", "app-misc/Z-1"]),
61940 + options={"--autounmask": True},
61941 + success=False,
61942 + mergelist=["app-misc/Y-1", "app-misc/Z-1"],
61943 + unstable_keywords=["app-misc/Y-1", "app-misc/Z-1"]),
61944
61945 #Make sure that the backtracking for slot conflicts handles our mess.
61946
61947 ResolverPlaygroundTestCase(
61948 ["=app-misc/V-1", "app-misc/W"],
61949 - options = {"--autounmask": True},
61950 - all_permutations = True,
61951 - success = False,
61952 - mergelist = ["app-misc/W-2", "app-misc/V-1"],
61953 - unstable_keywords = ["app-misc/W-2", "app-misc/V-1"]),
61954 + options={"--autounmask": True},
61955 + all_permutations=True,
61956 + success=False,
61957 + mergelist=["app-misc/W-2", "app-misc/V-1"],
61958 + unstable_keywords=["app-misc/W-2", "app-misc/V-1"]),
61959
61960 #Mixed testing
61961 #Make sure we don't change use for something in a || dep if there is another choice
61962 #that needs no change.
61963 -
61964 +
61965 ResolverPlaygroundTestCase(
61966 ["=sci-libs/K-1"],
61967 - options = {"--autounmask": True},
61968 - success = True,
61969 - mergelist = ["sci-libs/P-1", "sci-libs/K-1"]),
61970 + options={"--autounmask": True},
61971 + success=True,
61972 + mergelist=["sci-libs/P-1", "sci-libs/K-1"]),
61973 ResolverPlaygroundTestCase(
61974 ["=sci-libs/K-2"],
61975 - options = {"--autounmask": True},
61976 - success = True,
61977 - mergelist = ["sci-libs/P-1", "sci-libs/K-2"]),
61978 + options={"--autounmask": True},
61979 + success=True,
61980 + mergelist=["sci-libs/P-1", "sci-libs/K-2"]),
61981 ResolverPlaygroundTestCase(
61982 ["=sci-libs/K-3"],
61983 - options = {"--autounmask": True},
61984 - success = True,
61985 - mergelist = ["sci-libs/P-1", "sci-libs/K-3"]),
61986 + options={"--autounmask": True},
61987 + success=True,
61988 + mergelist=["sci-libs/P-1", "sci-libs/K-3"]),
61989 ResolverPlaygroundTestCase(
61990 ["=sci-libs/K-4"],
61991 - options = {"--autounmask": True},
61992 - success = True,
61993 - mergelist = ["sci-libs/P-1", "sci-libs/K-4"]),
61994 + options={"--autounmask": True},
61995 + success=True,
61996 + mergelist=["sci-libs/P-1", "sci-libs/K-4"]),
61997 ResolverPlaygroundTestCase(
61998 ["=sci-libs/K-5"],
61999 - options = {"--autounmask": True},
62000 - success = True,
62001 - mergelist = ["sci-libs/P-1", "sci-libs/K-5"]),
62002 + options={"--autounmask": True},
62003 + success=True,
62004 + mergelist=["sci-libs/P-1", "sci-libs/K-5"]),
62005 ResolverPlaygroundTestCase(
62006 ["=sci-libs/K-6"],
62007 - options = {"--autounmask": True},
62008 - success = True,
62009 - mergelist = ["sci-libs/P-1", "sci-libs/K-6"]),
62010 + options={"--autounmask": True},
62011 + success=True,
62012 + mergelist=["sci-libs/P-1", "sci-libs/K-6"]),
62013
62014 #Make sure we prefer use changes over keyword changes.
62015 ResolverPlaygroundTestCase(
62016 ["=sci-libs/K-7"],
62017 - options = {"--autounmask": True},
62018 - success = False,
62019 - mergelist = ["sci-libs/L-1", "sci-libs/K-7"],
62020 - use_changes = { "sci-libs/L-1": { "bar": True } }),
62021 + options={"--autounmask": True},
62022 + success=False,
62023 + mergelist=["sci-libs/L-1", "sci-libs/K-7"],
62024 + use_changes={ "sci-libs/L-1": { "bar": True } }),
62025 ResolverPlaygroundTestCase(
62026 ["=sci-libs/K-8"],
62027 - options = {"--autounmask": True},
62028 - success = False,
62029 - mergelist = ["sci-libs/L-1", "sci-libs/K-8"],
62030 - use_changes = { "sci-libs/L-1": { "bar": True } }),
62031 + options={"--autounmask": True},
62032 + success=False,
62033 + mergelist=["sci-libs/L-1", "sci-libs/K-8"],
62034 + use_changes={ "sci-libs/L-1": { "bar": True } }),
62035
62036 #Test these nice "required by cat/pkg[foo]" messages.
62037 ResolverPlaygroundTestCase(
62038 ["=dev-util/Q-1"],
62039 - options = {"--autounmask": True},
62040 - success = False,
62041 - mergelist = ["dev-util/R-1", "dev-util/Q-1"],
62042 - use_changes = { "dev-util/R-1": { "bar": True } }),
62043 + options={"--autounmask": True},
62044 + success=False,
62045 + mergelist=["dev-util/R-1", "dev-util/Q-1"],
62046 + use_changes={ "dev-util/R-1": { "bar": True } }),
62047 ResolverPlaygroundTestCase(
62048 ["=dev-util/Q-2"],
62049 - options = {"--autounmask": True},
62050 - success = False,
62051 - mergelist = ["dev-util/R-1", "dev-util/Q-2"],
62052 - use_changes = { "dev-util/R-1": { "bar": True } }),
62053 + options={"--autounmask": True},
62054 + success=False,
62055 + mergelist=["dev-util/R-1", "dev-util/Q-2"],
62056 + use_changes={ "dev-util/R-1": { "bar": True } }),
62057
62058 #Test interaction with REQUIRED_USE.
62059 ResolverPlaygroundTestCase(
62060 ["=app-portage/A-1"],
62061 - options = { "--autounmask": True },
62062 - use_changes = None,
62063 - success = False),
62064 + options={ "--autounmask": True },
62065 + use_changes=None,
62066 + success=False),
62067 ResolverPlaygroundTestCase(
62068 ["=app-portage/A-2"],
62069 - options = { "--autounmask": True },
62070 - use_changes = None,
62071 - success = False),
62072 + options={ "--autounmask": True },
62073 + use_changes=None,
62074 + success=False),
62075 ResolverPlaygroundTestCase(
62076 ["=app-portage/C-1"],
62077 - options = { "--autounmask": True },
62078 - use_changes = None,
62079 - success = False),
62080 + options={ "--autounmask": True },
62081 + use_changes=None,
62082 + success=False),
62083
62084 #Make sure we don't change masked/forced flags.
62085 ResolverPlaygroundTestCase(
62086 ["dev-libs/E:1"],
62087 - options = {"--autounmask": True},
62088 - use_changes = None,
62089 - success = False),
62090 + options={"--autounmask": True},
62091 + use_changes=None,
62092 + success=False),
62093 ResolverPlaygroundTestCase(
62094 ["dev-libs/E:2"],
62095 - options = {"--autounmask": True},
62096 - use_changes = None,
62097 - success = False),
62098 + options={"--autounmask": True},
62099 + use_changes=None,
62100 + success=False),
62101
62102 #Test mask and keyword changes.
62103 ResolverPlaygroundTestCase(
62104 ["app-text/A"],
62105 - options = {"--autounmask": True},
62106 - success = False,
62107 - mergelist = ["app-text/A-1"],
62108 - needed_p_mask_changes = ["app-text/A-1"]),
62109 + options={"--autounmask": True},
62110 + success=False,
62111 + mergelist=["app-text/A-1"],
62112 + needed_p_mask_changes=["app-text/A-1"]),
62113 ResolverPlaygroundTestCase(
62114 ["app-text/B"],
62115 - options = {"--autounmask": True},
62116 - success = False,
62117 - mergelist = ["app-text/B-1"],
62118 - unstable_keywords = ["app-text/B-1"],
62119 - needed_p_mask_changes = ["app-text/B-1"]),
62120 + options={"--autounmask": True},
62121 + success=False,
62122 + mergelist=["app-text/B-1"],
62123 + unstable_keywords=["app-text/B-1"],
62124 + needed_p_mask_changes=["app-text/B-1"]),
62125 ResolverPlaygroundTestCase(
62126 ["app-text/C"],
62127 - options = {"--autounmask": True},
62128 - success = False,
62129 - mergelist = ["app-text/C-1"],
62130 - unstable_keywords = ["app-text/C-1"],
62131 - needed_p_mask_changes = ["app-text/C-1"]),
62132 + options={"--autounmask": True},
62133 + success=False,
62134 + mergelist=["app-text/C-1"],
62135 + unstable_keywords=["app-text/C-1"],
62136 + needed_p_mask_changes=["app-text/C-1"]),
62137 #Make sure unstable keyword is preferred over missing keyword
62138 ResolverPlaygroundTestCase(
62139 ["app-text/D"],
62140 - options = {"--autounmask": True},
62141 - success = False,
62142 - mergelist = ["app-text/D-1"],
62143 - unstable_keywords = ["app-text/D-1"]),
62144 + options={"--autounmask": True},
62145 + success=False,
62146 + mergelist=["app-text/D-1"],
62147 + unstable_keywords=["app-text/D-1"]),
62148 #Test missing keyword
62149 ResolverPlaygroundTestCase(
62150 ["=app-text/D-2"],
62151 - options = {"--autounmask": True},
62152 - success = False,
62153 - mergelist = ["app-text/D-2"],
62154 - unstable_keywords = ["app-text/D-2"])
62155 + options={"--autounmask": True},
62156 + success=False,
62157 + mergelist=["app-text/D-2"],
62158 + unstable_keywords=["app-text/D-2"])
62159 )
62160
62161 profile = {
62162 @@ -279,7 +279,7 @@ class AutounmaskTestCase(TestCase):
62163 "dev-libs/A-1": { "LICENSE": "TEST" },
62164 "dev-libs/B-1": { "LICENSE": "TEST", "IUSE": "foo", "KEYWORDS": "~x86"},
62165 "dev-libs/C-1": { "DEPEND": "dev-libs/B[foo]", "EAPI": 2 },
62166 -
62167 +
62168 "dev-libs/D-1": { "DEPEND": "dev-libs/E dev-libs/F", "LICENSE": "TEST" },
62169 "dev-libs/E-1": { "LICENSE": "TEST" },
62170 "dev-libs/E-2": { "LICENSE": "TEST" },
62171 @@ -292,40 +292,40 @@ class AutounmaskTestCase(TestCase):
62172 test_cases = (
62173 ResolverPlaygroundTestCase(
62174 ["=dev-libs/A-1"],
62175 - options = {"--autounmask": 'n'},
62176 - success = False),
62177 + options={"--autounmask": 'n'},
62178 + success=False),
62179 ResolverPlaygroundTestCase(
62180 ["=dev-libs/A-1"],
62181 - options = {"--autounmask": True},
62182 - success = False,
62183 - mergelist = ["dev-libs/A-1"],
62184 - license_changes = { "dev-libs/A-1": set(["TEST"]) }),
62185 + options={"--autounmask": True},
62186 + success=False,
62187 + mergelist=["dev-libs/A-1"],
62188 + license_changes={ "dev-libs/A-1": set(["TEST"]) }),
62189
62190 #Test license+keyword+use change at once.
62191 ResolverPlaygroundTestCase(
62192 ["=dev-libs/C-1"],
62193 - options = {"--autounmask": True},
62194 - success = False,
62195 - mergelist = ["dev-libs/B-1", "dev-libs/C-1"],
62196 - license_changes = { "dev-libs/B-1": set(["TEST"]) },
62197 - unstable_keywords = ["dev-libs/B-1"],
62198 - use_changes = { "dev-libs/B-1": { "foo": True } }),
62199 + options={"--autounmask": True},
62200 + success=False,
62201 + mergelist=["dev-libs/B-1", "dev-libs/C-1"],
62202 + license_changes={ "dev-libs/B-1": set(["TEST"]) },
62203 + unstable_keywords=["dev-libs/B-1"],
62204 + use_changes={ "dev-libs/B-1": { "foo": True } }),
62205
62206 #Test license with backtracking.
62207 ResolverPlaygroundTestCase(
62208 ["=dev-libs/D-1"],
62209 - options = {"--autounmask": True},
62210 - success = False,
62211 - mergelist = ["dev-libs/E-1", "dev-libs/F-1", "dev-libs/D-1"],
62212 - license_changes = { "dev-libs/D-1": set(["TEST"]), "dev-libs/E-1": set(["TEST"]), "dev-libs/E-2": set(["TEST"]), "dev-libs/F-1": set(["TEST"]) }),
62213 + options={"--autounmask": True},
62214 + success=False,
62215 + mergelist=["dev-libs/E-1", "dev-libs/F-1", "dev-libs/D-1"],
62216 + license_changes={ "dev-libs/D-1": set(["TEST"]), "dev-libs/E-1": set(["TEST"]), "dev-libs/E-2": set(["TEST"]), "dev-libs/F-1": set(["TEST"]) }),
62217
62218 #Test license only for bug #420847
62219 ResolverPlaygroundTestCase(
62220 ["dev-java/sun-jdk"],
62221 - options = {"--autounmask": True},
62222 - success = False,
62223 - mergelist = ["dev-java/sun-jdk-1.6.0.31"],
62224 - license_changes = { "dev-java/sun-jdk-1.6.0.31": set(["TEST"]) }),
62225 + options={"--autounmask": True},
62226 + success=False,
62227 + mergelist=["dev-java/sun-jdk-1.6.0.31"],
62228 + license_changes={ "dev-java/sun-jdk-1.6.0.31": set(["TEST"]) }),
62229 )
62230
62231 playground = ResolverPlayground(ebuilds=ebuilds)
62232 @@ -348,7 +348,7 @@ class AutounmaskTestCase(TestCase):
62233 "dev-libs/D-1": { "DEPEND": "dev-libs/A" },
62234 }
62235
62236 - world_sets = [ "@test-set" ]
62237 + world_sets = ["@test-set"]
62238 sets = {
62239 "test-set": (
62240 "dev-libs/A", "dev-libs/B", "dev-libs/C", "dev-libs/D",
62241 @@ -362,29 +362,29 @@ class AutounmaskTestCase(TestCase):
62242 ResolverPlaygroundTestCase(
62243 ["dev-libs/B", "dev-libs/C", "dev-libs/D"],
62244 all_permutations=True,
62245 - options = {"--autounmask": "y"},
62246 + options={"--autounmask": "y"},
62247 mergelist=["dev-libs/A-2", "dev-libs/B-1", "dev-libs/C-1", "dev-libs/D-1"],
62248 ignore_mergelist_order=True,
62249 - unstable_keywords = ["dev-libs/A-2"],
62250 - success = False),
62251 + unstable_keywords=["dev-libs/A-2"],
62252 + success=False),
62253
62254 ResolverPlaygroundTestCase(
62255 ["@test-set"],
62256 all_permutations=True,
62257 - options = {"--autounmask": "y"},
62258 + options={"--autounmask": "y"},
62259 mergelist=["dev-libs/A-2", "dev-libs/B-1", "dev-libs/C-1", "dev-libs/D-1"],
62260 ignore_mergelist_order=True,
62261 - unstable_keywords = ["dev-libs/A-2"],
62262 - success = False),
62263 + unstable_keywords=["dev-libs/A-2"],
62264 + success=False),
62265
62266 ResolverPlaygroundTestCase(
62267 ["@world"],
62268 all_permutations=True,
62269 - options = {"--autounmask": "y"},
62270 + options={"--autounmask": "y"},
62271 mergelist=["dev-libs/A-2", "dev-libs/B-1", "dev-libs/C-1", "dev-libs/D-1"],
62272 ignore_mergelist_order=True,
62273 - unstable_keywords = ["dev-libs/A-2"],
62274 - success = False),
62275 + unstable_keywords=["dev-libs/A-2"],
62276 + success=False),
62277 )
62278
62279
62280 @@ -411,16 +411,16 @@ class AutounmaskTestCase(TestCase):
62281 #Test mask and keyword changes.
62282 ResolverPlaygroundTestCase(
62283 ["app-text/A"],
62284 - options = {"--autounmask": True,
62285 - "--autounmask-keep-masks": "y"},
62286 - success = False),
62287 + options={"--autounmask": True,
62288 + "--autounmask-keep-masks": "y"},
62289 + success=False),
62290 ResolverPlaygroundTestCase(
62291 ["app-text/A"],
62292 - options = {"--autounmask": True,
62293 - "--autounmask-keep-masks": "n"},
62294 - success = False,
62295 - mergelist = ["app-text/A-1"],
62296 - needed_p_mask_changes = ["app-text/A-1"]),
62297 + options={"--autounmask": True,
62298 + "--autounmask-keep-masks": "n"},
62299 + success=False,
62300 + mergelist=["app-text/A-1"],
62301 + needed_p_mask_changes=["app-text/A-1"]),
62302 )
62303
62304 profile = {
62305 @@ -460,16 +460,16 @@ class AutounmaskTestCase(TestCase):
62306 test_cases = (
62307 ResolverPlaygroundTestCase(
62308 ["dev-libs/B"],
62309 - success = False,
62310 - mergelist = ["dev-libs/A-2", "dev-libs/B-1"],
62311 - needed_p_mask_changes = set(["dev-libs/A-2"])),
62312 + success=False,
62313 + mergelist=["dev-libs/A-2", "dev-libs/B-1"],
62314 + needed_p_mask_changes=set(["dev-libs/A-2"])),
62315
62316 ResolverPlaygroundTestCase(
62317 ["dev-libs/C"],
62318 - success = False,
62319 - mergelist = ["dev-libs/A-9999", "dev-libs/C-1"],
62320 - unstable_keywords = set(["dev-libs/A-9999"]),
62321 - needed_p_mask_changes = set(["dev-libs/A-9999"])),
62322 + success=False,
62323 + mergelist=["dev-libs/A-9999", "dev-libs/C-1"],
62324 + unstable_keywords=set(["dev-libs/A-9999"]),
62325 + needed_p_mask_changes=set(["dev-libs/A-9999"])),
62326 )
62327
62328 playground = ResolverPlayground(ebuilds=ebuilds, profile=profile)
62329
62330 diff --git a/pym/portage/tests/resolver/test_autounmask_multilib_use.py b/pym/portage/tests/resolver/test_autounmask_multilib_use.py
62331 new file mode 100644
62332 index 0000000..e160c77
62333 --- /dev/null
62334 +++ b/pym/portage/tests/resolver/test_autounmask_multilib_use.py
62335 @@ -0,0 +1,85 @@
62336 +# Copyright 2013 Gentoo Foundation
62337 +# Distributed under the terms of the GNU General Public License v2
62338 +
62339 +from portage.tests import TestCase
62340 +from portage.tests.resolver.ResolverPlayground import (ResolverPlayground,
62341 + ResolverPlaygroundTestCase)
62342 +
62343 +class AutounmaskMultilibUseTestCase(TestCase):
62344 +
62345 + def testAutounmaskMultilibUse(self):
62346 +
62347 + self.todo = True
62348 +
62349 + ebuilds = {
62350 + "x11-proto/xextproto-7.2.1-r1": {
62351 + "EAPI": "5",
62352 + "IUSE": "abi_x86_32 abi_x86_64",
62353 + },
62354 + "x11-libs/libXaw-1.0.11-r2": {
62355 + "EAPI": "5",
62356 + "IUSE": "abi_x86_32 abi_x86_64",
62357 + "RDEPEND": "x11-proto/xextproto[abi_x86_32(-)?,abi_x86_64(-)?]"
62358 + },
62359 + "app-emulation/emul-linux-x86-xlibs-20130224-r2": {
62360 + "EAPI": "5",
62361 + "RDEPEND": "x11-libs/libXaw[abi_x86_32]"
62362 + },
62363 + "games-util/steam-client-meta-0-r20130514": {
62364 + "EAPI": "5",
62365 + "RDEPEND": "app-emulation/emul-linux-x86-xlibs"
62366 + }
62367 + }
62368 +
62369 + installed = {
62370 + "x11-proto/xextproto-7.2.1-r1": {
62371 + "EAPI": "5",
62372 + "IUSE": "abi_x86_32 abi_x86_64",
62373 + "USE": "abi_x86_32 abi_x86_64"
62374 + },
62375 + "x11-libs/libXaw-1.0.11-r2": {
62376 + "EAPI": "5",
62377 + "IUSE": "abi_x86_32 abi_x86_64",
62378 + "RDEPEND": "x11-proto/xextproto[abi_x86_32(-)?,abi_x86_64(-)?]",
62379 + "USE": "abi_x86_32 abi_x86_64"
62380 + },
62381 + "app-emulation/emul-linux-x86-xlibs-20130224-r2": {
62382 + "EAPI": "5",
62383 + "RDEPEND": "x11-libs/libXaw[abi_x86_32]"
62384 + },
62385 + "games-util/steam-client-meta-0-r20130514": {
62386 + "EAPI": "5",
62387 + "RDEPEND": "app-emulation/emul-linux-x86-xlibs"
62388 + }
62389 + }
62390 +
62391 + user_config = {
62392 + #"make.conf" : ("USE=\"abi_x86_32 abi_x86_64\"",)
62393 + "make.conf" : ("USE=\"abi_x86_64\"",)
62394 + }
62395 +
62396 + world = ("games-util/steam-client-meta",)
62397 +
62398 + test_cases = (
62399 +
62400 + # Test autounmask solving of multilib use deps for bug #481628.
62401 + # We would like it to suggest some USE changes, but instead it
62402 + # currently fails with a SLOT conflict.
62403 +
62404 + ResolverPlaygroundTestCase(
62405 + ["x11-proto/xextproto", "x11-libs/libXaw"],
62406 + options = {"--oneshot": True, "--autounmask": True,
62407 + "--backtrack": 30},
62408 + mergelist = ["x11-proto/xextproto-7.2.1-r1", "x11-libs/libXaw-1.0.11-r2"],
62409 + success = True),
62410 + )
62411 +
62412 + playground = ResolverPlayground(ebuilds=ebuilds, installed=installed,
62413 + user_config=user_config, world=world, debug=False)
62414 +
62415 + try:
62416 + for test_case in test_cases:
62417 + playground.run_TestCase(test_case)
62418 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
62419 + finally:
62420 + playground.cleanup()
62421
62422 diff --git a/pym/portage/tests/resolver/test_backtracking.py b/pym/portage/tests/resolver/test_backtracking.py
62423 index 600f682..3b69eda 100644
62424 --- a/pym/portage/tests/resolver/test_backtracking.py
62425 +++ b/pym/portage/tests/resolver/test_backtracking.py
62426 @@ -1,4 +1,4 @@
62427 -# Copyright 2010 Gentoo Foundation
62428 +# Copyright 2010-2014 Gentoo Foundation
62429 # Distributed under the terms of the GNU General Public License v2
62430
62431 from portage.tests import TestCase
62432 @@ -31,7 +31,7 @@ class BacktrackingTestCase(TestCase):
62433 playground.cleanup()
62434
62435
62436 - def testHittingTheBacktrackLimit(self):
62437 + def testBacktrackNotNeeded(self):
62438 ebuilds = {
62439 "dev-libs/A-1": {},
62440 "dev-libs/A-2": {},
62441 @@ -45,47 +45,9 @@ class BacktrackingTestCase(TestCase):
62442 ResolverPlaygroundTestCase(
62443 ["dev-libs/C", "dev-libs/D"],
62444 all_permutations = True,
62445 - mergelist = ["dev-libs/A-1", "dev-libs/B-1", "dev-libs/C-1", "dev-libs/D-1"],
62446 - ignore_mergelist_order = True,
62447 - success = True),
62448 - #This one hits the backtrack limit. Be aware that this depends on the argument order.
62449 - ResolverPlaygroundTestCase(
62450 - ["dev-libs/D", "dev-libs/C"],
62451 options = { "--backtrack": 1 },
62452 - mergelist = ["dev-libs/A-1", "dev-libs/B-1", "dev-libs/A-2", "dev-libs/B-2", "dev-libs/C-1", "dev-libs/D-1"],
62453 + mergelist = ["dev-libs/A-1", "dev-libs/B-1", "dev-libs/C-1", "dev-libs/D-1"],
62454 ignore_mergelist_order = True,
62455 - slot_collision_solutions = [],
62456 - success = False),
62457 - )
62458 -
62459 - playground = ResolverPlayground(ebuilds=ebuilds)
62460 -
62461 - try:
62462 - for test_case in test_cases:
62463 - playground.run_TestCase(test_case)
62464 - self.assertEqual(test_case.test_success, True, test_case.fail_msg)
62465 - finally:
62466 - playground.cleanup()
62467 -
62468 -
62469 - def testBacktrackingGoodVersionFirst(self):
62470 - """
62471 - When backtracking due to slot conflicts, we masked the version that has been pulled
62472 - in first. This is not always a good idea. Mask the highest version instead.
62473 - """
62474 -
62475 - ebuilds = {
62476 - "dev-libs/A-1": { "DEPEND": "=dev-libs/C-1 dev-libs/B" },
62477 - "dev-libs/B-1": { "DEPEND": "=dev-libs/C-1" },
62478 - "dev-libs/B-2": { "DEPEND": "=dev-libs/C-2" },
62479 - "dev-libs/C-1": { },
62480 - "dev-libs/C-2": { },
62481 - }
62482 -
62483 - test_cases = (
62484 - ResolverPlaygroundTestCase(
62485 - ["dev-libs/A"],
62486 - mergelist = ["dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1", ],
62487 success = True),
62488 )
62489
62490 @@ -118,7 +80,7 @@ class BacktrackingTestCase(TestCase):
62491 ResolverPlaygroundTestCase(
62492 ["dev-libs/B", "dev-libs/A"],
62493 all_permutations = True,
62494 - mergelist = ["dev-libs/Z-2", "dev-libs/B-1", "dev-libs/A-1", ],
62495 + mergelist = ["dev-libs/Z-2", "dev-libs/B-1", "dev-libs/A-1",],
62496 ignore_mergelist_order = True,
62497 success = True),
62498 )
62499 @@ -190,7 +152,7 @@ class BacktrackingTestCase(TestCase):
62500 "dev-libs/D-1": { "RDEPEND": "<dev-libs/A-2" },
62501 }
62502
62503 - world = [ "dev-libs/B", "dev-libs/C" ]
62504 + world = ["dev-libs/B", "dev-libs/C"]
62505
62506 options = {'--update' : True, '--deep' : True, '--selective' : True}
62507
62508
62509 diff --git a/pym/portage/tests/resolver/test_blocker.py b/pym/portage/tests/resolver/test_blocker.py
62510 new file mode 100644
62511 index 0000000..94a88b8
62512 --- /dev/null
62513 +++ b/pym/portage/tests/resolver/test_blocker.py
62514 @@ -0,0 +1,48 @@
62515 +# Copyright 2014 Gentoo Foundation
62516 +# Distributed under the terms of the GNU General Public License v2
62517 +
62518 +from portage.tests import TestCase
62519 +from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase
62520 +
62521 +class SlotConflictWithBlockerTestCase(TestCase):
62522 +
62523 + def testBlocker(self):
62524 + ebuilds = {
62525 + "dev-libs/A-1": { "DEPEND": "dev-libs/X" },
62526 + "dev-libs/B-1": { "DEPEND": "<dev-libs/X-2" },
62527 + "dev-libs/C-1": { "DEPEND": "<dev-libs/X-3" },
62528 +
62529 + "dev-libs/X-1": { "EAPI": "2", "RDEPEND": "!=dev-libs/Y-1" },
62530 + "dev-libs/X-2": { "EAPI": "2", "RDEPEND": "!=dev-libs/Y-2" },
62531 + "dev-libs/X-3": { "EAPI": "2", "RDEPEND": "!=dev-libs/Y-3" },
62532 +
62533 + "dev-libs/Y-1": { "SLOT": "1" },
62534 + "dev-libs/Y-2": { "SLOT": "2" },
62535 + "dev-libs/Y-3": { "SLOT": "3" },
62536 + }
62537 +
62538 + installed = {
62539 + "dev-libs/Y-1": { "SLOT": "1" },
62540 + "dev-libs/Y-2": { "SLOT": "2" },
62541 + "dev-libs/Y-3": { "SLOT": "3" },
62542 + }
62543 +
62544 + test_cases = (
62545 + ResolverPlaygroundTestCase(
62546 + ["dev-libs/A", "dev-libs/B", "dev-libs/C"],
62547 + options = { "--backtrack": 0 },
62548 + all_permutations = True,
62549 + success = True,
62550 + ambiguous_merge_order = True,
62551 + mergelist = ["dev-libs/X-1", "[uninstall]dev-libs/Y-1", "!=dev-libs/Y-1", \
62552 + ("dev-libs/A-1", "dev-libs/B-1", "dev-libs/C-1")]),
62553 + )
62554 +
62555 + playground = ResolverPlayground(ebuilds=ebuilds,
62556 + installed=installed, debug=False)
62557 + try:
62558 + for test_case in test_cases:
62559 + playground.run_TestCase(test_case)
62560 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
62561 + finally:
62562 + playground.cleanup()
62563
62564 diff --git a/pym/portage/tests/resolver/test_complete_graph.py b/pym/portage/tests/resolver/test_complete_graph.py
62565 index 1b0342c..95b1f88 100644
62566 --- a/pym/portage/tests/resolver/test_complete_graph.py
62567 +++ b/pym/portage/tests/resolver/test_complete_graph.py
62568 @@ -93,7 +93,7 @@ class CompleteGraphTestCase(TestCase):
62569 test_cases = (
62570 ResolverPlaygroundTestCase(
62571 [">=sys-libs/x-2"],
62572 - options = {"--complete-graph-if-new-ver" : "n", "--rebuild-if-new-slot-abi": "n"},
62573 + options = {"--complete-graph-if-new-ver" : "n", "--rebuild-if-new-slot": "n"},
62574 mergelist = ["sys-libs/x-2"],
62575 success = True,
62576 ),
62577 @@ -106,7 +106,7 @@ class CompleteGraphTestCase(TestCase):
62578 ),
62579 ResolverPlaygroundTestCase(
62580 ["<sys-libs/x-1"],
62581 - options = {"--complete-graph-if-new-ver" : "n", "--rebuild-if-new-slot-abi": "n"},
62582 + options = {"--complete-graph-if-new-ver" : "n", "--rebuild-if-new-slot": "n"},
62583 mergelist = ["sys-libs/x-0.1"],
62584 success = True,
62585 ),
62586
62587 diff --git a/pym/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py b/pym/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py
62588 new file mode 100644
62589 index 0000000..fddbead
62590 --- /dev/null
62591 +++ b/pym/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py
62592 @@ -0,0 +1,74 @@
62593 +# Copyright 2013 Gentoo Foundation
62594 +# Distributed under the terms of the GNU General Public License v2
62595 +
62596 +from portage.tests import TestCase
62597 +from portage.tests.resolver.ResolverPlayground import (ResolverPlayground,
62598 + ResolverPlaygroundTestCase)
62599 +
62600 +class CompleteIfNewSubSlotWithoutRevBumpTestCase(TestCase):
62601 +
62602 + def testCompleteIfNewSubSlotWithoutRevBump(self):
62603 +
62604 + ebuilds = {
62605 + "media-libs/libpng-1.5.14" : {
62606 + "EAPI": "5",
62607 + "SLOT": "0"
62608 + },
62609 +
62610 + "x11-libs/gdk-pixbuf-2.26.5" : {
62611 + "EAPI": "5",
62612 + "DEPEND": ">=media-libs/libpng-1.4:=",
62613 + "RDEPEND": ">=media-libs/libpng-1.4:="
62614 + },
62615 + }
62616 +
62617 + binpkgs = {
62618 + "x11-libs/gdk-pixbuf-2.26.5" : {
62619 + "EAPI": "5",
62620 + "DEPEND": ">=media-libs/libpng-1.4:0/15=",
62621 + "RDEPEND": ">=media-libs/libpng-1.4:0/15="
62622 + },
62623 + }
62624 +
62625 + installed = {
62626 + "media-libs/libpng-1.5.14" : {
62627 + "EAPI": "5",
62628 + "SLOT": "0/15"
62629 + },
62630 +
62631 + "x11-libs/gdk-pixbuf-2.26.5" : {
62632 + "EAPI": "5",
62633 + "DEPEND": ">=media-libs/libpng-1.4:0/15=",
62634 + "RDEPEND": ">=media-libs/libpng-1.4:0/15="
62635 + },
62636 + }
62637 +
62638 + world = ["x11-libs/gdk-pixbuf"]
62639 +
62640 + test_cases = (
62641 + # Test that --complete-graph-if-new-ver=y triggers rebuild
62642 + # when the sub-slot changes without a revbump.
62643 + ResolverPlaygroundTestCase(
62644 + ["media-libs/libpng"],
62645 + options = {
62646 + "--oneshot": True,
62647 + "--complete-graph-if-new-ver": "y",
62648 + "--rebuild-if-new-slot": "n",
62649 + "--usepkg": True
62650 + },
62651 + success = True,
62652 + mergelist = [
62653 + "media-libs/libpng-1.5.14",
62654 + "x11-libs/gdk-pixbuf-2.26.5"
62655 + ]
62656 + ),
62657 + )
62658 +
62659 + playground = ResolverPlayground(ebuilds=ebuilds, binpkgs=binpkgs,
62660 + installed=installed, world=world, debug=False)
62661 + try:
62662 + for test_case in test_cases:
62663 + playground.run_TestCase(test_case)
62664 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
62665 + finally:
62666 + playground.cleanup()
62667
62668 diff --git a/pym/portage/tests/resolver/test_depclean.py b/pym/portage/tests/resolver/test_depclean.py
62669 index ba70144..42350be 100644
62670 --- a/pym/portage/tests/resolver/test_depclean.py
62671 +++ b/pym/portage/tests/resolver/test_depclean.py
62672 @@ -23,9 +23,9 @@ class SimpleDepcleanTestCase(TestCase):
62673 test_cases = (
62674 ResolverPlaygroundTestCase(
62675 [],
62676 - options = {"--depclean": True},
62677 - success = True,
62678 - cleanlist = ["dev-libs/B-1"]),
62679 + options={"--depclean": True},
62680 + success=True,
62681 + cleanlist=["dev-libs/B-1"]),
62682 )
62683
62684 playground = ResolverPlayground(ebuilds=ebuilds, installed=installed, world=world)
62685 @@ -63,9 +63,9 @@ class DepcleanWithDepsTestCase(TestCase):
62686 test_cases = (
62687 ResolverPlaygroundTestCase(
62688 [],
62689 - options = {"--depclean": True},
62690 - success = True,
62691 - cleanlist = ["dev-libs/B-1", "dev-libs/D-1",
62692 + options={"--depclean": True},
62693 + success=True,
62694 + cleanlist=["dev-libs/B-1", "dev-libs/D-1",
62695 "dev-libs/E-1", "dev-libs/F-1"]),
62696 )
62697
62698 @@ -104,10 +104,10 @@ class DepcleanWithInstalledMaskedTestCase(TestCase):
62699 test_cases = (
62700 ResolverPlaygroundTestCase(
62701 [],
62702 - options = {"--depclean": True},
62703 - success = True,
62704 - #cleanlist = ["dev-libs/C-1"]),
62705 - cleanlist = ["dev-libs/B-1"]),
62706 + options={"--depclean": True},
62707 + success=True,
62708 + #cleanlist=["dev-libs/C-1"]),
62709 + cleanlist=["dev-libs/B-1"]),
62710 )
62711
62712 playground = ResolverPlayground(ebuilds=ebuilds, installed=installed, world=world)
62713 @@ -143,9 +143,9 @@ class DepcleanInstalledKeywordMaskedSlotTestCase(TestCase):
62714 test_cases = (
62715 ResolverPlaygroundTestCase(
62716 [],
62717 - options = {"--depclean": True},
62718 - success = True,
62719 - cleanlist = ["dev-libs/B-2.7"]),
62720 + options={"--depclean": True},
62721 + success=True,
62722 + cleanlist=["dev-libs/B-2.7"]),
62723 )
62724
62725 playground = ResolverPlayground(ebuilds=ebuilds, installed=installed, world=world)
62726 @@ -169,31 +169,31 @@ class DepcleanWithExcludeTestCase(TestCase):
62727 #Without --exclude.
62728 ResolverPlaygroundTestCase(
62729 [],
62730 - options = {"--depclean": True},
62731 - success = True,
62732 - cleanlist = ["dev-libs/B-1", "dev-libs/A-1"]),
62733 + options={"--depclean": True},
62734 + success=True,
62735 + cleanlist=["dev-libs/B-1", "dev-libs/A-1"]),
62736 ResolverPlaygroundTestCase(
62737 ["dev-libs/A"],
62738 - options = {"--depclean": True},
62739 - success = True,
62740 - cleanlist = []),
62741 + options={"--depclean": True},
62742 + success=True,
62743 + cleanlist=[]),
62744 ResolverPlaygroundTestCase(
62745 ["dev-libs/B"],
62746 - options = {"--depclean": True},
62747 - success = True,
62748 - cleanlist = ["dev-libs/B-1"]),
62749 + options={"--depclean": True},
62750 + success=True,
62751 + cleanlist=["dev-libs/B-1"]),
62752
62753 #With --exclude
62754 ResolverPlaygroundTestCase(
62755 [],
62756 - options = {"--depclean": True, "--exclude": ["dev-libs/A"]},
62757 - success = True,
62758 - cleanlist = ["dev-libs/B-1"]),
62759 + options={"--depclean": True, "--exclude": ["dev-libs/A"]},
62760 + success=True,
62761 + cleanlist=["dev-libs/B-1"]),
62762 ResolverPlaygroundTestCase(
62763 ["dev-libs/B"],
62764 - options = {"--depclean": True, "--exclude": ["dev-libs/B"]},
62765 - success = True,
62766 - cleanlist = []),
62767 + options={"--depclean": True, "--exclude": ["dev-libs/B"]},
62768 + success=True,
62769 + cleanlist=[]),
62770 )
62771
62772 playground = ResolverPlayground(installed=installed)
62773 @@ -215,25 +215,25 @@ class DepcleanWithExcludeAndSlotsTestCase(TestCase):
62774 "dev-libs/Y-2": { "RDEPEND": "=dev-libs/Z-2", "SLOT": 2 },
62775 }
62776
62777 - world = [ "dev-libs/Y" ]
62778 + world = ["dev-libs/Y"]
62779
62780 test_cases = (
62781 #Without --exclude.
62782 ResolverPlaygroundTestCase(
62783 [],
62784 - options = {"--depclean": True},
62785 - success = True,
62786 - cleanlist = ["dev-libs/Y-1", "dev-libs/Z-1"]),
62787 + options={"--depclean": True},
62788 + success=True,
62789 + cleanlist=["dev-libs/Y-1", "dev-libs/Z-1"]),
62790 ResolverPlaygroundTestCase(
62791 [],
62792 - options = {"--depclean": True, "--exclude": ["dev-libs/Z"]},
62793 - success = True,
62794 - cleanlist = ["dev-libs/Y-1"]),
62795 + options={"--depclean": True, "--exclude": ["dev-libs/Z"]},
62796 + success=True,
62797 + cleanlist=["dev-libs/Y-1"]),
62798 ResolverPlaygroundTestCase(
62799 [],
62800 - options = {"--depclean": True, "--exclude": ["dev-libs/Y"]},
62801 - success = True,
62802 - cleanlist = []),
62803 + options={"--depclean": True, "--exclude": ["dev-libs/Y"]},
62804 + success=True,
62805 + cleanlist=[]),
62806 )
62807
62808 playground = ResolverPlayground(installed=installed, world=world)
62809 @@ -256,24 +256,24 @@ class DepcleanAndWildcardsTestCase(TestCase):
62810 test_cases = (
62811 ResolverPlaygroundTestCase(
62812 ["*/*"],
62813 - options = {"--depclean": True},
62814 - success = True,
62815 - cleanlist = ["dev-libs/A-1", "dev-libs/B-1"]),
62816 + options={"--depclean": True},
62817 + success=True,
62818 + cleanlist=["dev-libs/A-1", "dev-libs/B-1"]),
62819 ResolverPlaygroundTestCase(
62820 ["dev-libs/*"],
62821 - options = {"--depclean": True},
62822 - success = True,
62823 - cleanlist = ["dev-libs/A-1", "dev-libs/B-1"]),
62824 + options={"--depclean": True},
62825 + success=True,
62826 + cleanlist=["dev-libs/A-1", "dev-libs/B-1"]),
62827 ResolverPlaygroundTestCase(
62828 ["*/A"],
62829 - options = {"--depclean": True},
62830 - success = True,
62831 - cleanlist = ["dev-libs/A-1"]),
62832 + options={"--depclean": True},
62833 + success=True,
62834 + cleanlist=["dev-libs/A-1"]),
62835 ResolverPlaygroundTestCase(
62836 ["*/B"],
62837 - options = {"--depclean": True},
62838 - success = True,
62839 - cleanlist = []),
62840 + options={"--depclean": True},
62841 + success=True,
62842 + cleanlist=[]),
62843 )
62844
62845 playground = ResolverPlayground(installed=installed)
62846
62847 diff --git a/pym/portage/tests/resolver/test_depclean_order.py b/pym/portage/tests/resolver/test_depclean_order.py
62848 new file mode 100644
62849 index 0000000..9511d29
62850 --- /dev/null
62851 +++ b/pym/portage/tests/resolver/test_depclean_order.py
62852 @@ -0,0 +1,57 @@
62853 +# Copyright 2013 Gentoo Foundation
62854 +# Distributed under the terms of the GNU General Public License v2
62855 +
62856 +from portage.tests import TestCase
62857 +from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase
62858 +
62859 +class SimpleDepcleanTestCase(TestCase):
62860 +
62861 + def testSimpleDepclean(self):
62862 +
62863 + ebuilds = {
62864 + "dev-libs/A-1": {
62865 + "EAPI": "5",
62866 + "RDEPEND": "dev-libs/B:=",
62867 + },
62868 + "dev-libs/B-1": {
62869 + "EAPI": "5",
62870 + "RDEPEND": "dev-libs/A",
62871 + },
62872 + "dev-libs/C-1": {},
62873 + }
62874 +
62875 + installed = {
62876 + "dev-libs/A-1": {
62877 + "EAPI": "5",
62878 + "RDEPEND": "dev-libs/B:0/0=",
62879 + },
62880 + "dev-libs/B-1": {
62881 + "EAPI": "5",
62882 + "RDEPEND": "dev-libs/A",
62883 + },
62884 + "dev-libs/C-1": {},
62885 + }
62886 +
62887 + world = (
62888 + "dev-libs/C",
62889 + )
62890 +
62891 + test_cases = (
62892 + # Remove dev-libs/A-1 first because of dev-libs/B:0/0= (built
62893 + # slot-operator dep).
62894 + ResolverPlaygroundTestCase(
62895 + [],
62896 + options={"--depclean": True},
62897 + success=True,
62898 + ordered=True,
62899 + cleanlist=["dev-libs/A-1", "dev-libs/B-1"]),
62900 + )
62901 +
62902 + playground = ResolverPlayground(ebuilds=ebuilds,
62903 + installed=installed, world=world)
62904 + try:
62905 + for test_case in test_cases:
62906 + playground.run_TestCase(test_case)
62907 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
62908 + finally:
62909 + playground.cleanup()
62910
62911 diff --git a/pym/portage/tests/resolver/test_depclean_slot_unavailable.py b/pym/portage/tests/resolver/test_depclean_slot_unavailable.py
62912 new file mode 100644
62913 index 0000000..689392b
62914 --- /dev/null
62915 +++ b/pym/portage/tests/resolver/test_depclean_slot_unavailable.py
62916 @@ -0,0 +1,78 @@
62917 +# Copyright 2012 Gentoo Foundation
62918 +# Distributed under the terms of the GNU General Public License v2
62919 +
62920 +from portage.tests import TestCase
62921 +from portage.tests.resolver.ResolverPlayground import (ResolverPlayground,
62922 + ResolverPlaygroundTestCase)
62923 +
62924 +class DepcleanUnavailableSlotTestCase(TestCase):
62925 +
62926 + def testDepcleanUnavailableSlot(self):
62927 + """
62928 + Test bug #445506, where we want to remove the slot
62929 + for which the ebuild is no longer available, even
62930 + though its version is higher.
62931 + """
62932 +
62933 + ebuilds = {
62934 + "sys-kernel/gentoo-sources-3.0.53": {
62935 + "SLOT": "3.0.53",
62936 + "KEYWORDS": "x86"
62937 + },
62938 + }
62939 +
62940 + installed = {
62941 + "sys-kernel/gentoo-sources-3.0.53": {
62942 + "SLOT": "3.0.53",
62943 + "KEYWORDS": "x86"
62944 + },
62945 + "sys-kernel/gentoo-sources-3.2.21": {
62946 + "SLOT": "3.2.21",
62947 + "KEYWORDS": "x86"
62948 + },
62949 + }
62950 +
62951 + world = ["sys-kernel/gentoo-sources"]
62952 +
62953 + test_cases = (
62954 + ResolverPlaygroundTestCase(
62955 + [],
62956 + options={"--depclean": True},
62957 + success=True,
62958 + cleanlist=["sys-kernel/gentoo-sources-3.2.21"]),
62959 + )
62960 +
62961 + playground = ResolverPlayground(ebuilds=ebuilds,
62962 + installed=installed, world=world, debug=False)
62963 + try:
62964 + for test_case in test_cases:
62965 + playground.run_TestCase(test_case)
62966 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
62967 + finally:
62968 + playground.cleanup()
62969 +
62970 + # Now make the newer version available and verify that
62971 + # the lower version is depcleaned.
62972 + ebuilds.update({
62973 + "sys-kernel/gentoo-sources-3.2.21": {
62974 + "SLOT": "3.2.21",
62975 + "KEYWORDS": "x86"
62976 + },
62977 + })
62978 +
62979 + test_cases = (
62980 + ResolverPlaygroundTestCase(
62981 + [],
62982 + options={"--depclean": True},
62983 + success=True,
62984 + cleanlist=["sys-kernel/gentoo-sources-3.0.53"]),
62985 + )
62986 +
62987 + playground = ResolverPlayground(ebuilds=ebuilds,
62988 + installed=installed, world=world, debug=False)
62989 + try:
62990 + for test_case in test_cases:
62991 + playground.run_TestCase(test_case)
62992 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
62993 + finally:
62994 + playground.cleanup()
62995
62996 diff --git a/pym/portage/tests/resolver/test_features_test_use.py b/pym/portage/tests/resolver/test_features_test_use.py
62997 new file mode 100644
62998 index 0000000..bdd179d
62999 --- /dev/null
63000 +++ b/pym/portage/tests/resolver/test_features_test_use.py
63001 @@ -0,0 +1,68 @@
63002 +# Copyright 2012 Gentoo Foundation
63003 +# Distributed under the terms of the GNU General Public License v2
63004 +
63005 +from portage.tests import TestCase
63006 +from portage.tests.resolver.ResolverPlayground import (ResolverPlayground,
63007 + ResolverPlaygroundTestCase)
63008 +
63009 +class FeaturesTestUse(TestCase):
63010 +
63011 + def testFeaturesTestUse(self):
63012 + ebuilds = {
63013 + "dev-libs/A-1" : {
63014 + "IUSE": "test"
63015 + },
63016 + "dev-libs/B-1" : {
63017 + "IUSE": "test foo"
63018 + },
63019 + }
63020 +
63021 + installed = {
63022 + "dev-libs/A-1" : {
63023 + "USE": "",
63024 + "IUSE": "test"
63025 + },
63026 + "dev-libs/B-1" : {
63027 + "USE": "foo",
63028 + "IUSE": "test foo"
63029 + },
63030 + }
63031 +
63032 + user_config = {
63033 + "make.conf" : ("FEATURES=test", "USE=\"-test -foo\"")
63034 + }
63035 +
63036 + test_cases = (
63037 +
63038 + # USE=test state should not trigger --newuse rebuilds, as
63039 + # specified in bug #373209, comment #3.
63040 + ResolverPlaygroundTestCase(
63041 + ["dev-libs/A"],
63042 + options = {"--newuse": True, "--selective": True},
63043 + success = True,
63044 + mergelist = []),
63045 +
63046 + # USE=-test -> USE=test, with USE=test forced by FEATURES=test
63047 + ResolverPlaygroundTestCase(
63048 + ["dev-libs/A"],
63049 + options = {},
63050 + success = True,
63051 + mergelist = ["dev-libs/A-1"]),
63052 +
63053 + # USE=foo -> USE=-foo, with USE=test forced by FEATURES=test
63054 + ResolverPlaygroundTestCase(
63055 + ["dev-libs/B"],
63056 + options = {"--newuse": True, "--selective": True},
63057 + success = True,
63058 + mergelist = ["dev-libs/B-1"]),
63059 + )
63060 +
63061 + playground = ResolverPlayground(ebuilds=ebuilds,
63062 + installed=installed, user_config=user_config, debug=False)
63063 + try:
63064 + for test_case in test_cases:
63065 + playground.run_TestCase(test_case)
63066 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
63067 + finally:
63068 + playground.cleanup()
63069 +
63070
63071 diff --git a/pym/portage/tests/resolver/test_merge_order.py b/pym/portage/tests/resolver/test_merge_order.py
63072 index 5b5709a..5d000d1 100644
63073 --- a/pym/portage/tests/resolver/test_merge_order.py
63074 +++ b/pym/portage/tests/resolver/test_merge_order.py
63075 @@ -1,4 +1,4 @@
63076 -# Copyright 2011 Gentoo Foundation
63077 +# Copyright 2011-2013 Gentoo Foundation
63078 # Distributed under the terms of the GNU General Public License v2
63079
63080 import portage
63081 @@ -191,6 +191,12 @@ class MergeOrderTestCase(TestCase):
63082 "DEPEND" : "kde-base/libkdegames",
63083 "RDEPEND" : "kde-base/libkdegames",
63084 },
63085 + "media-libs/mesa-9.1.3" : {
63086 + "EAPI" : "5",
63087 + "IUSE" : "+xorg",
63088 + "DEPEND" : "xorg? ( x11-base/xorg-server:= )",
63089 + "RDEPEND" : "xorg? ( x11-base/xorg-server:= )",
63090 + },
63091 "media-video/libav-0.7_pre20110327" : {
63092 "EAPI" : "2",
63093 "IUSE" : "X +encode",
63094 @@ -205,6 +211,12 @@ class MergeOrderTestCase(TestCase):
63095 "IUSE" : "X +encode",
63096 "RDEPEND" : "|| ( >=media-video/ffmpeg-0.6.90_rc0-r2[X=,encode=] >=media-video/libav-0.6.90_rc[X=,encode=] )",
63097 },
63098 + "x11-base/xorg-server-1.14.1" : {
63099 + "EAPI" : "5",
63100 + "SLOT": "0/1.14.1",
63101 + "DEPEND" : "media-libs/mesa",
63102 + "RDEPEND" : "media-libs/mesa",
63103 + },
63104 }
63105
63106 installed = {
63107 @@ -256,6 +268,13 @@ class MergeOrderTestCase(TestCase):
63108 "RDEPEND": "",
63109 },
63110 "app-arch/xz-utils-5.0.1" : {},
63111 + "media-libs/mesa-9.1.3" : {
63112 + "EAPI" : "5",
63113 + "IUSE" : "+xorg",
63114 + "USE": "xorg",
63115 + "DEPEND" : "x11-base/xorg-server:0/1.14.1=",
63116 + "RDEPEND" : "x11-base/xorg-server:0/1.14.1=",
63117 + },
63118 "media-video/ffmpeg-0.7_rc1" : {
63119 "EAPI" : "2",
63120 "IUSE" : "X +encode",
63121 @@ -267,6 +286,12 @@ class MergeOrderTestCase(TestCase):
63122 "USE" : "encode",
63123 "RDEPEND" : "|| ( >=media-video/ffmpeg-0.6.90_rc0-r2[X=,encode=] >=media-video/libav-0.6.90_rc[X=,encode=] )",
63124 },
63125 + "x11-base/xorg-server-1.14.1" : {
63126 + "EAPI" : "5",
63127 + "SLOT": "0/1.14.1",
63128 + "DEPEND" : "media-libs/mesa",
63129 + "RDEPEND" : "media-libs/mesa",
63130 + },
63131 }
63132
63133 test_cases = (
63134 @@ -434,6 +459,14 @@ class MergeOrderTestCase(TestCase):
63135 ('kde-base/libkdegames-3.5.7', 'kde-base/kmines-3.5.7'),
63136 ),
63137 mergelist = [('kde-base/kdelibs-3.5.7', 'dev-util/pkgconfig-0.25-r2', 'kde-misc/kdnssd-avahi-0.1.2', 'app-arch/xz-utils-5.0.2', 'kde-base/libkdegames-3.5.7', 'kde-base/kdnssd-3.5.7', 'kde-base/kmines-3.5.7')]),
63138 + # Test satisfied circular DEPEND/RDEPEND with one := operator.
63139 + # Both deps are already satisfied by installed packages, but
63140 + # the := dep is given higher priority in merge order.
63141 + ResolverPlaygroundTestCase(
63142 + ["media-libs/mesa", "x11-base/xorg-server"],
63143 + success=True,
63144 + all_permutations = True,
63145 + mergelist = ['x11-base/xorg-server-1.14.1', 'media-libs/mesa-9.1.3']),
63146 )
63147
63148 playground = ResolverPlayground(ebuilds=ebuilds, installed=installed)
63149
63150 diff --git a/pym/portage/tests/resolver/test_multirepo.py b/pym/portage/tests/resolver/test_multirepo.py
63151 index 34c6d45..2b1a6d0 100644
63152 --- a/pym/portage/tests/resolver/test_multirepo.py
63153 +++ b/pym/portage/tests/resolver/test_multirepo.py
63154 @@ -1,4 +1,4 @@
63155 -# Copyright 2010-2011 Gentoo Foundation
63156 +# Copyright 2010-2014 Gentoo Foundation
63157 # Distributed under the terms of the GNU General Public License v2
63158
63159 from portage.tests import TestCase
63160 @@ -37,16 +37,25 @@ class MultirepoTestCase(TestCase):
63161
63162 "dev-libs/I-1::repo2": { "SLOT" : "1"},
63163 "dev-libs/I-2::repo2": { "SLOT" : "2"},
63164 +
63165 + "dev-libs/K-1::repo2": { },
63166 }
63167
63168 installed = {
63169 "dev-libs/H-1": { "RDEPEND" : "|| ( dev-libs/I:2 dev-libs/I:1 )"},
63170 "dev-libs/I-2::repo1": {"SLOT" : "2"},
63171 + "dev-libs/K-1::repo1": { },
63172 + }
63173 +
63174 + binpkgs = {
63175 + "dev-libs/C-1::repo2": { },
63176 + "dev-libs/I-2::repo1": {"SLOT" : "2"},
63177 + "dev-libs/K-1::repo2": { },
63178 }
63179
63180 sets = {
63181 - "multirepotest":
63182 - ( "dev-libs/A::test_repo", )
63183 + "multirepotest":
63184 + ("dev-libs/A::test_repo",)
63185 }
63186
63187 test_cases = (
63188 @@ -96,6 +105,68 @@ class MultirepoTestCase(TestCase):
63189 check_repo_names = True,
63190 mergelist = ["dev-libs/D-1::repo2"]),
63191
63192 + #--usepkg: don't reinstall on new repo without --newrepo
63193 + ResolverPlaygroundTestCase(
63194 + ["dev-libs/C"],
63195 + options = {"--usepkg": True, "--selective": True},
63196 + success = True,
63197 + check_repo_names = True,
63198 + mergelist = ["[binary]dev-libs/C-1::repo2"]),
63199 +
63200 + #--usepkgonly: don't reinstall on new repo without --newrepo
63201 + ResolverPlaygroundTestCase(
63202 + ["dev-libs/C"],
63203 + options = {"--usepkgonly": True, "--selective": True},
63204 + success = True,
63205 + check_repo_names = True,
63206 + mergelist = ["[binary]dev-libs/C-1::repo2"]),
63207 +
63208 + #--newrepo: pick ebuild if binpkg/ebuild have different repo
63209 + ResolverPlaygroundTestCase(
63210 + ["dev-libs/C"],
63211 + options = {"--usepkg": True, "--newrepo": True, "--selective": True},
63212 + success = True,
63213 + check_repo_names = True,
63214 + mergelist = ["dev-libs/C-1::repo1"]),
63215 +
63216 + #--newrepo --usepkgonly: ebuild is ignored
63217 + ResolverPlaygroundTestCase(
63218 + ["dev-libs/C"],
63219 + options = {"--usepkgonly": True, "--newrepo": True, "--selective": True},
63220 + success = True,
63221 + check_repo_names = True,
63222 + mergelist = ["[binary]dev-libs/C-1::repo2"]),
63223 +
63224 + #--newrepo: pick ebuild if binpkg/ebuild have different repo
63225 + ResolverPlaygroundTestCase(
63226 + ["dev-libs/I"],
63227 + options = {"--usepkg": True, "--newrepo": True, "--selective": True},
63228 + success = True,
63229 + check_repo_names = True,
63230 + mergelist = ["dev-libs/I-2::repo2"]),
63231 +
63232 + #--newrepo --usepkgonly: if binpkg matches installed, do nothing
63233 + ResolverPlaygroundTestCase(
63234 + ["dev-libs/I"],
63235 + options = {"--usepkgonly": True, "--newrepo": True, "--selective": True},
63236 + success = True,
63237 + mergelist = []),
63238 +
63239 + #--newrepo --usepkgonly: reinstall if binpkg has new repo.
63240 + ResolverPlaygroundTestCase(
63241 + ["dev-libs/K"],
63242 + options = {"--usepkgonly": True, "--newrepo": True, "--selective": True},
63243 + success = True,
63244 + check_repo_names = True,
63245 + mergelist = ["[binary]dev-libs/K-1::repo2"]),
63246 +
63247 + #--usepkgonly: don't reinstall on new repo without --newrepo.
63248 + ResolverPlaygroundTestCase(
63249 + ["dev-libs/K"],
63250 + options = {"--usepkgonly": True, "--selective": True},
63251 + success = True,
63252 + mergelist = []),
63253 +
63254 #Atoms with slots
63255 ResolverPlaygroundTestCase(
63256 ["dev-libs/E"],
63257 @@ -137,6 +208,15 @@ class MultirepoTestCase(TestCase):
63258 success = True,
63259 mergelist = []),
63260
63261 + # Dependency on installed dev-libs/I-2 ebuild should trigger reinstall
63262 + # when --newrepo flag is used.
63263 + ResolverPlaygroundTestCase(
63264 + ["dev-libs/H"],
63265 + options = {"--update": True, "--deep": True, "--newrepo": True},
63266 + success = True,
63267 + check_repo_names = True,
63268 + mergelist = ["dev-libs/I-2::repo2"]),
63269 +
63270 # Check interaction between repo priority and unsatisfied
63271 # REQUIRED_USE, for bug #350254.
63272 ResolverPlaygroundTestCase(
63273 @@ -147,7 +227,7 @@ class MultirepoTestCase(TestCase):
63274 )
63275
63276 playground = ResolverPlayground(ebuilds=ebuilds,
63277 - installed=installed, sets=sets)
63278 + binpkgs=binpkgs, installed=installed, sets=sets)
63279 try:
63280 for test_case in test_cases:
63281 playground.run_TestCase(test_case)
63282
63283 diff --git a/pym/portage/tests/resolver/test_onlydeps.py b/pym/portage/tests/resolver/test_onlydeps.py
63284 new file mode 100644
63285 index 0000000..986769a
63286 --- /dev/null
63287 +++ b/pym/portage/tests/resolver/test_onlydeps.py
63288 @@ -0,0 +1,34 @@
63289 +# Copyright 2014 Gentoo Foundation
63290 +# Distributed under the terms of the GNU General Public License v2
63291 +
63292 +from portage.tests import TestCase
63293 +from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase
63294 +
63295 +class OnlydepsTestCase(TestCase):
63296 +
63297 + def testOnlydeps(self):
63298 + ebuilds = {
63299 + "dev-libs/A-1": { "DEPEND": "dev-libs/B" },
63300 + "dev-libs/B-1": { },
63301 + }
63302 + installed = {
63303 + "dev-libs/B-1": { },
63304 + }
63305 +
63306 + test_cases = (
63307 + ResolverPlaygroundTestCase(
63308 + ["dev-libs/A", "dev-libs/B"],
63309 + all_permutations = True,
63310 + success = True,
63311 + options = { "--onlydeps": True },
63312 + mergelist = ["dev-libs/B-1"]),
63313 + )
63314 +
63315 + playground = ResolverPlayground(ebuilds=ebuilds,
63316 + installed=installed, debug=False)
63317 + try:
63318 + for test_case in test_cases:
63319 + playground.run_TestCase(test_case)
63320 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
63321 + finally:
63322 + playground.cleanup()
63323
63324 diff --git a/pym/portage/tests/resolver/test_or_choices.py b/pym/portage/tests/resolver/test_or_choices.py
63325 new file mode 100644
63326 index 0000000..90e6814
63327 --- /dev/null
63328 +++ b/pym/portage/tests/resolver/test_or_choices.py
63329 @@ -0,0 +1,134 @@
63330 +# Copyright 2013 Gentoo Foundation
63331 +# Distributed under the terms of the GNU General Public License v2
63332 +
63333 +from portage.tests import TestCase
63334 +from portage.tests.resolver.ResolverPlayground import (ResolverPlayground,
63335 + ResolverPlaygroundTestCase)
63336 +
63337 +class OrChoicesTestCase(TestCase):
63338 +
63339 + def testOrChoices(self):
63340 + ebuilds = {
63341 + "dev-lang/vala-0.20.0" : {
63342 + "EAPI": "5",
63343 + "SLOT": "0.20"
63344 + },
63345 + "dev-lang/vala-0.18.0" : {
63346 + "EAPI": "5",
63347 + "SLOT": "0.18"
63348 + },
63349 + #"dev-libs/gobject-introspection-1.36.0" : {
63350 + # "EAPI": "5",
63351 + # "RDEPEND" : "!<dev-lang/vala-0.20.0",
63352 + #},
63353 + "dev-libs/gobject-introspection-1.34.0" : {
63354 + "EAPI": "5"
63355 + },
63356 + "sys-apps/systemd-ui-2" : {
63357 + "EAPI": "5",
63358 + "RDEPEND" : "|| ( dev-lang/vala:0.20 dev-lang/vala:0.18 )"
63359 + },
63360 + }
63361 +
63362 + installed = {
63363 + "dev-lang/vala-0.18.0" : {
63364 + "EAPI": "5",
63365 + "SLOT": "0.18"
63366 + },
63367 + "dev-libs/gobject-introspection-1.34.0" : {
63368 + "EAPI": "5"
63369 + },
63370 + "sys-apps/systemd-ui-2" : {
63371 + "EAPI": "5",
63372 + "RDEPEND" : "|| ( dev-lang/vala:0.20 dev-lang/vala:0.18 )"
63373 + },
63374 + }
63375 +
63376 + world = ["dev-libs/gobject-introspection", "sys-apps/systemd-ui"]
63377 +
63378 + test_cases = (
63379 + # Demonstrate that vala:0.20 update is pulled in, for bug #478188
63380 + ResolverPlaygroundTestCase(
63381 + ["@world"],
63382 + options = {"--update": True, "--deep": True},
63383 + success=True,
63384 + all_permutations = True,
63385 + mergelist = ['dev-lang/vala-0.20.0']),
63386 + # Verify that vala:0.20 is not pulled in without --deep
63387 + ResolverPlaygroundTestCase(
63388 + ["@world"],
63389 + options = {"--update": True},
63390 + success=True,
63391 + all_permutations = True,
63392 + mergelist = []),
63393 + # Verify that vala:0.20 is not pulled in without --update
63394 + ResolverPlaygroundTestCase(
63395 + ["@world"],
63396 + options = {"--selective": True, "--deep": True},
63397 + success=True,
63398 + all_permutations = True,
63399 + mergelist = []),
63400 + )
63401 +
63402 + playground = ResolverPlayground(ebuilds=ebuilds, installed=installed, world=world)
63403 + try:
63404 + for test_case in test_cases:
63405 + playground.run_TestCase(test_case)
63406 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
63407 + finally:
63408 + playground.cleanup()
63409 +
63410 + def testOrChoicesLibpostproc(self):
63411 + ebuilds = {
63412 + "media-video/ffmpeg-0.10" : {
63413 + "EAPI": "5",
63414 + "SLOT": "0.10"
63415 + },
63416 + "media-video/ffmpeg-1.2.2" : {
63417 + "EAPI": "5",
63418 + "SLOT": "0"
63419 + },
63420 + "media-libs/libpostproc-0.8.0.20121125" : {
63421 + "EAPI": "5"
63422 + },
63423 + "media-plugins/gst-plugins-ffmpeg-0.10.13_p201211-r1" : {
63424 + "EAPI": "5",
63425 + "RDEPEND" : "|| ( media-video/ffmpeg:0 media-libs/libpostproc )"
63426 + },
63427 + }
63428 +
63429 + installed = {
63430 + "media-video/ffmpeg-0.10" : {
63431 + "EAPI": "5",
63432 + "SLOT": "0.10"
63433 + },
63434 + "media-libs/libpostproc-0.8.0.20121125" : {
63435 + "EAPI": "5"
63436 + },
63437 + "media-plugins/gst-plugins-ffmpeg-0.10.13_p201211-r1" : {
63438 + "EAPI": "5",
63439 + "RDEPEND" : "|| ( media-video/ffmpeg:0 media-libs/libpostproc )"
63440 + },
63441 + }
63442 +
63443 + world = ["media-plugins/gst-plugins-ffmpeg"]
63444 +
63445 + test_cases = (
63446 + # Demonstrate that libpostproc is preferred
63447 + # over ffmpeg:0 for bug #480736.
63448 + ResolverPlaygroundTestCase(
63449 + ["@world"],
63450 + options = {"--update": True, "--deep": True},
63451 + success=True,
63452 + all_permutations = True,
63453 + mergelist = []),
63454 + )
63455 +
63456 + playground = ResolverPlayground(ebuilds=ebuilds, installed=installed,
63457 + world=world, debug=False)
63458 + try:
63459 + for test_case in test_cases:
63460 + playground.run_TestCase(test_case)
63461 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
63462 + finally:
63463 + playground.cleanup()
63464
63465 diff --git a/pym/portage/tests/resolver/test_package_tracker.py b/pym/portage/tests/resolver/test_package_tracker.py
63466 new file mode 100644
63467 index 0000000..8fa3513
63468 --- /dev/null
63469 +++ b/pym/portage/tests/resolver/test_package_tracker.py
63470 @@ -0,0 +1,261 @@
63471 +# Copyright 2014 Gentoo Foundation
63472 +# Distributed under the terms of the GNU General Public License v2
63473 +
63474 +import collections
63475 +
63476 +from portage.dep import Atom
63477 +from portage.tests import TestCase
63478 +from _emerge.resolver.package_tracker import PackageTracker, PackageTrackerDbapiWrapper
63479 +
63480 +class PackageTrackerTestCase(TestCase):
63481 +
63482 + FakePackage = collections.namedtuple("FakePackage",
63483 + ["root", "cp", "cpv", "slot", "slot_atom", "version", "repo"])
63484 +
63485 + FakeConflict = collections.namedtuple("FakeConflict",
63486 + ["description", "root", "pkgs"])
63487 +
63488 + def make_pkg(self, root, atom, repo="test_repo"):
63489 + atom = Atom(atom)
63490 + slot_atom = Atom("%s:%s" % (atom.cp, atom.slot))
63491 + slot = atom.slot
63492 +
63493 + return self.FakePackage(root=root, cp=atom.cp, cpv=atom.cpv,
63494 + slot=slot, slot_atom=slot_atom, version=atom.version, repo=repo)
63495 +
63496 + def make_conflict(self, description, root, pkgs):
63497 + return self.FakeConflict(description=description, root=root, pkgs=pkgs)
63498 +
63499 + def test_add_remove_discard(self):
63500 + p = PackageTracker()
63501 +
63502 + x1 = self.make_pkg("/", "=dev-libs/X-1:0")
63503 + x2 = self.make_pkg("/", "=dev-libs/X-2:0")
63504 +
63505 + p.add_pkg(x1)
63506 + self.assertTrue(x1 in p)
63507 + self.assertTrue(p.contains(x1, installed=True))
63508 + self.assertTrue(p.contains(x1, installed=False))
63509 + p.remove_pkg(x1)
63510 + self.assertTrue(x1 not in p)
63511 +
63512 + p.add_pkg(x1)
63513 + self.assertTrue(x1 in p)
63514 + p.add_pkg(x1)
63515 + self.assertTrue(x1 in p)
63516 +
63517 + self.assertRaises(KeyError, p.remove_pkg, x2)
63518 +
63519 + p.add_pkg(x2)
63520 + self.assertTrue(x2 in p)
63521 + p.remove_pkg(x2)
63522 + self.assertTrue(x2 not in p)
63523 + p.discard_pkg(x2)
63524 + self.assertTrue(x2 not in p)
63525 + p.add_pkg(x2)
63526 + self.assertTrue(x2 in p)
63527 +
63528 + all_pkgs = list(p.all_pkgs("/"))
63529 + self.assertEqual(len(all_pkgs), 2)
63530 + self.assertTrue(all_pkgs[0] is x1 and all_pkgs[1] is x2)
63531 +
63532 + self.assertEqual(len(list(p.all_pkgs("/"))), 2)
63533 + self.assertEqual(len(list(p.all_pkgs("/xxx"))), 0)
63534 +
63535 + def test_match(self):
63536 + p = PackageTracker()
63537 + x1 = self.make_pkg("/", "=dev-libs/X-1:0")
63538 + x2 = self.make_pkg("/", "=dev-libs/X-2:0")
63539 + x3 = self.make_pkg("/", "=dev-libs/X-3:1")
63540 +
63541 + p.add_pkg(x2)
63542 + p.add_pkg(x1)
63543 +
63544 + matches = list(p.match("/", Atom("=dev-libs/X-1")))
63545 + self.assertTrue(x1 in matches)
63546 + self.assertEqual(len(matches), 1)
63547 +
63548 + matches = list(p.match("/", Atom("dev-libs/X")))
63549 + self.assertTrue(x1 is matches[0] and x2 is matches[1])
63550 + self.assertEqual(len(matches), 2)
63551 +
63552 + matches = list(p.match("/xxx", Atom("dev-libs/X")))
63553 + self.assertEqual(len(matches), 0)
63554 +
63555 + matches = list(p.match("/", Atom("dev-libs/Y")))
63556 + self.assertEqual(len(matches), 0)
63557 +
63558 + p.add_pkg(x3)
63559 + matches = list(p.match("/", Atom("dev-libs/X")))
63560 + self.assertTrue(x1 is matches[0] and x2 is matches[1] and x3 is matches[2])
63561 + self.assertEqual(len(matches), 3)
63562 +
63563 + p.remove_pkg(x3)
63564 + matches = list(p.match("/", Atom("dev-libs/X")))
63565 + self.assertTrue(x1 is matches[0] and x2 is matches[1])
63566 + self.assertEqual(len(matches), 2)
63567 +
63568 + def test_dbapi_interface(self):
63569 + p = PackageTracker()
63570 + dbapi = PackageTrackerDbapiWrapper("/", p)
63571 + installed = self.make_pkg("/", "=dev-libs/X-0:0")
63572 + x1 = self.make_pkg("/", "=dev-libs/X-1:0")
63573 + x2 = self.make_pkg("/", "=dev-libs/X-2:0")
63574 + x3 = self.make_pkg("/", "=dev-libs/X-3:0")
63575 + x4 = self.make_pkg("/", "=dev-libs/X-4:6")
63576 + x5 = self.make_pkg("/xxx", "=dev-libs/X-5:6")
63577 +
63578 + def check_dbapi(pkgs):
63579 + all_pkgs = set(dbapi)
63580 + self.assertEqual(len(all_pkgs), len(pkgs))
63581 +
63582 + x_atom = "dev-libs/X"
63583 + y_atom = "dev-libs/Y"
63584 + matches = dbapi.cp_list(x_atom)
63585 + for pkg in pkgs:
63586 + if pkg.root == "/" and pkg.cp == x_atom:
63587 + self.assertTrue(pkg in matches)
63588 + self.assertTrue(not dbapi.cp_list(y_atom))
63589 + matches = dbapi.match(x_atom)
63590 + for pkg in pkgs:
63591 + if pkg.root == "/" and pkg.cp == x_atom:
63592 + self.assertTrue(pkg in matches)
63593 + self.assertTrue(not dbapi.match(y_atom))
63594 +
63595 + check_dbapi([])
63596 +
63597 + p.add_installed_pkg(installed)
63598 + check_dbapi([installed])
63599 +
63600 + p.add_pkg(x1)
63601 + check_dbapi([x1])
63602 +
63603 + p.remove_pkg(x1)
63604 + check_dbapi([installed])
63605 +
63606 + dbapi.cpv_inject(x1)
63607 + check_dbapi([x1])
63608 +
63609 + dbapi.cpv_inject(x2)
63610 + check_dbapi([x1, x2])
63611 +
63612 + p.remove_pkg(x1)
63613 + check_dbapi([x2])
63614 +
63615 + p.add_pkg(x5)
63616 + check_dbapi([x2])
63617 +
63618 +
63619 + def test_installed(self):
63620 + p = PackageTracker()
63621 + x1 = self.make_pkg("/", "=dev-libs/X-1:0")
63622 + x1b = self.make_pkg("/", "=dev-libs/X-1.1:0")
63623 + x2 = self.make_pkg("/", "=dev-libs/X-2:0")
63624 + x3 = self.make_pkg("/", "=dev-libs/X-3:1")
63625 +
63626 + def check_installed(x, should_contain, num_pkgs):
63627 + self.assertEqual(x in p, should_contain)
63628 + self.assertEqual(p.contains(x), should_contain)
63629 + self.assertEqual(p.contains(x1, installed=True), should_contain)
63630 + self.assertEqual(p.contains(x1, installed=False), False)
63631 + self.assertEqual(len(list(p.all_pkgs("/"))), num_pkgs)
63632 +
63633 + def check_matches(atom, expected):
63634 + matches = list(p.match("/", Atom(atom)))
63635 + self.assertEqual(len(matches), len(expected))
63636 + for x, y in zip(matches, expected):
63637 + self.assertTrue(x is y)
63638 +
63639 + p.add_installed_pkg(x1)
63640 + check_installed(x1, True, 1)
63641 + check_matches("dev-libs/X", [x1])
63642 +
63643 + p.add_installed_pkg(x1)
63644 + check_installed(x1, True, 1)
63645 + check_matches("dev-libs/X", [x1])
63646 +
63647 + p.add_pkg(x2)
63648 + check_installed(x1, False, 1)
63649 + check_matches("dev-libs/X", [x2])
63650 +
63651 + p.add_installed_pkg(x1)
63652 + check_installed(x1, False, 1)
63653 + check_matches("dev-libs/X", [x2])
63654 +
63655 + p.add_installed_pkg(x1b)
63656 + check_installed(x1, False, 1)
63657 + check_installed(x1b, False, 1)
63658 + check_matches("dev-libs/X", [x2])
63659 +
63660 + p.remove_pkg(x2)
63661 + check_installed(x1, True, 2)
63662 + check_installed(x1b, True, 2)
63663 + check_matches("dev-libs/X", [x1, x1b])
63664 +
63665 + def test_conflicts(self):
63666 + p = PackageTracker()
63667 + installed1 = self.make_pkg("/", "=dev-libs/X-0:0")
63668 + installed2 = self.make_pkg("/", "=dev-libs/X-0.1:0")
63669 + x1 = self.make_pkg("/", "=dev-libs/X-1:0")
63670 + x2 = self.make_pkg("/", "=dev-libs/X-2:0")
63671 + x3 = self.make_pkg("/", "=dev-libs/X-3:0")
63672 + x4 = self.make_pkg("/", "=dev-libs/X-4:4")
63673 + x4b = self.make_pkg("/", "=dev-libs/X-4:4b::x-repo")
63674 +
63675 + def check_conflicts(expected, slot_conflicts_only=False):
63676 + if slot_conflicts_only:
63677 + conflicts = list(p.slot_conflicts())
63678 + else:
63679 + conflicts = list(p.conflicts())
63680 + self.assertEqual(len(conflicts), len(expected))
63681 + for got, exp in zip(conflicts, expected):
63682 + self.assertEqual(got.description, exp.description)
63683 + self.assertEqual(got.root, exp.root)
63684 + self.assertEqual(len(got.pkgs), len(exp.pkgs))
63685 + self.assertEqual(len(got), len(exp.pkgs))
63686 + for x, y in zip(got.pkgs, exp.pkgs):
63687 + self.assertTrue(x is y)
63688 + for x, y in zip(got, exp.pkgs):
63689 + self.assertTrue(x is y)
63690 + for x in exp.pkgs:
63691 + self.assertTrue(x in got)
63692 +
63693 + check_conflicts([])
63694 + check_conflicts([])
63695 +
63696 + p.add_installed_pkg(installed1)
63697 + p.add_installed_pkg(installed2)
63698 + check_conflicts([])
63699 +
63700 + p.add_pkg(x1)
63701 + check_conflicts([])
63702 + p.add_pkg(x2)
63703 + check_conflicts([self.make_conflict("slot conflict", "/", [x1, x2])])
63704 + p.add_pkg(x3)
63705 + check_conflicts([self.make_conflict("slot conflict", "/", [x1, x2, x3])])
63706 + p.remove_pkg(x3)
63707 + check_conflicts([self.make_conflict("slot conflict", "/", [x1, x2])])
63708 + p.remove_pkg(x2)
63709 + check_conflicts([])
63710 + p.add_pkg(x3)
63711 + check_conflicts([self.make_conflict("slot conflict", "/", [x1, x3])])
63712 + p.add_pkg(x2)
63713 + check_conflicts([self.make_conflict("slot conflict", "/", [x1, x3, x2])])
63714 +
63715 + p.add_pkg(x4)
63716 + check_conflicts([self.make_conflict("slot conflict", "/", [x1, x3, x2])])
63717 +
63718 + p.add_pkg(x4b)
63719 + check_conflicts(
63720 + [
63721 + self.make_conflict("slot conflict", "/", [x1, x3, x2]),
63722 + self.make_conflict("cpv conflict", "/", [x4, x4b]),
63723 + ]
63724 + )
63725 +
63726 + check_conflicts(
63727 + [
63728 + self.make_conflict("slot conflict", "/", [x1, x3, x2]),
63729 + ],
63730 + slot_conflicts_only=True
63731 + )
63732
63733 diff --git a/pym/portage/tests/resolver/test_regular_slot_change_without_revbump.py b/pym/portage/tests/resolver/test_regular_slot_change_without_revbump.py
63734 new file mode 100644
63735 index 0000000..415277b
63736 --- /dev/null
63737 +++ b/pym/portage/tests/resolver/test_regular_slot_change_without_revbump.py
63738 @@ -0,0 +1,59 @@
63739 +# Copyright 2013 Gentoo Foundation
63740 +# Distributed under the terms of the GNU General Public License v2
63741 +
63742 +from portage.tests import TestCase
63743 +from portage.tests.resolver.ResolverPlayground import (ResolverPlayground,
63744 + ResolverPlaygroundTestCase)
63745 +
63746 +class RegularSlotChangeWithoutRevBumpTestCase(TestCase):
63747 +
63748 + def testRegularSlotChangeWithoutRevBumpTestCase(self):
63749 +
63750 + ebuilds = {
63751 + "dev-libs/boost-1.52.0" : {
63752 + "SLOT": "0"
63753 + },
63754 + "app-office/libreoffice-4.0.0.2" : {
63755 + "EAPI": "5",
63756 + "DEPEND": ">=dev-libs/boost-1.46:=",
63757 + "RDEPEND": ">=dev-libs/boost-1.46:=",
63758 + },
63759 + }
63760 +
63761 + binpkgs = {
63762 + "dev-libs/boost-1.52.0" : {
63763 + "SLOT": "1.52"
63764 + },
63765 + }
63766 +
63767 + installed = {
63768 + "dev-libs/boost-1.52.0" : {
63769 + "SLOT": "1.52"
63770 + },
63771 + }
63772 +
63773 + world = []
63774 +
63775 + test_cases = (
63776 + # Test that @__auto_slot_operator_replace_installed__
63777 + # pulls in the available slot, even though it's
63778 + # different from the installed slot (0 instead of 1.52).
63779 + ResolverPlaygroundTestCase(
63780 + ["app-office/libreoffice"],
63781 + options = {"--oneshot": True, "--usepkg": True},
63782 + success = True,
63783 + mergelist = [
63784 + 'dev-libs/boost-1.52.0',
63785 + 'app-office/libreoffice-4.0.0.2'
63786 + ]
63787 + ),
63788 + )
63789 +
63790 + playground = ResolverPlayground(ebuilds=ebuilds, binpkgs=binpkgs,
63791 + installed=installed, world=world, debug=False)
63792 + try:
63793 + for test_case in test_cases:
63794 + playground.run_TestCase(test_case)
63795 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
63796 + finally:
63797 + playground.cleanup()
63798
63799 diff --git a/pym/portage/tests/resolver/test_slot_abi.py b/pym/portage/tests/resolver/test_slot_abi.py
63800 index 6381bcc..7263504 100644
63801 --- a/pym/portage/tests/resolver/test_slot_abi.py
63802 +++ b/pym/portage/tests/resolver/test_slot_abi.py
63803 @@ -1,4 +1,4 @@
63804 -# Copyright 2012 Gentoo Foundation
63805 +# Copyright 2012-2013 Gentoo Foundation
63806 # Distributed under the terms of the GNU General Public License v2
63807
63808 from portage.tests import TestCase
63809 @@ -65,7 +65,7 @@ class SlotAbiTestCase(TestCase):
63810
63811 ResolverPlaygroundTestCase(
63812 ["dev-libs/icu"],
63813 - options = {"--oneshot": True, "--ignore-built-slot-abi-deps": "y"},
63814 + options = {"--oneshot": True, "--ignore-built-slot-operator-deps": "y"},
63815 success = True,
63816 mergelist = ["dev-libs/icu-49"]),
63817
63818 @@ -83,7 +83,7 @@ class SlotAbiTestCase(TestCase):
63819
63820 ResolverPlaygroundTestCase(
63821 ["dev-libs/icu"],
63822 - options = {"--oneshot": True, "--usepkgonly": True, "--ignore-built-slot-abi-deps": "y"},
63823 + options = {"--oneshot": True, "--usepkgonly": True, "--ignore-built-slot-operator-deps": "y"},
63824 success = True,
63825 mergelist = ["[binary]dev-libs/icu-49"]),
63826
63827 @@ -95,7 +95,7 @@ class SlotAbiTestCase(TestCase):
63828
63829 ResolverPlaygroundTestCase(
63830 ["@world"],
63831 - options = {"--update": True, "--deep": True, "--ignore-built-slot-abi-deps": "y"},
63832 + options = {"--update": True, "--deep": True, "--ignore-built-slot-operator-deps": "y"},
63833 success = True,
63834 mergelist = ["dev-libs/icu-49"]),
63835
63836 @@ -113,7 +113,7 @@ class SlotAbiTestCase(TestCase):
63837
63838 ResolverPlaygroundTestCase(
63839 ["@world"],
63840 - options = {"--update": True, "--deep": True, "--usepkgonly": True, "--ignore-built-slot-abi-deps": "y"},
63841 + options = {"--update": True, "--deep": True, "--usepkgonly": True, "--ignore-built-slot-operator-deps": "y"},
63842 success = True,
63843 mergelist = ["[binary]dev-libs/icu-49"]),
63844
63845 @@ -178,7 +178,7 @@ class SlotAbiTestCase(TestCase):
63846
63847 ResolverPlaygroundTestCase(
63848 ["sys-libs/db"],
63849 - options = {"--oneshot": True, "--ignore-built-slot-abi-deps": "y"},
63850 + options = {"--oneshot": True, "--ignore-built-slot-operator-deps": "y"},
63851 success = True,
63852 mergelist = ["sys-libs/db-4.8"]),
63853
63854 @@ -196,7 +196,7 @@ class SlotAbiTestCase(TestCase):
63855
63856 ResolverPlaygroundTestCase(
63857 ["sys-libs/db"],
63858 - options = {"--oneshot": True, "--rebuild-if-new-slot-abi": "n"},
63859 + options = {"--oneshot": True, "--rebuild-if-new-slot": "n"},
63860 success = True,
63861 mergelist = ["sys-libs/db-4.8"]),
63862
63863 @@ -214,7 +214,7 @@ class SlotAbiTestCase(TestCase):
63864
63865 ResolverPlaygroundTestCase(
63866 ["@world"],
63867 - options = {"--update": True, "--deep": True, "--usepkg": True, "--ignore-built-slot-abi-deps": "y"},
63868 + options = {"--update": True, "--deep": True, "--usepkg": True, "--ignore-built-slot-operator-deps": "y"},
63869 success = True,
63870 mergelist = ["[binary]sys-libs/db-4.8"]),
63871
63872 @@ -226,13 +226,13 @@ class SlotAbiTestCase(TestCase):
63873
63874 ResolverPlaygroundTestCase(
63875 ["@world"],
63876 - options = {"--update": True, "--deep": True, "--usepkgonly": True, "--ignore-built-slot-abi-deps": "y"},
63877 + options = {"--update": True, "--deep": True, "--usepkgonly": True, "--ignore-built-slot-operator-deps": "y"},
63878 success = True,
63879 mergelist = ["[binary]sys-libs/db-4.8"]),
63880
63881 ResolverPlaygroundTestCase(
63882 ["@world"],
63883 - options = {"--update": True, "--deep": True, "--rebuild-if-new-slot-abi": "n"},
63884 + options = {"--update": True, "--deep": True, "--rebuild-if-new-slot": "n"},
63885 success = True,
63886 mergelist = []),
63887
63888 @@ -247,6 +247,89 @@ class SlotAbiTestCase(TestCase):
63889 finally:
63890 playground.cleanup()
63891
63892 +
63893 + def testWholeSlotConditional(self):
63894 + ebuilds = {
63895 + "dev-libs/libnl-3.2.14" : {
63896 + "SLOT": "3"
63897 + },
63898 + "dev-libs/libnl-1.1-r3" : {
63899 + "SLOT": "1.1"
63900 + },
63901 + "net-misc/networkmanager-0.9.6.4-r1" : {
63902 + "EAPI": "5",
63903 + "IUSE": "wimax",
63904 + "DEPEND": "wimax? ( dev-libs/libnl:1.1= ) !wimax? ( dev-libs/libnl:3= )",
63905 + "RDEPEND": "wimax? ( dev-libs/libnl:1.1= ) !wimax? ( dev-libs/libnl:3= )"
63906 + },
63907 + }
63908 + installed = {
63909 + "dev-libs/libnl-1.1-r3" : {
63910 + "SLOT": "1.1"
63911 + },
63912 + "net-misc/networkmanager-0.9.6.4-r1" : {
63913 + "EAPI": "5",
63914 + "IUSE": "wimax",
63915 + "USE": "wimax",
63916 + "DEPEND": "dev-libs/libnl:1.1/1.1=",
63917 + "RDEPEND": "dev-libs/libnl:1.1/1.1="
63918 + },
63919 + }
63920 +
63921 + user_config = {
63922 + "make.conf" : ("USE=\"wimax\"",)
63923 + }
63924 +
63925 + world = ["net-misc/networkmanager"]
63926 +
63927 + test_cases = (
63928 +
63929 + # Demonstrate bug #460304, where _slot_operator_update_probe needs
63930 + # to account for USE conditional deps.
63931 + ResolverPlaygroundTestCase(
63932 + ["@world"],
63933 + options = {"--update": True, "--deep": True},
63934 + success = True,
63935 + mergelist = []),
63936 +
63937 + )
63938 +
63939 + playground = ResolverPlayground(ebuilds=ebuilds,
63940 + installed=installed, user_config=user_config, world=world,
63941 + debug=False)
63942 + try:
63943 + for test_case in test_cases:
63944 + playground.run_TestCase(test_case)
63945 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
63946 + finally:
63947 + playground.cleanup()
63948 +
63949 + user_config = {
63950 + "make.conf" : ("USE=\"-wimax\"",)
63951 + }
63952 +
63953 + test_cases = (
63954 +
63955 + # Demonstrate bug #460304 again, but with inverted USE
63956 + # settings this time.
63957 + ResolverPlaygroundTestCase(
63958 + ["@world"],
63959 + options = {"--update": True, "--deep": True},
63960 + success = True,
63961 + mergelist = ['dev-libs/libnl-3.2.14', 'net-misc/networkmanager-0.9.6.4-r1']),
63962 +
63963 + )
63964 +
63965 + playground = ResolverPlayground(ebuilds=ebuilds,
63966 + installed=installed, user_config=user_config, world=world,
63967 + debug=False)
63968 + try:
63969 + for test_case in test_cases:
63970 + playground.run_TestCase(test_case)
63971 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
63972 + finally:
63973 + playground.cleanup()
63974 +
63975 def testWholeSlotSubSlotMix(self):
63976 ebuilds = {
63977 "dev-libs/glib-1.2.10" : {
63978 @@ -312,7 +395,7 @@ class SlotAbiTestCase(TestCase):
63979
63980 ResolverPlaygroundTestCase(
63981 ["dev-libs/glib"],
63982 - options = {"--oneshot": True, "--ignore-built-slot-abi-deps": "y"},
63983 + options = {"--oneshot": True, "--ignore-built-slot-operator-deps": "y"},
63984 success = True,
63985 mergelist = ["dev-libs/glib-2.32.3"]),
63986
63987 @@ -330,7 +413,7 @@ class SlotAbiTestCase(TestCase):
63988
63989 ResolverPlaygroundTestCase(
63990 ["dev-libs/glib"],
63991 - options = {"--oneshot": True, "--usepkgonly": True, "--ignore-built-slot-abi-deps": "y"},
63992 + options = {"--oneshot": True, "--usepkgonly": True, "--ignore-built-slot-operator-deps": "y"},
63993 success = True,
63994 mergelist = ["[binary]dev-libs/glib-2.32.3"]),
63995
63996 @@ -342,7 +425,7 @@ class SlotAbiTestCase(TestCase):
63997
63998 ResolverPlaygroundTestCase(
63999 ["@world"],
64000 - options = {"--update": True, "--deep": True, "--ignore-built-slot-abi-deps": "y"},
64001 + options = {"--update": True, "--deep": True, "--ignore-built-slot-operator-deps": "y"},
64002 success = True,
64003 mergelist = ["dev-libs/glib-2.32.3"]),
64004
64005 @@ -360,7 +443,7 @@ class SlotAbiTestCase(TestCase):
64006
64007 ResolverPlaygroundTestCase(
64008 ["@world"],
64009 - options = {"--update": True, "--deep": True, "--usepkgonly": True, "--ignore-built-slot-abi-deps": "y"},
64010 + options = {"--update": True, "--deep": True, "--usepkgonly": True, "--ignore-built-slot-operator-deps": "y"},
64011 success = True,
64012 mergelist = ["[binary]dev-libs/glib-2.32.3"]),
64013
64014
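For reference, the testWholeSlotConditional case above (bug #460304) hinges on picking the effective slot-operator dep from USE-conditional alternatives before deciding whether a rebuild is wanted; the test comment points at _slot_operator_update_probe. A minimal standalone sketch of that decision, with illustrative helper names only (this is not Portage's implementation):

# Standalone sketch: pick the effective slot-operator dep from
# USE-conditional alternatives, then decide whether a provider in a
# different slot should trigger a rebuild. Helper names are invented.

def effective_slot_dep(conditional_deps, use):
    """conditional_deps: list of (flag, negate, slot) tuples."""
    for flag, negate, slot in conditional_deps:
        enabled = flag in use
        if enabled != negate:
            return slot
    return None

def wants_rebuild(recorded_slot, available_slots, conditional_deps, use):
    slot = effective_slot_dep(conditional_deps, use)
    # Only rebuild if the USE-selected slot differs from the one the
    # installed consumer was built against and is actually available.
    return slot in available_slots and slot != recorded_slot

networkmanager_deps = [("wimax", False, "1.1"), ("wimax", True, "3")]

# USE=wimax: still bound to libnl:1.1, so no rebuild for the new libnl:3.
assert not wants_rebuild("1.1", {"1.1", "3"}, networkmanager_deps, {"wimax"})
# USE=-wimax: the effective dep is libnl:3, so an update plus rebuild is expected.
assert wants_rebuild("1.1", {"1.1", "3"}, networkmanager_deps, set())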
64015 diff --git a/pym/portage/tests/resolver/test_slot_abi_downgrade.py b/pym/portage/tests/resolver/test_slot_abi_downgrade.py
64016 index 45a7555..08e9a9d 100644
64017 --- a/pym/portage/tests/resolver/test_slot_abi_downgrade.py
64018 +++ b/pym/portage/tests/resolver/test_slot_abi_downgrade.py
64019 @@ -61,7 +61,7 @@ class SlotAbiDowngradeTestCase(TestCase):
64020
64021 ResolverPlaygroundTestCase(
64022 ["dev-libs/icu"],
64023 - options = {"--oneshot": True, "--ignore-built-slot-abi-deps": "y"},
64024 + options = {"--oneshot": True, "--ignore-built-slot-operator-deps": "y"},
64025 success = True,
64026 mergelist = ["dev-libs/icu-4.8"]),
64027
64028 @@ -85,7 +85,7 @@ class SlotAbiDowngradeTestCase(TestCase):
64029
64030 ResolverPlaygroundTestCase(
64031 ["@world"],
64032 - options = {"--update": True, "--deep": True, "--ignore-built-slot-abi-deps": "y"},
64033 + options = {"--update": True, "--deep": True, "--ignore-built-slot-operator-deps": "y"},
64034 success = True,
64035 mergelist = ["dev-libs/icu-4.8"]),
64036
64037 @@ -173,7 +173,7 @@ class SlotAbiDowngradeTestCase(TestCase):
64038
64039 ResolverPlaygroundTestCase(
64040 ["dev-libs/glib"],
64041 - options = {"--oneshot": True, "--ignore-built-slot-abi-deps": "y"},
64042 + options = {"--oneshot": True, "--ignore-built-slot-operator-deps": "y"},
64043 success = True,
64044 mergelist = ["dev-libs/glib-2.30.2"]),
64045
64046 @@ -197,7 +197,7 @@ class SlotAbiDowngradeTestCase(TestCase):
64047
64048 ResolverPlaygroundTestCase(
64049 ["@world"],
64050 - options = {"--update": True, "--deep": True, "--ignore-built-slot-abi-deps": "y"},
64051 + options = {"--update": True, "--deep": True, "--ignore-built-slot-operator-deps": "y"},
64052 success = True,
64053 mergelist = ["dev-libs/glib-2.30.2"]),
64054
64055
64056 diff --git a/pym/portage/tests/resolver/test_slot_change_without_revbump.py b/pym/portage/tests/resolver/test_slot_change_without_revbump.py
64057 new file mode 100644
64058 index 0000000..d85ff7e0
64059 --- /dev/null
64060 +++ b/pym/portage/tests/resolver/test_slot_change_without_revbump.py
64061 @@ -0,0 +1,69 @@
64062 +# Copyright 2013 Gentoo Foundation
64063 +# Distributed under the terms of the GNU General Public License v2
64064 +
64065 +from portage.tests import TestCase
64066 +from portage.tests.resolver.ResolverPlayground import (ResolverPlayground,
64067 + ResolverPlaygroundTestCase)
64068 +
64069 +class SlotChangeWithoutRevBumpTestCase(TestCase):
64070 +
64071 + def testSlotChangeWithoutRevBump(self):
64072 +
64073 + ebuilds = {
64074 + "app-arch/libarchive-3.1.1" : {
64075 + "EAPI": "5",
64076 + "SLOT": "0/13"
64077 + },
64078 + "app-arch/libarchive-3.0.4-r1" : {
64079 + "EAPI": "5",
64080 + "SLOT": "0"
64081 + },
64082 + "kde-base/ark-4.10.0" : {
64083 + "EAPI": "5",
64084 + "DEPEND": "app-arch/libarchive:=",
64085 + "RDEPEND": "app-arch/libarchive:="
64086 + },
64087 + }
64088 +
64089 + binpkgs = {
64090 + "app-arch/libarchive-3.1.1" : {
64091 + "EAPI": "5",
64092 + "SLOT": "0"
64093 + },
64094 + }
64095 +
64096 + installed = {
64097 + "app-arch/libarchive-3.1.1" : {
64098 + "EAPI": "5",
64099 + "SLOT": "0"
64100 + },
64101 +
64102 + "kde-base/ark-4.10.0" : {
64103 + "EAPI": "5",
64104 + "DEPEND": "app-arch/libarchive:0/0=",
64105 + "RDEPEND": "app-arch/libarchive:0/0="
64106 + },
64107 + }
64108 +
64109 + world = ["kde-base/ark"]
64110 +
64111 + test_cases = (
64112 +
64113 + # Demonstrate bug #456208, where a sub-slot change
64114 + # without revbump needs to trigger a rebuild.
64115 + ResolverPlaygroundTestCase(
64116 + ["kde-base/ark"],
64117 + options = {"--oneshot": True, "--usepkg": True},
64118 + success = True,
64119 + mergelist = ['app-arch/libarchive-3.1.1', "kde-base/ark-4.10.0"]),
64120 +
64121 + )
64122 +
64123 + playground = ResolverPlayground(ebuilds=ebuilds, binpkgs=binpkgs,
64124 + installed=installed, world=world, debug=False)
64125 + try:
64126 + for test_case in test_cases:
64127 + playground.run_TestCase(test_case)
64128 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
64129 + finally:
64130 + playground.cleanup()
64131
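The new test above (bug #456208) boils down to comparing the slot/sub-slot recorded by a ":=" consumer at build time with the provider's current sub-slot. A minimal standalone sketch of that comparison, using assumed helpers rather than the resolver's actual code:

# A ":=" consumer records the provider's slot/sub-slot when it is
# built; a rebuild is needed whenever the provider's sub-slot differs.

def parse_slot(slot):
    """Split 'slot/subslot' into (slot, subslot); subslot defaults to slot."""
    if "/" in slot:
        return tuple(slot.split("/", 1))
    return (slot, slot)

def needs_rebuild(recorded, provider_slot):
    return parse_slot(recorded) != parse_slot(provider_slot)

# ark was built against libarchive:0/0=, the new ebuild has SLOT="0/13":
assert needs_rebuild("0/0", "0/13")
# the stale binary package still advertises SLOT="0", i.e. sub-slot "0":
assert not needs_rebuild("0/0", "0")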
64132 diff --git a/pym/portage/tests/resolver/test_slot_collisions.py b/pym/portage/tests/resolver/test_slot_collisions.py
64133 index 95d68fe..9fcd529 100644
64134 --- a/pym/portage/tests/resolver/test_slot_collisions.py
64135 +++ b/pym/portage/tests/resolver/test_slot_collisions.py
64136 @@ -1,4 +1,4 @@
64137 -# Copyright 2010-2011 Gentoo Foundation
64138 +# Copyright 2010-2014 Gentoo Foundation
64139 # Distributed under the terms of the GNU General Public License v2
64140
64141 from portage.tests import TestCase
64142 @@ -153,3 +153,107 @@ class SlotCollisionTestCase(TestCase):
64143 self.assertEqual(test_case.test_success, True, test_case.fail_msg)
64144 finally:
64145 playground.cleanup()
64146 +
64147 + def testConnectedCollision(self):
64148 + """
64149 + Ensure that we are able to solve connected slot conflicts
64150 + which cannot each be solved on their own.
64151 + """
64152 + ebuilds = {
64153 + "dev-libs/A-1": { "RDEPEND": "=dev-libs/X-1" },
64154 + "dev-libs/B-1": { "RDEPEND": "dev-libs/X" },
64155 +
64156 + "dev-libs/X-1": { "RDEPEND": "=dev-libs/Y-1" },
64157 + "dev-libs/X-2": { "RDEPEND": "=dev-libs/Y-2" },
64158 +
64159 + "dev-libs/Y-1": { "PDEPEND": "=dev-libs/X-1" },
64160 + "dev-libs/Y-2": { "PDEPEND": "=dev-libs/X-2" },
64161 + }
64162 +
64163 + test_cases = (
64164 + ResolverPlaygroundTestCase(
64165 + ["dev-libs/A", "dev-libs/B"],
64166 + all_permutations = True,
64167 + options = { "--backtrack": 0 },
64168 + success = True,
64169 + ambiguous_merge_order = True,
64170 + mergelist = ["dev-libs/Y-1", "dev-libs/X-1", ("dev-libs/A-1", "dev-libs/B-1")]),
64171 + )
64172 +
64173 + playground = ResolverPlayground(ebuilds=ebuilds, debug=False)
64174 + try:
64175 + for test_case in test_cases:
64176 + playground.run_TestCase(test_case)
64177 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
64178 + finally:
64179 + playground.cleanup()
64180 +
64181 +
64182 + def testDeeplyConnectedCollision(self):
64183 + """
64184 + Like testConnectedCollision, except that there is another
64185 + level of dependencies between the two conflicts.
64186 + """
64187 + ebuilds = {
64188 + "dev-libs/A-1": { "RDEPEND": "=dev-libs/X-1" },
64189 + "dev-libs/B-1": { "RDEPEND": "dev-libs/X" },
64190 +
64191 + "dev-libs/X-1": { "RDEPEND": "dev-libs/K" },
64192 + "dev-libs/X-2": { "RDEPEND": "dev-libs/L" },
64193 +
64194 + "dev-libs/K-1": { "RDEPEND": "=dev-libs/Y-1" },
64195 + "dev-libs/L-1": { "RDEPEND": "=dev-libs/Y-2" },
64196 +
64197 + "dev-libs/Y-1": { "PDEPEND": "=dev-libs/X-1" },
64198 + "dev-libs/Y-2": { "PDEPEND": "=dev-libs/X-2" },
64199 + }
64200 +
64201 + test_cases = (
64202 + ResolverPlaygroundTestCase(
64203 + ["dev-libs/A", "dev-libs/B"],
64204 + all_permutations = True,
64205 + options = { "--backtrack": 0 },
64206 + success = True,
64207 + ignore_mergelist_order = True,
64208 + mergelist = ["dev-libs/Y-1", "dev-libs/X-1", "dev-libs/K-1", \
64209 + "dev-libs/A-1", "dev-libs/B-1"]),
64210 + )
64211 +
64212 + playground = ResolverPlayground(ebuilds=ebuilds, debug=False)
64213 + try:
64214 + for test_case in test_cases:
64215 + playground.run_TestCase(test_case)
64216 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
64217 + finally:
64218 + playground.cleanup()
64219 +
64220 +
64221 + def testSelfDEPENDRemovalCrash(self):
64222 + """
64223 + Make sure we don't try to remove a package twice. This happened
64224 + in the past when a package had a DEPEND on itself.
64225 + """
64226 + ebuilds = {
64227 + "dev-libs/A-1": { "RDEPEND": "=dev-libs/X-1" },
64228 + "dev-libs/B-1": { "RDEPEND": "dev-libs/X" },
64229 +
64230 + "dev-libs/X-1": { },
64231 + "dev-libs/X-2": { "DEPEND": ">=dev-libs/X-2" },
64232 + }
64233 +
64234 + test_cases = (
64235 + ResolverPlaygroundTestCase(
64236 + ["dev-libs/A", "dev-libs/B"],
64237 + all_permutations = True,
64238 + success = True,
64239 + ignore_mergelist_order = True,
64240 + mergelist = ["dev-libs/X-1", "dev-libs/A-1", "dev-libs/B-1"]),
64241 + )
64242 +
64243 + playground = ResolverPlayground(ebuilds=ebuilds, debug=False)
64244 + try:
64245 + for test_case in test_cases:
64246 + playground.run_TestCase(test_case)
64247 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
64248 + finally:
64249 + playground.cleanup()
64250
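testConnectedCollision above relies on the fact that each candidate pins an exact version of the other package, so the two slot conflicts can only be resolved jointly. A small self-contained sketch of that consistency check (illustrative data, not Portage code):

# Each candidate pins an exact version of the other package, so the
# two choices are only consistent when made together.

pins = {
    ("dev-libs/X", "1"): [("dev-libs/Y", "1")],   # X-1 requires =Y-1
    ("dev-libs/X", "2"): [("dev-libs/Y", "2")],   # X-2 requires =Y-2
    ("dev-libs/Y", "1"): [("dev-libs/X", "1")],   # Y-1 PDEPENDs on =X-1
    ("dev-libs/Y", "2"): [("dev-libs/X", "2")],   # Y-2 PDEPENDs on =X-2
}

def consistent(selection):
    """selection maps package -> chosen version."""
    for pkg, ver in selection.items():
        for dep_pkg, dep_ver in pins.get((pkg, ver), []):
            if selection.get(dep_pkg) != dep_ver:
                return False
    return True

assert consistent({"dev-libs/X": "1", "dev-libs/Y": "1"})
assert not consistent({"dev-libs/X": "2", "dev-libs/Y": "1"})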
64251 diff --git a/pym/portage/tests/resolver/test_slot_conflict_mask_update.py b/pym/portage/tests/resolver/test_slot_conflict_mask_update.py
64252 new file mode 100644
64253 index 0000000..a90eeac
64254 --- /dev/null
64255 +++ b/pym/portage/tests/resolver/test_slot_conflict_mask_update.py
64256 @@ -0,0 +1,41 @@
64257 +# Copyright 2013 Gentoo Foundation
64258 +# Distributed under the terms of the GNU General Public License v2
64259 +
64260 +from portage.tests import TestCase
64261 +from portage.tests.resolver.ResolverPlayground import (ResolverPlayground,
64262 + ResolverPlaygroundTestCase)
64263 +
64264 +class SlotConflictMaskUpdateTestCase(TestCase):
64265 +
64266 + def testBacktrackingGoodVersionFirst(self):
64267 + """
64268 + When backtracking due to slot conflicts, we masked the version that was pulled
64269 + in first. This is not always a good idea. Mask the highest version instead.
64270 + """
64271 +
64272 +
64273 + self.todo = True
64274 +
64275 + ebuilds = {
64276 + "dev-libs/A-1": { "DEPEND": "=dev-libs/C-1 dev-libs/B" },
64277 + "dev-libs/B-1": { "DEPEND": "=dev-libs/C-1" },
64278 + "dev-libs/B-2": { "DEPEND": "=dev-libs/C-2" },
64279 + "dev-libs/C-1": { },
64280 + "dev-libs/C-2": { },
64281 + }
64282 +
64283 + test_cases = (
64284 + ResolverPlaygroundTestCase(
64285 + ["dev-libs/A"],
64286 + mergelist = ["dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1",],
64287 + success = True),
64288 + )
64289 +
64290 + playground = ResolverPlayground(ebuilds=ebuilds)
64291 +
64292 + try:
64293 + for test_case in test_cases:
64294 + playground.run_TestCase(test_case)
64295 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
64296 + finally:
64297 + playground.cleanup()
64298
64299 diff --git a/pym/portage/tests/resolver/test_slot_conflict_rebuild.py b/pym/portage/tests/resolver/test_slot_conflict_rebuild.py
64300 new file mode 100644
64301 index 0000000..17737cf
64302 --- /dev/null
64303 +++ b/pym/portage/tests/resolver/test_slot_conflict_rebuild.py
64304 @@ -0,0 +1,408 @@
64305 +# Copyright 2012-2014 Gentoo Foundation
64306 +# Distributed under the terms of the GNU General Public License v2
64307 +
64308 +from portage.tests import TestCase
64309 +from portage.tests.resolver.ResolverPlayground import (ResolverPlayground,
64310 + ResolverPlaygroundTestCase)
64311 +
64312 +class SlotConflictRebuildTestCase(TestCase):
64313 +
64314 + def testSlotConflictRebuild(self):
64315 +
64316 + ebuilds = {
64317 +
64318 + "app-misc/A-1" : {
64319 + "EAPI": "5",
64320 + "SLOT": "0/1"
64321 + },
64322 +
64323 + "app-misc/A-2" : {
64324 + "EAPI": "5",
64325 + "SLOT": "0/2"
64326 + },
64327 +
64328 + "app-misc/B-0" : {
64329 + "EAPI": "5",
64330 + "DEPEND": "app-misc/A:=",
64331 + "RDEPEND": "app-misc/A:="
64332 + },
64333 +
64334 + "app-misc/C-0" : {
64335 + "EAPI": "5",
64336 + "DEPEND": "<app-misc/A-2",
64337 + "RDEPEND": "<app-misc/A-2"
64338 + },
64339 +
64340 + "app-misc/D-1" : {
64341 + "EAPI": "5",
64342 + "SLOT": "0/1"
64343 + },
64344 +
64345 + "app-misc/D-2" : {
64346 + "EAPI": "5",
64347 + "SLOT": "0/2"
64348 + },
64349 +
64350 + "app-misc/E-0" : {
64351 + "EAPI": "5",
64352 + "DEPEND": "app-misc/D:=",
64353 + "RDEPEND": "app-misc/D:="
64354 + },
64355 +
64356 + }
64357 +
64358 + installed = {
64359 +
64360 + "app-misc/A-1" : {
64361 + "EAPI": "5",
64362 + "SLOT": "0/1"
64363 + },
64364 +
64365 + "app-misc/B-0" : {
64366 + "EAPI": "5",
64367 + "DEPEND": "app-misc/A:0/1=",
64368 + "RDEPEND": "app-misc/A:0/1="
64369 + },
64370 +
64371 + "app-misc/C-0" : {
64372 + "EAPI": "5",
64373 + "DEPEND": "<app-misc/A-2",
64374 + "RDEPEND": "<app-misc/A-2"
64375 + },
64376 +
64377 + "app-misc/D-1" : {
64378 + "EAPI": "5",
64379 + "SLOT": "0/1"
64380 + },
64381 +
64382 + "app-misc/E-0" : {
64383 + "EAPI": "5",
64384 + "DEPEND": "app-misc/D:0/1=",
64385 + "RDEPEND": "app-misc/D:0/1="
64386 + },
64387 +
64388 + }
64389 +
64390 + world = ["app-misc/B", "app-misc/C", "app-misc/E"]
64391 +
64392 + test_cases = (
64393 +
64394 + # Test bug #439688, where a slot conflict prevents an
64395 + # upgrade and we don't want to trigger unnecessary rebuilds.
64396 + ResolverPlaygroundTestCase(
64397 + ["@world"],
64398 + options = {"--update": True, "--deep": True},
64399 + success = True,
64400 + mergelist = ["app-misc/D-2", "app-misc/E-0"]),
64401 +
64402 + )
64403 +
64404 + playground = ResolverPlayground(ebuilds=ebuilds,
64405 + installed=installed, world=world, debug=False)
64406 + try:
64407 + for test_case in test_cases:
64408 + playground.run_TestCase(test_case)
64409 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
64410 + finally:
64411 + playground.cleanup()
64412 +
64413 +
64414 + def testSlotConflictMassRebuild(self):
64415 + """
64416 + Bug 486580
64417 + Before this bug was fixed, emerge would backtrack for each package that needed
64418 + a rebuild. This could cause it to hit the backtrack limit and not rebuild all
64419 + needed packages.
64420 + """
64421 + ebuilds = {
64422 +
64423 + "app-misc/A-1" : {
64424 + "EAPI": "5",
64425 + "DEPEND": "app-misc/B:=",
64426 + "RDEPEND": "app-misc/B:="
64427 + },
64428 +
64429 + "app-misc/B-1" : {
64430 + "EAPI": "5",
64431 + "SLOT": "1"
64432 + },
64433 +
64434 + "app-misc/B-2" : {
64435 + "EAPI": "5",
64436 + "SLOT": "2/2"
64437 + },
64438 + }
64439 +
64440 + installed = {
64441 + "app-misc/B-1" : {
64442 + "EAPI": "5",
64443 + "SLOT": "1"
64444 + },
64445 + }
64446 +
64447 + expected_mergelist = ['app-misc/A-1', 'app-misc/B-2']
64448 +
64449 + for i in range(5):
64450 + ebuilds["app-misc/C%sC-1" % i] = {
64451 + "EAPI": "5",
64452 + "DEPEND": "app-misc/B:=",
64453 + "RDEPEND": "app-misc/B:="
64454 + }
64455 +
64456 + installed["app-misc/C%sC-1" % i] = {
64457 + "EAPI": "5",
64458 + "DEPEND": "app-misc/B:1/1=",
64459 + "RDEPEND": "app-misc/B:1/1="
64460 + }
64461 + for x in ("DEPEND", "RDEPEND"):
64462 + ebuilds["app-misc/A-1"][x] += " app-misc/C%sC" % i
64463 +
64464 + expected_mergelist.append("app-misc/C%sC-1" % i)
64465 +
64466 +
64467 + test_cases = (
64468 + ResolverPlaygroundTestCase(
64469 + ["app-misc/A"],
64470 + ignore_mergelist_order=True,
64471 + all_permutations=True,
64472 + options = {"--backtrack": 3, '--deep': True},
64473 + success = True,
64474 + mergelist = expected_mergelist),
64475 + )
64476 +
64477 + world = []
64478 +
64479 + playground = ResolverPlayground(ebuilds=ebuilds,
64480 + installed=installed, world=world, debug=False)
64481 + try:
64482 + for test_case in test_cases:
64483 + playground.run_TestCase(test_case)
64484 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
64485 + finally:
64486 + playground.cleanup()
64487 +
64488 + def testSlotConflictForgottenChild(self):
64489 + """
64490 + Similar to testSlotConflictMassRebuild above, but this time the rebuilds are scheduled
64491 + while the package causing the rebuild (the child) is not installed.
64492 + """
64493 + ebuilds = {
64494 +
64495 + "app-misc/A-2" : {
64496 + "EAPI": "5",
64497 + "DEPEND": "app-misc/B:= app-misc/C",
64498 + "RDEPEND": "app-misc/B:= app-misc/C",
64499 + },
64500 +
64501 + "app-misc/B-2" : {
64502 + "EAPI": "5",
64503 + "SLOT": "2"
64504 + },
64505 +
64506 + "app-misc/C-1": {
64507 + "EAPI": "5",
64508 + "DEPEND": "app-misc/B:=",
64509 + "RDEPEND": "app-misc/B:="
64510 + },
64511 + }
64512 +
64513 + installed = {
64514 + "app-misc/A-1" : {
64515 + "EAPI": "5",
64516 + "DEPEND": "app-misc/B:1/1= app-misc/C",
64517 + "RDEPEND": "app-misc/B:1/1= app-misc/C",
64518 + },
64519 +
64520 + "app-misc/B-1" : {
64521 + "EAPI": "5",
64522 + "SLOT": "1"
64523 + },
64524 +
64525 + "app-misc/C-1": {
64526 + "EAPI": "5",
64527 + "DEPEND": "app-misc/B:1/1=",
64528 + "RDEPEND": "app-misc/B:1/1="
64529 + },
64530 + }
64531 +
64532 + test_cases = (
64533 + ResolverPlaygroundTestCase(
64534 + ["app-misc/A"],
64535 + success = True,
64536 + mergelist = ['app-misc/B-2', 'app-misc/C-1', 'app-misc/A-2']),
64537 + )
64538 +
64539 + world = []
64540 +
64541 + playground = ResolverPlayground(ebuilds=ebuilds,
64542 + installed=installed, world=world, debug=False)
64543 + try:
64544 + for test_case in test_cases:
64545 + playground.run_TestCase(test_case)
64546 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
64547 + finally:
64548 + playground.cleanup()
64549 +
64550 +
64551 + def testSlotConflictDepChange(self):
64552 + """
64553 + Bug 490362
64554 + The dependency in the ebuild was changed from slot operator to
64555 + no slot operator. The vdb contained the slot operator and emerge
64556 + would refuse to rebuild.
64557 + """
64558 + ebuilds = {
64559 + "app-misc/A-1" : {
64560 + "EAPI": "5",
64561 + "DEPEND": "app-misc/B",
64562 + "RDEPEND": "app-misc/B"
64563 + },
64564 +
64565 + "app-misc/B-1" : {
64566 + "EAPI": "5",
64567 + "SLOT": "0/1"
64568 + },
64569 +
64570 + "app-misc/B-2" : {
64571 + "EAPI": "5",
64572 + "SLOT": "0/2"
64573 + },
64574 + }
64575 +
64576 + installed = {
64577 + "app-misc/A-1" : {
64578 + "EAPI": "5",
64579 + "DEPEND": "app-misc/B:0/1=",
64580 + "RDEPEND": "app-misc/B:0/1="
64581 + },
64582 + "app-misc/B-1" : {
64583 + "EAPI": "5",
64584 + "SLOT": "0/1"
64585 + },
64586 + }
64587 +
64588 + test_cases = (
64589 + ResolverPlaygroundTestCase(
64590 + ["app-misc/B"],
64591 + success = True,
64592 + mergelist = ['app-misc/B-2', 'app-misc/A-1']),
64593 + )
64594 +
64595 + world = ["app-misc/A"]
64596 +
64597 + playground = ResolverPlayground(ebuilds=ebuilds,
64598 + installed=installed, world=world, debug=False)
64599 + try:
64600 + for test_case in test_cases:
64601 + playground.run_TestCase(test_case)
64602 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
64603 + finally:
64604 + playground.cleanup()
64605 +
64606 +
64607 + def testSlotConflictMixedDependencies(self):
64608 + """
64609 + Bug 487198
64610 + For parents with mixed >= and < dependencies, we scheduled rebuilds for the
64611 + >= atom, but in the end didn't install the child update because of the < atom.
64612 + """
64613 + ebuilds = {
64614 + "cat/slotted-lib-1" : {
64615 + "EAPI": "5",
64616 + "SLOT": "1"
64617 + },
64618 + "cat/slotted-lib-2" : {
64619 + "EAPI": "5",
64620 + "SLOT": "2"
64621 + },
64622 + "cat/slotted-lib-3" : {
64623 + "EAPI": "5",
64624 + "SLOT": "3"
64625 + },
64626 + "cat/slotted-lib-4" : {
64627 + "EAPI": "5",
64628 + "SLOT": "4"
64629 + },
64630 + "cat/slotted-lib-5" : {
64631 + "EAPI": "5",
64632 + "SLOT": "5"
64633 + },
64634 + "cat/user-1" : {
64635 + "EAPI": "5",
64636 + "DEPEND": ">=cat/slotted-lib-2:= <cat/slotted-lib-4:=",
64637 + "RDEPEND": ">=cat/slotted-lib-2:= <cat/slotted-lib-4:=",
64638 + },
64639 + }
64640 +
64641 + installed = {
64642 + "cat/slotted-lib-3" : {
64643 + "EAPI": "5",
64644 + "SLOT": "3"
64645 + },
64646 + "cat/user-1" : {
64647 + "EAPI": "5",
64648 + "DEPEND": ">=cat/slotted-lib-2:3/3= <cat/slotted-lib-4:3/3=",
64649 + "RDEPEND": ">=cat/slotted-lib-2:3/3= <cat/slotted-lib-4:3/3=",
64650 + },
64651 + }
64652 +
64653 + test_cases = (
64654 + ResolverPlaygroundTestCase(
64655 + ["cat/user"],
64656 + options = {"--deep": True, "--update": True},
64657 + success = True,
64658 + mergelist = []),
64659 + )
64660 +
64661 + world = []
64662 +
64663 + playground = ResolverPlayground(ebuilds=ebuilds,
64664 + installed=installed, world=world, debug=False)
64665 + try:
64666 + for test_case in test_cases:
64667 + playground.run_TestCase(test_case)
64668 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
64669 + finally:
64670 + playground.cleanup()
64671 +
64672 +
64673 + def testSlotConflictMultiRepo(self):
64674 + """
64675 + Bug 497238
64676 + Different repositories contain the same cpv with different sub-slots for
64677 + a slot operator child.
64678 + Downgrading the slot operator parent would result in a sub-slot change of
64679 + the installed package by changing the source repository.
64680 + Make sure we don't perform this undesirable rebuild.
64681 + """
64682 + ebuilds = {
64683 + "net-firewall/iptables-1.4.21::overlay" : { "EAPI": "5", "SLOT": "0/10" },
64684 + "sys-apps/iproute2-3.11.0::overlay" : { "EAPI": "5", "RDEPEND": "net-firewall/iptables:=" },
64685 +
64686 + "net-firewall/iptables-1.4.21" : { "EAPI": "5", "SLOT": "0" },
64687 + "sys-apps/iproute2-3.12.0": { "EAPI": "5", "RDEPEND": "net-firewall/iptables:=" },
64688 + }
64689 +
64690 + installed = {
64691 + "net-firewall/iptables-1.4.21::overlay" : { "EAPI": "5", "SLOT": "0/10" },
64692 + "sys-apps/iproute2-3.12.0": { "EAPI": "5", "RDEPEND": "net-firewall/iptables:0/10=" },
64693 + }
64694 +
64695 + world = ["sys-apps/iproute2"]
64696 +
64697 + test_cases = (
64698 + ResolverPlaygroundTestCase(
64699 + ["@world"],
64700 + options = {"--deep": True, "--update": True, "--verbose": True},
64701 + success = True,
64702 + mergelist = []),
64703 + )
64704 +
64705 + playground = ResolverPlayground(ebuilds=ebuilds,
64706 + installed=installed, world=world, debug=False)
64707 + try:
64708 + for test_case in test_cases:
64709 + playground.run_TestCase(test_case)
64710 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
64711 + finally:
64712 + playground.cleanup()
64713
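testSlotConflictMassRebuild (bug #486580) is easiest to read as a budget problem: spending one backtrack step per ":=" consumer exhausts --backtrack=3 before all five consumers are rebuilt, while scheduling them together does not. A deliberately simplified model, offered as an assumption for illustration rather than emerge's actual scheduler:

consumers = ["app-misc/A-1"] + ["app-misc/C%sC-1" % i for i in range(5)]

def rebuilds_per_package(consumers, backtrack_budget):
    # one backtrack per consumer: stop when the budget runs out
    return consumers[:backtrack_budget]

def rebuilds_batched(consumers, backtrack_budget):
    # a single backtrack discovers every consumer of the new sub-slot
    return list(consumers) if backtrack_budget >= 1 else []

assert len(rebuilds_per_package(consumers, 3)) == 3      # misses rebuilds
assert len(rebuilds_batched(consumers, 3)) == len(consumers)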
64714 diff --git a/pym/portage/tests/resolver/test_slot_conflict_update.py b/pym/portage/tests/resolver/test_slot_conflict_update.py
64715 new file mode 100644
64716 index 0000000..331e578
64717 --- /dev/null
64718 +++ b/pym/portage/tests/resolver/test_slot_conflict_update.py
64719 @@ -0,0 +1,98 @@
64720 +# Copyright 2013 Gentoo Foundation
64721 +# Distributed under the terms of the GNU General Public License v2
64722 +
64723 +from portage.tests import TestCase
64724 +from portage.tests.resolver.ResolverPlayground import (ResolverPlayground,
64725 + ResolverPlaygroundTestCase)
64726 +
64727 +class SlotConflictUpdateTestCase(TestCase):
64728 +
64729 + def testSlotConflictUpdate(self):
64730 +
64731 + ebuilds = {
64732 +
64733 + "app-text/podofo-0.9.2" : {
64734 + "EAPI": "5",
64735 + "RDEPEND" : "dev-util/boost-build"
64736 + },
64737 +
64738 + "dev-cpp/libcmis-0.3.1" : {
64739 + "EAPI": "5",
64740 + "RDEPEND" : "dev-libs/boost:="
64741 + },
64742 +
64743 + "dev-libs/boost-1.53.0" : {
64744 + "EAPI": "5",
64745 + "SLOT": "0/1.53",
64746 + "RDEPEND" : "=dev-util/boost-build-1.53.0"
64747 + },
64748 +
64749 + "dev-libs/boost-1.52.0" : {
64750 + "EAPI": "5",
64751 + "SLOT": "0/1.52",
64752 + "RDEPEND" : "=dev-util/boost-build-1.52.0"
64753 + },
64754 +
64755 + "dev-util/boost-build-1.53.0" : {
64756 + "EAPI": "5",
64757 + "SLOT": "0"
64758 + },
64759 +
64760 + "dev-util/boost-build-1.52.0" : {
64761 + "EAPI": "5",
64762 + "SLOT": "0"
64763 + },
64764 +
64765 +
64766 + }
64767 +
64768 + installed = {
64769 +
64770 + "app-text/podofo-0.9.2" : {
64771 + "EAPI": "5",
64772 + "RDEPEND" : "dev-util/boost-build"
64773 + },
64774 +
64775 + "dev-cpp/libcmis-0.3.1" : {
64776 + "EAPI": "5",
64777 + "RDEPEND" : "dev-libs/boost:0/1.52="
64778 + },
64779 +
64780 + "dev-util/boost-build-1.52.0" : {
64781 + "EAPI": "5",
64782 + "SLOT": "0"
64783 + },
64784 +
64785 + "dev-libs/boost-1.52.0" : {
64786 + "EAPI": "5",
64787 + "SLOT": "0/1.52",
64788 + "RDEPEND" : "=dev-util/boost-build-1.52.0"
64789 + }
64790 +
64791 + }
64792 +
64793 + world = ["dev-cpp/libcmis", "dev-libs/boost", "app-text/podofo"]
64794 +
64795 + test_cases = (
64796 +
64797 + # In order to avoid a missed update, first mask lower
64798 + # versions that conflict with higher versions. Note that
64799 + # this behavior makes SlotConflictMaskUpdateTestCase
64800 + # fail.
64801 + ResolverPlaygroundTestCase(
64802 + world,
64803 + all_permutations = True,
64804 + options = {"--update": True, "--deep": True},
64805 + success = True,
64806 + mergelist = ['dev-util/boost-build-1.53.0', 'dev-libs/boost-1.53.0', 'dev-cpp/libcmis-0.3.1']),
64807 +
64808 + )
64809 +
64810 + playground = ResolverPlayground(ebuilds=ebuilds,
64811 + installed=installed, world=world, debug=False)
64812 + try:
64813 + for test_case in test_cases:
64814 + playground.run_TestCase(test_case)
64815 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
64816 + finally:
64817 + playground.cleanup()
64818
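The comment in testSlotConflictUpdate describes masking the lower of two conflicting versions so the boost update is not missed. A toy version of that choice, with naive version parsing and for illustration only:

def version_key(cpv):
    # naive dotted-version key, good enough for this illustration
    return tuple(int(x) for x in cpv.rpartition("-")[2].split("."))

def mask_for_update(conflicting):
    keep = max(conflicting, key=version_key)
    return [cpv for cpv in conflicting if cpv != keep]

assert mask_for_update(["dev-libs/boost-1.52.0", "dev-libs/boost-1.53.0"]) \
    == ["dev-libs/boost-1.52.0"]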
64819 diff --git a/pym/portage/tests/resolver/test_slot_operator_autounmask.py b/pym/portage/tests/resolver/test_slot_operator_autounmask.py
64820 new file mode 100644
64821 index 0000000..624271b
64822 --- /dev/null
64823 +++ b/pym/portage/tests/resolver/test_slot_operator_autounmask.py
64824 @@ -0,0 +1,120 @@
64825 +# Copyright 2013 Gentoo Foundation
64826 +# Distributed under the terms of the GNU General Public License v2
64827 +
64828 +from portage.tests import TestCase
64829 +from portage.tests.resolver.ResolverPlayground import (ResolverPlayground,
64830 + ResolverPlaygroundTestCase)
64831 +
64832 +class SlotOperatorAutoUnmaskTestCase(TestCase):
64833 +
64834 + def __init__(self, *args, **kwargs):
64835 + super(SlotOperatorAutoUnmaskTestCase, self).__init__(*args, **kwargs)
64836 +
64837 + def testSubSlot(self):
64838 + ebuilds = {
64839 + "dev-libs/icu-49" : {
64840 + "EAPI": "4-slot-abi",
64841 + "SLOT": "0/49"
64842 + },
64843 + "dev-libs/icu-4.8" : {
64844 + "EAPI": "4-slot-abi",
64845 + "SLOT": "0/48"
64846 + },
64847 + "dev-libs/libxml2-2.7.8" : {
64848 + "EAPI": "4-slot-abi",
64849 + "DEPEND": "dev-libs/icu:=",
64850 + "RDEPEND": "dev-libs/icu:=",
64851 + "KEYWORDS": "~x86"
64852 + },
64853 + }
64854 + binpkgs = {
64855 + "dev-libs/icu-49" : {
64856 + "EAPI": "4-slot-abi",
64857 + "SLOT": "0/49"
64858 + },
64859 + "dev-libs/icu-4.8" : {
64860 + "EAPI": "4-slot-abi",
64861 + "SLOT": "0/48"
64862 + },
64863 + "dev-libs/libxml2-2.7.8" : {
64864 + "EAPI": "4-slot-abi",
64865 + "DEPEND": "dev-libs/icu:0/48=",
64866 + "RDEPEND": "dev-libs/icu:0/48="
64867 + },
64868 + }
64869 + installed = {
64870 + "dev-libs/icu-4.8" : {
64871 + "EAPI": "4-slot-abi",
64872 + "SLOT": "0/48"
64873 + },
64874 + "dev-libs/libxml2-2.7.8" : {
64875 + "EAPI": "4-slot-abi",
64876 + "DEPEND": "dev-libs/icu:0/48=",
64877 + "RDEPEND": "dev-libs/icu:0/48="
64878 + },
64879 + }
64880 +
64881 + world = ["dev-libs/libxml2"]
64882 +
64883 + test_cases = (
64884 +
64885 + ResolverPlaygroundTestCase(
64886 + ["dev-libs/icu"],
64887 + options = {"--autounmask": True, "--oneshot": True},
64888 + success = False,
64889 + mergelist = ["dev-libs/icu-49", "dev-libs/libxml2-2.7.8" ],
64890 + unstable_keywords = ['dev-libs/libxml2-2.7.8']),
64891 +
64892 + ResolverPlaygroundTestCase(
64893 + ["dev-libs/icu"],
64894 + options = {"--oneshot": True, "--ignore-built-slot-operator-deps": "y"},
64895 + success = True,
64896 + mergelist = ["dev-libs/icu-49"]),
64897 +
64898 + ResolverPlaygroundTestCase(
64899 + ["dev-libs/icu"],
64900 + options = {"--oneshot": True, "--usepkg": True},
64901 + success = False,
64902 + mergelist = ["[binary]dev-libs/icu-49", "dev-libs/libxml2-2.7.8" ],
64903 + unstable_keywords = ['dev-libs/libxml2-2.7.8']),
64904 +
64905 + ResolverPlaygroundTestCase(
64906 + ["dev-libs/icu"],
64907 + options = {"--oneshot": True, "--usepkgonly": True},
64908 + success = True,
64909 + mergelist = ["[binary]dev-libs/icu-4.8"]),
64910 +
64911 + ResolverPlaygroundTestCase(
64912 + ["dev-libs/icu"],
64913 + options = {"--oneshot": True, "--usepkgonly": True, "--ignore-built-slot-operator-deps": "y"},
64914 + success = True,
64915 + mergelist = ["[binary]dev-libs/icu-49"]),
64916 +
64917 + ResolverPlaygroundTestCase(
64918 + ["@world"],
64919 + options = {"--update": True, "--deep": True, "--ignore-built-slot-operator-deps": "y"},
64920 + success = True,
64921 + mergelist = ["dev-libs/icu-49"]),
64922 +
64923 + ResolverPlaygroundTestCase(
64924 + ["@world"],
64925 + options = {"--update": True, "--deep": True, "--usepkgonly": True},
64926 + success = True,
64927 + mergelist = []),
64928 +
64929 + ResolverPlaygroundTestCase(
64930 + ["@world"],
64931 + options = {"--update": True, "--deep": True, "--usepkgonly": True, "--ignore-built-slot-operator-deps": "y"},
64932 + success = True,
64933 + mergelist = ["[binary]dev-libs/icu-49"]),
64934 +
64935 + )
64936 +
64937 + playground = ResolverPlayground(ebuilds=ebuilds, binpkgs=binpkgs,
64938 + installed=installed, world=world, debug=False)
64939 + try:
64940 + for test_case in test_cases:
64941 + playground.run_TestCase(test_case)
64942 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
64943 + finally:
64944 + playground.cleanup()
64945
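The binpkgs/installed entries above record the provider's sub-slot inside the built dependency string (dev-libs/icu:0/48=). A rough standalone illustration of what --ignore-built-slot-operator-deps does to such recorded deps, namely not enforcing the ":=" binding; this is a sketch, not portage.dep's implementation:

import re

def ignore_built_slot_operator(dep):
    # "cat/pkg:0/48=" -> "cat/pkg": the recorded ":slot/sub-slot=" binding
    # is simply not enforced for already-built packages.
    return re.sub(r":[^:]*=$", "", dep)

assert ignore_built_slot_operator("dev-libs/icu:0/48=") == "dev-libs/icu"
assert ignore_built_slot_operator(">=dev-libs/icu-3.8.1-r1:0/50=") == ">=dev-libs/icu-3.8.1-r1"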
64946 diff --git a/pym/portage/tests/resolver/test_slot_operator_unsatisfied.py b/pym/portage/tests/resolver/test_slot_operator_unsatisfied.py
64947 new file mode 100644
64948 index 0000000..e3b53d1
64949 --- /dev/null
64950 +++ b/pym/portage/tests/resolver/test_slot_operator_unsatisfied.py
64951 @@ -0,0 +1,70 @@
64952 +# Copyright 2012 Gentoo Foundation
64953 +# Distributed under the terms of the GNU General Public License v2
64954 +
64955 +from portage.tests import TestCase
64956 +from portage.tests.resolver.ResolverPlayground import (ResolverPlayground,
64957 + ResolverPlaygroundTestCase)
64958 +
64959 +class SlotOperatorUnsatisfiedTestCase(TestCase):
64960 +
64961 + def testSlotOperatorUnsatisfied(self):
64962 +
64963 + ebuilds = {
64964 + "app-misc/A-1" : {
64965 + "EAPI": "5",
64966 + "SLOT": "0/1"
64967 + },
64968 + "app-misc/A-2" : {
64969 + "EAPI": "5",
64970 + "SLOT": "0/2"
64971 + },
64972 + "app-misc/B-0" : {
64973 + "EAPI": "5",
64974 + "DEPEND": "app-misc/A:=",
64975 + "RDEPEND": "app-misc/A:="
64976 + },
64977 + }
64978 +
64979 + installed = {
64980 + "app-misc/A-2" : {
64981 + "EAPI": "5",
64982 + "SLOT": "0/2"
64983 + },
64984 +
64985 + "app-misc/B-0" : {
64986 + "EAPI": "5",
64987 + "DEPEND": "app-misc/A:0/1=",
64988 + "RDEPEND": "app-misc/A:0/1="
64989 + },
64990 + }
64991 +
64992 + world = ["app-misc/B"]
64993 +
64994 + test_cases = (
64995 +
64996 + # Demonstrate bug #439694, where a broken slot-operator
64997 + # sub-slot dependency needs to trigger a rebuild.
64998 + ResolverPlaygroundTestCase(
64999 + ["@world"],
65000 + options = {"--update": True, "--deep": True},
65001 + success = True,
65002 + mergelist = ["app-misc/B-0"]),
65003 +
65004 + # This doesn't trigger a rebuild, since there's no version
65005 + # change to trigger complete graph mode, and initially
65006 + # unsatisfied deps are ignored in complete graph mode anyway.
65007 + ResolverPlaygroundTestCase(
65008 + ["app-misc/A"],
65009 + options = {"--oneshot": True},
65010 + success = True,
65011 + mergelist = ["app-misc/A-2"]),
65012 + )
65013 +
65014 + playground = ResolverPlayground(ebuilds=ebuilds,
65015 + installed=installed, world=world, debug=False)
65016 + try:
65017 + for test_case in test_cases:
65018 + playground.run_TestCase(test_case)
65019 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
65020 + finally:
65021 + playground.cleanup()
65022
65023 diff --git a/pym/portage/tests/resolver/test_slot_operator_unsolved.py b/pym/portage/tests/resolver/test_slot_operator_unsolved.py
65024 new file mode 100644
65025 index 0000000..c19783d
65026 --- /dev/null
65027 +++ b/pym/portage/tests/resolver/test_slot_operator_unsolved.py
65028 @@ -0,0 +1,88 @@
65029 +# Copyright 2013 Gentoo Foundation
65030 +# Distributed under the terms of the GNU General Public License v2
65031 +
65032 +from portage.tests import TestCase
65033 +from portage.tests.resolver.ResolverPlayground import (ResolverPlayground,
65034 + ResolverPlaygroundTestCase)
65035 +
65036 +class SlotOperatorUnsolvedTestCase(TestCase):
65037 + """
65038 + Demonstrate bug #456340, where an unsolved circular dependency
65039 + interacts with an unsatisfied built slot-operator dep.
65040 + """
65041 + def __init__(self, *args, **kwargs):
65042 + super(SlotOperatorUnsolvedTestCase, self).__init__(*args, **kwargs)
65043 +
65044 + def testSlotOperatorUnsolved(self):
65045 + ebuilds = {
65046 + "dev-libs/icu-50.1.2" : {
65047 + "EAPI": "5",
65048 + "SLOT": "0/50.1.2"
65049 + },
65050 + "net-libs/webkit-gtk-1.10.2-r300" : {
65051 + "EAPI": "5",
65052 + "DEPEND": ">=dev-libs/icu-3.8.1-r1:=",
65053 + "RDEPEND": ">=dev-libs/icu-3.8.1-r1:="
65054 + },
65055 + "dev-ruby/rdoc-3.12.1" : {
65056 + "EAPI": "5",
65057 + "IUSE": "test",
65058 + "DEPEND": "test? ( >=dev-ruby/hoe-2.7.0 )",
65059 + },
65060 + "dev-ruby/hoe-2.13.0" : {
65061 + "EAPI": "5",
65062 + "IUSE": "test",
65063 + "DEPEND": "test? ( >=dev-ruby/rdoc-3.10 )",
65064 + "RDEPEND": "test? ( >=dev-ruby/rdoc-3.10 )",
65065 + },
65066 + }
65067 +
65068 + binpkgs = {
65069 + "net-libs/webkit-gtk-1.10.2-r300" : {
65070 + "EAPI": "5",
65071 + "DEPEND": ">=dev-libs/icu-3.8.1-r1:0/50=",
65072 + "RDEPEND": ">=dev-libs/icu-3.8.1-r1:0/50="
65073 + },
65074 + }
65075 +
65076 + installed = {
65077 + "dev-libs/icu-50.1.2" : {
65078 + "EAPI": "5",
65079 + "SLOT": "0/50.1.2"
65080 + },
65081 + "net-libs/webkit-gtk-1.10.2-r300" : {
65082 + "EAPI": "5",
65083 + "DEPEND": ">=dev-libs/icu-3.8.1-r1:0/50=",
65084 + "RDEPEND": ">=dev-libs/icu-3.8.1-r1:0/50="
65085 + },
65086 + }
65087 +
65088 + user_config = {
65089 + "make.conf" : ("FEATURES=test",)
65090 + }
65091 +
65092 + world = ["net-libs/webkit-gtk", "dev-ruby/hoe"]
65093 +
65094 + test_cases = (
65095 +
65096 + ResolverPlaygroundTestCase(
65097 + ["@world"],
65098 + options = {"--update": True, "--deep": True, "--usepkg": True},
65099 + circular_dependency_solutions = {
65100 + 'dev-ruby/hoe-2.13.0': frozenset([frozenset([('test', False)])]),
65101 + 'dev-ruby/rdoc-3.12.1': frozenset([frozenset([('test', False)])])
65102 + },
65103 + success = False
65104 + ),
65105 +
65106 + )
65107 +
65108 + playground = ResolverPlayground(ebuilds=ebuilds, binpkgs=binpkgs,
65109 + installed=installed, user_config=user_config,
65110 + world=world, debug=False)
65111 + try:
65112 + for test_case in test_cases:
65113 + playground.run_TestCase(test_case)
65114 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
65115 + finally:
65116 + playground.cleanup()
65117
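The expected circular_dependency_solutions above point at a cycle whose edges are all gated by the test USE flag. A small self-contained sketch of spotting such a cycle and collecting the flags that would break it (assumed data structure, not the resolver's algorithm):

conditional_edges = {
    "dev-ruby/rdoc": [("dev-ruby/hoe", "test")],
    "dev-ruby/hoe": [("dev-ruby/rdoc", "test")],
}

def use_flags_breaking_cycle(start):
    """Walk the conditional edges from start; if we return to start, the
    flags collected along the way would break the cycle if disabled."""
    flags, node, seen = set(), start, set()
    while node not in seen:
        seen.add(node)
        edges = conditional_edges.get(node)
        if not edges:
            return set()
        node, flag = edges[0]
        flags.add(flag)
        if node == start:
            return flags
    return set()

assert use_flags_breaking_cycle("dev-ruby/hoe") == {"test"}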
65118 diff --git a/pym/portage/tests/resolver/test_targetroot.py b/pym/portage/tests/resolver/test_targetroot.py
65119 new file mode 100644
65120 index 0000000..db6c60d
65121 --- /dev/null
65122 +++ b/pym/portage/tests/resolver/test_targetroot.py
65123 @@ -0,0 +1,85 @@
65124 +# Copyright 2012 Gentoo Foundation
65125 +# Distributed under the terms of the GNU General Public License v2
65126 +
65127 +from portage.tests import TestCase
65128 +from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase
65129 +
65130 +class TargetRootTestCase(TestCase):
65131 +
65132 + def testTargetRoot(self):
65133 + ebuilds = {
65134 + "dev-lang/python-3.2": {
65135 + "EAPI": "5-hdepend",
65136 + "IUSE": "targetroot",
65137 + "HDEPEND": "targetroot? ( ~dev-lang/python-3.2 )",
65138 + },
65139 + "dev-libs/A-1": {
65140 + "EAPI": "4",
65141 + "DEPEND": "dev-libs/B",
65142 + "RDEPEND": "dev-libs/C",
65143 + },
65144 + "dev-libs/B-1": {},
65145 + "dev-libs/C-1": {},
65146 + }
65147 +
65148 + test_cases = (
65149 + ResolverPlaygroundTestCase(
65150 + ["dev-lang/python"],
65151 + options = {},
65152 + success = True,
65153 + mergelist = ["dev-lang/python-3.2", "dev-lang/python-3.2{targetroot}"]),
65154 + ResolverPlaygroundTestCase(
65155 + ["dev-lang/python"],
65156 + options = {"--root-deps": True},
65157 + success = True,
65158 + mergelist = ["dev-lang/python-3.2", "dev-lang/python-3.2{targetroot}"]),
65159 + ResolverPlaygroundTestCase(
65160 + ["dev-lang/python"],
65161 + options = {"--root-deps": "rdeps"},
65162 + success = True,
65163 + mergelist = ["dev-lang/python-3.2", "dev-lang/python-3.2{targetroot}"]),
65164 + ResolverPlaygroundTestCase(
65165 + ["dev-libs/A"],
65166 + options = {},
65167 + ambiguous_merge_order = True,
65168 + success = True,
65169 + mergelist = [("dev-libs/B-1", "dev-libs/C-1{targetroot}"), "dev-libs/A-1{targetroot}"]),
65170 + ResolverPlaygroundTestCase(
65171 + ["dev-libs/A"],
65172 + options = {"--root-deps": True},
65173 + ambiguous_merge_order = True,
65174 + success = True,
65175 + mergelist = [("dev-libs/B-1{targetroot}", "dev-libs/C-1{targetroot}"), "dev-libs/A-1{targetroot}"]),
65176 + ResolverPlaygroundTestCase(
65177 + ["dev-libs/A"],
65178 + options = {"--root-deps": "rdeps"},
65179 + ambiguous_merge_order = True,
65180 + success = True,
65181 + mergelist = [("dev-libs/C-1{targetroot}"), "dev-libs/A-1{targetroot}"]),
65182 + )
65183 +
65184 + playground = ResolverPlayground(ebuilds=ebuilds, targetroot=True,
65185 + debug=False)
65186 + try:
65187 + for test_case in test_cases:
65188 + playground.run_TestCase(test_case)
65189 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
65190 + finally:
65191 + playground.cleanup()
65192 +
65193 + test_cases = (
65194 + ResolverPlaygroundTestCase(
65195 + ["dev-lang/python"],
65196 + options = {},
65197 + success = True,
65198 + mergelist = ["dev-lang/python-3.2"]),
65199 + )
65200 +
65201 + playground = ResolverPlayground(ebuilds=ebuilds, targetroot=False,
65202 + debug=False)
65203 + try:
65204 + for test_case in test_cases:
65205 + playground.run_TestCase(test_case)
65206 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
65207 + finally:
65208 + playground.cleanup()
65209
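The "{targetroot}" markers in the expected mergelists encode where each dependency is satisfied when installing into a separate ROOT. A simplified decision table matching the three dev-libs/A cases above; this is a reading of the test expectations, not a definitive statement of --root-deps semantics:

def dep_roots(root_deps):
    """Return (where DEPEND goes, where RDEPEND goes)."""
    if root_deps is True:        # --root-deps
        return ("target", "target")
    if root_deps == "rdeps":     # --root-deps=rdeps: only RDEPEND, in ROOT
        return (None, "target")
    return ("host", "target")    # default for EAPIs without HDEPEND

assert dep_roots(None) == ("host", "target")
assert dep_roots(True) == ("target", "target")
assert dep_roots("rdeps") == (None, "target")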
65210 diff --git a/pym/portage/tests/resolver/test_unpack_dependencies.py b/pym/portage/tests/resolver/test_unpack_dependencies.py
65211 new file mode 100644
65212 index 0000000..cfceff4
65213 --- /dev/null
65214 +++ b/pym/portage/tests/resolver/test_unpack_dependencies.py
65215 @@ -0,0 +1,65 @@
65216 +# Copyright 2012 Gentoo Foundation
65217 +# Distributed under the terms of the GNU General Public License v2
65218 +
65219 +from portage.tests import TestCase
65220 +from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase
65221 +
65222 +class UnpackDependenciesTestCase(TestCase):
65223 + def testUnpackDependencies(self):
65224 + distfiles = {
65225 + "A-1.tar.gz": b"binary\0content",
65226 + "B-1.TAR.XZ": b"binary\0content",
65227 + "B-docs-1.tar.bz2": b"binary\0content",
65228 + "C-1.TAR.XZ": b"binary\0content",
65229 + "C-docs-1.tar.bz2": b"binary\0content",
65230 + }
65231 +
65232 + ebuilds = {
65233 + "dev-libs/A-1": {"SRC_URI": "A-1.tar.gz", "EAPI": "5-progress"},
65234 + "dev-libs/B-1": {"IUSE": "doc", "SRC_URI": "B-1.TAR.XZ doc? ( B-docs-1.tar.bz2 )", "EAPI": "5-progress"},
65235 + "dev-libs/C-1": {"IUSE": "doc", "SRC_URI": "C-1.TAR.XZ doc? ( C-docs-1.tar.bz2 )", "EAPI": "5-progress"},
65236 + "app-arch/bzip2-1": {},
65237 + "app-arch/gzip-1": {},
65238 + "app-arch/tar-1": {},
65239 + "app-arch/xz-utils-1": {},
65240 + }
65241 +
65242 + repo_configs = {
65243 + "test_repo": {
65244 + "unpack_dependencies/5-progress": (
65245 + "tar.bz2 app-arch/tar app-arch/bzip2",
65246 + "tar.gz app-arch/tar app-arch/gzip",
65247 + "tar.xz app-arch/tar app-arch/xz-utils",
65248 + ),
65249 + },
65250 + }
65251 +
65252 + test_cases = (
65253 + ResolverPlaygroundTestCase(
65254 + ["dev-libs/A"],
65255 + success = True,
65256 + ignore_mergelist_order = True,
65257 + mergelist = ["app-arch/tar-1", "app-arch/gzip-1", "dev-libs/A-1"]),
65258 + ResolverPlaygroundTestCase(
65259 + ["dev-libs/B"],
65260 + success = True,
65261 + ignore_mergelist_order = True,
65262 + mergelist = ["app-arch/tar-1", "app-arch/xz-utils-1", "dev-libs/B-1"]),
65263 + ResolverPlaygroundTestCase(
65264 + ["dev-libs/C"],
65265 + success = True,
65266 + ignore_mergelist_order = True,
65267 + mergelist = ["app-arch/tar-1", "app-arch/xz-utils-1", "app-arch/bzip2-1", "dev-libs/C-1"]),
65268 + )
65269 +
65270 + user_config = {
65271 + "package.use": ("dev-libs/C doc",)
65272 + }
65273 +
65274 + playground = ResolverPlayground(distfiles=distfiles, ebuilds=ebuilds, repo_configs=repo_configs, user_config=user_config)
65275 + try:
65276 + for test_case in test_cases:
65277 + playground.run_TestCase(test_case)
65278 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
65279 + finally:
65280 + playground.cleanup()
65281
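The unpack_dependencies/5-progress lines above map archive suffixes to the packages required to unpack them, matched case-insensitively. A standalone sketch of that lookup (not Portage's parser; 5-progress is an experimental EAPI):

unpack_deps = {
    "tar.bz2": ["app-arch/tar", "app-arch/bzip2"],
    "tar.gz": ["app-arch/tar", "app-arch/gzip"],
    "tar.xz": ["app-arch/tar", "app-arch/xz-utils"],
}

def deps_for_src_uri(filenames):
    deps = []
    for name in filenames:
        for suffix, pkgs in unpack_deps.items():
            if name.lower().endswith(suffix):
                deps.extend(p for p in pkgs if p not in deps)
    return deps

assert deps_for_src_uri(["A-1.tar.gz"]) == ["app-arch/tar", "app-arch/gzip"]
assert deps_for_src_uri(["C-1.TAR.XZ", "C-docs-1.tar.bz2"]) == [
    "app-arch/tar", "app-arch/xz-utils", "app-arch/bzip2"]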
65282 diff --git a/pym/portage/tests/resolver/test_use_aliases.py b/pym/portage/tests/resolver/test_use_aliases.py
65283 new file mode 100644
65284 index 0000000..7c2debb
65285 --- /dev/null
65286 +++ b/pym/portage/tests/resolver/test_use_aliases.py
65287 @@ -0,0 +1,131 @@
65288 +# Copyright 2012 Gentoo Foundation
65289 +# Distributed under the terms of the GNU General Public License v2
65290 +
65291 +from portage.tests import TestCase
65292 +from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase
65293 +
65294 +class UseAliasesTestCase(TestCase):
65295 + def testUseAliases(self):
65296 + ebuilds = {
65297 + "dev-libs/A-1": {"DEPEND": "dev-libs/K[x]", "RDEPEND": "dev-libs/K[x]", "EAPI": "5"},
65298 + "dev-libs/B-1": {"DEPEND": "dev-libs/L[x]", "RDEPEND": "dev-libs/L[x]", "EAPI": "5"},
65299 + "dev-libs/C-1": {"DEPEND": "dev-libs/M[xx]", "RDEPEND": "dev-libs/M[xx]", "EAPI": "5"},
65300 + "dev-libs/D-1": {"DEPEND": "dev-libs/N[-x]", "RDEPEND": "dev-libs/N[-x]", "EAPI": "5"},
65301 + "dev-libs/E-1": {"DEPEND": "dev-libs/O[-xx]", "RDEPEND": "dev-libs/O[-xx]", "EAPI": "5"},
65302 + "dev-libs/F-1": {"DEPEND": "dev-libs/P[-xx]", "RDEPEND": "dev-libs/P[-xx]", "EAPI": "5"},
65303 + "dev-libs/G-1": {"DEPEND": "dev-libs/Q[x-y]", "RDEPEND": "dev-libs/Q[x-y]", "EAPI": "5"},
65304 + "dev-libs/H-1": {"DEPEND": "=dev-libs/R-1*[yy]", "RDEPEND": "=dev-libs/R-1*[yy]", "EAPI": "5"},
65305 + "dev-libs/H-2": {"DEPEND": "=dev-libs/R-2*[yy]", "RDEPEND": "=dev-libs/R-2*[yy]", "EAPI": "5"},
65306 + "dev-libs/I-1": {"DEPEND": "dev-libs/S[y-z]", "RDEPEND": "dev-libs/S[y-z]", "EAPI": "5"},
65307 + "dev-libs/I-2": {"DEPEND": "dev-libs/S[y_z]", "RDEPEND": "dev-libs/S[y_z]", "EAPI": "5"},
65308 + "dev-libs/J-1": {"DEPEND": "dev-libs/T[x]", "RDEPEND": "dev-libs/T[x]", "EAPI": "5"},
65309 + "dev-libs/K-1": {"IUSE": "+x", "EAPI": "5"},
65310 + "dev-libs/K-2::repo1": {"IUSE": "+X", "EAPI": "5-progress"},
65311 + "dev-libs/L-1": {"IUSE": "+x", "EAPI": "5"},
65312 + "dev-libs/M-1::repo1": {"IUSE": "X", "EAPI": "5-progress"},
65313 + "dev-libs/N-1": {"IUSE": "x", "EAPI": "5"},
65314 + "dev-libs/N-2::repo1": {"IUSE": "X", "EAPI": "5-progress"},
65315 + "dev-libs/O-1": {"IUSE": "x", "EAPI": "5"},
65316 + "dev-libs/P-1::repo1": {"IUSE": "+X", "EAPI": "5-progress"},
65317 + "dev-libs/Q-1::repo2": {"IUSE": "X.Y", "EAPI": "5-progress"},
65318 + "dev-libs/R-1::repo1": {"IUSE": "Y", "EAPI": "5-progress"},
65319 + "dev-libs/R-2::repo1": {"IUSE": "y", "EAPI": "5-progress"},
65320 + "dev-libs/S-1::repo2": {"IUSE": "Y.Z", "EAPI": "5-progress"},
65321 + "dev-libs/S-2::repo2": {"IUSE": "Y.Z", "EAPI": "5-progress"},
65322 + "dev-libs/T-1::repo1": {"IUSE": "+X", "EAPI": "5"},
65323 + }
65324 +
65325 + installed = {
65326 + "dev-libs/L-2::repo1": {"IUSE": "+X", "USE": "X", "EAPI": "5-progress"},
65327 + "dev-libs/O-2::repo1": {"IUSE": "X", "USE": "", "EAPI": "5-progress"},
65328 + }
65329 +
65330 + repo_configs = {
65331 + "repo1": {
65332 + "use.aliases": ("X x xx",),
65333 + "package.use.aliases": (
65334 + "=dev-libs/R-1* Y yy",
65335 + "=dev-libs/R-2* y yy",
65336 + )
65337 + },
65338 + "repo2": {
65339 + "eapi": ("5-progress",),
65340 + "use.aliases": ("X.Y x-y",),
65341 + "package.use.aliases": (
65342 + "=dev-libs/S-1* Y.Z y-z",
65343 + "=dev-libs/S-2* Y.Z y_z",
65344 + ),
65345 + },
65346 + }
65347 +
65348 + test_cases = (
65349 + ResolverPlaygroundTestCase(
65350 + ["dev-libs/A"],
65351 + success = True,
65352 + mergelist = ["dev-libs/K-2", "dev-libs/A-1"]),
65353 + ResolverPlaygroundTestCase(
65354 + ["dev-libs/B"],
65355 + success = True,
65356 + mergelist = ["dev-libs/B-1"]),
65357 + ResolverPlaygroundTestCase(
65358 + ["dev-libs/C"],
65359 + options = {"--autounmask": True},
65360 + success = False,
65361 + mergelist = ["dev-libs/M-1", "dev-libs/C-1"],
65362 + use_changes = {"dev-libs/M-1": {"X": True}}),
65363 + ResolverPlaygroundTestCase(
65364 + ["dev-libs/D"],
65365 + success = True,
65366 + mergelist = ["dev-libs/N-2", "dev-libs/D-1"]),
65367 + ResolverPlaygroundTestCase(
65368 + ["dev-libs/E"],
65369 + success = True,
65370 + mergelist = ["dev-libs/E-1"]),
65371 + ResolverPlaygroundTestCase(
65372 + ["dev-libs/F"],
65373 + options = {"--autounmask": True},
65374 + success = False,
65375 + mergelist = ["dev-libs/P-1", "dev-libs/F-1"],
65376 + use_changes = {"dev-libs/P-1": {"X": False}}),
65377 + ResolverPlaygroundTestCase(
65378 + ["dev-libs/G"],
65379 + options = {"--autounmask": True},
65380 + success = False,
65381 + mergelist = ["dev-libs/Q-1", "dev-libs/G-1"],
65382 + use_changes = {"dev-libs/Q-1": {"X.Y": True}}),
65383 + ResolverPlaygroundTestCase(
65384 + ["=dev-libs/H-1*"],
65385 + options = {"--autounmask": True},
65386 + success = False,
65387 + mergelist = ["dev-libs/R-1", "dev-libs/H-1"],
65388 + use_changes = {"dev-libs/R-1": {"Y": True}}),
65389 + ResolverPlaygroundTestCase(
65390 + ["=dev-libs/H-2*"],
65391 + options = {"--autounmask": True},
65392 + success = False,
65393 + mergelist = ["dev-libs/R-2", "dev-libs/H-2"],
65394 + use_changes = {"dev-libs/R-2": {"y": True}}),
65395 + ResolverPlaygroundTestCase(
65396 + ["=dev-libs/I-1*"],
65397 + options = {"--autounmask": True},
65398 + success = False,
65399 + mergelist = ["dev-libs/S-1", "dev-libs/I-1"],
65400 + use_changes = {"dev-libs/S-1": {"Y.Z": True}}),
65401 + ResolverPlaygroundTestCase(
65402 + ["=dev-libs/I-2*"],
65403 + options = {"--autounmask": True},
65404 + success = False,
65405 + mergelist = ["dev-libs/S-2", "dev-libs/I-2"],
65406 + use_changes = {"dev-libs/S-2": {"Y.Z": True}}),
65407 + ResolverPlaygroundTestCase(
65408 + ["dev-libs/J"],
65409 + success = False),
65410 + )
65411 +
65412 + playground = ResolverPlayground(ebuilds=ebuilds, installed=installed, repo_configs=repo_configs)
65413 + try:
65414 + for test_case in test_cases:
65415 + playground.run_TestCase(test_case)
65416 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
65417 + finally:
65418 + playground.cleanup()
65419
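use.aliases lets repo1 expose its X flag under the names x and xx, which is why dev-libs/K[x] resolves against IUSE="+X". An illustrative alias lookup with an assumed helper, not Portage's configuration code:

use_aliases = {"X": ["x", "xx"]}   # repo1's "use.aliases: X x xx"

def canonical_flag(requested, iuse):
    """Map a requested flag name back to the flag actually in IUSE."""
    if requested in iuse:
        return requested
    for real, aliases in use_aliases.items():
        if requested in aliases and real in iuse:
            return real
    return None

# dev-libs/K-2::repo1 has IUSE="+X"; both [x] and [xx] resolve to it.
assert canonical_flag("x", {"X"}) == "X"
assert canonical_flag("xx", {"X"}) == "X"
assert canonical_flag("x-y", {"X"}) is None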
65420 diff --git a/pym/portage/tests/resolver/test_useflags.py b/pym/portage/tests/resolver/test_useflags.py
65421 new file mode 100644
65422 index 0000000..0a5f3b3
65423 --- /dev/null
65424 +++ b/pym/portage/tests/resolver/test_useflags.py
65425 @@ -0,0 +1,78 @@
65426 +# Copyright 2014 Gentoo Foundation
65427 +# Distributed under the terms of the GNU General Public License v2
65428 +
65429 +from portage.tests import TestCase
65430 +from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase
65431 +
65432 +class UseFlagsTestCase(TestCase):
65433 +
65434 + def testUseFlags(self):
65435 + ebuilds = {
65436 + "dev-libs/A-1": { "IUSE": "X", },
65437 + "dev-libs/B-1": { "IUSE": "X Y", },
65438 + }
65439 +
65440 + installed = {
65441 + "dev-libs/A-1": { "IUSE": "X", },
65442 + "dev-libs/B-1": { "IUSE": "X", },
65443 + }
65444 +
65445 + binpkgs = installed
65446 +
65447 + user_config = {
65448 + "package.use": ( "dev-libs/A X", ),
65449 + "use.force": ( "Y", ),
65450 + }
65451 +
65452 + test_cases = (
65453 + #default: don't reinstall on use flag change
65454 + ResolverPlaygroundTestCase(
65455 + ["dev-libs/A"],
65456 + options = {"--selective": True, "--usepkg": True},
65457 + success = True,
65458 + mergelist = []),
65459 +
65460 + #default: respect use flags for binpkgs
65461 + ResolverPlaygroundTestCase(
65462 + ["dev-libs/A"],
65463 + options = {"--usepkg": True},
65464 + success = True,
65465 + mergelist = ["dev-libs/A-1"]),
65466 +
65467 + #--binpkg-respect-use=n: use binpkgs with different use flags
65468 + ResolverPlaygroundTestCase(
65469 + ["dev-libs/A"],
65470 + options = {"--binpkg-respect-use": "n", "--usepkg": True},
65471 + success = True,
65472 + mergelist = ["[binary]dev-libs/A-1"]),
65473 +
65474 + #--reinstall=changed-use: reinstall if use flag changed
65475 + ResolverPlaygroundTestCase(
65476 + ["dev-libs/A"],
65477 + options = {"--reinstall": "changed-use", "--usepkg": True},
65478 + success = True,
65479 + mergelist = ["dev-libs/A-1"]),
65480 +
65481 + #--reinstall=changed-use: don't reinstall on new use flag
65482 + ResolverPlaygroundTestCase(
65483 + ["dev-libs/B"],
65484 + options = {"--reinstall": "changed-use", "--usepkg": True},
65485 + success = True,
65486 + mergelist = []),
65487 +
65488 + #--newuse: reinstall on new use flag
65489 + ResolverPlaygroundTestCase(
65490 + ["dev-libs/B"],
65491 + options = {"--newuse": True, "--usepkg": True},
65492 + success = True,
65493 + mergelist = ["dev-libs/B-1"]),
65494 + )
65495 +
65496 + playground = ResolverPlayground(ebuilds=ebuilds,
65497 + binpkgs=binpkgs, installed=installed, user_config=user_config)
65498 + try:
65499 + for test_case in test_cases:
65500 + playground.run_TestCase(test_case)
65501 + self.assertEqual(test_case.test_success, True, test_case.fail_msg)
65502 + finally:
65503 + playground.cleanup()
65504
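The cases above contrast three reinstall policies: the default never reinstalls on USE changes, --reinstall=changed-use reacts only to flags whose value changed, and --newuse also reacts to added or removed flags. A simplified policy table, for illustration only:

def should_reinstall(policy, old_iuse, old_use, new_iuse, new_use):
    # old_*/new_* are IUSE and enabled-USE sets for the installed
    # package and the build that would replace it.
    if policy == "default":
        return False
    common = old_iuse & new_iuse
    changed = (old_use & common) != (new_use & common)
    if policy == "changed-use":
        return changed
    if policy == "newuse":
        return changed or old_iuse != new_iuse
    raise ValueError(policy)

# dev-libs/A: the value of "X" flipped on -> changed-use reinstalls
assert should_reinstall("changed-use", {"X"}, set(), {"X"}, {"X"})
# dev-libs/B: only a new flag "Y" appeared -> changed-use keeps it, --newuse rebuilds
assert not should_reinstall("changed-use", {"X"}, set(), {"X", "Y"}, {"Y"})
assert should_reinstall("newuse", {"X"}, set(), {"X", "Y"}, {"Y"})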
65505 diff --git a/pym/portage/tests/runTests b/pym/portage/tests/runTests
65506 index 1c1008d..9c45276 100755
65507 --- a/pym/portage/tests/runTests
65508 +++ b/pym/portage/tests/runTests
65509 @@ -1,6 +1,6 @@
65510 -#!/usr/bin/python -Wd
65511 +#!/usr/bin/python -bWd
65512 # runTests.py -- Portage Unit Test Functionality
65513 -# Copyright 2006-2012 Gentoo Foundation
65514 +# Copyright 2006-2014 Gentoo Foundation
65515 # Distributed under the terms of the GNU General Public License v2
65516
65517 import os, sys
65518 @@ -29,9 +29,10 @@ os.environ["PORTAGE_GRPNAME"] = grp.getgrgid(os.getgid()).gr_name
65519
65520 # Insert our parent dir so we can do shiny import "tests"
65521 # This line courtesy of Marienz and Pkgcore ;)
65522 -sys.path.insert(0, osp.dirname(osp.dirname(osp.dirname(osp.abspath(__file__)))))
65523 +sys.path.insert(0, osp.dirname(osp.dirname(osp.dirname(osp.realpath(__file__)))))
65524
65525 import portage
65526 +portage._internal_caller = True
65527
65528 # Ensure that we don't instantiate portage.settings, so that tests should
65529 # work the same regardless of global configuration file state/existence.
65530 @@ -44,11 +45,17 @@ import portage.tests as tests
65531 from portage.const import PORTAGE_BIN_PATH
65532 path = os.environ.get("PATH", "").split(":")
65533 path = [x for x in path if x]
65534 -if not path or not os.path.samefile(path[0], PORTAGE_BIN_PATH):
65535 +
65536 +insert_bin_path = True
65537 +try:
65538 + insert_bin_path = not path or \
65539 + not os.path.samefile(path[0], PORTAGE_BIN_PATH)
65540 +except OSError:
65541 + pass
65542 +
65543 +if insert_bin_path:
65544 path.insert(0, PORTAGE_BIN_PATH)
65545 os.environ["PATH"] = ":".join(path)
65546 -del path
65547 -
65548
65549 if __name__ == "__main__":
65550 sys.exit(tests.main())
65551
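The runTests change above guards os.path.samefile() because it raises OSError when either path is missing. The same defensive pattern in isolation:

import os

def ensure_first_in_path(path_entries, bin_path):
    insert = True
    try:
        insert = not path_entries or \
            not os.path.samefile(path_entries[0], bin_path)
    except OSError:
        # one of the paths does not exist; insert unconditionally
        pass
    if insert:
        path_entries.insert(0, bin_path)
    return path_entries

path = ["/nonexistent/dir"]
ensure_first_in_path(path, os.getcwd())
assert path[0] == os.getcwd()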
65552 diff --git a/pym/portage/tests/unicode/test_string_format.py b/pym/portage/tests/unicode/test_string_format.py
65553 index fb6e8e0..9d4366a 100644
65554 --- a/pym/portage/tests/unicode/test_string_format.py
65555 +++ b/pym/portage/tests/unicode/test_string_format.py
65556 @@ -1,15 +1,18 @@
65557 -# Copyright 2010 Gentoo Foundation
65558 +# Copyright 2010-2014 Gentoo Foundation
65559 # Distributed under the terms of the GNU General Public License v2
65560
65561 +from __future__ import unicode_literals
65562 +
65563 import sys
65564
65565 -from portage import _encodings, _unicode_decode
65566 +from portage import _encodings, _unicode_encode
65567 from portage.exception import PortageException
65568 from portage.tests import TestCase
65569 from _emerge.DependencyArg import DependencyArg
65570 from _emerge.UseFlagDisplay import UseFlagDisplay
65571
65572 if sys.hexversion >= 0x3000000:
65573 + # pylint: disable=W0622
65574 basestring = str
65575
65576 STR_IS_UNICODE = sys.hexversion >= 0x3000000
65577 @@ -20,27 +23,25 @@ class StringFormatTestCase(TestCase):
65578 which may be either python2 or python3.
65579 """
65580
65581 - # In order to get some unicode test strings in a way that works in
65582 - # both python2 and python3, write them here as byte strings and
65583 - # decode them before use. This assumes _encodings['content'] is
65584 - # utf_8.
65585 + # We need unicode_literals in order to get some unicode test strings
65586 + # in a way that works in both python2 and python3.
65587
65588 unicode_strings = (
65589 - b'\xE2\x80\x98',
65590 - b'\xE2\x80\x99',
65591 + '\u2018',
65592 + '\u2019',
65593 )
65594
65595 def testDependencyArg(self):
65596
65597 self.assertEqual(_encodings['content'], 'utf_8')
65598
65599 - for arg_bytes in self.unicode_strings:
65600 - arg_unicode = _unicode_decode(arg_bytes, encoding=_encodings['content'])
65601 + for arg_unicode in self.unicode_strings:
65602 + arg_bytes = _unicode_encode(arg_unicode, encoding=_encodings['content'])
65603 dependency_arg = DependencyArg(arg=arg_unicode)
65604
65605 - # Force unicode format string so that __unicode__() is
65606 - # called in python2.
65607 - formatted_str = _unicode_decode("%s") % (dependency_arg,)
65608 + # Use unicode_literals for unicode format string so that
65609 + # __unicode__() is called in Python 2.
65610 + formatted_str = "%s" % (dependency_arg,)
65611 self.assertEqual(formatted_str, arg_unicode)
65612
65613 if STR_IS_UNICODE:
65614 @@ -52,20 +53,20 @@ class StringFormatTestCase(TestCase):
65615 else:
65616
65617 # Test the __str__ method which returns encoded bytes in python2
65618 - formatted_bytes = "%s" % (dependency_arg,)
65619 + formatted_bytes = b"%s" % (dependency_arg,)
65620 self.assertEqual(formatted_bytes, arg_bytes)
65621
65622 def testPortageException(self):
65623
65624 self.assertEqual(_encodings['content'], 'utf_8')
65625
65626 - for arg_bytes in self.unicode_strings:
65627 - arg_unicode = _unicode_decode(arg_bytes, encoding=_encodings['content'])
65628 + for arg_unicode in self.unicode_strings:
65629 + arg_bytes = _unicode_encode(arg_unicode, encoding=_encodings['content'])
65630 e = PortageException(arg_unicode)
65631
65632 - # Force unicode format string so that __unicode__() is
65633 - # called in python2.
65634 - formatted_str = _unicode_decode("%s") % (e,)
65635 + # Use unicode_literals for unicode format string so that
65636 + # __unicode__() is called in Python 2.
65637 + formatted_str = "%s" % (e,)
65638 self.assertEqual(formatted_str, arg_unicode)
65639
65640 if STR_IS_UNICODE:
65641 @@ -77,7 +78,7 @@ class StringFormatTestCase(TestCase):
65642 else:
65643
65644 # Test the __str__ method which returns encoded bytes in python2
65645 - formatted_bytes = "%s" % (e,)
65646 + formatted_bytes = b"%s" % (e,)
65647 self.assertEqual(formatted_bytes, arg_bytes)
65648
65649 def testUseFlagDisplay(self):
65650 @@ -86,13 +87,12 @@ class StringFormatTestCase(TestCase):
65651
65652 for enabled in (True, False):
65653 for forced in (True, False):
65654 - for arg_bytes in self.unicode_strings:
65655 - arg_unicode = _unicode_decode(arg_bytes, encoding=_encodings['content'])
65656 + for arg_unicode in self.unicode_strings:
65657 e = UseFlagDisplay(arg_unicode, enabled, forced)
65658
65659 - # Force unicode format string so that __unicode__() is
65660 - # called in python2.
65661 - formatted_str = _unicode_decode("%s") % (e,)
65662 + # Use unicode_literals for a unicode format string so that
65663 + # __unicode__() is called in Python 2.
65664 + formatted_str = "%s" % (e,)
65665 self.assertEqual(isinstance(formatted_str, basestring), True)
65666
65667 if STR_IS_UNICODE:
65668 @@ -104,5 +104,5 @@ class StringFormatTestCase(TestCase):
65669 else:
65670
65671 # Test the __str__ method which returns encoded bytes in python2
65672 - formatted_bytes = "%s" % (e,)
65673 + formatted_bytes = b"%s" % (e,)
65674 self.assertEqual(isinstance(formatted_bytes, bytes), True)
65675
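The hunk above drops the runtime _unicode_decode() calls in favour of unicode_literals. A standalone sketch of the Python 2 dispatch the test relies on (the Greeting class here is hypothetical, not part of portage): with unicode_literals in effect, a plain "%s" literal is unicode, so formatting goes through __unicode__(), while an explicit b"%s" stays a byte string and formats through __str__().

    from __future__ import unicode_literals

    import sys

    class Greeting(object):
        def __init__(self, text):
            self.text = text  # a unicode string such as '\u2018hi\u2019'

        def __unicode__(self):  # used by unicode format strings on Python 2
            return self.text

        def __str__(self):
            if sys.hexversion < 0x3000000:
                return self.text.encode('utf_8')  # Python 2: encoded bytes
            return self.text

    g = Greeting('\u2018hi\u2019')
    formatted_str = "%s" % (g,)         # unicode result on Python 2 and 3
    if sys.hexversion < 0x3000000:
        formatted_bytes = b"%s" % (g,)  # bytes via __str__(), Python 2 only
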
65676 diff --git a/pym/portage/tests/update/test_move_ent.py b/pym/portage/tests/update/test_move_ent.py
65677 index 2504dee..d9647a9 100644
65678 --- a/pym/portage/tests/update/test_move_ent.py
65679 +++ b/pym/portage/tests/update/test_move_ent.py
65680 @@ -1,4 +1,4 @@
65681 -# Copyright 2012 Gentoo Foundation
65682 +# Copyright 2012-2013 Gentoo Foundation
65683 # Distributed under the terms of the GNU General Public License v2
65684
65685 import textwrap
65686 @@ -59,12 +59,12 @@ class MoveEntTestCase(TestCase):
65687 settings = playground.settings
65688 trees = playground.trees
65689 eroot = settings["EROOT"]
65690 - portdir = settings["PORTDIR"]
65691 + test_repo_location = settings.repositories["test_repo"].location
65692 portdb = trees[eroot]["porttree"].dbapi
65693 vardb = trees[eroot]["vartree"].dbapi
65694 bindb = trees[eroot]["bintree"].dbapi
65695
65696 - updates_dir = os.path.join(portdir, "profiles", "updates")
65697 + updates_dir = os.path.join(test_repo_location, "profiles", "updates")
65698
65699 try:
65700 ensure_dirs(updates_dir)
65701
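The switch away from settings["PORTDIR"] follows the ongoing PORTDIR deprecation: repository paths are now looked up through the repositories configuration attached to the config object. A minimal sketch of the same lookup outside the test playground, assuming an initialized portage environment (the repository name "gentoo" is the conventional main-repo name, not something this diff guarantees):

    import portage

    settings = portage.settings            # legacy global config instance
    repo = settings.repositories["gentoo"] # raises KeyError if unknown
    print(repo.name, repo.location)
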
65702 diff --git a/pym/portage/tests/update/test_move_slot_ent.py b/pym/portage/tests/update/test_move_slot_ent.py
65703 index fcb0cc6..3e49e11 100644
65704 --- a/pym/portage/tests/update/test_move_slot_ent.py
65705 +++ b/pym/portage/tests/update/test_move_slot_ent.py
65706 @@ -1,4 +1,4 @@
65707 -# Copyright 2012 Gentoo Foundation
65708 +# Copyright 2012-2013 Gentoo Foundation
65709 # Distributed under the terms of the GNU General Public License v2
65710
65711 import textwrap
65712 @@ -94,12 +94,12 @@ class MoveSlotEntTestCase(TestCase):
65713 settings = playground.settings
65714 trees = playground.trees
65715 eroot = settings["EROOT"]
65716 - portdir = settings["PORTDIR"]
65717 + test_repo_location = settings.repositories["test_repo"].location
65718 portdb = trees[eroot]["porttree"].dbapi
65719 vardb = trees[eroot]["vartree"].dbapi
65720 bindb = trees[eroot]["bintree"].dbapi
65721
65722 - updates_dir = os.path.join(portdir, "profiles", "updates")
65723 + updates_dir = os.path.join(test_repo_location, "profiles", "updates")
65724
65725 try:
65726 ensure_dirs(updates_dir)
65727
65728 diff --git a/pym/portage/tests/update/test_update_dbentry.py b/pym/portage/tests/update/test_update_dbentry.py
65729 index e13cfed..8895114 100644
65730 --- a/pym/portage/tests/update/test_update_dbentry.py
65731 +++ b/pym/portage/tests/update/test_update_dbentry.py
65732 @@ -1,4 +1,4 @@
65733 -# Copyright 2012 Gentoo Foundation
65734 +# Copyright 2012-2013 Gentoo Foundation
65735 # Distributed under the terms of the GNU General Public License v2
65736
65737 import re
65738 @@ -6,14 +6,107 @@ import textwrap
65739
65740 import portage
65741 from portage import os
65742 +from portage.dep import Atom
65743 from portage.tests import TestCase
65744 from portage.tests.resolver.ResolverPlayground import ResolverPlayground
65745 +from portage.update import update_dbentry
65746 from portage.util import ensure_dirs
65747 +from portage.versions import _pkg_str
65748 from portage._global_updates import _do_global_updates
65749
65750 class UpdateDbentryTestCase(TestCase):
65751
65752 def testUpdateDbentryTestCase(self):
65753 + cases = (
65754 +
65755 + (("move", Atom("dev-libs/A"), Atom("dev-libs/B")), "1",
65756 + " dev-libs/A:0 ", " dev-libs/B:0 "),
65757 +
65758 + (("move", Atom("dev-libs/A"), Atom("dev-libs/B")), "1",
65759 + " >=dev-libs/A-1:0 ", " >=dev-libs/B-1:0 "),
65760 +
65761 + (("move", Atom("dev-libs/A"), Atom("dev-libs/B")), "2",
65762 + " dev-libs/A[foo] ", " dev-libs/B[foo] "),
65763 +
65764 + (("move", Atom("dev-libs/A"), Atom("dev-libs/B")), "5",
65765 + " dev-libs/A:0/1=[foo] ", " dev-libs/B:0/1=[foo] "),
65766 +
65767 + (("move", Atom("dev-libs/A"), Atom("dev-libs/B")), "5",
65768 + " dev-libs/A:0/1[foo] ", " dev-libs/B:0/1[foo] "),
65769 +
65770 + (("move", Atom("dev-libs/A"), Atom("dev-libs/B")), "5",
65771 + " dev-libs/A:0/0[foo] ", " dev-libs/B:0/0[foo] "),
65772 +
65773 + (("move", Atom("dev-libs/A"), Atom("dev-libs/B")), "5",
65774 + " dev-libs/A:0=[foo] ", " dev-libs/B:0=[foo] "),
65775 +
65776 + (("slotmove", Atom("dev-libs/A"), "0", "1"), "1",
65777 + " dev-libs/A:0 ", " dev-libs/A:1 "),
65778 +
65779 + (("slotmove", Atom("dev-libs/A"), "0", "1"), "1",
65780 + " >=dev-libs/A-1:0 ", " >=dev-libs/A-1:1 "),
65781 +
65782 + (("slotmove", Atom("dev-libs/A"), "0", "1"), "5",
65783 + " dev-libs/A:0/1=[foo] ", " dev-libs/A:1/1=[foo] "),
65784 +
65785 + (("slotmove", Atom("dev-libs/A"), "0", "1"), "5",
65786 + " dev-libs/A:0/1[foo] ", " dev-libs/A:1/1[foo] "),
65787 +
65788 + (("slotmove", Atom("dev-libs/A"), "0", "1"), "5",
65789 + " dev-libs/A:0/0[foo] ", " dev-libs/A:1/1[foo] "),
65790 +
65791 + (("slotmove", Atom("dev-libs/A"), "0", "1"), "5",
65792 + " dev-libs/A:0=[foo] ", " dev-libs/A:1=[foo] "),
65793 + )
65794 + for update_cmd, eapi, input_str, output_str in cases:
65795 + result = update_dbentry(update_cmd, input_str, eapi=eapi)
65796 + self.assertEqual(result, output_str)
65797 +
65798 +
65799 + def testUpdateDbentryBlockerTestCase(self):
65800 + """
65801 + Avoid creating self-blockers for bug #367215.
65802 + """
65803 + cases = (
65804 +
65805 + (("move", Atom("dev-libs/A"), Atom("dev-libs/B")),
65806 + _pkg_str("dev-libs/B-1", eapi="1", slot="0"),
65807 + " !dev-libs/A ", " !dev-libs/A "),
65808 +
65809 + (("move", Atom("dev-libs/A"), Atom("dev-libs/B")),
65810 + _pkg_str("dev-libs/C-1", eapi="1", slot="0"),
65811 + " !dev-libs/A ", " !dev-libs/B "),
65812 +
65813 + (("move", Atom("dev-libs/A"), Atom("dev-libs/B")),
65814 + _pkg_str("dev-libs/B-1", eapi="1", slot="0"),
65815 + " !dev-libs/A:0 ", " !dev-libs/A:0 "),
65816 +
65817 + (("move", Atom("dev-libs/A"), Atom("dev-libs/B")),
65818 + _pkg_str("dev-libs/C-1", eapi="1", slot="0"),
65819 + " !dev-libs/A:0 ", " !dev-libs/B:0 "),
65820 +
65821 + (("move", Atom("dev-libs/A"), Atom("dev-libs/B")),
65822 + _pkg_str("dev-libs/C-1", eapi="1", slot="0"),
65823 + " !>=dev-libs/A-1:0 ", " !>=dev-libs/B-1:0 "),
65824 +
65825 + (("move", Atom("dev-libs/A"), Atom("dev-libs/B")),
65826 + _pkg_str("dev-libs/B-1", eapi="1", slot="0"),
65827 + " !>=dev-libs/A-1:0 ", " !>=dev-libs/A-1:0 "),
65828 +
65829 + (("move", Atom("dev-libs/A"), Atom("dev-libs/B")),
65830 + _pkg_str("dev-libs/C-1", eapi="1", slot="0"),
65831 + " !>=dev-libs/A-1 ", " !>=dev-libs/B-1 "),
65832 +
65833 + (("move", Atom("dev-libs/A"), Atom("dev-libs/B")),
65834 + _pkg_str("dev-libs/B-1", eapi="1", slot="0"),
65835 + " !>=dev-libs/A-1 ", " !>=dev-libs/A-1 "),
65836 +
65837 + )
65838 + for update_cmd, parent, input_str, output_str in cases:
65839 + result = update_dbentry(update_cmd, input_str, parent=parent)
65840 + self.assertEqual(result, output_str)
65841 +
65842 + def testUpdateDbentryDbapiTestCase(self):
65843
65844 ebuilds = {
65845
65846 @@ -96,14 +189,14 @@ class UpdateDbentryTestCase(TestCase):
65847 settings = playground.settings
65848 trees = playground.trees
65849 eroot = settings["EROOT"]
65850 - portdir = settings["PORTDIR"]
65851 + test_repo_location = settings.repositories["test_repo"].location
65852 portdb = trees[eroot]["porttree"].dbapi
65853 vardb = trees[eroot]["vartree"].dbapi
65854 bindb = trees[eroot]["bintree"].dbapi
65855 setconfig = trees[eroot]["root_config"].setconfig
65856 selected_set = setconfig.getSets()["selected"]
65857
65858 - updates_dir = os.path.join(portdir, "profiles", "updates")
65859 + updates_dir = os.path.join(test_repo_location, "profiles", "updates")
65860
65861 try:
65862 ensure_dirs(updates_dir)
65863 @@ -143,7 +236,7 @@ class UpdateDbentryTestCase(TestCase):
65864 self.assertTrue(old_pattern.search(rdepend) is None)
65865 self.assertTrue("dev-libs/M-moved" in rdepend)
65866
65867 - # EAPI 4-python N -> N.moved
65868 + # EAPI 4-python/*-progress N -> N.moved
65869 rdepend = vardb.aux_get("dev-libs/B-1", ["RDEPEND"])[0]
65870 old_pattern = re.compile(r"\bdev-libs/N(\s|$)")
65871 self.assertTrue(old_pattern.search(rdepend) is None)
65872
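The new table-driven tests call update_dbentry() directly. In normal operation the same command tuples come from a repository's profiles/updates files (lines of the form "move old/cp new/cp" and "slotmove <atom> <origslot> <newslot>", parsed by portage.update.parse_updates()). A short usage sketch along the lines of the cases above, with a made-up dependency string:

    from portage.dep import Atom
    from portage.update import update_dbentry

    move_cmd = ("move", Atom("dev-libs/A"), Atom("dev-libs/B"))
    # Rewrites every reference to the old package while preserving the
    # version operator, slot and USE dependencies of each atom:
    result = update_dbentry(move_cmd, ">=dev-libs/A-1:0[foo] dev-libs/C", eapi="5")
    # result == ">=dev-libs/B-1:0[foo] dev-libs/C"

The parent= keyword exercised in the blocker cases above only serves to suppress rewrites that would turn a blocker into a self-blocker on the parent package.
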
65873 diff --git a/pym/portage/tests/util/test_getconfig.py b/pym/portage/tests/util/test_getconfig.py
65874 index f13b753..e5fd60f 100644
65875 --- a/pym/portage/tests/util/test_getconfig.py
65876 +++ b/pym/portage/tests/util/test_getconfig.py
65877 @@ -1,13 +1,15 @@
65878 -# Copyright 2010-2012 Gentoo Foundation
65879 +# Copyright 2010-2014 Gentoo Foundation
65880 # Distributed under the terms of the GNU General Public License v2
65881
65882 import tempfile
65883
65884 from portage import os
65885 +from portage import shutil
65886 from portage import _unicode_encode
65887 from portage.const import PORTAGE_BASE_PATH
65888 from portage.tests import TestCase
65889 from portage.util import getconfig
65890 +from portage.exception import ParseError
65891
65892 class GetConfigTestCase(TestCase):
65893 """
65894 @@ -18,8 +20,8 @@ class GetConfigTestCase(TestCase):
65895 _cases = {
65896 'FETCHCOMMAND' : 'wget -t 3 -T 60 --passive-ftp -O "${DISTDIR}/${FILE}" "${URI}"',
65897 'FETCHCOMMAND_RSYNC' : 'rsync -avP "${URI}" "${DISTDIR}/${FILE}"',
65898 - 'FETCHCOMMAND_SFTP' : 'bash -c "x=\\${2#sftp://} ; host=\\${x%%/*} ; port=\\${host##*:} ; host=\\${host%:*} ; [[ \\${host} = \\${port} ]] && port=22 ; exec sftp -P \\${port} \\"\\${host}:/\\${x#*/}\\" \\"\\$1\\"" sftp "${DISTDIR}/${FILE}" "${URI}"',
65899 - 'FETCHCOMMAND_SSH' : 'bash -c "x=\\${2#ssh://} ; host=\\${x%%/*} ; port=\\${host##*:} ; host=\\${host%:*} ; [[ \\${host} = \\${port} ]] && port=22 ; exec rsync --rsh=\\"ssh -p\\${port}\\" -avP \\"\\${host}:/\\${x#*/}\\" \\"\\$1\\"" rsync "${DISTDIR}/${FILE}" "${URI}"',
65900 + 'FETCHCOMMAND_SFTP' : 'bash -c "x=\\${2#sftp://} ; host=\\${x%%/*} ; port=\\${host##*:} ; host=\\${host%:*} ; [[ \\${host} = \\${port} ]] && port=22 ; eval \\"declare -a ssh_opts=(\\${3})\\" ; exec sftp -P \\${port} \\"\\${ssh_opts[@]}\\" \\"\\${host}:/\\${x#*/}\\" \\"\\$1\\"" sftp "${DISTDIR}/${FILE}" "${URI}" "${PORTAGE_SSH_OPTS}"',
65901 + 'FETCHCOMMAND_SSH' : 'bash -c "x=\\${2#ssh://} ; host=\\${x%%/*} ; port=\\${host##*:} ; host=\\${host%:*} ; [[ \\${host} = \\${port} ]] && port=22 ; exec rsync --rsh=\\"ssh -p\\${port} \\${3}\\" -avP \\"\\${host}:/\\${x#*/}\\" \\"\\$1\\"" rsync "${DISTDIR}/${FILE}" "${URI}" "${PORTAGE_SSH_OPTS}"',
65902 'PORTAGE_ELOG_MAILSUBJECT' : '[portage] ebuild log for ${PACKAGE} on ${HOST}'
65903 }
65904
65905 @@ -31,6 +33,29 @@ class GetConfigTestCase(TestCase):
65906 for k, v in self._cases.items():
65907 self.assertEqual(d[k], v)
65908
65909 + def testGetConfigSourceLex(self):
65910 + try:
65911 + tempdir = tempfile.mkdtemp()
65912 + make_conf_file = os.path.join(tempdir, 'make.conf')
65913 + with open(make_conf_file, 'w') as f:
65914 + f.write('source "${DIR}/sourced_file"\n')
65915 + sourced_file = os.path.join(tempdir, 'sourced_file')
65916 + with open(sourced_file, 'w') as f:
65917 + f.write('PASSES_SOURCING_TEST="True"\n')
65918 +
65919 + d = getconfig(make_conf_file, allow_sourcing=True, expand={"DIR": tempdir})
65920 +
65921 + # PASSES_SOURCING_TEST should exist in getconfig result.
65922 + self.assertTrue(d is not None)
65923 + self.assertEqual("True", d['PASSES_SOURCING_TEST'])
65924 +
65925 + # With allow_sourcing=True and empty expand map, this should
65926 + # throw a FileNotFound exception.
65927 + self.assertRaisesMsg("An empty expand map should throw an exception",
65928 + ParseError, getconfig, make_conf_file, allow_sourcing=True, expand={})
65929 + finally:
65930 + shutil.rmtree(tempdir)
65931 +
65932 def testGetConfigProfileEnv(self):
65933 # Test the mode which is used to parse /etc/env.d and /etc/profile.env.
65934
65935
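For reference, the getconfig() call exercised above is the same helper that loads make.conf-style files elsewhere in portage. A minimal usage sketch, assuming a readable file at the illustrative path below:

    from portage.util import getconfig

    # Returns a dict of VAR="value" assignments (None when the file cannot
    # be read).  With allow_sourcing=True, `source` lines are followed, and
    # variables inside the sourced path are expanded from the mapping
    # passed via expand=..., which is what the new test checks.
    d = getconfig("/etc/portage/make.conf", tolerant=True,
        allow_sourcing=True, expand={"EPREFIX": ""})
    if d is not None:
        features = d.get("FEATURES", "")
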
65936 diff --git a/pym/portage/tests/util/test_stackDictList.py b/pym/portage/tests/util/test_stackDictList.py
65937 index 678001c..25a723c 100644
65938 --- a/pym/portage/tests/util/test_stackDictList.py
65939 +++ b/pym/portage/tests/util/test_stackDictList.py
65940 @@ -8,10 +8,12 @@ class StackDictListTestCase(TestCase):
65941
65942 def testStackDictList(self):
65943 from portage.util import stack_dictlist
65944 -
65945 - tests = [ ({'a':'b'},{'x':'y'},False,{'a':['b'],'x':['y']}) ]
65946 - tests.append(( {'KEYWORDS':['alpha','x86']},{'KEYWORDS':['-*']},True,{} ))
65947 - tests.append(( {'KEYWORDS':['alpha','x86']},{'KEYWORDS':['-x86']},True,{'KEYWORDS':['alpha']} ))
65948 +
65949 + tests = [
65950 + ({'a': 'b'}, {'x': 'y'}, False, {'a': ['b'], 'x': ['y']}),
65951 + ({'KEYWORDS': ['alpha', 'x86']}, {'KEYWORDS': ['-*']}, True, {}),
65952 + ({'KEYWORDS': ['alpha', 'x86']}, {'KEYWORDS': ['-x86']}, True, {'KEYWORDS': ['alpha']}),
65953 + ]
65954 for test in tests:
65955 self.assertEqual(
65956 - stack_dictlist([test[0],test[1]],incremental=test[2]), test[3] )
65957 + stack_dictlist([test[0], test[1]], incremental=test[2]), test[3])
65958
65959 diff --git a/pym/portage/tests/util/test_stackDicts.py b/pym/portage/tests/util/test_stackDicts.py
65960 index 0d2cadd..0c1dcdb 100644
65961 --- a/pym/portage/tests/util/test_stackDicts.py
65962 +++ b/pym/portage/tests/util/test_stackDicts.py
65963 @@ -7,30 +7,27 @@ from portage.util import stack_dicts
65964
65965
65966 class StackDictsTestCase(TestCase):
65967 -
65968 - def testStackDictsPass(self):
65969 -
65970 - tests = [ ( [ { "a":"b" }, { "b":"c" } ], { "a":"b", "b":"c" },
65971 - False, [], False ),
65972 - ( [ { "a":"b" }, { "a":"c" } ], { "a":"b c" },
65973 - True, [], False ),
65974 - ( [ { "a":"b" }, { "a":"c" } ], { "a":"b c" },
65975 - False, ["a"], False ),
65976 - ( [ { "a":"b" }, None ], { "a":"b" },
65977 - False, [], True ),
65978 - ( [ None ], {}, False, [], False ),
65979 - ( [ None, {}], {}, False, [], True ) ]
65980
65981 + def testStackDictsPass(self):
65982
65983 + tests = [
65984 + ([{'a': 'b'}, {'b': 'c'}], {'a': 'b', 'b': 'c'}, False, [], False),
65985 + ([{'a': 'b'}, {'a': 'c'}], {'a': 'b c'}, True, [], False),
65986 + ([{'a': 'b'}, {'a': 'c'}], {'a': 'b c'}, False, ['a'], False),
65987 + ([{'a': 'b'}, None], {'a': 'b'}, False, [], True),
65988 + ([None], {}, False, [], False),
65989 + ([None, {}], {}, False, [], True)
65990 + ]
65991 for test in tests:
65992 - result = stack_dicts( test[0], test[2], test[3], test[4] )
65993 - self.assertEqual( result, test[1] )
65994 -
65995 + result = stack_dicts(test[0], test[2], test[3], test[4])
65996 + self.assertEqual(result, test[1])
65997 +
65998 def testStackDictsFail(self):
65999 -
66000 - tests = [ ( [ None, {} ], None, False, [], True ),
66001 - ( [ { "a":"b"}, {"a":"c" } ], { "a":"b c" },
66002 - False, [], False ) ]
66003 +
66004 + tests = [
66005 + ([None, {}], None, False, [], True),
66006 + ([{'a': 'b'}, {'a': 'c'}], {'a': 'b c'}, False, [], False)
66007 + ]
66008 for test in tests:
66009 - result = stack_dicts( test[0], test[2], test[3], test[4] )
66010 - self.assertNotEqual( result , test[1] )
66011 + result = stack_dicts(test[0], test[2], test[3], test[4])
66012 + self.assertNotEqual(result, test[1])
66013
66014 diff --git a/pym/portage/tests/util/test_stackLists.py b/pym/portage/tests/util/test_stackLists.py
66015 index e524772..3ba69ec 100644
66016 --- a/pym/portage/tests/util/test_stackLists.py
66017 +++ b/pym/portage/tests/util/test_stackLists.py
66018 @@ -6,14 +6,16 @@ from portage.tests import TestCase
66019 from portage.util import stack_lists
66020
66021 class StackListsTestCase(TestCase):
66022 -
66023 +
66024 def testStackLists(self):
66025 -
66026 - tests = [ ( [ ['a','b','c'], ['d','e','f'] ], ['a','c','b','e','d','f'], False ),
66027 - ( [ ['a','x'], ['b','x'] ], ['a','x','b'], False ),
66028 - ( [ ['a','b','c'], ['-*'] ], [], True ),
66029 - ( [ ['a'], ['-a'] ], [], True ) ]
66030 +
66031 + tests = [
66032 + ([['a', 'b', 'c'], ['d', 'e', 'f']], ['a', 'c', 'b', 'e', 'd', 'f'], False),
66033 + ([['a', 'x'], ['b', 'x']], ['a', 'x', 'b'], False),
66034 + ([['a', 'b', 'c'], ['-*']], [], True),
66035 + ([['a'], ['-a']], [], True)
66036 + ]
66037
66038 for test in tests:
66039 - result = stack_lists( test[0], test[2] )
66040 - self.assertEqual( set(result) , set(test[1]) )
66041 + result = stack_lists(test[0], test[2])
66042 + self.assertEqual(set(result), set(test[1]))
66043
66044 diff --git a/pym/portage/tests/util/test_uniqueArray.py b/pym/portage/tests/util/test_uniqueArray.py
66045 index e23428c..aae88cc 100644
66046 --- a/pym/portage/tests/util/test_uniqueArray.py
66047 +++ b/pym/portage/tests/util/test_uniqueArray.py
66048 @@ -7,18 +7,20 @@ from portage.tests import TestCase
66049 from portage.util import unique_array
66050
66051 class UniqueArrayTestCase(TestCase):
66052 -
66053 +
66054 def testUniqueArrayPass(self):
66055 """
66056 test portage.util.uniqueArray()
66057 """
66058
66059 - tests = [ ( ["a","a","a",os,os,[],[],[]], ['a',os,[]] ),
66060 - ( [1,1,1,2,3,4,4] , [1,2,3,4]) ]
66061 + tests = [
66062 + (['a', 'a', 'a', os, os, [], [], []], ['a', os, []]),
66063 + ([1, 1, 1, 2, 3, 4, 4], [1, 2, 3, 4])
66064 + ]
66065
66066 for test in tests:
66067 - result = unique_array( test[0] )
66068 + result = unique_array(test[0])
66069 for item in test[1]:
66070 number = result.count(item)
66071 - self.assertFalse( number != 1, msg=("%s contains %s of %s, "
66072 - "should be only 1") % (result, number, item) )
66073 + self.assertFalse(number != 1, msg=("%s contains %s of %s, "
66074 + "should be only 1") % (result, number, item))
66075
66076 diff --git a/pym/portage/tests/util/test_varExpand.py b/pym/portage/tests/util/test_varExpand.py
66077 index 7b528d6..498b50e 100644
66078 --- a/pym/portage/tests/util/test_varExpand.py
66079 +++ b/pym/portage/tests/util/test_varExpand.py
66080 @@ -6,20 +6,20 @@ from portage.tests import TestCase
66081 from portage.util import varexpand
66082
66083 class VarExpandTestCase(TestCase):
66084 -
66085 +
66086 def testVarExpandPass(self):
66087
66088 - varDict = { "a":"5", "b":"7", "c":"-5" }
66089 + varDict = {"a": "5", "b": "7", "c": "-5"}
66090 for key in varDict:
66091 - result = varexpand( "$%s" % key, varDict )
66092 -
66093 - self.assertFalse( result != varDict[key],
66094 - msg="Got %s != %s, from varexpand( %s, %s )" % \
66095 - ( result, varDict[key], "$%s" % key, varDict ) )
66096 - result = varexpand( "${%s}" % key, varDict )
66097 - self.assertFalse( result != varDict[key],
66098 - msg="Got %s != %s, from varexpand( %s, %s )" % \
66099 - ( result, varDict[key], "${%s}" % key, varDict ) )
66100 + result = varexpand("$%s" % key, varDict)
66101 +
66102 + self.assertFalse(result != varDict[key],
66103 + msg="Got %s != %s, from varexpand(%s, %s)" %
66104 + (result, varDict[key], "$%s" % key, varDict))
66105 + result = varexpand("${%s}" % key, varDict)
66106 + self.assertFalse(result != varDict[key],
66107 + msg="Got %s != %s, from varexpand(%s, %s)" %
66108 + (result, varDict[key], "${%s}" % key, varDict))
66109
66110 def testVarExpandBackslashes(self):
66111 """
66112 @@ -49,44 +49,44 @@ class VarExpandTestCase(TestCase):
66113 ("\\'", "\\'"),
66114 ]
66115 for test in tests:
66116 - result = varexpand( test[0], varDict )
66117 - self.assertFalse( result != test[1],
66118 - msg="Got %s != %s from varexpand( %s, %s )" \
66119 - % ( result, test[1], test[0], varDict ) )
66120 + result = varexpand(test[0], varDict)
66121 + self.assertFalse(result != test[1],
66122 + msg="Got %s != %s from varexpand(%s, %s)"
66123 + % (result, test[1], test[0], varDict))
66124
66125 def testVarExpandDoubleQuotes(self):
66126 -
66127 - varDict = { "a":"5" }
66128 - tests = [ ("\"${a}\"", "\"5\"") ]
66129 +
66130 + varDict = {"a": "5"}
66131 + tests = [("\"${a}\"", "\"5\"")]
66132 for test in tests:
66133 - result = varexpand( test[0], varDict )
66134 - self.assertFalse( result != test[1],
66135 - msg="Got %s != %s from varexpand( %s, %s )" \
66136 - % ( result, test[1], test[0], varDict ) )
66137 + result = varexpand(test[0], varDict)
66138 + self.assertFalse(result != test[1],
66139 + msg="Got %s != %s from varexpand(%s, %s)"
66140 + % (result, test[1], test[0], varDict))
66141
66142 def testVarExpandSingleQuotes(self):
66143 -
66144 - varDict = { "a":"5" }
66145 - tests = [ ("\'${a}\'", "\'${a}\'") ]
66146 +
66147 + varDict = {"a": "5"}
66148 + tests = [("\'${a}\'", "\'${a}\'")]
66149 for test in tests:
66150 - result = varexpand( test[0], varDict )
66151 - self.assertFalse( result != test[1],
66152 - msg="Got %s != %s from varexpand( %s, %s )" \
66153 - % ( result, test[1], test[0], varDict ) )
66154 + result = varexpand(test[0], varDict)
66155 + self.assertFalse(result != test[1],
66156 + msg="Got %s != %s from varexpand(%s, %s)"
66157 + % (result, test[1], test[0], varDict))
66158
66159 def testVarExpandFail(self):
66160
66161 - varDict = { "a":"5", "b":"7", "c":"15" }
66162 + varDict = {"a": "5", "b": "7", "c": "15"}
66163
66164 - testVars = [ "fail" ]
66165 + testVars = ["fail"]
66166
66167 for var in testVars:
66168 - result = varexpand( "$%s" % var, varDict )
66169 - self.assertFalse( len(result),
66170 - msg="Got %s == %s, from varexpand( %s, %s )" \
66171 - % ( result, var, "$%s" % var, varDict ) )
66172 -
66173 - result = varexpand( "${%s}" % var, varDict )
66174 - self.assertFalse( len(result),
66175 - msg="Got %s == %s, from varexpand( %s, %s )" \
66176 - % ( result, var, "${%s}" % var, varDict ) )
66177 + result = varexpand("$%s" % var, varDict)
66178 + self.assertFalse(len(result),
66179 + msg="Got %s == %s, from varexpand(%s, %s)"
66180 + % (result, var, "$%s" % var, varDict))
66181 +
66182 + result = varexpand("${%s}" % var, varDict)
66183 + self.assertFalse(len(result),
66184 + msg="Got %s == %s, from varexpand(%s, %s)"
66185 + % (result, var, "${%s}" % var, varDict))
66186
66187 diff --git a/pym/portage/tests/util/test_whirlpool.py b/pym/portage/tests/util/test_whirlpool.py
66188 index dd0de89..fbe7cae 100644
66189 --- a/pym/portage/tests/util/test_whirlpool.py
66190 +++ b/pym/portage/tests/util/test_whirlpool.py
66191 @@ -1,4 +1,4 @@
66192 -# Copyright 2011 Gentoo Foundation
66193 +# Copyright 2011-2014 Gentoo Foundation
66194 # Distributed under the terms of the GNU General Public License v2
66195
66196 import subprocess
66197 @@ -11,6 +11,6 @@ from portage.tests import TestCase
66198 class WhirlpoolTestCase(TestCase):
66199 def testBundledWhirlpool(self):
66200 # execute the tests bundled with the whirlpool module
66201 - retval = subprocess.call([portage._python_interpreter, "-Wd",
66202 + retval = subprocess.call([portage._python_interpreter, "-b", "-Wd",
66203 os.path.join(PORTAGE_PYM_PATH, "portage/util/whirlpool.py")])
66204 self.assertEqual(retval, os.EX_OK)
66205
66206 diff --git a/pym/portage/tests/versions/test_cpv_sort_key.py b/pym/portage/tests/versions/test_cpv_sort_key.py
66207 index a223d78..eeb0eae 100644
66208 --- a/pym/portage/tests/versions/test_cpv_sort_key.py
66209 +++ b/pym/portage/tests/versions/test_cpv_sort_key.py
66210 @@ -8,9 +8,10 @@ class CpvSortKeyTestCase(TestCase):
66211
66212 def testCpvSortKey(self):
66213
66214 - tests = [ (("a/b-2_alpha", "a", "b", "a/b-2", "a/a-1", "a/b-1"),
66215 - ( "a", "a/a-1", "a/b-1", "a/b-2_alpha", "a/b-2", "b")),
66216 + tests = [
66217 + (("a/b-2_alpha", "a", "b", "a/b-2", "a/a-1", "a/b-1"),
66218 + ("a", "a/a-1", "a/b-1", "a/b-2_alpha", "a/b-2", "b")),
66219 ]
66220
66221 for test in tests:
66222 - self.assertEqual( tuple(sorted(test[0], key=cpv_sort_key())), test[1] )
66223 + self.assertEqual(tuple(sorted(test[0], key=cpv_sort_key())), test[1])
66224
66225 diff --git a/pym/portage/tests/versions/test_vercmp.py b/pym/portage/tests/versions/test_vercmp.py
66226 index aa7969c..78fe7ed 100644
66227 --- a/pym/portage/tests/versions/test_vercmp.py
66228 +++ b/pym/portage/tests/versions/test_vercmp.py
66229 @@ -8,10 +8,11 @@ from portage.versions import vercmp
66230 class VerCmpTestCase(TestCase):
66231 """ A simple testCase for portage.versions.vercmp()
66232 """
66233 -
66234 +
66235 def testVerCmpGreater(self):
66236 -
66237 - tests = [ ( "6.0", "5.0"), ("5.0","5"),
66238 +
66239 + tests = [
66240 + ("6.0", "5.0"), ("5.0", "5"),
66241 ("1.0-r1", "1.0-r0"),
66242 ("1.0-r1", "1.0"),
66243 ("cvs.9999", "9999"),
66244 @@ -24,14 +25,15 @@ class VerCmpTestCase(TestCase):
66245 ("12.2.5", "12.2b"),
66246 ]
66247 for test in tests:
66248 - self.assertFalse( vercmp( test[0], test[1] ) <= 0, msg="%s < %s? Wrong!" % (test[0],test[1]) )
66249 + self.assertFalse(vercmp(test[0], test[1]) <= 0, msg="%s < %s? Wrong!" % (test[0], test[1]))
66250
66251 def testVerCmpLess(self):
66252 """
66253 pre < alpha < beta < rc < p -> test each of these, they are inductive (or should be..)
66254 """
66255 - tests = [ ( "4.0", "5.0"), ("5", "5.0"), ("1.0_pre2","1.0_p2"),
66256 - ("1.0_alpha2", "1.0_p2"),("1.0_alpha1", "1.0_beta1"),("1.0_beta3","1.0_rc3"),
66257 + tests = [
66258 + ("4.0", "5.0"), ("5", "5.0"), ("1.0_pre2", "1.0_p2"),
66259 + ("1.0_alpha2", "1.0_p2"), ("1.0_alpha1", "1.0_beta1"), ("1.0_beta3", "1.0_rc3"),
66260 ("1.001000000000000000001", "1.001000000000000000002"),
66261 ("1.00100000000", "1.0010000000000000001"),
66262 ("9999", "cvs.9999"),
66263 @@ -47,23 +49,25 @@ class VerCmpTestCase(TestCase):
66264 ("12.2b", "12.2.5"),
66265 ]
66266 for test in tests:
66267 - self.assertFalse( vercmp( test[0], test[1]) >= 0, msg="%s > %s? Wrong!" % (test[0],test[1]))
66268 -
66269 -
66270 + self.assertFalse(vercmp(test[0], test[1]) >= 0, msg="%s > %s? Wrong!" % (test[0], test[1]))
66271 +
66272 def testVerCmpEqual(self):
66273 -
66274 - tests = [ ("4.0", "4.0"),
66275 +
66276 + tests = [
66277 + ("4.0", "4.0"),
66278 ("1.0", "1.0"),
66279 ("1.0-r0", "1.0"),
66280 ("1.0", "1.0-r0"),
66281 ("1.0-r0", "1.0-r0"),
66282 - ("1.0-r1", "1.0-r1")]
66283 + ("1.0-r1", "1.0-r1")
66284 + ]
66285 for test in tests:
66286 - self.assertFalse( vercmp( test[0], test[1]) != 0, msg="%s != %s? Wrong!" % (test[0],test[1]))
66287 -
66288 + self.assertFalse(vercmp(test[0], test[1]) != 0, msg="%s != %s? Wrong!" % (test[0], test[1]))
66289 +
66290 def testVerNotEqual(self):
66291 -
66292 - tests = [ ("1","2"),("1.0_alpha","1.0_pre"),("1.0_beta","1.0_alpha"),
66293 +
66294 + tests = [
66295 + ("1", "2"), ("1.0_alpha", "1.0_pre"), ("1.0_beta", "1.0_alpha"),
66296 ("0", "0.0"),
66297 ("cvs.9999", "9999"),
66298 ("1.0-r0", "1.0-r1"),
66299 @@ -77,4 +81,4 @@ class VerCmpTestCase(TestCase):
66300 ("12.2b", "12.2"),
66301 ]
66302 for test in tests:
66303 - self.assertFalse( vercmp( test[0], test[1]) == 0, msg="%s == %s? Wrong!" % (test[0],test[1]))
66304 + self.assertFalse(vercmp(test[0], test[1]) == 0, msg="%s == %s? Wrong!" % (test[0], test[1]))
66305
66306 diff --git a/pym/portage/update.py b/pym/portage/update.py
66307 index 121e957..df4e11b 100644
66308 --- a/pym/portage/update.py
66309 +++ b/pym/portage/update.py
66310 @@ -1,11 +1,14 @@
66311 -# Copyright 1999-2011 Gentoo Foundation
66312 +# Copyright 1999-2014 Gentoo Foundation
66313 # Distributed under the terms of the GNU General Public License v2
66314
66315 +from __future__ import unicode_literals
66316 +
66317 import errno
66318 import io
66319 import re
66320 import stat
66321 import sys
66322 +import warnings
66323
66324 from portage import os
66325 from portage import _encodings
66326 @@ -13,21 +16,19 @@ from portage import _unicode_decode
66327 from portage import _unicode_encode
66328 import portage
66329 portage.proxy.lazyimport.lazyimport(globals(),
66330 - 'portage.dep:Atom,dep_getkey,isvalidatom,' + \
66331 - 'remove_slot',
66332 + 'portage.dep:Atom,dep_getkey,isvalidatom,match_from_list',
66333 'portage.util:ConfigProtect,new_protect_filename,' + \
66334 'normalize_path,write_atomic,writemsg',
66335 - 'portage.util.listdir:_ignorecvs_dirs',
66336 - 'portage.versions:catsplit,ververify'
66337 + 'portage.versions:_get_slot_re',
66338 )
66339
66340 -from portage.const import USER_CONFIG_PATH
66341 -from portage.dep import _get_slot_re
66342 +from portage.const import USER_CONFIG_PATH, VCS_DIRS
66343 from portage.eapi import _get_eapi_attrs
66344 from portage.exception import DirectoryNotFound, InvalidAtom, PortageException
66345 from portage.localization import _
66346
66347 if sys.hexversion >= 0x3000000:
66348 + # pylint: disable=W0622
66349 long = int
66350 _unicode = str
66351 else:
66352 @@ -35,7 +36,10 @@ else:
66353
66354 ignored_dbentries = ("CONTENTS", "environment.bz2")
66355
66356 -def update_dbentry(update_cmd, mycontent, eapi=None):
66357 +def update_dbentry(update_cmd, mycontent, eapi=None, parent=None):
66358 +
66359 + if parent is not None:
66360 + eapi = parent.eapi
66361
66362 if update_cmd[0] == "move":
66363 old_value = _unicode(update_cmd[1])
66364 @@ -44,28 +48,76 @@ def update_dbentry(update_cmd, mycontent, eapi=None):
66365 # Use isvalidatom() to check if this move is valid for the
66366 # EAPI (characters allowed in package names may vary).
66367 if old_value in mycontent and isvalidatom(new_value, eapi=eapi):
66368 - old_value = re.escape(old_value);
66369 - mycontent = re.sub(old_value+"(:|$|\\s)", new_value+"\\1", mycontent)
66370 - def myreplace(matchobj):
66371 - # Strip slot and * operator if necessary
66372 - # so that ververify works.
66373 - ver = remove_slot(matchobj.group(2))
66374 - ver = ver.rstrip("*")
66375 - if ververify(ver):
66376 - return "%s-%s" % (new_value, matchobj.group(2))
66377 - else:
66378 - return "".join(matchobj.groups())
66379 - mycontent = re.sub("(%s-)(\\S*)" % old_value, myreplace, mycontent)
66380 + # this split preserves existing whitespace
66381 + split_content = re.split(r'(\s+)', mycontent)
66382 + modified = False
66383 + for i, token in enumerate(split_content):
66384 + if old_value not in token:
66385 + continue
66386 + try:
66387 + atom = Atom(token, eapi=eapi)
66388 + except InvalidAtom:
66389 + continue
66390 + if atom.cp != old_value:
66391 + continue
66392 +
66393 + new_atom = Atom(token.replace(old_value, new_value, 1),
66394 + eapi=eapi)
66395 +
66396 + # Avoid creating self-blockers for bug #367215.
66397 + if new_atom.blocker and parent is not None and \
66398 + parent.cp == new_atom.cp and \
66399 + match_from_list(new_atom, [parent]):
66400 + continue
66401 +
66402 + split_content[i] = _unicode(new_atom)
66403 + modified = True
66404 +
66405 + if modified:
66406 + mycontent = "".join(split_content)
66407 +
66408 elif update_cmd[0] == "slotmove" and update_cmd[1].operator is None:
66409 - pkg, origslot, newslot = update_cmd[1:]
66410 - old_value = "%s:%s" % (pkg, origslot)
66411 - if old_value in mycontent:
66412 - old_value = re.escape(old_value)
66413 - new_value = "%s:%s" % (pkg, newslot)
66414 - mycontent = re.sub(old_value+"($|\\s)", new_value+"\\1", mycontent)
66415 + orig_atom, origslot, newslot = update_cmd[1:]
66416 + orig_cp = orig_atom.cp
66417 +
66418 + # We don't support versioned slotmove atoms here, since it can be
66419 + # difficult to determine if the version constraints really match
66420 + # the atoms that we're trying to update.
66421 + if orig_atom.version is None and orig_cp in mycontent:
66422 + # this split preserves existing whitespace
66423 + split_content = re.split(r'(\s+)', mycontent)
66424 + modified = False
66425 + for i, token in enumerate(split_content):
66426 + if orig_cp not in token:
66427 + continue
66428 + try:
66429 + atom = Atom(token, eapi=eapi)
66430 + except InvalidAtom:
66431 + continue
66432 + if atom.cp != orig_cp:
66433 + continue
66434 + if atom.slot is None or atom.slot != origslot:
66435 + continue
66436 +
66437 + slot_part = newslot
66438 + if atom.sub_slot is not None:
66439 + if atom.sub_slot == origslot:
66440 + sub_slot = newslot
66441 + else:
66442 + sub_slot = atom.sub_slot
66443 + slot_part += "/" + sub_slot
66444 + if atom.slot_operator is not None:
66445 + slot_part += atom.slot_operator
66446 +
66447 + split_content[i] = atom.with_slot(slot_part)
66448 + modified = True
66449 +
66450 + if modified:
66451 + mycontent = "".join(split_content)
66452 +
66453 return mycontent
66454
66455 -def update_dbentries(update_iter, mydata, eapi=None):
66456 +def update_dbentries(update_iter, mydata, eapi=None, parent=None):
66457 """Performs update commands and returns a
66458 dict containing only the updated items."""
66459 updated_items = {}
66460 @@ -79,7 +131,8 @@ def update_dbentries(update_iter, mydata, eapi=None):
66461 is_encoded = mycontent is not orig_content
66462 orig_content = mycontent
66463 for update_cmd in update_iter:
66464 - mycontent = update_dbentry(update_cmd, mycontent, eapi=eapi)
66465 + mycontent = update_dbentry(update_cmd, mycontent,
66466 + eapi=eapi, parent=parent)
66467 if mycontent != orig_content:
66468 if is_encoded:
66469 mycontent = _unicode_encode(mycontent,
66470 @@ -88,10 +141,14 @@ def update_dbentries(update_iter, mydata, eapi=None):
66471 updated_items[k] = mycontent
66472 return updated_items
66473
66474 -def fixdbentries(update_iter, dbdir, eapi=None):
66475 +def fixdbentries(update_iter, dbdir, eapi=None, parent=None):
66476 """Performs update commands which result in search and replace operations
66477 for each of the files in dbdir (excluding CONTENTS and environment.bz2).
66478 Returns True when actual modifications are necessary and False otherwise."""
66479 +
66480 + warnings.warn("portage.update.fixdbentries() is deprecated",
66481 + DeprecationWarning, stacklevel=2)
66482 +
66483 mydata = {}
66484 for myfile in [f for f in os.listdir(dbdir) if f not in ignored_dbentries]:
66485 file_path = os.path.join(dbdir, myfile)
66486 @@ -100,7 +157,8 @@ def fixdbentries(update_iter, dbdir, eapi=None):
66487 mode='r', encoding=_encodings['repo.content'],
66488 errors='replace') as f:
66489 mydata[myfile] = f.read()
66490 - updated_items = update_dbentries(update_iter, mydata, eapi=eapi)
66491 + updated_items = update_dbentries(update_iter, mydata,
66492 + eapi=eapi, parent=parent)
66493 for myfile, mycontent in updated_items.items():
66494 file_path = os.path.join(dbdir, myfile)
66495 write_atomic(file_path, mycontent, encoding=_encodings['repo.content'])
66496 @@ -225,7 +283,8 @@ def parse_updates(mycontent):
66497 return myupd, errors
66498
66499 def update_config_files(config_root, protect, protect_mask, update_iter, match_callback = None):
66500 - """Perform global updates on /etc/portage/package.*.
66501 + """Perform global updates on /etc/portage/package.*, /etc/portage/profile/package.*,
66502 + /etc/portage/profile/packages and /etc/portage/sets.
66503 config_root - location of files to update
66504 protect - list of paths from CONFIG_PROTECT
66505 protect_mask - list of paths from CONFIG_PROTECT_MASK
66506 @@ -248,9 +307,15 @@ def update_config_files(config_root, protect, protect_mask, update_iter, match_c
66507 "package.accept_keywords", "package.env",
66508 "package.keywords", "package.license",
66509 "package.mask", "package.properties",
66510 - "package.unmask", "package.use"
66511 + "package.unmask", "package.use", "sets"
66512 ]
66513 - myxfiles += [os.path.join("profile", x) for x in myxfiles]
66514 + myxfiles += [os.path.join("profile", x) for x in (
66515 + "packages", "package.accept_keywords",
66516 + "package.keywords", "package.mask",
66517 + "package.unmask", "package.use",
66518 + "package.use.force", "package.use.mask",
66519 + "package.use.stable.force", "package.use.stable.mask"
66520 + )]
66521 abs_user_config = os.path.join(config_root, USER_CONFIG_PATH)
66522 recursivefiles = []
66523 for x in myxfiles:
66524 @@ -269,7 +334,7 @@ def update_config_files(config_root, protect, protect_mask, update_iter, match_c
66525 except UnicodeDecodeError:
66526 dirs.remove(y_enc)
66527 continue
66528 - if y.startswith(".") or y in _ignorecvs_dirs:
66529 + if y.startswith(".") or y in VCS_DIRS:
66530 dirs.remove(y_enc)
66531 for y in files:
66532 try:
66533 @@ -299,7 +364,6 @@ def update_config_files(config_root, protect, protect_mask, update_iter, match_c
66534 if f is not None:
66535 f.close()
66536
66537 - # update /etc/portage/packages.*
66538 ignore_line_re = re.compile(r'^#|^\s*$')
66539 if repo_dict is None:
66540 update_items = [(None, update_iter)]
66541 @@ -319,6 +383,9 @@ def update_config_files(config_root, protect, protect_mask, update_iter, match_c
66542 if atom[:1] == "-":
66543 # package.mask supports incrementals
66544 atom = atom[1:]
66545 + if atom[:1] == "*":
66546 + # The packages file supports "*"-prefixed atoms as an indication of system packages.
66547 + atom = atom[1:]
66548 if not isvalidatom(atom):
66549 continue
66550 new_atom = update_dbentry(update_cmd, atom)
66551
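The rewritten update_dbentry() above replaces the old regex substitutions with per-token Atom parsing over a whitespace-preserving split, so malformed or unrelated tokens are simply left alone. The core trick, shown standalone with plain str.replace() standing in for the Atom handling:

    import re

    dep_string = "  >=dev-libs/A-1:0[foo]\n\tdev-libs/C  "
    # A capturing group makes re.split() keep the separators, so joining
    # the pieces reproduces the original string exactly.
    tokens = re.split(r'(\s+)', dep_string)
    assert "".join(tokens) == dep_string

    # Rewrite only the non-whitespace tokens; whitespace is untouched.
    for i, tok in enumerate(tokens):
        if tok and not tok.isspace():
            tokens[i] = tok.replace("dev-libs/A", "dev-libs/B", 1)
    result = "".join(tokens)
    # result == "  >=dev-libs/B-1:0[foo]\n\tdev-libs/C  "
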
66552 diff --git a/pym/portage/util/ExtractKernelVersion.py b/pym/portage/util/ExtractKernelVersion.py
66553 index 69bd58a..af4a4fe 100644
66554 --- a/pym/portage/util/ExtractKernelVersion.py
66555 +++ b/pym/portage/util/ExtractKernelVersion.py
66556 @@ -61,18 +61,18 @@ def ExtractKernelVersion(base_dir):
66557
66558 # Grab a list of files named localversion* and sort them
66559 localversions = os.listdir(base_dir)
66560 - for x in range(len(localversions)-1,-1,-1):
66561 + for x in range(len(localversions) - 1, -1, -1):
66562 if localversions[x][:12] != "localversion":
66563 del localversions[x]
66564 localversions.sort()
66565
66566 # Append the contents of each to the version string, stripping ALL whitespace
66567 for lv in localversions:
66568 - version += "".join( " ".join( grabfile( base_dir+ "/" + lv ) ).split() )
66569 + version += "".join(" ".join(grabfile(base_dir + "/" + lv)).split())
66570
66571 # Check the .config for a CONFIG_LOCALVERSION and append that too, also stripping whitespace
66572 kernelconfig = getconfig(base_dir+"/.config")
66573 if kernelconfig and "CONFIG_LOCALVERSION" in kernelconfig:
66574 version += "".join(kernelconfig["CONFIG_LOCALVERSION"].split())
66575
66576 - return (version,None)
66577 + return (version, None)
66578
66579 diff --git a/pym/portage/util/SlotObject.py b/pym/portage/util/SlotObject.py
66580 index a59dfc1..4bb6822 100644
66581 --- a/pym/portage/util/SlotObject.py
66582 +++ b/pym/portage/util/SlotObject.py
66583 @@ -48,4 +48,3 @@ class SlotObject(object):
66584 setattr(obj, myattr, getattr(self, myattr))
66585
66586 return obj
66587 -
66588
66589 diff --git a/pym/portage/util/_ShelveUnicodeWrapper.py b/pym/portage/util/_ShelveUnicodeWrapper.py
66590 new file mode 100644
66591 index 0000000..adbd519
66592 --- /dev/null
66593 +++ b/pym/portage/util/_ShelveUnicodeWrapper.py
66594 @@ -0,0 +1,45 @@
66595 +# Copyright 2013 Gentoo Foundation
66596 +# Distributed under the terms of the GNU General Public License v2
66597 +
66598 +class ShelveUnicodeWrapper(object):
66599 + """
66600 + Convert unicode to str and back again, since the python-2.x shelve
66601 + module doesn't support unicode.
66602 + """
66603 + def __init__(self, shelve_instance):
66604 + self._shelve = shelve_instance
66605 +
66606 + def _encode(self, s):
66607 + if isinstance(s, unicode):
66608 + s = s.encode('utf_8')
66609 + return s
66610 +
66611 + def __len__(self):
66612 + return len(self._shelve)
66613 +
66614 + def __contains__(self, k):
66615 + return self._encode(k) in self._shelve
66616 +
66617 + def __iter__(self):
66618 + return self._shelve.__iter__()
66619 +
66620 + def items(self):
66621 + return self._shelve.iteritems()
66622 +
66623 + def __setitem__(self, k, v):
66624 + self._shelve[self._encode(k)] = self._encode(v)
66625 +
66626 + def __getitem__(self, k):
66627 + return self._shelve[self._encode(k)]
66628 +
66629 + def __delitem__(self, k):
66630 + del self._shelve[self._encode(k)]
66631 +
66632 + def get(self, k, *args):
66633 + return self._shelve.get(self._encode(k), *args)
66634 +
66635 + def close(self):
66636 + self._shelve.close()
66637 +
66638 + def clear(self):
66639 + self._shelve.clear()
66640
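ShelveUnicodeWrapper is a Python-2-only shim (it relies on the unicode builtin and iteritems()). A brief usage sketch under Python 2, with an illustrative temporary path:

    import shelve

    from portage.util._ShelveUnicodeWrapper import ShelveUnicodeWrapper

    # Keys and values are encoded to utf_8 byte strings before they reach
    # the underlying shelve, which cannot handle unicode on Python 2.
    db = ShelveUnicodeWrapper(shelve.open('/tmp/example.shelf'))
    db[u'\u2018key\u2019'] = u'value'
    stored = db.get(u'\u2018key\u2019')  # returns the encoded str 'value'
    db.close()
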
66641 diff --git a/pym/portage/util/__init__.py b/pym/portage/util/__init__.py
66642 index 4645be5..614b2b3 100644
66643 --- a/pym/portage/util/__init__.py
66644 +++ b/pym/portage/util/__init__.py
66645 @@ -1,6 +1,8 @@
66646 -# Copyright 2004-2012 Gentoo Foundation
66647 +# Copyright 2004-2014 Gentoo Foundation
66648 # Distributed under the terms of the GNU General Public License v2
66649
66650 +from __future__ import unicode_literals
66651 +
66652 __all__ = ['apply_permissions', 'apply_recursive_permissions',
66653 'apply_secpass_permissions', 'apply_stat_permissions', 'atomic_ofstream',
66654 'cmp_sort_key', 'ConfigProtect', 'dump_traceback', 'ensure_dirs',
66655 @@ -31,21 +33,26 @@ import portage
66656 portage.proxy.lazyimport.lazyimport(globals(),
66657 'pickle',
66658 'portage.dep:Atom',
66659 - 'portage.util.listdir:_ignorecvs_dirs'
66660 + 'subprocess',
66661 )
66662
66663 from portage import os
66664 -from portage import subprocess_getstatusoutput
66665 from portage import _encodings
66666 from portage import _os_merge
66667 from portage import _unicode_encode
66668 from portage import _unicode_decode
66669 +from portage.const import VCS_DIRS
66670 from portage.exception import InvalidAtom, PortageException, FileNotFound, \
66671 OperationNotPermitted, ParseError, PermissionDenied, ReadOnlyFileSystem
66672 from portage.localization import _
66673 from portage.proxy.objectproxy import ObjectProxy
66674 from portage.cache.mappings import UserDict
66675
66676 +if sys.hexversion >= 0x3000000:
66677 + _unicode = str
66678 +else:
66679 + _unicode = unicode
66680 +
66681 noiselimit = 0
66682
66683 def initialize_logger(level=logging.WARN):
66684 @@ -57,7 +64,7 @@ def initialize_logger(level=logging.WARN):
66685 """
66686 logging.basicConfig(level=logging.WARN, format='[%(levelname)-4s] %(message)s')
66687
66688 -def writemsg(mystr,noiselevel=0,fd=None):
66689 +def writemsg(mystr, noiselevel=0, fd=None):
66690 """Prints out warning and debug messages based on the noiselimit setting"""
66691 global noiselimit
66692 if fd is None:
66693 @@ -75,7 +82,7 @@ def writemsg(mystr,noiselevel=0,fd=None):
66694 fd.write(mystr)
66695 fd.flush()
66696
66697 -def writemsg_stdout(mystr,noiselevel=0):
66698 +def writemsg_stdout(mystr, noiselevel=0):
66699 """Prints messages stdout based on the noiselimit setting"""
66700 writemsg(mystr, noiselevel=noiselevel, fd=sys.stdout)
66701
66702 @@ -100,7 +107,7 @@ def writemsg_level(msg, level=0, noiselevel=0):
66703 writemsg(msg, noiselevel=noiselevel, fd=fd)
66704
66705 def normalize_path(mypath):
66706 - """
66707 + """
66708 os.path.normpath("//foo") returns "//foo" instead of "/foo"
66709 We dislike this behavior so we create our own normpath func
66710 to fix it.
66711 @@ -120,8 +127,8 @@ def grabfile(myfilename, compat_level=0, recursive=0, remember_source_file=False
66712 """This function grabs the lines in a file, normalizes whitespace and returns lines in a list; if a line
66713 begins with a #, it is ignored, as are empty lines"""
66714
66715 - mylines=grablines(myfilename, recursive, remember_source_file=True)
66716 - newlines=[]
66717 + mylines = grablines(myfilename, recursive, remember_source_file=True)
66718 + newlines = []
66719
66720 for x, source_file in mylines:
66721 #the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
66722 @@ -139,10 +146,10 @@ def grabfile(myfilename, compat_level=0, recursive=0, remember_source_file=False
66723 myline = " ".join(myline)
66724 if not myline:
66725 continue
66726 - if myline[0]=="#":
66727 + if myline[0] == "#":
66728 # Check if we have a compat-level string. BC-integration data.
66729 # '##COMPAT==>N<==' 'some string attached to it'
66730 - mylinetest = myline.split("<==",1)
66731 + mylinetest = myline.split("<==", 1)
66732 if len(mylinetest) == 2:
66733 myline_potential = mylinetest[1]
66734 mylinetest = mylinetest[0].split("##COMPAT==>")
66735 @@ -159,7 +166,7 @@ def grabfile(myfilename, compat_level=0, recursive=0, remember_source_file=False
66736 newlines.append(myline)
66737 return newlines
66738
66739 -def map_dictlist_vals(func,myDict):
66740 +def map_dictlist_vals(func, myDict):
66741 """Performs a function on each value of each key in a dictlist.
66742 Returns a new dictlist."""
66743 new_dl = {}
66744 @@ -173,7 +180,7 @@ def stack_dictlist(original_dicts, incremental=0, incrementals=[], ignore_none=0
66745 Stacks an array of dict-types into one array. Optionally merging or
66746 overwriting matching key/value pairs for the dict[key]->list.
66747 Returns a single dict. Higher index in lists is preferenced.
66748 -
66749 +
66750 Example usage:
66751 >>> from portage.util import stack_dictlist
66752 >>> print stack_dictlist( [{'a':'b'},{'x':'y'}])
66753 @@ -188,7 +195,7 @@ def stack_dictlist(original_dicts, incremental=0, incrementals=[], ignore_none=0
66754 >>> { 'KEYWORDS':['alpha'] }
66755 >>> print stack_dictlist( [a,b], incrementals=['KEYWORDS'])
66756 >>> { 'KEYWORDS':['alpha'] }
66757 -
66758 +
66759 @param original_dicts a list of (dictionary objects or None)
66760 @type list
66761 @param incremental True or false depending on whether new keys should overwrite
66762 @@ -199,7 +206,7 @@ def stack_dictlist(original_dicts, incremental=0, incrementals=[], ignore_none=0
66763 @type list
66764 @param ignore_none Appears to be ignored, but probably was used long long ago.
66765 @type boolean
66766 -
66767 +
66768 """
66769 final_dict = {}
66770 for mydict in original_dicts:
66771 @@ -208,7 +215,7 @@ def stack_dictlist(original_dicts, incremental=0, incrementals=[], ignore_none=0
66772 for y in mydict:
66773 if not y in final_dict:
66774 final_dict[y] = []
66775 -
66776 +
66777 for thing in mydict[y]:
66778 if thing:
66779 if incremental or y in incrementals:
66780 @@ -245,12 +252,13 @@ def stack_dicts(dicts, incremental=0, incrementals=[], ignore_none=0):
66781 def append_repo(atom_list, repo_name, remember_source_file=False):
66782 """
66783 Takes a list of valid atoms without repo spec and appends ::repo_name.
66784 + If an atom already has a repo part, then it is preserved (see bug #461948).
66785 """
66786 if remember_source_file:
66787 - return [(Atom(atom + "::" + repo_name, allow_wildcard=True, allow_repo=True), source) \
66788 + return [(atom.repo is not None and atom or atom.with_repo(repo_name), source) \
66789 for atom, source in atom_list]
66790 else:
66791 - return [Atom(atom + "::" + repo_name, allow_wildcard=True, allow_repo=True) \
66792 + return [atom.repo is not None and atom or atom.with_repo(repo_name) \
66793 for atom in atom_list]
66794
66795 def stack_lists(lists, incremental=1, remember_source_file=False,
66796 @@ -334,7 +342,7 @@ def stack_lists(lists, incremental=1, remember_source_file=False,
66797 def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1):
66798 """
66799 This function grabs the lines in a file, normalizes whitespace and returns lines in a dictionary
66800 -
66801 +
66802 @param myfilename: file to process
66803 @type myfilename: string (path)
66804 @param juststrings: only return strings
66805 @@ -350,9 +358,9 @@ def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1):
66806 1. Returns the lines in a file in a dictionary, for example:
66807 'sys-apps/portage x86 amd64 ppc'
66808 would return
66809 - { "sys-apps/portage" : [ 'x86', 'amd64', 'ppc' ]
66810 + {"sys-apps/portage" : ['x86', 'amd64', 'ppc']}
66811 """
66812 - newdict={}
66813 + newdict = {}
66814 for x in grablines(myfilename, recursive):
66815 #the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
66816 #into single spaces.
66817 @@ -379,52 +387,75 @@ def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1):
66818 newdict[k] = " ".join(v)
66819 return newdict
66820
66821 -def read_corresponding_eapi_file(filename):
66822 +_eapi_cache = {}
66823 +
66824 +def read_corresponding_eapi_file(filename, default="0"):
66825 """
66826 Read the 'eapi' file from the directory 'filename' is in.
66827 Returns "0" if the file is not present or invalid.
66828 """
66829 - default = "0"
66830 eapi_file = os.path.join(os.path.dirname(filename), "eapi")
66831 try:
66832 - f = io.open(_unicode_encode(eapi_file,
66833 + eapi = _eapi_cache[eapi_file]
66834 + except KeyError:
66835 + pass
66836 + else:
66837 + if eapi is None:
66838 + return default
66839 + return eapi
66840 +
66841 + eapi = None
66842 + try:
66843 + with io.open(_unicode_encode(eapi_file,
66844 encoding=_encodings['fs'], errors='strict'),
66845 - mode='r', encoding=_encodings['repo.content'], errors='replace')
66846 - lines = f.readlines()
66847 + mode='r', encoding=_encodings['repo.content'], errors='replace') as f:
66848 + lines = f.readlines()
66849 if len(lines) == 1:
66850 eapi = lines[0].rstrip("\n")
66851 else:
66852 writemsg(_("--- Invalid 'eapi' file (doesn't contain exactly one line): %s\n") % (eapi_file),
66853 noiselevel=-1)
66854 - eapi = default
66855 - f.close()
66856 except IOError:
66857 - eapi = default
66858 + pass
66859
66860 + _eapi_cache[eapi_file] = eapi
66861 + if eapi is None:
66862 + return default
66863 return eapi
66864
66865 def grabdict_package(myfilename, juststrings=0, recursive=0, allow_wildcard=False, allow_repo=False,
66866 verify_eapi=False, eapi=None):
66867 """ Does the same thing as grabdict except it validates keys
66868 with isvalidatom()"""
66869 - pkgs=grabdict(myfilename, juststrings, empty=1, recursive=recursive)
66870 - if not pkgs:
66871 - return pkgs
66872 - if verify_eapi and eapi is None:
66873 - eapi = read_corresponding_eapi_file(myfilename)
66874
66875 - # We need to call keys() here in order to avoid the possibility of
66876 - # "RuntimeError: dictionary changed size during iteration"
66877 - # when an invalid atom is deleted.
66878 + if recursive:
66879 + file_list = _recursive_file_list(myfilename)
66880 + else:
66881 + file_list = [myfilename]
66882 +
66883 atoms = {}
66884 - for k, v in pkgs.items():
66885 - try:
66886 - k = Atom(k, allow_wildcard=allow_wildcard, allow_repo=allow_repo, eapi=eapi)
66887 - except InvalidAtom as e:
66888 - writemsg(_("--- Invalid atom in %s: %s\n") % (myfilename, e),
66889 - noiselevel=-1)
66890 - else:
66891 - atoms[k] = v
66892 + for filename in file_list:
66893 + d = grabdict(filename, juststrings=False,
66894 + empty=True, recursive=False, incremental=True)
66895 + if not d:
66896 + continue
66897 + if verify_eapi and eapi is None:
66898 + eapi = read_corresponding_eapi_file(myfilename)
66899 +
66900 + for k, v in d.items():
66901 + try:
66902 + k = Atom(k, allow_wildcard=allow_wildcard,
66903 + allow_repo=allow_repo, eapi=eapi)
66904 + except InvalidAtom as e:
66905 + writemsg(_("--- Invalid atom in %s: %s\n") % (filename, e),
66906 + noiselevel=-1)
66907 + else:
66908 + atoms.setdefault(k, []).extend(v)
66909 +
66910 + if juststrings:
66911 + for k, v in atoms.items():
66912 + atoms[k] = " ".join(v)
66913 +
66914 return atoms
66915
66916 def grabfile_package(myfilename, compatlevel=0, recursive=0, allow_wildcard=False, allow_repo=False,
66917 @@ -450,7 +481,7 @@ def grabfile_package(myfilename, compatlevel=0, recursive=0, allow_wildcard=Fals
66918 writemsg(_("--- Invalid atom in %s: %s\n") % (source_file, e),
66919 noiselevel=-1)
66920 else:
66921 - if pkg_orig == str(pkg):
66922 + if pkg_orig == _unicode(pkg):
66923 # normal atom, so return as Atom instance
66924 if remember_source_file:
66925 atoms.append((pkg, source_file))
66926 @@ -464,35 +495,63 @@ def grabfile_package(myfilename, compatlevel=0, recursive=0, allow_wildcard=Fals
66927 atoms.append(pkg_orig)
66928 return atoms
66929
66930 -def grablines(myfilename, recursive=0, remember_source_file=False):
66931 - mylines=[]
66932 - if recursive and os.path.isdir(myfilename):
66933 - if os.path.basename(myfilename) in _ignorecvs_dirs:
66934 - return mylines
66935 +def _recursive_basename_filter(f):
66936 + return not f.startswith(".") and not f.endswith("~")
66937 +
66938 +def _recursive_file_list(path):
66939 + # path may be a regular file or a directory
66940 +
66941 + def onerror(e):
66942 + if e.errno == PermissionDenied.errno:
66943 + raise PermissionDenied(path)
66944 +
66945 + stack = [os.path.split(path)]
66946 +
66947 + while stack:
66948 + parent, fname = stack.pop()
66949 + fullpath = os.path.join(parent, fname)
66950 +
66951 try:
66952 - dirlist = os.listdir(myfilename)
66953 + st = os.stat(fullpath)
66954 except OSError as e:
66955 - if e.errno == PermissionDenied.errno:
66956 - raise PermissionDenied(myfilename)
66957 - elif e.errno in (errno.ENOENT, errno.ESTALE):
66958 - return mylines
66959 - else:
66960 - raise
66961 - dirlist.sort()
66962 - for f in dirlist:
66963 - if not f.startswith(".") and not f.endswith("~"):
66964 - mylines.extend(grablines(
66965 - os.path.join(myfilename, f), recursive, remember_source_file))
66966 + onerror(e)
66967 + continue
66968 +
66969 + if stat.S_ISDIR(st.st_mode):
66970 + if fname in VCS_DIRS or not _recursive_basename_filter(fname):
66971 + continue
66972 + try:
66973 + children = os.listdir(fullpath)
66974 + except OSError as e:
66975 + onerror(e)
66976 + continue
66977 +
66978 + # Sort in reverse, since we pop from the end of the stack.
66979 + # Include regular files in the stack, so files are sorted
66980 + # together with directories.
66981 + children.sort(reverse=True)
66982 + stack.extend((fullpath, x) for x in children)
66983 +
66984 + elif stat.S_ISREG(st.st_mode):
66985 + if _recursive_basename_filter(fname):
66986 + yield fullpath
66987 +
66988 +def grablines(myfilename, recursive=0, remember_source_file=False):
66989 + mylines = []
66990 + if recursive:
66991 + for f in _recursive_file_list(myfilename):
66992 + mylines.extend(grablines(f, recursive=False,
66993 + remember_source_file=remember_source_file))
66994 +
66995 else:
66996 try:
66997 - myfile = io.open(_unicode_encode(myfilename,
66998 + with io.open(_unicode_encode(myfilename,
66999 encoding=_encodings['fs'], errors='strict'),
67000 - mode='r', encoding=_encodings['content'], errors='replace')
67001 - if remember_source_file:
67002 - mylines = [(line, myfilename) for line in myfile.readlines()]
67003 - else:
67004 - mylines = myfile.readlines()
67005 - myfile.close()
67006 + mode='r', encoding=_encodings['content'], errors='replace') as myfile:
67007 + if remember_source_file:
67008 + mylines = [(line, myfilename) for line in myfile.readlines()]
67009 + else:
67010 + mylines = myfile.readlines()
67011 except IOError as e:
67012 if e.errno == PermissionDenied.errno:
67013 raise PermissionDenied(myfilename)
67014 @@ -502,7 +561,7 @@ def grablines(myfilename, recursive=0, remember_source_file=False):
67015 raise
67016 return mylines
67017
67018 -def writedict(mydict,myfilename,writekey=True):
67019 +def writedict(mydict, myfilename, writekey=True):
67020 """Writes out a dict to a file; writekey=0 mode doesn't write out
67021 the key and assumes all values are strings, not lists."""
67022 lines = []
67023 @@ -528,18 +587,44 @@ def shlex_split(s):
67024 rval = [_unicode_decode(x) for x in rval]
67025 return rval
67026
67027 -class _tolerant_shlex(shlex.shlex):
67028 +class _getconfig_shlex(shlex.shlex):
67029 +
67030 + def __init__(self, portage_tolerant=False, **kwargs):
67031 + shlex.shlex.__init__(self, **kwargs)
67032 + self.__portage_tolerant = portage_tolerant
67033 +
67034 + def allow_sourcing(self, var_expand_map):
67035 + self.source = portage._native_string("source")
67036 + self.var_expand_map = var_expand_map
67037 +
67038 def sourcehook(self, newfile):
67039 try:
67040 + newfile = varexpand(newfile, self.var_expand_map)
67041 return shlex.shlex.sourcehook(self, newfile)
67042 except EnvironmentError as e:
67043 - writemsg(_("!!! Parse error in '%s': source command failed: %s\n") % \
67044 - (self.infile, str(e)), noiselevel=-1)
67045 + if e.errno == PermissionDenied.errno:
67046 + raise PermissionDenied(newfile)
67047 + if e.errno not in (errno.ENOENT, errno.ENOTDIR):
67048 + writemsg("open('%s', 'r'): %s\n" % (newfile, e), noiselevel=-1)
67049 + raise
67050 +
67051 + msg = self.error_leader()
67052 + if e.errno == errno.ENOTDIR:
67053 + msg += _("%s: Not a directory") % newfile
67054 + else:
67055 + msg += _("%s: No such file or directory") % newfile
67056 +
67057 + if self.__portage_tolerant:
67058 + writemsg("%s\n" % msg, noiselevel=-1)
67059 + else:
67060 + raise ParseError(msg)
67061 return (newfile, io.StringIO())
67062
67063 _invalid_var_name_re = re.compile(r'^\d|\W')
67064
67065 -def getconfig(mycfg, tolerant=0, allow_sourcing=False, expand=True):
67066 +def getconfig(mycfg, tolerant=False, allow_sourcing=False, expand=True,
67067 + recursive=False):
67068 +
67069 if isinstance(expand, dict):
67070 # Some existing variable definitions have been
67071 # passed in, for use in substitutions.
67072 @@ -548,6 +633,21 @@ def getconfig(mycfg, tolerant=0, allow_sourcing=False, expand=True):
67073 else:
67074 expand_map = {}
67075 mykeys = {}
67076 +
67077 + if recursive:
67078 + # Emulate source commands so that syntax error messages
67079 + # can display real file names and line numbers.
67080 + if not expand:
67081 + expand_map = False
67082 + fname = None
67083 + for fname in _recursive_file_list(mycfg):
67084 + mykeys.update(getconfig(fname, tolerant=tolerant,
67085 + allow_sourcing=allow_sourcing, expand=expand_map,
67086 + recursive=False) or {})
67087 + if fname is None:
67088 + return None
67089 + return mykeys
67090 +
67091 f = None
67092 try:
67093 # NOTE: shlex doesn't support unicode objects with Python 2
67094 @@ -572,49 +672,53 @@ def getconfig(mycfg, tolerant=0, allow_sourcing=False, expand=True):
67095 if f is not None:
67096 f.close()
67097
67098 + # Since this file has unicode_literals enabled, and Python 2's
67099 + # shlex implementation does not support unicode, the following code
67100 + # uses _native_string() to encode unicode literals when necessary.
67101 +
67102 # Workaround for avoiding a silent error in shlex that is
67103 # triggered by a source statement at the end of the file
67104 # without a trailing newline after the source statement.
67105 - if content and content[-1] != '\n':
67106 - content += '\n'
67107 + if content and content[-1] != portage._native_string('\n'):
67108 + content += portage._native_string('\n')
67109
67110 # Warn about dos-style line endings since that prevents
67111 # people from being able to source them with bash.
67112 - if '\r' in content:
67113 + if portage._native_string('\r') in content:
67114 writemsg(("!!! " + _("Please use dos2unix to convert line endings " + \
67115 "in config file: '%s'") + "\n") % mycfg, noiselevel=-1)
67116
67117 lex = None
67118 try:
67119 - if tolerant:
67120 - shlex_class = _tolerant_shlex
67121 - else:
67122 - shlex_class = shlex.shlex
67123 # The default shlex.sourcehook() implementation
67124 # only joins relative paths when the infile
67125 # attribute is properly set.
67126 - lex = shlex_class(content, infile=mycfg, posix=True)
67127 - lex.wordchars = string.digits + string.ascii_letters + \
67128 - "~!@#$%*_\:;?,./-+{}"
67129 - lex.quotes="\"'"
67130 + lex = _getconfig_shlex(instream=content, infile=mycfg, posix=True,
67131 + portage_tolerant=tolerant)
67132 + lex.wordchars = portage._native_string(string.digits +
67133 + string.ascii_letters + "~!@#$%*_\:;?,./-+{}")
67134 + lex.quotes = portage._native_string("\"'")
67135 if allow_sourcing:
67136 - lex.source="source"
67137 - while 1:
67138 - key=lex.get_token()
67139 + lex.allow_sourcing(expand_map)
67140 +
67141 + while True:
67142 + key = _unicode_decode(lex.get_token())
67143 if key == "export":
67144 - key = lex.get_token()
67145 + key = _unicode_decode(lex.get_token())
67146 if key is None:
67147 #normal end of file
67148 - break;
67149 - equ=lex.get_token()
67150 - if (equ==''):
67151 + break
67152 +
67153 + equ = _unicode_decode(lex.get_token())
67154 + if not equ:
67155 msg = lex.error_leader() + _("Unexpected EOF")
67156 if not tolerant:
67157 raise ParseError(msg)
67158 else:
67159 writemsg("%s\n" % msg, noiselevel=-1)
67160 return mykeys
67161 - elif (equ!='='):
67162 +
67163 + elif equ != "=":
67164 msg = lex.error_leader() + \
67165 _("Invalid token '%s' (not '=')") % (equ,)
67166 if not tolerant:
67167 @@ -622,7 +726,8 @@ def getconfig(mycfg, tolerant=0, allow_sourcing=False, expand=True):
67168 else:
67169 writemsg("%s\n" % msg, noiselevel=-1)
67170 return mykeys
67171 - val=lex.get_token()
67172 +
67173 + val = _unicode_decode(lex.get_token())
67174 if val is None:
67175 msg = lex.error_leader() + \
67176 _("Unexpected end of config file: variable '%s'") % (key,)
67177 @@ -631,8 +736,6 @@ def getconfig(mycfg, tolerant=0, allow_sourcing=False, expand=True):
67178 else:
67179 writemsg("%s\n" % msg, noiselevel=-1)
67180 return mykeys
67181 - key = _unicode_decode(key)
67182 - val = _unicode_decode(val)
67183
67184 if _invalid_var_name_re.search(key) is not None:
67185 msg = lex.error_leader() + \
67186 @@ -653,7 +756,7 @@ def getconfig(mycfg, tolerant=0, allow_sourcing=False, expand=True):
67187 except Exception as e:
67188 if isinstance(e, ParseError) or lex is None:
67189 raise
67190 - msg = _unicode_decode("%s%s") % (lex.error_leader(), e)
67191 + msg = "%s%s" % (lex.error_leader(), e)
67192 writemsg("%s\n" % msg, noiselevel=-1)
67193 raise
67194
67195 @@ -671,10 +774,10 @@ def varexpand(mystring, mydict=None, error_leader=None):
67196 This code is used by the configfile code, as well as others (parser)
67197 This would be a good bunch of code to port to C.
67198 """
67199 - numvars=0
67200 - #in single, double quotes
67201 - insing=0
67202 - indoub=0
67203 + numvars = 0
67204 + # in single, double quotes
67205 + insing = 0
67206 + indoub = 0
67207 pos = 0
67208 length = len(mystring)
67209 newstring = []
67210 @@ -686,7 +789,7 @@ def varexpand(mystring, mydict=None, error_leader=None):
67211 else:
67212 newstring.append("'") # Quote removal is handled by shlex.
67213 insing=not insing
67214 - pos=pos+1
67215 + pos += 1
67216 continue
67217 elif current == '"':
67218 if (insing):
67219 @@ -694,9 +797,9 @@ def varexpand(mystring, mydict=None, error_leader=None):
67220 else:
67221 newstring.append('"') # Quote removal is handled by shlex.
67222 indoub=not indoub
67223 - pos=pos+1
67224 + pos += 1
67225 continue
67226 - if (not insing):
67227 + if not insing:
67228 #expansion time
67229 if current == "\n":
67230 #convert newlines to spaces
67231 @@ -711,7 +814,7 @@ def varexpand(mystring, mydict=None, error_leader=None):
67232 # escaped newline characters. Note that we don't handle
67233 # escaped quotes here, since getconfig() uses shlex
67234 # to handle that earlier.
67235 - if (pos+1>=len(mystring)):
67236 + if pos + 1 >= len(mystring):
67237 newstring.append(current)
67238 break
67239 else:
67240 @@ -733,15 +836,15 @@ def varexpand(mystring, mydict=None, error_leader=None):
67241 newstring.append(mystring[pos - 2:pos])
67242 continue
67243 elif current == "$":
67244 - pos=pos+1
67245 - if mystring[pos]=="{":
67246 - pos=pos+1
67247 - braced=True
67248 + pos += 1
67249 + if mystring[pos] == "{":
67250 + pos += 1
67251 + braced = True
67252 else:
67253 - braced=False
67254 - myvstart=pos
67255 + braced = False
67256 + myvstart = pos
67257 while mystring[pos] in _varexpand_word_chars:
67258 - if (pos+1)>=len(mystring):
67259 + if pos + 1 >= len(mystring):
67260 if braced:
67261 msg = _varexpand_unexpected_eof_msg
67262 if error_leader is not None:
67263 @@ -749,20 +852,20 @@ def varexpand(mystring, mydict=None, error_leader=None):
67264 writemsg(msg + "\n", noiselevel=-1)
67265 return ""
67266 else:
67267 - pos=pos+1
67268 + pos += 1
67269 break
67270 - pos=pos+1
67271 - myvarname=mystring[myvstart:pos]
67272 + pos += 1
67273 + myvarname = mystring[myvstart:pos]
67274 if braced:
67275 - if mystring[pos]!="}":
67276 + if mystring[pos] != "}":
67277 msg = _varexpand_unexpected_eof_msg
67278 if error_leader is not None:
67279 msg = error_leader() + msg
67280 writemsg(msg + "\n", noiselevel=-1)
67281 return ""
67282 else:
67283 - pos=pos+1
67284 - if len(myvarname)==0:
67285 + pos += 1
67286 + if len(myvarname) == 0:
67287 msg = "$"
67288 if braced:
67289 msg += "{}"
67290 @@ -771,7 +874,7 @@ def varexpand(mystring, mydict=None, error_leader=None):
67291 msg = error_leader() + msg
67292 writemsg(msg + "\n", noiselevel=-1)
67293 return ""
67294 - numvars=numvars+1
67295 + numvars += 1
67296 if myvarname in mydict:
67297 newstring.append(mydict[myvarname])
67298 else:
67299 @@ -786,9 +889,9 @@ def varexpand(mystring, mydict=None, error_leader=None):
67300 # broken and removed, but can still be imported
67301 pickle_write = None
67302
67303 -def pickle_read(filename,default=None,debug=0):
67304 +def pickle_read(filename, default=None, debug=0):
67305 if not os.access(filename, os.R_OK):
67306 - writemsg(_("pickle_read(): File not readable. '")+filename+"'\n",1)
67307 + writemsg(_("pickle_read(): File not readable. '") + filename + "'\n", 1)
67308 return default
67309 data = None
67310 try:
67311 @@ -797,12 +900,12 @@ def pickle_read(filename,default=None,debug=0):
67312 mypickle = pickle.Unpickler(myf)
67313 data = mypickle.load()
67314 myf.close()
67315 - del mypickle,myf
67316 - writemsg(_("pickle_read(): Loaded pickle. '")+filename+"'\n",1)
67317 + del mypickle, myf
67318 + writemsg(_("pickle_read(): Loaded pickle. '") + filename + "'\n", 1)
67319 except SystemExit as e:
67320 raise
67321 except Exception as e:
67322 - writemsg(_("!!! Failed to load pickle: ")+str(e)+"\n",1)
67323 + writemsg(_("!!! Failed to load pickle: ") + str(e) + "\n", 1)
67324 data = default
67325 return data
67326
67327 @@ -830,6 +933,9 @@ class cmp_sort_key(object):
67328 list.sort(), making it easier to port code for python-3.0 compatibility.
67329 It works by generating key objects which use the given cmp function to
67330 implement their __lt__ method.
67331 +
67332 + Beginning with Python 2.7 and 3.2, equivalent functionality is provided
67333 + by functools.cmp_to_key().
67334 """
67335 __slots__ = ("_cmp_func",)
67336
67337 @@ -922,6 +1028,10 @@ def apply_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
67338
67339 modified = False
67340
67341 + # Since Python 3.4, chown requires int type (no proxies).
67342 + uid = int(uid)
67343 + gid = int(gid)
67344 +
67345 if stat_cached is None:
67346 try:
67347 if follow_links:
67348 @@ -1141,7 +1251,7 @@ class atomic_ofstream(ObjectProxy):
67349 object.__setattr__(self, '_file',
67350 open_func(_unicode_encode(tmp_name,
67351 encoding=_encodings['fs'], errors='strict'),
67352 - mode=mode, **kargs))
67353 + mode=mode, **portage._native_kwargs(kargs)))
67354 return
67355 except IOError as e:
67356 if canonical_path == filename:
67357 @@ -1223,7 +1333,7 @@ class atomic_ofstream(ObjectProxy):
67358 self.close()
67359
67360 def __del__(self):
67361 - """If the user does not explicitely call close(), it is
67362 + """If the user does not explicitly call close(), it is
67363 assumed that an error has occurred, so we abort()."""
67364 try:
67365 f = object.__getattribute__(self, '_file')
67366 @@ -1402,9 +1512,9 @@ class LazyItemsDict(UserDict):
67367 lazy_item = self.lazy_items.get(k)
67368 if lazy_item is not None:
67369 if not lazy_item.singleton:
67370 - raise TypeError(_unicode_decode("LazyItemsDict " + \
67371 + raise TypeError("LazyItemsDict " + \
67372 "deepcopy is unsafe with lazy items that are " + \
67373 - "not singletons: key=%s value=%s") % (k, lazy_item,))
67374 + "not singletons: key=%s value=%s" % (k, lazy_item,))
67375 UserDict.__setitem__(result, k_copy, deepcopy(self[k], memo))
67376 return result
67377
67378 @@ -1576,13 +1686,13 @@ def find_updated_config_files(target_root, config_protect):
67379 """
67380 	Return a tuple of configuration files that need to be updated.
67381 The tuple contains lists organized like this:
67382 - [ protected_dir, file_list ]
67383 + [protected_dir, file_list]
67384 	If the protected config isn't a protected_dir but a protected_file, the list is:
67385 - [ protected_file, None ]
67386 + [protected_file, None]
67387 	If no configuration files need to be updated, None is returned
67388 """
67389
67390 - os = _os_merge
67391 + encoding = _encodings['fs']
67392
67393 if config_protect:
67394 # directories with some protect files in them
67395 @@ -1614,10 +1724,24 @@ def find_updated_config_files(target_root, config_protect):
67396 mycommand = "find '%s' -maxdepth 1 -name '._cfg????_%s'" % \
67397 os.path.split(x.rstrip(os.path.sep))
67398 mycommand += " ! -name '.*~' ! -iname '.*.bak' -print0"
67399 - a = subprocess_getstatusoutput(mycommand)
67400 -
67401 - if a[0] == 0:
67402 - files = a[1].split('\0')
67403 + cmd = shlex_split(mycommand)
67404 +
67405 + if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000:
67406 + # Python 3.1 _execvp throws TypeError for non-absolute executable
67407 + # path passed as bytes (see http://bugs.python.org/issue8513).
67408 + fullname = portage.process.find_binary(cmd[0])
67409 + if fullname is None:
67410 + raise portage.exception.CommandNotFound(cmd[0])
67411 + cmd[0] = fullname
67412 +
67413 + cmd = [_unicode_encode(arg, encoding=encoding, errors='strict')
67414 + for arg in cmd]
67415 + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
67416 + stderr=subprocess.STDOUT)
67417 + output = _unicode_decode(proc.communicate()[0], encoding=encoding)
67418 + status = proc.wait()
67419 + if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
67420 + files = output.split('\0')
67421 # split always produces an empty string as the last element
67422 if files and not files[-1]:
67423 del files[-1]
67424
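For reference, a minimal sketch of how the reworked helpers above can be called (the paths are ordinary Portage config locations used purely as examples; behaviour follows the hunks above and is not verified against this exact revision):

    from portage.util import getconfig, grablines, varexpand

    # getconfig() now accepts recursive=True and reads every regular file
    # under a directory, skipping VCS dirs, hidden files and backups.
    mykeys = getconfig("/etc/portage/make.conf", tolerant=True,
        allow_sourcing=True, expand=True, recursive=True)

    # grablines() gains the same directory handling.
    lines = grablines("/etc/portage/package.use", recursive=True,
        remember_source_file=True)

    # varexpand() substitutes ${VAR} references from a supplied mapping.
    varexpand("${ROOT}usr/lib", {"ROOT": "/"})  # -> '/usr/lib'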
67425 diff --git a/pym/portage/util/_argparse.py b/pym/portage/util/_argparse.py
67426 new file mode 100644
67427 index 0000000..6ca7852
67428 --- /dev/null
67429 +++ b/pym/portage/util/_argparse.py
67430 @@ -0,0 +1,42 @@
67431 +# Copyright 2013 Gentoo Foundation
67432 +# Distributed under the terms of the GNU General Public License v2
67433 +
67434 +__all__ = ['ArgumentParser']
67435 +
67436 +try:
67437 + from argparse import ArgumentParser
67438 +except ImportError:
67439 + # Compatibility with Python 2.6 and 3.1
67440 + from optparse import OptionGroup, OptionParser
67441 +
67442 + from portage.localization import _
67443 +
67444 + class ArgumentParser(object):
67445 + def __init__(self, **kwargs):
67446 + add_help = kwargs.pop("add_help", None)
67447 + if add_help is not None:
67448 + kwargs["add_help_option"] = add_help
67449 + parser = OptionParser(**kwargs)
67450 + self._parser = parser
67451 + self.add_argument = parser.add_option
67452 + self.print_help = parser.print_help
67453 + self.error = parser.error
67454 +
67455 + def add_argument_group(self, title=None, **kwargs):
67456 + optiongroup = OptionGroup(self._parser, title, **kwargs)
67457 + self._parser.add_option_group(optiongroup)
67458 + return _ArgumentGroup(optiongroup)
67459 +
67460 + def parse_known_args(self, args=None, namespace=None):
67461 + return self._parser.parse_args(args, namespace)
67462 +
67463 + def parse_args(self, args=None, namespace=None):
67464 + args, argv = self.parse_known_args(args, namespace)
67465 + if argv:
67466 + msg = _('unrecognized arguments: %s')
67467 + self.error(msg % ' '.join(argv))
67468 + return args
67469 +
67470 + class _ArgumentGroup(object):
67471 + def __init__(self, optiongroup):
67472 + self.add_argument = optiongroup.add_option
67473
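A short usage sketch for the _argparse compatibility shim above: on Python >= 2.7 it is simply argparse.ArgumentParser, while on 2.6/3.1 the optparse-based fallback provides the same subset of the interface (the option names here are hypothetical):

    from portage.util._argparse import ArgumentParser

    parser = ArgumentParser(description="example tool")
    parser.add_argument("--jobs", "-j", action="store", default="1")
    group = parser.add_argument_group("Actions")
    group.add_argument("--pretend", action="store_true", default=False)

    options = parser.parse_args(["-j", "4", "--pretend"])
    # options.jobs == "4", options.pretend == True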
67474 diff --git a/pym/portage/util/_async/AsyncScheduler.py b/pym/portage/util/_async/AsyncScheduler.py
67475 new file mode 100644
67476 index 0000000..9b96c6f
67477 --- /dev/null
67478 +++ b/pym/portage/util/_async/AsyncScheduler.py
67479 @@ -0,0 +1,102 @@
67480 +# Copyright 2012-2013 Gentoo Foundation
67481 +# Distributed under the terms of the GNU General Public License v2
67482 +
67483 +from portage import os
67484 +from _emerge.AsynchronousTask import AsynchronousTask
67485 +from _emerge.PollScheduler import PollScheduler
67486 +
67487 +class AsyncScheduler(AsynchronousTask, PollScheduler):
67488 +
67489 + def __init__(self, max_jobs=None, max_load=None, **kwargs):
67490 + AsynchronousTask.__init__(self)
67491 + PollScheduler.__init__(self, **kwargs)
67492 +
67493 + if max_jobs is None:
67494 + max_jobs = 1
67495 + self._max_jobs = max_jobs
67496 + self._max_load = max_load
67497 + self._error_count = 0
67498 + self._running_tasks = set()
67499 + self._remaining_tasks = True
67500 + self._term_check_id = None
67501 + self._loadavg_check_id = None
67502 +
67503 + def _poll(self):
67504 + if not (self._is_work_scheduled() or self._keep_scheduling()):
67505 + self.wait()
67506 + return self.returncode
67507 +
67508 + def _cancel(self):
67509 + self._terminated.set()
67510 + self._termination_check()
67511 +
67512 + def _terminate_tasks(self):
67513 + for task in list(self._running_tasks):
67514 + task.cancel()
67515 +
67516 + def _next_task(self):
67517 + raise NotImplementedError(self)
67518 +
67519 + def _keep_scheduling(self):
67520 + return self._remaining_tasks and not self._terminated.is_set()
67521 +
67522 + def _running_job_count(self):
67523 + return len(self._running_tasks)
67524 +
67525 + def _schedule_tasks(self):
67526 + while self._keep_scheduling() and self._can_add_job():
67527 + try:
67528 + task = self._next_task()
67529 + except StopIteration:
67530 + self._remaining_tasks = False
67531 + else:
67532 + self._running_tasks.add(task)
67533 + task.scheduler = self._sched_iface
67534 + task.addExitListener(self._task_exit)
67535 + task.start()
67536 +
67537 + # Triggers cleanup and exit listeners if there's nothing left to do.
67538 + self.poll()
67539 +
67540 + def _task_exit(self, task):
67541 + self._running_tasks.discard(task)
67542 + if task.returncode != os.EX_OK:
67543 + self._error_count += 1
67544 + self._schedule()
67545 +
67546 + def _start(self):
67547 + self._term_check_id = self._event_loop.idle_add(self._termination_check)
67548 + if self._max_load is not None and \
67549 + self._loadavg_latency is not None and \
67550 + (self._max_jobs is True or self._max_jobs > 1):
67551 + # We have to schedule periodically, in case the load
67552 + # average has changed since the last call.
67553 + self._loadavg_check_id = self._event_loop.timeout_add(
67554 + self._loadavg_latency, self._schedule)
67555 + self._schedule()
67556 +
67557 + def _wait(self):
67558 + # Loop while there are jobs to be scheduled.
67559 + while self._keep_scheduling():
67560 + self._event_loop.iteration()
67561 +
67562 + # Clean shutdown of previously scheduled jobs. In the
67563 + # case of termination, this allows for basic cleanup
67564 + # such as flushing of buffered output to logs.
67565 + while self._is_work_scheduled():
67566 + self._event_loop.iteration()
67567 +
67568 + if self._term_check_id is not None:
67569 + self._event_loop.source_remove(self._term_check_id)
67570 + self._term_check_id = None
67571 +
67572 + if self._loadavg_check_id is not None:
67573 + self._event_loop.source_remove(self._loadavg_check_id)
67574 + self._loadavg_check_id = None
67575 +
67576 + if self._error_count > 0:
67577 + self.returncode = 1
67578 + else:
67579 + self.returncode = os.EX_OK
67580 +
67581 + return self.returncode
67582
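AsyncScheduler leaves exactly one hook to subclasses: _next_task(), which must return the next task or raise StopIteration. A minimal sketch, reusing the FileDigester class added later in this commit (constructor keywords such as max_jobs are forwarded to PollScheduler as shown above):

    from portage.util._async.AsyncScheduler import AsyncScheduler
    from portage.util._async.FileDigester import FileDigester

    class DigestScheduler(AsyncScheduler):
        """Digest files concurrently, at most max_jobs at a time."""

        def __init__(self, files, **kwargs):
            AsyncScheduler.__init__(self, **kwargs)
            self._file_iter = iter(files)

        def _next_task(self):
            # StopIteration from next() signals that no tasks remain.
            return FileDigester(file_path=next(self._file_iter),
                hash_names=["SHA256"])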
67583 diff --git a/pym/portage/util/_async/FileCopier.py b/pym/portage/util/_async/FileCopier.py
67584 new file mode 100644
67585 index 0000000..27e5ab4
67586 --- /dev/null
67587 +++ b/pym/portage/util/_async/FileCopier.py
67588 @@ -0,0 +1,17 @@
67589 +# Copyright 2013 Gentoo Foundation
67590 +# Distributed under the terms of the GNU General Public License v2
67591 +
67592 +from portage import os
67593 +from portage import shutil
67594 +from portage.util._async.ForkProcess import ForkProcess
67595 +
67596 +class FileCopier(ForkProcess):
67597 + """
67598 + Asynchronously copy a file.
67599 + """
67600 +
67601 + __slots__ = ('src_path', 'dest_path')
67602 +
67603 + def _run(self):
67604 + shutil.copy(self.src_path, self.dest_path)
67605 + return os.EX_OK
67606
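A sketch of driving FileCopier synchronously. It assumes global_event_loop() from portage.util._eventloop.global_event_loop (not part of this diff) as the scheduler; the paths are arbitrary examples:

    from portage import os
    from portage.util._async.FileCopier import FileCopier
    from portage.util._eventloop.global_event_loop import global_event_loop

    copier = FileCopier(src_path="/etc/hostname",
        dest_path="/tmp/hostname.copy",
        scheduler=global_event_loop())
    copier.start()
    if copier.wait() != os.EX_OK:
        print("copy failed")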
67607 diff --git a/pym/portage/util/_async/FileDigester.py b/pym/portage/util/_async/FileDigester.py
67608 new file mode 100644
67609 index 0000000..881c692
67610 --- /dev/null
67611 +++ b/pym/portage/util/_async/FileDigester.py
67612 @@ -0,0 +1,73 @@
67613 +# Copyright 2013 Gentoo Foundation
67614 +# Distributed under the terms of the GNU General Public License v2
67615 +
67616 +from portage import os
67617 +from portage.checksum import perform_multiple_checksums
67618 +from portage.util._async.ForkProcess import ForkProcess
67619 +from _emerge.PipeReader import PipeReader
67620 +
67621 +class FileDigester(ForkProcess):
67622 + """
67623 + Asynchronously generate file digests. Pass in file_path and
67624 + hash_names, and after successful execution, the digests
67625 + attribute will be a dict containing all of the requested
67626 + digests.
67627 + """
67628 +
67629 + __slots__ = ('file_path', 'digests', 'hash_names',
67630 + '_digest_pipe_reader', '_digest_pw')
67631 +
67632 + def _start(self):
67633 + pr, pw = os.pipe()
67634 + self.fd_pipes = {}
67635 + self.fd_pipes[pw] = pw
67636 + self._digest_pw = pw
67637 + self._digest_pipe_reader = PipeReader(
67638 + input_files={"input":pr},
67639 + scheduler=self.scheduler)
67640 + self._digest_pipe_reader.addExitListener(self._digest_pipe_reader_exit)
67641 + self._digest_pipe_reader.start()
67642 + ForkProcess._start(self)
67643 + os.close(pw)
67644 +
67645 + def _run(self):
67646 + digests = perform_multiple_checksums(self.file_path,
67647 + hashes=self.hash_names)
67648 +
67649 + buf = "".join("%s=%s\n" % item
67650 + for item in digests.items()).encode('utf_8')
67651 +
67652 + while buf:
67653 + buf = buf[os.write(self._digest_pw, buf):]
67654 +
67655 + return os.EX_OK
67656 +
67657 + def _parse_digests(self, data):
67658 +
67659 + digests = {}
67660 + for line in data.decode('utf_8').splitlines():
67661 + parts = line.split('=', 1)
67662 + if len(parts) == 2:
67663 + digests[parts[0]] = parts[1]
67664 +
67665 + self.digests = digests
67666 +
67667 + def _pipe_logger_exit(self, pipe_logger):
67668 + # Ignore this event, since we want to ensure that we
67669 + # exit only after _digest_pipe_reader has reached EOF.
67670 + self._pipe_logger = None
67671 +
67672 + def _digest_pipe_reader_exit(self, pipe_reader):
67673 + self._parse_digests(pipe_reader.getvalue())
67674 + self._digest_pipe_reader = None
67675 + self._unregister()
67676 + self.wait()
67677 +
67678 + def _unregister(self):
67679 + ForkProcess._unregister(self)
67680 +
67681 + pipe_reader = self._digest_pipe_reader
67682 + if pipe_reader is not None:
67683 + self._digest_pipe_reader = None
67684 + pipe_reader.removeExitListener(self._digest_pipe_reader_exit)
67685 + pipe_reader.cancel()
67686
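FileDigester usage follows its docstring: pass file_path and hash_names, run the task, then read the digests attribute. A sketch with a hypothetical distfile path and global_event_loop() assumed as the scheduler:

    from portage.util._async.FileDigester import FileDigester
    from portage.util._eventloop.global_event_loop import global_event_loop

    digester = FileDigester(file_path="/usr/portage/distfiles/foo-1.0.tar.gz",
        hash_names=("SHA256", "SHA512"),
        scheduler=global_event_loop())
    digester.start()
    digester.wait()
    # On success, digester.digests maps each requested hash name to its value.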
67687 diff --git a/pym/portage/util/_async/ForkProcess.py b/pym/portage/util/_async/ForkProcess.py
67688 new file mode 100644
67689 index 0000000..25f72d3
67690 --- /dev/null
67691 +++ b/pym/portage/util/_async/ForkProcess.py
67692 @@ -0,0 +1,65 @@
67693 +# Copyright 2012-2013 Gentoo Foundation
67694 +# Distributed under the terms of the GNU General Public License v2
67695 +
67696 +import signal
67697 +import sys
67698 +import traceback
67699 +
67700 +import portage
67701 +from portage import os
67702 +from _emerge.SpawnProcess import SpawnProcess
67703 +
67704 +class ForkProcess(SpawnProcess):
67705 +
67706 + __slots__ = ()
67707 +
67708 + def _spawn(self, args, fd_pipes=None, **kwargs):
67709 + """
67710 + Fork a subprocess, apply local settings, and call _run().
67711 + """
67712 +
67713 + parent_pid = os.getpid()
67714 + pid = None
67715 + try:
67716 + pid = os.fork()
67717 +
67718 + if pid != 0:
67719 + if not isinstance(pid, int):
67720 + raise AssertionError(
67721 + "fork returned non-integer: %s" % (repr(pid),))
67722 + return [pid]
67723 +
67724 + rval = 1
67725 + try:
67726 +
67727 + # Use default signal handlers in order to avoid problems
67728 + # killing subprocesses as reported in bug #353239.
67729 + signal.signal(signal.SIGINT, signal.SIG_DFL)
67730 + signal.signal(signal.SIGTERM, signal.SIG_DFL)
67731 +
67732 + portage.locks._close_fds()
67733 + # We don't exec, so use close_fds=False
67734 + # (see _setup_pipes docstring).
67735 + portage.process._setup_pipes(fd_pipes, close_fds=False)
67736 +
67737 + rval = self._run()
67738 + except SystemExit:
67739 + raise
67740 + except:
67741 + traceback.print_exc()
67742 + # os._exit() skips stderr flush!
67743 + sys.stderr.flush()
67744 + finally:
67745 + os._exit(rval)
67746 +
67747 + finally:
67748 + if pid == 0 or (pid is None and os.getpid() != parent_pid):
67749 + # Call os._exit() from a finally block in order
67750 + # to suppress any finally blocks from earlier
67751 + # in the call stack (see bug #345289). This
67752 + # finally block has to be setup before the fork
67753 + # in order to avoid a race condition.
67754 + os._exit(1)
67755 +
67756 + def _run(self):
67757 + raise NotImplementedError(self)
67758
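FileCopier and FileDigester above are both instances of the ForkProcess pattern: subclass it, declare any extra slots, and implement _run(), whose return value becomes the child's exit status. A hypothetical example:

    from portage import os
    from portage.util._async.ForkProcess import ForkProcess

    class TouchProcess(ForkProcess):
        """Create an empty file from a forked child process."""

        __slots__ = ("path",)

        def _run(self):
            # Executed in the forked child after signal handlers and
            # fd_pipes have been set up by _spawn().
            open(self.path, "a").close()
            return os.EX_OK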
67759 diff --git a/pym/portage/util/_async/PipeLogger.py b/pym/portage/util/_async/PipeLogger.py
67760 new file mode 100644
67761 index 0000000..aa605d9
67762 --- /dev/null
67763 +++ b/pym/portage/util/_async/PipeLogger.py
67764 @@ -0,0 +1,163 @@
67765 +# Copyright 2008-2013 Gentoo Foundation
67766 +# Distributed under the terms of the GNU General Public License v2
67767 +
67768 +import fcntl
67769 +import errno
67770 +import gzip
67771 +import sys
67772 +
67773 +import portage
67774 +from portage import os, _encodings, _unicode_encode
67775 +from _emerge.AbstractPollTask import AbstractPollTask
67776 +
67777 +class PipeLogger(AbstractPollTask):
67778 +
67779 + """
67780 + This can be used for logging output of a child process,
67781 + optionally outputting to log_file_path and/or stdout_fd. It can
67782 + also monitor for EOF on input_fd, which may be used to detect
67783 + termination of a child process. If log_file_path ends with
67784 + '.gz' then the log file is written with compression.
67785 + """
67786 +
67787 + __slots__ = ("input_fd", "log_file_path", "stdout_fd") + \
67788 + ("_log_file", "_log_file_real", "_reg_id")
67789 +
67790 + def _start(self):
67791 +
67792 + log_file_path = self.log_file_path
67793 + if log_file_path is not None:
67794 +
67795 + self._log_file = open(_unicode_encode(log_file_path,
67796 + encoding=_encodings['fs'], errors='strict'), mode='ab')
67797 + if log_file_path.endswith('.gz'):
67798 + self._log_file_real = self._log_file
67799 + self._log_file = gzip.GzipFile(filename='', mode='ab',
67800 + fileobj=self._log_file)
67801 +
67802 + portage.util.apply_secpass_permissions(log_file_path,
67803 + uid=portage.portage_uid, gid=portage.portage_gid,
67804 + mode=0o660)
67805 +
67806 + if isinstance(self.input_fd, int):
67807 + fd = self.input_fd
67808 + else:
67809 + fd = self.input_fd.fileno()
67810 +
67811 + fcntl.fcntl(fd, fcntl.F_SETFL,
67812 + fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
67813 +
67814 + # FD_CLOEXEC is enabled by default in Python >=3.4.
67815 + if sys.hexversion < 0x3040000:
67816 + try:
67817 + fcntl.FD_CLOEXEC
67818 + except AttributeError:
67819 + pass
67820 + else:
67821 + fcntl.fcntl(fd, fcntl.F_SETFD,
67822 + fcntl.fcntl(fd, fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
67823 +
67824 + self._reg_id = self.scheduler.io_add_watch(fd,
67825 + self._registered_events, self._output_handler)
67826 + self._registered = True
67827 +
67828 + def _cancel(self):
67829 + self._unregister()
67830 + if self.returncode is None:
67831 + self.returncode = self._cancelled_returncode
67832 +
67833 + def _wait(self):
67834 + if self.returncode is not None:
67835 + return self.returncode
67836 + self._wait_loop()
67837 + self.returncode = os.EX_OK
67838 + return self.returncode
67839 +
67840 + def _output_handler(self, fd, event):
67841 +
67842 + background = self.background
67843 + stdout_fd = self.stdout_fd
67844 + log_file = self._log_file
67845 +
67846 + while True:
67847 + buf = self._read_buf(fd, event)
67848 +
67849 + if buf is None:
67850 + # not a POLLIN event, EAGAIN, etc...
67851 + break
67852 +
67853 + if not buf:
67854 + # EOF
67855 + self._unregister()
67856 + self.wait()
67857 + break
67858 +
67859 + else:
67860 + if not background and stdout_fd is not None:
67861 + failures = 0
67862 + stdout_buf = buf
67863 + while stdout_buf:
67864 + try:
67865 + stdout_buf = \
67866 + stdout_buf[os.write(stdout_fd, stdout_buf):]
67867 + except OSError as e:
67868 + if e.errno != errno.EAGAIN:
67869 + raise
67870 + del e
67871 + failures += 1
67872 + if failures > 50:
67873 + # Avoid a potentially infinite loop. In
67874 + # most cases, the failure count is zero
67875 + # and it's unlikely to exceed 1.
67876 + raise
67877 +
67878 + # This means that a subprocess has put an inherited
67879 + # stdio file descriptor (typically stdin) into
67880 + # O_NONBLOCK mode. This is not acceptable (see bug
67881 + # #264435), so revert it. We need to use a loop
67882 + # here since there's a race condition due to
67883 + # parallel processes being able to change the
67884 + # flags on the inherited file descriptor.
67885 + # TODO: When possible, avoid having child processes
67886 + # inherit stdio file descriptors from portage
67887 + # (maybe it can't be avoided with
67888 + # PROPERTIES=interactive).
67889 + fcntl.fcntl(stdout_fd, fcntl.F_SETFL,
67890 + fcntl.fcntl(stdout_fd,
67891 + fcntl.F_GETFL) ^ os.O_NONBLOCK)
67892 +
67893 + if log_file is not None:
67894 + log_file.write(buf)
67895 + log_file.flush()
67896 +
67897 + self._unregister_if_appropriate(event)
67898 +
67899 + return True
67900 +
67901 + def _unregister(self):
67902 +
67903 + if self._reg_id is not None:
67904 + self.scheduler.source_remove(self._reg_id)
67905 + self._reg_id = None
67906 +
67907 + if self.input_fd is not None:
67908 + if isinstance(self.input_fd, int):
67909 + os.close(self.input_fd)
67910 + else:
67911 + self.input_fd.close()
67912 + self.input_fd = None
67913 +
67914 + if self.stdout_fd is not None:
67915 + os.close(self.stdout_fd)
67916 + self.stdout_fd = None
67917 +
67918 + if self._log_file is not None:
67919 + self._log_file.close()
67920 + self._log_file = None
67921 +
67922 + if self._log_file_real is not None:
67923 + # Avoid "ResourceWarning: unclosed file" since python 3.2.
67924 + self._log_file_real.close()
67925 + self._log_file_real = None
67926 +
67927 + self._registered = False
67928
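A sketch of wiring PipeLogger to a child process, with compression selected by the '.gz' suffix as the docstring describes (the command and log path are arbitrary; global_event_loop() is assumed as the scheduler):

    import subprocess

    from portage.util._async.PipeLogger import PipeLogger
    from portage.util._eventloop.global_event_loop import global_event_loop

    proc = subprocess.Popen(["du", "-sh", "/usr/share"],
        stdout=subprocess.PIPE)
    logger = PipeLogger(input_fd=proc.stdout,
        log_file_path="/tmp/du.log.gz",
        scheduler=global_event_loop())
    logger.start()
    logger.wait()   # returns once EOF is seen on input_fd
    proc.wait()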
67929 diff --git a/pym/portage/util/_async/PipeReaderBlockingIO.py b/pym/portage/util/_async/PipeReaderBlockingIO.py
67930 new file mode 100644
67931 index 0000000..b06adf6
67932 --- /dev/null
67933 +++ b/pym/portage/util/_async/PipeReaderBlockingIO.py
67934 @@ -0,0 +1,91 @@
67935 +# Copyright 2012 Gentoo Foundation
67936 +# Distributed under the terms of the GNU General Public License v2
67937 +
67938 +try:
67939 + import threading
67940 +except ImportError:
67941 + # dummy_threading will not suffice
67942 + threading = None
67943 +
67944 +from portage import os
67945 +from _emerge.AbstractPollTask import AbstractPollTask
67946 +
67947 +class PipeReaderBlockingIO(AbstractPollTask):
67948 + """
67949 + Reads output from one or more files and saves it in memory, for
67950 + retrieval via the getvalue() method. This is driven by a thread
67951 + for each input file, in order to support blocking IO. This may
67952 + be useful for using threads to handle blocking IO with Jython,
67953 + since Jython lacks the fcntl module which is needed for
67954 + non-blocking IO (see http://bugs.jython.org/issue1074).
67955 + """
67956 +
67957 + __slots__ = ("input_files", "_read_data", "_terminate",
67958 + "_threads", "_thread_rlock")
67959 +
67960 + def _start(self):
67961 + self._terminate = threading.Event()
67962 + self._threads = {}
67963 + self._read_data = []
67964 +
67965 + self._registered = True
67966 + self._thread_rlock = threading.RLock()
67967 + with self._thread_rlock:
67968 + for f in self.input_files.values():
67969 + t = threading.Thread(target=self._reader_thread, args=(f,))
67970 + t.daemon = True
67971 + t.start()
67972 + self._threads[f] = t
67973 +
67974 + def _reader_thread(self, f):
67975 + try:
67976 + terminated = self._terminate.is_set
67977 + except AttributeError:
67978 + # Jython 2.7.0a2
67979 + terminated = self._terminate.isSet
67980 + bufsize = self._bufsize
67981 + while not terminated():
67982 + buf = f.read(bufsize)
67983 + with self._thread_rlock:
67984 + if terminated():
67985 + break
67986 + elif buf:
67987 + self._read_data.append(buf)
67988 + else:
67989 + del self._threads[f]
67990 + if not self._threads:
67991 + # Thread-safe callback to EventLoop
67992 + self.scheduler.idle_add(self._eof)
67993 + break
67994 + f.close()
67995 +
67996 + def _eof(self):
67997 + self._registered = False
67998 + if self.returncode is None:
67999 + self.returncode = os.EX_OK
68000 + self.wait()
68001 + return False
68002 +
68003 + def _cancel(self):
68004 + self._terminate.set()
68005 + self._registered = False
68006 + if self.returncode is None:
68007 + self.returncode = self._cancelled_returncode
68008 + self.wait()
68009 +
68010 + def _wait(self):
68011 + if self.returncode is not None:
68012 + return self.returncode
68013 + self._wait_loop()
68014 + self.returncode = os.EX_OK
68015 + return self.returncode
68016 +
68017 + def getvalue(self):
68018 + """Retrieve the entire contents"""
68019 + with self._thread_rlock:
68020 + return b''.join(self._read_data)
68021 +
68022 + def close(self):
68023 + """Free the memory buffer."""
68024 + with self._thread_rlock:
68025 + self._read_data = None
68026
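PipeReaderBlockingIO collects everything read from its input_files and hands it back via getvalue(). A sketch, again with global_event_loop() assumed as the scheduler:

    import subprocess

    from portage.util._async.PipeReaderBlockingIO import PipeReaderBlockingIO
    from portage.util._eventloop.global_event_loop import global_event_loop

    proc = subprocess.Popen(["uname", "-a"], stdout=subprocess.PIPE)
    reader = PipeReaderBlockingIO(input_files={"out": proc.stdout},
        scheduler=global_event_loop())
    reader.start()
    reader.wait()
    output = reader.getvalue()   # bytes
    reader.close()
    proc.wait()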
68027 diff --git a/pym/portage/util/_async/PopenProcess.py b/pym/portage/util/_async/PopenProcess.py
68028 new file mode 100644
68029 index 0000000..2fc56d2
68030 --- /dev/null
68031 +++ b/pym/portage/util/_async/PopenProcess.py
68032 @@ -0,0 +1,33 @@
68033 +# Copyright 2012 Gentoo Foundation
68034 +# Distributed under the terms of the GNU General Public License v2
68035 +
68036 +from _emerge.SubProcess import SubProcess
68037 +
68038 +class PopenProcess(SubProcess):
68039 +
68040 + __slots__ = ("pipe_reader", "proc",)
68041 +
68042 + def _start(self):
68043 +
68044 + self.pid = self.proc.pid
68045 + self._registered = True
68046 +
68047 + if self.pipe_reader is None:
68048 + self._reg_id = self.scheduler.child_watch_add(
68049 + self.pid, self._child_watch_cb)
68050 + else:
68051 + try:
68052 + self.pipe_reader.scheduler = self.scheduler
68053 + except AttributeError:
68054 + pass
68055 + self.pipe_reader.addExitListener(self._pipe_reader_exit)
68056 + self.pipe_reader.start()
68057 +
68058 + def _pipe_reader_exit(self, pipe_reader):
68059 + self._reg_id = self.scheduler.child_watch_add(
68060 + self.pid, self._child_watch_cb)
68061 +
68062 + def _child_watch_cb(self, pid, condition, user_data=None):
68063 + self._reg_id = None
68064 + self._waitpid_cb(pid, condition)
68065 + self.wait()
68066
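PopenProcess adopts an existing subprocess.Popen object into the async framework, optionally pairing it with a pipe reader such as PipeLogger. A sketch (the command and log path are arbitrary; global_event_loop() is assumed as the scheduler):

    import subprocess

    from portage.util._async.PipeLogger import PipeLogger
    from portage.util._async.PopenProcess import PopenProcess
    from portage.util._eventloop.global_event_loop import global_event_loop

    proc = subprocess.Popen(["emerge", "--info"], stdout=subprocess.PIPE)
    task = PopenProcess(proc=proc,
        pipe_reader=PipeLogger(input_fd=proc.stdout,
            log_file_path="/tmp/emerge-info.log"),
        scheduler=global_event_loop())
    task.start()
    task.wait()
    # task.returncode reflects the exit status of the Popen process.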
68067 diff --git a/pym/portage/util/_async/SchedulerInterface.py b/pym/portage/util/_async/SchedulerInterface.py
68068 new file mode 100644
68069 index 0000000..2ab668e
68070 --- /dev/null
68071 +++ b/pym/portage/util/_async/SchedulerInterface.py
68072 @@ -0,0 +1,79 @@
68073 +# Copyright 2012-2013 Gentoo Foundation
68074 +# Distributed under the terms of the GNU General Public License v2
68075 +
68076 +import gzip
68077 +import errno
68078 +
68079 +from portage import _encodings
68080 +from portage import _unicode_encode
68081 +from portage.util import writemsg_level
68082 +from ..SlotObject import SlotObject
68083 +
68084 +class SchedulerInterface(SlotObject):
68085 +
68086 + _event_loop_attrs = ("IO_ERR", "IO_HUP", "IO_IN",
68087 + "IO_NVAL", "IO_OUT", "IO_PRI",
68088 + "child_watch_add", "idle_add", "io_add_watch",
68089 + "iteration", "source_remove", "timeout_add")
68090 +
68091 + __slots__ = _event_loop_attrs + ("_event_loop", "_is_background")
68092 +
68093 + def __init__(self, event_loop, is_background=None, **kwargs):
68094 + SlotObject.__init__(self, **kwargs)
68095 + self._event_loop = event_loop
68096 + if is_background is None:
68097 + is_background = self._return_false
68098 + self._is_background = is_background
68099 + for k in self._event_loop_attrs:
68100 + setattr(self, k, getattr(event_loop, k))
68101 +
68102 + @staticmethod
68103 + def _return_false():
68104 + return False
68105 +
68106 + def output(self, msg, log_path=None, background=None,
68107 + level=0, noiselevel=-1):
68108 + """
68109 + Output msg to stdout if not self._is_background(). If log_path
68110 + is not None then append msg to the log (appends with
68111 + compression if the filename extension of log_path corresponds
68112 + to a supported compression type).
68113 + """
68114 +
68115 + global_background = self._is_background()
68116 + if background is None or global_background:
68117 + # Use the global value if the task does not have a local
68118 + # background value. For example, parallel-fetch tasks run
68119 + # in the background while other tasks concurrently run in
68120 + # the foreground.
68121 + background = global_background
68122 +
68123 + msg_shown = False
68124 + if not background:
68125 + writemsg_level(msg, level=level, noiselevel=noiselevel)
68126 + msg_shown = True
68127 +
68128 + if log_path is not None:
68129 + try:
68130 + f = open(_unicode_encode(log_path,
68131 + encoding=_encodings['fs'], errors='strict'),
68132 + mode='ab')
68133 + f_real = f
68134 + except IOError as e:
68135 + if e.errno not in (errno.ENOENT, errno.ESTALE):
68136 + raise
68137 + if not msg_shown:
68138 + writemsg_level(msg, level=level, noiselevel=noiselevel)
68139 + else:
68140 +
68141 + if log_path.endswith('.gz'):
68142 + # NOTE: The empty filename argument prevents us from
68143 + # triggering a bug in python3 which causes GzipFile
68144 + # to raise AttributeError if fileobj.name is bytes
68145 + # instead of unicode.
68146 + f = gzip.GzipFile(filename='', mode='ab', fileobj=f)
68147 +
68148 + f.write(_unicode_encode(msg))
68149 + f.close()
68150 + if f_real is not f:
68151 + f_real.close()
68152
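SchedulerInterface mainly exists so that tasks can share one event loop plus the output() helper. A sketch of the logging behaviour described in the docstring (the log path is arbitrary; the '.gz' suffix selects compressed append mode):

    from portage.util._async.SchedulerInterface import SchedulerInterface
    from portage.util._eventloop.global_event_loop import global_event_loop

    sched = SchedulerInterface(global_event_loop())
    # Written to stdout (not background) and appended to the compressed log.
    sched.output("refreshing metadata cache...\n",
        log_path="/tmp/egencache.log.gz")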
68153 diff --git a/pym/portage/util/_async/TaskScheduler.py b/pym/portage/util/_async/TaskScheduler.py
68154 new file mode 100644
68155 index 0000000..35b3875
68156 --- /dev/null
68157 +++ b/pym/portage/util/_async/TaskScheduler.py
68158 @@ -0,0 +1,20 @@
68159 +# Copyright 2012 Gentoo Foundation
68160 +# Distributed under the terms of the GNU General Public License v2
68161 +
68162 +from .AsyncScheduler import AsyncScheduler
68163 +
68164 +class TaskScheduler(AsyncScheduler):
68165 +
68166 + """
68167 + A simple way to handle scheduling of AbstractPollTask instances. Simply
68168 + pass a task iterator into the constructor and call start(). Use the
68169 + poll, wait, or addExitListener methods to be notified when all of the
68170 + tasks have completed.
68171 + """
68172 +
68173 + def __init__(self, task_iter, **kwargs):
68174 + AsyncScheduler.__init__(self, **kwargs)
68175 + self._task_iter = task_iter
68176 +
68177 + def _next_task(self):
68178 + return next(self._task_iter)
68179
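Per the docstring, TaskScheduler only needs a task iterator. A sketch using FileCopier tasks from earlier; it assumes the underlying PollScheduler constructor accepts an event_loop keyword, which is not shown in this diff:

    from portage.util._async.FileCopier import FileCopier
    from portage.util._async.TaskScheduler import TaskScheduler
    from portage.util._eventloop.global_event_loop import global_event_loop

    copies = iter([
        FileCopier(src_path="/etc/hostname", dest_path="/tmp/hostname"),
        FileCopier(src_path="/etc/os-release", dest_path="/tmp/os-release"),
    ])
    scheduler = TaskScheduler(copies, max_jobs=2,
        event_loop=global_event_loop())
    scheduler.start()
    scheduler.wait()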
68180 diff --git a/pym/portage/util/_async/__init__.py b/pym/portage/util/_async/__init__.py
68181 new file mode 100644
68182 index 0000000..418ad86
68183 --- /dev/null
68184 +++ b/pym/portage/util/_async/__init__.py
68185 @@ -0,0 +1,2 @@
68186 +# Copyright 2012 Gentoo Foundation
68187 +# Distributed under the terms of the GNU General Public License v2
68188
68189 diff --git a/pym/portage/util/_async/run_main_scheduler.py b/pym/portage/util/_async/run_main_scheduler.py
68190 new file mode 100644
68191 index 0000000..10fed34
68192 --- /dev/null
68193 +++ b/pym/portage/util/_async/run_main_scheduler.py
68194 @@ -0,0 +1,41 @@
68195 +
68196 +import signal
68197 +
68198 +def run_main_scheduler(scheduler):
68199 + """
68200 + Start and run an AsyncScheduler (or compatible object), and handle
68201 + SIGINT or SIGTERM by calling its terminate() method and waiting
68202 + for it to clean up after itself. If SIGINT or SIGTERM is received,
68203 + return signum, else return None. Any previous SIGINT or SIGTERM
68204 + signal handlers are automatically saved and restored before
68205 + returning.
68206 + """
68207 +
68208 + received_signal = []
68209 +
68210 + def sighandler(signum, frame):
68211 + signal.signal(signal.SIGINT, signal.SIG_IGN)
68212 + signal.signal(signal.SIGTERM, signal.SIG_IGN)
68213 + received_signal.append(signum)
68214 + scheduler.terminate()
68215 +
68216 + earlier_sigint_handler = signal.signal(signal.SIGINT, sighandler)
68217 + earlier_sigterm_handler = signal.signal(signal.SIGTERM, sighandler)
68218 +
68219 + try:
68220 + scheduler.start()
68221 + scheduler.wait()
68222 + finally:
68223 + # Restore previous handlers
68224 + if earlier_sigint_handler is not None:
68225 + signal.signal(signal.SIGINT, earlier_sigint_handler)
68226 + else:
68227 + signal.signal(signal.SIGINT, signal.SIG_DFL)
68228 + if earlier_sigterm_handler is not None:
68229 + signal.signal(signal.SIGTERM, earlier_sigterm_handler)
68230 + else:
68231 + signal.signal(signal.SIGTERM, signal.SIG_DFL)
68232 +
68233 + if received_signal:
68234 + return received_signal[0]
68235 + return None
68236
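run_main_scheduler() is the top-level driver: it installs temporary SIGINT/SIGTERM handlers, runs the scheduler to completion, and reports which signal (if any) interrupted it. A sketch of the conventional caller pattern:

    import sys

    from portage.util._async.run_main_scheduler import run_main_scheduler

    # 'scheduler' is any AsyncScheduler instance, e.g. the TaskScheduler
    # built in the previous sketch.
    signum = run_main_scheduler(scheduler)
    if signum is not None:
        # Exit with the conventional 128 + signal number status.
        sys.exit(128 + signum)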
68237 diff --git a/pym/portage/util/_ctypes.py b/pym/portage/util/_ctypes.py
68238 new file mode 100644
68239 index 0000000..aeceebc
68240 --- /dev/null
68241 +++ b/pym/portage/util/_ctypes.py
68242 @@ -0,0 +1,47 @@
68243 +# Copyright 2012 Gentoo Foundation
68244 +# Distributed under the terms of the GNU General Public License v2
68245 +
68246 +try:
68247 + import ctypes
68248 + import ctypes.util
68249 +except ImportError:
68250 + ctypes = None
68251 +else:
68252 + try:
68253 + ctypes.cdll
68254 + except AttributeError:
68255 + ctypes = None
68256 +
68257 +_library_names = {}
68258 +
68259 +def find_library(name):
68260 + """
68261 + Calls ctypes.util.find_library() if the ctypes module is available,
68262 + and otherwise returns None. Results are cached for future invocations.
68263 + """
68264 + filename = _library_names.get(name)
68265 + if filename is None:
68266 + if ctypes is not None:
68267 + filename = ctypes.util.find_library(name)
68268 + if filename is None:
68269 + filename = False
68270 + _library_names[name] = filename
68271 +
68272 + if filename is False:
68273 + return None
68274 + return filename
68275 +
68276 +_library_handles = {}
68277 +
68278 +def LoadLibrary(name):
68279 + """
68280 + Calls ctypes.cdll.LoadLibrary(name) if the ctypes module is available,
68281 + and otherwise returns None. Results are cached for future invocations.
68282 + """
68283 + handle = _library_handles.get(name)
68284 +
68285 + if handle is None and ctypes is not None:
68286 + handle = ctypes.CDLL(name, use_errno=True)
68287 + _library_handles[name] = handle
68288 +
68289 + return handle
68290
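A sketch of the _ctypes helpers; both degrade to returning None when ctypes is unavailable, so callers can probe first:

    from portage.util._ctypes import LoadLibrary, find_library

    filename = find_library("c")   # e.g. 'libc.so.6', or None
    if filename is not None:
        libc = LoadLibrary(filename)
        if libc is not None:
            libc.getpid()   # use the handle through ctypes as usual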
68291 diff --git a/pym/portage/util/_desktop_entry.py b/pym/portage/util/_desktop_entry.py
68292 index 7901780..0b49547 100644
68293 --- a/pym/portage/util/_desktop_entry.py
68294 +++ b/pym/portage/util/_desktop_entry.py
68295 @@ -1,7 +1,8 @@
68296 -# Copyright 2012 Gentoo Foundation
68297 +# Copyright 2012-2013 Gentoo Foundation
68298 # Distributed under the terms of the GNU General Public License v2
68299
68300 import io
68301 +import re
68302 import subprocess
68303 import sys
68304
68305 @@ -10,7 +11,9 @@ try:
68306 except ImportError:
68307 from ConfigParser import Error as ConfigParserError, RawConfigParser
68308
68309 +import portage
68310 from portage import _encodings, _unicode_encode, _unicode_decode
68311 +from portage.util import writemsg
68312
68313 def parse_desktop_entry(path):
68314 """
68315 @@ -31,45 +34,71 @@ def parse_desktop_entry(path):
68316 encoding=_encodings['fs'], errors='strict'),
68317 mode='r', encoding=_encodings['repo.content'],
68318 errors='replace') as f:
68319 - read_file(f)
68320 + content = f.read()
68321 +
68322 + # In Python 3.2, read_file does not support bytes in file names
68323 + # (see bug #429544), so use StringIO to hide the file name.
68324 + read_file(io.StringIO(content))
68325
68326 return parser
68327
68328 -_ignored_service_errors = (
68329 - 'error: required key "Name" in group "Desktop Entry" is not present',
68330 - 'error: key "Actions" is present in group "Desktop Entry", but the type is "Service" while this key is only valid for type "Application"',
68331 - 'error: key "MimeType" is present in group "Desktop Entry", but the type is "Service" while this key is only valid for type "Application"',
68332 +_trivial_warnings = re.compile(r' looks redundant with value ')
68333 +
68334 +_ignored_errors = (
68335 + # Ignore error for emacs.desktop:
68336 + # https://bugs.freedesktop.org/show_bug.cgi?id=35844#c6
68337 + 'error: (will be fatal in the future): value "TextEditor" in key "Categories" in group "Desktop Entry" requires another category to be present among the following categories: Utility',
68338 + 'warning: key "Encoding" in group "Desktop Entry" is deprecated'
68339 +)
68340 +
68341 +_ShowIn_exemptions = (
68342 + # See bug #480586.
68343 + 'contains an unregistered value "Pantheon"',
68344 )
68345
68346 def validate_desktop_entry(path):
68347 args = ["desktop-file-validate", path]
68348 - if sys.hexversion < 0x3000000 or sys.hexversion >= 0x3020000:
68349 - # Python 3.1 does not support bytes in Popen args.
68350 - args = [_unicode_encode(x, errors='strict') for x in args]
68351 +
68352 + if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000:
68353 + # Python 3.1 _execvp throws TypeError for non-absolute executable
68354 + # path passed as bytes (see http://bugs.python.org/issue8513).
68355 + fullname = portage.process.find_binary(args[0])
68356 + if fullname is None:
68357 + raise portage.exception.CommandNotFound(args[0])
68358 + args[0] = fullname
68359 +
68360 + args = [_unicode_encode(x, errors='strict') for x in args]
68361 proc = subprocess.Popen(args,
68362 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
68363 output_lines = _unicode_decode(proc.communicate()[0]).splitlines()
68364 proc.wait()
68365
68366 if output_lines:
68367 - try:
68368 - desktop_entry = parse_desktop_entry(path)
68369 - except ConfigParserError:
68370 - pass
68371 - else:
68372 - if desktop_entry.has_section("Desktop Entry"):
68373 - try:
68374 - entry_type = desktop_entry.get("Desktop Entry", "Type")
68375 - except ConfigParserError:
68376 - pass
68377 - else:
68378 - if entry_type == "Service":
68379 - # Filter false errors for Type=Service (bug #414125).
68380 - filtered_output = []
68381 - for line in output_lines:
68382 - if line[len(path)+2:] in _ignored_service_errors:
68383 - continue
68384 - filtered_output.append(line)
68385 - output_lines = filtered_output
68386 + filtered_output = []
68387 + for line in output_lines:
68388 + msg = line[len(path)+2:]
68389 + # "hint:" output is new in desktop-file-utils-0.21
68390 + if msg.startswith('hint: ') or msg in _ignored_errors:
68391 + continue
68392 + if 'for key "NotShowIn" in group "Desktop Entry"' in msg or \
68393 + 'for key "OnlyShowIn" in group "Desktop Entry"' in msg:
68394 + exempt = False
68395 + for s in _ShowIn_exemptions:
68396 + if s in msg:
68397 + exempt = True
68398 + break
68399 + if exempt:
68400 + continue
68401 + filtered_output.append(line)
68402 + output_lines = filtered_output
68403 +
68404 + if output_lines:
68405 + output_lines = [line for line in output_lines
68406 + if _trivial_warnings.search(line) is None]
68407
68408 return output_lines
68409 +
68410 +if __name__ == "__main__":
68411 + for arg in sys.argv[1:]:
68412 + for line in validate_desktop_entry(arg):
68413 + writemsg(line + "\n", noiselevel=-1)
68414
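With the new __main__ block, the module can also be run directly (e.g. python -m portage.util._desktop_entry foo.desktop) to print the filtered validation output. Programmatic use looks like this sketch (the .desktop path is hypothetical):

    from portage.util._desktop_entry import validate_desktop_entry

    for line in validate_desktop_entry(
            "/usr/share/applications/foo.desktop"):
        print(line)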
68415 diff --git a/pym/portage/util/_dyn_libs/LinkageMapELF.py b/pym/portage/util/_dyn_libs/LinkageMapELF.py
68416 index e71ac73..3920f94 100644
68417 --- a/pym/portage/util/_dyn_libs/LinkageMapELF.py
68418 +++ b/pym/portage/util/_dyn_libs/LinkageMapELF.py
68419 @@ -1,4 +1,4 @@
68420 -# Copyright 1998-2011 Gentoo Foundation
68421 +# Copyright 1998-2013 Gentoo Foundation
68422 # Distributed under the terms of the GNU General Public License v2
68423
68424 import errno
68425 @@ -26,7 +26,7 @@ class LinkageMapELF(object):
68426 _soname_map_class = slot_dict_class(
68427 ("consumers", "providers"), prefix="")
68428
68429 - class _obj_properies_class(object):
68430 + class _obj_properties_class(object):
68431
68432 __slots__ = ("arch", "needed", "runpaths", "soname", "alt_paths",
68433 "owner",)
68434 @@ -316,7 +316,7 @@ class LinkageMapELF(object):
68435 myprops = obj_properties.get(obj_key)
68436 if myprops is None:
68437 indexed = False
68438 - myprops = self._obj_properies_class(
68439 + myprops = self._obj_properties_class(
68440 arch, needed, path, soname, [], owner)
68441 obj_properties[obj_key] = myprops
68442 # All object paths are added into the obj_properties tuple.
68443 @@ -678,7 +678,7 @@ class LinkageMapELF(object):
68444 rValue[soname].add(provider)
68445 return rValue
68446
68447 - def findConsumers(self, obj, exclude_providers=None):
68448 + def findConsumers(self, obj, exclude_providers=None, greedy=True):
68449 """
68450 Find consumers of an object or object key.
68451
68452 @@ -715,6 +715,9 @@ class LinkageMapELF(object):
68453 '/usr/lib/libssl.so.0.9.8'), and return True if the library is
68454 owned by a provider which is planned for removal.
68455 @type exclude_providers: collection
68456 + @param greedy: If True, then include consumers that are satisfied
68457 + by alternative providers, otherwise omit them. Default is True.
68458 + @type greedy: Boolean
68459 @rtype: set of strings (example: set(['/bin/foo', '/usr/bin/bar']))
68460 @return: The return value is a soname -> set-of-library-paths, where
68461 set-of-library-paths satisfy soname.
68462 @@ -769,16 +772,19 @@ class LinkageMapELF(object):
68463 defpath_keys = set(self._path_key(x) for x in self._defpath)
68464 satisfied_consumer_keys = set()
68465 if soname_node is not None:
68466 - if exclude_providers is not None:
68467 + if exclude_providers is not None or not greedy:
68468 relevant_dir_keys = set()
68469 for provider_key in soname_node.providers:
68470 + if not greedy and provider_key == obj_key:
68471 + continue
68472 provider_objs = self._obj_properties[provider_key].alt_paths
68473 for p in provider_objs:
68474 provider_excluded = False
68475 - for excluded_provider_isowner in exclude_providers:
68476 - if excluded_provider_isowner(p):
68477 - provider_excluded = True
68478 - break
68479 + if exclude_providers is not None:
68480 + for excluded_provider_isowner in exclude_providers:
68481 + if excluded_provider_isowner(p):
68482 + provider_excluded = True
68483 + break
68484 if not provider_excluded:
68485 # This provider is not excluded. It will
68486 # satisfy a consumer of this soname if it
68487
68488 diff --git a/pym/portage/util/_dyn_libs/PreservedLibsRegistry.py b/pym/portage/util/_dyn_libs/PreservedLibsRegistry.py
68489 index 4bc64db..a422ffe 100644
68490 --- a/pym/portage/util/_dyn_libs/PreservedLibsRegistry.py
68491 +++ b/pym/portage/util/_dyn_libs/PreservedLibsRegistry.py
68492 @@ -1,4 +1,4 @@
68493 -# Copyright 1998-2012 Gentoo Foundation
68494 +# Copyright 1998-2014 Gentoo Foundation
68495 # Distributed under the terms of the GNU General Public License v2
68496
68497 import errno
68498 @@ -25,6 +25,7 @@ from portage.versions import cpv_getkey
68499 from portage.locks import lockfile, unlockfile
68500
68501 if sys.hexversion >= 0x3000000:
68502 + # pylint: disable=W0622
68503 basestring = str
68504
68505 class PreservedLibsRegistry(object):
68506
68507 diff --git a/pym/portage/util/_dyn_libs/display_preserved_libs.py b/pym/portage/util/_dyn_libs/display_preserved_libs.py
68508 new file mode 100644
68509 index 0000000..b16478d
68510 --- /dev/null
68511 +++ b/pym/portage/util/_dyn_libs/display_preserved_libs.py
68512 @@ -0,0 +1,98 @@
68513 +# Copyright 2007-2013 Gentoo Foundation
68514 +# Distributed under the terms of the GNU General Public License v2
68515 +
68516 +from __future__ import print_function
68517 +
68518 +import logging
68519 +
68520 +import portage
68521 +from portage.output import colorize
68522 +
68523 +def display_preserved_libs(vardb):
68524 +
68525 + MAX_DISPLAY = 3
68526 +
68527 + plibdata = vardb._plib_registry.getPreservedLibs()
68528 + linkmap = vardb._linkmap
68529 + consumer_map = {}
68530 + owners = {}
68531 +
68532 + try:
68533 + linkmap.rebuild()
68534 + except portage.exception.CommandNotFound as e:
68535 + portage.util.writemsg_level("!!! Command Not Found: %s\n" % (e,),
68536 + level=logging.ERROR, noiselevel=-1)
68537 + else:
68538 + search_for_owners = set()
68539 + for cpv in plibdata:
68540 + internal_plib_keys = set(linkmap._obj_key(f) \
68541 + for f in plibdata[cpv])
68542 + for f in plibdata[cpv]:
68543 + if f in consumer_map:
68544 + continue
68545 + consumers = []
68546 + for c in linkmap.findConsumers(f, greedy=False):
68547 + # Filter out any consumers that are also preserved libs
68548 + # belonging to the same package as the provider.
68549 + if linkmap._obj_key(c) not in internal_plib_keys:
68550 + consumers.append(c)
68551 + consumers.sort()
68552 + consumer_map[f] = consumers
68553 + search_for_owners.update(consumers[:MAX_DISPLAY+1])
68554 +
68555 + owners = {}
68556 + for f in search_for_owners:
68557 + owner_set = set()
68558 + for owner in linkmap.getOwners(f):
68559 + owner_dblink = vardb._dblink(owner)
68560 + if owner_dblink.exists():
68561 + owner_set.add(owner_dblink)
68562 + if owner_set:
68563 + owners[f] = owner_set
68564 +
68565 + all_preserved = set()
68566 + all_preserved.update(*plibdata.values())
68567 +
68568 + for cpv in plibdata:
68569 + print(colorize("WARN", ">>>") + " package: %s" % cpv)
68570 + samefile_map = {}
68571 + for f in plibdata[cpv]:
68572 + obj_key = linkmap._obj_key(f)
68573 + alt_paths = samefile_map.get(obj_key)
68574 + if alt_paths is None:
68575 + alt_paths = set()
68576 + samefile_map[obj_key] = alt_paths
68577 + alt_paths.add(f)
68578 +
68579 + for alt_paths in samefile_map.values():
68580 + alt_paths = sorted(alt_paths)
68581 + for p in alt_paths:
68582 + print(colorize("WARN", " * ") + " - %s" % (p,))
68583 + f = alt_paths[0]
68584 + consumers = consumer_map.get(f, [])
68585 + consumers_non_preserved = [c for c in consumers
68586 + if c not in all_preserved]
68587 + if consumers_non_preserved:
68588 + # Filter the consumers that are preserved libraries, since
68589 + # they don't need to be rebuilt (see bug #461908).
68590 + consumers = consumers_non_preserved
68591 +
68592 + if len(consumers) == MAX_DISPLAY + 1:
68593 + # Display 1 extra consumer, instead of displaying
68594 + # "used by 1 other files".
68595 + max_display = MAX_DISPLAY + 1
68596 + else:
68597 + max_display = MAX_DISPLAY
68598 + for c in consumers[:max_display]:
68599 + if c in all_preserved:
68600 + # The owner is displayed elsewhere due to having
68601 + # its libs preserved, so distinguish this special
68602 + # case (see bug #461908).
68603 + owners_desc = "preserved"
68604 + else:
68605 + owners_desc = ", ".join(x.mycpv for x in owners.get(c, []))
68606 + print(colorize("WARN", " * ") + " used by %s (%s)" % \
68607 + (c, owners_desc))
68608 + if len(consumers) > max_display:
68609 + print(colorize("WARN", " * ") + " used by %d other files" %
68610 + (len(consumers) - max_display))
68611
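display_preserved_libs() expects a vartree dbapi whose preserved-libs registry has entries, and it is where the new greedy=False mode of LinkageMapELF.findConsumers() added above gets exercised. A sketch, assuming the pre-existing _plib_registry and hasEntries() vartree internals that are not part of this diff:

    import portage
    from portage.util._dyn_libs.display_preserved_libs import (
        display_preserved_libs)

    vardb = portage.db[portage.root]["vartree"].dbapi
    plib_registry = vardb._plib_registry
    if plib_registry is not None and plib_registry.hasEntries():
        display_preserved_libs(vardb)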
68612 diff --git a/pym/portage/util/_eventloop/EventLoop.py b/pym/portage/util/_eventloop/EventLoop.py
68613 index bbbce52..9ffcc74 100644
68614 --- a/pym/portage/util/_eventloop/EventLoop.py
68615 +++ b/pym/portage/util/_eventloop/EventLoop.py
68616 @@ -1,20 +1,37 @@
68617 -# Copyright 1999-2012 Gentoo Foundation
68618 +# Copyright 1999-2013 Gentoo Foundation
68619 # Distributed under the terms of the GNU General Public License v2
68620
68621 import errno
68622 -import fcntl
68623 import logging
68624 import os
68625 import select
68626 import signal
68627 +import sys
68628 import time
68629
68630 +try:
68631 + import fcntl
68632 +except ImportError:
68633 + # http://bugs.jython.org/issue1074
68634 + fcntl = None
68635 +
68636 +try:
68637 + import threading
68638 +except ImportError:
68639 + import dummy_threading as threading
68640 +
68641 from portage.util import writemsg_level
68642 from ..SlotObject import SlotObject
68643 from .PollConstants import PollConstants
68644 from .PollSelectAdapter import PollSelectAdapter
68645
68646 class EventLoop(object):
68647 + """
68648 + An event loop, intended to be compatible with the GLib event loop.
68649 + Call the iteration method in order to execute one iteration of the
68650 + loop. The idle_add and timeout_add methods serve as thread-safe
68651 + means to interact with the loop's thread.
68652 + """
68653
68654 supports_multiprocessing = True
68655
68656 @@ -43,7 +60,9 @@ class EventLoop(object):
68657 that global_event_loop does not need constructor arguments)
68658 @type main: bool
68659 """
68660 - self._use_signal = main
68661 + self._use_signal = main and fcntl is not None
68662 + self._thread_rlock = threading.RLock()
68663 + self._thread_condition = threading.Condition(self._thread_rlock)
68664 self._poll_event_queue = []
68665 self._poll_event_handlers = {}
68666 self._poll_event_handler_ids = {}
68667 @@ -52,14 +71,48 @@ class EventLoop(object):
68668 self._idle_callbacks = {}
68669 self._timeout_handlers = {}
68670 self._timeout_interval = None
68671 - self._poll_obj = create_poll_instance()
68672
68673 - self.IO_ERR = PollConstants.POLLERR
68674 - self.IO_HUP = PollConstants.POLLHUP
68675 - self.IO_IN = PollConstants.POLLIN
68676 - self.IO_NVAL = PollConstants.POLLNVAL
68677 - self.IO_OUT = PollConstants.POLLOUT
68678 - self.IO_PRI = PollConstants.POLLPRI
68679 + self._poll_obj = None
68680 + try:
68681 + select.epoll
68682 + except AttributeError:
68683 + pass
68684 + else:
68685 + try:
68686 + epoll_obj = select.epoll()
68687 + except IOError:
68688 + # This happens with Linux 2.4 kernels:
68689 + # IOError: [Errno 38] Function not implemented
68690 + pass
68691 + else:
68692 +
68693 + # FD_CLOEXEC is enabled by default in Python >=3.4.
68694 + if sys.hexversion < 0x3040000 and fcntl is not None:
68695 + try:
68696 + fcntl.FD_CLOEXEC
68697 + except AttributeError:
68698 + pass
68699 + else:
68700 + fcntl.fcntl(epoll_obj.fileno(), fcntl.F_SETFD,
68701 + fcntl.fcntl(epoll_obj.fileno(),
68702 + fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
68703 +
68704 + self._poll_obj = _epoll_adapter(epoll_obj)
68705 + self.IO_ERR = select.EPOLLERR
68706 + self.IO_HUP = select.EPOLLHUP
68707 + self.IO_IN = select.EPOLLIN
68708 + self.IO_NVAL = 0
68709 + self.IO_OUT = select.EPOLLOUT
68710 + self.IO_PRI = select.EPOLLPRI
68711 +
68712 + if self._poll_obj is None:
68713 + self._poll_obj = create_poll_instance()
68714 + self.IO_ERR = PollConstants.POLLERR
68715 + self.IO_HUP = PollConstants.POLLHUP
68716 + self.IO_IN = PollConstants.POLLIN
68717 + self.IO_NVAL = PollConstants.POLLNVAL
68718 + self.IO_OUT = PollConstants.POLLOUT
68719 + self.IO_PRI = PollConstants.POLLPRI
68720
68721 self._child_handlers = {}
68722 self._sigchld_read = None
68723 @@ -67,6 +120,14 @@ class EventLoop(object):
68724 self._sigchld_src_id = None
68725 self._pid = os.getpid()
68726
68727 + def _new_source_id(self):
68728 + """
68729 + Generate a new source id. This method is thread-safe.
68730 + """
68731 + with self._thread_rlock:
68732 + self._event_handler_id += 1
68733 + return self._event_handler_id
68734 +
68735 def _poll(self, timeout=None):
68736 """
68737 All poll() calls pass through here. The poll events
68738 @@ -85,9 +146,11 @@ class EventLoop(object):
68739 try:
68740 self._poll_event_queue.extend(self._poll_obj.poll(timeout))
68741 break
68742 - except select.error as e:
68743 + except (IOError, select.error) as e:
68744 # Silently handle EINTR, which is normal when we have
68745 - # received a signal such as SIGINT.
68746 + # received a signal such as SIGINT (epoll objects may
68747 + # raise IOError rather than select.error, at least in
68748 + # Python 3.2).
68749 if not (e.args and e.args[0] == errno.EINTR):
68750 writemsg_level("\n!!! select error: %s\n" % (e,),
68751 level=logging.ERROR, noiselevel=-1)
68752 @@ -101,7 +164,19 @@ class EventLoop(object):
68753
68754 def iteration(self, *args):
68755 """
68756 - Like glib.MainContext.iteration(), runs a single iteration.
68757 + Like glib.MainContext.iteration(), runs a single iteration. In order
68758 + to avoid blocking forever when may_block is True (the default),
68759 + callers must be careful to ensure that at least one of the following
68760 + conditions is met:
68761 + 1) An event source or timeout is registered which is guaranteed
68762 +	to trigger at least one event (a call to an idle function
68763 + only counts as an event if it returns a False value which
68764 + causes it to stop being called)
68765 + 2) Another thread is guaranteed to call one of the thread-safe
68766 + methods which notify iteration to stop waiting (such as
68767 + idle_add or timeout_add).
68768 + These rules ensure that iteration is able to block until an event
68769 + arrives, without doing any busy waiting that would waste CPU time.
68770 @type may_block: bool
68771 @param may_block: if True the call may block waiting for an event
68772 (default is True).
68773 @@ -120,23 +195,32 @@ class EventLoop(object):
68774 event_queue = self._poll_event_queue
68775 event_handlers = self._poll_event_handlers
68776 events_handled = 0
68777 + timeouts_checked = False
68778
68779 if not event_handlers:
68780 - if self._run_timeouts():
68781 - events_handled += 1
68782 - if not event_handlers:
68783 - if not events_handled and may_block and \
68784 - self._timeout_interval is not None:
68785 + with self._thread_condition:
68786 + if self._run_timeouts():
68787 + events_handled += 1
68788 + timeouts_checked = True
68789 + if not event_handlers and not events_handled and may_block:
68790 # Block so that we don't waste cpu time by looping too
68791 # quickly. This makes EventLoop useful for code that needs
68792 # to wait for timeout callbacks regardless of whether or
68793 # not any IO handlers are currently registered.
68794 - try:
68795 - self._poll(timeout=self._timeout_interval)
68796 - except StopIteration:
68797 - pass
68798 + timeout = self._get_poll_timeout()
68799 + if timeout is None:
68800 + wait_timeout = None
68801 + else:
68802 + wait_timeout = float(timeout) / 1000
68803 + # NOTE: In order to avoid a possible infinite wait when
68804 + # wait_timeout is None, the previous _run_timeouts()
68805 + # call must have returned False *with* _thread_condition
68806 + # acquired. Otherwise, we would risk going to sleep after
68807 + # our only notify event has already passed.
68808 + self._thread_condition.wait(wait_timeout)
68809 if self._run_timeouts():
68810 events_handled += 1
68811 + timeouts_checked = True
68812
68813 # If any timeouts have executed, then return immediately,
68814 # in order to minimize latency in termination of iteration
68815 @@ -147,14 +231,18 @@ class EventLoop(object):
68816 if not event_queue:
68817
68818 if may_block:
68819 - if self._child_handlers:
68820 - if self._timeout_interval is None:
68821 - timeout = self._sigchld_interval
68822 - else:
68823 - timeout = min(self._sigchld_interval,
68824 - self._timeout_interval)
68825 - else:
68826 - timeout = self._timeout_interval
68827 + timeout = self._get_poll_timeout()
68828 +
68829 + # Avoid blocking for IO if there are any timeout
68830 + # or idle callbacks available to process.
68831 + if timeout != 0 and not timeouts_checked:
68832 + if self._run_timeouts():
68833 + events_handled += 1
68834 + timeouts_checked = True
68835 + if events_handled:
68836 + # Minimize latency for loops controlled
68837 + # by timeout or idle callback events.
68838 + timeout = 0
68839 else:
68840 timeout = 0
68841
68842 @@ -170,17 +258,37 @@ class EventLoop(object):
68843 while event_queue:
68844 events_handled += 1
68845 f, event = event_queue.pop()
68846 - x = event_handlers[f]
68847 + try:
68848 + x = event_handlers[f]
68849 + except KeyError:
68850 + # This is known to be triggered by the epoll
68851 + # implementation in qemu-user-1.2.2, and appears
68852 + # to be harmless (see bug #451326).
68853 + continue
68854 if not x.callback(f, event, *x.args):
68855 self.source_remove(x.source_id)
68856
68857 - # Run timeouts last, in order to minimize latency in
68858 - # termination of iteration loops that they may control.
68859 - if self._run_timeouts():
68860 - events_handled += 1
68861 + if not timeouts_checked:
68862 + if self._run_timeouts():
68863 + events_handled += 1
68864 + timeouts_checked = True
68865
68866 return bool(events_handled)
68867
68868 + def _get_poll_timeout(self):
68869 +
68870 + with self._thread_rlock:
68871 + if self._child_handlers:
68872 + if self._timeout_interval is None:
68873 + timeout = self._sigchld_interval
68874 + else:
68875 + timeout = min(self._sigchld_interval,
68876 + self._timeout_interval)
68877 + else:
68878 + timeout = self._timeout_interval
68879 +
68880 + return timeout
68881 +
68882 def child_watch_add(self, pid, callback, data=None):
68883 """
68884 Like glib.child_watch_add(), sets callback to be called with the
68885 @@ -201,18 +309,29 @@ class EventLoop(object):
68886 @rtype: int
68887 @return: an integer ID
68888 """
68889 - self._event_handler_id += 1
68890 - source_id = self._event_handler_id
68891 + source_id = self._new_source_id()
68892 self._child_handlers[source_id] = self._child_callback_class(
68893 callback=callback, data=data, pid=pid, source_id=source_id)
68894
68895 if self._use_signal:
68896 if self._sigchld_read is None:
68897 self._sigchld_read, self._sigchld_write = os.pipe()
68898 +
68899 fcntl.fcntl(self._sigchld_read, fcntl.F_SETFL,
68900 fcntl.fcntl(self._sigchld_read,
68901 fcntl.F_GETFL) | os.O_NONBLOCK)
68902
68903 + # FD_CLOEXEC is enabled by default in Python >=3.4.
68904 + if sys.hexversion < 0x3040000:
68905 + try:
68906 + fcntl.FD_CLOEXEC
68907 + except AttributeError:
68908 + pass
68909 + else:
68910 + fcntl.fcntl(self._sigchld_read, fcntl.F_SETFD,
68911 + fcntl.fcntl(self._sigchld_read,
68912 + fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
68913 +
68914 # The IO watch is dynamically registered and unregistered as
68915 # needed, since we don't want to consider it as a valid source
68916 # of events when there are no child listeners. It's important
68917 @@ -276,22 +395,25 @@ class EventLoop(object):
68918 """
68919 Like glib.idle_add(), if callback returns False it is
68920 automatically removed from the list of event sources and will
68921 - not be called again.
68922 + not be called again. This method is thread-safe.
68923
68924 @type callback: callable
68925 @param callback: a function to call
68926 @rtype: int
68927 @return: an integer ID
68928 """
68929 - self._event_handler_id += 1
68930 - source_id = self._event_handler_id
68931 - self._idle_callbacks[source_id] = self._idle_callback_class(
68932 - args=args, callback=callback, source_id=source_id)
68933 + with self._thread_condition:
68934 + source_id = self._new_source_id()
68935 + self._idle_callbacks[source_id] = self._idle_callback_class(
68936 + args=args, callback=callback, source_id=source_id)
68937 + self._thread_condition.notify()
68938 return source_id
68939
68940 def _run_idle_callbacks(self):
68941 + # assumes caller has acquired self._thread_rlock
68942 if not self._idle_callbacks:
68943 - return
68944 + return False
68945 + state_change = 0
68946 		# Iterate over our local list, since self._idle_callbacks can be
68947 		# modified during the execution of these callbacks.
68948 for x in list(self._idle_callbacks.values()):
68949 @@ -304,26 +426,32 @@ class EventLoop(object):
68950 x.calling = True
68951 try:
68952 if not x.callback(*x.args):
68953 + state_change += 1
68954 self.source_remove(x.source_id)
68955 finally:
68956 x.calling = False
68957
68958 + return bool(state_change)
68959 +
68960 def timeout_add(self, interval, function, *args):
68961 """
68962 Like glib.timeout_add(), interval argument is the number of
68963 milliseconds between calls to your function, and your function
68964 should return False to stop being called, or True to continue
68965 being called. Any additional positional arguments given here
68966 - are passed to your function when it's called.
68967 + are passed to your function when it's called. This method is
68968 + thread-safe.
68969 """
68970 - self._event_handler_id += 1
68971 - source_id = self._event_handler_id
68972 - self._timeout_handlers[source_id] = \
68973 - self._timeout_handler_class(
68974 - interval=interval, function=function, args=args,
68975 - source_id=source_id, timestamp=time.time())
68976 - if self._timeout_interval is None or self._timeout_interval > interval:
68977 - self._timeout_interval = interval
68978 + with self._thread_condition:
68979 + source_id = self._new_source_id()
68980 + self._timeout_handlers[source_id] = \
68981 + self._timeout_handler_class(
68982 + interval=interval, function=function, args=args,
68983 + source_id=source_id, timestamp=time.time())
68984 + if self._timeout_interval is None or \
68985 + self._timeout_interval > interval:
68986 + self._timeout_interval = interval
68987 + self._thread_condition.notify()
68988 return source_id
68989
68990 def _run_timeouts(self):
68991 @@ -333,37 +461,40 @@ class EventLoop(object):
68992 if self._poll_child_processes():
68993 calls += 1
68994
68995 - self._run_idle_callbacks()
68996 -
68997 - if not self._timeout_handlers:
68998 - return bool(calls)
68999 + with self._thread_rlock:
69000
69001 - ready_timeouts = []
69002 - current_time = time.time()
69003 - for x in self._timeout_handlers.values():
69004 - elapsed_seconds = current_time - x.timestamp
69005 - # elapsed_seconds < 0 means the system clock has been adjusted
69006 - if elapsed_seconds < 0 or \
69007 - (x.interval - 1000 * elapsed_seconds) <= 0:
69008 - ready_timeouts.append(x)
69009 + if self._run_idle_callbacks():
69010 + calls += 1
69011
69012 - # Iterate of our local list, since self._timeout_handlers can be
69013 - # modified during the exection of these callbacks.
69014 - for x in ready_timeouts:
69015 - if x.source_id not in self._timeout_handlers:
69016 - # it got cancelled while executing another timeout
69017 - continue
69018 - if x.calling:
69019 - # don't call it recursively
69020 - continue
69021 - calls += 1
69022 - x.calling = True
69023 - try:
69024 - x.timestamp = time.time()
69025 - if not x.function(*x.args):
69026 - self.source_remove(x.source_id)
69027 - finally:
69028 - x.calling = False
69029 + if not self._timeout_handlers:
69030 + return bool(calls)
69031 +
69032 + ready_timeouts = []
69033 + current_time = time.time()
69034 + for x in self._timeout_handlers.values():
69035 + elapsed_seconds = current_time - x.timestamp
69036 + # elapsed_seconds < 0 means the system clock has been adjusted
69037 + if elapsed_seconds < 0 or \
69038 + (x.interval - 1000 * elapsed_seconds) <= 0:
69039 + ready_timeouts.append(x)
69040 +
69041 +			# Iterate over our local list, since self._timeout_handlers can be
69042 +			# modified during the execution of these callbacks.
69043 + for x in ready_timeouts:
69044 + if x.source_id not in self._timeout_handlers:
69045 + # it got cancelled while executing another timeout
69046 + continue
69047 + if x.calling:
69048 + # don't call it recursively
69049 + continue
69050 + calls += 1
69051 + x.calling = True
69052 + try:
69053 + x.timestamp = time.time()
69054 + if not x.function(*x.args):
69055 + self.source_remove(x.source_id)
69056 + finally:
69057 + x.calling = False
69058
69059 return bool(calls)
69060
69061 @@ -385,8 +516,7 @@ class EventLoop(object):
69062 """
69063 if f in self._poll_event_handlers:
69064 raise AssertionError("fd %d is already registered" % f)
69065 - self._event_handler_id += 1
69066 - source_id = self._event_handler_id
69067 + source_id = self._new_source_id()
69068 self._poll_event_handler_ids[source_id] = f
69069 self._poll_event_handlers[f] = self._io_handler_class(
69070 args=args, callback=callback, f=f, source_id=source_id)
69071 @@ -406,18 +536,21 @@ class EventLoop(object):
69072 self.source_remove(self._sigchld_src_id)
69073 self._sigchld_src_id = None
69074 return True
69075 - idle_callback = self._idle_callbacks.pop(reg_id, None)
69076 - if idle_callback is not None:
69077 - return True
69078 - timeout_handler = self._timeout_handlers.pop(reg_id, None)
69079 - if timeout_handler is not None:
69080 - if timeout_handler.interval == self._timeout_interval:
69081 - if self._timeout_handlers:
69082 - self._timeout_interval = \
69083 - min(x.interval for x in self._timeout_handlers.values())
69084 - else:
69085 - self._timeout_interval = None
69086 - return True
69087 +
69088 + with self._thread_rlock:
69089 + idle_callback = self._idle_callbacks.pop(reg_id, None)
69090 + if idle_callback is not None:
69091 + return True
69092 + timeout_handler = self._timeout_handlers.pop(reg_id, None)
69093 + if timeout_handler is not None:
69094 + if timeout_handler.interval == self._timeout_interval:
69095 + if self._timeout_handlers:
69096 + self._timeout_interval = min(x.interval
69097 + for x in self._timeout_handlers.values())
69098 + else:
69099 + self._timeout_interval = None
69100 + return True
69101 +
69102 f = self._poll_event_handler_ids.pop(reg_id, None)
69103 if f is None:
69104 return False
69105 @@ -467,7 +600,12 @@ def can_poll_device():
69106 return _can_poll_device
69107
69108 p = select.poll()
69109 - p.register(dev_null.fileno(), PollConstants.POLLIN)
69110 + try:
69111 + p.register(dev_null.fileno(), PollConstants.POLLIN)
69112 + except TypeError:
69113 + # Jython: Object 'org.python.core.io.FileIO@f8f175' is not watchable
69114 + _can_poll_device = False
69115 + return _can_poll_device
69116
69117 invalid_request = False
69118 for f, event in p.poll():
69119 @@ -488,3 +626,37 @@ def create_poll_instance():
69120 if can_poll_device():
69121 return select.poll()
69122 return PollSelectAdapter()
69123 +
69124 +class _epoll_adapter(object):
69125 + """
69126 + Wraps a select.epoll instance in order to make it compatible
69127 + with select.poll instances. This is necessary since epoll instances
69128 + interpret timeout arguments differently. Note that the file descriptor
69129 + that is associated with an epoll instance will close automatically when
69130 + it is garbage collected, so it's not necessary to close it explicitly.
69131 + """
69132 + __slots__ = ('_epoll_obj',)
69133 +
69134 + def __init__(self, epoll_obj):
69135 + self._epoll_obj = epoll_obj
69136 +
69137 + def register(self, fd, *args):
69138 + self._epoll_obj.register(fd, *args)
69139 +
69140 + def unregister(self, fd):
69141 + self._epoll_obj.unregister(fd)
69142 +
69143 + def poll(self, *args):
69144 + if len(args) > 1:
69145 + raise TypeError(
69146 + "poll expected at most 2 arguments, got " + \
69147 + repr(1 + len(args)))
69148 + timeout = -1
69149 + if args:
69150 + timeout = args[0]
69151 + if timeout is None or timeout < 0:
69152 + timeout = -1
69153 + elif timeout != 0:
69154 + timeout = float(timeout) / 1000
69155 +
69156 + return self._epoll_obj.poll(timeout)
69157
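The key to the new thread-safety scheme above is that idle_add() and timeout_add() register their handlers and call notify() while holding the same condition that iteration() waits on, so a blocked iteration wakes up as soon as another thread adds a source. A condensed, standalone sketch of that notify-on-register pattern (not the real EventLoop class, just the idea):

    import threading

    class TinyLoop(object):
        """Condensed illustration of the notify-on-register pattern."""

        def __init__(self):
            self._lock = threading.RLock()
            self._cond = threading.Condition(self._lock)
            self._pending = []

        def idle_add(self, callback, *args):
            # Register under the condition and notify, so a thread blocked
            # in iteration() stops waiting as soon as a new source appears.
            with self._cond:
                self._pending.append((callback, args))
                self._cond.notify()

        def iteration(self, timeout=None):
            with self._cond:
                if not self._pending:
                    self._cond.wait(timeout)
                pending, self._pending = self._pending, []
            for callback, args in pending:
                callback(*args)
            return bool(pending)

    def _report(msg):
        print(msg)

    loop = TinyLoop()
    threading.Timer(0.2, loop.idle_add, args=(_report, "woken by another thread")).start()
    loop.iteration()  # blocks until the timer thread calls idle_add()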
69158 diff --git a/pym/portage/util/_eventloop/PollSelectAdapter.py b/pym/portage/util/_eventloop/PollSelectAdapter.py
69159 index 17e63d9..244788c 100644
69160 --- a/pym/portage/util/_eventloop/PollSelectAdapter.py
69161 +++ b/pym/portage/util/_eventloop/PollSelectAdapter.py
69162 @@ -64,7 +64,7 @@ class PollSelectAdapter(object):
69163 if timeout is not None and timeout < 0:
69164 timeout = None
69165 if timeout is not None:
69166 - select_args.append(timeout / 1000)
69167 + select_args.append(float(timeout) / 1000)
69168
69169 select_events = select.select(*select_args)
69170 poll_events = []
69171
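The one-line change above guards against Python 2 integer division: a poll()-style timeout given in milliseconds would otherwise truncate to zero seconds before reaching select(). A quick illustration (plain Python, not portage code):

    # Under Python 2, 500 / 1000 == 0, so select() would not block at all.
    # Forcing float division preserves the sub-second timeout.
    timeout_ms = 500
    print(float(timeout_ms) / 1000)  # 0.5 seconds, what select.select() expects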
69172 diff --git a/pym/portage/util/_get_vm_info.py b/pym/portage/util/_get_vm_info.py
69173 new file mode 100644
69174 index 0000000..e8ad938
69175 --- /dev/null
69176 +++ b/pym/portage/util/_get_vm_info.py
69177 @@ -0,0 +1,80 @@
69178 +# Copyright 2013 Gentoo Foundation
69179 +# Distributed under the terms of the GNU General Public License v2
69180 +
69181 +import os
69182 +import platform
69183 +import subprocess
69184 +
69185 +from portage import _unicode_decode
69186 +
69187 +def get_vm_info():
69188 +
69189 + vm_info = {}
69190 +
69191 + if platform.system() == 'Linux':
69192 + try:
69193 + proc = subprocess.Popen(["free"],
69194 + stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
69195 + except OSError:
69196 + pass
69197 + else:
69198 + output = _unicode_decode(proc.communicate()[0])
69199 + if proc.wait() == os.EX_OK:
69200 + for line in output.splitlines():
69201 + line = line.split()
69202 + if len(line) < 2:
69203 + continue
69204 + if line[0] == "Mem:":
69205 + try:
69206 + vm_info["ram.total"] = int(line[1]) * 1024
69207 + except ValueError:
69208 + pass
69209 + if len(line) > 3:
69210 + try:
69211 + vm_info["ram.free"] = int(line[3]) * 1024
69212 + except ValueError:
69213 + pass
69214 + elif line[0] == "Swap:":
69215 + try:
69216 + vm_info["swap.total"] = int(line[1]) * 1024
69217 + except ValueError:
69218 + pass
69219 + if len(line) > 3:
69220 + try:
69221 + vm_info["swap.free"] = int(line[3]) * 1024
69222 + except ValueError:
69223 + pass
69224 +
69225 + else:
69226 +
69227 + try:
69228 + proc = subprocess.Popen(["sysctl", "-a"],
69229 + stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
69230 + except OSError:
69231 + pass
69232 + else:
69233 + output = _unicode_decode(proc.communicate()[0])
69234 + if proc.wait() == os.EX_OK:
69235 + for line in output.splitlines():
69236 + line = line.split(":", 1)
69237 + if len(line) != 2:
69238 + continue
69239 + line[1] = line[1].strip()
69240 + if line[0] == "hw.physmem":
69241 + try:
69242 + vm_info["ram.total"] = int(line[1])
69243 + except ValueError:
69244 + pass
69245 + elif line[0] == "vm.swap_total":
69246 + try:
69247 + vm_info["swap.total"] = int(line[1])
69248 + except ValueError:
69249 + pass
69250 + elif line[0] == "Free Memory Pages":
69251 + if line[1][-1] == "K":
69252 + try:
69253 + vm_info["ram.free"] = int(line[1][:-1]) * 1024
69254 + except ValueError:
69255 + pass
69256 +
69257 + return vm_info
69258
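get_vm_info() shells out to free(1) on Linux (values are reported in KiB, hence the * 1024) and to "sysctl -a" elsewhere, and returns whichever of ram.total/ram.free/swap.total/swap.free it managed to parse. Assuming a portage checkout is importable, usage looks roughly like:

    from portage.util._get_vm_info import get_vm_info

    info = get_vm_info()  # keys are simply absent if the tool output could not be parsed
    ram_total = info.get("ram.total")
    if ram_total is not None:
        print("RAM: %.1f MiB total" % (ram_total / 1024.0 / 1024))
    if "swap.free" in info:
        print("free swap: %d bytes" % info["swap.free"])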
69259 diff --git a/pym/portage/util/_info_files.py b/pym/portage/util/_info_files.py
69260 new file mode 100644
69261 index 0000000..fabf74b
69262 --- /dev/null
69263 +++ b/pym/portage/util/_info_files.py
69264 @@ -0,0 +1,138 @@
69265 +# Copyright 1999-2012 Gentoo Foundation
69266 +# Distributed under the terms of the GNU General Public License v2
69267 +
69268 +import errno
69269 +import logging
69270 +import re
69271 +import stat
69272 +import subprocess
69273 +
69274 +import portage
69275 +from portage import os
69276 +
69277 +def chk_updated_info_files(root, infodirs, prev_mtimes):
69278 +
69279 + if os.path.exists("/usr/bin/install-info"):
69280 + out = portage.output.EOutput()
69281 + regen_infodirs = []
69282 + for z in infodirs:
69283 + if z == '':
69284 + continue
69285 + inforoot = portage.util.normalize_path(root + z)
69286 + if os.path.isdir(inforoot) and \
69287 + not [x for x in os.listdir(inforoot) \
69288 + if x.startswith('.keepinfodir')]:
69289 + infomtime = os.stat(inforoot)[stat.ST_MTIME]
69290 + if inforoot not in prev_mtimes or \
69291 + prev_mtimes[inforoot] != infomtime:
69292 + regen_infodirs.append(inforoot)
69293 +
69294 + if not regen_infodirs:
69295 + portage.util.writemsg_stdout("\n")
69296 + if portage.util.noiselimit >= 0:
69297 + out.einfo("GNU info directory index is up-to-date.")
69298 + else:
69299 + portage.util.writemsg_stdout("\n")
69300 + if portage.util.noiselimit >= 0:
69301 + out.einfo("Regenerating GNU info directory index...")
69302 +
69303 + dir_extensions = ("", ".gz", ".bz2")
69304 + icount = 0
69305 + badcount = 0
69306 + errmsg = ""
69307 + for inforoot in regen_infodirs:
69308 + if inforoot == '':
69309 + continue
69310 +
69311 + if not os.path.isdir(inforoot) or \
69312 + not os.access(inforoot, os.W_OK):
69313 + continue
69314 +
69315 + file_list = os.listdir(inforoot)
69316 + file_list.sort()
69317 + dir_file = os.path.join(inforoot, "dir")
69318 + moved_old_dir = False
69319 + processed_count = 0
69320 + for x in file_list:
69321 + if x.startswith(".") or \
69322 + os.path.isdir(os.path.join(inforoot, x)):
69323 + continue
69324 + if x.startswith("dir"):
69325 + skip = False
69326 + for ext in dir_extensions:
69327 + if x == "dir" + ext or \
69328 + x == "dir" + ext + ".old":
69329 + skip = True
69330 + break
69331 + if skip:
69332 + continue
69333 + if processed_count == 0:
69334 + for ext in dir_extensions:
69335 + try:
69336 + os.rename(dir_file + ext, dir_file + ext + ".old")
69337 + moved_old_dir = True
69338 + except EnvironmentError as e:
69339 + if e.errno != errno.ENOENT:
69340 + raise
69341 + del e
69342 + processed_count += 1
69343 + try:
69344 + proc = subprocess.Popen(
69345 + ['/usr/bin/install-info',
69346 + '--dir-file=%s' % os.path.join(inforoot, "dir"),
69347 + os.path.join(inforoot, x)],
69348 + env=dict(os.environ, LANG="C", LANGUAGE="C"),
69349 + stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
69350 + except OSError:
69351 + myso = None
69352 + else:
69353 + myso = portage._unicode_decode(
69354 + proc.communicate()[0]).rstrip("\n")
69355 + proc.wait()
69356 + existsstr = "already exists, for file `"
69357 + if myso:
69358 + if re.search(existsstr, myso):
69359 + # Already exists... Don't increment the count for this.
69360 + pass
69361 + elif myso[:44] == "install-info: warning: no info dir entry in ":
69362 + # This info file doesn't contain a DIR-header: install-info produces this
69363 + # (harmless) warning (the --quiet switch doesn't seem to work).
69364 + # Don't increment the count for this.
69365 + pass
69366 + else:
69367 + badcount += 1
69368 + errmsg += myso + "\n"
69369 + icount += 1
69370 +
69371 + if moved_old_dir and not os.path.exists(dir_file):
69372 + # We didn't generate a new dir file, so put the old file
69373 + # back where it was originally found.
69374 + for ext in dir_extensions:
69375 + try:
69376 + os.rename(dir_file + ext + ".old", dir_file + ext)
69377 + except EnvironmentError as e:
69378 + if e.errno != errno.ENOENT:
69379 + raise
69380 + del e
69381 +
69382 +				# Clean up dir.old cruft so that it doesn't prevent
69383 + # unmerge of otherwise empty directories.
69384 + for ext in dir_extensions:
69385 + try:
69386 + os.unlink(dir_file + ext + ".old")
69387 + except EnvironmentError as e:
69388 + if e.errno != errno.ENOENT:
69389 + raise
69390 + del e
69391 +
69392 + #update mtime so we can potentially avoid regenerating.
69393 + prev_mtimes[inforoot] = os.stat(inforoot)[stat.ST_MTIME]
69394 +
69395 + if badcount:
69396 + out.eerror("Processed %d info files; %d errors." % \
69397 + (icount, badcount))
69398 + portage.util.writemsg_level(errmsg,
69399 + level=logging.ERROR, noiselevel=-1)
69400 + else:
69401 + if icount > 0 and portage.util.noiselimit >= 0:
69402 + out.einfo("Processed %d info files." % (icount,))
69403
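chk_updated_info_files() compares each info directory's mtime against the prev_mtimes cache and only reruns /usr/bin/install-info for directories that changed, updating prev_mtimes in place afterwards. A hedged usage sketch (the INFOPATH split is an assumption about how a caller would obtain infodirs; regeneration only happens for directories that are writable):

    import os
    from portage.util._info_files import chk_updated_info_files

    prev_mtimes = {}  # normally loaded from a persistent cache such as the mtime db
    infodirs = os.environ.get("INFOPATH", "/usr/share/info").split(":")
    chk_updated_info_files("/", infodirs, prev_mtimes)
    # prev_mtimes now holds the new mtimes, so an immediate second call is a no-op
    chk_updated_info_files("/", infodirs, prev_mtimes)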
69404 diff --git a/pym/portage/util/_path.py b/pym/portage/util/_path.py
69405 new file mode 100644
69406 index 0000000..6fbcb43
69407 --- /dev/null
69408 +++ b/pym/portage/util/_path.py
69409 @@ -0,0 +1,27 @@
69410 +# Copyright 2013 Gentoo Foundation
69411 +# Distributed under the terms of the GNU General Public License v2
69412 +
69413 +import stat
69414 +
69415 +from portage import os
69416 +from portage.exception import PermissionDenied
69417 +
69418 +def exists_raise_eaccess(path):
69419 + try:
69420 + os.stat(path)
69421 + except OSError as e:
69422 + if e.errno == PermissionDenied.errno:
69423 + raise PermissionDenied("stat('%s')" % path)
69424 + return False
69425 + else:
69426 + return True
69427 +
69428 +def isdir_raise_eaccess(path):
69429 + try:
69430 + st = os.stat(path)
69431 + except OSError as e:
69432 + if e.errno == PermissionDenied.errno:
69433 + raise PermissionDenied("stat('%s')" % path)
69434 + return False
69435 + else:
69436 + return stat.S_ISDIR(st.st_mode)
69437
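Unlike os.path.exists()/os.path.isdir(), which swallow EACCES and return False, these helpers re-raise permission problems as portage's PermissionDenied, so callers can tell "missing" apart from "unreadable". A small sketch of the intended calling pattern (the path is illustrative):

    from portage.exception import PermissionDenied
    from portage.util._path import isdir_raise_eaccess

    try:
        if isdir_raise_eaccess("/etc/portage"):
            print("directory is present and readable")
        else:
            print("path is missing or not a directory")
    except PermissionDenied as e:
        print("cannot even stat the path: %s" % (e,))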
69438 diff --git a/pym/portage/util/_urlopen.py b/pym/portage/util/_urlopen.py
69439 index 307624b..4cfe183 100644
69440 --- a/pym/portage/util/_urlopen.py
69441 +++ b/pym/portage/util/_urlopen.py
69442 @@ -1,7 +1,11 @@
69443 -# Copyright 2012 Gentoo Foundation
69444 +# Copyright 2012-2014 Gentoo Foundation
69445 # Distributed under the terms of the GNU General Public License v2
69446
69447 +import io
69448 import sys
69449 +from datetime import datetime
69450 +from time import mktime
69451 +from email.utils import formatdate, parsedate
69452
69453 try:
69454 from urllib.request import urlopen as _urlopen
69455 @@ -14,29 +18,75 @@ except ImportError:
69456 import urllib2 as urllib_request
69457 from urllib import splituser as urllib_parse_splituser
69458
69459 -def urlopen(url):
69460 - try:
69461 - return _urlopen(url)
69462 - except SystemExit:
69463 - raise
69464 - except Exception:
69465 - if sys.hexversion < 0x3000000:
69466 - raise
69467 - parse_result = urllib_parse.urlparse(url)
69468 - if parse_result.scheme not in ("http", "https") or \
69469 - not parse_result.username:
69470 - raise
69471 -
69472 - return _new_urlopen(url)
69473 -
69474 -def _new_urlopen(url):
69475 - # This is experimental code for bug #413983.
69476 +if sys.hexversion >= 0x3000000:
69477 + # pylint: disable=W0622
69478 + long = int
69479 +
69480 +# to account for the difference between the TIMESTAMP of the index's contents
69481 +# and the file's mtime
69482 +TIMESTAMP_TOLERANCE = 5
69483 +
69484 +def urlopen(url, if_modified_since=None):
69485 parse_result = urllib_parse.urlparse(url)
69486 - netloc = urllib_parse_splituser(parse_result.netloc)[1]
69487 - url = urllib_parse.urlunparse((parse_result.scheme, netloc, parse_result.path, parse_result.params, parse_result.query, parse_result.fragment))
69488 - password_manager = urllib_request.HTTPPasswordMgrWithDefaultRealm()
69489 - if parse_result.username is not None:
69490 - password_manager.add_password(None, url, parse_result.username, parse_result.password)
69491 - auth_handler = urllib_request.HTTPBasicAuthHandler(password_manager)
69492 - opener = urllib_request.build_opener(auth_handler)
69493 - return opener.open(url)
69494 + if parse_result.scheme not in ("http", "https"):
69495 + return _urlopen(url)
69496 + else:
69497 + netloc = urllib_parse_splituser(parse_result.netloc)[1]
69498 + url = urllib_parse.urlunparse((parse_result.scheme, netloc, parse_result.path, parse_result.params, parse_result.query, parse_result.fragment))
69499 + password_manager = urllib_request.HTTPPasswordMgrWithDefaultRealm()
69500 + request = urllib_request.Request(url)
69501 + request.add_header('User-Agent', 'Gentoo Portage')
69502 + if if_modified_since:
69503 + request.add_header('If-Modified-Since', _timestamp_to_http(if_modified_since))
69504 + if parse_result.username is not None:
69505 + password_manager.add_password(None, url, parse_result.username, parse_result.password)
69506 + auth_handler = CompressedResponseProcessor(password_manager)
69507 + opener = urllib_request.build_opener(auth_handler)
69508 + hdl = opener.open(request)
69509 + if hdl.headers.get('last-modified', ''):
69510 + try:
69511 + add_header = hdl.headers.add_header
69512 + except AttributeError:
69513 + # Python 2
69514 + add_header = hdl.headers.addheader
69515 + add_header('timestamp', _http_to_timestamp(hdl.headers.get('last-modified')))
69516 + return hdl
69517 +
69518 +def _timestamp_to_http(timestamp):
69519 + dt = datetime.fromtimestamp(float(long(timestamp)+TIMESTAMP_TOLERANCE))
69520 + stamp = mktime(dt.timetuple())
69521 + return formatdate(timeval=stamp, localtime=False, usegmt=True)
69522 +
69523 +def _http_to_timestamp(http_datetime_string):
69524 + tuple = parsedate(http_datetime_string)
69525 + timestamp = mktime(tuple)
69526 + return str(long(timestamp))
69527 +
69528 +class CompressedResponseProcessor(urllib_request.HTTPBasicAuthHandler):
69529 + # Handler for compressed responses.
69530 +
69531 + def http_request(self, req):
69532 + req.add_header('Accept-Encoding', 'bzip2,gzip,deflate')
69533 + return req
69534 + https_request = http_request
69535 +
69536 + def http_response(self, req, response):
69537 + decompressed = None
69538 + if response.headers.get('content-encoding') == 'bzip2':
69539 + import bz2
69540 + decompressed = io.BytesIO(bz2.decompress(response.read()))
69541 + elif response.headers.get('content-encoding') == 'gzip':
69542 + from gzip import GzipFile
69543 + decompressed = GzipFile(fileobj=io.BytesIO(response.read()), mode='r')
69544 + elif response.headers.get('content-encoding') == 'deflate':
69545 + import zlib
69546 + try:
69547 + decompressed = io.BytesIO(zlib.decompress(response.read()))
69548 + except zlib.error: # they ignored RFC1950
69549 + decompressed = io.BytesIO(zlib.decompress(response.read(), -zlib.MAX_WBITS))
69550 + if decompressed:
69551 + old_response = response
69552 + response = urllib_request.addinfourl(decompressed, old_response.headers, old_response.url, old_response.code)
69553 + response.msg = old_response.msg
69554 + return response
69555 + https_response = http_response
69556
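The reworked urlopen() adds an If-Modified-Since header when if_modified_since (a Unix timestamp string) is given, negotiates bzip2/gzip/deflate content encodings, and mirrors the server's Last-Modified back as a synthetic 'timestamp' header. A hedged sketch of how a caller might use it (the URL and timestamp are placeholders; a 304 Not Modified surfaces as an HTTP error exception in urllib):

    from portage.util._urlopen import urlopen

    try:
        hdl = urlopen("http://distfiles.example.org/Packages",
                if_modified_since="1395841962")
    except IOError as e:
        # Covers 304 Not Modified as well as genuine HTTP/network errors.
        print("not fetched: %s" % (e,))
    else:
        data = hdl.read()                         # already decompressed if encoded
        remote_ts = hdl.headers.get("timestamp")  # derived from Last-Modified
        print("fetched %d bytes, remote timestamp %s" % (len(data), remote_ts))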
69557 diff --git a/pym/portage/util/digraph.py b/pym/portage/util/digraph.py
69558 index f3ae658..4a9cb43 100644
69559 --- a/pym/portage/util/digraph.py
69560 +++ b/pym/portage/util/digraph.py
69561 @@ -1,12 +1,13 @@
69562 -# Copyright 2010-2011 Gentoo Foundation
69563 +# Copyright 2010-2014 Gentoo Foundation
69564 # Distributed under the terms of the GNU General Public License v2
69565
69566 +from __future__ import unicode_literals
69567 +
69568 __all__ = ['digraph']
69569
69570 from collections import deque
69571 import sys
69572
69573 -from portage import _unicode_decode
69574 from portage.util import writemsg
69575
69576 class digraph(object):
69577 @@ -16,24 +17,24 @@ class digraph(object):
69578
69579 def __init__(self):
69580 """Create an empty digraph"""
69581 -
69582 +
69583 # { node : ( { child : priority } , { parent : priority } ) }
69584 self.nodes = {}
69585 self.order = []
69586
69587 def add(self, node, parent, priority=0):
69588 """Adds the specified node with the specified parent.
69589 -
69590 +
69591 If the dep is a soft-dep and the node already has a hard
69592 relationship to the parent, the relationship is left as hard."""
69593 -
69594 +
69595 if node not in self.nodes:
69596 self.nodes[node] = ({}, {}, node)
69597 self.order.append(node)
69598 -
69599 +
69600 if not parent:
69601 return
69602 -
69603 +
69604 if parent not in self.nodes:
69605 self.nodes[parent] = ({}, {}, parent)
69606 self.order.append(parent)
69607 @@ -46,19 +47,29 @@ class digraph(object):
69608 priorities.append(priority)
69609 priorities.sort()
69610
69611 + def discard(self, node):
69612 + """
69613 +		Like remove(), except it doesn't raise KeyError if the
69614 + node doesn't exist.
69615 + """
69616 + try:
69617 + self.remove(node)
69618 + except KeyError:
69619 + pass
69620 +
69621 def remove(self, node):
69622 """Removes the specified node from the digraph, also removing
69623 and ties to other nodes in the digraph. Raises KeyError if the
69624 		any ties to other nodes in the digraph. Raises KeyError if the
69625 -
69626 +
69627 if node not in self.nodes:
69628 raise KeyError(node)
69629 -
69630 +
69631 for parent in self.nodes[node][1]:
69632 del self.nodes[parent][0][node]
69633 for child in self.nodes[node][0]:
69634 del self.nodes[child][1][node]
69635 -
69636 +
69637 del self.nodes[node]
69638 self.order.remove(node)
69639
69640 @@ -157,10 +168,10 @@ class digraph(object):
69641
69642 def leaf_nodes(self, ignore_priority=None):
69643 """Return all nodes that have no children
69644 -
69645 +
69646 If ignore_soft_deps is True, soft deps are not counted as
69647 children in calculations."""
69648 -
69649 +
69650 leaf_nodes = []
69651 if ignore_priority is None:
69652 for node in self.order:
69653 @@ -191,10 +202,10 @@ class digraph(object):
69654
69655 def root_nodes(self, ignore_priority=None):
69656 """Return all nodes that have no parents.
69657 -
69658 +
69659 If ignore_soft_deps is True, soft deps are not counted as
69660 parents in calculations."""
69661 -
69662 +
69663 root_nodes = []
69664 if ignore_priority is None:
69665 for node in self.order:
69666 @@ -272,18 +283,17 @@ class digraph(object):
69667 def debug_print(self):
69668 def output(s):
69669 writemsg(s, noiselevel=-1)
69670 - # Use _unicode_decode() to force unicode format
69671 + # Use unicode_literals to force unicode format
69672 # strings for python-2.x safety, ensuring that
69673 # node.__unicode__() is used when necessary.
69674 for node in self.nodes:
69675 - output(_unicode_decode("%s ") % (node,))
69676 + output("%s " % (node,))
69677 if self.nodes[node][0]:
69678 output("depends on\n")
69679 else:
69680 output("(no children)\n")
69681 for child, priorities in self.nodes[node][0].items():
69682 - output(_unicode_decode(" %s (%s)\n") % \
69683 - (child, priorities[-1],))
69684 + output(" %s (%s)\n" % (child, priorities[-1],))
69685
69686 def bfs(self, start, ignore_priority=None):
69687 if start not in self:
69688
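The new discard() mirrors set.discard(): it removes a node and all of its edges if present, and silently does nothing for an unknown node, whereas remove() raises KeyError. For example:

    from portage.util.digraph import digraph

    g = digraph()
    g.add("B", "A")            # B depends on parent A
    g.discard("B")             # removes B and the A -> B edge
    g.discard("no-such-node")  # no-op instead of KeyError
    print("A" in g, "B" in g)  # True False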
69689 diff --git a/pym/portage/util/env_update.py b/pym/portage/util/env_update.py
69690 index ace4077..c0a93a8 100644
69691 --- a/pym/portage/util/env_update.py
69692 +++ b/pym/portage/util/env_update.py
69693 @@ -1,16 +1,17 @@
69694 -# Copyright 2010-2011 Gentoo Foundation
69695 +# Copyright 2010-2014 Gentoo Foundation
69696 # Distributed under the terms of the GNU General Public License v2
69697
69698 __all__ = ['env_update']
69699
69700 import errno
69701 +import glob
69702 import io
69703 import stat
69704 import sys
69705 import time
69706
69707 import portage
69708 -from portage import os, _encodings, _unicode_encode
69709 +from portage import os, _encodings, _unicode_decode, _unicode_encode
69710 from portage.checksum import prelink_capable
69711 from portage.data import ostype
69712 from portage.exception import ParseError
69713 @@ -23,6 +24,7 @@ from portage.dbapi.vartree import vartree
69714 from portage.package.ebuild.config import config
69715
69716 if sys.hexversion >= 0x3000000:
69717 + # pylint: disable=W0622
69718 long = int
69719
69720 def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None,
69721 @@ -88,7 +90,8 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env,
69722
69723 eprefix = settings.get("EPREFIX", "")
69724 eprefix_lstrip = eprefix.lstrip(os.sep)
69725 - envd_dir = os.path.join(target_root, eprefix_lstrip, "etc", "env.d")
69726 + eroot = normalize_path(os.path.join(target_root, eprefix_lstrip)).rstrip(os.sep) + os.sep
69727 + envd_dir = os.path.join(eroot, "etc", "env.d")
69728 ensure_dirs(envd_dir, mode=0o755)
69729 fns = listdir(envd_dir, EmptyOnError=1)
69730 fns.sort()
69731 @@ -164,15 +167,14 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env,
69732 they won't be overwritten by this dict.update call."""
69733 env.update(myconfig)
69734
69735 - ldsoconf_path = os.path.join(
69736 - target_root, eprefix_lstrip, "etc", "ld.so.conf")
69737 + ldsoconf_path = os.path.join(eroot, "etc", "ld.so.conf")
69738 try:
69739 myld = io.open(_unicode_encode(ldsoconf_path,
69740 encoding=_encodings['fs'], errors='strict'),
69741 mode='r', encoding=_encodings['content'], errors='replace')
69742 - myldlines=myld.readlines()
69743 + myldlines = myld.readlines()
69744 myld.close()
69745 - oldld=[]
69746 + oldld = []
69747 for x in myldlines:
69748 #each line has at least one char (a newline)
69749 if x[:1] == "#":
69750 @@ -193,20 +195,34 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env,
69751 myfd.write(x + "\n")
69752 myfd.close()
69753
69754 + potential_lib_dirs = set()
69755 + for lib_dir_glob in ('usr/lib*', 'lib*'):
69756 + x = os.path.join(eroot, lib_dir_glob)
69757 + for y in glob.glob(_unicode_encode(x,
69758 + encoding=_encodings['fs'], errors='strict')):
69759 + try:
69760 + y = _unicode_decode(y,
69761 + encoding=_encodings['fs'], errors='strict')
69762 + except UnicodeDecodeError:
69763 + continue
69764 + if os.path.basename(y) != 'libexec':
69765 + potential_lib_dirs.add(y[len(eroot):])
69766 +
69767 # Update prelink.conf if we are prelink-enabled
69768 if prelink_capable:
69769 - newprelink = atomic_ofstream(os.path.join(
69770 - target_root, eprefix_lstrip, "etc", "prelink.conf"))
69771 + prelink_d = os.path.join(eroot, 'etc', 'prelink.conf.d')
69772 + ensure_dirs(prelink_d)
69773 + newprelink = atomic_ofstream(os.path.join(prelink_d, 'portage.conf'))
69774 newprelink.write("# prelink.conf autogenerated by env-update; make all changes to\n")
69775 newprelink.write("# contents of /etc/env.d directory\n")
69776
69777 - for x in ["/bin","/sbin","/usr/bin","/usr/sbin","/lib","/usr/lib"]:
69778 - newprelink.write("-l %s\n" % (x,));
69779 - prelink_paths = []
69780 - prelink_paths += specials.get("LDPATH", [])
69781 - prelink_paths += specials.get("PATH", [])
69782 - prelink_paths += specials.get("PRELINK_PATH", [])
69783 - prelink_path_mask = specials.get("PRELINK_PATH_MASK", [])
69784 + for x in sorted(potential_lib_dirs) + ['bin', 'sbin']:
69785 + newprelink.write('-l /%s\n' % (x,));
69786 + prelink_paths = set()
69787 + prelink_paths |= set(specials.get('LDPATH', []))
69788 + prelink_paths |= set(specials.get('PATH', []))
69789 + prelink_paths |= set(specials.get('PRELINK_PATH', []))
69790 + prelink_path_mask = specials.get('PRELINK_PATH_MASK', [])
69791 for x in prelink_paths:
69792 if not x:
69793 continue
69794 @@ -227,12 +243,26 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env,
69795 newprelink.write("-b %s\n" % (x,))
69796 newprelink.close()
69797
69798 + # Migration code path. If /etc/prelink.conf was generated by us, then
69799 + # point it to the new stuff until the prelink package re-installs.
69800 + prelink_conf = os.path.join(eroot, 'etc', 'prelink.conf')
69801 + try:
69802 + with open(_unicode_encode(prelink_conf,
69803 + encoding=_encodings['fs'], errors='strict'), 'rb') as f:
69804 + if f.readline() == b'# prelink.conf autogenerated by env-update; make all changes to\n':
69805 + f = atomic_ofstream(prelink_conf)
69806 + f.write('-c /etc/prelink.conf.d/*.conf\n')
69807 + f.close()
69808 + except IOError as e:
69809 + if e.errno != errno.ENOENT:
69810 + raise
69811 +
69812 current_time = long(time.time())
69813 mtime_changed = False
69814 +
69815 lib_dirs = set()
69816 - for lib_dir in set(specials["LDPATH"] + \
69817 - ['usr/lib','usr/lib64','usr/lib32','lib','lib64','lib32']):
69818 - x = os.path.join(target_root, eprefix_lstrip, lib_dir.lstrip(os.sep))
69819 + for lib_dir in set(specials['LDPATH']) | potential_lib_dirs:
69820 + x = os.path.join(eroot, lib_dir.lstrip(os.sep))
69821 try:
69822 newldpathtime = os.stat(x)[stat.ST_MTIME]
69823 lib_dirs.add(normalize_path(x))
69824 @@ -292,7 +322,7 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env,
69825 writemsg_level(_(">>> Regenerating %setc/ld.so.cache...\n") % \
69826 (target_root,))
69827 os.system("cd / ; %s -X -r '%s'" % (ldconfig, target_root))
69828 - elif ostype in ("FreeBSD","DragonFly"):
69829 + elif ostype in ("FreeBSD", "DragonFly"):
69830 writemsg_level(_(">>> Regenerating %svar/run/ld-elf.so.hints...\n") % \
69831 target_root)
69832 os.system(("cd / ; %s -elf -i " + \
69833 @@ -308,11 +338,10 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env,
69834 cenvnotice += "# GO INTO /etc/csh.cshrc NOT /etc/csh.env\n\n"
69835
69836 #create /etc/profile.env for bash support
69837 - outfile = atomic_ofstream(os.path.join(
69838 - target_root, eprefix_lstrip, "etc", "profile.env"))
69839 + outfile = atomic_ofstream(os.path.join(eroot, "etc", "profile.env"))
69840 outfile.write(penvnotice)
69841
69842 - env_keys = [ x for x in env if x != "LDPATH" ]
69843 + env_keys = [x for x in env if x != "LDPATH"]
69844 env_keys.sort()
69845 for k in env_keys:
69846 v = env[k]
69847 @@ -323,8 +352,7 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env,
69848 outfile.close()
69849
69850 #create /etc/csh.env for (t)csh support
69851 - outfile = atomic_ofstream(os.path.join(
69852 - target_root, eprefix_lstrip, "etc", "csh.env"))
69853 + outfile = atomic_ofstream(os.path.join(eroot, "etc", "csh.env"))
69854 outfile.write(cenvnotice)
69855 for x in env_keys:
69856 outfile.write("setenv %s '%s'\n" % (x, env[x]))
69857
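Instead of hard-coding lib/lib32/lib64, env-update now globs usr/lib* and lib* under EROOT (skipping libexec) and feeds the result both to the generated prelink config and to the ld.so.cache mtime checks. A standalone sketch of that discovery step (the eroot argument is illustrative, and the unicode encode/decode handling of the real helper is omitted):

    import glob
    import os

    def find_potential_lib_dirs(eroot="/"):
        potential = set()
        for pattern in ("usr/lib*", "lib*"):
            for path in glob.glob(os.path.join(eroot, pattern)):
                if os.path.basename(path) != "libexec":
                    # Store the path relative to eroot, as the diff does.
                    potential.add(path[len(eroot):])
        return potential

    print(sorted(find_potential_lib_dirs("/")))  # e.g. ['lib', 'lib64', 'usr/lib', ...]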
69858 diff --git a/pym/portage/util/lafilefixer.py b/pym/portage/util/lafilefixer.py
69859 index 54ff20d..2562d9a 100644
69860 --- a/pym/portage/util/lafilefixer.py
69861 +++ b/pym/portage/util/lafilefixer.py
69862 @@ -11,7 +11,7 @@ from portage.exception import InvalidData
69863 # This is a re-implementation of dev-util/lafilefixer-0.5.
69864 # rewrite_lafile() takes the contents of an lafile as a string
69865 # It then parses the dependency_libs and inherited_linker_flags
69866 -# entries.
69867 +# entries.
69868 # We insist on dependency_libs being present. inherited_linker_flags
69869 # is optional.
69870 # There are strict rules about the syntax imposed by libtool's libltdl.
69871 @@ -21,7 +21,7 @@ from portage.exception import InvalidData
69872 # lafilefixer does).
69873 # What it does:
69874 # * Replaces all .la files with absolute paths in dependency_libs with
69875 -# corresponding -l* and -L* entries
69876 +# corresponding -l* and -L* entries
69877 # (/usr/lib64/libfoo.la -> -L/usr/lib64 -lfoo)
69878 # * Moves various flags (see flag_re below) to inherited_linker_flags,
69879 # if such an entry was present.
69880 @@ -36,7 +36,7 @@ from portage.exception import InvalidData
69881 dep_libs_re = re.compile(b"dependency_libs='(?P<value>[^']*)'$")
69882 inh_link_flags_re = re.compile(b"inherited_linker_flags='(?P<value>[^']*)'$")
69883
69884 -#regexes for replacing stuff in -L entries.
69885 +#regexes for replacing stuff in -L entries.
69886 #replace 'X11R6/lib' and 'local/lib' with 'lib', no idea what's this about.
69887 X11_local_sub = re.compile(b"X11R6/lib|local/lib")
69888 #get rid of the '..'
69889 @@ -129,11 +129,11 @@ def rewrite_lafile(contents):
69890 #This allows us to place all -L entries at the beginning
69891 #of 'dependency_libs'.
69892 ladir = dep_libs_entry
69893 -
69894 +
69895 ladir = X11_local_sub.sub(b"lib", ladir)
69896 ladir = pkgconfig_sub1.sub(b"usr", ladir)
69897 ladir = pkgconfig_sub2.sub(b"\g<usrlib>", ladir)
69898 -
69899 +
69900 if ladir not in libladir:
69901 libladir.append(ladir)
69902
69903
69904 diff --git a/pym/portage/util/listdir.py b/pym/portage/util/listdir.py
69905 index c2628cb..2012e14 100644
69906 --- a/pym/portage/util/listdir.py
69907 +++ b/pym/portage/util/listdir.py
69908 @@ -1,36 +1,33 @@
69909 -# Copyright 2010-2011 Gentoo Foundation
69910 +# Copyright 2010-2013 Gentoo Foundation
69911 # Distributed under the terms of the GNU General Public License v2
69912
69913 __all__ = ['cacheddir', 'listdir']
69914
69915 import errno
69916 import stat
69917 -import time
69918 +import sys
69919 +
69920 +if sys.hexversion < 0x3000000:
69921 + from itertools import izip as zip
69922
69923 from portage import os
69924 +from portage.const import VCS_DIRS
69925 from portage.exception import DirectoryNotFound, PermissionDenied, PortageException
69926 -from portage.util import normalize_path, writemsg
69927 -
69928 -_ignorecvs_dirs = ('CVS', 'RCS', 'SCCS', '.svn', '.git')
69929 +from portage.util import normalize_path
69930 +
69931 +# The global dircache is no longer supported, since it could
69932 +# be a memory leak for API consumers. Any cacheddir callers
69933 +# should use higher-level caches instead, when necessary.
69934 +# TODO: Remove dircache variable after stable portage does
69935 +# not use it (keep it for now, in case API consumers clear
69936 +# it manually).
69937 dircache = {}
69938 -cacheHit = 0
69939 -cacheMiss = 0
69940 -cacheStale = 0
69941
69942 def cacheddir(my_original_path, ignorecvs, ignorelist, EmptyOnError, followSymlinks=True):
69943 - global cacheHit,cacheMiss,cacheStale
69944 mypath = normalize_path(my_original_path)
69945 - if mypath in dircache:
69946 - cacheHit += 1
69947 - cached_mtime, list, ftype = dircache[mypath]
69948 - else:
69949 - cacheMiss += 1
69950 - cached_mtime, list, ftype = -1, [], []
69951 try:
69952 pathstat = os.stat(mypath)
69953 - if stat.S_ISDIR(pathstat[stat.ST_MODE]):
69954 - mtime = pathstat.st_mtime
69955 - else:
69956 + if not stat.S_ISDIR(pathstat.st_mode):
69957 raise DirectoryNotFound(mypath)
69958 except EnvironmentError as e:
69959 if e.errno == PermissionDenied.errno:
69960 @@ -39,19 +36,16 @@ def cacheddir(my_original_path, ignorecvs, ignorelist, EmptyOnError, followSymli
69961 return [], []
69962 except PortageException:
69963 return [], []
69964 - # Python retuns mtime in seconds, so if it was changed in the last few seconds, it could be invalid
69965 - if mtime != cached_mtime or time.time() - mtime < 4:
69966 - if mypath in dircache:
69967 - cacheStale += 1
69968 + else:
69969 try:
69970 - list = os.listdir(mypath)
69971 + fpaths = os.listdir(mypath)
69972 except EnvironmentError as e:
69973 if e.errno != errno.EACCES:
69974 raise
69975 del e
69976 raise PermissionDenied(mypath)
69977 ftype = []
69978 - for x in list:
69979 + for x in fpaths:
69980 try:
69981 if followSymlinks:
69982 pathstat = os.stat(mypath+"/"+x)
69983 @@ -68,23 +62,22 @@ def cacheddir(my_original_path, ignorecvs, ignorelist, EmptyOnError, followSymli
69984 ftype.append(3)
69985 except (IOError, OSError):
69986 ftype.append(3)
69987 - dircache[mypath] = mtime, list, ftype
69988 -
69989 - ret_list = []
69990 - ret_ftype = []
69991 - for x in range(0, len(list)):
69992 - if list[x] in ignorelist:
69993 - pass
69994 - elif ignorecvs:
69995 - if list[x][:2] != ".#" and \
69996 - not (ftype[x] == 1 and list[x] in _ignorecvs_dirs):
69997 - ret_list.append(list[x])
69998 - ret_ftype.append(ftype[x])
69999 - else:
70000 - ret_list.append(list[x])
70001 - ret_ftype.append(ftype[x])
70002 -
70003 - writemsg("cacheddirStats: H:%d/M:%d/S:%d\n" % (cacheHit, cacheMiss, cacheStale),10)
70004 +
70005 + if ignorelist or ignorecvs:
70006 + ret_list = []
70007 + ret_ftype = []
70008 + for file_path, file_type in zip(fpaths, ftype):
70009 + if file_path in ignorelist:
70010 + pass
70011 + elif ignorecvs:
70012 + if file_path[:2] != ".#" and \
70013 + not (file_type == 1 and file_path in VCS_DIRS):
70014 + ret_list.append(file_path)
70015 + ret_ftype.append(file_type)
70016 + else:
70017 + ret_list = fpaths
70018 + ret_ftype = ftype
70019 +
70020 return ret_list, ret_ftype
70021
70022 def listdir(mypath, recursive=False, filesonly=False, ignorecvs=False, ignorelist=[], followSymlinks=True,
70023 @@ -98,7 +91,7 @@ def listdir(mypath, recursive=False, filesonly=False, ignorecvs=False, ignorelis
70024 @type recursive: Boolean
70025 	@param filesonly: Only return files, not directories
70026 @type filesonly: Boolean
70027 - @param ignorecvs: Ignore CVS directories ('CVS','SCCS','.svn','.git')
70028 + @param ignorecvs: Ignore VCS directories
70029 @type ignorecvs: Boolean
70030 @param ignorelist: List of filenames/directories to exclude
70031 @type ignorelist: List
70032 @@ -112,40 +105,35 @@ def listdir(mypath, recursive=False, filesonly=False, ignorecvs=False, ignorelis
70033 @return: A list of files and directories (or just files or just directories) or an empty list.
70034 """
70035
70036 - list, ftype = cacheddir(mypath, ignorecvs, ignorelist, EmptyOnError, followSymlinks)
70037 + fpaths, ftype = cacheddir(mypath, ignorecvs, ignorelist, EmptyOnError, followSymlinks)
70038
70039 - if list is None:
70040 - list=[]
70041 + if fpaths is None:
70042 + fpaths = []
70043 if ftype is None:
70044 - ftype=[]
70045 + ftype = []
70046
70047 if not (filesonly or dirsonly or recursive):
70048 - return list
70049 + return fpaths
70050
70051 if recursive:
70052 - x=0
70053 - while x<len(ftype):
70054 - if ftype[x] == 1:
70055 - l,f = cacheddir(mypath+"/"+list[x], ignorecvs, ignorelist, EmptyOnError,
70056 - followSymlinks)
70057 -
70058 - l=l[:]
70059 - for y in range(0,len(l)):
70060 - l[y]=list[x]+"/"+l[y]
70061 - list=list+l
70062 - ftype=ftype+f
70063 - x+=1
70064 + stack = list(zip(fpaths, ftype))
70065 + fpaths = []
70066 + ftype = []
70067 + while stack:
70068 + file_path, file_type = stack.pop()
70069 + fpaths.append(file_path)
70070 + ftype.append(file_type)
70071 + if file_type == 1:
70072 + subdir_list, subdir_types = cacheddir(
70073 + os.path.join(mypath, file_path), ignorecvs,
70074 + ignorelist, EmptyOnError, followSymlinks)
70075 + stack.extend((os.path.join(file_path, x), x_type)
70076 + for x, x_type in zip(subdir_list, subdir_types))
70077 +
70078 if filesonly:
70079 - rlist=[]
70080 - for x in range(0,len(ftype)):
70081 - if ftype[x]==0:
70082 - rlist=rlist+[list[x]]
70083 + fpaths = [x for x, x_type in zip(fpaths, ftype) if x_type == 0]
70084 +
70085 elif dirsonly:
70086 - rlist = []
70087 - for x in range(0, len(ftype)):
70088 - if ftype[x] == 1:
70089 - rlist = rlist + [list[x]]
70090 - else:
70091 - rlist=list
70092 + fpaths = [x for x, x_type in zip(fpaths, ftype) if x_type == 1]
70093
70094 - return rlist
70095 + return fpaths
70096
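With this rewrite, recursion uses an explicit stack of (relative_path, type) pairs and filtering is done with zip(), but the public listdir() behaviour is unchanged. Typical usage (the path is illustrative):

    from portage.util.listdir import listdir

    # All regular files under /etc/portage, relative to it, skipping VCS dirs.
    files = listdir("/etc/portage", recursive=True, filesonly=True, ignorecvs=True)
    for f in sorted(files):
        print(f)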
70097 diff --git a/pym/portage/util/movefile.py b/pym/portage/util/movefile.py
70098 index 10577b5..452e77f 100644
70099 --- a/pym/portage/util/movefile.py
70100 +++ b/pym/portage/util/movefile.py
70101 @@ -1,18 +1,22 @@
70102 -# Copyright 2010-2012 Gentoo Foundation
70103 +# Copyright 2010-2013 Gentoo Foundation
70104 # Distributed under the terms of the GNU General Public License v2
70105
70106 +from __future__ import unicode_literals
70107 +
70108 __all__ = ['movefile']
70109
70110 import errno
70111 +import fnmatch
70112 import os as _os
70113 import shutil as _shutil
70114 import stat
70115 +import sys
70116 import subprocess
70117 import textwrap
70118
70119 import portage
70120 from portage import bsd_chflags, _encodings, _os_overrides, _selinux, \
70121 - _unicode_decode, _unicode_encode, _unicode_func_wrapper,\
70122 + _unicode_decode, _unicode_encode, _unicode_func_wrapper, \
70123 _unicode_module_wrapper
70124 from portage.const import MOVE_BINARY
70125 from portage.exception import OperationNotSupported
70126 @@ -24,43 +28,113 @@ def _apply_stat(src_stat, dest):
70127 _os.chown(dest, src_stat.st_uid, src_stat.st_gid)
70128 _os.chmod(dest, stat.S_IMODE(src_stat.st_mode))
70129
70130 +_xattr_excluder_cache = {}
70131 +
70132 +def _get_xattr_excluder(pattern):
70133 +
70134 + try:
70135 + value = _xattr_excluder_cache[pattern]
70136 + except KeyError:
70137 + value = _xattr_excluder(pattern)
70138 + _xattr_excluder_cache[pattern] = value
70139 +
70140 + return value
70141 +
70142 +class _xattr_excluder(object):
70143 +
70144 + __slots__ = ('_pattern_split',)
70145 +
70146 + def __init__(self, pattern):
70147 +
70148 + if pattern is None:
70149 + self._pattern_split = None
70150 + else:
70151 + pattern = pattern.split()
70152 + if not pattern:
70153 + self._pattern_split = None
70154 + else:
70155 + pattern.sort()
70156 + self._pattern_split = tuple(pattern)
70157 +
70158 + def __call__(self, attr):
70159 +
70160 + if self._pattern_split is None:
70161 + return False
70162 +
70163 + match = fnmatch.fnmatch
70164 + for x in self._pattern_split:
70165 + if match(attr, x):
70166 + return True
70167 +
70168 + return False
70169 +
70170 if hasattr(_os, "getxattr"):
70171 # Python >=3.3 and GNU/Linux
70172 - def _copyxattr(src, dest):
70173 - for attr in _os.listxattr(src):
70174 + def _copyxattr(src, dest, exclude=None):
70175 +
70176 + try:
70177 + attrs = _os.listxattr(src)
70178 + except OSError as e:
70179 + if e.errno != OperationNotSupported.errno:
70180 + raise
70181 + attrs = ()
70182 + if attrs:
70183 + if exclude is not None and isinstance(attrs[0], bytes):
70184 + exclude = exclude.encode(_encodings['fs'])
70185 + exclude = _get_xattr_excluder(exclude)
70186 +
70187 + for attr in attrs:
70188 + if exclude(attr):
70189 + continue
70190 try:
70191 _os.setxattr(dest, attr, _os.getxattr(src, attr))
70192 raise_exception = False
70193 except OSError:
70194 raise_exception = True
70195 if raise_exception:
70196 - raise OperationNotSupported("Filesystem containing file '%s' does not support extended attributes" % dest)
70197 + raise OperationNotSupported(_("Filesystem containing file '%s' "
70198 + "does not support extended attribute '%s'") %
70199 + (_unicode_decode(dest), _unicode_decode(attr)))
70200 else:
70201 try:
70202 import xattr
70203 except ImportError:
70204 xattr = None
70205 if xattr is not None:
70206 - def _copyxattr(src, dest):
70207 - for attr in xattr.list(src):
70208 + def _copyxattr(src, dest, exclude=None):
70209 +
70210 + try:
70211 + attrs = xattr.list(src)
70212 + except IOError as e:
70213 + if e.errno != OperationNotSupported.errno:
70214 + raise
70215 + attrs = ()
70216 +
70217 + if attrs:
70218 + if exclude is not None and isinstance(attrs[0], bytes):
70219 + exclude = exclude.encode(_encodings['fs'])
70220 + exclude = _get_xattr_excluder(exclude)
70221 +
70222 + for attr in attrs:
70223 + if exclude(attr):
70224 + continue
70225 try:
70226 xattr.set(dest, attr, xattr.get(src, attr))
70227 raise_exception = False
70228 except IOError:
70229 raise_exception = True
70230 if raise_exception:
70231 - raise OperationNotSupported("Filesystem containing file '%s' does not support extended attributes" % dest)
70232 + raise OperationNotSupported(_("Filesystem containing file '%s' "
70233 + "does not support extended attribute '%s'") %
70234 + (_unicode_decode(dest), _unicode_decode(attr)))
70235 else:
70236 - _devnull = open("/dev/null", "wb")
70237 try:
70238 - subprocess.call(["getfattr", "--version"], stdout=_devnull)
70239 - subprocess.call(["setfattr", "--version"], stdout=_devnull)
70240 - _has_getfattr_and_setfattr = True
70241 + with open(_os.devnull, 'wb') as f:
70242 + subprocess.call(["getfattr", "--version"], stdout=f)
70243 + subprocess.call(["setfattr", "--version"], stdout=f)
70244 except OSError:
70245 - _has_getfattr_and_setfattr = False
70246 - _devnull.close()
70247 - if _has_getfattr_and_setfattr:
70248 - def _copyxattr(src, dest):
70249 + def _copyxattr(src, dest, exclude=None):
70250 + # TODO: implement exclude
70251 getfattr_process = subprocess.Popen(["getfattr", "-d", "--absolute-names", src], stdout=subprocess.PIPE)
70252 getfattr_process.wait()
70253 extended_attributes = getfattr_process.stdout.readlines()
70254 @@ -72,14 +146,15 @@ else:
70255 if setfattr_process.returncode != 0:
70256 raise OperationNotSupported("Filesystem containing file '%s' does not support extended attributes" % dest)
70257 else:
70258 - def _copyxattr(src, dest):
70259 + def _copyxattr(src, dest, exclude=None):
70260 pass
70261
70262 def movefile(src, dest, newmtime=None, sstat=None, mysettings=None,
70263 hardlink_candidates=None, encoding=_encodings['fs']):
70264 """moves a file from src to dest, preserving all permissions and attributes; mtime will
70265 - be preserved even when moving across filesystems. Returns true on success and false on
70266 - failure. Move is atomic."""
70267 + be preserved even when moving across filesystems. Returns mtime as integer on success
70268 + and None on failure. mtime is expressed in seconds in Python <3.3 and nanoseconds in
70269 + Python >=3.3. Move is atomic."""
70270
70271 if mysettings is None:
70272 mysettings = portage.settings
70273 @@ -102,22 +177,22 @@ def movefile(src, dest, newmtime=None, sstat=None, mysettings=None,
70274
70275 try:
70276 if not sstat:
70277 - sstat=os.lstat(src)
70278 + sstat = os.lstat(src)
70279
70280 except SystemExit as e:
70281 raise
70282 except Exception as e:
70283 writemsg("!!! %s\n" % _("Stating source file failed... movefile()"),
70284 noiselevel=-1)
70285 - writemsg(_unicode_decode("!!! %s\n") % (e,), noiselevel=-1)
70286 + writemsg("!!! %s\n" % (e,), noiselevel=-1)
70287 return None
70288
70289 - destexists=1
70290 + destexists = 1
70291 try:
70292 - dstat=os.lstat(dest)
70293 + dstat = os.lstat(dest)
70294 except (OSError, IOError):
70295 - dstat=os.lstat(os.path.dirname(dest))
70296 - destexists=0
70297 + dstat = os.lstat(os.path.dirname(dest))
70298 + destexists = 0
70299
70300 if bsd_chflags:
70301 if destexists and dstat.st_flags != 0:
70302 @@ -132,7 +207,7 @@ def movefile(src, dest, newmtime=None, sstat=None, mysettings=None,
70303 if stat.S_ISLNK(dstat[stat.ST_MODE]):
70304 try:
70305 os.unlink(dest)
70306 - destexists=0
70307 + destexists = 0
70308 except SystemExit as e:
70309 raise
70310 except Exception as e:
70311 @@ -140,7 +215,7 @@ def movefile(src, dest, newmtime=None, sstat=None, mysettings=None,
70312
70313 if stat.S_ISLNK(sstat[stat.ST_MODE]):
70314 try:
70315 - target=os.readlink(src)
70316 + target = os.readlink(src)
70317 if mysettings and "D" in mysettings and \
70318 target.startswith(mysettings["D"]):
70319 target = target[len(mysettings["D"])-1:]
70320 @@ -159,17 +234,32 @@ def movefile(src, dest, newmtime=None, sstat=None, mysettings=None,
70321 if e.errno not in (errno.ENOENT, errno.EEXIST) or \
70322 target != os.readlink(dest):
70323 raise
70324 - lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
70325 - # utime() only works on the target of a symlink, so it's not
70326 - # possible to perserve mtime on symlinks.
70327 - return os.lstat(dest)[stat.ST_MTIME]
70328 + lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
70329 +
70330 + try:
70331 + _os.unlink(src_bytes)
70332 + except OSError:
70333 + pass
70334 +
70335 + if sys.hexversion >= 0x3030000:
70336 + try:
70337 + os.utime(dest, ns=(sstat.st_mtime_ns, sstat.st_mtime_ns), follow_symlinks=False)
70338 + except NotImplementedError:
70339 + # utimensat() and lutimes() missing in libc.
70340 + return os.stat(dest, follow_symlinks=False).st_mtime_ns
70341 + else:
70342 + return sstat.st_mtime_ns
70343 + else:
70344 + # utime() in Python <3.3 only works on the target of a symlink, so it's not
70345 + # possible to preserve mtime on symlinks.
70346 + return os.lstat(dest)[stat.ST_MTIME]
70347 except SystemExit as e:
70348 raise
70349 except Exception as e:
70350 writemsg("!!! %s\n" % _("failed to properly create symlink:"),
70351 noiselevel=-1)
70352 writemsg("!!! %s -> %s\n" % (dest, target), noiselevel=-1)
70353 - writemsg(_unicode_decode("!!! %s\n") % (e,), noiselevel=-1)
70354 + writemsg("!!! %s\n" % (e,), noiselevel=-1)
70355 return None
70356
70357 hardlinked = False
70358 @@ -204,9 +294,13 @@ def movefile(src, dest, newmtime=None, sstat=None, mysettings=None,
70359 writemsg("!!! %s\n" % (e,), noiselevel=-1)
70360 return None
70361 hardlinked = True
70362 + try:
70363 + _os.unlink(src_bytes)
70364 + except OSError:
70365 + pass
70366 break
70367
70368 - renamefailed=1
70369 + renamefailed = 1
70370 if hardlinked:
70371 renamefailed = False
70372 if not hardlinked and (selinux_enabled or sstat.st_dev == dstat.st_dev):
70373 @@ -214,14 +308,14 @@ def movefile(src, dest, newmtime=None, sstat=None, mysettings=None,
70374 if selinux_enabled:
70375 selinux.rename(src, dest)
70376 else:
70377 - os.rename(src,dest)
70378 - renamefailed=0
70379 + os.rename(src, dest)
70380 + renamefailed = 0
70381 except OSError as e:
70382 if e.errno != errno.EXDEV:
70383 # Some random error.
70384 writemsg("!!! %s\n" % _("Failed to move %(src)s to %(dest)s") %
70385 {"src": src, "dest": dest}, noiselevel=-1)
70386 - writemsg(_unicode_decode("!!! %s\n") % (e,), noiselevel=-1)
70387 + writemsg("!!! %s\n" % (e,), noiselevel=-1)
70388 return None
70389 # Invalid cross-device-link 'bind' mounted or actually Cross-Device
70390 if renamefailed:
70391 @@ -233,7 +327,8 @@ def movefile(src, dest, newmtime=None, sstat=None, mysettings=None,
70392 _copyfile(src_bytes, dest_tmp_bytes)
70393 if xattr_enabled:
70394 try:
70395 - _copyxattr(src_bytes, dest_tmp_bytes)
70396 + _copyxattr(src_bytes, dest_tmp_bytes,
70397 + exclude=mysettings.get("PORTAGE_XATTR_EXCLUDE", "security.* system.nfs4_acl"))
70398 except SystemExit:
70399 raise
70400 except:
70401 @@ -252,7 +347,7 @@ def movefile(src, dest, newmtime=None, sstat=None, mysettings=None,
70402 except Exception as e:
70403 writemsg("!!! %s\n" % _('copy %(src)s -> %(dest)s failed.') %
70404 {"src": src, "dest": dest}, noiselevel=-1)
70405 - writemsg(_unicode_decode("!!! %s\n") % (e,), noiselevel=-1)
70406 + writemsg("!!! %s\n" % (e,), noiselevel=-1)
70407 return None
70408 else:
70409 #we don't yet handle special, so we need to fall back to /bin/mv
70410 @@ -265,35 +360,54 @@ def movefile(src, dest, newmtime=None, sstat=None, mysettings=None,
70411 writemsg("!!! %s\n" % a, noiselevel=-1)
70412 return None # failure
70413
70414 - # Always use stat_obj[stat.ST_MTIME] for the integral timestamp which
70415 - # is returned, since the stat_obj.st_mtime float attribute rounds *up*
70416 + # In Python <3.3 always use stat_obj[stat.ST_MTIME] for the integral timestamp
70417 + # which is returned, since the stat_obj.st_mtime float attribute rounds *up*
70418 # if the nanosecond part of the timestamp is 999999881 ns or greater.
70419 try:
70420 if hardlinked:
70421 - newmtime = os.stat(dest)[stat.ST_MTIME]
70422 + if sys.hexversion >= 0x3030000:
70423 + newmtime = os.stat(dest).st_mtime_ns
70424 + else:
70425 + newmtime = os.stat(dest)[stat.ST_MTIME]
70426 else:
70427 # Note: It is not possible to preserve nanosecond precision
70428 # (supported in POSIX.1-2008 via utimensat) with the IEEE 754
70429 # double precision float which only has a 53 bit significand.
70430 if newmtime is not None:
70431 - os.utime(dest, (newmtime, newmtime))
70432 + if sys.hexversion >= 0x3030000:
70433 + os.utime(dest, ns=(newmtime, newmtime))
70434 + else:
70435 + os.utime(dest, (newmtime, newmtime))
70436 else:
70437 - newmtime = sstat[stat.ST_MTIME]
70438 + if sys.hexversion >= 0x3030000:
70439 + newmtime = sstat.st_mtime_ns
70440 + else:
70441 + newmtime = sstat[stat.ST_MTIME]
70442 if renamefailed:
70443 - # If rename succeeded then timestamps are automatically
70444 - # preserved with complete precision because the source
70445 - # and destination inode are the same. Otherwise, round
70446 - # down to the nearest whole second since python's float
70447 - # st_mtime cannot be used to preserve the st_mtim.tv_nsec
70448 - # field with complete precision. Note that we have to use
70449 - # stat_obj[stat.ST_MTIME] here because the float
70450 - # stat_obj.st_mtime rounds *up* sometimes.
70451 - os.utime(dest, (newmtime, newmtime))
70452 + if sys.hexversion >= 0x3030000:
70453 + # If rename succeeded then timestamps are automatically
70454 + # preserved with complete precision because the source
70455 + # and destination inodes are the same. Otherwise, manually
70456 + # update timestamps with nanosecond precision.
70457 + os.utime(dest, ns=(newmtime, newmtime))
70458 + else:
70459 + # If rename succeeded then timestamps are automatically
70460 + # preserved with complete precision because the source
70461 + # and destination inodes are the same. Otherwise, round
70462 + # down to the nearest whole second since python's float
70463 + # st_mtime cannot be used to preserve the st_mtim.tv_nsec
70464 + # field with complete precision. Note that we have to use
70465 + # stat_obj[stat.ST_MTIME] here because the float
70466 + # stat_obj.st_mtime rounds *up* sometimes.
70467 + os.utime(dest, (newmtime, newmtime))
70468 except OSError:
70469 # The utime can fail here with EPERM even though the move succeeded.
70470 # Instead of failing, use stat to return the mtime if possible.
70471 try:
70472 - newmtime = os.stat(dest)[stat.ST_MTIME]
70473 + if sys.hexversion >= 0x3030000:
70474 + newmtime = os.stat(dest).st_mtime_ns
70475 + else:
70476 + newmtime = os.stat(dest)[stat.ST_MTIME]
70477 except OSError as e:
70478 writemsg(_("!!! Failed to stat in movefile()\n"), noiselevel=-1)
70479 writemsg("!!! %s\n" % dest, noiselevel=-1)
70480
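The exclude parameter added to _copyxattr() above takes a whitespace-separated
list of fnmatch globs (PORTAGE_XATTR_EXCLUDE, defaulting to
"security.* system.nfs4_acl"), and any attribute whose name matches one of
them is skipped during the copy. A minimal standalone sketch of that matching
logic, not the portage implementation itself (make_xattr_excluder is an
illustrative name only):

    import fnmatch

    def make_xattr_excluder(pattern):
        # "pattern" is a whitespace-separated list of fnmatch globs, or None.
        globs = tuple(sorted(pattern.split())) if pattern else ()

        def exclude(attr):
            return any(fnmatch.fnmatch(attr, glob) for glob in globs)

        return exclude

    exclude = make_xattr_excluder("security.* system.nfs4_acl")
    exclude("security.selinux")    # True  -> attribute is skipped
    exclude("user.pax.flags")      # False -> attribute is copied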
70481 diff --git a/pym/portage/util/whirlpool.py b/pym/portage/util/whirlpool.py
70482 index c696f6f..170ae73 100644
70483 --- a/pym/portage/util/whirlpool.py
70484 +++ b/pym/portage/util/whirlpool.py
70485 @@ -639,6 +639,8 @@ def WhirlpoolInit(ctx):
70486 return
70487
70488 def WhirlpoolAdd(source, sourceBits, ctx):
70489 + if not isinstance(source, bytes):
70490 + raise TypeError("Expected %s, got %s" % (bytes, type(source)))
70491 if sys.hexversion < 0x3000000:
70492 source = [ord(s)&0xff for s in source]
70493
70494
70495 diff --git a/pym/portage/util/writeable_check.py b/pym/portage/util/writeable_check.py
70496 new file mode 100644
70497 index 0000000..e6ddce6
70498 --- /dev/null
70499 +++ b/pym/portage/util/writeable_check.py
70500 @@ -0,0 +1,79 @@
70501 +#-*- coding:utf-8 -*-
70502 +# Copyright 2014 Gentoo Foundation
70503 +# Distributed under the terms of the GNU General Public License v2
70504 +"""
70505 +Methods to check whether Portage is going to write to read-only filesystems.
70506 +Since the methods are not portable across different OSes, each OS needs its
70507 +own method. To expand RO checking for different OSes, add a method which
70508 +accepts a list of directories and returns a list of mounts which need to be
70509 +remounted RW, then add "elif ostype == (the ostype value for your OS)" to
70510 +get_ro_checker().
70511 +"""
70512 +from __future__ import unicode_literals
70513 +
70514 +import io
70515 +import logging
70516 +import re
70517 +
70518 +from portage import _encodings
70519 +from portage.util import writemsg_level
70520 +from portage.localization import _
70521 +from portage.data import ostype
70522 +
70523 +
70524 +def get_ro_checker():
70525 + """
70526 + Uses the system type to find an appropriate method for testing whether Portage
70527 + is going to write to any read-only filesystems.
70528 +
70529 + @return:
70530 + 1. A method for testing for RO filesystems appropriate to the current system.
70531 + """
70532 + return _CHECKERS.get(ostype, empty_ro_checker)
70533 +
70534 +
70535 +def linux_ro_checker(dir_list):
70536 + """
70537 + Use /proc/mounts to check that no directories installed by the ebuild are set
70538 + to be installed to a read-only filesystem.
70539 +
70540 + @param dir_list: A list of directories installed by the ebuild.
70541 + @type dir_list: List
70542 + @return:
70543 + 1. A list of filesystems which are both set to be written to and are mounted
70544 + read-only, may be empty.
70545 + """
70546 + ro_filesystems = set()
70547 +
70548 + try:
70549 + with io.open("/proc/mounts", mode='r', encoding=_encodings['content'],
70550 + errors='replace') as f:
70551 + roregex = re.compile(r'(\A|,)ro(\Z|,)')
70552 + for line in f:
70553 + if roregex.search(line.split(" ")[3].strip()) is not None:
70554 + romount = line.split(" ")[1].strip()
70555 + ro_filesystems.add(romount)
70556 +
70557 + # If /proc/mounts can't be read, assume that there are no RO
70558 + # filesystems and return.
70559 + except EnvironmentError:
70560 + writemsg_level(_("!!! /proc/mounts cannot be read"),
70561 + level=logging.WARNING, noiselevel=-1)
70562 + return []
70563 +
70564 + return set.intersection(ro_filesystems, set(dir_list))
70565 +
70566 +
70567 +def empty_ro_checker(dir_list):
70568 + """
70569 + Always returns [], this is the fallback function if the system does not have
70570 + an ro_checker method defined.
70571 + """
70572 + return []
70573 +
70574 +
70575 +# _CHECKERS is a map from ostype output to the appropriate function to return
70576 +# in get_ro_checker.
70577 +_CHECKERS = {
70578 + "Linux": linux_ro_checker,
70579 +}
70580
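linux_ro_checker() above depends on the /proc/mounts layout, where the second
field is the mount point and the fourth field is the comma-separated option
list. A rough standalone equivalent of the read-only scan, with the same
silent fallback when the file cannot be read (read_only_mounts is an
illustrative name, not a portage API):

    import re

    def read_only_mounts(mounts_path="/proc/mounts"):
        ro = re.compile(r'(\A|,)ro(\Z|,)')
        mounts = set()
        try:
            with open(mounts_path) as f:
                for line in f:
                    fields = line.split(" ")
                    if ro.search(fields[3].strip()) is not None:
                        mounts.add(fields[1])
        except EnvironmentError:
            # Mirrors the fallback above: an unreadable /proc/mounts is
            # treated as "no read-only filesystems".
            return set()
        return mounts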
70581 diff --git a/pym/portage/versions.py b/pym/portage/versions.py
70582 index 2794753..2c9fe5b 100644
70583 --- a/pym/portage/versions.py
70584 +++ b/pym/portage/versions.py
70585 @@ -1,7 +1,9 @@
70586 # versions.py -- core Portage functionality
70587 -# Copyright 1998-2012 Gentoo Foundation
70588 +# Copyright 1998-2014 Gentoo Foundation
70589 # Distributed under the terms of the GNU General Public License v2
70590
70591 +from __future__ import unicode_literals
70592 +
70593 __all__ = [
70594 'best', 'catpkgsplit', 'catsplit',
70595 'cpv_getkey', 'cpv_getversion', 'cpv_sort_key', 'pkgcmp', 'pkgsplit',
70596 @@ -19,7 +21,6 @@ else:
70597
70598 import portage
70599 portage.proxy.lazyimport.lazyimport(globals(),
70600 - 'portage.dep:_get_slot_re',
70601 'portage.repository.config:_gen_valid_repo',
70602 'portage.util:cmp_sort_key',
70603 )
70604 @@ -32,6 +33,10 @@ _unknown_repo = "__unknown__"
70605
70606 # \w is [a-zA-Z0-9_]
70607
70608 +# PMS 3.1.3: A slot name may contain any of the characters [A-Za-z0-9+_.-].
70609 +# It must not begin with a hyphen or a dot.
70610 +_slot = r'([\w+][\w+.-]*)'
70611 +
70612 # 2.1.1 A category name may contain any of the characters [A-Za-z0-9+_.-].
70613 # It must not begin with a hyphen or a dot.
70614 _cat = r'[\w+][\w+.-]*'
70615 @@ -66,6 +71,24 @@ suffix_regexp = re.compile("^(alpha|beta|rc|pre|p)(\\d*)$")
70616 suffix_value = {"pre": -2, "p": 0, "alpha": -4, "beta": -3, "rc": -1}
70617 endversion_keys = ["pre", "p", "alpha", "beta", "rc"]
70618
70619 +_slot_re_cache = {}
70620 +
70621 +def _get_slot_re(eapi_attrs):
70622 + cache_key = eapi_attrs.slot_operator
70623 + slot_re = _slot_re_cache.get(cache_key)
70624 + if slot_re is not None:
70625 + return slot_re
70626 +
70627 + if eapi_attrs.slot_operator:
70628 + slot_re = _slot + r'(/' + _slot + r')?'
70629 + else:
70630 + slot_re = _slot
70631 +
70632 + slot_re = re.compile('^' + slot_re + '$', re.VERBOSE | re.UNICODE)
70633 +
70634 + _slot_re_cache[cache_key] = slot_re
70635 + return slot_re
70636 +
70637 _pv_re_cache = {}
70638
70639 def _get_pv_re(eapi_attrs):
70640 @@ -79,18 +102,18 @@ def _get_pv_re(eapi_attrs):
70641 else:
70642 pv_re = _pv['dots_disallowed_in_PN']
70643
70644 - pv_re = re.compile('^' + pv_re + '$', re.VERBOSE)
70645 + pv_re = re.compile(r'^' + pv_re + r'$', re.VERBOSE | re.UNICODE)
70646
70647 _pv_re_cache[cache_key] = pv_re
70648 return pv_re
70649
70650 def ververify(myver, silent=1):
70651 if ver_regexp.match(myver):
70652 - return 1
70653 + return True
70654 else:
70655 if not silent:
70656 print(_("!!! syntax error in version: %s") % myver)
70657 - return 0
70658 + return False
70659
70660 def vercmp(ver1, ver2, silent=1):
70661 """
70662 @@ -292,7 +315,7 @@ def _pkgsplit(mypkg, eapi=None):
70663
70664 return (m.group('pn'), m.group('ver'), rev)
70665
70666 -_cat_re = re.compile('^%s$' % _cat)
70667 +_cat_re = re.compile('^%s$' % _cat, re.UNICODE)
70668 _missing_cat = 'null'
70669
70670 def catpkgsplit(mydata, silent=1, eapi=None):
70671 @@ -314,11 +337,11 @@ def catpkgsplit(mydata, silent=1, eapi=None):
70672 except AttributeError:
70673 pass
70674 mysplit = mydata.split('/', 1)
70675 - p_split=None
70676 - if len(mysplit)==1:
70677 + p_split = None
70678 + if len(mysplit) == 1:
70679 cat = _missing_cat
70680 p_split = _pkgsplit(mydata, eapi=eapi)
70681 - elif len(mysplit)==2:
70682 + elif len(mysplit) == 2:
70683 cat = mysplit[0]
70684 if _cat_re.match(cat) is not None:
70685 p_split = _pkgsplit(mysplit[1], eapi=eapi)
70686 @@ -337,14 +360,23 @@ class _pkg_str(_unicode):
70687 manually convert them to a plain unicode object first.
70688 """
70689
70690 - def __new__(cls, cpv, slot=None, repo=None, eapi=None):
70691 + def __new__(cls, cpv, metadata=None, settings=None, eapi=None,
70692 + repo=None, slot=None):
70693 return _unicode.__new__(cls, cpv)
70694
70695 - def __init__(self, cpv, slot=None, repo=None, eapi=None):
70696 + def __init__(self, cpv, metadata=None, settings=None, eapi=None,
70697 + repo=None, slot=None):
70698 if not isinstance(cpv, _unicode):
70699 # Avoid TypeError from _unicode.__init__ with PyPy.
70700 cpv = _unicode_decode(cpv)
70701 _unicode.__init__(cpv)
70702 + if metadata is not None:
70703 + self.__dict__['_metadata'] = metadata
70704 + slot = metadata.get('SLOT', slot)
70705 + repo = metadata.get('repository', repo)
70706 + eapi = metadata.get('EAPI', eapi)
70707 + if settings is not None:
70708 + self.__dict__['_settings'] = settings
70709 if eapi is not None:
70710 self.__dict__['eapi'] = eapi
70711 self.__dict__['cpv_split'] = catpkgsplit(cpv, eapi=eapi)
70712 @@ -363,19 +395,19 @@ class _pkg_str(_unicode):
70713 if slot_match is None:
70714 # Avoid an InvalidAtom exception when creating SLOT atoms
70715 self.__dict__['slot'] = '0'
70716 - self.__dict__['slot_abi'] = '0'
70717 + self.__dict__['sub_slot'] = '0'
70718 self.__dict__['slot_invalid'] = slot
70719 else:
70720 - if eapi_attrs.slot_abi:
70721 + if eapi_attrs.slot_operator:
70722 slot_split = slot.split("/")
70723 self.__dict__['slot'] = slot_split[0]
70724 if len(slot_split) > 1:
70725 - self.__dict__['slot_abi'] = slot_split[1]
70726 + self.__dict__['sub_slot'] = slot_split[1]
70727 else:
70728 - self.__dict__['slot_abi'] = slot_split[0]
70729 + self.__dict__['sub_slot'] = slot_split[0]
70730 else:
70731 self.__dict__['slot'] = slot
70732 - self.__dict__['slot_abi'] = slot
70733 + self.__dict__['sub_slot'] = slot
70734
70735 if repo is not None:
70736 repo = _gen_valid_repo(repo)
70737 @@ -387,6 +419,25 @@ class _pkg_str(_unicode):
70738 raise AttributeError("_pkg_str instances are immutable",
70739 self.__class__, name, value)
70740
70741 + @property
70742 + def stable(self):
70743 + try:
70744 + return self._stable
70745 + except AttributeError:
70746 + try:
70747 + metadata = self._metadata
70748 + settings = self._settings
70749 + except AttributeError:
70750 + raise AttributeError('stable')
70751 + if not settings.local_config:
70752 + # Since repoman uses different config instances for
70753 + # different profiles, our local instance does not
70754 + # refer to the correct profile.
70755 + raise AssertionError('invalid context')
70756 + stable = settings._isStable(self)
70757 + self.__dict__['_stable'] = stable
70758 + return stable
70759 +
70760 def pkgsplit(mypkg, silent=1, eapi=None):
70761 """
70762 @param mypkg: either a pv or cpv
70763 @@ -488,7 +539,7 @@ def cpv_sort_key(eapi=None):
70764 return cmp_sort_key(cmp_cpv)
70765
70766 def catsplit(mydep):
70767 - return mydep.split("/", 1)
70768 + return mydep.split("/", 1)
70769
70770 def best(mymatches, eapi=None):
70771 """Accepts None arguments; assumes matches are valid."""
70772
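The slot handling moved into versions.py matches a PMS slot name plus an
optional "/sub-slot" suffix when the EAPI supports slot operators, and
_pkg_str then exposes the two halves as slot and sub_slot. A small
illustration of that split, reusing the _slot pattern quoted above (this is a
sketch, not the exact compiled form with re.VERBOSE):

    import re

    _slot = r'([\w+][\w+.-]*)'
    slot_re = re.compile('^' + _slot + '(/' + _slot + ')?$', re.UNICODE)

    assert slot_re.match("0/5.12") is not None

    slot_split = "0/5.12".split("/")
    slot = slot_split[0]                                   # "0"
    sub_slot = slot_split[1] if len(slot_split) > 1 else slot_split[0]
    # With a plain SLOT such as "2", sub_slot falls back to "2",
    # matching the else branch in _pkg_str.__init__ above.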
70773 diff --git a/pym/portage/xml/metadata.py b/pym/portage/xml/metadata.py
70774 index f820e54..fcd9dc0 100644
70775 --- a/pym/portage/xml/metadata.py
70776 +++ b/pym/portage/xml/metadata.py
70777 @@ -1,4 +1,4 @@
70778 -# Copyright 2010-2012 Gentoo Foundation
70779 +# Copyright 2010-2013 Gentoo Foundation
70780 # Distributed under the terms of the GNU General Public License v2
70781
70782 """Provides an easy-to-use python interface to Gentoo's metadata.xml file.
70783 @@ -28,6 +28,8 @@
70784 'Thomas Mills Hinkle'
70785 """
70786
70787 +from __future__ import unicode_literals
70788 +
70789 __all__ = ('MetaDataXML',)
70790
70791 import sys
70792 @@ -58,8 +60,7 @@ except (ImportError, SystemError, RuntimeError, Exception):
70793
70794 import re
70795 import xml.etree.ElementTree
70796 -import portage
70797 -from portage import os, _unicode_decode
70798 +from portage import _encodings, _unicode_encode
70799 from portage.util import unique_everseen
70800
70801 class _MetadataTreeBuilder(xml.etree.ElementTree.TreeBuilder):
70802 @@ -203,12 +204,13 @@ class MetaDataXML(object):
70803 self._xml_tree = None
70804
70805 try:
70806 - self._xml_tree = etree.parse(metadata_xml_path,
70807 + self._xml_tree = etree.parse(_unicode_encode(metadata_xml_path,
70808 + encoding=_encodings['fs'], errors='strict'),
70809 parser=etree.XMLParser(target=_MetadataTreeBuilder()))
70810 except ImportError:
70811 pass
70812 except ExpatError as e:
70813 - raise SyntaxError(_unicode_decode("%s") % (e,))
70814 + raise SyntaxError("%s" % (e,))
70815
70816 if isinstance(herds, etree.ElementTree):
70817 herds_etree = herds
70818 @@ -241,7 +243,8 @@ class MetaDataXML(object):
70819
70820 if self._herdstree is None:
70821 try:
70822 - self._herdstree = etree.parse(self._herds_path,
70823 + self._herdstree = etree.parse(_unicode_encode(self._herds_path,
70824 + encoding=_encodings['fs'], errors='strict'),
70825 parser=etree.XMLParser(target=_MetadataTreeBuilder()))
70826 except (ImportError, IOError, SyntaxError):
70827 return None
70828
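The metadata.xml and herds.xml changes both encode the path before handing it
to etree.parse(), so the parser opens the file from a byte string rather than
a unicode path. A short sketch of the same idea outside portage, assuming a
plain UTF-8 filesystem encoding in place of _encodings['fs'] (parse_metadata
is an illustrative name only):

    import xml.etree.ElementTree as etree

    def parse_metadata(path, fs_encoding='utf-8'):
        # Hand ElementTree a byte-encoded path, in the spirit of the
        # _unicode_encode() calls above; 'utf-8' is only an assumed
        # stand-in for portage's filesystem encoding.
        if not isinstance(path, bytes):
            path = path.encode(fs_encoding, 'strict')
        return etree.parse(path)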
70829 diff --git a/pym/portage/xpak.py b/pym/portage/xpak.py
70830 index 73f84ab..b4567be 100644
70831 --- a/pym/portage/xpak.py
70832 +++ b/pym/portage/xpak.py
70833 @@ -1,4 +1,4 @@
70834 -# Copyright 2001-2012 Gentoo Foundation
70835 +# Copyright 2001-2014 Gentoo Foundation
70836 # Distributed under the terms of the GNU General Public License v2
70837
70838
70839 @@ -15,10 +15,12 @@
70840 # (integer) == encodeint(integer) ===> 4 characters (big-endian copy)
70841 # '+' means concatenate the fields ===> All chunks are strings
70842
70843 -__all__ = ['addtolist', 'decodeint', 'encodeint', 'getboth',
70844 +__all__ = [
70845 + 'addtolist', 'decodeint', 'encodeint', 'getboth',
70846 'getindex', 'getindex_mem', 'getitem', 'listindex',
70847 'searchindex', 'tbz2', 'xpak_mem', 'xpak', 'xpand',
70848 - 'xsplit', 'xsplit_mem']
70849 + 'xsplit', 'xsplit_mem',
70850 +]
70851
70852 import array
70853 import errno
70854
70855 diff --git a/pym/repoman/checks.py b/pym/repoman/checks.py
70856 index ca4c260..8032b28 100644
70857 --- a/pym/repoman/checks.py
70858 +++ b/pym/repoman/checks.py
70859 @@ -1,10 +1,12 @@
70860 # repoman: Checks
70861 -# Copyright 2007-2012 Gentoo Foundation
70862 +# Copyright 2007-2014 Gentoo Foundation
70863 # Distributed under the terms of the GNU General Public License v2
70864
70865 """This module contains functions used in Repoman to ascertain the quality
70866 and correctness of an ebuild."""
70867
70868 +from __future__ import unicode_literals
70869 +
70870 import codecs
70871 from itertools import chain
70872 import re
70873 @@ -13,8 +15,7 @@ import repoman.errors as errors
70874 import portage
70875 from portage.eapi import eapi_supports_prefix, eapi_has_implicit_rdepend, \
70876 eapi_has_src_prepare_and_src_configure, eapi_has_dosed_dohard, \
70877 - eapi_exports_AA
70878 -from portage.const import _ENABLE_INHERIT_CHECK
70879 + eapi_exports_AA, eapi_has_pkg_pretend
70880
70881 class LineCheck(object):
70882 """Run a check on a line of an ebuild."""
70883 @@ -69,7 +70,7 @@ class EbuildHeader(LineCheck):
70884 Copyright header errors
70885 CVS header errors
70886 License header errors
70887 -
70888 +
70889 Args:
70890 modification_year - Year the ebuild was last modified
70891 """
70892 @@ -112,7 +113,7 @@ class EbuildWhitespace(LineCheck):
70893 ignore_line = re.compile(r'(^$)|(^(\t)*#)')
70894 ignore_comment = False
70895 leading_spaces = re.compile(r'^[\S\t]')
70896 - trailing_whitespace = re.compile(r'.*([\S]$)')
70897 + trailing_whitespace = re.compile(r'.*([\S]$)')
70898
70899 def check(self, num, line):
70900 if self.leading_spaces.match(line) is None:
70901 @@ -162,6 +163,9 @@ class EbuildQuote(LineCheck):
70902 "GAMES_DATADIR_BASE", "GAMES_SYSCONFDIR", "GAMES_STATEDIR",
70903 "GAMES_LOGDIR", "GAMES_BINDIR"]
70904
70905 + # variables for multibuild.eclass
70906 + var_names += ["BUILD_DIR"]
70907 +
70908 var_names = "(%s)" % "|".join(var_names)
70909 var_reference = re.compile(r'\$(\{'+var_names+'\}|' + \
70910 var_names + '\W)')
70911 @@ -169,7 +173,7 @@ class EbuildQuote(LineCheck):
70912 r'\}?[^"\'\s]*(\s|$)')
70913 cond_begin = re.compile(r'(^|\s+)\[\[($|\\$|\s+)')
70914 cond_end = re.compile(r'(^|\s+)\]\]($|\\$|\s+)')
70915 -
70916 +
70917 def check(self, num, line):
70918 if self.var_reference.search(line) is None:
70919 return
70920 @@ -221,21 +225,13 @@ class EbuildAssignment(LineCheck):
70921 """Ensure ebuilds don't assign to readonly variables."""
70922
70923 repoman_check_name = 'variable.readonly'
70924 -
70925 readonly_assignment = re.compile(r'^\s*(export\s+)?(A|CATEGORY|P|PV|PN|PR|PVR|PF|D|WORKDIR|FILESDIR|FEATURES|USE)=')
70926 - line_continuation = re.compile(r'([^#]*\S)(\s+|\t)\\$')
70927 - ignore_line = re.compile(r'(^$)|(^(\t)*#)')
70928 - ignore_comment = False
70929 -
70930 - def __init__(self):
70931 - self.previous_line = None
70932
70933 def check(self, num, line):
70934 match = self.readonly_assignment.match(line)
70935 e = None
70936 - if match and (not self.previous_line or not self.line_continuation.match(self.previous_line)):
70937 + if match is not None:
70938 e = errors.READONLY_ASSIGNMENT_ERROR
70939 - self.previous_line = line
70940 return e
70941
70942 class Eapi3EbuildAssignment(EbuildAssignment):
70943 @@ -247,11 +243,11 @@ class Eapi3EbuildAssignment(EbuildAssignment):
70944 return eapi_supports_prefix(eapi)
70945
70946 class EbuildNestedDie(LineCheck):
70947 - """Check ebuild for nested die statements (die statements in subshells"""
70948 -
70949 + """Check ebuild for nested die statements (die statements in subshells)"""
70950 +
70951 repoman_check_name = 'ebuild.nesteddie'
70952 nesteddie_re = re.compile(r'^[^#]*\s\(\s[^)]*\bdie\b')
70953 -
70954 +
70955 def check(self, num, line):
70956 if self.nesteddie_re.match(line):
70957 return errors.NESTED_DIE_ERROR
70958 @@ -296,7 +292,7 @@ class EapiDefinition(LineCheck):
70959 _eapi_re = portage._pms_eapi_re
70960
70961 def new(self, pkg):
70962 - self._cached_eapi = pkg.metadata['EAPI']
70963 + self._cached_eapi = pkg.eapi
70964 self._parsed_eapi = None
70965 self._eapi_line_num = None
70966
70967 @@ -386,13 +382,18 @@ class InheritDeprecated(LineCheck):
70968 # deprecated eclass : new eclass (False if no new eclass)
70969 deprecated_classes = {
70970 "bash-completion": "bash-completion-r1",
70971 + "boost-utils": False,
70972 + "distutils": "distutils-r1",
70973 "gems": "ruby-fakegem",
70974 "git": "git-2",
70975 + "mono": "mono-env",
70976 "mozconfig-2": "mozconfig-3",
70977 "mozcoreconf": "mozcoreconf-2",
70978 "php-ext-pecl-r1": "php-ext-pecl-r2",
70979 "php-ext-source-r1": "php-ext-source-r2",
70980 "php-pear": "php-pear-r1",
70981 + "python": "python-r1 / python-single-r1 / python-any-r1",
70982 + "python-distutils-ng": "python-r1 + distutils-r1",
70983 "qt3": False,
70984 "qt4": "qt4-r2",
70985 "ruby": "ruby-ng",
70986 @@ -471,13 +472,13 @@ class InheritEclass(LineCheck):
70987 self._inherit_re = re.compile(r'^(\s*|.*[|&]\s*)\binherit\s(.*\s)?%s(\s|$)' % inherit_re)
70988 # Match when the function is preceded only by leading whitespace, a
70989 # shell operator such as (, {, |, ||, or &&, or optional variable
70990 - # setting(s). This prevents false postives in things like elog
70991 + # setting(s). This prevents false positives in things like elog
70992 # messages, as reported in bug #413285.
70993 self._func_re = re.compile(r'(^|[|&{(])\s*(\w+=.*)?\b(' + '|'.join(funcs) + r')\b')
70994
70995 def new(self, pkg):
70996 self.repoman_check_name = 'inherit.missing'
70997 - # We can't use pkg.inherited because that tells us all the eclass that
70998 + # We can't use pkg.inherited because that tells us all the eclasses that
70999 # have been inherited and not just the ones we inherit directly.
71000 self._inherit = False
71001 self._func_call = False
71002 @@ -486,6 +487,7 @@ class InheritEclass(LineCheck):
71003 self._disabled = any(x in inherited for x in self._exempt_eclasses)
71004 else:
71005 self._disabled = False
71006 + self._eapi = pkg.eapi
71007
71008 def check(self, num, line):
71009 if not self._inherit:
71010 @@ -494,10 +496,14 @@ class InheritEclass(LineCheck):
71011 if self._disabled or self._ignore_missing:
71012 return
71013 s = self._func_re.search(line)
71014 - if s:
71015 - self._func_call = True
71016 - return '%s.eclass is not inherited, but "%s" found at line: %s' % \
71017 - (self._eclass, s.group(3), '%d')
71018 + if s is not None:
71019 + func_name = s.group(3)
71020 + eapi_func = _eclass_eapi_functions.get(func_name)
71021 + if eapi_func is None or not eapi_func(self._eapi):
71022 + self._func_call = True
71023 + return ('%s.eclass is not inherited, '
71024 + 'but "%s" found at line: %s') % \
71025 + (self._eclass, func_name, '%d')
71026 elif not self._func_call:
71027 self._func_call = self._func_re.search(line)
71028
71029 @@ -506,6 +512,10 @@ class InheritEclass(LineCheck):
71030 self.repoman_check_name = 'inherit.unused'
71031 yield 'no function called from %s.eclass; please drop' % self._eclass
71032
71033 +_eclass_eapi_functions = {
71034 + "usex" : lambda eapi: eapi not in ("0", "1", "2", "3", "4", "4-python", "4-slot-abi")
71035 +}
71036 +
71037 # eclasses that export ${ECLASS}_src_(compile|configure|install)
71038 _eclass_export_functions = (
71039 'ant-tasks', 'apache-2', 'apache-module', 'aspell-dict',
71040 @@ -558,8 +568,7 @@ _eclass_info = {
71041 'funcs': (
71042 'estack_push', 'estack_pop', 'eshopts_push', 'eshopts_pop',
71043 'eumask_push', 'eumask_pop', 'epatch', 'epatch_user',
71044 - 'emktemp', 'edos2unix', 'in_iuse', 'use_if_iuse', 'usex',
71045 - 'makeopts_jobs'
71046 + 'emktemp', 'edos2unix', 'in_iuse', 'use_if_iuse', 'usex'
71047 ),
71048 'comprehensive': False,
71049
71050 @@ -589,8 +598,16 @@ _eclass_info = {
71051 ),
71052
71053 # These are "eclasses are the whole ebuild" type thing.
71054 - 'exempt_eclasses': _eclass_export_functions + ('autotools', 'libtool'),
71055 + 'exempt_eclasses': _eclass_export_functions + ('autotools', 'libtool',
71056 + 'multilib-minimal'),
71057 +
71058 + 'comprehensive': False
71059 + },
71060
71061 + 'multiprocessing': {
71062 + 'funcs': (
71063 + 'makeopts_jobs',
71064 + ),
71065 'comprehensive': False
71066 },
71067
71068 @@ -617,49 +634,6 @@ _eclass_info = {
71069 }
71070 }
71071
71072 -if not _ENABLE_INHERIT_CHECK:
71073 - # Since the InheritEclass check is experimental, in the stable branch
71074 - # we emulate the old eprefixify.defined and inherit.autotools checks.
71075 - _eclass_info = {
71076 - 'autotools': {
71077 - 'funcs': (
71078 - 'eaclocal', 'eautoconf', 'eautoheader',
71079 - 'eautomake', 'eautoreconf', '_elibtoolize',
71080 - 'eautopoint'
71081 - ),
71082 - 'comprehensive': True,
71083 - 'ignore_missing': True,
71084 - 'exempt_eclasses': ('git', 'git-2', 'subversion', 'autotools-utils')
71085 - },
71086 -
71087 - 'prefix': {
71088 - 'funcs': (
71089 - 'eprefixify',
71090 - ),
71091 - 'comprehensive': False
71092 - }
71093 - }
71094 -
71095 -class IUseUndefined(LineCheck):
71096 - """
71097 - Make sure the ebuild defines IUSE (style guideline
71098 - says to define IUSE even when empty).
71099 - """
71100 -
71101 - repoman_check_name = 'IUSE.undefined'
71102 - _iuse_def_re = re.compile(r'^IUSE=.*')
71103 -
71104 - def new(self, pkg):
71105 - self._iuse_def = None
71106 -
71107 - def check(self, num, line):
71108 - if self._iuse_def is None:
71109 - self._iuse_def = self._iuse_def_re.match(line)
71110 -
71111 - def end(self):
71112 - if self._iuse_def is None:
71113 - yield 'IUSE is not defined'
71114 -
71115 class EMakeParallelDisabled(PhaseCheck):
71116 """Check for emake -j1 calls which disable parallelization."""
71117 repoman_check_name = 'upstream.workaround'
71118 @@ -684,8 +658,8 @@ class NoAsNeeded(LineCheck):
71119 error = errors.NO_AS_NEEDED
71120
71121 class PreserveOldLib(LineCheck):
71122 - """Check for calls to the preserve_old_lib function."""
71123 - repoman_check_name = 'upstream.workaround'
71124 + """Check for calls to the deprecated preserve_old_lib function."""
71125 + repoman_check_name = 'ebuild.minorsyn'
71126 re = re.compile(r'.*preserve_old_lib')
71127 error = errors.PRESERVE_OLD_LIB
71128
71129 @@ -757,6 +731,21 @@ class DeprecatedHasq(LineCheck):
71130 re = re.compile(r'(^|.*\b)hasq\b')
71131 error = errors.HASQ_ERROR
71132
71133 +# EAPI <2 checks
71134 +class UndefinedSrcPrepareSrcConfigurePhases(LineCheck):
71135 + repoman_check_name = 'EAPI.incompatible'
71136 + src_configprepare_re = re.compile(r'\s*(src_configure|src_prepare)\s*\(\)')
71137 +
71138 + def check_eapi(self, eapi):
71139 + return not eapi_has_src_prepare_and_src_configure(eapi)
71140 +
71141 + def check(self, num, line):
71142 + m = self.src_configprepare_re.match(line)
71143 + if m is not None:
71144 + return ("'%s'" % m.group(1)) + \
71145 + " phase is not defined in EAPI < 2 on line: %d"
71146 +
71147 +
71148 # EAPI-3 checks
71149 class Eapi3DeprecatedFuncs(LineCheck):
71150 repoman_check_name = 'EAPI.deprecated'
71151 @@ -771,6 +760,20 @@ class Eapi3DeprecatedFuncs(LineCheck):
71152 return ("'%s'" % m.group(1)) + \
71153 " has been deprecated in EAPI=3 on line: %d"
71154
71155 +# EAPI <4 checks
71156 +class UndefinedPkgPretendPhase(LineCheck):
71157 + repoman_check_name = 'EAPI.incompatible'
71158 + pkg_pretend_re = re.compile(r'\s*(pkg_pretend)\s*\(\)')
71159 +
71160 + def check_eapi(self, eapi):
71161 + return not eapi_has_pkg_pretend(eapi)
71162 +
71163 + def check(self, num, line):
71164 + m = self.pkg_pretend_re.match(line)
71165 + if m is not None:
71166 + return ("'%s'" % m.group(1)) + \
71167 + " phase is not defined in EAPI < 4 on line: %d"
71168 +
71169 # EAPI-4 checks
71170 class Eapi4IncompatibleFuncs(LineCheck):
71171 repoman_check_name = 'EAPI.incompatible'
71172 @@ -803,7 +806,7 @@ class PortageInternal(LineCheck):
71173 repoman_check_name = 'portage.internal'
71174 ignore_comment = True
71175 # Match when the command is preceded only by leading whitespace or a shell
71176 - # operator such as (, {, |, ||, or &&. This prevents false postives in
71177 + # operator such as (, {, |, ||, or &&. This prevents false positives in
71178 # things like elog messages, as reported in bug #413285.
71179 re = re.compile(r'^(\s*|.*[|&{(]+\s*)\b(ecompress|ecompressdir|env-update|prepall|prepalldocs|preplib)\b')
71180
71181 @@ -813,19 +816,52 @@ class PortageInternal(LineCheck):
71182 if m is not None:
71183 return ("'%s'" % m.group(2)) + " called on line: %d"
71184
71185 -_constant_checks = tuple(chain((c() for c in (
71186 - EbuildHeader, EbuildWhitespace, EbuildBlankLine, EbuildQuote,
71187 - EbuildAssignment, Eapi3EbuildAssignment, EbuildUselessDodoc,
71188 - EbuildUselessCdS, EbuildNestedDie,
71189 - EbuildPatches, EbuildQuotedA, EapiDefinition,
71190 - ImplicitRuntimeDeps, IUseUndefined,
71191 - EMakeParallelDisabled, EMakeParallelDisabledViaMAKEOPTS, NoAsNeeded,
71192 - DeprecatedBindnowFlags, SrcUnpackPatches, WantAutoDefaultValue,
71193 - SrcCompileEconf, Eapi3DeprecatedFuncs, NoOffsetWithHelpers,
71194 - Eapi4IncompatibleFuncs, Eapi4GoneVars, BuiltWithUse,
71195 - PreserveOldLib, SandboxAddpredict, PortageInternal,
71196 - DeprecatedUseq, DeprecatedHasq)),
71197 - (InheritEclass(k, **kwargs) for k, kwargs in _eclass_info.items())))
71198 +class PortageInternalVariableAssignment(LineCheck):
71199 + repoman_check_name = 'portage.internal'
71200 + internal_assignment = re.compile(r'\s*(export\s+)?(EXTRA_ECONF|EXTRA_EMAKE)\+?=')
71201 +
71202 + def check(self, num, line):
71203 + match = self.internal_assignment.match(line)
71204 + e = None
71205 + if match is not None:
71206 + e = 'Assignment to variable %s' % match.group(2)
71207 + e += ' on line: %d'
71208 + return e
71209 +
71210 +_base_check_classes = (InheritEclass, LineCheck, PhaseCheck)
71211 +_constant_checks = None
71212 +
71213 +def _init(experimental_inherit=False):
71214 +
71215 + global _constant_checks, _eclass_info
71216 +
71217 + if not experimental_inherit:
71218 + # Emulate the old eprefixify.defined and inherit.autotools checks.
71219 + _eclass_info = {
71220 + 'autotools': {
71221 + 'funcs': (
71222 + 'eaclocal', 'eautoconf', 'eautoheader',
71223 + 'eautomake', 'eautoreconf', '_elibtoolize',
71224 + 'eautopoint'
71225 + ),
71226 + 'comprehensive': True,
71227 + 'ignore_missing': True,
71228 + 'exempt_eclasses': ('git', 'git-2', 'subversion', 'autotools-utils')
71229 + },
71230 +
71231 + 'prefix': {
71232 + 'funcs': (
71233 + 'eprefixify',
71234 + ),
71235 + 'comprehensive': False
71236 + }
71237 + }
71238 +
71239 + _constant_checks = tuple(chain((v() for k, v in globals().items()
71240 + if isinstance(v, type) and issubclass(v, LineCheck) and
71241 + v not in _base_check_classes),
71242 + (InheritEclass(k, **portage._native_kwargs(kwargs))
71243 + for k, kwargs in _eclass_info.items())))
71244
71245 _here_doc_re = re.compile(r'.*\s<<[-]?(\w+)$')
71246 _ignore_comment_re = re.compile(r'^\s*#')
71247 @@ -833,6 +869,8 @@ _ignore_comment_re = re.compile(r'^\s*#')
71248 def run_checks(contents, pkg):
71249 unicode_escape_codec = codecs.lookup('unicode_escape')
71250 unicode_escape = lambda x: unicode_escape_codec.decode(x)[0]
71251 + if _constant_checks is None:
71252 + _init()
71253 checks = _constant_checks
71254 here_doc_delim = None
71255 multiline = None
71256 @@ -888,17 +926,18 @@ def run_checks(contents, pkg):
71257 multiline = line
71258 continue
71259
71260 - # Finally we have a full line to parse.
71261 - is_comment = _ignore_comment_re.match(line) is not None
71262 - for lc in checks:
71263 - if is_comment and lc.ignore_comment:
71264 - continue
71265 - if lc.check_eapi(pkg.metadata['EAPI']):
71266 - ignore = lc.ignore_line
71267 - if not ignore or not ignore.match(line):
71268 - e = lc.check(num, line)
71269 - if e:
71270 - yield lc.repoman_check_name, e % (num + 1)
71271 + if not line.endswith("#nowarn\n"):
71272 + # Finally we have a full line to parse.
71273 + is_comment = _ignore_comment_re.match(line) is not None
71274 + for lc in checks:
71275 + if is_comment and lc.ignore_comment:
71276 + continue
71277 + if lc.check_eapi(pkg.eapi):
71278 + ignore = lc.ignore_line
71279 + if not ignore or not ignore.match(line):
71280 + e = lc.check(num, line)
71281 + if e:
71282 + yield lc.repoman_check_name, e % (num + 1)
71283
71284 for lc in checks:
71285 i = lc.end()
71286
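The checks added here (PortageInternalVariableAssignment,
UndefinedSrcPrepareSrcConfigurePhases, UndefinedPkgPretendPhase) all follow
the same shape: a repoman_check_name, a compiled regex, and a check() method
that returns a message containing a %d placeholder, which run_checks() later
fills with the line number (and now skips entirely for lines ending in
"#nowarn"). A minimal sketch of that shape, using a purely hypothetical check
name and helper:

    import re

    class BannedHelperCheck(object):
        """Hypothetical check: flag calls to a banned helper."""
        repoman_check_name = 'ebuild.badcall'
        banned_re = re.compile(r'^\s*some_banned_helper\b')

        def check(self, num, line):
            if self.banned_re.match(line) is not None:
                return "'some_banned_helper' called on line: %d"

    # run_checks() later does roughly:
    #     yield lc.repoman_check_name, e % (num + 1)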
71287 diff --git a/pym/repoman/errors.py b/pym/repoman/errors.py
71288 index c515502..3833be6 100644
71289 --- a/pym/repoman/errors.py
71290 +++ b/pym/repoman/errors.py
71291 @@ -1,7 +1,9 @@
71292 # repoman: Error Messages
71293 -# Copyright 2007-2011 Gentoo Foundation
71294 +# Copyright 2007-2013 Gentoo Foundation
71295 # Distributed under the terms of the GNU General Public License v2
71296
71297 +from __future__ import unicode_literals
71298 +
71299 COPYRIGHT_ERROR = 'Invalid Gentoo Copyright on line: %d'
71300 LICENSE_ERROR = 'Invalid Gentoo/GPL License on line: %d'
71301 CVS_HEADER_ERROR = 'Malformed CVS Header on line: %d'
71302 @@ -17,7 +19,7 @@ EMAKE_PARALLEL_DISABLED_VIA_MAKEOPTS = 'Upstream parallel compilation bug (MAKEO
71303 DEPRECATED_BINDNOW_FLAGS = 'Deprecated bindnow-flags call on line: %d'
71304 EAPI_DEFINED_AFTER_INHERIT = 'EAPI defined after inherit on line: %d'
71305 NO_AS_NEEDED = 'Upstream asneeded linking bug (no-as-needed on line: %d)'
71306 -PRESERVE_OLD_LIB = 'Upstream ABI change workaround on line: %d'
71307 +PRESERVE_OLD_LIB = 'Ebuild calls deprecated preserve_old_lib on line: %d'
71308 BUILT_WITH_USE = 'built_with_use on line: %d'
71309 NO_OFFSET_WITH_HELPERS = "Helper function is used with D, ROOT, ED, EROOT or EPREFIX on line :%d"
71310 SANDBOX_ADDPREDICT = 'Ebuild calls addpredict on line: %d'
71311
71312 diff --git a/pym/repoman/herdbase.py b/pym/repoman/herdbase.py
71313 index fcf58b3..c5b88ff 100644
71314 --- a/pym/repoman/herdbase.py
71315 +++ b/pym/repoman/herdbase.py
71316 @@ -1,8 +1,10 @@
71317 # -*- coding: utf-8 -*-
71318 # repoman: Herd database analysis
71319 -# Copyright 2010-2012 Gentoo Foundation
71320 +# Copyright 2010-2013 Gentoo Foundation
71321 # Distributed under the terms of the GNU General Public License v2 or later
71322
71323 +from __future__ import unicode_literals
71324 +
71325 import errno
71326 import xml.etree.ElementTree
71327 try:
71328 @@ -17,6 +19,8 @@ except (ImportError, SystemError, RuntimeError, Exception):
71329 # modules, so that ImportModulesTestCase can succeed (or
71330 # possibly alert us about unexpected import failures).
71331 pass
71332 +
71333 +from portage import _encodings, _unicode_encode
71334 from portage.exception import FileNotFound, ParseError, PermissionDenied
71335
71336 __all__ = [
71337 @@ -56,11 +60,12 @@ def make_herd_base(filename):
71338 all_emails = set()
71339
71340 try:
71341 - xml_tree = xml.etree.ElementTree.parse(filename,
71342 + xml_tree = xml.etree.ElementTree.parse(_unicode_encode(filename,
71343 + encoding=_encodings['fs'], errors='strict'),
71344 parser=xml.etree.ElementTree.XMLParser(
71345 target=_HerdsTreeBuilder()))
71346 except ExpatError as e:
71347 - raise ParseError("metadata.xml: " + str(e))
71348 + raise ParseError("metadata.xml: %s" % (e,))
71349 except EnvironmentError as e:
71350 func_call = "open('%s')" % filename
71351 if e.errno == errno.EACCES:
71352
71353 diff --git a/pym/repoman/utilities.py b/pym/repoman/utilities.py
71354 index 013858a..aec61fe 100644
71355 --- a/pym/repoman/utilities.py
71356 +++ b/pym/repoman/utilities.py
71357 @@ -1,11 +1,11 @@
71358 # repoman: Utilities
71359 -# Copyright 2007-2012 Gentoo Foundation
71360 +# Copyright 2007-2013 Gentoo Foundation
71361 # Distributed under the terms of the GNU General Public License v2
71362
71363 """This module contains utility functions to help repoman find ebuilds to
71364 scan"""
71365
71366 -from __future__ import print_function
71367 +from __future__ import print_function, unicode_literals
71368
71369 __all__ = [
71370 "detect_vcs_conflicts",
71371 @@ -25,6 +25,7 @@ __all__ = [
71372 "UpdateChangeLog"
71373 ]
71374
71375 +import collections
71376 import errno
71377 import io
71378 from itertools import chain
71379 @@ -33,18 +34,20 @@ import pwd
71380 import re
71381 import stat
71382 import sys
71383 +import subprocess
71384 import time
71385 import textwrap
71386 import difflib
71387 from tempfile import mkstemp
71388
71389 +import portage
71390 from portage import os
71391 from portage import shutil
71392 -from portage import subprocess_getstatusoutput
71393 from portage import _encodings
71394 from portage import _unicode_decode
71395 from portage import _unicode_encode
71396 from portage import output
71397 +from portage.const import BASH_BINARY
71398 from portage.localization import _
71399 from portage.output import red, green
71400 from portage.process import find_binary
71401 @@ -71,22 +74,31 @@ def detect_vcs_conflicts(options, vcs):
71402 Returns:
71403 None (calls sys.exit on fatal problems)
71404 """
71405 - retval = ("","")
71406 +
71407 + cmd = None
71408 if vcs == 'cvs':
71409 logging.info("Performing a " + output.green("cvs -n up") + \
71410 " with a little magic grep to check for updates.")
71411 - retval = subprocess_getstatusoutput("cvs -n up 2>/dev/null | " + \
71412 + cmd = "cvs -n up 2>/dev/null | " + \
71413 "egrep '^[^\?] .*' | " + \
71414 - "egrep -v '^. .*/digest-[^/]+|^cvs server: .* -- ignored$'")
71415 + "egrep -v '^. .*/digest-[^/]+|^cvs server: .* -- ignored$'"
71416 if vcs == 'svn':
71417 logging.info("Performing a " + output.green("svn status -u") + \
71418 " with a little magic grep to check for updates.")
71419 - retval = subprocess_getstatusoutput("svn status -u 2>&1 | " + \
71420 + cmd = "svn status -u 2>&1 | " + \
71421 "egrep -v '^. +.*/digest-[^/]+' | " + \
71422 - "head -n-1")
71423 -
71424 - if vcs in ['cvs', 'svn']:
71425 - mylines = retval[1].splitlines()
71426 + "head -n-1"
71427 +
71428 + if cmd is not None:
71429 + # Use Popen instead of getstatusoutput(), in order to avoid
71430 + # unicode handling problems (see bug #310789).
71431 + args = [BASH_BINARY, "-c", cmd]
71432 + args = [_unicode_encode(x) for x in args]
71433 + proc = subprocess.Popen(args, stdout=subprocess.PIPE,
71434 + stderr=subprocess.STDOUT)
71435 + out = _unicode_decode(proc.communicate()[0])
71436 + proc.wait()
71437 + mylines = out.splitlines()
71438 myupdates = []
71439 for line in mylines:
71440 if not line:
71441 @@ -98,7 +110,7 @@ def detect_vcs_conflicts(options, vcs):
71442 logging.error(red("!!! Please fix the following issues reported " + \
71443 "from cvs: ")+green("(U,P,M,A,R,D are ok)"))
71444 logging.error(red("!!! Note: This is a pretend/no-modify pass..."))
71445 - logging.error(retval[1])
71446 + logging.error(out)
71447 sys.exit(1)
71448 elif vcs == 'cvs' and line[0] in "UP":
71449 myupdates.append(line[2:])
71450 @@ -298,12 +310,12 @@ def format_qa_output(formatter, stats, fails, dofull, dofail, options, qawarning
71451 # we only want key value pairs where value > 0
71452 for category, number in \
71453 filter(lambda myitem: myitem[1] > 0, iter(stats.items())):
71454 - formatter.add_literal_data(_unicode_decode(" " + category.ljust(30)))
71455 + formatter.add_literal_data(" " + category.ljust(30))
71456 if category in qawarnings:
71457 formatter.push_style("WARN")
71458 else:
71459 formatter.push_style("BAD")
71460 - formatter.add_literal_data(_unicode_decode(str(number)))
71461 + formatter.add_literal_data("%s" % number)
71462 formatter.pop_style()
71463 formatter.add_line_break()
71464 if not dofull:
71465 @@ -314,10 +326,54 @@ def format_qa_output(formatter, stats, fails, dofull, dofail, options, qawarning
71466 if not full and len(fails_list) > 12:
71467 fails_list = fails_list[:12]
71468 for failure in fails_list:
71469 - formatter.add_literal_data(_unicode_decode(" " + failure))
71470 + formatter.add_literal_data(" " + failure)
71471 formatter.add_line_break()
71472
71473
71474 +def format_qa_output_column(formatter, stats, fails, dofull, dofail, options, qawarnings):
71475 + """Helper function that formats output in a machine-parseable column format
71476 +
71477 + @param formatter: an instance of Formatter
71478 + @type formatter: Formatter
71479 + @param path: dict of qa status items
71480 + @type path: dict
71481 + @param fails: dict of qa status failures
71482 + @type fails: dict
71483 + @param dofull: Whether to print full results or a summary
71484 + @type dofull: boolean
71485 + @param dofail: Whether failure was hard or soft
71486 + @type dofail: boolean
71487 + @param options: The command-line options provided to repoman
71488 + @type options: Namespace
71489 + @param qawarnings: the set of warning types
71490 + @type qawarnings: set
71491 + @return: None (modifies formatter)
71492 + """
71493 + full = options.mode == 'full'
71494 + for category, number in stats.items():
71495 + # we only want key value pairs where value > 0
71496 + if number < 1:
71497 + continue
71498 +
71499 + formatter.add_literal_data("NumberOf " + category + " ")
71500 + if category in qawarnings:
71501 + formatter.push_style("WARN")
71502 + else:
71503 + formatter.push_style("BAD")
71504 + formatter.add_literal_data("%s" % number)
71505 + formatter.pop_style()
71506 + formatter.add_line_break()
71507 + if not dofull:
71508 + if not full and dofail and category in qawarnings:
71509 + # warnings are considered noise when there are failures
71510 + continue
71511 + fails_list = fails[category]
71512 + if not full and len(fails_list) > 12:
71513 + fails_list = fails_list[:12]
71514 + for failure in fails_list:
71515 + formatter.add_literal_data(category + " " + failure)
71516 + formatter.add_line_break()
71517 +
71518 def editor_is_executable(editor):
71519 """
71520 Given an EDITOR string, validate that it refers to
71521 @@ -367,10 +423,11 @@ def get_commit_message_with_editor(editor, message=None):
71522 if not (os.WIFEXITED(retval) and os.WEXITSTATUS(retval) == os.EX_OK):
71523 return None
71524 try:
71525 - mylines = io.open(_unicode_encode(filename,
71526 + with io.open(_unicode_encode(filename,
71527 encoding=_encodings['fs'], errors='strict'),
71528 mode='r', encoding=_encodings['content'], errors='replace'
71529 - ).readlines()
71530 + ) as f:
71531 + mylines = f.readlines()
71532 except OSError as e:
71533 if e.errno != errno.ENOENT:
71534 raise
71535 @@ -427,7 +484,7 @@ def FindPortdir(settings):
71536 portdir = None
71537 portdir_overlay = None
71538 location = os.getcwd()
71539 - pwd = os.environ.get('PWD', '')
71540 + pwd = _unicode_decode(os.environ.get('PWD', ''), encoding=_encodings['fs'])
71541 if pwd and pwd != location and os.path.realpath(pwd) == location:
71542 # getcwd() returns the canonical path but that makes it hard for repoman to
71543 # orient itself if the user has symlinks in their portage tree structure.
71544 @@ -449,7 +506,7 @@ def FindPortdir(settings):
71545 if location[-1] != "/":
71546 location += "/"
71547
71548 - for overlay in settings["PORTDIR_OVERLAY"].split():
71549 + for overlay in portage.util.shlex_split(settings["PORTDIR_OVERLAY"]):
71550 overlay = os.path.realpath(overlay)
71551 try:
71552 s = os.stat(overlay)
71553 @@ -509,6 +566,28 @@ def FindPortdir(settings):
71554
71555 return [normalize_path(x) for x in (portdir, portdir_overlay, location)]
71556
71557 +_vcs_type = collections.namedtuple('_vcs_type',
71558 + 'name dir_name')
71559 +
71560 +_FindVCS_data = (
71561 + _vcs_type(
71562 + name = 'git',
71563 + dir_name = '.git'
71564 + ),
71565 + _vcs_type(
71566 + name = 'bzr',
71567 + dir_name = '.bzr'
71568 + ),
71569 + _vcs_type(
71570 + name = 'hg',
71571 + dir_name = '.hg'
71572 + ),
71573 + _vcs_type(
71574 + name = 'svn',
71575 + dir_name = '.svn'
71576 + )
71577 +)
71578 +
71579 def FindVCS():
71580 """ Try to figure out in what VCS' working tree we are. """
71581
71582 @@ -520,14 +599,13 @@ def FindVCS():
71583 pathprep = ''
71584
71585 while depth is None or depth > 0:
71586 - if os.path.isdir(os.path.join(pathprep, '.git')):
71587 - retvcs.append('git')
71588 - if os.path.isdir(os.path.join(pathprep, '.bzr')):
71589 - retvcs.append('bzr')
71590 - if os.path.isdir(os.path.join(pathprep, '.hg')):
71591 - retvcs.append('hg')
71592 - if os.path.isdir(os.path.join(pathprep, '.svn')): # >=1.7
71593 - retvcs.append('svn')
71594 + for vcs_type in _FindVCS_data:
71595 + vcs_dir = os.path.join(pathprep, vcs_type.dir_name)
71596 + if os.path.isdir(vcs_dir):
71597 + logging.debug('FindVCS: found %(name)s dir: %(vcs_dir)s' %
71598 + {'name': vcs_type.name,
71599 + 'vcs_dir': os.path.abspath(vcs_dir)})
71600 + retvcs.append(vcs_type.name)
71601
71602 if retvcs:
71603 break
71604 @@ -763,7 +841,7 @@ def UpdateChangeLog(pkgdir, user, msg, skel_path, category, package,
71605 line = line.replace('<PACKAGE_NAME>', package)
71606 line = _update_copyright_year(year, line)
71607 header_lines.append(line)
71608 - header_lines.append(_unicode_decode('\n'))
71609 + header_lines.append('\n')
71610 clskel_file.close()
71611
71612 # write new ChangeLog entry
71613 @@ -773,10 +851,10 @@ def UpdateChangeLog(pkgdir, user, msg, skel_path, category, package,
71614 if not fn.endswith('.ebuild'):
71615 continue
71616 ebuild = fn.split(os.sep)[-1][0:-7]
71617 - clnew_lines.append(_unicode_decode('*%s (%s)\n' % (ebuild, date)))
71618 + clnew_lines.append('*%s (%s)\n' % (ebuild, date))
71619 newebuild = True
71620 if newebuild:
71621 - clnew_lines.append(_unicode_decode('\n'))
71622 + clnew_lines.append('\n')
71623 trivial_files = ('ChangeLog', 'Manifest')
71624 display_new = ['+' + elem for elem in new
71625 if elem not in trivial_files]
71626 @@ -803,19 +881,19 @@ def UpdateChangeLog(pkgdir, user, msg, skel_path, category, package,
71627 for line in textwrap.wrap(mesg, 80, \
71628 initial_indent=' ', subsequent_indent=' ', \
71629 break_on_hyphens=False):
71630 - clnew_lines.append(_unicode_decode('%s\n' % line))
71631 + clnew_lines.append('%s\n' % line)
71632 for line in textwrap.wrap(msg, 80, \
71633 initial_indent=' ', subsequent_indent=' '):
71634 - clnew_lines.append(_unicode_decode('%s\n' % line))
71635 + clnew_lines.append('%s\n' % line)
71636 # Don't append a trailing newline if the file is new.
71637 if clold_file is not None:
71638 - clnew_lines.append(_unicode_decode('\n'))
71639 + clnew_lines.append('\n')
71640
71641 f = io.open(f, mode='w', encoding=_encodings['repo.content'],
71642 errors='backslashreplace')
71643
71644 for line in clnew_lines:
71645 - f.write(_unicode_decode(line))
71646 + f.write(line)
71647
71648 # append stuff from old ChangeLog
71649 if clold_file is not None:
71650
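FindVCS() now iterates over the _FindVCS_data table instead of four
hard-coded isdir() checks, logging each match as it walks upward. A condensed
standalone sketch of the same walk, minus the logging and the
portage-specific depth handling (find_vcs is an illustrative name only):

    import collections
    import os

    _vcs_type = collections.namedtuple('_vcs_type', 'name dir_name')

    _vcs_types = (
        _vcs_type(name='git', dir_name='.git'),
        _vcs_type(name='bzr', dir_name='.bzr'),
        _vcs_type(name='hg',  dir_name='.hg'),
        _vcs_type(name='svn', dir_name='.svn'),
    )

    def find_vcs(start='.'):
        path = os.path.abspath(start)
        while True:
            found = [v.name for v in _vcs_types
                     if os.path.isdir(os.path.join(path, v.dir_name))]
            if found:
                return found
            parent = os.path.dirname(path)
            if parent == path:        # reached the filesystem root
                return []
            path = parent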
71651 diff --git a/runtests.sh b/runtests.sh
71652 index f65bb61..7999220 100755
71653 --- a/runtests.sh
71654 +++ b/runtests.sh
71655 @@ -1,8 +1,10 @@
71656 #!/bin/bash
71657 -# Copyright 2010-2012 Gentoo Foundation
71658 +# Copyright 2010-2014 Gentoo Foundation
71659 # Distributed under the terms of the GNU General Public License v2
71660
71661 -PYTHON_VERSIONS="2.6 2.7 2.7-pypy-1.8 2.7-pypy-1.9 3.1 3.2 3.3"
71662 +# These are the versions we care about. The rest are just "nice to have".
71663 +PYTHON_SUPPORTED_VERSIONS="2.6 2.7 3.2 3.3 3.4"
71664 +PYTHON_VERSIONS="2.6 2.7 2.7-pypy-1.8 2.7-pypy-1.9 2.7-pypy-2.0 3.1 3.2 3.3 3.4 3.5"
71665
71666 # has to be run from portage root dir
71667 cd "${0%/*}" || exit 1
71668 @@ -28,15 +30,18 @@ interrupted() {
71669 trap interrupted SIGINT
71670
71671 unused_args=()
71672 +IGNORE_MISSING_VERSIONS=true
71673
71674 while [ $# -gt 0 ] ; do
71675 case "$1" in
71676 --python-versions=*)
71677 PYTHON_VERSIONS=${1#--python-versions=}
71678 + IGNORE_MISSING_VERSIONS=false
71679 ;;
71680 --python-versions)
71681 shift
71682 PYTHON_VERSIONS=$1
71683 + IGNORE_MISSING_VERSIONS=false
71684 ;;
71685 *)
71686 unused_args[${#unused_args[@]}]=$1
71687 @@ -44,11 +49,16 @@ while [ $# -gt 0 ] ; do
71688 esac
71689 shift
71690 done
71691 +if [[ ${PYTHON_VERSIONS} == "supported" ]] ; then
71692 + PYTHON_VERSIONS=${PYTHON_SUPPORTED_VERSIONS}
71693 +fi
71694
71695 set -- "${unused_args[@]}"
71696
71697 eprefix=${PORTAGE_OVERRIDE_EPREFIX}
71698 exit_status="0"
71699 +found_versions=()
71700 +status_array=()
71701 for version in ${PYTHON_VERSIONS}; do
71702 if [[ $version =~ ^([[:digit:]]+\.[[:digit:]]+)-pypy-([[:digit:]]+\.[[:digit:]]+)$ ]] ; then
71703 executable=${eprefix}/usr/bin/pypy-c${BASH_REMATCH[2]}
71704 @@ -57,12 +67,43 @@ for version in ${PYTHON_VERSIONS}; do
71705 fi
71706 if [[ -x "${executable}" ]]; then
71707 echo -e "${GOOD}Testing with Python ${version}...${NORMAL}"
71708 - if ! "${executable}" -Wd pym/portage/tests/runTests "$@" ; then
71709 + "${executable}" -b -Wd pym/portage/tests/runTests "$@"
71710 + status=$?
71711 + status_array[${#status_array[@]}]=${status}
71712 + found_versions[${#found_versions[@]}]=${version}
71713 + if [ ${status} -ne 0 ] ; then
71714 echo -e "${BAD}Testing with Python ${version} failed${NORMAL}"
71715 exit_status="1"
71716 fi
71717 echo
71718 + elif [[ ${IGNORE_MISSING_VERSIONS} != "true" ]] ; then
71719 + echo -e "${BAD}Could not find requested Python ${version}${NORMAL}"
71720 + exit_status="1"
71721 fi
71722 done
71723
71724 +if [ ${#status_array[@]} -gt 0 ] ; then
71725 + max_len=0
71726 + for version in ${found_versions[@]} ; do
71727 + [ ${#version} -gt ${max_len} ] && max_len=${#version}
71728 + done
71729 + (( columns = max_len + 2 ))
71730 + (( columns >= 7 )) || columns=7
71731 + printf "\nSummary:\n\n"
71732 + printf "| %-${columns}s | %s\n|" "Version" "Status"
71733 + (( total_cols = columns + 11 ))
71734 + eval "printf -- '-%.0s' {1..${total_cols}}"
71735 + printf "\n"
71736 + row=0
71737 + for version in ${found_versions[@]} ; do
71738 + if [ ${status_array[${row}]} -eq 0 ] ; then
71739 + status="success"
71740 + else
71741 + status="fail"
71742 + fi
71743 + printf "| %-${columns}s | %s\n" "${version}" "${status}"
71744 + (( row++ ))
71745 + done
71746 +fi
71747 +
71748 exit ${exit_status}
71749
71750 diff --git a/tabcheck.py b/tabcheck.py
71751 index 1699e98..2d45cde 100755
71752 --- a/tabcheck.py
71753 +++ b/tabcheck.py
71754 @@ -1,4 +1,4 @@
71755 -#!/usr/bin/python -O
71756 +#!/usr/bin/python -bO
71757
71758 import tabnanny,sys