
From: "Fabian Groffen (grobian)" <grobian@g.o>
To: gentoo-commits@l.g.o
Subject: [gentoo-commits] portage r13711 - main/branches/prefix/pym/_emerge
Date: Sat, 27 Jun 2009 14:25:01
Message-Id: E1MKYpl-0002dH-D2@stork.gentoo.org
1 Author: grobian
2 Date: 2009-06-27 14:24:48 +0000 (Sat, 27 Jun 2009)
3 New Revision: 13711
4
5 Added:
6 main/branches/prefix/pym/_emerge/actions.py
7 Modified:
8 main/branches/prefix/pym/_emerge/__init__.py
9 Log:
10 Merged from trunk -r13672:13673
11
12 | 13673 | Bug #275047 - Split _emerge/__init__.py into smaller pieces |
13 | zmedico | (part 6). Thanks to Sebastian Mingramm (few) |
14 | | <s.mingramm@×××.de> for this patch. |
15
16
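The hunks below remove getportageversion(), the news and config-file helpers, and the various action_*() functions from __init__.py; the first hunk replaces the local definitions with an import from the newly added _emerge/actions.py. A minimal sketch of the resulting usage, with names taken from that replacement import (illustrative only, not part of this commit):

    # After this split, emerge-side code imports the moved helpers from
    # _emerge.actions instead of finding them in _emerge/__init__.py.
    from _emerge.actions import action_sync, action_metadata, getportageversion

The sync code path, for example, still ends up in action_sync(settings, trees, mtimedb, myopts, myaction); only the module that defines it changes.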
17 Modified: main/branches/prefix/pym/_emerge/__init__.py
18 ===================================================================
19 --- main/branches/prefix/pym/_emerge/__init__.py 2009-06-27 14:07:14 UTC (rev 13710)
20 +++ main/branches/prefix/pym/_emerge/__init__.py 2009-06-27 14:24:48 UTC (rev 13711)
21 @@ -4,12 +4,11 @@
22 # $Id$
23
24 import logging
25 -import pwd
26 import shlex
27 import signal
28 import sys
29 import textwrap
30 -import os, stat
31 +import os
32 import platform
33
34 # Portage module path logic (Prefix way)
35 @@ -31,13 +30,9 @@
36 sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "pym"))
37 import portage
38
39 -from portage import digraph
40 -from portage.const import NEWS_LIB_PATH
41 -
42 import _emerge.help
43 -import portage.xpak, commands, errno, re, socket, time
44 -from portage.output import blue, bold, colorize, darkgreen, \
45 - red, xtermTitleReset, yellow
46 +import portage.xpak, commands, errno, re, time
47 +from portage.output import colorize, xtermTitleReset
48 from portage.output import create_color_func
49 good = create_color_func("GOOD")
50 bad = create_color_func("BAD")
51 @@ -48,38 +43,20 @@
52 import portage.util
53 import portage.locks
54 import portage.exception
55 -from portage.cache.cache_errors import CacheError
56 from portage.const import EPREFIX, BPREFIX, EPREFIX_LSTRIP
57 from portage.data import secpass
58 from portage.util import normalize_path as normpath
59 -from portage.util import cmp_sort_key, writemsg, writemsg_level
60 -from portage.sets import load_default_config, SETPREFIX
61 -from portage.sets.base import InternalPackageSet
62 +from portage.util import writemsg, writemsg_level
63 +from portage.sets import SETPREFIX
64
65 -from itertools import chain, izip
66 -
67 -from _emerge.clear_caches import clear_caches
68 -from _emerge.countdown import countdown
69 -from _emerge.create_depgraph_params import create_depgraph_params
70 -from _emerge.Dependency import Dependency
71 -from _emerge.depgraph import depgraph, resume_depgraph
72 -from _emerge.DepPrioritySatisfiedRange import DepPrioritySatisfiedRange
73 +from _emerge.actions import action_config, action_sync, action_metadata, \
74 + action_regen, action_search, action_uninstall, action_info, action_build, \
75 + adjust_config, chk_updated_cfg_files, display_missing_pkg_set, \
76 + display_news_notification, getportageversion, load_emerge_config
77 from _emerge.emergelog import emergelog
78 from _emerge._flush_elog_mod_echo import _flush_elog_mod_echo
79 from _emerge.is_valid_package_atom import is_valid_package_atom
80 -from _emerge.MetadataRegen import MetadataRegen
81 -from _emerge.Package import Package
82 -from _emerge.ProgressHandler import ProgressHandler
83 -from _emerge.RootConfig import RootConfig
84 -from _emerge.Scheduler import Scheduler
85 -from _emerge.search import search
86 -from _emerge.SetArg import SetArg
87 -from _emerge.show_invalid_depstring_notice import show_invalid_depstring_notice
88 from _emerge.stdout_spinner import stdout_spinner
89 -from _emerge.unmerge import unmerge
90 -from _emerge.UnmergeDepPriority import UnmergeDepPriority
91 -from _emerge.UseFlagDisplay import UseFlagDisplay
92 -from _emerge.userquery import userquery
93
94
95 actions = frozenset([
96 @@ -136,69 +113,6 @@
97 "v":"--verbose", "V":"--version"
98 }
99
100 -def getgccversion(chost):
101 - """
102 - rtype: C{str}
103 - return: the current in-use gcc version
104 - """
105 -
106 - gcc_ver_command = 'gcc -dumpversion'
107 - gcc_ver_prefix = 'gcc-'
108 -
109 - gcc_not_found_error = red(
110 - "!!! No gcc found. You probably need to 'source /etc/profile'\n" +
111 - "!!! to update the environment of this terminal and possibly\n" +
112 - "!!! other terminals also.\n"
113 - )
114 -
115 - mystatus, myoutput = commands.getstatusoutput("gcc-config -c")
116 - if mystatus == os.EX_OK and myoutput.startswith(chost + "-"):
117 - return myoutput.replace(chost + "-", gcc_ver_prefix, 1)
118 -
119 - mystatus, myoutput = commands.getstatusoutput(
120 - chost + "-" + gcc_ver_command)
121 - if mystatus == os.EX_OK:
122 - return gcc_ver_prefix + myoutput
123 -
124 - mystatus, myoutput = commands.getstatusoutput(gcc_ver_command)
125 - if mystatus == os.EX_OK:
126 - return gcc_ver_prefix + myoutput
127 -
128 - portage.writemsg(gcc_not_found_error, noiselevel=-1)
129 - return "[unavailable]"
130 -
131 -def getportageversion(portdir, target_root, profile, chost, vardb):
132 - profilever = "unavailable"
133 - if profile:
134 - realpath = os.path.realpath(profile)
135 - basepath = os.path.realpath(os.path.join(portdir, "profiles"))
136 - if realpath.startswith(basepath):
137 - profilever = realpath[1 + len(basepath):]
138 - else:
139 - try:
140 - profilever = "!" + os.readlink(profile)
141 - except (OSError):
142 - pass
143 - del realpath, basepath
144 -
145 - libcver=[]
146 - libclist = vardb.match("virtual/libc")
147 - libclist += vardb.match("virtual/glibc")
148 - libclist = portage.util.unique_array(libclist)
149 - for x in libclist:
150 - xs=portage.catpkgsplit(x)
151 - if libcver:
152 - libcver+=","+"-".join(xs[1:])
153 - else:
154 - libcver="-".join(xs[1:])
155 - if libcver==[]:
156 - libcver="unavailable"
157 -
158 - gccver = getgccversion(chost)
159 - unameout=platform.release()+" "+platform.machine()
160 -
161 - return "Portage " + portage.VERSION +" ("+profilever+", "+gccver+", "+libcver+", "+unameout+")"
162 -
163 def chk_updated_info_files(root, infodirs, prev_mtimes, retval):
164
165 if os.path.exists(EPREFIX + "/usr/bin/install-info"):
166 @@ -310,33 +224,6 @@
167 out.einfo("Processed %d info files." % (icount,))
168
169
170 -def display_news_notification(root_config, myopts):
171 - target_root = root_config.root
172 - trees = root_config.trees
173 - settings = trees["vartree"].settings
174 - portdb = trees["porttree"].dbapi
175 - vardb = trees["vartree"].dbapi
176 - NEWS_PATH = os.path.join("metadata", "news")
177 - UNREAD_PATH = os.path.join(target_root, NEWS_LIB_PATH, "news")
178 - newsReaderDisplay = False
179 - update = "--pretend" not in myopts
180 -
181 - for repo in portdb.getRepositories():
182 - unreadItems = checkUpdatedNewsItems(
183 - portdb, vardb, NEWS_PATH, UNREAD_PATH, repo, update=update)
184 - if unreadItems:
185 - if not newsReaderDisplay:
186 - newsReaderDisplay = True
187 - print
188 - print colorize("WARN", " * IMPORTANT:"),
189 - print "%s news items need reading for repository '%s'." % (unreadItems, repo)
190 -
191 -
192 - if newsReaderDisplay:
193 - print colorize("WARN", " *"),
194 - print "Use " + colorize("GOOD", "eselect news") + " to read news items."
195 - print
196 -
197 def display_preserved_libs(vardbapi):
198 MAX_DISPLAY = 3
199
200 @@ -488,2507 +375,6 @@
201 sys.exit(retval)
202
203
204 -def chk_updated_cfg_files(target_root, config_protect):
205 - if config_protect:
206 - #number of directories with some protect files in them
207 - procount=0
208 - for x in config_protect:
209 - x = os.path.join(target_root, x.lstrip(os.path.sep))
210 - if not os.access(x, os.W_OK):
211 - # Avoid Permission denied errors generated
212 - # later by `find`.
213 - continue
214 - try:
215 - mymode = os.lstat(x).st_mode
216 - except OSError:
217 - continue
218 - if stat.S_ISLNK(mymode):
219 - # We want to treat it like a directory if it
220 - # is a symlink to an existing directory.
221 - try:
222 - real_mode = os.stat(x).st_mode
223 - if stat.S_ISDIR(real_mode):
224 - mymode = real_mode
225 - except OSError:
226 - pass
227 - if stat.S_ISDIR(mymode):
228 - mycommand = "find '%s' -name '.*' -type d -prune -o -name '._cfg????_*'" % x
229 - else:
230 - mycommand = "find '%s' -maxdepth 1 -name '._cfg????_%s'" % \
231 - os.path.split(x.rstrip(os.path.sep))
232 - mycommand += " ! -name '.*~' ! -iname '.*.bak' -print0"
233 - a = commands.getstatusoutput(mycommand)
234 - if a[0] != 0:
235 - sys.stderr.write(" %s error scanning '%s': " % (bad("*"), x))
236 - sys.stderr.flush()
237 - # Show the error message alone, sending stdout to /dev/null.
238 - os.system(mycommand + " 1>/dev/null")
239 - else:
240 - files = a[1].split('\0')
241 - # split always produces an empty string as the last element
242 - if files and not files[-1]:
243 - del files[-1]
244 - if files:
245 - procount += 1
246 - print "\n"+colorize("WARN", " * IMPORTANT:"),
247 - if stat.S_ISDIR(mymode):
248 - print "%d config files in '%s' need updating." % \
249 - (len(files), x)
250 - else:
251 - print "config file '%s' needs updating." % x
252 -
253 - if procount:
254 - print " "+yellow("*")+" See the "+colorize("INFORM","CONFIGURATION FILES")+ \
255 - " section of the " + bold("emerge")
256 - print " "+yellow("*")+" man page to learn how to update config files."
257 -
258 -def checkUpdatedNewsItems(portdb, vardb, NEWS_PATH, UNREAD_PATH, repo_id,
259 - update=False):
260 - """
261 - Examines news items in repodir + '/' + NEWS_PATH and attempts to find unread items
262 - Returns the number of unread (yet relevent) items.
263 -
264 - @param portdb: a portage tree database
265 - @type portdb: pordbapi
266 - @param vardb: an installed package database
267 - @type vardb: vardbapi
268 - @param NEWS_PATH:
269 - @type NEWS_PATH:
270 - @param UNREAD_PATH:
271 - @type UNREAD_PATH:
272 - @param repo_id:
273 - @type repo_id:
274 - @rtype: Integer
275 - @returns:
276 - 1. The number of unread but relevant news items.
277 -
278 - """
279 - from portage.news import NewsManager
280 - manager = NewsManager(portdb, vardb, NEWS_PATH, UNREAD_PATH)
281 - return manager.getUnreadItems( repo_id, update=update )
282 -
283 -def action_sync(settings, trees, mtimedb, myopts, myaction):
284 - xterm_titles = "notitles" not in settings.features
285 - emergelog(xterm_titles, " === sync")
286 - portdb = trees[settings["ROOT"]]["porttree"].dbapi
287 - myportdir = portdb.porttree_root
288 - out = portage.output.EOutput()
289 - if not myportdir:
290 - sys.stderr.write("!!! PORTDIR is undefined. Is /etc/make.globals missing?\n")
291 - sys.exit(1)
292 - if myportdir[-1]=="/":
293 - myportdir=myportdir[:-1]
294 - try:
295 - st = os.stat(myportdir)
296 - except OSError:
297 - st = None
298 - if st is None:
299 - print ">>>",myportdir,"not found, creating it."
300 - os.makedirs(myportdir,0755)
301 - st = os.stat(myportdir)
302 -
303 - spawn_kwargs = {}
304 - spawn_kwargs["env"] = settings.environ()
305 - if 'usersync' in settings.features and \
306 - portage.data.secpass >= 2 and \
307 - (st.st_uid != os.getuid() and st.st_mode & 0700 or \
308 - st.st_gid != os.getgid() and st.st_mode & 0070):
309 - try:
310 - homedir = pwd.getpwuid(st.st_uid).pw_dir
311 - except KeyError:
312 - pass
313 - else:
314 - # Drop privileges when syncing, in order to match
315 - # existing uid/gid settings.
316 - spawn_kwargs["uid"] = st.st_uid
317 - spawn_kwargs["gid"] = st.st_gid
318 - spawn_kwargs["groups"] = [st.st_gid]
319 - spawn_kwargs["env"]["HOME"] = homedir
320 - umask = 0002
321 - if not st.st_mode & 0020:
322 - umask = umask | 0020
323 - spawn_kwargs["umask"] = umask
324 -
325 - syncuri = settings.get("SYNC", "").strip()
326 - if not syncuri:
327 - writemsg_level("!!! SYNC is undefined. Is /etc/make.globals missing?\n",
328 - noiselevel=-1, level=logging.ERROR)
329 - return 1
330 -
331 - vcs_dirs = frozenset([".git", ".svn", "CVS", ".hg"])
332 - vcs_dirs = vcs_dirs.intersection(os.listdir(myportdir))
333 -
334 - os.umask(0022)
335 - dosyncuri = syncuri
336 - updatecache_flg = False
337 - if myaction == "metadata":
338 - print "skipping sync"
339 - updatecache_flg = True
340 - elif ".git" in vcs_dirs:
341 - # Update existing git repository, and ignore the syncuri. We are
342 - # going to trust the user and assume that the user is in the branch
343 - # that he/she wants updated. We'll let the user manage branches with
344 - # git directly.
345 - if portage.process.find_binary("git") is None:
346 - msg = ["Command not found: git",
347 - "Type \"emerge dev-util/git\" to enable git support."]
348 - for l in msg:
349 - writemsg_level("!!! %s\n" % l,
350 - level=logging.ERROR, noiselevel=-1)
351 - return 1
352 - msg = ">>> Starting git pull in %s..." % myportdir
353 - emergelog(xterm_titles, msg )
354 - writemsg_level(msg + "\n")
355 - exitcode = portage.process.spawn_bash("cd %s ; git pull" % \
356 - (portage._shell_quote(myportdir),), **spawn_kwargs)
357 - if exitcode != os.EX_OK:
358 - msg = "!!! git pull error in %s." % myportdir
359 - emergelog(xterm_titles, msg)
360 - writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
361 - return exitcode
362 - msg = ">>> Git pull in %s successful" % myportdir
363 - emergelog(xterm_titles, msg)
364 - writemsg_level(msg + "\n")
365 - exitcode = git_sync_timestamps(settings, myportdir)
366 - if exitcode == os.EX_OK:
367 - updatecache_flg = True
368 - elif syncuri[:8]=="rsync://":
369 - for vcs_dir in vcs_dirs:
370 - writemsg_level(("!!! %s appears to be under revision " + \
371 - "control (contains %s).\n!!! Aborting rsync sync.\n") % \
372 - (myportdir, vcs_dir), level=logging.ERROR, noiselevel=-1)
373 - return 1
374 - if not os.path.exists(EPREFIX + "/usr/bin/rsync"):
375 - print "!!! /usr/bin/rsync does not exist, so rsync support is disabled."
376 - print "!!! Type \"emerge net-misc/rsync\" to enable rsync support."
377 - sys.exit(1)
378 - mytimeout=180
379 -
380 - rsync_opts = []
381 - if settings["PORTAGE_RSYNC_OPTS"] == "":
382 - portage.writemsg("PORTAGE_RSYNC_OPTS empty or unset, using hardcoded defaults\n")
383 - rsync_opts.extend([
384 - "--recursive", # Recurse directories
385 - "--links", # Consider symlinks
386 - "--safe-links", # Ignore links outside of tree
387 - "--perms", # Preserve permissions
388 - "--times", # Preserive mod times
389 - "--compress", # Compress the data transmitted
390 - "--force", # Force deletion on non-empty dirs
391 - "--whole-file", # Don't do block transfers, only entire files
392 - "--delete", # Delete files that aren't in the master tree
393 - "--stats", # Show final statistics about what was transfered
394 - "--timeout="+str(mytimeout), # IO timeout if not done in X seconds
395 - "--exclude=/distfiles", # Exclude distfiles from consideration
396 - "--exclude=/local", # Exclude local from consideration
397 - "--exclude=/packages", # Exclude packages from consideration
398 - ])
399 -
400 - else:
401 - # The below validation is not needed when using the above hardcoded
402 - # defaults.
403 -
404 - portage.writemsg("Using PORTAGE_RSYNC_OPTS instead of hardcoded defaults\n", 1)
405 - rsync_opts.extend(
406 - shlex.split(settings.get("PORTAGE_RSYNC_OPTS","")))
407 - for opt in ("--recursive", "--times"):
408 - if opt not in rsync_opts:
409 - portage.writemsg(yellow("WARNING:") + " adding required option " + \
410 - "%s not included in PORTAGE_RSYNC_OPTS\n" % opt)
411 - rsync_opts.append(opt)
412 -
413 - for exclude in ("distfiles", "local", "packages"):
414 - opt = "--exclude=/%s" % exclude
415 - if opt not in rsync_opts:
416 - portage.writemsg(yellow("WARNING:") + \
417 - " adding required option %s not included in " % opt + \
418 - "PORTAGE_RSYNC_OPTS (can be overridden with --exclude='!')\n")
419 - rsync_opts.append(opt)
420 -
421 - if syncuri.rstrip("/").endswith(".gentoo.org/gentoo-portage"):
422 - def rsync_opt_startswith(opt_prefix):
423 - for x in rsync_opts:
424 - if x.startswith(opt_prefix):
425 - return True
426 - return False
427 -
428 - if not rsync_opt_startswith("--timeout="):
429 - rsync_opts.append("--timeout=%d" % mytimeout)
430 -
431 - for opt in ("--compress", "--whole-file"):
432 - if opt not in rsync_opts:
433 - portage.writemsg(yellow("WARNING:") + " adding required option " + \
434 - "%s not included in PORTAGE_RSYNC_OPTS\n" % opt)
435 - rsync_opts.append(opt)
436 -
437 - if "--quiet" in myopts:
438 - rsync_opts.append("--quiet") # Shut up a lot
439 - else:
440 - rsync_opts.append("--verbose") # Print filelist
441 -
442 - if "--verbose" in myopts:
443 - rsync_opts.append("--progress") # Progress meter for each file
444 -
445 - if "--debug" in myopts:
446 - rsync_opts.append("--checksum") # Force checksum on all files
447 -
448 - # Real local timestamp file.
449 - servertimestampfile = os.path.join(
450 - myportdir, "metadata", "timestamp.chk")
451 -
452 - content = portage.util.grabfile(servertimestampfile)
453 - mytimestamp = 0
454 - if content:
455 - try:
456 - mytimestamp = time.mktime(time.strptime(content[0],
457 - "%a, %d %b %Y %H:%M:%S +0000"))
458 - except (OverflowError, ValueError):
459 - pass
460 - del content
461 -
462 - try:
463 - rsync_initial_timeout = \
464 - int(settings.get("PORTAGE_RSYNC_INITIAL_TIMEOUT", "15"))
465 - except ValueError:
466 - rsync_initial_timeout = 15
467 -
468 - try:
469 - maxretries=int(settings["PORTAGE_RSYNC_RETRIES"])
470 - except SystemExit, e:
471 - raise # Needed else can't exit
472 - except:
473 - maxretries=3 #default number of retries
474 -
475 - retries=0
476 - user_name, hostname, port = re.split(
477 - "rsync://([^:/]+@)?([^:/]*)(:[0-9]+)?", syncuri, maxsplit=3)[1:4]
478 - if port is None:
479 - port=""
480 - if user_name is None:
481 - user_name=""
482 - updatecache_flg=True
483 - all_rsync_opts = set(rsync_opts)
484 - extra_rsync_opts = shlex.split(
485 - settings.get("PORTAGE_RSYNC_EXTRA_OPTS",""))
486 - all_rsync_opts.update(extra_rsync_opts)
487 - family = socket.AF_INET
488 - if "-4" in all_rsync_opts or "--ipv4" in all_rsync_opts:
489 - family = socket.AF_INET
490 - elif socket.has_ipv6 and \
491 - ("-6" in all_rsync_opts or "--ipv6" in all_rsync_opts):
492 - family = socket.AF_INET6
493 - ips=[]
494 - SERVER_OUT_OF_DATE = -1
495 - EXCEEDED_MAX_RETRIES = -2
496 - while (1):
497 - if ips:
498 - del ips[0]
499 - if ips==[]:
500 - try:
501 - for addrinfo in socket.getaddrinfo(
502 - hostname, None, family, socket.SOCK_STREAM):
503 - if socket.has_ipv6 and addrinfo[0] == socket.AF_INET6:
504 - # IPv6 addresses need to be enclosed in square brackets
505 - ips.append("[%s]" % addrinfo[4][0])
506 - else:
507 - ips.append(addrinfo[4][0])
508 - from random import shuffle
509 - shuffle(ips)
510 - except SystemExit, e:
511 - raise # Needed else can't exit
512 - except Exception, e:
513 - print "Notice:",str(e)
514 - dosyncuri=syncuri
515 -
516 - if ips:
517 - try:
518 - dosyncuri = syncuri.replace(
519 - "//" + user_name + hostname + port + "/",
520 - "//" + user_name + ips[0] + port + "/", 1)
521 - except SystemExit, e:
522 - raise # Needed else can't exit
523 - except Exception, e:
524 - print "Notice:",str(e)
525 - dosyncuri=syncuri
526 -
527 - if (retries==0):
528 - if "--ask" in myopts:
529 - if userquery("Do you want to sync your Portage tree with the mirror at\n" + blue(dosyncuri) + bold("?"))=="No":
530 - print
531 - print "Quitting."
532 - print
533 - sys.exit(0)
534 - emergelog(xterm_titles, ">>> Starting rsync with " + dosyncuri)
535 - if "--quiet" not in myopts:
536 - print ">>> Starting rsync with "+dosyncuri+"..."
537 - else:
538 - emergelog(xterm_titles,
539 - ">>> Starting retry %d of %d with %s" % \
540 - (retries,maxretries,dosyncuri))
541 - print "\n\n>>> Starting retry %d of %d with %s" % (retries,maxretries,dosyncuri)
542 -
543 - if mytimestamp != 0 and "--quiet" not in myopts:
544 - print ">>> Checking server timestamp ..."
545 -
546 - rsynccommand = [EPREFIX + "/usr/bin/rsync"] + rsync_opts + extra_rsync_opts
547 -
548 - if "--debug" in myopts:
549 - print rsynccommand
550 -
551 - exitcode = os.EX_OK
552 - servertimestamp = 0
553 - # Even if there's no timestamp available locally, fetch the
554 - # timestamp anyway as an initial probe to verify that the server is
555 - # responsive. This protects us from hanging indefinitely on a
556 - # connection attempt to an unresponsive server which rsync's
557 - # --timeout option does not prevent.
558 - if True:
559 - # Temporary file for remote server timestamp comparison.
560 - from tempfile import mkstemp
561 - fd, tmpservertimestampfile = mkstemp()
562 - os.close(fd)
563 - mycommand = rsynccommand[:]
564 - mycommand.append(dosyncuri.rstrip("/") + \
565 - "/metadata/timestamp.chk")
566 - mycommand.append(tmpservertimestampfile)
567 - content = None
568 - mypids = []
569 - try:
570 - def timeout_handler(signum, frame):
571 - raise portage.exception.PortageException("timed out")
572 - signal.signal(signal.SIGALRM, timeout_handler)
573 - # Timeout here in case the server is unresponsive. The
574 - # --timeout rsync option doesn't apply to the initial
575 - # connection attempt.
576 - if rsync_initial_timeout:
577 - signal.alarm(rsync_initial_timeout)
578 - try:
579 - mypids.extend(portage.process.spawn(
580 - mycommand, env=settings.environ(), returnpid=True))
581 - exitcode = os.waitpid(mypids[0], 0)[1]
582 - content = portage.grabfile(tmpservertimestampfile)
583 - finally:
584 - if rsync_initial_timeout:
585 - signal.alarm(0)
586 - try:
587 - os.unlink(tmpservertimestampfile)
588 - except OSError:
589 - pass
590 - except portage.exception.PortageException, e:
591 - # timed out
592 - print e
593 - del e
594 - if mypids and os.waitpid(mypids[0], os.WNOHANG) == (0,0):
595 - os.kill(mypids[0], signal.SIGTERM)
596 - os.waitpid(mypids[0], 0)
597 - # This is the same code rsync uses for timeout.
598 - exitcode = 30
599 - else:
600 - if exitcode != os.EX_OK:
601 - if exitcode & 0xff:
602 - exitcode = (exitcode & 0xff) << 8
603 - else:
604 - exitcode = exitcode >> 8
605 - if mypids:
606 - portage.process.spawned_pids.remove(mypids[0])
607 - if content:
608 - try:
609 - servertimestamp = time.mktime(time.strptime(
610 - content[0], "%a, %d %b %Y %H:%M:%S +0000"))
611 - except (OverflowError, ValueError):
612 - pass
613 - del mycommand, mypids, content
614 - if exitcode == os.EX_OK:
615 - if (servertimestamp != 0) and (servertimestamp == mytimestamp):
616 - emergelog(xterm_titles,
617 - ">>> Cancelling sync -- Already current.")
618 - print
619 - print ">>>"
620 - print ">>> Timestamps on the server and in the local repository are the same."
621 - print ">>> Cancelling all further sync action. You are already up to date."
622 - print ">>>"
623 - print ">>> In order to force sync, remove '%s'." % servertimestampfile
624 - print ">>>"
625 - print
626 - sys.exit(0)
627 - elif (servertimestamp != 0) and (servertimestamp < mytimestamp):
628 - emergelog(xterm_titles,
629 - ">>> Server out of date: %s" % dosyncuri)
630 - print
631 - print ">>>"
632 - print ">>> SERVER OUT OF DATE: %s" % dosyncuri
633 - print ">>>"
634 - print ">>> In order to force sync, remove '%s'." % servertimestampfile
635 - print ">>>"
636 - print
637 - exitcode = SERVER_OUT_OF_DATE
638 - elif (servertimestamp == 0) or (servertimestamp > mytimestamp):
639 - # actual sync
640 - mycommand = rsynccommand + [dosyncuri+"/", myportdir]
641 - exitcode = portage.process.spawn(mycommand, **spawn_kwargs)
642 - if exitcode in [0,1,3,4,11,14,20,21]:
643 - break
644 - elif exitcode in [1,3,4,11,14,20,21]:
645 - break
646 - else:
647 - # Code 2 indicates protocol incompatibility, which is expected
648 - # for servers with protocol < 29 that don't support
649 - # --prune-empty-directories. Retry for a server that supports
650 - # at least rsync protocol version 29 (>=rsync-2.6.4).
651 - pass
652 -
653 - retries=retries+1
654 -
655 - if retries<=maxretries:
656 - print ">>> Retrying..."
657 - time.sleep(11)
658 - else:
659 - # over retries
660 - # exit loop
661 - updatecache_flg=False
662 - exitcode = EXCEEDED_MAX_RETRIES
663 - break
664 -
665 - if (exitcode==0):
666 - emergelog(xterm_titles, "=== Sync completed with %s" % dosyncuri)
667 - elif exitcode == SERVER_OUT_OF_DATE:
668 - sys.exit(1)
669 - elif exitcode == EXCEEDED_MAX_RETRIES:
670 - sys.stderr.write(
671 - ">>> Exceeded PORTAGE_RSYNC_RETRIES: %s\n" % maxretries)
672 - sys.exit(1)
673 - elif (exitcode>0):
674 - msg = []
675 - if exitcode==1:
676 - msg.append("Rsync has reported that there is a syntax error. Please ensure")
677 - msg.append("that your SYNC statement is proper.")
678 - msg.append("SYNC=" + settings["SYNC"])
679 - elif exitcode==11:
680 - msg.append("Rsync has reported that there is a File IO error. Normally")
681 - msg.append("this means your disk is full, but can be caused by corruption")
682 - msg.append("on the filesystem that contains PORTDIR. Please investigate")
683 - msg.append("and try again after the problem has been fixed.")
684 - msg.append("PORTDIR=" + settings["PORTDIR"])
685 - elif exitcode==20:
686 - msg.append("Rsync was killed before it finished.")
687 - else:
688 - msg.append("Rsync has not successfully finished. It is recommended that you keep")
689 - msg.append("trying or that you use the 'emerge-webrsync' option if you are unable")
690 - msg.append("to use rsync due to firewall or other restrictions. This should be a")
691 - msg.append("temporary problem unless complications exist with your network")
692 - msg.append("(and possibly your system's filesystem) configuration.")
693 - for line in msg:
694 - out.eerror(line)
695 - sys.exit(exitcode)
696 - elif syncuri[:6]=="cvs://":
697 - if not os.path.exists(EPREFIX + "/usr/bin/cvs"):
698 - print "!!! cvs does not exist, so CVS support is disabled."
699 - print "!!! Type \"emerge dev-util/cvs\" to enable CVS support."
700 - sys.exit(1)
701 - cvsroot=syncuri[6:]
702 - cvsdir=os.path.dirname(myportdir)
703 - if not os.path.exists(myportdir+"/CVS"):
704 - #initial checkout
705 - print ">>> Starting initial cvs checkout with "+syncuri+"..."
706 - if os.path.exists(cvsdir+"/gentoo-x86"):
707 - print "!!! existing",cvsdir+"/gentoo-x86 directory; exiting."
708 - sys.exit(1)
709 - try:
710 - os.rmdir(myportdir)
711 - except OSError, e:
712 - if e.errno != errno.ENOENT:
713 - sys.stderr.write(
714 - "!!! existing '%s' directory; exiting.\n" % myportdir)
715 - sys.exit(1)
716 - del e
717 - if portage.spawn("cd "+cvsdir+"; cvs -z0 -d "+cvsroot+" co -P gentoo-x86",settings,free=1):
718 - print "!!! cvs checkout error; exiting."
719 - sys.exit(1)
720 - os.rename(os.path.join(cvsdir, "gentoo-x86"), myportdir)
721 - else:
722 - #cvs update
723 - print ">>> Starting cvs update with "+syncuri+"..."
724 - retval = portage.process.spawn_bash(
725 - "cd %s; cvs -z0 -q update -dP" % \
726 - (portage._shell_quote(myportdir),), **spawn_kwargs)
727 - if retval != os.EX_OK:
728 - sys.exit(retval)
729 - dosyncuri = syncuri
730 - elif syncuri[:11]=="svn+http://" or syncuri[:6]=="svn://" or syncuri[:12]=="svn+https://":
731 - # Prefix hardcoded case
732 - if not os.path.exists(EPREFIX + "/usr/bin/svn"):
733 - print "!!! svn does not exist, so SVN support is disabled."
734 - print "!!! Type \"emerge dev-util/subversion\" to enable SVN support."
735 - sys.exit(1)
736 - svndir=os.path.dirname(myportdir)
737 - if not os.path.exists(myportdir+"/.svn"):
738 - #initial checkout
739 - if syncuri[:4] == "svn+":
740 - syncuri = syncuri[4:]
741 - print ">>> Starting initial svn checkout with "+syncuri+"..."
742 - if os.path.exists(svndir+"/prefix-overlay"):
743 - print "!!! existing",svndir+"/prefix-overlay directory; exiting."
744 - sys.exit(1)
745 - try:
746 - os.rmdir(myportdir)
747 - except OSError, e:
748 - if e.errno != errno.ENOENT:
749 - sys.stderr.write(
750 - "!!! existing '%s' directory; exiting.\n" % myportdir)
751 - sys.exit(1)
752 - del e
753 - if portage.spawn("cd "+svndir+"; svn checkout "+syncuri,settings,free=1):
754 - print "!!! svn checkout error; exiting."
755 - sys.exit(1)
756 - os.rename(os.path.join(svndir, "prefix-overlay"), myportdir)
757 - else:
758 - #svn update
759 - print ">>> Starting svn update..."
760 - retval = portage.spawn("cd '%s'; svn update" % myportdir, \
761 - settings, free=1)
762 - if retval != os.EX_OK:
763 - sys.exit(retval)
764 -
765 - # write timestamp.chk
766 - try:
767 - if not os.path.exists(os.path.join(myportdir, "metadata")):
768 - os.mkdir(os.path.join(myportdir, "metadata"))
769 - f = open(os.path.join(myportdir, "metadata", "timestamp.chk"), 'w')
770 - f.write(time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()))
771 - f.write('\n')
772 - f.close()
773 - except IOError, e:
774 - # too bad, next time better luck!
775 - pass
776 -
777 - dosyncuri = syncuri
778 - else:
779 - writemsg_level("!!! Unrecognized protocol: SYNC='%s'\n" % (syncuri,),
780 - noiselevel=-1, level=logging.ERROR)
781 - return 1
782 -
783 - if updatecache_flg and \
784 - myaction != "metadata" and \
785 - "metadata-transfer" not in settings.features:
786 - updatecache_flg = False
787 -
788 - # Reload the whole config from scratch.
789 - settings, trees, mtimedb = load_emerge_config(trees=trees)
790 - root_config = trees[settings["ROOT"]]["root_config"]
791 - portdb = trees[settings["ROOT"]]["porttree"].dbapi
792 -
793 - if updatecache_flg and \
794 - os.path.exists(os.path.join(myportdir, 'metadata', 'cache')):
795 -
796 - # Only update cache for myportdir since that's
797 - # the only one that's been synced here.
798 - action_metadata(settings, portdb, myopts, porttrees=[myportdir])
799 -
800 - if portage._global_updates(trees, mtimedb["updates"]):
801 - mtimedb.commit()
802 - # Reload the whole config from scratch.
803 - settings, trees, mtimedb = load_emerge_config(trees=trees)
804 - portdb = trees[settings["ROOT"]]["porttree"].dbapi
805 - root_config = trees[settings["ROOT"]]["root_config"]
806 -
807 - mybestpv = portdb.xmatch("bestmatch-visible",
808 - portage.const.PORTAGE_PACKAGE_ATOM)
809 - mypvs = portage.best(
810 - trees[settings["ROOT"]]["vartree"].dbapi.match(
811 - portage.const.PORTAGE_PACKAGE_ATOM))
812 -
813 - chk_updated_cfg_files(EPREFIX, settings.get("CONFIG_PROTECT","").split())
814 -
815 - if myaction != "metadata":
816 - if os.access(EPREFIX + portage.USER_CONFIG_PATH + "/bin/post_sync", os.X_OK):
817 - retval = portage.process.spawn(
818 - [os.path.join('/', EPREFIX_LSTRIP, portage.USER_CONFIG_PATH.lstrip(os.path.sep), "bin", "post_sync"),
819 - dosyncuri], env=settings.environ())
820 - if retval != os.EX_OK:
821 - print red(" * ")+bold("spawn failed of " + EPREFIX + portage.USER_CONFIG_PATH + "/bin/post_sync")
822 -
823 - if(mybestpv != mypvs) and not "--quiet" in myopts:
824 - print
825 - print red(" * ")+bold("An update to portage is available.")+" It is _highly_ recommended"
826 - print red(" * ")+"that you update portage now, before any other packages are updated."
827 - print
828 - print red(" * ")+"To update portage, run 'emerge portage' now."
829 - print
830 -
831 - display_news_notification(root_config, myopts)
832 - return os.EX_OK
833 -
834 -def git_sync_timestamps(settings, portdir):
835 - """
836 - Since git doesn't preserve timestamps, synchronize timestamps between
837 - entries and ebuilds/eclasses. Assume the cache has the correct timestamp
838 - for a given file as long as the file in the working tree is not modified
839 - (relative to HEAD).
840 - """
841 - cache_dir = os.path.join(portdir, "metadata", "cache")
842 - if not os.path.isdir(cache_dir):
843 - return os.EX_OK
844 - writemsg_level(">>> Synchronizing timestamps...\n")
845 -
846 - from portage.cache.cache_errors import CacheError
847 - try:
848 - cache_db = settings.load_best_module("portdbapi.metadbmodule")(
849 - portdir, "metadata/cache", portage.auxdbkeys[:], readonly=True)
850 - except CacheError, e:
851 - writemsg_level("!!! Unable to instantiate cache: %s\n" % (e,),
852 - level=logging.ERROR, noiselevel=-1)
853 - return 1
854 -
855 - ec_dir = os.path.join(portdir, "eclass")
856 - try:
857 - ec_names = set(f[:-7] for f in os.listdir(ec_dir) \
858 - if f.endswith(".eclass"))
859 - except OSError, e:
860 - writemsg_level("!!! Unable to list eclasses: %s\n" % (e,),
861 - level=logging.ERROR, noiselevel=-1)
862 - return 1
863 -
864 - args = [portage.const.BASH_BINARY, "-c",
865 - "cd %s && git diff-index --name-only --diff-filter=M HEAD" % \
866 - portage._shell_quote(portdir)]
867 - import subprocess
868 - proc = subprocess.Popen(args, stdout=subprocess.PIPE)
869 - modified_files = set(l.rstrip("\n") for l in proc.stdout)
870 - rval = proc.wait()
871 - if rval != os.EX_OK:
872 - return rval
873 -
874 - modified_eclasses = set(ec for ec in ec_names \
875 - if os.path.join("eclass", ec + ".eclass") in modified_files)
876 -
877 - updated_ec_mtimes = {}
878 -
879 - for cpv in cache_db:
880 - cpv_split = portage.catpkgsplit(cpv)
881 - if cpv_split is None:
882 - writemsg_level("!!! Invalid cache entry: %s\n" % (cpv,),
883 - level=logging.ERROR, noiselevel=-1)
884 - continue
885 -
886 - cat, pn, ver, rev = cpv_split
887 - cat, pf = portage.catsplit(cpv)
888 - relative_eb_path = os.path.join(cat, pn, pf + ".ebuild")
889 - if relative_eb_path in modified_files:
890 - continue
891 -
892 - try:
893 - cache_entry = cache_db[cpv]
894 - eb_mtime = cache_entry.get("_mtime_")
895 - ec_mtimes = cache_entry.get("_eclasses_")
896 - except KeyError:
897 - writemsg_level("!!! Missing cache entry: %s\n" % (cpv,),
898 - level=logging.ERROR, noiselevel=-1)
899 - continue
900 - except CacheError, e:
901 - writemsg_level("!!! Unable to access cache entry: %s %s\n" % \
902 - (cpv, e), level=logging.ERROR, noiselevel=-1)
903 - continue
904 -
905 - if eb_mtime is None:
906 - writemsg_level("!!! Missing ebuild mtime: %s\n" % (cpv,),
907 - level=logging.ERROR, noiselevel=-1)
908 - continue
909 -
910 - try:
911 - eb_mtime = long(eb_mtime)
912 - except ValueError:
913 - writemsg_level("!!! Invalid ebuild mtime: %s %s\n" % \
914 - (cpv, eb_mtime), level=logging.ERROR, noiselevel=-1)
915 - continue
916 -
917 - if ec_mtimes is None:
918 - writemsg_level("!!! Missing eclass mtimes: %s\n" % (cpv,),
919 - level=logging.ERROR, noiselevel=-1)
920 - continue
921 -
922 - if modified_eclasses.intersection(ec_mtimes):
923 - continue
924 -
925 - missing_eclasses = set(ec_mtimes).difference(ec_names)
926 - if missing_eclasses:
927 - writemsg_level("!!! Non-existent eclass(es): %s %s\n" % \
928 - (cpv, sorted(missing_eclasses)), level=logging.ERROR,
929 - noiselevel=-1)
930 - continue
931 -
932 - eb_path = os.path.join(portdir, relative_eb_path)
933 - try:
934 - current_eb_mtime = os.stat(eb_path)
935 - except OSError:
936 - writemsg_level("!!! Missing ebuild: %s\n" % \
937 - (cpv,), level=logging.ERROR, noiselevel=-1)
938 - continue
939 -
940 - inconsistent = False
941 - for ec, (ec_path, ec_mtime) in ec_mtimes.iteritems():
942 - updated_mtime = updated_ec_mtimes.get(ec)
943 - if updated_mtime is not None and updated_mtime != ec_mtime:
944 - writemsg_level("!!! Inconsistent eclass mtime: %s %s\n" % \
945 - (cpv, ec), level=logging.ERROR, noiselevel=-1)
946 - inconsistent = True
947 - break
948 -
949 - if inconsistent:
950 - continue
951 -
952 - if current_eb_mtime != eb_mtime:
953 - os.utime(eb_path, (eb_mtime, eb_mtime))
954 -
955 - for ec, (ec_path, ec_mtime) in ec_mtimes.iteritems():
956 - if ec in updated_ec_mtimes:
957 - continue
958 - ec_path = os.path.join(ec_dir, ec + ".eclass")
959 - current_mtime = long(os.stat(ec_path).st_mtime)
960 - if current_mtime != ec_mtime:
961 - os.utime(ec_path, (ec_mtime, ec_mtime))
962 - updated_ec_mtimes[ec] = ec_mtime
963 -
964 - return os.EX_OK
965 -
966 -def action_metadata(settings, portdb, myopts, porttrees=None):
967 - if porttrees is None:
968 - porttrees = portdb.porttrees
969 - portage.writemsg_stdout("\n>>> Updating Portage cache\n")
970 - old_umask = os.umask(0002)
971 - cachedir = os.path.normpath(settings.depcachedir)
972 - if cachedir in ["/", "/bin", "/dev", "/etc", "/home",
973 - "/lib", "/opt", "/proc", "/root", "/sbin",
974 - "/sys", "/tmp", "/usr", "/var"]:
975 - print >> sys.stderr, "!!! PORTAGE_DEPCACHEDIR IS SET TO A PRIMARY " + \
976 - "ROOT DIRECTORY ON YOUR SYSTEM."
977 - print >> sys.stderr, \
978 - "!!! This is ALMOST CERTAINLY NOT what you want: '%s'" % cachedir
979 - sys.exit(73)
980 - if not os.path.exists(cachedir):
981 - os.makedirs(cachedir)
982 -
983 - auxdbkeys = [x for x in portage.auxdbkeys if not x.startswith("UNUSED_0")]
984 - auxdbkeys = tuple(auxdbkeys)
985 -
986 - class TreeData(object):
987 - __slots__ = ('dest_db', 'eclass_db', 'path', 'src_db', 'valid_nodes')
988 - def __init__(self, dest_db, eclass_db, path, src_db):
989 - self.dest_db = dest_db
990 - self.eclass_db = eclass_db
991 - self.path = path
992 - self.src_db = src_db
993 - self.valid_nodes = set()
994 -
995 - porttrees_data = []
996 - for path in porttrees:
997 - src_db = portdb._pregen_auxdb.get(path)
998 - if src_db is None and \
999 - os.path.isdir(os.path.join(path, 'metadata', 'cache')):
1000 - src_db = portdb.metadbmodule(
1001 - path, 'metadata/cache', auxdbkeys, readonly=True)
1002 - try:
1003 - src_db.ec = portdb._repo_info[path].eclass_db
1004 - except AttributeError:
1005 - pass
1006 -
1007 - if src_db is not None:
1008 - porttrees_data.append(TreeData(portdb.auxdb[path],
1009 - portdb._repo_info[path].eclass_db, path, src_db))
1010 -
1011 - porttrees = [tree_data.path for tree_data in porttrees_data]
1012 -
1013 - isatty = sys.stdout.isatty()
1014 - quiet = not isatty or '--quiet' in myopts
1015 - onProgress = None
1016 - if not quiet:
1017 - progressBar = portage.output.TermProgressBar()
1018 - progressHandler = ProgressHandler()
1019 - onProgress = progressHandler.onProgress
1020 - def display():
1021 - progressBar.set(progressHandler.curval, progressHandler.maxval)
1022 - progressHandler.display = display
1023 - def sigwinch_handler(signum, frame):
1024 - lines, progressBar.term_columns = \
1025 - portage.output.get_term_size()
1026 - signal.signal(signal.SIGWINCH, sigwinch_handler)
1027 -
1028 - # Temporarily override portdb.porttrees so portdb.cp_all()
1029 - # will only return the relevant subset.
1030 - portdb_porttrees = portdb.porttrees
1031 - portdb.porttrees = porttrees
1032 - try:
1033 - cp_all = portdb.cp_all()
1034 - finally:
1035 - portdb.porttrees = portdb_porttrees
1036 -
1037 - curval = 0
1038 - maxval = len(cp_all)
1039 - if onProgress is not None:
1040 - onProgress(maxval, curval)
1041 -
1042 - from portage.cache.util import quiet_mirroring
1043 - from portage import eapi_is_supported, \
1044 - _validate_cache_for_unsupported_eapis
1045 -
1046 - # TODO: Display error messages, but do not interfere with the progress bar.
1047 - # Here's how:
1048 - # 1) erase the progress bar
1049 - # 2) show the error message
1050 - # 3) redraw the progress bar on a new line
1051 - noise = quiet_mirroring()
1052 -
1053 - for cp in cp_all:
1054 - for tree_data in porttrees_data:
1055 - for cpv in portdb.cp_list(cp, mytree=tree_data.path):
1056 - tree_data.valid_nodes.add(cpv)
1057 - try:
1058 - src = tree_data.src_db[cpv]
1059 - except KeyError, e:
1060 - noise.missing_entry(cpv)
1061 - del e
1062 - continue
1063 - except CacheError, ce:
1064 - noise.exception(cpv, ce)
1065 - del ce
1066 - continue
1067 -
1068 - eapi = src.get('EAPI')
1069 - if not eapi:
1070 - eapi = '0'
1071 - eapi = eapi.lstrip('-')
1072 - eapi_supported = eapi_is_supported(eapi)
1073 - if not eapi_supported:
1074 - if not _validate_cache_for_unsupported_eapis:
1075 - noise.misc(cpv, "unable to validate " + \
1076 - "cache for EAPI='%s'" % eapi)
1077 - continue
1078 -
1079 - dest = None
1080 - try:
1081 - dest = tree_data.dest_db[cpv]
1082 - except (KeyError, CacheError):
1083 - pass
1084 -
1085 - for d in (src, dest):
1086 - if d is not None and d.get('EAPI') in ('', '0'):
1087 - del d['EAPI']
1088 -
1089 - if dest is not None:
1090 - if not (dest['_mtime_'] == src['_mtime_'] and \
1091 - tree_data.eclass_db.is_eclass_data_valid(
1092 - dest['_eclasses_']) and \
1093 - set(dest['_eclasses_']) == set(src['_eclasses_'])):
1094 - dest = None
1095 - else:
1096 - # We don't want to skip the write unless we're really
1097 - # sure that the existing cache is identical, so don't
1098 - # trust _mtime_ and _eclasses_ alone.
1099 - for k in set(chain(src, dest)).difference(
1100 - ('_mtime_', '_eclasses_')):
1101 - if dest.get(k, '') != src.get(k, ''):
1102 - dest = None
1103 - break
1104 -
1105 - if dest is not None:
1106 - # The existing data is valid and identical,
1107 - # so there's no need to overwrite it.
1108 - continue
1109 -
1110 - try:
1111 - inherited = src.get('INHERITED', '')
1112 - eclasses = src.get('_eclasses_')
1113 - except CacheError, ce:
1114 - noise.exception(cpv, ce)
1115 - del ce
1116 - continue
1117 -
1118 - if eclasses is not None:
1119 - if not tree_data.eclass_db.is_eclass_data_valid(
1120 - src['_eclasses_']):
1121 - noise.eclass_stale(cpv)
1122 - continue
1123 - inherited = eclasses
1124 - else:
1125 - inherited = inherited.split()
1126 -
1127 - if tree_data.src_db.complete_eclass_entries and \
1128 - eclasses is None:
1129 - noise.corruption(cpv, "missing _eclasses_ field")
1130 - continue
1131 -
1132 - if inherited:
1133 - # Even if _eclasses_ already exists, replace it with data from
1134 - # eclass_cache, in order to insert local eclass paths.
1135 - try:
1136 - eclasses = tree_data.eclass_db.get_eclass_data(inherited)
1137 - except KeyError:
1138 - # INHERITED contains a non-existent eclass.
1139 - noise.eclass_stale(cpv)
1140 - continue
1141 -
1142 - if eclasses is None:
1143 - noise.eclass_stale(cpv)
1144 - continue
1145 - src['_eclasses_'] = eclasses
1146 - else:
1147 - src['_eclasses_'] = {}
1148 -
1149 - if not eapi_supported:
1150 - src = {
1151 - 'EAPI' : '-' + eapi,
1152 - '_mtime_' : src['_mtime_'],
1153 - '_eclasses_' : src['_eclasses_'],
1154 - }
1155 -
1156 - try:
1157 - tree_data.dest_db[cpv] = src
1158 - except CacheError, ce:
1159 - noise.exception(cpv, ce)
1160 - del ce
1161 -
1162 - curval += 1
1163 - if onProgress is not None:
1164 - onProgress(maxval, curval)
1165 -
1166 - if onProgress is not None:
1167 - onProgress(maxval, curval)
1168 -
1169 - for tree_data in porttrees_data:
1170 - try:
1171 - dead_nodes = set(tree_data.dest_db.iterkeys())
1172 - except CacheError, e:
1173 - writemsg_level("Error listing cache entries for " + \
1174 - "'%s': %s, continuing...\n" % (tree_data.path, e),
1175 - level=logging.ERROR, noiselevel=-1)
1176 - del e
1177 - else:
1178 - dead_nodes.difference_update(tree_data.valid_nodes)
1179 - for cpv in dead_nodes:
1180 - try:
1181 - del tree_data.dest_db[cpv]
1182 - except (KeyError, CacheError):
1183 - pass
1184 -
1185 - if not quiet:
1186 - # make sure the final progress is displayed
1187 - progressHandler.display()
1188 - print
1189 - signal.signal(signal.SIGWINCH, signal.SIG_DFL)
1190 -
1191 - sys.stdout.flush()
1192 - os.umask(old_umask)
1193 -
1194 -def action_regen(settings, portdb, max_jobs, max_load):
1195 - xterm_titles = "notitles" not in settings.features
1196 - emergelog(xterm_titles, " === regen")
1197 - #regenerate cache entries
1198 - portage.writemsg_stdout("Regenerating cache entries...\n")
1199 - try:
1200 - os.close(sys.stdin.fileno())
1201 - except SystemExit, e:
1202 - raise # Needed else can't exit
1203 - except:
1204 - pass
1205 - sys.stdout.flush()
1206 -
1207 - regen = MetadataRegen(portdb, max_jobs=max_jobs, max_load=max_load)
1208 - regen.run()
1209 -
1210 - portage.writemsg_stdout("done!\n")
1211 - return regen.returncode
1212 -
1213 -def action_config(settings, trees, myopts, myfiles):
1214 - if len(myfiles) != 1:
1215 - print red("!!! config can only take a single package atom at this time\n")
1216 - sys.exit(1)
1217 - if not is_valid_package_atom(myfiles[0]):
1218 - portage.writemsg("!!! '%s' is not a valid package atom.\n" % myfiles[0],
1219 - noiselevel=-1)
1220 - portage.writemsg("!!! Please check ebuild(5) for full details.\n")
1221 - portage.writemsg("!!! (Did you specify a version but forget to prefix with '='?)\n")
1222 - sys.exit(1)
1223 - print
1224 - try:
1225 - pkgs = trees[settings["ROOT"]]["vartree"].dbapi.match(myfiles[0])
1226 - except portage.exception.AmbiguousPackageName, e:
1227 - # Multiple matches thrown from cpv_expand
1228 - pkgs = e.args[0]
1229 - if len(pkgs) == 0:
1230 - print "No packages found.\n"
1231 - sys.exit(0)
1232 - elif len(pkgs) > 1:
1233 - if "--ask" in myopts:
1234 - options = []
1235 - print "Please select a package to configure:"
1236 - idx = 0
1237 - for pkg in pkgs:
1238 - idx += 1
1239 - options.append(str(idx))
1240 - print options[-1]+") "+pkg
1241 - print "X) Cancel"
1242 - options.append("X")
1243 - idx = userquery("Selection?", options)
1244 - if idx == "X":
1245 - sys.exit(0)
1246 - pkg = pkgs[int(idx)-1]
1247 - else:
1248 - print "The following packages available:"
1249 - for pkg in pkgs:
1250 - print "* "+pkg
1251 - print "\nPlease use a specific atom or the --ask option."
1252 - sys.exit(1)
1253 - else:
1254 - pkg = pkgs[0]
1255 -
1256 - print
1257 - if "--ask" in myopts:
1258 - if userquery("Ready to configure "+pkg+"?") == "No":
1259 - sys.exit(0)
1260 - else:
1261 - print "Configuring pkg..."
1262 - print
1263 - ebuildpath = trees[settings["ROOT"]]["vartree"].dbapi.findname(pkg)
1264 - mysettings = portage.config(clone=settings)
1265 - vardb = trees[mysettings["ROOT"]]["vartree"].dbapi
1266 - debug = mysettings.get("PORTAGE_DEBUG") == "1"
1267 - retval = portage.doebuild(ebuildpath, "config", mysettings["ROOT"],
1268 - mysettings,
1269 - debug=(settings.get("PORTAGE_DEBUG", "") == 1), cleanup=True,
1270 - mydbapi=trees[settings["ROOT"]]["vartree"].dbapi, tree="vartree")
1271 - if retval == os.EX_OK:
1272 - portage.doebuild(ebuildpath, "clean", mysettings["ROOT"],
1273 - mysettings, debug=debug, mydbapi=vardb, tree="vartree")
1274 - print
1275 -
1276 -def action_info(settings, trees, myopts, myfiles):
1277 - print getportageversion(settings["PORTDIR"], settings["ROOT"],
1278 - settings.profile_path, settings["CHOST"],
1279 - trees[settings["ROOT"]]["vartree"].dbapi)
1280 - header_width = 65
1281 - header_title = "System Settings"
1282 - if myfiles:
1283 - print header_width * "="
1284 - print header_title.rjust(int(header_width/2 + len(header_title)/2))
1285 - print header_width * "="
1286 - print "System uname: "+platform.platform(aliased=1)
1287 -
1288 - lastSync = portage.grabfile(os.path.join(
1289 - settings["PORTDIR"], "metadata", "timestamp.chk"))
1290 - print "Timestamp of tree:",
1291 - if lastSync:
1292 - print lastSync[0]
1293 - else:
1294 - print "Unknown"
1295 -
1296 - output=commands.getstatusoutput("distcc --version")
1297 - if not output[0]:
1298 - print str(output[1].split("\n",1)[0]),
1299 - if "distcc" in settings.features:
1300 - print "[enabled]"
1301 - else:
1302 - print "[disabled]"
1303 -
1304 - output=commands.getstatusoutput("ccache -V")
1305 - if not output[0]:
1306 - print str(output[1].split("\n",1)[0]),
1307 - if "ccache" in settings.features:
1308 - print "[enabled]"
1309 - else:
1310 - print "[disabled]"
1311 -
1312 - myvars = ["sys-devel/autoconf", "sys-devel/automake", "virtual/os-headers",
1313 - "sys-devel/binutils", "sys-devel/libtool", "dev-lang/python"]
1314 - myvars += portage.util.grabfile(settings["PORTDIR"]+"/profiles/info_pkgs")
1315 - myvars = portage.util.unique_array(myvars)
1316 - myvars.sort()
1317 -
1318 - for x in myvars:
1319 - if portage.isvalidatom(x):
1320 - pkg_matches = trees["/"]["vartree"].dbapi.match(x)
1321 - pkg_matches = [portage.catpkgsplit(cpv)[1:] for cpv in pkg_matches]
1322 - pkg_matches.sort(key=cmp_sort_key(portage.pkgcmp))
1323 - pkgs = []
1324 - for pn, ver, rev in pkg_matches:
1325 - if rev != "r0":
1326 - pkgs.append(ver + "-" + rev)
1327 - else:
1328 - pkgs.append(ver)
1329 - if pkgs:
1330 - pkgs = ", ".join(pkgs)
1331 - print "%-20s %s" % (x+":", pkgs)
1332 - else:
1333 - print "%-20s %s" % (x+":", "[NOT VALID]")
1334 -
1335 - libtool_vers = ",".join(trees["/"]["vartree"].dbapi.match("sys-devel/libtool"))
1336 -
1337 - if "--verbose" in myopts:
1338 - myvars=settings.keys()
1339 - else:
1340 - myvars = ['GENTOO_MIRRORS', 'CONFIG_PROTECT', 'CONFIG_PROTECT_MASK',
1341 - 'PORTDIR', 'DISTDIR', 'PKGDIR', 'PORTAGE_TMPDIR',
1342 - 'PORTDIR_OVERLAY', 'USE', 'CHOST', 'CFLAGS', 'CXXFLAGS',
1343 - 'ACCEPT_KEYWORDS', 'SYNC', 'FEATURES', 'EMERGE_DEFAULT_OPTS',
1344 - 'EPREFIX']
1345 -
1346 - myvars.extend(portage.util.grabfile(settings["PORTDIR"]+"/profiles/info_vars"))
1347 -
1348 - myvars = portage.util.unique_array(myvars)
1349 - use_expand = settings.get('USE_EXPAND', '').split()
1350 - use_expand.sort()
1351 - use_expand_hidden = set(
1352 - settings.get('USE_EXPAND_HIDDEN', '').upper().split())
1353 - alphabetical_use = '--alphabetical' in myopts
1354 - root_config = trees[settings["ROOT"]]['root_config']
1355 - unset_vars = []
1356 - myvars.sort()
1357 - for x in myvars:
1358 - if x in settings:
1359 - if x != "USE":
1360 - print '%s="%s"' % (x, settings[x])
1361 - else:
1362 - use = set(settings["USE"].split())
1363 - for varname in use_expand:
1364 - flag_prefix = varname.lower() + "_"
1365 - for f in list(use):
1366 - if f.startswith(flag_prefix):
1367 - use.remove(f)
1368 - use = list(use)
1369 - use.sort()
1370 - print 'USE="%s"' % " ".join(use),
1371 - for varname in use_expand:
1372 - myval = settings.get(varname)
1373 - if myval:
1374 - print '%s="%s"' % (varname, myval),
1375 - print
1376 - else:
1377 - unset_vars.append(x)
1378 - if unset_vars:
1379 - print "Unset: "+", ".join(unset_vars)
1380 - print
1381 -
1382 - if "--debug" in myopts:
1383 - for x in dir(portage):
1384 - module = getattr(portage, x)
1385 - if "cvs_id_string" in dir(module):
1386 - print "%s: %s" % (str(x), str(module.cvs_id_string))
1387 -
1388 - # See if we can find any packages installed matching the strings
1389 - # passed on the command line
1390 - mypkgs = []
1391 - vardb = trees[settings["ROOT"]]["vartree"].dbapi
1392 - portdb = trees[settings["ROOT"]]["porttree"].dbapi
1393 - for x in myfiles:
1394 - mypkgs.extend(vardb.match(x))
1395 -
1396 - # If some packages were found...
1397 - if mypkgs:
1398 - # Get our global settings (we only print stuff if it varies from
1399 - # the current config)
1400 - mydesiredvars = [ 'CHOST', 'CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'EPREFIX' ]
1401 - auxkeys = mydesiredvars + list(vardb._aux_cache_keys)
1402 - auxkeys.append('DEFINED_PHASES')
1403 - global_vals = {}
1404 - pkgsettings = portage.config(clone=settings)
1405 -
1406 - # Loop through each package
1407 - # Only print settings if they differ from global settings
1408 - header_title = "Package Settings"
1409 - print header_width * "="
1410 - print header_title.rjust(int(header_width/2 + len(header_title)/2))
1411 - print header_width * "="
1412 - from portage.output import EOutput
1413 - out = EOutput()
1414 - for cpv in mypkgs:
1415 - # Get all package specific variables
1416 - metadata = dict(izip(auxkeys, vardb.aux_get(cpv, auxkeys)))
1417 - pkg = Package(built=True, cpv=cpv,
1418 - installed=True, metadata=izip(Package.metadata_keys,
1419 - (metadata.get(x, '') for x in Package.metadata_keys)),
1420 - root_config=root_config, type_name='installed')
1421 -
1422 - print "\n%s was built with the following:" % \
1423 - colorize("INFORM", str(pkg.cpv))
1424 -
1425 - pkgsettings.setcpv(pkg)
1426 - forced_flags = set(chain(pkgsettings.useforce,
1427 - pkgsettings.usemask))
1428 - use = set(pkg.use.enabled)
1429 - use.discard(pkgsettings.get('ARCH'))
1430 - use_expand_flags = set()
1431 - use_enabled = {}
1432 - use_disabled = {}
1433 - for varname in use_expand:
1434 - flag_prefix = varname.lower() + "_"
1435 - for f in use:
1436 - if f.startswith(flag_prefix):
1437 - use_expand_flags.add(f)
1438 - use_enabled.setdefault(
1439 - varname.upper(), []).append(f[len(flag_prefix):])
1440 -
1441 - for f in pkg.iuse.all:
1442 - if f.startswith(flag_prefix):
1443 - use_expand_flags.add(f)
1444 - if f not in use:
1445 - use_disabled.setdefault(
1446 - varname.upper(), []).append(f[len(flag_prefix):])
1447 -
1448 - var_order = set(use_enabled)
1449 - var_order.update(use_disabled)
1450 - var_order = sorted(var_order)
1451 - var_order.insert(0, 'USE')
1452 - use.difference_update(use_expand_flags)
1453 - use_enabled['USE'] = list(use)
1454 - use_disabled['USE'] = []
1455 -
1456 - for f in pkg.iuse.all:
1457 - if f not in use and \
1458 - f not in use_expand_flags:
1459 - use_disabled['USE'].append(f)
1460 -
1461 - for varname in var_order:
1462 - if varname in use_expand_hidden:
1463 - continue
1464 - flags = []
1465 - for f in use_enabled.get(varname, []):
1466 - flags.append(UseFlagDisplay(f, True, f in forced_flags))
1467 - for f in use_disabled.get(varname, []):
1468 - flags.append(UseFlagDisplay(f, False, f in forced_flags))
1469 - if alphabetical_use:
1470 - flags.sort(key=UseFlagDisplay.sort_combined)
1471 - else:
1472 - flags.sort(key=UseFlagDisplay.sort_separated)
1473 - print '%s="%s"' % (varname, ' '.join(str(f) for f in flags)),
1474 - print
1475 -
1476 - for myvar in mydesiredvars:
1477 - if metadata[myvar].split() != settings.get(myvar, '').split():
1478 - print "%s=\"%s\"" % (myvar, metadata[myvar])
1479 - print
1480 -
1481 - if metadata['DEFINED_PHASES']:
1482 - if 'info' not in metadata['DEFINED_PHASES'].split():
1483 - continue
1484 -
1485 - print ">>> Attempting to run pkg_info() for '%s'" % pkg.cpv
1486 - ebuildpath = vardb.findname(pkg.cpv)
1487 - if not ebuildpath or not os.path.exists(ebuildpath):
1488 - out.ewarn("No ebuild found for '%s'" % pkg.cpv)
1489 - continue
1490 - portage.doebuild(ebuildpath, "info", pkgsettings["ROOT"],
1491 - pkgsettings, debug=(settings.get("PORTAGE_DEBUG", "") == 1),
1492 - mydbapi=trees[settings["ROOT"]]["vartree"].dbapi,
1493 - tree="vartree")
1494 -
1495 -def action_search(root_config, myopts, myfiles, spinner):
1496 - if not myfiles:
1497 - print "emerge: no search terms provided."
1498 - else:
1499 - searchinstance = search(root_config,
1500 - spinner, "--searchdesc" in myopts,
1501 - "--quiet" not in myopts, "--usepkg" in myopts,
1502 - "--usepkgonly" in myopts)
1503 - for mysearch in myfiles:
1504 - try:
1505 - searchinstance.execute(mysearch)
1506 - except re.error, comment:
1507 - print "\n!!! Regular expression error in \"%s\": %s" % ( mysearch, comment )
1508 - sys.exit(1)
1509 - searchinstance.output()
1510 -
1511 -def action_uninstall(settings, trees, ldpath_mtimes,
1512 - opts, action, files, spinner):
1513 -
1514 - # For backward compat, some actions do not require leading '='.
1515 - ignore_missing_eq = action in ('clean', 'unmerge')
1516 - root = settings['ROOT']
1517 - vardb = trees[root]['vartree'].dbapi
1518 - valid_atoms = []
1519 - lookup_owners = []
1520 -
1521 - # Ensure atoms are valid before calling unmerge().
1522 - # For backward compat, leading '=' is not required.
1523 - for x in files:
1524 - if is_valid_package_atom(x) or \
1525 - (ignore_missing_eq and is_valid_package_atom('=' + x)):
1526 -
1527 - try:
1528 - valid_atoms.append(
1529 - portage.dep_expand(x, mydb=vardb, settings=settings))
1530 - except portage.exception.AmbiguousPackageName, e:
1531 - msg = "The short ebuild name \"" + x + \
1532 - "\" is ambiguous. Please specify " + \
1533 - "one of the following " + \
1534 - "fully-qualified ebuild names instead:"
1535 - for line in textwrap.wrap(msg, 70):
1536 - writemsg_level("!!! %s\n" % (line,),
1537 - level=logging.ERROR, noiselevel=-1)
1538 - for i in e[0]:
1539 - writemsg_level(" %s\n" % colorize("INFORM", i),
1540 - level=logging.ERROR, noiselevel=-1)
1541 - writemsg_level("\n", level=logging.ERROR, noiselevel=-1)
1542 - return 1
1543 -
1544 - elif x.startswith(os.sep):
1545 - if not x.startswith(root):
1546 - writemsg_level(("!!! '%s' does not start with" + \
1547 - " $ROOT.\n") % x, level=logging.ERROR, noiselevel=-1)
1548 - return 1
1549 - # Queue these up since it's most efficient to handle
1550 - # multiple files in a single iter_owners() call.
1551 - lookup_owners.append(x)
1552 -
1553 - else:
1554 - msg = []
1555 - msg.append("'%s' is not a valid package atom." % (x,))
1556 - msg.append("Please check ebuild(5) for full details.")
1557 - writemsg_level("".join("!!! %s\n" % line for line in msg),
1558 - level=logging.ERROR, noiselevel=-1)
1559 - return 1
1560 -
1561 - if lookup_owners:
1562 - relative_paths = []
1563 - search_for_multiple = False
1564 - if len(lookup_owners) > 1:
1565 - search_for_multiple = True
1566 -
1567 - for x in lookup_owners:
1568 - if not search_for_multiple and os.path.isdir(x):
1569 - search_for_multiple = True
1570 - relative_paths.append(x[len(root):])
1571 -
1572 - owners = set()
1573 - for pkg, relative_path in \
1574 - vardb._owners.iter_owners(relative_paths):
1575 - owners.add(pkg.mycpv)
1576 - if not search_for_multiple:
1577 - break
1578 -
1579 - if owners:
1580 - for cpv in owners:
1581 - slot = vardb.aux_get(cpv, ['SLOT'])[0]
1582 - if not slot:
1583 - # portage now masks packages with missing slot, but it's
1584 - # possible that one was installed by an older version
1585 - atom = portage.cpv_getkey(cpv)
1586 - else:
1587 - atom = '%s:%s' % (portage.cpv_getkey(cpv), slot)
1588 - valid_atoms.append(portage.dep.Atom(atom))
1589 - else:
1590 - writemsg_level(("!!! '%s' is not claimed " + \
1591 - "by any package.\n") % lookup_owners[0],
1592 - level=logging.WARNING, noiselevel=-1)
1593 -
1594 - if files and not valid_atoms:
1595 - return 1
1596 -
1597 - if action in ('clean', 'unmerge') or \
1598 - (action == 'prune' and "--nodeps" in opts):
1599 - # When given a list of atoms, unmerge them in the order given.
1600 - ordered = action == 'unmerge'
1601 - unmerge(trees[settings["ROOT"]]['root_config'], opts, action,
1602 - valid_atoms, ldpath_mtimes, ordered=ordered)
1603 - rval = os.EX_OK
1604 - elif action == 'deselect':
1605 - rval = action_deselect(settings, trees, opts, valid_atoms)
1606 - else:
1607 - rval = action_depclean(settings, trees, ldpath_mtimes,
1608 - opts, action, valid_atoms, spinner)
1609 -
1610 - return rval
1611 -
1612 -def action_deselect(settings, trees, opts, atoms):
1613 - root_config = trees[settings['ROOT']]['root_config']
1614 - world_set = root_config.sets['world']
1615 - if not hasattr(world_set, 'update'):
1616 - writemsg_level("World set does not appear to be mutable.\n",
1617 - level=logging.ERROR, noiselevel=-1)
1618 - return 1
1619 -
1620 - vardb = root_config.trees['vartree'].dbapi
1621 - expanded_atoms = set(atoms)
1622 - from portage.dep import Atom
1623 - for atom in atoms:
1624 - for cpv in vardb.match(atom):
1625 - slot, = vardb.aux_get(cpv, ['SLOT'])
1626 - if not slot:
1627 - slot = '0'
1628 - expanded_atoms.add(Atom('%s:%s' % (portage.cpv_getkey(cpv), slot)))
1629 -
1630 - pretend = '--pretend' in opts
1631 - locked = False
1632 - if not pretend and hasattr(world_set, 'lock'):
1633 - world_set.lock()
1634 - locked = True
1635 - try:
1636 - discard_atoms = set()
1637 - world_set.load()
1638 - for atom in world_set:
1639 - if not isinstance(atom, Atom):
1640 - # nested set
1641 - continue
1642 - for arg_atom in expanded_atoms:
1643 - if arg_atom.intersects(atom) and \
1644 - not (arg_atom.slot and not atom.slot):
1645 - discard_atoms.add(atom)
1646 - break
1647 - if discard_atoms:
1648 - for atom in sorted(discard_atoms):
1649 - print ">>> Removing %s from \"world\" favorites file..." % \
1650 - colorize("INFORM", str(atom))
1651 -
1652 - if '--ask' in opts:
1653 - prompt = "Would you like to remove these " + \
1654 - "packages from your world favorites?"
1655 - if userquery(prompt) == 'No':
1656 - return os.EX_OK
1657 -
1658 - remaining = set(world_set)
1659 - remaining.difference_update(discard_atoms)
1660 - if not pretend:
1661 - world_set.replace(remaining)
1662 - else:
1663 - print ">>> No matching atoms found in \"world\" favorites file..."
1664 - finally:
1665 - if locked:
1666 - world_set.unlock()
1667 - return os.EX_OK
1668 -
1669 -def action_depclean(settings, trees, ldpath_mtimes,
1670 - myopts, action, myfiles, spinner):
1671 - 	# Kill packages that aren't explicitly merged and aren't required as a
1672 - # dependency of another package. World file is explicit.
1673 -
1674 - # Global depclean or prune operations are not very safe when there are
1675 - # missing dependencies since it's unknown how badly incomplete
1676 - # the dependency graph is, and we might accidentally remove packages
1677 - # that should have been pulled into the graph. On the other hand, it's
1678 - # relatively safe to ignore missing deps when only asked to remove
1679 - # specific packages.
1680 - allow_missing_deps = len(myfiles) > 0
1681 -
1682 - msg = []
1683 - msg.append("Always study the list of packages to be cleaned for any obvious\n")
1684 - msg.append("mistakes. Packages that are part of the world set will always\n")
1685 - msg.append("be kept. They can be manually added to this set with\n")
1686 - msg.append(good("`emerge --noreplace <atom>`") + ". Packages that are listed in\n")
1687 - msg.append("package.provided (see portage(5)) will be removed by\n")
1688 - msg.append("depclean, even if they are part of the world set.\n")
1689 - msg.append("\n")
1690 - msg.append("As a safety measure, depclean will not remove any packages\n")
1691 - msg.append("unless *all* required dependencies have been resolved. As a\n")
1692 - msg.append("consequence, it is often necessary to run %s\n" % \
1693 - good("`emerge --update"))
1694 - msg.append(good("--newuse --deep @system @world`") + \
1695 - " prior to depclean.\n")
1696 -
1697 - if action == "depclean" and "--quiet" not in myopts and not myfiles:
1698 - portage.writemsg_stdout("\n")
1699 - for x in msg:
1700 - portage.writemsg_stdout(colorize("WARN", " * ") + x)
1701 -
1702 - xterm_titles = "notitles" not in settings.features
1703 - myroot = settings["ROOT"]
1704 - root_config = trees[myroot]["root_config"]
1705 - getSetAtoms = root_config.setconfig.getSetAtoms
1706 - vardb = trees[myroot]["vartree"].dbapi
1707 - deselect = myopts.get('--deselect') != 'n'
1708 -
1709 - required_set_names = ("system", "world")
1710 - required_sets = {}
1711 - set_args = []
1712 -
1713 - for s in required_set_names:
1714 - required_sets[s] = InternalPackageSet(
1715 - initial_atoms=getSetAtoms(s))
1716 -
1717 -
1718 - # When removing packages, use a temporary version of world
1719 - # which excludes packages that are intended to be eligible for
1720 - # removal.
1721 - world_temp_set = required_sets["world"]
1722 - system_set = required_sets["system"]
1723 -
1724 - if not system_set or not world_temp_set:
1725 -
1726 - if not system_set:
1727 - writemsg_level("!!! You have no system list.\n",
1728 - level=logging.ERROR, noiselevel=-1)
1729 -
1730 - if not world_temp_set:
1731 - writemsg_level("!!! You have no world file.\n",
1732 - level=logging.WARNING, noiselevel=-1)
1733 -
1734 - writemsg_level("!!! Proceeding is likely to " + \
1735 - "break your installation.\n",
1736 - level=logging.WARNING, noiselevel=-1)
1737 - if "--pretend" not in myopts:
1738 - countdown(int(settings["EMERGE_WARNING_DELAY"]), ">>> Depclean")
1739 -
1740 - if action == "depclean":
1741 - emergelog(xterm_titles, " >>> depclean")
1742 -
1743 - import textwrap
1744 - args_set = InternalPackageSet()
1745 - if myfiles:
1746 - args_set.update(myfiles)
1747 - matched_packages = False
1748 - for x in args_set:
1749 - if vardb.match(x):
1750 - matched_packages = True
1751 - break
1752 - if not matched_packages:
1753 - writemsg_level(">>> No packages selected for removal by %s\n" % \
1754 - action)
1755 - return
1756 -
1757 - writemsg_level("\nCalculating dependencies ")
1758 - resolver_params = create_depgraph_params(myopts, "remove")
1759 - resolver = depgraph(settings, trees, myopts, resolver_params, spinner)
1760 - vardb = resolver.trees[myroot]["vartree"].dbapi
1761 -
1762 - if action == "depclean":
1763 -
1764 - if args_set:
1765 -
1766 - if deselect:
1767 - world_temp_set.clear()
1768 -
1769 - # Pull in everything that's installed but not matched
1770 - # by an argument atom since we don't want to clean any
1771 - # package if something depends on it.
1772 - for pkg in vardb:
1773 - spinner.update()
1774 -
1775 - try:
1776 - if args_set.findAtomForPackage(pkg) is None:
1777 - world_temp_set.add("=" + pkg.cpv)
1778 - continue
1779 - except portage.exception.InvalidDependString, e:
1780 - show_invalid_depstring_notice(pkg,
1781 - pkg.metadata["PROVIDE"], str(e))
1782 - del e
1783 - world_temp_set.add("=" + pkg.cpv)
1784 - continue
1785 -
1786 - elif action == "prune":
1787 -
1788 - if deselect:
1789 - world_temp_set.clear()
1790 -
1791 - # Pull in everything that's installed since we don't
1792 - 		# want to prune a package if something depends on it.
1793 - world_temp_set.update(vardb.cp_all())
1794 -
1795 - if not args_set:
1796 -
1797 - # Try to prune everything that's slotted.
1798 - for cp in vardb.cp_all():
1799 - if len(vardb.cp_list(cp)) > 1:
1800 - args_set.add(cp)
1801 -
1802 - # Remove atoms from world that match installed packages
1803 - # that are also matched by argument atoms, but do not remove
1804 - # them if they match the highest installed version.
1805 - for pkg in vardb:
1806 - spinner.update()
1807 - pkgs_for_cp = vardb.match_pkgs(pkg.cp)
1808 - if not pkgs_for_cp or pkg not in pkgs_for_cp:
1809 - raise AssertionError("package expected in matches: " + \
1810 - "cp = %s, cpv = %s matches = %s" % \
1811 - (pkg.cp, pkg.cpv, [str(x) for x in pkgs_for_cp]))
1812 -
1813 - highest_version = pkgs_for_cp[-1]
1814 - if pkg == highest_version:
1815 - # pkg is the highest version
1816 - world_temp_set.add("=" + pkg.cpv)
1817 - continue
1818 -
1819 - if len(pkgs_for_cp) <= 1:
1820 - raise AssertionError("more packages expected: " + \
1821 - "cp = %s, cpv = %s matches = %s" % \
1822 - (pkg.cp, pkg.cpv, [str(x) for x in pkgs_for_cp]))
1823 -
1824 - try:
1825 - if args_set.findAtomForPackage(pkg) is None:
1826 - world_temp_set.add("=" + pkg.cpv)
1827 - continue
1828 - except portage.exception.InvalidDependString, e:
1829 - show_invalid_depstring_notice(pkg,
1830 - pkg.metadata["PROVIDE"], str(e))
1831 - del e
1832 - world_temp_set.add("=" + pkg.cpv)
1833 - continue
1834 -
1835 - set_args = {}
1836 - for s, package_set in required_sets.iteritems():
1837 - set_atom = SETPREFIX + s
1838 - set_arg = SetArg(arg=set_atom, set=package_set,
1839 - root_config=resolver.roots[myroot])
1840 - set_args[s] = set_arg
1841 - for atom in set_arg.set:
1842 - resolver._dep_stack.append(
1843 - Dependency(atom=atom, root=myroot, parent=set_arg))
1844 - resolver.digraph.add(set_arg, None)
1845 -
1846 - success = resolver._complete_graph()
1847 - writemsg_level("\b\b... done!\n")
1848 -
1849 - resolver.display_problems()
1850 -
1851 - if not success:
1852 - return 1
1853 -
1854 - def unresolved_deps():
1855 -
1856 - unresolvable = set()
1857 - for dep in resolver._initially_unsatisfied_deps:
1858 - if isinstance(dep.parent, Package) and \
1859 - (dep.priority > UnmergeDepPriority.SOFT):
1860 - unresolvable.add((dep.atom, dep.parent.cpv))
1861 -
1862 - if not unresolvable:
1863 - return False
1864 -
1865 - if unresolvable and not allow_missing_deps:
1866 - prefix = bad(" * ")
1867 - msg = []
1868 - msg.append("Dependencies could not be completely resolved due to")
1869 - msg.append("the following required packages not being installed:")
1870 - msg.append("")
1871 - for atom, parent in unresolvable:
1872 - msg.append(" %s pulled in by:" % (atom,))
1873 - msg.append(" %s" % (parent,))
1874 - msg.append("")
1875 - msg.append("Have you forgotten to run " + \
1876 - good("`emerge --update --newuse --deep @system @world`") + " prior")
1877 - msg.append(("to %s? It may be necessary to manually " + \
1878 - "uninstall packages that no longer") % action)
1879 - msg.append("exist in the portage tree since " + \
1880 - "it may not be possible to satisfy their")
1881 - msg.append("dependencies. Also, be aware of " + \
1882 - "the --with-bdeps option that is documented")
1883 - msg.append("in " + good("`man emerge`") + ".")
1884 - if action == "prune":
1885 - msg.append("")
1886 - msg.append("If you would like to ignore " + \
1887 - "dependencies then use %s." % good("--nodeps"))
1888 - writemsg_level("".join("%s%s\n" % (prefix, line) for line in msg),
1889 - level=logging.ERROR, noiselevel=-1)
1890 - return True
1891 - return False
1892 -
1893 - if unresolved_deps():
1894 - return 1
1895 -
1896 - graph = resolver.digraph.copy()
1897 - required_pkgs_total = 0
1898 - for node in graph:
1899 - if isinstance(node, Package):
1900 - required_pkgs_total += 1
1901 -
1902 - def show_parents(child_node):
1903 - parent_nodes = graph.parent_nodes(child_node)
1904 - if not parent_nodes:
1905 - # With --prune, the highest version can be pulled in without any
1906 - # real parent since all installed packages are pulled in. In that
1907 - # case there's nothing to show here.
1908 - return
1909 - parent_strs = []
1910 - for node in parent_nodes:
1911 - parent_strs.append(str(getattr(node, "cpv", node)))
1912 - parent_strs.sort()
1913 - msg = []
1914 - msg.append(" %s pulled in by:\n" % (child_node.cpv,))
1915 - for parent_str in parent_strs:
1916 - msg.append(" %s\n" % (parent_str,))
1917 - msg.append("\n")
1918 - portage.writemsg_stdout("".join(msg), noiselevel=-1)
1919 -
1920 - def cmp_pkg_cpv(pkg1, pkg2):
1921 - """Sort Package instances by cpv."""
1922 - if pkg1.cpv > pkg2.cpv:
1923 - return 1
1924 - elif pkg1.cpv == pkg2.cpv:
1925 - return 0
1926 - else:
1927 - return -1
1928 -
1929 - def create_cleanlist():
1930 - pkgs_to_remove = []
1931 -
1932 - if action == "depclean":
1933 - if args_set:
1934 -
1935 - for pkg in sorted(vardb, key=cmp_sort_key(cmp_pkg_cpv)):
1936 - arg_atom = None
1937 - try:
1938 - arg_atom = args_set.findAtomForPackage(pkg)
1939 - except portage.exception.InvalidDependString:
1940 - # this error has already been displayed by now
1941 - continue
1942 -
1943 - if arg_atom:
1944 - if pkg not in graph:
1945 - pkgs_to_remove.append(pkg)
1946 - elif "--verbose" in myopts:
1947 - show_parents(pkg)
1948 -
1949 - else:
1950 - for pkg in sorted(vardb, key=cmp_sort_key(cmp_pkg_cpv)):
1951 - if pkg not in graph:
1952 - pkgs_to_remove.append(pkg)
1953 - elif "--verbose" in myopts:
1954 - show_parents(pkg)
1955 -
1956 - elif action == "prune":
1957 - # Prune really uses all installed instead of world. It's not
1958 - # a real reverse dependency so don't display it as such.
1959 - graph.remove(set_args["world"])
1960 -
1961 - for atom in args_set:
1962 - for pkg in vardb.match_pkgs(atom):
1963 - if pkg not in graph:
1964 - pkgs_to_remove.append(pkg)
1965 - elif "--verbose" in myopts:
1966 - show_parents(pkg)
1967 -
1968 - if not pkgs_to_remove:
1969 - writemsg_level(
1970 - ">>> No packages selected for removal by %s\n" % action)
1971 - if "--verbose" not in myopts:
1972 - writemsg_level(
1973 - ">>> To see reverse dependencies, use %s\n" % \
1974 - good("--verbose"))
1975 - if action == "prune":
1976 - writemsg_level(
1977 - ">>> To ignore dependencies, use %s\n" % \
1978 - good("--nodeps"))
1979 -
1980 - return pkgs_to_remove
1981 -
1982 - cleanlist = create_cleanlist()
1983 -
1984 - if len(cleanlist):
1985 - clean_set = set(cleanlist)
1986 -
1987 - 		# Check if any of these packages are the sole providers of libraries
1988 - # with consumers that have not been selected for removal. If so, these
1989 - # packages and any dependencies need to be added to the graph.
1990 - real_vardb = trees[myroot]["vartree"].dbapi
1991 - linkmap = real_vardb.linkmap
1992 - liblist = linkmap.listLibraryObjects()
1993 - consumer_cache = {}
1994 - provider_cache = {}
1995 - soname_cache = {}
1996 - consumer_map = {}
1997 -
1998 - writemsg_level(">>> Checking for lib consumers...\n")
1999 -
2000 - for pkg in cleanlist:
2001 - pkg_dblink = real_vardb._dblink(pkg.cpv)
2002 - provided_libs = set()
2003 -
2004 - for lib in liblist:
2005 - if pkg_dblink.isowner(lib, myroot):
2006 - provided_libs.add(lib)
2007 -
2008 - if not provided_libs:
2009 - continue
2010 -
2011 - consumers = {}
2012 - for lib in provided_libs:
2013 - lib_consumers = consumer_cache.get(lib)
2014 - if lib_consumers is None:
2015 - lib_consumers = linkmap.findConsumers(lib)
2016 - consumer_cache[lib] = lib_consumers
2017 - if lib_consumers:
2018 - consumers[lib] = lib_consumers
2019 -
2020 - if not consumers:
2021 - continue
2022 -
2023 - for lib, lib_consumers in consumers.items():
2024 - for consumer_file in list(lib_consumers):
2025 - if pkg_dblink.isowner(consumer_file, myroot):
2026 - lib_consumers.remove(consumer_file)
2027 - if not lib_consumers:
2028 - del consumers[lib]
2029 -
2030 - if not consumers:
2031 - continue
2032 -
2033 - for lib, lib_consumers in consumers.iteritems():
2034 -
2035 - soname = soname_cache.get(lib)
2036 - if soname is None:
2037 - soname = linkmap.getSoname(lib)
2038 - soname_cache[lib] = soname
2039 -
2040 - consumer_providers = []
2041 - for lib_consumer in lib_consumers:
2042 - providers = provider_cache.get(lib)
2043 - if providers is None:
2044 - providers = linkmap.findProviders(lib_consumer)
2045 - provider_cache[lib_consumer] = providers
2046 - if soname not in providers:
2047 - # Why does this happen?
2048 - continue
2049 - consumer_providers.append(
2050 - (lib_consumer, providers[soname]))
2051 -
2052 - consumers[lib] = consumer_providers
2053 -
2054 - consumer_map[pkg] = consumers
2055 -
2056 - if consumer_map:
2057 -
2058 - search_files = set()
2059 - for consumers in consumer_map.itervalues():
2060 - for lib, consumer_providers in consumers.iteritems():
2061 - for lib_consumer, providers in consumer_providers:
2062 - search_files.add(lib_consumer)
2063 - search_files.update(providers)
2064 -
2065 - writemsg_level(">>> Assigning files to packages...\n")
2066 - file_owners = real_vardb._owners.getFileOwnerMap(search_files)
2067 -
2068 - for pkg, consumers in consumer_map.items():
2069 - for lib, consumer_providers in consumers.items():
2070 - lib_consumers = set()
2071 -
2072 - for lib_consumer, providers in consumer_providers:
2073 - owner_set = file_owners.get(lib_consumer)
2074 - provider_dblinks = set()
2075 - provider_pkgs = set()
2076 -
2077 - if len(providers) > 1:
2078 - for provider in providers:
2079 - provider_set = file_owners.get(provider)
2080 - if provider_set is not None:
2081 - provider_dblinks.update(provider_set)
2082 -
2083 - if len(provider_dblinks) > 1:
2084 - for provider_dblink in provider_dblinks:
2085 - pkg_key = ("installed", myroot,
2086 - provider_dblink.mycpv, "nomerge")
2087 - if pkg_key not in clean_set:
2088 - provider_pkgs.add(vardb.get(pkg_key))
2089 -
2090 - if provider_pkgs:
2091 - continue
2092 -
2093 - if owner_set is not None:
2094 - lib_consumers.update(owner_set)
2095 -
2096 - for consumer_dblink in list(lib_consumers):
2097 - if ("installed", myroot, consumer_dblink.mycpv,
2098 - "nomerge") in clean_set:
2099 - lib_consumers.remove(consumer_dblink)
2100 - continue
2101 -
2102 - if lib_consumers:
2103 - consumers[lib] = lib_consumers
2104 - else:
2105 - del consumers[lib]
2106 - if not consumers:
2107 - del consumer_map[pkg]
2108 -
2109 - if consumer_map:
2110 - # TODO: Implement a package set for rebuilding consumer packages.
2111 -
2112 - msg = "In order to avoid breakage of link level " + \
2113 - "dependencies, one or more packages will not be removed. " + \
2114 - "This can be solved by rebuilding " + \
2115 - "the packages that pulled them in."
2116 -
2117 - prefix = bad(" * ")
2118 - from textwrap import wrap
2119 - writemsg_level("".join(prefix + "%s\n" % line for \
2120 - line in wrap(msg, 70)), level=logging.WARNING, noiselevel=-1)
2121 -
2122 - msg = []
2123 - for pkg, consumers in consumer_map.iteritems():
2124 - unique_consumers = set(chain(*consumers.values()))
2125 - unique_consumers = sorted(consumer.mycpv \
2126 - for consumer in unique_consumers)
2127 - msg.append("")
2128 - msg.append(" %s pulled in by:" % (pkg.cpv,))
2129 - for consumer in unique_consumers:
2130 - msg.append(" %s" % (consumer,))
2131 - msg.append("")
2132 - writemsg_level("".join(prefix + "%s\n" % line for line in msg),
2133 - level=logging.WARNING, noiselevel=-1)
2134 -
2135 - # Add lib providers to the graph as children of lib consumers,
2136 - # and also add any dependencies pulled in by the provider.
2137 - writemsg_level(">>> Adding lib providers to graph...\n")
2138 -
2139 - for pkg, consumers in consumer_map.iteritems():
2140 - for consumer_dblink in set(chain(*consumers.values())):
2141 - consumer_pkg = vardb.get(("installed", myroot,
2142 - consumer_dblink.mycpv, "nomerge"))
2143 - if not resolver._add_pkg(pkg,
2144 - Dependency(parent=consumer_pkg,
2145 - priority=UnmergeDepPriority(runtime=True),
2146 - root=pkg.root)):
2147 - resolver.display_problems()
2148 - return 1
2149 -
2150 - writemsg_level("\nCalculating dependencies ")
2151 - success = resolver._complete_graph()
2152 - writemsg_level("\b\b... done!\n")
2153 - resolver.display_problems()
2154 - if not success:
2155 - return 1
2156 - if unresolved_deps():
2157 - return 1
2158 -
2159 - graph = resolver.digraph.copy()
2160 - required_pkgs_total = 0
2161 - for node in graph:
2162 - if isinstance(node, Package):
2163 - required_pkgs_total += 1
2164 - cleanlist = create_cleanlist()
2165 - if not cleanlist:
2166 - return 0
2167 - clean_set = set(cleanlist)
2168 -
2169 - # Use a topological sort to create an unmerge order such that
2170 - 		# each package is unmerged before its dependencies. This is
2171 - # necessary to avoid breaking things that may need to run
2172 - # during pkg_prerm or pkg_postrm phases.
2173 -
2174 - # Create a new graph to account for dependencies between the
2175 - # packages being unmerged.
2176 - graph = digraph()
2177 - del cleanlist[:]
2178 -
2179 - dep_keys = ["DEPEND", "RDEPEND", "PDEPEND"]
2180 - runtime = UnmergeDepPriority(runtime=True)
2181 - runtime_post = UnmergeDepPriority(runtime_post=True)
2182 - buildtime = UnmergeDepPriority(buildtime=True)
2183 - priority_map = {
2184 - "RDEPEND": runtime,
2185 - "PDEPEND": runtime_post,
2186 - "DEPEND": buildtime,
2187 - }
2188 -
2189 - for node in clean_set:
2190 - graph.add(node, None)
2191 - mydeps = []
2192 - node_use = node.metadata["USE"].split()
2193 - for dep_type in dep_keys:
2194 - depstr = node.metadata[dep_type]
2195 - if not depstr:
2196 - continue
2197 - try:
2198 - portage.dep._dep_check_strict = False
2199 - success, atoms = portage.dep_check(depstr, None, settings,
2200 - myuse=node_use, trees=resolver._graph_trees,
2201 - myroot=myroot)
2202 - finally:
2203 - portage.dep._dep_check_strict = True
2204 - if not success:
2205 - # Ignore invalid deps of packages that will
2206 - # be uninstalled anyway.
2207 - continue
2208 -
2209 - priority = priority_map[dep_type]
2210 - for atom in atoms:
2211 - if not isinstance(atom, portage.dep.Atom):
2212 - # Ignore invalid atoms returned from dep_check().
2213 - continue
2214 - if atom.blocker:
2215 - continue
2216 - matches = vardb.match_pkgs(atom)
2217 - if not matches:
2218 - continue
2219 - for child_node in matches:
2220 - if child_node in clean_set:
2221 - graph.add(child_node, node, priority=priority)
2222 -
2223 - ordered = True
2224 - if len(graph.order) == len(graph.root_nodes()):
2225 - # If there are no dependencies between packages
2226 - # let unmerge() group them by cat/pn.
2227 - ordered = False
2228 - cleanlist = [pkg.cpv for pkg in graph.order]
2229 - else:
2230 - # Order nodes from lowest to highest overall reference count for
2231 - # optimal root node selection.
2232 - node_refcounts = {}
2233 - for node in graph.order:
2234 - node_refcounts[node] = len(graph.parent_nodes(node))
2235 - def cmp_reference_count(node1, node2):
2236 - return node_refcounts[node1] - node_refcounts[node2]
2237 - graph.order.sort(key=cmp_sort_key(cmp_reference_count))
2238 -
2239 - ignore_priority_range = [None]
2240 - ignore_priority_range.extend(
2241 - xrange(UnmergeDepPriority.MIN, UnmergeDepPriority.MAX + 1))
2242 - while not graph.empty():
2243 - for ignore_priority in ignore_priority_range:
2244 - nodes = graph.root_nodes(ignore_priority=ignore_priority)
2245 - if nodes:
2246 - break
2247 - if not nodes:
2248 - raise AssertionError("no root nodes")
2249 - if ignore_priority is not None:
2250 - # Some deps have been dropped due to circular dependencies,
2251 - 					# so only pop one node in order to minimize the number that
2252 - # are dropped.
2253 - del nodes[1:]
2254 - for node in nodes:
2255 - graph.remove(node)
2256 - cleanlist.append(node.cpv)
2257 -
2258 - unmerge(root_config, myopts, "unmerge", cleanlist,
2259 - ldpath_mtimes, ordered=ordered)
2260 -
2261 - if action == "prune":
2262 - return
2263 -
2264 - if not cleanlist and "--quiet" in myopts:
2265 - return
2266 -
2267 - print "Packages installed: "+str(len(vardb.cpv_all()))
2268 - print "Packages in world: " + \
2269 - str(len(root_config.sets["world"].getAtoms()))
2270 - print "Packages in system: " + \
2271 - str(len(root_config.sets["system"].getAtoms()))
2272 - print "Required packages: "+str(required_pkgs_total)
2273 - if "--pretend" in myopts:
2274 - print "Number to remove: "+str(len(cleanlist))
2275 - else:
2276 - print "Number removed: "+str(len(cleanlist))
2277 -
2278 -def action_build(settings, trees, mtimedb,
2279 - myopts, myaction, myfiles, spinner):
2280 -
2281 - # validate the state of the resume data
2282 - # so that we can make assumptions later.
2283 - for k in ("resume", "resume_backup"):
2284 - if k not in mtimedb:
2285 - continue
2286 - resume_data = mtimedb[k]
2287 - if not isinstance(resume_data, dict):
2288 - del mtimedb[k]
2289 - continue
2290 - mergelist = resume_data.get("mergelist")
2291 - if not isinstance(mergelist, list):
2292 - del mtimedb[k]
2293 - continue
2294 - for x in mergelist:
2295 - if not (isinstance(x, list) and len(x) == 4):
2296 - continue
2297 - pkg_type, pkg_root, pkg_key, pkg_action = x
2298 - if pkg_root not in trees:
2299 - # Current $ROOT setting differs,
2300 - # so the list must be stale.
2301 - mergelist = None
2302 - break
2303 - if not mergelist:
2304 - del mtimedb[k]
2305 - continue
2306 - resume_opts = resume_data.get("myopts")
2307 - if not isinstance(resume_opts, (dict, list)):
2308 - del mtimedb[k]
2309 - continue
2310 - favorites = resume_data.get("favorites")
2311 - if not isinstance(favorites, list):
2312 - del mtimedb[k]
2313 - continue
2314 -
2315 - resume = False
2316 - if "--resume" in myopts and \
2317 - ("resume" in mtimedb or
2318 - "resume_backup" in mtimedb):
2319 - resume = True
2320 - if "resume" not in mtimedb:
2321 - mtimedb["resume"] = mtimedb["resume_backup"]
2322 - del mtimedb["resume_backup"]
2323 - mtimedb.commit()
2324 - # "myopts" is a list for backward compatibility.
2325 - resume_opts = mtimedb["resume"].get("myopts", [])
2326 - if isinstance(resume_opts, list):
2327 - resume_opts = dict((k,True) for k in resume_opts)
2328 - for opt in ("--ask", "--color", "--skipfirst", "--tree"):
2329 - resume_opts.pop(opt, None)
2330 -
2331 - # Current options always override resume_opts.
2332 - resume_opts.update(myopts)
2333 - myopts.clear()
2334 - myopts.update(resume_opts)
2335 -
2336 - if "--debug" in myopts:
2337 - writemsg_level("myopts %s\n" % (myopts,))
2338 -
2339 - # Adjust config according to options of the command being resumed.
2340 - for myroot in trees:
2341 - mysettings = trees[myroot]["vartree"].settings
2342 - mysettings.unlock()
2343 - adjust_config(myopts, mysettings)
2344 - mysettings.lock()
2345 - del myroot, mysettings
2346 -
2347 - ldpath_mtimes = mtimedb["ldpath"]
2348 - favorites=[]
2349 - merge_count = 0
2350 - buildpkgonly = "--buildpkgonly" in myopts
2351 - pretend = "--pretend" in myopts
2352 - fetchonly = "--fetchonly" in myopts or "--fetch-all-uri" in myopts
2353 - ask = "--ask" in myopts
2354 - nodeps = "--nodeps" in myopts
2355 - oneshot = "--oneshot" in myopts or "--onlydeps" in myopts
2356 - tree = "--tree" in myopts
2357 - if nodeps and tree:
2358 - tree = False
2359 - del myopts["--tree"]
2360 - portage.writemsg(colorize("WARN", " * ") + \
2361 - "--tree is broken with --nodeps. Disabling...\n")
2362 - debug = "--debug" in myopts
2363 - verbose = "--verbose" in myopts
2364 - quiet = "--quiet" in myopts
2365 - if pretend or fetchonly:
2366 - # make the mtimedb readonly
2367 - mtimedb.filename = None
2368 - if '--digest' in myopts or 'digest' in settings.features:
2369 - if '--digest' in myopts:
2370 - msg = "The --digest option"
2371 - else:
2372 - msg = "The FEATURES=digest setting"
2373 -
2374 - msg += " can prevent corruption from being" + \
2375 - " noticed. The `repoman manifest` command is the preferred" + \
2376 - " way to generate manifests and it is capable of doing an" + \
2377 - " entire repository or category at once."
2378 - prefix = bad(" * ")
2379 - writemsg(prefix + "\n")
2380 - from textwrap import wrap
2381 - for line in wrap(msg, 72):
2382 - writemsg("%s%s\n" % (prefix, line))
2383 - writemsg(prefix + "\n")
2384 -
2385 - if "--quiet" not in myopts and \
2386 - ("--pretend" in myopts or "--ask" in myopts or \
2387 - "--tree" in myopts or "--verbose" in myopts):
2388 - action = ""
2389 - if "--fetchonly" in myopts or "--fetch-all-uri" in myopts:
2390 - action = "fetched"
2391 - elif "--buildpkgonly" in myopts:
2392 - action = "built"
2393 - else:
2394 - action = "merged"
2395 - if "--tree" in myopts and action != "fetched": # Tree doesn't work with fetching
2396 - print
2397 - print darkgreen("These are the packages that would be %s, in reverse order:") % action
2398 - print
2399 - else:
2400 - print
2401 - print darkgreen("These are the packages that would be %s, in order:") % action
2402 - print
2403 -
2404 - show_spinner = "--quiet" not in myopts and "--nodeps" not in myopts
2405 - if not show_spinner:
2406 - spinner.update = spinner.update_quiet
2407 -
2408 - if resume:
2409 - favorites = mtimedb["resume"].get("favorites")
2410 - if not isinstance(favorites, list):
2411 - favorites = []
2412 -
2413 - if show_spinner:
2414 - print "Calculating dependencies ",
2415 - myparams = create_depgraph_params(myopts, myaction)
2416 -
2417 - resume_data = mtimedb["resume"]
2418 - mergelist = resume_data["mergelist"]
2419 - if mergelist and "--skipfirst" in myopts:
2420 - for i, task in enumerate(mergelist):
2421 - if isinstance(task, list) and \
2422 - task and task[-1] == "merge":
2423 - del mergelist[i]
2424 - break
2425 -
2426 - success = False
2427 - mydepgraph = None
2428 - try:
2429 - success, mydepgraph, dropped_tasks = resume_depgraph(
2430 - settings, trees, mtimedb, myopts, myparams, spinner)
2431 - except (portage.exception.PackageNotFound,
2432 - depgraph.UnsatisfiedResumeDep), e:
2433 - if isinstance(e, depgraph.UnsatisfiedResumeDep):
2434 - mydepgraph = e.depgraph
2435 - if show_spinner:
2436 - print
2437 - from textwrap import wrap
2438 - from portage.output import EOutput
2439 - out = EOutput()
2440 -
2441 - resume_data = mtimedb["resume"]
2442 - mergelist = resume_data.get("mergelist")
2443 - if not isinstance(mergelist, list):
2444 - mergelist = []
2445 - if mergelist and debug or (verbose and not quiet):
2446 - out.eerror("Invalid resume list:")
2447 - out.eerror("")
2448 - indent = " "
2449 - for task in mergelist:
2450 - if isinstance(task, list):
2451 - out.eerror(indent + str(tuple(task)))
2452 - out.eerror("")
2453 -
2454 - if isinstance(e, depgraph.UnsatisfiedResumeDep):
2455 - out.eerror("One or more packages are either masked or " + \
2456 - "have missing dependencies:")
2457 - out.eerror("")
2458 - indent = " "
2459 - for dep in e.value:
2460 - if dep.atom is None:
2461 - out.eerror(indent + "Masked package:")
2462 - out.eerror(2 * indent + str(dep.parent))
2463 - out.eerror("")
2464 - else:
2465 - out.eerror(indent + str(dep.atom) + " pulled in by:")
2466 - out.eerror(2 * indent + str(dep.parent))
2467 - out.eerror("")
2468 - msg = "The resume list contains packages " + \
2469 - "that are either masked or have " + \
2470 - "unsatisfied dependencies. " + \
2471 - "Please restart/continue " + \
2472 - "the operation manually, or use --skipfirst " + \
2473 - "to skip the first package in the list and " + \
2474 - "any other packages that may be " + \
2475 - "masked or have missing dependencies."
2476 - for line in wrap(msg, 72):
2477 - out.eerror(line)
2478 - elif isinstance(e, portage.exception.PackageNotFound):
2479 - out.eerror("An expected package is " + \
2480 - "not available: %s" % str(e))
2481 - out.eerror("")
2482 - msg = "The resume list contains one or more " + \
2483 - "packages that are no longer " + \
2484 - "available. Please restart/continue " + \
2485 - "the operation manually."
2486 - for line in wrap(msg, 72):
2487 - out.eerror(line)
2488 - else:
2489 - if show_spinner:
2490 - print "\b\b... done!"
2491 -
2492 - if success:
2493 - if dropped_tasks:
2494 - portage.writemsg("!!! One or more packages have been " + \
2495 - "dropped due to\n" + \
2496 - "!!! masking or unsatisfied dependencies:\n\n",
2497 - noiselevel=-1)
2498 - for task in dropped_tasks:
2499 - portage.writemsg(" " + str(task) + "\n", noiselevel=-1)
2500 - portage.writemsg("\n", noiselevel=-1)
2501 - del dropped_tasks
2502 - else:
2503 - if mydepgraph is not None:
2504 - mydepgraph.display_problems()
2505 - if not (ask or pretend):
2506 - # delete the current list and also the backup
2507 - # since it's probably stale too.
2508 - for k in ("resume", "resume_backup"):
2509 - mtimedb.pop(k, None)
2510 - mtimedb.commit()
2511 -
2512 - return 1
2513 - else:
2514 - if ("--resume" in myopts):
2515 - print darkgreen("emerge: It seems we have nothing to resume...")
2516 - return os.EX_OK
2517 -
2518 - myparams = create_depgraph_params(myopts, myaction)
2519 - if "--quiet" not in myopts and "--nodeps" not in myopts:
2520 - print "Calculating dependencies ",
2521 - sys.stdout.flush()
2522 - mydepgraph = depgraph(settings, trees, myopts, myparams, spinner)
2523 - try:
2524 - retval, favorites = mydepgraph.select_files(myfiles)
2525 - except portage.exception.PackageNotFound, e:
2526 - portage.writemsg("\n!!! %s\n" % str(e), noiselevel=-1)
2527 - return 1
2528 - except portage.exception.PackageSetNotFound, e:
2529 - root_config = trees[settings["ROOT"]]["root_config"]
2530 - display_missing_pkg_set(root_config, e.value)
2531 - return 1
2532 - if show_spinner:
2533 - print "\b\b... done!"
2534 - if not retval:
2535 - mydepgraph.display_problems()
2536 - return 1
2537 -
2538 - if "--pretend" not in myopts and \
2539 - ("--ask" in myopts or "--tree" in myopts or \
2540 - "--verbose" in myopts) and \
2541 - not ("--quiet" in myopts and "--ask" not in myopts):
2542 - if "--resume" in myopts:
2543 - mymergelist = mydepgraph.altlist()
2544 - if len(mymergelist) == 0:
2545 - print colorize("INFORM", "emerge: It seems we have nothing to resume...")
2546 - return os.EX_OK
2547 - favorites = mtimedb["resume"]["favorites"]
2548 - retval = mydepgraph.display(
2549 - mydepgraph.altlist(reversed=tree),
2550 - favorites=favorites)
2551 - mydepgraph.display_problems()
2552 - if retval != os.EX_OK:
2553 - return retval
2554 - prompt="Would you like to resume merging these packages?"
2555 - else:
2556 - retval = mydepgraph.display(
2557 - mydepgraph.altlist(reversed=("--tree" in myopts)),
2558 - favorites=favorites)
2559 - mydepgraph.display_problems()
2560 - if retval != os.EX_OK:
2561 - return retval
2562 - mergecount=0
2563 - for x in mydepgraph.altlist():
2564 - if isinstance(x, Package) and x.operation == "merge":
2565 - mergecount += 1
2566 -
2567 - if mergecount==0:
2568 - sets = trees[settings["ROOT"]]["root_config"].sets
2569 - world_candidates = None
2570 - if "--noreplace" in myopts and \
2571 - not oneshot and favorites:
2572 - # Sets that are not world candidates are filtered
2573 - # out here since the favorites list needs to be
2574 - # complete for depgraph.loadResumeCommand() to
2575 - # operate correctly.
2576 - world_candidates = [x for x in favorites \
2577 - if not (x.startswith(SETPREFIX) and \
2578 - not sets[x[1:]].world_candidate)]
2579 - if "--noreplace" in myopts and \
2580 - not oneshot and world_candidates:
2581 - print
2582 - for x in world_candidates:
2583 - print " %s %s" % (good("*"), x)
2584 - prompt="Would you like to add these packages to your world favorites?"
2585 - elif settings["AUTOCLEAN"] and "yes"==settings["AUTOCLEAN"]:
2586 - prompt="Nothing to merge; would you like to auto-clean packages?"
2587 - else:
2588 - print
2589 - print "Nothing to merge; quitting."
2590 - print
2591 - return os.EX_OK
2592 - elif "--fetchonly" in myopts or "--fetch-all-uri" in myopts:
2593 - prompt="Would you like to fetch the source files for these packages?"
2594 - else:
2595 - prompt="Would you like to merge these packages?"
2596 - print
2597 - if "--ask" in myopts and userquery(prompt) == "No":
2598 - print
2599 - print "Quitting."
2600 - print
2601 - return os.EX_OK
2602 - # Don't ask again (e.g. when auto-cleaning packages after merge)
2603 - myopts.pop("--ask", None)
2604 -
2605 - if ("--pretend" in myopts) and not ("--fetchonly" in myopts or "--fetch-all-uri" in myopts):
2606 - if ("--resume" in myopts):
2607 - mymergelist = mydepgraph.altlist()
2608 - if len(mymergelist) == 0:
2609 - print colorize("INFORM", "emerge: It seems we have nothing to resume...")
2610 - return os.EX_OK
2611 - favorites = mtimedb["resume"]["favorites"]
2612 - retval = mydepgraph.display(
2613 - mydepgraph.altlist(reversed=tree),
2614 - favorites=favorites)
2615 - mydepgraph.display_problems()
2616 - if retval != os.EX_OK:
2617 - return retval
2618 - else:
2619 - retval = mydepgraph.display(
2620 - mydepgraph.altlist(reversed=("--tree" in myopts)),
2621 - favorites=favorites)
2622 - mydepgraph.display_problems()
2623 - if retval != os.EX_OK:
2624 - return retval
2625 - if "--buildpkgonly" in myopts:
2626 - graph_copy = mydepgraph.digraph.clone()
2627 - removed_nodes = set()
2628 - for node in graph_copy:
2629 - if not isinstance(node, Package) or \
2630 - node.operation == "nomerge":
2631 - removed_nodes.add(node)
2632 - graph_copy.difference_update(removed_nodes)
2633 - if not graph_copy.hasallzeros(ignore_priority = \
2634 - DepPrioritySatisfiedRange.ignore_medium):
2635 - print "\n!!! --buildpkgonly requires all dependencies to be merged."
2636 - print "!!! You have to merge the dependencies before you can build this package.\n"
2637 - return 1
2638 - else:
2639 - if "--buildpkgonly" in myopts:
2640 - graph_copy = mydepgraph.digraph.clone()
2641 - removed_nodes = set()
2642 - for node in graph_copy:
2643 - if not isinstance(node, Package) or \
2644 - node.operation == "nomerge":
2645 - removed_nodes.add(node)
2646 - graph_copy.difference_update(removed_nodes)
2647 - if not graph_copy.hasallzeros(ignore_priority = \
2648 - DepPrioritySatisfiedRange.ignore_medium):
2649 - print "\n!!! --buildpkgonly requires all dependencies to be merged."
2650 - print "!!! Cannot merge requested packages. Merge deps and try again.\n"
2651 - return 1
2652 -
2653 - if ("--resume" in myopts):
2654 - favorites=mtimedb["resume"]["favorites"]
2655 - mymergelist = mydepgraph.altlist()
2656 - mydepgraph.break_refs(mymergelist)
2657 - mergetask = Scheduler(settings, trees, mtimedb, myopts,
2658 - spinner, mymergelist, favorites, mydepgraph.schedulerGraph())
2659 - del mydepgraph, mymergelist
2660 - clear_caches(trees)
2661 -
2662 - retval = mergetask.merge()
2663 - merge_count = mergetask.curval
2664 - else:
2665 - if "resume" in mtimedb and \
2666 - "mergelist" in mtimedb["resume"] and \
2667 - len(mtimedb["resume"]["mergelist"]) > 1:
2668 - mtimedb["resume_backup"] = mtimedb["resume"]
2669 - del mtimedb["resume"]
2670 - mtimedb.commit()
2671 - mtimedb["resume"]={}
2672 - # Stored as a dict starting with portage-2.1.6_rc1, and supported
2673 - # by >=portage-2.1.3_rc8. Versions <portage-2.1.3_rc8 only support
2674 - # a list type for options.
2675 - mtimedb["resume"]["myopts"] = myopts.copy()
2676 -
2677 - # Convert Atom instances to plain str.
2678 - mtimedb["resume"]["favorites"] = [str(x) for x in favorites]
2679 -
2680 - pkglist = mydepgraph.altlist()
2681 - mydepgraph.saveNomergeFavorites()
2682 - mydepgraph.break_refs(pkglist)
2683 - mergetask = Scheduler(settings, trees, mtimedb, myopts,
2684 - spinner, pkglist, favorites, mydepgraph.schedulerGraph())
2685 - del mydepgraph, pkglist
2686 - clear_caches(trees)
2687 -
2688 - retval = mergetask.merge()
2689 - merge_count = mergetask.curval
2690 -
2691 - if retval == os.EX_OK and not (buildpkgonly or fetchonly or pretend):
2692 - if "yes" == settings.get("AUTOCLEAN"):
2693 - portage.writemsg_stdout(">>> Auto-cleaning packages...\n")
2694 - unmerge(trees[settings["ROOT"]]["root_config"],
2695 - myopts, "clean", [],
2696 - ldpath_mtimes, autoclean=1)
2697 - else:
2698 - portage.writemsg_stdout(colorize("WARN", "WARNING:")
2699 - + " AUTOCLEAN is disabled. This can cause serious"
2700 - + " problems due to overlapping packages.\n")
2701 - trees[settings["ROOT"]]["vartree"].dbapi.plib_registry.pruneNonExisting()
2702 -
2703 - return retval
2704 -
2705 def multiple_actions(action1, action2):
2706 sys.stderr.write("\n!!! Multiple actions requested... Please choose one only.\n")
2707 sys.stderr.write("!!! '%s' or '%s'\n\n" % (action1, action2))
2708 @@ -3227,120 +613,6 @@
2709 settings = trees[myroot]["vartree"].settings
2710 settings.validate()
2711
2712 -def load_emerge_config(trees=None):
2713 - kwargs = {}
2714 - for k, envvar in (("config_root", "PORTAGE_CONFIGROOT"), ("target_root", "ROOT")):
2715 - v = os.environ.get(envvar, None)
2716 - if v and v.strip():
2717 - kwargs[k] = v
2718 - trees = portage.create_trees(trees=trees, **kwargs)
2719 -
2720 - for root, root_trees in trees.iteritems():
2721 - settings = root_trees["vartree"].settings
2722 - setconfig = load_default_config(settings, root_trees)
2723 - root_trees["root_config"] = RootConfig(settings, root_trees, setconfig)
2724 -
2725 - settings = trees["/"]["vartree"].settings
2726 -
2727 - for myroot in trees:
2728 - if myroot != "/":
2729 - settings = trees[myroot]["vartree"].settings
2730 - break
2731 -
2732 - mtimedbfile = os.path.join("/", portage.CACHE_PATH.lstrip(os.path.sep), "mtimedb")
2733 - mtimedb = portage.MtimeDB(mtimedbfile)
2734 -
2735 - return settings, trees, mtimedb
2736 -
2737 -def adjust_config(myopts, settings):
2738 - """Make emerge specific adjustments to the config."""
2739 -
2740 - # To enhance usability, make some vars case insensitive by forcing them to
2741 - # lower case.
2742 - for myvar in ("AUTOCLEAN", "NOCOLOR"):
2743 - if myvar in settings:
2744 - settings[myvar] = settings[myvar].lower()
2745 - settings.backup_changes(myvar)
2746 - del myvar
2747 -
2748 - # Kill noauto as it will break merges otherwise.
2749 - if "noauto" in settings.features:
2750 - settings.features.remove('noauto')
2751 - settings['FEATURES'] = ' '.join(sorted(settings.features))
2752 - settings.backup_changes("FEATURES")
2753 -
2754 - CLEAN_DELAY = 5
2755 - try:
2756 - CLEAN_DELAY = int(settings.get("CLEAN_DELAY", str(CLEAN_DELAY)))
2757 - except ValueError, e:
2758 - portage.writemsg("!!! %s\n" % str(e), noiselevel=-1)
2759 - portage.writemsg("!!! Unable to parse integer: CLEAN_DELAY='%s'\n" % \
2760 - settings["CLEAN_DELAY"], noiselevel=-1)
2761 - settings["CLEAN_DELAY"] = str(CLEAN_DELAY)
2762 - settings.backup_changes("CLEAN_DELAY")
2763 -
2764 - EMERGE_WARNING_DELAY = 10
2765 - try:
2766 - EMERGE_WARNING_DELAY = int(settings.get(
2767 - "EMERGE_WARNING_DELAY", str(EMERGE_WARNING_DELAY)))
2768 - except ValueError, e:
2769 - portage.writemsg("!!! %s\n" % str(e), noiselevel=-1)
2770 - portage.writemsg("!!! Unable to parse integer: EMERGE_WARNING_DELAY='%s'\n" % \
2771 - settings["EMERGE_WARNING_DELAY"], noiselevel=-1)
2772 - settings["EMERGE_WARNING_DELAY"] = str(EMERGE_WARNING_DELAY)
2773 - settings.backup_changes("EMERGE_WARNING_DELAY")
2774 -
2775 - if "--quiet" in myopts:
2776 - settings["PORTAGE_QUIET"]="1"
2777 - settings.backup_changes("PORTAGE_QUIET")
2778 -
2779 - if "--verbose" in myopts:
2780 - settings["PORTAGE_VERBOSE"] = "1"
2781 - settings.backup_changes("PORTAGE_VERBOSE")
2782 -
2783 - # Set so that configs will be merged regardless of remembered status
2784 - if ("--noconfmem" in myopts):
2785 - settings["NOCONFMEM"]="1"
2786 - settings.backup_changes("NOCONFMEM")
2787 -
2788 - # Set various debug markers... They should be merged somehow.
2789 - PORTAGE_DEBUG = 0
2790 - try:
2791 - PORTAGE_DEBUG = int(settings.get("PORTAGE_DEBUG", str(PORTAGE_DEBUG)))
2792 - if PORTAGE_DEBUG not in (0, 1):
2793 - portage.writemsg("!!! Invalid value: PORTAGE_DEBUG='%i'\n" % \
2794 - PORTAGE_DEBUG, noiselevel=-1)
2795 - portage.writemsg("!!! PORTAGE_DEBUG must be either 0 or 1\n",
2796 - noiselevel=-1)
2797 - PORTAGE_DEBUG = 0
2798 - except ValueError, e:
2799 - portage.writemsg("!!! %s\n" % str(e), noiselevel=-1)
2800 - portage.writemsg("!!! Unable to parse integer: PORTAGE_DEBUG='%s'\n" %\
2801 - settings["PORTAGE_DEBUG"], noiselevel=-1)
2802 - del e
2803 - if "--debug" in myopts:
2804 - PORTAGE_DEBUG = 1
2805 - settings["PORTAGE_DEBUG"] = str(PORTAGE_DEBUG)
2806 - settings.backup_changes("PORTAGE_DEBUG")
2807 -
2808 - if settings.get("NOCOLOR") not in ("yes","true"):
2809 - portage.output.havecolor = 1
2810 -
2811 - """The explicit --color < y | n > option overrides the NOCOLOR environment
2812 - variable and stdout auto-detection."""
2813 - if "--color" in myopts:
2814 - if "y" == myopts["--color"]:
2815 - portage.output.havecolor = 1
2816 - settings["NOCOLOR"] = "false"
2817 - else:
2818 - portage.output.havecolor = 0
2819 - settings["NOCOLOR"] = "true"
2820 - settings.backup_changes("NOCOLOR")
2821 - elif not sys.stdout.isatty() and settings.get("NOCOLOR") != "no":
2822 - portage.output.havecolor = 0
2823 - settings["NOCOLOR"] = "true"
2824 - settings.backup_changes("NOCOLOR")
2825 -
2826 def apply_priorities(settings):
2827 ionice(settings)
2828 nice(settings)
2829 @@ -3378,21 +650,6 @@
2830 out.eerror("PORTAGE_IONICE_COMMAND returned %d" % (rval,))
2831 out.eerror("See the make.conf(5) man page for PORTAGE_IONICE_COMMAND usage instructions.")
2832
2833 -def display_missing_pkg_set(root_config, set_name):
2834 -
2835 - msg = []
2836 - msg.append(("emerge: There are no sets to satisfy '%s'. " + \
2837 - "The following sets exist:") % \
2838 - colorize("INFORM", set_name))
2839 - msg.append("")
2840 -
2841 - for s in sorted(root_config.sets):
2842 - msg.append(" %s" % s)
2843 - msg.append("")
2844 -
2845 - writemsg_level("".join("%s\n" % l for l in msg),
2846 - level=logging.ERROR, noiselevel=-1)
2847 -
2848 def expand_set_arguments(myfiles, myaction, root_config):
2849 retval = os.EX_OK
2850 setconfig = root_config.setconfig
2851
2852 Copied: main/branches/prefix/pym/_emerge/actions.py (from rev 13673, main/trunk/pym/_emerge/actions.py)
2853 ===================================================================
2854 --- main/branches/prefix/pym/_emerge/actions.py (rev 0)
2855 +++ main/branches/prefix/pym/_emerge/actions.py 2009-06-27 14:24:48 UTC (rev 13711)
2856 @@ -0,0 +1,2775 @@
2857 +import commands
2858 +import errno
2859 +import logging
2860 +import os
2861 +import platform
2862 +import pwd
2863 +import re
2864 +import shlex
2865 +import signal
2866 +import socket
2867 +import stat
2868 +import sys
2869 +import textwrap
2870 +import time
2871 +from itertools import chain, izip
2872 +
2873 +# for an explanation on this logic, see pym/_emerge/__init__.py
2874 +import os
2875 +import sys
2876 +if os.environ.__contains__("PORTAGE_PYTHONPATH"):
2877 + sys.path.insert(0, os.environ["PORTAGE_PYTHONPATH"])
2878 +else:
2879 + sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "pym"))
2880 +import portage
2881 +
2882 +from portage import digraph
2883 +from portage.cache.cache_errors import CacheError
2884 +from portage.const import NEWS_LIB_PATH
2885 +from portage.output import blue, bold, colorize, create_color_func, darkgreen, \
2886 + red, yellow
2887 +good = create_color_func("GOOD")
2888 +bad = create_color_func("BAD")
2889 +from portage.sets import load_default_config, SETPREFIX
2890 +from portage.sets.base import InternalPackageSet
2891 +from portage.util import cmp_sort_key, writemsg, writemsg_level
2892 +
2893 +from _emerge.clear_caches import clear_caches
2894 +from _emerge.countdown import countdown
2895 +from _emerge.create_depgraph_params import create_depgraph_params
2896 +from _emerge.Dependency import Dependency
2897 +from _emerge.depgraph import depgraph, resume_depgraph
2898 +from _emerge.DepPrioritySatisfiedRange import DepPrioritySatisfiedRange
2899 +from _emerge.emergelog import emergelog
2900 +from _emerge.is_valid_package_atom import is_valid_package_atom
2901 +from _emerge.MetadataRegen import MetadataRegen
2902 +from _emerge.Package import Package
2903 +from _emerge.ProgressHandler import ProgressHandler
2904 +from _emerge.RootConfig import RootConfig
2905 +from _emerge.Scheduler import Scheduler
2906 +from _emerge.search import search
2907 +from _emerge.SetArg import SetArg
2908 +from _emerge.show_invalid_depstring_notice import show_invalid_depstring_notice
2909 +from _emerge.unmerge import unmerge
2910 +from _emerge.UnmergeDepPriority import UnmergeDepPriority
2911 +from _emerge.UseFlagDisplay import UseFlagDisplay
2912 +from _emerge.userquery import userquery
2913 +
2914 +def action_build(settings, trees, mtimedb,
2915 + myopts, myaction, myfiles, spinner):
2916 +
2917 + # validate the state of the resume data
2918 + # so that we can make assumptions later.
2919 + for k in ("resume", "resume_backup"):
2920 + if k not in mtimedb:
2921 + continue
2922 + resume_data = mtimedb[k]
2923 + if not isinstance(resume_data, dict):
2924 + del mtimedb[k]
2925 + continue
2926 + mergelist = resume_data.get("mergelist")
2927 + if not isinstance(mergelist, list):
2928 + del mtimedb[k]
2929 + continue
2930 + for x in mergelist:
2931 + if not (isinstance(x, list) and len(x) == 4):
2932 + continue
2933 + pkg_type, pkg_root, pkg_key, pkg_action = x
2934 + if pkg_root not in trees:
2935 + # Current $ROOT setting differs,
2936 + # so the list must be stale.
2937 + mergelist = None
2938 + break
2939 + if not mergelist:
2940 + del mtimedb[k]
2941 + continue
2942 + resume_opts = resume_data.get("myopts")
2943 + if not isinstance(resume_opts, (dict, list)):
2944 + del mtimedb[k]
2945 + continue
2946 + favorites = resume_data.get("favorites")
2947 + if not isinstance(favorites, list):
2948 + del mtimedb[k]
2949 + continue
2950 +
2951 + resume = False
2952 + if "--resume" in myopts and \
2953 + ("resume" in mtimedb or
2954 + "resume_backup" in mtimedb):
2955 + resume = True
2956 + if "resume" not in mtimedb:
2957 + mtimedb["resume"] = mtimedb["resume_backup"]
2958 + del mtimedb["resume_backup"]
2959 + mtimedb.commit()
2960 + # "myopts" is a list for backward compatibility.
2961 + resume_opts = mtimedb["resume"].get("myopts", [])
2962 + if isinstance(resume_opts, list):
2963 + resume_opts = dict((k,True) for k in resume_opts)
2964 + for opt in ("--ask", "--color", "--skipfirst", "--tree"):
2965 + resume_opts.pop(opt, None)
2966 +
2967 + # Current options always override resume_opts.
2968 + resume_opts.update(myopts)
2969 + myopts.clear()
2970 + myopts.update(resume_opts)
2971 +
2972 + if "--debug" in myopts:
2973 + writemsg_level("myopts %s\n" % (myopts,))
2974 +
2975 + # Adjust config according to options of the command being resumed.
2976 + for myroot in trees:
2977 + mysettings = trees[myroot]["vartree"].settings
2978 + mysettings.unlock()
2979 + adjust_config(myopts, mysettings)
2980 + mysettings.lock()
2981 + del myroot, mysettings
2982 +
2983 + ldpath_mtimes = mtimedb["ldpath"]
2984 + favorites=[]
2985 + merge_count = 0
2986 + buildpkgonly = "--buildpkgonly" in myopts
2987 + pretend = "--pretend" in myopts
2988 + fetchonly = "--fetchonly" in myopts or "--fetch-all-uri" in myopts
2989 + ask = "--ask" in myopts
2990 + nodeps = "--nodeps" in myopts
2991 + oneshot = "--oneshot" in myopts or "--onlydeps" in myopts
2992 + tree = "--tree" in myopts
2993 + if nodeps and tree:
2994 + tree = False
2995 + del myopts["--tree"]
2996 + portage.writemsg(colorize("WARN", " * ") + \
2997 + "--tree is broken with --nodeps. Disabling...\n")
2998 + debug = "--debug" in myopts
2999 + verbose = "--verbose" in myopts
3000 + quiet = "--quiet" in myopts
3001 + if pretend or fetchonly:
3002 + # make the mtimedb readonly
3003 + mtimedb.filename = None
3004 + if '--digest' in myopts or 'digest' in settings.features:
3005 + if '--digest' in myopts:
3006 + msg = "The --digest option"
3007 + else:
3008 + msg = "The FEATURES=digest setting"
3009 +
3010 + msg += " can prevent corruption from being" + \
3011 + " noticed. The `repoman manifest` command is the preferred" + \
3012 + " way to generate manifests and it is capable of doing an" + \
3013 + " entire repository or category at once."
3014 + prefix = bad(" * ")
3015 + writemsg(prefix + "\n")
3016 + from textwrap import wrap
3017 + for line in wrap(msg, 72):
3018 + writemsg("%s%s\n" % (prefix, line))
3019 + writemsg(prefix + "\n")
3020 +
3021 + if "--quiet" not in myopts and \
3022 + ("--pretend" in myopts or "--ask" in myopts or \
3023 + "--tree" in myopts or "--verbose" in myopts):
3024 + action = ""
3025 + if "--fetchonly" in myopts or "--fetch-all-uri" in myopts:
3026 + action = "fetched"
3027 + elif "--buildpkgonly" in myopts:
3028 + action = "built"
3029 + else:
3030 + action = "merged"
3031 + if "--tree" in myopts and action != "fetched": # Tree doesn't work with fetching
3032 + print
3033 + print darkgreen("These are the packages that would be %s, in reverse order:") % action
3034 + print
3035 + else:
3036 + print
3037 + print darkgreen("These are the packages that would be %s, in order:") % action
3038 + print
3039 +
3040 + show_spinner = "--quiet" not in myopts and "--nodeps" not in myopts
3041 + if not show_spinner:
3042 + spinner.update = spinner.update_quiet
3043 +
3044 + if resume:
3045 + favorites = mtimedb["resume"].get("favorites")
3046 + if not isinstance(favorites, list):
3047 + favorites = []
3048 +
3049 + if show_spinner:
3050 + print "Calculating dependencies ",
3051 + myparams = create_depgraph_params(myopts, myaction)
3052 +
3053 + resume_data = mtimedb["resume"]
3054 + mergelist = resume_data["mergelist"]
3055 + if mergelist and "--skipfirst" in myopts:
3056 + for i, task in enumerate(mergelist):
3057 + if isinstance(task, list) and \
3058 + task and task[-1] == "merge":
3059 + del mergelist[i]
3060 + break
3061 +
3062 + success = False
3063 + mydepgraph = None
3064 + try:
3065 + success, mydepgraph, dropped_tasks = resume_depgraph(
3066 + settings, trees, mtimedb, myopts, myparams, spinner)
3067 + except (portage.exception.PackageNotFound,
3068 + depgraph.UnsatisfiedResumeDep), e:
3069 + if isinstance(e, depgraph.UnsatisfiedResumeDep):
3070 + mydepgraph = e.depgraph
3071 + if show_spinner:
3072 + print
3073 + from textwrap import wrap
3074 + from portage.output import EOutput
3075 + out = EOutput()
3076 +
3077 + resume_data = mtimedb["resume"]
3078 + mergelist = resume_data.get("mergelist")
3079 + if not isinstance(mergelist, list):
3080 + mergelist = []
3081 + if mergelist and debug or (verbose and not quiet):
3082 + out.eerror("Invalid resume list:")
3083 + out.eerror("")
3084 + indent = " "
3085 + for task in mergelist:
3086 + if isinstance(task, list):
3087 + out.eerror(indent + str(tuple(task)))
3088 + out.eerror("")
3089 +
3090 + if isinstance(e, depgraph.UnsatisfiedResumeDep):
3091 + out.eerror("One or more packages are either masked or " + \
3092 + "have missing dependencies:")
3093 + out.eerror("")
3094 + indent = " "
3095 + for dep in e.value:
3096 + if dep.atom is None:
3097 + out.eerror(indent + "Masked package:")
3098 + out.eerror(2 * indent + str(dep.parent))
3099 + out.eerror("")
3100 + else:
3101 + out.eerror(indent + str(dep.atom) + " pulled in by:")
3102 + out.eerror(2 * indent + str(dep.parent))
3103 + out.eerror("")
3104 + msg = "The resume list contains packages " + \
3105 + "that are either masked or have " + \
3106 + "unsatisfied dependencies. " + \
3107 + "Please restart/continue " + \
3108 + "the operation manually, or use --skipfirst " + \
3109 + "to skip the first package in the list and " + \
3110 + "any other packages that may be " + \
3111 + "masked or have missing dependencies."
3112 + for line in wrap(msg, 72):
3113 + out.eerror(line)
3114 + elif isinstance(e, portage.exception.PackageNotFound):
3115 + out.eerror("An expected package is " + \
3116 + "not available: %s" % str(e))
3117 + out.eerror("")
3118 + msg = "The resume list contains one or more " + \
3119 + "packages that are no longer " + \
3120 + "available. Please restart/continue " + \
3121 + "the operation manually."
3122 + for line in wrap(msg, 72):
3123 + out.eerror(line)
3124 + else:
3125 + if show_spinner:
3126 + print "\b\b... done!"
3127 +
3128 + if success:
3129 + if dropped_tasks:
3130 + portage.writemsg("!!! One or more packages have been " + \
3131 + "dropped due to\n" + \
3132 + "!!! masking or unsatisfied dependencies:\n\n",
3133 + noiselevel=-1)
3134 + for task in dropped_tasks:
3135 + portage.writemsg(" " + str(task) + "\n", noiselevel=-1)
3136 + portage.writemsg("\n", noiselevel=-1)
3137 + del dropped_tasks
3138 + else:
3139 + if mydepgraph is not None:
3140 + mydepgraph.display_problems()
3141 + if not (ask or pretend):
3142 + # delete the current list and also the backup
3143 + # since it's probably stale too.
3144 + for k in ("resume", "resume_backup"):
3145 + mtimedb.pop(k, None)
3146 + mtimedb.commit()
3147 +
3148 + return 1
3149 + else:
3150 + if ("--resume" in myopts):
3151 + print darkgreen("emerge: It seems we have nothing to resume...")
3152 + return os.EX_OK
3153 +
3154 + myparams = create_depgraph_params(myopts, myaction)
3155 + if "--quiet" not in myopts and "--nodeps" not in myopts:
3156 + print "Calculating dependencies ",
3157 + sys.stdout.flush()
3158 + mydepgraph = depgraph(settings, trees, myopts, myparams, spinner)
3159 + try:
3160 + retval, favorites = mydepgraph.select_files(myfiles)
3161 + except portage.exception.PackageNotFound, e:
3162 + portage.writemsg("\n!!! %s\n" % str(e), noiselevel=-1)
3163 + return 1
3164 + except portage.exception.PackageSetNotFound, e:
3165 + root_config = trees[settings["ROOT"]]["root_config"]
3166 + display_missing_pkg_set(root_config, e.value)
3167 + return 1
3168 + if show_spinner:
3169 + print "\b\b... done!"
3170 + if not retval:
3171 + mydepgraph.display_problems()
3172 + return 1
3173 +
3174 + if "--pretend" not in myopts and \
3175 + ("--ask" in myopts or "--tree" in myopts or \
3176 + "--verbose" in myopts) and \
3177 + not ("--quiet" in myopts and "--ask" not in myopts):
3178 + if "--resume" in myopts:
3179 + mymergelist = mydepgraph.altlist()
3180 + if len(mymergelist) == 0:
3181 + print colorize("INFORM", "emerge: It seems we have nothing to resume...")
3182 + return os.EX_OK
3183 + favorites = mtimedb["resume"]["favorites"]
3184 + retval = mydepgraph.display(
3185 + mydepgraph.altlist(reversed=tree),
3186 + favorites=favorites)
3187 + mydepgraph.display_problems()
3188 + if retval != os.EX_OK:
3189 + return retval
3190 + prompt="Would you like to resume merging these packages?"
3191 + else:
3192 + retval = mydepgraph.display(
3193 + mydepgraph.altlist(reversed=("--tree" in myopts)),
3194 + favorites=favorites)
3195 + mydepgraph.display_problems()
3196 + if retval != os.EX_OK:
3197 + return retval
3198 + mergecount=0
3199 + for x in mydepgraph.altlist():
3200 + if isinstance(x, Package) and x.operation == "merge":
3201 + mergecount += 1
3202 +
3203 + if mergecount==0:
3204 + sets = trees[settings["ROOT"]]["root_config"].sets
3205 + world_candidates = None
3206 + if "--noreplace" in myopts and \
3207 + not oneshot and favorites:
3208 + # Sets that are not world candidates are filtered
3209 + # out here since the favorites list needs to be
3210 + # complete for depgraph.loadResumeCommand() to
3211 + # operate correctly.
3212 + world_candidates = [x for x in favorites \
3213 + if not (x.startswith(SETPREFIX) and \
3214 + not sets[x[1:]].world_candidate)]
3215 + if "--noreplace" in myopts and \
3216 + not oneshot and world_candidates:
3217 + print
3218 + for x in world_candidates:
3219 + print " %s %s" % (good("*"), x)
3220 + prompt="Would you like to add these packages to your world favorites?"
3221 + elif settings["AUTOCLEAN"] and "yes"==settings["AUTOCLEAN"]:
3222 + prompt="Nothing to merge; would you like to auto-clean packages?"
3223 + else:
3224 + print
3225 + print "Nothing to merge; quitting."
3226 + print
3227 + return os.EX_OK
3228 + elif "--fetchonly" in myopts or "--fetch-all-uri" in myopts:
3229 + prompt="Would you like to fetch the source files for these packages?"
3230 + else:
3231 + prompt="Would you like to merge these packages?"
3232 + print
3233 + if "--ask" in myopts and userquery(prompt) == "No":
3234 + print
3235 + print "Quitting."
3236 + print
3237 + return os.EX_OK
3238 + # Don't ask again (e.g. when auto-cleaning packages after merge)
3239 + myopts.pop("--ask", None)
3240 +
3241 + if ("--pretend" in myopts) and not ("--fetchonly" in myopts or "--fetch-all-uri" in myopts):
3242 + if ("--resume" in myopts):
3243 + mymergelist = mydepgraph.altlist()
3244 + if len(mymergelist) == 0:
3245 + print colorize("INFORM", "emerge: It seems we have nothing to resume...")
3246 + return os.EX_OK
3247 + favorites = mtimedb["resume"]["favorites"]
3248 + retval = mydepgraph.display(
3249 + mydepgraph.altlist(reversed=tree),
3250 + favorites=favorites)
3251 + mydepgraph.display_problems()
3252 + if retval != os.EX_OK:
3253 + return retval
3254 + else:
3255 + retval = mydepgraph.display(
3256 + mydepgraph.altlist(reversed=("--tree" in myopts)),
3257 + favorites=favorites)
3258 + mydepgraph.display_problems()
3259 + if retval != os.EX_OK:
3260 + return retval
3261 + if "--buildpkgonly" in myopts:
3262 + graph_copy = mydepgraph.digraph.clone()
3263 + removed_nodes = set()
3264 + for node in graph_copy:
3265 + if not isinstance(node, Package) or \
3266 + node.operation == "nomerge":
3267 + removed_nodes.add(node)
3268 + graph_copy.difference_update(removed_nodes)
3269 + if not graph_copy.hasallzeros(ignore_priority = \
3270 + DepPrioritySatisfiedRange.ignore_medium):
3271 + print "\n!!! --buildpkgonly requires all dependencies to be merged."
3272 + print "!!! You have to merge the dependencies before you can build this package.\n"
3273 + return 1
3274 + else:
3275 + if "--buildpkgonly" in myopts:
3276 + graph_copy = mydepgraph.digraph.clone()
3277 + removed_nodes = set()
3278 + for node in graph_copy:
3279 + if not isinstance(node, Package) or \
3280 + node.operation == "nomerge":
3281 + removed_nodes.add(node)
3282 + graph_copy.difference_update(removed_nodes)
3283 + if not graph_copy.hasallzeros(ignore_priority = \
3284 + DepPrioritySatisfiedRange.ignore_medium):
3285 + print "\n!!! --buildpkgonly requires all dependencies to be merged."
3286 + print "!!! Cannot merge requested packages. Merge deps and try again.\n"
3287 + return 1
3288 +
3289 + if ("--resume" in myopts):
3290 + favorites=mtimedb["resume"]["favorites"]
3291 + mymergelist = mydepgraph.altlist()
3292 + mydepgraph.break_refs(mymergelist)
3293 + mergetask = Scheduler(settings, trees, mtimedb, myopts,
3294 + spinner, mymergelist, favorites, mydepgraph.schedulerGraph())
3295 + del mydepgraph, mymergelist
3296 + clear_caches(trees)
3297 +
3298 + retval = mergetask.merge()
3299 + merge_count = mergetask.curval
3300 + else:
3301 + if "resume" in mtimedb and \
3302 + "mergelist" in mtimedb["resume"] and \
3303 + len(mtimedb["resume"]["mergelist"]) > 1:
3304 + mtimedb["resume_backup"] = mtimedb["resume"]
3305 + del mtimedb["resume"]
3306 + mtimedb.commit()
3307 + mtimedb["resume"]={}
3308 + # Stored as a dict starting with portage-2.1.6_rc1, and supported
3309 + # by >=portage-2.1.3_rc8. Versions <portage-2.1.3_rc8 only support
3310 + # a list type for options.
3311 + mtimedb["resume"]["myopts"] = myopts.copy()
3312 +
3313 + # Convert Atom instances to plain str.
3314 + mtimedb["resume"]["favorites"] = [str(x) for x in favorites]
3315 +
3316 + pkglist = mydepgraph.altlist()
3317 + mydepgraph.saveNomergeFavorites()
3318 + mydepgraph.break_refs(pkglist)
3319 + mergetask = Scheduler(settings, trees, mtimedb, myopts,
3320 + spinner, pkglist, favorites, mydepgraph.schedulerGraph())
3321 + del mydepgraph, pkglist
3322 + clear_caches(trees)
3323 +
3324 + retval = mergetask.merge()
3325 + merge_count = mergetask.curval
3326 +
3327 + if retval == os.EX_OK and not (buildpkgonly or fetchonly or pretend):
3328 + if "yes" == settings.get("AUTOCLEAN"):
3329 + portage.writemsg_stdout(">>> Auto-cleaning packages...\n")
3330 + unmerge(trees[settings["ROOT"]]["root_config"],
3331 + myopts, "clean", [],
3332 + ldpath_mtimes, autoclean=1)
3333 + else:
3334 + portage.writemsg_stdout(colorize("WARN", "WARNING:")
3335 + + " AUTOCLEAN is disabled. This can cause serious"
3336 + + " problems due to overlapping packages.\n")
3337 + trees[settings["ROOT"]]["vartree"].dbapi.plib_registry.pruneNonExisting()
3338 +
3339 + return retval
3340 +
3341 +def action_config(settings, trees, myopts, myfiles):
3342 + if len(myfiles) != 1:
3343 + print red("!!! config can only take a single package atom at this time\n")
3344 + sys.exit(1)
3345 + if not is_valid_package_atom(myfiles[0]):
3346 + portage.writemsg("!!! '%s' is not a valid package atom.\n" % myfiles[0],
3347 + noiselevel=-1)
3348 + portage.writemsg("!!! Please check ebuild(5) for full details.\n")
3349 + portage.writemsg("!!! (Did you specify a version but forget to prefix with '='?)\n")
3350 + sys.exit(1)
3351 + print
3352 + try:
3353 + pkgs = trees[settings["ROOT"]]["vartree"].dbapi.match(myfiles[0])
3354 + except portage.exception.AmbiguousPackageName, e:
3355 + # Multiple matches thrown from cpv_expand
3356 + pkgs = e.args[0]
3357 + if len(pkgs) == 0:
3358 + print "No packages found.\n"
3359 + sys.exit(0)
3360 + elif len(pkgs) > 1:
3361 + if "--ask" in myopts:
3362 + options = []
3363 + print "Please select a package to configure:"
3364 + idx = 0
3365 + for pkg in pkgs:
3366 + idx += 1
3367 + options.append(str(idx))
3368 + print options[-1]+") "+pkg
3369 + print "X) Cancel"
3370 + options.append("X")
3371 + idx = userquery("Selection?", options)
3372 + if idx == "X":
3373 + sys.exit(0)
3374 + pkg = pkgs[int(idx)-1]
3375 + else:
3376 + print "The following packages are available:"
3377 + for pkg in pkgs:
3378 + print "* "+pkg
3379 + print "\nPlease use a specific atom or the --ask option."
3380 + sys.exit(1)
3381 + else:
3382 + pkg = pkgs[0]
3383 +
3384 + print
3385 + if "--ask" in myopts:
3386 + if userquery("Ready to configure "+pkg+"?") == "No":
3387 + sys.exit(0)
3388 + else:
3389 + print "Configuring pkg..."
3390 + print
3391 + ebuildpath = trees[settings["ROOT"]]["vartree"].dbapi.findname(pkg)
3392 + mysettings = portage.config(clone=settings)
3393 + vardb = trees[mysettings["ROOT"]]["vartree"].dbapi
3394 + debug = mysettings.get("PORTAGE_DEBUG") == "1"
3395 + retval = portage.doebuild(ebuildpath, "config", mysettings["ROOT"],
3396 + mysettings,
3397 + debug=(settings.get("PORTAGE_DEBUG", "") == 1), cleanup=True,
3398 + mydbapi=trees[settings["ROOT"]]["vartree"].dbapi, tree="vartree")
3399 + if retval == os.EX_OK:
3400 + portage.doebuild(ebuildpath, "clean", mysettings["ROOT"],
3401 + mysettings, debug=debug, mydbapi=vardb, tree="vartree")
3402 + print
3403 +
3404 +def action_depclean(settings, trees, ldpath_mtimes,
3405 + myopts, action, myfiles, spinner):
3406 + # Kill packages that are neither explicitly merged nor required as a
3407 + # dependency of another package. The world file counts as explicit.
3408 +
3409 + # Global depclean or prune operations are not very safe when there are
3410 + # missing dependencies since it's unknown how badly incomplete
3411 + # the dependency graph is, and we might accidentally remove packages
3412 + # that should have been pulled into the graph. On the other hand, it's
3413 + # relatively safe to ignore missing deps when only asked to remove
3414 + # specific packages.
3415 + allow_missing_deps = len(myfiles) > 0
3416 +
3417 + msg = []
3418 + msg.append("Always study the list of packages to be cleaned for any obvious\n")
3419 + msg.append("mistakes. Packages that are part of the world set will always\n")
3420 + msg.append("be kept. They can be manually added to this set with\n")
3421 + msg.append(good("`emerge --noreplace <atom>`") + ". Packages that are listed in\n")
3422 + msg.append("package.provided (see portage(5)) will be removed by\n")
3423 + msg.append("depclean, even if they are part of the world set.\n")
3424 + msg.append("\n")
3425 + msg.append("As a safety measure, depclean will not remove any packages\n")
3426 + msg.append("unless *all* required dependencies have been resolved. As a\n")
3427 + msg.append("consequence, it is often necessary to run %s\n" % \
3428 + good("`emerge --update"))
3429 + msg.append(good("--newuse --deep @system @world`") + \
3430 + " prior to depclean.\n")
3431 +
3432 + if action == "depclean" and "--quiet" not in myopts and not myfiles:
3433 + portage.writemsg_stdout("\n")
3434 + for x in msg:
3435 + portage.writemsg_stdout(colorize("WARN", " * ") + x)
3436 +
3437 + xterm_titles = "notitles" not in settings.features
3438 + myroot = settings["ROOT"]
3439 + root_config = trees[myroot]["root_config"]
3440 + getSetAtoms = root_config.setconfig.getSetAtoms
3441 + vardb = trees[myroot]["vartree"].dbapi
3442 + deselect = myopts.get('--deselect') != 'n'
3443 +
3444 + required_set_names = ("system", "world")
3445 + required_sets = {}
3446 + set_args = []
3447 +
3448 + for s in required_set_names:
3449 + required_sets[s] = InternalPackageSet(
3450 + initial_atoms=getSetAtoms(s))
3451 +
3452 +
3453 + # When removing packages, use a temporary version of world
3454 + # which excludes packages that are intended to be eligible for
3455 + # removal.
3456 + world_temp_set = required_sets["world"]
3457 + system_set = required_sets["system"]
3458 +
3459 + if not system_set or not world_temp_set:
3460 +
3461 + if not system_set:
3462 + writemsg_level("!!! You have no system list.\n",
3463 + level=logging.ERROR, noiselevel=-1)
3464 +
3465 + if not world_temp_set:
3466 + writemsg_level("!!! You have no world file.\n",
3467 + level=logging.WARNING, noiselevel=-1)
3468 +
3469 + writemsg_level("!!! Proceeding is likely to " + \
3470 + "break your installation.\n",
3471 + level=logging.WARNING, noiselevel=-1)
3472 + if "--pretend" not in myopts:
3473 + countdown(int(settings["EMERGE_WARNING_DELAY"]), ">>> Depclean")
3474 +
3475 + if action == "depclean":
3476 + emergelog(xterm_titles, " >>> depclean")
3477 +
3478 + args_set = InternalPackageSet()
3479 + if myfiles:
3480 + args_set.update(myfiles)
3481 + matched_packages = False
3482 + for x in args_set:
3483 + if vardb.match(x):
3484 + matched_packages = True
3485 + break
3486 + if not matched_packages:
3487 + writemsg_level(">>> No packages selected for removal by %s\n" % \
3488 + action)
3489 + return
3490 +
3491 + writemsg_level("\nCalculating dependencies ")
3492 + resolver_params = create_depgraph_params(myopts, "remove")
3493 + resolver = depgraph(settings, trees, myopts, resolver_params, spinner)
3494 + vardb = resolver.trees[myroot]["vartree"].dbapi
3495 +
3496 + if action == "depclean":
3497 +
3498 + if args_set:
3499 +
3500 + if deselect:
3501 + world_temp_set.clear()
3502 +
3503 + # Pull in everything that's installed but not matched
3504 + # by an argument atom since we don't want to clean any
3505 + # package if something depends on it.
3506 + for pkg in vardb:
3507 + spinner.update()
3508 +
3509 + try:
3510 + if args_set.findAtomForPackage(pkg) is None:
3511 + world_temp_set.add("=" + pkg.cpv)
3512 + continue
3513 + except portage.exception.InvalidDependString, e:
3514 + show_invalid_depstring_notice(pkg,
3515 + pkg.metadata["PROVIDE"], str(e))
3516 + del e
3517 + world_temp_set.add("=" + pkg.cpv)
3518 + continue
3519 +
3520 + elif action == "prune":
3521 +
3522 + if deselect:
3523 + world_temp_set.clear()
3524 +
3525 + # Pull in everything that's installed since we don't
3526 + # want to prune a package if something depends on it.
3527 + world_temp_set.update(vardb.cp_all())
3528 +
3529 + if not args_set:
3530 +
3531 + # Try to prune everything that's slotted.
3532 + for cp in vardb.cp_all():
3533 + if len(vardb.cp_list(cp)) > 1:
3534 + args_set.add(cp)
3535 +
3536 + # Remove atoms from world that match installed packages
3537 + # that are also matched by argument atoms, but do not remove
3538 + # them if they match the highest installed version.
3539 + for pkg in vardb:
3540 + spinner.update()
3541 + pkgs_for_cp = vardb.match_pkgs(pkg.cp)
3542 + if not pkgs_for_cp or pkg not in pkgs_for_cp:
3543 + raise AssertionError("package expected in matches: " + \
3544 + "cp = %s, cpv = %s matches = %s" % \
3545 + (pkg.cp, pkg.cpv, [str(x) for x in pkgs_for_cp]))
3546 +
3547 + highest_version = pkgs_for_cp[-1]
3548 + if pkg == highest_version:
3549 + # pkg is the highest version
3550 + world_temp_set.add("=" + pkg.cpv)
3551 + continue
3552 +
3553 + if len(pkgs_for_cp) <= 1:
3554 + raise AssertionError("more packages expected: " + \
3555 + "cp = %s, cpv = %s matches = %s" % \
3556 + (pkg.cp, pkg.cpv, [str(x) for x in pkgs_for_cp]))
3557 +
3558 + try:
3559 + if args_set.findAtomForPackage(pkg) is None:
3560 + world_temp_set.add("=" + pkg.cpv)
3561 + continue
3562 + except portage.exception.InvalidDependString, e:
3563 + show_invalid_depstring_notice(pkg,
3564 + pkg.metadata["PROVIDE"], str(e))
3565 + del e
3566 + world_temp_set.add("=" + pkg.cpv)
3567 + continue
3568 +
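+ # Seed the resolver with the system and world set atoms so that anything they require stays in the graph.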
3569 + set_args = {}
3570 + for s, package_set in required_sets.iteritems():
3571 + set_atom = SETPREFIX + s
3572 + set_arg = SetArg(arg=set_atom, set=package_set,
3573 + root_config=resolver.roots[myroot])
3574 + set_args[s] = set_arg
3575 + for atom in set_arg.set:
3576 + resolver._dep_stack.append(
3577 + Dependency(atom=atom, root=myroot, parent=set_arg))
3578 + resolver.digraph.add(set_arg, None)
3579 +
3580 + success = resolver._complete_graph()
3581 + writemsg_level("\b\b... done!\n")
3582 +
3583 + resolver.display_problems()
3584 +
3585 + if not success:
3586 + return 1
3587 +
3588 + def unresolved_deps():
3589 +
3590 + unresolvable = set()
3591 + for dep in resolver._initially_unsatisfied_deps:
3592 + if isinstance(dep.parent, Package) and \
3593 + (dep.priority > UnmergeDepPriority.SOFT):
3594 + unresolvable.add((dep.atom, dep.parent.cpv))
3595 +
3596 + if not unresolvable:
3597 + return False
3598 +
3599 + if unresolvable and not allow_missing_deps:
3600 + prefix = bad(" * ")
3601 + msg = []
3602 + msg.append("Dependencies could not be completely resolved due to")
3603 + msg.append("the following required packages not being installed:")
3604 + msg.append("")
3605 + for atom, parent in unresolvable:
3606 + msg.append(" %s pulled in by:" % (atom,))
3607 + msg.append(" %s" % (parent,))
3608 + msg.append("")
3609 + msg.append("Have you forgotten to run " + \
3610 + good("`emerge --update --newuse --deep @system @world`") + " prior")
3611 + msg.append(("to %s? It may be necessary to manually " + \
3612 + "uninstall packages that no longer") % action)
3613 + msg.append("exist in the portage tree since " + \
3614 + "it may not be possible to satisfy their")
3615 + msg.append("dependencies. Also, be aware of " + \
3616 + "the --with-bdeps option that is documented")
3617 + msg.append("in " + good("`man emerge`") + ".")
3618 + if action == "prune":
3619 + msg.append("")
3620 + msg.append("If you would like to ignore " + \
3621 + "dependencies then use %s." % good("--nodeps"))
3622 + writemsg_level("".join("%s%s\n" % (prefix, line) for line in msg),
3623 + level=logging.ERROR, noiselevel=-1)
3624 + return True
3625 + return False
3626 +
3627 + if unresolved_deps():
3628 + return 1
3629 +
3630 + graph = resolver.digraph.copy()
3631 + required_pkgs_total = 0
3632 + for node in graph:
3633 + if isinstance(node, Package):
3634 + required_pkgs_total += 1
3635 +
3636 + def show_parents(child_node):
3637 + parent_nodes = graph.parent_nodes(child_node)
3638 + if not parent_nodes:
3639 + # With --prune, the highest version can be pulled in without any
3640 + # real parent since all installed packages are pulled in. In that
3641 + # case there's nothing to show here.
3642 + return
3643 + parent_strs = []
3644 + for node in parent_nodes:
3645 + parent_strs.append(str(getattr(node, "cpv", node)))
3646 + parent_strs.sort()
3647 + msg = []
3648 + msg.append(" %s pulled in by:\n" % (child_node.cpv,))
3649 + for parent_str in parent_strs:
3650 + msg.append(" %s\n" % (parent_str,))
3651 + msg.append("\n")
3652 + portage.writemsg_stdout("".join(msg), noiselevel=-1)
3653 +
3654 + def cmp_pkg_cpv(pkg1, pkg2):
3655 + """Sort Package instances by cpv."""
3656 + if pkg1.cpv > pkg2.cpv:
3657 + return 1
3658 + elif pkg1.cpv == pkg2.cpv:
3659 + return 0
3660 + else:
3661 + return -1
3662 +
3663 + def create_cleanlist():
3664 + pkgs_to_remove = []
3665 +
3666 + if action == "depclean":
3667 + if args_set:
3668 +
3669 + for pkg in sorted(vardb, key=cmp_sort_key(cmp_pkg_cpv)):
3670 + arg_atom = None
3671 + try:
3672 + arg_atom = args_set.findAtomForPackage(pkg)
3673 + except portage.exception.InvalidDependString:
3674 + # this error has already been displayed by now
3675 + continue
3676 +
3677 + if arg_atom:
3678 + if pkg not in graph:
3679 + pkgs_to_remove.append(pkg)
3680 + elif "--verbose" in myopts:
3681 + show_parents(pkg)
3682 +
3683 + else:
3684 + for pkg in sorted(vardb, key=cmp_sort_key(cmp_pkg_cpv)):
3685 + if pkg not in graph:
3686 + pkgs_to_remove.append(pkg)
3687 + elif "--verbose" in myopts:
3688 + show_parents(pkg)
3689 +
3690 + elif action == "prune":
3691 + # Prune really uses all installed instead of world. It's not
3692 + # a real reverse dependency so don't display it as such.
3693 + graph.remove(set_args["world"])
3694 +
3695 + for atom in args_set:
3696 + for pkg in vardb.match_pkgs(atom):
3697 + if pkg not in graph:
3698 + pkgs_to_remove.append(pkg)
3699 + elif "--verbose" in myopts:
3700 + show_parents(pkg)
3701 +
3702 + if not pkgs_to_remove:
3703 + writemsg_level(
3704 + ">>> No packages selected for removal by %s\n" % action)
3705 + if "--verbose" not in myopts:
3706 + writemsg_level(
3707 + ">>> To see reverse dependencies, use %s\n" % \
3708 + good("--verbose"))
3709 + if action == "prune":
3710 + writemsg_level(
3711 + ">>> To ignore dependencies, use %s\n" % \
3712 + good("--nodeps"))
3713 +
3714 + return pkgs_to_remove
3715 +
3716 + cleanlist = create_cleanlist()
3717 +
3718 + if len(cleanlist):
3719 + clean_set = set(cleanlist)
3720 +
3721 + # Check if any of these package are the sole providers of libraries
3722 + # with consumers that have not been selected for removal. If so, these
3723 + # packages and any dependencies need to be added to the graph.
3724 + real_vardb = trees[myroot]["vartree"].dbapi
3725 + linkmap = real_vardb.linkmap
3726 + liblist = linkmap.listLibraryObjects()
3727 + consumer_cache = {}
3728 + provider_cache = {}
3729 + soname_cache = {}
3730 + consumer_map = {}
3731 +
3732 + writemsg_level(">>> Checking for lib consumers...\n")
3733 +
3734 + for pkg in cleanlist:
3735 + pkg_dblink = real_vardb._dblink(pkg.cpv)
3736 + provided_libs = set()
3737 +
3738 + for lib in liblist:
3739 + if pkg_dblink.isowner(lib, myroot):
3740 + provided_libs.add(lib)
3741 +
3742 + if not provided_libs:
3743 + continue
3744 +
3745 + consumers = {}
3746 + for lib in provided_libs:
3747 + lib_consumers = consumer_cache.get(lib)
3748 + if lib_consumers is None:
3749 + lib_consumers = linkmap.findConsumers(lib)
3750 + consumer_cache[lib] = lib_consumers
3751 + if lib_consumers:
3752 + consumers[lib] = lib_consumers
3753 +
3754 + if not consumers:
3755 + continue
3756 +
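+ # Ignore consumers that are owned by this package itself; only external consumers matter here.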
3757 + for lib, lib_consumers in consumers.items():
3758 + for consumer_file in list(lib_consumers):
3759 + if pkg_dblink.isowner(consumer_file, myroot):
3760 + lib_consumers.remove(consumer_file)
3761 + if not lib_consumers:
3762 + del consumers[lib]
3763 +
3764 + if not consumers:
3765 + continue
3766 +
3767 + for lib, lib_consumers in consumers.iteritems():
3768 +
3769 + soname = soname_cache.get(lib)
3770 + if soname is None:
3771 + soname = linkmap.getSoname(lib)
3772 + soname_cache[lib] = soname
3773 +
3774 + consumer_providers = []
3775 + for lib_consumer in lib_consumers:
3776 + providers = provider_cache.get(lib)
3777 + if providers is None:
3778 + providers = linkmap.findProviders(lib_consumer)
3779 + provider_cache[lib_consumer] = providers
3780 + if soname not in providers:
3781 + # Why does this happen?
3782 + continue
3783 + consumer_providers.append(
3784 + (lib_consumer, providers[soname]))
3785 +
3786 + consumers[lib] = consumer_providers
3787 +
3788 + consumer_map[pkg] = consumers
3789 +
3790 + if consumer_map:
3791 +
3792 + search_files = set()
3793 + for consumers in consumer_map.itervalues():
3794 + for lib, consumer_providers in consumers.iteritems():
3795 + for lib_consumer, providers in consumer_providers:
3796 + search_files.add(lib_consumer)
3797 + search_files.update(providers)
3798 +
3799 + writemsg_level(">>> Assigning files to packages...\n")
3800 + file_owners = real_vardb._owners.getFileOwnerMap(search_files)
3801 +
3802 + for pkg, consumers in consumer_map.items():
3803 + for lib, consumer_providers in consumers.items():
3804 + lib_consumers = set()
3805 +
3806 + for lib_consumer, providers in consumer_providers:
3807 + owner_set = file_owners.get(lib_consumer)
3808 + provider_dblinks = set()
3809 + provider_pkgs = set()
3810 +
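+ # If the library has other providers that are not scheduled for removal, this consumer is safe to skip.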
3811 + if len(providers) > 1:
3812 + for provider in providers:
3813 + provider_set = file_owners.get(provider)
3814 + if provider_set is not None:
3815 + provider_dblinks.update(provider_set)
3816 +
3817 + if len(provider_dblinks) > 1:
3818 + for provider_dblink in provider_dblinks:
3819 + pkg_key = ("installed", myroot,
3820 + provider_dblink.mycpv, "nomerge")
3821 + if pkg_key not in clean_set:
3822 + provider_pkgs.add(vardb.get(pkg_key))
3823 +
3824 + if provider_pkgs:
3825 + continue
3826 +
3827 + if owner_set is not None:
3828 + lib_consumers.update(owner_set)
3829 +
3830 + for consumer_dblink in list(lib_consumers):
3831 + if ("installed", myroot, consumer_dblink.mycpv,
3832 + "nomerge") in clean_set:
3833 + lib_consumers.remove(consumer_dblink)
3834 + continue
3835 +
3836 + if lib_consumers:
3837 + consumers[lib] = lib_consumers
3838 + else:
3839 + del consumers[lib]
3840 + if not consumers:
3841 + del consumer_map[pkg]
3842 +
3843 + if consumer_map:
3844 + # TODO: Implement a package set for rebuilding consumer packages.
3845 +
3846 + msg = "In order to avoid breakage of link level " + \
3847 + "dependencies, one or more packages will not be removed. " + \
3848 + "This can be solved by rebuilding " + \
3849 + "the packages that pulled them in."
3850 +
3851 + prefix = bad(" * ")
3852 + from textwrap import wrap
3853 + writemsg_level("".join(prefix + "%s\n" % line for \
3854 + line in wrap(msg, 70)), level=logging.WARNING, noiselevel=-1)
3855 +
3856 + msg = []
3857 + for pkg, consumers in consumer_map.iteritems():
3858 + unique_consumers = set(chain(*consumers.values()))
3859 + unique_consumers = sorted(consumer.mycpv \
3860 + for consumer in unique_consumers)
3861 + msg.append("")
3862 + msg.append(" %s pulled in by:" % (pkg.cpv,))
3863 + for consumer in unique_consumers:
3864 + msg.append(" %s" % (consumer,))
3865 + msg.append("")
3866 + writemsg_level("".join(prefix + "%s\n" % line for line in msg),
3867 + level=logging.WARNING, noiselevel=-1)
3868 +
3869 + # Add lib providers to the graph as children of lib consumers,
3870 + # and also add any dependencies pulled in by the provider.
3871 + writemsg_level(">>> Adding lib providers to graph...\n")
3872 +
3873 + for pkg, consumers in consumer_map.iteritems():
3874 + for consumer_dblink in set(chain(*consumers.values())):
3875 + consumer_pkg = vardb.get(("installed", myroot,
3876 + consumer_dblink.mycpv, "nomerge"))
3877 + if not resolver._add_pkg(pkg,
3878 + Dependency(parent=consumer_pkg,
3879 + priority=UnmergeDepPriority(runtime=True),
3880 + root=pkg.root)):
3881 + resolver.display_problems()
3882 + return 1
3883 +
3884 + writemsg_level("\nCalculating dependencies ")
3885 + success = resolver._complete_graph()
3886 + writemsg_level("\b\b... done!\n")
3887 + resolver.display_problems()
3888 + if not success:
3889 + return 1
3890 + if unresolved_deps():
3891 + return 1
3892 +
3893 + graph = resolver.digraph.copy()
3894 + required_pkgs_total = 0
3895 + for node in graph:
3896 + if isinstance(node, Package):
3897 + required_pkgs_total += 1
3898 + cleanlist = create_cleanlist()
3899 + if not cleanlist:
3900 + return 0
3901 + clean_set = set(cleanlist)
3902 +
3903 + # Use a topological sort to create an unmerge order such that
3904 + # each package is unmerged before its dependencies. This is
3905 + # necessary to avoid breaking things that may need to run
3906 + # during pkg_prerm or pkg_postrm phases.
3907 +
3908 + # Create a new graph to account for dependencies between the
3909 + # packages being unmerged.
3910 + graph = digraph()
3911 + del cleanlist[:]
3912 +
3913 + dep_keys = ["DEPEND", "RDEPEND", "PDEPEND"]
3914 + runtime = UnmergeDepPriority(runtime=True)
3915 + runtime_post = UnmergeDepPriority(runtime_post=True)
3916 + buildtime = UnmergeDepPriority(buildtime=True)
3917 + priority_map = {
3918 + "RDEPEND": runtime,
3919 + "PDEPEND": runtime_post,
3920 + "DEPEND": buildtime,
3921 + }
3922 +
3923 + for node in clean_set:
3924 + graph.add(node, None)
3925 + mydeps = []
3926 + node_use = node.metadata["USE"].split()
3927 + for dep_type in dep_keys:
3928 + depstr = node.metadata[dep_type]
3929 + if not depstr:
3930 + continue
3931 + try:
3932 + portage.dep._dep_check_strict = False
3933 + success, atoms = portage.dep_check(depstr, None, settings,
3934 + myuse=node_use, trees=resolver._graph_trees,
3935 + myroot=myroot)
3936 + finally:
3937 + portage.dep._dep_check_strict = True
3938 + if not success:
3939 + # Ignore invalid deps of packages that will
3940 + # be uninstalled anyway.
3941 + continue
3942 +
3943 + priority = priority_map[dep_type]
3944 + for atom in atoms:
3945 + if not isinstance(atom, portage.dep.Atom):
3946 + # Ignore invalid atoms returned from dep_check().
3947 + continue
3948 + if atom.blocker:
3949 + continue
3950 + matches = vardb.match_pkgs(atom)
3951 + if not matches:
3952 + continue
3953 + for child_node in matches:
3954 + if child_node in clean_set:
3955 + graph.add(child_node, node, priority=priority)
3956 +
3957 + ordered = True
3958 + if len(graph.order) == len(graph.root_nodes()):
3959 + # If there are no dependencies between packages
3960 + # let unmerge() group them by cat/pn.
3961 + ordered = False
3962 + cleanlist = [pkg.cpv for pkg in graph.order]
3963 + else:
3964 + # Order nodes from lowest to highest overall reference count for
3965 + # optimal root node selection.
3966 + node_refcounts = {}
3967 + for node in graph.order:
3968 + node_refcounts[node] = len(graph.parent_nodes(node))
3969 + def cmp_reference_count(node1, node2):
3970 + return node_refcounts[node1] - node_refcounts[node2]
3971 + graph.order.sort(key=cmp_sort_key(cmp_reference_count))
3972 +
3973 + ignore_priority_range = [None]
3974 + ignore_priority_range.extend(
3975 + xrange(UnmergeDepPriority.MIN, UnmergeDepPriority.MAX + 1))
3976 + while not graph.empty():
3977 + for ignore_priority in ignore_priority_range:
3978 + nodes = graph.root_nodes(ignore_priority=ignore_priority)
3979 + if nodes:
3980 + break
3981 + if not nodes:
3982 + raise AssertionError("no root nodes")
3983 + if ignore_priority is not None:
3984 + # Some deps have been dropped due to circular dependencies,
3985 + # so only pop one node in order to minimize the number that
3986 + # are dropped.
3987 + del nodes[1:]
3988 + for node in nodes:
3989 + graph.remove(node)
3990 + cleanlist.append(node.cpv)
3991 +
3992 + unmerge(root_config, myopts, "unmerge", cleanlist,
3993 + ldpath_mtimes, ordered=ordered)
3994 +
3995 + if action == "prune":
3996 + return
3997 +
3998 + if not cleanlist and "--quiet" in myopts:
3999 + return
4000 +
4001 + print "Packages installed: "+str(len(vardb.cpv_all()))
4002 + print "Packages in world: " + \
4003 + str(len(root_config.sets["world"].getAtoms()))
4004 + print "Packages in system: " + \
4005 + str(len(root_config.sets["system"].getAtoms()))
4006 + print "Required packages: "+str(required_pkgs_total)
4007 + if "--pretend" in myopts:
4008 + print "Number to remove: "+str(len(cleanlist))
4009 + else:
4010 + print "Number removed: "+str(len(cleanlist))
4011 +
4012 +def action_deselect(settings, trees, opts, atoms):
4013 + root_config = trees[settings['ROOT']]['root_config']
4014 + world_set = root_config.sets['world']
4015 + if not hasattr(world_set, 'update'):
4016 + writemsg_level("World set does not appear to be mutable.\n",
4017 + level=logging.ERROR, noiselevel=-1)
4018 + return 1
4019 +
4020 + vardb = root_config.trees['vartree'].dbapi
4021 + expanded_atoms = set(atoms)
4022 + from portage.dep import Atom
4023 + for atom in atoms:
4024 + for cpv in vardb.match(atom):
4025 + slot, = vardb.aux_get(cpv, ['SLOT'])
4026 + if not slot:
4027 + slot = '0'
4028 + expanded_atoms.add(Atom('%s:%s' % (portage.cpv_getkey(cpv), slot)))
4029 +
4030 + pretend = '--pretend' in opts
4031 + locked = False
4032 + if not pretend and hasattr(world_set, 'lock'):
4033 + world_set.lock()
4034 + locked = True
4035 + try:
4036 + discard_atoms = set()
4037 + world_set.load()
4038 + for atom in world_set:
4039 + if not isinstance(atom, Atom):
4040 + # nested set
4041 + continue
4042 + for arg_atom in expanded_atoms:
4043 + if arg_atom.intersects(atom) and \
4044 + not (arg_atom.slot and not atom.slot):
4045 + discard_atoms.add(atom)
4046 + break
4047 + if discard_atoms:
4048 + for atom in sorted(discard_atoms):
4049 + print ">>> Removing %s from \"world\" favorites file..." % \
4050 + colorize("INFORM", str(atom))
4051 +
4052 + if '--ask' in opts:
4053 + prompt = "Would you like to remove these " + \
4054 + "packages from your world favorites?"
4055 + if userquery(prompt) == 'No':
4056 + return os.EX_OK
4057 +
4058 + remaining = set(world_set)
4059 + remaining.difference_update(discard_atoms)
4060 + if not pretend:
4061 + world_set.replace(remaining)
4062 + else:
4063 + print ">>> No matching atoms found in \"world\" favorites file..."
4064 + finally:
4065 + if locked:
4066 + world_set.unlock()
4067 + return os.EX_OK
4068 +
4069 +def action_info(settings, trees, myopts, myfiles):
4070 + print getportageversion(settings["PORTDIR"], settings["ROOT"],
4071 + settings.profile_path, settings["CHOST"],
4072 + trees[settings["ROOT"]]["vartree"].dbapi)
4073 + header_width = 65
4074 + header_title = "System Settings"
4075 + if myfiles:
4076 + print header_width * "="
4077 + print header_title.rjust(int(header_width/2 + len(header_title)/2))
4078 + print header_width * "="
4079 + print "System uname: "+platform.platform(aliased=1)
4080 +
4081 + lastSync = portage.grabfile(os.path.join(
4082 + settings["PORTDIR"], "metadata", "timestamp.chk"))
4083 + print "Timestamp of tree:",
4084 + if lastSync:
4085 + print lastSync[0]
4086 + else:
4087 + print "Unknown"
4088 +
4089 + output=commands.getstatusoutput("distcc --version")
4090 + if not output[0]:
4091 + print str(output[1].split("\n",1)[0]),
4092 + if "distcc" in settings.features:
4093 + print "[enabled]"
4094 + else:
4095 + print "[disabled]"
4096 +
4097 + output=commands.getstatusoutput("ccache -V")
4098 + if not output[0]:
4099 + print str(output[1].split("\n",1)[0]),
4100 + if "ccache" in settings.features:
4101 + print "[enabled]"
4102 + else:
4103 + print "[disabled]"
4104 +
4105 + myvars = ["sys-devel/autoconf", "sys-devel/automake", "virtual/os-headers",
4106 + "sys-devel/binutils", "sys-devel/libtool", "dev-lang/python"]
4107 + myvars += portage.util.grabfile(settings["PORTDIR"]+"/profiles/info_pkgs")
4108 + myvars = portage.util.unique_array(myvars)
4109 + myvars.sort()
4110 +
4111 + for x in myvars:
4112 + if portage.isvalidatom(x):
4113 + pkg_matches = trees["/"]["vartree"].dbapi.match(x)
4114 + pkg_matches = [portage.catpkgsplit(cpv)[1:] for cpv in pkg_matches]
4115 + pkg_matches.sort(key=cmp_sort_key(portage.pkgcmp))
4116 + pkgs = []
4117 + for pn, ver, rev in pkg_matches:
4118 + if rev != "r0":
4119 + pkgs.append(ver + "-" + rev)
4120 + else:
4121 + pkgs.append(ver)
4122 + if pkgs:
4123 + pkgs = ", ".join(pkgs)
4124 + print "%-20s %s" % (x+":", pkgs)
4125 + else:
4126 + print "%-20s %s" % (x+":", "[NOT VALID]")
4127 +
4128 + libtool_vers = ",".join(trees["/"]["vartree"].dbapi.match("sys-devel/libtool"))
4129 +
4130 + if "--verbose" in myopts:
4131 + myvars=settings.keys()
4132 + else:
4133 + myvars = ['GENTOO_MIRRORS', 'CONFIG_PROTECT', 'CONFIG_PROTECT_MASK',
4134 + 'PORTDIR', 'DISTDIR', 'PKGDIR', 'PORTAGE_TMPDIR',
4135 + 'PORTDIR_OVERLAY', 'USE', 'CHOST', 'CFLAGS', 'CXXFLAGS',
4136 + 'ACCEPT_KEYWORDS', 'SYNC', 'FEATURES', 'EMERGE_DEFAULT_OPTS']
4137 +
4138 + myvars.extend(portage.util.grabfile(settings["PORTDIR"]+"/profiles/info_vars"))
4139 +
4140 + myvars = portage.util.unique_array(myvars)
4141 + use_expand = settings.get('USE_EXPAND', '').split()
4142 + use_expand.sort()
4143 + use_expand_hidden = set(
4144 + settings.get('USE_EXPAND_HIDDEN', '').upper().split())
4145 + alphabetical_use = '--alphabetical' in myopts
4146 + root_config = trees[settings["ROOT"]]['root_config']
4147 + unset_vars = []
4148 + myvars.sort()
4149 + for x in myvars:
4150 + if x in settings:
4151 + if x != "USE":
4152 + print '%s="%s"' % (x, settings[x])
4153 + else:
4154 + use = set(settings["USE"].split())
4155 + for varname in use_expand:
4156 + flag_prefix = varname.lower() + "_"
4157 + for f in list(use):
4158 + if f.startswith(flag_prefix):
4159 + use.remove(f)
4160 + use = list(use)
4161 + use.sort()
4162 + print 'USE="%s"' % " ".join(use),
4163 + for varname in use_expand:
4164 + myval = settings.get(varname)
4165 + if myval:
4166 + print '%s="%s"' % (varname, myval),
4167 + print
4168 + else:
4169 + unset_vars.append(x)
4170 + if unset_vars:
4171 + print "Unset: "+", ".join(unset_vars)
4172 + print
4173 +
4174 + if "--debug" in myopts:
4175 + for x in dir(portage):
4176 + module = getattr(portage, x)
4177 + if "cvs_id_string" in dir(module):
4178 + print "%s: %s" % (str(x), str(module.cvs_id_string))
4179 +
4180 + # See if we can find any packages installed matching the strings
4181 + # passed on the command line
4182 + mypkgs = []
4183 + vardb = trees[settings["ROOT"]]["vartree"].dbapi
4184 + portdb = trees[settings["ROOT"]]["porttree"].dbapi
4185 + for x in myfiles:
4186 + mypkgs.extend(vardb.match(x))
4187 +
4188 + # If some packages were found...
4189 + if mypkgs:
4190 + # Get our global settings (we only print stuff if it varies from
4191 + # the current config)
4192 + mydesiredvars = [ 'CHOST', 'CFLAGS', 'CXXFLAGS', 'LDFLAGS' ]
4193 + auxkeys = mydesiredvars + list(vardb._aux_cache_keys)
4194 + auxkeys.append('DEFINED_PHASES')
4195 + global_vals = {}
4196 + pkgsettings = portage.config(clone=settings)
4197 +
4198 + # Loop through each package
4199 + # Only print settings if they differ from global settings
4200 + header_title = "Package Settings"
4201 + print header_width * "="
4202 + print header_title.rjust(int(header_width/2 + len(header_title)/2))
4203 + print header_width * "="
4204 + from portage.output import EOutput
4205 + out = EOutput()
4206 + for cpv in mypkgs:
4207 + # Get all package specific variables
4208 + metadata = dict(izip(auxkeys, vardb.aux_get(cpv, auxkeys)))
4209 + pkg = Package(built=True, cpv=cpv,
4210 + installed=True, metadata=izip(Package.metadata_keys,
4211 + (metadata.get(x, '') for x in Package.metadata_keys)),
4212 + root_config=root_config, type_name='installed')
4213 +
4214 + print "\n%s was built with the following:" % \
4215 + colorize("INFORM", str(pkg.cpv))
4216 +
4217 + pkgsettings.setcpv(pkg)
4218 + forced_flags = set(chain(pkgsettings.useforce,
4219 + pkgsettings.usemask))
4220 + use = set(pkg.use.enabled)
4221 + use.discard(pkgsettings.get('ARCH'))
4222 + use_expand_flags = set()
4223 + use_enabled = {}
4224 + use_disabled = {}
4225 + for varname in use_expand:
4226 + flag_prefix = varname.lower() + "_"
4227 + for f in use:
4228 + if f.startswith(flag_prefix):
4229 + use_expand_flags.add(f)
4230 + use_enabled.setdefault(
4231 + varname.upper(), []).append(f[len(flag_prefix):])
4232 +
4233 + for f in pkg.iuse.all:
4234 + if f.startswith(flag_prefix):
4235 + use_expand_flags.add(f)
4236 + if f not in use:
4237 + use_disabled.setdefault(
4238 + varname.upper(), []).append(f[len(flag_prefix):])
4239 +
4240 + var_order = set(use_enabled)
4241 + var_order.update(use_disabled)
4242 + var_order = sorted(var_order)
4243 + var_order.insert(0, 'USE')
4244 + use.difference_update(use_expand_flags)
4245 + use_enabled['USE'] = list(use)
4246 + use_disabled['USE'] = []
4247 +
4248 + for f in pkg.iuse.all:
4249 + if f not in use and \
4250 + f not in use_expand_flags:
4251 + use_disabled['USE'].append(f)
4252 +
4253 + for varname in var_order:
4254 + if varname in use_expand_hidden:
4255 + continue
4256 + flags = []
4257 + for f in use_enabled.get(varname, []):
4258 + flags.append(UseFlagDisplay(f, True, f in forced_flags))
4259 + for f in use_disabled.get(varname, []):
4260 + flags.append(UseFlagDisplay(f, False, f in forced_flags))
4261 + if alphabetical_use:
4262 + flags.sort(key=UseFlagDisplay.sort_combined)
4263 + else:
4264 + flags.sort(key=UseFlagDisplay.sort_separated)
4265 + print '%s="%s"' % (varname, ' '.join(str(f) for f in flags)),
4266 + print
4267 +
4268 + for myvar in mydesiredvars:
4269 + if metadata[myvar].split() != settings.get(myvar, '').split():
4270 + print "%s=\"%s\"" % (myvar, metadata[myvar])
4271 + print
4272 +
4273 + if metadata['DEFINED_PHASES']:
4274 + if 'info' not in metadata['DEFINED_PHASES'].split():
4275 + continue
4276 +
4277 + print ">>> Attempting to run pkg_info() for '%s'" % pkg.cpv
4278 + ebuildpath = vardb.findname(pkg.cpv)
4279 + if not ebuildpath or not os.path.exists(ebuildpath):
4280 + out.ewarn("No ebuild found for '%s'" % pkg.cpv)
4281 + continue
4282 + portage.doebuild(ebuildpath, "info", pkgsettings["ROOT"],
4283 + pkgsettings, debug=(settings.get("PORTAGE_DEBUG", "") == 1),
4284 + mydbapi=trees[settings["ROOT"]]["vartree"].dbapi,
4285 + tree="vartree")
4286 +
4287 +def action_metadata(settings, portdb, myopts, porttrees=None):
4288 + if porttrees is None:
4289 + porttrees = portdb.porttrees
4290 + portage.writemsg_stdout("\n>>> Updating Portage cache\n")
4291 + old_umask = os.umask(0002)
4292 + cachedir = os.path.normpath(settings.depcachedir)
4293 + if cachedir in ["/", "/bin", "/dev", "/etc", "/home",
4294 + "/lib", "/opt", "/proc", "/root", "/sbin",
4295 + "/sys", "/tmp", "/usr", "/var"]:
4296 + print >> sys.stderr, "!!! PORTAGE_DEPCACHEDIR IS SET TO A PRIMARY " + \
4297 + "ROOT DIRECTORY ON YOUR SYSTEM."
4298 + print >> sys.stderr, \
4299 + "!!! This is ALMOST CERTAINLY NOT what you want: '%s'" % cachedir
4300 + sys.exit(73)
4301 + if not os.path.exists(cachedir):
4302 + os.makedirs(cachedir)
4303 +
4304 + auxdbkeys = [x for x in portage.auxdbkeys if not x.startswith("UNUSED_0")]
4305 + auxdbkeys = tuple(auxdbkeys)
4306 +
4307 + class TreeData(object):
4308 + __slots__ = ('dest_db', 'eclass_db', 'path', 'src_db', 'valid_nodes')
4309 + def __init__(self, dest_db, eclass_db, path, src_db):
4310 + self.dest_db = dest_db
4311 + self.eclass_db = eclass_db
4312 + self.path = path
4313 + self.src_db = src_db
4314 + self.valid_nodes = set()
4315 +
4316 + porttrees_data = []
4317 + for path in porttrees:
4318 + src_db = portdb._pregen_auxdb.get(path)
4319 + if src_db is None and \
4320 + os.path.isdir(os.path.join(path, 'metadata', 'cache')):
4321 + src_db = portdb.metadbmodule(
4322 + path, 'metadata/cache', auxdbkeys, readonly=True)
4323 + try:
4324 + src_db.ec = portdb._repo_info[path].eclass_db
4325 + except AttributeError:
4326 + pass
4327 +
4328 + if src_db is not None:
4329 + porttrees_data.append(TreeData(portdb.auxdb[path],
4330 + portdb._repo_info[path].eclass_db, path, src_db))
4331 +
4332 + porttrees = [tree_data.path for tree_data in porttrees_data]
4333 +
4334 + isatty = sys.stdout.isatty()
4335 + quiet = not isatty or '--quiet' in myopts
4336 + onProgress = None
4337 + if not quiet:
4338 + progressBar = portage.output.TermProgressBar()
4339 + progressHandler = ProgressHandler()
4340 + onProgress = progressHandler.onProgress
4341 + def display():
4342 + progressBar.set(progressHandler.curval, progressHandler.maxval)
4343 + progressHandler.display = display
4344 + def sigwinch_handler(signum, frame):
4345 + lines, progressBar.term_columns = \
4346 + portage.output.get_term_size()
4347 + signal.signal(signal.SIGWINCH, sigwinch_handler)
4348 +
4349 + # Temporarily override portdb.porttrees so portdb.cp_all()
4350 + # will only return the relevant subset.
4351 + portdb_porttrees = portdb.porttrees
4352 + portdb.porttrees = porttrees
4353 + try:
4354 + cp_all = portdb.cp_all()
4355 + finally:
4356 + portdb.porttrees = portdb_porttrees
4357 +
4358 + curval = 0
4359 + maxval = len(cp_all)
4360 + if onProgress is not None:
4361 + onProgress(maxval, curval)
4362 +
4363 + from portage.cache.util import quiet_mirroring
4364 + from portage import eapi_is_supported, \
4365 + _validate_cache_for_unsupported_eapis
4366 +
4367 + # TODO: Display error messages, but do not interfere with the progress bar.
4368 + # Here's how:
4369 + # 1) erase the progress bar
4370 + # 2) show the error message
4371 + # 3) redraw the progress bar on a new line
4372 + noise = quiet_mirroring()
4373 +
4374 + for cp in cp_all:
4375 + for tree_data in porttrees_data:
4376 + for cpv in portdb.cp_list(cp, mytree=tree_data.path):
4377 + tree_data.valid_nodes.add(cpv)
4378 + try:
4379 + src = tree_data.src_db[cpv]
4380 + except KeyError, e:
4381 + noise.missing_entry(cpv)
4382 + del e
4383 + continue
4384 + except CacheError, ce:
4385 + noise.exception(cpv, ce)
4386 + del ce
4387 + continue
4388 +
4389 + eapi = src.get('EAPI')
4390 + if not eapi:
4391 + eapi = '0'
4392 + eapi = eapi.lstrip('-')
4393 + eapi_supported = eapi_is_supported(eapi)
4394 + if not eapi_supported:
4395 + if not _validate_cache_for_unsupported_eapis:
4396 + noise.misc(cpv, "unable to validate " + \
4397 + "cache for EAPI='%s'" % eapi)
4398 + continue
4399 +
4400 + dest = None
4401 + try:
4402 + dest = tree_data.dest_db[cpv]
4403 + except (KeyError, CacheError):
4404 + pass
4405 +
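+ # Treat an empty or '0' EAPI as unset in both entries so the comparison below is consistent.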
4406 + for d in (src, dest):
4407 + if d is not None and d.get('EAPI') in ('', '0'):
4408 + del d['EAPI']
4409 +
4410 + if dest is not None:
4411 + if not (dest['_mtime_'] == src['_mtime_'] and \
4412 + tree_data.eclass_db.is_eclass_data_valid(
4413 + dest['_eclasses_']) and \
4414 + set(dest['_eclasses_']) == set(src['_eclasses_'])):
4415 + dest = None
4416 + else:
4417 + # We don't want to skip the write unless we're really
4418 + # sure that the existing cache is identical, so don't
4419 + # trust _mtime_ and _eclasses_ alone.
4420 + for k in set(chain(src, dest)).difference(
4421 + ('_mtime_', '_eclasses_')):
4422 + if dest.get(k, '') != src.get(k, ''):
4423 + dest = None
4424 + break
4425 +
4426 + if dest is not None:
4427 + # The existing data is valid and identical,
4428 + # so there's no need to overwrite it.
4429 + continue
4430 +
4431 + try:
4432 + inherited = src.get('INHERITED', '')
4433 + eclasses = src.get('_eclasses_')
4434 + except CacheError, ce:
4435 + noise.exception(cpv, ce)
4436 + del ce
4437 + continue
4438 +
4439 + if eclasses is not None:
4440 + if not tree_data.eclass_db.is_eclass_data_valid(
4441 + src['_eclasses_']):
4442 + noise.eclass_stale(cpv)
4443 + continue
4444 + inherited = eclasses
4445 + else:
4446 + inherited = inherited.split()
4447 +
4448 + if tree_data.src_db.complete_eclass_entries and \
4449 + eclasses is None:
4450 + noise.corruption(cpv, "missing _eclasses_ field")
4451 + continue
4452 +
4453 + if inherited:
4454 + # Even if _eclasses_ already exists, replace it with data from
4455 + # eclass_cache, in order to insert local eclass paths.
4456 + try:
4457 + eclasses = tree_data.eclass_db.get_eclass_data(inherited)
4458 + except KeyError:
4459 + # INHERITED contains a non-existent eclass.
4460 + noise.eclass_stale(cpv)
4461 + continue
4462 +
4463 + if eclasses is None:
4464 + noise.eclass_stale(cpv)
4465 + continue
4466 + src['_eclasses_'] = eclasses
4467 + else:
4468 + src['_eclasses_'] = {}
4469 +
4470 + if not eapi_supported:
4471 + src = {
4472 + 'EAPI' : '-' + eapi,
4473 + '_mtime_' : src['_mtime_'],
4474 + '_eclasses_' : src['_eclasses_'],
4475 + }
4476 +
4477 + try:
4478 + tree_data.dest_db[cpv] = src
4479 + except CacheError, ce:
4480 + noise.exception(cpv, ce)
4481 + del ce
4482 +
4483 + curval += 1
4484 + if onProgress is not None:
4485 + onProgress(maxval, curval)
4486 +
4487 + if onProgress is not None:
4488 + onProgress(maxval, curval)
4489 +
4490 + for tree_data in porttrees_data:
4491 + try:
4492 + dead_nodes = set(tree_data.dest_db.iterkeys())
4493 + except CacheError, e:
4494 + writemsg_level("Error listing cache entries for " + \
4495 + "'%s': %s, continuing...\n" % (tree_data.path, e),
4496 + level=logging.ERROR, noiselevel=-1)
4497 + del e
4498 + else:
4499 + dead_nodes.difference_update(tree_data.valid_nodes)
4500 + for cpv in dead_nodes:
4501 + try:
4502 + del tree_data.dest_db[cpv]
4503 + except (KeyError, CacheError):
4504 + pass
4505 +
4506 + if not quiet:
4507 + # make sure the final progress is displayed
4508 + progressHandler.display()
4509 + print
4510 + signal.signal(signal.SIGWINCH, signal.SIG_DFL)
4511 +
4512 + sys.stdout.flush()
4513 + os.umask(old_umask)
4514 +
4515 +def action_regen(settings, portdb, max_jobs, max_load):
4516 + xterm_titles = "notitles" not in settings.features
4517 + emergelog(xterm_titles, " === regen")
4518 + #regenerate cache entries
4519 + portage.writemsg_stdout("Regenerating cache entries...\n")
4520 + try:
4521 + os.close(sys.stdin.fileno())
4522 + except SystemExit, e:
4523 + raise # Needed else can't exit
4524 + except:
4525 + pass
4526 + sys.stdout.flush()
4527 +
4528 + regen = MetadataRegen(portdb, max_jobs=max_jobs, max_load=max_load)
4529 + regen.run()
4530 +
4531 + portage.writemsg_stdout("done!\n")
4532 + return regen.returncode
4533 +
4534 +def action_search(root_config, myopts, myfiles, spinner):
4535 + if not myfiles:
4536 + print "emerge: no search terms provided."
4537 + else:
4538 + searchinstance = search(root_config,
4539 + spinner, "--searchdesc" in myopts,
4540 + "--quiet" not in myopts, "--usepkg" in myopts,
4541 + "--usepkgonly" in myopts)
4542 + for mysearch in myfiles:
4543 + try:
4544 + searchinstance.execute(mysearch)
4545 + except re.error, comment:
4546 + print "\n!!! Regular expression error in \"%s\": %s" % ( mysearch, comment )
4547 + sys.exit(1)
4548 + searchinstance.output()
4549 +
4550 +def action_sync(settings, trees, mtimedb, myopts, myaction):
4551 + xterm_titles = "notitles" not in settings.features
4552 + emergelog(xterm_titles, " === sync")
4553 + portdb = trees[settings["ROOT"]]["porttree"].dbapi
4554 + myportdir = portdb.porttree_root
4555 + out = portage.output.EOutput()
4556 + if not myportdir:
4557 + sys.stderr.write("!!! PORTDIR is undefined. Is /etc/make.globals missing?\n")
4558 + sys.exit(1)
4559 + if myportdir[-1]=="/":
4560 + myportdir=myportdir[:-1]
4561 + try:
4562 + st = os.stat(myportdir)
4563 + except OSError:
4564 + st = None
4565 + if st is None:
4566 + print ">>>",myportdir,"not found, creating it."
4567 + os.makedirs(myportdir,0755)
4568 + st = os.stat(myportdir)
4569 +
4570 + spawn_kwargs = {}
4571 + spawn_kwargs["env"] = settings.environ()
4572 + if 'usersync' in settings.features and \
4573 + portage.data.secpass >= 2 and \
4574 + (st.st_uid != os.getuid() and st.st_mode & 0700 or \
4575 + st.st_gid != os.getgid() and st.st_mode & 0070):
4576 + try:
4577 + homedir = pwd.getpwuid(st.st_uid).pw_dir
4578 + except KeyError:
4579 + pass
4580 + else:
4581 + # Drop privileges when syncing, in order to match
4582 + # existing uid/gid settings.
4583 + spawn_kwargs["uid"] = st.st_uid
4584 + spawn_kwargs["gid"] = st.st_gid
4585 + spawn_kwargs["groups"] = [st.st_gid]
4586 + spawn_kwargs["env"]["HOME"] = homedir
4587 + umask = 0002
4588 + if not st.st_mode & 0020:
4589 + umask = umask | 0020
4590 + spawn_kwargs["umask"] = umask
4591 +
4592 + syncuri = settings.get("SYNC", "").strip()
4593 + if not syncuri:
4594 + writemsg_level("!!! SYNC is undefined. Is /etc/make.globals missing?\n",
4595 + noiselevel=-1, level=logging.ERROR)
4596 + return 1
4597 +
4598 + vcs_dirs = frozenset([".git", ".svn", "CVS", ".hg"])
4599 + vcs_dirs = vcs_dirs.intersection(os.listdir(myportdir))
4600 +
4601 + os.umask(0022)
4602 + dosyncuri = syncuri
4603 + updatecache_flg = False
4604 + if myaction == "metadata":
4605 + print "skipping sync"
4606 + updatecache_flg = True
4607 + elif ".git" in vcs_dirs:
4608 + # Update existing git repository, and ignore the syncuri. We are
4609 + # going to trust the user and assume that the user is in the branch
4610 + # that he/she wants updated. We'll let the user manage branches with
4611 + # git directly.
4612 + if portage.process.find_binary("git") is None:
4613 + msg = ["Command not found: git",
4614 + "Type \"emerge dev-util/git\" to enable git support."]
4615 + for l in msg:
4616 + writemsg_level("!!! %s\n" % l,
4617 + level=logging.ERROR, noiselevel=-1)
4618 + return 1
4619 + msg = ">>> Starting git pull in %s..." % myportdir
4620 + emergelog(xterm_titles, msg )
4621 + writemsg_level(msg + "\n")
4622 + exitcode = portage.process.spawn_bash("cd %s ; git pull" % \
4623 + (portage._shell_quote(myportdir),), **spawn_kwargs)
4624 + if exitcode != os.EX_OK:
4625 + msg = "!!! git pull error in %s." % myportdir
4626 + emergelog(xterm_titles, msg)
4627 + writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
4628 + return exitcode
4629 + msg = ">>> Git pull in %s successful" % myportdir
4630 + emergelog(xterm_titles, msg)
4631 + writemsg_level(msg + "\n")
4632 + exitcode = git_sync_timestamps(settings, myportdir)
4633 + if exitcode == os.EX_OK:
4634 + updatecache_flg = True
4635 + elif syncuri[:8]=="rsync://":
4636 + for vcs_dir in vcs_dirs:
4637 + writemsg_level(("!!! %s appears to be under revision " + \
4638 + "control (contains %s).\n!!! Aborting rsync sync.\n") % \
4639 + (myportdir, vcs_dir), level=logging.ERROR, noiselevel=-1)
4640 + return 1
4641 + if not os.path.exists(EPREFIX + "/usr/bin/rsync"):
4642 + print "!!! " + EPREFIX + "/usr/bin/rsync does not exist, so rsync support is disabled."
4643 + print "!!! Type \"emerge net-misc/rsync\" to enable rsync support."
4644 + sys.exit(1)
4645 + mytimeout=180
4646 +
4647 + rsync_opts = []
4648 + if settings["PORTAGE_RSYNC_OPTS"] == "":
4649 + portage.writemsg("PORTAGE_RSYNC_OPTS empty or unset, using hardcoded defaults\n")
4650 + rsync_opts.extend([
4651 + "--recursive", # Recurse directories
4652 + "--links", # Consider symlinks
4653 + "--safe-links", # Ignore links outside of tree
4654 + "--perms", # Preserve permissions
4655 + "--times", # Preserve mod times
4656 + "--compress", # Compress the data transmitted
4657 + "--force", # Force deletion on non-empty dirs
4658 + "--whole-file", # Don't do block transfers, only entire files
4659 + "--delete", # Delete files that aren't in the master tree
4660 + "--stats", # Show final statistics about what was transferred
4661 + "--timeout="+str(mytimeout), # IO timeout if not done in X seconds
4662 + "--exclude=/distfiles", # Exclude distfiles from consideration
4663 + "--exclude=/local", # Exclude local from consideration
4664 + "--exclude=/packages", # Exclude packages from consideration
4665 + ])
4666 +
4667 + else:
4668 + # The below validation is not needed when using the above hardcoded
4669 + # defaults.
4670 +
4671 + portage.writemsg("Using PORTAGE_RSYNC_OPTS instead of hardcoded defaults\n", 1)
4672 + rsync_opts.extend(
4673 + shlex.split(settings.get("PORTAGE_RSYNC_OPTS","")))
4674 + for opt in ("--recursive", "--times"):
4675 + if opt not in rsync_opts:
4676 + portage.writemsg(yellow("WARNING:") + " adding required option " + \
4677 + "%s not included in PORTAGE_RSYNC_OPTS\n" % opt)
4678 + rsync_opts.append(opt)
4679 +
4680 + for exclude in ("distfiles", "local", "packages"):
4681 + opt = "--exclude=/%s" % exclude
4682 + if opt not in rsync_opts:
4683 + portage.writemsg(yellow("WARNING:") + \
4684 + " adding required option %s not included in " % opt + \
4685 + "PORTAGE_RSYNC_OPTS (can be overridden with --exclude='!')\n")
4686 + rsync_opts.append(opt)
4687 +
4688 + if syncuri.rstrip("/").endswith(".gentoo.org/gentoo-portage"):
4689 + def rsync_opt_startswith(opt_prefix):
4690 + for x in rsync_opts:
4691 + if x.startswith(opt_prefix):
4692 + return True
4693 + return False
4694 +
4695 + if not rsync_opt_startswith("--timeout="):
4696 + rsync_opts.append("--timeout=%d" % mytimeout)
4697 +
4698 + for opt in ("--compress", "--whole-file"):
4699 + if opt not in rsync_opts:
4700 + portage.writemsg(yellow("WARNING:") + " adding required option " + \
4701 + "%s not included in PORTAGE_RSYNC_OPTS\n" % opt)
4702 + rsync_opts.append(opt)
4703 +
4704 + if "--quiet" in myopts:
4705 + rsync_opts.append("--quiet") # Shut up a lot
4706 + else:
4707 + rsync_opts.append("--verbose") # Print filelist
4708 +
4709 + if "--verbose" in myopts:
4710 + rsync_opts.append("--progress") # Progress meter for each file
4711 +
4712 + if "--debug" in myopts:
4713 + rsync_opts.append("--checksum") # Force checksum on all files
4714 +
4715 + # Real local timestamp file.
4716 + servertimestampfile = os.path.join(
4717 + myportdir, "metadata", "timestamp.chk")
4718 +
4719 + content = portage.util.grabfile(servertimestampfile)
4720 + mytimestamp = 0
4721 + if content:
4722 + try:
4723 + mytimestamp = time.mktime(time.strptime(content[0],
4724 + "%a, %d %b %Y %H:%M:%S +0000"))
4725 + except (OverflowError, ValueError):
4726 + pass
4727 + del content
4728 +
4729 + try:
4730 + rsync_initial_timeout = \
4731 + int(settings.get("PORTAGE_RSYNC_INITIAL_TIMEOUT", "15"))
4732 + except ValueError:
4733 + rsync_initial_timeout = 15
4734 +
4735 + try:
4736 + maxretries=int(settings["PORTAGE_RSYNC_RETRIES"])
4737 + except SystemExit, e:
4738 + raise # Needed else can't exit
4739 + except:
4740 + maxretries=3 #default number of retries
4741 +
4742 + retries=0
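+ # Split the rsync URI into optional user@, hostname, and optional :port components.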
4743 + user_name, hostname, port = re.split(
4744 + "rsync://([^:/]+@)?([^:/]*)(:[0-9]+)?", syncuri, maxsplit=3)[1:4]
4745 + if port is None:
4746 + port=""
4747 + if user_name is None:
4748 + user_name=""
4749 + updatecache_flg=True
4750 + all_rsync_opts = set(rsync_opts)
4751 + extra_rsync_opts = shlex.split(
4752 + settings.get("PORTAGE_RSYNC_EXTRA_OPTS",""))
4753 + all_rsync_opts.update(extra_rsync_opts)
4754 + family = socket.AF_INET
4755 + if "-4" in all_rsync_opts or "--ipv4" in all_rsync_opts:
4756 + family = socket.AF_INET
4757 + elif socket.has_ipv6 and \
4758 + ("-6" in all_rsync_opts or "--ipv6" in all_rsync_opts):
4759 + family = socket.AF_INET6
4760 + ips=[]
4761 + SERVER_OUT_OF_DATE = -1
4762 + EXCEEDED_MAX_RETRIES = -2
4763 + while (1):
4764 + if ips:
4765 + del ips[0]
4766 + if ips==[]:
4767 + try:
4768 + for addrinfo in socket.getaddrinfo(
4769 + hostname, None, family, socket.SOCK_STREAM):
4770 + if socket.has_ipv6 and addrinfo[0] == socket.AF_INET6:
4771 + # IPv6 addresses need to be enclosed in square brackets
4772 + ips.append("[%s]" % addrinfo[4][0])
4773 + else:
4774 + ips.append(addrinfo[4][0])
4775 + from random import shuffle
4776 + shuffle(ips)
4777 + except SystemExit, e:
4778 + raise # Needed else can't exit
4779 + except Exception, e:
4780 + print "Notice:",str(e)
4781 + dosyncuri=syncuri
4782 +
4783 + if ips:
4784 + try:
4785 + dosyncuri = syncuri.replace(
4786 + "//" + user_name + hostname + port + "/",
4787 + "//" + user_name + ips[0] + port + "/", 1)
4788 + except SystemExit, e:
4789 + raise # Needed else can't exit
4790 + except Exception, e:
4791 + print "Notice:",str(e)
4792 + dosyncuri=syncuri
4793 +
4794 + if (retries==0):
4795 + if "--ask" in myopts:
4796 + if userquery("Do you want to sync your Portage tree with the mirror at\n" + blue(dosyncuri) + bold("?"))=="No":
4797 + print
4798 + print "Quitting."
4799 + print
4800 + sys.exit(0)
4801 + emergelog(xterm_titles, ">>> Starting rsync with " + dosyncuri)
4802 + if "--quiet" not in myopts:
4803 + print ">>> Starting rsync with "+dosyncuri+"..."
4804 + else:
4805 + emergelog(xterm_titles,
4806 + ">>> Starting retry %d of %d with %s" % \
4807 + (retries,maxretries,dosyncuri))
4808 + print "\n\n>>> Starting retry %d of %d with %s" % (retries,maxretries,dosyncuri)
4809 +
4810 + if mytimestamp != 0 and "--quiet" not in myopts:
4811 + print ">>> Checking server timestamp ..."
4812 +
4813 + rsynccommand = ["/usr/bin/rsync"] + rsync_opts + extra_rsync_opts
4814 +
4815 + if "--debug" in myopts:
4816 + print rsynccommand
4817 +
4818 + exitcode = os.EX_OK
4819 + servertimestamp = 0
4820 + # Even if there's no timestamp available locally, fetch the
4821 + # timestamp anyway as an initial probe to verify that the server is
4822 + # responsive. This protects us from hanging indefinitely on a
4823 + # connection attempt to an unresponsive server which rsync's
4824 + # --timeout option does not prevent.
4825 + if True:
4826 + # Temporary file for remote server timestamp comparison.
4827 + from tempfile import mkstemp
4828 + fd, tmpservertimestampfile = mkstemp()
4829 + os.close(fd)
4830 + mycommand = rsynccommand[:]
4831 + mycommand.append(dosyncuri.rstrip("/") + \
4832 + "/metadata/timestamp.chk")
4833 + mycommand.append(tmpservertimestampfile)
4834 + content = None
4835 + mypids = []
4836 + try:
4837 + def timeout_handler(signum, frame):
4838 + raise portage.exception.PortageException("timed out")
4839 + signal.signal(signal.SIGALRM, timeout_handler)
4840 + # Timeout here in case the server is unresponsive. The
4841 + # --timeout rsync option doesn't apply to the initial
4842 + # connection attempt.
4843 + if rsync_initial_timeout:
4844 + signal.alarm(rsync_initial_timeout)
4845 + try:
4846 + mypids.extend(portage.process.spawn(
4847 + mycommand, env=settings.environ(), returnpid=True))
4848 + exitcode = os.waitpid(mypids[0], 0)[1]
4849 + content = portage.grabfile(tmpservertimestampfile)
4850 + finally:
4851 + if rsync_initial_timeout:
4852 + signal.alarm(0)
4853 + try:
4854 + os.unlink(tmpservertimestampfile)
4855 + except OSError:
4856 + pass
4857 + except portage.exception.PortageException, e:
4858 + # timed out
4859 + print e
4860 + del e
4861 + if mypids and os.waitpid(mypids[0], os.WNOHANG) == (0,0):
4862 + os.kill(mypids[0], signal.SIGTERM)
4863 + os.waitpid(mypids[0], 0)
4864 + # This is the same code rsync uses for timeout.
4865 + exitcode = 30
4866 + else:
4867 + if exitcode != os.EX_OK:
4868 + if exitcode & 0xff:
4869 + exitcode = (exitcode & 0xff) << 8
4870 + else:
4871 + exitcode = exitcode >> 8
4872 + if mypids:
4873 + portage.process.spawned_pids.remove(mypids[0])
4874 + if content:
4875 + try:
4876 + servertimestamp = time.mktime(time.strptime(
4877 + content[0], "%a, %d %b %Y %H:%M:%S +0000"))
4878 + except (OverflowError, ValueError):
4879 + pass
4880 + del mycommand, mypids, content
4881 + if exitcode == os.EX_OK:
4882 + if (servertimestamp != 0) and (servertimestamp == mytimestamp):
4883 + emergelog(xterm_titles,
4884 + ">>> Cancelling sync -- Already current.")
4885 + print
4886 + print ">>>"
4887 + print ">>> Timestamps on the server and in the local repository are the same."
4888 + print ">>> Cancelling all further sync action. You are already up to date."
4889 + print ">>>"
4890 + print ">>> In order to force sync, remove '%s'." % servertimestampfile
4891 + print ">>>"
4892 + print
4893 + sys.exit(0)
4894 + elif (servertimestamp != 0) and (servertimestamp < mytimestamp):
4895 + emergelog(xterm_titles,
4896 + ">>> Server out of date: %s" % dosyncuri)
4897 + print
4898 + print ">>>"
4899 + print ">>> SERVER OUT OF DATE: %s" % dosyncuri
4900 + print ">>>"
4901 + print ">>> In order to force sync, remove '%s'." % servertimestampfile
4902 + print ">>>"
4903 + print
4904 + exitcode = SERVER_OUT_OF_DATE
4905 + elif (servertimestamp == 0) or (servertimestamp > mytimestamp):
4906 + # actual sync
4907 + mycommand = rsynccommand + [dosyncuri+"/", myportdir]
4908 + exitcode = portage.process.spawn(mycommand, **spawn_kwargs)
4909 + if exitcode in [0,1,3,4,11,14,20,21]:
4910 + break
4911 + elif exitcode in [1,3,4,11,14,20,21]:
4912 + break
4913 + else:
4914 + # Code 2 indicates protocol incompatibility, which is expected
4915 + # for servers with protocol < 29 that don't support
4916 + # --prune-empty-directories. Retry for a server that supports
4917 + # at least rsync protocol version 29 (>=rsync-2.6.4).
4918 + pass
4919 +
4920 + retries=retries+1
4921 +
4922 + if retries<=maxretries:
4923 + print ">>> Retrying..."
4924 + time.sleep(11)
4925 + else:
4926 + # over retries
4927 + # exit loop
4928 + updatecache_flg=False
4929 + exitcode = EXCEEDED_MAX_RETRIES
4930 + break
4931 +
4932 + if (exitcode==0):
4933 + emergelog(xterm_titles, "=== Sync completed with %s" % dosyncuri)
4934 + elif exitcode == SERVER_OUT_OF_DATE:
4935 + sys.exit(1)
4936 + elif exitcode == EXCEEDED_MAX_RETRIES:
4937 + sys.stderr.write(
4938 + ">>> Exceeded PORTAGE_RSYNC_RETRIES: %s\n" % maxretries)
4939 + sys.exit(1)
4940 + elif (exitcode>0):
4941 + msg = []
4942 + if exitcode==1:
4943 + msg.append("Rsync has reported that there is a syntax error. Please ensure")
4944 + msg.append("that your SYNC statement is proper.")
4945 + msg.append("SYNC=" + settings["SYNC"])
4946 + elif exitcode==11:
4947 + msg.append("Rsync has reported that there is a File IO error. Normally")
4948 + msg.append("this means your disk is full, but can be caused by corruption")
4949 + msg.append("on the filesystem that contains PORTDIR. Please investigate")
4950 + msg.append("and try again after the problem has been fixed.")
4951 + msg.append("PORTDIR=" + settings["PORTDIR"])
4952 + elif exitcode==20:
4953 + msg.append("Rsync was killed before it finished.")
4954 + else:
4955 + msg.append("Rsync has not successfully finished. It is recommended that you keep")
4956 + msg.append("trying or that you use the 'emerge-webrsync' option if you are unable")
4957 + msg.append("to use rsync due to firewall or other restrictions. This should be a")
4958 + msg.append("temporary problem unless complications exist with your network")
4959 + msg.append("(and possibly your system's filesystem) configuration.")
4960 + for line in msg:
4961 + out.eerror(line)
4962 + sys.exit(exitcode)
4963 + elif syncuri[:6]=="cvs://":
4964 + if not os.path.exists(EPREFIX + "/usr/bin/cvs"):
4965 + print "!!! " + EPREFIX + "/usr/bin/cvs does not exist, so CVS support is disabled."
4966 + print "!!! Type \"emerge dev-util/cvs\" to enable CVS support."
4967 + sys.exit(1)
4968 + cvsroot=syncuri[6:]
4969 + cvsdir=os.path.dirname(myportdir)
4970 + if not os.path.exists(myportdir+"/CVS"):
4971 + #initial checkout
4972 + print ">>> Starting initial cvs checkout with "+syncuri+"..."
4973 + if os.path.exists(cvsdir+"/gentoo-x86"):
4974 + print "!!! existing",cvsdir+"/gentoo-x86 directory; exiting."
4975 + sys.exit(1)
4976 + try:
4977 + os.rmdir(myportdir)
4978 + except OSError, e:
4979 + if e.errno != errno.ENOENT:
4980 + sys.stderr.write(
4981 + "!!! existing '%s' directory; exiting.\n" % myportdir)
4982 + sys.exit(1)
4983 + del e
4984 + if portage.spawn("cd "+cvsdir+"; cvs -z0 -d "+cvsroot+" co -P gentoo-x86",settings,free=1):
4985 + print "!!! cvs checkout error; exiting."
4986 + sys.exit(1)
4987 + os.rename(os.path.join(cvsdir, "gentoo-x86"), myportdir)
4988 + else:
4989 + #cvs update
4990 + print ">>> Starting cvs update with "+syncuri+"..."
4991 + retval = portage.process.spawn_bash(
4992 + "cd %s; cvs -z0 -q update -dP" % \
4993 + (portage._shell_quote(myportdir),), **spawn_kwargs)
4994 + if retval != os.EX_OK:
4995 + sys.exit(retval)
4996 + dosyncuri = syncuri
4997 + elif syncuri[:11]=="svn+http://" or syncuri[:6]=="svn://" or syncuri[:12]=="svn+https://":
4998 + # Gentoo Prefix hardcoded SVN support
4999 + if not os.path.exists(EPREFIX + "/usr/bin/svn"):
5000 + print "!!! " + EPREFIX + "/usr/bin/svn does not exist, so SVN support is disabled."
5001 + print "!!! Type \"emerge dev-util/subversion\" to enable SVN support."
5002 + sys.exit(1)
5003 + svndir=os.path.dirname(myportdir)
5004 + if not os.path.exists(myportdir+"/.svn"):
5005 + #initial checkout
5006 + if syncuri[:4] == "svn+":
5007 + syncuri = syncuri[4:]
5008 + print ">>> Starting initial svn checkout with "+syncuri+"..."
5009 + if os.path.exists(svndir+"/prefix-overlay"):
5010 + print "!!! existing",svndir+"/prefix-overlay directory; exiting."
5011 + sys.exit(1)
5012 + try:
5013 + os.rmdir(myportdir)
5014 + except OSError, e:
5015 + if e.errno != errno.ENOENT:
5016 + sys.stderr.write(
5017 + "!!! existing '%s' directory; exiting.\n" % myportdir)
5018 + sys.exit(1)
5019 + del e
5020 + if portage.spawn("cd "+svndir+"; svn checkout "+syncuri,settings,free=1):
5021 + print "!!! svn checkout error; exiting."
5022 + sys.exit(1)
5023 + os.rename(os.path.join(svndir, "prefix-overlay"), myportdir)
5024 + else:
5025 + #svn update
5026 + print ">>> Starting svn update..."
5027 + retval = portage.spawn("cd '%s'; svn update" % myportdir, \
5028 + settings, free=1)
5029 + if retval != os.EX_OK:
5030 + sys.exit(retval)
5031 +
5032 + # write timestamp.chk
5033 + try:
5034 + if not os.path.exists(os.path.join(myportdir, "metadata")):
5035 + os.mkdir(os.path.join(myportdir, "metadata"))
5036 + f = open(os.path.join(myportdir, "metadata", "timestamp.chk"), 'w')
5037 + f.write(time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()))
5038 + f.write('\n')
5039 + f.close()
5040 + except IOError, e:
5041 + # too bad, next time better luck!
5042 + pass
5043 +
5044 + dosyncuri = syncuri
5045 + else:
5046 + writemsg_level("!!! Unrecognized protocol: SYNC='%s'\n" % (syncuri,),
5047 + noiselevel=-1, level=logging.ERROR)
5048 + return 1
5049 +
5050 + if updatecache_flg and \
5051 + myaction != "metadata" and \
5052 + "metadata-transfer" not in settings.features:
5053 + updatecache_flg = False
5054 +
5055 + # Reload the whole config from scratch.
5056 + settings, trees, mtimedb = load_emerge_config(trees=trees)
5057 + root_config = trees[settings["ROOT"]]["root_config"]
5058 + portdb = trees[settings["ROOT"]]["porttree"].dbapi
5059 +
5060 + if updatecache_flg and \
5061 + os.path.exists(os.path.join(myportdir, 'metadata', 'cache')):
5062 +
5063 + # Only update cache for myportdir since that's
5064 + # the only one that's been synced here.
5065 + action_metadata(settings, portdb, myopts, porttrees=[myportdir])
5066 +
5067 + if portage._global_updates(trees, mtimedb["updates"]):
5068 + mtimedb.commit()
5069 + # Reload the whole config from scratch.
5070 + settings, trees, mtimedb = load_emerge_config(trees=trees)
5071 + portdb = trees[settings["ROOT"]]["porttree"].dbapi
5072 + root_config = trees[settings["ROOT"]]["root_config"]
5073 +
5074 + mybestpv = portdb.xmatch("bestmatch-visible",
5075 + portage.const.PORTAGE_PACKAGE_ATOM)
5076 + mypvs = portage.best(
5077 + trees[settings["ROOT"]]["vartree"].dbapi.match(
5078 + portage.const.PORTAGE_PACKAGE_ATOM))
5079 +
5080 + chk_updated_cfg_files("/", settings.get("CONFIG_PROTECT","").split())
5081 +
5082 + if myaction != "metadata":
5083 + if os.access(portage.USER_CONFIG_PATH + "/bin/post_sync", os.X_OK):
5084 + retval = portage.process.spawn(
5085 + [os.path.join(portage.USER_CONFIG_PATH, "bin", "post_sync"),
5086 + dosyncuri], env=settings.environ())
5087 + if retval != os.EX_OK:
5088 + print red(" * ")+bold("spawn failed of "+ portage.USER_CONFIG_PATH + "/bin/post_sync")
5089 +
5090 + if(mybestpv != mypvs) and not "--quiet" in myopts:
5091 + print
5092 + print red(" * ")+bold("An update to portage is available.")+" It is _highly_ recommended"
5093 + print red(" * ")+"that you update portage now, before any other packages are updated."
5094 + print
5095 + print red(" * ")+"To update portage, run 'emerge portage' now."
5096 + print
5097 +
5098 + display_news_notification(root_config, myopts)
5099 + return os.EX_OK
5100 +
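
For reference, a minimal sketch of the user/host/port split that the rsync
branch above performs with re.split(); the SYNC URI here is a made-up
example, not something taken from this commit:

    import re

    syncuri = "rsync://user@rsync.example.org:873/gentoo-portage"
    user_name, hostname, port = re.split(
        "rsync://([^:/]+@)?([^:/]*)(:[0-9]+)?", syncuri, maxsplit=3)[1:4]
    # -> 'user@', 'rsync.example.org', ':873'
    # Groups that do not match are returned as None, which is why the
    # caller above normalizes them to "" before substituting a resolved
    # IP address back into the URI.
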
5101 +def action_uninstall(settings, trees, ldpath_mtimes,
5102 + opts, action, files, spinner):
5103 +
5104 + # For backward compat, some actions do not require leading '='.
5105 + ignore_missing_eq = action in ('clean', 'unmerge')
5106 + root = settings['ROOT']
5107 + vardb = trees[root]['vartree'].dbapi
5108 + valid_atoms = []
5109 + lookup_owners = []
5110 +
5111 + # Ensure atoms are valid before calling unmerge().
5112 + # For backward compat, leading '=' is not required.
5113 + for x in files:
5114 + if is_valid_package_atom(x) or \
5115 + (ignore_missing_eq and is_valid_package_atom('=' + x)):
5116 +
5117 + try:
5118 + valid_atoms.append(
5119 + portage.dep_expand(x, mydb=vardb, settings=settings))
5120 + except portage.exception.AmbiguousPackageName, e:
5121 + msg = "The short ebuild name \"" + x + \
5122 + "\" is ambiguous. Please specify " + \
5123 + "one of the following " + \
5124 + "fully-qualified ebuild names instead:"
5125 + for line in textwrap.wrap(msg, 70):
5126 + writemsg_level("!!! %s\n" % (line,),
5127 + level=logging.ERROR, noiselevel=-1)
5128 + for i in e[0]:
5129 + writemsg_level(" %s\n" % colorize("INFORM", i),
5130 + level=logging.ERROR, noiselevel=-1)
5131 + writemsg_level("\n", level=logging.ERROR, noiselevel=-1)
5132 + return 1
5133 +
5134 + elif x.startswith(os.sep):
5135 + if not x.startswith(root):
5136 + writemsg_level(("!!! '%s' does not start with" + \
5137 + " $ROOT.\n") % x, level=logging.ERROR, noiselevel=-1)
5138 + return 1
5139 + # Queue these up since it's most efficient to handle
5140 + # multiple files in a single iter_owners() call.
5141 + lookup_owners.append(x)
5142 +
5143 + else:
5144 + msg = []
5145 + msg.append("'%s' is not a valid package atom." % (x,))
5146 + msg.append("Please check ebuild(5) for full details.")
5147 + writemsg_level("".join("!!! %s\n" % line for line in msg),
5148 + level=logging.ERROR, noiselevel=-1)
5149 + return 1
5150 +
5151 + if lookup_owners:
5152 + relative_paths = []
5153 + search_for_multiple = False
5154 + if len(lookup_owners) > 1:
5155 + search_for_multiple = True
5156 +
5157 + for x in lookup_owners:
5158 + if not search_for_multiple and os.path.isdir(x):
5159 + search_for_multiple = True
5160 + relative_paths.append(x[len(root):])
5161 +
5162 + owners = set()
5163 + for pkg, relative_path in \
5164 + vardb._owners.iter_owners(relative_paths):
5165 + owners.add(pkg.mycpv)
5166 + if not search_for_multiple:
5167 + break
5168 +
5169 + if owners:
5170 + for cpv in owners:
5171 + slot = vardb.aux_get(cpv, ['SLOT'])[0]
5172 + if not slot:
5173 + # portage now masks packages with missing slot, but it's
5174 + # possible that one was installed by an older version
5175 + atom = portage.cpv_getkey(cpv)
5176 + else:
5177 + atom = '%s:%s' % (portage.cpv_getkey(cpv), slot)
5178 + valid_atoms.append(portage.dep.Atom(atom))
5179 + else:
5180 + writemsg_level(("!!! '%s' is not claimed " + \
5181 + "by any package.\n") % lookup_owners[0],
5182 + level=logging.WARNING, noiselevel=-1)
5183 +
5184 + if files and not valid_atoms:
5185 + return 1
5186 +
5187 + if action in ('clean', 'unmerge') or \
5188 + (action == 'prune' and "--nodeps" in opts):
5189 + # When given a list of atoms, unmerge them in the order given.
5190 + ordered = action == 'unmerge'
5191 + unmerge(trees[settings["ROOT"]]['root_config'], opts, action,
5192 + valid_atoms, ldpath_mtimes, ordered=ordered)
5193 + rval = os.EX_OK
5194 + elif action == 'deselect':
5195 + rval = action_deselect(settings, trees, opts, valid_atoms)
5196 + else:
5197 + rval = action_depclean(settings, trees, ldpath_mtimes,
5198 + opts, action, valid_atoms, spinner)
5199 +
5200 + return rval
5201 +
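
When the file-owner lookup above resolves to installed packages,
action_uninstall() rebuilds a slot atom from each cpv so that only the
owning slot is unmerged; a small sketch with invented package values:

    import portage

    cpv, slot = "sys-libs/db-4.5.20_p2-r1", "4.5"  # assumed example values
    atom = "%s:%s" % (portage.cpv_getkey(cpv), slot)
    # cpv_getkey() drops the version part, so atom == "sys-libs/db:4.5",
    # which matches every installed sys-libs/db in SLOT 4.5 and nothing else.
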
5202 +def adjust_config(myopts, settings):
5203 + """Make emerge specific adjustments to the config."""
5204 +
5205 + # To enhance usability, make some vars case insensitive by forcing them to
5206 + # lower case.
5207 + for myvar in ("AUTOCLEAN", "NOCOLOR"):
5208 + if myvar in settings:
5209 + settings[myvar] = settings[myvar].lower()
5210 + settings.backup_changes(myvar)
5211 + del myvar
5212 +
5213 + # Kill noauto as it will break merges otherwise.
5214 + if "noauto" in settings.features:
5215 + settings.features.remove('noauto')
5216 + settings['FEATURES'] = ' '.join(sorted(settings.features))
5217 + settings.backup_changes("FEATURES")
5218 +
5219 + CLEAN_DELAY = 5
5220 + try:
5221 + CLEAN_DELAY = int(settings.get("CLEAN_DELAY", str(CLEAN_DELAY)))
5222 + except ValueError, e:
5223 + portage.writemsg("!!! %s\n" % str(e), noiselevel=-1)
5224 + portage.writemsg("!!! Unable to parse integer: CLEAN_DELAY='%s'\n" % \
5225 + settings["CLEAN_DELAY"], noiselevel=-1)
5226 + settings["CLEAN_DELAY"] = str(CLEAN_DELAY)
5227 + settings.backup_changes("CLEAN_DELAY")
5228 +
5229 + EMERGE_WARNING_DELAY = 10
5230 + try:
5231 + EMERGE_WARNING_DELAY = int(settings.get(
5232 + "EMERGE_WARNING_DELAY", str(EMERGE_WARNING_DELAY)))
5233 + except ValueError, e:
5234 + portage.writemsg("!!! %s\n" % str(e), noiselevel=-1)
5235 + portage.writemsg("!!! Unable to parse integer: EMERGE_WARNING_DELAY='%s'\n" % \
5236 + settings["EMERGE_WARNING_DELAY"], noiselevel=-1)
5237 + settings["EMERGE_WARNING_DELAY"] = str(EMERGE_WARNING_DELAY)
5238 + settings.backup_changes("EMERGE_WARNING_DELAY")
5239 +
5240 + if "--quiet" in myopts:
5241 + settings["PORTAGE_QUIET"]="1"
5242 + settings.backup_changes("PORTAGE_QUIET")
5243 +
5244 + if "--verbose" in myopts:
5245 + settings["PORTAGE_VERBOSE"] = "1"
5246 + settings.backup_changes("PORTAGE_VERBOSE")
5247 +
5248 + # Set so that configs will be merged regardless of remembered status
5249 + if ("--noconfmem" in myopts):
5250 + settings["NOCONFMEM"]="1"
5251 + settings.backup_changes("NOCONFMEM")
5252 +
5253 + # Set various debug markers... They should be merged somehow.
5254 + PORTAGE_DEBUG = 0
5255 + try:
5256 + PORTAGE_DEBUG = int(settings.get("PORTAGE_DEBUG", str(PORTAGE_DEBUG)))
5257 + if PORTAGE_DEBUG not in (0, 1):
5258 + portage.writemsg("!!! Invalid value: PORTAGE_DEBUG='%i'\n" % \
5259 + PORTAGE_DEBUG, noiselevel=-1)
5260 + portage.writemsg("!!! PORTAGE_DEBUG must be either 0 or 1\n",
5261 + noiselevel=-1)
5262 + PORTAGE_DEBUG = 0
5263 + except ValueError, e:
5264 + portage.writemsg("!!! %s\n" % str(e), noiselevel=-1)
5265 + portage.writemsg("!!! Unable to parse integer: PORTAGE_DEBUG='%s'\n" %\
5266 + settings["PORTAGE_DEBUG"], noiselevel=-1)
5267 + del e
5268 + if "--debug" in myopts:
5269 + PORTAGE_DEBUG = 1
5270 + settings["PORTAGE_DEBUG"] = str(PORTAGE_DEBUG)
5271 + settings.backup_changes("PORTAGE_DEBUG")
5272 +
5273 + if settings.get("NOCOLOR") not in ("yes","true"):
5274 + portage.output.havecolor = 1
5275 +
5276 + """The explicit --color < y | n > option overrides the NOCOLOR environment
5277 + variable and stdout auto-detection."""
5278 + if "--color" in myopts:
5279 + if "y" == myopts["--color"]:
5280 + portage.output.havecolor = 1
5281 + settings["NOCOLOR"] = "false"
5282 + else:
5283 + portage.output.havecolor = 0
5284 + settings["NOCOLOR"] = "true"
5285 + settings.backup_changes("NOCOLOR")
5286 + elif not sys.stdout.isatty() and settings.get("NOCOLOR") != "no":
5287 + portage.output.havecolor = 0
5288 + settings["NOCOLOR"] = "true"
5289 + settings.backup_changes("NOCOLOR")
5290 +
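
adjust_config() repeats the same parse-integer-with-fallback-and-backup
pattern for CLEAN_DELAY and EMERGE_WARNING_DELAY; a generic helper along
these lines (a sketch only, nothing the patch itself defines) captures it:

    def _int_setting(settings, key, default):
        """Parse settings[key] as an int, fall back to default on error,
        and write the normalized value back for later consumers."""
        try:
            value = int(settings.get(key, str(default)))
        except ValueError:
            value = default
        settings[key] = str(value)
        settings.backup_changes(key)
        return value
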
5291 +def display_missing_pkg_set(root_config, set_name):
5292 +
5293 + msg = []
5294 + msg.append(("emerge: There are no sets to satisfy '%s'. " + \
5295 + "The following sets exist:") % \
5296 + colorize("INFORM", set_name))
5297 + msg.append("")
5298 +
5299 + for s in sorted(root_config.sets):
5300 + msg.append(" %s" % s)
5301 + msg.append("")
5302 +
5303 + writemsg_level("".join("%s\n" % l for l in msg),
5304 + level=logging.ERROR, noiselevel=-1)
5305 +
5306 +def getportageversion(portdir, target_root, profile, chost, vardb):
5307 + profilever = "unavailable"
5308 + if profile:
5309 + realpath = os.path.realpath(profile)
5310 + basepath = os.path.realpath(os.path.join(portdir, "profiles"))
5311 + if realpath.startswith(basepath):
5312 + profilever = realpath[1 + len(basepath):]
5313 + else:
5314 + try:
5315 + profilever = "!" + os.readlink(profile)
5316 + except (OSError):
5317 + pass
5318 + del realpath, basepath
5319 +
5320 + libcver=[]
5321 + libclist = vardb.match("virtual/libc")
5322 + libclist += vardb.match("virtual/glibc")
5323 + libclist = portage.util.unique_array(libclist)
5324 + for x in libclist:
5325 + xs=portage.catpkgsplit(x)
5326 + if libcver:
5327 + libcver+=","+"-".join(xs[1:])
5328 + else:
5329 + libcver="-".join(xs[1:])
5330 + if libcver==[]:
5331 + libcver="unavailable"
5332 +
5333 + gccver = getgccversion(chost)
5334 + unameout=platform.release()+" "+platform.machine()
5335 +
5336 + return "Portage " + portage.VERSION +" ("+profilever+", "+gccver+", "+libcver+", "+unameout+")"
5337 +
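
The string getportageversion() assembles is the banner line that emerge
prints in its --info output; with invented component values it comes out
roughly like this:

    import portage

    profilever = "default/linux/x86/2008.0"  # invented example values
    gccver, libcver = "gcc-4.3.3", "glibc-2.9_p20081201-r2"
    unameout = "2.6.30-gentoo i686"
    banner = "Portage " + portage.VERSION + " (" + profilever + ", " + \
        gccver + ", " + libcver + ", " + unameout + ")"
    # -> "Portage <version> (default/linux/x86/2008.0, gcc-4.3.3,
    #     glibc-2.9_p20081201-r2, 2.6.30-gentoo i686)"
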
5338 +def git_sync_timestamps(settings, portdir):
5339 + """
5340 + Since git doesn't preserve timestamps, synchronize timestamps between
5341 + entries and ebuilds/eclasses. Assume the cache has the correct timestamp
5342 + for a given file as long as the file in the working tree is not modified
5343 + (relative to HEAD).
5344 + """
5345 + cache_dir = os.path.join(portdir, "metadata", "cache")
5346 + if not os.path.isdir(cache_dir):
5347 + return os.EX_OK
5348 + writemsg_level(">>> Synchronizing timestamps...\n")
5349 +
5350 + from portage.cache.cache_errors import CacheError
5351 + try:
5352 + cache_db = settings.load_best_module("portdbapi.metadbmodule")(
5353 + portdir, "metadata/cache", portage.auxdbkeys[:], readonly=True)
5354 + except CacheError, e:
5355 + writemsg_level("!!! Unable to instantiate cache: %s\n" % (e,),
5356 + level=logging.ERROR, noiselevel=-1)
5357 + return 1
5358 +
5359 + ec_dir = os.path.join(portdir, "eclass")
5360 + try:
5361 + ec_names = set(f[:-7] for f in os.listdir(ec_dir) \
5362 + if f.endswith(".eclass"))
5363 + except OSError, e:
5364 + writemsg_level("!!! Unable to list eclasses: %s\n" % (e,),
5365 + level=logging.ERROR, noiselevel=-1)
5366 + return 1
5367 +
5368 + args = [portage.const.BASH_BINARY, "-c",
5369 + "cd %s && git diff-index --name-only --diff-filter=M HEAD" % \
5370 + portage._shell_quote(portdir)]
5371 + import subprocess
5372 + proc = subprocess.Popen(args, stdout=subprocess.PIPE)
5373 + modified_files = set(l.rstrip("\n") for l in proc.stdout)
5374 + rval = proc.wait()
5375 + if rval != os.EX_OK:
5376 + return rval
5377 +
5378 + modified_eclasses = set(ec for ec in ec_names \
5379 + if os.path.join("eclass", ec + ".eclass") in modified_files)
5380 +
5381 + updated_ec_mtimes = {}
5382 +
5383 + for cpv in cache_db:
5384 + cpv_split = portage.catpkgsplit(cpv)
5385 + if cpv_split is None:
5386 + writemsg_level("!!! Invalid cache entry: %s\n" % (cpv,),
5387 + level=logging.ERROR, noiselevel=-1)
5388 + continue
5389 +
5390 + cat, pn, ver, rev = cpv_split
5391 + cat, pf = portage.catsplit(cpv)
5392 + relative_eb_path = os.path.join(cat, pn, pf + ".ebuild")
5393 + if relative_eb_path in modified_files:
5394 + continue
5395 +
5396 + try:
5397 + cache_entry = cache_db[cpv]
5398 + eb_mtime = cache_entry.get("_mtime_")
5399 + ec_mtimes = cache_entry.get("_eclasses_")
5400 + except KeyError:
5401 + writemsg_level("!!! Missing cache entry: %s\n" % (cpv,),
5402 + level=logging.ERROR, noiselevel=-1)
5403 + continue
5404 + except CacheError, e:
5405 + writemsg_level("!!! Unable to access cache entry: %s %s\n" % \
5406 + (cpv, e), level=logging.ERROR, noiselevel=-1)
5407 + continue
5408 +
5409 + if eb_mtime is None:
5410 + writemsg_level("!!! Missing ebuild mtime: %s\n" % (cpv,),
5411 + level=logging.ERROR, noiselevel=-1)
5412 + continue
5413 +
5414 + try:
5415 + eb_mtime = long(eb_mtime)
5416 + except ValueError:
5417 + writemsg_level("!!! Invalid ebuild mtime: %s %s\n" % \
5418 + (cpv, eb_mtime), level=logging.ERROR, noiselevel=-1)
5419 + continue
5420 +
5421 + if ec_mtimes is None:
5422 + writemsg_level("!!! Missing eclass mtimes: %s\n" % (cpv,),
5423 + level=logging.ERROR, noiselevel=-1)
5424 + continue
5425 +
5426 + if modified_eclasses.intersection(ec_mtimes):
5427 + continue
5428 +
5429 + missing_eclasses = set(ec_mtimes).difference(ec_names)
5430 + if missing_eclasses:
5431 + writemsg_level("!!! Non-existent eclass(es): %s %s\n" % \
5432 + (cpv, sorted(missing_eclasses)), level=logging.ERROR,
5433 + noiselevel=-1)
5434 + continue
5435 +
5436 + eb_path = os.path.join(portdir, relative_eb_path)
5437 + try:
5438 + current_eb_mtime = os.stat(eb_path)
5439 + except OSError:
5440 + writemsg_level("!!! Missing ebuild: %s\n" % \
5441 + (cpv,), level=logging.ERROR, noiselevel=-1)
5442 + continue
5443 +
5444 + inconsistent = False
5445 + for ec, (ec_path, ec_mtime) in ec_mtimes.iteritems():
5446 + updated_mtime = updated_ec_mtimes.get(ec)
5447 + if updated_mtime is not None and updated_mtime != ec_mtime:
5448 + writemsg_level("!!! Inconsistent eclass mtime: %s %s\n" % \
5449 + (cpv, ec), level=logging.ERROR, noiselevel=-1)
5450 + inconsistent = True
5451 + break
5452 +
5453 + if inconsistent:
5454 + continue
5455 +
5456 + if current_eb_mtime != eb_mtime:
5457 + os.utime(eb_path, (eb_mtime, eb_mtime))
5458 +
5459 + for ec, (ec_path, ec_mtime) in ec_mtimes.iteritems():
5460 + if ec in updated_ec_mtimes:
5461 + continue
5462 + ec_path = os.path.join(ec_dir, ec + ".eclass")
5463 + current_mtime = long(os.stat(ec_path).st_mtime)
5464 + if current_mtime != ec_mtime:
5465 + os.utime(ec_path, (ec_mtime, ec_mtime))
5466 + updated_ec_mtimes[ec] = ec_mtime
5467 +
5468 + return os.EX_OK
5469 +
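
git_sync_timestamps() trusts a cache entry only if neither the ebuild nor
any inherited eclass shows up in `git diff-index --name-only
--diff-filter=M HEAD`; a standalone sketch of that check, using cwd=
instead of the bash -c "cd ..." wrapper above (the tree location is
hypothetical):

    import subprocess

    portdir = "/usr/portage"  # hypothetical git-backed tree
    proc = subprocess.Popen(
        ["git", "diff-index", "--name-only", "--diff-filter=M", "HEAD"],
        cwd=portdir, stdout=subprocess.PIPE)
    # One path per line, relative to the repository root, e.g.
    # "eclass/eutils.eclass" or "sys-apps/portage/portage-2.2.ebuild".
    modified_files = set(l.rstrip("\n") for l in proc.stdout)
    proc.wait()
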
5470 +def load_emerge_config(trees=None):
5471 + kwargs = {}
5472 + for k, envvar in (("config_root", "PORTAGE_CONFIGROOT"), ("target_root", "ROOT")):
5473 + v = os.environ.get(envvar, None)
5474 + if v and v.strip():
5475 + kwargs[k] = v
5476 + trees = portage.create_trees(trees=trees, **kwargs)
5477 +
5478 + for root, root_trees in trees.iteritems():
5479 + settings = root_trees["vartree"].settings
5480 + setconfig = load_default_config(settings, root_trees)
5481 + root_trees["root_config"] = RootConfig(settings, root_trees, setconfig)
5482 +
5483 + settings = trees["/"]["vartree"].settings
5484 +
5485 + for myroot in trees:
5486 + if myroot != "/":
5487 + settings = trees[myroot]["vartree"].settings
5488 + break
5489 +
5490 + mtimedbfile = os.path.join("/", portage.CACHE_PATH.lstrip(os.path.sep), "mtimedb")
5491 + mtimedb = portage.MtimeDB(mtimedbfile)
5492 +
5493 + return settings, trees, mtimedb
5494 +
5495 +def chk_updated_cfg_files(target_root, config_protect):
5496 + if config_protect:
5497 + #number of directories with some protect files in them
5498 + procount=0
5499 + for x in config_protect:
5500 + x = os.path.join(target_root, x.lstrip(os.path.sep))
5501 + if not os.access(x, os.W_OK):
5502 + # Avoid Permission denied errors generated
5503 + # later by `find`.
5504 + continue
5505 + try:
5506 + mymode = os.lstat(x).st_mode
5507 + except OSError:
5508 + continue
5509 + if stat.S_ISLNK(mymode):
5510 + # We want to treat it like a directory if it
5511 + # is a symlink to an existing directory.
5512 + try:
5513 + real_mode = os.stat(x).st_mode
5514 + if stat.S_ISDIR(real_mode):
5515 + mymode = real_mode
5516 + except OSError:
5517 + pass
5518 + if stat.S_ISDIR(mymode):
5519 + mycommand = "find '%s' -name '.*' -type d -prune -o -name '._cfg????_*'" % x
5520 + else:
5521 + mycommand = "find '%s' -maxdepth 1 -name '._cfg????_%s'" % \
5522 + os.path.split(x.rstrip(os.path.sep))
5523 + mycommand += " ! -name '.*~' ! -iname '.*.bak' -print0"
5524 + a = commands.getstatusoutput(mycommand)
5525 + if a[0] != 0:
5526 + sys.stderr.write(" %s error scanning '%s': " % (bad("*"), x))
5527 + sys.stderr.flush()
5528 + # Show the error message alone, sending stdout to /dev/null.
5529 + os.system(mycommand + " 1>/dev/null")
5530 + else:
5531 + files = a[1].split('\0')
5532 + # split always produces an empty string as the last element
5533 + if files and not files[-1]:
5534 + del files[-1]
5535 + if files:
5536 + procount += 1
5537 + print "\n"+colorize("WARN", " * IMPORTANT:"),
5538 + if stat.S_ISDIR(mymode):
5539 + print "%d config files in '%s' need updating." % \
5540 + (len(files), x)
5541 + else:
5542 + print "config file '%s' needs updating." % x
5543 +
5544 + if procount:
5545 + print " "+yellow("*")+" See the "+colorize("INFORM","CONFIGURATION FILES")+ \
5546 + " section of the " + bold("emerge")
5547 + print " "+yellow("*")+" man page to learn how to update config files."
5548 +
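
The filenames counted above follow portage's pending-update scheme
(updates for a protected file are written next to it as ._cfgNNNN_<name>),
and find's -print0 output always ends with a trailing NUL; a tiny
illustration with made-up paths:

    find_output = "/etc/._cfg0000_hosts\0/etc/._cfg0001_hosts\0"
    files = find_output.split('\0')
    if files and not files[-1]:
        del files[-1]
    # files == ['/etc/._cfg0000_hosts', '/etc/._cfg0001_hosts']
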
5549 +def display_news_notification(root_config, myopts):
5550 + target_root = root_config.root
5551 + trees = root_config.trees
5552 + settings = trees["vartree"].settings
5553 + portdb = trees["porttree"].dbapi
5554 + vardb = trees["vartree"].dbapi
5555 + NEWS_PATH = os.path.join("metadata", "news")
5556 + UNREAD_PATH = os.path.join(target_root, NEWS_LIB_PATH, "news")
5557 + newsReaderDisplay = False
5558 + update = "--pretend" not in myopts
5559 +
5560 + for repo in portdb.getRepositories():
5561 + unreadItems = checkUpdatedNewsItems(
5562 + portdb, vardb, NEWS_PATH, UNREAD_PATH, repo, update=update)
5563 + if unreadItems:
5564 + if not newsReaderDisplay:
5565 + newsReaderDisplay = True
5566 + print
5567 + print colorize("WARN", " * IMPORTANT:"),
5568 + print "%s news items need reading for repository '%s'." % (unreadItems, repo)
5569 +
5570 +
5571 + if newsReaderDisplay:
5572 + print colorize("WARN", " *"),
5573 + print "Use " + colorize("GOOD", "eselect news") + " to read news items."
5574 + print
5575 +
5576 +def getgccversion(chost):
5577 + """
5578 + @rtype: C{str}
5579 + @return: the current in-use gcc version
5580 + """
5581 +
5582 + gcc_ver_command = 'gcc -dumpversion'
5583 + gcc_ver_prefix = 'gcc-'
5584 +
5585 + gcc_not_found_error = red(
5586 + "!!! No gcc found. You probably need to 'source /etc/profile'\n" +
5587 + "!!! to update the environment of this terminal and possibly\n" +
5588 + "!!! other terminals also.\n"
5589 + )
5590 +
5591 + mystatus, myoutput = commands.getstatusoutput("gcc-config -c")
5592 + if mystatus == os.EX_OK and myoutput.startswith(chost + "-"):
5593 + return myoutput.replace(chost + "-", gcc_ver_prefix, 1)
5594 +
5595 + mystatus, myoutput = commands.getstatusoutput(
5596 + chost + "-" + gcc_ver_command)
5597 + if mystatus == os.EX_OK:
5598 + return gcc_ver_prefix + myoutput
5599 +
5600 + mystatus, myoutput = commands.getstatusoutput(gcc_ver_command)
5601 + if mystatus == os.EX_OK:
5602 + return gcc_ver_prefix + myoutput
5603 +
5604 + portage.writemsg(gcc_not_found_error, noiselevel=-1)
5605 + return "[unavailable]"
5606 +
5607 +def checkUpdatedNewsItems(portdb, vardb, NEWS_PATH, UNREAD_PATH, repo_id,
5608 + update=False):
5609 + """
5610 + Examines news items in repodir + '/' + NEWS_PATH and attempts to find unread items.
5611 + Returns the number of unread (yet relevant) items.
5612 +
5613 + @param portdb: a portage tree database
5614 + @type portdb: portdbapi
5615 + @param vardb: an installed package database
5616 + @type vardb: vardbapi
5617 + @param NEWS_PATH:
5618 + @type NEWS_PATH:
5619 + @param UNREAD_PATH:
5620 + @type UNREAD_PATH:
5621 + @param repo_id:
5622 + @type repo_id:
5623 + @rtype: Integer
5624 + @returns:
5625 + 1. The number of unread but relevant news items.
5626 +
5627 + """
5628 + from portage.news import NewsManager
5629 + manager = NewsManager(portdb, vardb, NEWS_PATH, UNREAD_PATH)
5630 + return manager.getUnreadItems( repo_id, update=update )
5631 +