Gentoo Archives: gentoo-commits

From: "Zac Medico (zmedico)" <zmedico@g.o>
To: gentoo-commits@l.g.o
Subject: [gentoo-commits] portage r13512 - main/branches/2.1.6/pym/_emerge
Date: Thu, 30 Apr 2009 07:17:17
Message-Id: E1LzQWA-0000Fn-BQ@stork.gentoo.org
1 Author: zmedico
2 Date: 2009-04-30 07:17:13 +0000 (Thu, 30 Apr 2009)
3 New Revision: 13512
4
5 Modified:
6 main/branches/2.1.6/pym/_emerge/__init__.py
7 Log:
8 Add support to emerge --metadata for transferring cache from overlays that
9 have a metadata/cache directory. (trunk r13356)
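
(Illustration, not part of the patch: the heart of the new per-tree transfer in
action_metadata() is deciding, for each cpv, whether the existing writable cache
entry already matches the pregenerated metadata/cache entry or must be rewritten.
A minimal self-contained sketch of that decision, using plain dicts in place of
the cache modules; mirror_entry and the demo data are hypothetical names.)

    def mirror_entry(src, dest, eclasses_valid):
        # Return the entry to write, or None when the existing destination
        # entry is already identical and the write can be skipped.
        if dest is not None and \
            dest.get('_mtime_') == src.get('_mtime_') and \
            eclasses_valid(dest.get('_eclasses_')) and \
            set(dest.get('_eclasses_') or ()) == set(src.get('_eclasses_') or ()):
            # Don't trust _mtime_ and _eclasses_ alone: compare the rest too.
            for k in set(list(src) + list(dest)).difference(
                ('_mtime_', '_eclasses_')):
                if dest.get(k, '') != src.get(k, ''):
                    return src
            return None
        return src

    # Hypothetical usage:
    src = {'_mtime_': 1, '_eclasses_': ['eutils'], 'DESCRIPTION': 'demo'}
    dest = dict(src)
    print(mirror_entry(src, dest, lambda e: True))  # None: identical, skip write
    dest['DESCRIPTION'] = 'stale'
    print(mirror_entry(src, dest, lambda e: True))  # src dict: rewrite the entry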
10
11 Modified: main/branches/2.1.6/pym/_emerge/__init__.py
12 ===================================================================
13 --- main/branches/2.1.6/pym/_emerge/__init__.py 2009-04-30 07:16:56 UTC (rev 13511)
14 +++ main/branches/2.1.6/pym/_emerge/__init__.py 2009-04-30 07:17:13 UTC (rev 13512)
15 @@ -49,6 +49,7 @@
16 import portage.util
17 import portage.locks
18 import portage.exception
19 +from portage.cache.cache_errors import CacheError
20 from portage.data import secpass
21 from portage.elog.messages import eerror
22 from portage.util import normalize_path as normpath
23 @@ -10054,6 +10055,24 @@
24
25 xtermTitle(" ".join(plain_output.split()))
26
27 +class ProgressHandler(object):
28 + def __init__(self):
29 + self.curval = 0
30 + self.maxval = 0
31 + self._last_update = 0
32 + self.min_latency = 0.2
33 +
34 + def onProgress(self, maxval, curval):
35 + self.maxval = maxval
36 + self.curval = curval
37 + cur_time = time.time()
38 + if cur_time - self._last_update >= self.min_latency:
39 + self._last_update = cur_time
40 + self.display()
41 +
42 + def display(self):
43 + raise NotImplementedError(self)
44 +
45 class Scheduler(PollScheduler):
46
47 _opts_ignore_blockers = \
48 @@ -12567,7 +12586,8 @@
49 def action_sync(settings, trees, mtimedb, myopts, myaction):
50 xterm_titles = "notitles" not in settings.features
51 emergelog(xterm_titles, " === sync")
52 - myportdir = settings.get("PORTDIR", None)
53 + portdb = trees[settings["ROOT"]]["porttree"].dbapi
54 + myportdir = portdb.porttree_root
55 out = portage.output.EOutput()
56 if not myportdir:
57 sys.stderr.write("!!! PORTDIR is undefined. Is /etc/make.globals missing?\n")
58 @@ -13025,9 +13045,13 @@
59 root_config = trees[settings["ROOT"]]["root_config"]
60 portdb = trees[settings["ROOT"]]["porttree"].dbapi
61
62 - if os.path.exists(myportdir+"/metadata/cache") and updatecache_flg:
63 - action_metadata(settings, portdb, myopts)
64 + if updatecache_flg and \
65 + os.path.exists(os.path.join(myportdir, 'metadata', 'cache')):
66
67 + # Only update cache for myportdir since that's
68 + # the only one that's been synced here.
69 + action_metadata(settings, portdb, myopts, porttrees=[myportdir])
70 +
71 if portage._global_updates(trees, mtimedb["updates"]):
72 mtimedb.commit()
73 # Reload the whole config from scratch.
74 @@ -13194,8 +13218,10 @@
75
76 return os.EX_OK
77
78 -def action_metadata(settings, portdb, myopts):
79 - portage.writemsg_stdout("\n>>> Updating Portage cache: ")
80 +def action_metadata(settings, portdb, myopts, porttrees=None):
81 + if porttrees is None:
82 + porttrees = portdb.porttrees
83 + portage.writemsg_stdout("\n>>> Updating Portage cache\n")
84 old_umask = os.umask(0002)
85 cachedir = os.path.normpath(settings.depcachedir)
86 if cachedir in ["/", "/bin", "/dev", "/etc", "/home",
87 @@ -13207,61 +13233,214 @@
88 "!!! This is ALMOST CERTAINLY NOT what you want: '%s'" % cachedir
89 sys.exit(73)
90 if not os.path.exists(cachedir):
91 - os.mkdir(cachedir)
92 + os.makedirs(cachedir)
93
94 - ec = portage.eclass_cache.cache(portdb.porttree_root)
95 - myportdir = os.path.realpath(settings["PORTDIR"])
96 - cm = settings.load_best_module("portdbapi.metadbmodule")(
97 - myportdir, "metadata/cache", portage.auxdbkeys[:], readonly=True)
98 + auxdbkeys = [x for x in portage.auxdbkeys if not x.startswith("UNUSED_0")]
99 + auxdbkeys = tuple(auxdbkeys)
100
101 - from portage.cache import util
102 + class TreeData(object):
103 + __slots__ = ('dest_db', 'eclass_db', 'path', 'src_db', 'valid_nodes')
104 + def __init__(self, dest_db, eclass_db, path, src_db):
105 + self.dest_db = dest_db
106 + self.eclass_db = eclass_db
107 + self.path = path
108 + self.src_db = src_db
109 + self.valid_nodes = set()
110
111 - class percentage_noise_maker(util.quiet_mirroring):
112 - def __init__(self, dbapi):
113 - self.dbapi = dbapi
114 - self.cp_all = dbapi.cp_all()
115 - l = len(self.cp_all)
116 - self.call_update_min = 100000000
117 - self.min_cp_all = l/100.0
118 - self.count = 1
119 - self.pstr = ''
120 + porttrees_data = []
121 + for path in porttrees:
122 + src_db = portdb._pregen_auxdb.get(path)
123 + if src_db is None and \
124 + os.path.isdir(os.path.join(path, 'metadata', 'cache')):
125 + src_db = portdb.metadbmodule(
126 + path, 'metadata/cache', auxdbkeys, readonly=True)
127 + try:
128 + src_db.ec = portdb._repo_info[path].eclass_db
129 + except AttributeError:
130 + pass
131
132 - def __iter__(self):
133 - for x in self.cp_all:
134 - self.count += 1
135 - if self.count > self.min_cp_all:
136 - self.call_update_min = 0
137 - self.count = 0
138 - for y in self.dbapi.cp_list(x):
139 - yield y
140 - self.call_update_mine = 0
141 + if src_db is not None:
142 + porttrees_data.append(TreeData(portdb.auxdb[path],
143 + portdb._repo_info[path].eclass_db, path, src_db))
144
145 - def update(self, *arg):
146 - try:
147 - self.pstr = int(self.pstr) + 1
148 - except ValueError:
149 - self.pstr = 1
150 - sys.stdout.write("%s%i%%" % \
151 - ("\b" * (len(str(self.pstr))+1), self.pstr))
152 - sys.stdout.flush()
153 - self.call_update_min = 10000000
154 + porttrees = [tree_data.path for tree_data in porttrees_data]
155
156 - def finish(self, *arg):
157 - sys.stdout.write("\b\b\b\b100%\n")
158 - sys.stdout.flush()
159 + isatty = sys.stdout.isatty()
160 + quiet = not isatty or '--quiet' in myopts
161 + onProgress = None
162 + if not quiet:
163 + progressBar = portage.output.TermProgressBar()
164 + progressHandler = ProgressHandler()
165 + onProgress = progressHandler.onProgress
166 + def display():
167 + progressBar.set(progressHandler.curval, progressHandler.maxval)
168 + progressHandler.display = display
169 + def sigwinch_handler(signum, frame):
170 + lines, progressBar.term_columns = \
171 + portage.output.get_term_size()
172 + signal.signal(signal.SIGWINCH, sigwinch_handler)
173
174 - if "--quiet" in myopts:
175 - def quicky_cpv_generator(cp_all_list):
176 - for x in cp_all_list:
177 - for y in portdb.cp_list(x):
178 - yield y
179 - source = quicky_cpv_generator(portdb.cp_all())
180 - noise_maker = portage.cache.util.quiet_mirroring()
181 - else:
182 - noise_maker = source = percentage_noise_maker(portdb)
183 - portage.cache.util.mirror_cache(source, cm, portdb.auxdb[myportdir],
184 - eclass_cache=ec, verbose_instance=noise_maker)
185 + # Temporarily override portdb.porttrees so portdb.cp_all()
186 + # will only return the relevant subset.
187 + portdb_porttrees = portdb.porttrees
188 + portdb.porttrees = porttrees
189 + try:
190 + cp_all = portdb.cp_all()
191 + finally:
192 + portdb.porttrees = portdb_porttrees
193
194 + curval = 0
195 + maxval = len(cp_all)
196 + if onProgress is not None:
197 + onProgress(maxval, curval)
198 +
199 + from portage.cache.util import quiet_mirroring
200 + from portage import eapi_is_supported, \
201 + _validate_cache_for_unsupported_eapis
202 +
203 + # TODO: Display error messages, but do not interfere with the progress bar.
204 + # Here's how:
205 + # 1) erase the progress bar
206 + # 2) show the error message
207 + # 3) redraw the progress bar on a new line
208 + noise = quiet_mirroring()
209 +
210 + for cp in cp_all:
211 + for tree_data in porttrees_data:
212 + for cpv in portdb.cp_list(cp, mytree=tree_data.path):
213 + tree_data.valid_nodes.add(cpv)
214 + try:
215 + src = tree_data.src_db[cpv]
216 + except KeyError, e:
217 + noise.missing_entry(cpv)
218 + del e
219 + continue
220 + except CacheError, ce:
221 + noise.exception(cpv, ce)
222 + del ce
223 + continue
224 +
225 + eapi = src.get('EAPI')
226 + if not eapi:
227 + eapi = '0'
228 + eapi = eapi.lstrip('-')
229 + eapi_supported = eapi_is_supported(eapi)
230 + if not eapi_supported:
231 + if not _validate_cache_for_unsupported_eapis:
232 + noise.misc(cpv, "unable to validate " + \
233 + "cache for EAPI='%s'" % eapi)
234 + continue
235 +
236 + dest = None
237 + try:
238 + dest = tree_data.dest_db[cpv]
239 + except (KeyError, CacheError):
240 + pass
241 +
242 + for d in (src, dest):
243 + if d is not None and d.get('EAPI') in ('', '0'):
244 + del d['EAPI']
245 +
246 + if dest is not None:
247 + if not (dest['_mtime_'] == src['_mtime_'] and \
248 + tree_data.eclass_db.is_eclass_data_valid(
249 + dest['_eclasses_']) and \
250 + set(dest['_eclasses_']) == set(src['_eclasses_'])):
251 + dest = None
252 + else:
253 + # We don't want to skip the write unless we're really
254 + # sure that the existing cache is identical, so don't
255 + # trust _mtime_ and _eclasses_ alone.
256 + for k in set(chain(src, dest)).difference(
257 + ('_mtime_', '_eclasses_')):
258 + if dest.get(k, '') != src.get(k, ''):
259 + dest = None
260 + break
261 +
262 + if dest is not None:
263 + # The existing data is valid and identical,
264 + # so there's no need to overwrite it.
265 + continue
266 +
267 + try:
268 + inherited = src.get('INHERITED', '')
269 + eclasses = src.get('_eclasses_')
270 + except CacheError, ce:
271 + noise.exception(cpv, ce)
272 + del ce
273 + continue
274 +
275 + if eclasses is not None:
276 + if not tree_data.eclass_db.is_eclass_data_valid(
277 + src['_eclasses_']):
278 + noise.eclass_stale(cpv)
279 + continue
280 + inherited = eclasses
281 + else:
282 + inherited = inherited.split()
283 +
284 + if inherited:
285 + if tree_data.src_db.complete_eclass_entries and \
286 + eclasses is None:
287 + noise.corruption(cpv, "missing _eclasses_ field")
288 + continue
289 +
290 + # Even if _eclasses_ already exists, replace it with data from
291 + # eclass_cache, in order to insert local eclass paths.
292 + try:
293 + eclasses = tree_data.eclass_db.get_eclass_data(inherited)
294 + except KeyError:
295 + # INHERITED contains a non-existent eclass.
296 + noise.eclass_stale(cpv)
297 + continue
298 +
299 + if eclasses is None:
300 + noise.eclass_stale(cpv)
301 + continue
302 + src['_eclasses_'] = eclasses
303 +
304 + if not eapi_supported:
305 + src = {
306 + 'EAPI' : '-' + eapi,
307 + '_mtime_' : src['_mtime_'],
308 + '_eclasses_' : src['_eclasses_'],
309 + }
310 +
311 + try:
312 + tree_data.dest_db[cpv] = src
313 + except CacheError, ce:
314 + noise.exception(cpv, ce)
315 + del ce
316 +
317 + curval += 1
318 + if onProgress is not None:
319 + onProgress(maxval, curval)
320 +
321 + if onProgress is not None:
322 + onProgress(maxval, curval)
323 +
324 + for tree_data in porttrees_data:
325 + try:
326 + dead_nodes = set(tree_data.dest_db.iterkeys())
327 + except CacheError, e:
328 + writemsg_level("Error listing cache entries for " + \
329 + "'%s': %s, continuing...\n" % (tree_data.path, e),
330 + level=logging.ERROR, noiselevel=-1)
331 + del e
332 + else:
333 + dead_nodes.difference_update(tree_data.valid_nodes)
334 + for cpv in dead_nodes:
335 + try:
336 + del tree_data.dest_db[cpv]
337 + except (KeyError, CacheError):
338 + pass
339 +
340 + if not quiet:
341 + # make sure the final progress is displayed
342 + progressHandler.display()
343 + print
344 + signal.signal(signal.SIGWINCH, signal.SIG_DFL)
345 +
346 sys.stdout.flush()
347 os.umask(old_umask)
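
(Footnote on the progress reporting added above: ProgressHandler simply
rate-limits redraws, the concrete display() is bound to a terminal progress bar,
and SIGWINCH keeps the bar width in sync with the terminal. A self-contained
sketch of the same wiring, with StubProgressBar standing in for
portage.output.TermProgressBar; the stub and the demo loop are illustrative,
not portage code.)

    import signal, sys, time

    class ProgressHandler(object):
        # Rate-limits display() calls, as in the class added by this patch.
        def __init__(self):
            self.curval = 0
            self.maxval = 0
            self._last_update = 0
            self.min_latency = 0.2

        def onProgress(self, maxval, curval):
            self.maxval = maxval
            self.curval = curval
            cur_time = time.time()
            if cur_time - self._last_update >= self.min_latency:
                self._last_update = cur_time
                self.display()

        def display(self):
            raise NotImplementedError(self)

    class StubProgressBar(object):
        # Stand-in for portage.output.TermProgressBar.
        def __init__(self):
            self.term_columns = 80
        def set(self, curval, maxval):
            sys.stdout.write("\r%d/%d" % (curval, maxval))
            sys.stdout.flush()

    progressBar = StubProgressBar()
    progressHandler = ProgressHandler()
    progressHandler.display = lambda: progressBar.set(
        progressHandler.curval, progressHandler.maxval)

    def sigwinch_handler(signum, frame):
        # The real handler queries portage.output.get_term_size() here.
        progressBar.term_columns = 80
    signal.signal(signal.SIGWINCH, sigwinch_handler)

    for i in range(100):
        progressHandler.onProgress(100, i + 1)
        time.sleep(0.01)
    progressHandler.display()  # make sure the final progress is displayed
    sys.stdout.write("\n")
    signal.signal(signal.SIGWINCH, signal.SIG_DFL)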