
From: Zac Medico <zmedico@g.o>
To: gentoo-commits@l.g.o
Subject: [gentoo-commits] proj/portage:master commit in: lib/portage/package/ebuild/, lib/portage/tests/ebuild/, lib/_emerge/
Date: Tue, 30 Apr 2019 02:47:50
Message-Id: 1556591301.ebbde237d33e783c562cc6c70987969ac7228b96.zmedico@gentoo
commit: ebbde237d33e783c562cc6c70987969ac7228b96
Author: Zac Medico <zmedico <AT> gentoo <DOT> org>
AuthorDate: Sat Apr 27 21:59:57 2019 +0000
Commit: Zac Medico <zmedico <AT> gentoo <DOT> org>
CommitDate: Tue Apr 30 02:28:21 2019 +0000
URL: https://gitweb.gentoo.org/proj/portage.git/commit/?id=ebbde237

fetch: atomic downloads (bug 175612)

Direct FETCHCOMMAND/RESUMECOMMAND output to a temporary file with
a constant .__download__ suffix, and atomically rename the file
to remove the suffix only after the download has completed
successfully (includes digest verification when applicable).
Also add unit tests to cover most fetch cases.

Bug: https://bugs.gentoo.org/175612
Signed-off-by: Zac Medico <zmedico <AT> gentoo.org>

lib/_emerge/BinpkgVerifier.py | 4 +-
lib/portage/package/ebuild/fetch.py | 105 ++++++++++-----
lib/portage/tests/ebuild/test_fetch.py | 230 +++++++++++++++++++++++++++++++++
3 files changed, 303 insertions(+), 36 deletions(-)

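In short, fetch() now writes downloader output to <distfile>.__download__ and only renames the file to its final name after the size/digest checks pass. A minimal standalone sketch of that pattern, for illustration only: fetch_to and expected_sha512 below are hypothetical placeholders, not portage APIs, and the real implementation is in the fetch.py hunks that follow.

import hashlib
import os

_download_suffix = '.__download__'

def atomic_fetch(url, dest_path, expected_sha512, fetch_to):
	# Download into a temporary file that carries the constant suffix.
	download_path = dest_path + _download_suffix
	fetch_to(url, download_path)  # stand-in for FETCHCOMMAND/RESUMECOMMAND

	# Verify the digest before the file becomes visible under its final name.
	sha = hashlib.sha512()
	with open(download_path, 'rb') as f:
		for chunk in iter(lambda: f.read(65536), b''):
			sha.update(chunk)
	if sha.hexdigest() != expected_sha512:
		raise ValueError('digest mismatch for %s' % dest_path)

	# rename() is atomic within a single filesystem, so consumers never
	# observe a partially written or unverified file at dest_path.
	os.rename(download_path, dest_path)

Because the rename happens only after verification, other consumers of DISTDIR never see a partially downloaded distfile under its final name.
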
diff --git a/lib/_emerge/BinpkgVerifier.py b/lib/_emerge/BinpkgVerifier.py
index 7a6d15e80..f98f511a8 100644
--- a/lib/_emerge/BinpkgVerifier.py
+++ b/lib/_emerge/BinpkgVerifier.py
@@ -1,4 +1,4 @@
-# Copyright 1999-2013 Gentoo Foundation
+# Copyright 1999-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2

import errno
@@ -108,7 +108,7 @@ class BinpkgVerifier(CompositeTask):
def _digest_exception(self, name, value, expected):

head, tail = os.path.split(self._pkg_path)
- temp_filename = _checksum_failure_temp_file(head, tail)
+ temp_filename = _checksum_failure_temp_file(self.pkg.root_config.settings, head, tail)

self.scheduler.output((
"\n!!! Digest verification failed:\n"

diff --git a/lib/portage/package/ebuild/fetch.py b/lib/portage/package/ebuild/fetch.py
index bfd97601c..227bf45ae 100644
--- a/lib/portage/package/ebuild/fetch.py
+++ b/lib/portage/package/ebuild/fetch.py
@@ -30,7 +30,7 @@ portage.proxy.lazyimport.lazyimport(globals(),
)

from portage import os, selinux, shutil, _encodings, \
- _shell_quote, _unicode_encode
+ _movefile, _shell_quote, _unicode_encode
from portage.checksum import (get_valid_checksum_keys, perform_md5, verify_all,
_filter_unaccelarated_hashes, _hash_filter, _apply_hash_filter)
from portage.const import BASH_BINARY, CUSTOM_MIRRORS_FILE, \
@@ -46,6 +46,8 @@ from portage.util import apply_recursive_permissions, \
varexpand, writemsg, writemsg_level, writemsg_stdout
from portage.process import spawn

+_download_suffix = '.__download__'
+
_userpriv_spawn_kwargs = (
("uid", portage_uid),
("gid", portage_gid),
@@ -139,7 +141,7 @@ def _userpriv_test_write_file(settings, file_path):
_userpriv_test_write_file_cache[file_path] = rval
return rval

-def _checksum_failure_temp_file(distdir, basename):
+def _checksum_failure_temp_file(settings, distdir, basename):
"""
First try to find a duplicate temp file with the same checksum and return
that filename if available. Otherwise, use mkstemp to create a new unique
@@ -149,9 +151,13 @@ def _checksum_failure_temp_file(distdir, basename):
"""

filename = os.path.join(distdir, basename)
+ if basename.endswith(_download_suffix):
+ normal_basename = basename[:-len(_download_suffix)]
+ else:
+ normal_basename = basename
size = os.stat(filename).st_size
checksum = None
- tempfile_re = re.compile(re.escape(basename) + r'\._checksum_failure_\..*')
+ tempfile_re = re.compile(re.escape(normal_basename) + r'\._checksum_failure_\..*')
for temp_filename in os.listdir(distdir):
if not tempfile_re.match(temp_filename):
continue
@@ -173,9 +179,9 @@ def _checksum_failure_temp_file(distdir, basename):
return temp_filename

fd, temp_filename = \
- tempfile.mkstemp("", basename + "._checksum_failure_.", distdir)
+ tempfile.mkstemp("", normal_basename + "._checksum_failure_.", distdir)
os.close(fd)
- os.rename(filename, temp_filename)
+ _movefile(filename, temp_filename, mysettings=settings)
return temp_filename

def _check_digests(filename, digests, show_errors=1):
@@ -602,6 +608,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
pruned_digests["size"] = size

myfile_path = os.path.join(mysettings["DISTDIR"], myfile)
+ download_path = myfile_path + _download_suffix
has_space = True
has_space_superuser = True
file_lock = None
@@ -679,12 +686,15 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
del e
continue

- if distdir_writable and mystat is None:
- # Remove broken symlinks if necessary.
+ # Remove broken symlinks or symlinks to files which
+ # _check_distfile did not match above.
+ if distdir_writable and mystat is None or os.path.islink(myfile_path):
try:
os.unlink(myfile_path)
- except OSError:
- pass
+ except OSError as e:
+ if e.errno not in (errno.ENOENT, errno.ESTALE):
+ raise
+ mystat = None

if mystat is not None:
if stat.S_ISDIR(mystat.st_mode):
@@ -695,10 +705,30 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
level=logging.ERROR, noiselevel=-1)
return 0

+ if distdir_writable:
+ # Since _check_distfile did not match above, the file
+ # is either corrupt or its identity has changed since
+ # the last time it was fetched, so rename it.
+ temp_filename = _checksum_failure_temp_file(
+ mysettings, mysettings["DISTDIR"], myfile)
+ writemsg_stdout(_("Refetching... "
+ "File renamed to '%s'\n\n") % \
+ temp_filename, noiselevel=-1)
+
+ # Stat the temporary download file for comparison with
+ # fetch_resume_size.
+ try:
+ mystat = os.stat(download_path)
+ except OSError as e:
+ if e.errno not in (errno.ENOENT, errno.ESTALE):
+ raise
+ mystat = None
+
+ if mystat is not None:
if mystat.st_size == 0:
if distdir_writable:
try:
- os.unlink(myfile_path)
+ os.unlink(download_path)
except OSError:
pass
elif distdir_writable and size is not None:
@@ -717,14 +747,16 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
"ME_MIN_SIZE)\n") % mystat.st_size)
temp_filename = \
_checksum_failure_temp_file(
- mysettings["DISTDIR"], myfile)
+ mysettings, mysettings["DISTDIR"],
+ os.path.basename(download_path))
writemsg_stdout(_("Refetching... "
"File renamed to '%s'\n\n") % \
temp_filename, noiselevel=-1)
elif mystat.st_size >= size:
temp_filename = \
_checksum_failure_temp_file(
- mysettings["DISTDIR"], myfile)
+ mysettings, mysettings["DISTDIR"],
+ os.path.basename(download_path))
writemsg_stdout(_("Refetching... "
"File renamed to '%s'\n\n") % \
temp_filename, noiselevel=-1)
@@ -766,7 +798,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
for mydir in fsmirrors:
mirror_file = os.path.join(mydir, myfile)
try:
- shutil.copyfile(mirror_file, myfile_path)
+ shutil.copyfile(mirror_file, download_path)
writemsg(_("Local mirror has file: %s\n") % myfile)
break
except (IOError, OSError) as e:
@@ -775,7 +807,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
del e

try:
- mystat = os.stat(myfile_path)
+ mystat = os.stat(download_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
@@ -784,13 +816,13 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
# Skip permission adjustment for symlinks, since we don't
# want to modify anything outside of the primary DISTDIR,
# and symlinks typically point to PORTAGE_RO_DISTDIRS.
- if not os.path.islink(myfile_path):
+ if not os.path.islink(download_path):
try:
- apply_secpass_permissions(myfile_path,
+ apply_secpass_permissions(download_path,
gid=portage_gid, mode=0o664, mask=0o2,
stat_cached=mystat)
except PortageException as e:
- if not os.access(myfile_path, os.R_OK):
+ if not os.access(download_path, os.R_OK):
writemsg(_("!!! Failed to adjust permissions:"
" %s\n") % (e,), noiselevel=-1)

@@ -799,7 +831,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
if mystat.st_size == 0:
if distdir_writable:
try:
- os.unlink(myfile_path)
+ os.unlink(download_path)
except EnvironmentError:
pass
elif myfile not in mydigests:
@@ -824,7 +856,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
digests = _filter_unaccelarated_hashes(mydigests[myfile])
if hash_filter is not None:
digests = _apply_hash_filter(digests, hash_filter)
- verified_ok, reason = verify_all(myfile_path, digests)
+ verified_ok, reason = verify_all(download_path, digests)
if not verified_ok:
writemsg(_("!!! Previously fetched"
" file: '%s'\n") % myfile, noiselevel=-1)
@@ -838,11 +870,13 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
if distdir_writable:
temp_filename = \
_checksum_failure_temp_file(
- mysettings["DISTDIR"], myfile)
+ mysettings, mysettings["DISTDIR"],
+ os.path.basename(download_path))
writemsg_stdout(_("Refetching... "
"File renamed to '%s'\n\n") % \
temp_filename, noiselevel=-1)
else:
+ _movefile(download_path, myfile_path, mysettings=mysettings)
eout = EOutput()
eout.quiet = \
mysettings.get("PORTAGE_QUIET", None) == "1"
@@ -928,7 +962,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
if not can_fetch:
if fetched != 2:
try:
- mysize = os.stat(myfile_path).st_size
+ mysize = os.stat(download_path).st_size
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
@@ -952,7 +986,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
#we either need to resume or start the download
if fetched == 1:
try:
- mystat = os.stat(myfile_path)
+ mystat = os.stat(download_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
@@ -964,7 +998,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
"%d (smaller than " "PORTAGE_FETCH_RESU"
"ME_MIN_SIZE)\n") % mystat.st_size)
try:
- os.unlink(myfile_path)
+ os.unlink(download_path)
except OSError as e:
if e.errno not in \
(errno.ENOENT, errno.ESTALE):
@@ -984,7 +1018,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
_hide_url_passwd(loc))
variables = {
"URI": loc,
- "FILE": myfile
+ "FILE": os.path.basename(download_path)
}

for k in ("DISTDIR", "PORTAGE_SSH_OPTS"):
@@ -1001,12 +1035,12 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,

finally:
try:
- apply_secpass_permissions(myfile_path,
+ apply_secpass_permissions(download_path,
gid=portage_gid, mode=0o664, mask=0o2)
except FileNotFound:
pass
except PortageException as e:
- if not os.access(myfile_path, os.R_OK):
+ if not os.access(download_path, os.R_OK):
writemsg(_("!!! Failed to adjust permissions:"
" %s\n") % str(e), noiselevel=-1)
del e
@@ -1015,8 +1049,8 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
# trust the return value from the fetcher. Remove the
# empty file and try to download again.
try:
- if os.stat(myfile_path).st_size == 0:
- os.unlink(myfile_path)
+ if os.stat(download_path).st_size == 0:
+ os.unlink(download_path)
fetched = 0
continue
except EnvironmentError:
@@ -1024,7 +1058,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,

if mydigests is not None and myfile in mydigests:
try:
- mystat = os.stat(myfile_path)
+ mystat = os.stat(download_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
@@ -1065,13 +1099,13 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
if (mystat[stat.ST_SIZE]<100000) and (len(myfile)>4) and not ((myfile[-5:]==".html") or (myfile[-4:]==".htm")):
html404=re.compile("<title>.*(not found|404).*</title>",re.I|re.M)
with io.open(
- _unicode_encode(myfile_path,
+ _unicode_encode(download_path,
encoding=_encodings['fs'], errors='strict'),
mode='r', encoding=_encodings['content'], errors='replace'
) as f:
if html404.search(f.read()):
try:
- os.unlink(mysettings["DISTDIR"]+"/"+myfile)
+ os.unlink(download_path)
writemsg(_(">>> Deleting invalid distfile. (Improper 404 redirect from server.)\n"))
fetched = 0
continue
@@ -1087,7 +1121,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
digests = _filter_unaccelarated_hashes(mydigests[myfile])
if hash_filter is not None:
digests = _apply_hash_filter(digests, hash_filter)
- verified_ok, reason = verify_all(myfile_path, digests)
+ verified_ok, reason = verify_all(download_path, digests)
if not verified_ok:
writemsg(_("!!! Fetched file: %s VERIFY FAILED!\n") % myfile,
noiselevel=-1)
@@ -1099,7 +1133,8 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
return 0
temp_filename = \
_checksum_failure_temp_file(
- mysettings["DISTDIR"], myfile)
+ mysettings, mysettings["DISTDIR"],
+ os.path.basename(download_path))
writemsg_stdout(_("Refetching... "
"File renamed to '%s'\n\n") % \
temp_filename, noiselevel=-1)
@@ -1119,6 +1154,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
checksum_failure_max_tries:
break
else:
+ _movefile(download_path, myfile_path, mysettings=mysettings)
eout = EOutput()
eout.quiet = mysettings.get("PORTAGE_QUIET", None) == "1"
if digests:
@@ -1127,8 +1163,9 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
eout.eend(0)
fetched=2
break
- else:
+ else: # no digests available
if not myret:
+ _movefile(download_path, myfile_path, mysettings=mysettings)
fetched=2
break
elif mydigests!=None:

diff --git a/lib/portage/tests/ebuild/test_fetch.py b/lib/portage/tests/ebuild/test_fetch.py
new file mode 100644
index 000000000..83321fed7
--- /dev/null
+++ b/lib/portage/tests/ebuild/test_fetch.py
@@ -0,0 +1,230 @@
+# Copyright 2019 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import unicode_literals
+
+import functools
+import tempfile
+
+import portage
+from portage import shutil, os
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import ResolverPlayground
+from portage.tests.util.test_socks5 import AsyncHTTPServer
+from portage.util.futures.executor.fork import ForkExecutor
+from portage.util._async.SchedulerInterface import SchedulerInterface
+from portage.util._eventloop.global_event_loop import global_event_loop
+from portage.package.ebuild.config import config
+from portage.package.ebuild.digestgen import digestgen
+from portage.package.ebuild.fetch import _download_suffix
+from _emerge.EbuildFetcher import EbuildFetcher
+from _emerge.Package import Package
+
+
+class EbuildFetchTestCase(TestCase):
+
+ def testEbuildFetch(self):
+
+ distfiles = {
+ 'bar': b'bar\n',
+ 'foo': b'foo\n',
+ }
+
+ ebuilds = {
+ 'dev-libs/A-1': {
+ 'EAPI': '7',
+ 'RESTRICT': 'primaryuri',
+ 'SRC_URI': '''{scheme}://{host}:{port}/distfiles/bar.txt -> bar
+ {scheme}://{host}:{port}/distfiles/foo.txt -> foo''',
+ },
+ }
+
+ loop = SchedulerInterface(global_event_loop())
+ scheme = 'http'
+ host = '127.0.0.1'
+ content = {}
+ for k, v in distfiles.items():
+ content['/distfiles/{}.txt'.format(k)] = v
+
+ with AsyncHTTPServer(host, content, loop) as server:
+ ebuilds_subst = {}
+ for cpv, metadata in ebuilds.items():
+ metadata = metadata.copy()
+ metadata['SRC_URI'] = metadata['SRC_URI'].format(
+ scheme=scheme, host=host, port=server.server_port)
+ ebuilds_subst[cpv] = metadata
+
+ playground = ResolverPlayground(ebuilds=ebuilds_subst, distfiles=distfiles)
+ ro_distdir = tempfile.mkdtemp()
+ try:
+ fetchcommand = portage.util.shlex_split(playground.settings['FETCHCOMMAND'])
+ fetch_bin = portage.process.find_binary(fetchcommand[0])
+ if fetch_bin is None:
+ self.skipTest('FETCHCOMMAND not found: {}'.format(playground.settings['FETCHCOMMAND']))
+ resumecommand = portage.util.shlex_split(playground.settings['RESUMECOMMAND'])
+ resume_bin = portage.process.find_binary(resumecommand[0])
+ if resume_bin is None:
+ self.skipTest('RESUMECOMMAND not found: {}'.format(playground.settings['RESUMECOMMAND']))
+ root_config = playground.trees[playground.eroot]['root_config']
+ portdb = root_config.trees["porttree"].dbapi
+ settings = config(clone=playground.settings)
+
+ # Tests only work with one ebuild at a time, so the config
+ # pool only needs a single config instance.
+ class config_pool:
+ @staticmethod
+ def allocate():
+ return settings
+ @staticmethod
+ def deallocate(settings):
+ pass
+
+ def async_fetch(pkg, ebuild_path):
+ fetcher = EbuildFetcher(config_pool=config_pool, ebuild_path=ebuild_path,
+ fetchonly=False, fetchall=True, pkg=pkg, scheduler=loop)
+ fetcher.start()
+ return fetcher.async_wait()
+
+ for cpv in ebuilds:
+ metadata = dict(zip(Package.metadata_keys,
+ portdb.aux_get(cpv, Package.metadata_keys)))
+
+ pkg = Package(built=False, cpv=cpv, installed=False,
+ metadata=metadata, root_config=root_config,
+ type_name='ebuild')
+
+ settings.setcpv(pkg)
+ ebuild_path = portdb.findname(pkg.cpv)
+ portage.doebuild_environment(ebuild_path, 'fetch', settings=settings, db=portdb)
+
+ # Test good files in DISTDIR
+ for k in settings['AA'].split():
+ os.stat(os.path.join(settings['DISTDIR'], k))
+ self.assertEqual(loop.run_until_complete(async_fetch(pkg, ebuild_path)), 0)
+ for k in settings['AA'].split():
+ with open(os.path.join(settings['DISTDIR'], k), 'rb') as f:
+ self.assertEqual(f.read(), distfiles[k])
+
+ # Test digestgen with fetch
+ os.unlink(os.path.join(os.path.dirname(ebuild_path), 'Manifest'))
+ for k in settings['AA'].split():
+ os.unlink(os.path.join(settings['DISTDIR'], k))
+ with ForkExecutor(loop=loop) as executor:
+ self.assertTrue(bool(loop.run_until_complete(
+ loop.run_in_executor(executor, functools.partial(
+ digestgen, mysettings=settings, myportdb=portdb)))))
+ for k in settings['AA'].split():
+ with open(os.path.join(settings['DISTDIR'], k), 'rb') as f:
+ self.assertEqual(f.read(), distfiles[k])
+
+ # Test missing files in DISTDIR
+ for k in settings['AA'].split():
+ os.unlink(os.path.join(settings['DISTDIR'], k))
+ self.assertEqual(loop.run_until_complete(async_fetch(pkg, ebuild_path)), 0)
+ for k in settings['AA'].split():
+ with open(os.path.join(settings['DISTDIR'], k), 'rb') as f:
+ self.assertEqual(f.read(), distfiles[k])
+
+ # Test empty files in DISTDIR
+ for k in settings['AA'].split():
+ file_path = os.path.join(settings['DISTDIR'], k)
+ with open(file_path, 'wb') as f:
+ pass
+ self.assertEqual(os.stat(file_path).st_size, 0)
+ self.assertEqual(loop.run_until_complete(async_fetch(pkg, ebuild_path)), 0)
+ for k in settings['AA'].split():
+ with open(os.path.join(settings['DISTDIR'], k), 'rb') as f:
+ self.assertEqual(f.read(), distfiles[k])
+
+ # Test non-empty files containing null bytes in DISTDIR
+ for k in settings['AA'].split():
+ file_path = os.path.join(settings['DISTDIR'], k)
+ with open(file_path, 'wb') as f:
+ f.write(len(distfiles[k]) * b'\0')
+ self.assertEqual(os.stat(file_path).st_size, len(distfiles[k]))
+ self.assertEqual(loop.run_until_complete(async_fetch(pkg, ebuild_path)), 0)
+ for k in settings['AA'].split():
+ with open(os.path.join(settings['DISTDIR'], k), 'rb') as f:
+ self.assertEqual(f.read(), distfiles[k])
+
+ # Test PORTAGE_RO_DISTDIRS
+ settings['PORTAGE_RO_DISTDIRS'] = '"{}"'.format(ro_distdir)
+ orig_fetchcommand = settings['FETCHCOMMAND']
+ orig_resumecommand = settings['RESUMECOMMAND']
+ try:
+ settings['FETCHCOMMAND'] = settings['RESUMECOMMAND'] = ''
+ for k in settings['AA'].split():
+ file_path = os.path.join(settings['DISTDIR'], k)
+ os.rename(file_path, os.path.join(ro_distdir, k))
+ self.assertEqual(loop.run_until_complete(async_fetch(pkg, ebuild_path)), 0)
+ for k in settings['AA'].split():
+ file_path = os.path.join(settings['DISTDIR'], k)
+ self.assertTrue(os.path.islink(file_path))
+ with open(file_path, 'rb') as f:
+ self.assertEqual(f.read(), distfiles[k])
+ os.unlink(file_path)
+ finally:
+ settings.pop('PORTAGE_RO_DISTDIRS')
+ settings['FETCHCOMMAND'] = orig_fetchcommand
+ settings['RESUMECOMMAND'] = orig_resumecommand
+
+ # Test local filesystem in GENTOO_MIRRORS
+ orig_mirrors = settings['GENTOO_MIRRORS']
+ orig_fetchcommand = settings['FETCHCOMMAND']
+ try:
+ settings['GENTOO_MIRRORS'] = ro_distdir
+ settings['FETCHCOMMAND'] = settings['RESUMECOMMAND'] = ''
+ self.assertEqual(loop.run_until_complete(async_fetch(pkg, ebuild_path)), 0)
+ for k in settings['AA'].split():
+ with open(os.path.join(settings['DISTDIR'], k), 'rb') as f:
+ self.assertEqual(f.read(), distfiles[k])
+ finally:
+ settings['GENTOO_MIRRORS'] = orig_mirrors
+ settings['FETCHCOMMAND'] = orig_fetchcommand
+ settings['RESUMECOMMAND'] = orig_resumecommand
+
+ # Test readonly DISTDIR
+ orig_distdir_mode = os.stat(settings['DISTDIR']).st_mode
+ try:
+ os.chmod(settings['DISTDIR'], 0o555)
+ self.assertEqual(loop.run_until_complete(async_fetch(pkg, ebuild_path)), 0)
+ for k in settings['AA'].split():
+ with open(os.path.join(settings['DISTDIR'], k), 'rb') as f:
+ self.assertEqual(f.read(), distfiles[k])
+ finally:
+ os.chmod(settings['DISTDIR'], orig_distdir_mode)
+
+ # Test parallel-fetch mode
+ settings['PORTAGE_PARALLEL_FETCHONLY'] = '1'
+ try:
+ self.assertEqual(loop.run_until_complete(async_fetch(pkg, ebuild_path)), 0)
+ for k in settings['AA'].split():
+ with open(os.path.join(settings['DISTDIR'], k), 'rb') as f:
+ self.assertEqual(f.read(), distfiles[k])
+ for k in settings['AA'].split():
+ os.unlink(os.path.join(settings['DISTDIR'], k))
+ self.assertEqual(loop.run_until_complete(async_fetch(pkg, ebuild_path)), 0)
+ for k in settings['AA'].split():
+ with open(os.path.join(settings['DISTDIR'], k), 'rb') as f:
+ self.assertEqual(f.read(), distfiles[k])
+ finally:
+ settings.pop('PORTAGE_PARALLEL_FETCHONLY')
+
+ # Test RESUMECOMMAND
+ orig_resume_min_size = settings['PORTAGE_FETCH_RESUME_MIN_SIZE']
+ try:
+ settings['PORTAGE_FETCH_RESUME_MIN_SIZE'] = '2'
+ for k in settings['AA'].split():
+ file_path = os.path.join(settings['DISTDIR'], k)
+ os.unlink(file_path)
+ with open(file_path + _download_suffix, 'wb') as f:
+ f.write(distfiles[k][:2])
+ self.assertEqual(loop.run_until_complete(async_fetch(pkg, ebuild_path)), 0)
+ for k in settings['AA'].split():
+ with open(os.path.join(settings['DISTDIR'], k), 'rb') as f:
+ self.assertEqual(f.read(), distfiles[k])
+ finally:
+ settings['PORTAGE_FETCH_RESUME_MIN_SIZE'] = orig_resume_min_size
+ finally:
+ shutil.rmtree(ro_distdir)
+ playground.cleanup()
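
The new EbuildFetchTestCase is normally driven by portage's bundled test runner. As a rough illustration only, the module can also be selected directly with the standard unittest machinery, assuming the source tree's lib/ directory is on sys.path and the environment allows ResolverPlayground to build its temporary tree:

# Illustration only; the bundled test runner remains the supported entry point.
import unittest

from portage.tests.ebuild.test_fetch import EbuildFetchTestCase

suite = unittest.TestSuite()
suite.addTest(EbuildFetchTestCase('testEbuildFetch'))
unittest.TextTestRunner(verbosity=2).run(suite)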