Gentoo Archives: gentoo-commits

From: Arthur Zamarin <arthurzam@g.o>
To: gentoo-commits@l.g.o
Subject: [gentoo-commits] proj/pkgcore/pkgcheck:master commit in: testdata/repos/network/FetchablesUrlCheck/HttpsUrlAvailable/, ...
Date: Thu, 29 Dec 2022 17:51:37
Message-Id: 1672336283.ae7dd4184f63185880738c5133f326fe47c6606a.arthurzam@gentoo
1 commit: ae7dd4184f63185880738c5133f326fe47c6606a
2 Author: Arthur Zamarin <arthurzam <AT> gentoo <DOT> org>
3 AuthorDate: Thu Dec 29 17:51:23 2022 +0000
4 Commit: Arthur Zamarin <arthurzam <AT> gentoo <DOT> org>
5 CommitDate: Thu Dec 29 17:51:23 2022 +0000
6 URL: https://gitweb.gentoo.org/proj/pkgcore/pkgcheck.git/commit/?id=ae7dd418
7
8 format using black
9
10 Signed-off-by: Arthur Zamarin <arthurzam <AT> gentoo.org>
11
12 Makefile | 4 +
13 data/share/pkgcheck/ci.py | 10 +-
14 doc/conf.py | 193 ++---
15 doc/generate/pkgcheck/checks.py | 37 +-
16 doc/generate/pkgcheck/keywords.py | 25 +-
17 doc/generate/pkgcheck/reporters.py | 16 +-
18 pyproject.toml | 3 +
19 setup.py | 139 ++--
20 src/pkgcheck/__init__.py | 10 +-
21 src/pkgcheck/addons/__init__.py | 125 +--
22 src/pkgcheck/addons/caches.py | 33 +-
23 src/pkgcheck/addons/eclass.py | 15 +-
24 src/pkgcheck/addons/git.py | 253 ++++---
25 src/pkgcheck/addons/net.py | 12 +-
26 src/pkgcheck/addons/profiles.py | 236 +++---
27 src/pkgcheck/api.py | 4 +-
28 src/pkgcheck/base.py | 52 +-
29 src/pkgcheck/bash/__init__.py | 32 +-
30 src/pkgcheck/checks/__init__.py | 26 +-
31 src/pkgcheck/checks/acct.py | 53 +-
32 src/pkgcheck/checks/cleanup.py | 44 +-
33 src/pkgcheck/checks/codingstyle.py | 554 ++++++++------
34 src/pkgcheck/checks/dropped_keywords.py | 6 +-
35 src/pkgcheck/checks/eclass.py | 132 ++--
36 src/pkgcheck/checks/git.py | 256 ++++---
37 src/pkgcheck/checks/glsa.py | 20 +-
38 src/pkgcheck/checks/header.py | 78 +-
39 src/pkgcheck/checks/imlate.py | 53 +-
40 src/pkgcheck/checks/metadata.py | 540 +++++++------
41 src/pkgcheck/checks/metadata_xml.py | 249 +++---
42 src/pkgcheck/checks/network.py | 144 ++--
43 src/pkgcheck/checks/overlays.py | 36 +-
44 src/pkgcheck/checks/perl.py | 36 +-
45 src/pkgcheck/checks/pkgdir.py | 89 ++-
46 src/pkgcheck/checks/profiles.py | 208 ++---
47 src/pkgcheck/checks/python.py | 238 +++---
48 src/pkgcheck/checks/repo.py | 11 +-
49 src/pkgcheck/checks/repo_metadata.py | 165 ++--
50 src/pkgcheck/checks/reserved.py | 46 +-
51 src/pkgcheck/checks/stablereq.py | 22 +-
52 src/pkgcheck/checks/unstable_only.py | 14 +-
53 src/pkgcheck/checks/visibility.py | 134 ++--
54 src/pkgcheck/checks/whitespace.py | 81 +-
55 src/pkgcheck/cli.py | 21 +-
56 src/pkgcheck/const.py | 29 +-
57 src/pkgcheck/feeds.py | 24 +-
58 src/pkgcheck/log.py | 2 +-
59 src/pkgcheck/objects.py | 46 +-
60 src/pkgcheck/packages.py | 25 +-
61 src/pkgcheck/pipeline.py | 42 +-
62 src/pkgcheck/reporters.py | 103 ++-
63 src/pkgcheck/results.py | 57 +-
64 src/pkgcheck/runners.py | 11 +-
65 src/pkgcheck/scripts/__init__.py | 22 +-
66 src/pkgcheck/scripts/argparse_actions.py | 104 +--
67 src/pkgcheck/scripts/argparsers.py | 49 +-
68 src/pkgcheck/scripts/pkgcheck.py | 3 +-
69 src/pkgcheck/scripts/pkgcheck_cache.py | 51 +-
70 src/pkgcheck/scripts/pkgcheck_ci.py | 6 +-
71 src/pkgcheck/scripts/pkgcheck_replay.py | 18 +-
72 src/pkgcheck/scripts/pkgcheck_scan.py | 212 ++++--
73 src/pkgcheck/scripts/pkgcheck_show.py | 117 +--
74 src/pkgcheck/sources.py | 60 +-
75 src/pkgcheck/utils.py | 15 +-
76 .../FetchablesUrlCheck/DeadUrl/responses.py | 4 +-
77 .../HttpsUrlAvailable/responses.py | 8 +-
78 .../FetchablesUrlCheck/RedirectedUrl/responses.py | 10 +-
79 .../SSLCertificateError/responses.py | 2 +-
80 .../DeadUrl-connection-error/responses.py | 2 +-
81 .../network/HomepageUrlCheck/DeadUrl/responses.py | 4 +-
82 .../HttpsUrlAvailable/responses.py | 8 +-
83 .../HomepageUrlCheck/RedirectedUrl/responses.py | 10 +-
84 .../SSLCertificateError/responses.py | 2 +-
85 .../DeadUrl-bitbucket/responses.py | 4 +-
86 .../MetadataUrlCheck/DeadUrl-cpan/responses.py | 4 +-
87 .../MetadataUrlCheck/DeadUrl-cran/responses.py | 4 +-
88 .../MetadataUrlCheck/DeadUrl-ctan/responses.py | 4 +-
89 .../DeadUrl-freedesktop-gitlab/responses.py | 4 +-
90 .../MetadataUrlCheck/DeadUrl-gentoo/responses.py | 4 +-
91 .../DeadUrl-gnome-gitlab/responses.py | 4 +-
92 .../MetadataUrlCheck/DeadUrl-hackage/responses.py | 4 +-
93 .../DeadUrl-launchpad/responses.py | 4 +-
94 .../MetadataUrlCheck/DeadUrl-osdn/responses.py | 4 +-
95 .../MetadataUrlCheck/DeadUrl-pecl/responses.py | 4 +-
96 .../MetadataUrlCheck/DeadUrl-pypi/responses.py | 4 +-
97 .../MetadataUrlCheck/DeadUrl-rubygems/responses.py | 4 +-
98 .../DeadUrl-savannah-nongnu/responses.py | 4 +-
99 .../MetadataUrlCheck/DeadUrl-savannah/responses.py | 4 +-
100 .../DeadUrl-sourceforge/responses.py | 4 +-
101 .../DeadUrl-sourcehut/responses.py | 4 +-
102 .../MetadataUrlCheck/DeadUrl-vim/responses.py | 4 +-
103 .../network/MetadataUrlCheck/DeadUrl/responses.py | 4 +-
104 .../HttpsUrlAvailable/responses.py | 8 +-
105 .../MetadataUrlCheck/RedirectedUrl/responses.py | 11 +-
106 .../SSLCertificateError/responses.py | 2 +-
107 tests/addons/test_addons.py | 271 +++----
108 tests/addons/test_eclass.py | 122 +--
109 tests/addons/test_git.py | 525 +++++++------
110 tests/checks/test_acct.py | 118 +--
111 tests/checks/test_all.py | 61 +-
112 tests/checks/test_cleanup.py | 70 +-
113 tests/checks/test_codingstyle.py | 340 +++++----
114 tests/checks/test_dropped_keywords.py | 50 +-
115 tests/checks/test_git.py | 559 +++++++-------
116 tests/checks/test_glsa.py | 36 +-
117 tests/checks/test_header.py | 48 +-
118 tests/checks/test_imlate.py | 111 ++-
119 tests/checks/test_metadata.py | 839 +++++++++++----------
120 tests/checks/test_network.py | 85 ++-
121 tests/checks/test_perl.py | 50 +-
122 tests/checks/test_pkgdir.py | 335 ++++----
123 tests/checks/test_python.py | 603 ++++++++-------
124 tests/checks/test_repo.py | 85 ++-
125 tests/checks/test_repo_metadata.py | 95 +--
126 tests/checks/test_stablereq.py | 151 ++--
127 tests/checks/test_whitespace.py | 35 +-
128 tests/conftest.py | 50 +-
129 tests/misc.py | 65 +-
130 tests/scripts/test_argparse_actions.py | 231 +++---
131 tests/scripts/test_pkgcheck.py | 12 +-
132 tests/scripts/test_pkgcheck_cache.py | 58 +-
133 tests/scripts/test_pkgcheck_ci.py | 38 +-
134 tests/scripts/test_pkgcheck_replay.py | 41 +-
135 tests/scripts/test_pkgcheck_scan.py | 480 ++++++------
136 tests/scripts/test_pkgcheck_show.py | 64 +-
137 tests/test_api.py | 21 +-
138 tests/test_base.py | 14 +-
139 tests/test_cli.py | 81 +-
140 tests/test_feeds.py | 99 +--
141 tests/test_reporters.py | 196 ++---
142 130 files changed, 6347 insertions(+), 5290 deletions(-)
143
144 diff --git a/Makefile b/Makefile
145 index 736ed18b..1d48fcb2 100644
146 --- a/Makefile
147 +++ b/Makefile
148 @@ -12,3 +12,7 @@ sdist wheel:
149 .PHONY: clean
150 clean:
151 $(RM) -r build doc/man/pkgcheck doc/generated dist
152 +
153 +.PHONY: format
154 +format:
155 + $(PYTHON) -m black .
156
157 diff --git a/data/share/pkgcheck/ci.py b/data/share/pkgcheck/ci.py
158 index c9f438cd..9920a910 100755
159 --- a/data/share/pkgcheck/ci.py
160 +++ b/data/share/pkgcheck/ci.py
161 @@ -3,13 +3,13 @@
162 import json
163 import urllib.request
164
165 -JSON_URL = 'https://raw.githubusercontent.com/mgorny/pkgcheck2html/master/pkgcheck2html.conf.json'
166 +JSON_URL = "https://raw.githubusercontent.com/mgorny/pkgcheck2html/master/pkgcheck2html.conf.json"
167
168 with urllib.request.urlopen(JSON_URL) as f:
169 ci_data = json.loads(f.read())
170
171 -with open('pkgcheck.conf', 'w') as f:
172 - f.write('[CHECKSETS]\nGentooCI =\n')
173 +with open("pkgcheck.conf", "w") as f:
174 + f.write("[CHECKSETS]\nGentooCI =\n")
175 for k, v in sorted(ci_data.items()):
176 - if v == 'err':
177 - f.write(f' {k}\n')
178 + if v == "err":
179 + f.write(f" {k}\n")
180
181 diff --git a/doc/conf.py b/doc/conf.py
182 index 440c6603..84aae9ea 100644
183 --- a/doc/conf.py
184 +++ b/doc/conf.py
185 @@ -16,208 +16,205 @@
186 # -- General configuration ------------------------------------------------
187
188 # If your documentation needs a minimal Sphinx version, state it here.
189 -#needs_sphinx = '1.0'
190 +# needs_sphinx = '1.0'
191
192 # Add any Sphinx extension module names here, as strings. They can be
193 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
194 # ones.
195 extensions = [
196 - 'sphinx.ext.autodoc',
197 - 'sphinx.ext.autosummary',
198 - 'sphinx.ext.autosectionlabel',
199 - 'sphinx.ext.doctest',
200 - 'sphinx.ext.extlinks',
201 - 'sphinx.ext.intersphinx',
202 - 'sphinx.ext.todo',
203 - 'sphinx.ext.coverage',
204 - 'sphinx.ext.ifconfig',
205 - 'sphinx.ext.viewcode',
206 - 'snakeoil.dist.sphinxext',
207 + "sphinx.ext.autodoc",
208 + "sphinx.ext.autosummary",
209 + "sphinx.ext.autosectionlabel",
210 + "sphinx.ext.doctest",
211 + "sphinx.ext.extlinks",
212 + "sphinx.ext.intersphinx",
213 + "sphinx.ext.todo",
214 + "sphinx.ext.coverage",
215 + "sphinx.ext.ifconfig",
216 + "sphinx.ext.viewcode",
217 + "snakeoil.dist.sphinxext",
218 ]
219
220 # Add any paths that contain templates here, relative to this directory.
221 -#templates_path = ['_templates']
222 +# templates_path = ['_templates']
223
224 # The suffix of source filenames.
225 -source_suffix = '.rst'
226 +source_suffix = ".rst"
227
228 # The encoding of source files.
229 -#source_encoding = 'utf-8-sig'
230 +# source_encoding = 'utf-8-sig'
231
232 # The master toctree document.
233 -master_doc = 'index'
234 +master_doc = "index"
235
236 # General information about the project.
237 -project = 'pkgcheck'
238 -authors = ''
239 -copyright = '2006-2022, pkgcheck contributors'
240 +project = "pkgcheck"
241 +authors = ""
242 +copyright = "2006-2022, pkgcheck contributors"
243
244 # version is set by snakeoil extension
245 -release = 'master'
246 +release = "master"
247
248 # The language for content autogenerated by Sphinx. Refer to documentation
249 # for a list of supported languages.
250 -#language = None
251 +# language = None
252
253 # There are two options for replacing |today|: either, you set today to some
254 # non-false value, then it is used:
255 -#today = ''
256 +# today = ''
257 # Else, today_fmt is used as the format for a strftime call.
258 -#today_fmt = '%B %d, %Y'
259 +# today_fmt = '%B %d, %Y'
260
261 # List of patterns, relative to source directory, that match files and
262 # directories to ignore when looking for source files.
263 -exclude_patterns = ['_build', 'generated']
264 +exclude_patterns = ["_build", "generated"]
265
266 # The reST default role (used for this markup: `text`) to use for all
267 # documents.
268 -#default_role = None
269 +# default_role = None
270
271 # If true, '()' will be appended to :func: etc. cross-reference text.
272 -#add_function_parentheses = True
273 +# add_function_parentheses = True
274
275 # If true, the current module name will be prepended to all description
276 # unit titles (such as .. function::).
277 -#add_module_names = True
278 +# add_module_names = True
279
280 # If true, sectionauthor and moduleauthor directives will be shown in the
281 # output. They are ignored by default.
282 -#show_authors = False
283 +# show_authors = False
284
285 # The name of the Pygments (syntax highlighting) style to use.
286 -pygments_style = 'sphinx'
287 +pygments_style = "sphinx"
288
289 # A list of ignored prefixes for module index sorting.
290 -#modindex_common_prefix = []
291 +# modindex_common_prefix = []
292
293 # If true, keep warnings as "system message" paragraphs in the built documents.
294 -#keep_warnings = False
295 +# keep_warnings = False
296
297 # -- Options for HTML output ----------------------------------------------
298
299 # The theme to use for HTML and HTML Help pages. See the documentation for
300 # a list of builtin themes.
301 -html_theme = 'default'
302 +html_theme = "default"
303
304 # Theme options are theme-specific and customize the look and feel of a theme
305 # further. For a list of options available for each theme, see the
306 # documentation.
307 -#html_theme_options = {}
308 +# html_theme_options = {}
309
310 # Add any paths that contain custom themes here, relative to this directory.
311 -#html_theme_path = []
312 +# html_theme_path = []
313
314 # The name for this set of Sphinx documents. If None, it defaults to
315 # "<project> v<release> documentation".
316 -#html_title = None
317 +# html_title = None
318
319 # A shorter title for the navigation bar. Default is the same as html_title.
320 -#html_short_title = None
321 +# html_short_title = None
322
323 # The name of an image file (relative to this directory) to place at the top
324 # of the sidebar.
325 -#html_logo = None
326 +# html_logo = None
327
328 # The name of an image file (within the static path) to use as favicon of the
329 # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
330 # pixels large.
331 -#html_favicon = None
332 +# html_favicon = None
333
334 # Add any paths that contain custom static files (such as style sheets) here,
335 # relative to this directory. They are copied after the builtin static files,
336 # so a file named "default.css" will overwrite the builtin "default.css".
337 -#html_static_path = ['_static']
338 +# html_static_path = ['_static']
339
340 # Add any extra paths that contain custom files (such as robots.txt or
341 # .htaccess) here, relative to this directory. These files are copied
342 # directly to the root of the documentation.
343 -#html_extra_path = []
344 +# html_extra_path = []
345
346 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
347 # using the given strftime format.
348 -#html_last_updated_fmt = '%b %d, %Y'
349 +# html_last_updated_fmt = '%b %d, %Y'
350
351 # If true, SmartyPants will be used to convert quotes and dashes to
352 # typographically correct entities.
353 -#html_use_smartypants = True
354 +# html_use_smartypants = True
355
356 # Custom sidebar templates, maps document names to template names.
357 -#html_sidebars = {}
358 +# html_sidebars = {}
359
360 # Additional templates that should be rendered to pages, maps page names to
361 # template names.
362 -#html_additional_pages = {}
363 +# html_additional_pages = {}
364
365 # If false, no module index is generated.
366 -#html_domain_indices = True
367 +# html_domain_indices = True
368
369 # If false, no index is generated.
370 -#html_use_index = True
371 +# html_use_index = True
372
373 # If true, the index is split into individual pages for each letter.
374 -#html_split_index = False
375 +# html_split_index = False
376
377 # If true, links to the reST sources are added to the pages.
378 -#html_show_sourcelink = True
379 +# html_show_sourcelink = True
380
381 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
382 -#html_show_sphinx = True
383 +# html_show_sphinx = True
384
385 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
386 -#html_show_copyright = True
387 +# html_show_copyright = True
388
389 # If true, an OpenSearch description file will be output, and all pages will
390 # contain a <link> tag referring to it. The value of this option must be the
391 # base URL from which the finished HTML is served.
392 -#html_use_opensearch = ''
393 +# html_use_opensearch = ''
394
395 # This is the file name suffix for HTML files (e.g. ".xhtml").
396 -#html_file_suffix = None
397 +# html_file_suffix = None
398
399 # Output file base name for HTML help builder.
400 -htmlhelp_basename = 'pkgcheckdoc'
401 +htmlhelp_basename = "pkgcheckdoc"
402
403
404 # -- Options for LaTeX output ---------------------------------------------
405
406 latex_elements = {
407 -# The paper size ('letterpaper' or 'a4paper').
408 -#'papersize': 'letterpaper',
409 -
410 -# The font size ('10pt', '11pt' or '12pt').
411 -#'pointsize': '10pt',
412 -
413 -# Additional stuff for the LaTeX preamble.
414 -#'preamble': '',
415 + # The paper size ('letterpaper' or 'a4paper').
416 + #'papersize': 'letterpaper',
417 + # The font size ('10pt', '11pt' or '12pt').
418 + #'pointsize': '10pt',
419 + # Additional stuff for the LaTeX preamble.
420 + #'preamble': '',
421 }
422
423 # Grouping the document tree into LaTeX files. List of tuples
424 # (source start file, target name, title,
425 # author, documentclass [howto, manual, or own class]).
426 latex_documents = [
427 - ('index', 'pkgcheck.tex', 'pkgcheck Documentation',
428 - authors, 'manual'),
429 + ("index", "pkgcheck.tex", "pkgcheck Documentation", authors, "manual"),
430 ]
431
432 # The name of an image file (relative to this directory) to place at the top of
433 # the title page.
434 -#latex_logo = None
435 +# latex_logo = None
436
437 # For "manual" documents, if this is true, then toplevel headings are parts,
438 # not chapters.
439 -#latex_use_parts = False
440 +# latex_use_parts = False
441
442 # If true, show page references after internal links.
443 -#latex_show_pagerefs = False
444 +# latex_show_pagerefs = False
445
446 # If true, show URL addresses after external links.
447 -#latex_show_urls = False
448 +# latex_show_urls = False
449
450 # Documents to append as an appendix to all manuals.
451 -#latex_appendices = []
452 +# latex_appendices = []
453
454 # If false, no module index is generated.
455 -#latex_domain_indices = True
456 +# latex_domain_indices = True
457
458
459 # -- Options for manual page output ---------------------------------------
460 @@ -227,7 +224,7 @@ latex_documents = [
461 man_pages = []
462
463 # If true, show URL addresses after external links.
464 -#man_show_urls = False
465 +# man_show_urls = False
466
467 # -- Options for Texinfo output -------------------------------------------
468
469 @@ -235,22 +232,28 @@ man_pages = []
470 # (source start file, target name, title, author,
471 # dir menu entry, description, category)
472 texinfo_documents = [
473 - ('index', 'pkgcheck', 'pkgcheck Documentation',
474 - authors, 'pkgcheck', 'One line description of project.',
475 - 'Miscellaneous'),
476 + (
477 + "index",
478 + "pkgcheck",
479 + "pkgcheck Documentation",
480 + authors,
481 + "pkgcheck",
482 + "One line description of project.",
483 + "Miscellaneous",
484 + ),
485 ]
486
487 # Documents to append as an appendix to all manuals.
488 -#texinfo_appendices = []
489 +# texinfo_appendices = []
490
491 # If false, no module index is generated.
492 -#texinfo_domain_indices = True
493 +# texinfo_domain_indices = True
494
495 # How to display URL addresses: 'footnote', 'no', or 'inline'.
496 -#texinfo_show_urls = 'footnote'
497 +# texinfo_show_urls = 'footnote'
498
499 # If true, do not generate a @detailmenu in the "Top" node's menu.
500 -#texinfo_no_detailmenu = False
501 +# texinfo_no_detailmenu = False
502
503
504 # -- Options for Epub output ----------------------------------------------
505 @@ -262,62 +265,62 @@ epub_publisher = authors
506 epub_copyright = copyright
507
508 # The basename for the epub file. It defaults to the project name.
509 -#epub_basename = 'pkgcheck'
510 +# epub_basename = 'pkgcheck'
511
512 # The HTML theme for the epub output. Since the default themes are not optimized
513 # for small screen space, using the same theme for HTML and epub output is
514 # usually not wise. This defaults to 'epub', a theme designed to save visual
515 # space.
516 -#epub_theme = 'epub'
517 +# epub_theme = 'epub'
518
519 # The language of the text. It defaults to the language option
520 # or en if the language is not set.
521 -#epub_language = ''
522 +# epub_language = ''
523
524 # The scheme of the identifier. Typical schemes are ISBN or URL.
525 -#epub_scheme = ''
526 +# epub_scheme = ''
527
528 # The unique identifier of the text. This can be a ISBN number
529 # or the project homepage.
530 -#epub_identifier = ''
531 +# epub_identifier = ''
532
533 # A unique identification for the text.
534 -#epub_uid = ''
535 +# epub_uid = ''
536
537 # A tuple containing the cover image and cover page html template filenames.
538 -#epub_cover = ()
539 +# epub_cover = ()
540
541 # A sequence of (type, uri, title) tuples for the guide element of content.opf.
542 -#epub_guide = ()
543 +# epub_guide = ()
544
545 # HTML files that should be inserted before the pages created by sphinx.
546 # The format is a list of tuples containing the path and title.
547 -#epub_pre_files = []
548 +# epub_pre_files = []
549
550 # HTML files that should be inserted after the pages created by sphinx.
551 # The format is a list of tuples containing the path and title.
552 -#epub_post_files = []
553 +# epub_post_files = []
554
555 # A list of files that should not be packed into the epub file.
556 -epub_exclude_files = ['search.html']
557 +epub_exclude_files = ["search.html"]
558
559 # The depth of the table of contents in toc.ncx.
560 -#epub_tocdepth = 3
561 +# epub_tocdepth = 3
562
563 # Allow duplicate toc entries.
564 -#epub_tocdup = True
565 +# epub_tocdup = True
566
567 # Choose between 'default' and 'includehidden'.
568 -#epub_tocscope = 'default'
569 +# epub_tocscope = 'default'
570
571 # Fix unsupported image types using the PIL.
572 -#epub_fix_images = False
573 +# epub_fix_images = False
574
575 # Scale large images.
576 -#epub_max_image_width = 0
577 +# epub_max_image_width = 0
578
579 # How to display URL addresses: 'footnote', 'no', or 'inline'.
580 -#epub_show_urls = 'inline'
581 +# epub_show_urls = 'inline'
582
583 # If false, no index is generated.
584 -#epub_use_index = True
585 +# epub_use_index = True
586
587 diff --git a/doc/generate/pkgcheck/checks.py b/doc/generate/pkgcheck/checks.py
588 index 171186fe..429a17e3 100755
589 --- a/doc/generate/pkgcheck/checks.py
590 +++ b/doc/generate/pkgcheck/checks.py
591 @@ -30,7 +30,7 @@ def main(f=sys.stdout, **kwargs):
592
593 def _rst_header(char, text, newline=True, leading=False):
594 if newline:
595 - out('\n', end='')
596 + out("\n", end="")
597 if leading:
598 out(char * len(text))
599 out(text)
600 @@ -43,33 +43,38 @@ def main(f=sys.stdout, **kwargs):
601 wrapper = TextWrapper(width=85)
602
603 for scope in base.scopes.values():
604 - _rst_header('-', scope.desc.capitalize() + ' scope', leading=True)
605 + _rst_header("-", scope.desc.capitalize() + " scope", leading=True)
606
607 checks = (x for x in objects.CHECKS.values() if x.scope == scope)
608 for check in checks:
609 if check.__doc__ is not None:
610 try:
611 - summary, explanation = check.__doc__.split('\n', 1)
612 + summary, explanation = check.__doc__.split("\n", 1)
613 except ValueError:
614 summary = check.__doc__
615 explanation = None
616 else:
617 summary = None
618
619 - _rst_header('-', check.__name__)
620 + _rst_header("-", check.__name__)
621 if summary:
622 - out('\n' + dedent(summary).strip())
623 + out("\n" + dedent(summary).strip())
624 if explanation:
625 - explanation = '\n'.join(dedent(explanation).strip().split('\n'))
626 - out('\n' + explanation)
627 + explanation = "\n".join(dedent(explanation).strip().split("\n"))
628 + out("\n" + explanation)
629 if issubclass(check, GentooRepoCheck):
630 - out('\n\n- Gentoo repo specific')
631 - known_results = ', '.join(
632 - f'`{r.__name__}`_' for r in
633 - sorted(check.known_results, key=attrgetter('__name__')))
634 - out('\n' + '\n'.join(wrapper.wrap(
635 - f"(known result{_pl(check.known_results)}: {known_results})")))
636 -
637 -
638 -if __name__ == '__main__':
639 + out("\n\n- Gentoo repo specific")
640 + known_results = ", ".join(
641 + f"`{r.__name__}`_"
642 + for r in sorted(check.known_results, key=attrgetter("__name__"))
643 + )
644 + out(
645 + "\n"
646 + + "\n".join(
647 + wrapper.wrap(f"(known result{_pl(check.known_results)}: {known_results})")
648 + )
649 + )
650 +
651 +
652 +if __name__ == "__main__":
653 main()
654
655 diff --git a/doc/generate/pkgcheck/keywords.py b/doc/generate/pkgcheck/keywords.py
656 index 628d4c8f..c6cfe432 100755
657 --- a/doc/generate/pkgcheck/keywords.py
658 +++ b/doc/generate/pkgcheck/keywords.py
659 @@ -25,7 +25,7 @@ def main(f=sys.stdout, **kwargs):
660
661 def _rst_header(char, text, newline=True, leading=False):
662 if newline:
663 - out('\n', end='')
664 + out("\n", end="")
665 if leading:
666 out(char * len(text))
667 out(text)
668 @@ -41,32 +41,31 @@ def main(f=sys.stdout, **kwargs):
669 related_checks[keyword].add(check)
670
671 for scope in base.scopes.values():
672 - _rst_header('-', scope.desc.capitalize() + ' scope', leading=True)
673 + _rst_header("-", scope.desc.capitalize() + " scope", leading=True)
674
675 keywords = (x for x in objects.KEYWORDS.values() if x.scope == scope)
676 for keyword in keywords:
677 if keyword.__doc__ is not None:
678 try:
679 - summary, explanation = keyword.__doc__.split('\n', 1)
680 + summary, explanation = keyword.__doc__.split("\n", 1)
681 except ValueError:
682 summary = keyword.__doc__
683 explanation = None
684 else:
685 summary = None
686
687 - _rst_header('-', keyword.__name__)
688 + _rst_header("-", keyword.__name__)
689 if summary:
690 - out('\n' + dedent(summary).strip())
691 + out("\n" + dedent(summary).strip())
692 if explanation:
693 - explanation = '\n'.join(dedent(explanation).strip().split('\n'))
694 - out('\n' + explanation)
695 + explanation = "\n".join(dedent(explanation).strip().split("\n"))
696 + out("\n" + explanation)
697 if all(issubclass(x, GentooRepoCheck) for x in related_checks[keyword]):
698 - out(f'\n- Gentoo repo specific')
699 - out('\n' + f'- level: {keyword.level}')
700 - checks = ', '.join(sorted(
701 - f'`{c.__name__}`_' for c in related_checks[keyword]))
702 - out(f'- related check{_pl(related_checks[keyword])}: {checks}')
703 + out("\n- Gentoo repo specific")
704 + out("\n" + f"- level: {keyword.level}")
705 + checks = ", ".join(sorted(f"`{c.__name__}`_" for c in related_checks[keyword]))
706 + out(f"- related check{_pl(related_checks[keyword])}: {checks}")
707
708
709 -if __name__ == '__main__':
710 +if __name__ == "__main__":
711 main()
712
713 diff --git a/doc/generate/pkgcheck/reporters.py b/doc/generate/pkgcheck/reporters.py
714 index e0debc35..9f503811 100755
715 --- a/doc/generate/pkgcheck/reporters.py
716 +++ b/doc/generate/pkgcheck/reporters.py
717 @@ -14,7 +14,7 @@ def main(f=sys.stdout, **kwargs):
718
719 def _rst_header(char, text, newline=True, leading=False):
720 if newline:
721 - out('\n', end='')
722 + out("\n", end="")
723 if leading:
724 out(char * len(text))
725 out(text)
726 @@ -24,25 +24,25 @@ def main(f=sys.stdout, **kwargs):
727 if __doc__ is not None:
728 out(__doc__.strip())
729
730 - _rst_header('=', 'Reporters', newline=False)
731 + _rst_header("=", "Reporters", newline=False)
732
733 for reporter in objects.REPORTERS.values():
734 if reporter.__doc__ is not None:
735 try:
736 - summary, explanation = reporter.__doc__.split('\n', 1)
737 + summary, explanation = reporter.__doc__.split("\n", 1)
738 except ValueError:
739 summary = reporter.__doc__
740 explanation = None
741 else:
742 summary = None
743
744 - _rst_header('-', reporter.__name__, leading=True)
745 + _rst_header("-", reporter.__name__, leading=True)
746 if summary:
747 - out('\n' + dedent(summary).strip())
748 + out("\n" + dedent(summary).strip())
749 if explanation:
750 - explanation = '\n'.join(dedent(explanation).strip().split('\n'))
751 - out('\n' + explanation)
752 + explanation = "\n".join(dedent(explanation).strip().split("\n"))
753 + out("\n" + explanation)
754
755
756 -if __name__ == '__main__':
757 +if __name__ == "__main__":
758 main()
759
760 diff --git a/pyproject.toml b/pyproject.toml
761 index 74945a14..da1db2bc 100644
762 --- a/pyproject.toml
763 +++ b/pyproject.toml
764 @@ -73,6 +73,9 @@ zip-safe = false
765 [tool.setuptools.dynamic]
766 version = {attr = "pkgcheck.__version__"}
767
768 +[tool.black]
769 +line-length = 100
770 +
771 [tool.pytest.ini_options]
772 minversion = "6.0"
773 addopts = "-vv -ra -l"
774
775 diff --git a/setup.py b/setup.py
776 index 4149fae3..f6e96a36 100644
777 --- a/setup.py
778 +++ b/setup.py
779 @@ -13,14 +13,13 @@ from setuptools.command.sdist import sdist as orig_sdist
780 from wheel.bdist_wheel import bdist_wheel as orig_bdist_wheel
781
782
783 -use_system_tree_sitter_bash = bool(os.environ.get(
784 - 'USE_SYSTEM_TREE_SITTER_BASH', False))
785 +use_system_tree_sitter_bash = bool(os.environ.get("USE_SYSTEM_TREE_SITTER_BASH", False))
786
787
788 @contextmanager
789 def sys_path():
790 orig_path = sys.path[:]
791 - sys.path.insert(0, str(Path.cwd() / 'src'))
792 + sys.path.insert(0, str(Path.cwd() / "src"))
793 try:
794 yield
795 finally:
796 @@ -28,7 +27,7 @@ def sys_path():
797
798
799 class build_treesitter(Command, SubCommand):
800 - description = 'build tree-sitter-bash library'
801 + description = "build tree-sitter-bash library"
802
803 def initialize_options(self):
804 pass
805 @@ -37,29 +36,30 @@ class build_treesitter(Command, SubCommand):
806 pass
807
808 def get_source_files(self):
809 - cwd = Path(__file__).parent / 'tree-sitter-bash/src'
810 + cwd = Path(__file__).parent / "tree-sitter-bash/src"
811 return [
812 - str(cwd / 'GNUmakefile'), str(cwd / 'tree_sitter/parser.h'),
813 - str(cwd / 'parser.c'), str(cwd / 'scanner.cc'),
814 + str(cwd / "GNUmakefile"),
815 + str(cwd / "tree_sitter/parser.h"),
816 + str(cwd / "parser.c"),
817 + str(cwd / "scanner.cc"),
818 ]
819
820 - library_path = Path(__file__).parent / 'src/pkgcheck/bash/lang.so'
821 + library_path = Path(__file__).parent / "src/pkgcheck/bash/lang.so"
822
823 def run(self):
824 if not use_system_tree_sitter_bash:
825 if not self.library_path.exists():
826 - logging.info('building tree-sitter-bash library')
827 + logging.info("building tree-sitter-bash library")
828 with sys_path():
829 from pkgcheck.bash import build_library
830 - build_library(self.library_path, ['tree-sitter-bash'])
831 + build_library(self.library_path, ["tree-sitter-bash"])
832
833
834 class build(orig_build):
835 - sub_commands = orig_build.sub_commands + [('build_treesitter', None)]
836 + sub_commands = orig_build.sub_commands + [("build_treesitter", None)]
837
838
839 class install(orig_install):
840 -
841 def finalize_options(self):
842 """Force platlib install since non-python libraries are included."""
843 super().finalize_options()
844 @@ -70,14 +70,18 @@ class install(orig_install):
845 self.write_obj_lists()
846 self.generate_files()
847
848 - self.copy_tree('data', self.install_data)
849 + self.copy_tree("data", self.install_data)
850
851 install_dir = Path(self.install_lib)
852 if not use_system_tree_sitter_bash:
853 - self.reinitialize_command('build').ensure_finalized()
854 - (dst := install_dir / 'pkgcheck/bash').mkdir(parents=True, exist_ok=True)
855 - self.copy_file(build_treesitter.library_path, dst / 'lang.so',
856 - preserve_mode=True, preserve_times=False)
857 + self.reinitialize_command("build").ensure_finalized()
858 + (dst := install_dir / "pkgcheck/bash").mkdir(parents=True, exist_ok=True)
859 + self.copy_file(
860 + build_treesitter.library_path,
861 + dst / "lang.so",
862 + preserve_mode=True,
863 + preserve_times=False,
864 + )
865
866 def write_obj_lists(self):
867 """Generate config file of keyword, check, and other object lists."""
868 @@ -88,7 +92,6 @@ class install(orig_install):
869
870 # hack to drop quotes on modules in generated files
871 class _kls:
872 -
873 def __init__(self, module):
874 self.module = module
875
876 @@ -100,41 +103,50 @@ class install(orig_install):
877
878 modules = defaultdict(set)
879 objs = defaultdict(list)
880 - for obj in ('KEYWORDS', 'CHECKS', 'REPORTERS'):
881 + for obj in ("KEYWORDS", "CHECKS", "REPORTERS"):
882 for name, cls in getattr(objects, obj).items():
883 - parent, module = cls.__module__.rsplit('.', 1)
884 + parent, module = cls.__module__.rsplit(".", 1)
885 modules[parent].add(module)
886 - objs[obj].append((name, _kls(f'{module}.{name}')))
887 + objs[obj].append((name, _kls(f"{module}.{name}")))
888
889 - keywords = tuple(objs['KEYWORDS'])
890 - checks = tuple(objs['CHECKS'])
891 - reporters = tuple(objs['REPORTERS'])
892 + keywords = tuple(objs["KEYWORDS"])
893 + checks = tuple(objs["CHECKS"])
894 + reporters = tuple(objs["REPORTERS"])
895
896 - logging.info(f'writing objects to {objects_path!r}')
897 - with objects_path.open('w') as f:
898 + logging.info(f"writing objects to {objects_path!r}")
899 + with objects_path.open("w") as f:
900 objects_path.chmod(0o644)
901 for k, v in sorted(modules.items()):
902 f.write(f"from {k} import {', '.join(sorted(v))}\n")
903 - f.write(dedent(f"""\
904 - KEYWORDS = {keywords}
905 - CHECKS = {checks}
906 - REPORTERS = {reporters}
907 - """))
908 -
909 - logging.info(f'writing path constants to {const_path!r}')
910 - with const_path.open('w') as f:
911 + f.write(
912 + dedent(
913 + f"""\
914 + KEYWORDS = {keywords}
915 + CHECKS = {checks}
916 + REPORTERS = {reporters}
917 + """
918 + )
919 + )
920 +
921 + logging.info(f"writing path constants to {const_path!r}")
922 + with const_path.open("w") as f:
923 const_path.chmod(0o644)
924 - f.write(dedent("""\
925 - from os.path import abspath, exists, join
926 - import sys
927 - INSTALL_PREFIX = abspath(sys.prefix)
928 - if not exists(join(INSTALL_PREFIX, 'lib/pkgcore')):
929 - INSTALL_PREFIX = abspath(sys.base_prefix)
930 - DATA_PATH = join(INSTALL_PREFIX, 'share/pkgcheck')
931 - """))
932 + f.write(
933 + dedent(
934 + """\
935 + from os.path import abspath, exists, join
936 + import sys
937 + INSTALL_PREFIX = abspath(sys.prefix)
938 + if not exists(join(INSTALL_PREFIX, 'lib/pkgcore')):
939 + INSTALL_PREFIX = abspath(sys.base_prefix)
940 + DATA_PATH = join(INSTALL_PREFIX, 'share/pkgcheck')
941 + """
942 + )
943 + )
944
945 logging.info("generating version info")
946 from snakeoil.version import get_git_version
947 +
948 verinfo_path.write_text(f"version_info={get_git_version(Path(__file__).parent)!r}")
949
950 def generate_files(self):
951 @@ -142,22 +154,21 @@ class install(orig_install):
952 from pkgcheck import base, objects
953 from pkgcheck.addons import caches
954
955 - (dst := Path(self.install_data) / 'share/pkgcheck').mkdir(parents=True, exist_ok=True)
956 + (dst := Path(self.install_data) / "share/pkgcheck").mkdir(parents=True, exist_ok=True)
957
958 - logging.info('Generating available scopes')
959 - (dst / 'scopes').write_text('\n'.join(base.scopes) + '\n')
960 + logging.info("Generating available scopes")
961 + (dst / "scopes").write_text("\n".join(base.scopes) + "\n")
962
963 - logging.info('Generating available cache types')
964 + logging.info("Generating available cache types")
965 cache_objs = caches.CachedAddon.caches.values()
966 - (dst / 'caches').write_text('\n'.join(x.type for x in cache_objs) + '\n')
967 + (dst / "caches").write_text("\n".join(x.type for x in cache_objs) + "\n")
968
969 - for obj in ('KEYWORDS', 'CHECKS', 'REPORTERS'):
970 - logging.info(f'Generating {obj.lower()} list')
971 - (dst / obj.lower()).write_text('\n'.join(getattr(objects, obj)) + '\n')
972 + for obj in ("KEYWORDS", "CHECKS", "REPORTERS"):
973 + logging.info(f"Generating {obj.lower()} list")
974 + (dst / obj.lower()).write_text("\n".join(getattr(objects, obj)) + "\n")
975
976
977 class bdist_wheel(orig_bdist_wheel):
978 -
979 def finalize_options(self):
980 super().finalize_options()
981 self.root_is_pure = False # Mark us as not a pure python package
982 @@ -165,30 +176,34 @@ class bdist_wheel(orig_bdist_wheel):
983 def get_tag(self):
984 _, _, plat = super().get_tag()
985 # We don't contain any python source, nor any python extensions
986 - return 'py3', 'none', plat
987 + return "py3", "none", plat
988
989
990 class sdist(orig_sdist):
991 -
992 def make_release_tree(self, base_dir, files):
993 super().make_release_tree(base_dir, files)
994 base_dir = Path(base_dir)
995
996 - if (man_page := Path(__file__).parent / 'build/sphinx/man/pkgcheck.1').exists():
997 - (base_dir / 'man').mkdir(parents=True, exist_ok=True)
998 - self.copy_file(man_page, base_dir / 'man/pkgcheck.1', preserve_mode=False, preserve_times=False)
999 + if (man_page := Path(__file__).parent / "build/sphinx/man/pkgcheck.1").exists():
1000 + (base_dir / "man").mkdir(parents=True, exist_ok=True)
1001 + self.copy_file(
1002 + man_page, base_dir / "man/pkgcheck.1", preserve_mode=False, preserve_times=False
1003 + )
1004
1005 logging.info("generating version info")
1006 from snakeoil.version import get_git_version
1007 - (base_dir / 'src/pkgcheck/_verinfo.py').write_text(f"version_info={get_git_version(Path(__file__).parent)!r}")
1008 +
1009 + (base_dir / "src/pkgcheck/_verinfo.py").write_text(
1010 + f"version_info={get_git_version(Path(__file__).parent)!r}"
1011 + )
1012
1013
1014 setup(
1015 cmdclass={
1016 - 'bdist_wheel': bdist_wheel,
1017 - 'build': build,
1018 - 'build_treesitter': build_treesitter,
1019 - 'install': install,
1020 - 'sdist': sdist,
1021 + "bdist_wheel": bdist_wheel,
1022 + "build": build,
1023 + "build_treesitter": build_treesitter,
1024 + "install": install,
1025 + "sdist": sdist,
1026 }
1027 )
1028
1029 diff --git a/src/pkgcheck/__init__.py b/src/pkgcheck/__init__.py
1030 index 699538a1..8e50bdfc 100644
1031 --- a/src/pkgcheck/__init__.py
1032 +++ b/src/pkgcheck/__init__.py
1033 @@ -4,9 +4,9 @@ from .api import keywords, scan
1034 from .base import PkgcheckException
1035 from .results import Result
1036
1037 -__all__ = ('keywords', 'scan', 'PkgcheckException', 'Result')
1038 -__title__ = 'pkgcheck'
1039 -__version__ = '0.10.20'
1040 +__all__ = ("keywords", "scan", "PkgcheckException", "Result")
1041 +__title__ = "pkgcheck"
1042 +__version__ = "0.10.20"
1043
1044
1045 def __getattr__(name):
1046 @@ -15,9 +15,9 @@ def __getattr__(name):
1047 return keywords[name]
1048
1049 try:
1050 - return _import('.' + name, __name__)
1051 + return _import("." + name, __name__)
1052 except ImportError:
1053 - raise AttributeError(f'module {__name__} has no attribute {name}')
1054 + raise AttributeError(f"module {__name__} has no attribute {name}")
1055
1056
1057 def __dir__():
1058
1059 diff --git a/src/pkgcheck/addons/__init__.py b/src/pkgcheck/addons/__init__.py
1060 index 4d7aff48..5a5be2d0 100644
1061 --- a/src/pkgcheck/addons/__init__.py
1062 +++ b/src/pkgcheck/addons/__init__.py
1063 @@ -26,18 +26,18 @@ class ArchesArgs(arghparse.CommaSeparatedNegations):
1064
1065 if not enabled:
1066 # enable all non-prefix arches
1067 - enabled = set(arch for arch in all_arches if '-' not in arch)
1068 + enabled = set(arch for arch in all_arches if "-" not in arch)
1069
1070 arches = set(enabled).difference(disabled)
1071 if all_arches and (unknown_arches := arches.difference(all_arches)):
1072 - es = pluralism(unknown_arches, plural='es')
1073 - unknown = ', '.join(unknown_arches)
1074 - valid = ', '.join(sorted(all_arches))
1075 - parser.error(f'unknown arch{es}: {unknown} (valid arches: {valid})')
1076 + es = pluralism(unknown_arches, plural="es")
1077 + unknown = ", ".join(unknown_arches)
1078 + valid = ", ".join(sorted(all_arches))
1079 + parser.error(f"unknown arch{es}: {unknown} (valid arches: {valid})")
1080
1081 # check if any selected arch only has experimental profiles
1082 for arch in arches:
1083 - if all(p.status == 'exp' for p in namespace.target_repo.profiles if p.arch == arch):
1084 + if all(p.status == "exp" for p in namespace.target_repo.profiles if p.arch == arch):
1085 namespace.exp_profiles_required = True
1086 break
1087
1088 @@ -51,11 +51,17 @@ class ArchesAddon(base.Addon):
1089
1090 @classmethod
1091 def mangle_argparser(cls, parser):
1092 - group = parser.add_argument_group('arches')
1093 + group = parser.add_argument_group("arches")
1094 group.add_argument(
1095 - '-a', '--arches', dest='selected_arches', metavar='ARCH', default=(),
1096 - action=arghparse.Delayed, target=ArchesArgs, priority=100,
1097 - help='comma separated list of arches to enable/disable',
1098 + "-a",
1099 + "--arches",
1100 + dest="selected_arches",
1101 + metavar="ARCH",
1102 + default=(),
1103 + action=arghparse.Delayed,
1104 + target=ArchesArgs,
1105 + priority=100,
1106 + help="comma separated list of arches to enable/disable",
1107 docs="""
1108 Comma separated list of arches to enable and disable.
1109
1110 @@ -67,8 +73,9 @@ class ArchesAddon(base.Addon):
1111 By default all repo defined arches are used; however,
1112 stable-related checks (e.g. UnstableOnly) default to the set of
1113 arches having stable profiles in the target repo.
1114 - """)
1115 - parser.bind_delayed_default(1000, 'arches')(cls._default_arches)
1116 + """,
1117 + )
1118 + parser.bind_delayed_default(1000, "arches")(cls._default_arches)
1119
1120 @staticmethod
1121 def _default_arches(namespace, attr):
1122 @@ -81,14 +88,14 @@ class KeywordsAddon(base.Addon):
1123
1124 def __init__(self, *args):
1125 super().__init__(*args)
1126 - special = {'-*'}
1127 + special = {"-*"}
1128 self.arches = self.options.target_repo.known_arches
1129 - unstable = {'~' + x for x in self.arches}
1130 - disabled = {'-' + x for x in chain(self.arches, unstable)}
1131 + unstable = {"~" + x for x in self.arches}
1132 + disabled = {"-" + x for x in chain(self.arches, unstable)}
1133 self.valid = special | self.arches | unstable | disabled
1134 # Note: '*' and '~*' are portage-only, i.e. not in the spec, so they
1135 # don't belong in the main tree.
1136 - self.portage = {'*', '~*'}
1137 + self.portage = {"*", "~*"}
1138
1139
1140 class StableArchesAddon(base.Addon):
1141 @@ -98,7 +105,7 @@ class StableArchesAddon(base.Addon):
1142
1143 @classmethod
1144 def mangle_argparser(cls, parser):
1145 - parser.bind_delayed_default(1001, 'stable_arches')(cls._default_stable_arches)
1146 + parser.bind_delayed_default(1001, "stable_arches")(cls._default_stable_arches)
1147
1148 @staticmethod
1149 def _default_stable_arches(namespace, attr):
1150 @@ -106,11 +113,12 @@ class StableArchesAddon(base.Addon):
1151 target_repo = namespace.target_repo
1152 if not namespace.selected_arches:
1153 # use known stable arches (GLEP 72) if arches aren't specified
1154 - stable_arches = target_repo.config.arches_desc['stable']
1155 + stable_arches = target_repo.config.arches_desc["stable"]
1156 # fallback to determining stable arches from profiles.desc if arches.desc doesn't exist
1157 if not stable_arches:
1158 - stable_arches = set().union(*(
1159 - repo.profiles.arches('stable') for repo in target_repo.trees))
1160 + stable_arches = set().union(
1161 + *(repo.profiles.arches("stable") for repo in target_repo.trees)
1162 + )
1163 else:
1164 stable_arches = namespace.arches
1165
1166 @@ -129,17 +137,17 @@ class UnstatedIuse(results.VersionResult, results.Error):
1167
1168 @property
1169 def desc(self):
1170 - msg = [f'attr({self.attr})']
1171 + msg = [f"attr({self.attr})"]
1172 if self.profile is not None:
1173 if self.num_profiles is not None:
1174 - num_profiles = f' ({self.num_profiles} total)'
1175 + num_profiles = f" ({self.num_profiles} total)"
1176 else:
1177 - num_profiles = ''
1178 - msg.append(f'profile {self.profile!r}{num_profiles}')
1179 - flags = ', '.join(self.flags)
1180 + num_profiles = ""
1181 + msg.append(f"profile {self.profile!r}{num_profiles}")
1182 + flags = ", ".join(self.flags)
1183 s = pluralism(self.flags)
1184 - msg.extend([f'unstated flag{s}', f'[ {flags} ]'])
1185 - return ': '.join(msg)
1186 + msg.extend([f"unstated flag{s}", f"[ {flags} ]"])
1187 + return ": ".join(msg)
1188
1189
1190 class UseAddon(base.Addon):
1191 @@ -153,7 +161,8 @@ class UseAddon(base.Addon):
1192 for p in target_repo.profiles:
1193 try:
1194 self.profiles.append(
1195 - target_repo.profiles.create_profile(p, load_profile_base=False))
1196 + target_repo.profiles.create_profile(p, load_profile_base=False)
1197 + )
1198 except profiles_mod.ProfileError:
1199 continue
1200
1201 @@ -173,8 +182,8 @@ class UseAddon(base.Addon):
1202 for repo in target_repo.trees:
1203 known_iuse.update(flag for matcher, (flag, desc) in repo.config.use_desc)
1204 known_iuse_expand.update(
1205 - flag for flags in repo.config.use_expand_desc.values()
1206 - for flag, desc in flags)
1207 + flag for flags in repo.config.use_expand_desc.values() for flag, desc in flags
1208 + )
1209
1210 self.collapsed_iuse = misc.non_incremental_collapsed_restrict_to_data(
1211 ((packages.AlwaysTrue, known_iuse),),
1212 @@ -186,8 +195,9 @@ class UseAddon(base.Addon):
1213 self.ignore = not (c_implicit_iuse or known_iuse or known_iuse_expand)
1214 if self.ignore:
1215 logger.debug(
1216 - 'disabling use/iuse validity checks since no usable '
1217 - 'use.desc and use.local.desc were found')
1218 + "disabling use/iuse validity checks since no usable "
1219 + "use.desc and use.local.desc were found"
1220 + )
1221
1222 def allowed_iuse(self, pkg):
1223 return self.collapsed_iuse.pull_data(pkg).union(pkg.local_use)
1224 @@ -213,9 +223,14 @@ class UseAddon(base.Addon):
1225 v.append(node.restriction)
1226 yield from self._flatten_restricts(
1227 iflatten_instance(node.payload, skip_filter),
1228 - skip_filter, stated, unstated, attr, v)
1229 + skip_filter,
1230 + stated,
1231 + unstated,
1232 + attr,
1233 + v,
1234 + )
1235 continue
1236 - elif attr == 'required_use':
1237 + elif attr == "required_use":
1238 unstated.update(filterfalse(stated.__contains__, node.vals))
1239 yield k, tuple(v)
1240
1241 @@ -248,8 +263,11 @@ class UseAddon(base.Addon):
1242 skip_filter = (packages.Conditional,) + klasses
1243 nodes = iflatten_instance(seq, skip_filter)
1244 unstated = set()
1245 - vals = dict(self._flatten_restricts(
1246 - nodes, skip_filter, stated=pkg.iuse_stripped, unstated=unstated, attr=attr))
1247 + vals = dict(
1248 + self._flatten_restricts(
1249 + nodes, skip_filter, stated=pkg.iuse_stripped, unstated=unstated, attr=attr
1250 + )
1251 + )
1252 return vals, self._unstated_iuse(pkg, attr, unstated)
1253
1254
1255 @@ -258,24 +276,27 @@ class NetAddon(base.Addon):
1256
1257 @classmethod
1258 def mangle_argparser(cls, parser):
1259 - group = parser.add_argument_group('network')
1260 + group = parser.add_argument_group("network")
1261 group.add_argument(
1262 - '--timeout', type=float, default='5',
1263 - help='timeout used for network checks')
1264 + "--timeout", type=float, default="5", help="timeout used for network checks"
1265 + )
1266 group.add_argument(
1267 - '--user-agent', default='Wget/1.20.3 (linux-gnu)',
1268 - help='custom user agent spoofing')
1269 + "--user-agent", default="Wget/1.20.3 (linux-gnu)", help="custom user agent spoofing"
1270 + )
1271
1272 @property
1273 def session(self):
1274 try:
1275 from .net import Session
1276 +
1277 return Session(
1278 - concurrent=self.options.tasks, timeout=self.options.timeout,
1279 - user_agent=self.options.user_agent)
1280 + concurrent=self.options.tasks,
1281 + timeout=self.options.timeout,
1282 + user_agent=self.options.user_agent,
1283 + )
1284 except ImportError as e:
1285 - if e.name == 'requests':
1286 - raise PkgcheckUserException('network checks require requests to be installed')
1287 + if e.name == "requests":
1288 + raise PkgcheckUserException("network checks require requests to be installed")
1289 raise
1290
1291
1292 @@ -290,10 +311,14 @@ def init_addon(cls, options, addons_map=None, **kwargs):
1293 # initialize and inject all required addons for a given addon's inheritance
1294 # tree as kwargs
1295 required_addons = chain.from_iterable(
1296 - x.required_addons for x in cls.__mro__ if issubclass(x, base.Addon))
1297 - kwargs.update({
1298 - base.param_name(addon): init_addon(addon, options, addons_map)
1299 - for addon in required_addons})
1300 + x.required_addons for x in cls.__mro__ if issubclass(x, base.Addon)
1301 + )
1302 + kwargs.update(
1303 + {
1304 + base.param_name(addon): init_addon(addon, options, addons_map)
1305 + for addon in required_addons
1306 + }
1307 + )
1308
1309 # verify the cache type is enabled
1310 if issubclass(cls, caches.CachedAddon) and not options.cache[cls.cache.type]:
1311 @@ -302,7 +327,7 @@ def init_addon(cls, options, addons_map=None, **kwargs):
1312 addon = addons_map[cls] = cls(options, **kwargs)
1313
1314 # force cache updates
1315 - force_cache = getattr(options, 'force_cache', False)
1316 + force_cache = getattr(options, "force_cache", False)
1317 if isinstance(addon, caches.CachedAddon):
1318 addon.update_cache(force=force_cache)
1319
1320
1321 diff --git a/src/pkgcheck/addons/caches.py b/src/pkgcheck/addons/caches.py
1322 index 665efc4f..9cd13e58 100644
1323 --- a/src/pkgcheck/addons/caches.py
1324 +++ b/src/pkgcheck/addons/caches.py
1325 @@ -23,6 +23,7 @@ from ..log import logger
1326 @dataclass(frozen=True)
1327 class CacheData:
1328 """Cache registry data."""
1329 +
1330 type: str
1331 file: str
1332 version: int
1333 @@ -31,7 +32,7 @@ class CacheData:
1334 class Cache:
1335 """Mixin for data caches."""
1336
1337 - __getattr__ = klass.GetAttrProxy('_cache')
1338 + __getattr__ = klass.GetAttrProxy("_cache")
1339
1340
1341 class DictCache(UserDict, Cache):
1342 @@ -46,7 +47,7 @@ class CacheDisabled(PkgcheckException):
1343 """Exception flagging that a requested cache type is disabled."""
1344
1345 def __init__(self, cache):
1346 - super().__init__(f'{cache.type} cache support required')
1347 + super().__init__(f"{cache.type} cache support required")
1348
1349
1350 class CachedAddon(Addon):
1351 @@ -61,7 +62,7 @@ class CachedAddon(Addon):
1352 """Register available caches."""
1353 super().__init_subclass__(**kwargs)
1354 if cls.cache is None:
1355 - raise ValueError(f'invalid cache registry: {cls!r}')
1356 + raise ValueError(f"invalid cache registry: {cls!r}")
1357 cls.caches[cls] = cls.cache
1358
1359 def update_cache(self, repo=None, force=False):
1360 @@ -75,17 +76,16 @@ class CachedAddon(Addon):
1361 using the same identifier don't use the same cache file.
1362 """
1363 token = blake2b(repo.location.encode()).hexdigest()[:10]
1364 - dirname = f'{repo.repo_id.lstrip(os.sep)}-{token}'
1365 - return pjoin(self.options.cache_dir, 'repos', dirname, self.cache.file)
1366 + dirname = f"{repo.repo_id.lstrip(os.sep)}-{token}"
1367 + return pjoin(self.options.cache_dir, "repos", dirname, self.cache.file)
1368
1369 def load_cache(self, path, fallback=None):
1370 cache = fallback
1371 try:
1372 - with open(path, 'rb') as f:
1373 + with open(path, "rb") as f:
1374 cache = pickle.load(f)
1375 if cache.version != self.cache.version:
1376 - logger.debug(
1377 - 'forcing %s cache regen due to outdated version', self.cache.type)
1378 + logger.debug("forcing %s cache regen due to outdated version", self.cache.type)
1379 os.remove(path)
1380 cache = fallback
1381 except IGNORED_EXCEPTIONS:
1382 @@ -93,7 +93,7 @@ class CachedAddon(Addon):
1383 except FileNotFoundError:
1384 pass
1385 except Exception as e:
1386 - logger.debug('forcing %s cache regen: %s', self.cache.type, e)
1387 + logger.debug("forcing %s cache regen: %s", self.cache.type, e)
1388 os.remove(path)
1389 cache = fallback
1390 return cache
1391 @@ -104,17 +104,16 @@ class CachedAddon(Addon):
1392 with AtomicWriteFile(path, binary=True) as f:
1393 pickle.dump(data, f, protocol=-1)
1394 except IOError as e:
1395 - msg = f'failed dumping {self.cache.type} cache: {path!r}: {e.strerror}'
1396 + msg = f"failed dumping {self.cache.type} cache: {path!r}: {e.strerror}"
1397 raise PkgcheckUserException(msg)
1398
1399 @klass.jit_attr
1400 def existing_caches(self):
1401 """Mapping of all existing cache types to file paths."""
1402 caches_map = {}
1403 - repos_dir = pjoin(self.options.cache_dir, 'repos')
1404 - for cache in sorted(self.caches.values(), key=attrgetter('type')):
1405 - caches_map[cache.type] = tuple(sorted(
1406 - pathlib.Path(repos_dir).rglob(cache.file)))
1407 + repos_dir = pjoin(self.options.cache_dir, "repos")
1408 + for cache in sorted(self.caches.values(), key=attrgetter("type")):
1409 + caches_map[cache.type] = tuple(sorted(pathlib.Path(repos_dir).rglob(cache.file)))
1410 return ImmutableDict(caches_map)
1411
1412 def remove_caches(self):
1413 @@ -125,14 +124,14 @@ class CachedAddon(Addon):
1414 except FileNotFoundError:
1415 pass
1416 except IOError as e:
1417 - raise PkgcheckUserException(f'failed removing cache dir: {e}')
1418 + raise PkgcheckUserException(f"failed removing cache dir: {e}")
1419 else:
1420 try:
1421 for cache_type, paths in self.existing_caches.items():
1422 if self.options.cache.get(cache_type, False):
1423 for path in paths:
1424 if self.options.dry_run:
1425 - print(f'Would remove {path}')
1426 + print(f"Would remove {path}")
1427 else:
1428 os.unlink(path)
1429 # remove empty cache dirs
1430 @@ -145,4 +144,4 @@ class CachedAddon(Addon):
1431 continue
1432 raise
1433 except IOError as e:
1434 - raise PkgcheckUserException(f'failed removing {cache_type} cache: {path!r}: {e}')
1435 + raise PkgcheckUserException(f"failed removing {cache_type} cache: {path!r}: {e}")
1436
1437 diff --git a/src/pkgcheck/addons/eclass.py b/src/pkgcheck/addons/eclass.py
1438 index fd4d085e..5e5e77ee 100644
1439 --- a/src/pkgcheck/addons/eclass.py
1440 +++ b/src/pkgcheck/addons/eclass.py
1441 @@ -49,8 +49,7 @@ class EclassAddon(caches.CachedAddon):
1442 """Eclass support for various checks."""
1443
1444 # cache registry
1445 - cache = caches.CacheData(type='eclass', file='eclass.pickle',
1446 - version=EclassDoc.ABI_VERSION)
1447 + cache = caches.CacheData(type="eclass", file="eclass.pickle", version=EclassDoc.ABI_VERSION)
1448
1449 def __init__(self, *args):
1450 super().__init__(*args)
1451 @@ -81,7 +80,7 @@ class EclassAddon(caches.CachedAddon):
1452 def update_cache(self, force=False):
1453 """Update related cache and push updates to disk."""
1454 for repo in self.options.target_repo.trees:
1455 - eclass_dir = pjoin(repo.location, 'eclass')
1456 + eclass_dir = pjoin(repo.location, "eclass")
1457 cache_file = self.cache_file(repo)
1458 cache_eclasses = False
1459 eclasses = {}
1460 @@ -91,15 +90,17 @@ class EclassAddon(caches.CachedAddon):
1461
1462 # check for eclass removals
1463 for name in list(eclasses):
1464 - if not os.path.exists(pjoin(eclass_dir, f'{name}.eclass')):
1465 + if not os.path.exists(pjoin(eclass_dir, f"{name}.eclass")):
1466 del eclasses[name]
1467 cache_eclasses = True
1468
1469 # verify the repo has eclasses
1470 try:
1471 repo_eclasses = sorted(
1472 - (x[:-7], pjoin(eclass_dir, x)) for x in os.listdir(eclass_dir)
1473 - if x.endswith('.eclass'))
1474 + (x[:-7], pjoin(eclass_dir, x))
1475 + for x in os.listdir(eclass_dir)
1476 + if x.endswith(".eclass")
1477 + )
1478 except FileNotFoundError:
1479 repo_eclasses = []
1480
1481 @@ -115,7 +116,7 @@ class EclassAddon(caches.CachedAddon):
1482 raise KeyError
1483 except (KeyError, AttributeError):
1484 try:
1485 - progress(f'{repo} -- updating eclass cache: {name:<{padding}}')
1486 + progress(f"{repo} -- updating eclass cache: {name:<{padding}}")
1487 eclasses[name] = EclassDoc(path, sourced=True, repo=repo)
1488 cache_eclasses = True
1489 except IOError:
1490
1491 diff --git a/src/pkgcheck/addons/git.py b/src/pkgcheck/addons/git.py
1492 index 02ec96ea..26c1d06d 100644
1493 --- a/src/pkgcheck/addons/git.py
1494 +++ b/src/pkgcheck/addons/git.py
1495 @@ -37,6 +37,7 @@ from . import caches
1496 @dataclass(frozen=True, eq=False)
1497 class GitCommit:
1498 """Git commit objects."""
1499 +
1500 hash: str
1501 commit_time: int
1502 author: str
1503 @@ -57,6 +58,7 @@ class GitCommit:
1504 @dataclass(frozen=True)
1505 class GitPkgChange:
1506 """Git package change objects."""
1507 +
1508 atom: atom_cls
1509 status: str
1510 commit: str
1511 @@ -82,16 +84,18 @@ class GitConfig:
1512
1513 def __init__(self):
1514 fd, self.path = tempfile.mkstemp()
1515 - os.write(fd, b'[safe]\n\tdirectory = *\n')
1516 + os.write(fd, b"[safe]\n\tdirectory = *\n")
1517 os.close(fd)
1518
1519 @property
1520 def config_env(self):
1521 # ignore global user and system git config, but disable safe.directory
1522 - return ImmutableDict({
1523 - 'GIT_CONFIG_GLOBAL': self.path,
1524 - 'GIT_CONFIG_SYSTEM': '',
1525 - })
1526 + return ImmutableDict(
1527 + {
1528 + "GIT_CONFIG_GLOBAL": self.path,
1529 + "GIT_CONFIG_SYSTEM": "",
1530 + }
1531 + )
1532
1533 def close(self):
1534 os.unlink(self.path)
1535 @@ -104,21 +108,25 @@ class GitLog:
1536 self._running = False
1537 self.git_config = GitConfig()
1538 self.proc = subprocess.Popen(
1539 - cmd, cwd=path,
1540 - stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=self.git_config.config_env)
1541 + cmd,
1542 + cwd=path,
1543 + stdout=subprocess.PIPE,
1544 + stderr=subprocess.PIPE,
1545 + env=self.git_config.config_env,
1546 + )
1547
1548 def __iter__(self):
1549 return self
1550
1551 def __next__(self):
1552 # use replacement character for non-UTF8 decoding issues (issue #166)
1553 - line = self.proc.stdout.readline().decode('utf-8', 'replace')
1554 + line = self.proc.stdout.readline().decode("utf-8", "replace")
1555
1556 # verify git log is running as expected after pulling the first line
1557 if not self._running:
1558 if self.proc.poll() or not line:
1559 error = self.proc.stderr.read().decode().strip()
1560 - raise GitError(f'failed running git log: {error}')
1561 + raise GitError(f"failed running git log: {error}")
1562 self._running = True
1563 self.git_config.close()
1564
1565 @@ -133,14 +141,14 @@ class _ParseGitRepo:
1566 """Generic iterator for custom git log output parsing support."""
1567
1568 # git command to run on the targeted repo
1569 - _git_cmd = 'git log --name-status --diff-filter=ARMD -z'
1570 + _git_cmd = "git log --name-status --diff-filter=ARMD -z"
1571
1572 # custom git log format lines, see the "PRETTY FORMATS" section of
1573 # the git log man page for details
1574 _format = ()
1575
1576 # path regexes for git log parsing, validation is handled on instantiation
1577 - _ebuild_re = re.compile(r'^(?P<category>[^/]+)/[^/]+/(?P<package>[^/]+)\.ebuild$')
1578 + _ebuild_re = re.compile(r"^(?P<category>[^/]+)/[^/]+/(?P<package>[^/]+)\.ebuild$")
1579
1580 def __init__(self, path, commit_range):
1581 self.path = os.path.realpath(path)
1582 @@ -161,12 +169,12 @@ class _ParseGitRepo:
1583 @property
1584 def changes(self):
1585 """Generator of file change status with changed packages."""
1586 - changes = deque(next(self.git_log).strip('\x00').split('\x00'))
1587 + changes = deque(next(self.git_log).strip("\x00").split("\x00"))
1588 while changes:
1589 status = changes.popleft()
1590 - if status.startswith('R'):
1591 + if status.startswith("R"):
1592 # matched R status change
1593 - status = 'R'
1594 + status = "R"
1595 old = changes.popleft()
1596 new = changes.popleft()
1597 if (mo := self._ebuild_re.match(old)) and (mn := self._ebuild_re.match(new)):
1598 @@ -191,11 +199,11 @@ class GitRepoCommits(_ParseGitRepo):
1599 """Parse git log output into an iterator of commit objects."""
1600
1601 _format = (
1602 - '%h', # abbreviated commit hash
1603 - '%ct', # commit timestamp
1604 - '%an <%ae>', # Author Name <author@×××××.com>
1605 - '%cn <%ce>', # Committer Name <committer@×××××.com>
1606 - '%B', # commit message
1607 + "%h", # abbreviated commit hash
1608 + "%ct", # commit timestamp
1609 + "%an <%ae>", # Author Name <author@×××××.com>
1610 + "%cn <%ce>", # Committer Name <committer@×××××.com>
1611 + "%B", # commit message
1612 )
1613
1614 def __next__(self):
1615 @@ -203,13 +211,13 @@ class GitRepoCommits(_ParseGitRepo):
1616 commit_time = int(next(self.git_log))
1617 author = next(self.git_log)
1618 committer = next(self.git_log)
1619 - message = list(takewhile(lambda x: x != '\x00', self.git_log))
1620 + message = list(takewhile(lambda x: x != "\x00", self.git_log))
1621 pkgs = defaultdict(set)
1622 for status, atoms in self.changes:
1623 - if status == 'R':
1624 + if status == "R":
1625 old, new = atoms
1626 - pkgs['A'].add(new)
1627 - pkgs['D'].add(old)
1628 + pkgs["A"].add(new)
1629 + pkgs["D"].add(old)
1630 else:
1631 pkgs[status].update(atoms)
1632 return GitCommit(commit_hash, commit_time, author, committer, message, ImmutableDict(pkgs))
1633 @@ -219,8 +227,8 @@ class GitRepoPkgs(_ParseGitRepo):
1634 """Parse git log output into an iterator of package change objects."""
1635
1636 _format = (
1637 - '%h', # abbreviated commit hash
1638 - '%ct', # commit time
1639 + "%h", # abbreviated commit hash
1640 + "%ct", # commit time
1641 )
1642
1643 def __init__(self, *args, local=False):
1644 @@ -234,24 +242,21 @@ class GitRepoPkgs(_ParseGitRepo):
1645 return self._pkgs.popleft()
1646 except IndexError:
1647 commit_hash = next(self.git_log)
1648 - commit_time = int(next(self.git_log).rstrip('\x00'))
1649 + commit_time = int(next(self.git_log).rstrip("\x00"))
1650 self._pkg_changes(commit_hash, commit_time)
1651
1652 def _pkg_changes(self, commit_hash, commit_time):
1653 """Queue package change objects from git log file changes."""
1654 for status, pkgs in self.changes:
1655 - if status == 'R':
1656 + if status == "R":
1657 old, new = pkgs
1658 if not self.local: # treat rename as addition and removal
1659 - self._pkgs.append(
1660 - GitPkgChange(new, 'A', commit_hash, commit_time))
1661 - self._pkgs.append(
1662 - GitPkgChange(old, 'D', commit_hash, commit_time))
1663 + self._pkgs.append(GitPkgChange(new, "A", commit_hash, commit_time))
1664 + self._pkgs.append(GitPkgChange(old, "D", commit_hash, commit_time))
1665 else:
1666 # renames are split into add/remove ops at
1667 # the check level for the local commits repo
1668 - self._pkgs.append(GitPkgChange(
1669 - new, 'R', commit_hash, commit_time, old))
1670 + self._pkgs.append(GitPkgChange(new, "R", commit_hash, commit_time, old))
1671 else:
1672 self._pkgs.append(GitPkgChange(pkgs[0], status, commit_hash, commit_time))
1673
1674 @@ -264,26 +269,31 @@ class _GitCommitPkg(cpv.VersionedCPV):
1675
1676 # add additional attrs
1677 sf = object.__setattr__
1678 - sf(self, 'time', time)
1679 - sf(self, 'status', status)
1680 - sf(self, 'commit', commit)
1681 - sf(self, 'old', old)
1682 + sf(self, "time", time)
1683 + sf(self, "status", status)
1684 + sf(self, "commit", commit)
1685 + sf(self, "old", old)
1686
1687 def old_pkg(self):
1688 """Create a new object from a rename commit's old atom."""
1689 return self.__class__(
1690 - self.old.category, self.old.package, self.status, self.old.version,
1691 - self.time, self.commit)
1692 + self.old.category,
1693 + self.old.package,
1694 + self.status,
1695 + self.old.version,
1696 + self.time,
1697 + self.commit,
1698 + )
1699
1700
1701 class GitChangedRepo(SimpleTree):
1702 """Historical git repo consisting of the latest changed packages."""
1703
1704 # selected pkg status filter
1705 - _status_filter = {'A', 'R', 'M', 'D'}
1706 + _status_filter = {"A", "R", "M", "D"}
1707
1708 def __init__(self, *args, **kwargs):
1709 - kwargs.setdefault('pkg_klass', _GitCommitPkg)
1710 + kwargs.setdefault("pkg_klass", _GitCommitPkg)
1711 super().__init__(*args, **kwargs)
1712
1713 def _get_versions(self, cp):
1714 @@ -298,25 +308,26 @@ class GitChangedRepo(SimpleTree):
1715 for cp in sorter(candidates):
1716 yield from sorter(
1717 raw_pkg_cls(cp[0], cp[1], status, *commit)
1718 - for status, commit in self.versions.get(cp, ()))
1719 + for status, commit in self.versions.get(cp, ())
1720 + )
1721
1722
1723 class GitModifiedRepo(GitChangedRepo):
1724 """Historical git repo consisting of the latest modified packages."""
1725
1726 - _status_filter = {'A', 'M'}
1727 + _status_filter = {"A", "M"}
1728
1729
1730 class GitAddedRepo(GitChangedRepo):
1731 """Historical git repo consisting of added packages."""
1732
1733 - _status_filter = {'A'}
1734 + _status_filter = {"A"}
1735
1736
1737 class GitRemovedRepo(GitChangedRepo):
1738 """Historical git repo consisting of removed packages."""
1739
1740 - _status_filter = {'D'}
1741 + _status_filter = {"D"}
1742
1743
1744 class _ScanGit(argparse.Action):
1745 @@ -325,11 +336,11 @@ class _ScanGit(argparse.Action):
1746 def __init__(self, *args, staged=False, **kwargs):
1747 super().__init__(*args, **kwargs)
1748 if staged:
1749 - default_ref = 'HEAD'
1750 - diff_cmd = ['git', 'diff-index', '--name-only', '--cached', '-z']
1751 + default_ref = "HEAD"
1752 + diff_cmd = ["git", "diff-index", "--name-only", "--cached", "-z"]
1753 else:
1754 - default_ref = 'origin..HEAD'
1755 - diff_cmd = ['git', 'diff-tree', '-r', '--name-only', '-z']
1756 + default_ref = "origin..HEAD"
1757 + diff_cmd = ["git", "diff-tree", "-r", "--name-only", "-z"]
1758
1759 self.staged = staged
1760 self.default_ref = default_ref
1761 @@ -340,26 +351,30 @@ class _ScanGit(argparse.Action):
1762 try:
1763 p = subprocess.run(
1764 self.diff_cmd + [ref],
1765 - stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1766 - cwd=namespace.target_repo.location, check=True, encoding='utf8')
1767 + stdout=subprocess.PIPE,
1768 + stderr=subprocess.PIPE,
1769 + cwd=namespace.target_repo.location,
1770 + check=True,
1771 + encoding="utf8",
1772 + )
1773 except FileNotFoundError as e:
1774 parser.error(str(e))
1775 except subprocess.CalledProcessError as e:
1776 error = e.stderr.splitlines()[0]
1777 - parser.error(f'failed running git: {error}')
1778 + parser.error(f"failed running git: {error}")
1779
1780 if not p.stdout:
1781 # no changes exist, exit early
1782 parser.exit()
1783
1784 - eclass_re = re.compile(r'^eclass/(?P<eclass>\S+)\.eclass$')
1785 + eclass_re = re.compile(r"^eclass/(?P<eclass>\S+)\.eclass$")
1786 eclasses, profiles, pkgs = OrderedSet(), OrderedSet(), OrderedSet()
1787
1788 - for path in p.stdout.strip('\x00').split('\x00'):
1789 + for path in p.stdout.strip("\x00").split("\x00"):
1790 path_components = path.split(os.sep)
1791 if mo := eclass_re.match(path):
1792 - eclasses.add(mo.group('eclass'))
1793 - elif path_components[0] == 'profiles':
1794 + eclasses.add(mo.group("eclass"))
1795 + elif path_components[0] == "profiles":
1796 profiles.add(path)
1797 elif path_components[0] in namespace.target_repo.categories:
1798 try:
1799 @@ -384,13 +399,14 @@ class _ScanGit(argparse.Action):
1800
1801 def __call__(self, parser, namespace, value, option_string=None):
1802 if namespace.targets:
1803 - targets = ' '.join(namespace.targets)
1804 + targets = " ".join(namespace.targets)
1805 s = pluralism(namespace.targets)
1806 - parser.error(f'{option_string} is mutually exclusive with target{s}: {targets}')
1807 + parser.error(f"{option_string} is mutually exclusive with target{s}: {targets}")
1808
1809 if not self.staged:
1810 # avoid circular import issues
1811 from .. import objects
1812 +
1813 # enable git checks
1814 namespace.enabled_checks.update(objects.CHECKS.select(GitCommitsCheck).values())
1815
1816 @@ -422,16 +438,21 @@ class GitAddon(caches.CachedAddon):
1817 """
1818
1819 # cache registry
1820 - cache = caches.CacheData(type='git', file='git.pickle', version=5)
1821 + cache = caches.CacheData(type="git", file="git.pickle", version=5)
1822
1823 @classmethod
1824 def mangle_argparser(cls, parser):
1825 - group = parser.add_argument_group('git', docs=cls.__doc__)
1826 + group = parser.add_argument_group("git", docs=cls.__doc__)
1827 git_opts = group.add_mutually_exclusive_group()
1828 git_opts.add_argument(
1829 - '--commits', nargs='?', default=False, metavar='tree-ish',
1830 - action=arghparse.Delayed, target=_ScanGit, priority=10,
1831 - help='determine scan targets from unpushed commits',
1832 + "--commits",
1833 + nargs="?",
1834 + default=False,
1835 + metavar="tree-ish",
1836 + action=arghparse.Delayed,
1837 + target=_ScanGit,
1838 + priority=10,
1839 + help="determine scan targets from unpushed commits",
1840 docs="""
1841 Targets are determined from the committed changes compared to a
1842 given reference that defaults to the repo's origin.
1843 @@ -440,21 +461,28 @@ class GitAddon(caches.CachedAddon):
1844 the current branch compared to the branch named 'old' use
1845 ``pkgcheck scan --commits old``. For two separate branches
1846 named 'old' and 'new' use ``pkgcheck scan --commits old..new``.
1847 - """)
1848 + """,
1849 + )
1850 git_opts.add_argument(
1851 - '--staged', nargs='?', default=False, metavar='tree-ish',
1852 - action=arghparse.Delayed, target=partial(_ScanGit, staged=True), priority=10,
1853 - help='determine scan targets from staged changes',
1854 + "--staged",
1855 + nargs="?",
1856 + default=False,
1857 + metavar="tree-ish",
1858 + action=arghparse.Delayed,
1859 + target=partial(_ScanGit, staged=True),
1860 + priority=10,
1861 + help="determine scan targets from staged changes",
1862 docs="""
1863 Targets are determined using all staged changes for the git
1864 repo. Unstaged changes and untracked files are ignored by
1865 temporarily stashing them during the scanning process.
1866 - """)
1867 + """,
1868 + )
1869
1870 def __init__(self, *args):
1871 super().__init__(*args)
1872 try:
1873 - find_binary('git')
1874 + find_binary("git")
1875 except CommandNotFound:
1876 raise caches.CacheDisabled(self.cache)
1877
1878 @@ -465,14 +493,14 @@ class GitAddon(caches.CachedAddon):
1879 def _gitignore(self):
1880 """Load a repo's .gitignore and .git/info/exclude files for path matching."""
1881 patterns = []
1882 - for path in ('.gitignore', '.git/info/exclude'):
1883 + for path in (".gitignore", ".git/info/exclude"):
1884 try:
1885 with open(pjoin(self.options.target_repo.location, path)) as f:
1886 patterns.extend(f)
1887 except (FileNotFoundError, IOError):
1888 pass
1889 if patterns:
1890 - return PathSpec.from_lines('gitwildmatch', patterns)
1891 + return PathSpec.from_lines("gitwildmatch", patterns)
1892 return None
1893
1894 def gitignored(self, path):
1895 @@ -489,23 +517,31 @@ class GitAddon(caches.CachedAddon):
1896 """Retrieve a git repo's commit hash for a specific commit object."""
1897 try:
1898 p = subprocess.run(
1899 - ['git', 'rev-parse', commit],
1900 - stdout=subprocess.PIPE, stderr=subprocess.DEVNULL,
1901 - cwd=path, check=True, encoding='utf8')
1902 + ["git", "rev-parse", commit],
1903 + stdout=subprocess.PIPE,
1904 + stderr=subprocess.DEVNULL,
1905 + cwd=path,
1906 + check=True,
1907 + encoding="utf8",
1908 + )
1909 except subprocess.CalledProcessError:
1910 - raise GitError(f'failed retrieving commit hash for git repo: {path!r}')
1911 + raise GitError(f"failed retrieving commit hash for git repo: {path!r}")
1912 return p.stdout.strip()
1913
1914 @staticmethod
1915 - def _get_current_branch(path, commit='HEAD'):
1916 + def _get_current_branch(path, commit="HEAD"):
1917 """Retrieve a git repo's current branch for a specific commit object."""
1918 try:
1919 p = subprocess.run(
1920 - ['git', 'rev-parse', '--abbrev-ref', commit],
1921 - stdout=subprocess.PIPE, stderr=subprocess.DEVNULL,
1922 - cwd=path, check=True, encoding='utf8')
1923 + ["git", "rev-parse", "--abbrev-ref", commit],
1924 + stdout=subprocess.PIPE,
1925 + stderr=subprocess.DEVNULL,
1926 + cwd=path,
1927 + check=True,
1928 + encoding="utf8",
1929 + )
1930 except subprocess.CalledProcessError:
1931 - raise GitError(f'failed retrieving branch for git repo: {path!r}')
1932 + raise GitError(f"failed retrieving branch for git repo: {path!r}")
1933 return p.stdout.strip()
1934
1935 @staticmethod
1936 @@ -513,12 +549,16 @@ class GitAddon(caches.CachedAddon):
1937 """Retrieve a git repo's default branch used with origin remote."""
1938 try:
1939 p = subprocess.run(
1940 - ['git', 'symbolic-ref', 'refs/remotes/origin/HEAD'],
1941 - stdout=subprocess.PIPE, stderr=subprocess.DEVNULL,
1942 - cwd=path, check=True, encoding='utf8')
1943 + ["git", "symbolic-ref", "refs/remotes/origin/HEAD"],
1944 + stdout=subprocess.PIPE,
1945 + stderr=subprocess.DEVNULL,
1946 + cwd=path,
1947 + check=True,
1948 + encoding="utf8",
1949 + )
1950 except subprocess.CalledProcessError:
1951 - raise GitError(f'failed retrieving branch for git repo: {path!r}')
1952 - return p.stdout.strip().split('/')[-1]
1953 + raise GitError(f"failed retrieving branch for git repo: {path!r}")
1954 + return p.stdout.strip().split("/")[-1]
1955
1956 @staticmethod
1957 def pkg_history(repo, commit_range, data=None, local=False, verbosity=-1):
1958 @@ -535,11 +575,12 @@ class GitAddon(caches.CachedAddon):
1959 if local:
1960 commit = (atom.fullver, pkg.commit_time, pkg.commit, pkg.old)
1961 else:
1962 - date = datetime.fromtimestamp(pkg.commit_time).strftime('%Y-%m-%d')
1963 - progress(f'{repo} -- updating git cache: commit date: {date}')
1964 + date = datetime.fromtimestamp(pkg.commit_time).strftime("%Y-%m-%d")
1965 + progress(f"{repo} -- updating git cache: commit date: {date}")
1966 commit = (atom.fullver, pkg.commit_time, pkg.commit)
1967 - data.setdefault(atom.category, {}).setdefault(
1968 - atom.package, {}).setdefault(pkg.status, []).append(commit)
1969 + data.setdefault(atom.category, {}).setdefault(atom.package, {}).setdefault(
1970 + pkg.status, []
1971 + ).append(commit)
1972 return data
1973
1974 def update_cache(self, force=False):
1975 @@ -551,10 +592,12 @@ class GitAddon(caches.CachedAddon):
1976 # skip cache usage when not running on the default branch
1977 if branch != default_branch:
1978 logger.debug(
1979 - 'skipping %s git repo cache update on '
1980 - 'non-default branch %r', repo, branch)
1981 + "skipping %s git repo cache update on " "non-default branch %r",
1982 + repo,
1983 + branch,
1984 + )
1985 continue
1986 - commit = self._get_commit_hash(repo.location, 'origin/HEAD')
1987 + commit = self._get_commit_hash(repo.location, "origin/HEAD")
1988 except GitError:
1989 continue
1990
1991 @@ -567,18 +610,18 @@ class GitAddon(caches.CachedAddon):
1992 git_cache = self.load_cache(cache_file)
1993
1994 if git_cache is None or commit != git_cache.commit:
1995 - logger.debug('updating %s git repo cache to %s', repo, commit[:13])
1996 + logger.debug("updating %s git repo cache to %s", repo, commit[:13])
1997 if git_cache is None:
1998 data = {}
1999 - commit_range = 'origin/HEAD'
2000 + commit_range = "origin/HEAD"
2001 else:
2002 data = git_cache.data
2003 - commit_range = f'{git_cache.commit}..origin/HEAD'
2004 + commit_range = f"{git_cache.commit}..origin/HEAD"
2005
2006 try:
2007 self.pkg_history(
2008 - repo, commit_range, data=data,
2009 - verbosity=self.options.verbosity)
2010 + repo, commit_range, data=data, verbosity=self.options.verbosity
2011 + )
2012 except GitError as e:
2013 raise PkgcheckUserException(str(e))
2014 git_cache = GitCache(data, self.cache, commit=commit)
2015 @@ -595,7 +638,7 @@ class GitAddon(caches.CachedAddon):
2016 git_repos = []
2017 for repo in self.options.target_repo.trees:
2018 git_cache = self._cached_repos.get(repo.location, {})
2019 - git_repos.append(repo_cls(git_cache, repo_id=f'{repo.repo_id}-history'))
2020 + git_repos.append(repo_cls(git_cache, repo_id=f"{repo.repo_id}-history"))
2021
2022 if len(git_repos) > 1:
2023 return multiplex.tree(*git_repos)
2024 @@ -606,14 +649,14 @@ class GitAddon(caches.CachedAddon):
2025 data = {}
2026
2027 try:
2028 - origin = self._get_commit_hash(target_repo.location, 'origin/HEAD')
2029 - head = self._get_commit_hash(target_repo.location, 'HEAD')
2030 + origin = self._get_commit_hash(target_repo.location, "origin/HEAD")
2031 + head = self._get_commit_hash(target_repo.location, "HEAD")
2032 if origin != head:
2033 - data = self.pkg_history(target_repo, 'origin/HEAD..HEAD', local=True)
2034 + data = self.pkg_history(target_repo, "origin/HEAD..HEAD", local=True)
2035 except GitError as e:
2036 raise PkgcheckUserException(str(e))
2037
2038 - repo_id = f'{target_repo.repo_id}-commits'
2039 + repo_id = f"{target_repo.repo_id}-commits"
2040 return repo_cls(data, repo_id=repo_id)
2041
2042 def commits(self):
2043 @@ -621,10 +664,10 @@ class GitAddon(caches.CachedAddon):
2044 commits = ()
2045
2046 try:
2047 - origin = self._get_commit_hash(target_repo.location, 'origin/HEAD')
2048 - head = self._get_commit_hash(target_repo.location, 'HEAD')
2049 + origin = self._get_commit_hash(target_repo.location, "origin/HEAD")
2050 + head = self._get_commit_hash(target_repo.location, "HEAD")
2051 if origin != head:
2052 - commits = GitRepoCommits(target_repo.location, 'origin/HEAD..HEAD')
2053 + commits = GitRepoCommits(target_repo.location, "origin/HEAD..HEAD")
2054 except GitError as e:
2055 raise PkgcheckUserException(str(e))
2056
2057
2058 diff --git a/src/pkgcheck/addons/net.py b/src/pkgcheck/addons/net.py
2059 index 0fad98b0..6db5432d 100644
2060 --- a/src/pkgcheck/addons/net.py
2061 +++ b/src/pkgcheck/addons/net.py
2062 @@ -8,7 +8,7 @@ import requests
2063 from ..checks.network import RequestError, SSLError
2064
2065 # suppress all urllib3 log messages
2066 -logging.getLogger('urllib3').propagate = False
2067 +logging.getLogger("urllib3").propagate = False
2068
2069
2070 class Session(requests.Session):
2071 @@ -26,15 +26,15 @@ class Session(requests.Session):
2072 # block when urllib3 connection pool is full
2073 concurrent = concurrent if concurrent is not None else os.cpu_count() * 5
2074 a = requests.adapters.HTTPAdapter(pool_maxsize=concurrent, pool_block=True)
2075 - self.mount('https://', a)
2076 - self.mount('http://', a)
2077 + self.mount("https://", a)
2078 + self.mount("http://", a)
2079
2080 # spoof user agent
2081 - self.headers['User-Agent'] = user_agent
2082 + self.headers["User-Agent"] = user_agent
2083
2084 def send(self, req, **kwargs):
2085 # forcibly use the session timeout
2086 - kwargs['timeout'] = self.timeout
2087 + kwargs["timeout"] = self.timeout
2088 try:
2089 with super().send(req, **kwargs) as r:
2090 r.raise_for_status()
2091 @@ -42,6 +42,6 @@ class Session(requests.Session):
2092 except requests.exceptions.SSLError as e:
2093 raise SSLError(e)
2094 except requests.exceptions.ConnectionError as e:
2095 - raise RequestError(e, 'connection failed')
2096 + raise RequestError(e, "connection failed")
2097 except requests.exceptions.RequestException as e:
2098 raise RequestError(e)
2099
2100 diff --git a/src/pkgcheck/addons/profiles.py b/src/pkgcheck/addons/profiles.py
2101 index 02b31eda..799cd94a 100644
2102 --- a/src/pkgcheck/addons/profiles.py
2103 +++ b/src/pkgcheck/addons/profiles.py
2104 @@ -22,15 +22,28 @@ from . import ArchesAddon, caches
2105
2106
2107 class ProfileData:
2108 -
2109 - def __init__(self, repo, profile_name, key, provides, vfilter,
2110 - iuse_effective, use, pkg_use, masked_use, forced_use, lookup_cache, insoluble,
2111 - status, deprecated):
2112 + def __init__(
2113 + self,
2114 + repo,
2115 + profile_name,
2116 + key,
2117 + provides,
2118 + vfilter,
2119 + iuse_effective,
2120 + use,
2121 + pkg_use,
2122 + masked_use,
2123 + forced_use,
2124 + lookup_cache,
2125 + insoluble,
2126 + status,
2127 + deprecated,
2128 + ):
2129 self.repo = repo
2130 self.name = profile_name
2131 self.key = key
2132 self.provides_repo = provides
2133 - self.provides_has_match = getattr(provides, 'has_match', provides.match)
2134 + self.provides_has_match = getattr(provides, "has_match", provides.match)
2135 self.iuse_effective = iuse_effective
2136 self.use = use
2137 self.pkg_use = pkg_use
2138 @@ -47,8 +60,7 @@ class ProfileData:
2139 # pointless intermediate sets unless required
2140 # kindly don't change that in any modifications, it adds up.
2141 enabled = known_flags.intersection(self.forced_use.pull_data(pkg))
2142 - immutable = enabled.union(
2143 - filter(known_flags.__contains__, self.masked_use.pull_data(pkg)))
2144 + immutable = enabled.union(filter(known_flags.__contains__, self.masked_use.pull_data(pkg)))
2145 if force_disabled := self.masked_use.pull_data(pkg):
2146 enabled = enabled.difference(force_disabled)
2147 return immutable, enabled
2148 @@ -64,19 +76,19 @@ class ProfilesArgs(arghparse.CommaSeparatedNegations):
2149 @staticmethod
2150 def norm_name(repo, s):
2151 """Expand status keywords and format paths."""
2152 - if s in ('dev', 'exp', 'stable', 'deprecated'):
2153 + if s in ("dev", "exp", "stable", "deprecated"):
2154 yield from repo.profiles.get_profiles(status=s)
2155 - elif s == 'all':
2156 + elif s == "all":
2157 yield from repo.profiles
2158 else:
2159 try:
2160 yield repo.profiles[os.path.normpath(s)]
2161 except KeyError:
2162 - raise ValueError(f'nonexistent profile: {s!r}')
2163 + raise ValueError(f"nonexistent profile: {s!r}")
2164
2165 def __call__(self, parser, namespace, values, option_string=None):
2166 disabled, enabled = self.parse_values(values)
2167 - namespace.ignore_deprecated_profiles = 'deprecated' not in enabled
2168 + namespace.ignore_deprecated_profiles = "deprecated" not in enabled
2169
2170 # Expand status keywords, e.g. 'stable' -> set of stable profiles, and
2171 # translate selections into profile objs.
2172 @@ -104,18 +116,23 @@ class ProfileAddon(caches.CachedAddon):
2173
2174 # non-profile dirs found in the profiles directory, generally only in
2175 # the gentoo repo, but could be in overlays as well
2176 - non_profile_dirs = frozenset(['desc', 'updates'])
2177 + non_profile_dirs = frozenset(["desc", "updates"])
2178
2179 # cache registry
2180 - cache = caches.CacheData(type='profiles', file='profiles.pickle', version=2)
2181 + cache = caches.CacheData(type="profiles", file="profiles.pickle", version=2)
2182
2183 @classmethod
2184 def mangle_argparser(cls, parser):
2185 - group = parser.add_argument_group('profiles')
2186 + group = parser.add_argument_group("profiles")
2187 group.add_argument(
2188 - '-p', '--profiles', metavar='PROFILE', dest='selected_profiles',
2189 - action=arghparse.Delayed, target=ProfilesArgs, priority=101,
2190 - help='comma separated list of profiles to enable/disable',
2191 + "-p",
2192 + "--profiles",
2193 + metavar="PROFILE",
2194 + dest="selected_profiles",
2195 + action=arghparse.Delayed,
2196 + target=ProfilesArgs,
2197 + priority=101,
2198 + help="comma separated list of profiles to enable/disable",
2199 docs="""
2200 Comma separated list of profiles to enable and disable for
2201 scanning. Any profiles specified in this fashion will be the
2202 @@ -137,8 +154,9 @@ class ProfileAddon(caches.CachedAddon):
2203 to only scan all stable profiles pass the ``stable`` argument
2204 to --profiles. Additionally the keyword ``all`` can be used to
2205 scan all defined profiles in the target repo.
2206 - """)
2207 - parser.bind_delayed_default(1001, 'profiles')(cls._default_profiles)
2208 + """,
2209 + )
2210 + parser.bind_delayed_default(1001, "profiles")(cls._default_profiles)
2211
2212 @staticmethod
2213 def _default_profiles(namespace, attr):
2214 @@ -148,8 +166,8 @@ class ProfileAddon(caches.CachedAddon):
2215 # that require them to operate properly.
2216 target_repo = namespace.target_repo
2217 profiles = set(target_repo.profiles)
2218 - if not getattr(namespace, 'exp_profiles_required', False):
2219 - profiles -= set(ProfilesArgs.norm_name(target_repo, 'exp'))
2220 + if not getattr(namespace, "exp_profiles_required", False):
2221 + profiles -= set(ProfilesArgs.norm_name(target_repo, "exp"))
2222 setattr(namespace, attr, profiles)
2223
2224 def __init__(self, *args, arches_addon):
2225 @@ -160,7 +178,7 @@ class ProfileAddon(caches.CachedAddon):
2226
2227 self.arch_profiles = defaultdict(list)
2228 self.target_repo = self.options.target_repo
2229 - ignore_deprecated = getattr(self.options, 'ignore_deprecated_profiles', True)
2230 + ignore_deprecated = getattr(self.options, "ignore_deprecated_profiles", True)
2231
2232 for p in sorted(self.options.profiles):
2233 if p.deprecated and ignore_deprecated:
2234 @@ -171,7 +189,7 @@ class ProfileAddon(caches.CachedAddon):
2235 # Only throw errors if the profile was selected by the user, bad
2236 # repo profiles will be caught during repo metadata scans.
2237 if self.options.selected_profiles is not None:
2238 - raise PkgcheckUserException(f'invalid profile: {e.path!r}: {e.error}')
2239 + raise PkgcheckUserException(f"invalid profile: {e.path!r}: {e.error}")
2240 continue
2241 self.arch_profiles[p.arch].append((profile, p))
2242
2243 @@ -180,7 +198,7 @@ class ProfileAddon(caches.CachedAddon):
2244 """Given a profile object, return its file set and most recent mtime."""
2245 cache = {}
2246 while True:
2247 - profile = (yield)
2248 + profile = yield
2249 profile_mtime = 0
2250 profile_files = []
2251 for node in profile.stack:
2252 @@ -204,8 +222,7 @@ class ProfileAddon(caches.CachedAddon):
2253 """Mapping of profile age and file sets used to check cache viability."""
2254 data = {}
2255 gen_profile_data = self._profile_files()
2256 - for profile_obj, profile in chain.from_iterable(
2257 - self.arch_profiles.values()):
2258 + for profile_obj, profile in chain.from_iterable(self.arch_profiles.values()):
2259 mtime, files = gen_profile_data.send(profile_obj)
2260 data[profile] = (mtime, files)
2261 next(gen_profile_data)
2262 @@ -220,7 +237,7 @@ class ProfileAddon(caches.CachedAddon):
2263 for repo in self.target_repo.trees:
2264 cache_file = self.cache_file(repo)
2265 # add profiles-base -> repo mapping to ease storage procedure
2266 - cached_profiles[repo.config.profiles_base]['repo'] = repo
2267 + cached_profiles[repo.config.profiles_base]["repo"] = repo
2268 if not force:
2269 cache = self.load_cache(cache_file, fallback={})
2270 cached_profiles[repo.config.profiles_base].update(cache)
2271 @@ -228,14 +245,21 @@ class ProfileAddon(caches.CachedAddon):
2272 chunked_data_cache = {}
2273
2274 for arch in sorted(self.options.arches):
2275 - stable_key, unstable_key = arch, f'~{arch}'
2276 + stable_key, unstable_key = arch, f"~{arch}"
2277 stable_r = packages.PackageRestriction(
2278 - "keywords", values.ContainmentMatch2((stable_key,)))
2279 + "keywords", values.ContainmentMatch2((stable_key,))
2280 + )
2281 unstable_r = packages.PackageRestriction(
2282 - "keywords", values.ContainmentMatch2((stable_key, unstable_key,)))
2283 + "keywords",
2284 + values.ContainmentMatch2(
2285 + (
2286 + stable_key,
2287 + unstable_key,
2288 + )
2289 + ),
2290 + )
2291
2292 - default_masked_use = tuple(set(
2293 - x for x in official_arches if x != stable_key))
2294 + default_masked_use = tuple(set(x for x in official_arches if x != stable_key))
2295
2296 # padding for progress output
2297 padding = max(len(x) for x in self.options.arches)
2298 @@ -244,23 +268,25 @@ class ProfileAddon(caches.CachedAddon):
2299 files = self.profile_data.get(profile)
2300 try:
2301 cached_profile = cached_profiles[profile.base][profile.path]
2302 - if files != cached_profile['files']:
2303 + if files != cached_profile["files"]:
2304 # force refresh of outdated cache entry
2305 raise KeyError
2306
2307 - masks = cached_profile['masks']
2308 - unmasks = cached_profile['unmasks']
2309 - immutable_flags = cached_profile['immutable_flags']
2310 - stable_immutable_flags = cached_profile['stable_immutable_flags']
2311 - enabled_flags = cached_profile['enabled_flags']
2312 - stable_enabled_flags = cached_profile['stable_enabled_flags']
2313 - pkg_use = cached_profile['pkg_use']
2314 - iuse_effective = cached_profile['iuse_effective']
2315 - use = cached_profile['use']
2316 - provides_repo = cached_profile['provides_repo']
2317 + masks = cached_profile["masks"]
2318 + unmasks = cached_profile["unmasks"]
2319 + immutable_flags = cached_profile["immutable_flags"]
2320 + stable_immutable_flags = cached_profile["stable_immutable_flags"]
2321 + enabled_flags = cached_profile["enabled_flags"]
2322 + stable_enabled_flags = cached_profile["stable_enabled_flags"]
2323 + pkg_use = cached_profile["pkg_use"]
2324 + iuse_effective = cached_profile["iuse_effective"]
2325 + use = cached_profile["use"]
2326 + provides_repo = cached_profile["provides_repo"]
2327 except KeyError:
2328 try:
2329 - progress(f'{repo} -- updating profiles cache: {profile.arch:<{padding}}')
2330 + progress(
2331 + f"{repo} -- updating profiles cache: {profile.arch:<{padding}}"
2332 + )
2333
2334 masks = profile_obj.masks
2335 unmasks = profile_obj.unmasks
2336 @@ -270,7 +296,9 @@ class ProfileAddon(caches.CachedAddon):
2337 immutable_flags.optimize(cache=chunked_data_cache)
2338 immutable_flags.freeze()
2339
2340 - stable_immutable_flags = profile_obj.stable_masked_use.clone(unfreeze=True)
2341 + stable_immutable_flags = profile_obj.stable_masked_use.clone(
2342 + unfreeze=True
2343 + )
2344 stable_immutable_flags.add_bare_global((), default_masked_use)
2345 stable_immutable_flags.optimize(cache=chunked_data_cache)
2346 stable_immutable_flags.freeze()
2347 @@ -280,7 +308,9 @@ class ProfileAddon(caches.CachedAddon):
2348 enabled_flags.optimize(cache=chunked_data_cache)
2349 enabled_flags.freeze()
2350
2351 - stable_enabled_flags = profile_obj.stable_forced_use.clone(unfreeze=True)
2352 + stable_enabled_flags = profile_obj.stable_forced_use.clone(
2353 + unfreeze=True
2354 + )
2355 stable_enabled_flags.add_bare_global((), (stable_key,))
2356 stable_enabled_flags.optimize(cache=chunked_data_cache)
2357 stable_enabled_flags.freeze()
2358 @@ -290,25 +320,28 @@ class ProfileAddon(caches.CachedAddon):
2359 provides_repo = profile_obj.provides_repo
2360
2361 # finalize enabled USE flags
2362 - use = frozenset(misc.incremental_expansion(
2363 - profile_obj.use, msg_prefix='while expanding USE'))
2364 + use = frozenset(
2365 + misc.incremental_expansion(
2366 + profile_obj.use, msg_prefix="while expanding USE"
2367 + )
2368 + )
2369 except profiles_mod.ProfileError:
2370 # unsupported EAPI or other issue, profile checks will catch this
2371 continue
2372
2373 - cached_profiles[profile.base]['update'] = True
2374 + cached_profiles[profile.base]["update"] = True
2375 cached_profiles[profile.base][profile.path] = {
2376 - 'files': files,
2377 - 'masks': masks,
2378 - 'unmasks': unmasks,
2379 - 'immutable_flags': immutable_flags,
2380 - 'stable_immutable_flags': stable_immutable_flags,
2381 - 'enabled_flags': enabled_flags,
2382 - 'stable_enabled_flags': stable_enabled_flags,
2383 - 'pkg_use': pkg_use,
2384 - 'iuse_effective': iuse_effective,
2385 - 'use': use,
2386 - 'provides_repo': provides_repo,
2387 + "files": files,
2388 + "masks": masks,
2389 + "unmasks": unmasks,
2390 + "immutable_flags": immutable_flags,
2391 + "stable_immutable_flags": stable_immutable_flags,
2392 + "enabled_flags": enabled_flags,
2393 + "stable_enabled_flags": stable_enabled_flags,
2394 + "pkg_use": pkg_use,
2395 + "iuse_effective": iuse_effective,
2396 + "use": use,
2397 + "provides_repo": provides_repo,
2398 }
2399
2400 # used to interlink stable/unstable lookups so that if
2401 @@ -323,50 +356,63 @@ class ProfileAddon(caches.CachedAddon):
2402 # note that the cache/insoluble are inversly paired;
2403 # stable cache is usable for unstable, but not vice versa.
2404 # unstable insoluble is usable for stable, but not vice versa
2405 - vfilter = domain.generate_filter(self.target_repo.pkg_masks | masks, unmasks)
2406 - self.profile_filters.setdefault(stable_key, []).append(ProfileData(
2407 - repo.repo_id,
2408 - profile.path, stable_key,
2409 - provides_repo,
2410 - packages.AndRestriction(vfilter, stable_r),
2411 - iuse_effective,
2412 - use,
2413 - pkg_use,
2414 - stable_immutable_flags, stable_enabled_flags,
2415 - stable_cache,
2416 - ProtectedSet(unstable_insoluble),
2417 - profile.status,
2418 - profile.deprecated))
2419 -
2420 - self.profile_filters.setdefault(unstable_key, []).append(ProfileData(
2421 - repo.repo_id,
2422 - profile.path, unstable_key,
2423 - provides_repo,
2424 - packages.AndRestriction(vfilter, unstable_r),
2425 - iuse_effective,
2426 - use,
2427 - pkg_use,
2428 - immutable_flags, enabled_flags,
2429 - ProtectedSet(stable_cache),
2430 - unstable_insoluble,
2431 - profile.status,
2432 - profile.deprecated))
2433 + vfilter = domain.generate_filter(
2434 + self.target_repo.pkg_masks | masks, unmasks
2435 + )
2436 + self.profile_filters.setdefault(stable_key, []).append(
2437 + ProfileData(
2438 + repo.repo_id,
2439 + profile.path,
2440 + stable_key,
2441 + provides_repo,
2442 + packages.AndRestriction(vfilter, stable_r),
2443 + iuse_effective,
2444 + use,
2445 + pkg_use,
2446 + stable_immutable_flags,
2447 + stable_enabled_flags,
2448 + stable_cache,
2449 + ProtectedSet(unstable_insoluble),
2450 + profile.status,
2451 + profile.deprecated,
2452 + )
2453 + )
2454 +
2455 + self.profile_filters.setdefault(unstable_key, []).append(
2456 + ProfileData(
2457 + repo.repo_id,
2458 + profile.path,
2459 + unstable_key,
2460 + provides_repo,
2461 + packages.AndRestriction(vfilter, unstable_r),
2462 + iuse_effective,
2463 + use,
2464 + pkg_use,
2465 + immutable_flags,
2466 + enabled_flags,
2467 + ProtectedSet(stable_cache),
2468 + unstable_insoluble,
2469 + profile.status,
2470 + profile.deprecated,
2471 + )
2472 + )
2473
2474 # dump updated profile filters
2475 for k, v in cached_profiles.items():
2476 - if v.pop('update', False):
2477 - repo = v.pop('repo')
2478 + if v.pop("update", False):
2479 + repo = v.pop("repo")
2480 cache_file = self.cache_file(repo)
2481 - cache = caches.DictCache(
2482 - cached_profiles[repo.config.profiles_base], self.cache)
2483 + cache = caches.DictCache(cached_profiles[repo.config.profiles_base], self.cache)
2484 self.save_cache(cache, cache_file)
2485
2486 for key, profile_list in self.profile_filters.items():
2487 similar = self.profile_evaluate_dict[key] = []
2488 for profile in profile_list:
2489 for existing in similar:
2490 - if (existing[0].masked_use == profile.masked_use and
2491 - existing[0].forced_use == profile.forced_use):
2492 + if (
2493 + existing[0].masked_use == profile.masked_use
2494 + and existing[0].forced_use == profile.forced_use
2495 + ):
2496 existing.append(profile)
2497 break
2498 else:
2499 @@ -377,7 +423,7 @@ class ProfileAddon(caches.CachedAddon):
2500 # the use processing across each of 'em.
2501 groups = []
2502 keywords = pkg.keywords
2503 - unstable_keywords = (f'~{x}' for x in keywords if x[0] != '~')
2504 + unstable_keywords = (f"~{x}" for x in keywords if x[0] != "~")
2505 for key in chain(keywords, unstable_keywords):
2506 if profile_grps := self.profile_evaluate_dict.get(key):
2507 for profiles in profile_grps:
2508
2509 diff --git a/src/pkgcheck/api.py b/src/pkgcheck/api.py
2510 index bcf30234..c704f8c6 100644
2511 --- a/src/pkgcheck/api.py
2512 +++ b/src/pkgcheck/api.py
2513 @@ -39,8 +39,8 @@ def scan(args=None, /, *, base_args=None):
2514 if base_args is None:
2515 base_args = []
2516
2517 - with patch('argparse.ArgumentParser.exit', parser_exit):
2518 - options = pkgcheck.argparser.parse_args(base_args + ['scan'] + args)
2519 + with patch("argparse.ArgumentParser.exit", parser_exit):
2520 + options = pkgcheck.argparser.parse_args(base_args + ["scan"] + args)
2521 return Pipeline(options)
2522
2523
2524
2525 diff --git a/src/pkgcheck/base.py b/src/pkgcheck/base.py
2526 index fc77dee6..ac49dbe4 100644
2527 --- a/src/pkgcheck/base.py
2528 +++ b/src/pkgcheck/base.py
2529 @@ -26,12 +26,13 @@ from snakeoil.mappings import ImmutableDict
2530 @dataclass(frozen=True, eq=False)
2531 class Scope:
2532 """Generic scope for scans, checks, and results."""
2533 +
2534 desc: str
2535 level: int
2536 _children: tuple = ()
2537
2538 def __str__(self):
2539 - return f'{self.__class__.__name__}({self.desc!r})'
2540 + return f"{self.__class__.__name__}({self.desc!r})"
2541
2542 def __lt__(self, other):
2543 if isinstance(other, Scope):
2544 @@ -62,8 +63,8 @@ class Scope:
2545 return hash(self.desc)
2546
2547 def __repr__(self):
2548 - address = '@%#8x' % (id(self),)
2549 - return f'<{self.__class__.__name__} desc={self.desc!r} {address}>'
2550 + address = "@%#8x" % (id(self),)
2551 + return f"<{self.__class__.__name__} desc={self.desc!r} {address}>"
2552
2553 def __contains__(self, key):
2554 return self == key or key in self._children
2555 @@ -80,37 +81,41 @@ class PackageScope(Scope):
2556 @dataclass(repr=False, frozen=True, eq=False)
2557 class ConditionalScope(Scope):
2558 """Scope for checks run only in certain circumstances."""
2559 +
2560 level: int = -99
2561
2562
2563 @dataclass(repr=False, frozen=True, eq=False)
2564 class LocationScope(Scope):
2565 """Scope for location-specific checks."""
2566 +
2567 level: int = 0
2568
2569
2570 # pkg-related scopes (level increasing by granularity)
2571 -repo_scope = PackageScope('repo', 1)
2572 -category_scope = PackageScope('category', 2)
2573 -package_scope = PackageScope('package', 3)
2574 -version_scope = PackageScope('version', 4)
2575 +repo_scope = PackageScope("repo", 1)
2576 +category_scope = PackageScope("category", 2)
2577 +package_scope = PackageScope("package", 3)
2578 +version_scope = PackageScope("version", 4)
2579
2580 # conditional (negative level) and location-specific scopes (zero level)
2581 -commit_scope = ConditionalScope('commit')
2582 -profile_node_scope = LocationScope('profile_node')
2583 -profiles_scope = LocationScope('profiles', 0, (profile_node_scope,))
2584 -eclass_scope = LocationScope('eclass')
2585 +commit_scope = ConditionalScope("commit")
2586 +profile_node_scope = LocationScope("profile_node")
2587 +profiles_scope = LocationScope("profiles", 0, (profile_node_scope,))
2588 +eclass_scope = LocationScope("eclass")
2589
2590 # mapping for -S/--scopes option, ordered for sorted output in the case of unknown scopes
2591 -scopes = ImmutableDict({
2592 - 'git': commit_scope,
2593 - 'profiles': profiles_scope,
2594 - 'eclass': eclass_scope,
2595 - 'repo': repo_scope,
2596 - 'cat': category_scope,
2597 - 'pkg': package_scope,
2598 - 'ver': version_scope,
2599 -})
2600 +scopes = ImmutableDict(
2601 + {
2602 + "git": commit_scope,
2603 + "profiles": profiles_scope,
2604 + "eclass": eclass_scope,
2605 + "repo": repo_scope,
2606 + "cat": category_scope,
2607 + "pkg": package_scope,
2608 + "ver": version_scope,
2609 + }
2610 +)
2611
2612
2613 class PkgcheckException(Exception):
2614 @@ -182,12 +187,13 @@ def param_name(cls):
2615
2616 For example, GitAddon -> git_addon and GitCache -> git_cache.
2617 """
2618 - return re.sub(r'([a-z])([A-Z])', r'\1_\2', cls.__name__).lower()
2619 + return re.sub(r"([a-z])([A-Z])", r"\1_\2", cls.__name__).lower()
2620
2621
2622 @dataclass(frozen=True)
2623 class LogMap:
2624 """Log function to callable mapping."""
2625 +
2626 func: str
2627 call: typing.Callable
2628
2629 @@ -223,7 +229,7 @@ class ProgressManager(AbstractContextManager):
2630 """Callback used for progressive output."""
2631 # avoid rewriting the same output
2632 if s != self._cached:
2633 - sys.stderr.write(f'{s}\r')
2634 + sys.stderr.write(f"{s}\r")
2635 self._cached = s
2636
2637 def __enter__(self):
2638 @@ -233,4 +239,4 @@ class ProgressManager(AbstractContextManager):
2639
2640 def __exit__(self, _exc_type, _exc_value, _traceback):
2641 if self._cached is not None:
2642 - sys.stderr.write('\n')
2643 + sys.stderr.write("\n")
2644
2645 diff --git a/src/pkgcheck/bash/__init__.py b/src/pkgcheck/bash/__init__.py
2646 index 6faf2bb5..38f9424d 100644
2647 --- a/src/pkgcheck/bash/__init__.py
2648 +++ b/src/pkgcheck/bash/__init__.py
2649 @@ -11,10 +11,10 @@ from .. import const
2650 from ctypes.util import find_library
2651
2652 # path to bash parsing library on the system (may be None)
2653 -syslib = find_library('tree-sitter-bash')
2654 +syslib = find_library("tree-sitter-bash")
2655
2656 # path to bash parsing library (vendored)
2657 -lib = pjoin(os.path.dirname(__file__), 'lang.so')
2658 +lib = pjoin(os.path.dirname(__file__), "lang.so")
2659
2660 # copied from tree-sitter with the following changes:
2661 # - prefer stdc++ over c++ when linking
2662 @@ -50,9 +50,7 @@ def build_library(output_path, repo_paths): # pragma: no cover
2663 source_paths.append(path.join(src_path, "scanner.cc"))
2664 elif path.exists(path.join(src_path, "scanner.c")):
2665 source_paths.append(path.join(src_path, "scanner.c"))
2666 - source_mtimes = [path.getmtime(__file__)] + [
2667 - path.getmtime(path_) for path_ in source_paths
2668 - ]
2669 + source_mtimes = [path.getmtime(__file__)] + [path.getmtime(path_) for path_ in source_paths]
2670
2671 compiler = new_compiler()
2672 # force `c++` compiler so the appropriate standard library is used
2673 @@ -91,21 +89,25 @@ try:
2674 from .. import _const
2675 except ImportError: # pragma: no cover
2676 # build library when running from git repo or tarball
2677 - if syslib is None and not os.path.exists(lib) and 'tree-sitter-bash' in os.listdir(const.REPO_PATH):
2678 - bash_src = pjoin(const.REPO_PATH, 'tree-sitter-bash')
2679 + if (
2680 + syslib is None
2681 + and not os.path.exists(lib)
2682 + and "tree-sitter-bash" in os.listdir(const.REPO_PATH)
2683 + ):
2684 + bash_src = pjoin(const.REPO_PATH, "tree-sitter-bash")
2685 build_library(lib, [bash_src])
2686
2687 if syslib is not None or os.path.exists(lib):
2688 - lang = Language(syslib or lib, 'bash')
2689 + lang = Language(syslib or lib, "bash")
2690 query = partial(lang.query)
2691 parser = Parser()
2692 parser.set_language(lang)
2693
2694 # various parse tree queries
2695 - cmd_query = query('(command) @call')
2696 - func_query = query('(function_definition) @func')
2697 - var_assign_query = query('(variable_assignment) @assign')
2698 - var_query = query('(variable_name) @var')
2699 + cmd_query = query("(command) @call")
2700 + func_query = query("(function_definition) @func")
2701 + var_assign_query = query("(variable_assignment) @assign")
2702 + var_query = query("(variable_name) @var")
2703
2704
2705 class ParseTree:
2706 @@ -118,13 +120,13 @@ class ParseTree:
2707
2708 def node_str(self, node):
2709 """Return the ebuild string associated with a given parse tree node."""
2710 - return self.data[node.start_byte:node.end_byte].decode('utf8')
2711 + return self.data[node.start_byte : node.end_byte].decode("utf8")
2712
2713 def global_query(self, query):
2714 """Run a given parse tree query returning only those nodes in global scope."""
2715 for x in self.tree.root_node.children:
2716 # skip nodes in function scope
2717 - if x.type != 'function_definition':
2718 + if x.type != "function_definition":
2719 for node, _ in query.captures(x):
2720 yield node
2721
2722 @@ -132,6 +134,6 @@ class ParseTree:
2723 """Run a given parse tree query returning only those nodes in function scope."""
2724 for x in self.tree.root_node.children:
2725 # only return nodes in function scope
2726 - if x.type == 'function_definition':
2727 + if x.type == "function_definition":
2728 for node, _ in query.captures(x):
2729 yield node
2730
2731 diff --git a/src/pkgcheck/checks/__init__.py b/src/pkgcheck/checks/__init__.py
2732 index f0959257..b5caa244 100644
2733 --- a/src/pkgcheck/checks/__init__.py
2734 +++ b/src/pkgcheck/checks/__init__.py
2735 @@ -42,13 +42,13 @@ class Check(feeds.Feed):
2736 return (
2737 sources.FilteredRepoSource,
2738 (sources.LatestVersionsFilter,),
2739 - (('source', self._source),)
2740 + (("source", self._source),),
2741 )
2742 elif max(x.scope for x in self.known_results) >= base.version_scope:
2743 return (
2744 sources.FilteredPackageRepoSource,
2745 (sources.LatestPkgsFilter,),
2746 - (('source', self._source),)
2747 + (("source", self._source),),
2748 )
2749 return self._source
2750
2751 @@ -79,9 +79,9 @@ class GentooRepoCheck(Check):
2752 if not self.options.gentoo_repo:
2753 check = self.__class__.__name__
2754 if check in self.options.selected_checks:
2755 - self.options.override_skip['gentoo'].append(check)
2756 + self.options.override_skip["gentoo"].append(check)
2757 else:
2758 - raise SkipCheck(self, 'not running against gentoo repo')
2759 + raise SkipCheck(self, "not running against gentoo repo")
2760
2761
2762 class OverlayRepoCheck(Check):
2763 @@ -90,7 +90,7 @@ class OverlayRepoCheck(Check):
2764 def __init__(self, *args):
2765 super().__init__(*args)
2766 if not self.options.target_repo.masters:
2767 - raise SkipCheck(self, 'not running against overlay')
2768 + raise SkipCheck(self, "not running against overlay")
2769
2770
2771 class OptionalCheck(Check):
2772 @@ -105,7 +105,7 @@ class GitCommitsCheck(OptionalCheck):
2773 def __init__(self, *args):
2774 super().__init__(*args)
2775 if not self.options.commits:
2776 - raise SkipCheck(self, 'not scanning against git commits')
2777 + raise SkipCheck(self, "not scanning against git commits")
2778
2779
2780 class AsyncCheck(Check):
2781 @@ -126,7 +126,7 @@ class NetworkCheck(AsyncCheck, OptionalCheck):
2782 def __init__(self, *args, net_addon, **kwargs):
2783 super().__init__(*args, **kwargs)
2784 if not self.options.net:
2785 - raise SkipCheck(self, 'network checks not enabled')
2786 + raise SkipCheck(self, "network checks not enabled")
2787 self.timeout = self.options.timeout
2788 self.session = net_addon.session
2789
2790 @@ -138,13 +138,15 @@ class MirrorsCheck(Check):
2791
2792 def __init__(self, *args, use_addon):
2793 super().__init__(*args)
2794 - self.iuse_filter = use_addon.get_filter('fetchables')
2795 + self.iuse_filter = use_addon.get_filter("fetchables")
2796
2797 def get_mirrors(self, pkg):
2798 mirrors = []
2799 fetchables, _ = self.iuse_filter(
2800 - (fetch.fetchable,), pkg,
2801 - pkg.generate_fetchables(allow_missing_checksums=True, ignore_unknown_mirrors=True))
2802 + (fetch.fetchable,),
2803 + pkg,
2804 + pkg.generate_fetchables(allow_missing_checksums=True, ignore_unknown_mirrors=True),
2805 + )
2806 for f in fetchables:
2807 for m in f.uri.visit_mirrors(treat_default_as_mirror=False):
2808 mirrors.append(m[0].mirror_name)
2809 @@ -164,7 +166,7 @@ class SkipCheck(base.PkgcheckUserException):
2810 else:
2811 # assume the check param is a raw class object
2812 check_name = check.__name__
2813 - super().__init__(f'{check_name}: {msg}')
2814 + super().__init__(f"{check_name}: {msg}")
2815
2816
2817 def init_checks(enabled_addons, options, results_q, *, addons_map=None, source_map=None):
2818 @@ -205,7 +207,7 @@ def init_checks(enabled_addons, options, results_q, *, addons_map=None, source_m
2819 # report which check skips were overridden
2820 for skip_type, checks in sorted(options.override_skip.items()):
2821 s = pluralism(checks)
2822 - checks_str = ', '.join(sorted(checks))
2823 + checks_str = ", ".join(sorted(checks))
2824 logger.warning(f"running {skip_type} specific check{s}: {checks_str}")
2825
2826 return enabled
2827
2828 diff --git a/src/pkgcheck/checks/acct.py b/src/pkgcheck/checks/acct.py
2829 index 4f144023..30953c89 100644
2830 --- a/src/pkgcheck/checks/acct.py
2831 +++ b/src/pkgcheck/checks/acct.py
2832 @@ -37,7 +37,7 @@ class ConflictingAccountIdentifiers(results.Error):
2833
2834 @property
2835 def desc(self):
2836 - pkgs = ', '.join(self.pkgs)
2837 + pkgs = ", ".join(self.pkgs)
2838 return f"conflicting {self.kind} id {self.identifier} usage: [ {pkgs} ]"
2839
2840
2841 @@ -55,8 +55,7 @@ class OutsideRangeAccountIdentifier(results.VersionResult, results.Error):
2842
2843 @property
2844 def desc(self):
2845 - return (f"{self.kind} id {self.identifier} outside permitted "
2846 - f"static allocation range")
2847 + return f"{self.kind} id {self.identifier} outside permitted " f"static allocation range"
2848
2849
2850 class AcctCheck(GentooRepoCheck, RepoCheck):
2851 @@ -71,33 +70,43 @@ class AcctCheck(GentooRepoCheck, RepoCheck):
2852 exist or is wrongly defined, this check is skipped.
2853 """
2854
2855 - _restricted_source = (sources.RestrictionRepoSource, (packages.OrRestriction(*(
2856 - restricts.CategoryDep('acct-user'), restricts.CategoryDep('acct-group'))),))
2857 - _source = (sources.RepositoryRepoSource, (), (('source', _restricted_source),))
2858 - known_results = frozenset([
2859 - MissingAccountIdentifier, ConflictingAccountIdentifiers,
2860 - OutsideRangeAccountIdentifier,
2861 - ])
2862 + _restricted_source = (
2863 + sources.RestrictionRepoSource,
2864 + (
2865 + packages.OrRestriction(
2866 + *(restricts.CategoryDep("acct-user"), restricts.CategoryDep("acct-group"))
2867 + ),
2868 + ),
2869 + )
2870 + _source = (sources.RepositoryRepoSource, (), (("source", _restricted_source),))
2871 + known_results = frozenset(
2872 + [
2873 + MissingAccountIdentifier,
2874 + ConflictingAccountIdentifiers,
2875 + OutsideRangeAccountIdentifier,
2876 + ]
2877 + )
2878
2879 def __init__(self, *args):
2880 super().__init__(*args)
2881 self.id_re = re.compile(
2882 - r'ACCT_(?P<var>USER|GROUP)_ID=(?P<quot>[\'"]?)(?P<id>[0-9]+)(?P=quot)')
2883 + r'ACCT_(?P<var>USER|GROUP)_ID=(?P<quot>[\'"]?)(?P<id>[0-9]+)(?P=quot)'
2884 + )
2885 self.seen_uids = defaultdict(partial(defaultdict, list))
2886 self.seen_gids = defaultdict(partial(defaultdict, list))
2887 uid_range, gid_range = self.load_ids_from_configuration(self.options.target_repo)
2888 self.category_map = {
2889 - 'acct-user': (self.seen_uids, 'USER', tuple(uid_range)),
2890 - 'acct-group': (self.seen_gids, 'GROUP', tuple(gid_range)),
2891 + "acct-user": (self.seen_uids, "USER", tuple(uid_range)),
2892 + "acct-group": (self.seen_gids, "GROUP", tuple(gid_range)),
2893 }
2894
2895 def parse_config_id_range(self, config: ConfigParser, config_key: str):
2896 - id_ranges = config['user-group-ids'].get(config_key, None)
2897 + id_ranges = config["user-group-ids"].get(config_key, None)
2898 if not id_ranges:
2899 raise SkipCheck(self, f"metadata/qa-policy.conf: missing value for {config_key}")
2900 try:
2901 - for id_range in map(str.strip, id_ranges.split(',')):
2902 - start, *end = map(int, id_range.split('-', maxsplit=1))
2903 + for id_range in map(str.strip, id_ranges.split(",")):
2904 + start, *end = map(int, id_range.split("-", maxsplit=1))
2905 if len(end) == 0:
2906 yield range(start, start + 1)
2907 else:
2908 @@ -107,11 +116,13 @@ class AcctCheck(GentooRepoCheck, RepoCheck):
2909
2910 def load_ids_from_configuration(self, repo):
2911 config = ConfigParser()
2912 - if not config.read(pjoin(repo.location, 'metadata', 'qa-policy.conf')):
2913 + if not config.read(pjoin(repo.location, "metadata", "qa-policy.conf")):
2914 raise SkipCheck(self, "failed loading 'metadata/qa-policy.conf'")
2915 - if 'user-group-ids' not in config:
2916 + if "user-group-ids" not in config:
2917 raise SkipCheck(self, "metadata/qa-policy.conf: missing section user-group-ids")
2918 - return self.parse_config_id_range(config, 'uid-range'), self.parse_config_id_range(config, 'gid-range')
2919 + return self.parse_config_id_range(config, "uid-range"), self.parse_config_id_range(
2920 + config, "gid-range"
2921 + )
2922
2923 def feed(self, pkg):
2924 try:
2925 @@ -121,8 +132,8 @@ class AcctCheck(GentooRepoCheck, RepoCheck):
2926
2927 for line in pkg.ebuild.text_fileobj():
2928 m = self.id_re.match(line)
2929 - if m is not None and m.group('var') == expected_var:
2930 - found_id = int(m.group('id'))
2931 + if m is not None and m.group("var") == expected_var:
2932 + found_id = int(m.group("id"))
2933 break
2934 else:
2935 yield MissingAccountIdentifier(f"ACCT_{expected_var}_ID", pkg=pkg)
2936
2937 diff --git a/src/pkgcheck/checks/cleanup.py b/src/pkgcheck/checks/cleanup.py
2938 index 076d56be..6a42a42f 100644
2939 --- a/src/pkgcheck/checks/cleanup.py
2940 +++ b/src/pkgcheck/checks/cleanup.py
2941 @@ -18,8 +18,8 @@ class RedundantVersion(results.VersionResult, results.Info):
2942 @property
2943 def desc(self):
2944 s = pluralism(self.later_versions)
2945 - versions = ', '.join(self.later_versions)
2946 - return f'slot({self.slot}) keywords are overshadowed by version{s}: {versions}'
2947 + versions = ", ".join(self.later_versions)
2948 + return f"slot({self.slot}) keywords are overshadowed by version{s}: {versions}"
2949
2950
2951 class RedundantVersionCheck(Check):
2952 @@ -40,38 +40,45 @@ class RedundantVersionCheck(Check):
2953 @staticmethod
2954 def mangle_argparser(parser):
2955 parser.plugin.add_argument(
2956 - '--stable-only', action='store_true',
2957 - help='consider redundant versions only within stable',
2958 + "--stable-only",
2959 + action="store_true",
2960 + help="consider redundant versions only within stable",
2961 docs="""
2962 If enabled, for each slot, only consider redundant versions
2963 with stable keywords. This is useful for cases of cleanup after
2964 successful stabilization.
2965 - """)
2966 + """,
2967 + )
2968
2969 def __init__(self, *args, profile_addon):
2970 super().__init__(*args)
2971 self.keywords_profiles = {
2972 - keyword: sorted(profiles, key=attrgetter('name'))
2973 - for keyword, profiles in profile_addon.items()}
2974 + keyword: sorted(profiles, key=attrgetter("name"))
2975 + for keyword, profiles in profile_addon.items()
2976 + }
2977
2978 def filter_later_profiles_masks(self, visible_cache, pkg, later_versions):
2979 # check both stable/unstable profiles for stable KEYWORDS and only
2980 # unstable profiles for unstable KEYWORDS
2981 keywords = []
2982 for keyword in pkg.sorted_keywords:
2983 - if keyword[0] != '~':
2984 - keywords.append('~' + keyword)
2985 + if keyword[0] != "~":
2986 + keywords.append("~" + keyword)
2987 keywords.append(keyword)
2988
2989 # if a profile exists, where the package is visible, but the later aren't
2990 # then it isn't redundant
2991 - visible_profiles = tuple(profile
2992 + visible_profiles = tuple(
2993 + profile
2994 for keyword in keywords
2995 for profile in self.keywords_profiles.get(keyword, ())
2996 - if visible_cache[(profile, pkg)])
2997 + if visible_cache[(profile, pkg)]
2998 + )
2999 return tuple(
3000 - later for later in later_versions
3001 - if all(visible_cache[(profile, later)] for profile in visible_profiles))
3002 + later
3003 + for later in later_versions
3004 + if all(visible_cache[(profile, later)] for profile in visible_profiles)
3005 + )
3006
3007 def feed(self, pkgset):
3008 if len(pkgset) == 1:
3009 @@ -91,8 +98,9 @@ class RedundantVersionCheck(Check):
3010 if not curr_set:
3011 continue
3012
3013 - matches = [ver for ver, keys in stack if ver.slot == pkg.slot and
3014 - not curr_set.difference(keys)]
3015 + matches = [
3016 + ver for ver, keys in stack if ver.slot == pkg.slot and not curr_set.difference(keys)
3017 + ]
3018
3019 # we've done our checks; now we inject unstable for any stable
3020 # via this, earlier versions that are unstable only get flagged
3021 @@ -100,7 +108,7 @@ class RedundantVersionCheck(Check):
3022 # stable.
3023
3024 # also, yes, have to use list comp here- we're adding as we go
3025 - curr_set.update([f'~{x}' for x in curr_set if not x.startswith('~')])
3026 + curr_set.update([f"~{x}" for x in curr_set if not x.startswith("~")])
3027
3028 stack.append((pkg, curr_set))
3029 if matches:
3030 @@ -108,7 +116,9 @@ class RedundantVersionCheck(Check):
3031
3032 visible_cache = defaultdictkey(lambda profile_pkg: profile_pkg[0].visible(profile_pkg[1]))
3033 for pkg, matches in reversed(bad):
3034 - if self.options.stable_only and all(key.startswith('~') for x in matches for key in x.keywords):
3035 + if self.options.stable_only and all(
3036 + key.startswith("~") for x in matches for key in x.keywords
3037 + ):
3038 continue
3039 if matches := self.filter_later_profiles_masks(visible_cache, pkg, matches):
3040 later_versions = (x.fullver for x in sorted(matches))
3041
3042 diff --git a/src/pkgcheck/checks/codingstyle.py b/src/pkgcheck/checks/codingstyle.py
3043 index a7d64aca..6d3e53ca 100644
3044 --- a/src/pkgcheck/checks/codingstyle.py
3045 +++ b/src/pkgcheck/checks/codingstyle.py
3046 @@ -12,8 +12,8 @@ from .. import addons, bash
3047 from .. import results, sources
3048 from . import Check
3049
3050 -PREFIX_VARIABLES = ('EROOT', 'ED', 'EPREFIX')
3051 -PATH_VARIABLES = ('BROOT', 'ROOT', 'D') + PREFIX_VARIABLES
3052 +PREFIX_VARIABLES = ("EROOT", "ED", "EPREFIX")
3053 +PATH_VARIABLES = ("BROOT", "ROOT", "D") + PREFIX_VARIABLES
3054
3055
3056 class _CommandResult(results.LineResult):
3057 @@ -25,13 +25,13 @@ class _CommandResult(results.LineResult):
3058
3059 @property
3060 def usage_desc(self):
3061 - return f'{self.command!r}'
3062 + return f"{self.command!r}"
3063
3064 @property
3065 def desc(self):
3066 - s = f'{self.usage_desc}, used on line {self.lineno}'
3067 + s = f"{self.usage_desc}, used on line {self.lineno}"
3068 if self.line != self.command:
3069 - s += f': {self.line!r}'
3070 + s += f": {self.line!r}"
3071 return s
3072
3073
3074 @@ -46,19 +46,19 @@ class _EapiCommandResult(_CommandResult):
3075
3076 @property
3077 def usage_desc(self):
3078 - return f'{self.command!r} {self._status} in EAPI {self.eapi}'
3079 + return f"{self.command!r} {self._status} in EAPI {self.eapi}"
3080
3081
3082 class DeprecatedEapiCommand(_EapiCommandResult, results.Warning):
3083 """Ebuild uses a deprecated EAPI command."""
3084
3085 - _status = 'deprecated'
3086 + _status = "deprecated"
3087
3088
3089 class BannedEapiCommand(_EapiCommandResult, results.Error):
3090 """Ebuild uses a banned EAPI command."""
3091
3092 - _status = 'banned'
3093 + _status = "banned"
3094
3095
3096 class BadCommandsCheck(Check):
3097 @@ -71,12 +71,16 @@ class BadCommandsCheck(Check):
3098 for func_node, _ in bash.func_query.captures(pkg.tree.root_node):
3099 for node, _ in bash.cmd_query.captures(func_node):
3100 call = pkg.node_str(node)
3101 - name = pkg.node_str(node.child_by_field_name('name'))
3102 + name = pkg.node_str(node.child_by_field_name("name"))
3103 lineno, colno = node.start_point
3104 if name in pkg.eapi.bash_cmds_banned:
3105 - yield BannedEapiCommand(name, line=call, lineno=lineno+1, eapi=pkg.eapi, pkg=pkg)
3106 + yield BannedEapiCommand(
3107 + name, line=call, lineno=lineno + 1, eapi=pkg.eapi, pkg=pkg
3108 + )
3109 elif name in pkg.eapi.bash_cmds_deprecated:
3110 - yield DeprecatedEapiCommand(name, line=call, lineno=lineno+1, eapi=pkg.eapi, pkg=pkg)
3111 + yield DeprecatedEapiCommand(
3112 + name, line=call, lineno=lineno + 1, eapi=pkg.eapi, pkg=pkg
3113 + )
3114
3115
3116 class EendMissingArg(results.LineResult, results.Warning):
3117 @@ -84,7 +88,7 @@ class EendMissingArg(results.LineResult, results.Warning):
3118
3119 @property
3120 def desc(self):
3121 - return f'eend with no arguments, on line {self.lineno}'
3122 + return f"eend with no arguments, on line {self.lineno}"
3123
3124
3125 class EendMissingArgCheck(Check):
3126 @@ -99,7 +103,7 @@ class EendMissingArgCheck(Check):
3127 line = pkg.node_str(node)
3128 if line == "eend":
3129 lineno, _ = node.start_point
3130 - yield EendMissingArg(line=line, lineno=lineno+1, pkg=pkg)
3131 + yield EendMissingArg(line=line, lineno=lineno + 1, pkg=pkg)
3132
3133
3134 class MissingSlash(results.LinesResult, results.Error):
3135 @@ -111,7 +115,7 @@ class MissingSlash(results.LinesResult, results.Error):
3136
3137 @property
3138 def desc(self):
3139 - return f'{self.match} missing trailing slash {self.lines_str}'
3140 + return f"{self.match} missing trailing slash {self.lines_str}"
3141
3142
3143 class UnnecessarySlashStrip(results.LinesResult, results.Style):
3144 @@ -123,7 +127,7 @@ class UnnecessarySlashStrip(results.LinesResult, results.Style):
3145
3146 @property
3147 def desc(self):
3148 - return f'{self.match} unnecessary slash strip {self.lines_str}'
3149 + return f"{self.match} unnecessary slash strip {self.lines_str}"
3150
3151
3152 class DoublePrefixInPath(results.LinesResult, results.Error):
3153 @@ -143,7 +147,7 @@ class DoublePrefixInPath(results.LinesResult, results.Error):
3154
3155 @property
3156 def desc(self):
3157 - return f'{self.match}: concatenates two paths containing EPREFIX {self.lines_str}'
3158 + return f"{self.match}: concatenates two paths containing EPREFIX {self.lines_str}"
3159
3160
3161 class PathVariablesCheck(Check):
3162 @@ -152,63 +156,84 @@ class PathVariablesCheck(Check):
3163 _source = sources.EbuildFileRepoSource
3164 known_results = frozenset([MissingSlash, UnnecessarySlashStrip, DoublePrefixInPath])
3165 prefixed_dir_functions = (
3166 - 'insinto', 'exeinto',
3167 - 'dodir', 'keepdir',
3168 - 'fowners', 'fperms',
3169 + "insinto",
3170 + "exeinto",
3171 + "dodir",
3172 + "keepdir",
3173 + "fowners",
3174 + "fperms",
3175 # java-pkg-2
3176 - 'java-pkg_jarinto', 'java-pkg_sointo',
3177 + "java-pkg_jarinto",
3178 + "java-pkg_sointo",
3179 # python-utils-r1
3180 - 'python_scriptinto', 'python_moduleinto',
3181 + "python_scriptinto",
3182 + "python_moduleinto",
3183 )
3184 # TODO: add variables to mark this status in the eclasses in order to pull
3185 # this data from parsed eclass docs
3186 prefixed_getters = (
3187 # bash-completion-r1.eclass
3188 - 'get_bashcompdir', 'get_bashhelpersdir',
3189 + "get_bashcompdir",
3190 + "get_bashhelpersdir",
3191 # db-use.eclass
3192 - 'db_includedir',
3193 + "db_includedir",
3194 # golang-base.eclass
3195 - 'get_golibdir_gopath',
3196 + "get_golibdir_gopath",
3197 # llvm.eclass
3198 - 'get_llvm_prefix',
3199 + "get_llvm_prefix",
3200 # python-utils-r1.eclass
3201 - 'python_get_sitedir', 'python_get_includedir',
3202 - 'python_get_library_path', 'python_get_scriptdir',
3203 + "python_get_sitedir",
3204 + "python_get_includedir",
3205 + "python_get_library_path",
3206 + "python_get_scriptdir",
3207 # qmake-utils.eclass
3208 - 'qt4_get_bindir', 'qt5_get_bindir',
3209 + "qt4_get_bindir",
3210 + "qt5_get_bindir",
3211 # s6.eclass
3212 - 's6_get_servicedir',
3213 + "s6_get_servicedir",
3214 # systemd.eclass
3215 - 'systemd_get_systemunitdir', 'systemd_get_userunitdir',
3216 - 'systemd_get_utildir', 'systemd_get_systemgeneratordir',
3217 + "systemd_get_systemunitdir",
3218 + "systemd_get_userunitdir",
3219 + "systemd_get_utildir",
3220 + "systemd_get_systemgeneratordir",
3221 )
3222 prefixed_rhs_variables = (
3223 # catch silly ${ED}${EPREFIX} mistake ;-)
3224 - 'EPREFIX',
3225 + "EPREFIX",
3226 # python-utils-r1.eclass
3227 - 'PYTHON', 'PYTHON_SITEDIR', 'PYTHON_INCLUDEDIR', 'PYTHON_LIBPATH',
3228 - 'PYTHON_CONFIG', 'PYTHON_SCRIPTDIR',
3229 + "PYTHON",
3230 + "PYTHON_SITEDIR",
3231 + "PYTHON_INCLUDEDIR",
3232 + "PYTHON_LIBPATH",
3233 + "PYTHON_CONFIG",
3234 + "PYTHON_SCRIPTDIR",
3235 )
3236
3237 def __init__(self, *args):
3238 super().__init__(*args)
3239 - self.missing_regex = re.compile(r'(\${(%s)})"?\w+/' % r'|'.join(PATH_VARIABLES))
3240 - self.unnecessary_regex = re.compile(r'(\${(%s)%%/})' % r'|'.join(PATH_VARIABLES))
3241 + self.missing_regex = re.compile(r'(\${(%s)})"?\w+/' % r"|".join(PATH_VARIABLES))
3242 + self.unnecessary_regex = re.compile(r"(\${(%s)%%/})" % r"|".join(PATH_VARIABLES))
3243 self.double_prefix_regex = re.compile(
3244 - r'(\${(%s)(%%/)?}/?\$(\((%s)\)|{(%s)}))' % (
3245 - r'|'.join(PREFIX_VARIABLES),
3246 - r'|'.join(self.prefixed_getters),
3247 - r'|'.join(self.prefixed_rhs_variables)))
3248 + r"(\${(%s)(%%/)?}/?\$(\((%s)\)|{(%s)}))"
3249 + % (
3250 + r"|".join(PREFIX_VARIABLES),
3251 + r"|".join(self.prefixed_getters),
3252 + r"|".join(self.prefixed_rhs_variables),
3253 + )
3254 + )
3255 self.double_prefix_func_regex = re.compile(
3256 - r'\b(%s)\s[^&|;]*\$(\((%s)\)|{(%s)})' % (
3257 - r'|'.join(self.prefixed_dir_functions),
3258 - r'|'.join(self.prefixed_getters),
3259 - r'|'.join(self.prefixed_rhs_variables)))
3260 + r"\b(%s)\s[^&|;]*\$(\((%s)\)|{(%s)})"
3261 + % (
3262 + r"|".join(self.prefixed_dir_functions),
3263 + r"|".join(self.prefixed_getters),
3264 + r"|".join(self.prefixed_rhs_variables),
3265 + )
3266 + )
3267 # do not catch ${foo#${EPREFIX}} and similar
3268 self.double_prefix_func_false_positive_regex = re.compile(
3269 - r'.*?[#]["]?\$(\((%s)\)|{(%s)})' % (
3270 - r'|'.join(self.prefixed_getters),
3271 - r'|'.join(self.prefixed_rhs_variables)))
3272 + r'.*?[#]["]?\$(\((%s)\)|{(%s)})'
3273 + % (r"|".join(self.prefixed_getters), r"|".join(self.prefixed_rhs_variables))
3274 + )
3275
3276 def feed(self, pkg):
3277 missing = defaultdict(list)
3278 @@ -221,7 +246,7 @@ class PathVariablesCheck(Check):
3279 continue
3280
3281 # flag double path prefix usage on uncommented lines only
3282 - if line[0] != '#':
3283 + if line[0] != "#":
3284 if mo := self.double_prefix_regex.search(line):
3285 double_prefix[mo.group(1)].append(lineno)
3286 if mo := self.double_prefix_func_regex.search(line):
3287 @@ -262,22 +287,22 @@ class AbsoluteSymlinkCheck(Check):
3288 _source = sources.EbuildFileRepoSource
3289 known_results = frozenset([AbsoluteSymlink])
3290
3291 - DIRS = ('bin', 'etc', 'lib', 'opt', 'sbin', 'srv', 'usr', 'var')
3292 + DIRS = ("bin", "etc", "lib", "opt", "sbin", "srv", "usr", "var")
3293
3294 def __init__(self, *args):
3295 super().__init__(*args)
3296 - dirs = '|'.join(self.DIRS)
3297 - path_vars = '|'.join(PATH_VARIABLES)
3298 + dirs = "|".join(self.DIRS)
3299 + path_vars = "|".join(PATH_VARIABLES)
3300 prefixed_regex = rf'"\${{({path_vars})(%/)?}}(?P<cp>")?(?(cp)\S*|.*?")'
3301 non_prefixed_regex = rf'(?P<op>["\'])?/({dirs})(?(op).*?(?P=op)|\S*)'
3302 - self.regex = re.compile(rf'^\s*(?P<cmd>dosym\s+({prefixed_regex}|{non_prefixed_regex}))')
3303 + self.regex = re.compile(rf"^\s*(?P<cmd>dosym\s+({prefixed_regex}|{non_prefixed_regex}))")
3304
3305 def feed(self, pkg):
3306 for lineno, line in enumerate(pkg.lines, 1):
3307 if not line.strip():
3308 continue
3309 if mo := self.regex.match(line):
3310 - yield AbsoluteSymlink(mo.group('cmd'), line=line, lineno=lineno, pkg=pkg)
3311 + yield AbsoluteSymlink(mo.group("cmd"), line=line, lineno=lineno, pkg=pkg)
3312
3313
3314 class DeprecatedInsinto(results.LineResult, results.Warning):
3315 @@ -290,8 +315,8 @@ class DeprecatedInsinto(results.LineResult, results.Warning):
3316 @property
3317 def desc(self):
3318 return (
3319 - f'deprecated insinto usage (use {self.cmd} instead), '
3320 - f'line {self.lineno}: {self.line}'
3321 + f"deprecated insinto usage (use {self.cmd} instead), "
3322 + f"line {self.lineno}: {self.line}"
3323 )
3324
3325
3326 @@ -301,21 +326,25 @@ class InsintoCheck(Check):
3327 _source = sources.EbuildFileRepoSource
3328 known_results = frozenset([DeprecatedInsinto])
3329
3330 - path_mapping = ImmutableDict({
3331 - '/etc/conf.d': 'doconfd or newconfd',
3332 - '/etc/env.d': 'doenvd or newenvd',
3333 - '/etc/init.d': 'doinitd or newinitd',
3334 - '/etc/pam.d': 'dopamd or newpamd from pam.eclass',
3335 - '/usr/share/applications': 'domenu or newmenu from desktop.eclass',
3336 - })
3337 + path_mapping = ImmutableDict(
3338 + {
3339 + "/etc/conf.d": "doconfd or newconfd",
3340 + "/etc/env.d": "doenvd or newenvd",
3341 + "/etc/init.d": "doinitd or newinitd",
3342 + "/etc/pam.d": "dopamd or newpamd from pam.eclass",
3343 + "/usr/share/applications": "domenu or newmenu from desktop.eclass",
3344 + }
3345 + )
3346
3347 def __init__(self, *args):
3348 super().__init__(*args)
3349 - paths = '|'.join(s.replace('/', '/+') + '/?' for s in self.path_mapping)
3350 + paths = "|".join(s.replace("/", "/+") + "/?" for s in self.path_mapping)
3351 self._insinto_re = re.compile(
3352 - rf'(?P<insinto>insinto[ \t]+(?P<path>{paths})(?!/\w+))(?:$|[/ \t])')
3353 + rf"(?P<insinto>insinto[ \t]+(?P<path>{paths})(?!/\w+))(?:$|[/ \t])"
3354 + )
3355 self._insinto_doc_re = re.compile(
3356 - r'(?P<insinto>insinto[ \t]+/usr/share/doc/(")?\$\{PF?\}(?(2)\2)(/\w+)*)(?:$|[/ \t])')
3357 + r'(?P<insinto>insinto[ \t]+/usr/share/doc/(")?\$\{PF?\}(?(2)\2)(/\w+)*)(?:$|[/ \t])'
3358 + )
3359
3360 def feed(self, pkg):
3361 for lineno, line in enumerate(pkg.lines, 1):
3362 @@ -323,10 +352,9 @@ class InsintoCheck(Check):
3363 continue
3364 matches = self._insinto_re.search(line)
3365 if matches is not None:
3366 - path = re.sub('//+', '/', matches.group('path'))
3367 - cmd = self.path_mapping[path.rstrip('/')]
3368 - yield DeprecatedInsinto(
3369 - cmd, line=matches.group('insinto'), lineno=lineno, pkg=pkg)
3370 + path = re.sub("//+", "/", matches.group("path"))
3371 + cmd = self.path_mapping[path.rstrip("/")]
3372 + yield DeprecatedInsinto(cmd, line=matches.group("insinto"), lineno=lineno, pkg=pkg)
3373 continue
3374 # Check for insinto usage that should be replaced with
3375 # docinto/dodoc [-r] under supported EAPIs.
3376 @@ -334,8 +362,8 @@ class InsintoCheck(Check):
3377 matches = self._insinto_doc_re.search(line)
3378 if matches is not None:
3379 yield DeprecatedInsinto(
3380 - 'docinto/dodoc', line=matches.group('insinto'),
3381 - lineno=lineno, pkg=pkg)
3382 + "docinto/dodoc", line=matches.group("insinto"), lineno=lineno, pkg=pkg
3383 + )
3384
3385
3386 class ObsoleteUri(results.VersionResult, results.Style):
3387 @@ -356,8 +384,10 @@ class ObsoleteUri(results.VersionResult, results.Style):
3388
3389 @property
3390 def desc(self):
3391 - return (f"obsolete fetch URI: {self.uri} on line "
3392 - f"{self.line}, should be replaced by: {self.replacement}")
3393 + return (
3394 + f"obsolete fetch URI: {self.uri} on line "
3395 + f"{self.line}, should be replaced by: {self.replacement}"
3396 + )
3397
3398
3399 class ObsoleteUriCheck(Check):
3400 @@ -367,13 +397,17 @@ class ObsoleteUriCheck(Check):
3401 known_results = frozenset([ObsoleteUri])
3402
3403 REGEXPS = (
3404 - (r'.*\b(?P<uri>(?P<prefix>https?://github\.com/.*?/.*?/)'
3405 - r'(?:tar|zip)ball(?P<ref>\S*))',
3406 - r'\g<prefix>archive\g<ref>.tar.gz'),
3407 - (r'.*\b(?P<uri>(?P<prefix>https?://gitlab\.com/.*?/(?P<pkg>.*?)/)'
3408 - r'repository/archive\.(?P<format>tar|tar\.gz|tar\.bz2|zip)'
3409 - r'\?ref=(?P<ref>\S*))',
3410 - r'\g<prefix>-/archive/\g<ref>/\g<pkg>-\g<ref>.\g<format>'),
3411 + (
3412 + r".*\b(?P<uri>(?P<prefix>https?://github\.com/.*?/.*?/)"
3413 + r"(?:tar|zip)ball(?P<ref>\S*))",
3414 + r"\g<prefix>archive\g<ref>.tar.gz",
3415 + ),
3416 + (
3417 + r".*\b(?P<uri>(?P<prefix>https?://gitlab\.com/.*?/(?P<pkg>.*?)/)"
3418 + r"repository/archive\.(?P<format>tar|tar\.gz|tar\.bz2|zip)"
3419 + r"\?ref=(?P<ref>\S*))",
3420 + r"\g<prefix>-/archive/\g<ref>/\g<pkg>-\g<ref>.\g<format>",
3421 + ),
3422 )
3423
3424 def __init__(self, *args):
3425 @@ -382,12 +416,12 @@ class ObsoleteUriCheck(Check):
3426
3427 def feed(self, pkg):
3428 for lineno, line in enumerate(pkg.lines, 1):
3429 - if not line.strip() or line.startswith('#'):
3430 + if not line.strip() or line.startswith("#"):
3431 continue
3432 # searching for multiple matches on a single line is too slow
3433 for regexp, repl in self.regexes:
3434 if mo := regexp.match(line):
3435 - uri = mo.group('uri')
3436 + uri = mo.group("uri")
3437 yield ObsoleteUri(lineno, uri, regexp.sub(repl, uri), pkg=pkg)
3438
3439
3440 @@ -405,8 +439,10 @@ class BetterCompressionUri(results.LineResult, results.Style):
3441
3442 @property
3443 def desc(self):
3444 - return (f"line {self.lineno}: better compression URI using extension "
3445 - f"{self.replacement!r} for {self.line!r}")
3446 + return (
3447 + f"line {self.lineno}: better compression URI using extension "
3448 + f"{self.replacement!r} for {self.line!r}"
3449 + )
3450
3451
3452 class BetterCompressionCheck(Check):
3453 @@ -416,8 +452,10 @@ class BetterCompressionCheck(Check):
3454 known_results = frozenset([BetterCompressionUri])
3455
3456 REGEXPS = (
3457 - (r'.*\b(?P<uri>https?://[^/]*?gitlab[^/]*?/.*/-/archive/.*?/\S*\.(?:tar\.gz|tar(?!.bz2)|zip))',
3458 - '.tar.bz2'),
3459 + (
3460 + r".*\b(?P<uri>https?://[^/]*?gitlab[^/]*?/.*/-/archive/.*?/\S*\.(?:tar\.gz|tar(?!.bz2)|zip))",
3461 + ".tar.bz2",
3462 + ),
3463 )
3464
3465 def __init__(self, *args):
3466 @@ -426,12 +464,12 @@ class BetterCompressionCheck(Check):
3467
3468 def feed(self, pkg):
3469 for lineno, line in enumerate(pkg.lines, 1):
3470 - if not line.strip() or line.startswith('#'):
3471 + if not line.strip() or line.startswith("#"):
3472 continue
3473 # searching for multiple matches on a single line is too slow
3474 for regexp, replacement in self.regexes:
3475 if mo := regexp.match(line):
3476 - uri = mo.group('uri')
3477 + uri = mo.group("uri")
3478 yield BetterCompressionUri(replacement, lineno=lineno, line=uri, pkg=pkg)
3479
3480
3481 @@ -445,7 +483,7 @@ class HomepageInSrcUri(results.VersionResult, results.Style):
3482
3483 @property
3484 def desc(self):
3485 - return '${HOMEPAGE} in SRC_URI'
3486 + return "${HOMEPAGE} in SRC_URI"
3487
3488
3489 class StaticSrcUri(results.VersionResult, results.Style):
3490 @@ -462,7 +500,7 @@ class StaticSrcUri(results.VersionResult, results.Style):
3491
3492 @property
3493 def desc(self):
3494 - return f'{self.static_str!r} in SRC_URI, replace with {self.replacement}'
3495 + return f"{self.static_str!r} in SRC_URI, replace with {self.replacement}"
3496
3497
3498 class ReferenceInMetadataVar(results.VersionResult, results.Style):
3499 @@ -491,8 +529,8 @@ class ReferenceInMetadataVar(results.VersionResult, results.Style):
3500 @property
3501 def desc(self):
3502 s = pluralism(self.refs)
3503 - refs = ', '.join(self.refs)
3504 - return f'{self.variable} includes variable{s}: {refs}'
3505 + refs = ", ".join(self.refs)
3506 + return f"{self.variable} includes variable{s}: {refs}"
3507
3508
3509 class MultipleKeywordsLines(results.LinesResult, results.Style):
3510 @@ -530,13 +568,14 @@ class MetadataVarCheck(Check):
3511 """Scan various globally assigned metadata variables for issues."""
3512
3513 _source = sources.EbuildParseRepoSource
3514 - known_results = frozenset([
3515 - HomepageInSrcUri, StaticSrcUri, ReferenceInMetadataVar, MultipleKeywordsLines])
3516 + known_results = frozenset(
3517 + [HomepageInSrcUri, StaticSrcUri, ReferenceInMetadataVar, MultipleKeywordsLines]
3518 + )
3519
3520 # mapping between registered variables and verification methods
3521 known_variables = {}
3522
3523 - @verify_vars('HOMEPAGE', 'KEYWORDS')
3524 + @verify_vars("HOMEPAGE", "KEYWORDS")
3525 def _raw_text(self, var, node, value, pkg):
3526 matches = []
3527 for var_node, _ in bash.var_query.captures(node):
3528 @@ -544,12 +583,12 @@ class MetadataVarCheck(Check):
3529 if matches:
3530 yield ReferenceInMetadataVar(var, stable_unique(matches), pkg=pkg)
3531
3532 - @verify_vars('LICENSE')
3533 + @verify_vars("LICENSE")
3534 def _raw_text_license(self, var, node, value, pkg):
3535 matches = []
3536 for var_node, _ in bash.var_query.captures(node):
3537 var_str = pkg.node_str(var_node.parent).strip()
3538 - if var_str in ['$LICENSE', '${LICENSE}']:
3539 + if var_str in ["$LICENSE", "${LICENSE}"]:
3540 continue # LICENSE in LICENSE is ok
3541 matches.append(var_str)
3542 if matches:
3543 @@ -557,47 +596,43 @@ class MetadataVarCheck(Check):
3544
3545 def build_src_uri_variants_regex(self, pkg):
3546 p, pv = pkg.P, pkg.PV
3547 - replacements = {
3548 - p: '${P}',
3549 - pv: '${PV}'
3550 - }
3551 + replacements = {p: "${P}", pv: "${PV}"}
3552 replacements.setdefault(p.capitalize(), "${P^}")
3553 replacements.setdefault(p.upper(), "${P^^}")
3554
3555 for value, replacement in tuple(replacements.items()):
3556 - replacements.setdefault(value.replace('.', ''), replacement.replace('}', '//.}'))
3557 - replacements.setdefault(value.replace('.', '_'), replacement.replace('}', '//./_}'))
3558 - replacements.setdefault(value.replace('.', '-'), replacement.replace('}', '//./-}'))
3559 + replacements.setdefault(value.replace(".", ""), replacement.replace("}", "//.}"))
3560 + replacements.setdefault(value.replace(".", "_"), replacement.replace("}", "//./_}"))
3561 + replacements.setdefault(value.replace(".", "-"), replacement.replace("}", "//./-}"))
3562
3563 pos = 0
3564 - positions = [pos := pv.find('.', pos+1) for _ in range(pv.count('.'))]
3565 + positions = [pos := pv.find(".", pos + 1) for _ in range(pv.count("."))]
3566
3567 - for sep in ('', '-', '_'):
3568 - replacements.setdefault(pv.replace('.', sep, 1), f"$(ver_rs 1 {sep!r})")
3569 - for count in range(2, pv.count('.')):
3570 - replacements.setdefault(pv.replace('.', sep, count), f"$(ver_rs 1-{count} {sep!r})")
3571 + for sep in ("", "-", "_"):
3572 + replacements.setdefault(pv.replace(".", sep, 1), f"$(ver_rs 1 {sep!r})")
3573 + for count in range(2, pv.count(".")):
3574 + replacements.setdefault(pv.replace(".", sep, count), f"$(ver_rs 1-{count} {sep!r})")
3575
3576 for pos, index in enumerate(positions[1:], start=2):
3577 replacements.setdefault(pv[:index], f"$(ver_cut 1-{pos})")
3578
3579 replacements = sorted(replacements.items(), key=lambda x: -len(x[0]))
3580
3581 - return tuple(zip(*replacements))[1], '|'.join(
3582 - rf'(?P<r{index}>{re.escape(s)})'
3583 - for index, (s, _) in enumerate(replacements)
3584 + return tuple(zip(*replacements))[1], "|".join(
3585 + rf"(?P<r{index}>{re.escape(s)})" for index, (s, _) in enumerate(replacements)
3586 )
3587
3588 - @verify_vars('SRC_URI')
3589 + @verify_vars("SRC_URI")
3590 def _src_uri(self, var, node, value, pkg):
3591 - if '${HOMEPAGE}' in value:
3592 + if "${HOMEPAGE}" in value:
3593 yield HomepageInSrcUri(pkg=pkg)
3594
3595 replacements, regex = self.build_src_uri_variants_regex(pkg)
3596 - static_src_uri_re = rf'(?:/|{re.escape(pkg.PN)}[-._]?|->\s*)[v]?(?P<static_str>({regex}))'
3597 + static_src_uri_re = rf"(?:/|{re.escape(pkg.PN)}[-._]?|->\s*)[v]?(?P<static_str>({regex}))"
3598 static_urls = {}
3599 for match in re.finditer(static_src_uri_re, value):
3600 relevant = {key: value for key, value in match.groupdict().items() if value is not None}
3601 - static_str = relevant.pop('static_str')
3602 + static_str = relevant.pop("static_str")
3603 assert len(relevant) == 1
3604 key = int(tuple(relevant.keys())[0][1:])
3605 static_urls[static_str] = replacements[key]
3606 @@ -608,12 +643,12 @@ class MetadataVarCheck(Check):
3607 def feed(self, pkg):
3608 keywords_lines = set()
3609 for node in pkg.global_query(bash.var_assign_query):
3610 - name = pkg.node_str(node.child_by_field_name('name'))
3611 + name = pkg.node_str(node.child_by_field_name("name"))
3612 if name in self.known_variables:
3613 # RHS value node should be last
3614 val_node = node.children[-1]
3615 val_str = pkg.node_str(val_node)
3616 - if name == 'KEYWORDS':
3617 + if name == "KEYWORDS":
3618 keywords_lines.add(node.start_point[0] + 1)
3619 keywords_lines.add(node.end_point[0] + 1)
3620 yield from self.known_variables[name](self, name, val_node, val_str, pkg)
3621 @@ -633,7 +668,7 @@ class MissingInherits(results.VersionResult, results.Warning):
3622
3623 @property
3624 def desc(self):
3625 - return f'{self.eclass}: missing inherit usage: {repr(self.usage)}, line {self.lineno}'
3626 + return f"{self.eclass}: missing inherit usage: {repr(self.usage)}, line {self.lineno}"
3627
3628
3629 class IndirectInherits(results.VersionResult, results.Warning):
3630 @@ -651,7 +686,7 @@ class IndirectInherits(results.VersionResult, results.Warning):
3631
3632 @property
3633 def desc(self):
3634 - return f'{self.eclass}: indirect inherit usage: {repr(self.usage)}, line {self.lineno}'
3635 + return f"{self.eclass}: indirect inherit usage: {repr(self.usage)}, line {self.lineno}"
3636
3637
3638 class UnusedInherits(results.VersionResult, results.Warning):
3639 @@ -663,9 +698,9 @@ class UnusedInherits(results.VersionResult, results.Warning):
3640
3641 @property
3642 def desc(self):
3643 - es = pluralism(self.eclasses, plural='es')
3644 - eclasses = ', '.join(self.eclasses)
3645 - return f'unused eclass{es}: {eclasses}'
3646 + es = pluralism(self.eclasses, plural="es")
3647 + eclasses = ", ".join(self.eclasses)
3648 + return f"unused eclass{es}: {eclasses}"
3649
3650
3651 class InternalEclassUsage(results.VersionResult, results.Warning):
3652 @@ -679,7 +714,7 @@ class InternalEclassUsage(results.VersionResult, results.Warning):
3653
3654 @property
3655 def desc(self):
3656 - return f'{self.eclass}: internal usage: {repr(self.usage)}, line {self.lineno}'
3657 + return f"{self.eclass}: internal usage: {repr(self.usage)}, line {self.lineno}"
3658
3659
3660 class InheritsCheck(Check):
3661 @@ -690,8 +725,9 @@ class InheritsCheck(Check):
3662 """
3663
3664 _source = sources.EbuildParseRepoSource
3665 - known_results = frozenset([
3666 - MissingInherits, IndirectInherits, UnusedInherits, InternalEclassUsage])
3667 + known_results = frozenset(
3668 + [MissingInherits, IndirectInherits, UnusedInherits, InternalEclassUsage]
3669 + )
3670 required_addons = (addons.eclass.EclassAddon,)
3671
3672 def __init__(self, *args, eclass_addon):
3673 @@ -703,7 +739,8 @@ class InheritsCheck(Check):
3674 # register internal and exported funcs/vars for all eclasses
3675 for eclass, eclass_obj in self.eclass_cache.items():
3676 self.internals[eclass] = (
3677 - eclass_obj.internal_function_names | eclass_obj.internal_variable_names)
3678 + eclass_obj.internal_function_names | eclass_obj.internal_variable_names
3679 + )
3680 for name in eclass_obj.exported_function_names:
3681 self.exported.setdefault(name, set()).add(eclass)
3682 # Don't use all exported vars in order to avoid
3683 @@ -716,9 +753,7 @@ class InheritsCheck(Check):
3684 self.eapi_funcs = {}
3685 for eapi in EAPI.known_eapis.values():
3686 s = set(eapi.bash_cmds_internal | eapi.bash_cmds_deprecated)
3687 - s.update(
3688 - x for x in (eapi.bash_funcs | eapi.bash_funcs_global)
3689 - if not x.startswith('_'))
3690 + s.update(x for x in (eapi.bash_funcs | eapi.bash_funcs_global) if not x.startswith("_"))
3691 self.eapi_funcs[eapi] = frozenset(s)
3692
3693 # register EAPI-related vars to ignore
3694 @@ -751,7 +786,7 @@ class InheritsCheck(Check):
3695 # register variables assigned in ebuilds
3696 assigned_vars = dict()
3697 for node, _ in bash.var_assign_query.captures(pkg.tree.root_node):
3698 - name = pkg.node_str(node.child_by_field_name('name'))
3699 + name = pkg.node_str(node.child_by_field_name("name"))
3700 if eclass := self.get_eclass(name, pkg):
3701 assigned_vars[name] = eclass
3702
3703 @@ -759,8 +794,8 @@ class InheritsCheck(Check):
3704 used = defaultdict(list)
3705 for node, _ in bash.cmd_query.captures(pkg.tree.root_node):
3706 call = pkg.node_str(node)
3707 - name = pkg.node_str(node.child_by_field_name('name'))
3708 - if name == 'inherit':
3709 + name = pkg.node_str(node.child_by_field_name("name"))
3710 + if name == "inherit":
3711 # register conditional eclasses
3712 eclasses = call.split()[1:]
3713 if not pkg.inherited.intersection(eclasses):
3714 @@ -770,12 +805,12 @@ class InheritsCheck(Check):
3715 elif name not in self.eapi_funcs[pkg.eapi] | assigned_vars.keys():
3716 lineno, colno = node.start_point
3717 if eclass := self.get_eclass(name, pkg):
3718 - used[eclass].append((lineno + 1, name, call.split('\n', 1)[0]))
3719 + used[eclass].append((lineno + 1, name, call.split("\n", 1)[0]))
3720
3721 # match captured variables with eclasses
3722 for node, _ in bash.var_query.captures(pkg.tree.root_node):
3723 name = pkg.node_str(node)
3724 - if node.parent.type == 'unset_command':
3725 + if node.parent.type == "unset_command":
3726 continue
3727 if name not in self.eapi_vars[pkg.eapi] | assigned_vars.keys():
3728 lineno, colno = node.start_point
3729 @@ -793,7 +828,8 @@ class InheritsCheck(Check):
3730 phases = [pkg.eapi.phases[x] for x in pkg.defined_phases]
3731 for eclass in list(unused):
3732 if self.eclass_cache[eclass].exported_function_names.intersection(
3733 - f'{eclass}_{phase}' for phase in phases):
3734 + f"{eclass}_{phase}" for phase in phases
3735 + ):
3736 unused.discard(eclass)
3737
3738 for eclass in list(unused):
3739 @@ -802,7 +838,8 @@ class InheritsCheck(Check):
3740 unused.discard(eclass)
3741 else:
3742 exported_eclass_keys = pkg.eapi.eclass_keys.intersection(
3743 - self.eclass_cache[eclass].exported_variable_names)
3744 + self.eclass_cache[eclass].exported_variable_names
3745 + )
3746 if not self.eclass_cache[eclass].exported_function_names and exported_eclass_keys:
3747 # ignore eclasses that export ebuild metadata (e.g.
3748 # SRC_URI, S, ...) and no functions
3749 @@ -844,15 +881,38 @@ class ReadonlyVariableCheck(Check):
3750 known_results = frozenset([ReadonlyVariable])
3751
3752 # https://devmanual.gentoo.org/ebuild-writing/variables/#predefined-read-only-variables
3753 - readonly_vars = frozenset([
3754 - 'P', 'PN', 'PV', 'PR', 'PVR', 'PF', 'A', 'CATEGORY', 'FILESDIR', 'WORKDIR',
3755 - 'T', 'D', 'HOME', 'ROOT', 'DISTDIR', 'EPREFIX', 'ED', 'EROOT', 'SYSROOT',
3756 - 'ESYSROOT', 'BROOT', 'MERGE_TYPE', 'REPLACING_VERSIONS', 'REPLACED_BY_VERSION',
3757 - ])
3758 + readonly_vars = frozenset(
3759 + [
3760 + "P",
3761 + "PN",
3762 + "PV",
3763 + "PR",
3764 + "PVR",
3765 + "PF",
3766 + "A",
3767 + "CATEGORY",
3768 + "FILESDIR",
3769 + "WORKDIR",
3770 + "T",
3771 + "D",
3772 + "HOME",
3773 + "ROOT",
3774 + "DISTDIR",
3775 + "EPREFIX",
3776 + "ED",
3777 + "EROOT",
3778 + "SYSROOT",
3779 + "ESYSROOT",
3780 + "BROOT",
3781 + "MERGE_TYPE",
3782 + "REPLACING_VERSIONS",
3783 + "REPLACED_BY_VERSION",
3784 + ]
3785 + )
3786
3787 def feed(self, pkg):
3788 for node in pkg.global_query(bash.var_assign_query):
3789 - name = pkg.node_str(node.child_by_field_name('name'))
3790 + name = pkg.node_str(node.child_by_field_name("name"))
3791 if name in self.readonly_vars:
3792 call = pkg.node_str(node)
3793 lineno, colno = node.start_point
3794 @@ -862,7 +922,7 @@ class ReadonlyVariableCheck(Check):
3795 class VariableScope(results.BaseLinesResult, results.AliasResult, results.Warning):
3796 """Variable used outside its defined scope."""
3797
3798 - _name = 'VariableScope'
3799 + _name = "VariableScope"
3800
3801 def __init__(self, variable, func, **kwargs):
3802 super().__init__(**kwargs)
3803 @@ -871,7 +931,7 @@ class VariableScope(results.BaseLinesResult, results.AliasResult, results.Warnin
3804
3805 @property
3806 def desc(self):
3807 - return f'variable {self.variable!r} used in {self.func!r} {self.lines_str}'
3808 + return f"variable {self.variable!r} used in {self.func!r} {self.lines_str}"
3809
3810
3811 class EbuildVariableScope(VariableScope, results.VersionResult):
3812 @@ -885,28 +945,30 @@ class VariableScopeCheck(Check):
3813 known_results = frozenset([EbuildVariableScope])
3814
3815 # see https://projects.gentoo.org/pms/7/pms.html#x1-10900011.1
3816 - variable_map = ImmutableDict({
3817 - 'A': ('src_', 'pkg_nofetch'),
3818 - 'AA': ('src_', 'pkg_nofetch'),
3819 - 'FILESDIR': 'src_',
3820 - 'DISTDIR': 'src_',
3821 - 'WORKDIR': 'src_',
3822 - 'S': 'src_',
3823 - 'PORTDIR': 'src_',
3824 - 'ECLASSDIR': 'src_',
3825 - 'ROOT': 'pkg_',
3826 - 'EROOT': 'pkg_',
3827 - 'SYSROOT': ('src_', 'pkg_setup'),
3828 - 'ESYSROOT': ('src_', 'pkg_setup'),
3829 - 'BROOT': ('src_', 'pkg_setup'),
3830 - 'D': ('src_install', 'pkg_preinst', 'pkg_postint'),
3831 - 'ED': ('src_install', 'pkg_preinst', 'pkg_postint'),
3832 - 'DESTTREE': 'src_install',
3833 - 'INSDESTTREE': 'src_install',
3834 - 'MERGE_TYPE': 'pkg_',
3835 - 'REPLACING_VERSIONS': 'pkg_',
3836 - 'REPLACED_BY_VERSION': ('pkg_prerm', 'pkg_postrm'),
3837 - })
3838 + variable_map = ImmutableDict(
3839 + {
3840 + "A": ("src_", "pkg_nofetch"),
3841 + "AA": ("src_", "pkg_nofetch"),
3842 + "FILESDIR": "src_",
3843 + "DISTDIR": "src_",
3844 + "WORKDIR": "src_",
3845 + "S": "src_",
3846 + "PORTDIR": "src_",
3847 + "ECLASSDIR": "src_",
3848 + "ROOT": "pkg_",
3849 + "EROOT": "pkg_",
3850 + "SYSROOT": ("src_", "pkg_setup"),
3851 + "ESYSROOT": ("src_", "pkg_setup"),
3852 + "BROOT": ("src_", "pkg_setup"),
3853 + "D": ("src_install", "pkg_preinst", "pkg_postint"),
3854 + "ED": ("src_install", "pkg_preinst", "pkg_postint"),
3855 + "DESTTREE": "src_install",
3856 + "INSDESTTREE": "src_install",
3857 + "MERGE_TYPE": "pkg_",
3858 + "REPLACING_VERSIONS": "pkg_",
3859 + "REPLACED_BY_VERSION": ("pkg_prerm", "pkg_postrm"),
3860 + }
3861 + )
3862
3863 # mapping of bad variables for each EAPI phase function
3864 scoped_vars = {}
3865 @@ -919,7 +981,7 @@ class VariableScopeCheck(Check):
3866
3867 def feed(self, pkg):
3868 for func_node, _ in bash.func_query.captures(pkg.tree.root_node):
3869 - func_name = pkg.node_str(func_node.child_by_field_name('name'))
3870 + func_name = pkg.node_str(func_node.child_by_field_name("name"))
3871 if variables := self.scoped_vars[pkg.eapi].get(func_name):
3872 usage = defaultdict(set)
3873 for var_node, _ in bash.var_query.captures(func_node):
3874 @@ -951,23 +1013,23 @@ class RedundantDodirCheck(Check):
3875
3876 def __init__(self, *args):
3877 super().__init__(*args)
3878 - cmds = r'|'.join(('insinto', 'exeinto', 'docinto'))
3879 - self.cmds_regex = re.compile(rf'^\s*(?P<cmd>({cmds}))\s+(?P<path>\S+)')
3880 - self.dodir_regex = re.compile(r'^\s*(?P<call>dodir\s+(?P<path>\S+))')
3881 + cmds = r"|".join(("insinto", "exeinto", "docinto"))
3882 + self.cmds_regex = re.compile(rf"^\s*(?P<cmd>({cmds}))\s+(?P<path>\S+)")
3883 + self.dodir_regex = re.compile(r"^\s*(?P<call>dodir\s+(?P<path>\S+))")
3884
3885 def feed(self, pkg):
3886 lines = enumerate(pkg.lines, 1)
3887 for lineno, line in lines:
3888 line = line.strip()
3889 - if not line or line[0] == '#':
3890 + if not line or line[0] == "#":
3891 continue
3892 if dodir := self.dodir_regex.match(line):
3893 lineno, line = next(lines)
3894 if cmd := self.cmds_regex.match(line):
3895 - if dodir.group('path') == cmd.group('path'):
3896 + if dodir.group("path") == cmd.group("path"):
3897 yield RedundantDodir(
3898 - cmd.group('cmd'), line=dodir.group('call'),
3899 - lineno=lineno - 1, pkg=pkg)
3900 + cmd.group("cmd"), line=dodir.group("call"), lineno=lineno - 1, pkg=pkg
3901 + )
3902
3903
3904 class UnquotedVariable(results.BaseLinesResult, results.AliasResult, results.Warning):
3905 @@ -977,7 +1039,7 @@ class UnquotedVariable(results.BaseLinesResult, results.AliasResult, results.War
3906 contexts.
3907 """
3908
3909 - _name = 'UnquotedVariable'
3910 + _name = "UnquotedVariable"
3911
3912 def __init__(self, variable, **kwargs):
3913 super().__init__(**kwargs)
3914 @@ -985,7 +1047,7 @@ class UnquotedVariable(results.BaseLinesResult, results.AliasResult, results.War
3915
3916 @property
3917 def desc(self):
3918 - return f'unquoted variable {self.variable} {self.lines_str}'
3919 + return f"unquoted variable {self.variable} {self.lines_str}"
3920
3921
3922 class EbuildUnquotedVariable(UnquotedVariable, results.VersionResult):
3923 @@ -997,48 +1059,65 @@ class EclassUnquotedVariable(UnquotedVariable, results.EclassResult):
3924
3925 @property
3926 def desc(self):
3927 - return f'{self.eclass}: {super().desc}'
3928 + return f"{self.eclass}: {super().desc}"
3929
3930
3931 class _UnquotedVariablesCheck(Check):
3932 """Scan files for variables that should be quoted like D, FILESDIR, etc."""
3933
3934 - message_commands = frozenset({
3935 - "die", "echo", "eerror", "einfo", "elog", "eqawarn", "ewarn", ":"
3936 - })
3937 - var_names = frozenset({
3938 - "D", "DISTDIR", "FILESDIR", "S", "T", "ROOT", "BROOT", "WORKDIR", "ED",
3939 - "EPREFIX", "EROOT", "SYSROOT", "ESYSROOT", "TMPDIR", "HOME",
3940 - # variables for multibuild.eclass
3941 - "BUILD_DIR",
3942 - })
3943 -
3944 - node_types_ok = frozenset({
3945 - # Variable is sitting in a string, all good
3946 - 'string',
3947 - # Variable is part of a shell assignment, and does not need to be
3948 - # quoted. for example S=${WORKDIR}/${PN} is ok.
3949 - 'variable_assignment',
3950 - # Variable is being used in a unset command.
3951 - 'unset_command',
3952 - # Variable is part of declaring variables, and does not need to be
3953 - # quoted. for example local TMPDIR is ok.
3954 - 'declaration_command',
3955 - # Variable sits inside a [[ ]] test command and it's OK not to be quoted
3956 - 'test_command',
3957 - # Variable is being used in a heredoc body, no need to specify quotes.
3958 - 'heredoc_body',
3959 - })
3960 + message_commands = frozenset(
3961 + {"die", "echo", "eerror", "einfo", "elog", "eqawarn", "ewarn", ":"}
3962 + )
3963 + var_names = frozenset(
3964 + {
3965 + "D",
3966 + "DISTDIR",
3967 + "FILESDIR",
3968 + "S",
3969 + "T",
3970 + "ROOT",
3971 + "BROOT",
3972 + "WORKDIR",
3973 + "ED",
3974 + "EPREFIX",
3975 + "EROOT",
3976 + "SYSROOT",
3977 + "ESYSROOT",
3978 + "TMPDIR",
3979 + "HOME",
3980 + # variables for multibuild.eclass
3981 + "BUILD_DIR",
3982 + }
3983 + )
3984 +
3985 + node_types_ok = frozenset(
3986 + {
3987 + # Variable is sitting in a string, all good
3988 + "string",
3989 + # Variable is part of a shell assignment, and does not need to be
3990 + # quoted. for example S=${WORKDIR}/${PN} is ok.
3991 + "variable_assignment",
3992 + # Variable is being used in a unset command.
3993 + "unset_command",
3994 + # Variable is part of declaring variables, and does not need to be
3995 + # quoted. for example local TMPDIR is ok.
3996 + "declaration_command",
3997 + # Variable sits inside a [[ ]] test command and it's OK not to be quoted
3998 + "test_command",
3999 + # Variable is being used in a heredoc body, no need to specify quotes.
4000 + "heredoc_body",
4001 + }
4002 + )
4003
4004 def _var_needs_quotes(self, pkg, node):
4005 pnode = node.parent
4006 while pnode != node:
4007 if pnode.type in self.node_types_ok:
4008 return False
4009 - elif pnode.type == 'command':
4010 - cmd = pkg.node_str(pnode.child_by_field_name('name'))
4011 + elif pnode.type == "command":
4012 + cmd = pkg.node_str(pnode.child_by_field_name("name"))
4013 return cmd not in self.message_commands
4014 - elif pnode.type in 'array':
4015 + elif pnode.type in "array":
4016 # Variable is sitting unquoted in an array
4017 return True
4018 pnode = pnode.parent
4019 @@ -1058,7 +1137,7 @@ class _UnquotedVariablesCheck(Check):
4020 if var_name in self.var_names:
4021 if self._var_needs_quotes(item, var_node):
4022 lineno, _ = var_node.start_point
4023 - hits[var_name].add(lineno+1)
4024 + hits[var_name].add(lineno + 1)
4025 for var_name, lines in hits.items():
4026 yield var_name, sorted(lines)
4027
4028 @@ -1094,7 +1173,7 @@ class ExcessiveLineLength(results.LinesResult, results.Style):
4029
4030 @property
4031 def desc(self):
4032 - return f'excessive line length (over {self.line_length} characters) {self.lines_str}'
4033 + return f"excessive line length (over {self.line_length} characters) {self.lines_str}"
4034
4035
4036 class LineLengthCheck(Check):
4037 @@ -1105,8 +1184,8 @@ class LineLengthCheck(Check):
4038
4039 def __init__(self, options, **kwargs):
4040 super().__init__(options, **kwargs)
4041 - self.exception = re.compile(r'\s*(?:DESCRIPTION|KEYWORDS|IUSE)=')
4042 - str_length = f'[^\'\"]{{{ExcessiveLineLength.word_length},}}'
4043 + self.exception = re.compile(r"\s*(?:DESCRIPTION|KEYWORDS|IUSE)=")
4044 + str_length = f"[^'\"]{{{ExcessiveLineLength.word_length},}}"
4045 self.long_string = re.compile(rf'"{str_length}"|\'{str_length}\'')
4046
4047 def feed(self, pkg):
4048 @@ -1115,11 +1194,11 @@ class LineLengthCheck(Check):
4049 if len(line) <= ExcessiveLineLength.line_length:
4050 continue
4051 if self.exception.match(line):
4052 - continue # exception variables which are fine to be long
4053 + continue # exception variables which are fine to be long
4054 if max(map(len, line.split())) > ExcessiveLineLength.word_length:
4055 - continue # if one part of the line is very long word
4056 + continue # if one part of the line is very long word
4057 if self.long_string.search(line):
4058 - continue # skip lines with long quoted string
4059 + continue # skip lines with long quoted string
4060 lines.append(lineno)
4061 if lines:
4062 yield ExcessiveLineLength(lines=lines, pkg=pkg)
4063 @@ -1134,7 +1213,7 @@ class InstallCompressedManpage(results.LineResult, results.Warning):
4064
4065 @property
4066 def desc(self):
4067 - return f'line {self.lineno}: compressed manpage {self.line!r} passed to {self.func}'
4068 + return f"line {self.lineno}: compressed manpage {self.line!r} passed to {self.func}"
4069
4070
4071 class InstallCompressedInfo(results.LineResult, results.Warning):
4072 @@ -1146,7 +1225,7 @@ class InstallCompressedInfo(results.LineResult, results.Warning):
4073
4074 @property
4075 def desc(self):
4076 - return f'line {self.lineno}: compressed info {self.line!r} passed to {self.func}'
4077 + return f"line {self.lineno}: compressed info {self.line!r} passed to {self.func}"
4078
4079
4080 class DoCompressedFilesCheck(Check):
4081 @@ -1155,23 +1234,27 @@ class DoCompressedFilesCheck(Check):
4082 _source = sources.EbuildParseRepoSource
4083 known_results = frozenset([InstallCompressedManpage, InstallCompressedInfo])
4084
4085 - compresion_extentions = ('.Z', '.gz', '.bz2', '.lzma', '.lz', '.lzo', '.lz4', '.xz', '.zst')
4086 - functions = ImmutableDict({
4087 - 'doman': InstallCompressedManpage,
4088 - 'newman': InstallCompressedManpage,
4089 - 'doinfo': InstallCompressedInfo,
4090 - })
4091 + compresion_extentions = (".Z", ".gz", ".bz2", ".lzma", ".lz", ".lzo", ".lz4", ".xz", ".zst")
4092 + functions = ImmutableDict(
4093 + {
4094 + "doman": InstallCompressedManpage,
4095 + "newman": InstallCompressedManpage,
4096 + "doinfo": InstallCompressedInfo,
4097 + }
4098 + )
4099
4100 def feed(self, pkg):
4101 for node, _ in bash.cmd_query.captures(pkg.tree.root_node):
4102 - call_name = pkg.node_str(node.child_by_field_name('name'))
4103 + call_name = pkg.node_str(node.child_by_field_name("name"))
4104 if call_name not in self.functions:
4105 continue
4106 for arg in node.children[1:]:
4107 - arg_name = pkg.node_str(arg).strip('\'\"')
4108 + arg_name = pkg.node_str(arg).strip("'\"")
4109 lineno, _ = arg.start_point
4110 if arg_name.endswith(self.compresion_extentions):
4111 - yield self.functions[call_name](call_name, lineno=lineno+1, line=arg_name, pkg=pkg)
4112 + yield self.functions[call_name](
4113 + call_name, lineno=lineno + 1, line=arg_name, pkg=pkg
4114 + )
4115
4116
4117 class NonPosixHeadTailUsage(results.LineResult, results.Warning):
4118 @@ -1183,13 +1266,14 @@ class NonPosixHeadTailUsage(results.LineResult, results.Warning):
4119
4120 .. [#] https://devmanual.gentoo.org/tools-reference/head-and-tail/index.html
4121 """
4122 +
4123 def __init__(self, command, **kwargs):
4124 super().__init__(**kwargs)
4125 self.command = command
4126
4127 @property
4128 def desc(self):
4129 - return f'line {self.lineno}: non-posix usage of {self.command!r}: {self.line!r}'
4130 + return f"line {self.lineno}: non-posix usage of {self.command!r}: {self.line!r}"
4131
4132
4133 class NonPosixCheck(Check):
4134 @@ -1200,21 +1284,23 @@ class NonPosixCheck(Check):
4135
4136 def __init__(self, options, **kwargs):
4137 super().__init__(options, **kwargs)
4138 - self.re_head_tail = re.compile(r'[+-]\d+')
4139 + self.re_head_tail = re.compile(r"[+-]\d+")
4140
4141 def check_head_tail(self, pkg, call_node, call_name):
4142 - prev_arg = ''
4143 + prev_arg = ""
4144 for arg in map(pkg.node_str, call_node.children[1:]):
4145 - if (self.re_head_tail.match(arg) and
4146 - not (prev_arg.startswith('-') and prev_arg.endswith(('n', 'c')))):
4147 + if self.re_head_tail.match(arg) and not (
4148 + prev_arg.startswith("-") and prev_arg.endswith(("n", "c"))
4149 + ):
4150 lineno, _ = call_node.start_point
4151 - yield NonPosixHeadTailUsage(f'{call_name} {arg}',
4152 - lineno=lineno+1, line=pkg.node_str(call_node), pkg=pkg)
4153 + yield NonPosixHeadTailUsage(
4154 + f"{call_name} {arg}", lineno=lineno + 1, line=pkg.node_str(call_node), pkg=pkg
4155 + )
4156 break
4157 prev_arg = arg
4158
4159 def feed(self, pkg):
4160 for call_node, _ in bash.cmd_query.captures(pkg.tree.root_node):
4161 - call_name = pkg.node_str(call_node.child_by_field_name('name'))
4162 - if call_name in ('head', 'tail'):
4163 + call_name = pkg.node_str(call_node.child_by_field_name("name"))
4164 + if call_name in ("head", "tail"):
4165 yield from self.check_head_tail(pkg, call_node, call_name)
4166
4167 diff --git a/src/pkgcheck/checks/dropped_keywords.py b/src/pkgcheck/checks/dropped_keywords.py
4168 index 87ec8cf1..613076ef 100644
4169 --- a/src/pkgcheck/checks/dropped_keywords.py
4170 +++ b/src/pkgcheck/checks/dropped_keywords.py
4171 @@ -13,7 +13,7 @@ class DroppedKeywords(results.VersionResult, results.Warning):
4172
4173 @property
4174 def desc(self):
4175 - return ', '.join(self.arches)
4176 + return ", ".join(self.arches)
4177
4178
4179 class DroppedKeywordsCheck(Check):
4180 @@ -36,7 +36,7 @@ class DroppedKeywordsCheck(Check):
4181 for pkg in pkgset:
4182 pkg_arches = {x.lstrip("~-") for x in pkg.keywords}
4183 # special keywords -*, *, and ~* override all dropped keywords
4184 - if '*' in pkg_arches:
4185 + if "*" in pkg_arches:
4186 drops = set()
4187 else:
4188 drops = previous_arches.difference(pkg_arches) | seen_arches.difference(pkg_arches)
4189 @@ -45,7 +45,7 @@ class DroppedKeywordsCheck(Check):
4190 changes[key].append(pkg)
4191 if changes:
4192 # ignore missing arches on previous versions that were re-enabled
4193 - disabled_arches = {x.lstrip("-") for x in pkg.keywords if x.startswith('-')}
4194 + disabled_arches = {x.lstrip("-") for x in pkg.keywords if x.startswith("-")}
4195 adds = pkg_arches.difference(previous_arches) - disabled_arches
4196 for key in adds:
4197 if key in changes:
4198
4199 diff --git a/src/pkgcheck/checks/eclass.py b/src/pkgcheck/checks/eclass.py
4200 index d48df115..5c4f205f 100644
4201 --- a/src/pkgcheck/checks/eclass.py
4202 +++ b/src/pkgcheck/checks/eclass.py
4203 @@ -24,10 +24,10 @@ class DeprecatedEclass(results.VersionResult, results.Warning):
4204 @property
4205 def desc(self):
4206 if self.replacement is not None:
4207 - replacement = f'migrate to {self.replacement}'
4208 + replacement = f"migrate to {self.replacement}"
4209 else:
4210 - replacement = 'no replacement'
4211 - return f'uses deprecated eclass: {self.eclass} ({replacement})'
4212 + replacement = "no replacement"
4213 + return f"uses deprecated eclass: {self.eclass} ({replacement})"
4214
4215
4216 class DeprecatedEclassVariable(results.LineResult, results.Warning):
4217 @@ -41,10 +41,10 @@ class DeprecatedEclassVariable(results.LineResult, results.Warning):
4218 @property
4219 def desc(self):
4220 if self.replacement is not None:
4221 - replacement = f'migrate to {self.replacement}'
4222 + replacement = f"migrate to {self.replacement}"
4223 else:
4224 - replacement = 'no replacement'
4225 - return f'uses deprecated variable on line {self.lineno}: {self.variable} ({replacement})'
4226 + replacement = "no replacement"
4227 + return f"uses deprecated variable on line {self.lineno}: {self.variable} ({replacement})"
4228
4229
4230 class DeprecatedEclassFunction(results.LineResult, results.Warning):
4231 @@ -58,10 +58,10 @@ class DeprecatedEclassFunction(results.LineResult, results.Warning):
4232 @property
4233 def desc(self):
4234 if self.replacement is not None:
4235 - replacement = f'migrate to {self.replacement}'
4236 + replacement = f"migrate to {self.replacement}"
4237 else:
4238 - replacement = 'no replacement'
4239 - return f'uses deprecated function on line {self.lineno}: {self.function} ({replacement})'
4240 + replacement = "no replacement"
4241 + return f"uses deprecated function on line {self.lineno}: {self.function} ({replacement})"
4242
4243
4244 class DuplicateEclassInherit(results.LineResult, results.Style):
4245 @@ -79,7 +79,7 @@ class DuplicateEclassInherit(results.LineResult, results.Style):
4246
4247 @property
4248 def desc(self):
4249 - return f'duplicate eclass inherit {self.eclass!r}, line {self.lineno}'
4250 + return f"duplicate eclass inherit {self.eclass!r}, line {self.lineno}"
4251
4252
4253 class MisplacedEclassVar(results.LineResult, results.Error):
4254 @@ -95,17 +95,22 @@ class MisplacedEclassVar(results.LineResult, results.Error):
4255
4256 @property
4257 def desc(self):
4258 - return f'invalid pre-inherit placement, line {self.lineno}: {self.line!r}'
4259 + return f"invalid pre-inherit placement, line {self.lineno}: {self.line!r}"
4260
4261
4262 class EclassUsageCheck(Check):
4263 """Scan packages for various eclass-related issues."""
4264
4265 _source = sources.EbuildParseRepoSource
4266 - known_results = frozenset([
4267 - DeprecatedEclass, DeprecatedEclassVariable, DeprecatedEclassFunction,
4268 - DuplicateEclassInherit, MisplacedEclassVar,
4269 - ])
4270 + known_results = frozenset(
4271 + [
4272 + DeprecatedEclass,
4273 + DeprecatedEclassVariable,
4274 + DeprecatedEclassFunction,
4275 + DuplicateEclassInherit,
4276 + MisplacedEclassVar,
4277 + ]
4278 + )
4279 required_addons = (addons.eclass.EclassAddon,)
4280
4281 def __init__(self, *args, eclass_addon):
4282 @@ -126,12 +131,11 @@ class EclassUsageCheck(Check):
4283 # scan for any misplaced @PRE_INHERIT variables
4284 if pre_inherits:
4285 for node, _ in bash.var_assign_query.captures(pkg.tree.root_node):
4286 - var_name = pkg.node_str(node.child_by_field_name('name'))
4287 + var_name = pkg.node_str(node.child_by_field_name("name"))
4288 lineno, _colno = node.start_point
4289 if var_name in pre_inherits and lineno > pre_inherits[var_name]:
4290 line = pkg.node_str(node)
4291 - yield MisplacedEclassVar(
4292 - var_name, line=line, lineno=lineno+1, pkg=pkg)
4293 + yield MisplacedEclassVar(var_name, line=line, lineno=lineno + 1, pkg=pkg)
4294
4295 def check_deprecated_variables(self, pkg, inherits):
4296 """Check for usage of @DEPRECATED variables or functions."""
4297 @@ -154,7 +158,8 @@ class EclassUsageCheck(Check):
4298 if not isinstance(replacement, str):
4299 replacement = None
4300 yield DeprecatedEclassVariable(
4301 - var_name, replacement, line=line, lineno=lineno+1, pkg=pkg)
4302 + var_name, replacement, line=line, lineno=lineno + 1, pkg=pkg
4303 + )
4304
4305 def check_deprecated_functions(self, pkg, inherits):
4306 """Check for usage of @DEPRECATED variables or functions."""
4307 @@ -169,7 +174,7 @@ class EclassUsageCheck(Check):
4308 # scan for usage of @DEPRECATED functions
4309 if deprecated:
4310 for node, _ in bash.cmd_query.captures(pkg.tree.root_node):
4311 - func_name = pkg.node_str(node.child_by_field_name('name'))
4312 + func_name = pkg.node_str(node.child_by_field_name("name"))
4313 lineno, _colno = node.start_point
4314 if func_name in deprecated:
4315 line = pkg.node_str(node)
4316 @@ -177,15 +182,16 @@ class EclassUsageCheck(Check):
4317 if not isinstance(replacement, str):
4318 replacement = None
4319 yield DeprecatedEclassFunction(
4320 - func_name, replacement, line=line, lineno=lineno+1, pkg=pkg)
4321 + func_name, replacement, line=line, lineno=lineno + 1, pkg=pkg
4322 + )
4323
4324 def feed(self, pkg):
4325 if pkg.inherit:
4326 inherited = set()
4327 inherits = []
4328 for node, _ in bash.cmd_query.captures(pkg.tree.root_node):
4329 - name = pkg.node_str(node.child_by_field_name('name'))
4330 - if name == 'inherit':
4331 + name = pkg.node_str(node.child_by_field_name("name"))
4332 + if name == "inherit":
4333 call = pkg.node_str(node)
4334 # filter out line continuations and conditional inherits
4335 if eclasses := [x for x in call.split()[1:] if x in pkg.inherit]:
4336 @@ -198,7 +204,8 @@ class EclassUsageCheck(Check):
4337 inherited.add(eclass)
4338 else:
4339 yield DuplicateEclassInherit(
4340 - eclass, line=call, lineno=lineno+1, pkg=pkg)
4341 + eclass, line=call, lineno=lineno + 1, pkg=pkg
4342 + )
4343
4344 # verify @PRE_INHERIT variable placement
4345 yield from self.check_pre_inherits(pkg, inherits)
4346 @@ -218,7 +225,7 @@ class EclassVariableScope(VariableScope, results.EclassResult):
4347
4348 @property
4349 def desc(self):
4350 - return f'{self.eclass}: {super().desc}'
4351 + return f"{self.eclass}: {super().desc}"
4352
4353
4354 class EclassExportFuncsBeforeInherit(results.EclassResult, results.Error):
4355 @@ -235,8 +242,10 @@ class EclassExportFuncsBeforeInherit(results.EclassResult, results.Error):
4356
4357 @property
4358 def desc(self):
4359 - return (f'{self.eclass}: EXPORT_FUNCTIONS (line {self.export_line}) called before inherit (line '
4360 - f'{self.inherit_line})')
4361 + return (
4362 + f"{self.eclass}: EXPORT_FUNCTIONS (line {self.export_line}) called before inherit (line "
4363 + f"{self.inherit_line})"
4364 + )
4365
4366
4367 class EclassParseCheck(Check):
4368 @@ -261,12 +270,12 @@ class EclassParseCheck(Check):
4369 return variables
4370
4371 def feed(self, eclass):
4372 - func_prefix = f'{eclass.name}_'
4373 + func_prefix = f"{eclass.name}_"
4374 for func_node, _ in bash.func_query.captures(eclass.tree.root_node):
4375 - func_name = eclass.node_str(func_node.child_by_field_name('name'))
4376 + func_name = eclass.node_str(func_node.child_by_field_name("name"))
4377 if not func_name.startswith(func_prefix):
4378 continue
4379 - phase = func_name[len(func_prefix):]
4380 + phase = func_name[len(func_prefix) :]
4381 if variables := self.eclass_phase_vars(eclass, phase):
4382 usage = defaultdict(set)
4383 for var_node, _ in bash.var_query.captures(func_node):
4384 @@ -275,17 +284,20 @@ class EclassParseCheck(Check):
4385 lineno, colno = var_node.start_point
4386 usage[var_name].add(lineno + 1)
4387 for var, lines in sorted(usage.items()):
4388 - yield EclassVariableScope(var, func_name, lines=sorted(lines), eclass=eclass.name)
4389 + yield EclassVariableScope(
4390 + var, func_name, lines=sorted(lines), eclass=eclass.name
4391 + )
4392
4393 export_funcs_called = None
4394 for node in eclass.global_query(bash.cmd_query):
4395 call = eclass.node_str(node)
4396 - if call.startswith('EXPORT_FUNCTIONS'):
4397 + if call.startswith("EXPORT_FUNCTIONS"):
4398 export_funcs_called = node.start_point[0] + 1
4399 - elif call.startswith('inherit'):
4400 + elif call.startswith("inherit"):
4401 if export_funcs_called is not None:
4402 - yield EclassExportFuncsBeforeInherit(export_funcs_called, node.start_point[0] + 1,
4403 - eclass=eclass.name)
4404 + yield EclassExportFuncsBeforeInherit(
4405 + export_funcs_called, node.start_point[0] + 1, eclass=eclass.name
4406 + )
4407 break
4408
4409
4410 @@ -299,7 +311,7 @@ class EclassBashSyntaxError(results.EclassResult, results.Error):
4411
4412 @property
4413 def desc(self):
4414 - return f'{self.eclass}: bash syntax error, line {self.lineno}: {self.error}'
4415 + return f"{self.eclass}: bash syntax error, line {self.lineno}: {self.error}"
4416
4417
4418 class EclassDocError(results.EclassResult, results.Warning):
4419 @@ -316,7 +328,7 @@ class EclassDocError(results.EclassResult, results.Warning):
4420
4421 @property
4422 def desc(self):
4423 - return f'{self.eclass}: failed parsing eclass docs: {self.error}'
4424 + return f"{self.eclass}: failed parsing eclass docs: {self.error}"
4425
4426
4427 class EclassDocMissingFunc(results.EclassResult, results.Warning):
4428 @@ -329,8 +341,8 @@ class EclassDocMissingFunc(results.EclassResult, results.Warning):
4429 @property
4430 def desc(self):
4431 s = pluralism(self.functions)
4432 - funcs = ', '.join(self.functions)
4433 - return f'{self.eclass}: undocumented function{s}: {funcs}'
4434 + funcs = ", ".join(self.functions)
4435 + return f"{self.eclass}: undocumented function{s}: {funcs}"
4436
4437
4438 class EclassDocMissingVar(results.EclassResult, results.Warning):
4439 @@ -348,16 +360,17 @@ class EclassDocMissingVar(results.EclassResult, results.Warning):
4440 @property
4441 def desc(self):
4442 s = pluralism(self.variables)
4443 - variables = ', '.join(self.variables)
4444 - return f'{self.eclass}: undocumented variable{s}: {variables}'
4445 + variables = ", ".join(self.variables)
4446 + return f"{self.eclass}: undocumented variable{s}: {variables}"
4447
4448
4449 class EclassCheck(Check):
4450 """Scan eclasses for various issues."""
4451
4452 _source = sources.EclassRepoSource
4453 - known_results = frozenset([
4454 - EclassBashSyntaxError, EclassDocError, EclassDocMissingFunc, EclassDocMissingVar])
4455 + known_results = frozenset(
4456 + [EclassBashSyntaxError, EclassDocError, EclassDocMissingFunc, EclassDocMissingVar]
4457 + )
4458
4459 def __init__(self, *args):
4460 super().__init__(*args)
4461 @@ -370,37 +383,44 @@ class EclassCheck(Check):
4462 def feed(self, eclass):
4463 # check for eclass bash syntax errors
4464 p = subprocess.run(
4465 - ['bash', '-n', shlex.quote(eclass.path)],
4466 - stderr=subprocess.PIPE, stdout=subprocess.DEVNULL,
4467 - env={'LC_ALL': 'C'}, encoding='utf8')
4468 + ["bash", "-n", shlex.quote(eclass.path)],
4469 + stderr=subprocess.PIPE,
4470 + stdout=subprocess.DEVNULL,
4471 + env={"LC_ALL": "C"},
4472 + encoding="utf8",
4473 + )
4474 if p.returncode != 0 and p.stderr:
4475 lineno = 0
4476 error = []
4477 for line in p.stderr.splitlines():
4478 - path, line, msg = line.split(': ', 2)
4479 + path, line, msg = line.split(": ", 2)
4480 lineno = line[5:]
4481 - error.append(msg.strip('\n'))
4482 - error = ': '.join(error)
4483 + error.append(msg.strip("\n"))
4484 + error = ": ".join(error)
4485 yield EclassBashSyntaxError(lineno, error, eclass=eclass)
4486
4487 report_logs = (
4488 - LogMap('pkgcore.log.logger.error', partial(EclassDocError, eclass=eclass)),
4489 - LogMap('pkgcore.log.logger.warning', partial(EclassDocError, eclass=eclass)),
4490 + LogMap("pkgcore.log.logger.error", partial(EclassDocError, eclass=eclass)),
4491 + LogMap("pkgcore.log.logger.warning", partial(EclassDocError, eclass=eclass)),
4492 )
4493
4494 with LogReports(*report_logs) as log_reports:
4495 eclass_obj = EclassDoc(eclass.path, sourced=True)
4496 yield from log_reports
4497
4498 - phase_funcs = {f'{eclass}_{phase}' for phase in self.known_phases}
4499 + phase_funcs = {f"{eclass}_{phase}" for phase in self.known_phases}
4500 funcs_missing_docs = (
4501 - eclass_obj.exported_function_names - phase_funcs - eclass_obj.function_names)
4502 + eclass_obj.exported_function_names - phase_funcs - eclass_obj.function_names
4503 + )
4504 if funcs_missing_docs:
4505 yield EclassDocMissingFunc(sorted(funcs_missing_docs), eclass=eclass)
4506 # ignore underscore-prefixed vars (mostly used for avoiding multiple inherits)
4507 - exported_vars = {x for x in eclass_obj.exported_variable_names if not x.startswith('_')}
4508 + exported_vars = {x for x in eclass_obj.exported_variable_names if not x.startswith("_")}
4509 vars_missing_docs = (
4510 - exported_vars - self.eclass_keys
4511 - - eclass_obj.variable_names - eclass_obj.function_variable_names)
4512 + exported_vars
4513 + - self.eclass_keys
4514 + - eclass_obj.variable_names
4515 + - eclass_obj.function_variable_names
4516 + )
4517 if vars_missing_docs:
4518 yield EclassDocMissingVar(sorted(vars_missing_docs), eclass=eclass)
4519
4520 diff --git a/src/pkgcheck/checks/git.py b/src/pkgcheck/checks/git.py
4521 index c06c8278..a54ce61e 100644
4522 --- a/src/pkgcheck/checks/git.py
4523 +++ b/src/pkgcheck/checks/git.py
4524 @@ -56,7 +56,7 @@ class GitCommitsSource(sources.Source):
4525 class IncorrectCopyright(results.AliasResult, results.Warning):
4526 """Changed file with incorrect copyright date."""
4527
4528 - _name = 'IncorrectCopyright'
4529 + _name = "IncorrectCopyright"
4530
4531 def __init__(self, year, line, **kwargs):
4532 super().__init__(**kwargs)
4533 @@ -65,7 +65,7 @@ class IncorrectCopyright(results.AliasResult, results.Warning):
4534
4535 @property
4536 def desc(self):
4537 - return f'incorrect copyright year {self.year}: {self.line!r}'
4538 + return f"incorrect copyright year {self.year}: {self.line!r}"
4539
4540
4541 class EbuildIncorrectCopyright(IncorrectCopyright, results.VersionResult):
4542 @@ -82,8 +82,8 @@ class DirectStableKeywords(results.VersionResult, results.Error):
4543 @property
4544 def desc(self):
4545 s = pluralism(self.keywords)
4546 - keywords = ', '.join(self.keywords)
4547 - return f'directly committed with stable keyword{s}: [ {keywords} ]'
4548 + keywords = ", ".join(self.keywords)
4549 + return f"directly committed with stable keyword{s}: [ {keywords} ]"
4550
4551
4552 class _DroppedKeywords(results.PackageResult):
4553 @@ -99,23 +99,22 @@ class _DroppedKeywords(results.PackageResult):
4554 @property
4555 def desc(self):
4556 s = pluralism(self.keywords)
4557 - keywords = ', '.join(self.keywords)
4558 + keywords = ", ".join(self.keywords)
4559 return (
4560 - f'commit {self.commit} (or later) dropped {self._status} '
4561 - f'keyword{s}: [ {keywords} ]'
4562 + f"commit {self.commit} (or later) dropped {self._status} " f"keyword{s}: [ {keywords} ]"
4563 )
4564
4565
4566 class DroppedUnstableKeywords(_DroppedKeywords, results.Error):
4567 """Unstable keywords dropped from package."""
4568
4569 - _status = 'unstable'
4570 + _status = "unstable"
4571
4572
4573 class DroppedStableKeywords(_DroppedKeywords, results.Error):
4574 """Stable keywords dropped from package."""
4575
4576 - _status = 'stable'
4577 + _status = "stable"
4578
4579
4580 class DirectNoMaintainer(results.PackageResult, results.Error):
4581 @@ -123,7 +122,7 @@ class DirectNoMaintainer(results.PackageResult, results.Error):
4582
4583 @property
4584 def desc(self):
4585 - return 'directly committed with no package maintainer'
4586 + return "directly committed with no package maintainer"
4587
4588
4589 class RdependChange(results.VersionResult, results.Warning):
4590 @@ -131,7 +130,7 @@ class RdependChange(results.VersionResult, results.Warning):
4591
4592 @property
4593 def desc(self):
4594 - return 'RDEPEND modified without revbump'
4595 + return "RDEPEND modified without revbump"
4596
4597
4598 class MissingSlotmove(results.VersionResult, results.Error):
4599 @@ -150,7 +149,7 @@ class MissingSlotmove(results.VersionResult, results.Error):
4600
4601 @property
4602 def desc(self):
4603 - return f'changed SLOT: {self.old} -> {self.new}'
4604 + return f"changed SLOT: {self.old} -> {self.new}"
4605
4606
4607 class MissingMove(results.PackageResult, results.Error):
4608 @@ -169,7 +168,7 @@ class MissingMove(results.PackageResult, results.Error):
4609
4610 @property
4611 def desc(self):
4612 - return f'renamed package: {self.old} -> {self.new}'
4613 + return f"renamed package: {self.old} -> {self.new}"
4614
4615
4616 class _RemovalRepo(UnconfiguredTree):
4617 @@ -177,17 +176,17 @@ class _RemovalRepo(UnconfiguredTree):
4618
4619 def __init__(self, repo):
4620 self.__parent_repo = repo
4621 - self.__tmpdir = TemporaryDirectory(prefix='tmp-pkgcheck-', suffix='.repo')
4622 + self.__tmpdir = TemporaryDirectory(prefix="tmp-pkgcheck-", suffix=".repo")
4623 self.__created = False
4624 repo_dir = self.__tmpdir.name
4625
4626 # set up some basic repo files so pkgcore doesn't complain
4627 - os.makedirs(pjoin(repo_dir, 'metadata'))
4628 - with open(pjoin(repo_dir, 'metadata', 'layout.conf'), 'w') as f:
4629 + os.makedirs(pjoin(repo_dir, "metadata"))
4630 + with open(pjoin(repo_dir, "metadata", "layout.conf"), "w") as f:
4631 f.write(f"masters = {' '.join(x.repo_id for x in repo.trees)}\n")
4632 - os.makedirs(pjoin(repo_dir, 'profiles'))
4633 - with open(pjoin(repo_dir, 'profiles', 'repo_name'), 'w') as f:
4634 - f.write('old-repo\n')
4635 + os.makedirs(pjoin(repo_dir, "profiles"))
4636 + with open(pjoin(repo_dir, "profiles", "repo_name"), "w") as f:
4637 + f.write("old-repo\n")
4638 super().__init__(repo_dir)
4639
4640 def cleanup(self):
4641 @@ -205,34 +204,44 @@ class _RemovalRepo(UnconfiguredTree):
4642
4643 def _populate(self, pkgs):
4644 """Populate the repo with a given sequence of historical packages."""
4645 - pkg = min(pkgs, key=attrgetter('time'))
4646 + pkg = min(pkgs, key=attrgetter("time"))
4647 paths = [pjoin(pkg.category, pkg.package)]
4648 - for subdir in ('eclass', 'profiles'):
4649 + for subdir in ("eclass", "profiles"):
4650 if os.path.exists(pjoin(self.__parent_repo.location, subdir)):
4651 paths.append(subdir)
4652 old_files = subprocess.Popen(
4653 - ['git', 'archive', f'{pkg.commit}~1'] + paths,
4654 - stdout=subprocess.PIPE, stderr=subprocess.PIPE,
4655 - cwd=self.__parent_repo.location)
4656 + ["git", "archive", f"{pkg.commit}~1"] + paths,
4657 + stdout=subprocess.PIPE,
4658 + stderr=subprocess.PIPE,
4659 + cwd=self.__parent_repo.location,
4660 + )
4661 if old_files.poll():
4662 error = old_files.stderr.read().decode().strip()
4663 - raise PkgcheckUserException(f'failed populating archive repo: {error}')
4664 - with tarfile.open(mode='r|', fileobj=old_files.stdout) as tar:
4665 + raise PkgcheckUserException(f"failed populating archive repo: {error}")
4666 + with tarfile.open(mode="r|", fileobj=old_files.stdout) as tar:
4667 tar.extractall(path=self.location)
4668
4669
4670 class GitPkgCommitsCheck(GentooRepoCheck, GitCommitsCheck):
4671 """Check unpushed git package commits for various issues."""
4672
4673 - _source = (sources.PackageRepoSource, (), (('source', GitCommitsRepoSource),))
4674 + _source = (sources.PackageRepoSource, (), (("source", GitCommitsRepoSource),))
4675 required_addons = (git.GitAddon,)
4676 - known_results = frozenset([
4677 - DirectStableKeywords, DirectNoMaintainer, RdependChange, EbuildIncorrectCopyright,
4678 - DroppedStableKeywords, DroppedUnstableKeywords, MissingSlotmove, MissingMove,
4679 - ])
4680 + known_results = frozenset(
4681 + [
4682 + DirectStableKeywords,
4683 + DirectNoMaintainer,
4684 + RdependChange,
4685 + EbuildIncorrectCopyright,
4686 + DroppedStableKeywords,
4687 + DroppedUnstableKeywords,
4688 + MissingSlotmove,
4689 + MissingMove,
4690 + ]
4691 + )
4692
4693 # package categories that are committed with stable keywords
4694 - allowed_direct_stable = frozenset(['acct-user', 'acct-group'])
4695 + allowed_direct_stable = frozenset(["acct-user", "acct-group"])
4696
4697 def __init__(self, *args, git_addon):
4698 super().__init__(*args)
4699 @@ -268,25 +277,23 @@ class GitPkgCommitsCheck(GentooRepoCheck, GitCommitsCheck):
4700 pkg = pkgs[0]
4701 removal_repo = self.removal_repo(pkgs)
4702
4703 - old_keywords = set().union(*(
4704 - p.keywords for p in removal_repo.match(pkg.unversioned_atom)))
4705 - new_keywords = set().union(*(
4706 - p.keywords for p in self.repo.match(pkg.unversioned_atom)))
4707 + old_keywords = set().union(*(p.keywords for p in removal_repo.match(pkg.unversioned_atom)))
4708 + new_keywords = set().union(*(p.keywords for p in self.repo.match(pkg.unversioned_atom)))
4709
4710 dropped_keywords = old_keywords - new_keywords
4711 dropped_stable_keywords = dropped_keywords & self.valid_arches
4712 dropped_unstable_keywords = set()
4713 - for keyword in (x for x in dropped_keywords if x[0] == '~'):
4714 + for keyword in (x for x in dropped_keywords if x[0] == "~"):
4715 arch = keyword[1:]
4716 if arch in self.valid_arches and arch not in new_keywords:
4717 dropped_unstable_keywords.add(keyword)
4718
4719 if dropped_stable_keywords:
4720 - yield DroppedStableKeywords(
4721 - sort_keywords(dropped_stable_keywords), pkg.commit, pkg=pkg)
4722 + yield DroppedStableKeywords(sort_keywords(dropped_stable_keywords), pkg.commit, pkg=pkg)
4723 if dropped_unstable_keywords:
4724 yield DroppedUnstableKeywords(
4725 - sort_keywords(dropped_unstable_keywords), pkg.commit, pkg=pkg)
4726 + sort_keywords(dropped_unstable_keywords), pkg.commit, pkg=pkg
4727 + )
4728
4729 def rename_checks(self, pkgs):
4730 """Check for issues due to package modifications."""
4731 @@ -297,9 +304,7 @@ class GitPkgCommitsCheck(GentooRepoCheck, GitCommitsCheck):
4732 if old_key == new_key:
4733 return
4734
4735 - pkgmoves = (
4736 - x[1:] for x in self.repo.config.updates.get(old_key, ())
4737 - if x[0] == 'move')
4738 + pkgmoves = (x[1:] for x in self.repo.config.updates.get(old_key, ()) if x[0] == "move")
4739
4740 for old, new in pkgmoves:
4741 if old.key == old_key and new.key == new_key:
4742 @@ -334,8 +339,8 @@ class GitPkgCommitsCheck(GentooRepoCheck, GitCommitsCheck):
4743 old_slot, new_slot = old_pkg.slot, new_pkg.slot
4744 if old_slot != new_slot:
4745 slotmoves = (
4746 - x[1:] for x in self.repo.config.updates.get(new_pkg.key, ())
4747 - if x[0] == 'slotmove')
4748 + x[1:] for x in self.repo.config.updates.get(new_pkg.key, ()) if x[0] == "slotmove"
4749 + )
4750 for atom, moved_slot in slotmoves:
4751 if atom.match(old_pkg) and new_slot == moved_slot:
4752 break
4753 @@ -347,33 +352,33 @@ class GitPkgCommitsCheck(GentooRepoCheck, GitCommitsCheck):
4754 # under the --diff-filter option in git log parsing support and are
4755 # disambiguated as follows:
4756 # A -> added, R -> renamed, M -> modified, D -> deleted
4757 - pkg_map = {'A': set(), 'R': set(), 'M': set(), 'D': set()}
4758 + pkg_map = {"A": set(), "R": set(), "M": set(), "D": set()}
4759 # Iterate over pkg commits in chronological order (git log defaults to
4760 # the reverse) discarding matching pkg commits where relevant.
4761 for pkg in reversed(pkgset):
4762 pkg_map[pkg.status].add(pkg)
4763 - if pkg.status == 'A':
4764 - pkg_map['D'].discard(pkg)
4765 - elif pkg.status == 'D':
4766 - pkg_map['A'].discard(pkg)
4767 - elif pkg.status == 'R':
4768 + if pkg.status == "A":
4769 + pkg_map["D"].discard(pkg)
4770 + elif pkg.status == "D":
4771 + pkg_map["A"].discard(pkg)
4772 + elif pkg.status == "R":
4773 # create pkg add/removal for rename operation
4774 - pkg_map['A'].add(pkg)
4775 - pkg_map['D'].add(pkg.old_pkg())
4776 + pkg_map["A"].add(pkg)
4777 + pkg_map["D"].add(pkg.old_pkg())
4778
4779 # run removed package checks
4780 - if pkg_map['D']:
4781 - yield from self.removal_checks(list(pkg_map['D']))
4782 + if pkg_map["D"]:
4783 + yield from self.removal_checks(list(pkg_map["D"]))
4784 # run renamed package checks
4785 - if pkg_map['R']:
4786 - yield from self.rename_checks(list(pkg_map['R']))
4787 + if pkg_map["R"]:
4788 + yield from self.rename_checks(list(pkg_map["R"]))
4789 # run modified package checks
4790 - if modified := [pkg for pkg in pkg_map['M'] if pkg not in pkg_map['D']]:
4791 + if modified := [pkg for pkg in pkg_map["M"] if pkg not in pkg_map["D"]]:
4792 yield from self.modified_checks(modified)
4793
4794 for git_pkg in pkgset:
4795 # remaining checks are irrelevant for removed packages
4796 - if git_pkg in pkg_map['D']:
4797 + if git_pkg in pkg_map["D"]:
4798 continue
4799
4800 # pull actual package object from repo
4801 @@ -386,15 +391,15 @@ class GitPkgCommitsCheck(GentooRepoCheck, GitCommitsCheck):
4802
4803 # check copyright on new/modified ebuilds
4804 if mo := copyright_regex.match(line):
4805 - year = mo.group('end')
4806 + year = mo.group("end")
4807 if int(year) != self.today.year:
4808 - yield EbuildIncorrectCopyright(year, line.strip('\n'), pkg=pkg)
4809 + yield EbuildIncorrectCopyright(year, line.strip("\n"), pkg=pkg)
4810
4811 # checks for newly added ebuilds
4812 - if git_pkg.status == 'A':
4813 + if git_pkg.status == "A":
4814 # check for directly added stable ebuilds
4815 if pkg.category not in self.allowed_direct_stable:
4816 - if stable_keywords := sorted(x for x in pkg.keywords if x[0] not in '~-'):
4817 + if stable_keywords := sorted(x for x in pkg.keywords if x[0] not in "~-"):
4818 yield DirectStableKeywords(stable_keywords, pkg=pkg)
4819
4820 # pkg was just added to the tree
4821 @@ -422,8 +427,8 @@ class MissingSignOff(results.CommitResult, results.Error):
4822 @property
4823 def desc(self):
4824 s = pluralism(self.missing_sign_offs)
4825 - sign_offs = ', '.join(self.missing_sign_offs)
4826 - return f'commit {self.commit}, missing sign-off{s}: {sign_offs}'
4827 + sign_offs = ", ".join(self.missing_sign_offs)
4828 + return f"commit {self.commit}, missing sign-off{s}: {sign_offs}"
4829
4830
4831 class InvalidCommitTag(results.CommitResult, results.Style):
4832 @@ -453,7 +458,7 @@ class InvalidCommitMessage(results.CommitResult, results.Style):
4833
4834 @property
4835 def desc(self):
4836 - return f'commit {self.commit}: {self.error}'
4837 + return f"commit {self.commit}: {self.error}"
4838
4839
4840 class BadCommitSummary(results.CommitResult, results.Style):
4841 @@ -474,7 +479,7 @@ class BadCommitSummary(results.CommitResult, results.Style):
4842
4843 @property
4844 def desc(self):
4845 - return f'commit {self.commit}, {self.error}: {self.summary!r}'
4846 + return f"commit {self.commit}, {self.error}: {self.summary!r}"
4847
4848
4849 def verify_tags(*tags, required=False):
4850 @@ -498,28 +503,37 @@ class GitCommitMessageCheck(GentooRepoCheck, GitCommitsCheck):
4851 """Check unpushed git commit messages for various issues."""
4852
4853 _source = GitCommitsSource
4854 - known_results = frozenset([
4855 - MissingSignOff, InvalidCommitTag, InvalidCommitMessage, BadCommitSummary,
4856 - ])
4857 + known_results = frozenset(
4858 + [
4859 + MissingSignOff,
4860 + InvalidCommitTag,
4861 + InvalidCommitMessage,
4862 + BadCommitSummary,
4863 + ]
4864 + )
4865
4866 # mapping between known commit tags and verification methods
4867 known_tags = {}
4868 - _commit_footer_regex = re.compile(r'^(?P<tag>[a-zA-Z0-9_-]+): (?P<value>.*)$')
4869 - _git_cat_file_regex = re.compile(r'^(?P<object>.+?) (?P<status>.+)$')
4870 + _commit_footer_regex = re.compile(r"^(?P<tag>[a-zA-Z0-9_-]+): (?P<value>.*)$")
4871 + _git_cat_file_regex = re.compile(r"^(?P<object>.+?) (?P<status>.+)$")
4872
4873 # categories exception for rule of having package version in summary
4874 - skipped_categories = frozenset({
4875 - 'acct-group', 'acct-user', 'virtual',
4876 - })
4877 + skipped_categories = frozenset(
4878 + {
4879 + "acct-group",
4880 + "acct-user",
4881 + "virtual",
4882 + }
4883 + )
4884
4885 def __init__(self, *args):
4886 super().__init__(*args)
4887 # mapping of required tags to forcibly run verifications methods
4888 self._required_tags = ImmutableDict(
4889 - ((tag, verify), [])
4890 - for tag, (verify, required) in self.known_tags.items() if required)
4891 + ((tag, verify), []) for tag, (verify, required) in self.known_tags.items() if required
4892 + )
4893
4894 - @verify_tags('Signed-off-by', required=True)
4895 + @verify_tags("Signed-off-by", required=True)
4896 def _signed_off_by_tag(self, tag, values, commit):
4897 """Verify commit contains all required sign offs in accordance with GLEP 76."""
4898 required_sign_offs = {commit.author, commit.committer}
4899 @@ -527,14 +541,13 @@ class GitCommitMessageCheck(GentooRepoCheck, GitCommitsCheck):
4900 if missing_sign_offs:
4901 yield MissingSignOff(sorted(missing_sign_offs), commit=commit)
4902
4903 - @verify_tags('Gentoo-Bug')
4904 + @verify_tags("Gentoo-Bug")
4905 def _deprecated_tag(self, tag, values, commit):
4906 """Flag deprecated tags that shouldn't be used."""
4907 for value in values:
4908 - yield InvalidCommitTag(
4909 - tag, value, f"{tag} tag is no longer valid", commit=commit)
4910 + yield InvalidCommitTag(tag, value, f"{tag} tag is no longer valid", commit=commit)
4911
4912 - @verify_tags('Bug', 'Closes')
4913 + @verify_tags("Bug", "Closes")
4914 def _bug_tag(self, tag, values, commit):
4915 """Verify values are URLs for Bug/Closes tags."""
4916 for value in values:
4917 @@ -544,40 +557,44 @@ class GitCommitMessageCheck(GentooRepoCheck, GitCommitsCheck):
4918 continue
4919 if parsed.scheme.lower() not in ("http", "https"):
4920 yield InvalidCommitTag(
4921 - tag, value, "invalid protocol; should be http or https", commit=commit)
4922 + tag, value, "invalid protocol; should be http or https", commit=commit
4923 + )
4924
4925 @klass.jit_attr_none
4926 def git_cat_file(self):
4927 """Start a `git cat-file` process to verify git repo hashes."""
4928 return subprocess.Popen(
4929 - ['git', 'cat-file', '--batch-check'],
4930 + ["git", "cat-file", "--batch-check"],
4931 cwd=self.options.target_repo.location,
4932 - stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL,
4933 - encoding='utf8', bufsize=1)
4934 + stdin=subprocess.PIPE,
4935 + stdout=subprocess.PIPE,
4936 + stderr=subprocess.DEVNULL,
4937 + encoding="utf8",
4938 + bufsize=1,
4939 + )
4940
4941 - @verify_tags('Fixes', 'Reverts')
4942 + @verify_tags("Fixes", "Reverts")
4943 def _commit_tag(self, tag, values, commit):
4944 """Verify referenced commits exist for Fixes/Reverts tags."""
4945 - self.git_cat_file.stdin.write('\n'.join(values) + '\n')
4946 + self.git_cat_file.stdin.write("\n".join(values) + "\n")
4947 if self.git_cat_file.poll() is None:
4948 for _ in range(len(values)):
4949 line = self.git_cat_file.stdout.readline().strip()
4950 if mo := self._git_cat_file_regex.match(line):
4951 - value = mo.group('object')
4952 - status = mo.group('status')
4953 - if not status.startswith('commit '):
4954 - yield InvalidCommitTag(
4955 - tag, value, f'{status} commit', commit=commit)
4956 + value = mo.group("object")
4957 + status = mo.group("status")
4958 + if not status.startswith("commit "):
4959 + yield InvalidCommitTag(tag, value, f"{status} commit", commit=commit)
4960
4961 def feed(self, commit):
4962 if len(commit.message) == 0:
4963 - yield InvalidCommitMessage('no commit message', commit=commit)
4964 + yield InvalidCommitMessage("no commit message", commit=commit)
4965 return
4966
4967 # drop leading '*: ' prefix assuming it's a package/eclass/file/path
4968 summary = commit.message[0]
4969 - if len(summary.split(': ', 1)[-1]) > 69:
4970 - yield InvalidCommitMessage('summary is too long', commit=commit)
4971 + if len(summary.split(": ", 1)[-1]) > 69:
4972 + yield InvalidCommitMessage("summary is too long", commit=commit)
4973
4974 # categorize package changes
4975 pkg_changes = defaultdict(set)
4976 @@ -590,19 +607,21 @@ class GitCommitMessageCheck(GentooRepoCheck, GitCommitsCheck):
4977 if len({x.package for x in atoms}) == 1:
4978 # changes to a single cat/pn
4979 atom = next(iter(atoms))
4980 - if not re.match(rf'^{re.escape(atom.key)}: ', summary):
4981 - error = f'summary missing {atom.key!r} package prefix'
4982 + if not re.match(rf"^{re.escape(atom.key)}: ", summary):
4983 + error = f"summary missing {atom.key!r} package prefix"
4984 yield BadCommitSummary(error, summary, commit=commit)
4985 # check for version in summary for singular, non-revision bumps
4986 - if len(commit.pkgs['A']) == 1 and category not in self.skipped_categories:
4987 - atom = next(iter(commit.pkgs['A']))
4988 - if not atom.revision and not re.match(rf'^.+\bv?{re.escape(atom.version)}\b.*$', summary):
4989 - error = f'summary missing package version {atom.version!r}'
4990 + if len(commit.pkgs["A"]) == 1 and category not in self.skipped_categories:
4991 + atom = next(iter(commit.pkgs["A"]))
4992 + if not atom.revision and not re.match(
4993 + rf"^.+\bv?{re.escape(atom.version)}\b.*$", summary
4994 + ):
4995 + error = f"summary missing package version {atom.version!r}"
4996 yield BadCommitSummary(error, summary, commit=commit)
4997 else:
4998 # mutiple pkg changes in the same category
4999 - if not re.match(rf'^{re.escape(category)}: ', summary):
5000 - error = f'summary missing {category!r} category prefix'
5001 + if not re.match(rf"^{re.escape(category)}: ", summary):
5002 + error = f"summary missing {category!r} category prefix"
5003 yield BadCommitSummary(error, summary, commit=commit)
5004
5005 # verify message body
5006 @@ -613,18 +632,17 @@ class GitCommitMessageCheck(GentooRepoCheck, GitCommitsCheck):
5007 if not line.strip():
5008 continue
5009 if self._commit_footer_regex.match(line) is None:
5010 - if not body and commit.message[1] != '':
5011 - yield InvalidCommitMessage(
5012 - 'missing empty line before body', commit=commit)
5013 + if not body and commit.message[1] != "":
5014 + yield InvalidCommitMessage("missing empty line before body", commit=commit)
5015 # still processing the body
5016 body = True
5017 if len(line.split()) > 1 and len(line) > 80:
5018 yield InvalidCommitMessage(
5019 - f'line {lineno} greater than 80 chars: {line!r}', commit=commit)
5020 + f"line {lineno} greater than 80 chars: {line!r}", commit=commit
5021 + )
5022 else:
5023 - if commit.message[lineno - 1] != '':
5024 - yield InvalidCommitMessage(
5025 - 'missing empty line before tags', commit=commit)
5026 + if commit.message[lineno - 1] != "":
5027 + yield InvalidCommitMessage("missing empty line before tags", commit=commit)
5028 # push it back on the stack
5029 i = chain([line], i)
5030 break
5031 @@ -637,20 +655,20 @@ class GitCommitMessageCheck(GentooRepoCheck, GitCommitsCheck):
5032 if not line.strip():
5033 # single empty end line is ignored
5034 if lineno != len(commit.message):
5035 - yield InvalidCommitMessage(
5036 - f'empty line {lineno} in footer', commit=commit)
5037 + yield InvalidCommitMessage(f"empty line {lineno} in footer", commit=commit)
5038 continue
5039 if mo := self._commit_footer_regex.match(line):
5040 # register known tags for verification
5041 - tag = mo.group('tag')
5042 + tag = mo.group("tag")
5043 try:
5044 func, required = self.known_tags[tag]
5045 - tags.setdefault((tag, func), []).append(mo.group('value'))
5046 + tags.setdefault((tag, func), []).append(mo.group("value"))
5047 except KeyError:
5048 continue
5049 else:
5050 yield InvalidCommitMessage(
5051 - f'non-tag in footer, line {lineno}: {line!r}', commit=commit)
5052 + f"non-tag in footer, line {lineno}: {line!r}", commit=commit
5053 + )
5054
5055 # run tag verification methods
5056 for (tag, func), values in tags.items():
5057 @@ -662,7 +680,7 @@ class EclassIncorrectCopyright(IncorrectCopyright, results.EclassResult):
5058
5059 @property
5060 def desc(self):
5061 - return f'{self.eclass}: {super().desc}'
5062 + return f"{self.eclass}: {super().desc}"
5063
5064
5065 class GitEclassCommitsCheck(GentooRepoCheck, GitCommitsCheck):
5066 @@ -679,6 +697,6 @@ class GitEclassCommitsCheck(GentooRepoCheck, GitCommitsCheck):
5067 # check copyright on new/modified eclasses
5068 line = next(iter(eclass.lines))
5069 if mo := copyright_regex.match(line):
5070 - year = mo.group('end')
5071 + year = mo.group("end")
5072 if int(year) != self.today.year:
5073 - yield EclassIncorrectCopyright(year, line.strip('\n'), eclass=eclass)
5074 + yield EclassIncorrectCopyright(year, line.strip("\n"), eclass=eclass)
5075
5076 diff --git a/src/pkgcheck/checks/glsa.py b/src/pkgcheck/checks/glsa.py
5077 index 2f869099..79d4ec65 100644
5078 --- a/src/pkgcheck/checks/glsa.py
5079 +++ b/src/pkgcheck/checks/glsa.py
5080 @@ -23,8 +23,8 @@ class VulnerablePackage(results.VersionResult, results.Error):
5081 @property
5082 def desc(self):
5083 s = pluralism(self.arches)
5084 - arches = ', '.join(self.arches)
5085 - return f'vulnerable via {self.glsa}, keyword{s}: {arches}'
5086 + arches = ", ".join(self.arches)
5087 + return f"vulnerable via {self.glsa}, keyword{s}: {arches}"
5088
5089
5090 class GlsaCheck(GentooRepoCheck):
5091 @@ -37,8 +37,7 @@ class GlsaCheck(GentooRepoCheck):
5092
5093 @staticmethod
5094 def mangle_argparser(parser):
5095 - parser.plugin.add_argument(
5096 - "--glsa-dir", type=existent_dir, help="custom glsa directory")
5097 + parser.plugin.add_argument("--glsa-dir", type=existent_dir, help="custom glsa directory")
5098
5099 def __init__(self, *args):
5100 super().__init__(*args)
5101 @@ -46,12 +45,12 @@ class GlsaCheck(GentooRepoCheck):
5102 if glsa_dir is None:
5103 # search for glsa dir in target repo and then any masters
5104 for repo in reversed(self.options.target_repo.trees):
5105 - path = pjoin(repo.location, 'metadata', 'glsa')
5106 + path = pjoin(repo.location, "metadata", "glsa")
5107 if os.path.isdir(path):
5108 glsa_dir = path
5109 break
5110 else:
5111 - raise SkipCheck(self, 'no available glsa source')
5112 + raise SkipCheck(self, "no available glsa source")
5113
5114 # this is a bit brittle
5115 self.vulns = defaultdict(list)
5116 @@ -63,13 +62,14 @@ class GlsaCheck(GentooRepoCheck):
5117 for vuln in self.vulns.get(pkg.key, ()):
5118 if vuln.match(pkg):
5119 arches = set()
5120 - for v in collect_package_restrictions(vuln, ['keywords']):
5121 + for v in collect_package_restrictions(vuln, ["keywords"]):
5122 if isinstance(v.restriction, values.ContainmentMatch2):
5123 - arches.update(x.lstrip('~') for x in v.restriction.vals)
5124 + arches.update(x.lstrip("~") for x in v.restriction.vals)
5125 else:
5126 raise Exception(
5127 - f'unexpected restriction sequence- {v.restriction} in {vuln}')
5128 - keys = {x.lstrip('~') for x in pkg.keywords if not x.startswith('-')}
5129 + f"unexpected restriction sequence- {v.restriction} in {vuln}"
5130 + )
5131 + keys = {x.lstrip("~") for x in pkg.keywords if not x.startswith("-")}
5132 if arches:
5133 arches = sorted(arches.intersection(keys))
5134 assert arches
5135
5136 diff --git a/src/pkgcheck/checks/header.py b/src/pkgcheck/checks/header.py
5137 index c08c4c8b..429b26ce 100644
5138 --- a/src/pkgcheck/checks/header.py
5139 +++ b/src/pkgcheck/checks/header.py
5140 @@ -5,8 +5,7 @@ import re
5141 from .. import results, sources
5142 from . import GentooRepoCheck
5143
5144 -copyright_regex = re.compile(
5145 - r'^# Copyright (?P<begin>\d{4}-)?(?P<end>\d{4}) (?P<holder>.+)$')
5146 +copyright_regex = re.compile(r"^# Copyright (?P<begin>\d{4}-)?(?P<end>\d{4}) (?P<holder>.+)$")
5147
5148
5149 class _FileHeaderResult(results.Result):
5150 @@ -30,11 +29,11 @@ class InvalidCopyright(_FileHeaderResult, results.AliasResult, results.Error):
5151 # Copyright YEARS Gentoo Authors
5152 """
5153
5154 - _name = 'InvalidCopyright'
5155 + _name = "InvalidCopyright"
5156
5157 @property
5158 def desc(self):
5159 - return f'invalid copyright: {self.line!r}'
5160 + return f"invalid copyright: {self.line!r}"
5161
5162
5163 class OldGentooCopyright(_FileHeaderResult, results.AliasResult, results.Warning):
5164 @@ -49,7 +48,7 @@ class OldGentooCopyright(_FileHeaderResult, results.AliasResult, results.Warning
5165 holder instead.
5166 """
5167
5168 - _name = 'OldGentooCopyright'
5169 + _name = "OldGentooCopyright"
5170
5171 @property
5172 def desc(self):
5173 @@ -65,7 +64,7 @@ class NonGentooAuthorsCopyright(_FileHeaderResult, results.AliasResult, results.
5174 via bugs.gentoo.org.
5175 """
5176
5177 - _name = 'NonGentooAuthorsCopyright'
5178 + _name = "NonGentooAuthorsCopyright"
5179
5180 @property
5181 def desc(self):
5182 @@ -82,13 +81,13 @@ class InvalidLicenseHeader(_FileHeaderResult, results.AliasResult, results.Error
5183 # Distributed under the terms of the GNU General Public License v2
5184 """
5185
5186 - _name = 'InvalidLicenseHeader'
5187 + _name = "InvalidLicenseHeader"
5188
5189 @property
5190 def desc(self):
5191 if self.line:
5192 - return f'invalid license header: {self.line!r}'
5193 - return 'missing license header'
5194 + return f"invalid license header: {self.line!r}"
5195 + return "missing license header"
5196
5197
5198 class _HeaderCheck(GentooRepoCheck):
5199 @@ -98,12 +97,17 @@ class _HeaderCheck(GentooRepoCheck):
5200 _old_copyright = OldGentooCopyright
5201 _non_gentoo_authors = NonGentooAuthorsCopyright
5202 _invalid_license = InvalidLicenseHeader
5203 - known_results = frozenset([
5204 - _invalid_copyright, _old_copyright, _non_gentoo_authors, _invalid_license,
5205 - ])
5206 - _item_attr = 'pkg'
5207 -
5208 - license_header = '# Distributed under the terms of the GNU General Public License v2'
5209 + known_results = frozenset(
5210 + [
5211 + _invalid_copyright,
5212 + _old_copyright,
5213 + _non_gentoo_authors,
5214 + _invalid_license,
5215 + ]
5216 + )
5217 + _item_attr = "pkg"
5218 +
5219 + license_header = "# Distributed under the terms of the GNU General Public License v2"
5220
5221 def args(self, item):
5222 return {self._item_attr: item}
5223 @@ -114,19 +118,19 @@ class _HeaderCheck(GentooRepoCheck):
5224 if mo := copyright_regex.match(line):
5225 # Copyright policy is active since 2018-10-21, so it applies
5226 # to all ebuilds committed in 2019 and later
5227 - if int(mo.group('end')) >= 2019:
5228 - if mo.group('holder') == 'Gentoo Foundation':
5229 + if int(mo.group("end")) >= 2019:
5230 + if mo.group("holder") == "Gentoo Foundation":
5231 yield self._old_copyright(line, **self.args(item))
5232 # Gentoo policy requires 'Gentoo Authors'
5233 - elif mo.group('holder') != 'Gentoo Authors':
5234 + elif mo.group("holder") != "Gentoo Authors":
5235 yield self._non_gentoo_authors(line, **self.args(item))
5236 else:
5237 yield self._invalid_copyright(line, **self.args(item))
5238
5239 try:
5240 - line = item.lines[1].strip('\n')
5241 + line = item.lines[1].strip("\n")
5242 except IndexError:
5243 - line = ''
5244 + line = ""
5245 if line != self.license_header:
5246 yield self._invalid_license(line, **self.args(item))
5247
5248 @@ -156,10 +160,15 @@ class EbuildHeaderCheck(_HeaderCheck):
5249 _old_copyright = EbuildOldGentooCopyright
5250 _non_gentoo_authors = EbuildNonGentooAuthorsCopyright
5251 _invalid_license = EbuildInvalidLicenseHeader
5252 - known_results = frozenset([
5253 - _invalid_copyright, _old_copyright, _non_gentoo_authors, _invalid_license,
5254 - ])
5255 - _item_attr = 'pkg'
5256 + known_results = frozenset(
5257 + [
5258 + _invalid_copyright,
5259 + _old_copyright,
5260 + _non_gentoo_authors,
5261 + _invalid_license,
5262 + ]
5263 + )
5264 + _item_attr = "pkg"
5265
5266
5267 class EclassInvalidCopyright(InvalidCopyright, results.EclassResult):
5268 @@ -167,7 +176,7 @@ class EclassInvalidCopyright(InvalidCopyright, results.EclassResult):
5269
5270 @property
5271 def desc(self):
5272 - return f'{self.eclass}: {super().desc}'
5273 + return f"{self.eclass}: {super().desc}"
5274
5275
5276 class EclassOldGentooCopyright(OldGentooCopyright, results.EclassResult):
5277 @@ -175,7 +184,7 @@ class EclassOldGentooCopyright(OldGentooCopyright, results.EclassResult):
5278
5279 @property
5280 def desc(self):
5281 - return f'{self.eclass}: {super().desc}'
5282 + return f"{self.eclass}: {super().desc}"
5283
5284
5285 class EclassNonGentooAuthorsCopyright(NonGentooAuthorsCopyright, results.EclassResult):
5286 @@ -183,7 +192,7 @@ class EclassNonGentooAuthorsCopyright(NonGentooAuthorsCopyright, results.EclassR
5287
5288 @property
5289 def desc(self):
5290 - return f'{self.eclass}: {super().desc}'
5291 + return f"{self.eclass}: {super().desc}"
5292
5293
5294 class EclassInvalidLicenseHeader(InvalidLicenseHeader, results.EclassResult):
5295 @@ -191,7 +200,7 @@ class EclassInvalidLicenseHeader(InvalidLicenseHeader, results.EclassResult):
5296
5297 @property
5298 def desc(self):
5299 - return f'{self.eclass}: {super().desc}'
5300 + return f"{self.eclass}: {super().desc}"
5301
5302
5303 class EclassHeaderCheck(_HeaderCheck):
5304 @@ -203,7 +212,12 @@ class EclassHeaderCheck(_HeaderCheck):
5305 _old_copyright = EclassOldGentooCopyright
5306 _non_gentoo_authors = EclassNonGentooAuthorsCopyright
5307 _invalid_license = EclassInvalidLicenseHeader
5308 - known_results = frozenset([
5309 - _invalid_copyright, _old_copyright, _non_gentoo_authors, _invalid_license,
5310 - ])
5311 - _item_attr = 'eclass'
5312 + known_results = frozenset(
5313 + [
5314 + _invalid_copyright,
5315 + _old_copyright,
5316 + _non_gentoo_authors,
5317 + _invalid_license,
5318 + ]
5319 + )
5320 + _item_attr = "eclass"
5321
5322 diff --git a/src/pkgcheck/checks/imlate.py b/src/pkgcheck/checks/imlate.py
5323 index a9688873..ee0da6d8 100644
5324 --- a/src/pkgcheck/checks/imlate.py
5325 +++ b/src/pkgcheck/checks/imlate.py
5326 @@ -18,11 +18,11 @@ class PotentialStable(results.VersionResult, results.Info):
5327
5328 @property
5329 def desc(self):
5330 - es = pluralism(self.stable, plural='es')
5331 - stable = ', '.join(self.stable)
5332 + es = pluralism(self.stable, plural="es")
5333 + stable = ", ".join(self.stable)
5334 s = pluralism(self.keywords)
5335 - keywords = ', '.join(self.keywords)
5336 - return f'slot({self.slot}), stabled arch{es}: [ {stable} ], potential{s}: [ {keywords} ]'
5337 + keywords = ", ".join(self.keywords)
5338 + return f"slot({self.slot}), stabled arch{es}: [ {stable} ], potential{s}: [ {keywords} ]"
5339
5340
5341 class LaggingStable(results.VersionResult, results.Info):
5342 @@ -36,10 +36,10 @@ class LaggingStable(results.VersionResult, results.Info):
5343
5344 @property
5345 def desc(self):
5346 - es = pluralism(self.stable, plural='es')
5347 - stable = ', '.join(self.stable)
5348 - keywords = ', '.join(self.keywords)
5349 - return f'slot({self.slot}), stabled arch{es}: [ {stable} ], lagging: [ {keywords} ]'
5350 + es = pluralism(self.stable, plural="es")
5351 + stable = ", ".join(self.stable)
5352 + keywords = ", ".join(self.keywords)
5353 + return f"slot({self.slot}), stabled arch{es}: [ {stable} ], lagging: [ {keywords} ]"
5354
5355
5356 class ImlateCheck(Check):
5357 @@ -52,28 +52,33 @@ class ImlateCheck(Check):
5358 @staticmethod
5359 def mangle_argparser(parser):
5360 parser.plugin.add_argument(
5361 - "--source-arches", action='csv', metavar='ARCH',
5362 + "--source-arches",
5363 + action="csv",
5364 + metavar="ARCH",
5365 help="comma separated list of arches to compare against for lagging stabilization",
5366 docs="""
5367 Comma separated list of arches to compare against for
5368 lagging stabilization.
5369
5370 The default arches are all stable arches (unless --arches is specified).
5371 - """)
5372 + """,
5373 + )
5374
5375 def __init__(self, *args, stable_arches_addon=None):
5376 super().__init__(*args)
5377 self.all_arches = frozenset(self.options.arches)
5378 - self.stable_arches = frozenset(arch.strip().lstrip("~") for arch in self.options.stable_arches)
5379 - self.target_arches = frozenset(f'~{arch}' for arch in self.stable_arches)
5380 + self.stable_arches = frozenset(
5381 + arch.strip().lstrip("~") for arch in self.options.stable_arches
5382 + )
5383 + self.target_arches = frozenset(f"~{arch}" for arch in self.stable_arches)
5384
5385 source_arches = self.options.source_arches
5386 if source_arches is None:
5387 source_arches = self.options.stable_arches
5388 - self.source_arches = frozenset(
5389 - arch.lstrip("~") for arch in source_arches)
5390 + self.source_arches = frozenset(arch.lstrip("~") for arch in source_arches)
5391 self.source_filter = packages.PackageRestriction(
5392 - "keywords", values.ContainmentMatch2(self.source_arches))
5393 + "keywords", values.ContainmentMatch2(self.source_arches)
5394 + )
5395
5396 def feed(self, pkgset):
5397 pkg_slotted = defaultdict(list)
5398 @@ -84,7 +89,7 @@ class ImlateCheck(Check):
5399 for slot, pkgs in sorted(pkg_slotted.items()):
5400 slot_keywords = set().union(*(pkg.keywords for pkg in pkgs))
5401 stable_slot_keywords = self.all_arches.intersection(slot_keywords)
5402 - potential_slot_stables = {'~' + x for x in stable_slot_keywords}
5403 + potential_slot_stables = {"~" + x for x in stable_slot_keywords}
5404 newer_slot_stables = set()
5405 for pkg in reversed(pkgs):
5406 # only consider pkgs with keywords that contain the targeted arches
5407 @@ -93,23 +98,21 @@ class ImlateCheck(Check):
5408 continue
5409
5410 # current pkg stable keywords
5411 - stable = {'~' + x for x in self.source_arches.intersection(pkg.keywords)}
5412 + stable = {"~" + x for x in self.source_arches.intersection(pkg.keywords)}
5413
5414 lagging = potential_slot_stables.intersection(pkg.keywords)
5415 # skip keywords that have newer stable versions
5416 - lagging -= {'~' + x for x in newer_slot_stables}
5417 + lagging -= {"~" + x for x in newer_slot_stables}
5418 lagging -= stable
5419 if lagging:
5420 - stable_kwds = (x for x in pkg.keywords if not x[0] in ('~', '-'))
5421 - yield LaggingStable(
5422 - slot, sorted(stable_kwds), sorted(lagging), pkg=pkg)
5423 + stable_kwds = (x for x in pkg.keywords if not x[0] in ("~", "-"))
5424 + yield LaggingStable(slot, sorted(stable_kwds), sorted(lagging), pkg=pkg)
5425
5426 - unstable_keywords = {x for x in pkg.keywords if x[0] == '~'}
5427 + unstable_keywords = {x for x in pkg.keywords if x[0] == "~"}
5428 potential = self.target_arches.intersection(unstable_keywords)
5429 potential -= lagging | stable
5430 if potential:
5431 - stable_kwds = (x for x in pkg.keywords if not x[0] in ('~', '-'))
5432 - yield PotentialStable(
5433 - slot, sorted(stable_kwds), sorted(potential), pkg=pkg)
5434 + stable_kwds = (x for x in pkg.keywords if not x[0] in ("~", "-"))
5435 + yield PotentialStable(slot, sorted(stable_kwds), sorted(potential), pkg=pkg)
5436
5437 break
5438
5439 diff --git a/src/pkgcheck/checks/metadata.py b/src/pkgcheck/checks/metadata.py
5440 index 56d54529..8a26024c 100644
5441 --- a/src/pkgcheck/checks/metadata.py
5442 +++ b/src/pkgcheck/checks/metadata.py
5443 @@ -37,32 +37,32 @@ class _LicenseResult(results.VersionResult):
5444 @property
5445 def desc(self):
5446 s = pluralism(self.licenses)
5447 - licenses = ', '.join(self.licenses)
5448 - return f'{self.license_type} license{s}: {licenses}'
5449 + licenses = ", ".join(self.licenses)
5450 + return f"{self.license_type} license{s}: {licenses}"
5451
5452
5453 class UnknownLicense(_LicenseResult, results.Error):
5454 """License usage with no matching license file."""
5455
5456 - license_type = 'unknown'
5457 + license_type = "unknown"
5458
5459
5460 class DeprecatedLicense(_LicenseResult, results.Warning):
5461 """Deprecated license usage."""
5462
5463 - license_type = 'deprecated'
5464 + license_type = "deprecated"
5465
5466
5467 class MissingLicense(results.VersionResult, results.Error):
5468 """Package has no LICENSE defined."""
5469
5470 - desc = 'no license defined'
5471 + desc = "no license defined"
5472
5473
5474 class InvalidLicense(results.MetadataError, results.VersionResult):
5475 """Package's LICENSE is invalid."""
5476
5477 - attr = 'license'
5478 + attr = "license"
5479
5480
5481 class MissingLicenseRestricts(results.VersionResult, results.Warning):
5482 @@ -76,10 +76,9 @@ class MissingLicenseRestricts(results.VersionResult, results.Warning):
5483
5484 @property
5485 def desc(self):
5486 - restrictions = ' '.join(self.restrictions)
5487 + restrictions = " ".join(self.restrictions)
5488 return (
5489 - f'{self.license_group} license {self.license!r} '
5490 - f'requires RESTRICT="{restrictions}"'
5491 + f"{self.license_group} license {self.license!r} " f'requires RESTRICT="{restrictions}"'
5492 )
5493
5494
5495 @@ -94,23 +93,30 @@ class UnnecessaryLicense(results.VersionResult, results.Warning):
5496 class LicenseCheck(Check):
5497 """LICENSE validity checks."""
5498
5499 - known_results = frozenset([
5500 - InvalidLicense, MissingLicense, UnknownLicense, DeprecatedLicense,
5501 - UnnecessaryLicense, UnstatedIuse, MissingLicenseRestricts,
5502 - ])
5503 + known_results = frozenset(
5504 + [
5505 + InvalidLicense,
5506 + MissingLicense,
5507 + UnknownLicense,
5508 + DeprecatedLicense,
5509 + UnnecessaryLicense,
5510 + UnstatedIuse,
5511 + MissingLicenseRestricts,
5512 + ]
5513 + )
5514
5515 # categories for ebuilds that can lack LICENSE settings
5516 - unlicensed_categories = frozenset(['virtual', 'acct-group', 'acct-user'])
5517 + unlicensed_categories = frozenset(["virtual", "acct-group", "acct-user"])
5518
5519 required_addons = (addons.UseAddon,)
5520
5521 def __init__(self, *args, use_addon):
5522 super().__init__(*args)
5523 repo = self.options.target_repo
5524 - self.iuse_filter = use_addon.get_filter('license')
5525 - self.deprecated = repo.licenses.groups.get('DEPRECATED', frozenset())
5526 - self.eula = repo.licenses.groups.get('EULA', frozenset())
5527 - self.mirror_restricts = frozenset(['fetch', 'mirror'])
5528 + self.iuse_filter = use_addon.get_filter("license")
5529 + self.deprecated = repo.licenses.groups.get("DEPRECATED", frozenset())
5530 + self.eula = repo.licenses.groups.get("EULA", frozenset())
5531 + self.mirror_restricts = frozenset(["fetch", "mirror"])
5532
5533 def _required_licenses(self, license_group, nodes, restricts=None):
5534 """Determine required licenses from a given license group."""
5535 @@ -140,14 +146,13 @@ class LicenseCheck(Check):
5536 restricts = set().union(*(x.vals for x in restrictions if not x.negate))
5537 license_restrictions = pkg.restrict.evaluate_depset(restricts)
5538 missing_restricts = []
5539 - if 'bindist' not in license_restrictions:
5540 - missing_restricts.append('bindist')
5541 + if "bindist" not in license_restrictions:
5542 + missing_restricts.append("bindist")
5543 if not self.mirror_restricts.intersection(license_restrictions):
5544 if pkg.fetchables:
5545 - missing_restricts.append('mirror')
5546 + missing_restricts.append("mirror")
5547 if missing_restricts:
5548 - yield MissingLicenseRestricts(
5549 - 'EULA', license, missing_restricts, pkg=pkg)
5550 + yield MissingLicenseRestricts("EULA", license, missing_restricts, pkg=pkg)
5551
5552 # flatten license depset
5553 licenses, unstated = self.iuse_filter((str,), pkg, pkg.license)
5554 @@ -178,26 +183,26 @@ class _UseFlagsResult(results.VersionResult):
5555 @property
5556 def desc(self):
5557 s = pluralism(self.flags)
5558 - flags = ', '.join(map(repr, sorted(self.flags)))
5559 - return f'{self.flag_type} USE flag{s}: {flags}'
5560 + flags = ", ".join(map(repr, sorted(self.flags)))
5561 + return f"{self.flag_type} USE flag{s}: {flags}"
5562
5563
5564 class InvalidUseFlags(_UseFlagsResult, results.Error):
5565 """Package IUSE contains invalid USE flags."""
5566
5567 - flag_type = 'invalid'
5568 + flag_type = "invalid"
5569
5570
5571 class UnknownUseFlags(_UseFlagsResult, results.Error):
5572 """Package IUSE contains unknown USE flags."""
5573
5574 - flag_type = 'unknown'
5575 + flag_type = "unknown"
5576
5577
5578 class BadDefaultUseFlags(_UseFlagsResult, results.Error):
5579 """Package IUSE contains bad default USE flags."""
5580
5581 - flag_type = 'bad default'
5582 + flag_type = "bad default"
5583
5584
5585 class IuseCheck(Check):
5586 @@ -205,19 +210,22 @@ class IuseCheck(Check):
5587
5588 required_addons = (addons.UseAddon,)
5589 known_results = frozenset([InvalidUseFlags, UnknownUseFlags, BadDefaultUseFlags])
5590 - use_expand_groups = ('cpu_flags',)
5591 + use_expand_groups = ("cpu_flags",)
5592
5593 def __init__(self, *args, use_addon):
5594 super().__init__(*args)
5595 self.iuse_handler = use_addon
5596 - self.bad_defaults = tuple(['-'] + [f'+{x}_' for x in self.use_expand_groups])
5597 + self.bad_defaults = tuple(["-"] + [f"+{x}_" for x in self.use_expand_groups])
5598
5599 def feed(self, pkg):
5600 if invalid := sorted(x for x in pkg.iuse_stripped if not pkg.eapi.is_valid_use_flag(x)):
5601 yield InvalidUseFlags(invalid, pkg=pkg)
5602
5603 - if pkg.eapi.options.iuse_defaults and (bad_defaults := sorted(
5604 - x for x in pkg.iuse if x.startswith(self.bad_defaults) and len(x) > 1)):
5605 + if pkg.eapi.options.iuse_defaults and (
5606 + bad_defaults := sorted(
5607 + x for x in pkg.iuse if x.startswith(self.bad_defaults) and len(x) > 1
5608 + )
5609 + ):
5610 yield BadDefaultUseFlags(bad_defaults, pkg=pkg)
5611
5612 if not self.iuse_handler.ignore:
5613 @@ -243,13 +251,13 @@ class _EapiResult(results.VersionResult):
5614 class DeprecatedEapi(_EapiResult, results.Warning):
5615 """Package's EAPI is deprecated according to repo metadata."""
5616
5617 - _type = 'deprecated'
5618 + _type = "deprecated"
5619
5620
5621 class BannedEapi(_EapiResult, results.Error):
5622 """Package's EAPI is banned according to repo metadata."""
5623
5624 - _type = 'banned'
5625 + _type = "banned"
5626
5627
5628 class StableKeywordsOnTestingEapi(results.VersionResult, results.Error):
5629 @@ -281,8 +289,9 @@ class UnsupportedEclassEapi(results.VersionResult, results.Warning):
5630 class EapiCheck(Check):
5631 """Scan for packages with banned or deprecated EAPIs."""
5632
5633 - known_results = frozenset([DeprecatedEapi, BannedEapi, UnsupportedEclassEapi,
5634 - StableKeywordsOnTestingEapi])
5635 + known_results = frozenset(
5636 + [DeprecatedEapi, BannedEapi, UnsupportedEclassEapi, StableKeywordsOnTestingEapi]
5637 + )
5638 required_addons = (addons.eclass.EclassAddon,)
5639
5640 def __init__(self, *args, eclass_addon):
5641 @@ -297,7 +306,7 @@ class EapiCheck(Check):
5642 yield DeprecatedEapi(pkg.eapi, pkg=pkg)
5643
5644 if eapi_str in self.options.target_repo.config.eapis_testing:
5645 - stable_keywords_gen = (k for k in pkg.keywords if not k.startswith(('~', '-')))
5646 + stable_keywords_gen = (k for k in pkg.keywords if not k.startswith(("~", "-")))
5647 if stable_keywords := sorted(stable_keywords_gen):
5648 yield StableKeywordsOnTestingEapi(pkg.eapi, stable_keywords, pkg=pkg)
5649
5650 @@ -310,19 +319,19 @@ class EapiCheck(Check):
5651 class InvalidEapi(results.MetadataError, results.VersionResult):
5652 """Package's EAPI is invalid."""
5653
5654 - attr = 'eapi'
5655 + attr = "eapi"
5656
5657
5658 class InvalidSlot(results.MetadataError, results.VersionResult):
5659 """Package's SLOT is invalid."""
5660
5661 - attr = 'slot'
5662 + attr = "slot"
5663
5664
5665 class SourcingError(results.MetadataError, results.VersionResult):
5666 """Failed sourcing ebuild."""
5667
5668 - attr = 'data'
5669 + attr = "data"
5670
5671
5672 class SourcingCheck(Check):
5673 @@ -346,8 +355,9 @@ class RequiredUseDefaults(results.VersionResult, results.Warning):
5674 or modifying REQUIRED_USE.
5675 """
5676
5677 - def __init__(self, required_use, use=(), keyword=None,
5678 - profile=None, num_profiles=None, **kwargs):
5679 + def __init__(
5680 + self, required_use, use=(), keyword=None, profile=None, num_profiles=None, **kwargs
5681 + ):
5682 super().__init__(**kwargs)
5683 self.required_use = required_use
5684 self.use = tuple(use)
5685 @@ -359,40 +369,48 @@ class RequiredUseDefaults(results.VersionResult, results.Warning):
5686 def desc(self):
5687 if not self.use:
5688 if self.num_profiles is not None and self.num_profiles > 1:
5689 - num_profiles = f' ({self.num_profiles} total)'
5690 + num_profiles = f" ({self.num_profiles} total)"
5691 else:
5692 - num_profiles = ''
5693 + num_profiles = ""
5694 # collapsed version
5695 return (
5696 - f'profile: {self.profile!r}{num_profiles} '
5697 - f'failed REQUIRED_USE: {self.required_use}'
5698 + f"profile: {self.profile!r}{num_profiles} "
5699 + f"failed REQUIRED_USE: {self.required_use}"
5700 )
5701 return (
5702 - f'keyword: {self.keyword}, profile: {self.profile!r}, '
5703 + f"keyword: {self.keyword}, profile: {self.profile!r}, "
5704 f"default USE: [{', '.join(self.use)}] "
5705 - f'-- failed REQUIRED_USE: {self.required_use}'
5706 + f"-- failed REQUIRED_USE: {self.required_use}"
5707 )
5708
5709
5710 class InvalidRequiredUse(results.MetadataError, results.VersionResult):
5711 """Package's REQUIRED_USE is invalid."""
5712
5713 - attr = 'required_use'
5714 + attr = "required_use"
5715
5716
5717 class RequiredUseCheck(Check):
5718 """REQUIRED_USE validity checks."""
5719
5720 # only run the check for EAPI 4 and above
5721 - _source = (sources.RestrictionRepoSource, (
5722 - packages.PackageRestriction('eapi', values.GetAttrRestriction(
5723 - 'options.has_required_use', values.FunctionRestriction(bool))),))
5724 + _source = (
5725 + sources.RestrictionRepoSource,
5726 + (
5727 + packages.PackageRestriction(
5728 + "eapi",
5729 + values.GetAttrRestriction(
5730 + "options.has_required_use", values.FunctionRestriction(bool)
5731 + ),
5732 + ),
5733 + ),
5734 + )
5735 required_addons = (addons.UseAddon, addons.profiles.ProfileAddon)
5736 known_results = frozenset([InvalidRequiredUse, RequiredUseDefaults, UnstatedIuse])
5737
5738 def __init__(self, *args, use_addon, profile_addon):
5739 super().__init__(*args)
5740 - self.iuse_filter = use_addon.get_filter('required_use')
5741 + self.iuse_filter = use_addon.get_filter("required_use")
5742 self.profiles = profile_addon
5743
5744 def feed(self, pkg):
5745 @@ -404,15 +422,15 @@ class RequiredUseCheck(Check):
5746 # unstable profiles for unstable KEYWORDS
5747 keywords = []
5748 for keyword in pkg.sorted_keywords:
5749 - if keyword[0] != '~':
5750 + if keyword[0] != "~":
5751 keywords.append(keyword)
5752 - keywords.append('~' + keyword.lstrip('~'))
5753 + keywords.append("~" + keyword.lstrip("~"))
5754
5755 # check USE defaults (pkg IUSE defaults + profile USE) against
5756 # REQUIRED_USE for all profiles matching a pkg's KEYWORDS
5757 failures = defaultdict(list)
5758 for keyword in keywords:
5759 - for profile in sorted(self.profiles.get(keyword, ()), key=attrgetter('name')):
5760 + for profile in sorted(self.profiles.get(keyword, ()), key=attrgetter("name")):
5761 # skip packages masked by the profile
5762 if profile.visible(pkg):
5763 src = FakeConfigurable(pkg, profile)
5764 @@ -424,15 +442,15 @@ class RequiredUseCheck(Check):
5765 # report all failures with profile info in verbose mode
5766 for node, profile_info in failures.items():
5767 for use, keyword, profile in profile_info:
5768 - yield RequiredUseDefaults(
5769 - str(node), sorted(use), keyword, profile, pkg=pkg)
5770 + yield RequiredUseDefaults(str(node), sorted(use), keyword, profile, pkg=pkg)
5771 else:
5772 # only report one failure per REQUIRED_USE node in regular mode
5773 for node, profile_info in failures.items():
5774 num_profiles = len(profile_info)
5775 _use, _keyword, profile = profile_info[0]
5776 yield RequiredUseDefaults(
5777 - str(node), profile=profile, num_profiles=num_profiles, pkg=pkg)
5778 + str(node), profile=profile, num_profiles=num_profiles, pkg=pkg
5779 + )
5780
5781
5782 class UnusedLocalUse(results.PackageResult, results.Warning):
5783 @@ -445,8 +463,8 @@ class UnusedLocalUse(results.PackageResult, results.Warning):
5784 @property
5785 def desc(self):
5786 s = pluralism(self.flags)
5787 - flags = ', '.join(self.flags)
5788 - return f'unused local USE flag{s}: [ {flags} ]'
5789 + flags = ", ".join(self.flags)
5790 + return f"unused local USE flag{s}: [ {flags} ]"
5791
5792
5793 class MatchingGlobalUse(results.PackageResult, results.Warning):
5794 @@ -526,8 +544,8 @@ class MissingLocalUseDesc(results.PackageResult, results.Warning):
5795 @property
5796 def desc(self):
5797 s = pluralism(self.flags)
5798 - flags = ', '.join(self.flags)
5799 - return f'local USE flag{s} missing description{s}: [ {flags} ]'
5800 + flags = ", ".join(self.flags)
5801 + return f"local USE flag{s} missing description{s}: [ {flags} ]"
5802
5803
5804 class LocalUseCheck(Check):
5805 @@ -535,23 +553,27 @@ class LocalUseCheck(Check):
5806
5807 _source = sources.PackageRepoSource
5808 required_addons = (addons.UseAddon,)
5809 - known_results = frozenset([
5810 - UnusedLocalUse, MatchingGlobalUse, ProbableGlobalUse,
5811 - ProbableUseExpand, UnderscoreInUseFlag, UnstatedIuse,
5812 - MissingLocalUseDesc,
5813 - ])
5814 + known_results = frozenset(
5815 + [
5816 + UnusedLocalUse,
5817 + MatchingGlobalUse,
5818 + ProbableGlobalUse,
5819 + ProbableUseExpand,
5820 + UnderscoreInUseFlag,
5821 + UnstatedIuse,
5822 + MissingLocalUseDesc,
5823 + ]
5824 + )
5825
5826 def __init__(self, *args, use_addon):
5827 super().__init__(*args)
5828 repo_config = self.options.target_repo.config
5829 self.iuse_handler = use_addon
5830 - self.global_use = {
5831 - flag: desc for matcher, (flag, desc) in repo_config.use_desc}
5832 + self.global_use = {flag: desc for matcher, (flag, desc) in repo_config.use_desc}
5833
5834 self.use_expand = dict()
5835 for group in repo_config.use_expand_desc.keys():
5836 - self.use_expand[group] = {
5837 - flag for flag, desc in repo_config.use_expand_desc[group]}
5838 + self.use_expand[group] = {flag for flag, desc in repo_config.use_expand_desc[group]}
5839
5840 def feed(self, pkgs):
5841 pkg = pkgs[0]
5842 @@ -568,9 +590,9 @@ class LocalUseCheck(Check):
5843 yield MatchingGlobalUse(flag, pkg=pkg)
5844 elif ratio >= 0.75:
5845 yield ProbableGlobalUse(flag, pkg=pkg)
5846 - elif '_' in flag:
5847 + elif "_" in flag:
5848 for group, flags in self.use_expand.items():
5849 - if flag.startswith(f'{group}_'):
5850 + if flag.startswith(f"{group}_"):
5851 if flag not in flags:
5852 yield ProbableUseExpand(flag, group.upper(), pkg=pkg)
5853 break
5854 @@ -608,23 +630,32 @@ class UseFlagWithoutDeps(results.VersionResult, results.Warning):
5855 @property
5856 def desc(self):
5857 s = pluralism(self.flags)
5858 - flags = ', '.join(self.flags)
5859 - return f'special small-files USE flag{s} without effect on dependencies: [ {flags} ]'
5860 + flags = ", ".join(self.flags)
5861 + return f"special small-files USE flag{s} without effect on dependencies: [ {flags} ]"
5862
5863
5864 class UseFlagsWithoutEffectsCheck(GentooRepoCheck):
5865 """Check for USE flags without effects."""
5866
5867 - known_results = frozenset({
5868 - UseFlagWithoutDeps,
5869 - })
5870 + known_results = frozenset(
5871 + {
5872 + UseFlagWithoutDeps,
5873 + }
5874 + )
5875
5876 - warn_use_small_files = frozenset({
5877 - 'ipv6', 'logrotate', 'unicode',
5878 - 'bash-completion', 'fish-completion', 'zsh-completion', 'vim-syntax',
5879 - # TODO: enable those one day
5880 - # 'systemd',
5881 - })
5882 + warn_use_small_files = frozenset(
5883 + {
5884 + "ipv6",
5885 + "logrotate",
5886 + "unicode",
5887 + "bash-completion",
5888 + "fish-completion",
5889 + "zsh-completion",
5890 + "vim-syntax",
5891 + # TODO: enable those one day
5892 + # 'systemd',
5893 + }
5894 + )
5895
5896 def feed(self, pkg):
5897 used_flags = set(pkg.local_use)
5898 @@ -632,15 +663,18 @@ class UseFlagsWithoutEffectsCheck(GentooRepoCheck):
5899 deps = getattr(pkg, attr.lower())
5900
5901 use_values = set()
5902 - use_values.update(itertools.chain.from_iterable(
5903 - atom.use or ()
5904 - for atom in iflatten_instance(deps, atom_cls)
5905 - ))
5906 - use_values.update(itertools.chain.from_iterable(
5907 - atom.restriction.vals
5908 - for atom in iflatten_instance(deps, packages.Conditional)
5909 - if isinstance(atom, packages.Conditional) and atom.attr == 'use'
5910 - ))
5911 + use_values.update(
5912 + itertools.chain.from_iterable(
5913 + atom.use or () for atom in iflatten_instance(deps, atom_cls)
5914 + )
5915 + )
5916 + use_values.update(
5917 + itertools.chain.from_iterable(
5918 + atom.restriction.vals
5919 + for atom in iflatten_instance(deps, packages.Conditional)
5920 + if isinstance(atom, packages.Conditional) and atom.attr == "use"
5921 + )
5922 + )
5923 for check_use in self.warn_use_small_files:
5924 if any(check_use in use for use in use_values):
5925 used_flags.add(check_use)
5926 @@ -649,6 +683,7 @@ class UseFlagsWithoutEffectsCheck(GentooRepoCheck):
5927 if flags:
5928 yield UseFlagWithoutDeps(flags, pkg=pkg)
5929
5930 +
5931 class MissingSlotDep(results.VersionResult, results.Warning):
5932 """Missing slot value in dependencies.
5933
5934 @@ -672,18 +707,22 @@ class MissingSlotDep(results.VersionResult, results.Warning):
5935
5936 @property
5937 def desc(self):
5938 - return (
5939 - f"{self.dep!r} matches more than one slot: "
5940 - f"[ {', '.join(self.dep_slots)} ]")
5941 + return f"{self.dep!r} matches more than one slot: " f"[ {', '.join(self.dep_slots)} ]"
5942
5943
5944 class MissingSlotDepCheck(Check):
5945 """Check for missing slot dependencies."""
5946
5947 # only run the check for EAPI 5 and above
5948 - _source = (sources.RestrictionRepoSource, (
5949 - packages.PackageRestriction('eapi', values.GetAttrRestriction(
5950 - 'options.sub_slotting', values.FunctionRestriction(bool))),))
5951 + _source = (
5952 + sources.RestrictionRepoSource,
5953 + (
5954 + packages.PackageRestriction(
5955 + "eapi",
5956 + values.GetAttrRestriction("options.sub_slotting", values.FunctionRestriction(bool)),
5957 + ),
5958 + ),
5959 + )
5960 required_addons = (addons.UseAddon,)
5961 known_results = frozenset([MissingSlotDep])
5962
5963 @@ -696,8 +735,11 @@ class MissingSlotDepCheck(Check):
5964 depend, _ = self.iuse_filter((atom_cls,), pkg, pkg.depend)
5965
5966 # skip deps that are blockers or have explicit slots/slot operators
5967 - for dep in (x for x in set(rdepend).intersection(depend) if not
5968 - (x.blocks or x.slot is not None or x.slot_operator is not None)):
5969 + for dep in (
5970 + x
5971 + for x in set(rdepend).intersection(depend)
5972 + if not (x.blocks or x.slot is not None or x.slot_operator is not None)
5973 + ):
5974 dep_slots = {x.slot for x in pkg.repo.itermatch(dep.no_usedeps)}
5975 if len(dep_slots) > 1:
5976 yield MissingSlotDep(str(dep), sorted(dep_slots), pkg=pkg)
5977 @@ -738,10 +780,10 @@ class MissingUseDepDefault(results.VersionResult, results.Warning):
5978 @property
5979 def desc(self):
5980 s = pluralism(self.pkgs)
5981 - pkgs = ', '.join(self.pkgs)
5982 + pkgs = ", ".join(self.pkgs)
5983 return (
5984 f'{self.attr}="{self.atom}": USE flag {self.flag!r} missing from '
5985 - f'package{s}: [ {pkgs} ]'
5986 + f"package{s}: [ {pkgs} ]"
5987 )
5988
5989
5990 @@ -755,7 +797,7 @@ class DeprecatedDep(results.VersionResult, results.Warning):
5991
5992 @property
5993 def desc(self):
5994 - ies = pluralism(self.atoms, singular='y', plural='ies')
5995 + ies = pluralism(self.atoms, singular="y", plural="ies")
5996 return f"{self.attr}: deprecated dependenc{ies}: {' '.join(self.atoms)}"
5997
5998
5999 @@ -776,31 +818,31 @@ class BadDependency(results.VersionResult, results.Error):
6000 class InvalidDepend(results.MetadataError, results.VersionResult):
6001 """Package has invalid DEPEND."""
6002
6003 - attr = 'depend'
6004 + attr = "depend"
6005
6006
6007 class InvalidRdepend(results.MetadataError, results.VersionResult):
6008 """Package has invalid RDEPEND."""
6009
6010 - attr = 'rdepend'
6011 + attr = "rdepend"
6012
6013
6014 class InvalidPdepend(results.MetadataError, results.VersionResult):
6015 """Package has invalid PDEPEND."""
6016
6017 - attr = 'pdepend'
6018 + attr = "pdepend"
6019
6020
6021 class InvalidBdepend(results.MetadataError, results.VersionResult):
6022 """Package has invalid BDEPEND."""
6023
6024 - attr = 'bdepend'
6025 + attr = "bdepend"
6026
6027
6028 class InvalidIdepend(results.MetadataError, results.VersionResult):
6029 """Package has invalid IDEPEND."""
6030
6031 - attr = 'idepend'
6032 + attr = "idepend"
6033
6034
6035 class MisplacedWeakBlocker(results.Warning, results.VersionResult):
6036 @@ -821,25 +863,35 @@ class MisplacedWeakBlocker(results.Warning, results.VersionResult):
6037
6038 @property
6039 def desc(self):
6040 - return f'{self.attr}: misplaced weak blocker: {self.atom}'
6041 + return f"{self.attr}: misplaced weak blocker: {self.atom}"
6042
6043
6044 class DependencyCheck(Check):
6045 """Verify dependency attributes (e.g. RDEPEND)."""
6046
6047 required_addons = (addons.UseAddon,)
6048 - known_results = frozenset([
6049 - BadDependency, MissingPackageRevision, MissingUseDepDefault,
6050 - UnstatedIuse, DeprecatedDep, InvalidDepend, InvalidRdepend,
6051 - InvalidPdepend, InvalidBdepend, InvalidIdepend, MisplacedWeakBlocker,
6052 - ])
6053 + known_results = frozenset(
6054 + [
6055 + BadDependency,
6056 + MissingPackageRevision,
6057 + MissingUseDepDefault,
6058 + UnstatedIuse,
6059 + DeprecatedDep,
6060 + InvalidDepend,
6061 + InvalidRdepend,
6062 + InvalidPdepend,
6063 + InvalidBdepend,
6064 + InvalidIdepend,
6065 + MisplacedWeakBlocker,
6066 + ]
6067 + )
6068
6069 def __init__(self, *args, use_addon):
6070 super().__init__(*args)
6071 self.deprecated = self.options.target_repo.deprecated.match
6072 self.iuse_filter = use_addon.get_filter()
6073 - self.conditional_ops = {'?', '='}
6074 - self.use_defaults = {'(+)', '(-)'}
6075 + self.conditional_ops = {"?", "="}
6076 + self.use_defaults = {"(+)", "(-)"}
6077
6078 def _check_use_deps(self, attr, atom):
6079 """Check dependencies for missing USE dep defaults."""
6080 @@ -849,7 +901,7 @@ class DependencyCheck(Check):
6081 x = x[:-1]
6082 if x[-3:] in self.use_defaults:
6083 continue
6084 - stripped_use.append(x.lstrip('!-'))
6085 + stripped_use.append(x.lstrip("!-"))
6086 if stripped_use:
6087 missing_use_deps = defaultdict(set)
6088 for pkg in self.options.search_repo.match(atom.no_usedeps):
6089 @@ -868,12 +920,13 @@ class DependencyCheck(Check):
6090 try:
6091 deps = getattr(pkg, attr)
6092 except MetadataException as e:
6093 - cls = globals()[f'Invalid{attr.capitalize()}']
6094 + cls = globals()[f"Invalid{attr.capitalize()}"]
6095 yield cls(attr, e.msg(), pkg=pkg)
6096 continue
6097
6098 nodes, unstated = self.iuse_filter(
6099 - (atom_cls, boolean.OrRestriction), pkg, deps, attr=attr)
6100 + (atom_cls, boolean.OrRestriction), pkg, deps, attr=attr
6101 + )
6102 yield from unstated
6103
6104 for node in nodes:
6105 @@ -892,9 +945,10 @@ class DependencyCheck(Check):
6106 if all(self.deprecated(x.versioned_atom) for x in pkgs):
6107 deprecated[attr].add(atom)
6108
6109 - if in_or_restriction and atom.slot_operator == '=':
6110 + if in_or_restriction and atom.slot_operator == "=":
6111 yield BadDependency(
6112 - attr, atom, '= slot operator used inside || block', pkg=pkg)
6113 + attr, atom, "= slot operator used inside || block", pkg=pkg
6114 + )
6115
6116 if pkg.eapi.options.has_use_dep_defaults and atom.use is not None:
6117 missing_use_deps = self._check_use_deps(attr, atom)
6118 @@ -902,22 +956,23 @@ class DependencyCheck(Check):
6119 pkgs = (x.cpvstr for x in sorted(atoms))
6120 yield MissingUseDepDefault(attr, str(atom), use, pkgs, pkg=pkg)
6121
6122 - if atom.op == '=' and not atom.revision:
6123 + if atom.op == "=" and not atom.revision:
6124 yield MissingPackageRevision(attr, str(atom), pkg=pkg)
6125
6126 if atom.blocks:
6127 if atom.match(pkg):
6128 yield BadDependency(attr, atom, "package blocks itself", pkg=pkg)
6129 - elif atom.slot_operator == '=':
6130 + elif atom.slot_operator == "=":
6131 yield BadDependency(
6132 - attr, atom, '= slot operator used in blocker', pkg=pkg)
6133 + attr, atom, "= slot operator used in blocker", pkg=pkg
6134 + )
6135 elif not atom.blocks_strongly:
6136 weak_blocks[attr].add(atom)
6137
6138 - for attr in ('depend', 'bdepend'):
6139 - weak_blocks[attr].difference_update(weak_blocks['rdepend'])
6140 - weak_blocks['idepend'].difference_update(weak_blocks['rdepend'], weak_blocks['depend'])
6141 - for attr in ('depend', 'bdepend', 'idepend', 'pdepend'):
6142 + for attr in ("depend", "bdepend"):
6143 + weak_blocks[attr].difference_update(weak_blocks["rdepend"])
6144 + weak_blocks["idepend"].difference_update(weak_blocks["rdepend"], weak_blocks["depend"])
6145 + for attr in ("depend", "bdepend", "idepend", "pdepend"):
6146 for atom in weak_blocks[attr]:
6147 yield MisplacedWeakBlocker(attr, atom, pkg=pkg)
6148
6149 @@ -941,7 +996,7 @@ class OutdatedBlocker(results.VersionResult, results.Info):
6150 def desc(self):
6151 return (
6152 f'outdated blocker {self.attr}="{self.atom}": '
6153 - f'last match removed {self.age} years ago'
6154 + f"last match removed {self.age} years ago"
6155 )
6156
6157
6158 @@ -961,10 +1016,7 @@ class NonexistentBlocker(results.VersionResult, results.Warning):
6159
6160 @property
6161 def desc(self):
6162 - return (
6163 - f'nonexistent blocker {self.attr}="{self.atom}": '
6164 - 'no matches in repo history'
6165 - )
6166 + return f'nonexistent blocker {self.attr}="{self.atom}": ' "no matches in repo history"
6167
6168
6169 class OutdatedBlockersCheck(Check):
6170 @@ -985,7 +1037,7 @@ class OutdatedBlockersCheck(Check):
6171 for attr in sorted(x.lower() for x in pkg.eapi.dep_keys):
6172 blockers = (x for x in iflatten_instance(getattr(pkg, attr), atom_cls) if x.blocks)
6173 for atom in blockers:
6174 - if atom.op == '=*':
6175 + if atom.op == "=*":
6176 atom_str = f"={atom.cpvstr}*"
6177 else:
6178 atom_str = atom.op + atom.cpvstr
6179 @@ -1084,7 +1136,7 @@ class VirtualKeywordsUpdate(results.VersionResult, results.Info):
6180 @property
6181 def desc(self):
6182 s = pluralism(self.keywords)
6183 - keywords = ', '.join(self.keywords)
6184 + keywords = ", ".join(self.keywords)
6185 return f"KEYWORDS update{s} available: {keywords}"
6186
6187
6188 @@ -1092,10 +1144,16 @@ class KeywordsCheck(Check):
6189 """Check package keywords for sanity; empty keywords, and -* are flagged."""
6190
6191 required_addons = (addons.UseAddon, addons.KeywordsAddon)
6192 - known_results = frozenset([
6193 - BadKeywords, UnknownKeywords, OverlappingKeywords, DuplicateKeywords,
6194 - UnsortedKeywords, VirtualKeywordsUpdate,
6195 - ])
6196 + known_results = frozenset(
6197 + [
6198 + BadKeywords,
6199 + UnknownKeywords,
6200 + OverlappingKeywords,
6201 + DuplicateKeywords,
6202 + UnsortedKeywords,
6203 + VirtualKeywordsUpdate,
6204 + ]
6205 + )
6206
6207 def __init__(self, *args, use_addon, keywords_addon):
6208 super().__init__(*args)
6209 @@ -1103,7 +1161,7 @@ class KeywordsCheck(Check):
6210 self.keywords = keywords_addon
6211
6212 def feed(self, pkg):
6213 - if pkg.keywords == ('-*',):
6214 + if pkg.keywords == ("-*",):
6215 yield BadKeywords(pkg)
6216 else:
6217 # check for unknown keywords
6218 @@ -1115,11 +1173,12 @@ class KeywordsCheck(Check):
6219 yield UnknownKeywords(sorted(unknown), pkg=pkg)
6220
6221 # check for overlapping keywords
6222 - unstable = {x[1:] for x in pkg.keywords if x[0] == '~'}
6223 - stable = {x for x in pkg.keywords if x[0] != '~'}
6224 + unstable = {x[1:] for x in pkg.keywords if x[0] == "~"}
6225 + stable = {x for x in pkg.keywords if x[0] != "~"}
6226 if overlapping := unstable & stable:
6227 - keywords = ', '.join(map(
6228 - str, sorted(zip(overlapping, ('~' + x for x in overlapping)))))
6229 + keywords = ", ".join(
6230 + map(str, sorted(zip(overlapping, ("~" + x for x in overlapping))))
6231 + )
6232 yield OverlappingKeywords(keywords, pkg=pkg)
6233
6234 # check for duplicate keywords
6235 @@ -1139,19 +1198,21 @@ class KeywordsCheck(Check):
6236 yield UnsortedKeywords(pkg.keywords, pkg=pkg)
6237 else:
6238 yield UnsortedKeywords(
6239 - pkg.keywords, sorted_keywords=pkg.sorted_keywords, pkg=pkg)
6240 + pkg.keywords, sorted_keywords=pkg.sorted_keywords, pkg=pkg
6241 + )
6242
6243 - if pkg.category == 'virtual':
6244 + if pkg.category == "virtual":
6245 dep_keywords = defaultdict(set)
6246 rdepend, _ = self.iuse_filter((atom_cls,), pkg, pkg.rdepend)
6247 for dep in set(rdepend):
6248 for p in self.options.search_repo.match(dep.no_usedeps):
6249 dep_keywords[dep].update(
6250 - x for x in p.keywords if x.lstrip('~') in self.keywords.arches)
6251 + x for x in p.keywords if x.lstrip("~") in self.keywords.arches
6252 + )
6253 if dep_keywords:
6254 dep_keywords = set.intersection(*dep_keywords.values())
6255 pkg_keywords = set(pkg.keywords)
6256 - pkg_keywords.update(f'~{x}' for x in pkg.keywords if x[0] != '~')
6257 + pkg_keywords.update(f"~{x}" for x in pkg.keywords if x[0] != "~")
6258 if keywords := dep_keywords - pkg_keywords:
6259 yield VirtualKeywordsUpdate(sort_keywords(keywords), pkg=pkg)
6260
6261 @@ -1166,8 +1227,8 @@ class MissingUri(results.VersionResult, results.Warning):
6262 @property
6263 def desc(self):
6264 s = pluralism(self.filenames)
6265 - filenames = ', '.join(map(repr, self.filenames))
6266 - return f'unfetchable file{s}: {filenames}'
6267 + filenames = ", ".join(map(repr, self.filenames))
6268 + return f"unfetchable file{s}: {filenames}"
6269
6270
6271 class UnknownMirror(results.VersionResult, results.Error):
6272 @@ -1180,7 +1241,7 @@ class UnknownMirror(results.VersionResult, results.Error):
6273
6274 @property
6275 def desc(self):
6276 - return f'unknown mirror {self.mirror!r} from URI {self.uri!r}'
6277 + return f"unknown mirror {self.mirror!r} from URI {self.uri!r}"
6278
6279
6280 class BadProtocol(results.VersionResult, results.Error):
6281 @@ -1197,8 +1258,8 @@ class BadProtocol(results.VersionResult, results.Error):
6282 @property
6283 def desc(self):
6284 s = pluralism(self.uris)
6285 - uris = ', '.join(map(repr, self.uris))
6286 - return f'bad protocol {self.protocol!r} in URI{s}: {uris}'
6287 + uris = ", ".join(map(repr, self.uris))
6288 + return f"bad protocol {self.protocol!r} in URI{s}: {uris}"
6289
6290
6291 class RedundantUriRename(results.VersionResult, results.Style):
6292 @@ -1226,8 +1287,8 @@ class BadFilename(results.VersionResult, results.Warning):
6293 @property
6294 def desc(self):
6295 s = pluralism(self.filenames)
6296 - filenames = ', '.join(self.filenames)
6297 - return f'bad filename{s}: [ {filenames} ]'
6298 + filenames = ", ".join(self.filenames)
6299 + return f"bad filename{s}: [ {filenames} ]"
6300
6301
6302 class TarballAvailable(results.VersionResult, results.Style):
6303 @@ -1244,14 +1305,14 @@ class TarballAvailable(results.VersionResult, results.Style):
6304 @property
6305 def desc(self):
6306 s = pluralism(self.uris)
6307 - uris = ' '.join(self.uris)
6308 - return f'zip archive{s} used when tarball available: [ {uris} ]'
6309 + uris = " ".join(self.uris)
6310 + return f"zip archive{s} used when tarball available: [ {uris} ]"
6311
6312
6313 class InvalidSrcUri(results.MetadataError, results.VersionResult):
6314 """Package's SRC_URI is invalid."""
6315
6316 - attr = 'fetchables'
6317 + attr = "fetchables"
6318
6319
6320 class SrcUriCheck(Check):
6321 @@ -1262,19 +1323,28 @@ class SrcUriCheck(Check):
6322 """
6323
6324 required_addons = (addons.UseAddon,)
6325 - known_results = frozenset([
6326 - BadFilename, BadProtocol, MissingUri, InvalidSrcUri,
6327 - RedundantUriRename, TarballAvailable, UnknownMirror, UnstatedIuse,
6328 - ])
6329 + known_results = frozenset(
6330 + [
6331 + BadFilename,
6332 + BadProtocol,
6333 + MissingUri,
6334 + InvalidSrcUri,
6335 + RedundantUriRename,
6336 + TarballAvailable,
6337 + UnknownMirror,
6338 + UnstatedIuse,
6339 + ]
6340 + )
6341
6342 valid_protos = frozenset(["http", "https", "ftp"])
6343
6344 def __init__(self, *args, use_addon):
6345 super().__init__(*args)
6346 - self.iuse_filter = use_addon.get_filter('fetchables')
6347 + self.iuse_filter = use_addon.get_filter("fetchables")
6348 self.zip_to_tar_re = re.compile(
6349 - r'https?://(github\.com/.*?/.*?/archive/.+\.zip|'
6350 - r'gitlab\.com/.*?/.*?/-/archive/.+\.zip)')
6351 + r"https?://(github\.com/.*?/.*?/archive/.+\.zip|"
6352 + r"gitlab\.com/.*?/.*?/-/archive/.+\.zip)"
6353 + )
6354
6355 def feed(self, pkg):
6356 lacks_uri = set()
6357 @@ -1283,13 +1353,17 @@ class SrcUriCheck(Check):
6358 bad_filenames = set()
6359 tarball_available = set()
6360
6361 - report_uris = LogMap('pkgcore.log.logger.info', partial(RedundantUriRename, pkg))
6362 + report_uris = LogMap("pkgcore.log.logger.info", partial(RedundantUriRename, pkg))
6363 with LogReports(report_uris) as log_reports:
6364 fetchables, unstated = self.iuse_filter(
6365 - (fetchable,), pkg,
6366 + (fetchable,),
6367 + pkg,
6368 pkg.generate_fetchables(
6369 - allow_missing_checksums=True, ignore_unknown_mirrors=True,
6370 - skip_default_mirrors=True))
6371 + allow_missing_checksums=True,
6372 + ignore_unknown_mirrors=True,
6373 + skip_default_mirrors=True,
6374 + ),
6375 + )
6376 yield from log_reports
6377
6378 yield from unstated
6379 @@ -1300,7 +1374,8 @@ class SrcUriCheck(Check):
6380
6381 mirrors = f_inst.uri.visit_mirrors(treat_default_as_mirror=False)
6382 unknown_mirrors = [
6383 - (m, sub_uri) for m, sub_uri in mirrors if isinstance(m, unknown_mirror)]
6384 + (m, sub_uri) for m, sub_uri in mirrors if isinstance(m, unknown_mirror)
6385 + ]
6386 for mirror, sub_uri in unknown_mirrors:
6387 uri = f"{mirror}/{sub_uri}"
6388 yield UnknownMirror(mirror.mirror_name, uri, pkg=pkg)
6389 @@ -1311,12 +1386,12 @@ class SrcUriCheck(Check):
6390 PN = re.escape(pkg.PN)
6391 PV = re.escape(pkg.PV)
6392 exts = pkg.eapi.archive_exts_regex_pattern
6393 - bad_filenames_re = rf'^({PN}|v?{PV}|[0-9a-f]{{40}}){exts}$'
6394 + bad_filenames_re = rf"^({PN}|v?{PV}|[0-9a-f]{{40}}){exts}$"
6395 if re.match(bad_filenames_re, f_inst.filename):
6396 bad_filenames.add(f_inst.filename)
6397
6398 restricts = set().union(*(x.vals for x in restrictions if not x.negate))
6399 - if not f_inst.uri and 'fetch' not in pkg.restrict.evaluate_depset(restricts):
6400 + if not f_inst.uri and "fetch" not in pkg.restrict.evaluate_depset(restricts):
6401 lacks_uri.add(f_inst.filename)
6402 else:
6403 bad_protocols = defaultdict(set)
6404 @@ -1349,8 +1424,8 @@ class BadDescription(results.VersionResult, results.Style):
6405
6406 @property
6407 def desc(self):
6408 - pkg_desc = f'DESCRIPTION="{self.pkg_desc}" ' if self.pkg_desc else ''
6409 - return f'{pkg_desc}{self.msg}'
6410 + pkg_desc = f'DESCRIPTION="{self.pkg_desc}" ' if self.pkg_desc else ""
6411 + return f"{pkg_desc}{self.msg}"
6412
6413
6414 class DescriptionCheck(Check):
6415 @@ -1403,31 +1478,33 @@ class HomepageCheck(Check):
6416 known_results = frozenset([BadHomepage])
6417
6418 # categories for ebuilds that should lack HOMEPAGE
6419 - missing_categories = frozenset(['virtual', 'acct-group', 'acct-user'])
6420 + missing_categories = frozenset(["virtual", "acct-group", "acct-user"])
6421 # generic sites that shouldn't be used for HOMEPAGE
6422 - generic_sites = frozenset(['https://www.gentoo.org', 'https://gentoo.org'])
6423 + generic_sites = frozenset(["https://www.gentoo.org", "https://gentoo.org"])
6424
6425 def feed(self, pkg):
6426 if not pkg.homepage:
6427 if pkg.category not in self.missing_categories:
6428 - yield BadHomepage('HOMEPAGE empty/unset', pkg=pkg)
6429 + yield BadHomepage("HOMEPAGE empty/unset", pkg=pkg)
6430 else:
6431 if pkg.category in self.missing_categories:
6432 yield BadHomepage(
6433 - f'HOMEPAGE should be undefined for {pkg.category!r} packages', pkg=pkg)
6434 + f"HOMEPAGE should be undefined for {pkg.category!r} packages", pkg=pkg
6435 + )
6436 else:
6437 for homepage in pkg.homepage:
6438 - if homepage.rstrip('/') in self.generic_sites:
6439 - yield BadHomepage(f'unspecific HOMEPAGE: {homepage}', pkg=pkg)
6440 + if homepage.rstrip("/") in self.generic_sites:
6441 + yield BadHomepage(f"unspecific HOMEPAGE: {homepage}", pkg=pkg)
6442 else:
6443 - i = homepage.find('://')
6444 + i = homepage.find("://")
6445 if i == -1:
6446 - yield BadHomepage(f'HOMEPAGE={homepage!r} lacks protocol', pkg=pkg)
6447 + yield BadHomepage(f"HOMEPAGE={homepage!r} lacks protocol", pkg=pkg)
6448 elif homepage[:i] not in SrcUriCheck.valid_protos:
6449 yield BadHomepage(
6450 - f'HOMEPAGE={homepage!r} uses unsupported '
6451 - f'protocol {homepage[:i]!r}',
6452 - pkg=pkg)
6453 + f"HOMEPAGE={homepage!r} uses unsupported "
6454 + f"protocol {homepage[:i]!r}",
6455 + pkg=pkg,
6456 + )
6457
6458
6459 class UnknownRestrict(results.VersionResult, results.Warning):
6460 @@ -1439,7 +1516,7 @@ class UnknownRestrict(results.VersionResult, results.Warning):
6461
6462 @property
6463 def desc(self):
6464 - restricts = ' '.join(self.restricts)
6465 + restricts = " ".join(self.restricts)
6466 return f'unknown RESTRICT="{restricts}"'
6467
6468
6469 @@ -1452,20 +1529,20 @@ class UnknownProperties(results.VersionResult, results.Warning):
6470
6471 @property
6472 def desc(self):
6473 - properties = ' '.join(self.properties)
6474 + properties = " ".join(self.properties)
6475 return f'unknown PROPERTIES="{properties}"'
6476
6477
6478 class InvalidRestrict(results.MetadataError, results.VersionResult):
6479 """Package's RESTRICT is invalid."""
6480
6481 - attr = 'restrict'
6482 + attr = "restrict"
6483
6484
6485 class InvalidProperties(results.MetadataError, results.VersionResult):
6486 """Package's PROPERTIES is invalid."""
6487
6488 - attr = 'properties'
6489 + attr = "properties"
6490
6491
6492 class _RestrictPropertiesCheck(Check):
6493 @@ -1482,7 +1559,7 @@ class _RestrictPropertiesCheck(Check):
6494 # pull allowed values from a repo and its masters
6495 allowed = []
6496 for repo in self.options.target_repo.trees:
6497 - allowed.extend(getattr(repo.config, f'{self._attr}_allowed'))
6498 + allowed.extend(getattr(repo.config, f"{self._attr}_allowed"))
6499 self.allowed = frozenset(allowed)
6500
6501 def feed(self, pkg):
6502 @@ -1499,7 +1576,7 @@ class RestrictCheck(_RestrictPropertiesCheck):
6503 """RESTRICT related checks."""
6504
6505 known_results = frozenset([UnknownRestrict, UnstatedIuse, InvalidRestrict])
6506 - _attr = 'restrict'
6507 + _attr = "restrict"
6508 _unknown_result_cls = UnknownRestrict
6509
6510
6511 @@ -1507,7 +1584,7 @@ class PropertiesCheck(_RestrictPropertiesCheck):
6512 """PROPERTIES related checks."""
6513
6514 known_results = frozenset([UnknownProperties, UnstatedIuse, InvalidProperties])
6515 - _attr = 'properties'
6516 + _attr = "properties"
6517 _unknown_result_cls = UnknownProperties
6518
6519
6520 @@ -1536,15 +1613,16 @@ class RestrictTestCheck(Check):
6521 super().__init__(*args)
6522 # create "!test? ( test )" conditional to match restrictions against
6523 self.test_restrict = packages.Conditional(
6524 - 'use', values.ContainmentMatch2('test', negate=True), ['test'])
6525 + "use", values.ContainmentMatch2("test", negate=True), ["test"]
6526 + )
6527
6528 def feed(self, pkg):
6529 - if 'test' not in pkg.iuse:
6530 + if "test" not in pkg.iuse:
6531 return
6532
6533 # conditional is unnecessary if it already exists or is in unconditional form
6534 for r in pkg.restrict:
6535 - if r in ('test', self.test_restrict):
6536 + if r in ("test", self.test_restrict):
6537 return
6538
6539 yield MissingTestRestrict(pkg=pkg)
6540 @@ -1567,7 +1645,7 @@ class MissingUnpackerDep(results.VersionResult, results.Warning):
6541 def desc(self):
6542 # determine proper dep type from pkg EAPI
6543 eapi_obj = get_eapi(self.eapi)
6544 - dep_type = 'BDEPEND' if 'BDEPEND' in eapi_obj.metadata_keys else 'DEPEND'
6545 + dep_type = "BDEPEND" if "BDEPEND" in eapi_obj.metadata_keys else "DEPEND"
6546
6547 if len(self.unpackers) == 1:
6548 dep = self.unpackers[0]
6549 @@ -1575,7 +1653,7 @@ class MissingUnpackerDep(results.VersionResult, results.Warning):
6550 dep = f"|| ( {' '.join(self.unpackers)} )"
6551
6552 s = pluralism(self.filenames)
6553 - filenames = ', '.join(self.filenames)
6554 + filenames = ", ".join(self.filenames)
6555 return f'missing {dep_type}="{dep}" for SRC_URI archive{s}: [ {filenames} ]'
6556
6557
6558 @@ -1585,26 +1663,30 @@ class MissingUnpackerDepCheck(Check):
6559 known_results = frozenset([MissingUnpackerDep])
6560 required_addons = (addons.UseAddon,)
6561
6562 - non_system_unpackers = ImmutableDict({
6563 - '.zip': frozenset(['app-arch/unzip']),
6564 - '.7z': frozenset(['app-arch/p7zip']),
6565 - '.rar': frozenset(['app-arch/rar', 'app-arch/unrar']),
6566 - '.lha': frozenset(['app-arch/lha']),
6567 - '.lzh': frozenset(['app-arch/lha']),
6568 - })
6569 + non_system_unpackers = ImmutableDict(
6570 + {
6571 + ".zip": frozenset(["app-arch/unzip"]),
6572 + ".7z": frozenset(["app-arch/p7zip"]),
6573 + ".rar": frozenset(["app-arch/rar", "app-arch/unrar"]),
6574 + ".lha": frozenset(["app-arch/lha"]),
6575 + ".lzh": frozenset(["app-arch/lha"]),
6576 + }
6577 + )
6578
6579 def __init__(self, *args, use_addon):
6580 super().__init__(*args)
6581 self.dep_filter = use_addon.get_filter()
6582 - self.fetch_filter = use_addon.get_filter('fetchables')
6583 + self.fetch_filter = use_addon.get_filter("fetchables")
6584
6585 def feed(self, pkg):
6586 # ignore conditionals
6587 fetchables, _ = self.fetch_filter(
6588 - (fetchable,), pkg,
6589 + (fetchable,),
6590 + pkg,
6591 pkg.generate_fetchables(
6592 - allow_missing_checksums=True, ignore_unknown_mirrors=True,
6593 - skip_default_mirrors=True))
6594 + allow_missing_checksums=True, ignore_unknown_mirrors=True, skip_default_mirrors=True
6595 + ),
6596 + )
6597
6598 missing_unpackers = defaultdict(set)
6599
6600 @@ -1616,7 +1698,7 @@ class MissingUnpackerDepCheck(Check):
6601
6602 # toss all the potentially missing unpackers that properly include deps
6603 if missing_unpackers:
6604 - for dep_type in ('bdepend', 'depend'):
6605 + for dep_type in ("bdepend", "depend"):
6606 deps, _ = self.dep_filter((atom_cls,), pkg, getattr(pkg, dep_type))
6607 deps = {x.key for x in deps}
6608 for unpackers in list(missing_unpackers.keys()):
6609 @@ -1624,8 +1706,7 @@ class MissingUnpackerDepCheck(Check):
6610 missing_unpackers.pop(unpackers, None)
6611
6612 for unpackers, filenames in missing_unpackers.items():
6613 - yield MissingUnpackerDep(
6614 - str(pkg.eapi), sorted(filenames), sorted(unpackers), pkg=pkg)
6615 + yield MissingUnpackerDep(str(pkg.eapi), sorted(filenames), sorted(unpackers), pkg=pkg)
6616
6617
6618 class VirtualWithSingleProvider(results.PackageResult, results.Warning):
6619 @@ -1644,32 +1725,31 @@ class VirtualWithSingleProvider(results.PackageResult, results.Warning):
6620
6621 @property
6622 def desc(self):
6623 - return f'virtual package with a single provider: {self.provider}'
6624 + return f"virtual package with a single provider: {self.provider}"
6625
6626
6627 class VirtualWithBdepend(results.VersionResult, results.Warning):
6628 """Virtual package with a BDEPEND defined."""
6629
6630 - desc = 'virtual package with a BDEPEND defined'
6631 + desc = "virtual package with a BDEPEND defined"
6632
6633
6634 class VirtualWithDepend(results.VersionResult, results.Warning):
6635 """Virtual package with a BDEPEND defined."""
6636
6637 - desc = 'virtual package with a DEPEND defined'
6638 + desc = "virtual package with a DEPEND defined"
6639
6640
6641 class VirtualProvidersCheck(Check):
6642 """Check providers of virtual packages."""
6643
6644 - _restricted_source = (sources.RestrictionRepoSource, (restricts.CategoryDep('virtual'), ))
6645 - _source = (sources.PackageRepoSource, (), (('source', _restricted_source),))
6646 - known_results = frozenset([VirtualWithSingleProvider,
6647 - VirtualWithBdepend, VirtualWithDepend])
6648 + _restricted_source = (sources.RestrictionRepoSource, (restricts.CategoryDep("virtual"),))
6649 + _source = (sources.PackageRepoSource, (), (("source", _restricted_source),))
6650 + known_results = frozenset([VirtualWithSingleProvider, VirtualWithBdepend, VirtualWithDepend])
6651
6652 useless_depends = (
6653 - ('depend', VirtualWithDepend),
6654 - ('bdepend', VirtualWithBdepend),
6655 + ("depend", VirtualWithDepend),
6656 + ("bdepend", VirtualWithBdepend),
6657 )
6658
6659 def __init__(self, options, **kwargs):
6660 @@ -1678,10 +1758,13 @@ class VirtualProvidersCheck(Check):
6661 self.deprecated = self.options.target_repo.deprecated
6662
6663 def pkg_has_conditional_exception(self, pkgs):
6664 - return any(use.startswith(('elibc', 'kernel'))
6665 + return any(
6666 + use.startswith(("elibc", "kernel"))
6667 for pkg in pkgs
6668 for dep in iflatten_instance(pkg.rdepend, (atom_cls, packages.Conditional))
6669 - if isinstance(dep, packages.Conditional) and dep.attr == 'use' and isinstance(dep.restriction, values.ContainmentMatch)
6670 + if isinstance(dep, packages.Conditional)
6671 + and dep.attr == "use"
6672 + and isinstance(dep.restriction, values.ContainmentMatch)
6673 for use in dep.restriction.vals
6674 )
6675
6676 @@ -1692,15 +1775,10 @@ class VirtualProvidersCheck(Check):
6677 yield cls(pkg=pkg)
6678
6679 if not any(self.deprecated.match(pkg) for pkg in pkgs):
6680 - pkgs_rdepends = tuple(
6681 - tuple(iflatten_instance(pkg.rdepend, atom_cls))
6682 - for pkg in pkgs
6683 - )
6684 + pkgs_rdepends = tuple(tuple(iflatten_instance(pkg.rdepend, atom_cls)) for pkg in pkgs)
6685 if max(map(len, pkgs_rdepends)) == 1:
6686 unversioned_rdepends = {
6687 - deps[0].unversioned_atom
6688 - for deps in pkgs_rdepends
6689 - if len(deps) == 1
6690 + deps[0].unversioned_atom for deps in pkgs_rdepends if len(deps) == 1
6691 }
6692 if len(unversioned_rdepends) == 1 and not self.pkg_has_conditional_exception(pkgs):
6693 yield VirtualWithSingleProvider(unversioned_rdepends.pop(), pkg=pkgs[0])
6694
6695 diff --git a/src/pkgcheck/checks/metadata_xml.py b/src/pkgcheck/checks/metadata_xml.py
6696 index 2182585b..0fcc31ac 100644
6697 --- a/src/pkgcheck/checks/metadata_xml.py
6698 +++ b/src/pkgcheck/checks/metadata_xml.py
6699 @@ -25,7 +25,7 @@ class _MissingXml(results.Error):
6700
6701 @property
6702 def desc(self):
6703 - return f'{self._attr} is missing {self.filename}'
6704 + return f"{self._attr} is missing {self.filename}"
6705
6706
6707 class _BadlyFormedXml(results.Warning):
6708 @@ -38,7 +38,7 @@ class _BadlyFormedXml(results.Warning):
6709
6710 @property
6711 def desc(self):
6712 - return f'{self._attr} {self.filename} is not well formed xml: {self.error}'
6713 + return f"{self._attr} {self.filename} is not well formed xml: {self.error}"
6714
6715
6716 class _InvalidXml(results.Error):
6717 @@ -51,7 +51,7 @@ class _InvalidXml(results.Error):
6718
6719 @property
6720 def desc(self):
6721 - return f'{self._attr} {self.filename} violates metadata.xsd:\n{self.message}'
6722 + return f"{self._attr} {self.filename} violates metadata.xsd:\n{self.message}"
6723
6724
6725 class _MetadataXmlInvalidPkgRef(results.Error):
6726 @@ -65,8 +65,8 @@ class _MetadataXmlInvalidPkgRef(results.Error):
6727 @property
6728 def desc(self):
6729 return (
6730 - f'{self._attr} {self.filename} <pkg/> '
6731 - f'references unknown/invalid package: {self.pkgtext!r}'
6732 + f"{self._attr} {self.filename} <pkg/> "
6733 + f"references unknown/invalid package: {self.pkgtext!r}"
6734 )
6735
6736
6737 @@ -81,8 +81,8 @@ class _MetadataXmlInvalidCatRef(results.Error):
6738 @property
6739 def desc(self):
6740 return (
6741 - f'{self._attr} {self.filename} <cat/> references '
6742 - f'unknown/invalid category: {self.cattext!r}'
6743 + f"{self._attr} {self.filename} <cat/> references "
6744 + f"unknown/invalid category: {self.cattext!r}"
6745 )
6746
6747
6748 @@ -97,8 +97,8 @@ class MaintainerNeeded(results.PackageResult, results.Warning):
6749 @property
6750 def desc(self):
6751 if not self.needed:
6752 - return f'{self.filename}: missing maintainer-needed comment'
6753 - return f'{self.filename}: invalid maintainer-needed comment'
6754 + return f"{self.filename}: missing maintainer-needed comment"
6755 + return f"{self.filename}: invalid maintainer-needed comment"
6756
6757
6758 class MaintainerWithoutProxy(results.PackageResult, results.Warning):
6759 @@ -119,8 +119,8 @@ class MaintainerWithoutProxy(results.PackageResult, results.Warning):
6760 @property
6761 def desc(self):
6762 s = pluralism(self.maintainers)
6763 - maintainers = ', '.join(self.maintainers)
6764 - return f'{self.filename}: proxied maintainer{s} missing proxy dev/project: {maintainers}'
6765 + maintainers = ", ".join(self.maintainers)
6766 + return f"{self.filename}: proxied maintainer{s} missing proxy dev/project: {maintainers}"
6767
6768
6769 class ProxyWithoutProxied(results.PackageResult, results.Warning):
6770 @@ -137,7 +137,7 @@ class ProxyWithoutProxied(results.PackageResult, results.Warning):
6771
6772 @property
6773 def desc(self):
6774 - return f'{self.filename}: proxy with no proxied maintainer'
6775 + return f"{self.filename}: proxy with no proxied maintainer"
6776
6777
6778 class NonexistentProjectMaintainer(results.PackageResult, results.Warning):
6779 @@ -151,8 +151,8 @@ class NonexistentProjectMaintainer(results.PackageResult, results.Warning):
6780 @property
6781 def desc(self):
6782 s = pluralism(self.emails)
6783 - emails = ', '.join(self.emails)
6784 - return f'{self.filename}: nonexistent project maintainer{s}: {emails}'
6785 + emails = ", ".join(self.emails)
6786 + return f"{self.filename}: nonexistent project maintainer{s}: {emails}"
6787
6788
6789 class WrongMaintainerType(results.PackageResult, results.Warning):
6790 @@ -166,7 +166,7 @@ class WrongMaintainerType(results.PackageResult, results.Warning):
6791 @property
6792 def desc(self):
6793 s = pluralism(self.emails)
6794 - emails = ', '.join(self.emails)
6795 + emails = ", ".join(self.emails)
6796 return f'{self.filename}: project maintainer{s} with type="person": {emails}'
6797
6798
6799 @@ -222,7 +222,7 @@ class _MetadataXmlIndentation(results.BaseLinesResult, results.Style):
6800
6801 @property
6802 def desc(self):
6803 - return f'{self.filename}: metadata.xml has inconsistent indentation {self.lines_str}'
6804 + return f"{self.filename}: metadata.xml has inconsistent indentation {self.lines_str}"
6805
6806
6807 class CatMetadataXmlIndentation(_MetadataXmlIndentation, results.CategoryResult):
6808 @@ -250,7 +250,7 @@ class _MetadataXmlEmptyElement(results.Style):
6809
6810 @property
6811 def desc(self):
6812 - return f'{self.filename}: empty element {self.element!r} on line {self.line}'
6813 + return f"{self.filename}: empty element {self.element!r} on line {self.line}"
6814
6815
6816 class CatMetadataXmlEmptyElement(_MetadataXmlEmptyElement, results.CategoryResult):
6817 @@ -288,8 +288,10 @@ class InvalidRemoteID(results.PackageResult, results.Warning):
6818
6819 @property
6820 def desc(self):
6821 - return (f"remote-id value {self.id_value!r} invalid for "
6822 - f"type={self.id_type!r}, expected: {self.expected!r}")
6823 + return (
6824 + f"remote-id value {self.id_value!r} invalid for "
6825 + f"type={self.id_type!r}, expected: {self.expected!r}"
6826 + )
6827
6828
6829 class _XmlBaseCheck(Check):
6830 @@ -306,13 +308,12 @@ class _XmlBaseCheck(Check):
6831 self.repo_base = self.options.target_repo.location
6832 self.pkgref_cache = {}
6833 # content validation checks to run after parsing XML doc
6834 - self._checks = tuple(
6835 - getattr(self, x) for x in dir(self) if x.startswith('_check_'))
6836 + self._checks = tuple(getattr(self, x) for x in dir(self) if x.startswith("_check_"))
6837
6838 # Prefer xsd file from the target repository or its masters, falling
6839 # back to the file installed with pkgcore.
6840 for repo in reversed(self.options.target_repo.trees):
6841 - metadata_xsd = pjoin(repo.location, 'metadata', 'xml-schema', 'metadata.xsd')
6842 + metadata_xsd = pjoin(repo.location, "metadata", "xml-schema", "metadata.xsd")
6843 if os.path.isfile(metadata_xsd):
6844 try:
6845 self.schema = etree.XMLSchema(etree.parse(metadata_xsd))
6846 @@ -321,7 +322,7 @@ class _XmlBaseCheck(Check):
6847 # ignore invalid xsd files
6848 pass
6849 else:
6850 - metadata_xsd = pjoin(pkgcore_const.DATA_PATH, 'xml-schema', 'metadata.xsd')
6851 + metadata_xsd = pjoin(pkgcore_const.DATA_PATH, "xml-schema", "metadata.xsd")
6852 self.schema = etree.XMLSchema(etree.parse(metadata_xsd))
6853
6854 def _check_doc(self, pkg, loc, doc):
6855 @@ -330,16 +331,19 @@ class _XmlBaseCheck(Check):
6856 # 'stabilize-allarches' which is allowed to be empty and 'flag' which
6857 # is caught by MissingLocalUseDesc.
6858 for el in doc.getroot().iterdescendants():
6859 - if (not el.getchildren() and (el.text is None or not el.text.strip())
6860 - and el.tag not in ('flag', 'stabilize-allarches')):
6861 + if (
6862 + not el.getchildren()
6863 + and (el.text is None or not el.text.strip())
6864 + and el.tag not in ("flag", "stabilize-allarches")
6865 + ):
6866 yield self.empty_element(os.path.basename(loc), el.tag, el.sourceline, pkg=pkg)
6867
6868 - for el in doc.findall('.//cat'):
6869 + for el in doc.findall(".//cat"):
6870 c = el.text.strip()
6871 if c not in self.options.search_repo.categories:
6872 yield self.catref_error(os.path.basename(loc), c, pkg=pkg)
6873
6874 - for el in doc.findall('.//pkg'):
6875 + for el in doc.findall(".//pkg"):
6876 p = el.text.strip()
6877 if p not in self.pkgref_cache:
6878 try:
6879 @@ -358,7 +362,7 @@ class _XmlBaseCheck(Check):
6880 indents = set()
6881 with open(loc) as f:
6882 for lineno, line in enumerate(f, 1):
6883 - for i in line[:-len(line.lstrip())]:
6884 + for i in line[: -len(line.lstrip())]:
6885 if i != orig_indent:
6886 if orig_indent is None:
6887 orig_indent = i
6888 @@ -370,7 +374,7 @@ class _XmlBaseCheck(Check):
6889 @staticmethod
6890 def _format_lxml_errors(error_log):
6891 for x in error_log:
6892 - yield f'line {x.line}, col {x.column}: ({x.type_name}) {x.message}'
6893 + yield f"line {x.line}, col {x.column}: ({x.type_name}) {x.message}"
6894
6895 def _parse_xml(self, pkg, loc):
6896 try:
6897 @@ -387,7 +391,7 @@ class _XmlBaseCheck(Check):
6898 # note: while doc is available, do not pass it here as it may
6899 # trigger undefined behavior due to incorrect structure
6900 if self.schema is not None and not self.schema.validate(doc):
6901 - message = '\n'.join(self._format_lxml_errors(self.schema.error_log))
6902 + message = "\n".join(self._format_lxml_errors(self.schema.error_log))
6903 yield self.invalid_error(os.path.basename(loc), message, pkg=pkg)
6904 return
6905
6906 @@ -413,64 +417,74 @@ class PackageMetadataXmlCheck(_XmlBaseCheck):
6907 indent_error = PkgMetadataXmlIndentation
6908 empty_element = PkgMetadataXmlEmptyElement
6909
6910 - known_results = frozenset([
6911 - PkgBadlyFormedXml, PkgInvalidXml, PkgMissingMetadataXml,
6912 - PkgMetadataXmlInvalidPkgRef, PkgMetadataXmlInvalidCatRef,
6913 - PkgMetadataXmlIndentation, PkgMetadataXmlEmptyElement, MaintainerNeeded,
6914 - MaintainerWithoutProxy, ProxyWithoutProxied, RedundantLongDescription,
6915 - NonexistentProjectMaintainer, WrongMaintainerType, InvalidRemoteID,
6916 - ])
6917 + known_results = frozenset(
6918 + [
6919 + PkgBadlyFormedXml,
6920 + PkgInvalidXml,
6921 + PkgMissingMetadataXml,
6922 + PkgMetadataXmlInvalidPkgRef,
6923 + PkgMetadataXmlInvalidCatRef,
6924 + PkgMetadataXmlIndentation,
6925 + PkgMetadataXmlEmptyElement,
6926 + MaintainerNeeded,
6927 + MaintainerWithoutProxy,
6928 + ProxyWithoutProxied,
6929 + RedundantLongDescription,
6930 + NonexistentProjectMaintainer,
6931 + WrongMaintainerType,
6932 + InvalidRemoteID,
6933 + ]
6934 + )
6935
6936 - _one_component_validator_re = re.compile(r'^[^/]+$')
6937 - _two_components_validator_re = re.compile(r'^[^/]+/[^/]+$')
6938 - _gitlab_validator_re = re.compile(r'^([^/]+/)*[^/]+/[^/]+$')
6939 + _one_component_validator_re = re.compile(r"^[^/]+$")
6940 + _two_components_validator_re = re.compile(r"^[^/]+/[^/]+$")
6941 + _gitlab_validator_re = re.compile(r"^([^/]+/)*[^/]+/[^/]+$")
6942
6943 remote_id_validators = {
6944 # {name}-style remotes
6945 - 'cpan': (_one_component_validator_re, '{project}'),
6946 - 'cpan-module': (_one_component_validator_re, '{module}'),
6947 - 'cran': (_one_component_validator_re, '{project}'),
6948 - 'ctan': (_one_component_validator_re, '{project}'),
6949 - 'google-code': (_one_component_validator_re, '{project}'),
6950 - 'osdn': (_one_component_validator_re, '{project}'),
6951 - 'pear': (_one_component_validator_re, '{project}'),
6952 - 'pecl': (_one_component_validator_re, '{project}'),
6953 - 'pypi': (_one_component_validator_re, '{project}'),
6954 - 'rubygems': (_one_component_validator_re, '{project}'),
6955 - 'sourceforge': (_one_component_validator_re, '{project}'),
6956 + "cpan": (_one_component_validator_re, "{project}"),
6957 + "cpan-module": (_one_component_validator_re, "{module}"),
6958 + "cran": (_one_component_validator_re, "{project}"),
6959 + "ctan": (_one_component_validator_re, "{project}"),
6960 + "google-code": (_one_component_validator_re, "{project}"),
6961 + "osdn": (_one_component_validator_re, "{project}"),
6962 + "pear": (_one_component_validator_re, "{project}"),
6963 + "pecl": (_one_component_validator_re, "{project}"),
6964 + "pypi": (_one_component_validator_re, "{project}"),
6965 + "rubygems": (_one_component_validator_re, "{project}"),
6966 + "sourceforge": (_one_component_validator_re, "{project}"),
6967 # {name} with a special check for lp: prefix
6968 - 'launchpad': (re.compile(r'^(?!lp:)[^/]+$'), '{project}'),
6969 + "launchpad": (re.compile(r"^(?!lp:)[^/]+$"), "{project}"),
6970 # {owner}/{name}-style remotes
6971 - 'bitbucket': (_two_components_validator_re, '{username}/{project}'),
6972 - 'github': (_two_components_validator_re, '{username}/{project}'),
6973 + "bitbucket": (_two_components_validator_re, "{username}/{project}"),
6974 + "github": (_two_components_validator_re, "{username}/{project}"),
6975 # gitlab (2+ components)
6976 - 'gitlab': (_gitlab_validator_re, '{username}/[{group}/...]{repo}'),
6977 - 'heptapod': (_gitlab_validator_re, '{username}/[{group}/...]{repo}'),
6978 + "gitlab": (_gitlab_validator_re, "{username}/[{group}/...]{repo}"),
6979 + "heptapod": (_gitlab_validator_re, "{username}/[{group}/...]{repo}"),
6980 # cpe
6981 - 'cpe': (re.compile(r'^cpe:/[aho]:[^:]+:[^:]+$'),
6982 - 'cpe:/[aho]:{vendor}:{product}'),
6983 + "cpe": (re.compile(r"^cpe:/[aho]:[^:]+:[^:]+$"), "cpe:/[aho]:{vendor}:{product}"),
6984 # 1+ component + no ".git" suffix
6985 - 'gentoo': (re.compile(r'^([^/]+/)*[^/]+(?<!\.git)$'),
6986 - '[{group}/...]{repo}'),
6987 + "gentoo": (re.compile(r"^([^/]+/)*[^/]+(?<!\.git)$"), "[{group}/...]{repo}"),
6988 # a positive decimal number
6989 - 'vim': (re.compile(r'^[1-9]\d*$'), '{script_id}'),
6990 + "vim": (re.compile(r"^[1-9]\d*$"), "{script_id}"),
6991 }
6992
6993 @staticmethod
6994 def _maintainer_proxied_key(m):
6995 if m.proxied is not None:
6996 return m.proxied
6997 - if m.email == 'proxy-maint@g.o':
6998 - return 'proxy'
6999 - if m.email.endswith('@gentoo.org'):
7000 - return 'no'
7001 - return 'yes'
7002 + if m.email == "proxy-maint@g.o":
7003 + return "proxy"
7004 + if m.email.endswith("@gentoo.org"):
7005 + return "no"
7006 + return "yes"
7007
7008 def _check_maintainers(self, pkg, loc, doc):
7009 """Validate maintainers in package metadata for the gentoo repo."""
7010 if self.options.gentoo_repo:
7011 maintainer_needed = any(
7012 - c.text.strip() == 'maintainer-needed' for c in doc.xpath('//comment()'))
7013 + c.text.strip() == "maintainer-needed" for c in doc.xpath("//comment()")
7014 + )
7015 if pkg.maintainers:
7016 # check for invalid maintainer-needed comment
7017 if maintainer_needed:
7018 @@ -478,15 +492,14 @@ class PackageMetadataXmlCheck(_XmlBaseCheck):
7019
7020 # determine proxy maintainer status
7021 proxied, devs, proxies = [], [], []
7022 - proxy_map = {'yes': proxied, 'no': devs, 'proxy': proxies}
7023 + proxy_map = {"yes": proxied, "no": devs, "proxy": proxies}
7024 for m in pkg.maintainers:
7025 proxy_map[self._maintainer_proxied_key(m)].append(m)
7026
7027 # check proxy maintainers
7028 if not devs and not proxies:
7029 maintainers = sorted(map(str, pkg.maintainers))
7030 - yield MaintainerWithoutProxy(
7031 - os.path.basename(loc), maintainers, pkg=pkg)
7032 + yield MaintainerWithoutProxy(os.path.basename(loc), maintainers, pkg=pkg)
7033 elif not proxied and proxies:
7034 yield ProxyWithoutProxied(os.path.basename(loc), pkg=pkg)
7035 elif not maintainer_needed:
7036 @@ -498,25 +511,27 @@ class PackageMetadataXmlCheck(_XmlBaseCheck):
7037 nonexistent = []
7038 wrong_maintainers = []
7039 for m in pkg.maintainers:
7040 - if m.maint_type == 'project' and m.email not in projects:
7041 + if m.maint_type == "project" and m.email not in projects:
7042 nonexistent.append(m.email)
7043 - elif m.maint_type == 'person' and m.email in projects:
7044 + elif m.maint_type == "person" and m.email in projects:
7045 wrong_maintainers.append(m.email)
7046 if nonexistent:
7047 yield NonexistentProjectMaintainer(
7048 - os.path.basename(loc), sorted(nonexistent), pkg=pkg)
7049 + os.path.basename(loc), sorted(nonexistent), pkg=pkg
7050 + )
7051 if wrong_maintainers:
7052 yield WrongMaintainerType(
7053 - os.path.basename(loc), sorted(wrong_maintainers), pkg=pkg)
7054 + os.path.basename(loc), sorted(wrong_maintainers), pkg=pkg
7055 + )
7056
7057 def _check_longdescription(self, pkg, loc, doc):
7058 if pkg.longdescription is not None:
7059 match_ratio = SequenceMatcher(None, pkg.description, pkg.longdescription).ratio()
7060 if match_ratio > 0.75:
7061 - msg = 'metadata.xml longdescription closely matches DESCRIPTION'
7062 + msg = "metadata.xml longdescription closely matches DESCRIPTION"
7063 yield RedundantLongDescription(msg, pkg=pkg)
7064 elif len(pkg.longdescription) < 100:
7065 - msg = 'metadata.xml longdescription is too short'
7066 + msg = "metadata.xml longdescription is too short"
7067 yield RedundantLongDescription(msg, pkg=pkg)
7068
7069 def _check_remote_id(self, pkg, loc, doc):
7070 @@ -533,13 +548,13 @@ class PackageMetadataXmlCheck(_XmlBaseCheck):
7071
7072 def _get_xml_location(self, pkg):
7073 """Return the metadata.xml location for a given package."""
7074 - return pjoin(os.path.dirname(pkg.ebuild.path), 'metadata.xml')
7075 + return pjoin(os.path.dirname(pkg.ebuild.path), "metadata.xml")
7076
7077
7078 class CategoryMetadataXmlCheck(_XmlBaseCheck):
7079 """Category level metadata.xml scans."""
7080
7081 - _source = (sources.CategoryRepoSource, (), (('source', sources.RawRepoSource),))
7082 + _source = (sources.CategoryRepoSource, (), (("source", sources.RawRepoSource),))
7083 misformed_error = CatBadlyFormedXml
7084 invalid_error = CatInvalidXml
7085 missing_error = CatMissingMetadataXml
7086 @@ -548,15 +563,21 @@ class CategoryMetadataXmlCheck(_XmlBaseCheck):
7087 indent_error = CatMetadataXmlIndentation
7088 empty_element = CatMetadataXmlEmptyElement
7089
7090 - known_results = frozenset([
7091 - CatBadlyFormedXml, CatInvalidXml, CatMissingMetadataXml,
7092 - CatMetadataXmlInvalidPkgRef, CatMetadataXmlInvalidCatRef,
7093 - CatMetadataXmlIndentation, CatMetadataXmlEmptyElement,
7094 - ])
7095 + known_results = frozenset(
7096 + [
7097 + CatBadlyFormedXml,
7098 + CatInvalidXml,
7099 + CatMissingMetadataXml,
7100 + CatMetadataXmlInvalidPkgRef,
7101 + CatMetadataXmlInvalidCatRef,
7102 + CatMetadataXmlIndentation,
7103 + CatMetadataXmlEmptyElement,
7104 + ]
7105 + )
7106
7107 def _get_xml_location(self, pkg):
7108 """Return the metadata.xml location for a given package's category."""
7109 - return pjoin(self.repo_base, pkg.category, 'metadata.xml')
7110 + return pjoin(self.repo_base, pkg.category, "metadata.xml")
7111
7112
7113 class MissingRemoteId(results.PackageResult, results.Info):
7114 @@ -577,8 +598,10 @@ class MissingRemoteId(results.PackageResult, results.Info):
7115
7116 @property
7117 def desc(self):
7118 - return (f'missing <remote-id type="{self.remote_type}">'
7119 - f'{self.value}</remote-id> (inferred from URI {self.uri!r})')
7120 + return (
7121 + f'missing <remote-id type="{self.remote_type}">'
7122 + f"{self.value}</remote-id> (inferred from URI {self.uri!r})"
7123 + )
7124
7125
7126 class MissingRemoteIdCheck(Check):
7127 @@ -587,37 +610,47 @@ class MissingRemoteIdCheck(Check):
7128 _source = sources.PackageRepoSource
7129 known_results = frozenset([MissingRemoteId])
7130
7131 - _gitlab_match = r'(?P<value>(\w[^/]*/)*\w[^/]*/\w[^/]*)'
7132 + _gitlab_match = r"(?P<value>(\w[^/]*/)*\w[^/]*/\w[^/]*)"
7133
7134 remotes_map = (
7135 - ('bitbucket', r'https://bitbucket.org/(?P<value>[^/]+/[^/]+)'),
7136 - ('freedesktop-gitlab', rf'https://gitlab.freedesktop.org/{_gitlab_match}'),
7137 - ('github', r'https://github.com/(?P<value>[^/]+/[^/]+)'),
7138 - ('gitlab', rf'https://gitlab.com/{_gitlab_match}'),
7139 - ('gnome-gitlab', rf'https://gitlab.gnome.org/{_gitlab_match}'),
7140 - ('heptapod', rf'https://foss.heptapod.net/{_gitlab_match}'),
7141 - ('launchpad', r'https://launchpad.net/(?P<value>[^/]+)'),
7142 - ('pypi', r'https://pypi.org/project/(?P<value>[^/]+)'),
7143 - ('pypi', r'https://files.pythonhosted.org/packages/source/\S/(?P<value>[^/]+)'),
7144 - ('savannah', r'https://savannah.gnu.org/projects/(?P<value>[^/]+)'),
7145 - ('savannah-nongnu', r'https://savannah.nongnu.org/projects/(?P<value>[^/]+)'),
7146 - ('sourceforge', r'https://downloads.sourceforge.(net|io)/(?:project/)?(?P<value>[^/]+)'),
7147 - ('sourceforge', r'https://sourceforge.(net|io)/projects/(?P<value>[^/]+)'),
7148 - ('sourceforge', r'https://(?P<value>[^/]+).sourceforge.(net|io)/'),
7149 - ('sourcehut', r'https://sr.ht/(?P<value>[^/]+/[^/]+)'),
7150 + ("bitbucket", r"https://bitbucket.org/(?P<value>[^/]+/[^/]+)"),
7151 + ("freedesktop-gitlab", rf"https://gitlab.freedesktop.org/{_gitlab_match}"),
7152 + ("github", r"https://github.com/(?P<value>[^/]+/[^/]+)"),
7153 + ("gitlab", rf"https://gitlab.com/{_gitlab_match}"),
7154 + ("gnome-gitlab", rf"https://gitlab.gnome.org/{_gitlab_match}"),
7155 + ("heptapod", rf"https://foss.heptapod.net/{_gitlab_match}"),
7156 + ("launchpad", r"https://launchpad.net/(?P<value>[^/]+)"),
7157 + ("pypi", r"https://pypi.org/project/(?P<value>[^/]+)"),
7158 + ("pypi", r"https://files.pythonhosted.org/packages/source/\S/(?P<value>[^/]+)"),
7159 + ("savannah", r"https://savannah.gnu.org/projects/(?P<value>[^/]+)"),
7160 + ("savannah-nongnu", r"https://savannah.nongnu.org/projects/(?P<value>[^/]+)"),
7161 + ("sourceforge", r"https://downloads.sourceforge.(net|io)/(?:project/)?(?P<value>[^/]+)"),
7162 + ("sourceforge", r"https://sourceforge.(net|io)/projects/(?P<value>[^/]+)"),
7163 + ("sourceforge", r"https://(?P<value>[^/]+).sourceforge.(net|io)/"),
7164 + ("sourcehut", r"https://sr.ht/(?P<value>[^/]+/[^/]+)"),
7165 )
7166
7167 def __init__(self, options, **kwargs):
7168 super().__init__(options, **kwargs)
7169 - self.remotes_map = tuple((remote_type, re.compile(regex)) for remote_type, regex in self.remotes_map)
7170 + self.remotes_map = tuple(
7171 + (remote_type, re.compile(regex)) for remote_type, regex in self.remotes_map
7172 + )
7173
7174 def feed(self, pkgset):
7175 remotes = {u.type: (None, None) for u in pkgset[0].upstreams}
7176 for pkg in sorted(pkgset, reverse=True):
7177 - fetchables = iflatten_instance(pkg.generate_fetchables(allow_missing_checksums=True,
7178 - ignore_unknown_mirrors=True, skip_default_mirrors=True), (fetchable, Conditional))
7179 - all_urls = set(chain.from_iterable(f.uri for f in fetchables if isinstance(f, fetchable)))
7180 - urls = {url for url in all_urls if not url.endswith(('.patch', '.diff'))}
7181 + fetchables = iflatten_instance(
7182 + pkg.generate_fetchables(
7183 + allow_missing_checksums=True,
7184 + ignore_unknown_mirrors=True,
7185 + skip_default_mirrors=True,
7186 + ),
7187 + (fetchable, Conditional),
7188 + )
7189 + all_urls = set(
7190 + chain.from_iterable(f.uri for f in fetchables if isinstance(f, fetchable))
7191 + )
7192 + urls = {url for url in all_urls if not url.endswith((".patch", ".diff"))}
7193 urls = sorted(urls.union(pkg.homepage), key=len)
7194
7195 for remote_type, regex in self.remotes_map:
7196 @@ -625,7 +658,7 @@ class MissingRemoteIdCheck(Check):
7197 continue
7198 for url in urls:
7199 if mo := regex.match(url):
7200 - remotes[remote_type] = (mo.group('value'), url)
7201 + remotes[remote_type] = (mo.group("value"), url)
7202 break
7203
7204 for remote_type, (value, url) in remotes.items():
7205
7206 diff --git a/src/pkgcheck/checks/network.py b/src/pkgcheck/checks/network.py
7207 index cad8e536..f51796e2 100644
7208 --- a/src/pkgcheck/checks/network.py
7209 +++ b/src/pkgcheck/checks/network.py
7210 @@ -24,8 +24,8 @@ class _UrlResult(results.VersionResult, results.Warning):
7211 @property
7212 def desc(self):
7213 if self.url in self.message:
7214 - return f'{self.attr}: {self.message}'
7215 - return f'{self.attr}: {self.message}: {self.url}'
7216 + return f"{self.attr}: {self.message}"
7217 + return f"{self.attr}: {self.message}: {self.url}"
7218
7219
7220 class DeadUrl(_UrlResult):
7221 @@ -38,8 +38,8 @@ class SSLCertificateError(_UrlResult):
7222 @property
7223 def desc(self):
7224 if self.url in self.message:
7225 - return f'{self.attr}: SSL cert error: {self.message}'
7226 - return f'{self.attr}: SSL cert error: {self.message}: {self.url}'
7227 + return f"{self.attr}: SSL cert error: {self.message}"
7228 + return f"{self.attr}: SSL cert error: {self.message}: {self.url}"
7229
7230
7231 class _UpdatedUrlResult(results.VersionResult, results.Warning):
7232 @@ -58,20 +58,20 @@ class _UpdatedUrlResult(results.VersionResult, results.Warning):
7233 msg = [self.attr]
7234 if self.message is not None:
7235 msg.append(self.message)
7236 - msg.append(f'{self.url} -> {self.new_url}')
7237 - return ': '.join(msg)
7238 + msg.append(f"{self.url} -> {self.new_url}")
7239 + return ": ".join(msg)
7240
7241
7242 class RedirectedUrl(_UpdatedUrlResult):
7243 """Package with a URL that permanently redirects to a different site."""
7244
7245 - message = 'permanently redirected'
7246 + message = "permanently redirected"
7247
7248
7249 class HttpsUrlAvailable(_UpdatedUrlResult):
7250 """URL uses http:// when https:// is available."""
7251
7252 - message = 'HTTPS url available'
7253 + message = "HTTPS url available"
7254
7255
7256 class _RequestException(Exception):
7257 @@ -100,9 +100,14 @@ class _UrlCheck(NetworkCheck):
7258
7259 _source = sources.LatestVersionRepoSource
7260
7261 - known_results = frozenset([
7262 - DeadUrl, RedirectedUrl, HttpsUrlAvailable, SSLCertificateError,
7263 - ])
7264 + known_results = frozenset(
7265 + [
7266 + DeadUrl,
7267 + RedirectedUrl,
7268 + HttpsUrlAvailable,
7269 + SSLCertificateError,
7270 + ]
7271 + )
7272
7273 def _http_check(self, attr, url, *, pkg):
7274 """Verify http:// and https:// URLs."""
7275 @@ -113,14 +118,14 @@ class _UrlCheck(NetworkCheck):
7276 for response in r.history:
7277 if not response.is_permanent_redirect:
7278 break
7279 - redirected_url = response.headers['location']
7280 - hsts = 'strict-transport-security' in response.headers
7281 + redirected_url = response.headers["location"]
7282 + hsts = "strict-transport-security" in response.headers
7283
7284 if redirected_url:
7285 - if redirected_url.startswith('https://') and url.startswith('http://'):
7286 + if redirected_url.startswith("https://") and url.startswith("http://"):
7287 result = HttpsUrlAvailable(attr, url, redirected_url, pkg=pkg)
7288 - elif redirected_url.startswith('http://') and hsts:
7289 - redirected_url = f'https://{redirected_url[7:]}'
7290 + elif redirected_url.startswith("http://") and hsts:
7291 + redirected_url = f"https://{redirected_url[7:]}"
7292 result = RedirectedUrl(attr, url, redirected_url, pkg=pkg)
7293 else:
7294 result = RedirectedUrl(attr, url, redirected_url, pkg=pkg)
7295 @@ -139,16 +144,16 @@ class _UrlCheck(NetworkCheck):
7296 for response in r.history:
7297 if not response.is_permanent_redirect:
7298 break
7299 - redirected_url = response.headers['location']
7300 - hsts = 'strict-transport-security' in response.headers
7301 + redirected_url = response.headers["location"]
7302 + hsts = "strict-transport-security" in response.headers
7303
7304 # skip result if http:// URL check was redirected to https://
7305 if not isinstance(future.result(), HttpsUrlAvailable):
7306 if redirected_url:
7307 - if redirected_url.startswith('https://'):
7308 + if redirected_url.startswith("https://"):
7309 result = HttpsUrlAvailable(attr, orig_url, redirected_url, pkg=pkg)
7310 - elif redirected_url.startswith('http://') and hsts:
7311 - redirected_url = f'https://{redirected_url[7:]}'
7312 + elif redirected_url.startswith("http://") and hsts:
7313 + redirected_url = f"https://{redirected_url[7:]}"
7314 result = HttpsUrlAvailable(attr, orig_url, redirected_url, pkg=pkg)
7315 else:
7316 result = HttpsUrlAvailable(attr, orig_url, url, pkg=pkg)
7317 @@ -182,7 +187,7 @@ class _UrlCheck(NetworkCheck):
7318 if pkg is not None:
7319 # recreate result object with different pkg target and attr
7320 attrs = result._attrs.copy()
7321 - attrs['attr'] = attr
7322 + attrs["attr"] = attr
7323 result = result._create(**attrs, pkg=pkg)
7324 self.results_q.put([result])
7325
7326 @@ -203,29 +208,36 @@ class _UrlCheck(NetworkCheck):
7327 future.add_done_callback(partial(self.task_done, None, None))
7328 futures[url] = future
7329 else:
7330 - future.add_done_callback(partial(self.task_done, kwargs['pkg'], attr))
7331 + future.add_done_callback(partial(self.task_done, kwargs["pkg"], attr))
7332
7333 def schedule(self, pkg, executor, futures):
7334 """Schedule verification methods to run in separate threads for all flagged URLs."""
7335 http_urls = []
7336 for attr, url in self._get_urls(pkg):
7337 - if url.startswith('ftp://'):
7338 - self._schedule_check(
7339 - self._ftp_check, attr, url, executor, futures, pkg=pkg)
7340 - elif url.startswith(('https://', 'http://')):
7341 - self._schedule_check(
7342 - self._http_check, attr, url, executor, futures, pkg=pkg)
7343 + if url.startswith("ftp://"):
7344 + self._schedule_check(self._ftp_check, attr, url, executor, futures, pkg=pkg)
7345 + elif url.startswith(("https://", "http://")):
7346 + self._schedule_check(self._http_check, attr, url, executor, futures, pkg=pkg)
7347 http_urls.append((attr, url))
7348
7349 http_urls = tuple(http_urls)
7350 http_to_https_urls = (
7351 - (attr, url, f'https://{url[7:]}') for (attr, url) in http_urls
7352 - if url.startswith('http://'))
7353 + (attr, url, f"https://{url[7:]}")
7354 + for (attr, url) in http_urls
7355 + if url.startswith("http://")
7356 + )
7357 for attr, orig_url, url in http_to_https_urls:
7358 future = futures[orig_url]
7359 self._schedule_check(
7360 - self._https_available_check, attr, url, executor, futures,
7361 - future=future, orig_url=orig_url, pkg=pkg)
7362 + self._https_available_check,
7363 + attr,
7364 + url,
7365 + executor,
7366 + futures,
7367 + future=future,
7368 + orig_url=orig_url,
7369 + pkg=pkg,
7370 + )
7371
7372
7373 class HomepageUrlCheck(_UrlCheck):
7374 @@ -233,7 +245,7 @@ class HomepageUrlCheck(_UrlCheck):
7375
7376 def _get_urls(self, pkg):
7377 for url in pkg.homepage:
7378 - yield 'HOMEPAGE', url
7379 + yield "HOMEPAGE", url
7380
7381
7382 class FetchablesUrlCheck(_UrlCheck):
7383 @@ -243,18 +255,20 @@ class FetchablesUrlCheck(_UrlCheck):
7384
7385 def __init__(self, *args, use_addon, **kwargs):
7386 super().__init__(*args, **kwargs)
7387 - self.fetch_filter = use_addon.get_filter('fetchables')
7388 + self.fetch_filter = use_addon.get_filter("fetchables")
7389
7390 def _get_urls(self, pkg):
7391 # ignore conditionals
7392 fetchables, _ = self.fetch_filter(
7393 - (fetchable,), pkg,
7394 + (fetchable,),
7395 + pkg,
7396 pkg.generate_fetchables(
7397 - allow_missing_checksums=True, ignore_unknown_mirrors=True,
7398 - skip_default_mirrors=True))
7399 + allow_missing_checksums=True, ignore_unknown_mirrors=True, skip_default_mirrors=True
7400 + ),
7401 + )
7402 for f in fetchables.keys():
7403 for url in f.uri:
7404 - yield 'SRC_URI', url
7405 + yield "SRC_URI", url
7406
7407
7408 class MetadataUrlCheck(_UrlCheck):
7409 @@ -264,31 +278,31 @@ class MetadataUrlCheck(_UrlCheck):
7410
7411 def __init__(self, *args, **kwargs):
7412 super().__init__(*args, **kwargs)
7413 - self.protocols = ('http://', 'https://', 'ftp://')
7414 + self.protocols = ("http://", "https://", "ftp://")
7415 self.remote_map = {
7416 - 'bitbucket': 'https://bitbucket.org/{project}',
7417 - 'cpan': 'https://metacpan.org/dist/{project}',
7418 + "bitbucket": "https://bitbucket.org/{project}",
7419 + "cpan": "https://metacpan.org/dist/{project}",
7420 # some packages include a lot of modules, and scanning them
7421 # DoS-es metacpan
7422 # 'cpan-module': 'https://metacpan.org/pod/{project}',
7423 - 'cran': 'https://cran.r-project.org/web/packages/{project}/',
7424 - 'ctan': 'https://ctan.org/pkg/{project}',
7425 - 'freedesktop-gitlab': 'https://gitlab.freedesktop.org/{project}.git/',
7426 - 'gentoo': 'https://gitweb.gentoo.org/{project}.git/',
7427 - 'github': 'https://github.com/{project}',
7428 - 'gitlab': 'https://gitlab.com/{project}',
7429 - 'gnome-gitlab': 'https://gitlab.gnome.org/{project}.git/',
7430 - 'hackage': 'https://hackage.haskell.org/package/{project}',
7431 - 'launchpad': 'https://launchpad.net/{project}',
7432 - 'osdn': 'https://osdn.net/projects/{project}/',
7433 - 'pecl': 'https://pecl.php.net/package/{project}',
7434 - 'pypi': 'https://pypi.org/project/{project}/',
7435 - 'rubygems': 'https://rubygems.org/gems/{project}',
7436 - 'savannah': 'https://savannah.gnu.org/projects/{project}',
7437 - 'savannah-nongnu': 'https://savannah.nongnu.org/projects/{project}',
7438 - 'sourceforge': 'https://sourceforge.net/projects/{project}/',
7439 - 'sourcehut': 'https://sr.ht/{project}/',
7440 - 'vim': 'https://vim.org/scripts/script.php?script_id={project}',
7441 + "cran": "https://cran.r-project.org/web/packages/{project}/",
7442 + "ctan": "https://ctan.org/pkg/{project}",
7443 + "freedesktop-gitlab": "https://gitlab.freedesktop.org/{project}.git/",
7444 + "gentoo": "https://gitweb.gentoo.org/{project}.git/",
7445 + "github": "https://github.com/{project}",
7446 + "gitlab": "https://gitlab.com/{project}",
7447 + "gnome-gitlab": "https://gitlab.gnome.org/{project}.git/",
7448 + "hackage": "https://hackage.haskell.org/package/{project}",
7449 + "launchpad": "https://launchpad.net/{project}",
7450 + "osdn": "https://osdn.net/projects/{project}/",
7451 + "pecl": "https://pecl.php.net/package/{project}",
7452 + "pypi": "https://pypi.org/project/{project}/",
7453 + "rubygems": "https://rubygems.org/gems/{project}",
7454 + "savannah": "https://savannah.gnu.org/projects/{project}",
7455 + "savannah-nongnu": "https://savannah.nongnu.org/projects/{project}",
7456 + "sourceforge": "https://sourceforge.net/projects/{project}/",
7457 + "sourcehut": "https://sr.ht/{project}/",
7458 + "vim": "https://vim.org/scripts/script.php?script_id={project}",
7459 # these platforms return 200 for errors, so no point in trying
7460 # 'google-code': 'https://code.google.com/archive/p/{project}/',
7461 # 'heptapod': 'https://foss.heptapod.net/{project}',
7462 @@ -302,20 +316,20 @@ class MetadataUrlCheck(_UrlCheck):
7463 return
7464
7465 # TODO: move upstream parsing to a pkgcore attribute?
7466 - for element in ('changelog', 'doc', 'bugs-to', 'remote-id'):
7467 - for x in tree.xpath(f'//upstream/{element}'):
7468 + for element in ("changelog", "doc", "bugs-to", "remote-id"):
7469 + for x in tree.xpath(f"//upstream/{element}"):
7470 if x.text:
7471 url = x.text
7472 - if element == 'remote-id':
7473 + if element == "remote-id":
7474 # Use remote-id -> URL map to determine actual URL,
7475 # skipping verification for unmapped remote-ids.
7476 try:
7477 - url = self.remote_map[x.attrib['type']].format(project=url)
7478 + url = self.remote_map[x.attrib["type"]].format(project=url)
7479 except KeyError:
7480 continue
7481 # skip unsupported protocols, e.g. mailto URLs from bugs-to
7482 if url.startswith(self.protocols):
7483 - yield f'metadata.xml: {element}', url
7484 + yield f"metadata.xml: {element}", url
7485
7486 def schedule(self, pkgs, *args, **kwargs):
7487 super().schedule(pkgs[-1], *args, **kwargs)
7488
7489 diff --git a/src/pkgcheck/checks/overlays.py b/src/pkgcheck/checks/overlays.py
7490 index 1268542a..cbe3d5b6 100644
7491 --- a/src/pkgcheck/checks/overlays.py
7492 +++ b/src/pkgcheck/checks/overlays.py
7493 @@ -18,8 +18,8 @@ class UnusedInMastersLicenses(results.VersionResult, results.Warning):
7494 @property
7495 def desc(self):
7496 s = pluralism(self.licenses)
7497 - licenses = ', '.join(self.licenses)
7498 - return f'unused license{s} in master repo(s): {licenses}'
7499 + licenses = ", ".join(self.licenses)
7500 + return f"unused license{s} in master repo(s): {licenses}"
7501
7502
7503 class UnusedInMastersMirrors(results.VersionResult, results.Warning):
7504 @@ -35,8 +35,8 @@ class UnusedInMastersMirrors(results.VersionResult, results.Warning):
7505 @property
7506 def desc(self):
7507 s = pluralism(self.mirrors)
7508 - mirrors = ', '.join(self.mirrors)
7509 - return f'unused mirror{s} in master repo(s): {mirrors}'
7510 + mirrors = ", ".join(self.mirrors)
7511 + return f"unused mirror{s} in master repo(s): {mirrors}"
7512
7513
7514 class UnusedInMastersEclasses(results.VersionResult, results.Warning):
7515 @@ -51,9 +51,9 @@ class UnusedInMastersEclasses(results.VersionResult, results.Warning):
7516
7517 @property
7518 def desc(self):
7519 - es = pluralism(self.eclasses, plural='es')
7520 - eclasses = ', '.join(self.eclasses)
7521 - return f'unused eclass{es} in master repo(s): {eclasses}'
7522 + es = pluralism(self.eclasses, plural="es")
7523 + eclasses = ", ".join(self.eclasses)
7524 + return f"unused eclass{es} in master repo(s): {eclasses}"
7525
7526
7527 class UnusedInMastersGlobalUse(results.VersionResult, results.Warning):
7528 @@ -69,18 +69,22 @@ class UnusedInMastersGlobalUse(results.VersionResult, results.Warning):
7529 @property
7530 def desc(self):
7531 s = pluralism(self.flags)
7532 - flags = ', '.join(self.flags)
7533 - return f'use.desc unused flag{s} in master repo(s): {flags}'
7534 + flags = ", ".join(self.flags)
7535 + return f"use.desc unused flag{s} in master repo(s): {flags}"
7536
7537
7538 class UnusedInMastersCheck(MirrorsCheck, OverlayRepoCheck, RepoCheck, OptionalCheck):
7539 """Check for various metadata that may be removed from master repos."""
7540
7541 _source = sources.RepositoryRepoSource
7542 - known_results = frozenset([
7543 - UnusedInMastersLicenses, UnusedInMastersMirrors, UnusedInMastersEclasses,
7544 - UnusedInMastersGlobalUse,
7545 - ])
7546 + known_results = frozenset(
7547 + [
7548 + UnusedInMastersLicenses,
7549 + UnusedInMastersMirrors,
7550 + UnusedInMastersEclasses,
7551 + UnusedInMastersGlobalUse,
7552 + ]
7553 + )
7554
7555 def start(self):
7556 self.unused_master_licenses = set()
7557 @@ -93,8 +97,7 @@ class UnusedInMastersCheck(MirrorsCheck, OverlayRepoCheck, RepoCheck, OptionalCh
7558 self.unused_master_licenses.update(repo.licenses)
7559 self.unused_master_mirrors.update(repo.mirrors.keys())
7560 self.unused_master_eclasses.update(repo.eclass_cache.eclasses.keys())
7561 - self.unused_master_flags.update(
7562 - flag for matcher, (flag, desc) in repo.config.use_desc)
7563 + self.unused_master_flags.update(flag for matcher, (flag, desc) in repo.config.use_desc)
7564
7565 # determine unused licenses/mirrors/eclasses/flags across all master repos
7566 for repo in self.options.target_repo.masters:
7567 @@ -103,7 +106,8 @@ class UnusedInMastersCheck(MirrorsCheck, OverlayRepoCheck, RepoCheck, OptionalCh
7568 self.unused_master_mirrors.difference_update(self.get_mirrors(pkg))
7569 self.unused_master_eclasses.difference_update(pkg.inherited)
7570 self.unused_master_flags.difference_update(
7571 - pkg.iuse_stripped.difference(pkg.local_use.keys()))
7572 + pkg.iuse_stripped.difference(pkg.local_use.keys())
7573 + )
7574
7575 def feed(self, pkg):
7576 # report licenses used in the pkg but not in any pkg from the master repo(s)
7577
7578 diff --git a/src/pkgcheck/checks/perl.py b/src/pkgcheck/checks/perl.py
7579 index 3f6c2dde..f2c3bd25 100644
7580 --- a/src/pkgcheck/checks/perl.py
7581 +++ b/src/pkgcheck/checks/perl.py
7582 @@ -19,7 +19,7 @@ class MismatchedPerlVersion(results.VersionResult, results.Warning):
7583
7584 @property
7585 def desc(self):
7586 - return f'DIST_VERSION={self.dist_version} normalizes to {self.normalized}'
7587 + return f"DIST_VERSION={self.dist_version} normalizes to {self.normalized}"
7588
7589
7590 class _PerlException(Exception):
7591 @@ -36,25 +36,29 @@ class _PerlConnection:
7592 # start perl client for normalizing perl module versions into package versions
7593 try:
7594 self.perl_client = subprocess.Popen(
7595 - ['perl', pjoin(const.DATA_PATH, 'perl-version.pl')],
7596 - text=True, bufsize=1,
7597 - stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
7598 + ["perl", pjoin(const.DATA_PATH, "perl-version.pl")],
7599 + text=True,
7600 + bufsize=1,
7601 + stdin=subprocess.PIPE,
7602 + stdout=subprocess.PIPE,
7603 + stderr=subprocess.PIPE,
7604 + )
7605 except FileNotFoundError:
7606 - raise _PerlException('perl not installed on system')
7607 + raise _PerlException("perl not installed on system")
7608
7609 # check if the script is running
7610 ready = self.perl_client.stdout.readline().strip()
7611 - if ready != 'ready' or self.perl_client.poll():
7612 - err_msg = 'failed to run perl script'
7613 + if ready != "ready" or self.perl_client.poll():
7614 + err_msg = "failed to run perl script"
7615 if options.verbosity > 0:
7616 stderr = self.perl_client.stderr.read().strip()
7617 - err_msg += f': {stderr}'
7618 + err_msg += f": {stderr}"
7619 raise _PerlException(err_msg)
7620
7621 def normalize(self, version):
7622 """Normalize a given version number to its perl equivalent."""
7623 with self.process_lock:
7624 - self.perl_client.stdin.write(version + '\n')
7625 + self.perl_client.stdin.write(version + "\n")
7626 return self.perl_client.stdout.readline().strip()
7627
7628 def __del__(self):
7629 @@ -66,14 +70,16 @@ class _PerlConnection:
7630 class PerlCheck(OptionalCheck):
7631 """Perl ebuild related checks."""
7632
7633 - _restricted_source = (sources.RestrictionRepoSource, (
7634 - packages.PackageRestriction('inherited', values.ContainmentMatch2('perl-module')),))
7635 - _source = (sources.EbuildFileRepoSource, (), (('source', _restricted_source),))
7636 + _restricted_source = (
7637 + sources.RestrictionRepoSource,
7638 + (packages.PackageRestriction("inherited", values.ContainmentMatch2("perl-module")),),
7639 + )
7640 + _source = (sources.EbuildFileRepoSource, (), (("source", _restricted_source),))
7641 known_results = frozenset([MismatchedPerlVersion])
7642
7643 def __init__(self, *args):
7644 super().__init__(*args)
7645 - self.dist_version_re = re.compile(r'DIST_VERSION=(?P<dist_version>\d+(\.\d+)*)\s*\n')
7646 + self.dist_version_re = re.compile(r"DIST_VERSION=(?P<dist_version>\d+(\.\d+)*)\s*\n")
7647 # Initialize connection with perl script. This is done during
7648 # __init__() since only one running version of the script is shared
7649 # between however many scanning processes will be run. Also, it makes
7650 @@ -84,8 +90,8 @@ class PerlCheck(OptionalCheck):
7651 raise SkipCheck(self, str(e))
7652
7653 def feed(self, pkg):
7654 - if mo := self.dist_version_re.search(''.join(pkg.lines)):
7655 - dist_version = mo.group('dist_version')
7656 + if mo := self.dist_version_re.search("".join(pkg.lines)):
7657 + dist_version = mo.group("dist_version")
7658 normalized = self.perl.normalize(dist_version)
7659 if normalized != pkg.version:
7660 yield MismatchedPerlVersion(dist_version, normalized, pkg=pkg)
7661
7662 diff --git a/src/pkgcheck/checks/pkgdir.py b/src/pkgcheck/checks/pkgdir.py
7663 index cc82c7c8..ef90baac 100644
7664 --- a/src/pkgcheck/checks/pkgdir.py
7665 +++ b/src/pkgcheck/checks/pkgdir.py
7666 @@ -14,9 +14,9 @@ from . import Check, GentooRepoCheck
7667
7668 # allowed filename characters: "a-zA-Z0-9._-+:"
7669 allowed_filename_chars = set()
7670 -allowed_filename_chars.update(chr(x) for x in range(ord('a'), ord('z') + 1))
7671 -allowed_filename_chars.update(chr(x) for x in range(ord('A'), ord('Z') + 1))
7672 -allowed_filename_chars.update(chr(x) for x in range(ord('0'), ord('9') + 1))
7673 +allowed_filename_chars.update(chr(x) for x in range(ord("a"), ord("z") + 1))
7674 +allowed_filename_chars.update(chr(x) for x in range(ord("A"), ord("Z") + 1))
7675 +allowed_filename_chars.update(chr(x) for x in range(ord("0"), ord("9") + 1))
7676 allowed_filename_chars.update([".", "-", "_", "+", ":"])
7677
7678
7679 @@ -30,8 +30,8 @@ class MismatchedPN(results.PackageResult, results.Error):
7680 @property
7681 def desc(self):
7682 s = pluralism(self.ebuilds)
7683 - ebuilds = ', '.join(self.ebuilds)
7684 - return f'mismatched package name{s}: [ {ebuilds} ]'
7685 + ebuilds = ", ".join(self.ebuilds)
7686 + return f"mismatched package name{s}: [ {ebuilds} ]"
7687
7688
7689 class InvalidPN(results.PackageResult, results.Error):
7690 @@ -44,8 +44,8 @@ class InvalidPN(results.PackageResult, results.Error):
7691 @property
7692 def desc(self):
7693 s = pluralism(self.ebuilds)
7694 - ebuilds = ', '.join(self.ebuilds)
7695 - return f'invalid package name{s}: [ {ebuilds} ]'
7696 + ebuilds = ", ".join(self.ebuilds)
7697 + return f"invalid package name{s}: [ {ebuilds} ]"
7698
7699
7700 class EqualVersions(results.PackageResult, results.Error):
7701 @@ -74,8 +74,8 @@ class DuplicateFiles(results.PackageResult, results.Warning):
7702
7703 @property
7704 def desc(self):
7705 - files = ', '.join(map(repr, self.files))
7706 - return f'duplicate identical files in FILESDIR: {files}'
7707 + files = ", ".join(map(repr, self.files))
7708 + return f"duplicate identical files in FILESDIR: {files}"
7709
7710
7711 class EmptyFile(results.PackageResult, results.Warning):
7712 @@ -87,7 +87,7 @@ class EmptyFile(results.PackageResult, results.Warning):
7713
7714 @property
7715 def desc(self):
7716 - return f'empty file in FILESDIR: {self.filename!r}'
7717 + return f"empty file in FILESDIR: {self.filename!r}"
7718
7719
7720 class ExecutableFile(results.PackageResult, results.Warning):
7721 @@ -99,7 +99,7 @@ class ExecutableFile(results.PackageResult, results.Warning):
7722
7723 @property
7724 def desc(self):
7725 - return f'unnecessary executable bit: {self.filename!r}'
7726 + return f"unnecessary executable bit: {self.filename!r}"
7727
7728
7729 class UnknownPkgDirEntry(results.PackageResult, results.Warning):
7730 @@ -115,9 +115,9 @@ class UnknownPkgDirEntry(results.PackageResult, results.Warning):
7731
7732 @property
7733 def desc(self):
7734 - files = ', '.join(map(repr, self.filenames))
7735 - y = pluralism(self.filenames, singular='y', plural='ies')
7736 - return f'unknown entr{y}: {files}'
7737 + files = ", ".join(map(repr, self.filenames))
7738 + y = pluralism(self.filenames, singular="y", plural="ies")
7739 + return f"unknown entr{y}: {files}"
7740
7741
7742 class SizeViolation(results.PackageResult, results.Warning):
7743 @@ -132,8 +132,10 @@ class SizeViolation(results.PackageResult, results.Warning):
7744
7745 @property
7746 def desc(self):
7747 - return (f'{self.filename!r} exceeds {sizeof_fmt(self.limit)} in size; '
7748 - f'{sizeof_fmt(self.size)} total')
7749 + return (
7750 + f"{self.filename!r} exceeds {sizeof_fmt(self.limit)} in size; "
7751 + f"{sizeof_fmt(self.size)} total"
7752 + )
7753
7754
7755 class TotalSizeViolation(results.PackageResult, results.Warning):
7756 @@ -147,8 +149,10 @@ class TotalSizeViolation(results.PackageResult, results.Warning):
7757
7758 @property
7759 def desc(self):
7760 - return (f'files/ directory exceeds {sizeof_fmt(self.limit)} in size; '
7761 - f'{sizeof_fmt(self.size)} total')
7762 + return (
7763 + f"files/ directory exceeds {sizeof_fmt(self.limit)} in size; "
7764 + f"{sizeof_fmt(self.size)} total"
7765 + )
7766
7767
7768 class BannedCharacter(results.PackageResult, results.Error):
7769 @@ -167,8 +171,8 @@ class BannedCharacter(results.PackageResult, results.Error):
7770 @property
7771 def desc(self):
7772 s = pluralism(self.chars)
7773 - chars = ', '.join(map(repr, self.chars))
7774 - return f'filename {self.filename!r} character{s} outside allowed set: {chars}'
7775 + chars = ", ".join(map(repr, self.chars))
7776 + return f"filename {self.filename!r} character{s} outside allowed set: {chars}"
7777
7778
7779 class InvalidUTF8(results.PackageResult, results.Error):
7780 @@ -187,17 +191,27 @@ class InvalidUTF8(results.PackageResult, results.Error):
7781 class PkgDirCheck(Check):
7782 """Scan ebuild directory for various file-related issues."""
7783
7784 - _source = (sources.PackageRepoSource, (), (('source', sources.RawRepoSource),))
7785 + _source = (sources.PackageRepoSource, (), (("source", sources.RawRepoSource),))
7786
7787 ignore_dirs = frozenset(["cvs", ".svn", ".bzr"])
7788 required_addons = (addons.git.GitAddon,)
7789 - known_results = frozenset([
7790 - DuplicateFiles, EmptyFile, ExecutableFile, UnknownPkgDirEntry, SizeViolation,
7791 - BannedCharacter, InvalidUTF8, MismatchedPN, InvalidPN, TotalSizeViolation,
7792 - ])
7793 + known_results = frozenset(
7794 + [
7795 + DuplicateFiles,
7796 + EmptyFile,
7797 + ExecutableFile,
7798 + UnknownPkgDirEntry,
7799 + SizeViolation,
7800 + BannedCharacter,
7801 + InvalidUTF8,
7802 + MismatchedPN,
7803 + InvalidPN,
7804 + TotalSizeViolation,
7805 + ]
7806 + )
7807
7808 # TODO: put some 'preferred algorithms by purpose' into snakeoil?
7809 - digest_algo = 'sha256'
7810 + digest_algo = "sha256"
7811
7812 def __init__(self, *args, git_addon):
7813 super().__init__(*args)
7814 @@ -206,7 +220,7 @@ class PkgDirCheck(Check):
7815 def feed(self, pkgset):
7816 pkg = pkgset[0]
7817 pkg_path = pjoin(self.options.target_repo.location, pkg.category, pkg.package)
7818 - ebuild_ext = '.ebuild'
7819 + ebuild_ext = ".ebuild"
7820 mismatched = []
7821 invalid = []
7822 unknown = []
7823 @@ -228,20 +242,19 @@ class PkgDirCheck(Check):
7824
7825 if filename.endswith(ebuild_ext):
7826 try:
7827 - with open(path, mode='rb') as f:
7828 + with open(path, mode="rb") as f:
7829 f.read(8192).decode()
7830 except UnicodeDecodeError as e:
7831 yield InvalidUTF8(filename, str(e), pkg=pkg)
7832
7833 - pkg_name = os.path.basename(filename[:-len(ebuild_ext)])
7834 + pkg_name = os.path.basename(filename[: -len(ebuild_ext)])
7835 try:
7836 - pkg_atom = atom_cls(f'={pkg.category}/{pkg_name}')
7837 + pkg_atom = atom_cls(f"={pkg.category}/{pkg_name}")
7838 if pkg_atom.package != os.path.basename(pkg_path):
7839 mismatched.append(pkg_name)
7840 except MalformedAtom:
7841 invalid.append(pkg_name)
7842 - elif (self.options.gentoo_repo and
7843 - filename not in ('Manifest', 'metadata.xml', 'files')):
7844 + elif self.options.gentoo_repo and filename not in ("Manifest", "metadata.xml", "files"):
7845 unknown.append(filename)
7846
7847 if mismatched:
7848 @@ -254,7 +267,7 @@ class PkgDirCheck(Check):
7849 files_by_size = defaultdict(list)
7850 pkg_path_len = len(pkg_path) + 1
7851 total_size = 0
7852 - for root, dirs, files in os.walk(pjoin(pkg_path, 'files')):
7853 + for root, dirs, files in os.walk(pjoin(pkg_path, "files")):
7854 # don't visit any ignored directories
7855 for d in self.ignore_dirs.intersection(dirs):
7856 dirs.remove(d)
7857 @@ -274,10 +287,12 @@ class PkgDirCheck(Check):
7858 total_size += file_stat.st_size
7859 if file_stat.st_size > SizeViolation.limit:
7860 yield SizeViolation(
7861 - pjoin(base_dir, filename), file_stat.st_size, pkg=pkg)
7862 + pjoin(base_dir, filename), file_stat.st_size, pkg=pkg
7863 + )
7864 if banned_chars := set(filename) - allowed_filename_chars:
7865 yield BannedCharacter(
7866 - pjoin(base_dir, filename), sorted(banned_chars), pkg=pkg)
7867 + pjoin(base_dir, filename), sorted(banned_chars), pkg=pkg
7868 + )
7869
7870 if total_size > TotalSizeViolation.limit:
7871 yield TotalSizeViolation(total_size, pkg=pkg)
7872 @@ -324,9 +339,9 @@ class LiveOnlyPackage(results.PackageResult, results.Warning):
7873 @property
7874 def desc(self):
7875 if self.age < 365:
7876 - return f'all versions are VCS-based added over {self.age} days ago'
7877 + return f"all versions are VCS-based added over {self.age} days ago"
7878 years = round(self.age / 365, 2)
7879 - return f'all versions are VCS-based added over {years} years ago'
7880 + return f"all versions are VCS-based added over {years} years ago"
7881
7882
7883 class LiveOnlyCheck(GentooRepoCheck):
7884
7885 diff --git a/src/pkgcheck/checks/profiles.py b/src/pkgcheck/checks/profiles.py
7886 index db49cf38..8673ed7d 100644
7887 --- a/src/pkgcheck/checks/profiles.py
7888 +++ b/src/pkgcheck/checks/profiles.py
7889 @@ -25,7 +25,7 @@ class UnknownProfilePackage(results.ProfilesResult, results.Warning):
7890
7891 @property
7892 def desc(self):
7893 - return f'{self.path!r}: unknown package: {self.atom!r}'
7894 + return f"{self.path!r}: unknown package: {self.atom!r}"
7895
7896
7897 class UnmatchedProfilePackageUnmask(results.ProfilesResult, results.Warning):
7898 @@ -42,7 +42,7 @@ class UnmatchedProfilePackageUnmask(results.ProfilesResult, results.Warning):
7899
7900 @property
7901 def desc(self):
7902 - return f'{self.path!r}: unmask of not masked package: {self.atom!r}'
7903 + return f"{self.path!r}: unmask of not masked package: {self.atom!r}"
7904
7905
7906 class UnknownProfilePackageUse(results.ProfilesResult, results.Warning):
7907 @@ -57,9 +57,9 @@ class UnknownProfilePackageUse(results.ProfilesResult, results.Warning):
7908 @property
7909 def desc(self):
7910 s = pluralism(self.flags)
7911 - flags = ', '.join(self.flags)
7912 - atom = f'{self.atom}[{flags}]'
7913 - return f'{self.path!r}: unknown package USE flag{s}: {atom!r}'
7914 + flags = ", ".join(self.flags)
7915 + atom = f"{self.atom}[{flags}]"
7916 + return f"{self.path!r}: unknown package USE flag{s}: {atom!r}"
7917
7918
7919 class UnknownProfileUse(results.ProfilesResult, results.Warning):
7920 @@ -73,8 +73,8 @@ class UnknownProfileUse(results.ProfilesResult, results.Warning):
7921 @property
7922 def desc(self):
7923 s = pluralism(self.flags)
7924 - flags = ', '.join(map(repr, self.flags))
7925 - return f'{self.path!r}: unknown USE flag{s}: {flags}'
7926 + flags = ", ".join(map(repr, self.flags))
7927 + return f"{self.path!r}: unknown USE flag{s}: {flags}"
7928
7929
7930 class UnknownProfilePackageKeywords(results.ProfilesResult, results.Warning):
7931 @@ -89,8 +89,8 @@ class UnknownProfilePackageKeywords(results.ProfilesResult, results.Warning):
7932 @property
7933 def desc(self):
7934 s = pluralism(self.keywords)
7935 - keywords = ', '.join(map(repr, self.keywords))
7936 - return f'{self.path!r}: unknown package keyword{s}: {self.atom}: {keywords}'
7937 + keywords = ", ".join(map(repr, self.keywords))
7938 + return f"{self.path!r}: unknown package keyword{s}: {self.atom}: {keywords}"
7939
7940
7941 class UnknownProfileUseExpand(results.ProfilesResult, results.Warning):
7942 @@ -104,8 +104,8 @@ class UnknownProfileUseExpand(results.ProfilesResult, results.Warning):
7943 @property
7944 def desc(self):
7945 s = pluralism(self.groups)
7946 - groups = ', '.join(self.groups)
7947 - return f'{self.path!r}: unknown USE_EXPAND group{s}: {groups}'
7948 + groups = ", ".join(self.groups)
7949 + return f"{self.path!r}: unknown USE_EXPAND group{s}: {groups}"
7950
7951
7952 class ProfileWarning(results.ProfilesResult, results.LogWarning):
7953 @@ -118,8 +118,8 @@ class ProfileError(results.ProfilesResult, results.LogError):
7954
7955 # mapping of profile log levels to result classes
7956 _logmap = (
7957 - base.LogMap('pkgcore.log.logger.warning', ProfileWarning),
7958 - base.LogMap('pkgcore.log.logger.error', ProfileError),
7959 + base.LogMap("pkgcore.log.logger.warning", ProfileWarning),
7960 + base.LogMap("pkgcore.log.logger.error", ProfileError),
7961 )
7962
7963
7964 @@ -145,12 +145,18 @@ class ProfilesCheck(Check):
7965
7966 _source = sources.ProfilesRepoSource
7967 required_addons = (addons.UseAddon, addons.KeywordsAddon)
7968 - known_results = frozenset([
7969 - UnknownProfilePackage, UnmatchedProfilePackageUnmask,
7970 - UnknownProfilePackageUse, UnknownProfileUse,
7971 - UnknownProfilePackageKeywords, UnknownProfileUseExpand,
7972 - ProfileWarning, ProfileError,
7973 - ])
7974 + known_results = frozenset(
7975 + [
7976 + UnknownProfilePackage,
7977 + UnmatchedProfilePackageUnmask,
7978 + UnknownProfilePackageUse,
7979 + UnknownProfileUse,
7980 + UnknownProfilePackageKeywords,
7981 + UnknownProfileUseExpand,
7982 + ProfileWarning,
7983 + ProfileError,
7984 + ]
7985 + )
7986
7987 # mapping between known files and verification methods
7988 known_files = {}
7989 @@ -165,16 +171,18 @@ class ProfilesCheck(Check):
7990
7991 local_iuse = {use for _pkg, (use, _desc) in repo.config.use_local_desc}
7992 self.available_iuse = frozenset(
7993 - local_iuse | use_addon.global_iuse |
7994 - use_addon.global_iuse_expand | use_addon.global_iuse_implicit)
7995 + local_iuse
7996 + | use_addon.global_iuse
7997 + | use_addon.global_iuse_expand
7998 + | use_addon.global_iuse_implicit
7999 + )
8000
8001 - @verify_files(('parent', 'parents'),
8002 - ('eapi', 'eapi'))
8003 + @verify_files(("parent", "parents"), ("eapi", "eapi"))
8004 def _pull_attr(self, *args):
8005 """Verification only needs to pull the profile attr."""
8006 yield from ()
8007
8008 - @verify_files(('deprecated', 'deprecated'))
8009 + @verify_files(("deprecated", "deprecated"))
8010 def _deprecated(self, filename, node, vals):
8011 # make sure replacement profile exists
8012 if vals is not None:
8013 @@ -183,47 +191,51 @@ class ProfilesCheck(Check):
8014 addons.profiles.ProfileNode(pjoin(self.profiles_dir, replacement))
8015 except profiles_mod.ProfileError:
8016 yield ProfileError(
8017 - f'nonexistent replacement {replacement!r} '
8018 - f'for deprecated profile: {node.name!r}')
8019 + f"nonexistent replacement {replacement!r} "
8020 + f"for deprecated profile: {node.name!r}"
8021 + )
8022
8023 # non-spec files
8024 - @verify_files(('package.keywords', 'keywords'),
8025 - ('package.accept_keywords', 'accept_keywords'))
8026 + @verify_files(("package.keywords", "keywords"), ("package.accept_keywords", "accept_keywords"))
8027 def _pkg_keywords(self, filename, node, vals):
8028 for atom, keywords in vals:
8029 if invalid := sorted(set(keywords) - self.keywords.valid):
8030 - yield UnknownProfilePackageKeywords(
8031 - pjoin(node.name, filename), atom, invalid)
8032 -
8033 - @verify_files(('use.force', 'use_force'),
8034 - ('use.stable.force', 'use_stable_force'),
8035 - ('use.mask', 'use_mask'),
8036 - ('use.stable.mask', 'use_stable_mask'))
8037 + yield UnknownProfilePackageKeywords(pjoin(node.name, filename), atom, invalid)
8038 +
8039 + @verify_files(
8040 + ("use.force", "use_force"),
8041 + ("use.stable.force", "use_stable_force"),
8042 + ("use.mask", "use_mask"),
8043 + ("use.stable.mask", "use_stable_mask"),
8044 + )
8045 def _use(self, filename, node, vals):
8046 # TODO: give ChunkedDataDict some dict view methods
8047 d = vals.render_to_dict()
8048 for _, entries in d.items():
8049 for _, disabled, enabled in entries:
8050 if unknown_disabled := set(disabled) - self.available_iuse:
8051 - flags = ('-' + u for u in unknown_disabled)
8052 - yield UnknownProfileUse(
8053 - pjoin(node.name, filename), flags)
8054 + flags = ("-" + u for u in unknown_disabled)
8055 + yield UnknownProfileUse(pjoin(node.name, filename), flags)
8056 if unknown_enabled := set(enabled) - self.available_iuse:
8057 - yield UnknownProfileUse(
8058 - pjoin(node.name, filename), unknown_enabled)
8059 + yield UnknownProfileUse(pjoin(node.name, filename), unknown_enabled)
8060
8061 - @verify_files(('packages', 'packages'),
8062 - ('package.unmask', 'unmasks'),
8063 - ('package.deprecated', 'pkg_deprecated'))
8064 + @verify_files(
8065 + ("packages", "packages"),
8066 + ("package.unmask", "unmasks"),
8067 + ("package.deprecated", "pkg_deprecated"),
8068 + )
8069 def _pkg_atoms(self, filename, node, vals):
8070 for x in iflatten_instance(vals, atom_cls):
8071 if not self.search_repo.match(x):
8072 yield UnknownProfilePackage(pjoin(node.name, filename), x)
8073
8074 - @verify_files(('package.mask', 'masks'),)
8075 + @verify_files(
8076 + ("package.mask", "masks"),
8077 + )
8078 def _pkg_masks(self, filename, node, vals):
8079 - all_masked = set().union(*(masked[1]
8080 - for p in profiles_mod.ProfileStack(node.path).stack if (masked := p.masks)))
8081 + all_masked = set().union(
8082 + *(masked[1] for p in profiles_mod.ProfileStack(node.path).stack if (masked := p.masks))
8083 + )
8084
8085 unmasked, masked = vals
8086 for x in masked:
8087 @@ -235,11 +247,13 @@ class ProfilesCheck(Check):
8088 elif x not in all_masked:
8089 yield UnmatchedProfilePackageUnmask(pjoin(node.name, filename), x)
8090
8091 - @verify_files(('package.use', 'pkg_use'),
8092 - ('package.use.force', 'pkg_use_force'),
8093 - ('package.use.stable.force', 'pkg_use_stable_force'),
8094 - ('package.use.mask', 'pkg_use_mask'),
8095 - ('package.use.stable.mask', 'pkg_use_stable_mask'))
8096 + @verify_files(
8097 + ("package.use", "pkg_use"),
8098 + ("package.use.force", "pkg_use_force"),
8099 + ("package.use.stable.force", "pkg_use_stable_force"),
8100 + ("package.use.mask", "pkg_use_mask"),
8101 + ("package.use.stable.mask", "pkg_use_stable_mask"),
8102 + )
8103 def _pkg_use(self, filename, node, vals):
8104 # TODO: give ChunkedDataDict some dict view methods
8105 d = vals
8106 @@ -251,19 +265,18 @@ class ProfilesCheck(Check):
8107 if pkgs := self.search_repo.match(a):
8108 available = {u for pkg in pkgs for u in pkg.iuse_stripped}
8109 if unknown_disabled := set(disabled) - available:
8110 - flags = ('-' + u for u in unknown_disabled)
8111 - yield UnknownProfilePackageUse(
8112 - pjoin(node.name, filename), a, flags)
8113 + flags = ("-" + u for u in unknown_disabled)
8114 + yield UnknownProfilePackageUse(pjoin(node.name, filename), a, flags)
8115 if unknown_enabled := set(enabled) - available:
8116 yield UnknownProfilePackageUse(
8117 - pjoin(node.name, filename), a, unknown_enabled)
8118 + pjoin(node.name, filename), a, unknown_enabled
8119 + )
8120 else:
8121 - yield UnknownProfilePackage(
8122 - pjoin(node.name, filename), a)
8123 + yield UnknownProfilePackage(pjoin(node.name, filename), a)
8124
8125 - @verify_files(('make.defaults', 'make_defaults'))
8126 + @verify_files(("make.defaults", "make_defaults"))
8127 def _make_defaults(self, filename, node, vals):
8128 - if defined := set(vals.get('USE_EXPAND', '').split()):
8129 + if defined := set(vals.get("USE_EXPAND", "").split()):
8130 if unknown := defined - self.use_expand_groups:
8131 yield UnknownProfileUseExpand(pjoin(node.name, filename), sorted(unknown))
8132
8133 @@ -286,8 +299,8 @@ class UnusedProfileDirs(results.ProfilesResult, results.Warning):
8134 @property
8135 def desc(self):
8136 s = pluralism(self.dirs)
8137 - dirs = ', '.join(map(repr, self.dirs))
8138 - return f'unused profile dir{s}: {dirs}'
8139 + dirs = ", ".join(map(repr, self.dirs))
8140 + return f"unused profile dir{s}: {dirs}"
8141
8142
8143 class ArchesWithoutProfiles(results.ProfilesResult, results.Warning):
8144 @@ -299,9 +312,9 @@ class ArchesWithoutProfiles(results.ProfilesResult, results.Warning):
8145
8146 @property
8147 def desc(self):
8148 - es = pluralism(self.arches, plural='es')
8149 - arches = ', '.join(self.arches)
8150 - return f'arch{es} without profiles: {arches}'
8151 + es = pluralism(self.arches, plural="es")
8152 + arches = ", ".join(self.arches)
8153 + return f"arch{es} without profiles: {arches}"
8154
8155
8156 class NonexistentProfilePath(results.ProfilesResult, results.Error):
8157 @@ -313,7 +326,7 @@ class NonexistentProfilePath(results.ProfilesResult, results.Error):
8158
8159 @property
8160 def desc(self):
8161 - return f'nonexistent profile path: {self.path!r}'
8162 + return f"nonexistent profile path: {self.path!r}"
8163
8164
8165 class LaggingProfileEapi(results.ProfilesResult, results.Warning):
8166 @@ -329,8 +342,8 @@ class LaggingProfileEapi(results.ProfilesResult, results.Warning):
8167 @property
8168 def desc(self):
8169 return (
8170 - f'{self.profile!r} profile has EAPI {self.eapi}, '
8171 - f'{self.parent!r} parent has EAPI {self.parent_eapi}'
8172 + f"{self.profile!r} profile has EAPI {self.eapi}, "
8173 + f"{self.parent!r} parent has EAPI {self.parent_eapi}"
8174 )
8175
8176
8177 @@ -352,13 +365,13 @@ class _ProfileEapiResult(results.ProfilesResult):
8178 class BannedProfileEapi(_ProfileEapiResult, results.Error):
8179 """Profile has an EAPI that is banned in the repository."""
8180
8181 - _type = 'banned'
8182 + _type = "banned"
8183
8184
8185 class DeprecatedProfileEapi(_ProfileEapiResult, results.Warning):
8186 """Profile has an EAPI that is deprecated in the repository."""
8187
8188 - _type = 'deprecated'
8189 + _type = "deprecated"
8190
8191
8192 class UnknownCategoryDirs(results.ProfilesResult, results.Warning):
8193 @@ -373,9 +386,9 @@ class UnknownCategoryDirs(results.ProfilesResult, results.Warning):
8194
8195 @property
8196 def desc(self):
8197 - dirs = ', '.join(self.dirs)
8198 + dirs = ", ".join(self.dirs)
8199 s = pluralism(self.dirs)
8200 - return f'unknown category dir{s}: {dirs}'
8201 + return f"unknown category dir{s}: {dirs}"
8202
8203
8204 class NonexistentCategories(results.ProfilesResult, results.Warning):
8205 @@ -387,9 +400,9 @@ class NonexistentCategories(results.ProfilesResult, results.Warning):
8206
8207 @property
8208 def desc(self):
8209 - categories = ', '.join(self.categories)
8210 - ies = pluralism(self.categories, singular='y', plural='ies')
8211 - return f'nonexistent profiles/categories entr{ies}: {categories}'
8212 + categories = ", ".join(self.categories)
8213 + ies = pluralism(self.categories, singular="y", plural="ies")
8214 + return f"nonexistent profiles/categories entr{ies}: {categories}"
8215
8216
8217 def dir_parents(path):
8218 @@ -399,11 +412,11 @@ def dir_parents(path):
8219 >>> list(dir_parents('/root/foo/bar/baz'))
8220 ['root/foo/bar', 'root/foo', 'root']
8221 """
8222 - path = os.path.normpath(path.strip('/'))
8223 + path = os.path.normpath(path.strip("/"))
8224 while path:
8225 yield path
8226 dirname, _basename = os.path.split(path)
8227 - path = dirname.rstrip('/')
8228 + path = dirname.rstrip("/")
8229
8230
8231 class RepoProfilesCheck(RepoCheck):
8232 @@ -415,14 +428,23 @@ class RepoProfilesCheck(RepoCheck):
8233
8234 _source = (sources.EmptySource, (base.profiles_scope,))
8235 required_addons = (addons.profiles.ProfileAddon,)
8236 - known_results = frozenset([
8237 - ArchesWithoutProfiles, UnusedProfileDirs, NonexistentProfilePath,
8238 - UnknownCategoryDirs, NonexistentCategories, LaggingProfileEapi,
8239 - ProfileError, ProfileWarning, BannedProfileEapi, DeprecatedProfileEapi,
8240 - ])
8241 + known_results = frozenset(
8242 + [
8243 + ArchesWithoutProfiles,
8244 + UnusedProfileDirs,
8245 + NonexistentProfilePath,
8246 + UnknownCategoryDirs,
8247 + NonexistentCategories,
8248 + LaggingProfileEapi,
8249 + ProfileError,
8250 + ProfileWarning,
8251 + BannedProfileEapi,
8252 + DeprecatedProfileEapi,
8253 + ]
8254 + )
8255
8256 # known profile status types for the gentoo repo
8257 - known_profile_statuses = frozenset(['stable', 'dev', 'exp'])
8258 + known_profile_statuses = frozenset(["stable", "dev", "exp"])
8259
8260 def __init__(self, *args, profile_addon):
8261 super().__init__(*args)
8262 @@ -433,17 +455,21 @@ class RepoProfilesCheck(RepoCheck):
8263
8264 def finish(self):
8265 if self.options.gentoo_repo:
8266 - if unknown_category_dirs := set(self.repo.category_dirs).difference(self.repo.categories):
8267 + if unknown_category_dirs := set(self.repo.category_dirs).difference(
8268 + self.repo.categories
8269 + ):
8270 yield UnknownCategoryDirs(sorted(unknown_category_dirs))
8271 - if nonexistent_categories := set(self.repo.config.categories).difference(self.repo.category_dirs):
8272 + if nonexistent_categories := set(self.repo.config.categories).difference(
8273 + self.repo.category_dirs
8274 + ):
8275 yield NonexistentCategories(sorted(nonexistent_categories))
8276 if arches_without_profiles := set(self.arches) - set(self.repo.profiles.arches()):
8277 yield ArchesWithoutProfiles(sorted(arches_without_profiles))
8278
8279 - root_profile_dirs = {'embedded'}
8280 + root_profile_dirs = {"embedded"}
8281 available_profile_dirs = set()
8282 for root, _dirs, _files in os.walk(self.profiles_dir):
8283 - if d := root[len(self.profiles_dir):].lstrip('/'):
8284 + if d := root[len(self.profiles_dir) :].lstrip("/"):
8285 available_profile_dirs.add(d)
8286 available_profile_dirs -= self.non_profile_dirs | root_profile_dirs
8287
8288 @@ -456,8 +482,11 @@ class RepoProfilesCheck(RepoCheck):
8289 # forcibly parse profiles.desc and convert log warnings/errors into reports
8290 with base.LogReports(*_logmap) as log_reports:
8291 profiles = Profiles.parse(
8292 - self.profiles_dir, self.repo.repo_id,
8293 - known_status=known_profile_statuses, known_arch=self.arches)
8294 + self.profiles_dir,
8295 + self.repo.repo_id,
8296 + known_status=known_profile_statuses,
8297 + known_arch=self.arches,
8298 + )
8299 yield from log_reports
8300
8301 banned_eapis = self.repo.config.profile_eapis_banned
8302 @@ -484,8 +513,7 @@ class RepoProfilesCheck(RepoCheck):
8303
8304 for profile, parents in lagging_profile_eapi.items():
8305 parent = parents[-1]
8306 - yield LaggingProfileEapi(
8307 - profile.name, str(profile.eapi), parent.name, str(parent.eapi))
8308 + yield LaggingProfileEapi(profile.name, str(profile.eapi), parent.name, str(parent.eapi))
8309 for profile in banned_profile_eapi:
8310 yield BannedProfileEapi(profile.name, profile.eapi)
8311 for profile in deprecated_profile_eapi:
8312
8313 diff --git a/src/pkgcheck/checks/python.py b/src/pkgcheck/checks/python.py
8314 index d8f7eb0b..510689bb 100644
8315 --- a/src/pkgcheck/checks/python.py
8316 +++ b/src/pkgcheck/checks/python.py
8317 @@ -15,14 +15,14 @@ from .. import addons, bash, results, sources
8318 from . import Check
8319
8320 # NB: distutils-r1 inherits one of the first two
8321 -ECLASSES = frozenset(['python-r1', 'python-single-r1', 'python-any-r1'])
8322 +ECLASSES = frozenset(["python-r1", "python-single-r1", "python-any-r1"])
8323
8324 -IUSE_PREFIX = 'python_targets_'
8325 -IUSE_PREFIX_S = 'python_single_target_'
8326 +IUSE_PREFIX = "python_targets_"
8327 +IUSE_PREFIX_S = "python_single_target_"
8328
8329 -GITHUB_ARCHIVE_RE = re.compile(r'^https://github\.com/[^/]+/[^/]+/archive/')
8330 -SNAPSHOT_RE = re.compile(r'[a-fA-F0-9]{40}\.tar\.gz$')
8331 -USE_FLAGS_PYTHON_USEDEP = re.compile(r'\[(.+,)?\$\{PYTHON_USEDEP\}(,.+)?\]$')
8332 +GITHUB_ARCHIVE_RE = re.compile(r"^https://github\.com/[^/]+/[^/]+/archive/")
8333 +SNAPSHOT_RE = re.compile(r"[a-fA-F0-9]{40}\.tar\.gz$")
8334 +USE_FLAGS_PYTHON_USEDEP = re.compile(r"\[(.+,)?\$\{PYTHON_USEDEP\}(,.+)?\]$")
8335
8336
8337 def get_python_eclass(pkg):
8338 @@ -30,8 +30,7 @@ def get_python_eclass(pkg):
8339 # All three eclasses block one another, but check and throw an error
8340 # just in case it isn't caught when sourcing the ebuild.
8341 if len(eclasses) > 1:
8342 - raise ValueError(
8343 - f"python eclasses are mutually exclusive: [ {', '.join(eclasses)} ]")
8344 + raise ValueError(f"python eclasses are mutually exclusive: [ {', '.join(eclasses)} ]")
8345 return next(iter(eclasses)) if eclasses else None
8346
8347
8348 @@ -139,10 +138,7 @@ class DistutilsNonPEP517Build(results.VersionResult, results.Warning):
8349
8350 @property
8351 def desc(self):
8352 - return (
8353 - "uses deprecated non-PEP517 build mode, please switch to "
8354 - "DISTUTILS_USE_PEP517=..."
8355 - )
8356 + return "uses deprecated non-PEP517 build mode, please switch to " "DISTUTILS_USE_PEP517=..."
8357
8358
8359 class PythonHasVersionUsage(results.LinesResult, results.Style):
8360 @@ -158,7 +154,7 @@ class PythonHasVersionUsage(results.LinesResult, results.Style):
8361
8362 @property
8363 def desc(self):
8364 - return f'usage of has_version {self.lines_str}, replace with python_has_version'
8365 + return f"usage of has_version {self.lines_str}, replace with python_has_version"
8366
8367
8368 class PythonHasVersionMissingPythonUseDep(results.LineResult, results.Error):
8369 @@ -174,7 +170,9 @@ class PythonHasVersionMissingPythonUseDep(results.LineResult, results.Error):
8370
8371 @property
8372 def desc(self):
8373 - return f'line: {self.lineno}: missing [${{PYTHON_USEDEP}}] suffix for argument "{self.line}"'
8374 + return (
8375 + f'line: {self.lineno}: missing [${{PYTHON_USEDEP}}] suffix for argument "{self.line}"'
8376 + )
8377
8378
8379 class PythonAnyMismatchedUseHasVersionCheck(results.VersionResult, results.Warning):
8380 @@ -198,8 +196,8 @@ class PythonAnyMismatchedUseHasVersionCheck(results.VersionResult, results.Warni
8381 @property
8382 def desc(self):
8383 s = pluralism(self.use_flags)
8384 - use_flags = ', '.join(map(str, self.use_flags))
8385 - return f'{self.dep_category}: mismatch for {self.dep_atom} check use flag{s} [{use_flags}] in {self.location}'
8386 + use_flags = ", ".join(map(str, self.use_flags))
8387 + return f"{self.dep_category}: mismatch for {self.dep_atom} check use flag{s} [{use_flags}] in {self.location}"
8388
8389
8390 class PythonAnyMismatchedDepHasVersionCheck(results.VersionResult, results.Warning):
8391 @@ -222,8 +220,9 @@ class PythonAnyMismatchedDepHasVersionCheck(results.VersionResult, results.Warni
8392
8393 @property
8394 def desc(self):
8395 - use_flags = ', '.join(map(str, self.use_flags))
8396 - return f'{self.dep_category}: missing check for {self.dep_atom}[{use_flags}] in {self.location!r}'
8397 + use_flags = ", ".join(map(str, self.use_flags))
8398 + return f"{self.dep_category}: missing check for {self.dep_atom}[{use_flags}] in {self.location!r}"
8399 +
8400
8401 class PythonCheck(Check):
8402 """Python eclass checks.
8403 @@ -233,32 +232,37 @@ class PythonCheck(Check):
8404 """
8405
8406 _source = sources.EbuildParseRepoSource
8407 - known_results = frozenset([
8408 - MissingPythonEclass, PythonMissingRequiredUse,
8409 - PythonMissingDeps, PythonRuntimeDepInAnyR1, PythonEclassError,
8410 - DistutilsNonPEP517Build,
8411 - PythonHasVersionUsage,
8412 - PythonHasVersionMissingPythonUseDep,
8413 - PythonAnyMismatchedUseHasVersionCheck,
8414 - PythonAnyMismatchedDepHasVersionCheck,
8415 - ])
8416 + known_results = frozenset(
8417 + [
8418 + MissingPythonEclass,
8419 + PythonMissingRequiredUse,
8420 + PythonMissingDeps,
8421 + PythonRuntimeDepInAnyR1,
8422 + PythonEclassError,
8423 + DistutilsNonPEP517Build,
8424 + PythonHasVersionUsage,
8425 + PythonHasVersionMissingPythonUseDep,
8426 + PythonAnyMismatchedUseHasVersionCheck,
8427 + PythonAnyMismatchedDepHasVersionCheck,
8428 + ]
8429 + )
8430
8431 has_version_known_flags = {
8432 - '-b': 'BDEPEND',
8433 - '-r': 'RDEPEND',
8434 - '-d': 'DEPEND',
8435 - '--host-root': 'BDEPEND',
8436 + "-b": "BDEPEND",
8437 + "-r": "RDEPEND",
8438 + "-d": "DEPEND",
8439 + "--host-root": "BDEPEND",
8440 }
8441
8442 has_version_default = {
8443 - 'has_version': 'DEPEND',
8444 - 'python_has_version': 'BDEPEND',
8445 + "has_version": "DEPEND",
8446 + "python_has_version": "BDEPEND",
8447 }
8448
8449 eclass_any_dep_func = {
8450 - 'python-single-r1': 'python_gen_cond_dep',
8451 - 'python-any-r1': 'python_gen_any_dep',
8452 - 'python-r1': 'python_gen_any_dep',
8453 + "python-single-r1": "python_gen_cond_dep",
8454 + "python-any-r1": "python_gen_any_dep",
8455 + "python-r1": "python_gen_any_dep",
8456 }
8457
8458 def scan_tree_recursively(self, deptree, expected_cls):
8459 @@ -269,8 +273,7 @@ class PythonCheck(Check):
8460 yield deptree
8461
8462 def check_required_use(self, requse, flags, prefix, container_cls):
8463 - for token in self.scan_tree_recursively(requse,
8464 - values.ContainmentMatch2):
8465 + for token in self.scan_tree_recursively(requse, values.ContainmentMatch2):
8466 # pkgcore collapses single flag in ||/^^, so expect top-level flags
8467 # when len(flags) == 1
8468 if len(flags) > 1 and not isinstance(token, container_cls):
8469 @@ -281,7 +284,7 @@ class PythonCheck(Check):
8470 continue
8471 name = next(iter(x.vals))
8472 if name.startswith(prefix):
8473 - matched.add(name[len(prefix):])
8474 + matched.add(name[len(prefix) :])
8475 elif isinstance(token, container_cls):
8476 # skip the ||/^^ if it contains at least one foreign flag
8477 break
8478 @@ -304,7 +307,7 @@ class PythonCheck(Check):
8479 continue
8480 if not any(is_python_interpreter(y) for y in x if isinstance(y, atom)):
8481 continue
8482 - matched.add(flag[len(prefix):])
8483 + matched.add(flag[len(prefix) :])
8484 if matched == flags:
8485 return True
8486 return False
8487 @@ -322,7 +325,7 @@ class PythonCheck(Check):
8488 pep517_value = None
8489
8490 for var_node, _ in bash.var_assign_query.captures(pkg.tree.root_node):
8491 - var_name = pkg.node_str(var_node.child_by_field_name('name'))
8492 + var_name = pkg.node_str(var_node.child_by_field_name("name"))
8493
8494 if var_name == "DISTUTILS_OPTIONAL":
8495 has_distutils_optional = True
8496 @@ -334,7 +337,6 @@ class PythonCheck(Check):
8497 # there's nothing for us to do anyway.
8498 has_distutils_deps = True
8499
8500 -
8501 if pep517_value is None:
8502 yield DistutilsNonPEP517Build(pkg=pkg)
8503 elif has_distutils_optional and not has_distutils_deps and pep517_value != "no":
8504 @@ -344,11 +346,14 @@ class PythonCheck(Check):
8505 if "dev-python/gpep517" not in iflatten_instance(pkg.bdepend, atom):
8506 yield PythonMissingDeps("BDEPEND", pkg=pkg, dep_value="DISTUTILS_DEPS")
8507
8508 -
8509 @staticmethod
8510 def _prepare_deps(deps: str):
8511 try:
8512 - deps_str = deps.strip('\"\'').replace('\\$', '$').replace('${PYTHON_USEDEP}', 'pkgcheck_python_usedep')
8513 + deps_str = (
8514 + deps.strip("\"'")
8515 + .replace("\\$", "$")
8516 + .replace("${PYTHON_USEDEP}", "pkgcheck_python_usedep")
8517 + )
8518 return iflatten_instance(DepSet.parse(deps_str, atom), atom)
8519 except DepsetParseError:
8520 # if we are unable to parse that dep's string, skip it
8521 @@ -357,18 +362,20 @@ class PythonCheck(Check):
8522 def build_python_gen_any_dep_calls(self, pkg, any_dep_func):
8523 check_deps = defaultdict(set)
8524 for var_node in pkg.global_query(bash.var_assign_query):
8525 - name = pkg.node_str(var_node.child_by_field_name('name'))
8526 - if name in {'DEPEND', 'BDEPEND'}:
8527 + name = pkg.node_str(var_node.child_by_field_name("name"))
8528 + if name in {"DEPEND", "BDEPEND"}:
8529 for call_node, _ in bash.cmd_query.captures(var_node):
8530 - call_name = pkg.node_str(call_node.child_by_field_name('name'))
8531 + call_name = pkg.node_str(call_node.child_by_field_name("name"))
8532 if call_name == any_dep_func and len(call_node.children) > 1:
8533 - check_deps[name].update(self._prepare_deps(
8534 - pkg.node_str(call_node.children[1])))
8535 + check_deps[name].update(
8536 + self._prepare_deps(pkg.node_str(call_node.children[1]))
8537 + )
8538 return {dep: frozenset(atoms) for dep, atoms in check_deps.items()}
8539
8540 - def report_mismatch_check_deps(self, pkg, python_check_deps, has_version_checked_deps, any_dep_func):
8541 - for dep_type in frozenset(python_check_deps.keys()).union(
8542 - has_version_checked_deps.keys()):
8543 + def report_mismatch_check_deps(
8544 + self, pkg, python_check_deps, has_version_checked_deps, any_dep_func
8545 + ):
8546 + for dep_type in frozenset(python_check_deps.keys()).union(has_version_checked_deps.keys()):
8547 extra = has_version_checked_deps[dep_type] - python_check_deps.get(dep_type, set())
8548 missing = python_check_deps.get(dep_type, set()) - has_version_checked_deps[dep_type]
8549 for diff, other, location in (
8550 @@ -380,28 +387,35 @@ class PythonCheck(Check):
8551 for other_dep in other:
8552 if dep_atom == str(other_dep.versioned_atom):
8553 if diff_flags := set(other_dep.use) - set(dep.use):
8554 - yield PythonAnyMismatchedUseHasVersionCheck(pkg=pkg,
8555 - dep_category=dep_type, dep_atom=dep_atom,
8556 - use_flags=diff_flags, location=location)
8557 + yield PythonAnyMismatchedUseHasVersionCheck(
8558 + pkg=pkg,
8559 + dep_category=dep_type,
8560 + dep_atom=dep_atom,
8561 + use_flags=diff_flags,
8562 + location=location,
8563 + )
8564 break
8565 else:
8566 - use_flags = {'${PYTHON_USEDEP}'} | set(dep.use) \
8567 - - {'pkgcheck_python_usedep'}
8568 - yield PythonAnyMismatchedDepHasVersionCheck(pkg=pkg,
8569 - dep_category=dep_type, dep_atom=dep_atom,
8570 - use_flags=use_flags, location=location)
8571 + use_flags = {"${PYTHON_USEDEP}"} | set(dep.use) - {"pkgcheck_python_usedep"}
8572 + yield PythonAnyMismatchedDepHasVersionCheck(
8573 + pkg=pkg,
8574 + dep_category=dep_type,
8575 + dep_atom=dep_atom,
8576 + use_flags=use_flags,
8577 + location=location,
8578 + )
8579
8580 @staticmethod
8581 def _prepare_dep_type(pkg, dep_type: str) -> str:
8582 - if dep_type == 'BDEPEND' not in pkg.eapi.dep_keys:
8583 - return 'DEPEND'
8584 + if dep_type == "BDEPEND" not in pkg.eapi.dep_keys:
8585 + return "DEPEND"
8586 return dep_type
8587
8588 def check_python_check_deps(self, pkg, func_node, python_check_deps, any_dep_func):
8589 has_version_checked_deps = defaultdict(set)
8590 has_version_lines = set()
8591 for node, _ in bash.cmd_query.captures(func_node):
8592 - call_name = pkg.node_str(node.child_by_field_name('name'))
8593 + call_name = pkg.node_str(node.child_by_field_name("name"))
8594 if call_name == "has_version":
8595 lineno, _ = node.start_point
8596 has_version_lines.add(lineno + 1)
8597 @@ -412,19 +426,21 @@ class PythonCheck(Check):
8598 if new_dep_mode := self.has_version_known_flags.get(arg_name, None):
8599 dep_mode = self._prepare_dep_type(pkg, new_dep_mode)
8600 else:
8601 - arg_name = arg_name.strip('\"\'')
8602 + arg_name = arg_name.strip("\"'")
8603 if not USE_FLAGS_PYTHON_USEDEP.search(arg_name):
8604 lineno, _ = arg.start_point
8605 yield PythonHasVersionMissingPythonUseDep(
8606 - lineno=lineno+1, line=arg_name, pkg=pkg)
8607 + lineno=lineno + 1, line=arg_name, pkg=pkg
8608 + )
8609 else:
8610 - has_version_checked_deps[dep_mode].update(
8611 - self._prepare_deps(arg_name))
8612 + has_version_checked_deps[dep_mode].update(self._prepare_deps(arg_name))
8613
8614 if has_version_lines:
8615 yield PythonHasVersionUsage(lines=sorted(has_version_lines), pkg=pkg)
8616
8617 - yield from self.report_mismatch_check_deps(pkg, python_check_deps, has_version_checked_deps, any_dep_func)
8618 + yield from self.report_mismatch_check_deps(
8619 + pkg, python_check_deps, has_version_checked_deps, any_dep_func
8620 + )
8621
8622 def feed(self, pkg):
8623 try:
8624 @@ -450,21 +466,21 @@ class PythonCheck(Check):
8625 else:
8626 recommendation = "python-any-r1"
8627 yield MissingPythonEclass(recommendation, attr.upper(), str(p), pkg=pkg)
8628 - elif eclass in ('python-r1', 'python-single-r1'):
8629 + elif eclass in ("python-r1", "python-single-r1"):
8630 # grab Python implementations from IUSE
8631 - iuse = {x.lstrip('+-') for x in pkg.iuse}
8632 + iuse = {x.lstrip("+-") for x in pkg.iuse}
8633
8634 - if eclass == 'python-r1':
8635 - flags = {x[len(IUSE_PREFIX):] for x in iuse if x.startswith(IUSE_PREFIX)}
8636 + if eclass == "python-r1":
8637 + flags = {x[len(IUSE_PREFIX) :] for x in iuse if x.startswith(IUSE_PREFIX)}
8638 req_use_args = (flags, IUSE_PREFIX, OrRestriction)
8639 else:
8640 - flags = {x[len(IUSE_PREFIX_S):] for x in iuse if x.startswith(IUSE_PREFIX_S)}
8641 + flags = {x[len(IUSE_PREFIX_S) :] for x in iuse if x.startswith(IUSE_PREFIX_S)}
8642 req_use_args = (flags, IUSE_PREFIX_S, JustOneRestriction)
8643
8644 if not self.check_required_use(pkg.required_use, *req_use_args):
8645 yield PythonMissingRequiredUse(pkg=pkg)
8646 if not self.check_depend(pkg.rdepend, *(req_use_args[:2])):
8647 - yield PythonMissingDeps('RDEPEND', pkg=pkg)
8648 + yield PythonMissingDeps("RDEPEND", pkg=pkg)
8649 else: # python-any-r1
8650 for attr in ("rdepend", "pdepend"):
8651 for p in iflatten_instance(getattr(pkg, attr), atom):
8652 @@ -476,10 +492,12 @@ class PythonCheck(Check):
8653 for attr in ("depend", "bdepend")
8654 for p in iflatten_instance(getattr(pkg, attr), atom)
8655 ):
8656 - yield PythonMissingDeps('DEPEND', pkg=pkg)
8657 + yield PythonMissingDeps("DEPEND", pkg=pkg)
8658
8659 # We're not interested in testing fake objects from TestPythonCheck
8660 - if eclass is None or not isinstance(pkg, sources._ParsedPkg) or not hasattr(pkg, 'tree'): # pragma: no cover
8661 + if (
8662 + eclass is None or not isinstance(pkg, sources._ParsedPkg) or not hasattr(pkg, "tree")
8663 + ): # pragma: no cover
8664 return
8665
8666 if "distutils-r1" in pkg.inherited:
8667 @@ -488,9 +506,11 @@ class PythonCheck(Check):
8668 any_dep_func = self.eclass_any_dep_func[eclass]
8669 python_check_deps = self.build_python_gen_any_dep_calls(pkg, any_dep_func)
8670 for func_node, _ in bash.func_query.captures(pkg.tree.root_node):
8671 - func_name = pkg.node_str(func_node.child_by_field_name('name'))
8672 + func_name = pkg.node_str(func_node.child_by_field_name("name"))
8673 if func_name == "python_check_deps":
8674 - yield from self.check_python_check_deps(pkg, func_node, python_check_deps, any_dep_func)
8675 + yield from self.check_python_check_deps(
8676 + pkg, func_node, python_check_deps, any_dep_func
8677 + )
8678
8679
8680 class PythonCompatUpdate(results.VersionResult, results.Info):
8681 @@ -503,8 +523,8 @@ class PythonCompatUpdate(results.VersionResult, results.Info):
8682 @property
8683 def desc(self):
8684 s = pluralism(self.updates)
8685 - updates = ', '.join(self.updates)
8686 - return f'PYTHON_COMPAT update{s} available: {updates}'
8687 + updates = ", ".join(self.updates)
8688 + return f"PYTHON_COMPAT update{s} available: {updates}"
8689
8690
8691 class PythonCompatCheck(Check):
8692 @@ -520,32 +540,32 @@ class PythonCompatCheck(Check):
8693 super().__init__(*args)
8694 repo = self.options.target_repo
8695 # sorter for python targets leveraging USE_EXPAND flag ordering from repo
8696 - self.sorter = repo.use_expand_sorter('python_targets')
8697 + self.sorter = repo.use_expand_sorter("python_targets")
8698
8699 # determine available PYTHON_TARGET use flags
8700 targets = []
8701 for target, _desc in repo.use_expand_desc.get(IUSE_PREFIX[:-1], ()):
8702 - if target[len(IUSE_PREFIX):].startswith('python'):
8703 - targets.append(target[len(IUSE_PREFIX):])
8704 + if target[len(IUSE_PREFIX) :].startswith("python"):
8705 + targets.append(target[len(IUSE_PREFIX) :])
8706 multi_targets = tuple(sorted(targets, key=self.sorter))
8707
8708 # determine available PYTHON_SINGLE_TARGET use flags
8709 targets = []
8710 for target, _desc in repo.use_expand_desc.get(IUSE_PREFIX_S[:-1], ()):
8711 - if target[len(IUSE_PREFIX_S):].startswith('python'):
8712 - targets.append(target[len(IUSE_PREFIX_S):])
8713 + if target[len(IUSE_PREFIX_S) :].startswith("python"):
8714 + targets.append(target[len(IUSE_PREFIX_S) :])
8715 single_targets = tuple(sorted(targets, key=self.sorter))
8716
8717 self.params = {
8718 - 'python-r1': (multi_targets, IUSE_PREFIX, None),
8719 - 'python-single-r1': (single_targets, (IUSE_PREFIX, IUSE_PREFIX_S), None),
8720 - 'python-any-r1': (multi_targets, (IUSE_PREFIX, IUSE_PREFIX_S), ('depend', 'bdepend')),
8721 + "python-r1": (multi_targets, IUSE_PREFIX, None),
8722 + "python-single-r1": (single_targets, (IUSE_PREFIX, IUSE_PREFIX_S), None),
8723 + "python-any-r1": (multi_targets, (IUSE_PREFIX, IUSE_PREFIX_S), ("depend", "bdepend")),
8724 }
8725
8726 def python_deps(self, deps, prefix):
8727 for dep in (x for x in deps if x.use):
8728 for x in dep.use:
8729 - if x.startswith(('-', '!')):
8730 + if x.startswith(("-", "!")):
8731 continue
8732 if x.startswith(prefix):
8733 yield dep.no_usedeps
8734 @@ -573,19 +593,25 @@ class PythonCompatCheck(Check):
8735 try:
8736 # determine the latest supported python version
8737 latest_target = sorted(
8738 - (f"python{x.slot.replace('.', '_')}" for x in deps
8739 - if x.key == 'dev-lang/python' and x.slot is not None), key=self.sorter)[-1]
8740 + (
8741 + f"python{x.slot.replace('.', '_')}"
8742 + for x in deps
8743 + if x.key == "dev-lang/python" and x.slot is not None
8744 + ),
8745 + key=self.sorter,
8746 + )[-1]
8747 except IndexError:
8748 # should be flagged by PythonMissingDeps
8749 return
8750
8751 # ignore pkgs that probably aren't py3 compatible
8752 - if latest_target == 'python2_7':
8753 + if latest_target == "python2_7":
8754 return
8755
8756 # determine python impls to target
8757 - targets = set(itertools.takewhile(
8758 - lambda x: x != latest_target, reversed(available_targets)))
8759 + targets = set(
8760 + itertools.takewhile(lambda x: x != latest_target, reversed(available_targets))
8761 + )
8762
8763 if targets:
8764 try:
8765 @@ -595,7 +621,9 @@ class PythonCompatCheck(Check):
8766 latest = sorted(self.options.search_repo.match(dep))[-1]
8767 targets.intersection_update(
8768 f"python{x.rsplit('python', 1)[-1]}"
8769 - for x in latest.iuse_stripped if x.startswith(prefix))
8770 + for x in latest.iuse_stripped
8771 + if x.startswith(prefix)
8772 + )
8773 if not targets:
8774 return
8775 except IndexError:
8776 @@ -624,20 +652,20 @@ class PythonGHDistfileSuffix(results.VersionResult, results.Warning):
8777
8778 @property
8779 def desc(self):
8780 - return (f"GitHub archive {self.filename!r} ({self.uri!r}) is not "
8781 - "using '.gh.tar.gz' suffix")
8782 + return (
8783 + f"GitHub archive {self.filename!r} ({self.uri!r}) is not " "using '.gh.tar.gz' suffix"
8784 + )
8785
8786
8787 class PythonGHDistfileSuffixCheck(Check):
8788 - """Check ebuilds with PyPI remotes for missing ".gh.tar.gz" suffixes.
8789 - """
8790 + """Check ebuilds with PyPI remotes for missing ".gh.tar.gz" suffixes."""
8791
8792 required_addons = (addons.UseAddon,)
8793 known_results = frozenset([PythonGHDistfileSuffix])
8794
8795 def __init__(self, *args, use_addon):
8796 super().__init__(*args)
8797 - self.iuse_filter = use_addon.get_filter('fetchables')
8798 + self.iuse_filter = use_addon.get_filter("fetchables")
8799
8800 def feed(self, pkg):
8801 # consider only packages with pypi remote-id
8802 @@ -646,10 +674,12 @@ class PythonGHDistfileSuffixCheck(Check):
8803
8804 # look for GitHub archives
8805 fetchables, _ = self.iuse_filter(
8806 - (fetch.fetchable,), pkg,
8807 - pkg.generate_fetchables(allow_missing_checksums=True,
8808 - ignore_unknown_mirrors=True,
8809 - skip_default_mirrors=True))
8810 + (fetch.fetchable,),
8811 + pkg,
8812 + pkg.generate_fetchables(
8813 + allow_missing_checksums=True, ignore_unknown_mirrors=True, skip_default_mirrors=True
8814 + ),
8815 + )
8816 for f in fetchables:
8817 # skip files that have the correct suffix already
8818 if f.filename.endswith(".gh.tar.gz"):
8819
8820 diff --git a/src/pkgcheck/checks/repo.py b/src/pkgcheck/checks/repo.py
8821 index b12eb352..8b12f68d 100644
8822 --- a/src/pkgcheck/checks/repo.py
8823 +++ b/src/pkgcheck/checks/repo.py
8824 @@ -28,14 +28,13 @@ class RepoDirCheck(GentooRepoCheck, RepoCheck):
8825 known_results = frozenset([BinaryFile])
8826
8827 # repo root level directories that are ignored
8828 - ignored_root_dirs = frozenset(['.git'])
8829 + ignored_root_dirs = frozenset([".git"])
8830
8831 def __init__(self, *args, git_addon):
8832 super().__init__(*args)
8833 self.gitignored = git_addon.gitignored
8834 self.repo = self.options.target_repo
8835 - self.ignored_paths = {
8836 - pjoin(self.repo.location, x) for x in self.ignored_root_dirs}
8837 + self.ignored_paths = {pjoin(self.repo.location, x) for x in self.ignored_root_dirs}
8838 self.dirs = [self.repo.location]
8839
8840 def finish(self):
8841 @@ -47,7 +46,7 @@ class RepoDirCheck(GentooRepoCheck, RepoCheck):
8842 self.dirs.append(entry.path)
8843 elif is_binary(entry.path):
8844 if not self.gitignored(entry.path):
8845 - rel_path = entry.path[len(self.repo.location) + 1:]
8846 + rel_path = entry.path[len(self.repo.location) + 1 :]
8847 yield BinaryFile(rel_path)
8848
8849
8850 @@ -58,7 +57,7 @@ class EmptyCategoryDir(results.CategoryResult, results.Warning):
8851
8852 @property
8853 def desc(self):
8854 - return f'empty category directory: {self.category}'
8855 + return f"empty category directory: {self.category}"
8856
8857
8858 class EmptyPackageDir(results.PackageResult, results.Warning):
8859 @@ -68,7 +67,7 @@ class EmptyPackageDir(results.PackageResult, results.Warning):
8860
8861 @property
8862 def desc(self):
8863 - return f'empty package directory: {self.category}/{self.package}'
8864 + return f"empty package directory: {self.category}/{self.package}"
8865
8866
8867 class EmptyDirsCheck(GentooRepoCheck, RepoCheck):
8868
8869 diff --git a/src/pkgcheck/checks/repo_metadata.py b/src/pkgcheck/checks/repo_metadata.py
8870 index a8466f0f..003ff891 100644
8871 --- a/src/pkgcheck/checks/repo_metadata.py
8872 +++ b/src/pkgcheck/checks/repo_metadata.py
8873 @@ -79,11 +79,16 @@ class PackageUpdatesCheck(RepoCheck):
8874 """Scan profiles/updates/* for outdated entries and other issues."""
8875
8876 _source = (sources.EmptySource, (base.profiles_scope,))
8877 - known_results = frozenset([
8878 - MultiMovePackageUpdate, OldMultiMovePackageUpdate,
8879 - OldPackageUpdate, MovedPackageUpdate, BadPackageUpdate,
8880 - RedundantPackageUpdate,
8881 - ])
8882 + known_results = frozenset(
8883 + [
8884 + MultiMovePackageUpdate,
8885 + OldMultiMovePackageUpdate,
8886 + OldPackageUpdate,
8887 + MovedPackageUpdate,
8888 + BadPackageUpdate,
8889 + RedundantPackageUpdate,
8890 + ]
8891 + )
8892
8893 def __init__(self, *args):
8894 super().__init__(*args)
8895 @@ -92,8 +97,8 @@ class PackageUpdatesCheck(RepoCheck):
8896
8897 def finish(self):
8898 logmap = (
8899 - base.LogMap('pkgcore.log.logger.warning', MovedPackageUpdate),
8900 - base.LogMap('pkgcore.log.logger.error', BadPackageUpdate),
8901 + base.LogMap("pkgcore.log.logger.warning", MovedPackageUpdate),
8902 + base.LogMap("pkgcore.log.logger.error", BadPackageUpdate),
8903 )
8904
8905 # convert log warnings/errors into reports
8906 @@ -106,8 +111,8 @@ class PackageUpdatesCheck(RepoCheck):
8907 old_slotmove_updates = {}
8908
8909 for pkg, updates in repo_updates.items():
8910 - move_updates = [x for x in updates if x[0] == 'move']
8911 - slotmove_updates = [x for x in updates if x[0] == 'slotmove']
8912 + move_updates = [x for x in updates if x[0] == "move"]
8913 + slotmove_updates = [x for x in updates if x[0] == "slotmove"]
8914
8915 # check for multi-updates, a -> b, b -> c, ...
8916 if len(move_updates) > 1:
8917 @@ -126,7 +131,7 @@ class PackageUpdatesCheck(RepoCheck):
8918 # scan updates for old entries with removed packages
8919 for x in slotmove_updates:
8920 _, pkg, newslot = x
8921 - orig_line = ('slotmove', str(pkg)[:-(len(pkg.slot) + 1)], pkg.slot, newslot)
8922 + orig_line = ("slotmove", str(pkg)[: -(len(pkg.slot) + 1)], pkg.slot, newslot)
8923 if not self.search_repo.match(pkg.unversioned_atom):
8924 # reproduce updates file line data for result output
8925 old_slotmove_updates[pkg.key] = orig_line
8926 @@ -160,8 +165,8 @@ class UnusedLicenses(results.Warning):
8927 @property
8928 def desc(self):
8929 s = pluralism(self.licenses)
8930 - licenses = ', '.join(self.licenses)
8931 - return f'unused license{s}: {licenses}'
8932 + licenses = ", ".join(self.licenses)
8933 + return f"unused license{s}: {licenses}"
8934
8935
8936 class UnusedLicensesCheck(RepoCheck):
8937 @@ -199,8 +204,8 @@ class UnusedMirrors(results.Warning):
8938 @property
8939 def desc(self):
8940 s = pluralism(self.mirrors)
8941 - mirrors = ', '.join(self.mirrors)
8942 - return f'unused mirror{s}: {mirrors}'
8943 + mirrors = ", ".join(self.mirrors)
8944 + return f"unused mirror{s}: {mirrors}"
8945
8946
8947 class UnusedMirrorsCheck(MirrorsCheck, RepoCheck):
8948 @@ -234,9 +239,9 @@ class UnusedEclasses(results.Warning):
8949
8950 @property
8951 def desc(self):
8952 - es = pluralism(self.eclasses, plural='es')
8953 - eclasses = ', '.join(self.eclasses)
8954 - return f'unused eclass{es}: {eclasses}'
8955 + es = pluralism(self.eclasses, plural="es")
8956 + eclasses = ", ".join(self.eclasses)
8957 + return f"unused eclass{es}: {eclasses}"
8958
8959
8960 class UnusedEclassesCheck(RepoCheck):
8961 @@ -253,8 +258,9 @@ class UnusedEclassesCheck(RepoCheck):
8962 master_eclasses = set()
8963 for repo in self.options.target_repo.masters:
8964 master_eclasses.update(repo.eclass_cache.eclasses.keys())
8965 - self.unused_eclasses = set(
8966 - self.options.target_repo.eclass_cache.eclasses.keys()) - master_eclasses
8967 + self.unused_eclasses = (
8968 + set(self.options.target_repo.eclass_cache.eclasses.keys()) - master_eclasses
8969 + )
8970
8971 def feed(self, pkg):
8972 self.unused_eclasses.difference_update(pkg.inherited)
8973 @@ -276,8 +282,8 @@ class UnknownLicenses(results.Warning):
8974 @property
8975 def desc(self):
8976 s = pluralism(self.licenses)
8977 - licenses = ', '.join(self.licenses)
8978 - return f'license group {self.group!r} has unknown license{s}: [ {licenses} ]'
8979 + licenses = ", ".join(self.licenses)
8980 + return f"license group {self.group!r} has unknown license{s}: [ {licenses} ]"
8981
8982
8983 class LicenseGroupsCheck(RepoCheck):
8984 @@ -307,10 +313,10 @@ class PotentialLocalUse(results.Info):
8985 @property
8986 def desc(self):
8987 s = pluralism(self.pkgs)
8988 - pkgs = ', '.join(self.pkgs)
8989 + pkgs = ", ".join(self.pkgs)
8990 return (
8991 - f'global USE flag {self.flag!r} is a potential local, '
8992 - f'used by {len(self.pkgs)} package{s}: {pkgs}'
8993 + f"global USE flag {self.flag!r} is a potential local, "
8994 + f"used by {len(self.pkgs)} package{s}: {pkgs}"
8995 )
8996
8997
8998 @@ -324,8 +330,8 @@ class UnusedGlobalUse(results.Warning):
8999 @property
9000 def desc(self):
9001 s = pluralism(self.flags)
9002 - flags = ', '.join(self.flags)
9003 - return f'use.desc unused flag{s}: {flags}'
9004 + flags = ", ".join(self.flags)
9005 + return f"use.desc unused flag{s}: {flags}"
9006
9007
9008 class UnusedGlobalUseExpand(results.Warning):
9009 @@ -338,8 +344,8 @@ class UnusedGlobalUseExpand(results.Warning):
9010 @property
9011 def desc(self):
9012 s = pluralism(self.flags)
9013 - flags = ', '.join(self.flags)
9014 - return f'unused flag{s}: {flags}'
9015 + flags = ", ".join(self.flags)
9016 + return f"unused flag{s}: {flags}"
9017
9018
9019 class PotentialGlobalUse(results.Info):
9020 @@ -354,7 +360,8 @@ class PotentialGlobalUse(results.Info):
9021 def desc(self):
9022 return (
9023 f"local USE flag {self.flag!r} is a potential global "
9024 - f"used by {len(self.pkgs)} packages: {', '.join(self.pkgs)}")
9025 + f"used by {len(self.pkgs)} packages: {', '.join(self.pkgs)}"
9026 + )
9027
9028
9029 def _dfs(graph, start, visited=None):
9030 @@ -369,11 +376,16 @@ def _dfs(graph, start, visited=None):
9031 class GlobalUseCheck(RepoCheck):
9032 """Check global USE and USE_EXPAND flags for various issues."""
9033
9034 - _source = (sources.RepositoryRepoSource, (), (('source', sources.PackageRepoSource),))
9035 + _source = (sources.RepositoryRepoSource, (), (("source", sources.PackageRepoSource),))
9036 required_addons = (addons.UseAddon,)
9037 - known_results = frozenset([
9038 - PotentialLocalUse, PotentialGlobalUse, UnusedGlobalUse, UnusedGlobalUseExpand,
9039 - ])
9040 + known_results = frozenset(
9041 + [
9042 + PotentialLocalUse,
9043 + PotentialGlobalUse,
9044 + UnusedGlobalUse,
9045 + UnusedGlobalUseExpand,
9046 + ]
9047 + )
9048
9049 def __init__(self, *args, use_addon):
9050 super().__init__(*args)
9051 @@ -394,7 +406,7 @@ class GlobalUseCheck(RepoCheck):
9052 # calculate USE flag description difference ratios
9053 diffs = {}
9054 for i, (i_pkg, i_desc) in enumerate(pkgs):
9055 - for j, (j_pkg, j_desc) in enumerate(pkgs[i + 1:]):
9056 + for j, (j_pkg, j_desc) in enumerate(pkgs[i + 1 :]):
9057 diffs[(i, i + j + 1)] = SequenceMatcher(None, i_desc, j_desc).ratio()
9058
9059 # create an adjacency list using all closely matching flags pairs
9060 @@ -424,11 +436,12 @@ class GlobalUseCheck(RepoCheck):
9061 yield [pkgs[i][0] for i in component]
9062
9063 def finish(self):
9064 - repo_global_use = {
9065 - flag for matcher, (flag, desc) in self.repo.config.use_desc}
9066 + repo_global_use = {flag for matcher, (flag, desc) in self.repo.config.use_desc}
9067 repo_global_use_expand = {
9068 - flag for use_expand in self.repo.config.use_expand_desc.values()
9069 - for flag, desc in use_expand}
9070 + flag
9071 + for use_expand in self.repo.config.use_expand_desc.values()
9072 + for flag, desc in use_expand
9073 + }
9074 repo_local_use = self.repo.config.use_local_desc
9075 unused_global_use = []
9076 unused_global_use_expand = []
9077 @@ -481,7 +494,8 @@ class MissingChksum(results.VersionResult, results.Warning):
9078 def desc(self):
9079 return (
9080 f"{self.filename!r} missing required chksums: "
9081 - f"{', '.join(self.missing)}; has chksums: {', '.join(self.existing)}")
9082 + f"{', '.join(self.missing)}; has chksums: {', '.join(self.existing)}"
9083 + )
9084
9085
9086 class DeprecatedChksum(results.VersionResult, results.Warning):
9087 @@ -495,8 +509,8 @@ class DeprecatedChksum(results.VersionResult, results.Warning):
9088 @property
9089 def desc(self):
9090 s = pluralism(self.deprecated)
9091 - deprecated = ', '.join(self.deprecated)
9092 - return f'{self.filename!r} has deprecated checksum{s}: {deprecated}'
9093 + deprecated = ", ".join(self.deprecated)
9094 + return f"{self.filename!r} has deprecated checksum{s}: {deprecated}"
9095
9096
9097 class MissingManifest(results.VersionResult, results.Error):
9098 @@ -509,8 +523,8 @@ class MissingManifest(results.VersionResult, results.Error):
9099 @property
9100 def desc(self):
9101 s = pluralism(self.files)
9102 - files = ', '.join(self.files)
9103 - return f'distfile{s} missing from Manifest: [ {files} ]'
9104 + files = ", ".join(self.files)
9105 + return f"distfile{s} missing from Manifest: [ {files} ]"
9106
9107
9108 class UnknownManifest(results.PackageResult, results.Warning):
9109 @@ -523,8 +537,8 @@ class UnknownManifest(results.PackageResult, results.Warning):
9110 @property
9111 def desc(self):
9112 s = pluralism(self.files)
9113 - files = ', '.join(self.files)
9114 - return f'unknown distfile{s} in Manifest: [ {files} ]'
9115 + files = ", ".join(self.files)
9116 + return f"unknown distfile{s} in Manifest: [ {files} ]"
9117
9118
9119 class UnnecessaryManifest(results.PackageResult, results.Warning):
9120 @@ -537,14 +551,14 @@ class UnnecessaryManifest(results.PackageResult, results.Warning):
9121 @property
9122 def desc(self):
9123 s = pluralism(self.files)
9124 - files = ', '.join(self.files)
9125 - return f'unnecessary file{s} in Manifest: [ {files} ]'
9126 + files = ", ".join(self.files)
9127 + return f"unnecessary file{s} in Manifest: [ {files} ]"
9128
9129
9130 class InvalidManifest(results.MetadataError, results.PackageResult):
9131 """Package's Manifest file is invalid."""
9132
9133 - attr = 'manifest'
9134 + attr = "manifest"
9135
9136
9137 class ManifestCheck(Check):
9138 @@ -556,19 +570,27 @@ class ManifestCheck(Check):
9139
9140 required_addons = (addons.UseAddon,)
9141 _source = sources.PackageRepoSource
9142 - known_results = frozenset([
9143 - MissingChksum, MissingManifest, UnknownManifest, UnnecessaryManifest,
9144 - DeprecatedChksum, InvalidManifest,
9145 - ])
9146 + known_results = frozenset(
9147 + [
9148 + MissingChksum,
9149 + MissingManifest,
9150 + UnknownManifest,
9151 + UnnecessaryManifest,
9152 + DeprecatedChksum,
9153 + InvalidManifest,
9154 + ]
9155 + )
9156
9157 def __init__(self, *args, use_addon):
9158 super().__init__(*args)
9159 repo = self.options.target_repo
9160 self.preferred_checksums = frozenset(
9161 - repo.config.manifests.hashes if hasattr(repo, 'config') else ())
9162 + repo.config.manifests.hashes if hasattr(repo, "config") else ()
9163 + )
9164 self.required_checksums = frozenset(
9165 - repo.config.manifests.required_hashes if hasattr(repo, 'config') else ())
9166 - self.iuse_filter = use_addon.get_filter('fetchables')
9167 + repo.config.manifests.required_hashes if hasattr(repo, "config") else ()
9168 + )
9169 + self.iuse_filter = use_addon.get_filter("fetchables")
9170
9171 def feed(self, pkgset):
9172 pkg_manifest = pkgset[0].manifest
9173 @@ -577,8 +599,10 @@ class ManifestCheck(Check):
9174 for pkg in pkgset:
9175 pkg.release_cached_data()
9176 fetchables, _ = self.iuse_filter(
9177 - (fetch.fetchable,), pkg,
9178 - pkg.generate_fetchables(allow_missing_checksums=True, ignore_unknown_mirrors=True))
9179 + (fetch.fetchable,),
9180 + pkg,
9181 + pkg.generate_fetchables(allow_missing_checksums=True, ignore_unknown_mirrors=True),
9182 + )
9183 fetchables = set(fetchables)
9184 pkg.release_cached_data()
9185
9186 @@ -593,8 +617,8 @@ class ManifestCheck(Check):
9187 missing = self.required_checksums.difference(f_inst.chksums)
9188 if f_inst.filename not in missing_manifests and missing:
9189 yield MissingChksum(
9190 - f_inst.filename, sorted(missing),
9191 - sorted(f_inst.chksums), pkg=pkg)
9192 + f_inst.filename, sorted(missing), sorted(f_inst.chksums), pkg=pkg
9193 + )
9194 elif f_inst.chksums and self.preferred_checksums != frozenset(f_inst.chksums):
9195 deprecated = set(f_inst.chksums).difference(self.preferred_checksums)
9196 yield DeprecatedChksum(f_inst.filename, sorted(deprecated), pkg=pkg)
9197 @@ -602,7 +626,7 @@ class ManifestCheck(Check):
9198
9199 if pkg_manifest.thin:
9200 unnecessary_manifests = []
9201 - for attr in ('aux_files', 'ebuilds', 'misc'):
9202 + for attr in ("aux_files", "ebuilds", "misc"):
9203 unnecessary_manifests.extend(getattr(pkg_manifest, attr, []))
9204 if unnecessary_manifests:
9205 yield UnnecessaryManifest(sorted(unnecessary_manifests), pkg=pkgset[0])
9206 @@ -624,12 +648,12 @@ class ConflictingChksums(results.VersionResult, results.Error):
9207 @property
9208 def desc(self):
9209 s = pluralism(self.chksums)
9210 - chksums = ', '.join(self.chksums)
9211 + chksums = ", ".join(self.chksums)
9212 pkgs_s = pluralism(self.pkgs)
9213 - pkgs = ', '.join(self.pkgs)
9214 + pkgs = ", ".join(self.pkgs)
9215 return (
9216 - f'distfile {self.filename!r} has different checksum{s} '
9217 - f'({chksums}) for package{pkgs_s}: {pkgs}'
9218 + f"distfile {self.filename!r} has different checksum{s} "
9219 + f"({chksums}) for package{pkgs_s}: {pkgs}"
9220 )
9221
9222
9223 @@ -644,9 +668,9 @@ class MatchingChksums(results.VersionResult, results.Warning):
9224
9225 @property
9226 def desc(self):
9227 - msg = f'distfile {self.filename!r} matches checksums for {self.orig_file!r}'
9228 - if f'{self.category}/{self.package}' != self.orig_pkg:
9229 - msg += f' from {self.orig_pkg}'
9230 + msg = f"distfile {self.filename!r} matches checksums for {self.orig_file!r}"
9231 + if f"{self.category}/{self.package}" != self.orig_pkg:
9232 + msg += f" from {self.orig_pkg}"
9233 return msg
9234
9235
9236 @@ -657,7 +681,7 @@ class ManifestCollisionCheck(Check):
9237 different filenames with matching checksums.
9238 """
9239
9240 - _source = (sources.RepositoryRepoSource, (), (('source', sources.PackageRepoSource),))
9241 + _source = (sources.RepositoryRepoSource, (), (("source", sources.PackageRepoSource),))
9242 known_results = frozenset([ConflictingChksums, MatchingChksums])
9243
9244 def __init__(self, *args):
9245 @@ -665,15 +689,14 @@ class ManifestCollisionCheck(Check):
9246 self.seen_files = {}
9247 self.seen_chksums = {}
9248 # ignore go.mod false positives (issue #228)
9249 - self._ignored_files_re = re.compile(r'^.*%2F@v.*\.mod$')
9250 + self._ignored_files_re = re.compile(r"^.*%2F@v.*\.mod$")
9251
9252 def _conflicts(self, pkg):
9253 """Check for similarly named distfiles with different checksums."""
9254 for filename, chksums in pkg.manifest.distfiles.items():
9255 existing = self.seen_files.get(filename)
9256 if existing is None:
9257 - self.seen_files[filename] = (
9258 - [pkg.key], dict(chksums.items()))
9259 + self.seen_files[filename] = ([pkg.key], dict(chksums.items()))
9260 continue
9261 seen_pkgs, seen_chksums = existing
9262 conflicting_chksums = []
9263
9264 diff --git a/src/pkgcheck/checks/reserved.py b/src/pkgcheck/checks/reserved.py
9265 index 8448179a..a67d1683 100644
9266 --- a/src/pkgcheck/checks/reserved.py
9267 +++ b/src/pkgcheck/checks/reserved.py
9268 @@ -7,12 +7,17 @@ from . import Check
9269
9270
9271 class _ReservedNameCheck(Check):
9272 - reserved_prefixes = ('__', 'abort', 'dyn', 'prep')
9273 - reserved_substrings = ('hook', 'paludis', 'portage') # 'ebuild' is special case
9274 - reserved_ebuild_regex = re.compile(r'(.*[^a-zA-Z])?ebuild.*')
9275 + reserved_prefixes = ("__", "abort", "dyn", "prep")
9276 + reserved_substrings = ("hook", "paludis", "portage") # 'ebuild' is special case
9277 + reserved_ebuild_regex = re.compile(r"(.*[^a-zA-Z])?ebuild.*")
9278
9279 """Portage variables whose use is half-legitimate and harmless if the package manager doesn't support them."""
9280 - special_whitelist = ('EBUILD_DEATH_HOOKS', 'EBUILD_SUCCESS_HOOKS', 'PORTAGE_QUIET', 'PORTAGE_ACTUAL_DISTDIR')
9281 + special_whitelist = (
9282 + "EBUILD_DEATH_HOOKS",
9283 + "EBUILD_SUCCESS_HOOKS",
9284 + "PORTAGE_QUIET",
9285 + "PORTAGE_ACTUAL_DISTDIR",
9286 + )
9287
9288 """Approved good exceptions to using of variables."""
9289 variables_usage_whitelist = {"EBUILD_PHASE", "EBUILD_PHASE_FUNC"}
9290 @@ -24,32 +29,37 @@ class _ReservedNameCheck(Check):
9291 test_name = used_name.lower()
9292 for reserved in self.reserved_prefixes:
9293 if test_name.startswith(reserved):
9294 - yield used_name, used_type, reserved, 'prefix', lineno+1
9295 + yield used_name, used_type, reserved, "prefix", lineno + 1
9296 for reserved in self.reserved_substrings:
9297 if reserved in test_name:
9298 - yield used_name, used_type, reserved, 'substring', lineno+1
9299 + yield used_name, used_type, reserved, "substring", lineno + 1
9300 if self.reserved_ebuild_regex.match(test_name):
9301 - yield used_name, used_type, 'ebuild', 'substring', lineno+1
9302 + yield used_name, used_type, "ebuild", "substring", lineno + 1
9303
9304 def _feed(self, item):
9305 - yield from self._check('function', {
9306 - item.node_str(node.child_by_field_name('name')): node.start_point
9307 - for node, _ in bash.func_query.captures(item.tree.root_node)
9308 - })
9309 + yield from self._check(
9310 + "function",
9311 + {
9312 + item.node_str(node.child_by_field_name("name")): node.start_point
9313 + for node, _ in bash.func_query.captures(item.tree.root_node)
9314 + },
9315 + )
9316 used_variables = {
9317 - item.node_str(node.child_by_field_name('name')): node.start_point
9318 + item.node_str(node.child_by_field_name("name")): node.start_point
9319 for node, _ in bash.var_assign_query.captures(item.tree.root_node)
9320 }
9321 for node, _ in bash.var_query.captures(item.tree.root_node):
9322 if (name := item.node_str(node)) not in self.variables_usage_whitelist:
9323 used_variables.setdefault(name, node.start_point)
9324 - yield from self._check('variable', used_variables)
9325 + yield from self._check("variable", used_variables)
9326
9327
9328 class EclassReservedName(results.EclassResult, results.Warning):
9329 """Eclass uses reserved variable or function name for package manager."""
9330
9331 - def __init__(self, used_name: str, used_type: str, reserved_word: str, reserved_type: str, **kwargs):
9332 + def __init__(
9333 + self, used_name: str, used_type: str, reserved_word: str, reserved_type: str, **kwargs
9334 + ):
9335 super().__init__(**kwargs)
9336 self.used_name = used_name
9337 self.used_type = used_type
9338 @@ -101,7 +111,7 @@ class EbuildReservedCheck(_ReservedNameCheck):
9339 super().__init__(options, **kwargs)
9340 self.phases_hooks = {
9341 eapi_name: {
9342 - f'{prefix}_{phase}' for phase in eapi.phases.values() for prefix in ('pre', 'post')
9343 + f"{prefix}_{phase}" for phase in eapi.phases.values() for prefix in ("pre", "post")
9344 }
9345 for eapi_name, eapi in EAPI.known_eapis.items()
9346 }
9347 @@ -111,7 +121,9 @@ class EbuildReservedCheck(_ReservedNameCheck):
9348 yield EbuildReservedName(*args, lineno=lineno, line=used_name, pkg=pkg)
9349
9350 for node, _ in bash.func_query.captures(pkg.tree.root_node):
9351 - used_name = pkg.node_str(node.child_by_field_name('name'))
9352 + used_name = pkg.node_str(node.child_by_field_name("name"))
9353 if used_name in self.phases_hooks[str(pkg.eapi)]:
9354 lineno, _ = node.start_point
9355 - yield EbuildReservedName('function', used_name, 'phase hook', lineno=lineno+1, line=used_name, pkg=pkg)
9356 + yield EbuildReservedName(
9357 + "function", used_name, "phase hook", lineno=lineno + 1, line=used_name, pkg=pkg
9358 + )
9359
9360 diff --git a/src/pkgcheck/checks/stablereq.py b/src/pkgcheck/checks/stablereq.py
9361 index 3f396e08..57e41a84 100644
9362 --- a/src/pkgcheck/checks/stablereq.py
9363 +++ b/src/pkgcheck/checks/stablereq.py
9364 @@ -20,7 +20,7 @@ class StableRequest(results.VersionResult, results.Info):
9365 @property
9366 def desc(self):
9367 s = pluralism(self.keywords)
9368 - keywords = ', '.join(self.keywords)
9369 + keywords = ", ".join(self.keywords)
9370 return (
9371 f"slot({self.slot}) no change in {self.age} days "
9372 f"for unstable keyword{s}: [ {keywords} ]"
9373 @@ -37,19 +37,25 @@ class StableRequestCheck(GentooRepoCheck):
9374 Note that packages with no stable keywords won't trigger this at all.
9375 Instead they'll be caught by the UnstableOnly check.
9376 """
9377 - _source = (sources.PackageRepoSource, (), (('source', sources.UnmaskedRepoSource),))
9378 +
9379 + _source = (sources.PackageRepoSource, (), (("source", sources.UnmaskedRepoSource),))
9380 required_addons = (addons.git.GitAddon,)
9381 known_results = frozenset([StableRequest])
9382
9383 @staticmethod
9384 def mangle_argparser(parser):
9385 parser.plugin.add_argument(
9386 - '--stabletime', metavar='DAYS', dest='stable_time', default=30,
9387 - type=arghparse.positive_int, help='set number of days before stabilisation',
9388 + "--stabletime",
9389 + metavar="DAYS",
9390 + dest="stable_time",
9391 + default=30,
9392 + type=arghparse.positive_int,
9393 + help="set number of days before stabilisation",
9394 docs="""
9395 An integer number of days before a package version is flagged by
9396 StableRequestCheck. Defaults to 30 days.
9397 - """)
9398 + """,
9399 + )
9400
9401 def __init__(self, *args, git_addon):
9402 super().__init__(*args)
9403 @@ -64,7 +70,7 @@ class StableRequestCheck(GentooRepoCheck):
9404 pkg_slotted[pkg.slot].append(pkg)
9405 pkg_keywords.update(pkg.keywords)
9406
9407 - if stable_pkg_keywords := {x for x in pkg_keywords if x[0] not in {'-', '~'}}:
9408 + if stable_pkg_keywords := {x for x in pkg_keywords if x[0] not in {"-", "~"}}:
9409 for slot, pkgs in sorted(pkg_slotted.items()):
9410 slot_keywords = set().union(*(pkg.keywords for pkg in pkgs))
9411 stable_slot_keywords = slot_keywords.intersection(stable_pkg_keywords)
9412 @@ -82,11 +88,11 @@ class StableRequestCheck(GentooRepoCheck):
9413 added = datetime.fromtimestamp(match.time)
9414 days_old = (self.today - added).days
9415 if days_old >= self.options.stable_time:
9416 - pkg_stable_keywords = {x.lstrip('~') for x in pkg.keywords}
9417 + pkg_stable_keywords = {x.lstrip("~") for x in pkg.keywords}
9418 if stable_slot_keywords:
9419 keywords = stable_slot_keywords.intersection(pkg_stable_keywords)
9420 else:
9421 keywords = stable_pkg_keywords.intersection(pkg_stable_keywords)
9422 - keywords = sorted('~' + x for x in keywords)
9423 + keywords = sorted("~" + x for x in keywords)
9424 yield StableRequest(slot, keywords, days_old, pkg=pkg)
9425 break
9426
9427 diff --git a/src/pkgcheck/checks/unstable_only.py b/src/pkgcheck/checks/unstable_only.py
9428 index 2d08f635..0fc8b9f6 100644
9429 --- a/src/pkgcheck/checks/unstable_only.py
9430 +++ b/src/pkgcheck/checks/unstable_only.py
9431 @@ -18,10 +18,10 @@ class UnstableOnly(results.PackageResult, results.Info):
9432
9433 @property
9434 def desc(self):
9435 - es = pluralism(self.arches, plural='es')
9436 - arches = ', '.join(self.arches)
9437 - versions = ', '.join(self.versions)
9438 - return f'for arch{es}: [ {arches} ], all versions are unstable: [ {versions} ]'
9439 + es = pluralism(self.arches, plural="es")
9440 + arches = ", ".join(self.arches)
9441 + versions = ", ".join(self.versions)
9442 + return f"for arch{es}: [ {arches} ], all versions are unstable: [ {versions} ]"
9443
9444
9445 class UnstableOnlyCheck(GentooRepoCheck):
9446 @@ -39,10 +39,8 @@ class UnstableOnlyCheck(GentooRepoCheck):
9447 self.arch_restricts = {}
9448 for arch in arches:
9449 self.arch_restricts[arch] = [
9450 - packages.PackageRestriction(
9451 - "keywords", values.ContainmentMatch2((arch,))),
9452 - packages.PackageRestriction(
9453 - "keywords", values.ContainmentMatch2((f"~{arch}",)))
9454 + packages.PackageRestriction("keywords", values.ContainmentMatch2((arch,))),
9455 + packages.PackageRestriction("keywords", values.ContainmentMatch2((f"~{arch}",))),
9456 ]
9457
9458 def feed(self, pkgset):
9459
9460 diff --git a/src/pkgcheck/checks/visibility.py b/src/pkgcheck/checks/visibility.py
9461 index 021a738f..5440db7f 100644
9462 --- a/src/pkgcheck/checks/visibility.py
9463 +++ b/src/pkgcheck/checks/visibility.py
9464 @@ -12,30 +12,29 @@ from . import Check
9465
9466
9467 class FakeConfigurable:
9468 - "Package wrapper binding profile data."""
9469 + "Package wrapper binding profile data." ""
9470
9471 configurable = True
9472 - __slots__ = ('use', 'iuse', '_forced_use', '_masked_use', '_pkg_use', '_raw_pkg', '_profile')
9473 + __slots__ = ("use", "iuse", "_forced_use", "_masked_use", "_pkg_use", "_raw_pkg", "_profile")
9474
9475 def __init__(self, pkg, profile):
9476 - object.__setattr__(self, '_raw_pkg', pkg)
9477 - object.__setattr__(self, '_profile', profile)
9478 -
9479 - object.__setattr__(
9480 - self, '_forced_use', self._profile.forced_use.pull_data(self._raw_pkg))
9481 - object.__setattr__(
9482 - self, '_masked_use', self._profile.masked_use.pull_data(self._raw_pkg))
9483 - object.__setattr__(
9484 - self, '_pkg_use', self._profile.pkg_use.pull_data(self._raw_pkg))
9485 - use_defaults = {x[1:] for x in pkg.iuse if x[0] == '+'}
9486 - enabled_use = (use_defaults | profile.use | self._pkg_use | self._forced_use) - self._masked_use
9487 - object.__setattr__(
9488 - self, 'use', frozenset(enabled_use & (profile.iuse_effective | pkg.iuse_effective)))
9489 + object.__setattr__(self, "_raw_pkg", pkg)
9490 + object.__setattr__(self, "_profile", profile)
9491 +
9492 + object.__setattr__(self, "_forced_use", self._profile.forced_use.pull_data(self._raw_pkg))
9493 + object.__setattr__(self, "_masked_use", self._profile.masked_use.pull_data(self._raw_pkg))
9494 + object.__setattr__(self, "_pkg_use", self._profile.pkg_use.pull_data(self._raw_pkg))
9495 + use_defaults = {x[1:] for x in pkg.iuse if x[0] == "+"}
9496 + enabled_use = (
9497 + use_defaults | profile.use | self._pkg_use | self._forced_use
9498 + ) - self._masked_use
9499 object.__setattr__(
9500 - self, 'iuse', frozenset(profile.iuse_effective.union(pkg.iuse_stripped)))
9501 + self, "use", frozenset(enabled_use & (profile.iuse_effective | pkg.iuse_effective))
9502 + )
9503 + object.__setattr__(self, "iuse", frozenset(profile.iuse_effective.union(pkg.iuse_stripped)))
9504
9505 def request_enable(self, attr, *vals):
9506 - if attr != 'use':
9507 + if attr != "use":
9508 return False
9509
9510 set_vals = frozenset(vals)
9511 @@ -47,7 +46,7 @@ class FakeConfigurable:
9512 return set_vals.isdisjoint(self._masked_use)
9513
9514 def request_disable(self, attr, *vals):
9515 - if attr != 'use':
9516 + if attr != "use":
9517 return False
9518
9519 set_vals = frozenset(vals)
9520 @@ -70,7 +69,7 @@ class FakeConfigurable:
9521 __getattr__ = klass.GetAttrProxy("_raw_pkg")
9522
9523 def __setattr__(self, attr, val):
9524 - raise AttributeError(self, 'is immutable')
9525 + raise AttributeError(self, "is immutable")
9526
9527
9528 class _BlockMemoryExhaustion(Exception):
9529 @@ -78,10 +77,13 @@ class _BlockMemoryExhaustion(Exception):
9530
9531
9532 # This is fast path code, hence the seperated implementations.
9533 -if getattr(atom, '_TRANSITIVE_USE_ATOM_BUG_IS_FIXED', False):
9534 +if getattr(atom, "_TRANSITIVE_USE_ATOM_BUG_IS_FIXED", False):
9535 +
9536 def _eapi2_flatten(val):
9537 return isinstance(val, atom) and not isinstance(val, transitive_use_atom)
9538 +
9539 else:
9540 +
9541 def _eapi2_flatten(val):
9542 if isinstance(val, transitive_use_atom):
9543 if len([x for x in val.use if x.endswith("?")]) > 16:
9544 @@ -107,13 +109,13 @@ class VisibleVcsPkg(results.VersionResult, results.Warning):
9545 @property
9546 def desc(self):
9547 if self.num_profiles is not None and self.num_profiles > 1:
9548 - num_profiles = f' ({self.num_profiles} total)'
9549 + num_profiles = f" ({self.num_profiles} total)"
9550 else:
9551 - num_profiles = ''
9552 + num_profiles = ""
9553
9554 return (
9555 f'VCS version visible for KEYWORDS="{self.arch}", '
9556 - f'profile {self.profile}{num_profiles}'
9557 + f"profile {self.profile}{num_profiles}"
9558 )
9559
9560
9561 @@ -128,8 +130,8 @@ class NonexistentDeps(results.VersionResult, results.Warning):
9562 @property
9563 def desc(self):
9564 s = pluralism(self.nonexistent)
9565 - nonexistent = ', '.join(self.nonexistent)
9566 - return f'{self.attr}: nonexistent package{s}: {nonexistent}'
9567 + nonexistent = ", ".join(self.nonexistent)
9568 + return f"{self.attr}: nonexistent package{s}: {nonexistent}"
9569
9570
9571 class UncheckableDep(results.VersionResult, results.Warning):
9572 @@ -147,8 +149,17 @@ class UncheckableDep(results.VersionResult, results.Warning):
9573 class NonsolvableDeps(results.VersionResult, results.AliasResult, results.Error):
9574 """No potential solution for a depset attribute."""
9575
9576 - def __init__(self, attr, keyword, profile, deps, profile_status,
9577 - profile_deprecated, num_profiles=None, **kwargs):
9578 + def __init__(
9579 + self,
9580 + attr,
9581 + keyword,
9582 + profile,
9583 + deps,
9584 + profile_status,
9585 + profile_deprecated,
9586 + num_profiles=None,
9587 + **kwargs,
9588 + ):
9589 super().__init__(**kwargs)
9590 self.attr = attr
9591 self.keyword = keyword
9592 @@ -160,12 +171,12 @@ class NonsolvableDeps(results.VersionResult, results.AliasResult, results.Error)
9593
9594 @property
9595 def desc(self):
9596 - profile_status = 'deprecated ' if self.profile_deprecated else ''
9597 - profile_status += self.profile_status or 'custom'
9598 + profile_status = "deprecated " if self.profile_deprecated else ""
9599 + profile_status += self.profile_status or "custom"
9600 if self.num_profiles is not None and self.num_profiles > 1:
9601 - num_profiles = f' ({self.num_profiles} total)'
9602 + num_profiles = f" ({self.num_profiles} total)"
9603 else:
9604 - num_profiles = ''
9605 + num_profiles = ""
9606
9607 return (
9608 f"nonsolvable depset({self.attr}) keyword({self.keyword}) "
9609 @@ -186,7 +197,7 @@ class NonsolvableDepsInExp(NonsolvableDeps):
9610 """No potential solution for dependency on exp profile."""
9611
9612 # results require experimental profiles to be enabled
9613 - _profile = 'exp'
9614 + _profile = "exp"
9615
9616
9617 class VisibilityCheck(feeds.EvaluateDepSet, feeds.QueryCache, Check):
9618 @@ -198,18 +209,24 @@ class VisibilityCheck(feeds.EvaluateDepSet, feeds.QueryCache, Check):
9619 """
9620
9621 required_addons = (addons.profiles.ProfileAddon,)
9622 - known_results = frozenset([
9623 - VisibleVcsPkg, NonexistentDeps, UncheckableDep,
9624 - NonsolvableDepsInStable, NonsolvableDepsInDev, NonsolvableDepsInExp,
9625 - ])
9626 + known_results = frozenset(
9627 + [
9628 + VisibleVcsPkg,
9629 + NonexistentDeps,
9630 + UncheckableDep,
9631 + NonsolvableDepsInStable,
9632 + NonsolvableDepsInDev,
9633 + NonsolvableDepsInExp,
9634 + ]
9635 + )
9636
9637 def __init__(self, *args, profile_addon):
9638 super().__init__(*args, profile_addon=profile_addon)
9639 self.profiles = profile_addon
9640 self.report_cls_map = {
9641 - 'stable': NonsolvableDepsInStable,
9642 - 'dev': NonsolvableDepsInDev,
9643 - 'exp': NonsolvableDepsInExp,
9644 + "stable": NonsolvableDepsInStable,
9645 + "dev": NonsolvableDepsInDev,
9646 + "exp": NonsolvableDepsInExp,
9647 }
9648
9649 def feed(self, pkg):
9650 @@ -238,8 +255,7 @@ class VisibilityCheck(feeds.EvaluateDepSet, feeds.QueryCache, Check):
9651 # on don't have to use the slower get method
9652 self.query_cache[node] = ()
9653 else:
9654 - matches = caching_iter(
9655 - self.options.search_repo.itermatch(node))
9656 + matches = caching_iter(self.options.search_repo.itermatch(node))
9657 if matches:
9658 self.query_cache[node] = matches
9659 if orig_node is not node:
9660 @@ -263,10 +279,8 @@ class VisibilityCheck(feeds.EvaluateDepSet, feeds.QueryCache, Check):
9661 continue
9662 depset = getattr(pkg, attr)
9663 profile_failures = defaultdict(lambda: defaultdict(set))
9664 - for edepset, profiles in self.collapse_evaluate_depset(
9665 - pkg, attr, depset):
9666 - for profile, failures in self.process_depset(
9667 - pkg, attr, depset, edepset, profiles):
9668 + for edepset, profiles in self.collapse_evaluate_depset(pkg, attr, depset):
9669 + for profile, failures in self.process_depset(pkg, attr, depset, edepset, profiles):
9670 failures = tuple(map(str, sorted(stable_unique(failures))))
9671 profile_failures[failures][profile.status].add(profile)
9672
9673 @@ -276,24 +290,36 @@ class VisibilityCheck(feeds.EvaluateDepSet, feeds.QueryCache, Check):
9674 for failures, profiles in profile_failures.items():
9675 for profile_status, cls in self.report_cls_map.items():
9676 for profile in sorted(
9677 - profiles.get(profile_status, ()),
9678 - key=attrgetter('key', 'name')):
9679 + profiles.get(profile_status, ()), key=attrgetter("key", "name")
9680 + ):
9681 yield cls(
9682 - attr, profile.key, profile.name, failures,
9683 - profile_status, profile.deprecated, pkg=pkg)
9684 + attr,
9685 + profile.key,
9686 + profile.name,
9687 + failures,
9688 + profile_status,
9689 + profile.deprecated,
9690 + pkg=pkg,
9691 + )
9692 else:
9693 # only report one failure per depset per profile type in regular mode
9694 for failures, profiles in profile_failures.items():
9695 for profile_status, cls in self.report_cls_map.items():
9696 status_profiles = sorted(
9697 - profiles.get(profile_status, ()),
9698 - key=attrgetter('key', 'name'))
9699 + profiles.get(profile_status, ()), key=attrgetter("key", "name")
9700 + )
9701 if status_profiles:
9702 profile = status_profiles[0]
9703 yield cls(
9704 - attr, profile.key, profile.name,
9705 - failures, profile_status,
9706 - profile.deprecated, len(status_profiles), pkg=pkg)
9707 + attr,
9708 + profile.key,
9709 + profile.name,
9710 + failures,
9711 + profile_status,
9712 + profile.deprecated,
9713 + len(status_profiles),
9714 + pkg=pkg,
9715 + )
9716
9717 def check_visibility_vcs(self, pkg):
9718 visible = []
9719
9720 diff --git a/src/pkgcheck/checks/whitespace.py b/src/pkgcheck/checks/whitespace.py
9721 index 356a3634..823a8cfd 100644
9722 --- a/src/pkgcheck/checks/whitespace.py
9723 +++ b/src/pkgcheck/checks/whitespace.py
9724 @@ -68,25 +68,48 @@ class BadWhitespaceCharacter(results.LineResult, results.Warning):
9725 @property
9726 def desc(self):
9727 return (
9728 - f'bad whitespace character {self.char} on line {self.lineno}'
9729 - f', char {self.position}: {self.line}'
9730 + f"bad whitespace character {self.char} on line {self.lineno}"
9731 + f", char {self.position}: {self.line}"
9732 )
9733
9734
9735 class WhitespaceData(NamedTuple):
9736 """Data format to register hardcoded list of bad whitespace characters."""
9737 +
9738 unicode_version: str
9739 chars: tuple
9740
9741
9742 whitespace_data = WhitespaceData(
9743 - unicode_version='12.1.0',
9744 + unicode_version="12.1.0",
9745 chars=(
9746 - '\x0b', '\x0c', '\r', '\x1c', '\x1d', '\x1e', '\x1f', '\x85', '\xa0',
9747 - '\u1680', '\u2000', '\u2001', '\u2002', '\u2003', '\u2004', '\u2005',
9748 - '\u2006', '\u2007', '\u2008', '\u2009', '\u200a', '\u2028', '\u2029',
9749 - '\u202f', '\u205f', '\u3000',
9750 - )
9751 + "\x0b",
9752 + "\x0c",
9753 + "\r",
9754 + "\x1c",
9755 + "\x1d",
9756 + "\x1e",
9757 + "\x1f",
9758 + "\x85",
9759 + "\xa0",
9760 + "\u1680",
9761 + "\u2000",
9762 + "\u2001",
9763 + "\u2002",
9764 + "\u2003",
9765 + "\u2004",
9766 + "\u2005",
9767 + "\u2006",
9768 + "\u2007",
9769 + "\u2008",
9770 + "\u2009",
9771 + "\u200a",
9772 + "\u2028",
9773 + "\u2029",
9774 + "\u202f",
9775 + "\u205f",
9776 + "\u3000",
9777 + ),
9778 )
9779
9780
9781 @@ -94,17 +117,23 @@ class WhitespaceCheck(Check):
9782 """Scan ebuild for useless whitespace."""
9783
9784 _source = sources.EbuildFileRepoSource
9785 - known_results = frozenset([
9786 - WhitespaceFound, WrongIndentFound, DoubleEmptyLine,
9787 - TrailingEmptyLine, NoFinalNewline, BadWhitespaceCharacter
9788 - ])
9789 + known_results = frozenset(
9790 + [
9791 + WhitespaceFound,
9792 + WrongIndentFound,
9793 + DoubleEmptyLine,
9794 + TrailingEmptyLine,
9795 + NoFinalNewline,
9796 + BadWhitespaceCharacter,
9797 + ]
9798 + )
9799
9800 - _indent_regex = re.compile('^\t* \t+')
9801 + _indent_regex = re.compile("^\t* \t+")
9802
9803 def __init__(self, *args):
9804 super().__init__(*args)
9805 - bad_whitespace = ''.join(whitespace_data.chars)
9806 - self.bad_whitespace_regex = re.compile(rf'(?P<char>[{bad_whitespace}])')
9807 + bad_whitespace = "".join(whitespace_data.chars)
9808 + self.bad_whitespace_regex = re.compile(rf"(?P<char>[{bad_whitespace}])")
9809
9810 def feed(self, pkg):
9811 lastlineempty = False
9812 @@ -116,14 +145,18 @@ class WhitespaceCheck(Check):
9813 for lineno, line in enumerate(pkg.lines, 1):
9814 for match in self.bad_whitespace_regex.finditer(line):
9815 yield BadWhitespaceCharacter(
9816 - repr(match.group('char')), match.end('char'),
9817 - line=repr(line), lineno=lineno, pkg=pkg)
9818 -
9819 - if line != '\n':
9820 + repr(match.group("char")),
9821 + match.end("char"),
9822 + line=repr(line),
9823 + lineno=lineno,
9824 + pkg=pkg,
9825 + )
9826 +
9827 + if line != "\n":
9828 lastlineempty = False
9829 - if line[-2:-1] == ' ' or line[-2:-1] == '\t':
9830 + if line[-2:-1] == " " or line[-2:-1] == "\t":
9831 trailing.append(lineno)
9832 - elif line[0] == ' ':
9833 + elif line[0] == " ":
9834 leading.append(lineno)
9835 if self._indent_regex.match(line):
9836 indent.append(lineno)
9837 @@ -132,9 +165,9 @@ class WhitespaceCheck(Check):
9838 else:
9839 lastlineempty = True
9840 if trailing:
9841 - yield WhitespaceFound('trailing', lines=trailing, pkg=pkg)
9842 + yield WhitespaceFound("trailing", lines=trailing, pkg=pkg)
9843 if leading:
9844 - yield WhitespaceFound('leading', lines=leading, pkg=pkg)
9845 + yield WhitespaceFound("leading", lines=leading, pkg=pkg)
9846 if indent:
9847 yield WrongIndentFound(indent, pkg=pkg)
9848 if double_empty:
9849 @@ -143,5 +176,5 @@ class WhitespaceCheck(Check):
9850 yield TrailingEmptyLine(pkg=pkg)
9851
9852 # Dealing with empty ebuilds is just paranoia
9853 - if pkg.lines and not pkg.lines[-1].endswith('\n'):
9854 + if pkg.lines and not pkg.lines[-1].endswith("\n"):
9855 yield NoFinalNewline(pkg=pkg)
9856
9857 diff --git a/src/pkgcheck/cli.py b/src/pkgcheck/cli.py
9858 index 5450788e..55e9f30a 100644
9859 --- a/src/pkgcheck/cli.py
9860 +++ b/src/pkgcheck/cli.py
9861 @@ -14,10 +14,9 @@ from . import const
9862
9863
9864 class Tool(commandline.Tool):
9865 -
9866 def main(self):
9867 # suppress all pkgcore log messages
9868 - logging.getLogger('pkgcore').setLevel(100)
9869 + logging.getLogger("pkgcore").setLevel(100)
9870 return super().main()
9871
9872
9873 @@ -50,14 +49,16 @@ class ConfigFileParser:
9874 for f in configs:
9875 config.read(f)
9876 except configparser.ParsingError as e:
9877 - self.parser.error(f'parsing config file failed: {e}')
9878 + self.parser.error(f"parsing config file failed: {e}")
9879 return config
9880
9881 def parse_config_sections(self, namespace, sections):
9882 """Parse options from a given iterable of config section names."""
9883 - with patch('snakeoil.cli.arghparse.ArgumentParser.error', self._config_error):
9884 + with patch("snakeoil.cli.arghparse.ArgumentParser.error", self._config_error):
9885 for section in (x for x in sections if x in self.config):
9886 - config_args = [f'--{k}={v}' if v else f'--{k}' for k, v in self.config.items(section)]
9887 + config_args = [
9888 + f"--{k}={v}" if v else f"--{k}" for k, v in self.config.items(section)
9889 + ]
9890 namespace, args = self.parser.parse_known_optionals(config_args, namespace)
9891 if args:
9892 self.parser.error(f"unknown arguments: {' '.join(args)}")
9893 @@ -74,16 +75,16 @@ class ConfigFileParser:
9894 self._config = None
9895
9896 # load default options
9897 - namespace = self.parse_config_sections(namespace, ['DEFAULT'])
9898 + namespace = self.parse_config_sections(namespace, ["DEFAULT"])
9899
9900 # load any defined checksets -- empty checksets are ignored
9901 - if 'CHECKSETS' in self.config:
9902 - for k, v in self.config.items('CHECKSETS'):
9903 + if "CHECKSETS" in self.config:
9904 + for k, v in self.config.items("CHECKSETS"):
9905 if v:
9906 - namespace.config_checksets[k] = re.split('[,\n]', v.strip())
9907 + namespace.config_checksets[k] = re.split("[,\n]", v.strip())
9908
9909 return namespace
9910
9911 def _config_error(self, message, status=2):
9912 """Stub to replace error method that notes config failure."""
9913 - self.parser.exit(status, f'{self.parser.prog}: failed loading config: {message}\n')
9914 + self.parser.exit(status, f"{self.parser.prog}: failed loading config: {message}\n")
9915
9916 diff --git a/src/pkgcheck/const.py b/src/pkgcheck/const.py
9917 index 7e440ce4..61b0922f 100644
9918 --- a/src/pkgcheck/const.py
9919 +++ b/src/pkgcheck/const.py
9920 @@ -25,17 +25,20 @@ def _GET_CONST(attr, default_value):
9921
9922 # determine XDG compatible paths
9923 for xdg_var, var_name, fallback_dir in (
9924 - ('XDG_CONFIG_HOME', 'USER_CONFIG_PATH', '~/.config'),
9925 - ('XDG_CACHE_HOME', 'USER_CACHE_PATH', '~/.cache'),
9926 - ('XDG_DATA_HOME', 'USER_DATA_PATH', '~/.local/share')):
9927 + ("XDG_CONFIG_HOME", "USER_CONFIG_PATH", "~/.config"),
9928 + ("XDG_CACHE_HOME", "USER_CACHE_PATH", "~/.cache"),
9929 + ("XDG_DATA_HOME", "USER_DATA_PATH", "~/.local/share"),
9930 +):
9931 setattr(
9932 - _module, var_name,
9933 - os.environ.get(xdg_var, os.path.join(os.path.expanduser(fallback_dir), 'pkgcheck')))
9934 -
9935 -REPO_PATH = _GET_CONST('REPO_PATH', _reporoot)
9936 -DATA_PATH = _GET_CONST('DATA_PATH', '%(REPO_PATH)s/data/share/pkgcheck')
9937 -
9938 -USER_CACHE_DIR = getattr(_module, 'USER_CACHE_PATH')
9939 -USER_CONF_FILE = os.path.join(getattr(_module, 'USER_CONFIG_PATH'), 'pkgcheck.conf')
9940 -SYSTEM_CONF_FILE = '/etc/pkgcheck/pkgcheck.conf'
9941 -BUNDLED_CONF_FILE = os.path.join(DATA_PATH, 'pkgcheck.conf')
9942 + _module,
9943 + var_name,
9944 + os.environ.get(xdg_var, os.path.join(os.path.expanduser(fallback_dir), "pkgcheck")),
9945 + )
9946 +
9947 +REPO_PATH = _GET_CONST("REPO_PATH", _reporoot)
9948 +DATA_PATH = _GET_CONST("DATA_PATH", "%(REPO_PATH)s/data/share/pkgcheck")
9949 +
9950 +USER_CACHE_DIR = getattr(_module, "USER_CACHE_PATH")
9951 +USER_CONF_FILE = os.path.join(getattr(_module, "USER_CONFIG_PATH"), "pkgcheck.conf")
9952 +SYSTEM_CONF_FILE = "/etc/pkgcheck/pkgcheck.conf"
9953 +BUNDLED_CONF_FILE = os.path.join(DATA_PATH, "pkgcheck.conf")
9954
9955 diff --git a/src/pkgcheck/feeds.py b/src/pkgcheck/feeds.py
9956 index 0edffc2c..e09874dc 100644
9957 --- a/src/pkgcheck/feeds.py
9958 +++ b/src/pkgcheck/feeds.py
9959 @@ -38,15 +38,16 @@ class Feed(base.Addon):
9960
9961
9962 class QueryCache(Feed):
9963 -
9964 @staticmethod
9965 def mangle_argparser(parser):
9966 - group = parser.add_argument_group('query caching')
9967 + group = parser.add_argument_group("query caching")
9968 group.add_argument(
9969 - '--reset-caching-per', dest='query_caching_freq',
9970 - choices=('version', 'package', 'category'), default='package',
9971 - help='control how often the cache is cleared '
9972 - '(version, package or category)')
9973 + "--reset-caching-per",
9974 + dest="query_caching_freq",
9975 + choices=("version", "package", "category"),
9976 + default="package",
9977 + help="control how often the cache is cleared " "(version, package or category)",
9978 + )
9979
9980 @staticmethod
9981 def _version(item):
9982 @@ -63,7 +64,7 @@ class QueryCache(Feed):
9983 def __init__(self, options):
9984 super().__init__(options)
9985 self.query_cache = {}
9986 - self._keyfunc = getattr(self, f'_{options.query_caching_freq}')
9987 + self._keyfunc = getattr(self, f"_{options.query_caching_freq}")
9988 self._key = None
9989
9990 def feed(self, item):
9991 @@ -76,7 +77,6 @@ class QueryCache(Feed):
9992
9993
9994 class EvaluateDepSet(Feed):
9995 -
9996 def __init__(self, *args, profile_addon):
9997 super().__init__(*args)
9998 self.pkg_evaluate_depsets_cache = {}
9999 @@ -95,15 +95,15 @@ class EvaluateDepSet(Feed):
10000 self.pkg_profiles_cache[pkg] = profile_grps
10001
10002 # strip use dep defaults so known flags get identified correctly
10003 - diuse = frozenset(
10004 - x[:-3] if x[-1] == ')' else x for x in depset.known_conditionals)
10005 + diuse = frozenset(x[:-3] if x[-1] == ")" else x for x in depset.known_conditionals)
10006 collapsed = {}
10007 for profiles in profile_grps:
10008 immutable, enabled = profiles[0].identify_use(pkg, diuse)
10009 collapsed.setdefault((immutable, enabled), []).extend(profiles)
10010
10011 - return [(depset.evaluate_depset(k[1], tristate_filter=k[0]), v)
10012 - for k, v in collapsed.items()]
10013 + return [
10014 + (depset.evaluate_depset(k[1], tristate_filter=k[0]), v) for k, v in collapsed.items()
10015 + ]
10016
10017 def collapse_evaluate_depset(self, pkg, attr, depset):
10018 depset_profiles = self.pkg_evaluate_depsets_cache.get((pkg, attr))
10019
10020 diff --git a/src/pkgcheck/log.py b/src/pkgcheck/log.py
10021 index 0bc11269..6db8441b 100644
10022 --- a/src/pkgcheck/log.py
10023 +++ b/src/pkgcheck/log.py
10024 @@ -9,4 +9,4 @@ import logging
10025 # overrides the root logger handler.
10026 logging.basicConfig()
10027
10028 -logger = logging.getLogger('pkgcheck')
10029 +logger = logging.getLogger("pkgcheck")
10030
10031 diff --git a/src/pkgcheck/objects.py b/src/pkgcheck/objects.py
10032 index b91d07b6..51f2bed2 100644
10033 --- a/src/pkgcheck/objects.py
10034 +++ b/src/pkgcheck/objects.py
10035 @@ -21,15 +21,15 @@ except ImportError: # pragma: no cover
10036
10037 def _find_modules(module): # pragma: no cover
10038 """Generator of all public modules under a given module."""
10039 - if getattr(module, '__path__', False):
10040 - for _imp, name, _ in pkgutil.walk_packages(module.__path__, module.__name__ + '.'):
10041 + if getattr(module, "__path__", False):
10042 + for _imp, name, _ in pkgutil.walk_packages(module.__path__, module.__name__ + "."):
10043 # skip "private" modules
10044 - if name.rsplit('.', 1)[1][0] == '_':
10045 + if name.rsplit(".", 1)[1][0] == "_":
10046 continue
10047 try:
10048 yield import_module(name)
10049 except ImportError as e:
10050 - raise Exception(f'failed importing {name!r}: {e}')
10051 + raise Exception(f"failed importing {name!r}: {e}")
10052 else:
10053 yield module
10054
10055 @@ -37,27 +37,31 @@ def _find_modules(module): # pragma: no cover
10056 def _find_classes(module, matching_cls, skip=()): # pragma: no cover
10057 """Generator of all subclasses of a selected class under a given module."""
10058 for _name, cls in inspect.getmembers(module):
10059 - if (inspect.isclass(cls) and issubclass(cls, matching_cls)
10060 - and cls.__name__[0] != '_' and cls not in skip):
10061 + if (
10062 + inspect.isclass(cls)
10063 + and issubclass(cls, matching_cls)
10064 + and cls.__name__[0] != "_"
10065 + and cls not in skip
10066 + ):
10067 yield cls
10068
10069
10070 def _find_obj_classes(module_name, target_cls): # pragma: no cover
10071 """Determine mapping of object class names to class objects."""
10072 - module = import_module(f'.{module_name}', 'pkgcheck')
10073 - cls_module, cls_name = target_cls.rsplit('.', 1)
10074 - matching_cls = getattr(import_module(f'.{cls_module}', 'pkgcheck'), cls_name)
10075 + module = import_module(f".{module_name}", "pkgcheck")
10076 + cls_module, cls_name = target_cls.rsplit(".", 1)
10077 + matching_cls = getattr(import_module(f".{cls_module}", "pkgcheck"), cls_name)
10078
10079 # skip top-level, base classes
10080 base_classes = {matching_cls}
10081 - if os.path.basename(module.__file__) == '__init__.py':
10082 + if os.path.basename(module.__file__) == "__init__.py":
10083 base_classes.update(_find_classes(module, matching_cls))
10084
10085 classes = {}
10086 for m in _find_modules(module):
10087 for cls in _find_classes(m, matching_cls, skip=base_classes):
10088 if cls.__name__ in classes and classes[cls.__name__] != cls:
10089 - raise Exception(f'object name overlap: {cls} and {classes[cls.__name__]}')
10090 + raise Exception(f"object name overlap: {cls} and {classes[cls.__name__]}")
10091 classes[cls.__name__] = cls
10092
10093 return classes
10094 @@ -120,7 +124,7 @@ def _keyword_alias(alias=None):
10095 def __set_name__(self, cls, name):
10096 key = alias if alias is not None else name
10097 cls._alias_keywords.add(key)
10098 - jit_attr = klass.jit_attr_named(f'_{self.func.__name__}')
10099 + jit_attr = klass.jit_attr_named(f"_{self.func.__name__}")
10100 func = jit_attr(partial(self.func))
10101 setattr(cls, name, func)
10102
10103 @@ -136,6 +140,7 @@ class _KeywordsLazyDict(_LazyDict):
10104 def aliases(self):
10105 """Mapping of aliases to their respective mappings."""
10106 from . import results
10107 +
10108 alias_map = {x: getattr(self, x) for x in self._alias_keywords}
10109 # support class-based aliasing
10110 for k, v in self._dict.items():
10111 @@ -147,24 +152,28 @@ class _KeywordsLazyDict(_LazyDict):
10112 def error(self):
10113 """Mapping of all error level keywords."""
10114 from . import results
10115 +
10116 return ImmutableDict(self.select(results.Error))
10117
10118 @_keyword_alias()
10119 def warning(self):
10120 """Mapping of all warning level keywords."""
10121 from . import results
10122 +
10123 return ImmutableDict(self.select(results.Warning))
10124
10125 @_keyword_alias()
10126 def style(self):
10127 """Mapping of all style level keywords."""
10128 from . import results
10129 +
10130 return ImmutableDict(self.select(results.Style))
10131
10132 @_keyword_alias()
10133 def info(self):
10134 """Mapping of all info level keywords."""
10135 from . import results
10136 +
10137 return ImmutableDict(self.select(results.Info))
10138
10139 @klass.jit_attr
10140 @@ -180,11 +189,12 @@ class _ChecksLazyDict(_LazyDict):
10141 def default(self):
10142 """Mapping of all default-enabled checks."""
10143 from . import checks
10144 - return ImmutableDict({
10145 - k: v for k, v in self._dict.items()
10146 - if not issubclass(v, checks.OptionalCheck)})
10147 +
10148 + return ImmutableDict(
10149 + {k: v for k, v in self._dict.items() if not issubclass(v, checks.OptionalCheck)}
10150 + )
10151
10152
10153 -KEYWORDS = _KeywordsLazyDict('KEYWORDS', ('checks', 'results.Result'))
10154 -CHECKS = _ChecksLazyDict('CHECKS', ('checks', 'checks.Check'))
10155 -REPORTERS = _LazyDict('REPORTERS', ('reporters', 'reporters.Reporter'))
10156 +KEYWORDS = _KeywordsLazyDict("KEYWORDS", ("checks", "results.Result"))
10157 +CHECKS = _ChecksLazyDict("CHECKS", ("checks", "checks.Check"))
10158 +REPORTERS = _LazyDict("REPORTERS", ("reporters", "reporters.Reporter"))
10159
10160 diff --git a/src/pkgcheck/packages.py b/src/pkgcheck/packages.py
10161 index e2a07aa1..195b9d19 100644
10162 --- a/src/pkgcheck/packages.py
10163 +++ b/src/pkgcheck/packages.py
10164 @@ -11,6 +11,7 @@ from snakeoil import klass
10165 @dataclass(frozen=True, eq=False)
10166 class RawCPV:
10167 """Raw CPV objects supporting basic restrictions/sorting."""
10168 +
10169 category: str
10170 package: str
10171 fullver: str
10172 @@ -19,18 +20,18 @@ class RawCPV:
10173
10174 def __post_init__(self):
10175 if self.fullver is not None:
10176 - version, _, revision = self.fullver.partition('-r')
10177 - object.__setattr__(self, 'version', version)
10178 - object.__setattr__(self, 'revision', cpv.Revision(revision))
10179 + version, _, revision = self.fullver.partition("-r")
10180 + object.__setattr__(self, "version", version)
10181 + object.__setattr__(self, "revision", cpv.Revision(revision))
10182
10183 @property
10184 def key(self):
10185 - return f'{self.category}/{self.package}'
10186 + return f"{self.category}/{self.package}"
10187
10188 @property
10189 def versioned_atom(self):
10190 if self.fullver:
10191 - return atom.atom(f'={self}')
10192 + return atom.atom(f"={self}")
10193 return atom.atom(str(self))
10194
10195 @property
10196 @@ -45,19 +46,19 @@ class RawCPV:
10197
10198 def __str__(self):
10199 if self.fullver:
10200 - return f'{self.category}/{self.package}-{self.fullver}'
10201 - return f'{self.category}/{self.package}'
10202 + return f"{self.category}/{self.package}-{self.fullver}"
10203 + return f"{self.category}/{self.package}"
10204
10205 def __repr__(self):
10206 - address = '@%#8x' % (id(self),)
10207 - return f'<{self.__class__.__name__} cpv={self.versioned_atom.cpvstr!r} {address}>'
10208 + address = "@%#8x" % (id(self),)
10209 + return f"<{self.__class__.__name__} cpv={self.versioned_atom.cpvstr!r} {address}>"
10210
10211
10212 @total_ordering
10213 class WrappedPkg:
10214 """Generic package wrapper used to inject attributes into package objects."""
10215
10216 - __slots__ = ('_pkg',)
10217 + __slots__ = ("_pkg",)
10218
10219 def __init__(self, pkg):
10220 self._pkg = pkg
10221 @@ -77,8 +78,8 @@ class WrappedPkg:
10222 def __hash__(self):
10223 return hash(self._pkg)
10224
10225 - __getattr__ = klass.GetAttrProxy('_pkg')
10226 - __dir__ = klass.DirProxy('_pkg')
10227 + __getattr__ = klass.GetAttrProxy("_pkg")
10228 + __dir__ = klass.DirProxy("_pkg")
10229
10230
10231 class FilteredPkg(WrappedPkg):
10232
10233 diff --git a/src/pkgcheck/pipeline.py b/src/pkgcheck/pipeline.py
10234 index 0dd8f9b4..184f3454 100644
10235 --- a/src/pkgcheck/pipeline.py
10236 +++ b/src/pkgcheck/pipeline.py
10237 @@ -29,7 +29,7 @@ class Pipeline:
10238 self.errors = []
10239
10240 # pkgcheck currently requires the fork start method (#254)
10241 - self._mp_ctx = multiprocessing.get_context('fork')
10242 + self._mp_ctx = multiprocessing.get_context("fork")
10243 self._results_q = self._mp_ctx.SimpleQueue()
10244
10245 # create checkrunners
10246 @@ -44,19 +44,19 @@ class Pipeline:
10247 if self.options.pkg_scan:
10248 # package level scans sort all returned results
10249 self._ordered_results = {
10250 - scope: [] for scope in base.scopes.values()
10251 - if scope >= base.package_scope
10252 + scope: [] for scope in base.scopes.values() if scope >= base.package_scope
10253 }
10254 else:
10255 # scoped mapping for caching repo and location specific results
10256 self._ordered_results = {
10257 - scope: [] for scope in reversed(list(base.scopes.values()))
10258 + scope: []
10259 + for scope in reversed(list(base.scopes.values()))
10260 if scope <= base.repo_scope
10261 }
10262
10263 def _filter_checks(self, scope):
10264 """Verify check scope against given scope to determine activation."""
10265 - for check in sorted(self.options.enabled_checks, key=attrgetter('__name__')):
10266 + for check in sorted(self.options.enabled_checks, key=attrgetter("__name__")):
10267 if isinstance(check.scope, base.ConditionalScope):
10268 # conditionally enabled check
10269 yield check
10270 @@ -77,7 +77,7 @@ class Pipeline:
10271
10272 def _create_runners(self):
10273 """Initialize and categorize checkrunners for results pipeline."""
10274 - pipes = {'async': [], 'sync': [], 'sequential': []}
10275 + pipes = {"async": [], "sync": [], "sequential": []}
10276
10277 # use addon/source caches to avoid re-initializing objects
10278 addons_map = {}
10279 @@ -88,15 +88,20 @@ class Pipeline:
10280 addons = list(base.get_addons(self._filter_checks(scope)))
10281 if not addons:
10282 raise base.PkgcheckUserException(
10283 - f'no matching checks available for {scope.desc} scope')
10284 + f"no matching checks available for {scope.desc} scope"
10285 + )
10286 checks = init_checks(
10287 - addons, self.options, self._results_q,
10288 - addons_map=addons_map, source_map=source_map)
10289 + addons, self.options, self._results_q, addons_map=addons_map, source_map=source_map
10290 + )
10291
10292 # Initialize checkrunners per source type using separate runner for
10293 # async checks and categorize them for parallelization based on the
10294 # scan and source scope.
10295 - runners = {'async': defaultdict(list), 'sync': defaultdict(list), 'sequential': defaultdict(list)}
10296 + runners = {
10297 + "async": defaultdict(list),
10298 + "sync": defaultdict(list),
10299 + "sequential": defaultdict(list),
10300 + }
10301 for (source, runner_cls), check_objs in checks.items():
10302 runner = runner_cls(self.options, source, check_objs)
10303 if not self.options.pkg_scan and source.scope >= base.package_scope:
10304 @@ -183,8 +188,9 @@ class Pipeline:
10305 """Consumer that runs scanning tasks, queuing results for output."""
10306 try:
10307 for scope, restrict, i, runners in iter(work_q.get, None):
10308 - if results := sorted(chain.from_iterable(
10309 - pipes[i][-1][scope][j].run(restrict) for j in runners)):
10310 + if results := sorted(
10311 + chain.from_iterable(pipes[i][-1][scope][j].run(restrict) for j in runners)
10312 + ):
10313 self._results_q.put(results)
10314 except Exception: # pragma: no cover
10315 # traceback can't be pickled so serialize it
10316 @@ -213,21 +219,19 @@ class Pipeline:
10317
10318 # schedule asynchronous checks in a separate process
10319 async_proc = None
10320 - if async_pipes := self._pipes['async']:
10321 - async_proc = self._mp_ctx.Process(
10322 - target=self._schedule_async, args=(async_pipes,))
10323 + if async_pipes := self._pipes["async"]:
10324 + async_proc = self._mp_ctx.Process(target=self._schedule_async, args=(async_pipes,))
10325 async_proc.start()
10326
10327 # run synchronous checks using a process pool
10328 - if sync_pipes := self._pipes['sync']:
10329 + if sync_pipes := self._pipes["sync"]:
10330 work_q = self._mp_ctx.SimpleQueue()
10331 - pool = self._mp_ctx.Pool(
10332 - self.options.jobs, self._run_checks, (sync_pipes, work_q))
10333 + pool = self._mp_ctx.Pool(self.options.jobs, self._run_checks, (sync_pipes, work_q))
10334 pool.close()
10335 self._queue_work(sync_pipes, work_q)
10336 pool.join()
10337
10338 - if sequential_pipes := self._pipes['sequential']:
10339 + if sequential_pipes := self._pipes["sequential"]:
10340 for _scope, restriction, pipes in sequential_pipes:
10341 for runner in chain.from_iterable(pipes.values()):
10342 if results := tuple(runner.run(restriction)):
10343
10344 diff --git a/src/pkgcheck/reporters.py b/src/pkgcheck/reporters.py
10345 index 3696f5fd..089037d1 100644
10346 --- a/src/pkgcheck/reporters.py
10347 +++ b/src/pkgcheck/reporters.py
10348 @@ -62,15 +62,15 @@ class StrReporter(Reporter):
10349 def _process_report(self):
10350 # scope to result prefix mapping
10351 scope_prefix_map = {
10352 - base.version_scope: '{category}/{package}-{version}: ',
10353 - base.package_scope: '{category}/{package}: ',
10354 - base.category_scope: '{category}: ',
10355 + base.version_scope: "{category}/{package}-{version}: ",
10356 + base.package_scope: "{category}/{package}: ",
10357 + base.category_scope: "{category}: ",
10358 }
10359
10360 while True:
10361 - result = (yield)
10362 - prefix = scope_prefix_map.get(result.scope, '').format(**vars(result))
10363 - self.out.write(f'{prefix}{result.desc}')
10364 + result = yield
10365 + prefix = scope_prefix_map.get(result.scope, "").format(**vars(result))
10366 + self.out.write(f"{prefix}{result.desc}")
10367 self.out.stream.flush()
10368
10369
10370 @@ -92,9 +92,9 @@ class FancyReporter(Reporter):
10371 prev_key = None
10372
10373 while True:
10374 - result = (yield)
10375 + result = yield
10376 if result.scope in (base.version_scope, base.package_scope):
10377 - key = f'{result.category}/{result.package}'
10378 + key = f"{result.category}/{result.package}"
10379 elif result.scope == base.category_scope:
10380 key = result.category
10381 else:
10382 @@ -103,17 +103,16 @@ class FancyReporter(Reporter):
10383 if key != prev_key:
10384 if prev_key is not None:
10385 self.out.write()
10386 - self.out.write(self.out.bold, self.out.fg('blue'), key, self.out.reset)
10387 + self.out.write(self.out.bold, self.out.fg("blue"), key, self.out.reset)
10388 prev_key = key
10389 - self.out.first_prefix.append(' ')
10390 - self.out.later_prefix.append(' ')
10391 - s = ''
10392 + self.out.first_prefix.append(" ")
10393 + self.out.later_prefix.append(" ")
10394 + s = ""
10395 if result.scope == base.version_scope:
10396 s = f"version {result.version}: "
10397 self.out.write(
10398 - self.out.fg(result.color),
10399 - result.name, self.out.reset,
10400 - ': ', s, result.desc)
10401 + self.out.fg(result.color), result.name, self.out.reset, ": ", s, result.desc
10402 + )
10403 self.out.first_prefix.pop()
10404 self.out.later_prefix.pop()
10405 self.out.stream.flush()
10406 @@ -145,10 +144,10 @@ class JsonReporter(Reporter):
10407 }
10408
10409 while True:
10410 - result = (yield)
10411 + result = yield
10412 data = json_dict()
10413 d = scope_map.get(result.scope, lambda x, y: x)(data, result)
10414 - d['_' + result.level][result.name] = result.desc
10415 + d["_" + result.level][result.name] = result.desc
10416 self.out.write(json.dumps(data))
10417 # flush output so partial objects aren't written
10418 self.out.stream.flush()
10419 @@ -160,27 +159,28 @@ class XmlReporter(Reporter):
10420 priority = -1000
10421
10422 def _start(self):
10423 - self.out.write('<checks>')
10424 + self.out.write("<checks>")
10425
10426 def _finish(self):
10427 - self.out.write('</checks>')
10428 + self.out.write("</checks>")
10429
10430 @coroutine
10431 def _process_report(self):
10432 - result_template = (
10433 - "<result><class>%(class)s</class>"
10434 - "<msg>%(msg)s</msg></result>")
10435 + result_template = "<result><class>%(class)s</class>" "<msg>%(msg)s</msg></result>"
10436 cat_template = (
10437 "<result><category>%(category)s</category>"
10438 - "<class>%(class)s</class><msg>%(msg)s</msg></result>")
10439 + "<class>%(class)s</class><msg>%(msg)s</msg></result>"
10440 + )
10441 pkg_template = (
10442 "<result><category>%(category)s</category>"
10443 "<package>%(package)s</package><class>%(class)s</class>"
10444 - "<msg>%(msg)s</msg></result>")
10445 + "<msg>%(msg)s</msg></result>"
10446 + )
10447 ver_template = (
10448 "<result><category>%(category)s</category>"
10449 "<package>%(package)s</package><version>%(version)s</version>"
10450 - "<class>%(class)s</class><msg>%(msg)s</msg></result>")
10451 + "<class>%(class)s</class><msg>%(msg)s</msg></result>"
10452 + )
10453
10454 scope_map = {
10455 base.category_scope: cat_template,
10456 @@ -189,10 +189,10 @@ class XmlReporter(Reporter):
10457 }
10458
10459 while True:
10460 - result = (yield)
10461 - d = {k: getattr(result, k, '') for k in ('category', 'package', 'version')}
10462 - d['class'] = xml_escape(result.name)
10463 - d['msg'] = xml_escape(result.desc)
10464 + result = yield
10465 + d = {k: getattr(result, k, "") for k in ("category", "package", "version")}
10466 + d["class"] = xml_escape(result.name)
10467 + d["msg"] = xml_escape(result.desc)
10468 self.out.write(scope_map.get(result.scope, result_template) % d)
10469
10470
10471 @@ -211,19 +211,18 @@ class CsvReporter(Reporter):
10472
10473 @coroutine
10474 def _process_report(self):
10475 - writer = csv.writer(
10476 - self.out,
10477 - doublequote=False,
10478 - escapechar='\\',
10479 - lineterminator='')
10480 + writer = csv.writer(self.out, doublequote=False, escapechar="\\", lineterminator="")
10481
10482 while True:
10483 - result = (yield)
10484 - writer.writerow((
10485 - getattr(result, 'category', ''),
10486 - getattr(result, 'package', ''),
10487 - getattr(result, 'version', ''),
10488 - result.desc))
10489 + result = yield
10490 + writer.writerow(
10491 + (
10492 + getattr(result, "category", ""),
10493 + getattr(result, "package", ""),
10494 + getattr(result, "version", ""),
10495 + result.desc,
10496 + )
10497 + )
10498
10499
10500 class _ResultFormatter(Formatter):
10501 @@ -235,9 +234,8 @@ class _ResultFormatter(Formatter):
10502 try:
10503 return kwds[key]
10504 except KeyError:
10505 - return ''
10506 - raise base.PkgcheckUserException(
10507 - 'FormatReporter: integer indexes are not supported')
10508 + return ""
10509 + raise base.PkgcheckUserException("FormatReporter: integer indexes are not supported")
10510
10511
10512 class FormatReporter(Reporter):
10513 @@ -253,10 +251,10 @@ class FormatReporter(Reporter):
10514 def _process_report(self):
10515 formatter = _ResultFormatter()
10516 # provide expansions for result desc, level, and output name properties
10517 - properties = ('desc', 'level', 'name')
10518 + properties = ("desc", "level", "name")
10519
10520 while True:
10521 - result = (yield)
10522 + result = yield
10523 attrs = vars(result)
10524 attrs.update((k, getattr(result, k)) for k in properties)
10525 s = formatter.format(self.format_str, **attrs)
10526 @@ -279,7 +277,7 @@ class JsonStream(Reporter):
10527 def to_json(obj):
10528 """Serialize results and other objects to JSON."""
10529 if isinstance(obj, Result):
10530 - d = {'__class__': obj.__class__.__name__}
10531 + d = {"__class__": obj.__class__.__name__}
10532 d.update(obj._attrs)
10533 return d
10534 return str(obj)
10535 @@ -289,19 +287,20 @@ class JsonStream(Reporter):
10536 """Deserialize results from a given iterable."""
10537 # avoid circular import issues
10538 from . import objects
10539 +
10540 try:
10541 for data in map(json.loads, iterable):
10542 - cls = objects.KEYWORDS[data.pop('__class__')]
10543 + cls = objects.KEYWORDS[data.pop("__class__")]
10544 yield cls._create(**data)
10545 except (json.decoder.JSONDecodeError, UnicodeDecodeError, DeserializationError) as e:
10546 - raise DeserializationError('failed loading') from e
10547 + raise DeserializationError("failed loading") from e
10548 except (KeyError, InvalidResult):
10549 - raise DeserializationError('unknown result')
10550 + raise DeserializationError("unknown result")
10551
10552 @coroutine
10553 def _process_report(self):
10554 while True:
10555 - result = (yield)
10556 + result = yield
10557 self.out.write(json.dumps(result, default=self.to_json))
10558
10559
10560 @@ -316,11 +315,11 @@ class FlycheckReporter(Reporter):
10561 @coroutine
10562 def _process_report(self):
10563 while True:
10564 - result = (yield)
10565 + result = yield
10566 file = f'{getattr(result, "package", "")}-{getattr(result, "version", "")}.ebuild'
10567 message = f'{getattr(result, "name")}: {getattr(result, "desc")}'
10568 if isinstance(result, BaseLinesResult):
10569 - message = message.replace(result.lines_str, '').strip()
10570 + message = message.replace(result.lines_str, "").strip()
10571 for lineno in result.lines:
10572 self.out.write(f'{file}:{lineno}:{getattr(result, "level")}:{message}')
10573 else:
10574
10575 diff --git a/src/pkgcheck/results.py b/src/pkgcheck/results.py
10576 index cac8fbfa..23d639fc 100644
10577 --- a/src/pkgcheck/results.py
10578 +++ b/src/pkgcheck/results.py
10579 @@ -34,7 +34,7 @@ class Result:
10580 cls.name = cls._name if cls._name is not None else cls.__name__
10581
10582 def __str__(self):
10583 - return f'{self.name}: {self.desc}'
10584 + return f"{self.name}: {self.desc}"
10585
10586 @property
10587 def desc(self):
10588 @@ -43,24 +43,24 @@ class Result:
10589 @property
10590 def _attrs(self):
10591 """Return all public result attributes."""
10592 - return {k: v for k, v in self.__dict__.items() if not k.startswith('_')}
10593 + return {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
10594
10595 @classmethod
10596 def _create(cls, **kwargs):
10597 """Create a new result object from a given attributes dict."""
10598 if issubclass(cls, CategoryResult):
10599 - category = kwargs.pop('category', None)
10600 - package = kwargs.pop('package', None)
10601 - version = kwargs.pop('version', None)
10602 - if 'pkg' not in kwargs:
10603 + category = kwargs.pop("category", None)
10604 + package = kwargs.pop("package", None)
10605 + version = kwargs.pop("version", None)
10606 + if "pkg" not in kwargs:
10607 # recreate pkg param from related, separated attributes
10608 if category is None:
10609 - raise InvalidResult('missing category')
10610 + raise InvalidResult("missing category")
10611 if issubclass(cls, PackageResult) and package is None:
10612 - raise InvalidResult('missing package')
10613 + raise InvalidResult("missing package")
10614 if issubclass(cls, VersionResult) and version is None:
10615 - raise InvalidResult('missing version')
10616 - kwargs['pkg'] = RawCPV(category, package, version)
10617 + raise InvalidResult("missing version")
10618 + kwargs["pkg"] = RawCPV(category, package, version)
10619 return cls(**kwargs)
10620
10621 def __eq__(self, other):
10622 @@ -91,36 +91,36 @@ class BaseLinesResult:
10623 @property
10624 def lines_str(self):
10625 s = pluralism(self.lines)
10626 - lines = ', '.join(map(str, self.lines))
10627 - return f'on line{s}: {lines}'
10628 + lines = ", ".join(map(str, self.lines))
10629 + return f"on line{s}: {lines}"
10630
10631
10632 class Error(Result):
10633 """Result with an error priority level."""
10634
10635 - level = 'error'
10636 - color = 'red'
10637 + level = "error"
10638 + color = "red"
10639
10640
10641 class Warning(Result):
10642 """Result with a warning priority level."""
10643
10644 - level = 'warning'
10645 - color = 'yellow'
10646 + level = "warning"
10647 + color = "yellow"
10648
10649
10650 class Style(Result):
10651 """Result with a coding style priority level."""
10652
10653 - level = 'style'
10654 - color = 'cyan'
10655 + level = "style"
10656 + color = "cyan"
10657
10658
10659 class Info(Result):
10660 """Result with an info priority level."""
10661
10662 - level = 'info'
10663 - color = 'green'
10664 + level = "info"
10665 + color = "green"
10666
10667
10668 class CommitResult(Result):
10669 @@ -131,7 +131,7 @@ class CommitResult(Result):
10670 def __init__(self, commit, **kwargs):
10671 super().__init__(**kwargs)
10672 self.commit = str(commit)
10673 - self._attr = 'commit'
10674 + self._attr = "commit"
10675
10676 def __lt__(self, other):
10677 try:
10678 @@ -159,7 +159,7 @@ class EclassResult(Result):
10679 def __init__(self, eclass, **kwargs):
10680 super().__init__(**kwargs)
10681 self.eclass = str(eclass)
10682 - self._attr = 'eclass'
10683 + self._attr = "eclass"
10684
10685 def __lt__(self, other):
10686 try:
10687 @@ -182,7 +182,7 @@ class CategoryResult(Result):
10688 def __init__(self, pkg, **kwargs):
10689 super().__init__(**kwargs)
10690 self.category = pkg.category
10691 - self._attr = 'category'
10692 + self._attr = "category"
10693
10694 def __lt__(self, other):
10695 try:
10696 @@ -201,7 +201,7 @@ class PackageResult(CategoryResult):
10697 def __init__(self, pkg, **kwargs):
10698 super().__init__(pkg, **kwargs)
10699 self.package = pkg.package
10700 - self._attr = 'package'
10701 + self._attr = "package"
10702
10703 def __lt__(self, other):
10704 try:
10705 @@ -223,11 +223,11 @@ class VersionResult(PackageResult):
10706 pkg = pkg._pkg
10707 super().__init__(pkg, **kwargs)
10708 self.version = pkg.fullver
10709 - self._attr = 'version'
10710 + self._attr = "version"
10711
10712 @klass.jit_attr
10713 def ver_rev(self):
10714 - version, _, revision = self.version.partition('-r')
10715 + version, _, revision = self.version.partition("-r")
10716 revision = cpv.Revision(revision)
10717 return version, revision
10718
10719 @@ -307,10 +307,9 @@ class MetadataError(Error):
10720 if cls.attr is not None:
10721 setting = cls.results.setdefault(cls.attr, cls)
10722 if setting != cls:
10723 - raise ValueError(
10724 - f'metadata attribute {cls.attr!r} already registered: {setting!r}')
10725 + raise ValueError(f"metadata attribute {cls.attr!r} already registered: {setting!r}")
10726 else:
10727 - raise ValueError(f'class missing metadata attributes: {cls!r}')
10728 + raise ValueError(f"class missing metadata attributes: {cls!r}")
10729
10730 def __init__(self, attr, msg, **kwargs):
10731 super().__init__(**kwargs)
10732
10733 diff --git a/src/pkgcheck/runners.py b/src/pkgcheck/runners.py
10734 index b1aa8e64..86bdbe6e 100644
10735 --- a/src/pkgcheck/runners.py
10736 +++ b/src/pkgcheck/runners.py
10737 @@ -31,7 +31,7 @@ class CheckRunner:
10738 class SyncCheckRunner(CheckRunner):
10739 """Generic runner for synchronous checks."""
10740
10741 - type = 'sync'
10742 + type = "sync"
10743
10744 def __init__(self, *args):
10745 super().__init__(*args)
10746 @@ -43,7 +43,8 @@ class SyncCheckRunner(CheckRunner):
10747 # only report metadata errors for version-scoped sources
10748 if self.source.scope == base.version_scope:
10749 self.source.itermatch = partial(
10750 - self.source.itermatch, error_callback=self._metadata_error_cb)
10751 + self.source.itermatch, error_callback=self._metadata_error_cb
10752 + )
10753
10754 def _metadata_error_cb(self, e, check=None):
10755 """Callback handling MetadataError results."""
10756 @@ -58,7 +59,7 @@ class SyncCheckRunner(CheckRunner):
10757 # so they can be noticed and fixed.
10758 result_cls = MetadataError.results[e.attr]
10759 if result_cls in known_results:
10760 - error_str = ': '.join(e.msg().split('\n'))
10761 + error_str = ": ".join(e.msg().split("\n"))
10762 result = result_cls(e.attr, error_str, pkg=e.pkg)
10763 self._metadata_errors.append((e.pkg, result))
10764
10765 @@ -98,7 +99,7 @@ class SequentialCheckRunner(SyncCheckRunner):
10766 Checks that must not be run in parallel, will be run on the main process.
10767 """
10768
10769 - type = 'sequential'
10770 + type = "sequential"
10771
10772
10773 class AsyncCheckRunner(CheckRunner):
10774 @@ -109,7 +110,7 @@ class AsyncCheckRunner(CheckRunner):
10775 on completion.
10776 """
10777
10778 - type = 'async'
10779 + type = "async"
10780
10781 def schedule(self, executor, futures, restrict=packages.AlwaysTrue):
10782 """Schedule all checks to run via the given executor."""
10783
10784 diff --git a/src/pkgcheck/scripts/__init__.py b/src/pkgcheck/scripts/__init__.py
10785 index 351cc7c9..7757a9c0 100755
10786 --- a/src/pkgcheck/scripts/__init__.py
10787 +++ b/src/pkgcheck/scripts/__init__.py
10788 @@ -19,19 +19,21 @@ def run(script_name):
10789
10790 try:
10791 from pkgcheck.cli import Tool
10792 - script_module = '.'.join(
10793 - os.path.realpath(__file__).split(os.path.sep)[-3:-1] +
10794 - [script_name.replace('-', '_')])
10795 +
10796 + script_module = ".".join(
10797 + os.path.realpath(__file__).split(os.path.sep)[-3:-1] + [script_name.replace("-", "_")]
10798 + )
10799 script = import_module(script_module)
10800 except ImportError as e:
10801 - python_version = '.'.join(map(str, sys.version_info[:3]))
10802 - sys.stderr.write(f'Failed importing: {e}!\n')
10803 + python_version = ".".join(map(str, sys.version_info[:3]))
10804 + sys.stderr.write(f"Failed importing: {e}!\n")
10805 sys.stderr.write(
10806 - 'Verify that pkgcheck and its deps are properly installed '
10807 - f'and/or PYTHONPATH is set correctly for python {python_version}.\n')
10808 - if '--debug' in sys.argv[1:]:
10809 + "Verify that pkgcheck and its deps are properly installed "
10810 + f"and/or PYTHONPATH is set correctly for python {python_version}.\n"
10811 + )
10812 + if "--debug" in sys.argv[1:]:
10813 raise
10814 - sys.stderr.write('Add --debug to the commandline for a traceback.\n')
10815 + sys.stderr.write("Add --debug to the commandline for a traceback.\n")
10816 sys.exit(1)
10817
10818 tool = Tool(script.argparser)
10819 @@ -46,5 +48,5 @@ def main():
10820 run(os.path.basename(sys.argv[0]))
10821
10822
10823 -if __name__ == '__main__':
10824 +if __name__ == "__main__":
10825 main()
10826
10827 diff --git a/src/pkgcheck/scripts/argparse_actions.py b/src/pkgcheck/scripts/argparse_actions.py
10828 index 8d6485f6..67a18f73 100644
10829 --- a/src/pkgcheck/scripts/argparse_actions.py
10830 +++ b/src/pkgcheck/scripts/argparse_actions.py
10831 @@ -16,7 +16,7 @@ class ConfigArg(argparse._StoreAction):
10832 """Store config path string or False when explicitly disabled."""
10833
10834 def __call__(self, parser, namespace, values, option_string=None):
10835 - if values.lower() in ('false', 'no', 'n'):
10836 + if values.lower() in ("false", "no", "n"):
10837 values = False
10838 setattr(namespace, self.dest, values)
10839
10840 @@ -30,13 +30,13 @@ def object_to_keywords(namespace, obj):
10841 elif obj in namespace.config_checksets:
10842 yield from chain(*ChecksetArgs.checksets_to_keywords(namespace, [obj]))
10843 else:
10844 - raise ValueError(f'unknown checkset, check, or keyword: {obj!r}')
10845 + raise ValueError(f"unknown checkset, check, or keyword: {obj!r}")
10846
10847
10848 class FilterArgs(arghparse.CommaSeparatedValues):
10849 """Apply filters to an entire scan or specific checks/keywords."""
10850
10851 - known_filters = frozenset(['latest'])
10852 + known_filters = frozenset(["latest"])
10853
10854 def __call__(self, parser, namespace, values, option_string=None):
10855 values = self.parse_values(values)
10856 @@ -44,14 +44,14 @@ class FilterArgs(arghparse.CommaSeparatedValues):
10857 disabled = False
10858
10859 for val in values:
10860 - if ':' in val:
10861 - filter_type, target = val.split(':')
10862 + if ":" in val:
10863 + filter_type, target = val.split(":")
10864 try:
10865 keywords = object_to_keywords(namespace, target)
10866 filter_map.update({x: filter_type for x in keywords})
10867 except ValueError as e:
10868 raise argparse.ArgumentError(self, str(e))
10869 - elif val.lower() in ('false', 'no', 'n'):
10870 + elif val.lower() in ("false", "no", "n"):
10871 # disable all filters
10872 disabled = True
10873 break
10874 @@ -63,19 +63,24 @@ class FilterArgs(arghparse.CommaSeparatedValues):
10875 # validate selected filters
10876 if unknown := set(filter_map.values()) - self.known_filters:
10877 s = pluralism(unknown)
10878 - unknown = ', '.join(map(repr, unknown))
10879 - available = ', '.join(sorted(self.known_filters))
10880 + unknown = ", ".join(map(repr, unknown))
10881 + available = ", ".join(sorted(self.known_filters))
10882 raise argparse.ArgumentError(
10883 - self, f'unknown filter{s}: {unknown} (available: {available})')
10884 + self, f"unknown filter{s}: {unknown} (available: {available})"
10885 + )
10886
10887 filters = {}
10888 if not disabled:
10889 # pull default filters
10890 filters.update(objects.KEYWORDS.filter)
10891 # ignore invalid keywords -- only keywords version scope and higher are affected
10892 - filters.update({
10893 - objects.KEYWORDS[k]: v for k, v in filter_map.items()
10894 - if objects.KEYWORDS[k].scope >= base.version_scope})
10895 + filters.update(
10896 + {
10897 + objects.KEYWORDS[k]: v
10898 + for k, v in filter_map.items()
10899 + if objects.KEYWORDS[k].scope >= base.version_scope
10900 + }
10901 + )
10902
10903 setattr(namespace, self.dest, ImmutableDict(filters))
10904
10905 @@ -104,20 +109,21 @@ class CacheNegations(arghparse.CommaSeparatedNegations):
10906 def parse_values(self, values):
10907 all_cache_types = {cache.type for cache in CachedAddon.caches.values()}
10908 disabled, enabled = [], list(all_cache_types)
10909 - if values is None or values.lower() in ('y', 'yes', 'true'):
10910 + if values is None or values.lower() in ("y", "yes", "true"):
10911 pass
10912 - elif values.lower() in ('n', 'no', 'false'):
10913 + elif values.lower() in ("n", "no", "false"):
10914 disabled = list(all_cache_types)
10915 else:
10916 disabled, enabled = super().parse_values(values)
10917 disabled = set(disabled)
10918 enabled = set(enabled) if enabled else all_cache_types
10919 if unknown := (disabled | enabled) - all_cache_types:
10920 - unknowns = ', '.join(map(repr, unknown))
10921 - choices = ', '.join(map(repr, sorted(self.caches)))
10922 + unknowns = ", ".join(map(repr, unknown))
10923 + choices = ", ".join(map(repr, sorted(self.caches)))
10924 s = pluralism(unknown)
10925 raise argparse.ArgumentError(
10926 - self, f'unknown cache type{s}: {unknowns} (choose from {choices})')
10927 + self, f"unknown cache type{s}: {unknowns} (choose from {choices})"
10928 + )
10929 enabled = set(enabled).difference(disabled)
10930 return enabled
10931
10932 @@ -135,8 +141,8 @@ class ChecksetArgs(arghparse.CommaSeparatedNegations):
10933 def __init__(self, *args, **kwargs):
10934 super().__init__(*args, **kwargs)
10935 self.aliases = {
10936 - 'all': list(objects.CHECKS.values()),
10937 - 'net': list(objects.CHECKS.select(NetworkCheck).values()),
10938 + "all": list(objects.CHECKS.values()),
10939 + "net": list(objects.CHECKS.select(NetworkCheck).values()),
10940 }
10941
10942 def expand_aliases(self, args):
10943 @@ -157,7 +163,7 @@ class ChecksetArgs(arghparse.CommaSeparatedNegations):
10944 for arg in args:
10945 for x in namespace.config_checksets[arg]:
10946 # determine if checkset item is disabled or enabled
10947 - if x[0] == '-':
10948 + if x[0] == "-":
10949 x = x[1:]
10950 keywords = disabled
10951 else:
10952 @@ -168,7 +174,7 @@ class ChecksetArgs(arghparse.CommaSeparatedNegations):
10953 elif x in objects.KEYWORDS:
10954 keywords.append(x)
10955 else:
10956 - raise ValueError(f'{arg!r} checkset, unknown check or keyword: {x!r}')
10957 + raise ValueError(f"{arg!r} checkset, unknown check or keyword: {x!r}")
10958 return disabled, enabled
10959
10960 def __call__(self, parser, namespace, values, option_string=None):
10961 @@ -177,11 +183,12 @@ class ChecksetArgs(arghparse.CommaSeparatedNegations):
10962
10963 # validate selected checksets
10964 if unknown := set(disabled + enabled) - set(self.aliases) - set(checksets):
10965 - unknown_str = ', '.join(map(repr, unknown))
10966 - available = ', '.join(sorted(chain(checksets, self.aliases)))
10967 + unknown_str = ", ".join(map(repr, unknown))
10968 + available = ", ".join(sorted(chain(checksets, self.aliases)))
10969 s = pluralism(unknown)
10970 raise argparse.ArgumentError(
10971 - self, f'unknown checkset{s}: {unknown_str} (available: {available})')
10972 + self, f"unknown checkset{s}: {unknown_str} (available: {available})"
10973 + )
10974
10975 # expand aliases into keywords
10976 disabled, disabled_aliases = self.expand_aliases(disabled)
10977 @@ -203,12 +210,12 @@ class ChecksetArgs(arghparse.CommaSeparatedNegations):
10978 args = []
10979 if enabled_keywords:
10980 keywords_set = {objects.KEYWORDS[x] for x in enabled_keywords}
10981 - checks = ','.join(
10982 - k for k, v in objects.CHECKS.items()
10983 - if v.known_results.intersection(keywords_set))
10984 - args.append(f'--checks={checks}')
10985 - keywords = ','.join(enabled_keywords | {f'-{x}' for x in disabled_keywords})
10986 - args.append(f'--keywords={keywords}')
10987 + checks = ",".join(
10988 + k for k, v in objects.CHECKS.items() if v.known_results.intersection(keywords_set)
10989 + )
10990 + args.append(f"--checks={checks}")
10991 + keywords = ",".join(enabled_keywords | {f"-{x}" for x in disabled_keywords})
10992 + args.append(f"--keywords={keywords}")
10993 parser._parse_known_args(args, namespace)
10994
10995
10996 @@ -220,21 +227,22 @@ class ScopeArgs(arghparse.CommaSeparatedNegations):
10997
10998 # validate selected scopes
10999 if unknown_scopes := set(disabled + enabled) - set(base.scopes):
11000 - unknown = ', '.join(map(repr, unknown_scopes))
11001 - available = ', '.join(base.scopes)
11002 + unknown = ", ".join(map(repr, unknown_scopes))
11003 + available = ", ".join(base.scopes)
11004 s = pluralism(unknown_scopes)
11005 raise argparse.ArgumentError(
11006 - self, f'unknown scope{s}: {unknown} (available: {available})')
11007 + self, f"unknown scope{s}: {unknown} (available: {available})"
11008 + )
11009
11010 disabled = set(chain.from_iterable(base.scopes[x] for x in disabled))
11011 enabled = set(chain.from_iterable(base.scopes[x] for x in enabled))
11012
11013 if enabled:
11014 - namespace.enabled_checks = {
11015 - c for c in objects.CHECKS.values() if c.scope in enabled}
11016 + namespace.enabled_checks = {c for c in objects.CHECKS.values() if c.scope in enabled}
11017 if disabled:
11018 namespace.enabled_checks.difference_update(
11019 - c for c in objects.CHECKS.values() if c.scope in disabled)
11020 + c for c in objects.CHECKS.values() if c.scope in disabled
11021 + )
11022
11023 setattr(namespace, self.dest, frozenset(enabled))
11024
11025 @@ -247,9 +255,9 @@ class CheckArgs(arghparse.CommaSeparatedElements):
11026
11027 # validate selected checks
11028 if unknown_checks := set(subtractive + neutral + additive) - set(objects.CHECKS):
11029 - unknown = ', '.join(map(repr, unknown_checks))
11030 + unknown = ", ".join(map(repr, unknown_checks))
11031 s = pluralism(unknown_checks)
11032 - raise argparse.ArgumentError(self, f'unknown check{s}: {unknown}')
11033 + raise argparse.ArgumentError(self, f"unknown check{s}: {unknown}")
11034
11035 if neutral:
11036 # replace the default check set
11037 @@ -259,8 +267,7 @@ class CheckArgs(arghparse.CommaSeparatedElements):
11038 namespace.enabled_checks.update(objects.CHECKS[c] for c in additive)
11039 if subtractive:
11040 # remove from the default check set
11041 - namespace.enabled_checks.difference_update(
11042 - objects.CHECKS[c] for c in subtractive)
11043 + namespace.enabled_checks.difference_update(objects.CHECKS[c] for c in subtractive)
11044
11045 setattr(namespace, self.dest, frozenset(neutral + additive))
11046
11047 @@ -278,9 +285,9 @@ class KeywordArgs(arghparse.CommaSeparatedNegations):
11048
11049 # validate selected keywords
11050 if unknown_keywords := set(disabled + enabled) - set(objects.KEYWORDS):
11051 - unknown = ', '.join(map(repr, unknown_keywords))
11052 + unknown = ", ".join(map(repr, unknown_keywords))
11053 s = pluralism(unknown_keywords)
11054 - raise argparse.ArgumentError(self, f'unknown keyword{s}: {unknown}')
11055 + raise argparse.ArgumentError(self, f"unknown keyword{s}: {unknown}")
11056
11057 # create keyword instance sets
11058 disabled_keywords = {objects.KEYWORDS[k] for k in disabled}
11059 @@ -292,8 +299,7 @@ class KeywordArgs(arghparse.CommaSeparatedNegations):
11060 for check in list(namespace.enabled_checks):
11061 if check.known_results.issubset(disabled_keywords):
11062 namespace.enabled_checks.discard(check)
11063 - enabled_keywords = set().union(
11064 - *(c.known_results for c in namespace.enabled_checks))
11065 + enabled_keywords = set().union(*(c.known_results for c in namespace.enabled_checks))
11066
11067 namespace.filtered_keywords = enabled_keywords - disabled_keywords
11068 # restrict enabled checks if none have been selected
11069 @@ -305,7 +311,7 @@ class KeywordArgs(arghparse.CommaSeparatedNegations):
11070
11071 # check if experimental profiles are required for explicitly selected keywords
11072 for r in namespace.filtered_keywords:
11073 - if r.name in enabled and r._profile == 'exp':
11074 + if r.name in enabled and r._profile == "exp":
11075 namespace.exp_profiles_required = True
11076 break
11077
11078 @@ -331,17 +337,19 @@ class ExitArgs(arghparse.CommaSeparatedElements):
11079 def __call__(self, parser, namespace, values, option_string=None):
11080 # default to using error results if no keywords are selected
11081 if values is None:
11082 - values = 'error'
11083 + values = "error"
11084
11085 subtractive, neutral, additive = self.parse_values(values)
11086
11087 # default to using error results if no neutral keywords are selected
11088 if not neutral:
11089 - neutral.append('error')
11090 + neutral.append("error")
11091
11092 # expand args to keyword objects
11093 keywords = {objects.KEYWORDS[x] for x in self.args_to_keywords(namespace, neutral)}
11094 keywords.update(objects.KEYWORDS[x] for x in self.args_to_keywords(namespace, additive))
11095 - keywords.difference_update(objects.KEYWORDS[x] for x in self.args_to_keywords(namespace, subtractive))
11096 + keywords.difference_update(
11097 + objects.KEYWORDS[x] for x in self.args_to_keywords(namespace, subtractive)
11098 + )
11099
11100 setattr(namespace, self.dest, frozenset(keywords))
11101
11102 diff --git a/src/pkgcheck/scripts/argparsers.py b/src/pkgcheck/scripts/argparsers.py
11103 index d082cf36..0fd349d7 100644
11104 --- a/src/pkgcheck/scripts/argparsers.py
11105 +++ b/src/pkgcheck/scripts/argparsers.py
11106 @@ -7,18 +7,25 @@ from snakeoil.cli import arghparse
11107 from .. import objects, reporters
11108
11109 reporter_argparser = arghparse.ArgumentParser(suppress=True)
11110 -reporter_options = reporter_argparser.add_argument_group('reporter options')
11111 +reporter_options = reporter_argparser.add_argument_group("reporter options")
11112 reporter_options.add_argument(
11113 - '-R', '--reporter', action='store', default=None,
11114 - help='use a non-default reporter',
11115 + "-R",
11116 + "--reporter",
11117 + action="store",
11118 + default=None,
11119 + help="use a non-default reporter",
11120 docs="""
11121 Select a reporter to use for output.
11122
11123 Use ``pkgcheck show --reporters`` to see available options.
11124 - """)
11125 + """,
11126 +)
11127 reporter_options.add_argument(
11128 - '--format', dest='format_str', action='store', default=None,
11129 - help='format string used with FormatReporter',
11130 + "--format",
11131 + dest="format_str",
11132 + action="store",
11133 + default=None,
11134 + help="format string used with FormatReporter",
11135 docs="""
11136 Custom format string used to format output by FormatReporter.
11137
11138 @@ -34,34 +41,40 @@ reporter_options.add_argument(
11139 requested attribute expansion in the format string. In other words,
11140 ``--format {foo}`` will never produce any output because no result has the
11141 ``foo`` attribute.
11142 - """)
11143 + """,
11144 +)
11145
11146
11147 @reporter_argparser.bind_final_check
11148 def _setup_reporter(parser, namespace):
11149 if namespace.reporter is None:
11150 namespace.reporter = sorted(
11151 - objects.REPORTERS.values(), key=attrgetter('priority'), reverse=True)[0]
11152 + objects.REPORTERS.values(), key=attrgetter("priority"), reverse=True
11153 + )[0]
11154 else:
11155 try:
11156 namespace.reporter = objects.REPORTERS[namespace.reporter]
11157 except KeyError:
11158 - available = ', '.join(objects.REPORTERS)
11159 - parser.error(
11160 - f"no reporter matches {namespace.reporter!r} "
11161 - f"(available: {available})")
11162 + available = ", ".join(objects.REPORTERS)
11163 + parser.error(f"no reporter matches {namespace.reporter!r} " f"(available: {available})")
11164
11165 if namespace.reporter is reporters.FormatReporter:
11166 if not namespace.format_str:
11167 - parser.error('missing or empty --format option required by FormatReporter')
11168 + parser.error("missing or empty --format option required by FormatReporter")
11169 namespace.reporter = partial(namespace.reporter, namespace.format_str)
11170 elif namespace.format_str is not None:
11171 - parser.error('--format option is only valid when using FormatReporter')
11172 + parser.error("--format option is only valid when using FormatReporter")
11173
11174
11175 repo_argparser = arghparse.ArgumentParser(suppress=True)
11176 -repo_options = repo_argparser.add_argument_group('repo options')
11177 +repo_options = repo_argparser.add_argument_group("repo options")
11178 repo_options.add_argument(
11179 - '-r', '--repo', metavar='REPO', dest='target_repo',
11180 - action=commandline.StoreRepoObject, repo_type='ebuild-raw', allow_external_repos=True,
11181 - help='target repo')
11182 + "-r",
11183 + "--repo",
11184 + metavar="REPO",
11185 + dest="target_repo",
11186 + action=commandline.StoreRepoObject,
11187 + repo_type="ebuild-raw",
11188 + allow_external_repos=True,
11189 + help="target repo",
11190 +)
11191
11192 diff --git a/src/pkgcheck/scripts/pkgcheck.py b/src/pkgcheck/scripts/pkgcheck.py
11193 index 9eb5c0c8..7ec3cf77 100644
11194 --- a/src/pkgcheck/scripts/pkgcheck.py
11195 +++ b/src/pkgcheck/scripts/pkgcheck.py
11196 @@ -7,4 +7,5 @@ ebuild repositories for various issues.
11197 from pkgcore.util import commandline
11198
11199 argparser = commandline.ArgumentParser(
11200 - description=__doc__, help=False, subcmds=True, script=(__file__, __name__))
11201 + description=__doc__, help=False, subcmds=True, script=(__file__, __name__)
11202 +)
11203
11204 diff --git a/src/pkgcheck/scripts/pkgcheck_cache.py b/src/pkgcheck/scripts/pkgcheck_cache.py
11205 index a986bb2d..5787c65e 100644
11206 --- a/src/pkgcheck/scripts/pkgcheck_cache.py
11207 +++ b/src/pkgcheck/scripts/pkgcheck_cache.py
11208 @@ -10,34 +10,37 @@ from .argparse_actions import CacheNegations
11209 from .argparsers import repo_argparser
11210
11211 cache = arghparse.ArgumentParser(
11212 - prog='pkgcheck cache', description='perform cache operations',
11213 + prog="pkgcheck cache",
11214 + description="perform cache operations",
11215 parents=(repo_argparser,),
11216 docs="""
11217 Various types of caches are used by pkgcheck. This command supports
11218 running operations on them including updates and removals.
11219 - """)
11220 + """,
11221 +)
11222 cache.add_argument(
11223 - '--cache-dir', type=arghparse.create_dir, default=const.USER_CACHE_DIR,
11224 - help='directory to use for storing cache files')
11225 + "--cache-dir",
11226 + type=arghparse.create_dir,
11227 + default=const.USER_CACHE_DIR,
11228 + help="directory to use for storing cache files",
11229 +)
11230 cache_actions = cache.add_mutually_exclusive_group()
11231 cache_actions.add_argument(
11232 - '-l', '--list', dest='list_cache', action='store_true',
11233 - help='list available caches')
11234 + "-l", "--list", dest="list_cache", action="store_true", help="list available caches"
11235 +)
11236 cache_actions.add_argument(
11237 - '-u', '--update', dest='update_cache', action='store_true',
11238 - help='update caches')
11239 + "-u", "--update", dest="update_cache", action="store_true", help="update caches"
11240 +)
11241 cache_actions.add_argument(
11242 - '-R', '--remove', dest='remove_cache', action='store_true',
11243 - help='forcibly remove caches')
11244 + "-R", "--remove", dest="remove_cache", action="store_true", help="forcibly remove caches"
11245 +)
11246 cache.add_argument(
11247 - '-f', '--force', dest='force_cache', action='store_true',
11248 - help='forcibly update/remove caches')
11249 + "-f", "--force", dest="force_cache", action="store_true", help="forcibly update/remove caches"
11250 +)
11251 cache.add_argument(
11252 - '-n', '--dry-run', action='store_true',
11253 - help='dry run without performing any changes')
11254 -cache.add_argument(
11255 - '-t', '--type', dest='cache', action=CacheNegations,
11256 - help='target cache types')
11257 + "-n", "--dry-run", action="store_true", help="dry run without performing any changes"
11258 +)
11259 +cache.add_argument("-t", "--type", dest="cache", action=CacheNegations, help="target cache types")
11260
11261
11262 @cache.bind_pre_parse
11263 @@ -50,16 +53,14 @@ def _setup_cache_addons(parser, namespace):
11264 @cache.bind_early_parse
11265 def _setup_cache(parser, namespace, args):
11266 if namespace.target_repo is None:
11267 - namespace.target_repo = namespace.config.get_default('repo')
11268 + namespace.target_repo = namespace.config.get_default("repo")
11269 return namespace, args
11270
11271
11272 @cache.bind_final_check
11273 def _validate_cache_args(parser, namespace):
11274 enabled_caches = {k for k, v in namespace.cache.items() if v}
11275 - cache_addons = (
11276 - addon for addon in CachedAddon.caches
11277 - if addon.cache.type in enabled_caches)
11278 + cache_addons = (addon for addon in CachedAddon.caches if addon.cache.type in enabled_caches)
11279 # sort caches by type
11280 namespace.cache_addons = sorted(cache_addons, key=lambda x: x.cache.type)
11281
11282 @@ -72,18 +73,18 @@ def _cache(options, out, err):
11283 cache_obj = CachedAddon(options)
11284 cache_obj.remove_caches()
11285 elif options.update_cache:
11286 - for addon_cls in options.pop('cache_addons'):
11287 + for addon_cls in options.pop("cache_addons"):
11288 init_addon(addon_cls, options)
11289 else:
11290 # list existing caches
11291 cache_obj = CachedAddon(options)
11292 - repos_dir = pjoin(options.cache_dir, 'repos')
11293 + repos_dir = pjoin(options.cache_dir, "repos")
11294 for cache_type in sorted(options.enabled_caches):
11295 paths = cache_obj.existing_caches[cache_type]
11296 if paths:
11297 - out.write(out.fg('yellow'), f'{cache_type} caches: ', out.reset)
11298 + out.write(out.fg("yellow"), f"{cache_type} caches: ", out.reset)
11299 for path in paths:
11300 - repo = str(path.parent)[len(repos_dir):]
11301 + repo = str(path.parent)[len(repos_dir) :]
11302 # non-path repo ids get path separator stripped
11303 if repo.count(os.sep) == 1:
11304 repo = repo.lstrip(os.sep)
11305
11306 diff --git a/src/pkgcheck/scripts/pkgcheck_ci.py b/src/pkgcheck/scripts/pkgcheck_ci.py
11307 index 8db03e14..1a15f640 100644
11308 --- a/src/pkgcheck/scripts/pkgcheck_ci.py
11309 +++ b/src/pkgcheck/scripts/pkgcheck_ci.py
11310 @@ -21,10 +21,10 @@ class ArgumentParser(arghparse.ArgumentParser):
11311 return namespace, []
11312
11313
11314 -ci = ArgumentParser(prog='pkgcheck ci', description='scan repo for CI')
11315 +ci = ArgumentParser(prog="pkgcheck ci", description="scan repo for CI")
11316 ci.add_argument(
11317 - '--failures', type=argparse.FileType('w'),
11318 - help='file path for storing failure results')
11319 + "--failures", type=argparse.FileType("w"), help="file path for storing failure results"
11320 +)
11321
11322
11323 @ci.bind_main_func
11324
11325 diff --git a/src/pkgcheck/scripts/pkgcheck_replay.py b/src/pkgcheck/scripts/pkgcheck_replay.py
11326 index 2f025f5e..37e0024e 100644
11327 --- a/src/pkgcheck/scripts/pkgcheck_replay.py
11328 +++ b/src/pkgcheck/scripts/pkgcheck_replay.py
11329 @@ -5,7 +5,8 @@ from ..base import PkgcheckUserException
11330 from .argparsers import reporter_argparser
11331
11332 replay = arghparse.ArgumentParser(
11333 - prog='pkgcheck replay', description='replay result streams',
11334 + prog="pkgcheck replay",
11335 + description="replay result streams",
11336 parents=(reporter_argparser,),
11337 docs="""
11338 Replay previous json result streams, feeding the results into a reporter.
11339 @@ -13,10 +14,14 @@ replay = arghparse.ArgumentParser(
11340 Useful if you need to delay acting on results until it can be done in
11341 one minimal window, e.g. updating a database, or want to generate
11342 several different reports.
11343 - """)
11344 + """,
11345 +)
11346 replay.add_argument(
11347 - dest='results', metavar='FILE',
11348 - type=arghparse.FileType('rb'), help='path to serialized results file')
11349 + dest="results",
11350 + metavar="FILE",
11351 + type=arghparse.FileType("rb"),
11352 + help="path to serialized results file",
11353 +)
11354
11355
11356 @replay.bind_main_func
11357 @@ -30,8 +35,7 @@ def _replay(options, out, err):
11358 processed += 1
11359 except reporters.DeserializationError as e:
11360 if not processed:
11361 - raise PkgcheckUserException('invalid or unsupported replay file')
11362 - raise PkgcheckUserException(
11363 - f'corrupted results file {options.results.name!r}: {e}')
11364 + raise PkgcheckUserException("invalid or unsupported replay file")
11365 + raise PkgcheckUserException(f"corrupted results file {options.results.name!r}: {e}")
11366
11367 return 0
11368
11369 diff --git a/src/pkgcheck/scripts/pkgcheck_scan.py b/src/pkgcheck/scripts/pkgcheck_scan.py
11370 index e5227bbf..1d583407 100644
11371 --- a/src/pkgcheck/scripts/pkgcheck_scan.py
11372 +++ b/src/pkgcheck/scripts/pkgcheck_scan.py
11373 @@ -20,10 +20,12 @@ from . import argparse_actions
11374 from .argparsers import repo_argparser, reporter_argparser
11375
11376 config_argparser = arghparse.ArgumentParser(suppress=True)
11377 -config_options = config_argparser.add_argument_group('config options')
11378 +config_options = config_argparser.add_argument_group("config options")
11379 config_options.add_argument(
11380 - '--config', action=argparse_actions.ConfigArg, dest='config_file',
11381 - help='use custom pkgcheck scan settings file',
11382 + "--config",
11383 + action=argparse_actions.ConfigArg,
11384 + dest="config_file",
11385 + help="use custom pkgcheck scan settings file",
11386 docs="""
11387 Load custom pkgcheck scan settings from a given file.
11388
11389 @@ -32,21 +34,31 @@ config_options.add_argument(
11390
11391 It's also possible to disable all types of settings loading by
11392 specifying an argument of 'false' or 'no'.
11393 - """)
11394 + """,
11395 +)
11396
11397
11398 scan = arghparse.ArgumentParser(
11399 - prog='pkgcheck scan', description='scan targets for QA issues',
11400 - parents=(config_argparser, repo_argparser, reporter_argparser))
11401 + prog="pkgcheck scan",
11402 + description="scan targets for QA issues",
11403 + parents=(config_argparser, repo_argparser, reporter_argparser),
11404 +)
11405 scan.add_argument(
11406 - 'targets', metavar='TARGET', nargs='*', action=arghparse.ParseNonblockingStdin,
11407 - help='optional targets')
11408 -
11409 -main_options = scan.add_argument_group('main options')
11410 + "targets",
11411 + metavar="TARGET",
11412 + nargs="*",
11413 + action=arghparse.ParseNonblockingStdin,
11414 + help="optional targets",
11415 +)
11416 +
11417 +main_options = scan.add_argument_group("main options")
11418 main_options.add_argument(
11419 - '-f', '--filter',
11420 - action=arghparse.Delayed, target=argparse_actions.FilterArgs, priority=99,
11421 - help='limit targeted packages for scanning',
11422 + "-f",
11423 + "--filter",
11424 + action=arghparse.Delayed,
11425 + target=argparse_actions.FilterArgs,
11426 + priority=99,
11427 + help="limit targeted packages for scanning",
11428 docs="""
11429 Support limiting targeted packages for scanning using a chosen filter.
11430
11431 @@ -62,23 +74,31 @@ main_options.add_argument(
11432 network-related checks are filtered to avoid redundant or unnecessary
11433 server requests. In order to forcibly disable all filtering use the
11434 'no' argument.
11435 - """)
11436 + """,
11437 +)
11438 main_options.add_argument(
11439 - '-j', '--jobs', type=arghparse.positive_int,
11440 - help='number of checks to run in parallel',
11441 + "-j",
11442 + "--jobs",
11443 + type=arghparse.positive_int,
11444 + help="number of checks to run in parallel",
11445 docs="""
11446 Number of checks to run in parallel, defaults to using all available
11447 processors.
11448 - """)
11449 + """,
11450 +)
11451 main_options.add_argument(
11452 - '-t', '--tasks', type=arghparse.positive_int,
11453 - help='number of asynchronous tasks to run concurrently',
11454 + "-t",
11455 + "--tasks",
11456 + type=arghparse.positive_int,
11457 + help="number of asynchronous tasks to run concurrently",
11458 docs="""
11459 Number of asynchronous tasks to run concurrently (defaults to 5 * CPU count).
11460 - """)
11461 + """,
11462 +)
11463 main_options.add_argument(
11464 - '--cache', action=argparse_actions.CacheNegations,
11465 - help='forcibly enable/disable caches',
11466 + "--cache",
11467 + action=argparse_actions.CacheNegations,
11468 + help="forcibly enable/disable caches",
11469 docs="""
11470 All cache types are enabled by default, this option explicitly sets
11471 which caches will be generated and used during scanning.
11472 @@ -97,14 +117,22 @@ main_options.add_argument(
11473
11474 When disabled, no caches will be saved to disk and results requiring
11475 caches (e.g. git-related checks) will be skipped.
11476 - """)
11477 + """,
11478 +)
11479 main_options.add_argument(
11480 - '--cache-dir', type=arghparse.create_dir, default=const.USER_CACHE_DIR,
11481 - help='directory to use for storing cache files')
11482 + "--cache-dir",
11483 + type=arghparse.create_dir,
11484 + default=const.USER_CACHE_DIR,
11485 + help="directory to use for storing cache files",
11486 +)
11487 main_options.add_argument(
11488 - '--exit', metavar='ITEM', dest='exit_keywords',
11489 - action=argparse_actions.ExitArgs, nargs='?', default=(),
11490 - help='checksets, checks, or keywords that trigger an error exit status',
11491 + "--exit",
11492 + metavar="ITEM",
11493 + dest="exit_keywords",
11494 + action=argparse_actions.ExitArgs,
11495 + nargs="?",
11496 + default=(),
11497 + help="checksets, checks, or keywords that trigger an error exit status",
11498 docs="""
11499 Comma-separated list of checksets, checks, or keywords to enable and
11500 disable that trigger an exit status failure. Checkset and check
11501 @@ -116,17 +144,25 @@ main_options.add_argument(
11502 To specify disabled keywords prefix them with ``-``. Also, the special
11503 arguments of ``error``, ``warning``, ``style``, and ``info`` correspond
11504 to the related keyword groups.
11505 - """)
11506 + """,
11507 +)
11508
11509
11510 -check_options = scan.add_argument_group('check selection')
11511 +check_options = scan.add_argument_group("check selection")
11512 check_options.add_argument(
11513 - '--net', nargs=0,
11514 - action=arghparse.Delayed, target=argparse_actions.EnableNet, priority=-1,
11515 - help='enable checks that require network access')
11516 + "--net",
11517 + nargs=0,
11518 + action=arghparse.Delayed,
11519 + target=argparse_actions.EnableNet,
11520 + priority=-1,
11521 + help="enable checks that require network access",
11522 +)
11523 check_options.add_argument(
11524 - '-C', '--checksets', metavar='CHECKSET', action=argparse_actions.ChecksetArgs,
11525 - help='scan using a configured set of check/keyword args',
11526 + "-C",
11527 + "--checksets",
11528 + metavar="CHECKSET",
11529 + action=argparse_actions.ChecksetArgs,
11530 + help="scan using a configured set of check/keyword args",
11531 docs="""
11532 Comma-separated list of checksets to enable and disable for
11533 scanning.
11534 @@ -137,11 +173,18 @@ check_options.add_argument(
11535 All network-related checks (which are disabled by default)
11536 can be enabled using ``-C net``. This allows for easily running only
11537 network checks without having to explicitly list them.
11538 - """)
11539 + """,
11540 +)
11541 check_options.add_argument(
11542 - '-s', '--scopes', metavar='SCOPE', dest='selected_scopes', default=(),
11543 - action=arghparse.Delayed, target=argparse_actions.ScopeArgs, priority=51,
11544 - help='limit checks to run by scope',
11545 + "-s",
11546 + "--scopes",
11547 + metavar="SCOPE",
11548 + dest="selected_scopes",
11549 + default=(),
11550 + action=arghparse.Delayed,
11551 + target=argparse_actions.ScopeArgs,
11552 + priority=51,
11553 + help="limit checks to run by scope",
11554 docs="""
11555 Comma-separated list of scopes to enable and disable for scanning. Any
11556 scopes specified in this fashion will affect the checks that get
11557 @@ -149,11 +192,19 @@ check_options.add_argument(
11558 enabled will cause only repo-level checks to run.
11559
11560 Available scopes: %s
11561 - """ % (', '.join(base.scopes)))
11562 + """
11563 + % (", ".join(base.scopes)),
11564 +)
11565 check_options.add_argument(
11566 - '-c', '--checks', metavar='CHECK', dest='selected_checks', default=(),
11567 - action=arghparse.Delayed, target=argparse_actions.CheckArgs, priority=52,
11568 - help='limit checks to run',
11569 + "-c",
11570 + "--checks",
11571 + metavar="CHECK",
11572 + dest="selected_checks",
11573 + default=(),
11574 + action=arghparse.Delayed,
11575 + target=argparse_actions.CheckArgs,
11576 + priority=52,
11577 + help="limit checks to run",
11578 docs="""
11579 Comma-separated list of checks to enable and disable for
11580 scanning. Any checks specified in this fashion will be the
11581 @@ -169,11 +220,18 @@ check_options.add_argument(
11582 optional checks in addition to the default set.
11583
11584 Use ``pkgcheck show --checks`` see all available checks.
11585 - """)
11586 + """,
11587 +)
11588 check_options.add_argument(
11589 - '-k', '--keywords', metavar='KEYWORD', dest='selected_keywords', default=(),
11590 - action=arghparse.Delayed, target=argparse_actions.KeywordArgs, priority=53,
11591 - help='limit keywords to scan for',
11592 + "-k",
11593 + "--keywords",
11594 + metavar="KEYWORD",
11595 + dest="selected_keywords",
11596 + default=(),
11597 + action=arghparse.Delayed,
11598 + target=argparse_actions.KeywordArgs,
11599 + priority=53,
11600 + help="limit keywords to scan for",
11601 docs="""
11602 Comma-separated list of keywords to enable and disable for
11603 scanning. Any keywords specified in this fashion will be the
11604 @@ -189,9 +247,10 @@ check_options.add_argument(
11605 scan for errors use ``-k error``.
11606
11607 Use ``pkgcheck show --keywords`` to see available options.
11608 - """)
11609 + """,
11610 +)
11611
11612 -scan.plugin = scan.add_argument_group('plugin options')
11613 +scan.plugin = scan.add_argument_group("plugin options")
11614
11615
11616 def _determine_target_repo(namespace):
11617 @@ -225,17 +284,16 @@ def _determine_target_repo(namespace):
11618
11619 # determine if CWD is inside an unconfigured repo
11620 try:
11621 - repo = namespace.domain.find_repo(
11622 - target_dir, config=namespace.config, configure=False)
11623 + repo = namespace.domain.find_repo(target_dir, config=namespace.config, configure=False)
11624 except (repo_errors.InitializationError, IOError) as e:
11625 raise argparse.ArgumentError(None, str(e))
11626
11627 # fallback to the default repo
11628 if repo is None:
11629 - repo = namespace.config.get_default('repo')
11630 + repo = namespace.config.get_default("repo")
11631 # if the bundled stub repo is the default, no default repo exists
11632 - if repo is None or repo.location == pjoin(pkgcore_const.DATA_PATH, 'stubrepo'):
11633 - raise argparse.ArgumentError(None, 'no default repo found')
11634 + if repo is None or repo.location == pjoin(pkgcore_const.DATA_PATH, "stubrepo"):
11635 + raise argparse.ArgumentError(None, "no default repo found")
11636
11637 return repo
11638
11639 @@ -268,9 +326,10 @@ def _path_restrict(path, repo):
11640 def _restrict_to_scope(restrict):
11641 """Determine a given restriction's scope level."""
11642 for scope, attrs in (
11643 - (base.version_scope, ['fullver', 'version', 'rev']),
11644 - (base.package_scope, ['package']),
11645 - (base.category_scope, ['category'])):
11646 + (base.version_scope, ["fullver", "version", "rev"]),
11647 + (base.package_scope, ["package"]),
11648 + (base.category_scope, ["category"]),
11649 + ):
11650 if any(collect_package_restrictions(restrict, attrs)):
11651 return scope
11652 return base.repo_scope
11653 @@ -299,13 +358,13 @@ def _setup_scan(parser, namespace, args):
11654 # parser supporting config file options
11655 config_parser = ConfigFileParser(parser)
11656 # always load settings from bundled config
11657 - namespace = config_parser.parse_config_options(
11658 - namespace, configs=[const.BUNDLED_CONF_FILE])
11659 + namespace = config_parser.parse_config_options(namespace, configs=[const.BUNDLED_CONF_FILE])
11660
11661 # load default args from system/user configs if config-loading is allowed
11662 if namespace.config_file is None:
11663 namespace = config_parser.parse_config_options(
11664 - namespace, configs=ConfigFileParser.default_configs)
11665 + namespace, configs=ConfigFileParser.default_configs
11666 + )
11667
11668 # TODO: Limit to parsing repo and targets options here so all args don't
11669 # have to be parsed twice, will probably require a custom snakeoil
11670 @@ -325,14 +384,14 @@ def _setup_scan(parser, namespace, args):
11671 namespace.target_repo = _determine_target_repo(namespace)
11672
11673 # determine if we're running in the gentoo repo or a clone
11674 - namespace.gentoo_repo = 'gentoo' in namespace.target_repo.aliases
11675 + namespace.gentoo_repo = "gentoo" in namespace.target_repo.aliases
11676
11677 # multiplex of target repo and its masters used for package existence queries
11678 namespace.search_repo = multiplex.tree(*namespace.target_repo.trees)
11679
11680 if namespace.config_file is not False:
11681 # support loading repo-specific config settings from metadata/pkgcheck.conf
11682 - repo_config_file = os.path.join(namespace.target_repo.location, 'metadata', 'pkgcheck.conf')
11683 + repo_config_file = os.path.join(namespace.target_repo.location, "metadata", "pkgcheck.conf")
11684 configs = [repo_config_file]
11685 # custom user settings take precedence over previous configs
11686 if namespace.config_file:
11687 @@ -342,7 +401,7 @@ def _setup_scan(parser, namespace, args):
11688 # load repo-specific args from config if they exist
11689 namespace = config_parser.parse_config_sections(namespace, namespace.target_repo.aliases)
11690
11691 - if os.getenv('NOCOLOR'):
11692 + if os.getenv("NOCOLOR"):
11693 namespace.color = False
11694
11695 return namespace, args
11696 @@ -356,10 +415,10 @@ def generate_restricts(repo, targets):
11697 path = os.path.realpath(target)
11698 # prefer path restrictions if it's in the target repo
11699 if os.path.exists(path) and path in repo:
11700 - if path.endswith('.eclass'):
11701 + if path.endswith(".eclass"):
11702 # direct eclass file targets
11703 yield base.eclass_scope, os.path.basename(path)[:-7]
11704 - elif path.startswith(profiles_base) and path[len(profiles_base):]:
11705 + elif path.startswith(profiles_base) and path[len(profiles_base) :]:
11706 if os.path.isdir(path):
11707 # descend into profiles dir targets
11708 for root, _dirs, files in os.walk(path):
11709 @@ -381,44 +440,45 @@ def generate_restricts(repo, targets):
11710 # use path-based error for path-based targets
11711 if os.path.exists(path) or os.path.isabs(target):
11712 raise PkgcheckUserException(
11713 - f"{repo.repo_id!r} repo doesn't contain: {target!r}")
11714 + f"{repo.repo_id!r} repo doesn't contain: {target!r}"
11715 + )
11716 raise PkgcheckUserException(str(e))
11717
11718
11719 -@scan.bind_delayed_default(1000, 'jobs')
11720 +@scan.bind_delayed_default(1000, "jobs")
11721 def _default_jobs(namespace, attr):
11722 """Extract jobs count from MAKEOPTS."""
11723 parser = argparse.ArgumentParser()
11724 - parser.add_argument('-j', '--jobs', type=arghparse.positive_int, default=os.cpu_count())
11725 - makeopts, _ = parser.parse_known_args(shlex.split(os.getenv('MAKEOPTS', '')))
11726 + parser.add_argument("-j", "--jobs", type=arghparse.positive_int, default=os.cpu_count())
11727 + makeopts, _ = parser.parse_known_args(shlex.split(os.getenv("MAKEOPTS", "")))
11728 setattr(namespace, attr, makeopts.jobs)
11729
11730
11731 -@scan.bind_delayed_default(1001, 'tasks')
11732 +@scan.bind_delayed_default(1001, "tasks")
11733 def _default_tasks(namespace, attr):
11734 """Set based on jobs count."""
11735 setattr(namespace, attr, namespace.jobs * 5)
11736
11737
11738 -@scan.bind_delayed_default(1000, 'filter')
11739 +@scan.bind_delayed_default(1000, "filter")
11740 def _default_filter(namespace, attr):
11741 """Use source filtering for keywords requesting it by default."""
11742 setattr(namespace, attr, objects.KEYWORDS.filter)
11743
11744
11745 -@scan.bind_delayed_default(1000, 'enabled_checks')
11746 +@scan.bind_delayed_default(1000, "enabled_checks")
11747 def _default_enabled_checks(namespace, attr):
11748 """All non-optional checks are run by default."""
11749 setattr(namespace, attr, set(objects.CHECKS.default.values()))
11750
11751
11752 -@scan.bind_delayed_default(1000, 'filtered_keywords')
11753 +@scan.bind_delayed_default(1000, "filtered_keywords")
11754 def _default_filtered_keywords(namespace, attr):
11755 """Enable all keywords to be shown by default."""
11756 setattr(namespace, attr, set(objects.KEYWORDS.values()))
11757
11758
11759 -@scan.bind_delayed_default(9999, 'restrictions')
11760 +@scan.bind_delayed_default(9999, "restrictions")
11761 def _determine_restrictions(namespace, attr):
11762 """Determine restrictions for untargeted scans and generate collapsed restriction for targeted scans."""
11763 if namespace.targets:
11764 @@ -428,7 +488,7 @@ def _determine_restrictions(namespace, attr):
11765 # running pipeline.
11766 restrictions = list(generate_restricts(namespace.target_repo, namespace.targets))
11767 if not restrictions:
11768 - raise PkgcheckUserException('no targets')
11769 + raise PkgcheckUserException("no targets")
11770 else:
11771 if namespace.cwd in namespace.target_repo:
11772 scope, restrict = _path_restrict(namespace.cwd, namespace.target_repo)
11773 @@ -445,7 +505,7 @@ def _determine_restrictions(namespace, attr):
11774 def _scan(options, out, err):
11775 with ExitStack() as stack:
11776 reporter = options.reporter(out)
11777 - for c in options.pop('contexts') + [reporter]:
11778 + for c in options.pop("contexts") + [reporter]:
11779 stack.enter_context(c)
11780 pipe = Pipeline(options)
11781 for result in pipe:
11782
11783 diff --git a/src/pkgcheck/scripts/pkgcheck_show.py b/src/pkgcheck/scripts/pkgcheck_show.py
11784 index 8273bf5e..4ea20e34 100644
11785 --- a/src/pkgcheck/scripts/pkgcheck_show.py
11786 +++ b/src/pkgcheck/scripts/pkgcheck_show.py
11787 @@ -8,53 +8,72 @@ from snakeoil.formatters import decorate_forced_wrapping
11788 from .. import base, objects
11789 from ..addons.caches import CachedAddon
11790
11791 -show = arghparse.ArgumentParser(
11792 - prog='pkgcheck show', description='show various pkgcheck info')
11793 -list_options = show.add_argument_group('list options')
11794 +show = arghparse.ArgumentParser(prog="pkgcheck show", description="show various pkgcheck info")
11795 +list_options = show.add_argument_group("list options")
11796 output_types = list_options.add_mutually_exclusive_group()
11797 output_types.add_argument(
11798 - '-k', '--keywords', action='store_true', default=False,
11799 - help='show available warning/error keywords',
11800 + "-k",
11801 + "--keywords",
11802 + action="store_true",
11803 + default=False,
11804 + help="show available warning/error keywords",
11805 docs="""
11806 List all available keywords.
11807
11808 Use -v/--verbose to show keywords sorted into the scope they run at
11809 (repository, category, package, or version) along with their
11810 descriptions.
11811 - """)
11812 + """,
11813 +)
11814 output_types.add_argument(
11815 - '-c', '--checks', action='store_true', default=False,
11816 - help='show available checks',
11817 + "-c",
11818 + "--checks",
11819 + action="store_true",
11820 + default=False,
11821 + help="show available checks",
11822 docs="""
11823 List all available checks.
11824
11825 Use -v/--verbose to show descriptions and possible keyword results for
11826 each check.
11827 - """)
11828 + """,
11829 +)
11830 output_types.add_argument(
11831 - '-s', '--scopes', action='store_true', default=False,
11832 - help='show available keyword/check scopes',
11833 + "-s",
11834 + "--scopes",
11835 + action="store_true",
11836 + default=False,
11837 + help="show available keyword/check scopes",
11838 docs="""
11839 List all available keyword and check scopes.
11840
11841 Use -v/--verbose to show scope descriptions.
11842 - """)
11843 + """,
11844 +)
11845 output_types.add_argument(
11846 - '-r', '--reporters', action='store_true', default=False,
11847 - help='show available reporters',
11848 + "-r",
11849 + "--reporters",
11850 + action="store_true",
11851 + default=False,
11852 + help="show available reporters",
11853 docs="""
11854 List all available reporters.
11855
11856 Use -v/--verbose to show reporter descriptions.
11857 - """)
11858 + """,
11859 +)
11860 output_types.add_argument(
11861 - '-C', '--caches', action='store_true', default=False,
11862 - help='show available caches',
11863 + "-C",
11864 + "--caches",
11865 + action="store_true",
11866 + default=False,
11867 + help="show available caches",
11868 docs="""
11869 List all available cache types.
11870
11871 Use -v/--verbose to show more cache information.
11872 - """)
11873 + """,
11874 +)
11875
11876
11877 def dump_docstring(out, obj, prefix=None):
11878 @@ -63,16 +82,16 @@ def dump_docstring(out, obj, prefix=None):
11879 out.later_prefix.append(prefix)
11880 try:
11881 if obj.__doc__ is None:
11882 - raise ValueError(f'no docs for {obj!r}')
11883 + raise ValueError(f"no docs for {obj!r}")
11884
11885 # Docstrings start with an unindented line, everything else is
11886 # consistently indented.
11887 - lines = obj.__doc__.split('\n')
11888 + lines = obj.__doc__.split("\n")
11889 # some docstrings start on the second line
11890 if firstline := lines[0].strip():
11891 out.write(firstline)
11892 if len(lines) > 1:
11893 - for line in textwrap.dedent('\n'.join(lines[1:])).split('\n'):
11894 + for line in textwrap.dedent("\n".join(lines[1:])).split("\n"):
11895 out.write(line)
11896 else:
11897 out.write()
11898 @@ -85,23 +104,23 @@ def dump_docstring(out, obj, prefix=None):
11899 @decorate_forced_wrapping()
11900 def display_keywords(out, options):
11901 if options.verbosity < 1:
11902 - out.write('\n'.join(sorted(objects.KEYWORDS)), wrap=False)
11903 + out.write("\n".join(sorted(objects.KEYWORDS)), wrap=False)
11904 else:
11905 scopes = defaultdict(set)
11906 for keyword in objects.KEYWORDS.values():
11907 scopes[keyword.scope].add(keyword)
11908
11909 for scope in reversed(sorted(scopes)):
11910 - out.write(out.bold, f'{scope.desc.capitalize()} scope:')
11911 + out.write(out.bold, f"{scope.desc.capitalize()} scope:")
11912 out.write()
11913 - keywords = sorted(scopes[scope], key=attrgetter('__name__'))
11914 + keywords = sorted(scopes[scope], key=attrgetter("__name__"))
11915
11916 try:
11917 - out.first_prefix.append(' ')
11918 - out.later_prefix.append(' ')
11919 + out.first_prefix.append(" ")
11920 + out.later_prefix.append(" ")
11921 for keyword in keywords:
11922 - out.write(out.fg(keyword.color), keyword.__name__, out.reset, ':')
11923 - dump_docstring(out, keyword, prefix=' ')
11924 + out.write(out.fg(keyword.color), keyword.__name__, out.reset, ":")
11925 + dump_docstring(out, keyword, prefix=" ")
11926 finally:
11927 out.first_prefix.pop()
11928 out.later_prefix.pop()
11929 @@ -110,7 +129,7 @@ def display_keywords(out, options):
11930 @decorate_forced_wrapping()
11931 def display_checks(out, options):
11932 if options.verbosity < 1:
11933 - out.write('\n'.join(sorted(objects.CHECKS)), wrap=False)
11934 + out.write("\n".join(sorted(objects.CHECKS)), wrap=False)
11935 else:
11936 d = defaultdict(list)
11937 for x in objects.CHECKS.values():
11938 @@ -120,21 +139,21 @@ def display_checks(out, options):
11939 out.write(out.bold, f"{module_name}:")
11940 out.write()
11941 checks = d[module_name]
11942 - checks.sort(key=attrgetter('__name__'))
11943 + checks.sort(key=attrgetter("__name__"))
11944
11945 try:
11946 - out.first_prefix.append(' ')
11947 - out.later_prefix.append(' ')
11948 + out.first_prefix.append(" ")
11949 + out.later_prefix.append(" ")
11950 for check in checks:
11951 - out.write(out.fg('yellow'), check.__name__, out.reset, ':')
11952 - dump_docstring(out, check, prefix=' ')
11953 + out.write(out.fg("yellow"), check.__name__, out.reset, ":")
11954 + dump_docstring(out, check, prefix=" ")
11955
11956 # output result types that each check can generate
11957 keywords = []
11958 - for r in sorted(check.known_results, key=attrgetter('__name__')):
11959 - keywords.extend([out.fg(r.color), r.__name__, out.reset, ', '])
11960 + for r in sorted(check.known_results, key=attrgetter("__name__")):
11961 + keywords.extend([out.fg(r.color), r.__name__, out.reset, ", "])
11962 keywords.pop()
11963 - out.write(*([' (known results: '] + keywords + [')']))
11964 + out.write(*([" (known results: "] + keywords + [")"]))
11965 out.write()
11966
11967 finally:
11968 @@ -145,15 +164,15 @@ def display_checks(out, options):
11969 @decorate_forced_wrapping()
11970 def display_reporters(out, options):
11971 if options.verbosity < 1:
11972 - out.write('\n'.join(sorted(objects.REPORTERS)), wrap=False)
11973 + out.write("\n".join(sorted(objects.REPORTERS)), wrap=False)
11974 else:
11975 out.write("reporters:")
11976 out.write()
11977 - out.first_prefix.append(' ')
11978 - out.later_prefix.append(' ')
11979 - for reporter in sorted(objects.REPORTERS.values(), key=attrgetter('__name__')):
11980 - out.write(out.bold, out.fg('yellow'), reporter.__name__)
11981 - dump_docstring(out, reporter, prefix=' ')
11982 + out.first_prefix.append(" ")
11983 + out.later_prefix.append(" ")
11984 + for reporter in sorted(objects.REPORTERS.values(), key=attrgetter("__name__")):
11985 + out.write(out.bold, out.fg("yellow"), reporter.__name__)
11986 + dump_docstring(out, reporter, prefix=" ")
11987
11988
11989 @show.bind_main_func
11990 @@ -162,19 +181,19 @@ def _show(options, out, err):
11991 display_checks(out, options)
11992 elif options.scopes:
11993 if options.verbosity < 1:
11994 - out.write('\n'.join(base.scopes))
11995 + out.write("\n".join(base.scopes))
11996 else:
11997 for name, scope in base.scopes.items():
11998 - out.write(f'{name} -- {scope.desc} scope')
11999 + out.write(f"{name} -- {scope.desc} scope")
12000 elif options.reporters:
12001 display_reporters(out, options)
12002 elif options.caches:
12003 if options.verbosity < 1:
12004 - caches = sorted(map(attrgetter('type'), CachedAddon.caches.values()))
12005 - out.write('\n'.join(caches))
12006 + caches = sorted(map(attrgetter("type"), CachedAddon.caches.values()))
12007 + out.write("\n".join(caches))
12008 else:
12009 - for cache in sorted(CachedAddon.caches.values(), key=attrgetter('type')):
12010 - out.write(f'{cache.type} -- file: {cache.file}, version: {cache.version}')
12011 + for cache in sorted(CachedAddon.caches.values(), key=attrgetter("type")):
12012 + out.write(f"{cache.type} -- file: {cache.file}, version: {cache.version}")
12013 else:
12014 # default to showing keywords if no output option is selected
12015 display_keywords(out, options)
12016
12017 diff --git a/src/pkgcheck/sources.py b/src/pkgcheck/sources.py
12018 index e6230589..2d0832cd 100644
12019 --- a/src/pkgcheck/sources.py
12020 +++ b/src/pkgcheck/sources.py
12021 @@ -64,8 +64,9 @@ class LatestVersionRepoSource(RepoSource):
12022 """Repo source that returns only the latest non-VCS and VCS slots"""
12023
12024 def itermatch(self, *args, **kwargs):
12025 - for _, pkgs in groupby(super().itermatch(*args, **kwargs),
12026 - key=lambda pkg: pkg.slotted_atom):
12027 + for _, pkgs in groupby(
12028 + super().itermatch(*args, **kwargs), key=lambda pkg: pkg.slotted_atom
12029 + ):
12030 best_by_live = {pkg.live: pkg for pkg in pkgs}
12031 yield from sorted(best_by_live.values())
12032
12033 @@ -94,7 +95,7 @@ class LatestVersionsFilter:
12034 # determine the latest non-VCS and VCS pkgs for each slot
12035 while key == pkg.key:
12036 if pkg.live:
12037 - selected_pkgs[f'vcs-{pkg.slot}'] = pkg
12038 + selected_pkgs[f"vcs-{pkg.slot}"] = pkg
12039 else:
12040 selected_pkgs[pkg.slot] = pkg
12041
12042 @@ -111,7 +112,8 @@ class LatestVersionsFilter:
12043
12044 selected_pkgs = set(selected_pkgs.values())
12045 self._pkg_cache.extend(
12046 - FilteredPkg(pkg=pkg) if pkg not in selected_pkgs else pkg for pkg in pkgs)
12047 + FilteredPkg(pkg=pkg) if pkg not in selected_pkgs else pkg for pkg in pkgs
12048 + )
12049
12050 return self._pkg_cache.popleft()
12051
12052 @@ -132,7 +134,7 @@ class LatestPkgsFilter:
12053 # determine the latest non-VCS and VCS pkgs for each slot
12054 for pkg in pkgs:
12055 if pkg.live:
12056 - selected_pkgs[f'vcs-{pkg.slot}'] = pkg
12057 + selected_pkgs[f"vcs-{pkg.slot}"] = pkg
12058 else:
12059 selected_pkgs[pkg.slot] = pkg
12060
12061 @@ -166,7 +168,7 @@ class EclassRepoSource(RepoSource):
12062 def __init__(self, *args, eclass_addon, **kwargs):
12063 super().__init__(*args, **kwargs)
12064 self.eclasses = eclass_addon._eclass_repos[self.repo.location]
12065 - self.eclass_dir = pjoin(self.repo.location, 'eclass')
12066 + self.eclass_dir = pjoin(self.repo.location, "eclass")
12067
12068 def itermatch(self, restrict, **kwargs):
12069 if isinstance(restrict, str):
12070 @@ -178,12 +180,13 @@ class EclassRepoSource(RepoSource):
12071 eclasses = self.eclasses
12072
12073 for name in eclasses:
12074 - yield Eclass(name, pjoin(self.eclass_dir, f'{name}.eclass'))
12075 + yield Eclass(name, pjoin(self.eclass_dir, f"{name}.eclass"))
12076
12077
12078 @dataclass
12079 class Profile:
12080 """Generic profile object."""
12081 +
12082 node: ProfileNode
12083 files: set
12084
12085 @@ -196,8 +199,7 @@ class ProfilesRepoSource(RepoSource):
12086 def __init__(self, *args, **kwargs):
12087 super().__init__(*args, **kwargs)
12088 self.profiles_dir = self.repo.config.profiles_base
12089 - self.non_profile_dirs = {
12090 - f'profiles/{x}' for x in ProfileAddon.non_profile_dirs}
12091 + self.non_profile_dirs = {f"profiles/{x}" for x in ProfileAddon.non_profile_dirs}
12092 self._prefix_len = len(self.repo.location.rstrip(os.sep)) + 1
12093
12094 def itermatch(self, restrict, **kwargs):
12095 @@ -221,7 +223,7 @@ class ProfilesRepoSource(RepoSource):
12096 else:
12097 # matching all profiles
12098 for root, _dirs, files in os.walk(self.profiles_dir):
12099 - if root[self._prefix_len:] not in self.non_profile_dirs:
12100 + if root[self._prefix_len :] not in self.non_profile_dirs:
12101 yield Profile(ProfileNode(root), set(files))
12102
12103
12104 @@ -234,17 +236,15 @@ class _RawRepo(UnconfiguredTree):
12105 Deviates from parent in that no package name check is done.
12106 """
12107 cppath = pjoin(self.base, catpkg[0], catpkg[1])
12108 - pkg = f'{catpkg[-1]}-'
12109 + pkg = f"{catpkg[-1]}-"
12110 lp = len(pkg)
12111 extension = self.extension
12112 ext_len = -len(extension)
12113 try:
12114 - return tuple(
12115 - x[lp:ext_len] for x in listdir_files(cppath)
12116 - if x[ext_len:] == extension)
12117 + return tuple(x[lp:ext_len] for x in listdir_files(cppath) if x[ext_len:] == extension)
12118 except EnvironmentError as e:
12119 path = pjoin(self.base, os.sep.join(catpkg))
12120 - raise KeyError(f'failed fetching versions for package {path}: {e}') from e
12121 + raise KeyError(f"failed fetching versions for package {path}: {e}") from e
12122
12123
12124 class RawRepoSource(RepoSource):
12125 @@ -276,8 +276,14 @@ class UnmaskedRepoSource(RepoSource):
12126 def __init__(self, *args, **kwargs):
12127 super().__init__(*args, **kwargs)
12128 self._filtered_repo = self.options.domain.filter_repo(
12129 - self.repo, pkg_masks=(), pkg_unmasks=(), pkg_filters=(),
12130 - pkg_accept_keywords=(), pkg_keywords=(), profile=False)
12131 + self.repo,
12132 + pkg_masks=(),
12133 + pkg_unmasks=(),
12134 + pkg_filters=(),
12135 + pkg_accept_keywords=(),
12136 + pkg_keywords=(),
12137 + profile=False,
12138 + )
12139
12140 def itermatch(self, restrict, **kwargs):
12141 yield from self._filtered_repo.itermatch(restrict, **kwargs)
12142 @@ -286,7 +292,7 @@ class UnmaskedRepoSource(RepoSource):
12143 class _SourcePkg(WrappedPkg):
12144 """Package object with file contents injected as an attribute."""
12145
12146 - __slots__ = ('lines',)
12147 + __slots__ = ("lines",)
12148
12149 def __init__(self, pkg):
12150 super().__init__(pkg)
12151 @@ -323,8 +329,8 @@ class _ParsedEclass(ParseTree):
12152 super().__init__(data)
12153 self.eclass = eclass
12154
12155 - __getattr__ = klass.GetAttrProxy('eclass')
12156 - __dir__ = klass.DirProxy('eclass')
12157 + __getattr__ = klass.GetAttrProxy("eclass")
12158 + __dir__ = klass.DirProxy("eclass")
12159
12160
12161 class EclassParseRepoSource(EclassRepoSource):
12162 @@ -332,7 +338,7 @@ class EclassParseRepoSource(EclassRepoSource):
12163
12164 def itermatch(self, restrict, **kwargs):
12165 for eclass in super().itermatch(restrict, **kwargs):
12166 - with open(eclass.path, 'rb') as f:
12167 + with open(eclass.path, "rb") as f:
12168 data = f.read()
12169 yield _ParsedEclass(data, eclass=eclass)
12170
12171 @@ -364,14 +370,14 @@ class PackageRepoSource(_CombinedSource):
12172 """Ebuild repository source yielding lists of versioned packages per package."""
12173
12174 scope = base.package_scope
12175 - keyfunc = attrgetter('key')
12176 + keyfunc = attrgetter("key")
12177
12178
12179 class CategoryRepoSource(_CombinedSource):
12180 """Ebuild repository source yielding lists of versioned packages per category."""
12181
12182 scope = base.category_scope
12183 - keyfunc = attrgetter('category')
12184 + keyfunc = attrgetter("category")
12185
12186
12187 class RepositoryRepoSource(RepoSource):
12188 @@ -401,13 +407,13 @@ class _FilteredSource(RawRepoSource):
12189 class UnversionedSource(_FilteredSource):
12190 """Source yielding unversioned atoms from matching packages."""
12191
12192 - keyfunc = attrgetter('unversioned_atom')
12193 + keyfunc = attrgetter("unversioned_atom")
12194
12195
12196 class VersionedSource(_FilteredSource):
12197 """Source yielding versioned atoms from matching packages."""
12198
12199 - keyfunc = attrgetter('versioned_atom')
12200 + keyfunc = attrgetter("versioned_atom")
12201
12202
12203 def init_source(source, options, addons_map=None):
12204 @@ -417,8 +423,8 @@ def init_source(source, options, addons_map=None):
12205 cls, args, kwargs = source
12206 kwargs = dict(kwargs)
12207 # initialize wrapped source
12208 - if 'source' in kwargs:
12209 - kwargs['source'] = init_source(kwargs['source'], options, addons_map)
12210 + if "source" in kwargs:
12211 + kwargs["source"] = init_source(kwargs["source"], options, addons_map)
12212 else:
12213 cls, args = source
12214 kwargs = {}
12215
12216 diff --git a/src/pkgcheck/utils.py b/src/pkgcheck/utils.py
12217 index 22e50824..da716568 100644
12218 --- a/src/pkgcheck/utils.py
12219 +++ b/src/pkgcheck/utils.py
12220 @@ -30,7 +30,7 @@
12221 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
12222 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
12223
12224 -_control_chars = b'\n\r\t\f\b'
12225 +_control_chars = b"\n\r\t\f\b"
12226 _printable_ascii = _control_chars + bytes(range(32, 127))
12227 _printable_high_ascii = bytes(range(127, 256))
12228
12229 @@ -51,7 +51,7 @@ def is_binary(path, blocksize=1024):
12230 :returns: True if appears to be a binary, otherwise False.
12231 """
12232 try:
12233 - with open(path, 'rb') as f:
12234 + with open(path, "rb") as f:
12235 byte_str = f.read(blocksize)
12236 except IOError:
12237 return False
12238 @@ -75,9 +75,8 @@ def is_binary(path, blocksize=1024):
12239 high_chars = byte_str.translate(None, _printable_high_ascii)
12240 nontext_ratio2 = len(high_chars) / len(byte_str)
12241
12242 - is_likely_binary = (
12243 - (nontext_ratio1 > 0.3 and nontext_ratio2 < 0.05) or
12244 - (nontext_ratio1 > 0.8 and nontext_ratio2 > 0.8)
12245 + is_likely_binary = (nontext_ratio1 > 0.3 and nontext_ratio2 < 0.05) or (
12246 + nontext_ratio1 > 0.8 and nontext_ratio2 > 0.8
12247 )
12248
12249 decodable = False
12250 @@ -91,9 +90,9 @@ def is_binary(path, blocksize=1024):
12251
12252 # guess character encoding using chardet
12253 detected_encoding = chardet.detect(byte_str)
12254 - if detected_encoding['confidence'] > 0.8:
12255 + if detected_encoding["confidence"] > 0.8:
12256 try:
12257 - byte_str.decode(encoding=detected_encoding['encoding'])
12258 + byte_str.decode(encoding=detected_encoding["encoding"])
12259 decodable = True
12260 except (UnicodeDecodeError, LookupError):
12261 pass
12262 @@ -101,6 +100,6 @@ def is_binary(path, blocksize=1024):
12263 # finally use all the checks to decide binary or text
12264 if decodable:
12265 return False
12266 - if is_likely_binary or b'\x00' in byte_str:
12267 + if is_likely_binary or b"\x00" in byte_str:
12268 return True
12269 return False
12270
12271 diff --git a/testdata/repos/network/FetchablesUrlCheck/DeadUrl/responses.py b/testdata/repos/network/FetchablesUrlCheck/DeadUrl/responses.py
12272 index 6c48dfab..8c23ea65 100644
12273 --- a/testdata/repos/network/FetchablesUrlCheck/DeadUrl/responses.py
12274 +++ b/testdata/repos/network/FetchablesUrlCheck/DeadUrl/responses.py
12275 @@ -4,7 +4,7 @@ from requests.models import Response
12276
12277 r = Response()
12278 r.status_code = 404
12279 -r.reason = 'Not Found'
12280 -r.url = 'https://github.com/pkgcore/pkgcheck/foo.tar.gz'
12281 +r.reason = "Not Found"
12282 +r.url = "https://github.com/pkgcore/pkgcheck/foo.tar.gz"
12283 r.raw = io.StringIO()
12284 responses = [r]
12285
12286 diff --git a/testdata/repos/network/FetchablesUrlCheck/HttpsUrlAvailable/responses.py b/testdata/repos/network/FetchablesUrlCheck/HttpsUrlAvailable/responses.py
12287 index 68b0e2de..215aadfa 100644
12288 --- a/testdata/repos/network/FetchablesUrlCheck/HttpsUrlAvailable/responses.py
12289 +++ b/testdata/repos/network/FetchablesUrlCheck/HttpsUrlAvailable/responses.py
12290 @@ -5,14 +5,14 @@ from requests.models import Response
12291 # initial URL check
12292 r = Response()
12293 r.status_code = 200
12294 -r.reason = 'OK'
12295 -r.url = 'http://github.com/pkgcore/pkgcheck/foo.tar.gz'
12296 +r.reason = "OK"
12297 +r.url = "http://github.com/pkgcore/pkgcheck/foo.tar.gz"
12298 r.raw = io.StringIO()
12299 # now checking if https:// exists
12300 https_r = Response()
12301 https_r.status_code = 200
12302 -https_r.reason = 'OK'
12303 -https_r.url = 'https://github.com/pkgcore/pkgcheck/foo.tar.gz'
12304 +https_r.reason = "OK"
12305 +https_r.url = "https://github.com/pkgcore/pkgcheck/foo.tar.gz"
12306 https_r.raw = io.StringIO()
12307
12308 responses = [r, https_r]
12309
12310 diff --git a/testdata/repos/network/FetchablesUrlCheck/RedirectedUrl/responses.py b/testdata/repos/network/FetchablesUrlCheck/RedirectedUrl/responses.py
12311 index b582b6db..14333d48 100644
12312 --- a/testdata/repos/network/FetchablesUrlCheck/RedirectedUrl/responses.py
12313 +++ b/testdata/repos/network/FetchablesUrlCheck/RedirectedUrl/responses.py
12314 @@ -4,15 +4,15 @@ from requests.models import Response
12315
12316 r_hist = Response()
12317 r_hist.status_code = 301
12318 -r_hist.reason = 'Moved Permanently'
12319 -r_hist.url = 'https://github.com/pkgcore/pkgcheck/foo.tar.gz'
12320 -r_hist.headers = {'location': 'https://github.com/pkgcore/pkgcheck/foo-moved.tar.gz'}
12321 +r_hist.reason = "Moved Permanently"
12322 +r_hist.url = "https://github.com/pkgcore/pkgcheck/foo.tar.gz"
12323 +r_hist.headers = {"location": "https://github.com/pkgcore/pkgcheck/foo-moved.tar.gz"}
12324 r_hist.raw = io.StringIO()
12325
12326 r = Response()
12327 r.status_code = 200
12328 -r.reason = 'OK'
12329 -r.url = 'https://github.com/pkgcore/pkgcheck/foo.tar.gz'
12330 +r.reason = "OK"
12331 +r.url = "https://github.com/pkgcore/pkgcheck/foo.tar.gz"
12332 r.history = [r_hist]
12333 r.raw = io.StringIO()
12334
12335
12336 diff --git a/testdata/repos/network/FetchablesUrlCheck/SSLCertificateError/responses.py b/testdata/repos/network/FetchablesUrlCheck/SSLCertificateError/responses.py
12337 index 95ed6778..b9d30062 100644
12338 --- a/testdata/repos/network/FetchablesUrlCheck/SSLCertificateError/responses.py
12339 +++ b/testdata/repos/network/FetchablesUrlCheck/SSLCertificateError/responses.py
12340 @@ -1,3 +1,3 @@
12341 from requests.exceptions import SSLError
12342
12343 -responses = [SSLError('Certificate verification failed')]
12344 +responses = [SSLError("Certificate verification failed")]
12345
12346 diff --git a/testdata/repos/network/HomepageUrlCheck/DeadUrl-connection-error/responses.py b/testdata/repos/network/HomepageUrlCheck/DeadUrl-connection-error/responses.py
12347 index ad3fd857..0a78a313 100644
12348 --- a/testdata/repos/network/HomepageUrlCheck/DeadUrl-connection-error/responses.py
12349 +++ b/testdata/repos/network/HomepageUrlCheck/DeadUrl-connection-error/responses.py
12350 @@ -1,3 +1,3 @@
12351 from requests.exceptions import ConnectionError
12352
12353 -responses = [ConnectionError('connection failed')]
12354 +responses = [ConnectionError("connection failed")]
12355
12356 diff --git a/testdata/repos/network/HomepageUrlCheck/DeadUrl/responses.py b/testdata/repos/network/HomepageUrlCheck/DeadUrl/responses.py
12357 index e490c7ed..31ad363c 100644
12358 --- a/testdata/repos/network/HomepageUrlCheck/DeadUrl/responses.py
12359 +++ b/testdata/repos/network/HomepageUrlCheck/DeadUrl/responses.py
12360 @@ -4,7 +4,7 @@ from requests.models import Response
12361
12362 r = Response()
12363 r.status_code = 404
12364 -r.reason = 'Not Found'
12365 -r.url = 'https://github.com/pkgcore/pkgcheck'
12366 +r.reason = "Not Found"
12367 +r.url = "https://github.com/pkgcore/pkgcheck"
12368 r.raw = io.StringIO()
12369 responses = [r]
12370
12371 diff --git a/testdata/repos/network/HomepageUrlCheck/HttpsUrlAvailable/responses.py b/testdata/repos/network/HomepageUrlCheck/HttpsUrlAvailable/responses.py
12372 index 2f6f43d4..a43f15d4 100644
12373 --- a/testdata/repos/network/HomepageUrlCheck/HttpsUrlAvailable/responses.py
12374 +++ b/testdata/repos/network/HomepageUrlCheck/HttpsUrlAvailable/responses.py
12375 @@ -5,14 +5,14 @@ from requests.models import Response
12376 # initial URL check
12377 r = Response()
12378 r.status_code = 200
12379 -r.reason = 'OK'
12380 -r.url = 'http://github.com/pkgcore/pkgcheck'
12381 +r.reason = "OK"
12382 +r.url = "http://github.com/pkgcore/pkgcheck"
12383 r.raw = io.StringIO()
12384 # now checking if https:// exists
12385 https_r = Response()
12386 https_r.status_code = 200
12387 -https_r.reason = 'OK'
12388 -https_r.url = 'https://github.com/pkgcore/pkgcheck'
12389 +https_r.reason = "OK"
12390 +https_r.url = "https://github.com/pkgcore/pkgcheck"
12391 https_r.raw = io.StringIO()
12392
12393 responses = [r, https_r]
12394
12395 diff --git a/testdata/repos/network/HomepageUrlCheck/RedirectedUrl/responses.py b/testdata/repos/network/HomepageUrlCheck/RedirectedUrl/responses.py
12396 index 71360581..384a2466 100644
12397 --- a/testdata/repos/network/HomepageUrlCheck/RedirectedUrl/responses.py
12398 +++ b/testdata/repos/network/HomepageUrlCheck/RedirectedUrl/responses.py
12399 @@ -4,15 +4,15 @@ from requests.models import Response
12400
12401 r_hist = Response()
12402 r_hist.status_code = 301
12403 -r_hist.reason = 'Moved Permanently'
12404 -r_hist.url = 'https://github.com/pkgcore/pkgcheck'
12405 -r_hist.headers = {'location': 'https://github.com/pkgcore/pkgcheck-moved'}
12406 +r_hist.reason = "Moved Permanently"
12407 +r_hist.url = "https://github.com/pkgcore/pkgcheck"
12408 +r_hist.headers = {"location": "https://github.com/pkgcore/pkgcheck-moved"}
12409 r_hist.raw = io.StringIO()
12410
12411 r = Response()
12412 r.status_code = 200
12413 -r.reason = 'OK'
12414 -r.url = 'https://github.com/pkgcore/pkgcheck'
12415 +r.reason = "OK"
12416 +r.url = "https://github.com/pkgcore/pkgcheck"
12417 r.raw = io.StringIO()
12418 r.history = [r_hist]
12419
12420
12421 diff --git a/testdata/repos/network/HomepageUrlCheck/SSLCertificateError/responses.py b/testdata/repos/network/HomepageUrlCheck/SSLCertificateError/responses.py
12422 index 95ed6778..b9d30062 100644
12423 --- a/testdata/repos/network/HomepageUrlCheck/SSLCertificateError/responses.py
12424 +++ b/testdata/repos/network/HomepageUrlCheck/SSLCertificateError/responses.py
12425 @@ -1,3 +1,3 @@
12426 from requests.exceptions import SSLError
12427
12428 -responses = [SSLError('Certificate verification failed')]
12429 +responses = [SSLError("Certificate verification failed")]
12430
12431 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-bitbucket/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-bitbucket/responses.py
12432 index ba4f7cd1..35f8f6bd 100644
12433 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-bitbucket/responses.py
12434 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-bitbucket/responses.py
12435 @@ -4,7 +4,7 @@ from requests.models import Response
12436
12437 r = Response()
12438 r.status_code = 404
12439 -r.reason = 'Not Found'
12440 -r.url = 'https://bitbucket.org/pkgcore/pkgcheck'
12441 +r.reason = "Not Found"
12442 +r.url = "https://bitbucket.org/pkgcore/pkgcheck"
12443 r.raw = io.StringIO()
12444 responses = [r]
12445
12446 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-cpan/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-cpan/responses.py
12447 index 02e637f4..84c21ff3 100644
12448 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-cpan/responses.py
12449 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-cpan/responses.py
12450 @@ -4,7 +4,7 @@ from requests.models import Response
12451
12452 r = Response()
12453 r.status_code = 404
12454 -r.reason = 'Not Found'
12455 -r.url = 'https://metacpan.org/dist/PkgCore-PkgCheck'
12456 +r.reason = "Not Found"
12457 +r.url = "https://metacpan.org/dist/PkgCore-PkgCheck"
12458 r.raw = io.StringIO()
12459 responses = [r]
12460
12461 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-cran/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-cran/responses.py
12462 index 7e6bef3e..63ee8e0e 100644
12463 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-cran/responses.py
12464 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-cran/responses.py
12465 @@ -4,7 +4,7 @@ from requests.models import Response
12466
12467 r = Response()
12468 r.status_code = 404
12469 -r.reason = 'Not Found'
12470 -r.url = 'https://cran.r-project.org/web/packages/PkgCheck/'
12471 +r.reason = "Not Found"
12472 +r.url = "https://cran.r-project.org/web/packages/PkgCheck/"
12473 r.raw = io.StringIO()
12474 responses = [r]
12475
12476 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-ctan/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-ctan/responses.py
12477 index ff9b152e..6297edda 100644
12478 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-ctan/responses.py
12479 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-ctan/responses.py
12480 @@ -4,7 +4,7 @@ from requests.models import Response
12481
12482 r = Response()
12483 r.status_code = 404
12484 -r.reason = 'Not Found'
12485 -r.url = 'https://ctan.org/pkg/pkgcheck'
12486 +r.reason = "Not Found"
12487 +r.url = "https://ctan.org/pkg/pkgcheck"
12488 r.raw = io.StringIO()
12489 responses = [r]
12490
12491 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-freedesktop-gitlab/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-freedesktop-gitlab/responses.py
12492 index d32f1ee0..9193e4d7 100644
12493 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-freedesktop-gitlab/responses.py
12494 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-freedesktop-gitlab/responses.py
12495 @@ -4,7 +4,7 @@ from requests.models import Response
12496
12497 r = Response()
12498 r.status_code = 404
12499 -r.reason = 'Not Found'
12500 -r.url = 'https://gitlab.freedesktop.org/pkgcore/pkgcheck.git/'
12501 +r.reason = "Not Found"
12502 +r.url = "https://gitlab.freedesktop.org/pkgcore/pkgcheck.git/"
12503 r.raw = io.StringIO()
12504 responses = [r]
12505
12506 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-gentoo/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-gentoo/responses.py
12507 index 61271075..d9c007a0 100644
12508 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-gentoo/responses.py
12509 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-gentoo/responses.py
12510 @@ -4,7 +4,7 @@ from requests.models import Response
12511
12512 r = Response()
12513 r.status_code = 404
12514 -r.reason = 'Not Found'
12515 -r.url = 'https://gitweb.gentoo.org/proj/pkgcheck.git/'
12516 +r.reason = "Not Found"
12517 +r.url = "https://gitweb.gentoo.org/proj/pkgcheck.git/"
12518 r.raw = io.StringIO()
12519 responses = [r]
12520
12521 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-gnome-gitlab/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-gnome-gitlab/responses.py
12522 index 694e4834..416016ed 100644
12523 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-gnome-gitlab/responses.py
12524 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-gnome-gitlab/responses.py
12525 @@ -4,7 +4,7 @@ from requests.models import Response
12526
12527 r = Response()
12528 r.status_code = 404
12529 -r.reason = 'Not Found'
12530 -r.url = 'https://gitlab.gnome.org/pkgcore/pkgcheck.git/'
12531 +r.reason = "Not Found"
12532 +r.url = "https://gitlab.gnome.org/pkgcore/pkgcheck.git/"
12533 r.raw = io.StringIO()
12534 responses = [r]
12535
12536 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-hackage/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-hackage/responses.py
12537 index 34770180..e2627295 100644
12538 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-hackage/responses.py
12539 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-hackage/responses.py
12540 @@ -4,7 +4,7 @@ from requests.models import Response
12541
12542 r = Response()
12543 r.status_code = 404
12544 -r.reason = 'Not Found'
12545 -r.url = 'https://hackage.haskell.org/package/pkgcheck'
12546 +r.reason = "Not Found"
12547 +r.url = "https://hackage.haskell.org/package/pkgcheck"
12548 r.raw = io.StringIO()
12549 responses = [r]
12550
12551 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-launchpad/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-launchpad/responses.py
12552 index 92455e24..e4327013 100644
12553 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-launchpad/responses.py
12554 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-launchpad/responses.py
12555 @@ -4,7 +4,7 @@ from requests.models import Response
12556
12557 r = Response()
12558 r.status_code = 404
12559 -r.reason = 'Not Found'
12560 -r.url = 'https://launchpad.net/pkgcheck'
12561 +r.reason = "Not Found"
12562 +r.url = "https://launchpad.net/pkgcheck"
12563 r.raw = io.StringIO()
12564 responses = [r]
12565
12566 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-osdn/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-osdn/responses.py
12567 index 02bcf124..f5be015d 100644
12568 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-osdn/responses.py
12569 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-osdn/responses.py
12570 @@ -4,7 +4,7 @@ from requests.models import Response
12571
12572 r = Response()
12573 r.status_code = 404
12574 -r.reason = 'Not Found'
12575 -r.url = 'https://osdn.net/projects/pkgcore/pkgcheck/'
12576 +r.reason = "Not Found"
12577 +r.url = "https://osdn.net/projects/pkgcore/pkgcheck/"
12578 r.raw = io.StringIO()
12579 responses = [r]
12580
12581 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-pecl/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-pecl/responses.py
12582 index f9585adf..fca18be6 100644
12583 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-pecl/responses.py
12584 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-pecl/responses.py
12585 @@ -4,7 +4,7 @@ from requests.models import Response
12586
12587 r = Response()
12588 r.status_code = 404
12589 -r.reason = 'Not Found'
12590 -r.url = 'https://pecl.php.net/package/pkgcheck'
12591 +r.reason = "Not Found"
12592 +r.url = "https://pecl.php.net/package/pkgcheck"
12593 r.raw = io.StringIO()
12594 responses = [r]
12595
12596 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-pypi/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-pypi/responses.py
12597 index 1a13d51b..3a164368 100644
12598 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-pypi/responses.py
12599 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-pypi/responses.py
12600 @@ -4,7 +4,7 @@ from requests.models import Response
12601
12602 r = Response()
12603 r.status_code = 404
12604 -r.reason = 'Not Found'
12605 -r.url = 'https://pypi.org/project/pkgcheck/'
12606 +r.reason = "Not Found"
12607 +r.url = "https://pypi.org/project/pkgcheck/"
12608 r.raw = io.StringIO()
12609 responses = [r]
12610
12611 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-rubygems/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-rubygems/responses.py
12612 index dd70699f..473bd566 100644
12613 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-rubygems/responses.py
12614 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-rubygems/responses.py
12615 @@ -4,7 +4,7 @@ from requests.models import Response
12616
12617 r = Response()
12618 r.status_code = 404
12619 -r.reason = 'Not Found'
12620 -r.url = 'https://rubygems.org/gems/pkgcheck'
12621 +r.reason = "Not Found"
12622 +r.url = "https://rubygems.org/gems/pkgcheck"
12623 r.raw = io.StringIO()
12624 responses = [r]
12625
12626 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah-nongnu/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah-nongnu/responses.py
12627 index d5aeb788..f1776c9c 100644
12628 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah-nongnu/responses.py
12629 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah-nongnu/responses.py
12630 @@ -4,7 +4,7 @@ from requests.models import Response
12631
12632 r = Response()
12633 r.status_code = 404
12634 -r.reason = 'Not Found'
12635 -r.url = 'https://savannah.nongnu.org/projects/pkgcheck'
12636 +r.reason = "Not Found"
12637 +r.url = "https://savannah.nongnu.org/projects/pkgcheck"
12638 r.raw = io.StringIO()
12639 responses = [r]
12640
12641 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah/responses.py
12642 index fb20f23f..eb9a56d8 100644
12643 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah/responses.py
12644 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah/responses.py
12645 @@ -4,7 +4,7 @@ from requests.models import Response
12646
12647 r = Response()
12648 r.status_code = 404
12649 -r.reason = 'Not Found'
12650 -r.url = 'https://savannah.gnu.org/projects/pkgcheck'
12651 +r.reason = "Not Found"
12652 +r.url = "https://savannah.gnu.org/projects/pkgcheck"
12653 r.raw = io.StringIO()
12654 responses = [r]
12655
12656 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourceforge/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourceforge/responses.py
12657 index 53fe194e..719a5958 100644
12658 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourceforge/responses.py
12659 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourceforge/responses.py
12660 @@ -4,7 +4,7 @@ from requests.models import Response
12661
12662 r = Response()
12663 r.status_code = 404
12664 -r.reason = 'Not Found'
12665 -r.url = 'https://sourceforge.net/projects/pkgcheck/'
12666 +r.reason = "Not Found"
12667 +r.url = "https://sourceforge.net/projects/pkgcheck/"
12668 r.raw = io.StringIO()
12669 responses = [r]
12670
12671 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourcehut/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourcehut/responses.py
12672 index a4bd454a..e79f9625 100644
12673 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourcehut/responses.py
12674 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourcehut/responses.py
12675 @@ -4,7 +4,7 @@ from requests.models import Response
12676
12677 r = Response()
12678 r.status_code = 404
12679 -r.reason = 'Not Found'
12680 -r.url = 'https://sr.ht/~pkgcore/pkgcheck/'
12681 +r.reason = "Not Found"
12682 +r.url = "https://sr.ht/~pkgcore/pkgcheck/"
12683 r.raw = io.StringIO()
12684 responses = [r]
12685
12686 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-vim/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-vim/responses.py
12687 index 9e184839..8ecfaf6d 100644
12688 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-vim/responses.py
12689 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-vim/responses.py
12690 @@ -4,7 +4,7 @@ from requests.models import Response
12691
12692 r = Response()
12693 r.status_code = 404
12694 -r.reason = 'Not Found'
12695 -r.url = 'https://vim.org/scripts/script.php?script_id=12345'
12696 +r.reason = "Not Found"
12697 +r.url = "https://vim.org/scripts/script.php?script_id=12345"
12698 r.raw = io.StringIO()
12699 responses = [r]
12700
12701 diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl/responses.py
12702 index e490c7ed..31ad363c 100644
12703 --- a/testdata/repos/network/MetadataUrlCheck/DeadUrl/responses.py
12704 +++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl/responses.py
12705 @@ -4,7 +4,7 @@ from requests.models import Response
12706
12707 r = Response()
12708 r.status_code = 404
12709 -r.reason = 'Not Found'
12710 -r.url = 'https://github.com/pkgcore/pkgcheck'
12711 +r.reason = "Not Found"
12712 +r.url = "https://github.com/pkgcore/pkgcheck"
12713 r.raw = io.StringIO()
12714 responses = [r]
12715
12716 diff --git a/testdata/repos/network/MetadataUrlCheck/HttpsUrlAvailable/responses.py b/testdata/repos/network/MetadataUrlCheck/HttpsUrlAvailable/responses.py
12717 index dacb475f..2c079574 100644
12718 --- a/testdata/repos/network/MetadataUrlCheck/HttpsUrlAvailable/responses.py
12719 +++ b/testdata/repos/network/MetadataUrlCheck/HttpsUrlAvailable/responses.py
12720 @@ -5,14 +5,14 @@ from requests.models import Response
12721 # initial URL check
12722 r = Response()
12723 r.status_code = 200
12724 -r.reason = 'OK'
12725 -r.url = 'http://github.com/pkgcore/pkgcheck/issues'
12726 +r.reason = "OK"
12727 +r.url = "http://github.com/pkgcore/pkgcheck/issues"
12728 r.raw = io.StringIO()
12729 # now checking if https:// exists
12730 https_r = Response()
12731 https_r.status_code = 200
12732 -https_r.reason = 'OK'
12733 -https_r.url = 'https://github.com/pkgcore/pkgcheck/issues'
12734 +https_r.reason = "OK"
12735 +https_r.url = "https://github.com/pkgcore/pkgcheck/issues"
12736 https_r.raw = io.StringIO()
12737
12738 responses = [r, https_r]
12739
12740 diff --git a/testdata/repos/network/MetadataUrlCheck/RedirectedUrl/responses.py b/testdata/repos/network/MetadataUrlCheck/RedirectedUrl/responses.py
12741 index 7567a614..39e182b7 100644
12742 --- a/testdata/repos/network/MetadataUrlCheck/RedirectedUrl/responses.py
12743 +++ b/testdata/repos/network/MetadataUrlCheck/RedirectedUrl/responses.py
12744 @@ -1,17 +1,18 @@
12745 import io
12746
12747 from requests.models import Response
12748 +
12749 r_hist = Response()
12750 r_hist.status_code = 301
12751 -r_hist.reason = 'Moved Permanently'
12752 -r_hist.url = 'https://github.com/pkgcore/pkgcheck'
12753 -r_hist.headers = {'location': 'https://github.com/pkgcore/pkgcheck/'}
12754 +r_hist.reason = "Moved Permanently"
12755 +r_hist.url = "https://github.com/pkgcore/pkgcheck"
12756 +r_hist.headers = {"location": "https://github.com/pkgcore/pkgcheck/"}
12757 r_hist.raw = io.StringIO()
12758
12759 r = Response()
12760 r.status_code = 301
12761 -r.reason = 'OK'
12762 -r.url = 'https://github.com/pkgcore/pkgcheck'
12763 +r.reason = "OK"
12764 +r.url = "https://github.com/pkgcore/pkgcheck"
12765 r.raw = io.StringIO()
12766 r.history = [r_hist]
12767
12768
12769 diff --git a/testdata/repos/network/MetadataUrlCheck/SSLCertificateError/responses.py b/testdata/repos/network/MetadataUrlCheck/SSLCertificateError/responses.py
12770 index 95ed6778..b9d30062 100644
12771 --- a/testdata/repos/network/MetadataUrlCheck/SSLCertificateError/responses.py
12772 +++ b/testdata/repos/network/MetadataUrlCheck/SSLCertificateError/responses.py
12773 @@ -1,3 +1,3 @@
12774 from requests.exceptions import SSLError
12775
12776 -responses = [SSLError('Certificate verification failed')]
12777 +responses = [SSLError("Certificate verification failed")]
12778
12779 diff --git a/tests/addons/test_addons.py b/tests/addons/test_addons.py
12780 index 32226927..87c59359 100644
12781 --- a/tests/addons/test_addons.py
12782 +++ b/tests/addons/test_addons.py
12783 @@ -11,70 +11,68 @@ from ..misc import FakePkg, FakeProfile, Profile
12784
12785
12786 class TestArchesAddon:
12787 -
12788 @pytest.fixture(autouse=True)
12789 def _setup(self, tool, repo):
12790 self.tool = tool
12791 self.repo = repo
12792 - self.args = ['scan', '--repo', repo.location]
12793 + self.args = ["scan", "--repo", repo.location]
12794
12795 def test_empty_default(self):
12796 options, _ = self.tool.parse_args(self.args)
12797 assert options.arches == frozenset()
12798
12799 def test_repo_default(self):
12800 - with open(pjoin(self.repo.location, 'profiles', 'arch.list'), 'w') as f:
12801 + with open(pjoin(self.repo.location, "profiles", "arch.list"), "w") as f:
12802 f.write("arm64\namd64\n")
12803 options, _ = self.tool.parse_args(self.args)
12804 - assert options.arches == frozenset(['amd64', 'arm64'])
12805 + assert options.arches == frozenset(["amd64", "arm64"])
12806
12807 def test_enabled(self):
12808 data = (
12809 - ('x86', ['x86']),
12810 - ('ppc', ['ppc']),
12811 - ('x86,ppc', ['ppc', 'x86']),
12812 + ("x86", ["x86"]),
12813 + ("ppc", ["ppc"]),
12814 + ("x86,ppc", ["ppc", "x86"]),
12815 )
12816 for arg, expected in data:
12817 - for opt in ('-a', '--arches'):
12818 - options, _ = self.tool.parse_args(self.args + [f'{opt}={arg}'])
12819 + for opt in ("-a", "--arches"):
12820 + options, _ = self.tool.parse_args(self.args + [f"{opt}={arg}"])
12821 assert options.arches == frozenset(expected)
12822
12823 def test_disabled(self):
12824 # set repo defaults
12825 - with open(pjoin(self.repo.location, 'profiles', 'arch.list'), 'w') as f:
12826 + with open(pjoin(self.repo.location, "profiles", "arch.list"), "w") as f:
12827 f.write("arm64\namd64\narm64-linux\n")
12828
12829 data = (
12830 - ('-x86', ['amd64', 'arm64']),
12831 - ('-x86,-amd64', ['arm64']),
12832 + ("-x86", ["amd64", "arm64"]),
12833 + ("-x86,-amd64", ["arm64"]),
12834 )
12835 for arg, expected in data:
12836 - for opt in ('-a', '--arches'):
12837 - options, _ = self.tool.parse_args(self.args + [f'{opt}={arg}'])
12838 + for opt in ("-a", "--arches"):
12839 + options, _ = self.tool.parse_args(self.args + [f"{opt}={arg}"])
12840 assert options.arches == frozenset(expected)
12841
12842 def test_unknown(self, capsys):
12843 # unknown arch checking requires repo defaults
12844 - with open(pjoin(self.repo.location, 'profiles', 'arch.list'), 'w') as f:
12845 + with open(pjoin(self.repo.location, "profiles", "arch.list"), "w") as f:
12846 f.write("arm64\namd64\narm64-linux\n")
12847
12848 - for arg in ('foo', 'bar'):
12849 - for opt in ('-a', '--arches'):
12850 + for arg in ("foo", "bar"):
12851 + for opt in ("-a", "--arches"):
12852 with pytest.raises(SystemExit) as excinfo:
12853 - self.tool.parse_args(self.args + [f'{opt}={arg}'])
12854 + self.tool.parse_args(self.args + [f"{opt}={arg}"])
12855 assert excinfo.value.code == 2
12856 out, err = capsys.readouterr()
12857 assert not out
12858 - assert f'unknown arch: {arg}' in err
12859 + assert f"unknown arch: {arg}" in err
12860
12861
12862 class TestStableArchesAddon:
12863 -
12864 @pytest.fixture(autouse=True)
12865 def _setup(self, tool, repo):
12866 self.tool = tool
12867 self.repo = repo
12868 - self.args = ['scan', '--repo', repo.location]
12869 + self.args = ["scan", "--repo", repo.location]
12870
12871 def test_empty_default(self):
12872 options, _ = self.tool.parse_args(self.args)
12873 @@ -82,40 +80,56 @@ class TestStableArchesAddon:
12874
12875 def test_repo_arches_default(self):
12876 """Use GLEP 72 arches.desc file if it exists."""
12877 - with open(pjoin(self.repo.location, 'profiles', 'arch.list'), 'w') as f:
12878 + with open(pjoin(self.repo.location, "profiles", "arch.list"), "w") as f:
12879 f.write("arm64\namd64\nriscv\n")
12880 - with open(pjoin(self.repo.location, 'profiles', 'arches.desc'), 'w') as f:
12881 + with open(pjoin(self.repo.location, "profiles", "arches.desc"), "w") as f:
12882 f.write("arm64 stable\namd64 stable\nriscv testing")
12883 options, _ = self.tool.parse_args(self.args)
12884 - assert options.stable_arches == {'amd64', 'arm64'}
12885 + assert options.stable_arches == {"amd64", "arm64"}
12886
12887 def test_repo_profiles_default(self):
12888 """Otherwise arch stability is determined from the profiles.desc file."""
12889 - with open(pjoin(self.repo.location, 'profiles', 'arch.list'), 'w') as f:
12890 + with open(pjoin(self.repo.location, "profiles", "arch.list"), "w") as f:
12891 f.write("arm64\namd64\nriscv\n")
12892 - os.mkdir(pjoin(self.repo.location, 'profiles', 'default'))
12893 - with open(pjoin(self.repo.location, 'profiles', 'profiles.desc'), 'w') as f:
12894 + os.mkdir(pjoin(self.repo.location, "profiles", "default"))
12895 + with open(pjoin(self.repo.location, "profiles", "profiles.desc"), "w") as f:
12896 f.write("arm64 default dev\namd64 default stable\nriscv default exp")
12897 options, _ = self.tool.parse_args(self.args)
12898 - assert options.stable_arches == {'amd64'}
12899 + assert options.stable_arches == {"amd64"}
12900
12901 def test_selected_arches(self):
12902 - for opt in ('-a', '--arches'):
12903 - options, _ = self.tool.parse_args(self.args + [f'{opt}=amd64'])
12904 - assert options.stable_arches == {'amd64'}
12905 + for opt in ("-a", "--arches"):
12906 + options, _ = self.tool.parse_args(self.args + [f"{opt}=amd64"])
12907 + assert options.stable_arches == {"amd64"}
12908
12909
12910 class Test_profile_data:
12911 -
12912 - def assertResults(self, profile, known_flags, required_immutable,
12913 - required_forced, cpv="dev-util/diffball-0.1",
12914 - key_override=None, data_override=None):
12915 + def assertResults(
12916 + self,
12917 + profile,
12918 + known_flags,
12919 + required_immutable,
12920 + required_forced,
12921 + cpv="dev-util/diffball-0.1",
12922 + key_override=None,
12923 + data_override=None,
12924 + ):
12925 profile_data = addons.profiles.ProfileData(
12926 - "test-repo", "test-profile", key_override,
12927 + "test-repo",
12928 + "test-profile",
12929 + key_override,
12930 profile.provides_repo,
12931 - packages.AlwaysFalse, profile.iuse_effective,
12932 - profile.use, profile.pkg_use, profile.masked_use, profile.forced_use, {}, set(),
12933 - 'stable', False)
12934 + packages.AlwaysFalse,
12935 + profile.iuse_effective,
12936 + profile.use,
12937 + profile.pkg_use,
12938 + profile.masked_use,
12939 + profile.forced_use,
12940 + {},
12941 + set(),
12942 + "stable",
12943 + False,
12944 + )
12945 pkg = FakePkg(cpv, data=data_override)
12946 immutable, enabled = profile_data.identify_use(pkg, set(known_flags))
12947 assert immutable == set(required_immutable)
12948 @@ -140,15 +154,15 @@ class Test_profile_data:
12949 self.assertResults(profile, ["lib", "bar"], ["lib"], ["lib"])
12950
12951 profile = FakeProfile(
12952 - forced_use={"dev-util/diffball": ["lib"]},
12953 - masked_use={"dev-util/diffball": ["lib"]})
12954 + forced_use={"dev-util/diffball": ["lib"]}, masked_use={"dev-util/diffball": ["lib"]}
12955 + )
12956 self.assertResults(profile, [], [], [])
12957 # check that masked use wins out over forced.
12958 self.assertResults(profile, ["lib", "bar"], ["lib"], [])
12959
12960 profile = FakeProfile(
12961 - forced_use={"dev-util/diffball": ["lib"]},
12962 - masked_use={"dev-util/diffball": ["lib"]})
12963 + forced_use={"dev-util/diffball": ["lib"]}, masked_use={"dev-util/diffball": ["lib"]}
12964 + )
12965 self.assertResults(profile, [], [], [])
12966 # check that masked use wins out over forced.
12967 self.assertResults(profile, ["lib", "bar"], ["lib"], [])
12968 @@ -162,7 +176,7 @@ class TestProfileAddon:
12969 def _setup(self, tool, repo, tmp_path):
12970 self.tool = tool
12971 self.repo = repo
12972 - self.args = ['scan', '--cache-dir', str(tmp_path), '--repo', repo.location]
12973 + self.args = ["scan", "--cache-dir", str(tmp_path), "--repo", repo.location]
12974
12975 def assertProfiles(self, addon, key, *profile_names):
12976 actual = sorted(x.name for y in addon.profile_evaluate_dict[key] for x in y)
12977 @@ -171,34 +185,34 @@ class TestProfileAddon:
12978
12979 def test_defaults(self):
12980 profiles = [
12981 - Profile('profile1', 'x86'),
12982 - Profile('profile1/2', 'x86'),
12983 + Profile("profile1", "x86"),
12984 + Profile("profile1/2", "x86"),
12985 ]
12986 self.repo.create_profiles(profiles)
12987 - self.repo.arches.add('x86')
12988 + self.repo.arches.add("x86")
12989 options, _ = self.tool.parse_args(self.args)
12990 addon = addons.init_addon(self.addon_kls, options)
12991 - assert sorted(addon.profile_evaluate_dict) == ['x86', '~x86']
12992 - self.assertProfiles(addon, 'x86', 'profile1', 'profile1/2')
12993 + assert sorted(addon.profile_evaluate_dict) == ["x86", "~x86"]
12994 + self.assertProfiles(addon, "x86", "profile1", "profile1/2")
12995
12996 def test_profiles_base(self):
12997 profiles = [
12998 - Profile('default-linux/dep', 'x86', deprecated=True),
12999 - Profile('default-linux', 'x86', 'dev'),
13000 - Profile('default-linux/x86', 'x86'),
13001 + Profile("default-linux/dep", "x86", deprecated=True),
13002 + Profile("default-linux", "x86", "dev"),
13003 + Profile("default-linux/x86", "x86"),
13004 ]
13005 self.repo.create_profiles(profiles)
13006 - self.repo.arches.add('x86')
13007 + self.repo.arches.add("x86")
13008 options, _ = self.tool.parse_args(self.args)
13009 addon = addons.init_addon(self.addon_kls, options)
13010 - self.assertProfiles(addon, 'x86', 'default-linux', 'default-linux/x86')
13011 + self.assertProfiles(addon, "x86", "default-linux", "default-linux/x86")
13012
13013 def test_nonexistent(self, capsys):
13014 - profile = Profile('x86', 'x86')
13015 + profile = Profile("x86", "x86")
13016 self.repo.create_profiles([profile])
13017 - for profiles in ('bar', '-bar', 'x86,bar', 'bar,x86', 'x86,-bar'):
13018 + for profiles in ("bar", "-bar", "x86,bar", "bar,x86", "x86,-bar"):
13019 with pytest.raises(SystemExit) as excinfo:
13020 - self.tool.parse_args(self.args + [f'--profiles={profiles}'])
13021 + self.tool.parse_args(self.args + [f"--profiles={profiles}"])
13022 assert excinfo.value.code == 2
13023 out, err = capsys.readouterr()
13024 assert not out
13025 @@ -206,149 +220,150 @@ class TestProfileAddon:
13026
13027 def test_profiles_args(self):
13028 profiles = [
13029 - Profile('default-linux/dep', 'x86', deprecated=True),
13030 - Profile('default-linux/dev', 'x86', 'dev'),
13031 - Profile('default-linux/exp', 'x86', 'exp'),
13032 - Profile('default-linux', 'x86'),
13033 + Profile("default-linux/dep", "x86", deprecated=True),
13034 + Profile("default-linux/dev", "x86", "dev"),
13035 + Profile("default-linux/exp", "x86", "exp"),
13036 + Profile("default-linux", "x86"),
13037 ]
13038 self.repo.create_profiles(profiles)
13039 - self.repo.arches.add('x86')
13040 + self.repo.arches.add("x86")
13041
13042 # enable stable
13043 - options, _ = self.tool.parse_args(self.args + ['--profiles=stable'])
13044 + options, _ = self.tool.parse_args(self.args + ["--profiles=stable"])
13045 addon = addons.init_addon(self.addon_kls, options)
13046 - self.assertProfiles(addon, 'x86', 'default-linux')
13047 + self.assertProfiles(addon, "x86", "default-linux")
13048
13049 # disable stable
13050 - options, _ = self.tool.parse_args(self.args + ['--profiles=-stable'])
13051 + options, _ = self.tool.parse_args(self.args + ["--profiles=-stable"])
13052 addon = addons.init_addon(self.addon_kls, options)
13053 - self.assertProfiles(addon, 'x86', 'default-linux/dev', 'default-linux/exp')
13054 + self.assertProfiles(addon, "x86", "default-linux/dev", "default-linux/exp")
13055
13056 # enable dev
13057 - options, _ = self.tool.parse_args(self.args + ['--profiles=dev'])
13058 + options, _ = self.tool.parse_args(self.args + ["--profiles=dev"])
13059 addon = addons.init_addon(self.addon_kls, options)
13060 - self.assertProfiles(addon, 'x86', 'default-linux/dev')
13061 + self.assertProfiles(addon, "x86", "default-linux/dev")
13062
13063 # disable dev
13064 - options, _ = self.tool.parse_args(self.args + ['--profiles=-dev'])
13065 + options, _ = self.tool.parse_args(self.args + ["--profiles=-dev"])
13066 addon = addons.init_addon(self.addon_kls, options)
13067 - self.assertProfiles(addon, 'x86', 'default-linux', 'default-linux/exp')
13068 + self.assertProfiles(addon, "x86", "default-linux", "default-linux/exp")
13069
13070 # enable exp
13071 - options, _ = self.tool.parse_args(self.args + ['--profiles=exp'])
13072 + options, _ = self.tool.parse_args(self.args + ["--profiles=exp"])
13073 addon = addons.init_addon(self.addon_kls, options)
13074 - self.assertProfiles(addon, 'x86', 'default-linux/exp')
13075 + self.assertProfiles(addon, "x86", "default-linux/exp")
13076
13077 # disable exp
13078 - options, _ = self.tool.parse_args(self.args + ['--profiles=-exp'])
13079 + options, _ = self.tool.parse_args(self.args + ["--profiles=-exp"])
13080 addon = addons.init_addon(self.addon_kls, options)
13081 - self.assertProfiles(addon, 'x86', 'default-linux', 'default-linux/dev')
13082 + self.assertProfiles(addon, "x86", "default-linux", "default-linux/dev")
13083
13084 # enable deprecated
13085 - options, _ = self.tool.parse_args(self.args + ['--profiles=deprecated'])
13086 + options, _ = self.tool.parse_args(self.args + ["--profiles=deprecated"])
13087 addon = addons.init_addon(self.addon_kls, options)
13088 - self.assertProfiles(addon, 'x86', 'default-linux/dep')
13089 + self.assertProfiles(addon, "x86", "default-linux/dep")
13090
13091 # disable deprecated
13092 - options, _ = self.tool.parse_args(self.args + ['--profiles=-deprecated'])
13093 + options, _ = self.tool.parse_args(self.args + ["--profiles=-deprecated"])
13094 addon = addons.init_addon(self.addon_kls, options)
13095 - self.assertProfiles(addon, 'x86', 'default-linux', 'default-linux/dev', 'default-linux/exp')
13096 + self.assertProfiles(addon, "x86", "default-linux", "default-linux/dev", "default-linux/exp")
13097
13098 # enable specific profile
13099 - options, _ = self.tool.parse_args(self.args + ['--profiles', 'default-linux/exp'])
13100 + options, _ = self.tool.parse_args(self.args + ["--profiles", "default-linux/exp"])
13101 addon = addons.init_addon(self.addon_kls, options)
13102 - self.assertProfiles(addon, 'x86', 'default-linux/exp')
13103 + self.assertProfiles(addon, "x86", "default-linux/exp")
13104
13105 # disable specific profile
13106 - options, _ = self.tool.parse_args(self.args + ['--profiles=-default-linux'])
13107 + options, _ = self.tool.parse_args(self.args + ["--profiles=-default-linux"])
13108 addon = addons.init_addon(self.addon_kls, options)
13109 - self.assertProfiles(addon, 'x86', 'default-linux/dev', 'default-linux/exp')
13110 + self.assertProfiles(addon, "x86", "default-linux/dev", "default-linux/exp")
13111
13112 def test_auto_enable_exp_profiles(self):
13113 profiles = [
13114 - Profile('default-linux/dep', 'x86', deprecated=True),
13115 - Profile('default-linux/dev', 'x86', 'dev'),
13116 - Profile('default-linux/exp', 'x86', 'exp'),
13117 - Profile('default-linux/amd64', 'amd64', 'exp'),
13118 - Profile('default-linux', 'x86'),
13119 + Profile("default-linux/dep", "x86", deprecated=True),
13120 + Profile("default-linux/dev", "x86", "dev"),
13121 + Profile("default-linux/exp", "x86", "exp"),
13122 + Profile("default-linux/amd64", "amd64", "exp"),
13123 + Profile("default-linux", "x86"),
13124 ]
13125 self.repo.create_profiles(profiles)
13126 - self.repo.arches.update(['amd64', 'x86'])
13127 + self.repo.arches.update(["amd64", "x86"])
13128
13129 # experimental profiles aren't enabled by default
13130 options, _ = self.tool.parse_args(self.args)
13131 addon = addons.init_addon(self.addon_kls, options)
13132 - self.assertProfiles(addon, 'x86', 'default-linux', 'default-linux/dev')
13133 + self.assertProfiles(addon, "x86", "default-linux", "default-linux/dev")
13134
13135 # but are auto-enabled when an arch with only exp profiles is selected
13136 - options, _ = self.tool.parse_args(self.args + ['-a', 'amd64'])
13137 + options, _ = self.tool.parse_args(self.args + ["-a", "amd64"])
13138 addon = addons.init_addon(self.addon_kls, options)
13139 - self.assertProfiles(addon, 'amd64', 'default-linux/amd64')
13140 + self.assertProfiles(addon, "amd64", "default-linux/amd64")
13141
13142 # or a result keyword is selected that requires them
13143 - options, _ = self.tool.parse_args(self.args + ['-k', 'NonsolvableDepsInExp'])
13144 + options, _ = self.tool.parse_args(self.args + ["-k", "NonsolvableDepsInExp"])
13145 addon = addons.init_addon(self.addon_kls, options)
13146 - self.assertProfiles(addon, 'amd64', 'default-linux/amd64')
13147 - self.assertProfiles(addon, 'x86', 'default-linux', 'default-linux/dev', 'default-linux/exp')
13148 + self.assertProfiles(addon, "amd64", "default-linux/amd64")
13149 + self.assertProfiles(addon, "x86", "default-linux", "default-linux/dev", "default-linux/exp")
13150
13151 def test_addon_dict(self):
13152 """ProfileAddon has methods that allow it to act like a dict of profile filters."""
13153 profiles = [
13154 - Profile('linux/x86', 'x86'),
13155 - Profile('linux/ppc', 'ppc'),
13156 + Profile("linux/x86", "x86"),
13157 + Profile("linux/ppc", "ppc"),
13158 ]
13159 self.repo.create_profiles(profiles)
13160 - self.repo.arches.update(['x86', 'ppc'])
13161 + self.repo.arches.update(["x86", "ppc"])
13162 options, _ = self.tool.parse_args(self.args)
13163 addon = addons.init_addon(self.addon_kls, options)
13164
13165 assert len(addon) == 4
13166 - assert set(x.name for x in addon) == {'linux/x86', 'linux/ppc'}
13167 - assert len(addon['x86']) == 1
13168 - assert [x.name for x in addon['~x86']] == ['linux/x86']
13169 - assert addon.get('foo', ['foo']) == ['foo']
13170 - assert addon.get('foo') is None
13171 + assert set(x.name for x in addon) == {"linux/x86", "linux/ppc"}
13172 + assert len(addon["x86"]) == 1
13173 + assert [x.name for x in addon["~x86"]] == ["linux/x86"]
13174 + assert addon.get("foo", ["foo"]) == ["foo"]
13175 + assert addon.get("foo") is None
13176
13177 def test_profile_collapsing(self):
13178 profiles = [
13179 - Profile('default-linux', 'x86'),
13180 - Profile('default-linux/x86', 'x86'),
13181 - Profile('default-linux/ppc', 'ppc'),
13182 + Profile("default-linux", "x86"),
13183 + Profile("default-linux/x86", "x86"),
13184 + Profile("default-linux/ppc", "ppc"),
13185 ]
13186 self.repo.create_profiles(profiles)
13187 - self.repo.arches.update(['x86', 'ppc'])
13188 + self.repo.arches.update(["x86", "ppc"])
13189 options, _ = self.tool.parse_args(self.args)
13190 addon = addons.init_addon(self.addon_kls, options)
13191
13192 # assert they're collapsed properly.
13193 - self.assertProfiles(addon, 'x86', 'default-linux', 'default-linux/x86')
13194 - assert len(addon.profile_evaluate_dict['x86']) == 1
13195 - assert len(addon.profile_evaluate_dict['x86'][0]) == 2
13196 - self.assertProfiles(addon, 'ppc', 'default-linux/ppc')
13197 + self.assertProfiles(addon, "x86", "default-linux", "default-linux/x86")
13198 + assert len(addon.profile_evaluate_dict["x86"]) == 1
13199 + assert len(addon.profile_evaluate_dict["x86"][0]) == 2
13200 + self.assertProfiles(addon, "ppc", "default-linux/ppc")
13201
13202 - groups = addon.identify_profiles(FakePkg("d-b/ab-1", data={'KEYWORDS': 'x86'}))
13203 + groups = addon.identify_profiles(FakePkg("d-b/ab-1", data={"KEYWORDS": "x86"}))
13204 assert len(groups) == 2, f"checking for profile collapsing: {groups!r}"
13205 assert len(groups[0]) == 2, f"checking for proper # of profiles: {groups[0]!r}"
13206 - assert sorted(x.name for x in groups[0]) == sorted(['default-linux', 'default-linux/x86'])
13207 + assert sorted(x.name for x in groups[0]) == sorted(["default-linux", "default-linux/x86"])
13208
13209 # check arch vs ~arch runs (i.e. arch KEYWORDS should also trigger ~arch runs)
13210 - groups = addon.identify_profiles(FakePkg("d-b/ab-1", data={'KEYWORDS': '~x86'}))
13211 + groups = addon.identify_profiles(FakePkg("d-b/ab-1", data={"KEYWORDS": "~x86"}))
13212 assert len(groups) == 1, f"checking for profile collapsing: {groups!r}"
13213 assert len(groups[0]) == 2, f"checking for proper # of profiles: {groups[0]!r}"
13214 - assert sorted(x.name for x in groups[0]) == sorted(['default-linux', 'default-linux/x86'])
13215 + assert sorted(x.name for x in groups[0]) == sorted(["default-linux", "default-linux/x86"])
13216
13217 # check keyword collapsing
13218 - groups = addon.identify_profiles(FakePkg("d-b/ab-2", data={'KEYWORDS': 'ppc'}))
13219 + groups = addon.identify_profiles(FakePkg("d-b/ab-2", data={"KEYWORDS": "ppc"}))
13220 assert len(groups) == 2, f"checking for profile collapsing: {groups!r}"
13221 assert len(groups[0]) == 1, f"checking for proper # of profiles: {groups[0]!r}"
13222 - assert groups[0][0].name == 'default-linux/ppc'
13223 + assert groups[0][0].name == "default-linux/ppc"
13224
13225 - groups = addon.identify_profiles(FakePkg("d-b/ab-2", data={'KEYWORDS': 'foon'}))
13226 + groups = addon.identify_profiles(FakePkg("d-b/ab-2", data={"KEYWORDS": "foon"}))
13227 assert len(groups) == 0, f"checking for profile collapsing: {groups!r}"
13228
13229
13230 try:
13231 import requests
13232 +
13233 net_skip = False
13234 except ImportError:
13235 net_skip = True
13236 @@ -356,33 +371,33 @@ except ImportError:
13237
13238 @pytest.mark.skipif(net_skip, reason="requests isn't installed")
13239 class TestNetAddon:
13240 -
13241 def test_failed_import(self, tool):
13242 - options, _ = tool.parse_args(['scan'])
13243 + options, _ = tool.parse_args(["scan"])
13244 addon = addons.NetAddon(options)
13245 - with patch('pkgcheck.addons.net.Session') as net:
13246 - net.side_effect = ImportError('import failed', name='foo')
13247 + with patch("pkgcheck.addons.net.Session") as net:
13248 + net.side_effect = ImportError("import failed", name="foo")
13249 with pytest.raises(ImportError):
13250 addon.session
13251 # failing to import requests specifically returns a nicer user exception
13252 - net.side_effect = ImportError('import failed', name='requests')
13253 - with pytest.raises(PkgcheckUserException, match='network checks require requests'):
13254 + net.side_effect = ImportError("import failed", name="requests")
13255 + with pytest.raises(PkgcheckUserException, match="network checks require requests"):
13256 addon.session
13257
13258 def test_custom_timeout(self, tool):
13259 - options, _ = tool.parse_args(['scan', '--timeout', '10'])
13260 + options, _ = tool.parse_args(["scan", "--timeout", "10"])
13261 addon = addons.NetAddon(options)
13262 assert isinstance(addon.session, requests.Session)
13263 assert addon.session.timeout == 10
13264 # a timeout of zero disables timeouts entirely
13265 - options, _ = tool.parse_args(['scan', '--timeout', '0'])
13266 + options, _ = tool.parse_args(["scan", "--timeout", "0"])
13267 addon = addons.NetAddon(options)
13268 assert addon.session.timeout is None
13269
13270 def test_args(self, tool):
13271 options, _ = tool.parse_args(
13272 - ['scan', '--timeout', '10', '--tasks', '50', '--user-agent', 'firefox'])
13273 + ["scan", "--timeout", "10", "--tasks", "50", "--user-agent", "firefox"]
13274 + )
13275 addon = addons.NetAddon(options)
13276 - with patch('pkgcheck.addons.net.Session') as net:
13277 + with patch("pkgcheck.addons.net.Session") as net:
13278 addon.session
13279 - net.assert_called_once_with(concurrent=50, timeout=10, user_agent='firefox')
13280 + net.assert_called_once_with(concurrent=50, timeout=10, user_agent="firefox")
13281
13282 diff --git a/tests/addons/test_eclass.py b/tests/addons/test_eclass.py
13283 index 4e1b26db..c6c045b2 100644
13284 --- a/tests/addons/test_eclass.py
13285 +++ b/tests/addons/test_eclass.py
13286 @@ -13,26 +13,29 @@ from snakeoil.osutils import pjoin
13287
13288
13289 class TestEclass:
13290 -
13291 @pytest.fixture(autouse=True)
13292 def _setup(self, tmp_path):
13293 - path = str(tmp_path / 'foo.eclass')
13294 - with open(path, 'w') as f:
13295 - f.write(textwrap.dedent("""\
13296 - # eclass header
13297 - foo () { :; }
13298 - """))
13299 - self.eclass1 = Eclass('foo', path)
13300 - path = str(tmp_path / 'bar.eclass')
13301 - self.eclass2 = Eclass('bar', path)
13302 + path = str(tmp_path / "foo.eclass")
13303 + with open(path, "w") as f:
13304 + f.write(
13305 + textwrap.dedent(
13306 + """\
13307 + # eclass header
13308 + foo () { :; }
13309 + """
13310 + )
13311 + )
13312 + self.eclass1 = Eclass("foo", path)
13313 + path = str(tmp_path / "bar.eclass")
13314 + self.eclass2 = Eclass("bar", path)
13315
13316 def test_lines(self):
13317 - assert self.eclass1.lines == ('# eclass header\n', 'foo () { :; }\n')
13318 + assert self.eclass1.lines == ("# eclass header\n", "foo () { :; }\n")
13319 assert self.eclass2.lines == ()
13320
13321 def test_lt(self):
13322 assert self.eclass2 < self.eclass1
13323 - assert self.eclass1 < 'zoo.eclass'
13324 + assert self.eclass1 < "zoo.eclass"
13325
13326 def test_hash(self):
13327 eclasses = {self.eclass1, self.eclass2}
13328 @@ -46,23 +49,22 @@ class TestEclass:
13329
13330
13331 class TestEclassAddon:
13332 -
13333 @pytest.fixture(autouse=True)
13334 def _setup(self, tool, tmp_path, repo):
13335 self.repo = repo
13336 self.cache_dir = str(tmp_path)
13337
13338 - self.eclass_dir = pjoin(repo.location, 'eclass')
13339 + self.eclass_dir = pjoin(repo.location, "eclass")
13340
13341 - args = ['scan', '--cache-dir', self.cache_dir, '--repo', repo.location]
13342 + args = ["scan", "--cache-dir", self.cache_dir, "--repo", repo.location]
13343 options, _ = tool.parse_args(args)
13344 self.addon = EclassAddon(options)
13345 self.cache_file = self.addon.cache_file(self.repo)
13346
13347 def test_cache_disabled(self, tool):
13348 - args = ['scan', '--cache', 'no', '--repo', self.repo.location]
13349 + args = ["scan", "--cache", "no", "--repo", self.repo.location]
13350 options, _ = tool.parse_args(args)
13351 - with pytest.raises(CacheDisabled, match='eclass cache support required'):
13352 + with pytest.raises(CacheDisabled, match="eclass cache support required"):
13353 init_addon(EclassAddon, options)
13354
13355 def test_no_eclasses(self):
13356 @@ -73,18 +75,18 @@ class TestEclassAddon:
13357
13358 def test_eclasses(self):
13359 # non-eclass files are ignored
13360 - for f in ('foo.eclass', 'bar'):
13361 + for f in ("foo.eclass", "bar"):
13362 touch(pjoin(self.eclass_dir, f))
13363 self.addon.update_cache()
13364 - assert list(self.addon.eclasses) == ['foo']
13365 + assert list(self.addon.eclasses) == ["foo"]
13366 assert not self.addon.deprecated
13367
13368 def test_cache_load(self):
13369 - touch(pjoin(self.eclass_dir, 'foo.eclass'))
13370 + touch(pjoin(self.eclass_dir, "foo.eclass"))
13371 self.addon.update_cache()
13372 - assert list(self.addon.eclasses) == ['foo']
13373 + assert list(self.addon.eclasses) == ["foo"]
13374
13375 - with patch('pkgcheck.addons.caches.CachedAddon.save_cache') as save_cache:
13376 + with patch("pkgcheck.addons.caches.CachedAddon.save_cache") as save_cache:
13377 self.addon.update_cache()
13378 # verify the cache was loaded and not regenerated
13379 save_cache.assert_not_called()
13380 @@ -93,9 +95,9 @@ class TestEclassAddon:
13381 save_cache.assert_called_once()
13382
13383 def test_outdated_cache(self):
13384 - touch(pjoin(self.eclass_dir, 'foo.eclass'))
13385 + touch(pjoin(self.eclass_dir, "foo.eclass"))
13386 self.addon.update_cache()
13387 - assert list(self.addon.eclasses) == ['foo']
13388 + assert list(self.addon.eclasses) == ["foo"]
13389
13390 # increment cache version and dump cache
13391 cache = self.addon.load_cache(self.cache_file)
13392 @@ -103,68 +105,72 @@ class TestEclassAddon:
13393 self.addon.save_cache(cache, self.cache_file)
13394
13395 # verify cache load causes regen
13396 - with patch('pkgcheck.addons.caches.CachedAddon.save_cache') as save_cache:
13397 + with patch("pkgcheck.addons.caches.CachedAddon.save_cache") as save_cache:
13398 self.addon.update_cache()
13399 save_cache.assert_called_once()
13400
13401 def test_eclass_changes(self):
13402 """The cache stores eclass mtimes and regenerates entries if they differ."""
13403 - eclass_path = pjoin(self.eclass_dir, 'foo.eclass')
13404 + eclass_path = pjoin(self.eclass_dir, "foo.eclass")
13405 touch(eclass_path)
13406 self.addon.update_cache()
13407 - assert list(self.addon.eclasses) == ['foo']
13408 + assert list(self.addon.eclasses) == ["foo"]
13409 sleep(1)
13410 - with open(eclass_path, 'w') as f:
13411 - f.write('# changed eclass\n')
13412 - with patch('pkgcheck.addons.caches.CachedAddon.save_cache') as save_cache:
13413 + with open(eclass_path, "w") as f:
13414 + f.write("# changed eclass\n")
13415 + with patch("pkgcheck.addons.caches.CachedAddon.save_cache") as save_cache:
13416 self.addon.update_cache()
13417 save_cache.assert_called_once()
13418
13419 def test_error_loading_cache(self):
13420 - touch(pjoin(self.eclass_dir, 'foo.eclass'))
13421 + touch(pjoin(self.eclass_dir, "foo.eclass"))
13422 self.addon.update_cache()
13423 - assert list(self.addon.eclasses) == ['foo']
13424 + assert list(self.addon.eclasses) == ["foo"]
13425
13426 - with patch('pkgcheck.addons.caches.pickle.load') as pickle_load:
13427 + with patch("pkgcheck.addons.caches.pickle.load") as pickle_load:
13428 # catastrophic errors are raised
13429 - pickle_load.side_effect = MemoryError('unpickling failed')
13430 - with pytest.raises(MemoryError, match='unpickling failed'):
13431 + pickle_load.side_effect = MemoryError("unpickling failed")
13432 + with pytest.raises(MemoryError, match="unpickling failed"):
13433 self.addon.update_cache()
13434
13435 # but various load failure exceptions cause cache regen
13436 - pickle_load.side_effect = Exception('unpickling failed')
13437 - with patch('pkgcheck.addons.caches.CachedAddon.save_cache') as save_cache:
13438 + pickle_load.side_effect = Exception("unpickling failed")
13439 + with patch("pkgcheck.addons.caches.CachedAddon.save_cache") as save_cache:
13440 self.addon.update_cache()
13441 save_cache.assert_called_once()
13442
13443 def test_error_dumping_cache(self):
13444 - touch(pjoin(self.eclass_dir, 'foo.eclass'))
13445 + touch(pjoin(self.eclass_dir, "foo.eclass"))
13446 # verify IO related dump failures are raised
13447 - with patch('pkgcheck.addons.caches.pickle.dump') as pickle_dump:
13448 - pickle_dump.side_effect = IOError('unpickling failed')
13449 - with pytest.raises(PkgcheckUserException, match='failed dumping eclass cache'):
13450 + with patch("pkgcheck.addons.caches.pickle.dump") as pickle_dump:
13451 + pickle_dump.side_effect = IOError("unpickling failed")
13452 + with pytest.raises(PkgcheckUserException, match="failed dumping eclass cache"):
13453 self.addon.update_cache()
13454
13455 def test_eclass_removal(self):
13456 - for name in ('foo', 'bar'):
13457 - touch(pjoin(self.eclass_dir, f'{name}.eclass'))
13458 + for name in ("foo", "bar"):
13459 + touch(pjoin(self.eclass_dir, f"{name}.eclass"))
13460 self.addon.update_cache()
13461 - assert sorted(self.addon.eclasses) == ['bar', 'foo']
13462 - os.unlink(pjoin(self.eclass_dir, 'bar.eclass'))
13463 + assert sorted(self.addon.eclasses) == ["bar", "foo"]
13464 + os.unlink(pjoin(self.eclass_dir, "bar.eclass"))
13465 self.addon.update_cache()
13466 - assert list(self.addon.eclasses) == ['foo']
13467 + assert list(self.addon.eclasses) == ["foo"]
13468
13469 def test_deprecated(self):
13470 - with open(pjoin(self.eclass_dir, 'foo.eclass'), 'w') as f:
13471 - f.write(textwrap.dedent("""
13472 - # @ECLASS: foo.eclass
13473 - # @MAINTAINER:
13474 - # Random Person <random.person@××××××.email>
13475 - # @AUTHOR:
13476 - # Random Person <random.person@××××××.email>
13477 - # @BLURB: Example deprecated eclass with replacement.
13478 - # @DEPRECATED: foo2
13479 - """))
13480 + with open(pjoin(self.eclass_dir, "foo.eclass"), "w") as f:
13481 + f.write(
13482 + textwrap.dedent(
13483 + """\
13484 + # @ECLASS: foo.eclass
13485 + # @MAINTAINER:
13486 + # Random Person <random.person@××××××.email>
13487 + # @AUTHOR:
13488 + # Random Person <random.person@××××××.email>
13489 + # @BLURB: Example deprecated eclass with replacement.
13490 + # @DEPRECATED: foo2
13491 + """
13492 + )
13493 + )
13494 self.addon.update_cache()
13495 - assert list(self.addon.eclasses) == ['foo']
13496 - assert self.addon.deprecated == {'foo': 'foo2'}
13497 + assert list(self.addon.eclasses) == ["foo"]
13498 + assert self.addon.deprecated == {"foo": "foo2"}
13499
13500 diff --git a/tests/addons/test_git.py b/tests/addons/test_git.py
13501 index cf39efc3..da88d501 100644
13502 --- a/tests/addons/test_git.py
13503 +++ b/tests/addons/test_git.py
13504 @@ -18,146 +18,147 @@ from snakeoil.process import CommandNotFound, find_binary
13505
13506 # skip testing module if git isn't installed
13507 try:
13508 - find_binary('git')
13509 + find_binary("git")
13510 except CommandNotFound:
13511 - pytestmark = pytest.mark.skipif(True, reason='git not installed')
13512 + pytestmark = pytest.mark.skipif(True, reason="git not installed")
13513
13514
13515 class TestPkgcheckScanCommitsParseArgs:
13516 -
13517 @pytest.fixture(autouse=True)
13518 def _setup(self, tool):
13519 self.tool = tool
13520 - self.args = ['scan']
13521 + self.args = ["scan"]
13522
13523 def test_commits_with_targets(self, capsys):
13524 with pytest.raises(SystemExit) as excinfo:
13525 - options, _func = self.tool.parse_args(self.args + ['--commits', 'ref', 'dev-util/foo'])
13526 + options, _func = self.tool.parse_args(self.args + ["--commits", "ref", "dev-util/foo"])
13527 assert excinfo.value.code == 2
13528 out, err = capsys.readouterr()
13529 - assert err.strip() == \
13530 - "pkgcheck scan: error: --commits is mutually exclusive with target: dev-util/foo"
13531 + assert (
13532 + err.strip()
13533 + == "pkgcheck scan: error: --commits is mutually exclusive with target: dev-util/foo"
13534 + )
13535
13536 def test_commits_git_unavailable(self, capsys):
13537 - with patch('subprocess.run') as git_diff:
13538 + with patch("subprocess.run") as git_diff:
13539 git_diff.side_effect = FileNotFoundError("no such file 'git'")
13540 with pytest.raises(SystemExit) as excinfo:
13541 - options, _func = self.tool.parse_args(self.args + ['--commits'])
13542 + options, _func = self.tool.parse_args(self.args + ["--commits"])
13543 assert excinfo.value.code == 2
13544 out, err = capsys.readouterr()
13545 assert err.strip() == "pkgcheck scan: error: no such file 'git'"
13546
13547 def test_git_error(self, capsys):
13548 - with patch('subprocess.run') as git_diff:
13549 - git_diff.side_effect = subprocess.CalledProcessError(1, 'git')
13550 - git_diff.side_effect.stderr = 'git error: foobar'
13551 + with patch("subprocess.run") as git_diff:
13552 + git_diff.side_effect = subprocess.CalledProcessError(1, "git")
13553 + git_diff.side_effect.stderr = "git error: foobar"
13554 with pytest.raises(SystemExit) as excinfo:
13555 - options, _func = self.tool.parse_args(self.args + ['--commits'])
13556 + options, _func = self.tool.parse_args(self.args + ["--commits"])
13557 assert excinfo.value.code == 2
13558 out, err = capsys.readouterr()
13559 - err = err.strip().split('\n')
13560 - assert err[-1].startswith('pkgcheck scan: error: failed running git: ')
13561 + err = err.strip().split("\n")
13562 + assert err[-1].startswith("pkgcheck scan: error: failed running git: ")
13563
13564 def test_commits_nonexistent(self, make_repo, make_git_repo, tmp_path):
13565 parent = make_repo()
13566 origin = make_git_repo(parent.location, commit=True)
13567 local = make_git_repo(str(tmp_path), commit=False)
13568 - local.run(['git', 'remote', 'add', 'origin', origin.path])
13569 - local.run(['git', 'pull', 'origin', 'main'])
13570 - local.run(['git', 'remote', 'set-head', 'origin', 'main'])
13571 + local.run(["git", "remote", "add", "origin", origin.path])
13572 + local.run(["git", "pull", "origin", "main"])
13573 + local.run(["git", "remote", "set-head", "origin", "main"])
13574
13575 with pytest.raises(SystemExit) as excinfo:
13576 - options, _func = self.tool.parse_args(self.args + ['-r', local.path, '--commits'])
13577 + options, _func = self.tool.parse_args(self.args + ["-r", local.path, "--commits"])
13578 assert excinfo.value.code == 0
13579
13580 def test_commits_existing(self, make_repo, make_git_repo, tmp_path):
13581 # create parent repo
13582 parent = make_repo()
13583 origin = make_git_repo(parent.location, commit=True)
13584 - parent.create_ebuild('cat/pkg-0')
13585 - origin.add_all('cat/pkg-0')
13586 + parent.create_ebuild("cat/pkg-0")
13587 + origin.add_all("cat/pkg-0")
13588
13589 # create child repo and pull from parent
13590 local = make_git_repo(str(tmp_path), commit=False)
13591 - local.run(['git', 'remote', 'add', 'origin', origin.path])
13592 - local.run(['git', 'pull', 'origin', 'main'])
13593 - local.run(['git', 'remote', 'set-head', 'origin', 'main'])
13594 + local.run(["git", "remote", "add", "origin", origin.path])
13595 + local.run(["git", "pull", "origin", "main"])
13596 + local.run(["git", "remote", "set-head", "origin", "main"])
13597 child = make_repo(local.path)
13598
13599 # create local commits on child repo
13600 - child.create_ebuild('cat/pkg-1')
13601 - local.add_all('cat/pkg-1')
13602 - child.create_ebuild('cat/pkg-2')
13603 - local.add_all('cat/pkg-2')
13604 -
13605 - options, _func = self.tool.parse_args(self.args + ['-r', local.path, '--commits'])
13606 - atom_restricts = [atom_cls('cat/pkg')]
13607 - assert list(options.restrictions) == \
13608 - [(base.package_scope, packages.OrRestriction(*atom_restricts))]
13609 + child.create_ebuild("cat/pkg-1")
13610 + local.add_all("cat/pkg-1")
13611 + child.create_ebuild("cat/pkg-2")
13612 + local.add_all("cat/pkg-2")
13613 +
13614 + options, _func = self.tool.parse_args(self.args + ["-r", local.path, "--commits"])
13615 + atom_restricts = [atom_cls("cat/pkg")]
13616 + assert list(options.restrictions) == [
13617 + (base.package_scope, packages.OrRestriction(*atom_restricts))
13618 + ]
13619
13620 def test_commits_eclasses(self, make_repo, make_git_repo, tmp_path):
13621 # create parent repo
13622 parent = make_repo()
13623 origin = make_git_repo(parent.location, commit=True)
13624 - parent.create_ebuild('cat/pkg-0')
13625 - origin.add_all('cat/pkg-0')
13626 + parent.create_ebuild("cat/pkg-0")
13627 + origin.add_all("cat/pkg-0")
13628
13629 # create child repo and pull from parent
13630 local = make_git_repo(str(tmp_path), commit=False)
13631 - local.run(['git', 'remote', 'add', 'origin', origin.path])
13632 - local.run(['git', 'pull', 'origin', 'main'])
13633 - local.run(['git', 'remote', 'set-head', 'origin', 'main'])
13634 + local.run(["git", "remote", "add", "origin", origin.path])
13635 + local.run(["git", "pull", "origin", "main"])
13636 + local.run(["git", "remote", "set-head", "origin", "main"])
13637 child = make_repo(local.path)
13638
13639 # create local commits on child repo
13640 - with open(pjoin(local.path, 'cat', 'pkg', 'metadata.xml'), 'w') as f:
13641 + with open(pjoin(local.path, "cat", "pkg", "metadata.xml"), "w") as f:
13642 f.write('<?xml version="1.0" encoding="UTF-8"?>\n')
13643 - local.add_all('cat/pkg: metadata')
13644 - child.create_ebuild('cat/pkg-1')
13645 - local.add_all('cat/pkg-1')
13646 - os.makedirs(pjoin(local.path, 'eclass'))
13647 - with open(pjoin(local.path, 'eclass', 'foo.eclass'), 'w') as f:
13648 - f.write('data\n')
13649 - local.add_all('foo.eclass')
13650 -
13651 - options, _func = self.tool.parse_args(self.args + ['-r', local.path, '--commits'])
13652 - atom_restricts = [atom_cls('cat/pkg')]
13653 + local.add_all("cat/pkg: metadata")
13654 + child.create_ebuild("cat/pkg-1")
13655 + local.add_all("cat/pkg-1")
13656 + os.makedirs(pjoin(local.path, "eclass"))
13657 + with open(pjoin(local.path, "eclass", "foo.eclass"), "w") as f:
13658 + f.write("data\n")
13659 + local.add_all("foo.eclass")
13660 +
13661 + options, _func = self.tool.parse_args(self.args + ["-r", local.path, "--commits"])
13662 + atom_restricts = [atom_cls("cat/pkg")]
13663 restrictions = list(options.restrictions)
13664 assert len(restrictions) == 2
13665 - assert restrictions[0] == \
13666 - (base.package_scope, packages.OrRestriction(*atom_restricts))
13667 + assert restrictions[0] == (base.package_scope, packages.OrRestriction(*atom_restricts))
13668 assert restrictions[1][0] == base.eclass_scope
13669 - assert restrictions[1][1] == frozenset(['foo'])
13670 + assert restrictions[1][1] == frozenset(["foo"])
13671
13672 def test_commits_profiles(self, make_repo, make_git_repo, tmp_path):
13673 # create parent repo
13674 parent = make_repo()
13675 origin = make_git_repo(parent.location, commit=True)
13676 - parent.create_ebuild('cat/pkg-0')
13677 - origin.add_all('cat/pkg-0')
13678 + parent.create_ebuild("cat/pkg-0")
13679 + origin.add_all("cat/pkg-0")
13680
13681 # create child repo and pull from parent
13682 local = make_git_repo(str(tmp_path), commit=False)
13683 - local.run(['git', 'remote', 'add', 'origin', origin.path])
13684 - local.run(['git', 'pull', 'origin', 'main'])
13685 - local.run(['git', 'remote', 'set-head', 'origin', 'main'])
13686 + local.run(["git", "remote", "add", "origin", origin.path])
13687 + local.run(["git", "pull", "origin", "main"])
13688 + local.run(["git", "remote", "set-head", "origin", "main"])
13689 child = make_repo(local.path)
13690
13691 # create local commits on child repo
13692 - with open(pjoin(local.path, 'cat', 'pkg', 'metadata.xml'), 'w') as f:
13693 + with open(pjoin(local.path, "cat", "pkg", "metadata.xml"), "w") as f:
13694 f.write('<?xml version="1.0" encoding="UTF-8"?>\n')
13695 - local.add_all('cat/pkg: metadata')
13696 - child.create_ebuild('cat/pkg-1')
13697 - local.add_all('cat/pkg-1')
13698 - with open(pjoin(local.path, 'profiles', 'package.mask'), 'w') as f:
13699 - f.write('data\n')
13700 - local.add_all('package.mask')
13701 -
13702 - options, _func = self.tool.parse_args(self.args + ['-r', local.path, '--commits'])
13703 - atom_restricts = [atom_cls('cat/pkg')]
13704 + local.add_all("cat/pkg: metadata")
13705 + child.create_ebuild("cat/pkg-1")
13706 + local.add_all("cat/pkg-1")
13707 + with open(pjoin(local.path, "profiles", "package.mask"), "w") as f:
13708 + f.write("data\n")
13709 + local.add_all("package.mask")
13710 +
13711 + options, _func = self.tool.parse_args(self.args + ["-r", local.path, "--commits"])
13712 + atom_restricts = [atom_cls("cat/pkg")]
13713 restrictions = [
13714 (base.package_scope, packages.OrRestriction(*atom_restricts)),
13715 - (base.profile_node_scope, frozenset(['profiles/package.mask'])),
13716 + (base.profile_node_scope, frozenset(["profiles/package.mask"])),
13717 ]
13718 assert restrictions == options.restrictions
13719
13720 @@ -165,33 +166,32 @@ class TestPkgcheckScanCommitsParseArgs:
13721 # create parent repo
13722 parent = make_repo()
13723 origin = make_git_repo(parent.location, commit=True)
13724 - parent.create_ebuild('cat/pkg-0')
13725 - origin.add_all('cat/pkg-0')
13726 + parent.create_ebuild("cat/pkg-0")
13727 + origin.add_all("cat/pkg-0")
13728
13729 # create child repo and pull from parent
13730 local = make_git_repo(str(tmp_path), commit=False)
13731 - local.run(['git', 'remote', 'add', 'origin', origin.path])
13732 - local.run(['git', 'pull', 'origin', 'main'])
13733 - local.run(['git', 'remote', 'set-head', 'origin', 'main'])
13734 + local.run(["git", "remote", "add", "origin", origin.path])
13735 + local.run(["git", "pull", "origin", "main"])
13736 + local.run(["git", "remote", "set-head", "origin", "main"])
13737
13738 # create local commits on child repo
13739 - os.makedirs(pjoin(local.path, 'foo'))
13740 - with open(pjoin(local.path, 'foo', 'bar.txt'), 'w') as f:
13741 - f.write('data\n')
13742 - os.makedirs(pjoin(local.path, 'eclass', 'tests'))
13743 - with open(pjoin(local.path, 'eclass', 'tests', 'test.sh'), 'w') as f:
13744 - f.write('data\n')
13745 - local.add_all('add files')
13746 + os.makedirs(pjoin(local.path, "foo"))
13747 + with open(pjoin(local.path, "foo", "bar.txt"), "w") as f:
13748 + f.write("data\n")
13749 + os.makedirs(pjoin(local.path, "eclass", "tests"))
13750 + with open(pjoin(local.path, "eclass", "tests", "test.sh"), "w") as f:
13751 + f.write("data\n")
13752 + local.add_all("add files")
13753
13754 with pytest.raises(SystemExit) as excinfo:
13755 - self.tool.parse_args(self.args + ['-r', local.path, '--commits'])
13756 + self.tool.parse_args(self.args + ["-r", local.path, "--commits"])
13757 assert excinfo.value.code == 0
13758
13759
13760 class TestGitStash:
13761 -
13762 def test_non_git_repo(self, tmp_path):
13763 - with pytest.raises(ValueError, match='not a git repo'):
13764 + with pytest.raises(ValueError, match="not a git repo"):
13765 with git.GitStash(str(tmp_path)):
13766 pass
13767
13768 @@ -200,7 +200,7 @@ class TestGitStash:
13769 pass
13770
13771 def test_untracked_file(self, git_repo):
13772 - path = pjoin(git_repo.path, 'foo')
13773 + path = pjoin(git_repo.path, "foo")
13774 touch(path)
13775 assert os.path.exists(path)
13776 with git.GitStash(git_repo.path):
13777 @@ -208,37 +208,36 @@ class TestGitStash:
13778 assert os.path.exists(path)
13779
13780 def test_failed_stashing(self, git_repo):
13781 - path = pjoin(git_repo.path, 'foo')
13782 + path = pjoin(git_repo.path, "foo")
13783 touch(path)
13784 assert os.path.exists(path)
13785 - with patch('subprocess.run') as run:
13786 - err = subprocess.CalledProcessError(1, 'git stash')
13787 - err.stderr = 'git stash failed'
13788 - run.side_effect = [Mock(stdout='foo'), err]
13789 - with pytest.raises(UserException, match='git failed stashing files'):
13790 + with patch("subprocess.run") as run:
13791 + err = subprocess.CalledProcessError(1, "git stash")
13792 + err.stderr = "git stash failed"
13793 + run.side_effect = [Mock(stdout="foo"), err]
13794 + with pytest.raises(UserException, match="git failed stashing files"):
13795 with git.GitStash(git_repo.path):
13796 pass
13797
13798 def test_failed_unstashing(self, git_repo):
13799 - path = pjoin(git_repo.path, 'foo')
13800 + path = pjoin(git_repo.path, "foo")
13801 touch(path)
13802 assert os.path.exists(path)
13803 - with pytest.raises(UserException, match='git failed applying stash'):
13804 + with pytest.raises(UserException, match="git failed applying stash"):
13805 with git.GitStash(git_repo.path):
13806 assert not os.path.exists(path)
13807 touch(path)
13808
13809
13810 class TestGitRepoCommits:
13811 -
13812 def test_non_git(self, tmp_path):
13813 - with pytest.raises(git.GitError, match='failed running git log'):
13814 - git.GitRepoCommits(str(tmp_path), 'HEAD')
13815 + with pytest.raises(git.GitError, match="failed running git log"):
13816 + git.GitRepoCommits(str(tmp_path), "HEAD")
13817
13818 def test_empty_repo(self, make_git_repo):
13819 git_repo = make_git_repo()
13820 - with pytest.raises(git.GitError, match='failed running git log'):
13821 - git.GitRepoCommits(git_repo.path, 'HEAD')
13822 + with pytest.raises(git.GitError, match="failed running git log"):
13823 + git.GitRepoCommits(git_repo.path, "HEAD")
13824
13825 def test_parsing(self, make_repo, make_git_repo):
13826 git_repo = make_git_repo()
13827 @@ -246,135 +245,134 @@ class TestGitRepoCommits:
13828 path = git_repo.path
13829
13830 # make an initial commit
13831 - git_repo.add('foo', msg='foo', create=True)
13832 - commits = list(git.GitRepoCommits(path, 'HEAD'))
13833 + git_repo.add("foo", msg="foo", create=True)
13834 + commits = list(git.GitRepoCommits(path, "HEAD"))
13835 assert len(commits) == 1
13836 - assert commits[0].message == ['foo']
13837 + assert commits[0].message == ["foo"]
13838 assert commits[0].pkgs == {}
13839 orig_commit = commits[0]
13840
13841 # make another commit
13842 - git_repo.add('bar', msg='bar', create=True)
13843 - commits = list(git.GitRepoCommits(path, 'HEAD'))
13844 + git_repo.add("bar", msg="bar", create=True)
13845 + commits = list(git.GitRepoCommits(path, "HEAD"))
13846 assert len(commits) == 2
13847 - assert commits[0].message == ['bar']
13848 + assert commits[0].message == ["bar"]
13849 assert commits[0].pkgs == {}
13850 assert commits[1] == orig_commit
13851 assert len(set(commits)) == 2
13852
13853 # make a pkg commit
13854 - repo.create_ebuild('cat/pkg-0')
13855 - git_repo.add_all('cat/pkg-0')
13856 - commits = list(git.GitRepoCommits(path, 'HEAD'))
13857 + repo.create_ebuild("cat/pkg-0")
13858 + git_repo.add_all("cat/pkg-0")
13859 + commits = list(git.GitRepoCommits(path, "HEAD"))
13860 assert len(commits) == 3
13861 - assert commits[0].message == ['cat/pkg-0']
13862 - assert commits[0].pkgs == {'A': {atom_cls('=cat/pkg-0')}}
13863 + assert commits[0].message == ["cat/pkg-0"]
13864 + assert commits[0].pkgs == {"A": {atom_cls("=cat/pkg-0")}}
13865
13866 # make a multiple pkg commit
13867 - repo.create_ebuild('newcat/newpkg-0')
13868 - repo.create_ebuild('newcat/newpkg-1')
13869 - git_repo.add_all('newcat: various updates')
13870 - commits = list(git.GitRepoCommits(path, 'HEAD'))
13871 + repo.create_ebuild("newcat/newpkg-0")
13872 + repo.create_ebuild("newcat/newpkg-1")
13873 + git_repo.add_all("newcat: various updates")
13874 + commits = list(git.GitRepoCommits(path, "HEAD"))
13875 assert len(commits) == 4
13876 - assert commits[0].message == ['newcat: various updates']
13877 + assert commits[0].message == ["newcat: various updates"]
13878 assert commits[0].pkgs == {
13879 - 'A': {atom_cls('=newcat/newpkg-0'), atom_cls('=newcat/newpkg-1')}}
13880 + "A": {atom_cls("=newcat/newpkg-0"), atom_cls("=newcat/newpkg-1")}
13881 + }
13882
13883 # remove the old version
13884 - git_repo.remove('newcat/newpkg/newpkg-0.ebuild')
13885 - commits = list(git.GitRepoCommits(path, 'HEAD'))
13886 + git_repo.remove("newcat/newpkg/newpkg-0.ebuild")
13887 + commits = list(git.GitRepoCommits(path, "HEAD"))
13888 assert len(commits) == 5
13889 - assert commits[0].pkgs == {'D': {atom_cls('=newcat/newpkg-0')}}
13890 + assert commits[0].pkgs == {"D": {atom_cls("=newcat/newpkg-0")}}
13891
13892 # rename the pkg
13893 - git_repo.move('newcat', 'newcat2')
13894 - commits = list(git.GitRepoCommits(path, 'HEAD'))
13895 + git_repo.move("newcat", "newcat2")
13896 + commits = list(git.GitRepoCommits(path, "HEAD"))
13897 assert len(commits) == 6
13898 assert commits[0].pkgs == {
13899 - 'A': {atom_cls('=newcat2/newpkg-1')},
13900 - 'D': {atom_cls('=newcat/newpkg-1')},
13901 + "A": {atom_cls("=newcat2/newpkg-1")},
13902 + "D": {atom_cls("=newcat/newpkg-1")},
13903 }
13904
13905 # malformed atoms don't show up as pkgs
13906 - repo.create_ebuild('cat/pkg-3')
13907 - git_repo.add_all('cat/pkg-3')
13908 - with patch('pkgcheck.addons.git.atom_cls') as fake_atom:
13909 - fake_atom.side_effect = MalformedAtom('bad atom')
13910 - commits = list(git.GitRepoCommits(path, 'HEAD'))
13911 + repo.create_ebuild("cat/pkg-3")
13912 + git_repo.add_all("cat/pkg-3")
13913 + with patch("pkgcheck.addons.git.atom_cls") as fake_atom:
13914 + fake_atom.side_effect = MalformedAtom("bad atom")
13915 + commits = list(git.GitRepoCommits(path, "HEAD"))
13916 assert len(commits) == 7
13917 assert commits[0].pkgs == {}
13918
13919
13920 class TestGitRepoPkgs:
13921 -
13922 def test_non_git(self, tmp_path):
13923 - with pytest.raises(git.GitError, match='failed running git log'):
13924 - git.GitRepoPkgs(str(tmp_path), 'HEAD')
13925 + with pytest.raises(git.GitError, match="failed running git log"):
13926 + git.GitRepoPkgs(str(tmp_path), "HEAD")
13927
13928 def test_empty_repo(self, make_git_repo):
13929 git_repo = make_git_repo()
13930 - with pytest.raises(git.GitError, match='failed running git log'):
13931 - git.GitRepoPkgs(git_repo.path, 'HEAD')
13932 + with pytest.raises(git.GitError, match="failed running git log"):
13933 + git.GitRepoPkgs(git_repo.path, "HEAD")
13934
13935 def test_parsing(self, repo, make_git_repo):
13936 git_repo = make_git_repo(repo.location, commit=True)
13937 path = git_repo.path
13938
13939 # empty repo contains no packages
13940 - pkgs = list(git.GitRepoPkgs(path, 'HEAD'))
13941 + pkgs = list(git.GitRepoPkgs(path, "HEAD"))
13942 assert len(pkgs) == 0
13943
13944 # create a pkg and commit it
13945 - repo.create_ebuild('cat/pkg-0')
13946 - git_repo.add_all('cat/pkg-0')
13947 - pkgs = list(git.GitRepoPkgs(path, 'HEAD'))
13948 + repo.create_ebuild("cat/pkg-0")
13949 + git_repo.add_all("cat/pkg-0")
13950 + pkgs = list(git.GitRepoPkgs(path, "HEAD"))
13951 assert len(pkgs) == 1
13952 pkg = pkgs[0]
13953 - assert pkg.atom == atom_cls('=cat/pkg-0')
13954 - assert pkg.status == 'A'
13955 + assert pkg.atom == atom_cls("=cat/pkg-0")
13956 + assert pkg.status == "A"
13957
13958 # add a new version and commit it
13959 - repo.create_ebuild('cat/pkg-1')
13960 - git_repo.add_all('cat/pkg-1')
13961 - pkgs = list(git.GitRepoPkgs(path, 'HEAD'))
13962 + repo.create_ebuild("cat/pkg-1")
13963 + git_repo.add_all("cat/pkg-1")
13964 + pkgs = list(git.GitRepoPkgs(path, "HEAD"))
13965 assert len(pkgs) == 2
13966 pkg = pkgs[0]
13967 - assert pkg.atom == atom_cls('=cat/pkg-1')
13968 - assert pkg.status == 'A'
13969 + assert pkg.atom == atom_cls("=cat/pkg-1")
13970 + assert pkg.status == "A"
13971
13972 # remove the old version
13973 - git_repo.remove('cat/pkg/pkg-0.ebuild')
13974 - pkgs = list(git.GitRepoPkgs(path, 'HEAD'))
13975 + git_repo.remove("cat/pkg/pkg-0.ebuild")
13976 + pkgs = list(git.GitRepoPkgs(path, "HEAD"))
13977 assert len(pkgs) == 3
13978 pkg = pkgs[0]
13979 - assert pkg.atom == atom_cls('=cat/pkg-0')
13980 - assert pkg.status == 'D'
13981 + assert pkg.atom == atom_cls("=cat/pkg-0")
13982 + assert pkg.status == "D"
13983
13984 # rename the pkg
13985 - git_repo.move('cat', 'cat2')
13986 - pkgs = list(git.GitRepoPkgs(path, 'HEAD'))
13987 + git_repo.move("cat", "cat2")
13988 + pkgs = list(git.GitRepoPkgs(path, "HEAD"))
13989 assert len(pkgs) == 5
13990 new_pkg, old_pkg = pkgs[:2]
13991 - assert old_pkg.atom == atom_cls('=cat/pkg-1')
13992 - assert old_pkg.status == 'D'
13993 - assert new_pkg.atom == atom_cls('=cat2/pkg-1')
13994 - assert new_pkg.status == 'A'
13995 + assert old_pkg.atom == atom_cls("=cat/pkg-1")
13996 + assert old_pkg.status == "D"
13997 + assert new_pkg.atom == atom_cls("=cat2/pkg-1")
13998 + assert new_pkg.status == "A"
13999
14000 # malformed atoms don't show up as pkgs
14001 - with patch('pkgcheck.addons.git.atom_cls') as fake_atom:
14002 - fake_atom.side_effect = MalformedAtom('bad atom')
14003 - pkgs = list(git.GitRepoPkgs(path, 'HEAD'))
14004 + with patch("pkgcheck.addons.git.atom_cls") as fake_atom:
14005 + fake_atom.side_effect = MalformedAtom("bad atom")
14006 + pkgs = list(git.GitRepoPkgs(path, "HEAD"))
14007 assert len(pkgs) == 0
14008
14009
14010 class TestGitChangedRepo:
14011 -
14012 def test_pkg_history(self, repo, make_git_repo):
14013 git_repo = make_git_repo(repo.location, commit=True)
14014 pkg_history = partial(git.GitAddon.pkg_history, repo)
14015
14016 # initialize the dict cache
14017 - data = pkg_history('HEAD')
14018 + data = pkg_history("HEAD")
14019 assert data == {}
14020
14021 # overlay repo objects on top of the dict cache
14022 @@ -388,10 +386,10 @@ class TestGitChangedRepo:
14023 assert len(removed_repo) == 0
14024
14025 # create a pkg and commit it
14026 - repo.create_ebuild('cat/pkg-0')
14027 - git_repo.add_all('cat/pkg-0')
14028 + repo.create_ebuild("cat/pkg-0")
14029 + git_repo.add_all("cat/pkg-0")
14030 # update the dict cache
14031 - data = pkg_history('HEAD', data=data)
14032 + data = pkg_history("HEAD", data=data)
14033 commit = git_repo.HEAD
14034
14035 # overlay repo objects on top of the dict cache
14036 @@ -405,10 +403,10 @@ class TestGitChangedRepo:
14037 assert len(removed_repo) == 0
14038
14039 # add a new version and commit it
14040 - repo.create_ebuild('cat/pkg-1')
14041 - git_repo.add_all('cat/pkg-1')
14042 + repo.create_ebuild("cat/pkg-1")
14043 + git_repo.add_all("cat/pkg-1")
14044 # update the dict cache
14045 - data = pkg_history(f'{commit}..HEAD', data=data)
14046 + data = pkg_history(f"{commit}..HEAD", data=data)
14047 commit = git_repo.HEAD
14048
14049 # overlay repo objects on top of the dict cache
14050 @@ -422,9 +420,9 @@ class TestGitChangedRepo:
14051 assert len(removed_repo) == 0
14052
14053 # remove the old version
14054 - git_repo.remove('cat/pkg/pkg-0.ebuild')
14055 + git_repo.remove("cat/pkg/pkg-0.ebuild")
14056 # update the dict cache
14057 - data = pkg_history(f'{commit}..HEAD', data=data)
14058 + data = pkg_history(f"{commit}..HEAD", data=data)
14059 commit = git_repo.HEAD
14060
14061 # overlay repo objects on top of the dict cache
14062 @@ -438,9 +436,9 @@ class TestGitChangedRepo:
14063 assert len(removed_repo) == 1
14064
14065 # rename the pkg
14066 - git_repo.move('cat', 'cat2')
14067 + git_repo.move("cat", "cat2")
14068 # update the dict cache
14069 - data = pkg_history(f'{commit}..HEAD', data=data)
14070 + data = pkg_history(f"{commit}..HEAD", data=data)
14071 commit = git_repo.HEAD
14072
14073 # overlay repo objects on top of the dict cache
14074 @@ -455,51 +453,50 @@ class TestGitChangedRepo:
14075
14076
14077 class TestGitAddon:
14078 -
14079 @pytest.fixture(autouse=True)
14080 def _setup(self, tool, tmp_path, repo):
14081 self.repo = repo
14082 self.cache_dir = str(tmp_path)
14083
14084 - args = ['scan', '--cache-dir', self.cache_dir, '--repo', self.repo.location]
14085 + args = ["scan", "--cache-dir", self.cache_dir, "--repo", self.repo.location]
14086 options, _ = tool.parse_args(args)
14087 self.addon = git.GitAddon(options)
14088 self.cache_file = self.addon.cache_file(self.repo)
14089
14090 def test_git_unavailable(self, tool):
14091 - args = ['scan', '--cache-dir', self.cache_dir, '--repo', self.repo.location]
14092 + args = ["scan", "--cache-dir", self.cache_dir, "--repo", self.repo.location]
14093 options, _ = tool.parse_args(args)
14094 - with patch('pkgcheck.addons.git.find_binary') as find_binary:
14095 - find_binary.side_effect = CommandNotFound('git not found')
14096 - with pytest.raises(CacheDisabled, match='git cache support required'):
14097 + with patch("pkgcheck.addons.git.find_binary") as find_binary:
14098 + find_binary.side_effect = CommandNotFound("git not found")
14099 + with pytest.raises(CacheDisabled, match="git cache support required"):
14100 git.GitAddon(options)
14101
14102 def test_no_gitignore(self):
14103 assert self.addon._gitignore is None
14104 - assert not self.addon.gitignored('')
14105 + assert not self.addon.gitignored("")
14106
14107 def test_failed_gitignore(self):
14108 - with open(pjoin(self.repo.location, '.gitignore'), 'w') as f:
14109 - f.write('.*.swp\n')
14110 - with patch('pkgcheck.addons.git.open') as fake_open:
14111 - fake_open.side_effect = IOError('file reading failure')
14112 + with open(pjoin(self.repo.location, ".gitignore"), "w") as f:
14113 + f.write(".*.swp\n")
14114 + with patch("pkgcheck.addons.git.open") as fake_open:
14115 + fake_open.side_effect = IOError("file reading failure")
14116 assert self.addon._gitignore is None
14117
14118 def test_gitignore(self):
14119 - for path in ('.gitignore', '.git/info/exclude'):
14120 + for path in (".gitignore", ".git/info/exclude"):
14121 file_path = pjoin(self.repo.location, path)
14122 os.makedirs(os.path.dirname(file_path), exist_ok=True)
14123 - with open(file_path, 'w') as f:
14124 - f.write('.*.swp\n')
14125 - assert self.addon.gitignored('.foo.swp')
14126 - assert self.addon.gitignored(pjoin(self.repo.location, '.foo.swp'))
14127 - assert not self.addon.gitignored('foo.swp')
14128 - assert not self.addon.gitignored(pjoin(self.repo.location, 'foo.swp'))
14129 + with open(file_path, "w") as f:
14130 + f.write(".*.swp\n")
14131 + assert self.addon.gitignored(".foo.swp")
14132 + assert self.addon.gitignored(pjoin(self.repo.location, ".foo.swp"))
14133 + assert not self.addon.gitignored("foo.swp")
14134 + assert not self.addon.gitignored(pjoin(self.repo.location, "foo.swp"))
14135
14136 def test_cache_disabled(self, tool):
14137 - args = ['scan', '--cache', 'no', '--repo', self.repo.location]
14138 + args = ["scan", "--cache", "no", "--repo", self.repo.location]
14139 options, _ = tool.parse_args(args)
14140 - with pytest.raises(CacheDisabled, match='git cache support required'):
14141 + with pytest.raises(CacheDisabled, match="git cache support required"):
14142 init_addon(git.GitAddon, options)
14143
14144 def test_non_git_repo(self):
14145 @@ -516,26 +513,26 @@ class TestGitAddon:
14146 """Cache file isn't updated if no relevant commits exist."""
14147 parent_repo = make_git_repo(commit=True)
14148 child_repo = make_git_repo(self.repo.location, commit=False)
14149 - child_repo.run(['git', 'remote', 'add', 'origin', parent_repo.path])
14150 - child_repo.run(['git', 'pull', 'origin', 'main'])
14151 - child_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
14152 + child_repo.run(["git", "remote", "add", "origin", parent_repo.path])
14153 + child_repo.run(["git", "pull", "origin", "main"])
14154 + child_repo.run(["git", "remote", "set-head", "origin", "main"])
14155 self.addon.update_cache()
14156 assert not os.path.exists(self.cache_file)
14157
14158 def test_cache_creation_and_load(self, repo, make_git_repo):
14159 parent_repo = make_git_repo(repo.location, commit=True)
14160 # create a pkg and commit it
14161 - repo.create_ebuild('cat/pkg-0')
14162 - parent_repo.add_all('cat/pkg-0')
14163 + repo.create_ebuild("cat/pkg-0")
14164 + parent_repo.add_all("cat/pkg-0")
14165
14166 child_repo = make_git_repo(self.repo.location, commit=False)
14167 - child_repo.run(['git', 'remote', 'add', 'origin', parent_repo.path])
14168 - child_repo.run(['git', 'pull', 'origin', 'main'])
14169 - child_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
14170 + child_repo.run(["git", "remote", "add", "origin", parent_repo.path])
14171 + child_repo.run(["git", "pull", "origin", "main"])
14172 + child_repo.run(["git", "remote", "set-head", "origin", "main"])
14173 self.addon.update_cache()
14174 - assert atom_cls('=cat/pkg-0') in self.addon.cached_repo(git.GitAddedRepo)
14175 + assert atom_cls("=cat/pkg-0") in self.addon.cached_repo(git.GitAddedRepo)
14176
14177 - with patch('pkgcheck.addons.caches.CachedAddon.save_cache') as save_cache:
14178 + with patch("pkgcheck.addons.caches.CachedAddon.save_cache") as save_cache:
14179 # verify the cache was loaded and not regenerated
14180 self.addon.update_cache()
14181 save_cache.assert_not_called()
14182 @@ -544,28 +541,28 @@ class TestGitAddon:
14183 save_cache.assert_called_once()
14184
14185 # create another pkg and commit it to the parent repo
14186 - repo.create_ebuild('cat/pkg-1')
14187 - parent_repo.add_all('cat/pkg-1')
14188 + repo.create_ebuild("cat/pkg-1")
14189 + parent_repo.add_all("cat/pkg-1")
14190 self.addon.update_cache()
14191 - assert atom_cls('=cat/pkg-1') not in self.addon.cached_repo(git.GitAddedRepo)
14192 + assert atom_cls("=cat/pkg-1") not in self.addon.cached_repo(git.GitAddedRepo)
14193
14194 # new package is seen after child repo pulls changes
14195 - child_repo.run(['git', 'pull', 'origin', 'main'])
14196 + child_repo.run(["git", "pull", "origin", "main"])
14197 self.addon.update_cache()
14198 - assert atom_cls('=cat/pkg-1') in self.addon.cached_repo(git.GitAddedRepo)
14199 + assert atom_cls("=cat/pkg-1") in self.addon.cached_repo(git.GitAddedRepo)
14200
14201 def test_outdated_cache(self, repo, make_git_repo):
14202 parent_repo = make_git_repo(repo.location, commit=True)
14203 # create a pkg and commit it
14204 - repo.create_ebuild('cat/pkg-0')
14205 - parent_repo.add_all('cat/pkg-0')
14206 + repo.create_ebuild("cat/pkg-0")
14207 + parent_repo.add_all("cat/pkg-0")
14208
14209 child_repo = make_git_repo(self.repo.location, commit=False)
14210 - child_repo.run(['git', 'remote', 'add', 'origin', parent_repo.path])
14211 - child_repo.run(['git', 'pull', 'origin', 'main'])
14212 - child_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
14213 + child_repo.run(["git", "remote", "add", "origin", parent_repo.path])
14214 + child_repo.run(["git", "pull", "origin", "main"])
14215 + child_repo.run(["git", "remote", "set-head", "origin", "main"])
14216 self.addon.update_cache()
14217 - assert atom_cls('=cat/pkg-0') in self.addon.cached_repo(git.GitAddedRepo)
14218 + assert atom_cls("=cat/pkg-0") in self.addon.cached_repo(git.GitAddedRepo)
14219
14220 # increment cache version and dump cache
14221 cache = self.addon.load_cache(self.cache_file)
14222 @@ -573,79 +570,79 @@ class TestGitAddon:
14223 self.addon.save_cache(cache, self.cache_file)
14224
14225 # verify cache load causes regen
14226 - with patch('pkgcheck.addons.caches.CachedAddon.save_cache') as save_cache:
14227 + with patch("pkgcheck.addons.caches.CachedAddon.save_cache") as save_cache:
14228 self.addon.update_cache()
14229 save_cache.assert_called_once()
14230
14231 def test_error_creating_cache(self, repo, make_git_repo):
14232 parent_repo = make_git_repo(repo.location, commit=True)
14233 # create a pkg and commit it
14234 - repo.create_ebuild('cat/pkg-0')
14235 - parent_repo.add_all('cat/pkg-0')
14236 + repo.create_ebuild("cat/pkg-0")
14237 + parent_repo.add_all("cat/pkg-0")
14238
14239 child_repo = make_git_repo(self.repo.location, commit=False)
14240 - child_repo.run(['git', 'remote', 'add', 'origin', parent_repo.path])
14241 - child_repo.run(['git', 'pull', 'origin', 'main'])
14242 - child_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
14243 + child_repo.run(["git", "remote", "add", "origin", parent_repo.path])
14244 + child_repo.run(["git", "pull", "origin", "main"])
14245 + child_repo.run(["git", "remote", "set-head", "origin", "main"])
14246
14247 - with patch('pkgcheck.addons.git.GitLog') as git_log:
14248 - git_log.side_effect = git.GitError('git parsing failed')
14249 - with pytest.raises(PkgcheckUserException, match='git parsing failed'):
14250 + with patch("pkgcheck.addons.git.GitLog") as git_log:
14251 + git_log.side_effect = git.GitError("git parsing failed")
14252 + with pytest.raises(PkgcheckUserException, match="git parsing failed"):
14253 self.addon.update_cache()
14254
14255 def test_error_loading_cache(self, repo, make_git_repo):
14256 parent_repo = make_git_repo(repo.location, commit=True)
14257 # create a pkg and commit it
14258 - repo.create_ebuild('cat/pkg-0')
14259 - parent_repo.add_all('cat/pkg-0')
14260 + repo.create_ebuild("cat/pkg-0")
14261 + parent_repo.add_all("cat/pkg-0")
14262
14263 child_repo = make_git_repo(self.repo.location, commit=False)
14264 - child_repo.run(['git', 'remote', 'add', 'origin', parent_repo.path])
14265 - child_repo.run(['git', 'pull', 'origin', 'main'])
14266 - child_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
14267 + child_repo.run(["git", "remote", "add", "origin", parent_repo.path])
14268 + child_repo.run(["git", "pull", "origin", "main"])
14269 + child_repo.run(["git", "remote", "set-head", "origin", "main"])
14270 self.addon.update_cache()
14271 - assert atom_cls('=cat/pkg-0') in self.addon.cached_repo(git.GitAddedRepo)
14272 + assert atom_cls("=cat/pkg-0") in self.addon.cached_repo(git.GitAddedRepo)
14273
14274 - with patch('pkgcheck.addons.caches.pickle.load') as pickle_load:
14275 + with patch("pkgcheck.addons.caches.pickle.load") as pickle_load:
14276 # catastrophic errors are raised
14277 - pickle_load.side_effect = MemoryError('unpickling failed')
14278 - with pytest.raises(MemoryError, match='unpickling failed'):
14279 + pickle_load.side_effect = MemoryError("unpickling failed")
14280 + with pytest.raises(MemoryError, match="unpickling failed"):
14281 self.addon.update_cache()
14282
14283 # but various load failure exceptions cause cache regen
14284 - pickle_load.side_effect = Exception('unpickling failed')
14285 - with patch('pkgcheck.addons.caches.CachedAddon.save_cache') as save_cache:
14286 + pickle_load.side_effect = Exception("unpickling failed")
14287 + with patch("pkgcheck.addons.caches.CachedAddon.save_cache") as save_cache:
14288 self.addon.update_cache()
14289 save_cache.assert_called_once()
14290
14291 def test_error_dumping_cache(self, repo, make_git_repo):
14292 parent_repo = make_git_repo(repo.location, commit=True)
14293 # create a pkg and commit it
14294 - repo.create_ebuild('cat/pkg-0')
14295 - parent_repo.add_all('cat/pkg-0')
14296 + repo.create_ebuild("cat/pkg-0")
14297 + parent_repo.add_all("cat/pkg-0")
14298
14299 child_repo = make_git_repo(self.repo.location, commit=False)
14300 - child_repo.run(['git', 'remote', 'add', 'origin', parent_repo.path])
14301 - child_repo.run(['git', 'pull', 'origin', 'main'])
14302 - child_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
14303 + child_repo.run(["git", "remote", "add", "origin", parent_repo.path])
14304 + child_repo.run(["git", "pull", "origin", "main"])
14305 + child_repo.run(["git", "remote", "set-head", "origin", "main"])
14306
14307 # verify IO related dump failures are raised
14308 - with patch('pkgcheck.addons.caches.pickle.dump') as pickle_dump:
14309 - pickle_dump.side_effect = IOError('unpickling failed')
14310 - with pytest.raises(PkgcheckUserException, match='failed dumping git cache'):
14311 + with patch("pkgcheck.addons.caches.pickle.dump") as pickle_dump:
14312 + pickle_dump.side_effect = IOError("unpickling failed")
14313 + with pytest.raises(PkgcheckUserException, match="failed dumping git cache"):
14314 self.addon.update_cache()
14315
14316 def test_commits_repo(self, repo, make_repo, make_git_repo):
14317 parent_repo = repo
14318 parent_git_repo = make_git_repo(repo.location, commit=True)
14319 # create a pkg and commit it
14320 - parent_repo.create_ebuild('cat/pkg-0')
14321 - parent_git_repo.add_all('cat/pkg-0')
14322 + parent_repo.create_ebuild("cat/pkg-0")
14323 + parent_git_repo.add_all("cat/pkg-0")
14324
14325 child_git_repo = make_git_repo(self.repo.location, commit=False)
14326 - child_git_repo.run(['git', 'remote', 'add', 'origin', parent_git_repo.path])
14327 - child_git_repo.run(['git', 'pull', 'origin', 'main'])
14328 - child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
14329 + child_git_repo.run(["git", "remote", "add", "origin", parent_git_repo.path])
14330 + child_git_repo.run(["git", "pull", "origin", "main"])
14331 + child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
14332 self.addon.update_cache()
14333
14334 # no new pkg commits exist locally in the child repo
14335 @@ -654,37 +651,37 @@ class TestGitAddon:
14336
14337 # create a pkg in the child repo and commit it
14338 child_repo = make_repo(child_git_repo.path)
14339 - child_repo.create_ebuild('cat/pkg-1')
14340 - child_git_repo.add_all('cat/pkg-1')
14341 + child_repo.create_ebuild("cat/pkg-1")
14342 + child_git_repo.add_all("cat/pkg-1")
14343
14344 # pkg commits now exist locally in the child repo
14345 commits_repo = self.addon.commits_repo(git.GitChangedRepo)
14346 assert len(commits_repo) == 1
14347 - assert atom_cls('=cat/pkg-1') in commits_repo
14348 + assert atom_cls("=cat/pkg-1") in commits_repo
14349
14350 # failing to parse git log returns error with git cache enabled
14351 - with patch('pkgcheck.addons.git.GitLog') as git_log:
14352 - git_log.side_effect = git.GitError('git parsing failed')
14353 - with pytest.raises(PkgcheckUserException, match='git parsing failed'):
14354 + with patch("pkgcheck.addons.git.GitLog") as git_log:
14355 + git_log.side_effect = git.GitError("git parsing failed")
14356 + with pytest.raises(PkgcheckUserException, match="git parsing failed"):
14357 self.addon.commits_repo(git.GitChangedRepo)
14358
14359 # failing to parse git log yields an empty repo with git cache disabled
14360 - with patch('pkgcheck.addons.git.GitLog') as git_log:
14361 - git_log.side_effect = git.GitError('git parsing failed')
14362 - with pytest.raises(PkgcheckUserException, match='git parsing failed'):
14363 + with patch("pkgcheck.addons.git.GitLog") as git_log:
14364 + git_log.side_effect = git.GitError("git parsing failed")
14365 + with pytest.raises(PkgcheckUserException, match="git parsing failed"):
14366 self.addon.commits_repo(git.GitChangedRepo)
14367
14368 def test_commits(self, repo, make_repo, make_git_repo):
14369 parent_repo = repo
14370 parent_git_repo = make_git_repo(repo.location, commit=True)
14371 # create a pkg and commit it
14372 - parent_repo.create_ebuild('cat/pkg-0')
14373 - parent_git_repo.add_all('cat/pkg-0')
14374 + parent_repo.create_ebuild("cat/pkg-0")
14375 + parent_git_repo.add_all("cat/pkg-0")
14376
14377 child_git_repo = make_git_repo(self.repo.location, commit=False)
14378 - child_git_repo.run(['git', 'remote', 'add', 'origin', parent_git_repo.path])
14379 - child_git_repo.run(['git', 'pull', 'origin', 'main'])
14380 - child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
14381 + child_git_repo.run(["git", "remote", "add", "origin", parent_git_repo.path])
14382 + child_git_repo.run(["git", "pull", "origin", "main"])
14383 + child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
14384 self.addon.update_cache()
14385
14386 # no new commits exist locally in the child repo
14387 @@ -692,22 +689,22 @@ class TestGitAddon:
14388
14389 # create a pkg in the child repo and commit it
14390 child_repo = make_repo(child_git_repo.path)
14391 - child_repo.create_ebuild('cat/pkg-1')
14392 - child_git_repo.add_all('cat/pkg-1')
14393 + child_repo.create_ebuild("cat/pkg-1")
14394 + child_git_repo.add_all("cat/pkg-1")
14395
14396 # commits now exist locally in the child repo
14397 commits = list(self.addon.commits())
14398 assert len(commits) == 1
14399 - assert commits[0].message == ['cat/pkg-1']
14400 + assert commits[0].message == ["cat/pkg-1"]
14401
14402 # failing to parse git log returns error with git cache enabled
14403 - with patch('pkgcheck.addons.git.GitLog') as git_log:
14404 - git_log.side_effect = git.GitError('git parsing failed')
14405 - with pytest.raises(PkgcheckUserException, match='git parsing failed'):
14406 + with patch("pkgcheck.addons.git.GitLog") as git_log:
14407 + git_log.side_effect = git.GitError("git parsing failed")
14408 + with pytest.raises(PkgcheckUserException, match="git parsing failed"):
14409 list(self.addon.commits())
14410
14411 # failing to parse git log raises exception
14412 - with patch('pkgcheck.addons.git.GitLog') as git_log:
14413 - git_log.side_effect = git.GitError('git parsing failed')
14414 - with pytest.raises(PkgcheckUserException, match='git parsing failed'):
14415 + with patch("pkgcheck.addons.git.GitLog") as git_log:
14416 + git_log.side_effect = git.GitError("git parsing failed")
14417 + with pytest.raises(PkgcheckUserException, match="git parsing failed"):
14418 self.addon.commits()
14419
14420 diff --git a/tests/checks/test_acct.py b/tests/checks/test_acct.py
14421 index 57273705..4c8202dc 100644
14422 --- a/tests/checks/test_acct.py
14423 +++ b/tests/checks/test_acct.py
14424 @@ -12,81 +12,86 @@ class TestAcctUser(misc.ReportTestCase):
14425
14426 check_kls = acct.AcctCheck
14427
14428 - kind = 'user'
14429 + kind = "user"
14430
14431 @pytest.fixture(autouse=True)
14432 def _setup(self, tmp_path):
14433 - (metadata := tmp_path / 'metadata').mkdir()
14434 - (metadata / 'qa-policy.conf').write_text(textwrap.dedent("""\
14435 - [user-group-ids]
14436 - uid-range = 0-749,65534
14437 - gid-range = 0-749,65533,65534
14438 - """))
14439 + (metadata := tmp_path / "metadata").mkdir()
14440 + (metadata / "qa-policy.conf").write_text(
14441 + textwrap.dedent(
14442 + """\
14443 + [user-group-ids]
14444 + uid-range = 0-749,65534
14445 + gid-range = 0-749,65533,65534
14446 + """
14447 + )
14448 + )
14449 self.location = str(tmp_path)
14450
14451 def mk_check(self, pkgs):
14452 - repo = FakeRepo(pkgs=pkgs, repo_id='test', location=self.location)
14453 + repo = FakeRepo(pkgs=pkgs, repo_id="test", location=self.location)
14454 check = self.check_kls(arghparse.Namespace(target_repo=repo, gentoo_repo=True))
14455 return check
14456
14457 def mk_pkg(self, name, identifier, version=1, ebuild=None):
14458 if ebuild is None:
14459 - ebuild = textwrap.dedent(f'''\
14460 - inherit acct-{self.kind}
14461 - ACCT_{self.kind.upper()}_ID="{identifier}"
14462 - ''')
14463 - return misc.FakePkg(f'acct-{self.kind}/{name}-{version}', ebuild=ebuild)
14464 + ebuild = textwrap.dedent(
14465 + f"""\
14466 + inherit acct-{self.kind}
14467 + ACCT_{self.kind.upper()}_ID="{identifier}"
14468 + """
14469 + )
14470 + return misc.FakePkg(f"acct-{self.kind}/{name}-{version}", ebuild=ebuild)
14471
14472 def test_unmatching_pkgs(self):
14473 - pkgs = (misc.FakePkg('dev-util/foo-0'),
14474 - misc.FakePkg('dev-util/bar-1'))
14475 + pkgs = (misc.FakePkg("dev-util/foo-0"), misc.FakePkg("dev-util/bar-1"))
14476 check = self.mk_check(pkgs)
14477 self.assertNoReport(check, pkgs)
14478
14479 def test_correct_ids(self):
14480 - pkgs = (self.mk_pkg('foo', 100),
14481 - self.mk_pkg('bar', 200),
14482 - self.mk_pkg('test', 749),
14483 - self.mk_pkg('nobody', 65534))
14484 + pkgs = (
14485 + self.mk_pkg("foo", 100),
14486 + self.mk_pkg("bar", 200),
14487 + self.mk_pkg("test", 749),
14488 + self.mk_pkg("nobody", 65534),
14489 + )
14490 check = self.mk_check(pkgs)
14491 self.assertNoReport(check, pkgs)
14492
14493 def test_missing_ids(self):
14494 - pkg = self.mk_pkg('foo', None, ebuild='inherit acct-user\n')
14495 + pkg = self.mk_pkg("foo", None, ebuild="inherit acct-user\n")
14496 check = self.mk_check((pkg,))
14497 r = self.assertReport(check, pkg)
14498 assert isinstance(r, acct.MissingAccountIdentifier)
14499 - assert r.var == f'ACCT_{self.kind.upper()}_ID'
14500 + assert r.var == f"ACCT_{self.kind.upper()}_ID"
14501 assert r.var in str(r)
14502
14503 def test_conflicting_ids(self):
14504 - pkgs = (self.mk_pkg('foo', 100),
14505 - self.mk_pkg('bar', 100))
14506 + pkgs = (self.mk_pkg("foo", 100), self.mk_pkg("bar", 100))
14507 check = self.mk_check(pkgs)
14508 r = self.assertReport(check, pkgs)
14509 assert isinstance(r, acct.ConflictingAccountIdentifiers)
14510 assert r.kind == self.kind
14511 assert r.identifier == 100
14512 - assert r.pkgs == (f'acct-{self.kind}/bar-1', f'acct-{self.kind}/foo-1')
14513 - assert f'conflicting {self.kind} id 100 usage: ' in str(r)
14514 + assert r.pkgs == (f"acct-{self.kind}/bar-1", f"acct-{self.kind}/foo-1")
14515 + assert f"conflicting {self.kind} id 100 usage: " in str(r)
14516
14517 def test_self_nonconflicting_ids(self):
14518 - pkgs = (self.mk_pkg('foo', 100),
14519 - self.mk_pkg('foo', 100, version=2))
14520 + pkgs = (self.mk_pkg("foo", 100), self.mk_pkg("foo", 100, version=2))
14521 check = self.mk_check(pkgs)
14522 self.assertNoReport(check, pkgs)
14523
14524 def test_dynamic_assignment_range(self):
14525 - pkg = self.mk_pkg('foo', 750)
14526 + pkg = self.mk_pkg("foo", 750)
14527 check = self.mk_check((pkg,))
14528 r = self.assertReport(check, pkg)
14529 assert isinstance(r, acct.OutsideRangeAccountIdentifier)
14530 assert r.kind == self.kind
14531 assert r.identifier == 750
14532 - assert f'{self.kind} id 750 outside permitted' in str(r)
14533 + assert f"{self.kind} id 750 outside permitted" in str(r)
14534
14535 def test_sysadmin_assignment_range(self):
14536 - pkg = self.mk_pkg('foo', 1000)
14537 + pkg = self.mk_pkg("foo", 1000)
14538 check = self.mk_check((pkg,))
14539 r = self.assertReport(check, pkg)
14540 assert isinstance(r, acct.OutsideRangeAccountIdentifier)
14541 @@ -94,7 +99,7 @@ class TestAcctUser(misc.ReportTestCase):
14542 assert r.identifier == 1000
14543
14544 def test_high_reserved(self):
14545 - pkg = self.mk_pkg('foo', 65535)
14546 + pkg = self.mk_pkg("foo", 65535)
14547 check = self.mk_check((pkg,))
14548 r = self.assertReport(check, pkg)
14549 assert isinstance(r, acct.OutsideRangeAccountIdentifier)
14550 @@ -103,7 +108,7 @@ class TestAcctUser(misc.ReportTestCase):
14551
14552 def test_nogroup(self):
14553 """Test that 65533 is not accepted for UID."""
14554 - pkg = self.mk_pkg('nogroup', 65533)
14555 + pkg = self.mk_pkg("nogroup", 65533)
14556 check = self.mk_check((pkg,))
14557 r = self.assertReport(check, pkg)
14558 assert isinstance(r, acct.OutsideRangeAccountIdentifier)
14559 @@ -111,28 +116,27 @@ class TestAcctUser(misc.ReportTestCase):
14560 assert r.identifier == 65533
14561
14562 def test_nobody(self):
14563 - pkg = self.mk_pkg('nobody', 65534)
14564 + pkg = self.mk_pkg("nobody", 65534)
14565 check = self.mk_check((pkg,))
14566 self.assertNoReport(check, pkg)
14567
14568
14569 class TestAcctGroup(TestAcctUser):
14570 - kind = 'group'
14571 + kind = "group"
14572
14573 def test_nogroup(self):
14574 """Test that 65533 is accepted for GID."""
14575 - pkg = self.mk_pkg('nogroup', 65533)
14576 + pkg = self.mk_pkg("nogroup", 65533)
14577 check = self.mk_check((pkg,))
14578 self.assertNoReport(check, pkg)
14579
14580
14581 class TestQaPolicyValidation(misc.ReportTestCase):
14582 -
14583 def mk_check(self, tmp_path, content):
14584 if content:
14585 - (metadata := tmp_path / 'metadata').mkdir()
14586 - (metadata / 'qa-policy.conf').write_text(textwrap.dedent(content))
14587 - repo = FakeRepo(repo_id='test', location=str(tmp_path))
14588 + (metadata := tmp_path / "metadata").mkdir()
14589 + (metadata / "qa-policy.conf").write_text(textwrap.dedent(content))
14590 + repo = FakeRepo(repo_id="test", location=str(tmp_path))
14591 return acct.AcctCheck(arghparse.Namespace(target_repo=repo, gentoo_repo=True))
14592
14593 def test_missing_qa_policy(self, tmp_path):
14594 @@ -141,27 +145,39 @@ class TestQaPolicyValidation(misc.ReportTestCase):
14595
14596 def test_missing_section(self, tmp_path):
14597 with pytest.raises(SkipCheck, match="missing section user-group-ids"):
14598 - self.mk_check(tmp_path, '''\
14599 + self.mk_check(
14600 + tmp_path,
14601 + """\
14602 [random]
14603 x = 5
14604 - ''')
14605 + """,
14606 + )
14607
14608 def test_missing_config(self, tmp_path):
14609 with pytest.raises(SkipCheck, match="missing value for gid-range"):
14610 - self.mk_check(tmp_path, '''\
14611 + self.mk_check(
14612 + tmp_path,
14613 + """\
14614 [user-group-ids]
14615 uid-range = 0-749
14616 - ''')
14617 -
14618 - @pytest.mark.parametrize('value', (
14619 - 'start-end',
14620 - '0-749-1500',
14621 - ',150',
14622 - ))
14623 + """,
14624 + )
14625 +
14626 + @pytest.mark.parametrize(
14627 + "value",
14628 + (
14629 + "start-end",
14630 + "0-749-1500",
14631 + ",150",
14632 + ),
14633 + )
14634 def test_invalid_value(self, tmp_path, value):
14635 with pytest.raises(SkipCheck, match="invalid value for uid-range"):
14636 - self.mk_check(tmp_path, f'''\
14637 + self.mk_check(
14638 + tmp_path,
14639 + f"""\
14640 [user-group-ids]
14641 uid-range = {value}
14642 gid-range = 0-749
14643 - ''')
14644 + """,
14645 + )
14646
14647 diff --git a/tests/checks/test_all.py b/tests/checks/test_all.py
14648 index 2ca8a114..a153a802 100644
14649 --- a/tests/checks/test_all.py
14650 +++ b/tests/checks/test_all.py
14651 @@ -22,96 +22,91 @@ class TestMetadataError:
14652
14653 def test_reregister_error(self):
14654 with pytest.raises(ValueError, match="metadata attribute 'eapi' already registered"):
14655 +
14656 class InvalidEapi2(results.MetadataError, results.VersionResult):
14657 - attr = 'eapi'
14658 + attr = "eapi"
14659
14660 def test_register_missing_attr(self):
14661 with pytest.raises(ValueError, match="class missing metadata attributes"):
14662 +
14663 class InvalidAttr(results.MetadataError, results.VersionResult):
14664 pass
14665
14666
14667 class TestGentooRepoCheck:
14668 -
14669 def test_non_gentoo_repo(self, tool, make_repo):
14670 self.repo = make_repo()
14671 - args = ['scan', '--repo', self.repo.location]
14672 + args = ["scan", "--repo", self.repo.location]
14673 options, _ = tool.parse_args(args)
14674 - with pytest.raises(checks_mod.SkipCheck, match='not running against gentoo repo'):
14675 + with pytest.raises(checks_mod.SkipCheck, match="not running against gentoo repo"):
14676 init_check(checks_mod.GentooRepoCheck, options)
14677
14678 def test_gentoo_repo(self, tool, make_repo):
14679 - self.repo = make_repo(repo_id='gentoo')
14680 - args = ['scan', '--repo', self.repo.location]
14681 + self.repo = make_repo(repo_id="gentoo")
14682 + args = ["scan", "--repo", self.repo.location]
14683 options, _ = tool.parse_args(args)
14684 assert init_check(checks_mod.GentooRepoCheck, options)
14685
14686
14687 class TestOverlayCheck:
14688 -
14689 def test_non_overlay_repo(self, tool, testconfig):
14690 tool.parser.set_defaults(config_path=testconfig)
14691 - options, _ = tool.parse_args(['scan', '--repo', 'gentoo'])
14692 - with pytest.raises(checks_mod.SkipCheck, match='not running against overlay'):
14693 + options, _ = tool.parse_args(["scan", "--repo", "gentoo"])
14694 + with pytest.raises(checks_mod.SkipCheck, match="not running against overlay"):
14695 init_check(checks_mod.OverlayRepoCheck, options)
14696
14697 def test_overlay_repo(self, tool, testconfig):
14698 tool.parser.set_defaults(config_path=testconfig)
14699 - options, _ = tool.parse_args(['scan', '--repo', 'overlay'])
14700 + options, _ = tool.parse_args(["scan", "--repo", "overlay"])
14701 assert init_check(checks_mod.OverlayRepoCheck, options)
14702
14703
14704 class TestGitCommitsCheck:
14705 -
14706 @pytest.fixture(autouse=True)
14707 def _setup(self, tool, make_repo, make_git_repo):
14708 # initialize parent repo
14709 self.parent_git_repo = make_git_repo()
14710 - self.parent_repo = make_repo(
14711 - self.parent_git_repo.path, repo_id='gentoo', arches=['amd64'])
14712 - self.parent_git_repo.add_all('initial commit')
14713 + self.parent_repo = make_repo(self.parent_git_repo.path, repo_id="gentoo", arches=["amd64"])
14714 + self.parent_git_repo.add_all("initial commit")
14715
14716 # initialize child repo
14717 self.child_git_repo = make_git_repo()
14718 - self.child_git_repo.run(['git', 'remote', 'add', 'origin', self.parent_git_repo.path])
14719 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
14720 - self.child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
14721 + self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
14722 + self.child_git_repo.run(["git", "pull", "origin", "main"])
14723 + self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
14724 self.child_repo = make_repo(self.child_git_repo.path)
14725
14726 def test_no_commits_option(self, tool, make_git_repo):
14727 - options, _ = tool.parse_args(
14728 - ['scan', '--repo', self.child_repo.location])
14729 - with pytest.raises(checks_mod.SkipCheck, match='not scanning against git commits'):
14730 + options, _ = tool.parse_args(["scan", "--repo", self.child_repo.location])
14731 + with pytest.raises(checks_mod.SkipCheck, match="not scanning against git commits"):
14732 init_check(checks_mod.GitCommitsCheck, options)
14733
14734 def test_commits_option(self, tool, make_repo):
14735 - self.child_repo.create_ebuild('cat/pkg-1')
14736 - self.child_git_repo.add_all('cat/pkg-1')
14737 - options, _ = tool.parse_args(
14738 - ['scan', '--repo', self.child_repo.location, '--commits'])
14739 + self.child_repo.create_ebuild("cat/pkg-1")
14740 + self.child_git_repo.add_all("cat/pkg-1")
14741 + options, _ = tool.parse_args(["scan", "--repo", self.child_repo.location, "--commits"])
14742 assert init_check(checks_mod.GitCommitsCheck, options)
14743
14744 def test_no_local_commits(self, tool):
14745 with pytest.raises(SystemExit) as excinfo:
14746 - tool.parse_args(['scan', '--repo', self.child_repo.location, '--commits'])
14747 + tool.parse_args(["scan", "--repo", self.child_repo.location, "--commits"])
14748 assert excinfo.value.code == 0
14749
14750 # parent repo has new commits
14751 - self.parent_repo.create_ebuild('cat/pkg-1')
14752 - self.parent_git_repo.add_all('cat/pkg-1')
14753 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
14754 + self.parent_repo.create_ebuild("cat/pkg-1")
14755 + self.parent_git_repo.add_all("cat/pkg-1")
14756 + self.child_git_repo.run(["git", "pull", "origin", "main"])
14757 with pytest.raises(SystemExit) as excinfo:
14758 - tool.parse_args(['scan', '--repo', self.child_repo.location, '--commits'])
14759 + tool.parse_args(["scan", "--repo", self.child_repo.location, "--commits"])
14760 assert excinfo.value.code == 0
14761
14762
14763 class TestNetworkCheck:
14764 -
14765 def test_network_disabled(self, tool):
14766 - options, _ = tool.parse_args(['scan'])
14767 - with pytest.raises(checks_mod.SkipCheck, match='network checks not enabled'):
14768 + options, _ = tool.parse_args(["scan"])
14769 + with pytest.raises(checks_mod.SkipCheck, match="network checks not enabled"):
14770 init_check(checks_mod.NetworkCheck, options)
14771
14772 def test_network_enabled(self, tool):
14773 - options, _ = tool.parse_args(['scan', '--net'])
14774 + options, _ = tool.parse_args(["scan", "--net"])
14775 assert init_check(checks_mod.NetworkCheck, options)
14776
14777 diff --git a/tests/checks/test_cleanup.py b/tests/checks/test_cleanup.py
14778 index 4e1aa2b3..7ca2f3b6 100644
14779 --- a/tests/checks/test_cleanup.py
14780 +++ b/tests/checks/test_cleanup.py
14781 @@ -3,10 +3,12 @@ from snakeoil.cli import arghparse
14782
14783 from .. import misc
14784
14785 +
14786 def mk_pkg(ver, keywords=("x86", "amd64"), slot="0", **kwds):
14787 return misc.FakePkg(
14788 - f"dev-util/diffball-{ver}",
14789 - data={**kwds, "KEYWORDS": ' '.join(keywords), "SLOT": slot})
14790 + f"dev-util/diffball-{ver}", data={**kwds, "KEYWORDS": " ".join(keywords), "SLOT": slot}
14791 + )
14792 +
14793
14794 class TestRedundantVersion(misc.ReportTestCase):
14795
14796 @@ -17,50 +19,43 @@ class TestRedundantVersion(misc.ReportTestCase):
14797 self.assertNoReport(self.check, [mk_pkg("0.7.1")])
14798
14799 def test_live_version(self):
14800 - self.assertNoReport(
14801 - self.check, [mk_pkg('0.7'), mk_pkg('0.9', PROPERTIES='live')])
14802 - self.assertNoReport(
14803 - self.check, [mk_pkg('0.7'), mk_pkg('9999', PROPERTIES='live')])
14804 + self.assertNoReport(self.check, [mk_pkg("0.7"), mk_pkg("0.9", PROPERTIES="live")])
14805 + self.assertNoReport(self.check, [mk_pkg("0.7"), mk_pkg("9999", PROPERTIES="live")])
14806
14807 def test_no_keywords(self):
14808 - self.assertNoReport(
14809 - self.check, [mk_pkg('0.7'), mk_pkg('0.9', keywords=())])
14810 + self.assertNoReport(self.check, [mk_pkg("0.7"), mk_pkg("0.9", keywords=())])
14811
14812 def test_disabled_keywords(self):
14813 - self.assertNoReport(
14814 - self.check, [mk_pkg('0.7'), mk_pkg('0.9', keywords=('-x86', '-amd64'))])
14815 + self.assertNoReport(self.check, [mk_pkg("0.7"), mk_pkg("0.9", keywords=("-x86", "-amd64"))])
14816
14817 def test_single_redundant(self):
14818 - r = self.assertReport(
14819 - self.check, [mk_pkg(x) for x in ("0.7", "0.8")])
14820 + r = self.assertReport(self.check, [mk_pkg(x) for x in ("0.7", "0.8")])
14821 assert isinstance(r, cleanup.RedundantVersion)
14822 assert r.later_versions == ("0.8",)
14823 - assert 'slot(0) keywords are overshadowed by version: 0.8' in str(r)
14824 + assert "slot(0) keywords are overshadowed by version: 0.8" in str(r)
14825
14826 def test_multiple_redundants(self):
14827 - reports = self.assertReports(
14828 - self.check, [mk_pkg(x) for x in ("0.7", "0.8", "0.9")])
14829 - assert (
14830 - [list(x.later_versions) for x in reports] ==
14831 - [["0.8", "0.9"], ["0.9"]])
14832 + reports = self.assertReports(self.check, [mk_pkg(x) for x in ("0.7", "0.8", "0.9")])
14833 + assert [list(x.later_versions) for x in reports] == [["0.8", "0.9"], ["0.9"]]
14834 for x in reports:
14835 assert isinstance(x, cleanup.RedundantVersion)
14836
14837 def test_multiple_slots(self):
14838 - l = [mk_pkg("0.7", slot="1"), mk_pkg("0.8"),
14839 - mk_pkg("0.9", slot="1")]
14840 + l = [mk_pkg("0.7", slot="1"), mk_pkg("0.8"), mk_pkg("0.9", slot="1")]
14841 r = self.assertReport(self.check, l)
14842 assert r.later_versions == ("0.9",)
14843 assert isinstance(r, cleanup.RedundantVersion)
14844 - assert 'slot(1) keywords are overshadowed by version: 0.9' in str(r)
14845 + assert "slot(1) keywords are overshadowed by version: 0.9" in str(r)
14846
14847 l.append(mk_pkg("0.10", keywords=("x86", "amd64", "~sparc")))
14848 reports = self.assertReports(self.check, l)
14849 - assert ([list(x.later_versions) for x in reports] == [["0.9"], ["0.10"]])
14850 + assert [list(x.later_versions) for x in reports] == [["0.9"], ["0.10"]]
14851
14852 def test_multiple_keywords(self):
14853 - l = [mk_pkg("0.1", keywords=("~x86", "~amd64")),
14854 - mk_pkg("0.2", keywords=("x86", "~amd64", "~sparc"))]
14855 + l = [
14856 + mk_pkg("0.1", keywords=("~x86", "~amd64")),
14857 + mk_pkg("0.2", keywords=("x86", "~amd64", "~sparc")),
14858 + ]
14859 r = self.assertReport(self.check, l)
14860 assert r.later_versions == ("0.2",)
14861
14862 @@ -71,32 +66,33 @@ class TestRedundantVersionByStable(misc.ReportTestCase):
14863 check = cleanup.RedundantVersionCheck(arghparse.Namespace(stable_only=True), profile_addon={})
14864
14865 def test_only_unstable(self):
14866 - l = [mk_pkg("0.1", keywords=("~x86", "~amd64")),
14867 - mk_pkg("0.2", keywords=("~x86", "~amd64"))]
14868 + l = [mk_pkg("0.1", keywords=("~x86", "~amd64")), mk_pkg("0.2", keywords=("~x86", "~amd64"))]
14869 self.assertNoReport(self.check, l)
14870
14871 def test_only_stable(self):
14872 - l = [mk_pkg("0.1", keywords=("x86", "amd64")),
14873 - mk_pkg("0.2", keywords=("x86", "amd64"))]
14874 + l = [mk_pkg("0.1", keywords=("x86", "amd64")), mk_pkg("0.2", keywords=("x86", "amd64"))]
14875 r = self.assertReport(self.check, l)
14876 assert r.later_versions == ("0.2",)
14877
14878 def test_mixed_stable(self):
14879 - l = [mk_pkg("0.1", keywords=("x86", "amd64", "~sparc")),
14880 - mk_pkg("0.2", keywords=("x86", "amd64", "~sparc"))]
14881 + l = [
14882 + mk_pkg("0.1", keywords=("x86", "amd64", "~sparc")),
14883 + mk_pkg("0.2", keywords=("x86", "amd64", "~sparc")),
14884 + ]
14885 r = self.assertReport(self.check, l)
14886 assert r.later_versions == ("0.2",)
14887
14888 def test_mixed_history(self):
14889 - l = [mk_pkg("0.1", keywords=("amd64")),
14890 - mk_pkg("0.2", keywords=("~x86", "~amd64")),
14891 - mk_pkg("0.3", keywords=("x86", "amd64")),
14892 - mk_pkg("0.4", keywords=("~x86", "~amd64")),
14893 - mk_pkg("0.5", keywords=("~x86", "~amd64"))]
14894 + l = [
14895 + mk_pkg("0.1", keywords=("amd64")),
14896 + mk_pkg("0.2", keywords=("~x86", "~amd64")),
14897 + mk_pkg("0.3", keywords=("x86", "amd64")),
14898 + mk_pkg("0.4", keywords=("~x86", "~amd64")),
14899 + mk_pkg("0.5", keywords=("~x86", "~amd64")),
14900 + ]
14901 r = self.assertReport(self.check, l)
14902 assert r.later_versions == ("0.3", "0.4", "0.5")
14903
14904 def test_no_redundant(self):
14905 - l = [mk_pkg("0.1", keywords=("x86", "amd64")),
14906 - mk_pkg("0.2", keywords=("x86", "~amd64"))]
14907 + l = [mk_pkg("0.1", keywords=("x86", "amd64")), mk_pkg("0.2", keywords=("x86", "~amd64"))]
14908 self.assertNoReport(self.check, l)
14909
14910 diff --git a/tests/checks/test_codingstyle.py b/tests/checks/test_codingstyle.py
14911 index 1c6a0075..528faa8b 100644
14912 --- a/tests/checks/test_codingstyle.py
14913 +++ b/tests/checks/test_codingstyle.py
14914 @@ -30,8 +30,12 @@ class TestInsintoCheck(misc.ReportTestCase):
14915 fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
14916
14917 bad = (
14918 - "/etc/env.d", "/etc/conf.d", "/etc/init.d", "/etc/pam.d",
14919 - "/usr/share/applications", "/usr/share/applications",
14920 + "/etc/env.d",
14921 + "/etc/conf.d",
14922 + "/etc/init.d",
14923 + "/etc/pam.d",
14924 + "/usr/share/applications",
14925 + "/usr/share/applications",
14926 "//usr/share//applications",
14927 )
14928 check = self.check_kls(None)
14929 @@ -42,11 +46,12 @@ class TestInsintoCheck(misc.ReportTestCase):
14930
14931 def test_docinto(self):
14932 check = self.check_kls(None)
14933 - for path in ('${PF}', '${P}', '${PF}/examples'):
14934 + for path in ("${PF}", "${P}", "${PF}/examples"):
14935 for eapi_str, eapi in EAPI.known_eapis.items():
14936 - fake_src = [f'\tinsinto /usr/share/doc/{path}\n']
14937 + fake_src = [f"\tinsinto /usr/share/doc/{path}\n"]
14938 fake_pkg = misc.FakePkg(
14939 - "dev-util/diff-0.5", data={'EAPI': eapi_str}, lines=fake_src)
14940 + "dev-util/diff-0.5", data={"EAPI": eapi_str}, lines=fake_src
14941 + )
14942 if eapi.options.dodoc_allow_recursive:
14943 r = self.assertReport(check, fake_pkg)
14944 assert path in str(r)
14945 @@ -68,10 +73,10 @@ class TestAbsoluteSymlink(misc.ReportTestCase):
14946
14947 absolute_prefixed = []
14948 for path_var in codingstyle.PATH_VARIABLES:
14949 - src, dest = ('/bin/blah', '/bin/bash')
14950 + src, dest = ("/bin/blah", "/bin/bash")
14951 absolute_prefixed.append((f'"${{{path_var}}}"{src}', dest))
14952 absolute_prefixed.append((f'"${{{path_var}%/}}"{src}', dest))
14953 - src, dest = ('/bin/blah baz', '/bin/blahbaz')
14954 + src, dest = ("/bin/blah baz", "/bin/blahbaz")
14955 absolute_prefixed.append((f'"${{{path_var}}}{src}"', dest))
14956 absolute_prefixed.append((f'"${{{path_var}%/}}{src}"', dest))
14957
14958 @@ -99,7 +104,7 @@ class TestAbsoluteSymlink(misc.ReportTestCase):
14959
14960 assert len(reports) == len(absolute) + len(absolute_prefixed)
14961 for r, (src, dest) in zip(reports, absolute + absolute_prefixed):
14962 - assert f'dosym {src}' in str(r)
14963 + assert f"dosym {src}" in str(r)
14964
14965
14966 class TestPathVariablesCheck(misc.ReportTestCase):
14967 @@ -107,7 +112,7 @@ class TestPathVariablesCheck(misc.ReportTestCase):
14968 check_kls = codingstyle.PathVariablesCheck
14969 check = check_kls(None)
14970
14971 - def _found(self, cls, suffix=''):
14972 + def _found(self, cls, suffix=""):
14973 # check single and multiple matches across all specified variables
14974 for lines in (1, 2):
14975 for path_var in codingstyle.PATH_VARIABLES:
14976 @@ -117,17 +122,18 @@ class TestPathVariablesCheck(misc.ReportTestCase):
14977 fake_src.extend(["}\n", "\n"])
14978 for eapi_str, eapi in EAPI.known_eapis.items():
14979 fake_pkg = misc.FakePkg(
14980 - "dev-util/diff-0.5", data={'EAPI': eapi_str}, lines=fake_src)
14981 + "dev-util/diff-0.5", data={"EAPI": eapi_str}, lines=fake_src
14982 + )
14983 if eapi.options.trailing_slash:
14984 self.assertNoReport(self.check, fake_pkg)
14985 else:
14986 r = self.assertReport(self.check, fake_pkg)
14987 assert isinstance(r, cls)
14988 - assert r.match == f'${{{path_var}{suffix}}}'
14989 + assert r.match == f"${{{path_var}{suffix}}}"
14990 assert r.lines == tuple(x + 2 for x in range(lines))
14991 assert path_var in str(r)
14992
14993 - def _unfound(self, cls, suffix=''):
14994 + def _unfound(self, cls, suffix=""):
14995 for path_var in codingstyle.PATH_VARIABLES:
14996 fake_src = [
14997 "src_install() {\n",
14998 @@ -138,7 +144,8 @@ class TestPathVariablesCheck(misc.ReportTestCase):
14999 ]
15000 for eapi_str, eapi in EAPI.known_eapis.items():
15001 fake_pkg = misc.FakePkg(
15002 - "dev-util/diffball-0.5", data={'EAPI': eapi_str}, lines=fake_src)
15003 + "dev-util/diffball-0.5", data={"EAPI": eapi_str}, lines=fake_src
15004 + )
15005 self.assertNoReport(self.check, fake_pkg)
15006
15007 def test_missing_found(self):
15008 @@ -148,14 +155,14 @@ class TestPathVariablesCheck(misc.ReportTestCase):
15009 self._unfound(codingstyle.MissingSlash)
15010
15011 def test_unnecessary_found(self):
15012 - self._found(codingstyle.UnnecessarySlashStrip, suffix='%/')
15013 + self._found(codingstyle.UnnecessarySlashStrip, suffix="%/")
15014
15015 def test_unnecessary_unfound(self):
15016 - self._unfound(codingstyle.UnnecessarySlashStrip, suffix='%/')
15017 + self._unfound(codingstyle.UnnecessarySlashStrip, suffix="%/")
15018
15019 def test_double_prefix_found(self):
15020 fake_src = [
15021 - 'src_install() {\n',
15022 + "src_install() {\n",
15023 ' cp foo.py "${ED}$(python_get_sitedir)"\n',
15024 # test non-match
15025 ' cp foo.py "${D%/}$(python_get_sitedir)"\n',
15026 @@ -174,17 +181,17 @@ class TestPathVariablesCheck(misc.ReportTestCase):
15027 ' dodir /foo/bar "${EPREFIX}"/bar/baz\n',
15028 # commented lines aren't flagged for double prefix usage
15029 '# exeinto "${EPREFIX}/foo/bar"\n',
15030 - '}\n'
15031 + "}\n",
15032 ]
15033 fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
15034 r = self.assertReports(self.check, fake_pkg)
15035 cls = codingstyle.DoublePrefixInPath
15036 expected_results = (
15037 - ('${ED}$(python_get_sitedir)', 2),
15038 - ('${ED%/}$(python_get_sitedir)', 4),
15039 - ('${ED}/$(python_get_sitedir)', 5),
15040 - ('${ED}${PYTHON_SITEDIR}', 6),
15041 - ('${ED}${EPREFIX}', 7),
15042 + ("${ED}$(python_get_sitedir)", 2),
15043 + ("${ED%/}$(python_get_sitedir)", 4),
15044 + ("${ED}/$(python_get_sitedir)", 5),
15045 + ("${ED}${PYTHON_SITEDIR}", 6),
15046 + ("${ED}${EPREFIX}", 7),
15047 ('insinto "$(python_get_sitedir)', 8),
15048 ('exeinto "${EPREFIX}', 9),
15049 ('fowners foo:bar "$(python_get_sitedir)', 10),
15050 @@ -199,16 +206,16 @@ class TestPathVariablesCheck(misc.ReportTestCase):
15051
15052 def test_double_prefix_unfound(self):
15053 fake_src = [
15054 - 'src_install() {\n',
15055 + "src_install() {\n",
15056 ' cp foo.py "${D}$(python_get_sitedir)"\n',
15057 ' cp foo "${D}${EPREFIX}/foo/bar"\n',
15058 - ' insinto /foo/bar\n',
15059 + " insinto /foo/bar\n",
15060 # potential false positives: stripping prefix
15061 ' insinto "${MYVAR#${EPREFIX}}"\n',
15062 ' insinto "${MYVAR#"${EPREFIX}"}"\n',
15063 # combined commands
15064 ' dodir /etc/env.d && echo "FOO=${EPREFIX}"\n',
15065 - '}\n'
15066 + "}\n",
15067 ]
15068 fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
15069 self.assertNoReport(self.check, fake_pkg)
15070 @@ -219,99 +226,76 @@ class TestObsoleteUri(misc.ReportTestCase):
15071 check_kls = codingstyle.ObsoleteUriCheck
15072
15073 def test_github_archive_uri(self):
15074 - uri = 'https://github.com/foo/bar/archive/${PV}.tar.gz'
15075 - fake_src = [
15076 - f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n'
15077 - ]
15078 + uri = "https://github.com/foo/bar/archive/${PV}.tar.gz"
15079 + fake_src = [f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n']
15080 fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
15081 self.assertNoReport(self.check_kls(None), fake_pkg)
15082
15083 def test_commented_github_tarball_uri(self):
15084 - uri = 'https://github.com/foo/bar/tarball/${PV}'
15085 - fake_src = [
15086 - '# github tarball\n',
15087 - '\n',
15088 - f'# {uri}\n'
15089 - ]
15090 + uri = "https://github.com/foo/bar/tarball/${PV}"
15091 + fake_src = ["# github tarball\n", "\n", f"# {uri}\n"]
15092 fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
15093 self.assertNoReport(self.check_kls(None), fake_pkg)
15094
15095 def test_github_tarball_uri(self):
15096 - uri = 'https://github.com/foo/bar/tarball/${PV}'
15097 - fake_src = [
15098 - f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n'
15099 - ]
15100 + uri = "https://github.com/foo/bar/tarball/${PV}"
15101 + fake_src = [f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n']
15102
15103 fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
15104 r = self.assertReport(self.check_kls(None), fake_pkg)
15105 assert r.line == 1
15106 assert r.uri == uri
15107 - assert (r.replacement ==
15108 - 'https://github.com/foo/bar/archive/${PV}.tar.gz')
15109 + assert r.replacement == "https://github.com/foo/bar/archive/${PV}.tar.gz"
15110 assert uri in str(r)
15111
15112 def test_github_zipball_uri(self):
15113 - uri = 'https://github.com/foo/bar/zipball/${PV}'
15114 - fake_src = [
15115 - f'SRC_URI="{uri} -> ${{P}}.zip"\n'
15116 - ]
15117 + uri = "https://github.com/foo/bar/zipball/${PV}"
15118 + fake_src = [f'SRC_URI="{uri} -> ${{P}}.zip"\n']
15119
15120 fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
15121 r = self.assertReport(self.check_kls(None), fake_pkg)
15122 assert r.line == 1
15123 assert r.uri == uri
15124 - assert (r.replacement ==
15125 - 'https://github.com/foo/bar/archive/${PV}.tar.gz')
15126 + assert r.replacement == "https://github.com/foo/bar/archive/${PV}.tar.gz"
15127 assert uri in str(r)
15128
15129 def test_gitlab_archive_uri(self):
15130 - uri = 'https://gitlab.com/foo/bar/-/archive/${PV}/${P}.tar.gz'
15131 - fake_src = [
15132 - f'SRC_URI="{uri}"\n'
15133 - ]
15134 + uri = "https://gitlab.com/foo/bar/-/archive/${PV}/${P}.tar.gz"
15135 + fake_src = [f'SRC_URI="{uri}"\n']
15136 fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
15137 self.assertNoReport(self.check_kls(None), fake_pkg)
15138
15139 def test_gitlab_tar_gz_uri(self):
15140 - uri = 'https://gitlab.com/foo/bar/repository/archive.tar.gz?ref=${PV}'
15141 - fake_src = [
15142 - f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n'
15143 - ]
15144 + uri = "https://gitlab.com/foo/bar/repository/archive.tar.gz?ref=${PV}"
15145 + fake_src = [f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n']
15146
15147 fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
15148 r = self.assertReport(self.check_kls(None), fake_pkg)
15149 assert r.line == 1
15150 assert r.uri == uri
15151 - assert (r.replacement ==
15152 - 'https://gitlab.com/foo/bar/-/archive/${PV}/bar-${PV}.tar.gz')
15153 + assert r.replacement == "https://gitlab.com/foo/bar/-/archive/${PV}/bar-${PV}.tar.gz"
15154 assert uri in str(r)
15155
15156 def test_gitlab_tar_bz2_uri(self):
15157 - uri = 'https://gitlab.com/foo/bar/repository/archive.tar.bz2?ref=${PV}'
15158 - fake_src = [
15159 - f'SRC_URI="{uri} -> ${{P}}.tar.bz2"\n'
15160 - ]
15161 + uri = "https://gitlab.com/foo/bar/repository/archive.tar.bz2?ref=${PV}"
15162 + fake_src = [f'SRC_URI="{uri} -> ${{P}}.tar.bz2"\n']
15163
15164 fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
15165 r = self.assertReport(self.check_kls(None), fake_pkg)
15166 assert r.line == 1
15167 assert r.uri == uri
15168 - assert (r.replacement ==
15169 - 'https://gitlab.com/foo/bar/-/archive/${PV}/bar-${PV}.tar.bz2')
15170 + assert r.replacement == "https://gitlab.com/foo/bar/-/archive/${PV}/bar-${PV}.tar.bz2"
15171 assert uri in str(r)
15172
15173 def test_gitlab_zip_uri(self):
15174 - uri = 'https://gitlab.com/foo/bar/repository/archive.zip?ref=${PV}'
15175 - fake_src = [
15176 - f'SRC_URI="{uri} -> ${{P}}.zip"\n'
15177 - ]
15178 + uri = "https://gitlab.com/foo/bar/repository/archive.zip?ref=${PV}"
15179 + fake_src = [f'SRC_URI="{uri} -> ${{P}}.zip"\n']
15180
15181 fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
15182 r = self.assertReport(self.check_kls(None), fake_pkg)
15183 assert r.line == 1
15184 assert r.uri == uri
15185 - assert (r.replacement ==
15186 - 'https://gitlab.com/foo/bar/-/archive/${PV}/bar-${PV}.zip')
15187 + assert r.replacement == "https://gitlab.com/foo/bar/-/archive/${PV}/bar-${PV}.zip"
15188 assert uri in str(r)
15189
15190
15191 @@ -320,15 +304,13 @@ class TestBetterCompression(misc.ReportTestCase):
15192 check_kls = codingstyle.BetterCompressionCheck
15193
15194 def test_github_archive_uri(self):
15195 - uri = 'https://github.com/foo/bar/archive/${PV}.tar.gz'
15196 - fake_src = [
15197 - f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n'
15198 - ]
15199 + uri = "https://github.com/foo/bar/archive/${PV}.tar.gz"
15200 + fake_src = [f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n']
15201 fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
15202 self.assertNoReport(self.check_kls(None), fake_pkg)
15203
15204 def test_comment_uri(self):
15205 - uri = 'https://gitlab.com/GNOME/${PN}/-/archive/${PV}/${P}.tar'
15206 + uri = "https://gitlab.com/GNOME/${PN}/-/archive/${PV}/${P}.tar"
15207 fake_src = [
15208 f'#SRC_URI="{uri} -> ${{P}}.tar.gz"\n',
15209 " ",
15210 @@ -339,21 +321,22 @@ class TestBetterCompression(misc.ReportTestCase):
15211 r = self.assertReport(self.check_kls(None), fake_pkg)
15212 assert r.lineno == 4
15213
15214 - @pytest.mark.parametrize('uri', (
15215 - 'https://gitlab.com/GNOME/${PN}/-/archive/${PV}/${P}.tar',
15216 - 'https://gitlab.gnome.org/GNOME/${PN}/-/archive/${PV}/${P}.tar.gz',
15217 - 'https://gitlab.gnome.org/GNOME/${PN}/-/archive/${PV}/${P}.zip',
15218 - 'https://gitlab.freedesktop.org/glvnd/${PN}/-/archive/v${PV}/${PN}-v${PV}.tar.gz',
15219 - ))
15220 + @pytest.mark.parametrize(
15221 + "uri",
15222 + (
15223 + "https://gitlab.com/GNOME/${PN}/-/archive/${PV}/${P}.tar",
15224 + "https://gitlab.gnome.org/GNOME/${PN}/-/archive/${PV}/${P}.tar.gz",
15225 + "https://gitlab.gnome.org/GNOME/${PN}/-/archive/${PV}/${P}.zip",
15226 + "https://gitlab.freedesktop.org/glvnd/${PN}/-/archive/v${PV}/${PN}-v${PV}.tar.gz",
15227 + ),
15228 + )
15229 def test_gitlab_archive_uri(self, uri):
15230 - fake_src = [
15231 - f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n'
15232 - ]
15233 + fake_src = [f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n']
15234 fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
15235 r = self.assertReport(self.check_kls(None), fake_pkg)
15236 assert r.lineno == 1
15237 assert r.line == uri
15238 - assert r.replacement == '.tar.bz2'
15239 + assert r.replacement == ".tar.bz2"
15240 assert uri in str(r)
15241
15242
15243 @@ -363,76 +346,85 @@ class TestStaticSrcUri(misc.ReportTestCase):
15244 check = check_kls(None)
15245
15246 @staticmethod
15247 - def _prepare_pkg(uri_value: str, rename: str = '', pkgver: str = 'diffball-0.1.2.3'):
15248 + def _prepare_pkg(uri_value: str, rename: str = "", pkgver: str = "diffball-0.1.2.3"):
15249 if rename:
15250 - rename = f' -> {rename}'
15251 - uri = f'https://github.com/pkgcore/pkgcheck/archive/{uri_value}.tar.gz'
15252 - fake_src = [
15253 - f'SRC_URI="{uri}{rename}"\n'
15254 - ]
15255 + rename = f" -> {rename}"
15256 + uri = f"https://github.com/pkgcore/pkgcheck/archive/{uri_value}.tar.gz"
15257 + fake_src = [f'SRC_URI="{uri}{rename}"\n']
15258
15259 - fake_pkg = misc.FakePkg(f"dev-util/{pkgver}", ebuild=''.join(fake_src), lines=fake_src)
15260 - data = ''.join(fake_src).encode()
15261 + fake_pkg = misc.FakePkg(f"dev-util/{pkgver}", ebuild="".join(fake_src), lines=fake_src)
15262 + data = "".join(fake_src).encode()
15263 return _ParsedPkg(data, pkg=fake_pkg)
15264
15265 -
15266 - @pytest.mark.parametrize('value', (
15267 - '${P}',
15268 - '${PV}',
15269 - 'v${PV}',
15270 - 'random-0.1.2.3', # not a valid prefix
15271 - '1.2.3', # currently we support only ver_cut with start=1
15272 - '0', # for ver_cut only if more then 1 part
15273 - ))
15274 + @pytest.mark.parametrize(
15275 + "value",
15276 + (
15277 + "${P}",
15278 + "${PV}",
15279 + "v${PV}",
15280 + "random-0.1.2.3", # not a valid prefix
15281 + "1.2.3", # currently we support only ver_cut with start=1
15282 + "0", # for ver_cut only if more then 1 part
15283 + ),
15284 + )
15285 def test_no_report(self, value):
15286 self.assertNoReport(self.check, self._prepare_pkg(value))
15287
15288 - @pytest.mark.parametrize(('value', 'static_str', 'replacement'), (
15289 - ('diffball-0.1.2.3', 'diffball-0.1.2.3', '${P}'),
15290 - ('Diffball-0.1.2.3', 'Diffball-0.1.2.3', '${P^}'),
15291 - ('DIFFBALL-0.1.2.3', 'DIFFBALL-0.1.2.3', '${P^^}'),
15292 - ('diffball-0123', 'diffball-0123', '${P//.}'),
15293 - ('Diffball-0123', 'Diffball-0123', '${P^//.}'),
15294 - ('0.1.2.3', '0.1.2.3', '${PV}'),
15295 - ('v0.1.2.3', '0.1.2.3', '${PV}'),
15296 - ('0.1.2', '0.1.2', '$(ver_cut 1-3)'),
15297 - ('0.1', '0.1', '$(ver_cut 1-2)'),
15298 - ('diffball-0.1.2', '0.1.2', '$(ver_cut 1-3)'),
15299 - ('v0123', '0123', "${PV//.}"),
15300 - ('012.3', '012.3', "$(ver_rs 1-2 '')"),
15301 - ('012.3', '012.3', "$(ver_rs 1-2 '')"),
15302 - ('0_1_2_3', '0_1_2_3', "${PV//./_}"),
15303 - ('0_1_2.3', '0_1_2.3', "$(ver_rs 1-2 '_')"),
15304 - ('0-1.2.3', '0-1.2.3', "$(ver_rs 1 '-')"),
15305 - ))
15306 + @pytest.mark.parametrize(
15307 + ("value", "static_str", "replacement"),
15308 + (
15309 + ("diffball-0.1.2.3", "diffball-0.1.2.3", "${P}"),
15310 + ("Diffball-0.1.2.3", "Diffball-0.1.2.3", "${P^}"),
15311 + ("DIFFBALL-0.1.2.3", "DIFFBALL-0.1.2.3", "${P^^}"),
15312 + ("diffball-0123", "diffball-0123", "${P//.}"),
15313 + ("Diffball-0123", "Diffball-0123", "${P^//.}"),
15314 + ("0.1.2.3", "0.1.2.3", "${PV}"),
15315 + ("v0.1.2.3", "0.1.2.3", "${PV}"),
15316 + ("0.1.2", "0.1.2", "$(ver_cut 1-3)"),
15317 + ("0.1", "0.1", "$(ver_cut 1-2)"),
15318 + ("diffball-0.1.2", "0.1.2", "$(ver_cut 1-3)"),
15319 + ("v0123", "0123", "${PV//.}"),
15320 + ("012.3", "012.3", "$(ver_rs 1-2 '')"),
15321 + ("012.3", "012.3", "$(ver_rs 1-2 '')"),
15322 + ("0_1_2_3", "0_1_2_3", "${PV//./_}"),
15323 + ("0_1_2.3", "0_1_2.3", "$(ver_rs 1-2 '_')"),
15324 + ("0-1.2.3", "0-1.2.3", "$(ver_rs 1 '-')"),
15325 + ),
15326 + )
15327 def test_with_report(self, value, static_str, replacement):
15328 r = self.assertReport(self.check, self._prepare_pkg(value))
15329 assert r.static_str == static_str
15330 assert r.replacement == replacement
15331
15332 def test_rename(self):
15333 - self.assertNoReport(self.check, self._prepare_pkg('${P}', '${P}.tar.gz'))
15334 + self.assertNoReport(self.check, self._prepare_pkg("${P}", "${P}.tar.gz"))
15335
15336 - r = self.assertReport(self.check, self._prepare_pkg('${P}', 'diffball-0.1.2.3.tar.gz'))
15337 - assert r.static_str == 'diffball-0.1.2.3'
15338 - assert r.replacement == '${P}'
15339 + r = self.assertReport(self.check, self._prepare_pkg("${P}", "diffball-0.1.2.3.tar.gz"))
15340 + assert r.static_str == "diffball-0.1.2.3"
15341 + assert r.replacement == "${P}"
15342
15343 - r = self.assertReport(self.check, self._prepare_pkg('0.1.2.3', '${P}.tar.gz'))
15344 - assert r.static_str == '0.1.2.3'
15345 - assert r.replacement == '${PV}'
15346 + r = self.assertReport(self.check, self._prepare_pkg("0.1.2.3", "${P}.tar.gz"))
15347 + assert r.static_str == "0.1.2.3"
15348 + assert r.replacement == "${PV}"
15349
15350 - r = self.assertReport(self.check, self._prepare_pkg('diffball-0.1.2.3', 'diffball-0.1.2.3.tar.gz'))
15351 - assert r.static_str == 'diffball-0.1.2.3'
15352 - assert r.replacement == '${P}'
15353 + r = self.assertReport(
15354 + self.check, self._prepare_pkg("diffball-0.1.2.3", "diffball-0.1.2.3.tar.gz")
15355 + )
15356 + assert r.static_str == "diffball-0.1.2.3"
15357 + assert r.replacement == "${P}"
15358
15359 def test_capitalize(self):
15360 - r = self.assertReport(self.check, self._prepare_pkg('DIFFBALL-0.1.2.3', pkgver='DIFFBALL-0.1.2.3'))
15361 - assert r.static_str == 'DIFFBALL-0.1.2.3'
15362 - assert r.replacement == '${P}'
15363 + r = self.assertReport(
15364 + self.check, self._prepare_pkg("DIFFBALL-0.1.2.3", pkgver="DIFFBALL-0.1.2.3")
15365 + )
15366 + assert r.static_str == "DIFFBALL-0.1.2.3"
15367 + assert r.replacement == "${P}"
15368
15369 - r = self.assertReport(self.check, self._prepare_pkg('Diffball-0.1.2.3', pkgver='Diffball-0.1.2.3'))
15370 - assert r.static_str == 'Diffball-0.1.2.3'
15371 - assert r.replacement == '${P}'
15372 + r = self.assertReport(
15373 + self.check, self._prepare_pkg("Diffball-0.1.2.3", pkgver="Diffball-0.1.2.3")
15374 + )
15375 + assert r.static_str == "Diffball-0.1.2.3"
15376 + assert r.replacement == "${P}"
15377
15378
15379 class TestExcessiveLineLength(misc.ReportTestCase):
15380 @@ -441,54 +433,68 @@ class TestExcessiveLineLength(misc.ReportTestCase):
15381 check = check_kls(None)
15382 word_length = codingstyle.ExcessiveLineLength.word_length
15383
15384 -
15385 @staticmethod
15386 def _prepare_pkg(*lines: str):
15387 - fake_pkg = misc.FakePkg("dev-util/diffball-0", ebuild=''.join(lines), lines=lines)
15388 - data = ''.join(lines).encode()
15389 + fake_pkg = misc.FakePkg("dev-util/diffball-0", ebuild="".join(lines), lines=lines)
15390 + data = "".join(lines).encode()
15391 return _ParsedPkg(data, pkg=fake_pkg)
15392
15393 def test_normal_length(self):
15394 self.assertNoReport(self.check, self._prepare_pkg('echo "short line"'))
15395
15396 def test_long_line(self):
15397 - r = self.assertReport(self.check, self._prepare_pkg(f'echo {"a " * codingstyle.ExcessiveLineLength.line_length}'))
15398 - assert r.lines == (1, )
15399 + r = self.assertReport(
15400 + self.check,
15401 + self._prepare_pkg(f'echo {"a " * codingstyle.ExcessiveLineLength.line_length}'),
15402 + )
15403 + assert r.lines == (1,)
15404
15405 def test_multiple_lines(self):
15406 - r = self.assertReport(self.check, self._prepare_pkg(
15407 - f'echo {"a " * codingstyle.ExcessiveLineLength.line_length}',
15408 - 'echo "short line"',
15409 - f'echo {"Hello " * codingstyle.ExcessiveLineLength.line_length}',
15410 - ))
15411 + r = self.assertReport(
15412 + self.check,
15413 + self._prepare_pkg(
15414 + f'echo {"a " * codingstyle.ExcessiveLineLength.line_length}',
15415 + 'echo "short line"',
15416 + f'echo {"Hello " * codingstyle.ExcessiveLineLength.line_length}',
15417 + ),
15418 + )
15419 assert r.lines == (1, 3)
15420
15421 - @pytest.mark.parametrize('variable', ('DESCRIPTION', 'KEYWORDS', 'IUSE'))
15422 + @pytest.mark.parametrize("variable", ("DESCRIPTION", "KEYWORDS", "IUSE"))
15423 def test_special_variables(self, variable):
15424 - self.assertNoReport(self.check, self._prepare_pkg(
15425 - f'{variable}="{"a " * codingstyle.ExcessiveLineLength.line_length}"',
15426 - f' {variable}="{"a " * codingstyle.ExcessiveLineLength.line_length}"',
15427 - f'\t\t{variable}="{"a " * codingstyle.ExcessiveLineLength.line_length}"',
15428 - ))
15429 + self.assertNoReport(
15430 + self.check,
15431 + self._prepare_pkg(
15432 + f'{variable}="{"a " * codingstyle.ExcessiveLineLength.line_length}"',
15433 + f' {variable}="{"a " * codingstyle.ExcessiveLineLength.line_length}"',
15434 + f'\t\t{variable}="{"a " * codingstyle.ExcessiveLineLength.line_length}"',
15435 + ),
15436 + )
15437
15438 def test_long_words(self):
15439 - long_word = 'a' * self.word_length + 'b'
15440 - medium_word = 'a' * (self.word_length // 2)
15441 - r = self.assertReport(self.check, self._prepare_pkg(
15442 - f'echo {"a" * codingstyle.ExcessiveLineLength.line_length}',
15443 - f'echo {medium_word} {long_word}',
15444 - f'echo {medium_word} {long_word[:-5]}',
15445 - ))
15446 - assert r.lines == (3, )
15447 + long_word = "a" * self.word_length + "b"
15448 + medium_word = "a" * (self.word_length // 2)
15449 + r = self.assertReport(
15450 + self.check,
15451 + self._prepare_pkg(
15452 + f'echo {"a" * codingstyle.ExcessiveLineLength.line_length}',
15453 + f"echo {medium_word} {long_word}",
15454 + f"echo {medium_word} {long_word[:-5]}",
15455 + ),
15456 + )
15457 + assert r.lines == (3,)
15458
15459 def test_long_quotes(self):
15460 # The exception is for any quoted string with length >= word_length.
15461 # Each quoted string is computed by itself.
15462 - long_word = 'a ' * (self.word_length // 2) + 'b' # long quoted string, skipped
15463 - medium_word = 'a ' * (self.word_length // 4) # not long enough string, not skipped
15464 - r = self.assertReport(self.check, self._prepare_pkg(
15465 - f'echo "{"a" * codingstyle.ExcessiveLineLength.line_length}"',
15466 - f'echo "{medium_word}" "{long_word}"',
15467 - 'echo' + f' "{medium_word}"' * 3,
15468 - ))
15469 - assert r.lines == (3, )
15470 + long_word = "a " * (self.word_length // 2) + "b" # long quoted string, skipped
15471 + medium_word = "a " * (self.word_length // 4) # not long enough string, not skipped
15472 + r = self.assertReport(
15473 + self.check,
15474 + self._prepare_pkg(
15475 + f'echo "{"a" * codingstyle.ExcessiveLineLength.line_length}"',
15476 + f'echo "{medium_word}" "{long_word}"',
15477 + "echo" + f' "{medium_word}"' * 3,
15478 + ),
15479 + )
15480 + assert r.lines == (3,)
15481
15482 diff --git a/tests/checks/test_dropped_keywords.py b/tests/checks/test_dropped_keywords.py
15483 index 6b070919..fbfee5fc 100644
15484 --- a/tests/checks/test_dropped_keywords.py
15485 +++ b/tests/checks/test_dropped_keywords.py
15486 @@ -8,65 +8,62 @@ class TestDroppedKeywords(misc.ReportTestCase):
15487
15488 check_kls = dropped_keywords.DroppedKeywordsCheck
15489
15490 - def mk_pkg(self, ver, keywords='', eclasses=(), **kwargs):
15491 + def mk_pkg(self, ver, keywords="", eclasses=(), **kwargs):
15492 return misc.FakePkg(
15493 f"dev-util/diffball-{ver}",
15494 data={
15495 **kwargs,
15496 "KEYWORDS": keywords,
15497 "_eclasses_": eclasses,
15498 - })
15499 + },
15500 + )
15501
15502 - def mk_check(self, arches=('x86', 'amd64'), verbosity=0):
15503 + def mk_check(self, arches=("x86", "amd64"), verbosity=0):
15504 options = arghparse.Namespace(arches=arches, verbosity=verbosity)
15505 return self.check_kls(options, arches_addon=None)
15506
15507 def test_it(self):
15508 # single version, shouldn't yield.
15509 check = self.mk_check()
15510 - self.assertNoReport(check, [self.mk_pkg('1')])
15511 + self.assertNoReport(check, [self.mk_pkg("1")])
15512
15513 # ebuilds without keywords are skipped
15514 - self.assertNoReport(
15515 - check, [self.mk_pkg("1", "x86 amd64"), self.mk_pkg("2")])
15516 + self.assertNoReport(check, [self.mk_pkg("1", "x86 amd64"), self.mk_pkg("2")])
15517
15518 # ensure it limits itself to just the arches we care about
15519 # check unstable at the same time;
15520 # finally, check '-' handling; if x86 -> -x86, that's valid.
15521 self.assertNoReport(
15522 check,
15523 - [self.mk_pkg("1", "x86 ~amd64 ppc"),
15524 - self.mk_pkg("2", "~amd64 x86"),
15525 - self.mk_pkg("3", "-amd64 x86")])
15526 + [
15527 + self.mk_pkg("1", "x86 ~amd64 ppc"),
15528 + self.mk_pkg("2", "~amd64 x86"),
15529 + self.mk_pkg("3", "-amd64 x86"),
15530 + ],
15531 + )
15532
15533 # check added keyword handling
15534 self.assertNoReport(
15535 check,
15536 - [self.mk_pkg("1", "amd64"),
15537 - self.mk_pkg("2", "x86"),
15538 - self.mk_pkg("3", "~x86 ~amd64")])
15539 + [self.mk_pkg("1", "amd64"), self.mk_pkg("2", "x86"), self.mk_pkg("3", "~x86 ~amd64")],
15540 + )
15541
15542 # check special keyword handling
15543 - for key in ('-*', '*', '~*'):
15544 - self.assertNoReport(
15545 - check,
15546 - [self.mk_pkg("1", "x86 ~amd64"),
15547 - self.mk_pkg("2", key)])
15548 + for key in ("-*", "*", "~*"):
15549 + self.assertNoReport(check, [self.mk_pkg("1", "x86 ~amd64"), self.mk_pkg("2", key)])
15550
15551 # ensure it doesn't flag live ebuilds
15552 self.assertNoReport(
15553 - check,
15554 - [self.mk_pkg("1", "x86 amd64"),
15555 - self.mk_pkg("9999", "", PROPERTIES='live')])
15556 + check, [self.mk_pkg("1", "x86 amd64"), self.mk_pkg("9999", "", PROPERTIES="live")]
15557 + )
15558
15559 def test_verbose_mode(self):
15560 # verbose mode outputs a report per version with dropped keywords
15561 check = self.mk_check(verbosity=1)
15562 reports = self.assertReports(
15563 check,
15564 - [self.mk_pkg("1", "amd64 x86"),
15565 - self.mk_pkg("2", "amd64"),
15566 - self.mk_pkg("3", "amd64")])
15567 + [self.mk_pkg("1", "amd64 x86"), self.mk_pkg("2", "amd64"), self.mk_pkg("3", "amd64")],
15568 + )
15569 assert len(reports) == 2
15570 assert {x.version for x in reports} == {"2", "3"}
15571 assert set().union(*(x.arches for x in reports)) == {"x86"}
15572 @@ -76,9 +73,8 @@ class TestDroppedKeywords(misc.ReportTestCase):
15573 check = self.mk_check()
15574 reports = self.assertReports(
15575 check,
15576 - [self.mk_pkg("1", "x86 amd64"),
15577 - self.mk_pkg("2", "amd64"),
15578 - self.mk_pkg("3", "amd64")])
15579 + [self.mk_pkg("1", "x86 amd64"), self.mk_pkg("2", "amd64"), self.mk_pkg("3", "amd64")],
15580 + )
15581 assert len(reports) == 1
15582 - assert reports[0].version == '3'
15583 + assert reports[0].version == "3"
15584 assert set().union(*(x.arches for x in reports)) == {"x86"}
15585
15586 diff --git a/tests/checks/test_git.py b/tests/checks/test_git.py
15587 index 150d8b8b..1cefd549 100644
15588 --- a/tests/checks/test_git.py
15589 +++ b/tests/checks/test_git.py
15590 @@ -21,11 +21,11 @@ class FakeCommit(GitCommit):
15591
15592 def __init__(self, **kwargs):
15593 commit_data = {
15594 - 'hash': '7f9abd7ec2d079b1d0c36fc2f5d626ae0691757e',
15595 - 'commit_time': 1613438722,
15596 - 'author': 'author@××××××.com',
15597 - 'committer': 'author@××××××.com',
15598 - 'message': (),
15599 + "hash": "7f9abd7ec2d079b1d0c36fc2f5d626ae0691757e",
15600 + "commit_time": 1613438722,
15601 + "author": "author@××××××.com",
15602 + "committer": "author@××××××.com",
15603 + "message": (),
15604 }
15605 commit_data.update(kwargs)
15606 super().__init__(**commit_data)
15607 @@ -33,199 +33,217 @@ class FakeCommit(GitCommit):
15608
15609 class TestGitCommitMessageCheck(ReportTestCase):
15610 check_kls = git_mod.GitCommitMessageCheck
15611 - options = arghparse.Namespace(
15612 - target_repo=FakeRepo(), commits='origin', gentoo_repo=True)
15613 + options = arghparse.Namespace(target_repo=FakeRepo(), commits="origin", gentoo_repo=True)
15614 check = git_mod.GitCommitMessageCheck(options)
15615
15616 def test_sign_offs(self):
15617 # assert that it checks for both author and comitter
15618 r = self.assertReport(
15619 - self.check,
15620 - FakeCommit(author='user1', committer='user2', message=['blah'])
15621 + self.check, FakeCommit(author="user1", committer="user2", message=["blah"])
15622 )
15623 assert isinstance(r, git_mod.MissingSignOff)
15624 - assert r.missing_sign_offs == ('user1', 'user2')
15625 + assert r.missing_sign_offs == ("user1", "user2")
15626
15627 # assert that it handles author/committer being the same
15628 self.assertNoReport(
15629 self.check,
15630 FakeCommit(
15631 - author='user@××××.com', committer='user@××××.com',
15632 - message=['summary', '', 'Signed-off-by: user@××××.com']))
15633 + author="user@××××.com",
15634 + committer="user@××××.com",
15635 + message=["summary", "", "Signed-off-by: user@××××.com"],
15636 + ),
15637 + )
15638
15639 # assert it can handle multiple sign offs.
15640 self.assertNoReport(
15641 self.check,
15642 FakeCommit(
15643 - author='user1', committer='user2',
15644 - message=['summary', '', 'Signed-off-by: user2', 'Signed-off-by: user1']))
15645 + author="user1",
15646 + committer="user2",
15647 + message=["summary", "", "Signed-off-by: user2", "Signed-off-by: user1"],
15648 + ),
15649 + )
15650
15651 - def SO_commit(self, summary='summary', body='', tags=(), **kwargs):
15652 + def SO_commit(self, summary="summary", body="", tags=(), **kwargs):
15653 """Create a commit object from summary, body, and tags components."""
15654 - author = kwargs.pop('author', 'author@××××××.com')
15655 - committer = kwargs.pop('committer', 'author@××××××.com')
15656 + author = kwargs.pop("author", "author@××××××.com")
15657 + committer = kwargs.pop("committer", "author@××××××.com")
15658 message = summary
15659 if message:
15660 if body:
15661 - message += '\n\n' + body
15662 - sign_offs = tuple(f'Signed-off-by: {user}' for user in {author, committer})
15663 - message += '\n\n' + '\n'.join(tuple(tags) + sign_offs)
15664 + message += "\n\n" + body
15665 + sign_offs = tuple(f"Signed-off-by: {user}" for user in {author, committer})
15666 + message += "\n\n" + "\n".join(tuple(tags) + sign_offs)
15667 return FakeCommit(author=author, committer=committer, message=message.splitlines())
15668
15669 def test_invalid_commit_tag(self):
15670 # assert it doesn't puke if there are no tags
15671 self.assertNoReport(self.check, self.SO_commit())
15672
15673 - self.assertNoReport(self.check, self.SO_commit(tags=['Bug: https://gentoo.org/blah']))
15674 - self.assertNoReport(self.check, self.SO_commit(tags=['Close: https://gentoo.org/blah']))
15675 + self.assertNoReport(self.check, self.SO_commit(tags=["Bug: https://gentoo.org/blah"]))
15676 + self.assertNoReport(self.check, self.SO_commit(tags=["Close: https://gentoo.org/blah"]))
15677
15678 - r = self.assertReport(self.check, self.SO_commit(tags=['Bug: 123455']))
15679 + r = self.assertReport(self.check, self.SO_commit(tags=["Bug: 123455"]))
15680 assert isinstance(r, git_mod.InvalidCommitTag)
15681 - assert (r.tag, r.value, r.error) == ('Bug', '123455', "value isn't a URL")
15682 + assert (r.tag, r.value, r.error) == ("Bug", "123455", "value isn't a URL")
15683
15684 # Do a protocol check; this is more of an assertion against the parsing model
15685 # used in the implementation.
15686 - r = self.assertReport(self.check, self.SO_commit(tags=['Closes: ftp://blah.com/asdf']))
15687 + r = self.assertReport(self.check, self.SO_commit(tags=["Closes: ftp://blah.com/asdf"]))
15688 assert isinstance(r, git_mod.InvalidCommitTag)
15689 - assert r.tag == 'Closes'
15690 - assert 'protocol' in r.error
15691 + assert r.tag == "Closes"
15692 + assert "protocol" in r.error
15693
15694 def test_gentoo_bug_tag(self):
15695 - commit = self.SO_commit(tags=['Gentoo-Bug: https://bugs.gentoo.org/1'])
15696 - assert 'Gentoo-Bug tag is no longer valid' in self.assertReport(self.check, commit).error
15697 + commit = self.SO_commit(tags=["Gentoo-Bug: https://bugs.gentoo.org/1"])
15698 + assert "Gentoo-Bug tag is no longer valid" in self.assertReport(self.check, commit).error
15699
15700 def test_commit_tags(self):
15701 - ref = 'd8337304f09'
15702 + ref = "d8337304f09"
15703
15704 - for tag in ('Fixes', 'Reverts'):
15705 + for tag in ("Fixes", "Reverts"):
15706 # no results on `git cat-file` failure
15707 - with patch('pkgcheck.checks.git.subprocess.Popen') as git_cat:
15708 + with patch("pkgcheck.checks.git.subprocess.Popen") as git_cat:
15709 # force using a new `git cat-file` process for each iteration
15710 self.check._git_cat_file = None
15711 git_cat.return_value.poll.return_value = -1
15712 - commit = self.SO_commit(tags=[f'{tag}: {ref}'])
15713 + commit = self.SO_commit(tags=[f"{tag}: {ref}"])
15714 self.assertNoReport(self.check, commit)
15715
15716 # missing and ambiguous object refs
15717 - for status in ('missing', 'ambiguous'):
15718 + for status in ("missing", "ambiguous"):
15719 self.check._git_cat_file = None
15720 - with patch('pkgcheck.checks.git.subprocess.Popen') as git_cat:
15721 + with patch("pkgcheck.checks.git.subprocess.Popen") as git_cat:
15722 git_cat.return_value.poll.return_value = None
15723 - git_cat.return_value.stdout.readline.return_value = f'{ref} {status}'
15724 - commit = self.SO_commit(tags=[f'{tag}: {ref}'])
15725 + git_cat.return_value.stdout.readline.return_value = f"{ref} {status}"
15726 + commit = self.SO_commit(tags=[f"{tag}: {ref}"])
15727 r = self.assertReport(self.check, commit)
15728 assert isinstance(r, git_mod.InvalidCommitTag)
15729 - assert f'{status} commit' in r.error
15730 + assert f"{status} commit" in r.error
15731
15732 # valid tag reference
15733 - with patch('pkgcheck.checks.git.subprocess.Popen') as git_cat:
15734 + with patch("pkgcheck.checks.git.subprocess.Popen") as git_cat:
15735 self.check._git_cat_file = None
15736 git_cat.return_value.poll.return_value = None
15737 - git_cat.return_value.stdout.readline.return_value = f'{ref} commit 1234'
15738 - commit = self.SO_commit(tags=[f'{tag}: {ref}'])
15739 + git_cat.return_value.stdout.readline.return_value = f"{ref} commit 1234"
15740 + commit = self.SO_commit(tags=[f"{tag}: {ref}"])
15741 self.assertNoReport(self.check, commit)
15742
15743 def test_summary_length(self):
15744 - self.assertNoReport(self.check, self.SO_commit('single summary headline'))
15745 - self.assertNoReport(self.check, self.SO_commit('a' * 69))
15746 - assert 'no commit message' in \
15747 - self.assertReport(self.check, self.SO_commit('')).error
15748 - assert 'summary is too long' in \
15749 - self.assertReport(self.check, self.SO_commit('a' * 70)).error
15750 + self.assertNoReport(self.check, self.SO_commit("single summary headline"))
15751 + self.assertNoReport(self.check, self.SO_commit("a" * 69))
15752 + assert "no commit message" in self.assertReport(self.check, self.SO_commit("")).error
15753 + assert (
15754 + "summary is too long" in self.assertReport(self.check, self.SO_commit("a" * 70)).error
15755 + )
15756
15757 def test_message_body_length(self):
15758 # message body lines longer than 80 chars are flagged
15759 - long_line = 'a' + ' b' * 40
15760 - assert 'line 2 greater than 80 chars' in \
15761 - self.assertReport(
15762 - self.check,
15763 - self.SO_commit(body=long_line)).error
15764 + long_line = "a" + " b" * 40
15765 + assert (
15766 + "line 2 greater than 80 chars"
15767 + in self.assertReport(self.check, self.SO_commit(body=long_line)).error
15768 + )
15769
15770 # but not non-word lines
15771 - long_line = 'a' * 81
15772 + long_line = "a" * 81
15773 self.assertNoReport(self.check, self.SO_commit(body=long_line))
15774
15775 def test_message_empty_lines(self):
15776 - message = textwrap.dedent("""\
15777 - foo
15778 + message = textwrap.dedent(
15779 + """\
15780 + foo
15781
15782 - bar
15783 + bar
15784
15785 - Signed-off-by: author@××××××.com
15786 - """).splitlines()
15787 + Signed-off-by: author@××××××.com
15788 + """
15789 + ).splitlines()
15790 commit = FakeCommit(message=message)
15791 self.assertNoReport(self.check, commit)
15792
15793 # missing empty line between summary and body
15794 - message = textwrap.dedent("""\
15795 - foo
15796 - bar
15797 + message = textwrap.dedent(
15798 + """\
15799 + foo
15800 + bar
15801
15802 - Signed-off-by: author@××××××.com
15803 - """).splitlines()
15804 + Signed-off-by: author@××××××.com
15805 + """
15806 + ).splitlines()
15807 commit = FakeCommit(message=message)
15808 r = self.assertReport(self.check, commit)
15809 - assert 'missing empty line before body' in str(r)
15810 + assert "missing empty line before body" in str(r)
15811
15812 # missing empty line between summary and tags
15813 - message = textwrap.dedent("""\
15814 - foo
15815 - Signed-off-by: author@××××××.com
15816 - """).splitlines()
15817 + message = textwrap.dedent(
15818 + """\
15819 + foo
15820 + Signed-off-by: author@××××××.com
15821 + """
15822 + ).splitlines()
15823 commit = FakeCommit(message=message)
15824 r = self.assertReport(self.check, commit)
15825 - assert 'missing empty line before tags' in str(r)
15826 + assert "missing empty line before tags" in str(r)
15827
15828 # missing empty lines between summary, body, and tags
15829 - message = textwrap.dedent("""\
15830 - foo
15831 - bar
15832 - Signed-off-by: author@××××××.com
15833 - """).splitlines()
15834 + message = textwrap.dedent(
15835 + """\
15836 + foo
15837 + bar
15838 + Signed-off-by: author@××××××.com
15839 + """
15840 + ).splitlines()
15841 commit = FakeCommit(message=message)
15842 reports = self.assertReports(self.check, commit)
15843 - assert 'missing empty line before body' in str(reports[0])
15844 - assert 'missing empty line before tags' in str(reports[1])
15845 + assert "missing empty line before body" in str(reports[0])
15846 + assert "missing empty line before tags" in str(reports[1])
15847
15848 def test_footer_empty_lines(self):
15849 - for whitespace in ('\t', ' ', ''):
15850 + for whitespace in ("\t", " ", ""):
15851 # empty lines in footer are flagged
15852 - message = textwrap.dedent(f"""\
15853 - foon
15854 -
15855 - blah: dar
15856 - {whitespace}
15857 - footer: yep
15858 - Signed-off-by: author@××××××.com
15859 - """).splitlines()
15860 + message = textwrap.dedent(
15861 + f"""\
15862 + foon
15863 +
15864 + blah: dar
15865 + {whitespace}
15866 + footer: yep
15867 + Signed-off-by: author@××××××.com
15868 + """
15869 + ).splitlines()
15870 commit = FakeCommit(message=message)
15871 r = self.assertReport(self.check, commit)
15872 - assert 'empty line 4 in footer' in str(r)
15873 + assert "empty line 4 in footer" in str(r)
15874
15875 # empty lines at the end of a commit message are ignored
15876 - message = textwrap.dedent(f"""\
15877 + message = textwrap.dedent(
15878 + f"""\
15879 + foon
15880 +
15881 + blah: dar
15882 + footer: yep
15883 + Signed-off-by: author@××××××.com
15884 + {whitespace}
15885 + """
15886 + ).splitlines()
15887 + commit = FakeCommit(message=message)
15888 + self.assertNoReport(self.check, commit)
15889 +
15890 + def test_footer_non_tags(self):
15891 + message = textwrap.dedent(
15892 + """\
15893 foon
15894
15895 blah: dar
15896 footer: yep
15897 + some random line
15898 Signed-off-by: author@××××××.com
15899 - {whitespace}
15900 - """).splitlines()
15901 - commit = FakeCommit(message=message)
15902 - self.assertNoReport(self.check, commit)
15903 -
15904 - def test_footer_non_tags(self):
15905 - message = textwrap.dedent("""\
15906 - foon
15907 -
15908 - blah: dar
15909 - footer: yep
15910 - some random line
15911 - Signed-off-by: author@××××××.com
15912 - """).splitlines()
15913 + """
15914 + ).splitlines()
15915 commit = FakeCommit(message=message)
15916 r = self.assertReport(self.check, commit)
15917 - assert 'non-tag in footer, line 5' in str(r)
15918 + assert "non-tag in footer, line 5" in str(r)
15919
15920
15921 class TestGitCommitMessageRepoCheck(ReportTestCase):
15922 @@ -239,18 +257,17 @@ class TestGitCommitMessageRepoCheck(ReportTestCase):
15923
15924 # initialize parent repo
15925 self.parent_git_repo = make_git_repo()
15926 - self.parent_repo = make_repo(
15927 - self.parent_git_repo.path, repo_id='gentoo', arches=['amd64'])
15928 - self.parent_git_repo.add_all('initial commit')
15929 + self.parent_repo = make_repo(self.parent_git_repo.path, repo_id="gentoo", arches=["amd64"])
15930 + self.parent_git_repo.add_all("initial commit")
15931 # create a stub pkg and commit it
15932 - self.parent_repo.create_ebuild('cat/pkg-0')
15933 - self.parent_git_repo.add_all('cat/pkg-0')
15934 + self.parent_repo.create_ebuild("cat/pkg-0")
15935 + self.parent_git_repo.add_all("cat/pkg-0")
15936
15937 # initialize child repo
15938 self.child_git_repo = make_git_repo()
15939 - self.child_git_repo.run(['git', 'remote', 'add', 'origin', self.parent_git_repo.path])
15940 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
15941 - self.child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
15942 + self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
15943 + self.child_git_repo.run(["git", "pull", "origin", "main"])
15944 + self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
15945 self.child_repo = make_repo(self.child_git_repo.path)
15946
15947 def init_check(self, options=None, future=0):
15948 @@ -263,96 +280,106 @@ class TestGitCommitMessageRepoCheck(ReportTestCase):
15949
15950 def _options(self, **kwargs):
15951 args = [
15952 - 'scan', '-q', '--cache-dir', self.cache_dir,
15953 - '--repo', self.child_repo.location, '--commits',
15954 + "scan",
15955 + "-q",
15956 + "--cache-dir",
15957 + self.cache_dir,
15958 + "--repo",
15959 + self.child_repo.location,
15960 + "--commits",
15961 ]
15962 options, _ = self._tool.parse_args(args)
15963 return options
15964
15965 def test_bad_commit_summary_pkg(self):
15966 # properly prefixed commit summary
15967 - self.child_repo.create_ebuild('cat/pkg-1')
15968 - self.child_git_repo.add_all('cat/pkg: version bump to 1', signoff=True)
15969 + self.child_repo.create_ebuild("cat/pkg-1")
15970 + self.child_git_repo.add_all("cat/pkg: version bump to 1", signoff=True)
15971 self.init_check()
15972 self.assertNoReport(self.check, self.source)
15973
15974 # properly prefixed multiple ebuild commit summary
15975 - self.child_repo.create_ebuild('cat/pkg-2')
15976 - self.child_repo.create_ebuild('cat/pkg-3')
15977 - self.child_git_repo.add_all('cat/pkg: more version bumps', signoff=True)
15978 + self.child_repo.create_ebuild("cat/pkg-2")
15979 + self.child_repo.create_ebuild("cat/pkg-3")
15980 + self.child_git_repo.add_all("cat/pkg: more version bumps", signoff=True)
15981 self.init_check()
15982 self.assertNoReport(self.check, self.source)
15983
15984 # special categories that allow not having version in new package summary
15985 - self.child_repo.create_ebuild('acct-user/pkgcheck-1')
15986 - self.child_git_repo.add_all('acct-user/pkgcheck: add user for pkgcheck', signoff=True)
15987 + self.child_repo.create_ebuild("acct-user/pkgcheck-1")
15988 + self.child_git_repo.add_all("acct-user/pkgcheck: add user for pkgcheck", signoff=True)
15989 self.init_check()
15990 self.assertNoReport(self.check, self.source)
15991
15992 # special categories that allow not having version in bump version summary
15993 - self.child_repo.create_ebuild('acct-user/pkgcheck-2')
15994 - self.child_git_repo.add_all('acct-user/pkgcheck: bump user for pkgcheck', signoff=True)
15995 + self.child_repo.create_ebuild("acct-user/pkgcheck-2")
15996 + self.child_git_repo.add_all("acct-user/pkgcheck: bump user for pkgcheck", signoff=True)
15997 self.init_check()
15998 self.assertNoReport(self.check, self.source)
15999
16000 # poorly prefixed commit summary
16001 - self.child_repo.create_ebuild('cat/pkg-4')
16002 - self.child_git_repo.add_all('version bump to 4', signoff=True)
16003 + self.child_repo.create_ebuild("cat/pkg-4")
16004 + self.child_git_repo.add_all("version bump to 4", signoff=True)
16005 commit1 = self.child_git_repo.HEAD
16006 # commit summary missing package version
16007 - self.child_repo.create_ebuild('cat/pkg-5')
16008 - self.child_git_repo.add_all('cat/pkg: version bump', signoff=True)
16009 + self.child_repo.create_ebuild("cat/pkg-5")
16010 + self.child_git_repo.add_all("cat/pkg: version bump", signoff=True)
16011 commit2 = self.child_git_repo.HEAD
16012 # commit summary missing renamed package version
16013 self.child_git_repo.move(
16014 - 'cat/pkg/pkg-3.ebuild', 'cat/pkg/pkg-6.ebuild',
16015 - msg='cat/pkg: version bump and remove old', signoff=True)
16016 + "cat/pkg/pkg-3.ebuild",
16017 + "cat/pkg/pkg-6.ebuild",
16018 + msg="cat/pkg: version bump and remove old",
16019 + signoff=True,
16020 + )
16021 commit3 = self.child_git_repo.HEAD
16022 # revision bumps aren't flagged
16023 - self.child_repo.create_ebuild('cat/pkg-6-r1')
16024 - self.child_git_repo.add_all('cat/pkg: revision bump', signoff=True)
16025 + self.child_repo.create_ebuild("cat/pkg-6-r1")
16026 + self.child_git_repo.add_all("cat/pkg: revision bump", signoff=True)
16027 self.init_check()
16028 # allow vVERSION
16029 - self.child_repo.create_ebuild('cat/pkg-7')
16030 - self.child_git_repo.add_all('cat/pkg: bump to v7', signoff=True)
16031 + self.child_repo.create_ebuild("cat/pkg-7")
16032 + self.child_git_repo.add_all("cat/pkg: bump to v7", signoff=True)
16033 self.init_check()
16034 results = self.assertReports(self.check, self.source)
16035 r1 = git_mod.BadCommitSummary(
16036 - "summary missing 'cat/pkg' package prefix",
16037 - 'version bump to 4', commit=commit1)
16038 + "summary missing 'cat/pkg' package prefix", "version bump to 4", commit=commit1
16039 + )
16040 r2 = git_mod.BadCommitSummary(
16041 - "summary missing package version '5'",
16042 - 'cat/pkg: version bump', commit=commit2)
16043 + "summary missing package version '5'", "cat/pkg: version bump", commit=commit2
16044 + )
16045 r3 = git_mod.BadCommitSummary(
16046 "summary missing package version '6'",
16047 - 'cat/pkg: version bump and remove old', commit=commit3)
16048 + "cat/pkg: version bump and remove old",
16049 + commit=commit3,
16050 + )
16051 assert set(results) == {r1, r2, r3}
16052
16053 def test_bad_commit_summary_category(self):
16054 # properly prefixed commit summary
16055 - self.child_repo.create_ebuild('cat/pkg1-1')
16056 - self.child_repo.create_ebuild('cat/pkg2-1')
16057 - self.child_git_repo.add_all('cat: various pkg updates', signoff=True)
16058 + self.child_repo.create_ebuild("cat/pkg1-1")
16059 + self.child_repo.create_ebuild("cat/pkg2-1")
16060 + self.child_git_repo.add_all("cat: various pkg updates", signoff=True)
16061 self.init_check()
16062 self.assertNoReport(self.check, self.source)
16063
16064 # multiple category commits are ignored
16065 - self.child_repo.create_ebuild('newcat1/newcat1-1')
16066 - self.child_repo.create_ebuild('newcat2/newpkg2-1')
16067 - self.child_git_repo.add_all('various changes', signoff=True)
16068 + self.child_repo.create_ebuild("newcat1/newcat1-1")
16069 + self.child_repo.create_ebuild("newcat2/newpkg2-1")
16070 + self.child_git_repo.add_all("various changes", signoff=True)
16071 self.init_check()
16072 self.assertNoReport(self.check, self.source)
16073
16074 # poorly prefixed commit summary for single category changes
16075 - self.child_repo.create_ebuild('cat/pkg3-1')
16076 - self.child_repo.create_ebuild('cat/pkg4-1')
16077 - self.child_git_repo.add_all('cat updates', signoff=True)
16078 + self.child_repo.create_ebuild("cat/pkg3-1")
16079 + self.child_repo.create_ebuild("cat/pkg4-1")
16080 + self.child_git_repo.add_all("cat updates", signoff=True)
16081 commit = self.child_git_repo.HEAD
16082 self.init_check()
16083 r = self.assertReport(self.check, self.source)
16084 expected = git_mod.BadCommitSummary(
16085 - "summary missing 'cat' category prefix",
16086 - 'cat updates', commit=commit)
16087 + "summary missing 'cat' category prefix", "cat updates", commit=commit
16088 + )
16089 assert r == expected
16090
16091
16092 @@ -367,18 +394,17 @@ class TestGitPkgCommitsCheck(ReportTestCase):
16093
16094 # initialize parent repo
16095 self.parent_git_repo = make_git_repo()
16096 - self.parent_repo = make_repo(
16097 - self.parent_git_repo.path, repo_id='gentoo', arches=['amd64'])
16098 - self.parent_git_repo.add_all('initial commit')
16099 + self.parent_repo = make_repo(self.parent_git_repo.path, repo_id="gentoo", arches=["amd64"])
16100 + self.parent_git_repo.add_all("initial commit")
16101 # create a stub pkg and commit it
16102 - self.parent_repo.create_ebuild('cat/pkg-0')
16103 - self.parent_git_repo.add_all('cat/pkg-0')
16104 + self.parent_repo.create_ebuild("cat/pkg-0")
16105 + self.parent_git_repo.add_all("cat/pkg-0")
16106
16107 # initialize child repo
16108 self.child_git_repo = make_git_repo()
16109 - self.child_git_repo.run(['git', 'remote', 'add', 'origin', self.parent_git_repo.path])
16110 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
16111 - self.child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
16112 + self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
16113 + self.child_git_repo.run(["git", "pull", "origin", "main"])
16114 + self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
16115 self.child_repo = make_repo(self.child_git_repo.path)
16116
16117 def init_check(self, options=None, future=0):
16118 @@ -391,193 +417,206 @@ class TestGitPkgCommitsCheck(ReportTestCase):
16119
16120 def _options(self, **kwargs):
16121 args = [
16122 - 'scan', '-q', '--cache-dir', self.cache_dir,
16123 - '--repo', self.child_repo.location, '--commits',
16124 + "scan",
16125 + "-q",
16126 + "--cache-dir",
16127 + self.cache_dir,
16128 + "--repo",
16129 + self.child_repo.location,
16130 + "--commits",
16131 ]
16132 options, _ = self._tool.parse_args(args)
16133 return options
16134
16135 def test_broken_ebuilds_ignored(self):
16136 - self.child_repo.create_ebuild('newcat/pkg-1', eapi='-1')
16137 - self.child_git_repo.add_all('newcat/pkg: initial import')
16138 + self.child_repo.create_ebuild("newcat/pkg-1", eapi="-1")
16139 + self.child_git_repo.add_all("newcat/pkg: initial import")
16140 self.init_check()
16141 self.assertNoReport(self.check, self.source)
16142
16143 def test_direct_stable(self):
16144 - self.child_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
16145 - self.child_git_repo.add_all('cat/pkg: version bump to 1')
16146 + self.child_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
16147 + self.child_git_repo.add_all("cat/pkg: version bump to 1")
16148 self.init_check()
16149 r = self.assertReport(self.check, self.source)
16150 - expected = git_mod.DirectStableKeywords(['amd64'], pkg=CPV('cat/pkg-1'))
16151 + expected = git_mod.DirectStableKeywords(["amd64"], pkg=CPV("cat/pkg-1"))
16152 assert r == expected
16153
16154 def test_direct_no_maintainer(self):
16155 - self.child_repo.create_ebuild('newcat/pkg-1')
16156 - self.child_git_repo.add_all('newcat/pkg: initial import')
16157 + self.child_repo.create_ebuild("newcat/pkg-1")
16158 + self.child_git_repo.add_all("newcat/pkg: initial import")
16159 self.init_check()
16160 r = self.assertReport(self.check, self.source)
16161 - expected = git_mod.DirectNoMaintainer(pkg=CPV('newcat/pkg-1'))
16162 + expected = git_mod.DirectNoMaintainer(pkg=CPV("newcat/pkg-1"))
16163 assert r == expected
16164
16165 def test_ebuild_incorrect_copyright(self):
16166 - self.child_repo.create_ebuild('cat/pkg-1')
16167 - line = '# Copyright 1999-2019 Gentoo Authors'
16168 - with open(pjoin(self.child_git_repo.path, 'cat/pkg/pkg-1.ebuild'), 'r+') as f:
16169 + self.child_repo.create_ebuild("cat/pkg-1")
16170 + line = "# Copyright 1999-2019 Gentoo Authors"
16171 + with open(pjoin(self.child_git_repo.path, "cat/pkg/pkg-1.ebuild"), "r+") as f:
16172 lines = f.read().splitlines()
16173 lines[0] = line
16174 f.seek(0)
16175 f.truncate()
16176 - f.write('\n'.join(lines))
16177 - self.child_git_repo.add_all('cat/pkg: version bump to 1')
16178 + f.write("\n".join(lines))
16179 + self.child_git_repo.add_all("cat/pkg: version bump to 1")
16180 self.init_check()
16181 r = self.assertReport(self.check, self.source)
16182 - expected = git_mod.EbuildIncorrectCopyright('2019', line=line, pkg=CPV('cat/pkg-1'))
16183 + expected = git_mod.EbuildIncorrectCopyright("2019", line=line, pkg=CPV("cat/pkg-1"))
16184 assert r == expected
16185
16186 def test_missing_copyright(self):
16187 """Ebuilds missing copyrights entirely are handled by EbuildHeaderCheck."""
16188 - self.child_repo.create_ebuild('cat/pkg-1')
16189 - with open(pjoin(self.child_git_repo.path, 'cat/pkg/pkg-1.ebuild'), 'r+') as f:
16190 + self.child_repo.create_ebuild("cat/pkg-1")
16191 + with open(pjoin(self.child_git_repo.path, "cat/pkg/pkg-1.ebuild"), "r+") as f:
16192 lines = f.read().splitlines()
16193 f.seek(0)
16194 f.truncate()
16195 - f.write('\n'.join(lines[1:]))
16196 - self.child_git_repo.add_all('cat/pkg: update ebuild')
16197 + f.write("\n".join(lines[1:]))
16198 + self.child_git_repo.add_all("cat/pkg: update ebuild")
16199 self.init_check()
16200 self.assertNoReport(self.check, self.source)
16201
16202 def test_dropped_stable_keywords(self):
16203 # add stable ebuild to parent repo
16204 - self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
16205 - self.parent_git_repo.add_all('cat/pkg: version bump to 1')
16206 + self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
16207 + self.parent_git_repo.add_all("cat/pkg: version bump to 1")
16208 # pull changes and remove it from the child repo
16209 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
16210 - self.child_git_repo.remove('cat/pkg/pkg-1.ebuild', msg='cat/pkg: remove 1')
16211 + self.child_git_repo.run(["git", "pull", "origin", "main"])
16212 + self.child_git_repo.remove("cat/pkg/pkg-1.ebuild", msg="cat/pkg: remove 1")
16213 commit = self.child_git_repo.HEAD
16214 self.init_check()
16215 r = self.assertReport(self.check, self.source)
16216 - expected = git_mod.DroppedStableKeywords(['amd64'], commit, pkg=CPV('cat/pkg-1'))
16217 + expected = git_mod.DroppedStableKeywords(["amd64"], commit, pkg=CPV("cat/pkg-1"))
16218 assert r == expected
16219
16220 # git archive failures error out
16221 - with patch('pkgcheck.checks.git.subprocess.Popen') as git_archive:
16222 + with patch("pkgcheck.checks.git.subprocess.Popen") as git_archive:
16223 git_archive.return_value.poll.return_value = -1
16224 - with pytest.raises(PkgcheckUserException, match='failed populating archive repo'):
16225 + with pytest.raises(PkgcheckUserException, match="failed populating archive repo"):
16226 self.assertNoReport(self.check, self.source)
16227
16228 def test_dropped_unstable_keywords(self):
16229 # add stable ebuild to parent repo
16230 - self.parent_repo.create_ebuild('cat/pkg-1', keywords=['~amd64'])
16231 - self.parent_git_repo.add_all('cat/pkg: version bump to 1')
16232 + self.parent_repo.create_ebuild("cat/pkg-1", keywords=["~amd64"])
16233 + self.parent_git_repo.add_all("cat/pkg: version bump to 1")
16234 # pull changes and remove it from the child repo
16235 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
16236 - self.child_git_repo.remove('cat/pkg/pkg-1.ebuild', msg='cat/pkg: remove 1')
16237 + self.child_git_repo.run(["git", "pull", "origin", "main"])
16238 + self.child_git_repo.remove("cat/pkg/pkg-1.ebuild", msg="cat/pkg: remove 1")
16239 commit = self.child_git_repo.HEAD
16240 self.init_check()
16241 r = self.assertReport(self.check, self.source)
16242 - expected = git_mod.DroppedUnstableKeywords(['~amd64'], commit, pkg=CPV('cat/pkg-1'))
16243 + expected = git_mod.DroppedUnstableKeywords(["~amd64"], commit, pkg=CPV("cat/pkg-1"))
16244 assert r == expected
16245
16246 def test_dropped_keywords_inherit_eclass(self):
16247 # add stable ebuild to parent repo
16248 - with open(pjoin(self.parent_git_repo.path, 'eclass/make.eclass'), 'w') as f:
16249 - f.write(':')
16250 - self.parent_git_repo.add_all('make.eclass: initial commit')
16251 - self.parent_repo.create_ebuild('cat/pkg-1', keywords=['~amd64'], data="inherit make")
16252 - self.parent_git_repo.add_all('cat/pkg: version bump to 1')
16253 + with open(pjoin(self.parent_git_repo.path, "eclass/make.eclass"), "w") as f:
16254 + f.write(":")
16255 + self.parent_git_repo.add_all("make.eclass: initial commit")
16256 + self.parent_repo.create_ebuild("cat/pkg-1", keywords=["~amd64"], data="inherit make")
16257 + self.parent_git_repo.add_all("cat/pkg: version bump to 1")
16258 # pull changes and remove it from the child repo
16259 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
16260 - self.child_git_repo.remove('cat/pkg/pkg-1.ebuild', msg='cat/pkg: remove 1')
16261 + self.child_git_repo.run(["git", "pull", "origin", "main"])
16262 + self.child_git_repo.remove("cat/pkg/pkg-1.ebuild", msg="cat/pkg: remove 1")
16263 commit = self.child_git_repo.HEAD
16264 self.init_check()
16265 r = self.assertReport(self.check, self.source)
16266 - expected = git_mod.DroppedUnstableKeywords(['~amd64'], commit, pkg=CPV('cat/pkg-1'))
16267 + expected = git_mod.DroppedUnstableKeywords(["~amd64"], commit, pkg=CPV("cat/pkg-1"))
16268 assert r == expected
16269
16270 def test_rdepend_change(self):
16271 # add pkgs to parent repo
16272 - self.parent_repo.create_ebuild('cat/dep1-0')
16273 - self.parent_git_repo.add_all('cat/dep1: initial import')
16274 - self.parent_repo.create_ebuild('cat/dep2-0')
16275 - self.parent_git_repo.add_all('cat/dep2: initial import')
16276 - self.parent_repo.create_ebuild('newcat/newpkg-1')
16277 - self.parent_git_repo.add_all('newcat/newpkg: initial import')
16278 - self.parent_repo.create_ebuild('newcat/newpkg-2', rdepend="cat/dep1 cat/dep2")
16279 - self.parent_git_repo.add_all('newcat/newpkg: version bump')
16280 + self.parent_repo.create_ebuild("cat/dep1-0")
16281 + self.parent_git_repo.add_all("cat/dep1: initial import")
16282 + self.parent_repo.create_ebuild("cat/dep2-0")
16283 + self.parent_git_repo.add_all("cat/dep2: initial import")
16284 + self.parent_repo.create_ebuild("newcat/newpkg-1")
16285 + self.parent_git_repo.add_all("newcat/newpkg: initial import")
16286 + self.parent_repo.create_ebuild("newcat/newpkg-2", rdepend="cat/dep1 cat/dep2")
16287 + self.parent_git_repo.add_all("newcat/newpkg: version bump")
16288 # pull changes to child repo
16289 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
16290 + self.child_git_repo.run(["git", "pull", "origin", "main"])
16291 # change pkg RDEPEND and commit
16292 - with open(pjoin(self.child_git_repo.path, 'cat/pkg/pkg-0.ebuild'), 'a') as f:
16293 + with open(pjoin(self.child_git_repo.path, "cat/pkg/pkg-0.ebuild"), "a") as f:
16294 f.write('RDEPEND="cat/dep1"\n')
16295 - self.child_git_repo.add_all('cat/pkg: update deps')
16296 + self.child_git_repo.add_all("cat/pkg: update deps")
16297 # change live pkg RDEPEND and commit
16298 - with open(pjoin(self.child_git_repo.path, 'newcat/newpkg/newpkg-1.ebuild'), 'a') as f:
16299 + with open(pjoin(self.child_git_repo.path, "newcat/newpkg/newpkg-1.ebuild"), "a") as f:
16300 f.write('RDEPEND="cat/dep1"\n')
16301 f.write('PROPERTIES="live"\n')
16302 - self.child_git_repo.add_all('newcat/newpkg: update deps')
16303 + self.child_git_repo.add_all("newcat/newpkg: update deps")
16304 # reorder pkg RDEPEND and commit
16305 - with open(pjoin(self.child_git_repo.path, 'newcat/newpkg/newpkg-2.ebuild'), 'a') as f:
16306 + with open(pjoin(self.child_git_repo.path, "newcat/newpkg/newpkg-2.ebuild"), "a") as f:
16307 f.write('RDEPEND="cat/dep2 cat/dep1"\n')
16308 - self.child_git_repo.add_all('newcat/newpkg: reorder deps')
16309 + self.child_git_repo.add_all("newcat/newpkg: reorder deps")
16310 self.init_check()
16311 r = self.assertReport(self.check, self.source)
16312 # only one result is expected since live ebuilds are ignored
16313 - expected = git_mod.RdependChange(pkg=CPV('cat/pkg-0'))
16314 + expected = git_mod.RdependChange(pkg=CPV("cat/pkg-0"))
16315 assert r == expected
16316
16317 def test_missing_slotmove(self):
16318 # add new ebuild to parent repo
16319 - self.parent_repo.create_ebuild('cat/pkg-1', keywords=['~amd64'])
16320 - self.parent_git_repo.add_all('cat/pkg: version bump to 1')
16321 + self.parent_repo.create_ebuild("cat/pkg-1", keywords=["~amd64"])
16322 + self.parent_git_repo.add_all("cat/pkg: version bump to 1")
16323 # pull changes and modify its slot in the child repo
16324 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
16325 - self.child_repo.create_ebuild('cat/pkg-1', keywords=['~amd64'], slot='1')
16326 - self.child_git_repo.add_all('cat/pkg: update SLOT to 1')
16327 + self.child_git_repo.run(["git", "pull", "origin", "main"])
16328 + self.child_repo.create_ebuild("cat/pkg-1", keywords=["~amd64"], slot="1")
16329 + self.child_git_repo.add_all("cat/pkg: update SLOT to 1")
16330 self.init_check()
16331 r = self.assertReport(self.check, self.source)
16332 - expected = git_mod.MissingSlotmove('0', '1', pkg=CPV('cat/pkg-1'))
16333 + expected = git_mod.MissingSlotmove("0", "1", pkg=CPV("cat/pkg-1"))
16334 assert r == expected
16335
16336 # create slot move update and the result goes away
16337 - updates_dir = pjoin(self.child_git_repo.path, 'profiles', 'updates')
16338 + updates_dir = pjoin(self.child_git_repo.path, "profiles", "updates")
16339 os.makedirs(updates_dir, exist_ok=True)
16340 - with open(pjoin(updates_dir, '4Q-2020'), 'w') as f:
16341 - f.write(textwrap.dedent("""\
16342 - slotmove ~cat/foo-0 0 1
16343 - slotmove ~cat/pkg-1 0 1
16344 - """))
16345 + with open(pjoin(updates_dir, "4Q-2020"), "w") as f:
16346 + f.write(
16347 + textwrap.dedent(
16348 + """\
16349 + slotmove ~cat/foo-0 0 1
16350 + slotmove ~cat/pkg-1 0 1
16351 + """
16352 + )
16353 + )
16354 # force repo_config pkg updates jitted attr to be reset
16355 self.init_check()
16356 self.assertNoReport(self.check, self.source)
16357
16358 # git archive failures error out
16359 - with patch('pkgcheck.checks.git.subprocess.Popen') as git_archive:
16360 + with patch("pkgcheck.checks.git.subprocess.Popen") as git_archive:
16361 git_archive.return_value.poll.return_value = -1
16362 - with pytest.raises(PkgcheckUserException, match='failed populating archive repo'):
16363 + with pytest.raises(PkgcheckUserException, match="failed populating archive repo"):
16364 self.assertNoReport(self.check, self.source)
16365
16366 def test_missing_move(self):
16367 # verify ebuild renames at the git level don't trigger
16368 - self.child_repo.create_ebuild('cat/pkg-1')
16369 - self.child_git_repo.run(['git', 'rm', 'cat/pkg/pkg-0.ebuild'])
16370 - self.child_git_repo.add_all('cat/pkg: version bump and remove old')
16371 + self.child_repo.create_ebuild("cat/pkg-1")
16372 + self.child_git_repo.run(["git", "rm", "cat/pkg/pkg-0.ebuild"])
16373 + self.child_git_repo.add_all("cat/pkg: version bump and remove old")
16374 self.init_check()
16375 self.assertNoReport(self.check, self.source)
16376
16377 - self.child_git_repo.move('cat', 'newcat', msg='newcat/pkg: moved pkg')
16378 + self.child_git_repo.move("cat", "newcat", msg="newcat/pkg: moved pkg")
16379 self.init_check()
16380 r = self.assertReport(self.check, self.source)
16381 - expected = git_mod.MissingMove('cat/pkg', 'newcat/pkg', pkg=CPV('newcat/pkg-0'))
16382 + expected = git_mod.MissingMove("cat/pkg", "newcat/pkg", pkg=CPV("newcat/pkg-0"))
16383 assert r == expected
16384
16385 # create package move update and the result goes away
16386 - updates_dir = pjoin(self.child_git_repo.path, 'profiles', 'updates')
16387 + updates_dir = pjoin(self.child_git_repo.path, "profiles", "updates")
16388 os.makedirs(updates_dir, exist_ok=True)
16389 - with open(pjoin(updates_dir, '4Q-2020'), 'w') as f:
16390 - f.write(textwrap.dedent("""\
16391 - move cat/foo newcat/foo
16392 - move cat/pkg newcat/pkg
16393 - """))
16394 + with open(pjoin(updates_dir, "4Q-2020"), "w") as f:
16395 + f.write(
16396 + textwrap.dedent(
16397 + """\
16398 + move cat/foo newcat/foo
16399 + move cat/pkg newcat/pkg
16400 + """
16401 + )
16402 + )
16403 # force repo_config pkg updates jitted attr to be reset
16404 self.init_check()
16405 self.assertNoReport(self.check, self.source)
16406 @@ -594,18 +633,17 @@ class TestGitEclassCommitsCheck(ReportTestCase):
16407
16408 # initialize parent repo
16409 self.parent_git_repo = make_git_repo()
16410 - self.parent_repo = make_repo(
16411 - self.parent_git_repo.path, repo_id='gentoo', arches=['amd64'])
16412 - self.parent_git_repo.add_all('initial commit')
16413 + self.parent_repo = make_repo(self.parent_git_repo.path, repo_id="gentoo", arches=["amd64"])
16414 + self.parent_git_repo.add_all("initial commit")
16415 # create a stub eclass and commit it
16416 - touch(pjoin(self.parent_git_repo.path, 'eclass', 'foo.eclass'))
16417 - self.parent_git_repo.add_all('eclass: add foo eclass')
16418 + touch(pjoin(self.parent_git_repo.path, "eclass", "foo.eclass"))
16419 + self.parent_git_repo.add_all("eclass: add foo eclass")
16420
16421 # initialize child repo
16422 self.child_git_repo = make_git_repo()
16423 - self.child_git_repo.run(['git', 'remote', 'add', 'origin', self.parent_git_repo.path])
16424 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
16425 - self.child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
16426 + self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
16427 + self.child_git_repo.run(["git", "pull", "origin", "main"])
16428 + self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
16429 self.child_repo = make_repo(self.child_git_repo.path)
16430
16431 def init_check(self, options=None, future=0):
16432 @@ -618,35 +656,40 @@ class TestGitEclassCommitsCheck(ReportTestCase):
16433
16434 def _options(self, **kwargs):
16435 args = [
16436 - 'scan', '-q', '--cache-dir', self.cache_dir,
16437 - '--repo', self.child_repo.location, '--commits',
16438 + "scan",
16439 + "-q",
16440 + "--cache-dir",
16441 + self.cache_dir,
16442 + "--repo",
16443 + self.child_repo.location,
16444 + "--commits",
16445 ]
16446 options, _ = self._tool.parse_args(args)
16447 return options
16448
16449 def test_eclass_incorrect_copyright(self):
16450 - line = '# Copyright 1999-2019 Gentoo Authors'
16451 - with open(pjoin(self.child_git_repo.path, 'eclass/foo.eclass'), 'w') as f:
16452 - f.write(f'{line}\n')
16453 - self.child_git_repo.add_all('eclass: update foo')
16454 + line = "# Copyright 1999-2019 Gentoo Authors"
16455 + with open(pjoin(self.child_git_repo.path, "eclass/foo.eclass"), "w") as f:
16456 + f.write(f"{line}\n")
16457 + self.child_git_repo.add_all("eclass: update foo")
16458 self.init_check()
16459 r = self.assertReport(self.check, self.source)
16460 - expected = git_mod.EclassIncorrectCopyright('2019', line, eclass='foo')
16461 + expected = git_mod.EclassIncorrectCopyright("2019", line, eclass="foo")
16462 assert r == expected
16463
16464 # correcting the year results in no report
16465 year = datetime.today().year
16466 - line = f'# Copyright 1999-{year} Gentoo Authors'
16467 - with open(pjoin(self.child_git_repo.path, 'eclass/foo.eclass'), 'w') as f:
16468 - f.write(f'{line}\n')
16469 - self.child_git_repo.add_all('eclass: fix copyright year')
16470 + line = f"# Copyright 1999-{year} Gentoo Authors"
16471 + with open(pjoin(self.child_git_repo.path, "eclass/foo.eclass"), "w") as f:
16472 + f.write(f"{line}\n")
16473 + self.child_git_repo.add_all("eclass: fix copyright year")
16474 self.init_check()
16475 self.assertNoReport(self.check, self.source)
16476
16477 def test_eclass_missing_copyright(self):
16478 """Eclasses missing copyrights entirely are handled by EclassHeaderCheck."""
16479 - with open(pjoin(self.child_git_repo.path, 'eclass/foo.eclass'), 'w') as f:
16480 - f.write('# comment\n')
16481 - self.child_git_repo.add_all('eclass: update foo')
16482 + with open(pjoin(self.child_git_repo.path, "eclass/foo.eclass"), "w") as f:
16483 + f.write("# comment\n")
16484 + self.child_git_repo.add_all("eclass: update foo")
16485 self.init_check()
16486 self.assertNoReport(self.check, self.source)
16487
16488 diff --git a/tests/checks/test_glsa.py b/tests/checks/test_glsa.py
16489 index c3182be1..bec35857 100644
16490 --- a/tests/checks/test_glsa.py
16491 +++ b/tests/checks/test_glsa.py
16492 @@ -31,34 +31,34 @@ class TestVulnerabilitiesCheck(misc.ReportTestCase):
16493 def test_no_glsa_dir(self, tmp_path):
16494 # TODO: switch to using a repo fixture when available
16495 repo_dir = str(tmp_path)
16496 - os.makedirs(pjoin(repo_dir, 'profiles'))
16497 - os.makedirs(pjoin(repo_dir, 'metadata'))
16498 - with open(pjoin(repo_dir, 'profiles', 'repo_name'), 'w') as f:
16499 - f.write('fake\n')
16500 - with open(pjoin(repo_dir, 'metadata', 'layout.conf'), 'w') as f:
16501 - f.write('masters =\n')
16502 + os.makedirs(pjoin(repo_dir, "profiles"))
16503 + os.makedirs(pjoin(repo_dir, "metadata"))
16504 + with open(pjoin(repo_dir, "profiles", "repo_name"), "w") as f:
16505 + f.write("fake\n")
16506 + with open(pjoin(repo_dir, "metadata", "layout.conf"), "w") as f:
16507 + f.write("masters =\n")
16508 repo_config = repo_objs.RepoConfig(location=repo_dir)
16509 repo = repository.UnconfiguredTree(repo_config.location, repo_config=repo_config)
16510 options = arghparse.Namespace(glsa_dir=None, target_repo=repo, gentoo_repo=True)
16511 - with pytest.raises(SkipCheck, match='no available glsa source'):
16512 + with pytest.raises(SkipCheck, match="no available glsa source"):
16513 glsa.GlsaCheck(options)
16514
16515 def test_repo_glsa_dir(self, tmp_path):
16516 # TODO: switch to using a repo fixture when available
16517 repo_dir = str(tmp_path)
16518 - os.makedirs(pjoin(repo_dir, 'profiles'))
16519 - os.makedirs(pjoin(repo_dir, 'metadata', 'glsa'))
16520 - with open(pjoin(repo_dir, 'profiles', 'repo_name'), 'w') as f:
16521 - f.write('fake\n')
16522 - with open(pjoin(repo_dir, 'metadata', 'layout.conf'), 'w') as f:
16523 - f.write('masters =\n')
16524 - with open(pjoin(repo_dir, 'metadata', 'glsa', 'glsa-202010-01.xml'), 'w') as f:
16525 + os.makedirs(pjoin(repo_dir, "profiles"))
16526 + os.makedirs(pjoin(repo_dir, "metadata", "glsa"))
16527 + with open(pjoin(repo_dir, "profiles", "repo_name"), "w") as f:
16528 + f.write("fake\n")
16529 + with open(pjoin(repo_dir, "metadata", "layout.conf"), "w") as f:
16530 + f.write("masters =\n")
16531 + with open(pjoin(repo_dir, "metadata", "glsa", "glsa-202010-01.xml"), "w") as f:
16532 f.write(mk_glsa(("dev-util/diffball", ([], ["~>=0.5-r3"]))))
16533 repo_config = repo_objs.RepoConfig(location=repo_dir)
16534 repo = repository.UnconfiguredTree(repo_config.location, repo_config=repo_config)
16535 options = arghparse.Namespace(glsa_dir=None, target_repo=repo, gentoo_repo=True)
16536 check = glsa.GlsaCheck(options)
16537 - assert 'dev-util/diffball' in check.vulns
16538 + assert "dev-util/diffball" in check.vulns
16539
16540 def test_non_matching(self, check):
16541 self.assertNoReport(check, mk_pkg("0.5.1"))
16542 @@ -67,10 +67,8 @@ class TestVulnerabilitiesCheck(misc.ReportTestCase):
16543 def test_matching(self, check):
16544 r = self.assertReport(check, mk_pkg("0.5-r5"))
16545 assert isinstance(r, glsa.VulnerablePackage)
16546 - assert (
16547 - (r.category, r.package, r.version) ==
16548 - ("dev-util", "diffball", "0.5-r5"))
16549 - assert 'vulnerable via glsa(200611-02)' in str(r)
16550 + assert (r.category, r.package, r.version) == ("dev-util", "diffball", "0.5-r5")
16551 + assert "vulnerable via glsa(200611-02)" in str(r)
16552
16553 # multiple glsa matches
16554 self.assertReports(check, mk_pkg("1.0"))
16555
16556 diff --git a/tests/checks/test_header.py b/tests/checks/test_header.py
16557 index 492c3d8c..e79fdeb1 100644
16558 --- a/tests/checks/test_header.py
16559 +++ b/tests/checks/test_header.py
16560 @@ -21,9 +21,9 @@ class TestEbuildHeaderCheck(misc.ReportTestCase):
16561
16562 def test_good_copyright(self):
16563 good_copyrights = [
16564 - '# Copyright 1999-2019 Gentoo Authors\n',
16565 - '# Copyright 2019 Gentoo Authors\n',
16566 - '# Copyright 2010-2017 Gentoo Authors\n',
16567 + "# Copyright 1999-2019 Gentoo Authors\n",
16568 + "# Copyright 2019 Gentoo Authors\n",
16569 + "# Copyright 2010-2017 Gentoo Authors\n",
16570 ]
16571 for line in good_copyrights:
16572 fake_src = [line, self.check_kls.license_header]
16573 @@ -32,11 +32,11 @@ class TestEbuildHeaderCheck(misc.ReportTestCase):
16574
16575 def test_invalid_copyright(self):
16576 bad_copyrights = [
16577 - '# Copyright (c) 1999-2019 Gentoo Authors\n',
16578 - '# Copyright Gentoo Authors\n',
16579 - '# Gentoo Authors\n',
16580 - '# Here is entirely random text\n',
16581 - '\n',
16582 + "# Copyright (c) 1999-2019 Gentoo Authors\n",
16583 + "# Copyright Gentoo Authors\n",
16584 + "# Gentoo Authors\n",
16585 + "# Here is entirely random text\n",
16586 + "\n",
16587 ]
16588 for line in bad_copyrights:
16589 fake_src = [line, self.check_kls.license_header]
16590 @@ -48,10 +48,10 @@ class TestEbuildHeaderCheck(misc.ReportTestCase):
16591 def test_new_foundation_copyright(self):
16592 """Foundation copyright on new ebuilds triggers the report."""
16593 bad_copyrights = [
16594 - '# Copyright 1999-2019 Gentoo Foundation\n',
16595 - '# Copyright 2019 Gentoo Foundation\n',
16596 - '# Copyright 3125 Gentoo Foundation\n',
16597 - '# Copyright 2010-2021 Gentoo Foundation\n',
16598 + "# Copyright 1999-2019 Gentoo Foundation\n",
16599 + "# Copyright 2019 Gentoo Foundation\n",
16600 + "# Copyright 3125 Gentoo Foundation\n",
16601 + "# Copyright 2010-2021 Gentoo Foundation\n",
16602 ]
16603 for line in bad_copyrights:
16604 fake_src = [line, self.check_kls.license_header]
16605 @@ -63,9 +63,9 @@ class TestEbuildHeaderCheck(misc.ReportTestCase):
16606 def test_old_foundation_copyright(self):
16607 """Foundation copyright on old ebuilds does not trigger false positives."""
16608 good_copyrights = [
16609 - '# Copyright 1999-2018 Gentoo Foundation\n',
16610 - '# Copyright 2016 Gentoo Foundation\n',
16611 - '# Copyright 2010-2017 Gentoo Foundation\n',
16612 + "# Copyright 1999-2018 Gentoo Foundation\n",
16613 + "# Copyright 2016 Gentoo Foundation\n",
16614 + "# Copyright 2010-2017 Gentoo Foundation\n",
16615 ]
16616 for line in good_copyrights:
16617 fake_src = [line, self.check_kls.license_header]
16618 @@ -75,8 +75,8 @@ class TestEbuildHeaderCheck(misc.ReportTestCase):
16619 def test_non_gentoo_authors_copyright_in_gentoo(self):
16620 """Ebuilds in the gentoo repo must use 'Gentoo Authors'."""
16621 bad_copyrights = [
16622 - '# Copyright 1999-2019 D. E. Veloper\n',
16623 - '# Copyright 2019 辣鸡汤\n',
16624 + "# Copyright 1999-2019 D. E. Veloper\n",
16625 + "# Copyright 2019 辣鸡汤\n",
16626 ]
16627 for line in bad_copyrights:
16628 fake_src = [line, self.check_kls.license_header]
16629 @@ -86,23 +86,23 @@ class TestEbuildHeaderCheck(misc.ReportTestCase):
16630 assert line.strip() in str(r)
16631
16632 def test_license_headers(self):
16633 - copyright = '# Copyright 1999-2019 Gentoo Authors\n'
16634 + copyright = "# Copyright 1999-2019 Gentoo Authors\n"
16635 fake_src = [copyright, self.check_kls.license_header]
16636 fake_pkg = self.mk_pkg(lines=fake_src)
16637 self.assertNoReport(self.mk_check(), fake_pkg)
16638
16639 bad_license_headers = [
16640 [],
16641 - [''],
16642 - ['\n'],
16643 - [f'{self.check_kls.license_header} '],
16644 - [f' {self.check_kls.license_header}'],
16645 - ['# Distributed under the terms of the GNU General Public License v3'],
16646 + [""],
16647 + ["\n"],
16648 + [f"{self.check_kls.license_header} "],
16649 + [f" {self.check_kls.license_header}"],
16650 + ["# Distributed under the terms of the GNU General Public License v3"],
16651 ]
16652 for content in bad_license_headers:
16653 fake_src = [copyright] + content
16654 fake_pkg = self.mk_pkg(lines=fake_src)
16655 r = self.assertReport(self.mk_check(), fake_pkg)
16656 assert isinstance(r, header.EbuildInvalidLicenseHeader)
16657 - expected = content[0].strip() if content else 'missing license header'
16658 + expected = content[0].strip() if content else "missing license header"
16659 assert expected in str(r)
16660
16661 diff --git a/tests/checks/test_imlate.py b/tests/checks/test_imlate.py
16662 index 3c0f44f8..624dc90a 100644
16663 --- a/tests/checks/test_imlate.py
16664 +++ b/tests/checks/test_imlate.py
16665 @@ -4,21 +4,25 @@ from snakeoil.cli import arghparse
16666 from .. import misc
16667
16668
16669 -def mk_check(selected_arches=("x86", "ppc", "amd64"), arches=None,
16670 - stable_arches=None, source_arches=None):
16671 +def mk_check(
16672 + selected_arches=("x86", "ppc", "amd64"), arches=None, stable_arches=None, source_arches=None
16673 +):
16674 if arches is None:
16675 arches = selected_arches
16676 if stable_arches is None:
16677 stable_arches = selected_arches
16678 return imlate.ImlateCheck(
16679 arghparse.Namespace(
16680 - selected_arches=selected_arches, arches=arches,
16681 - stable_arches=stable_arches, source_arches=source_arches))
16682 + selected_arches=selected_arches,
16683 + arches=arches,
16684 + stable_arches=stable_arches,
16685 + source_arches=source_arches,
16686 + )
16687 + )
16688
16689
16690 def mk_pkg(ver, keywords="", slot="0"):
16691 - return misc.FakePkg(
16692 - f"dev-util/diffball-{ver}", data={"SLOT": slot, "KEYWORDS": keywords})
16693 + return misc.FakePkg(f"dev-util/diffball-{ver}", data={"SLOT": slot, "KEYWORDS": keywords})
16694
16695
16696 class TestImlateCheck(misc.ReportTestCase):
16697 @@ -26,96 +30,82 @@ class TestImlateCheck(misc.ReportTestCase):
16698 check_kls = imlate.ImlateCheck
16699
16700 def test_all_unstable(self):
16701 - self.assertNoReport(
16702 - mk_check(),
16703 - [mk_pkg(str(x), "~x86 ~amd64") for x in range(10)])
16704 + self.assertNoReport(mk_check(), [mk_pkg(str(x), "~x86 ~amd64") for x in range(10)])
16705
16706 def test_all_stable(self):
16707 - self.assertNoReport(
16708 - mk_check(),
16709 - [mk_pkg("0.9", "amd64 x86")])
16710 + self.assertNoReport(mk_check(), [mk_pkg("0.9", "amd64 x86")])
16711
16712 def test_unselected_arch(self):
16713 - self.assertNoReport(
16714 - mk_check(),
16715 - [mk_pkg("0.9", "~mips amd64")])
16716 + self.assertNoReport(mk_check(), [mk_pkg("0.9", "~mips amd64")])
16717
16718 def test_specified_stable_arches(self):
16719 # pkg doesn't have any unstable arches we care about
16720 - self.assertNoReport(
16721 - mk_check(source_arches=('arm', 'arm64')),
16722 - [mk_pkg("0.9", "~x86 amd64")])
16723 + self.assertNoReport(mk_check(source_arches=("arm", "arm64")), [mk_pkg("0.9", "~x86 amd64")])
16724
16725 # pkg doesn't have any stable arches we care about
16726 - self.assertNoReport(
16727 - mk_check(source_arches=('arm64',)),
16728 - [mk_pkg("0.9", "~x86 amd64")])
16729 + self.assertNoReport(mk_check(source_arches=("arm64",)), [mk_pkg("0.9", "~x86 amd64")])
16730
16731 # only flag arches we care about
16732 r = self.assertReport(
16733 - mk_check(source_arches=('amd64',), selected_arches=('arm64',)),
16734 - [mk_pkg("0.9", "~arm64 ~x86 amd64")])
16735 + mk_check(source_arches=("amd64",), selected_arches=("arm64",)),
16736 + [mk_pkg("0.9", "~arm64 ~x86 amd64")],
16737 + )
16738 assert isinstance(r, imlate.PotentialStable)
16739 assert r.stable == ("amd64",)
16740 assert r.keywords == ("~arm64",)
16741 assert r.version == "0.9"
16742
16743 def test_lagging_keyword(self):
16744 - r = self.assertReport(
16745 - mk_check(),
16746 - [mk_pkg("0.8", "x86 amd64"),
16747 - mk_pkg("0.9", "x86 ~amd64")])
16748 + r = self.assertReport(mk_check(), [mk_pkg("0.8", "x86 amd64"), mk_pkg("0.9", "x86 ~amd64")])
16749 assert isinstance(r, imlate.LaggingStable)
16750 assert r.stable == ("x86",)
16751 assert r.keywords == ("~amd64",)
16752 assert r.version == "0.9"
16753 - assert 'x86' in str(r) and '~amd64' in str(r)
16754 + assert "x86" in str(r) and "~amd64" in str(r)
16755
16756 def test_potential_keyword(self):
16757 - r = self.assertReport(
16758 - mk_check(),
16759 - [mk_pkg("0.9", "~x86 amd64")])
16760 + r = self.assertReport(mk_check(), [mk_pkg("0.9", "~x86 amd64")])
16761 assert isinstance(r, imlate.PotentialStable)
16762 assert r.stable == ("amd64",)
16763 assert r.keywords == ("~x86",)
16764 assert r.version == "0.9"
16765 - assert 'amd64' in str(r) and '~x86' in str(r)
16766 + assert "amd64" in str(r) and "~x86" in str(r)
16767
16768 def test_multiple_unstable_pkgs(self):
16769 r = self.assertReport(
16770 - mk_check(),
16771 - [mk_pkg("0.7", "~x86"),
16772 - mk_pkg("0.8", "~x86"),
16773 - mk_pkg("0.9", "~x86 amd64")])
16774 + mk_check(), [mk_pkg("0.7", "~x86"), mk_pkg("0.8", "~x86"), mk_pkg("0.9", "~x86 amd64")]
16775 + )
16776 assert r.stable == ("amd64",)
16777 assert r.keywords == ("~x86",)
16778 assert r.version == "0.9"
16779
16780 def test_multiple_stable_arches(self):
16781 r = self.assertReport(
16782 - mk_check(),
16783 - [mk_pkg("0.7", "~x86 ~ppc"),
16784 - mk_pkg("0.9", "~x86 ppc amd64")])
16785 + mk_check(), [mk_pkg("0.7", "~x86 ~ppc"), mk_pkg("0.9", "~x86 ppc amd64")]
16786 + )
16787 assert r.stable == ("amd64", "ppc")
16788 assert r.keywords == ("~x86",)
16789 assert r.version == "0.9"
16790
16791 def test_multiple_potential_arches(self):
16792 - r = self.assertReport(
16793 - mk_check(),
16794 - [mk_pkg("0.7", "~x86"),
16795 - mk_pkg("0.9", "~x86 ~ppc amd64")])
16796 + r = self.assertReport(mk_check(), [mk_pkg("0.7", "~x86"), mk_pkg("0.9", "~x86 ~ppc amd64")])
16797 assert r.stable == ("amd64",)
16798 - assert r.keywords == ("~ppc", "~x86",)
16799 + assert r.keywords == (
16800 + "~ppc",
16801 + "~x86",
16802 + )
16803 assert r.version == "0.9"
16804
16805 def test_multiple_lagging_slots(self):
16806 r = self.assertReports(
16807 mk_check(),
16808 - [mk_pkg("0.7", slot="0", keywords="x86 ppc"),
16809 - mk_pkg("0.9", slot="0", keywords="~x86 ppc"),
16810 - mk_pkg("1.0", slot="1", keywords="x86 ppc"),
16811 - mk_pkg("1.2", slot="1", keywords="x86 ~ppc")])
16812 + [
16813 + mk_pkg("0.7", slot="0", keywords="x86 ppc"),
16814 + mk_pkg("0.9", slot="0", keywords="~x86 ppc"),
16815 + mk_pkg("1.0", slot="1", keywords="x86 ppc"),
16816 + mk_pkg("1.2", slot="1", keywords="x86 ~ppc"),
16817 + ],
16818 + )
16819 assert len(r) == 2
16820 assert isinstance(r[0], imlate.LaggingStable)
16821 assert r[0].slot == "0"
16822 @@ -131,8 +121,11 @@ class TestImlateCheck(misc.ReportTestCase):
16823 def test_multiple_potential_slots(self):
16824 r = self.assertReports(
16825 mk_check(),
16826 - [mk_pkg("0.9", slot="0", keywords="x86 ~ppc"),
16827 - mk_pkg("1.2", slot="1", keywords="x86 ~ppc")])
16828 + [
16829 + mk_pkg("0.9", slot="0", keywords="x86 ~ppc"),
16830 + mk_pkg("1.2", slot="1", keywords="x86 ~ppc"),
16831 + ],
16832 + )
16833 assert len(r) == 2
16834 assert isinstance(r[0], imlate.PotentialStable)
16835 assert r[0].slot == "0"
16836 @@ -146,15 +139,17 @@ class TestImlateCheck(misc.ReportTestCase):
16837 assert r[1].version == "1.2"
16838
16839 def test_drop_newer_slot_stables(self):
16840 - selected_arches=("x86", "amd64")
16841 - all_arches=("x86", "amd64", "arm64")
16842 + selected_arches = ("x86", "amd64")
16843 + all_arches = ("x86", "amd64", "arm64")
16844 r = self.assertReport(
16845 mk_check(selected_arches=selected_arches, arches=all_arches),
16846 - [mk_pkg("0.7", "amd64 x86 ~arm64"),
16847 - mk_pkg("0.8", "amd64 ~x86 ~arm64"),
16848 - mk_pkg("0.9", "~amd64 ~x86 arm64")]
16849 + [
16850 + mk_pkg("0.7", "amd64 x86 ~arm64"),
16851 + mk_pkg("0.8", "amd64 ~x86 ~arm64"),
16852 + mk_pkg("0.9", "~amd64 ~x86 arm64"),
16853 + ],
16854 )
16855 assert isinstance(r, imlate.LaggingStable)
16856 - assert r.stable == ('amd64',)
16857 - assert r.keywords == ('~x86',)
16858 - assert r.version == '0.8'
16859 + assert r.stable == ("amd64",)
16860 + assert r.keywords == ("~x86",)
16861 + assert r.version == "0.8"
16862
16863 diff --git a/tests/checks/test_metadata.py b/tests/checks/test_metadata.py
16864 index cc074d93..ee0ac08e 100644
16865 --- a/tests/checks/test_metadata.py
16866 +++ b/tests/checks/test_metadata.py
16867 @@ -30,27 +30,24 @@ class TestDescriptionCheck(misc.ReportTestCase):
16868 self.assertNoReport(self.check, self.mk_pkg("a perfectly written package description"))
16869
16870 def test_bad_descs(self):
16871 - for desc in ('based on eclass',
16872 - 'diffball',
16873 - 'dev-util/diffball',
16874 - 'foon'):
16875 + for desc in ("based on eclass", "diffball", "dev-util/diffball", "foon"):
16876 r = self.assertReport(self.check, self.mk_pkg(desc))
16877 assert isinstance(r, metadata.BadDescription)
16878
16879 def test_desc_length(self):
16880 r = self.assertReport(self.check, self.mk_pkg())
16881 assert isinstance(r, metadata.BadDescription)
16882 - assert 'empty/unset' in str(r)
16883 + assert "empty/unset" in str(r)
16884
16885 - self.assertNoReport(self.check, self.mk_pkg('s' * 80))
16886 - r = self.assertReport(self.check, self.mk_pkg('s' * 81))
16887 + self.assertNoReport(self.check, self.mk_pkg("s" * 80))
16888 + r = self.assertReport(self.check, self.mk_pkg("s" * 81))
16889 assert isinstance(r, metadata.BadDescription)
16890 - assert 'over 80 chars in length' in str(r)
16891 + assert "over 80 chars in length" in str(r)
16892
16893 - self.assertNoReport(self.check, self.mk_pkg('s' * 10))
16894 - r = self.assertReport(self.check, self.mk_pkg('s' * 9))
16895 + self.assertNoReport(self.check, self.mk_pkg("s" * 10))
16896 + r = self.assertReport(self.check, self.mk_pkg("s" * 9))
16897 assert isinstance(r, metadata.BadDescription)
16898 - assert 'under 10 chars in length' in str(r)
16899 + assert "under 10 chars in length" in str(r)
16900
16901
16902 class TestHomepageCheck(misc.ReportTestCase):
16903 @@ -58,7 +55,7 @@ class TestHomepageCheck(misc.ReportTestCase):
16904 check_kls = metadata.HomepageCheck
16905 check = metadata.HomepageCheck(None)
16906
16907 - def mk_pkg(self, homepage='', cpvstr='dev-util/diffball-0.7.1'):
16908 + def mk_pkg(self, homepage="", cpvstr="dev-util/diffball-0.7.1"):
16909 return misc.FakePkg(cpvstr, data={"HOMEPAGE": homepage})
16910
16911 def test_regular(self):
16912 @@ -72,26 +69,26 @@ class TestHomepageCheck(misc.ReportTestCase):
16913 def test_unset(self):
16914 r = self.assertReport(self.check, self.mk_pkg())
16915 isinstance(r, metadata.BadHomepage)
16916 - assert 'empty/unset' in str(r)
16917 + assert "empty/unset" in str(r)
16918
16919 # categories of pkgs allowed to skip HOMEPAGE
16920 for cat in self.check_kls.missing_categories:
16921 - self.assertNoReport(self.check, self.mk_pkg(cpvstr=f'{cat}/foo-0'))
16922 + self.assertNoReport(self.check, self.mk_pkg(cpvstr=f"{cat}/foo-0"))
16923
16924 def test_no_protocol(self):
16925 - r = self.assertReport(self.check, self.mk_pkg('foobar.com'))
16926 + r = self.assertReport(self.check, self.mk_pkg("foobar.com"))
16927 isinstance(r, metadata.BadHomepage)
16928 - assert 'lacks protocol' in str(r)
16929 + assert "lacks protocol" in str(r)
16930
16931 def test_unsupported_protocol(self):
16932 - r = self.assertReport(self.check, self.mk_pkg('htp://foobar.com'))
16933 + r = self.assertReport(self.check, self.mk_pkg("htp://foobar.com"))
16934 isinstance(r, metadata.BadHomepage)
16935 assert "uses unsupported protocol 'htp'" in str(r)
16936
16937 def test_unspecific_site(self):
16938 - for suffix in ('', '/'):
16939 - for site in ('https://www.gentoo.org', 'https://gentoo.org'):
16940 - r = self.assertReport(self.check, self.mk_pkg(f'{site}{suffix}'))
16941 + for suffix in ("", "/"):
16942 + for site in ("https://www.gentoo.org", "https://gentoo.org"):
16943 + r = self.assertReport(self.check, self.mk_pkg(f"{site}{suffix}"))
16944 isinstance(r, metadata.BadHomepage)
16945 assert "unspecific HOMEPAGE" in str(r)
16946
16947 @@ -104,27 +101,30 @@ class TestHomepageCheck(misc.ReportTestCase):
16948
16949
16950 class IUSE_Options(misc.Tmpdir):
16951 -
16952 def get_options(self, properties=(), restrict=(), **kwargs):
16953 repo_base = tempfile.mkdtemp(dir=self.dir)
16954 - base = pjoin(repo_base, 'profiles')
16955 + base = pjoin(repo_base, "profiles")
16956 os.mkdir(base)
16957 - with open(pjoin(base, "arch.list"), 'w') as file:
16958 + with open(pjoin(base, "arch.list"), "w") as file:
16959 file.write("\n".join(kwargs.pop("arches", ("x86", "ppc", "amd64", "amd64-fbsd"))))
16960 with open(pjoin(base, "use.desc"), "w") as file:
16961 file.write("\n".join(f"{x} - {x}" for x in kwargs.pop("use_desc", ("foo", "bar"))))
16962 - with open(pjoin(base, 'repo_name'), 'w') as file:
16963 - file.write(kwargs.pop('repo_name', 'monkeys'))
16964 - os.mkdir(pjoin(repo_base, 'metadata'))
16965 - with open(pjoin(repo_base, 'metadata', 'layout.conf'), 'w') as f:
16966 - f.write(textwrap.dedent(f"""\
16967 - masters =
16968 - properties-allowed = {' '.join(properties)}
16969 - restrict-allowed = {' '.join(restrict)}
16970 - """))
16971 - kwargs['target_repo'] = repository.UnconfiguredTree(repo_base)
16972 - kwargs.setdefault('verbosity', 0)
16973 - kwargs.setdefault('cache', {'git': False})
16974 + with open(pjoin(base, "repo_name"), "w") as file:
16975 + file.write(kwargs.pop("repo_name", "monkeys"))
16976 + os.mkdir(pjoin(repo_base, "metadata"))
16977 + with open(pjoin(repo_base, "metadata", "layout.conf"), "w") as f:
16978 + f.write(
16979 + textwrap.dedent(
16980 + f"""\
16981 + masters =
16982 + properties-allowed = {' '.join(properties)}
16983 + restrict-allowed = {' '.join(restrict)}
16984 + """
16985 + )
16986 + )
16987 + kwargs["target_repo"] = repository.UnconfiguredTree(repo_base)
16988 + kwargs.setdefault("verbosity", 0)
16989 + kwargs.setdefault("cache", {"git": False})
16990 return arghparse.Namespace(**kwargs)
16991
16992
16993 @@ -135,21 +135,21 @@ class TestKeywordsCheck(IUSE_Options, misc.ReportTestCase):
16994 @pytest.fixture
16995 def check(self):
16996 pkgs = (
16997 - FakePkg('dev-libs/foo-0', keywords=('amd64', '~x86')),
16998 - FakePkg('dev-libs/foo-1', keywords=('-*', 'ppc')),
16999 - FakePkg('dev-libs/bar-2', keywords=()),
17000 + FakePkg("dev-libs/foo-0", keywords=("amd64", "~x86")),
17001 + FakePkg("dev-libs/foo-1", keywords=("-*", "ppc")),
17002 + FakePkg("dev-libs/bar-2", keywords=()),
17003 )
17004 search_repo = FakeRepo(pkgs=pkgs)
17005 options = self.get_options(search_repo=search_repo, gentoo_repo=False)
17006
17007 kwargs = {
17008 - 'use_addon': addons.UseAddon(options),
17009 - 'keywords_addon': addons.KeywordsAddon(options),
17010 + "use_addon": addons.UseAddon(options),
17011 + "keywords_addon": addons.KeywordsAddon(options),
17012 }
17013 return metadata.KeywordsCheck(options, **kwargs)
17014
17015 - def mk_pkg(self, keywords='', cpv='dev-util/diffball-0.7.1', rdepend=''):
17016 - return misc.FakePkg(cpv, data={'KEYWORDS': keywords, 'RDEPEND': rdepend})
17017 + def mk_pkg(self, keywords="", cpv="dev-util/diffball-0.7.1", rdepend=""):
17018 + return misc.FakePkg(cpv, data={"KEYWORDS": keywords, "RDEPEND": rdepend})
17019
17020 def test_no_keywords(self, check):
17021 self.assertNoReport(check, self.mk_pkg())
17022 @@ -173,23 +173,23 @@ class TestKeywordsCheck(IUSE_Options, misc.ReportTestCase):
17023 # unknown keyword
17024 r = self.assertReport(check, self.mk_pkg("foo"))
17025 assert isinstance(r, metadata.UnknownKeywords)
17026 - assert r.keywords == ('foo',)
17027 + assert r.keywords == ("foo",)
17028 assert "unknown KEYWORDS: 'foo'" in str(r)
17029
17030 # check that * and ~* are flagged in gentoo repo
17031 - options = self.get_options(repo_name='gentoo', gentoo_repo=True)
17032 + options = self.get_options(repo_name="gentoo", gentoo_repo=True)
17033 kwargs = {
17034 - 'use_addon': addons.UseAddon(options),
17035 - 'keywords_addon': addons.KeywordsAddon(options),
17036 + "use_addon": addons.UseAddon(options),
17037 + "keywords_addon": addons.KeywordsAddon(options),
17038 }
17039 check = metadata.KeywordsCheck(options, **kwargs)
17040 r = self.assertReport(check, self.mk_pkg("*"))
17041 assert isinstance(r, metadata.UnknownKeywords)
17042 - assert r.keywords == ('*',)
17043 + assert r.keywords == ("*",)
17044 assert "unknown KEYWORDS: '*'" in str(r)
17045 r = self.assertReport(check, self.mk_pkg("~*"))
17046 assert isinstance(r, metadata.UnknownKeywords)
17047 - assert r.keywords == ('~*',)
17048 + assert r.keywords == ("~*",)
17049 assert "unknown KEYWORDS: '~*'" in str(r)
17050
17051 def test_overlapping_keywords(self, check):
17052 @@ -214,78 +214,78 @@ class TestKeywordsCheck(IUSE_Options, misc.ReportTestCase):
17053 # single duplicate
17054 r = self.assertReport(check, self.mk_pkg("amd64 amd64"))
17055 assert isinstance(r, metadata.DuplicateKeywords)
17056 - assert r.keywords == ('amd64',)
17057 - assert 'duplicate KEYWORDS: amd64' in str(r)
17058 + assert r.keywords == ("amd64",)
17059 + assert "duplicate KEYWORDS: amd64" in str(r)
17060
17061 # multiple duplicates
17062 r = self.assertReport(check, self.mk_pkg("-* -* amd64 amd64 ~x86 ~x86"))
17063 assert isinstance(r, metadata.DuplicateKeywords)
17064 - assert r.keywords == ('-*', 'amd64', '~x86')
17065 + assert r.keywords == ("-*", "amd64", "~x86")
17066
17067 def test_unsorted_keywords(self, check):
17068 # regular keywords
17069 - self.assertNoReport(check, self.mk_pkg('-* ~amd64'))
17070 + self.assertNoReport(check, self.mk_pkg("-* ~amd64"))
17071
17072 # prefix keywords come after regular keywords
17073 - self.assertNoReport(check, self.mk_pkg('~amd64 ppc ~x86 ~amd64-fbsd'))
17074 + self.assertNoReport(check, self.mk_pkg("~amd64 ppc ~x86 ~amd64-fbsd"))
17075
17076 # non-verbose mode doesn't show sorted keywords
17077 - r = self.assertReport(check, self.mk_pkg('~amd64 -*'))
17078 + r = self.assertReport(check, self.mk_pkg("~amd64 -*"))
17079 assert isinstance(r, metadata.UnsortedKeywords)
17080 - assert r.keywords == ('~amd64', '-*')
17081 + assert r.keywords == ("~amd64", "-*")
17082 assert r.sorted_keywords == ()
17083 - assert 'unsorted KEYWORDS: ~amd64, -*' in str(r)
17084 + assert "unsorted KEYWORDS: ~amd64, -*" in str(r)
17085
17086 # create a check instance with verbose mode enabled
17087 options = self.get_options(gentoo_repo=False, verbosity=1)
17088 kwargs = {
17089 - 'use_addon': addons.UseAddon(options),
17090 - 'keywords_addon': addons.KeywordsAddon(options),
17091 + "use_addon": addons.UseAddon(options),
17092 + "keywords_addon": addons.KeywordsAddon(options),
17093 }
17094 check = metadata.KeywordsCheck(options, **kwargs)
17095
17096 # masks should come before regular keywords
17097 - r = self.assertReport(check, self.mk_pkg('~amd64 -*'))
17098 + r = self.assertReport(check, self.mk_pkg("~amd64 -*"))
17099 assert isinstance(r, metadata.UnsortedKeywords)
17100 - assert r.keywords == ('~amd64', '-*')
17101 - assert r.sorted_keywords == ('-*', '~amd64')
17102 - assert '\n\tunsorted KEYWORDS: ~amd64, -*\n\tsorted KEYWORDS: -*, ~amd64' in str(r)
17103 + assert r.keywords == ("~amd64", "-*")
17104 + assert r.sorted_keywords == ("-*", "~amd64")
17105 + assert "\n\tunsorted KEYWORDS: ~amd64, -*\n\tsorted KEYWORDS: -*, ~amd64" in str(r)
17106
17107 # keywords should be sorted alphabetically by arch
17108 - r = self.assertReport(check, self.mk_pkg('ppc ~amd64'))
17109 + r = self.assertReport(check, self.mk_pkg("ppc ~amd64"))
17110 assert isinstance(r, metadata.UnsortedKeywords)
17111 - assert r.keywords == ('ppc', '~amd64')
17112 - assert r.sorted_keywords == ('~amd64', 'ppc')
17113 - assert '\n\tunsorted KEYWORDS: ppc, ~amd64\n\tsorted KEYWORDS: ~amd64, ppc' in str(r)
17114 + assert r.keywords == ("ppc", "~amd64")
17115 + assert r.sorted_keywords == ("~amd64", "ppc")
17116 + assert "\n\tunsorted KEYWORDS: ppc, ~amd64\n\tsorted KEYWORDS: ~amd64, ppc" in str(r)
17117
17118 # prefix keywords should come after regular keywords
17119 - r = self.assertReport(check, self.mk_pkg('~amd64 ~amd64-fbsd ppc ~x86'))
17120 + r = self.assertReport(check, self.mk_pkg("~amd64 ~amd64-fbsd ppc ~x86"))
17121 assert isinstance(r, metadata.UnsortedKeywords)
17122 - assert r.keywords == ('~amd64', '~amd64-fbsd', 'ppc', '~x86')
17123 - assert r.sorted_keywords == ('~amd64', 'ppc', '~x86', '~amd64-fbsd')
17124 + assert r.keywords == ("~amd64", "~amd64-fbsd", "ppc", "~x86")
17125 + assert r.sorted_keywords == ("~amd64", "ppc", "~x86", "~amd64-fbsd")
17126
17127 def test_missing_virtual_keywords(self, check):
17128 # non-virtuals don't trigger
17129 - pkg = self.mk_pkg(cpv='dev-util/foo-0', rdepend='=dev-libs/foo-0')
17130 + pkg = self.mk_pkg(cpv="dev-util/foo-0", rdepend="=dev-libs/foo-0")
17131 self.assertNoReport(check, pkg)
17132
17133 # matching pkg with no keywords
17134 - pkg = self.mk_pkg(cpv='virtual/foo-0', rdepend='dev-libs/bar')
17135 + pkg = self.mk_pkg(cpv="virtual/foo-0", rdepend="dev-libs/bar")
17136 self.assertNoReport(check, pkg)
17137
17138 # single pkg match
17139 - pkg = self.mk_pkg(cpv='virtual/foo-0', rdepend='=dev-libs/foo-0')
17140 + pkg = self.mk_pkg(cpv="virtual/foo-0", rdepend="=dev-libs/foo-0")
17141 r = self.assertReport(check, pkg)
17142 assert isinstance(r, metadata.VirtualKeywordsUpdate)
17143 - assert r.keywords == ('amd64', '~x86')
17144 - assert 'KEYWORDS updates available: amd64, ~x86' in str(r)
17145 + assert r.keywords == ("amd64", "~x86")
17146 + assert "KEYWORDS updates available: amd64, ~x86" in str(r)
17147
17148 # multiple pkg match
17149 - pkg = self.mk_pkg(cpv='virtual/foo-0', rdepend='dev-libs/foo')
17150 + pkg = self.mk_pkg(cpv="virtual/foo-0", rdepend="dev-libs/foo")
17151 r = self.assertReport(check, pkg)
17152 assert isinstance(r, metadata.VirtualKeywordsUpdate)
17153 - assert r.keywords == ('amd64', 'ppc', '~x86')
17154 - assert 'KEYWORDS updates available: amd64, ppc, ~x86' in str(r)
17155 + assert r.keywords == ("amd64", "ppc", "~x86")
17156 + assert "KEYWORDS updates available: amd64, ppc, ~x86" in str(r)
17157
17158
17159 class TestIuseCheck(IUSE_Options, misc.ReportTestCase):
17160 @@ -298,28 +298,28 @@ class TestIuseCheck(IUSE_Options, misc.ReportTestCase):
17161 use_addon = addons.UseAddon(options)
17162 return self.check_kls(options, use_addon=use_addon)
17163
17164 - def mk_pkg(self, iuse=''):
17165 - return misc.FakePkg('dev-util/diffball-0.7.1', data={'IUSE': iuse, 'EAPI': '1'})
17166 + def mk_pkg(self, iuse=""):
17167 + return misc.FakePkg("dev-util/diffball-0.7.1", data={"IUSE": iuse, "EAPI": "1"})
17168
17169 def test_known_iuse(self, check):
17170 - self.assertNoReport(check, self.mk_pkg('foo bar'))
17171 + self.assertNoReport(check, self.mk_pkg("foo bar"))
17172
17173 def test_unknown_iuse(self, check):
17174 - r = self.assertReport(check, self.mk_pkg('foo dar'))
17175 + r = self.assertReport(check, self.mk_pkg("foo dar"))
17176 assert isinstance(r, metadata.UnknownUseFlags)
17177 - assert r.flags == ('dar',)
17178 - assert 'dar' in str(r)
17179 + assert r.flags == ("dar",)
17180 + assert "dar" in str(r)
17181
17182 def test_arch_iuse(self, check):
17183 # arch flags must _not_ be in IUSE
17184 - r = self.assertReport(check, self.mk_pkg('x86'))
17185 + r = self.assertReport(check, self.mk_pkg("x86"))
17186 assert isinstance(r, metadata.UnknownUseFlags)
17187 - assert r.flags == ('x86',)
17188 - assert 'x86' in str(r)
17189 + assert r.flags == ("x86",)
17190 + assert "x86" in str(r)
17191
17192 def test_invalid_iuse(self, check):
17193 - for flag in ('+', '-', '@', '_'):
17194 - r = self.assertReport(check, self.mk_pkg(f'foo {flag}'))
17195 + for flag in ("+", "-", "@", "_"):
17196 + r = self.assertReport(check, self.mk_pkg(f"foo {flag}"))
17197 assert isinstance(r, metadata.InvalidUseFlags)
17198 assert r.flags == (flag,)
17199 assert flag in str(r)
17200 @@ -331,12 +331,12 @@ class TestEapiCheck(misc.ReportTestCase, misc.Tmpdir):
17201
17202 def mk_check(self, deprecated=(), banned=()):
17203 # TODO: switch to using a repo fixture when available
17204 - os.makedirs(pjoin(self.dir, 'profiles'))
17205 - os.makedirs(pjoin(self.dir, 'metadata'))
17206 - with open(pjoin(self.dir, 'profiles', 'repo_name'), 'w') as f:
17207 - f.write('fake\n')
17208 - with open(pjoin(self.dir, 'metadata', 'layout.conf'), 'w') as f:
17209 - f.write('masters =\n')
17210 + os.makedirs(pjoin(self.dir, "profiles"))
17211 + os.makedirs(pjoin(self.dir, "metadata"))
17212 + with open(pjoin(self.dir, "profiles", "repo_name"), "w") as f:
17213 + f.write("fake\n")
17214 + with open(pjoin(self.dir, "metadata", "layout.conf"), "w") as f:
17215 + f.write("masters =\n")
17216 f.write(f"eapis-deprecated = {' '.join(deprecated)}\n")
17217 f.write(f"eapis-banned = {' '.join(banned)}\n")
17218 repo_config = repo_objs.RepoConfig(location=self.dir)
17219 @@ -345,7 +345,7 @@ class TestEapiCheck(misc.ReportTestCase, misc.Tmpdir):
17220 return self.check_kls(options, eclass_addon=addons.eclass.EclassAddon(options))
17221
17222 def mk_pkg(self, eapi):
17223 - return misc.FakePkg('dev-util/diffball-2.7.1', data={'EAPI': eapi})
17224 + return misc.FakePkg("dev-util/diffball-2.7.1", data={"EAPI": eapi})
17225
17226 def test_repo_with_no_settings(self):
17227 check = self.mk_check()
17228 @@ -353,29 +353,35 @@ class TestEapiCheck(misc.ReportTestCase, misc.Tmpdir):
17229 self.assertNoReport(check, self.mk_pkg(eapi=eapi_str))
17230
17231 def test_latest_eapi(self):
17232 - check = self.mk_check(deprecated=('0', '2', '4', '5'), banned=('1', '3',))
17233 + check = self.mk_check(
17234 + deprecated=("0", "2", "4", "5"),
17235 + banned=(
17236 + "1",
17237 + "3",
17238 + ),
17239 + )
17240 latest_eapi = list(eapi.EAPI.known_eapis)[-1]
17241 self.assertNoReport(check, self.mk_pkg(eapi=latest_eapi))
17242
17243 def test_deprecated_eapi(self):
17244 - deprecated = ('0', '2', '4', '5')
17245 - banned = ('1', '3')
17246 + deprecated = ("0", "2", "4", "5")
17247 + banned = ("1", "3")
17248 check = self.mk_check(deprecated=deprecated, banned=banned)
17249 for eapi_str in deprecated:
17250 r = self.assertReport(check, self.mk_pkg(eapi=eapi_str))
17251 assert isinstance(r, metadata.DeprecatedEapi)
17252 assert r.eapi == eapi_str
17253 - assert f'uses deprecated EAPI {eapi_str}' in str(r)
17254 + assert f"uses deprecated EAPI {eapi_str}" in str(r)
17255
17256 def test_banned_eapi(self):
17257 - deprecated = ('0', '2', '4', '5')
17258 - banned = ('1', '3')
17259 + deprecated = ("0", "2", "4", "5")
17260 + banned = ("1", "3")
17261 check = self.mk_check(deprecated=deprecated, banned=banned)
17262 for eapi_str in banned:
17263 r = self.assertReport(check, self.mk_pkg(eapi=eapi_str))
17264 assert isinstance(r, metadata.BannedEapi)
17265 assert r.eapi == eapi_str
17266 - assert f'uses banned EAPI {eapi_str}' in str(r)
17267 + assert f"uses banned EAPI {eapi_str}" in str(r)
17268
17269
17270 class TestSourcingCheck(misc.ReportTestCase, misc.Tmpdir):
17271 @@ -387,19 +393,19 @@ class TestSourcingCheck(misc.ReportTestCase, misc.Tmpdir):
17272 # TODO: switch to using a repo fixture when available
17273 repo_dir = pjoin(self.dir, str(self._repo_id))
17274 self._repo_id += 1
17275 - os.makedirs(pjoin(repo_dir, 'profiles'))
17276 - os.makedirs(pjoin(repo_dir, 'metadata'))
17277 - with open(pjoin(repo_dir, 'profiles', 'repo_name'), 'w') as f:
17278 - f.write('fake\n')
17279 - with open(pjoin(repo_dir, 'metadata', 'layout.conf'), 'w') as f:
17280 - f.write('masters =\n')
17281 + os.makedirs(pjoin(repo_dir, "profiles"))
17282 + os.makedirs(pjoin(repo_dir, "metadata"))
17283 + with open(pjoin(repo_dir, "profiles", "repo_name"), "w") as f:
17284 + f.write("fake\n")
17285 + with open(pjoin(repo_dir, "metadata", "layout.conf"), "w") as f:
17286 + f.write("masters =\n")
17287 repo_config = repo_objs.RepoConfig(location=repo_dir)
17288 self.repo = repository.UnconfiguredTree(repo_config.location, repo_config=repo_config)
17289 options = arghparse.Namespace(target_repo=self.repo, verbosity=False)
17290 return self.check_kls(options)
17291
17292 def mk_pkg(self, eapi):
17293 - return misc.FakePkg('dev-util/diffball-2.7.1', data={'EAPI': eapi})
17294 + return misc.FakePkg("dev-util/diffball-2.7.1", data={"EAPI": eapi})
17295
17296 def test_repo_with_no_settings(self):
17297 check = self.mk_check()
17298 @@ -407,51 +413,43 @@ class TestSourcingCheck(misc.ReportTestCase, misc.Tmpdir):
17299 self.assertNoReport(check, self.mk_pkg(eapi=eapi_str))
17300
17301 def test_unknown_eapis(self):
17302 - for eapi in ('blah', '9999'):
17303 + for eapi in ("blah", "9999"):
17304 check = self.mk_check()
17305 - pkg_path = pjoin(self.repo.location, 'dev-util', 'foo')
17306 + pkg_path = pjoin(self.repo.location, "dev-util", "foo")
17307 os.makedirs(pkg_path)
17308 - with open(pjoin(pkg_path, 'foo-0.ebuild'), 'w') as f:
17309 - f.write(textwrap.dedent(f"""\
17310 - EAPI={eapi}
17311 - """))
17312 + with open(pjoin(pkg_path, "foo-0.ebuild"), "w") as f:
17313 + f.write(f"EAPI={eapi}\n")
17314 r = self.assertReport(check, self.repo)
17315 assert isinstance(r, metadata.InvalidEapi)
17316 assert f"EAPI '{eapi}' is not supported" in str(r)
17317
17318 def test_invalid_eapis(self):
17319 - for eapi in ('invalid!', '${EAPI}'):
17320 + for eapi in ("invalid!", "${EAPI}"):
17321 check = self.mk_check()
17322 - pkg_path = pjoin(self.repo.location, 'dev-util', 'foo')
17323 + pkg_path = pjoin(self.repo.location, "dev-util", "foo")
17324 os.makedirs(pkg_path)
17325 - with open(pjoin(pkg_path, 'foo-0.ebuild'), 'w') as f:
17326 - f.write(textwrap.dedent(f"""\
17327 - EAPI="{eapi}"
17328 - """))
17329 + with open(pjoin(pkg_path, "foo-0.ebuild"), "w") as f:
17330 + f.write(f"EAPI={eapi}\n")
17331 r = self.assertReport(check, self.repo)
17332 assert isinstance(r, metadata.InvalidEapi)
17333 assert f"invalid EAPI '{eapi}'" in str(r)
17334
17335 def test_sourcing_error(self):
17336 check = self.mk_check()
17337 - pkg_path = pjoin(self.repo.location, 'dev-util', 'foo')
17338 + pkg_path = pjoin(self.repo.location, "dev-util", "foo")
17339 os.makedirs(pkg_path)
17340 - with open(pjoin(pkg_path, 'foo-0.ebuild'), 'w') as f:
17341 - f.write(textwrap.dedent("""\
17342 - foo
17343 - """))
17344 + with open(pjoin(pkg_path, "foo-0.ebuild"), "w") as f:
17345 + f.write("foo\n")
17346 r = self.assertReport(check, self.repo)
17347 assert isinstance(r, metadata.SourcingError)
17348
17349 def test_invalid_slots(self):
17350 - for slot in ('?', '0/1'):
17351 + for slot in ("?", "0/1"):
17352 check = self.mk_check()
17353 - pkg_path = pjoin(self.repo.location, 'dev-util', 'foo')
17354 + pkg_path = pjoin(self.repo.location, "dev-util", "foo")
17355 os.makedirs(pkg_path)
17356 - with open(pjoin(pkg_path, 'foo-0.ebuild'), 'w') as f:
17357 - f.write(textwrap.dedent(f"""\
17358 - SLOT="{slot}"
17359 - """))
17360 + with open(pjoin(pkg_path, "foo-0.ebuild"), "w") as f:
17361 + f.write(f"""SLOT="{slot}"\n""")
17362 r = self.assertReport(check, self.repo)
17363 assert isinstance(r, metadata.InvalidSlot)
17364 assert f"invalid SLOT: '{slot}'" in str(r)
17365 @@ -467,19 +465,26 @@ class TestRequiredUseCheck(IUSE_Options, misc.ReportTestCase):
17366
17367 def mk_check(self, masks=(), verbosity=1, profiles=None):
17368 if profiles is None:
17369 - profiles = {'x86': [misc.FakeProfile(name='default/linux/x86', masks=masks)]}
17370 + profiles = {"x86": [misc.FakeProfile(name="default/linux/x86", masks=masks)]}
17371 options = self.get_options(verbosity=verbosity)
17372 use_addon = addons.UseAddon(options)
17373 check = self.check_kls(options, use_addon=use_addon, profile_addon=profiles)
17374 return check
17375
17376 - def mk_pkg(self, cpvstr="dev-util/diffball-0.7.1", eapi="4", iuse="",
17377 - required_use="", keywords="~amd64 x86"):
17378 + def mk_pkg(
17379 + self,
17380 + cpvstr="dev-util/diffball-0.7.1",
17381 + eapi="4",
17382 + iuse="",
17383 + required_use="",
17384 + keywords="~amd64 x86",
17385 + ):
17386 return FakePkg(
17387 cpvstr,
17388 eapi=eapi,
17389 iuse=iuse.split(),
17390 - data={"REQUIRED_USE": required_use, "KEYWORDS": keywords})
17391 + data={"REQUIRED_USE": required_use, "KEYWORDS": keywords},
17392 + )
17393
17394 def test_unsupported_eapis(self, check):
17395 for eapi_str, eapi_obj in eapi.EAPI.known_eapis.items():
17396 @@ -489,9 +494,10 @@ class TestRequiredUseCheck(IUSE_Options, misc.ReportTestCase):
17397
17398 def test_multireport_verbosity(self):
17399 profiles = {
17400 - 'x86': [
17401 - misc.FakeProfile(name='default/linux/x86', masks=()),
17402 - misc.FakeProfile(name='default/linux/x86/foo', masks=())]
17403 + "x86": [
17404 + misc.FakeProfile(name="default/linux/x86", masks=()),
17405 + misc.FakeProfile(name="default/linux/x86/foo", masks=()),
17406 + ]
17407 }
17408 # non-verbose mode should only one failure per node
17409 check = self.mk_check(verbosity=0, profiles=profiles)
17410 @@ -516,7 +522,9 @@ class TestRequiredUseCheck(IUSE_Options, misc.ReportTestCase):
17411
17412 # only supported in >= EAPI 5
17413 self.assertReport(check, self.mk_pkg(iuse="foo bar", required_use="?? ( foo bar )"))
17414 - self.assertNoReport(check, self.mk_pkg(eapi="5", iuse="foo bar", required_use="?? ( foo bar )"))
17415 + self.assertNoReport(
17416 + check, self.mk_pkg(eapi="5", iuse="foo bar", required_use="?? ( foo bar )")
17417 + )
17418
17419 def test_unstated_iuse(self, check):
17420 r = self.assertReport(check, self.mk_pkg(required_use="foo? ( blah )"))
17421 @@ -534,25 +542,34 @@ class TestRequiredUseCheck(IUSE_Options, misc.ReportTestCase):
17422
17423 # pkgs masked by the related profile aren't checked
17424 self.assertNoReport(
17425 - self.mk_check(masks=('>=dev-util/diffball-8.0',)),
17426 - self.mk_pkg(cpvstr="dev-util/diffball-8.0", iuse="foo bar", required_use="bar"))
17427 + self.mk_check(masks=(">=dev-util/diffball-8.0",)),
17428 + self.mk_pkg(cpvstr="dev-util/diffball-8.0", iuse="foo bar", required_use="bar"),
17429 + )
17430
17431 # unsatisfied REQUIRED_USE
17432 r = self.assertReport(check, self.mk_pkg(iuse="foo bar", required_use="bar"))
17433 assert isinstance(r, metadata.RequiredUseDefaults)
17434 - assert r.keyword == 'x86'
17435 - assert r.profile == 'default/linux/x86'
17436 + assert r.keyword == "x86"
17437 + assert r.profile == "default/linux/x86"
17438 assert r.use == ()
17439 - assert str(r.required_use) == 'bar'
17440 + assert str(r.required_use) == "bar"
17441
17442 # at-most-one-of
17443 - self.assertNoReport(check, self.mk_pkg(eapi="5", iuse="foo bar", required_use="?? ( foo bar )"))
17444 - self.assertNoReport(check, self.mk_pkg(eapi="5", iuse="+foo bar", required_use="?? ( foo bar )"))
17445 - self.assertNoReport(check, self.mk_pkg(eapi="5", iuse="foo +bar", required_use="?? ( foo bar )"))
17446 - r = self.assertReport(check, self.mk_pkg(eapi="5", iuse="+foo +bar", required_use="?? ( foo bar )"))
17447 + self.assertNoReport(
17448 + check, self.mk_pkg(eapi="5", iuse="foo bar", required_use="?? ( foo bar )")
17449 + )
17450 + self.assertNoReport(
17451 + check, self.mk_pkg(eapi="5", iuse="+foo bar", required_use="?? ( foo bar )")
17452 + )
17453 + self.assertNoReport(
17454 + check, self.mk_pkg(eapi="5", iuse="foo +bar", required_use="?? ( foo bar )")
17455 + )
17456 + r = self.assertReport(
17457 + check, self.mk_pkg(eapi="5", iuse="+foo +bar", required_use="?? ( foo bar )")
17458 + )
17459 assert isinstance(r, metadata.RequiredUseDefaults)
17460 - assert r.use == ('bar', 'foo')
17461 - assert str(r.required_use) == 'at-most-one-of ( foo bar )'
17462 + assert r.use == ("bar", "foo")
17463 + assert str(r.required_use) == "at-most-one-of ( foo bar )"
17464
17465 # exactly-one-of
17466 self.assertNoReport(check, self.mk_pkg(iuse="+foo bar", required_use="^^ ( foo bar )"))
17467 @@ -560,35 +577,48 @@ class TestRequiredUseCheck(IUSE_Options, misc.ReportTestCase):
17468 self.assertReport(check, self.mk_pkg(iuse="foo bar", required_use="^^ ( foo bar )"))
17469 r = self.assertReport(check, self.mk_pkg(iuse="+foo +bar", required_use="^^ ( foo bar )"))
17470 assert isinstance(r, metadata.RequiredUseDefaults)
17471 - assert r.use == ('bar', 'foo')
17472 - assert str(r.required_use) == 'exactly-one-of ( foo bar )'
17473 + assert r.use == ("bar", "foo")
17474 + assert str(r.required_use) == "exactly-one-of ( foo bar )"
17475
17476 # all-of
17477 self.assertNoReport(check, self.mk_pkg(iuse="foo bar baz", required_use="foo? ( bar baz )"))
17478 - self.assertNoReport(check, self.mk_pkg(iuse="+foo +bar +baz", required_use="foo? ( bar baz )"))
17479 + self.assertNoReport(
17480 + check, self.mk_pkg(iuse="+foo +bar +baz", required_use="foo? ( bar baz )")
17481 + )
17482 self.assertReports(check, self.mk_pkg(iuse="+foo bar baz", required_use="foo? ( bar baz )"))
17483 self.assertReport(check, self.mk_pkg(iuse="+foo +bar baz", required_use="foo? ( bar baz )"))
17484 - r = self.assertReport(check, self.mk_pkg(iuse="+foo bar +baz", required_use="foo? ( bar baz )"))
17485 + r = self.assertReport(
17486 + check, self.mk_pkg(iuse="+foo bar +baz", required_use="foo? ( bar baz )")
17487 + )
17488 assert isinstance(r, metadata.RequiredUseDefaults)
17489 - assert r.use == ('baz', 'foo')
17490 + assert r.use == ("baz", "foo")
17491 # TODO: fix this output to show both required USE flags
17492 - assert str(r.required_use) == 'bar'
17493 + assert str(r.required_use) == "bar"
17494
17495 # any-of
17496 - self.assertNoReport(check, self.mk_pkg(iuse="foo bar baz", required_use="foo? ( || ( bar baz ) )"))
17497 - self.assertNoReport(check, self.mk_pkg(iuse="+foo +bar baz", required_use="foo? ( || ( bar baz ) )"))
17498 - self.assertNoReport(check, self.mk_pkg(iuse="+foo bar +baz", required_use="foo? ( || ( bar baz ) )"))
17499 - self.assertNoReport(check, self.mk_pkg(iuse="+foo +bar +baz", required_use="foo? ( || ( bar baz ) )"))
17500 - r = self.assertReport(check, self.mk_pkg(iuse="+foo bar baz", required_use="foo? ( || ( bar baz ) )"))
17501 + self.assertNoReport(
17502 + check, self.mk_pkg(iuse="foo bar baz", required_use="foo? ( || ( bar baz ) )")
17503 + )
17504 + self.assertNoReport(
17505 + check, self.mk_pkg(iuse="+foo +bar baz", required_use="foo? ( || ( bar baz ) )")
17506 + )
17507 + self.assertNoReport(
17508 + check, self.mk_pkg(iuse="+foo bar +baz", required_use="foo? ( || ( bar baz ) )")
17509 + )
17510 + self.assertNoReport(
17511 + check, self.mk_pkg(iuse="+foo +bar +baz", required_use="foo? ( || ( bar baz ) )")
17512 + )
17513 + r = self.assertReport(
17514 + check, self.mk_pkg(iuse="+foo bar baz", required_use="foo? ( || ( bar baz ) )")
17515 + )
17516 assert isinstance(r, metadata.RequiredUseDefaults)
17517 - assert r.use == ('foo',)
17518 - assert str(r.required_use) == '( bar || baz )'
17519 + assert r.use == ("foo",)
17520 + assert str(r.required_use) == "( bar || baz )"
17521
17522
17523 def use_based():
17524 # hidden to keep the test runner from finding it
17525 class UseBased(IUSE_Options):
17526 -
17527 def test_required_addons(self):
17528 assert addons.UseAddon in self.check_kls.required_addons
17529
17530 @@ -604,30 +634,32 @@ def use_based():
17531
17532
17533 class _TestRestrictPropertiesCheck(use_based(), misc.ReportTestCase):
17534 -
17535 - def mk_pkg(self, restrict='', properties='', iuse=''):
17536 + def mk_pkg(self, restrict="", properties="", iuse=""):
17537 return misc.FakePkg(
17538 - 'dev-util/diffball-2.7.1',
17539 - data={'IUSE': iuse, 'RESTRICT': restrict, 'PROPERTIES': properties})
17540 + "dev-util/diffball-2.7.1",
17541 + data={"IUSE": iuse, "RESTRICT": restrict, "PROPERTIES": properties},
17542 + )
17543
17544 def test_no_allowed(self):
17545 # repo or its masters don't define any allowed values so anything goes
17546 check = self.mk_check()
17547 - self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: 'foo'}))
17548 - self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: 'foo? ( bar )', 'iuse': 'foo'}))
17549 + self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: "foo"}))
17550 + self.assertNoReport(
17551 + check, self.mk_pkg(**{self.check_kls._attr: "foo? ( bar )", "iuse": "foo"})
17552 + )
17553
17554 def test_allowed(self):
17555 - check = self.mk_check(options={self.check_kls._attr: ('foo',)})
17556 + check = self.mk_check(options={self.check_kls._attr: ("foo",)})
17557 # allowed
17558 - self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: 'foo'}))
17559 + self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: "foo"}))
17560
17561 # unknown
17562 - r = self.assertReport(check, self.mk_pkg(**{self.check_kls._attr: 'bar'}))
17563 + r = self.assertReport(check, self.mk_pkg(**{self.check_kls._attr: "bar"}))
17564 assert isinstance(r, self.check_kls._unknown_result_cls)
17565 assert f'unknown {self.check_kls._attr.upper()}="bar"' in str(r)
17566
17567 # unknown multiple, conditional
17568 - pkg = self.mk_pkg(**{self.check_kls._attr: 'baz? ( foo bar boo )', 'iuse': 'baz'})
17569 + pkg = self.mk_pkg(**{self.check_kls._attr: "baz? ( foo bar boo )", "iuse": "baz"})
17570 r = self.assertReport(check, pkg)
17571 assert isinstance(r, self.check_kls._unknown_result_cls)
17572 assert f'unknown {self.check_kls._attr.upper()}="bar boo"' in str(r)
17573 @@ -635,17 +667,21 @@ class _TestRestrictPropertiesCheck(use_based(), misc.ReportTestCase):
17574 def test_unstated_iuse(self):
17575 check = self.mk_check()
17576 # no IUSE
17577 - self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: 'foo'}))
17578 + self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: "foo"}))
17579 # conditional with IUSE defined
17580 - self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: 'foo? ( bar )', 'iuse': 'foo'}))
17581 + self.assertNoReport(
17582 + check, self.mk_pkg(**{self.check_kls._attr: "foo? ( bar )", "iuse": "foo"})
17583 + )
17584 # conditional missing IUSE
17585 - r = self.assertReport(check, self.mk_pkg(**{self.check_kls._attr: 'foo? ( bar )'}))
17586 + r = self.assertReport(check, self.mk_pkg(**{self.check_kls._attr: "foo? ( bar )"}))
17587 assert isinstance(r, addons.UnstatedIuse)
17588 - assert 'unstated flag: [ foo ]' in str(r)
17589 + assert "unstated flag: [ foo ]" in str(r)
17590 # multiple missing IUSE
17591 - r = self.assertReport(check, self.mk_pkg(**{self.check_kls._attr: 'foo? ( bar ) boo? ( blah )'}))
17592 + r = self.assertReport(
17593 + check, self.mk_pkg(**{self.check_kls._attr: "foo? ( bar ) boo? ( blah )"})
17594 + )
17595 assert isinstance(r, addons.UnstatedIuse)
17596 - assert 'unstated flags: [ boo, foo ]' in str(r)
17597 + assert "unstated flags: [ boo, foo ]" in str(r)
17598
17599
17600 class TestRestrictCheck(_TestRestrictPropertiesCheck):
17601 @@ -670,30 +706,33 @@ class TestRestrictTestCheck(misc.ReportTestCase):
17602 check_kls = metadata.RestrictTestCheck
17603 check = metadata.RestrictTestCheck(None)
17604
17605 - def mk_pkg(self, iuse='', restrict=''):
17606 - return misc.FakePkg(
17607 - 'dev-util/diffball-2.7.1', data={'IUSE': iuse, 'RESTRICT': restrict})
17608 + def mk_pkg(self, iuse="", restrict=""):
17609 + return misc.FakePkg("dev-util/diffball-2.7.1", data={"IUSE": iuse, "RESTRICT": restrict})
17610
17611 def test_empty_restrict(self):
17612 self.assertNoReport(self.check, self.mk_pkg())
17613
17614 def test_specified_restrict(self):
17615 - self.assertNoReport(self.check, self.mk_pkg(
17616 - iuse='test', restrict='!test? ( test )'))
17617 + self.assertNoReport(self.check, self.mk_pkg(iuse="test", restrict="!test? ( test )"))
17618
17619 # unconditional restriction is fine too
17620 - self.assertNoReport(self.check, self.mk_pkg(iuse='test', restrict='test'))
17621 - self.assertNoReport(self.check, self.mk_pkg(restrict='test'))
17622 + self.assertNoReport(self.check, self.mk_pkg(iuse="test", restrict="test"))
17623 + self.assertNoReport(self.check, self.mk_pkg(restrict="test"))
17624 # more RESTRICTs
17625 - self.assertNoReport(self.check, self.mk_pkg(iuse='foo test',
17626 - restrict='foo? ( strip ) !test? ( test ) bindist'))
17627 + self.assertNoReport(
17628 + self.check,
17629 + self.mk_pkg(iuse="foo test", restrict="foo? ( strip ) !test? ( test ) bindist"),
17630 + )
17631
17632 def test_missing_restrict(self):
17633 data = (
17634 - ('test', ''), # missing entirely
17635 - ('foo test', '!foo? ( test )'), # 'test' present in other condition
17636 - ('foo test', '!foo? ( !test? ( test ) )'), # correct restriction inside another condition
17637 - ('test', 'test? ( test )'), # USE condition gotten the other way around
17638 + ("test", ""), # missing entirely
17639 + ("foo test", "!foo? ( test )"), # 'test' present in other condition
17640 + (
17641 + "foo test",
17642 + "!foo? ( !test? ( test ) )",
17643 + ), # correct restriction inside another condition
17644 + ("test", "test? ( test )"), # USE condition gotten the other way around
17645 )
17646 for iuse, restrict in data:
17647 r = self.assertReport(self.check, self.mk_pkg(iuse=iuse, restrict=restrict))
17648 @@ -706,67 +745,65 @@ class TestLicenseCheck(use_based(), misc.ReportTestCase):
17649 check_kls = metadata.LicenseCheck
17650
17651 def mk_check(self, licenses=(), **kwargs):
17652 - self.repo = FakeRepo(repo_id='test', licenses=licenses)
17653 + self.repo = FakeRepo(repo_id="test", licenses=licenses)
17654 options = self.get_options(**kwargs)
17655 use_addon = addons.UseAddon(options)
17656 check = self.check_kls(options, use_addon=use_addon)
17657 return check
17658
17659 - def mk_pkg(self, license='', iuse=''):
17660 + def mk_pkg(self, license="", iuse=""):
17661 return FakePkg(
17662 - 'dev-util/diffball-2.7.1',
17663 - data={'LICENSE': license, 'IUSE': iuse},
17664 - repo=self.repo)
17665 + "dev-util/diffball-2.7.1", data={"LICENSE": license, "IUSE": iuse}, repo=self.repo
17666 + )
17667
17668 def test_malformed(self):
17669 r = self.assertReport(self.mk_check(), self.mk_pkg("|| ("))
17670 assert isinstance(r, metadata.InvalidLicense)
17671 - assert r.attr == 'license'
17672 + assert r.attr == "license"
17673
17674 def test_empty(self):
17675 r = self.assertReport(self.mk_check(), self.mk_pkg())
17676 assert isinstance(r, metadata.MissingLicense)
17677
17678 def test_unstated_iuse(self):
17679 - chk = self.mk_check(licenses=('BSD',))
17680 + chk = self.mk_check(licenses=("BSD",))
17681
17682 # no IUSE
17683 - self.assertNoReport(chk, self.mk_pkg('BSD'))
17684 + self.assertNoReport(chk, self.mk_pkg("BSD"))
17685
17686 # conditional URI with related IUSE
17687 - pkg = self.mk_pkg(license='foo? ( BSD )', iuse='foo')
17688 + pkg = self.mk_pkg(license="foo? ( BSD )", iuse="foo")
17689 self.assertNoReport(chk, pkg)
17690
17691 # conditional URI with missing IUSE
17692 - pkg = self.mk_pkg(license='foo? ( BSD )')
17693 + pkg = self.mk_pkg(license="foo? ( BSD )")
17694 r = self.assertReport(chk, pkg)
17695 assert isinstance(r, addons.UnstatedIuse)
17696 - assert 'unstated flag: [ foo ]' in str(r)
17697 + assert "unstated flag: [ foo ]" in str(r)
17698
17699 def test_single_missing(self):
17700 r = self.assertReport(self.mk_check(), self.mk_pkg("foo"))
17701 assert isinstance(r, metadata.UnknownLicense)
17702 - assert r.licenses == ('foo',)
17703 + assert r.licenses == ("foo",)
17704
17705 def test_multiple_existing(self):
17706 - chk = self.mk_check(['foo', 'foo2'])
17707 - self.assertNoReport(chk, self.mk_pkg('foo'))
17708 - self.assertNoReport(chk, self.mk_pkg('foo', 'foo2'))
17709 + chk = self.mk_check(["foo", "foo2"])
17710 + self.assertNoReport(chk, self.mk_pkg("foo"))
17711 + self.assertNoReport(chk, self.mk_pkg("foo", "foo2"))
17712
17713 def test_multiple_missing(self):
17714 - chk = self.mk_check(['foo', 'foo2'])
17715 - r = self.assertReport(chk, self.mk_pkg('|| ( foo foo3 foo4 )'))
17716 + chk = self.mk_check(["foo", "foo2"])
17717 + r = self.assertReport(chk, self.mk_pkg("|| ( foo foo3 foo4 )"))
17718 assert isinstance(r, metadata.UnknownLicense)
17719 - assert r.licenses == ('foo3', 'foo4')
17720 + assert r.licenses == ("foo3", "foo4")
17721
17722 def test_unlicensed_categories(self):
17723 - check = self.mk_check(['foo'])
17724 + check = self.mk_check(["foo"])
17725 for category in self.check_kls.unlicensed_categories:
17726 - for license in ('foo', ''):
17727 + for license in ("foo", ""):
17728 pkg = FakePkg(
17729 - f'{category}/diffball-2.7.1',
17730 - data={'LICENSE': license},
17731 - repo=self.repo)
17732 + f"{category}/diffball-2.7.1", data={"LICENSE": license}, repo=self.repo
17733 + )
17734 if license:
17735 r = self.assertReport(check, pkg)
17736 assert isinstance(r, metadata.UnnecessaryLicense)
17737 @@ -782,87 +819,94 @@ class TestMissingSlotDepCheck(use_based(), misc.ReportTestCase):
17738 def mk_check(self, pkgs=None, **kwargs):
17739 if pkgs is None:
17740 pkgs = (
17741 - FakePkg('dev-libs/foo-0', slot='0'),
17742 - FakePkg('dev-libs/foo-1', slot='1'),
17743 - FakePkg('dev-libs/bar-2', slot='2'),
17744 + FakePkg("dev-libs/foo-0", slot="0"),
17745 + FakePkg("dev-libs/foo-1", slot="1"),
17746 + FakePkg("dev-libs/bar-2", slot="2"),
17747 )
17748 - self.repo = FakeRepo(pkgs=pkgs, repo_id='test')
17749 + self.repo = FakeRepo(pkgs=pkgs, repo_id="test")
17750 options = self.get_options(**kwargs)
17751 use_addon = addons.UseAddon(options)
17752 check = self.check_kls(options, use_addon=use_addon)
17753 return check
17754
17755 - def mk_pkg(self, eapi='5', rdepend='', depend=''):
17756 + def mk_pkg(self, eapi="5", rdepend="", depend=""):
17757 return FakePkg(
17758 - 'dev-util/diffball-2.7.1', eapi=eapi,
17759 - data={'RDEPEND': rdepend, 'DEPEND': depend},
17760 - repo=self.repo)
17761 + "dev-util/diffball-2.7.1",
17762 + eapi=eapi,
17763 + data={"RDEPEND": rdepend, "DEPEND": depend},
17764 + repo=self.repo,
17765 + )
17766
17767 def test_flagged_deps(self):
17768 - for dep_str in ('dev-libs/foo', 'dev-libs/foo[bar]'):
17769 + for dep_str in ("dev-libs/foo", "dev-libs/foo[bar]"):
17770 for eapi_str, eapi_obj in eapi.EAPI.known_eapis.items():
17771 if eapi_obj.options.sub_slotting:
17772 r = self.assertReport(
17773 - self.mk_check(), self.mk_pkg(
17774 - eapi=eapi_str, rdepend=dep_str, depend=dep_str))
17775 + self.mk_check(), self.mk_pkg(eapi=eapi_str, rdepend=dep_str, depend=dep_str)
17776 + )
17777 assert isinstance(r, metadata.MissingSlotDep)
17778 - assert 'matches more than one slot: [ 0, 1 ]' in str(r)
17779 + assert "matches more than one slot: [ 0, 1 ]" in str(r)
17780
17781 def test_skipped_deps(self):
17782 for dep_str in (
17783 - '!dev-libs/foo', '!!dev-libs/foo', # blockers
17784 - '~dev-libs/foo-0', '~dev-libs/foo-1', # version limited to single slots
17785 - 'dev-libs/foo:0', 'dev-libs/foo:1', # slotted
17786 - 'dev-libs/foo:*', 'dev-libs/foo:=', # slot operators
17787 - ):
17788 + "!dev-libs/foo",
17789 + "!!dev-libs/foo", # blockers
17790 + "~dev-libs/foo-0",
17791 + "~dev-libs/foo-1", # version limited to single slots
17792 + "dev-libs/foo:0",
17793 + "dev-libs/foo:1", # slotted
17794 + "dev-libs/foo:*",
17795 + "dev-libs/foo:=", # slot operators
17796 + ):
17797 for eapi_str, eapi_obj in eapi.EAPI.known_eapis.items():
17798 if eapi_obj.options.sub_slotting:
17799 self.assertNoReport(
17800 - self.mk_check(), self.mk_pkg(
17801 - eapi=eapi_str, rdepend=dep_str, depend=dep_str))
17802 + self.mk_check(), self.mk_pkg(eapi=eapi_str, rdepend=dep_str, depend=dep_str)
17803 + )
17804
17805 def test_no_deps(self):
17806 self.assertNoReport(self.mk_check(), self.mk_pkg())
17807
17808 def test_single_slot_dep(self):
17809 self.assertNoReport(
17810 - self.mk_check(), self.mk_pkg(rdepend='dev-libs/bar', depend='dev-libs/bar'))
17811 + self.mk_check(), self.mk_pkg(rdepend="dev-libs/bar", depend="dev-libs/bar")
17812 + )
17813
17814
17815 class TestDependencyCheck(use_based(), misc.ReportTestCase):
17816
17817 check_kls = metadata.DependencyCheck
17818
17819 - def mk_pkg(self, attr, depset='', eapi='0', iuse=''):
17820 - eapi_attr_map = {'BDEPEND': '7', 'IDEPEND': '8'}
17821 + def mk_pkg(self, attr, depset="", eapi="0", iuse=""):
17822 + eapi_attr_map = {"BDEPEND": "7", "IDEPEND": "8"}
17823 eapi = eapi_attr_map.get(attr, eapi)
17824 return misc.FakePkg(
17825 - 'dev-util/diffball-2.7.1',
17826 - data={'EAPI': eapi, 'IUSE': iuse, attr: depset})
17827 + "dev-util/diffball-2.7.1", data={"EAPI": eapi, "IUSE": iuse, attr: depset}
17828 + )
17829
17830 def mk_check(self, pkgs=None, **kwargs):
17831 if pkgs is None:
17832 pkgs = (
17833 - FakePkg('dev-libs/foo-0', slot='0', iuse=('bar',)),
17834 - FakePkg('dev-libs/foo-1', slot='1', iuse=('bar', 'baz')),
17835 - FakePkg('dev-libs/bar-2', slot='2'),
17836 + FakePkg("dev-libs/foo-0", slot="0", iuse=("bar",)),
17837 + FakePkg("dev-libs/foo-1", slot="1", iuse=("bar", "baz")),
17838 + FakePkg("dev-libs/bar-2", slot="2"),
17839 )
17840 - kwargs['search_repo'] = FakeRepo(pkgs=pkgs, repo_id='test')
17841 + kwargs["search_repo"] = FakeRepo(pkgs=pkgs, repo_id="test")
17842 return super().mk_check(options=kwargs)
17843
17844 # pull the set of dependency attrs from the most recent EAPI
17845 dep_attrs = sorted(list(eapi.EAPI.known_eapis.values())[-1].dep_keys)
17846
17847 - @pytest.mark.parametrize('attr', dep_attrs)
17848 + @pytest.mark.parametrize("attr", dep_attrs)
17849 def test_depset(self, attr):
17850 chk = self.mk_check()
17851 mk_pkg = partial(self.mk_pkg, attr)
17852
17853 # various regular depsets
17854 self.assertNoReport(chk, mk_pkg())
17855 - self.assertNoReport(chk, mk_pkg('dev-util/foo'))
17856 + self.assertNoReport(chk, mk_pkg("dev-util/foo"))
17857 self.assertNoReport(chk, mk_pkg("|| ( dev-util/foo ) dev-foo/bugger "))
17858 - if attr == 'RDEPEND':
17859 + if attr == "RDEPEND":
17860 self.assertNoReport(chk, mk_pkg("!dev-util/blah"))
17861 else:
17862 r = self.assertReport(chk, mk_pkg("!dev-util/blah"))
17863 @@ -870,7 +914,7 @@ class TestDependencyCheck(use_based(), misc.ReportTestCase):
17864
17865 # invalid depset syntax
17866 r = self.assertReport(chk, mk_pkg("|| ("))
17867 - assert isinstance(r, getattr(metadata, f'Invalid{attr.lower().capitalize()}'))
17868 + assert isinstance(r, getattr(metadata, f"Invalid{attr.lower().capitalize()}"))
17869
17870 # pkg blocking itself
17871 r = self.assertReport(chk, mk_pkg("!dev-util/diffball"))
17872 @@ -879,105 +923,113 @@ class TestDependencyCheck(use_based(), misc.ReportTestCase):
17873 assert f'{attr.upper()}="!dev-util/diffball"' in str(r)
17874
17875 # check for := in || () blocks
17876 - pkg = mk_pkg(eapi='5', depset="|| ( dev-libs/foo:= dev-libs/bar )")
17877 + pkg = mk_pkg(eapi="5", depset="|| ( dev-libs/foo:= dev-libs/bar )")
17878 r = self.assertReport(chk, pkg)
17879 assert isinstance(r, metadata.BadDependency)
17880 assert "= slot operator used inside || block" in str(r)
17881 assert f'{attr.upper()}="dev-libs/foo:="' in str(r)
17882
17883 # multiple := atoms in || () blocks
17884 - pkg = mk_pkg(eapi='5', depset="|| ( dev-libs/foo:= dev-libs/bar:= )")
17885 + pkg = mk_pkg(eapi="5", depset="|| ( dev-libs/foo:= dev-libs/bar:= )")
17886 reports = self.assertReports(chk, pkg)
17887 for r in reports:
17888 assert isinstance(r, metadata.BadDependency)
17889 assert "= slot operator used inside || block" in str(r)
17890
17891 # check for := in blockers
17892 - r = self.assertReport(chk, mk_pkg(eapi='5', depset="!dev-libs/foo:="))
17893 + r = self.assertReport(chk, mk_pkg(eapi="5", depset="!dev-libs/foo:="))
17894 assert isinstance(r, metadata.BadDependency)
17895 assert "= slot operator used in blocker" in str(r)
17896 assert f'{attr.upper()}="!dev-libs/foo:="' in str(r)
17897
17898 # check for missing package revisions
17899 self.assertNoReport(chk, mk_pkg("=dev-libs/foo-1-r0"))
17900 - r = self.assertReport(chk, mk_pkg(eapi='6', depset="=dev-libs/foo-1"))
17901 + r = self.assertReport(chk, mk_pkg(eapi="6", depset="=dev-libs/foo-1"))
17902 assert isinstance(r, metadata.MissingPackageRevision)
17903 assert f'{attr.upper()}="=dev-libs/foo-1"' in str(r)
17904
17905 - @pytest.mark.parametrize('attr', dep_attrs)
17906 + @pytest.mark.parametrize("attr", dep_attrs)
17907 def test_depset_unstated_iuse(self, attr):
17908 chk = self.mk_check()
17909 mk_pkg = partial(self.mk_pkg, attr)
17910
17911 # unstated IUSE
17912 - r = self.assertReport(chk, mk_pkg(depset='foo? ( dev-libs/foo )'))
17913 + r = self.assertReport(chk, mk_pkg(depset="foo? ( dev-libs/foo )"))
17914 assert isinstance(r, addons.UnstatedIuse)
17915 - assert 'unstated flag: [ foo ]' in str(r)
17916 + assert "unstated flag: [ foo ]" in str(r)
17917 # known IUSE
17918 - self.assertNoReport(chk, mk_pkg(depset='foo? ( dev-libs/foo )', iuse='foo'))
17919 + self.assertNoReport(chk, mk_pkg(depset="foo? ( dev-libs/foo )", iuse="foo"))
17920 # multiple unstated IUSE
17921 - r = self.assertReport(chk, mk_pkg(depset='foo? ( !bar? ( dev-libs/foo ) )'))
17922 + r = self.assertReport(chk, mk_pkg(depset="foo? ( !bar? ( dev-libs/foo ) )"))
17923 assert isinstance(r, addons.UnstatedIuse)
17924 - assert 'unstated flags: [ bar, foo ]' in str(r)
17925 + assert "unstated flags: [ bar, foo ]" in str(r)
17926
17927 - @pytest.mark.parametrize('attr', dep_attrs)
17928 + @pytest.mark.parametrize("attr", dep_attrs)
17929 def test_depset_missing_usedep_default(self, attr):
17930 chk = self.mk_check()
17931 mk_pkg = partial(self.mk_pkg, attr)
17932
17933 # USE flag exists on all matching pkgs
17934 - self.assertNoReport(chk, mk_pkg(eapi='4', depset='dev-libs/foo[bar?]'))
17935 + self.assertNoReport(chk, mk_pkg(eapi="4", depset="dev-libs/foo[bar?]"))
17936
17937 use_deps = (
17938 - 'foo(-)?', '!foo(-)?', 'foo(+)?', '!foo(+)?', 'foo(-)=', '!foo(-)=',
17939 - 'foo(+)=', '!foo(+)=', '-foo(-)', '-foo(+)',
17940 + "foo(-)?",
17941 + "!foo(-)?",
17942 + "foo(+)?",
17943 + "!foo(+)?",
17944 + "foo(-)=",
17945 + "!foo(-)=",
17946 + "foo(+)=",
17947 + "!foo(+)=",
17948 + "-foo(-)",
17949 + "-foo(+)",
17950 )
17951 for use_dep in use_deps:
17952 # USE flag doesn't exist but has proper default
17953 - self.assertNoReport(chk, mk_pkg(eapi='4', depset=f'dev-libs/bar[{use_dep}]'))
17954 - if attr == 'RDEPEND':
17955 - self.assertNoReport(chk, mk_pkg(eapi='4', depset=f'!dev-libs/bar[{use_dep}]'))
17956 + self.assertNoReport(chk, mk_pkg(eapi="4", depset=f"dev-libs/bar[{use_dep}]"))
17957 + if attr == "RDEPEND":
17958 + self.assertNoReport(chk, mk_pkg(eapi="4", depset=f"!dev-libs/bar[{use_dep}]"))
17959 else:
17960 - r = self.assertReport(chk, mk_pkg(eapi='4', depset=f'!dev-libs/bar[{use_dep}]'))
17961 + r = self.assertReport(chk, mk_pkg(eapi="4", depset=f"!dev-libs/bar[{use_dep}]"))
17962 assert isinstance(r, metadata.MisplacedWeakBlocker)
17963
17964 # result triggers when all matching pkgs don't have requested USE flag
17965 for dep in (
17966 - 'dev-libs/bar[foo?]',
17967 - 'dev-libs/bar[!foo?]',
17968 - 'dev-libs/bar[foo=]',
17969 - 'dev-libs/bar[!foo=]',
17970 - 'dev-libs/bar[-foo]',
17971 - '|| ( dev-libs/foo[bar] dev-libs/bar[foo] )',
17972 - '|| ( dev-libs/foo[bar] dev-libs/bar[-foo] )',
17973 - ):
17974 - r = self.assertReport(chk, mk_pkg(eapi='4', depset=dep))
17975 + "dev-libs/bar[foo?]",
17976 + "dev-libs/bar[!foo?]",
17977 + "dev-libs/bar[foo=]",
17978 + "dev-libs/bar[!foo=]",
17979 + "dev-libs/bar[-foo]",
17980 + "|| ( dev-libs/foo[bar] dev-libs/bar[foo] )",
17981 + "|| ( dev-libs/foo[bar] dev-libs/bar[-foo] )",
17982 + ):
17983 + r = self.assertReport(chk, mk_pkg(eapi="4", depset=dep))
17984 assert isinstance(r, metadata.MissingUseDepDefault)
17985 - assert r.pkgs == ('dev-libs/bar-2',)
17986 - assert r.flag == 'foo'
17987 + assert r.pkgs == ("dev-libs/bar-2",)
17988 + assert r.flag == "foo"
17989 assert "USE flag 'foo' missing" in str(r)
17990
17991 - if attr == 'RDEPEND':
17992 - r = self.assertReport(chk, mk_pkg(eapi='4', depset='!dev-libs/bar[foo?]'))
17993 + if attr == "RDEPEND":
17994 + r = self.assertReport(chk, mk_pkg(eapi="4", depset="!dev-libs/bar[foo?]"))
17995 assert isinstance(r, metadata.MissingUseDepDefault)
17996 - assert r.pkgs == ('dev-libs/bar-2',)
17997 - assert r.flag == 'foo'
17998 + assert r.pkgs == ("dev-libs/bar-2",)
17999 + assert r.flag == "foo"
18000 assert "USE flag 'foo' missing" in str(r)
18001
18002 # USE flag missing on one of multiple matches
18003 - r = self.assertReport(chk, mk_pkg(eapi='4', depset='dev-libs/foo[baz?]'))
18004 + r = self.assertReport(chk, mk_pkg(eapi="4", depset="dev-libs/foo[baz?]"))
18005 assert isinstance(r, metadata.MissingUseDepDefault)
18006 - assert r.atom == 'dev-libs/foo[baz?]'
18007 - assert r.pkgs == ('dev-libs/foo-0',)
18008 - assert r.flag == 'baz'
18009 + assert r.atom == "dev-libs/foo[baz?]"
18010 + assert r.pkgs == ("dev-libs/foo-0",)
18011 + assert r.flag == "baz"
18012 assert "USE flag 'baz' missing" in str(r)
18013
18014 # USE flag missing on all matches
18015 - r = self.assertReport(chk, mk_pkg(eapi='4', depset='dev-libs/foo[blah?]'))
18016 + r = self.assertReport(chk, mk_pkg(eapi="4", depset="dev-libs/foo[blah?]"))
18017 assert isinstance(r, metadata.MissingUseDepDefault)
18018 - assert r.atom == 'dev-libs/foo[blah?]'
18019 - assert r.pkgs == ('dev-libs/foo-0', 'dev-libs/foo-1')
18020 - assert r.flag == 'blah'
18021 + assert r.atom == "dev-libs/foo[blah?]"
18022 + assert r.pkgs == ("dev-libs/foo-0", "dev-libs/foo-1")
18023 + assert r.flag == "blah"
18024 assert "USE flag 'blah' missing" in str(r)
18025
18026
18027 @@ -993,16 +1045,16 @@ class TestOutdatedBlockersCheck(misc.ReportTestCase):
18028 # initialize parent repo
18029 self.parent_git_repo = make_git_repo()
18030 self.parent_repo = make_repo(self.parent_git_repo.path)
18031 - self.parent_git_repo.add_all('initial commit')
18032 + self.parent_git_repo.add_all("initial commit")
18033 # create a stub pkg and commit it
18034 - self.parent_repo.create_ebuild('cat/pkg-0')
18035 - self.parent_git_repo.add_all('cat/pkg-0')
18036 + self.parent_repo.create_ebuild("cat/pkg-0")
18037 + self.parent_git_repo.add_all("cat/pkg-0")
18038
18039 # initialize child repo
18040 self.child_git_repo = make_git_repo()
18041 - self.child_git_repo.run(['git', 'remote', 'add', 'origin', self.parent_git_repo.path])
18042 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
18043 - self.child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
18044 + self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
18045 + self.child_git_repo.run(["git", "pull", "origin", "main"])
18046 + self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
18047 self.child_repo = make_repo(self.child_git_repo.path)
18048
18049 def init_check(self, options=None, future=0):
18050 @@ -1015,36 +1067,39 @@ class TestOutdatedBlockersCheck(misc.ReportTestCase):
18051
18052 def _options(self, **kwargs):
18053 args = [
18054 - 'scan', '-q', '--cache-dir', self.cache_dir,
18055 - '--repo', self.child_repo.location,
18056 + "scan",
18057 + "-q",
18058 + "--cache-dir",
18059 + self.cache_dir,
18060 + "--repo",
18061 + self.child_repo.location,
18062 ]
18063 options, _ = self.tool.parse_args(args)
18064 return options
18065
18066 def test_existent_blockers(self):
18067 - self.child_repo.create_ebuild('cat/pkg-1', depend='!~cat/pkg-0')
18068 - self.child_git_repo.add_all('cat/pkg: version bump to 1')
18069 - self.child_repo.create_ebuild('cat/pkg-2', depend='!!~cat/pkg-0')
18070 - self.child_git_repo.add_all('cat/pkg: version bump to 2')
18071 - self.child_repo.create_ebuild('cat/pkg-3', depend='!!=cat/pkg-0*')
18072 - self.child_git_repo.add_all('cat/pkg: version bump to 3')
18073 + self.child_repo.create_ebuild("cat/pkg-1", depend="!~cat/pkg-0")
18074 + self.child_git_repo.add_all("cat/pkg: version bump to 1")
18075 + self.child_repo.create_ebuild("cat/pkg-2", depend="!!~cat/pkg-0")
18076 + self.child_git_repo.add_all("cat/pkg: version bump to 2")
18077 + self.child_repo.create_ebuild("cat/pkg-3", depend="!!=cat/pkg-0*")
18078 + self.child_git_repo.add_all("cat/pkg: version bump to 3")
18079 self.init_check()
18080 self.assertNoReport(self.check, self.source)
18081
18082 def test_nonexistent_blockers(self):
18083 - self.child_repo.create_ebuild('cat/pkg-1', depend='!nonexistent/pkg')
18084 - self.child_git_repo.add_all('cat/pkg: version bump to 1')
18085 + self.child_repo.create_ebuild("cat/pkg-1", depend="!nonexistent/pkg")
18086 + self.child_git_repo.add_all("cat/pkg: version bump to 1")
18087 self.init_check()
18088 r = self.assertReport(self.check, self.source)
18089 - expected = metadata.NonexistentBlocker(
18090 - 'DEPEND', '!nonexistent/pkg', pkg=CPV('cat/pkg-1'))
18091 + expected = metadata.NonexistentBlocker("DEPEND", "!nonexistent/pkg", pkg=CPV("cat/pkg-1"))
18092 assert r == expected
18093
18094 def test_outdated_blockers(self):
18095 - self.parent_git_repo.remove_all('cat/pkg')
18096 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
18097 - self.child_repo.create_ebuild('cat/pkg-1', depend='!!=cat/pkg-0*')
18098 - self.child_git_repo.add_all('cat/pkg: version bump to 1')
18099 + self.parent_git_repo.remove_all("cat/pkg")
18100 + self.child_git_repo.run(["git", "pull", "origin", "main"])
18101 + self.child_repo.create_ebuild("cat/pkg-1", depend="!!=cat/pkg-0*")
18102 + self.child_git_repo.add_all("cat/pkg: version bump to 1")
18103
18104 # packages are not old enough to trigger any results
18105 for days in (0, 100, 365, 729):
18106 @@ -1056,7 +1111,8 @@ class TestOutdatedBlockersCheck(misc.ReportTestCase):
18107 self.init_check(future=days)
18108 r = self.assertReport(self.check, self.source)
18109 expected = metadata.OutdatedBlocker(
18110 - 'DEPEND', '!!=cat/pkg-0*', years, pkg=CPV('cat/pkg-1'))
18111 + "DEPEND", "!!=cat/pkg-0*", years, pkg=CPV("cat/pkg-1")
18112 + )
18113 assert r == expected
18114
18115
18116 @@ -1064,16 +1120,17 @@ class TestSrcUriCheck(use_based(), misc.ReportTestCase):
18117
18118 check_kls = metadata.SrcUriCheck
18119
18120 - def mk_pkg(self, src_uri='', restrict='', default_chksums={"size": 100},
18121 - iuse='', disable_chksums=False):
18122 + def mk_pkg(
18123 + self, src_uri="", restrict="", default_chksums={"size": 100}, iuse="", disable_chksums=False
18124 + ):
18125 class fake_repo:
18126 def __init__(self, default_chksums):
18127 if disable_chksums:
18128 self.chksums = {}
18129 else:
18130 self.chksums = {}.fromkeys(
18131 - {os.path.basename(x) for x in src_uri.split()},
18132 - default_chksums)
18133 + {os.path.basename(x) for x in src_uri.split()}, default_chksums
18134 + )
18135
18136 def _get_digests(self, pkg, allow_missing=False):
18137 return False, self.chksums
18138 @@ -1082,47 +1139,52 @@ class TestSrcUriCheck(use_based(), misc.ReportTestCase):
18139 _parent_repo = fake_repo(default_chksums)
18140
18141 return misc.FakePkg(
18142 - 'dev-util/diffball-2.7.1',
18143 - data={'SRC_URI': src_uri, 'IUSE': iuse, 'RESTRICT': restrict},
18144 - parent=fake_parent())
18145 + "dev-util/diffball-2.7.1",
18146 + data={"SRC_URI": src_uri, "IUSE": iuse, "RESTRICT": restrict},
18147 + parent=fake_parent(),
18148 + )
18149
18150 def test_malformed(self):
18151 - r = self.assertReport(
18152 - self.mk_check(), self.mk_pkg("foon", disable_chksums=True))
18153 + r = self.assertReport(self.mk_check(), self.mk_pkg("foon", disable_chksums=True))
18154 assert isinstance(r, metadata.InvalidSrcUri)
18155 - assert r.attr == 'fetchables'
18156 + assert r.attr == "fetchables"
18157
18158 def test_regular_src_uri(self):
18159 chk = self.mk_check()
18160 # single file
18161 - self.assertNoReport(chk, self.mk_pkg(src_uri='https://foon.com/foon-2.7.1.tar.gz'))
18162 + self.assertNoReport(chk, self.mk_pkg(src_uri="https://foon.com/foon-2.7.1.tar.gz"))
18163 # single file, multiple uris
18164 - self.assertNoReport(chk, self.mk_pkg(
18165 - src_uri='https://foo.com/a-0.tar.gz https://bar.com/a-0.tar.gz'))
18166 + self.assertNoReport(
18167 + chk, self.mk_pkg(src_uri="https://foo.com/a-0.tar.gz https://bar.com/a-0.tar.gz")
18168 + )
18169 # multiple files, multiple uris
18170 - self.assertNoReport(chk, self.mk_pkg(
18171 - src_uri="""
18172 + self.assertNoReport(
18173 + chk,
18174 + self.mk_pkg(
18175 + src_uri="""
18176 https://foo.com/a-0.tar.gz https://bar.com/a-0.tar.gz
18177 https://blah.org/b-1.zip https://boo.net/boo-10.tar.xz
18178 - """))
18179 + """
18180 + ),
18181 + )
18182
18183 def test_unknown_mirror(self):
18184 chk = self.mk_check()
18185
18186 # single mirror
18187 - r = self.assertReport(chk, self.mk_pkg('mirror://foo/a-0.gz https://foo.com/a-0.gz'))
18188 + r = self.assertReport(chk, self.mk_pkg("mirror://foo/a-0.gz https://foo.com/a-0.gz"))
18189 assert isinstance(r, metadata.UnknownMirror)
18190 - assert r.mirror == 'foo'
18191 - assert r.uri == 'mirror://foo/a-0.gz'
18192 + assert r.mirror == "foo"
18193 + assert r.uri == "mirror://foo/a-0.gz"
18194 assert "unknown mirror 'foo'" in str(r)
18195
18196 # multiple mirrors
18197 - pkg = self.mk_pkg('mirror://foo/a-0.gz mirror://bar/a-0.gz https://foo.com/a-0.gz')
18198 + pkg = self.mk_pkg("mirror://foo/a-0.gz mirror://bar/a-0.gz https://foo.com/a-0.gz")
18199 reports = self.assertReports(chk, pkg)
18200 - for mirror, r in zip(('bar', 'foo'), sorted(reports, key=attrgetter('mirror'))):
18201 + for mirror, r in zip(("bar", "foo"), sorted(reports, key=attrgetter("mirror"))):
18202 assert isinstance(r, metadata.UnknownMirror)
18203 assert r.mirror == mirror
18204 - assert r.uri == f'mirror://{mirror}/a-0.gz'
18205 + assert r.uri == f"mirror://{mirror}/a-0.gz"
18206 assert f"unknown mirror '{mirror}'" in str(r)
18207
18208 def test_bad_filename(self):
18209 @@ -1131,77 +1193,80 @@ class TestSrcUriCheck(use_based(), misc.ReportTestCase):
18210 # PN filename
18211 r = self.assertReport(chk, self.mk_pkg("https://foon.com/diffball.tar.gz"))
18212 assert isinstance(r, metadata.BadFilename)
18213 - assert r.filenames == ('diffball.tar.gz',)
18214 - assert 'bad filename: [ diffball.tar.gz ]' in str(r)
18215 + assert r.filenames == ("diffball.tar.gz",)
18216 + assert "bad filename: [ diffball.tar.gz ]" in str(r)
18217
18218 # PV filename
18219 r = self.assertReport(chk, self.mk_pkg("https://foon.com/2.7.1.tar.gz"))
18220 assert isinstance(r, metadata.BadFilename)
18221 - assert r.filenames == ('2.7.1.tar.gz',)
18222 - assert 'bad filename: [ 2.7.1.tar.gz ]' in str(r)
18223 + assert r.filenames == ("2.7.1.tar.gz",)
18224 + assert "bad filename: [ 2.7.1.tar.gz ]" in str(r)
18225
18226 # github-style PV filename
18227 r = self.assertReport(chk, self.mk_pkg("https://foon.com/v2.7.1.zip"))
18228 assert isinstance(r, metadata.BadFilename)
18229 - assert r.filenames == ('v2.7.1.zip',)
18230 - assert 'bad filename: [ v2.7.1.zip ]' in str(r)
18231 + assert r.filenames == ("v2.7.1.zip",)
18232 + assert "bad filename: [ v2.7.1.zip ]" in str(r)
18233
18234 # github-style commit snapshot filename
18235 - r = self.assertReport(chk, self.mk_pkg("https://foon.com/cb230f01fb288a0b9f0fc437545b97d06c846bd3.tar.gz"))
18236 + r = self.assertReport(
18237 + chk, self.mk_pkg("https://foon.com/cb230f01fb288a0b9f0fc437545b97d06c846bd3.tar.gz")
18238 + )
18239 assert isinstance(r, metadata.BadFilename)
18240
18241 # multiple bad filenames
18242 - r = self.assertReport(chk, self.mk_pkg("https://foon.com/2.7.1.tar.gz https://foon.com/diffball.zip"))
18243 + r = self.assertReport(
18244 + chk, self.mk_pkg("https://foon.com/2.7.1.tar.gz https://foon.com/diffball.zip")
18245 + )
18246 assert isinstance(r, metadata.BadFilename)
18247 - assert r.filenames == ('2.7.1.tar.gz', 'diffball.zip')
18248 - assert 'bad filenames: [ 2.7.1.tar.gz, diffball.zip ]' in str(r)
18249 + assert r.filenames == ("2.7.1.tar.gz", "diffball.zip")
18250 + assert "bad filenames: [ 2.7.1.tar.gz, diffball.zip ]" in str(r)
18251
18252 def test_missing_uri(self):
18253 chk = self.mk_check()
18254
18255 # mangled protocol
18256 - r = self.assertReport(chk, self.mk_pkg('http:/foo/foo-0.tar.gz'))
18257 + r = self.assertReport(chk, self.mk_pkg("http:/foo/foo-0.tar.gz"))
18258 assert isinstance(r, metadata.MissingUri)
18259 - assert r.filenames == ('http:/foo/foo-0.tar.gz',)
18260 + assert r.filenames == ("http:/foo/foo-0.tar.gz",)
18261 assert "unfetchable file: 'http:/foo/foo-0.tar.gz'" in str(r)
18262
18263 # no URI and RESTRICT doesn't contain 'fetch'
18264 - r = self.assertReport(chk, self.mk_pkg('foon'))
18265 + r = self.assertReport(chk, self.mk_pkg("foon"))
18266 assert isinstance(r, metadata.MissingUri)
18267 - assert r.filenames == ('foon',)
18268 + assert r.filenames == ("foon",)
18269 assert "unfetchable file: 'foon'" in str(r)
18270
18271 # no URI and RESTRICT contains 'fetch'
18272 - self.assertNoReport(chk, self.mk_pkg('foon', restrict='fetch'))
18273 + self.assertNoReport(chk, self.mk_pkg("foon", restrict="fetch"))
18274
18275 # conditional URI and conditional RESTRICT containing 'fetch'
18276 - pkg = self.mk_pkg(src_uri='foo? ( bar )', iuse='foo', restrict='foo? ( fetch )')
18277 + pkg = self.mk_pkg(src_uri="foo? ( bar )", iuse="foo", restrict="foo? ( fetch )")
18278 self.assertNoReport(chk, pkg)
18279 # negated
18280 - pkg = self.mk_pkg(src_uri='!foo? ( bar )', iuse='foo', restrict='!foo? ( fetch )')
18281 + pkg = self.mk_pkg(src_uri="!foo? ( bar )", iuse="foo", restrict="!foo? ( fetch )")
18282 self.assertNoReport(chk, pkg)
18283 # multi-level conditional
18284 pkg = self.mk_pkg(
18285 - iuse='foo bar',
18286 - src_uri='foo? ( bar? ( blah ) )',
18287 - restrict='foo? ( bar? ( fetch ) )')
18288 + iuse="foo bar", src_uri="foo? ( bar? ( blah ) )", restrict="foo? ( bar? ( fetch ) )"
18289 + )
18290 self.assertNoReport(chk, pkg)
18291
18292 def test_unstated_iuse(self):
18293 chk = self.mk_check()
18294
18295 # no IUSE
18296 - self.assertNoReport(chk, self.mk_pkg('https://foo.com/foo-0.tar.gz'))
18297 + self.assertNoReport(chk, self.mk_pkg("https://foo.com/foo-0.tar.gz"))
18298
18299 # conditional URI with related IUSE
18300 - pkg = self.mk_pkg(src_uri='foo? ( https://foo.com/foo-0.tar.gz )', iuse='foo')
18301 + pkg = self.mk_pkg(src_uri="foo? ( https://foo.com/foo-0.tar.gz )", iuse="foo")
18302 self.assertNoReport(chk, pkg)
18303
18304 # conditional URI with missing IUSE
18305 - pkg = self.mk_pkg(src_uri='foo? ( https://foo.com/foo-0.tar.gz )')
18306 + pkg = self.mk_pkg(src_uri="foo? ( https://foo.com/foo-0.tar.gz )")
18307 r = self.assertReport(chk, pkg)
18308 assert isinstance(r, addons.UnstatedIuse)
18309 - assert 'unstated flag: [ foo ]' in str(r)
18310 + assert "unstated flag: [ foo ]" in str(r)
18311
18312 def test_bad_proto(self):
18313 chk = self.mk_check()
18314 @@ -1211,22 +1276,23 @@ class TestSrcUriCheck(use_based(), misc.ReportTestCase):
18315
18316 for proto in self.check_kls.valid_protos:
18317 self.assertNoReport(
18318 - chk, self.mk_pkg(f"{proto}://dar.com/foon"),
18319 - msg=f"testing valid proto {proto}")
18320 + chk, self.mk_pkg(f"{proto}://dar.com/foon"), msg=f"testing valid proto {proto}"
18321 + )
18322
18323 - bad_proto = f'{proto}x'
18324 + bad_proto = f"{proto}x"
18325
18326 r = self.assertReport(chk, self.mk_pkg(f"{bad_proto}://foon.com/foon"))
18327 assert isinstance(r, metadata.BadProtocol)
18328 assert bad_proto in str(r)
18329 - assert f'{bad_proto}://foon.com/foon' in str(r)
18330 + assert f"{bad_proto}://foon.com/foon" in str(r)
18331
18332 # check collapsing
18333 pkg = self.mk_pkg(f"{bad_proto}://foon.com/foon {bad_proto}://dar.com/foon")
18334 r = self.assertReport(chk, pkg)
18335 assert isinstance(r, metadata.BadProtocol)
18336 assert list(r.uris) == sorted(
18337 - f'{bad_proto}://{x}/foon' for x in ('foon.com', 'dar.com'))
18338 + f"{bad_proto}://{x}/foon" for x in ("foon.com", "dar.com")
18339 + )
18340 assert bad_proto in str(r)
18341
18342 def test_tarball_available_github(self):
18343 @@ -1235,7 +1301,7 @@ class TestSrcUriCheck(use_based(), misc.ReportTestCase):
18344 r = self.assertReport(chk, self.mk_pkg(uri))
18345 assert isinstance(r, metadata.TarballAvailable)
18346 assert r.uris == (uri,)
18347 - assert '[ https://github.com/foo/bar/archive/v1.2.3.zip ]' in str(r)
18348 + assert "[ https://github.com/foo/bar/archive/v1.2.3.zip ]" in str(r)
18349
18350 def test_tarball_available_gitlab(self):
18351 chk = self.mk_check()
18352 @@ -1243,36 +1309,34 @@ class TestSrcUriCheck(use_based(), misc.ReportTestCase):
18353 r = self.assertReport(chk, self.mk_pkg(uri))
18354 assert isinstance(r, metadata.TarballAvailable)
18355 assert r.uris == (uri,)
18356 - assert 'zip archive used when tarball available' in str(r)
18357 + assert "zip archive used when tarball available" in str(r)
18358
18359
18360 class TestMissingUnpackerDepCheck(use_based(), misc.ReportTestCase):
18361
18362 check_kls = metadata.MissingUnpackerDepCheck
18363
18364 - def mk_pkg(self, exts, eapi='7', **data):
18365 + def mk_pkg(self, exts, eapi="7", **data):
18366 if isinstance(exts, str):
18367 exts = [exts]
18368
18369 class fake_repo:
18370 def _get_digests(self, pkg, allow_missing=False):
18371 - chksums = {f'diffball-2.7.1{ext}': {'size': 100} for ext in exts}
18372 + chksums = {f"diffball-2.7.1{ext}": {"size": 100} for ext in exts}
18373 return False, chksums
18374
18375 - data['SRC_URI'] = ' '.join(
18376 - f'https://foo.com/diffball-2.7.1{ext}' for ext in exts)
18377 - return FakePkg(
18378 - 'dev-util/diffball-2.7.1', data=data, eapi=eapi, repo=fake_repo())
18379 + data["SRC_URI"] = " ".join(f"https://foo.com/diffball-2.7.1{ext}" for ext in exts)
18380 + return FakePkg("dev-util/diffball-2.7.1", data=data, eapi=eapi, repo=fake_repo())
18381
18382 def test_with_system_dep(self):
18383 - self.assertNoReport(self.mk_check(), self.mk_pkg('.tar.gz'))
18384 + self.assertNoReport(self.mk_check(), self.mk_pkg(".tar.gz"))
18385
18386 def test_keyword_output(self):
18387 # unpacker deps go in BDEPEND in EAPI >= 7
18388 - r = self.assertReport(self.mk_check(), self.mk_pkg('.zip', eapi='7'))
18389 + r = self.assertReport(self.mk_check(), self.mk_pkg(".zip", eapi="7"))
18390 assert 'missing BDEPEND="app-arch/unzip"' in str(r)
18391 # and in DEPEND for EAPI < 7
18392 - r = self.assertReport(self.mk_check(), self.mk_pkg('.zip', eapi='6'))
18393 + r = self.assertReport(self.mk_check(), self.mk_pkg(".zip", eapi="6"))
18394 assert 'missing DEPEND="app-arch/unzip"' in str(r)
18395
18396 def test_without_dep(self):
18397 @@ -1280,23 +1344,22 @@ class TestMissingUnpackerDepCheck(use_based(), misc.ReportTestCase):
18398 pkg = self.mk_pkg(ext)
18399 r = self.assertReport(self.mk_check(), pkg)
18400 assert isinstance(r, metadata.MissingUnpackerDep)
18401 - assert r.filenames == (f'diffball-2.7.1{ext}',)
18402 - assert r.unpackers == tuple(
18403 - sorted(map(str, self.check_kls.non_system_unpackers[ext])))
18404 + assert r.filenames == (f"diffball-2.7.1{ext}",)
18405 + assert r.unpackers == tuple(sorted(map(str, self.check_kls.non_system_unpackers[ext])))
18406
18407 def test_with_dep(self):
18408 for ext, unpackers in self.check_kls.non_system_unpackers.items():
18409 - for dep_type in ('DEPEND', 'BDEPEND'):
18410 + for dep_type in ("DEPEND", "BDEPEND"):
18411 for unpacker in unpackers:
18412 - for dep in (unpacker, f'>={unpacker}-1'):
18413 + for dep in (unpacker, f">={unpacker}-1"):
18414 kwargs = {dep_type: dep}
18415 pkg = self.mk_pkg(ext, **kwargs)
18416 self.assertNoReport(self.mk_check(), pkg)
18417
18418 def test_rar_with_or_dep(self):
18419 self.assertNoReport(
18420 - self.mk_check(),
18421 - self.mk_pkg('.rar', DEPEND='|| ( app-arch/rar app-arch/unrar )'))
18422 + self.mk_check(), self.mk_pkg(".rar", DEPEND="|| ( app-arch/rar app-arch/unrar )")
18423 + )
18424
18425 def test_without_multiple_unpackers(self):
18426 for combination in combinations(self.check_kls.non_system_unpackers.items(), 2):
18427 @@ -1310,19 +1373,19 @@ class TestMissingUnpackerDepCheck(use_based(), misc.ReportTestCase):
18428 assert len(set(unpackers)) == 1
18429 r = reports[0]
18430 assert isinstance(r, metadata.MissingUnpackerDep)
18431 - assert r.filenames == tuple(sorted(f'diffball-2.7.1{ext}' for ext in exts))
18432 + assert r.filenames == tuple(sorted(f"diffball-2.7.1{ext}" for ext in exts))
18433 assert r.unpackers == tuple(sorted(map(str, unpackers[0])))
18434 else:
18435 assert len(reports) == 2
18436 for i, r in enumerate(reports):
18437 assert isinstance(r, metadata.MissingUnpackerDep)
18438 - assert r.filenames == (f'diffball-2.7.1{exts[i]}',)
18439 + assert r.filenames == (f"diffball-2.7.1{exts[i]}",)
18440 assert r.unpackers == tuple(sorted(map(str, unpackers[i])))
18441
18442 def test_with_multiple_unpackers_one_missing(self):
18443 r = self.assertReport(
18444 - self.mk_check(),
18445 - self.mk_pkg(['.zip', '.7z'], DEPEND='app-arch/unzip'))
18446 + self.mk_check(), self.mk_pkg([".zip", ".7z"], DEPEND="app-arch/unzip")
18447 + )
18448 assert isinstance(r, metadata.MissingUnpackerDep)
18449 - assert r.filenames == (f'diffball-2.7.1.7z',)
18450 - assert r.unpackers == ('app-arch/p7zip',)
18451 + assert r.filenames == (f"diffball-2.7.1.7z",)
18452 + assert r.unpackers == ("app-arch/p7zip",)
18453
18454 diff --git a/tests/checks/test_network.py b/tests/checks/test_network.py
18455 index bb3a7ef5..fb684954 100644
18456 --- a/tests/checks/test_network.py
18457 +++ b/tests/checks/test_network.py
18458 @@ -10,34 +10,38 @@ from unittest.mock import patch
18459 import pytest
18460 from pkgcheck import objects, reporters, scan
18461 from pkgcheck.checks import NetworkCheck
18462 -from pkgcheck.checks.network import (DeadUrl, FetchablesUrlCheck,
18463 - HomepageUrlCheck)
18464 +from pkgcheck.checks.network import DeadUrl, FetchablesUrlCheck, HomepageUrlCheck
18465 from pkgcheck.packages import RawCPV
18466 from snakeoil.formatters import PlainTextFormatter
18467
18468 # skip module tests if requests isn't available
18469 -requests = pytest.importorskip('requests')
18470 +requests = pytest.importorskip("requests")
18471
18472
18473 class TestNetworkChecks:
18474
18475 - repos_data = pytest.REPO_ROOT / 'testdata/data/repos'
18476 - repos_dir = pytest.REPO_ROOT / 'testdata/repos'
18477 + repos_data = pytest.REPO_ROOT / "testdata/data/repos"
18478 + repos_dir = pytest.REPO_ROOT / "testdata/repos"
18479
18480 @pytest.fixture(autouse=True)
18481 def _setup(self, testconfig, tmp_path):
18482 - base_args = ['--config', testconfig]
18483 + base_args = ["--config", testconfig]
18484 self.scan = partial(scan, base_args=base_args)
18485 self.scan_args = [
18486 - '--config', 'no', '--cache-dir', str(tmp_path), '--net',
18487 - '-r', str(self.repos_dir / 'network'),
18488 + "--config",
18489 + "no",
18490 + "--cache-dir",
18491 + str(tmp_path),
18492 + "--net",
18493 + "-r",
18494 + str(self.repos_dir / "network"),
18495 ]
18496
18497 _net_results = [
18498 (cls, result)
18499 for _name, cls in sorted(objects.CHECKS.items())
18500 if issubclass(cls, NetworkCheck)
18501 - for result in sorted(cls.known_results, key=attrgetter('__name__'))
18502 + for result in sorted(cls.known_results, key=attrgetter("__name__"))
18503 ]
18504
18505 def _render_results(self, results, **kwargs):
18506 @@ -50,34 +54,34 @@ class TestNetworkChecks:
18507 output = f.read().decode()
18508 return output
18509
18510 - @pytest.mark.parametrize('check, result', _net_results)
18511 + @pytest.mark.parametrize("check, result", _net_results)
18512 def test_scan(self, check, result):
18513 check_name = check.__name__
18514 keyword = result.__name__
18515
18516 - result_dir = self.repos_dir / 'network' / check_name
18517 - paths = tuple(result_dir.glob(keyword + '*'))
18518 + result_dir = self.repos_dir / "network" / check_name
18519 + paths = tuple(result_dir.glob(keyword + "*"))
18520 if not paths:
18521 - pytest.skip('data unavailable')
18522 + pytest.skip("data unavailable")
18523
18524 for path in paths:
18525 ebuild_name = os.path.basename(path)
18526 - data_dir = self.repos_data / 'network' / check_name / ebuild_name
18527 + data_dir = self.repos_data / "network" / check_name / ebuild_name
18528
18529 # load response data to fake
18530 - module_path = path / 'responses.py'
18531 - spec = importlib.util.spec_from_file_location('responses_mod', module_path)
18532 + module_path = path / "responses.py"
18533 + spec = importlib.util.spec_from_file_location("responses_mod", module_path)
18534 responses_mod = importlib.util.module_from_spec(spec)
18535 spec.loader.exec_module(responses_mod)
18536
18537 results = []
18538 - args = ['-c', check_name, '-k', keyword, f'{check_name}/{ebuild_name}']
18539 - with patch('pkgcheck.addons.net.requests.Session.send') as send:
18540 + args = ["-c", check_name, "-k", keyword, f"{check_name}/{ebuild_name}"]
18541 + with patch("pkgcheck.addons.net.requests.Session.send") as send:
18542 send.side_effect = responses_mod.responses
18543
18544 # load expected results if they exist
18545 try:
18546 - with (data_dir / 'expected.json').open() as f:
18547 + with (data_dir / "expected.json").open() as f:
18548 expected_results = set(reporters.JsonStream.from_iter(f))
18549 except FileNotFoundError:
18550 # check stopped before making request or completed successfully
18551 @@ -85,37 +89,42 @@ class TestNetworkChecks:
18552
18553 results = list(self.scan(self.scan_args + args))
18554 rendered_results = self._render_results(results)
18555 - assert rendered_results, 'failed rendering results'
18556 + assert rendered_results, "failed rendering results"
18557 if set(results) != expected_results:
18558 - error = ['unmatched results:']
18559 + error = ["unmatched results:"]
18560 expected = self._render_results(expected_results)
18561 - error.append(f'expected:\n{expected}')
18562 - error.append(f'got:\n{rendered_results}')
18563 - pytest.fail('\n'.join(error))
18564 -
18565 - @pytest.mark.parametrize('check, result', (
18566 - (HomepageUrlCheck, DeadUrl),
18567 - (FetchablesUrlCheck, DeadUrl),
18568 - ))
18569 + error.append(f"expected:\n{expected}")
18570 + error.append(f"got:\n{rendered_results}")
18571 + pytest.fail("\n".join(error))
18572 +
18573 + @pytest.mark.parametrize(
18574 + "check, result",
18575 + (
18576 + (HomepageUrlCheck, DeadUrl),
18577 + (FetchablesUrlCheck, DeadUrl),
18578 + ),
18579 + )
18580 def test_scan_ftp(self, check, result):
18581 check_name = check.__name__
18582 keyword = result.__name__
18583
18584 - pkg = RawCPV(check_name, f'ftp-{keyword}', '0')
18585 - if check_name == 'HomepageUrlCheck':
18586 - deadurl = DeadUrl('HOMEPAGE', 'ftp://pkgcheck.net/pkgcheck/', 'dead ftp', pkg=pkg)
18587 + pkg = RawCPV(check_name, f"ftp-{keyword}", "0")
18588 + if check_name == "HomepageUrlCheck":
18589 + deadurl = DeadUrl("HOMEPAGE", "ftp://pkgcheck.net/pkgcheck/", "dead ftp", pkg=pkg)
18590 else:
18591 - deadurl = DeadUrl('SRC_URI', 'ftp://pkgcheck.net/pkgcheck/foo.tar.gz', 'dead ftp', pkg=pkg)
18592 + deadurl = DeadUrl(
18593 + "SRC_URI", "ftp://pkgcheck.net/pkgcheck/foo.tar.gz", "dead ftp", pkg=pkg
18594 + )
18595
18596 data = (
18597 - (urllib.error.URLError('dead ftp'), deadurl),
18598 - (socket.timeout('dead ftp'), deadurl),
18599 + (urllib.error.URLError("dead ftp"), deadurl),
18600 + (socket.timeout("dead ftp"), deadurl),
18601 (None, None), # faking a clean connection
18602 )
18603
18604 - args = ['-c', check_name, '-k', keyword, f'{check_name}/ftp-{keyword}']
18605 + args = ["-c", check_name, "-k", keyword, f"{check_name}/ftp-{keyword}"]
18606 for side_effect, expected_result in data:
18607 - with patch('pkgcheck.checks.network.urllib.request.urlopen') as urlopen:
18608 + with patch("pkgcheck.checks.network.urllib.request.urlopen") as urlopen:
18609 if side_effect is not None:
18610 urlopen.side_effect = side_effect
18611 results = list(self.scan(self.scan_args + args))
18612 @@ -123,4 +132,4 @@ class TestNetworkChecks:
18613 assert not results
18614 else:
18615 assert results == [expected_result]
18616 - assert self._render_results(results), 'failed rendering results'
18617 + assert self._render_results(results), "failed rendering results"
18618
18619 diff --git a/tests/checks/test_perl.py b/tests/checks/test_perl.py
18620 index b9c25578..1b26e412 100644
18621 --- a/tests/checks/test_perl.py
18622 +++ b/tests/checks/test_perl.py
18623 @@ -6,7 +6,7 @@ from snakeoil.cli import arghparse
18624
18625 from .. import misc
18626
18627 -REASON = ''
18628 +REASON = ""
18629
18630
18631 def perl_deps_missing():
18632 @@ -28,49 +28,49 @@ class TestPerlCheck(misc.ReportTestCase):
18633 def mk_check(self, verbosity=0):
18634 return self.check_kls(arghparse.Namespace(verbosity=verbosity))
18635
18636 - def mk_pkg(self, PVR, dist_version='', eclasses=('perl-module',), **kwargs):
18637 - lines = ['inherit perl-module\n']
18638 + def mk_pkg(self, PVR, dist_version="", eclasses=("perl-module",), **kwargs):
18639 + lines = ["inherit perl-module\n"]
18640 if dist_version:
18641 - lines.append(f'DIST_VERSION={dist_version}\n')
18642 - kwargs.setdefault('EAPI', '7')
18643 - kwargs.setdefault('_eclasses_', list(eclasses))
18644 - return misc.FakePkg(f'app-foo/bar-{PVR}', lines=lines, data=kwargs)
18645 + lines.append(f"DIST_VERSION={dist_version}\n")
18646 + kwargs.setdefault("EAPI", "7")
18647 + kwargs.setdefault("_eclasses_", list(eclasses))
18648 + return misc.FakePkg(f"app-foo/bar-{PVR}", lines=lines, data=kwargs)
18649
18650 def test_matching(self):
18651 """Ebuilds with matching DIST_VERSION and package version."""
18652 - for PVR in ('1.7.0-r0', '1.7.0', '1.7.0-r100'):
18653 - self.assertNoReport(self.mk_check(), self.mk_pkg(PVR, '1.007'))
18654 + for PVR in ("1.7.0-r0", "1.7.0", "1.7.0-r100"):
18655 + self.assertNoReport(self.mk_check(), self.mk_pkg(PVR, "1.007"))
18656
18657 def test_nonmatching(self):
18658 """Ebuilds without matching DIST_VERSION and package version."""
18659 - for PVR in ('1.7.0-r0', '1.7.0', '1.7.0-r100'):
18660 - r = self.assertReport(self.mk_check(), self.mk_pkg(PVR, '1.07'))
18661 + for PVR in ("1.7.0-r0", "1.7.0", "1.7.0-r100"):
18662 + r = self.assertReport(self.mk_check(), self.mk_pkg(PVR, "1.07"))
18663 assert isinstance(r, perl.MismatchedPerlVersion)
18664 - assert r.dist_version == '1.07'
18665 - assert r.normalized == '1.70.0'
18666 - assert 'DIST_VERSION=1.07 normalizes to 1.70.0' in str(r)
18667 - r = self.assertReport(self.mk_check(), self.mk_pkg(PVR, '1.7'))
18668 + assert r.dist_version == "1.07"
18669 + assert r.normalized == "1.70.0"
18670 + assert "DIST_VERSION=1.07 normalizes to 1.70.0" in str(r)
18671 + r = self.assertReport(self.mk_check(), self.mk_pkg(PVR, "1.7"))
18672 assert isinstance(r, perl.MismatchedPerlVersion)
18673 - assert r.dist_version == '1.7'
18674 - assert r.normalized == '1.700.0'
18675 - assert 'DIST_VERSION=1.7 normalizes to 1.700.0' in str(r)
18676 + assert r.dist_version == "1.7"
18677 + assert r.normalized == "1.700.0"
18678 + assert "DIST_VERSION=1.7 normalizes to 1.700.0" in str(r)
18679
18680 def test_no_dist_version(self):
18681 """Ebuilds without DIST_VERSION defined are skipped."""
18682 - self.assertNoReport(self.mk_check(), self.mk_pkg('1.7.0'))
18683 + self.assertNoReport(self.mk_check(), self.mk_pkg("1.7.0"))
18684
18685 def test_no_perl(self):
18686 """Check initialization fails if perl isn't installed."""
18687 - with patch('subprocess.Popen') as popen:
18688 - popen.side_effect = FileNotFoundError('perl not available')
18689 - with pytest.raises(SkipCheck, match='perl not installed'):
18690 + with patch("subprocess.Popen") as popen:
18691 + popen.side_effect = FileNotFoundError("perl not available")
18692 + with pytest.raises(SkipCheck, match="perl not installed"):
18693 self.mk_check()
18694
18695 def test_no_perl_deps(self):
18696 """Check initialization fails if perl deps aren't installed."""
18697 - with patch('pkgcheck.checks.perl.subprocess.Popen') as popen:
18698 - popen.return_value.stdout.readline.return_value = 'perl error'
18699 + with patch("pkgcheck.checks.perl.subprocess.Popen") as popen:
18700 + popen.return_value.stdout.readline.return_value = "perl error"
18701 popen.return_value.poll.return_value = 2
18702 for verbosity in (0, 1):
18703 - with pytest.raises(SkipCheck, match='failed to run perl script'):
18704 + with pytest.raises(SkipCheck, match="failed to run perl script"):
18705 self.mk_check(verbosity=verbosity)
18706
18707 diff --git a/tests/checks/test_pkgdir.py b/tests/checks/test_pkgdir.py
18708 index 1fc01b01..2a26e79c 100644
18709 --- a/tests/checks/test_pkgdir.py
18710 +++ b/tests/checks/test_pkgdir.py
18711 @@ -21,33 +21,34 @@ class PkgDirCheckBase(misc.ReportTestCase):
18712
18713 @pytest.fixture(autouse=True)
18714 def _create_repo(self, tmpdir):
18715 - self.repo = FakeRepo(repo_id='repo', location=str(tmpdir))
18716 + self.repo = FakeRepo(repo_id="repo", location=str(tmpdir))
18717
18718 def mk_check(self, gentoo=False):
18719 options = arghparse.Namespace(
18720 - target_repo=self.repo, cache={'git': False}, gentoo_repo=gentoo)
18721 + target_repo=self.repo, cache={"git": False}, gentoo_repo=gentoo
18722 + )
18723 kwargs = {}
18724 if addons.git.GitAddon in self.check_kls.required_addons:
18725 - kwargs['git_addon'] = addons.git.GitAddon(options)
18726 + kwargs["git_addon"] = addons.git.GitAddon(options)
18727 return self.check_kls(options, **kwargs)
18728
18729 - def mk_pkg(self, files={}, category=None, package=None, version='0.7.1', revision=''):
18730 + def mk_pkg(self, files={}, category=None, package=None, version="0.7.1", revision=""):
18731 # generate random cat/PN
18732 category = misc.random_str() if category is None else category
18733 package = misc.random_str() if package is None else package
18734
18735 pkg = f"{category}/{package}-{version}{revision}"
18736 - self.filesdir = pjoin(self.repo.location, category, package, 'files')
18737 + self.filesdir = pjoin(self.repo.location, category, package, "files")
18738 # create files dir with random empty subdir
18739 os.makedirs(pjoin(self.filesdir, misc.random_str()), exist_ok=True)
18740
18741 # create dirs that should be ignored
18742 - for d in getattr(self.check_kls, 'ignore_dirs', ()):
18743 + for d in getattr(self.check_kls, "ignore_dirs", ()):
18744 os.makedirs(pjoin(self.filesdir, d), exist_ok=True)
18745
18746 # create specified files in FILESDIR
18747 for fn, contents in files.items():
18748 - with open(pjoin(self.filesdir, fn), 'w') as file:
18749 + with open(pjoin(self.filesdir, fn), "w") as file:
18750 file.write(contents)
18751
18752 return misc.FakeFilesDirPkg(pkg, repo=self.repo)
18753 @@ -64,24 +65,30 @@ class TestDuplicateFiles(PkgDirCheckBase):
18754 """Check DuplicateFiles results."""
18755
18756 def test_unique_files(self):
18757 - self.assertNoReport(self.mk_check(), [self.mk_pkg({'test': 'abc', 'test2': 'bcd'})])
18758 + self.assertNoReport(self.mk_check(), [self.mk_pkg({"test": "abc", "test2": "bcd"})])
18759
18760 def test_single_duplicate(self):
18761 - pkg = self.mk_pkg({'test': 'abc', 'test2': 'abc'})
18762 + pkg = self.mk_pkg({"test": "abc", "test2": "abc"})
18763 r = self.assertReport(self.mk_check(), [pkg])
18764 assert isinstance(r, pkgdir.DuplicateFiles)
18765 - assert r.files == ('files/test', 'files/test2')
18766 + assert r.files == ("files/test", "files/test2")
18767 assert "'files/test', 'files/test2'" in str(r)
18768
18769 def test_multiple_duplicates(self):
18770 - r = self.assertReports(self.mk_check(), [self.mk_pkg(
18771 - {'test': 'abc', 'test2': 'abc', 'test3': 'bcd', 'test4': 'bcd', 'test5': 'zzz'})])
18772 + r = self.assertReports(
18773 + self.mk_check(),
18774 + [
18775 + self.mk_pkg(
18776 + {"test": "abc", "test2": "abc", "test3": "bcd", "test4": "bcd", "test5": "zzz"}
18777 + )
18778 + ],
18779 + )
18780 assert len(r) == 2
18781 assert isinstance(r[0], pkgdir.DuplicateFiles)
18782 assert isinstance(r[1], pkgdir.DuplicateFiles)
18783 - assert (
18784 - tuple(sorted(x.files for x in r)) ==
18785 - (('files/test', 'files/test2'), ('files/test3', 'files/test4'))
18786 + assert tuple(sorted(x.files for x in r)) == (
18787 + ("files/test", "files/test2"),
18788 + ("files/test3", "files/test4"),
18789 )
18790
18791
18792 @@ -89,29 +96,29 @@ class TestEmptyFile(PkgDirCheckBase):
18793 """Check EmptyFile results."""
18794
18795 def test_nonempty_file(self):
18796 - self.assertNoReport(self.mk_check(), [self.mk_pkg({'test': 'asdfgh'})])
18797 + self.assertNoReport(self.mk_check(), [self.mk_pkg({"test": "asdfgh"})])
18798
18799 def test_single_empty_file(self):
18800 assert isinstance(
18801 - self.assertReport(self.mk_check(), [self.mk_pkg({'test': ''})]),
18802 - pkgdir.EmptyFile)
18803 + self.assertReport(self.mk_check(), [self.mk_pkg({"test": ""})]), pkgdir.EmptyFile
18804 + )
18805
18806 def test_multiple_empty_files(self):
18807 - r = self.assertReports(self.mk_check(), [self.mk_pkg({'test': '', 'test2': ''})])
18808 + r = self.assertReports(self.mk_check(), [self.mk_pkg({"test": "", "test2": ""})])
18809 assert len(r) == 2
18810 assert isinstance(r[0], pkgdir.EmptyFile)
18811 assert isinstance(r[1], pkgdir.EmptyFile)
18812 - assert sorted(x.filename for x in r) == ['files/test', 'files/test2']
18813 + assert sorted(x.filename for x in r) == ["files/test", "files/test2"]
18814
18815 def test_mixture_of_files(self):
18816 - r = self.assertReport(self.mk_check(), [self.mk_pkg({'test': 'asdfgh', 'test2': ''})])
18817 + r = self.assertReport(self.mk_check(), [self.mk_pkg({"test": "asdfgh", "test2": ""})])
18818 assert isinstance(r, pkgdir.EmptyFile)
18819 - assert r.filename == 'files/test2'
18820 - assert 'files/test2' in str(r)
18821 - r = self.assertReport(self.mk_check(), [self.mk_pkg({'test': '', 'test2': 'asdfgh'})])
18822 + assert r.filename == "files/test2"
18823 + assert "files/test2" in str(r)
18824 + r = self.assertReport(self.mk_check(), [self.mk_pkg({"test": "", "test2": "asdfgh"})])
18825 assert isinstance(r, pkgdir.EmptyFile)
18826 - assert r.filename == 'files/test'
18827 - assert 'files/test' in str(r)
18828 + assert r.filename == "files/test"
18829 + assert "files/test" in str(r)
18830
18831
18832 class TestMismatchedPN(PkgDirCheckBase):
18833 @@ -119,29 +126,29 @@ class TestMismatchedPN(PkgDirCheckBase):
18834
18835 def test_multiple_regular_ebuilds(self):
18836 pkg = self.mk_pkg()
18837 - touch(pjoin(os.path.dirname(pkg.path), f'{pkg.package}-0.ebuild'))
18838 - touch(pjoin(os.path.dirname(pkg.path), f'{pkg.package}-1.ebuild'))
18839 - touch(pjoin(os.path.dirname(pkg.path), f'{pkg.package}-2.ebuild'))
18840 + touch(pjoin(os.path.dirname(pkg.path), f"{pkg.package}-0.ebuild"))
18841 + touch(pjoin(os.path.dirname(pkg.path), f"{pkg.package}-1.ebuild"))
18842 + touch(pjoin(os.path.dirname(pkg.path), f"{pkg.package}-2.ebuild"))
18843 self.assertNoReport(self.mk_check(), [pkg])
18844
18845 def test_single_mismatched_ebuild(self):
18846 pkg = self.mk_pkg()
18847 - touch(pjoin(os.path.dirname(pkg.path), 'mismatched-0.ebuild'))
18848 + touch(pjoin(os.path.dirname(pkg.path), "mismatched-0.ebuild"))
18849 r = self.assertReport(self.mk_check(), [pkg])
18850 assert isinstance(r, pkgdir.MismatchedPN)
18851 - assert r.ebuilds == ('mismatched-0',)
18852 - assert 'mismatched-0' in str(r)
18853 + assert r.ebuilds == ("mismatched-0",)
18854 + assert "mismatched-0" in str(r)
18855
18856 def test_multiple_mismatched_ebuilds(self):
18857 pkg = self.mk_pkg()
18858 - touch(pjoin(os.path.dirname(pkg.path), f'{pkg.package}-0.ebuild'))
18859 - touch(pjoin(os.path.dirname(pkg.path), f'{pkg.package}-1.ebuild'))
18860 - touch(pjoin(os.path.dirname(pkg.path), 'mismatched-0.ebuild'))
18861 - touch(pjoin(os.path.dirname(pkg.path), 'abc-1.ebuild'))
18862 + touch(pjoin(os.path.dirname(pkg.path), f"{pkg.package}-0.ebuild"))
18863 + touch(pjoin(os.path.dirname(pkg.path), f"{pkg.package}-1.ebuild"))
18864 + touch(pjoin(os.path.dirname(pkg.path), "mismatched-0.ebuild"))
18865 + touch(pjoin(os.path.dirname(pkg.path), "abc-1.ebuild"))
18866 r = self.assertReport(self.mk_check(), [pkg])
18867 assert isinstance(r, pkgdir.MismatchedPN)
18868 - assert r.ebuilds == ('abc-1', 'mismatched-0')
18869 - assert 'abc-1, mismatched-0' in str(r)
18870 + assert r.ebuilds == ("abc-1", "mismatched-0")
18871 + assert "abc-1, mismatched-0" in str(r)
18872
18873
18874 class TestInvalidPN(PkgDirCheckBase):
18875 @@ -149,27 +156,27 @@ class TestInvalidPN(PkgDirCheckBase):
18876
18877 def test_regular_ebuild(self):
18878 pkg = self.mk_pkg()
18879 - touch(pjoin(os.path.dirname(pkg.path), f'{pkg.package}-0.ebuild'))
18880 + touch(pjoin(os.path.dirname(pkg.path), f"{pkg.package}-0.ebuild"))
18881 self.assertNoReport(self.mk_check(), [pkg])
18882
18883 def test_single_invalid_ebuild(self):
18884 - pkg = self.mk_pkg(category='sys-apps', package='invalid')
18885 - touch(pjoin(os.path.dirname(pkg.path), 'invalid-0-foo.ebuild'))
18886 + pkg = self.mk_pkg(category="sys-apps", package="invalid")
18887 + touch(pjoin(os.path.dirname(pkg.path), "invalid-0-foo.ebuild"))
18888 r = self.assertReport(self.mk_check(), [pkg])
18889 assert isinstance(r, pkgdir.InvalidPN)
18890 - assert r.ebuilds == ('invalid-0-foo',)
18891 - assert 'invalid-0-foo' in str(r)
18892 + assert r.ebuilds == ("invalid-0-foo",)
18893 + assert "invalid-0-foo" in str(r)
18894
18895 def test_multiple_invalid_ebuilds(self):
18896 - pkg = self.mk_pkg(category='sys-apps', package='bar')
18897 - touch(pjoin(os.path.dirname(pkg.path), 'bar-0.ebuild'))
18898 - touch(pjoin(os.path.dirname(pkg.path), 'bar-1.ebuild'))
18899 - touch(pjoin(os.path.dirname(pkg.path), 'bar-0-foo1.ebuild'))
18900 - touch(pjoin(os.path.dirname(pkg.path), 'bar-1-foo2.ebuild'))
18901 + pkg = self.mk_pkg(category="sys-apps", package="bar")
18902 + touch(pjoin(os.path.dirname(pkg.path), "bar-0.ebuild"))
18903 + touch(pjoin(os.path.dirname(pkg.path), "bar-1.ebuild"))
18904 + touch(pjoin(os.path.dirname(pkg.path), "bar-0-foo1.ebuild"))
18905 + touch(pjoin(os.path.dirname(pkg.path), "bar-1-foo2.ebuild"))
18906 r = self.assertReport(self.mk_check(), [pkg])
18907 assert isinstance(r, pkgdir.InvalidPN)
18908 - assert r.ebuilds == ('bar-0-foo1', 'bar-1-foo2')
18909 - assert 'bar-0-foo1, bar-1-foo2' in str(r)
18910 + assert r.ebuilds == ("bar-0-foo1", "bar-1-foo2")
18911 + assert "bar-0-foo1, bar-1-foo2" in str(r)
18912
18913
18914 class TestInvalidUTF8(PkgDirCheckBase):
18915 @@ -177,26 +184,26 @@ class TestInvalidUTF8(PkgDirCheckBase):
18916
18917 def test_ascii_ebuild(self):
18918 pkg = self.mk_pkg()
18919 - ebuild_path = pjoin(os.path.dirname(pkg.path), f'{pkg.package}-0.ebuild')
18920 - with open(ebuild_path, 'w', encoding='ascii') as f:
18921 + ebuild_path = pjoin(os.path.dirname(pkg.path), f"{pkg.package}-0.ebuild")
18922 + with open(ebuild_path, "w", encoding="ascii") as f:
18923 f.write('EAPI=7\nDESCRIPTION="foobar"\n')
18924 self.assertNoReport(self.mk_check(), [pkg])
18925
18926 def test_utf8_ebuild(self):
18927 pkg = self.mk_pkg()
18928 - ebuild_path = pjoin(os.path.dirname(pkg.path), f'{pkg.package}-0.ebuild')
18929 - with open(ebuild_path, 'w') as f:
18930 + ebuild_path = pjoin(os.path.dirname(pkg.path), f"{pkg.package}-0.ebuild")
18931 + with open(ebuild_path, "w") as f:
18932 f.write('EAPI=6\nDESCRIPTION="fóóbár"\n')
18933 self.assertNoReport(self.mk_check(), [pkg])
18934
18935 def test_latin1_ebuild(self):
18936 pkg = self.mk_pkg()
18937 - ebuild_path = pjoin(os.path.dirname(pkg.path), f'{pkg.package}-0.ebuild')
18938 - with open(ebuild_path, 'w', encoding='latin-1') as f:
18939 + ebuild_path = pjoin(os.path.dirname(pkg.path), f"{pkg.package}-0.ebuild")
18940 + with open(ebuild_path, "w", encoding="latin-1") as f:
18941 f.write('EAPI=5\nDESCRIPTION="fôòbår"\n')
18942 r = self.assertReport(self.mk_check(), [pkg])
18943 assert isinstance(r, pkgdir.InvalidUTF8)
18944 - assert r.filename == f'{pkg.package}-0.ebuild'
18945 + assert r.filename == f"{pkg.package}-0.ebuild"
18946 assert r.filename in str(r)
18947
18948
18949 @@ -207,44 +214,48 @@ class TestEqualVersions(PkgDirCheckBase):
18950
18951 def test_it(self):
18952 # pkg with no revision
18953 - pkg_a = self.mk_pkg(version='0')
18954 + pkg_a = self.mk_pkg(version="0")
18955 self.assertNoReport(self.mk_check(), [pkg_a])
18956
18957 # single, matching revision
18958 pkg_b = self.mk_pkg(
18959 - category=pkg_a.category, package=pkg_a.package, version='0', revision='-r0')
18960 + category=pkg_a.category, package=pkg_a.package, version="0", revision="-r0"
18961 + )
18962 r = self.assertReport(self.mk_check(), [pkg_a, pkg_b])
18963 assert isinstance(r, pkgdir.EqualVersions)
18964 - assert r.versions == ('0', '0-r0')
18965 - assert '[ 0, 0-r0 ]' in str(r)
18966 + assert r.versions == ("0", "0-r0")
18967 + assert "[ 0, 0-r0 ]" in str(r)
18968
18969 # multiple, matching revisions
18970 pkg_c = self.mk_pkg(
18971 - category=pkg_a.category, package=pkg_a.package, version='0', revision='-r000')
18972 + category=pkg_a.category, package=pkg_a.package, version="0", revision="-r000"
18973 + )
18974 r = self.assertReport(self.mk_check(), [pkg_a, pkg_b, pkg_c])
18975 assert isinstance(r, pkgdir.EqualVersions)
18976 - assert r.versions == ('0', '0-r0', '0-r000')
18977 - assert '[ 0, 0-r0, 0-r000 ]' in str(r)
18978 + assert r.versions == ("0", "0-r0", "0-r000")
18979 + assert "[ 0, 0-r0, 0-r000 ]" in str(r)
18980
18981 # unsorted, matching revisions
18982 - pkg_new_version = self.mk_pkg(
18983 - category=pkg_a.category, package=pkg_a.package, version='1')
18984 + pkg_new_version = self.mk_pkg(category=pkg_a.category, package=pkg_a.package, version="1")
18985 r = self.assertReport(self.mk_check(), [pkg_b, pkg_new_version, pkg_c, pkg_a])
18986 assert isinstance(r, pkgdir.EqualVersions)
18987 - assert r.versions == ('0', '0-r0', '0-r000')
18988 - assert '[ 0, 0-r0, 0-r000 ]' in str(r)
18989 + assert r.versions == ("0", "0-r0", "0-r000")
18990 + assert "[ 0, 0-r0, 0-r000 ]" in str(r)
18991
18992 # multiple, matching revisions with 0 prefixes
18993 pkg_d = self.mk_pkg(
18994 - category=pkg_a.category, package=pkg_a.package, version='0', revision='-r1')
18995 + category=pkg_a.category, package=pkg_a.package, version="0", revision="-r1"
18996 + )
18997 pkg_e = self.mk_pkg(
18998 - category=pkg_a.category, package=pkg_a.package, version='0', revision='-r01')
18999 + category=pkg_a.category, package=pkg_a.package, version="0", revision="-r01"
19000 + )
19001 pkg_f = self.mk_pkg(
19002 - category=pkg_a.category, package=pkg_a.package, version='0', revision='-r001')
19003 + category=pkg_a.category, package=pkg_a.package, version="0", revision="-r001"
19004 + )
19005 r = self.assertReport(self.mk_check(), [pkg_d, pkg_e, pkg_f])
19006 assert isinstance(r, pkgdir.EqualVersions)
19007 - assert r.versions == ('0-r001', '0-r01', '0-r1')
19008 - assert '[ 0-r001, 0-r01, 0-r1 ]' in str(r)
19009 + assert r.versions == ("0-r001", "0-r01", "0-r1")
19010 + assert "[ 0-r001, 0-r01, 0-r1 ]" in str(r)
19011
19012
19013 class TestSizeViolation(PkgDirCheckBase):
19014 @@ -252,50 +263,51 @@ class TestSizeViolation(PkgDirCheckBase):
19015
19016 def test_files_under_size_limit(self):
19017 pkg = self.mk_pkg()
19018 - for name, size in (('small', 1024*10),
19019 - ('limit', 1024*20-1)):
19020 - with open(pjoin(self.filesdir, name), 'w') as f:
19021 + for name, size in (("small", 1024 * 10), ("limit", 1024 * 20 - 1)):
19022 + with open(pjoin(self.filesdir, name), "w") as f:
19023 f.seek(size)
19024 - f.write('\0')
19025 + f.write("\0")
19026 self.assertNoReport(self.mk_check(), [pkg])
19027
19028 def test_single_file_over_limit(self):
19029 pkg = self.mk_pkg()
19030 - with open(pjoin(self.filesdir, 'over'), 'w') as f:
19031 - f.seek(1024*20)
19032 - f.write('\0')
19033 + with open(pjoin(self.filesdir, "over"), "w") as f:
19034 + f.seek(1024 * 20)
19035 + f.write("\0")
19036 r = self.assertReport(self.mk_check(), [pkg])
19037 assert isinstance(r, pkgdir.SizeViolation)
19038 - assert r.filename == 'files/over'
19039 - assert r.size == 1024*20+1
19040 - assert 'files/over' in str(r)
19041 + assert r.filename == "files/over"
19042 + assert r.size == 1024 * 20 + 1
19043 + assert "files/over" in str(r)
19044
19045 def test_multiple_files_over_limit(self):
19046 pkg = self.mk_pkg()
19047 - for name, size in (('small', 1024*10),
19048 - ('limit', 1024*20-1),
19049 - ('over', 1024*20),
19050 - ('massive', 1024*100)):
19051 - with open(pjoin(self.filesdir, name), 'w') as f:
19052 + for name, size in (
19053 + ("small", 1024 * 10),
19054 + ("limit", 1024 * 20 - 1),
19055 + ("over", 1024 * 20),
19056 + ("massive", 1024 * 100),
19057 + ):
19058 + with open(pjoin(self.filesdir, name), "w") as f:
19059 f.seek(size)
19060 - f.write('\0')
19061 + f.write("\0")
19062 r = self.assertReports(self.mk_check(), [pkg])
19063 assert len(r) == 3
19064 assert isinstance(r[0], pkgdir.SizeViolation)
19065 assert isinstance(r[1], pkgdir.SizeViolation)
19066 assert isinstance(r[2], pkgdir.TotalSizeViolation)
19067 - assert (
19068 - tuple(sorted((x.filename, x.size) for x in r[:2])) ==
19069 - (('files/massive', 1024*100+1), ('files/over', 1024*20+1))
19070 + assert tuple(sorted((x.filename, x.size) for x in r[:2])) == (
19071 + ("files/massive", 1024 * 100 + 1),
19072 + ("files/over", 1024 * 20 + 1),
19073 )
19074 - assert r[2].size == 1024*(10+20+20+100)+4-1
19075 + assert r[2].size == 1024 * (10 + 20 + 20 + 100) + 4 - 1
19076
19077
19078 class TestExecutableFile(PkgDirCheckBase):
19079 """Check ExecutableFile results."""
19080
19081 def test_non_empty_filesdir(self):
19082 - self.assertNoReport(self.mk_check(), [self.mk_pkg({'test': 'asdfgh'})])
19083 + self.assertNoReport(self.mk_check(), [self.mk_pkg({"test": "asdfgh"})])
19084
19085 def test_executable_ebuild(self):
19086 pkg = self.mk_pkg()
19087 @@ -307,54 +319,53 @@ class TestExecutableFile(PkgDirCheckBase):
19088
19089 def test_executable_manifest_and_metadata(self):
19090 pkg = self.mk_pkg()
19091 - touch(pjoin(os.path.dirname(pkg.path), 'Manifest'), mode=0o755)
19092 - touch(pjoin(os.path.dirname(pkg.path), 'metadata.xml'), mode=0o744)
19093 + touch(pjoin(os.path.dirname(pkg.path), "Manifest"), mode=0o755)
19094 + touch(pjoin(os.path.dirname(pkg.path), "metadata.xml"), mode=0o744)
19095 r = self.assertReports(self.mk_check(), [pkg])
19096 assert len(r) == 2
19097 assert isinstance(r[0], pkgdir.ExecutableFile)
19098 assert isinstance(r[1], pkgdir.ExecutableFile)
19099 - assert (
19100 - tuple(sorted(x.filename for x in r)) ==
19101 - ('Manifest', 'metadata.xml')
19102 - )
19103 + assert tuple(sorted(x.filename for x in r)) == ("Manifest", "metadata.xml")
19104
19105 def test_executable_filesdir_file(self):
19106 - pkg = self.mk_pkg({'foo.init': 'blah'})
19107 + pkg = self.mk_pkg({"foo.init": "blah"})
19108 touch(pkg.path)
19109 - touch(pjoin(os.path.dirname(pkg.path), 'Manifest'))
19110 - touch(pjoin(os.path.dirname(pkg.path), 'metadata.xml'))
19111 - os.chmod(pjoin(os.path.dirname(pkg.path), 'files', 'foo.init'), 0o645)
19112 + touch(pjoin(os.path.dirname(pkg.path), "Manifest"))
19113 + touch(pjoin(os.path.dirname(pkg.path), "metadata.xml"))
19114 + os.chmod(pjoin(os.path.dirname(pkg.path), "files", "foo.init"), 0o645)
19115 r = self.assertReport(self.mk_check(), [pkg])
19116 assert isinstance(r, pkgdir.ExecutableFile)
19117 - assert r.filename == 'files/foo.init'
19118 - assert 'files/foo.init' in str(r)
19119 + assert r.filename == "files/foo.init"
19120 + assert "files/foo.init" in str(r)
19121
19122
19123 class TestBannedCharacter(PkgDirCheckBase):
19124 """Check BannedCharacter results."""
19125
19126 def test_regular_files(self):
19127 - pkg = self.mk_pkg({'foo.init': 'blah'})
19128 - touch(pjoin(os.path.dirname(pkg.path), 'Manifest'))
19129 - touch(pjoin(os.path.dirname(pkg.path), 'metadata.xml'))
19130 + pkg = self.mk_pkg({"foo.init": "blah"})
19131 + touch(pjoin(os.path.dirname(pkg.path), "Manifest"))
19132 + touch(pjoin(os.path.dirname(pkg.path), "metadata.xml"))
19133 self.assertNoReport(self.mk_check(), [pkg])
19134
19135 def test_filenames_outside_allowed_charsets(self):
19136 - pkg = self.mk_pkg({
19137 - 'foo.init': 'bar',
19138 - 'foo.init~': 'foo',
19139 - })
19140 + pkg = self.mk_pkg(
19141 + {
19142 + "foo.init": "bar",
19143 + "foo.init~": "foo",
19144 + }
19145 + )
19146 # vim backup files are flagged by default
19147 r = self.assertReport(self.mk_check(), [pkg])
19148 assert isinstance(r, pkgdir.BannedCharacter)
19149 - assert 'files/foo.init~' in str(r)
19150 + assert "files/foo.init~" in str(r)
19151
19152 # but results are suppressed if a matching git ignore entry exists
19153 - for ignore_file in ('.gitignore', '.git/info/exclude'):
19154 + for ignore_file in (".gitignore", ".git/info/exclude"):
19155 path = pjoin(self.repo.location, ignore_file)
19156 ensure_dirs(os.path.dirname(path))
19157 - with open(path, 'w') as f:
19158 - f.write('*~')
19159 + with open(path, "w") as f:
19160 + f.write("*~")
19161 self.assertNoReport(self.mk_check(), [pkg])
19162 os.unlink(path)
19163
19164 @@ -363,40 +374,40 @@ class TestUnknownPkgDirEntry(PkgDirCheckBase):
19165 """Check UnknownPkgDirEntry results."""
19166
19167 def test_regular_files(self):
19168 - pkg = self.mk_pkg({'foo.init': 'blah'})
19169 - touch(pjoin(os.path.dirname(pkg.path), 'Manifest'))
19170 - touch(pjoin(os.path.dirname(pkg.path), 'metadata.xml'))
19171 + pkg = self.mk_pkg({"foo.init": "blah"})
19172 + touch(pjoin(os.path.dirname(pkg.path), "Manifest"))
19173 + touch(pjoin(os.path.dirname(pkg.path), "metadata.xml"))
19174 self.assertNoReport(self.mk_check(), [pkg])
19175
19176 def test_unknown_non_gentoo_repo(self):
19177 - pkg = self.mk_pkg({'foo.init': 'blah'})
19178 - touch(pjoin(os.path.dirname(pkg.path), 'Manifest'))
19179 - touch(pjoin(os.path.dirname(pkg.path), 'metadata.xml'))
19180 - touch(pjoin(os.path.dirname(pkg.path), 'foo-2'))
19181 + pkg = self.mk_pkg({"foo.init": "blah"})
19182 + touch(pjoin(os.path.dirname(pkg.path), "Manifest"))
19183 + touch(pjoin(os.path.dirname(pkg.path), "metadata.xml"))
19184 + touch(pjoin(os.path.dirname(pkg.path), "foo-2"))
19185 self.assertNoReport(self.mk_check(), [pkg])
19186
19187 def test_unknown_gentoo_repo(self):
19188 - pkg = self.mk_pkg({'foo.init': 'blah'})
19189 - touch(pjoin(os.path.dirname(pkg.path), 'Manifest'))
19190 - touch(pjoin(os.path.dirname(pkg.path), 'metadata.xml'))
19191 - touch(pjoin(os.path.dirname(pkg.path), 'foo-2'))
19192 + pkg = self.mk_pkg({"foo.init": "blah"})
19193 + touch(pjoin(os.path.dirname(pkg.path), "Manifest"))
19194 + touch(pjoin(os.path.dirname(pkg.path), "metadata.xml"))
19195 + touch(pjoin(os.path.dirname(pkg.path), "foo-2"))
19196 r = self.assertReport(self.mk_check(gentoo=True), [pkg])
19197 assert isinstance(r, pkgdir.UnknownPkgDirEntry)
19198 - assert 'foo-2' in str(r)
19199 + assert "foo-2" in str(r)
19200
19201 def test_unknown_gitignore(self):
19202 - pkg = self.mk_pkg(files={'foo.init': 'blah'}, category='dev-util', package='foo')
19203 - touch(pjoin(os.path.dirname(pkg.path), 'Manifest'))
19204 - touch(pjoin(os.path.dirname(pkg.path), 'metadata.xml'))
19205 - touch(pjoin(os.path.dirname(pkg.path), 'foo-0.ebuild'))
19206 - touch(pjoin(os.path.dirname(pkg.path), 'foo-0.ebuild.swp'))
19207 + pkg = self.mk_pkg(files={"foo.init": "blah"}, category="dev-util", package="foo")
19208 + touch(pjoin(os.path.dirname(pkg.path), "Manifest"))
19209 + touch(pjoin(os.path.dirname(pkg.path), "metadata.xml"))
19210 + touch(pjoin(os.path.dirname(pkg.path), "foo-0.ebuild"))
19211 + touch(pjoin(os.path.dirname(pkg.path), "foo-0.ebuild.swp"))
19212 r = self.assertReport(self.mk_check(gentoo=True), [pkg])
19213 assert isinstance(r, pkgdir.UnknownPkgDirEntry)
19214 - assert 'foo-0.ebuild.swp' in str(r)
19215 + assert "foo-0.ebuild.swp" in str(r)
19216
19217 # results are suppressed if a matching .gitignore entry exists
19218 - with open(pjoin(self.repo.location, '.gitignore'), 'w') as f:
19219 - f.write('*.swp')
19220 + with open(pjoin(self.repo.location, ".gitignore"), "w") as f:
19221 + f.write("*.swp")
19222 self.assertNoReport(self.mk_check(gentoo=True), [pkg])
19223
19224
19225 @@ -411,17 +422,17 @@ class TestLiveOnlyCheck(misc.ReportTestCase):
19226
19227 # initialize parent repo
19228 self.parent_git_repo = make_git_repo()
19229 - self.parent_repo = make_repo(self.parent_git_repo.path, repo_id='gentoo')
19230 - self.parent_git_repo.add_all('initial commit')
19231 + self.parent_repo = make_repo(self.parent_git_repo.path, repo_id="gentoo")
19232 + self.parent_git_repo.add_all("initial commit")
19233 # create a stub pkg and commit it
19234 - self.parent_repo.create_ebuild('cat/pkg-0', properties='live')
19235 - self.parent_git_repo.add_all('cat/pkg-0')
19236 + self.parent_repo.create_ebuild("cat/pkg-0", properties="live")
19237 + self.parent_git_repo.add_all("cat/pkg-0")
19238
19239 # initialize child repo
19240 self.child_git_repo = make_git_repo()
19241 - self.child_git_repo.run(['git', 'remote', 'add', 'origin', self.parent_git_repo.path])
19242 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
19243 - self.child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
19244 + self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
19245 + self.child_git_repo.run(["git", "pull", "origin", "main"])
19246 + self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
19247 self.child_repo = make_repo(self.child_git_repo.path)
19248
19249 def init_check(self, options=None, future=0):
19250 @@ -434,45 +445,49 @@ class TestLiveOnlyCheck(misc.ReportTestCase):
19251
19252 def _options(self, **kwargs):
19253 args = [
19254 - 'scan', '-q', '--cache-dir', self.cache_dir,
19255 - '--repo', self.child_repo.location,
19256 + "scan",
19257 + "-q",
19258 + "--cache-dir",
19259 + self.cache_dir,
19260 + "--repo",
19261 + self.child_repo.location,
19262 ]
19263 options, _ = self._tool.parse_args(args)
19264 return options
19265
19266 def test_no_git_support(self):
19267 options = self._options()
19268 - options.cache['git'] = False
19269 - with pytest.raises(SkipCheck, match='git cache support required'):
19270 + options.cache["git"] = False
19271 + with pytest.raises(SkipCheck, match="git cache support required"):
19272 self.init_check(options)
19273
19274 def test_keywords_exist(self):
19275 - self.parent_repo.create_ebuild('cat/pkg-1', keywords=['~amd64'])
19276 - self.parent_git_repo.add_all('cat/pkg-1')
19277 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
19278 + self.parent_repo.create_ebuild("cat/pkg-1", keywords=["~amd64"])
19279 + self.parent_git_repo.add_all("cat/pkg-1")
19280 + self.child_git_repo.run(["git", "pull", "origin", "main"])
19281 self.init_check()
19282 self.assertNoReport(self.check, self.source)
19283
19284 def test_all_live_pkgs(self):
19285 - self.parent_repo.create_ebuild('cat/pkg-1', properties='live')
19286 - self.parent_git_repo.add_all('cat/pkg-1')
19287 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
19288 + self.parent_repo.create_ebuild("cat/pkg-1", properties="live")
19289 + self.parent_git_repo.add_all("cat/pkg-1")
19290 + self.child_git_repo.run(["git", "pull", "origin", "main"])
19291 self.init_check()
19292 # result will trigger for any package age
19293 - expected = pkgdir.LiveOnlyPackage(0, pkg=UnversionedCPV('cat/pkg'))
19294 + expected = pkgdir.LiveOnlyPackage(0, pkg=UnversionedCPV("cat/pkg"))
19295 r = self.assertReport(self.check, self.source)
19296 assert r == expected
19297
19298 # packages are now a year old
19299 self.init_check(future=365)
19300 - expected = pkgdir.LiveOnlyPackage(365, pkg=UnversionedCPV('cat/pkg'))
19301 + expected = pkgdir.LiveOnlyPackage(365, pkg=UnversionedCPV("cat/pkg"))
19302 r = self.assertReport(self.check, self.source)
19303 assert r == expected
19304
19305 def test_uncommitted_local_ebuild(self):
19306 - self.parent_repo.create_ebuild('cat/pkg-1', properties='live')
19307 - self.parent_git_repo.add_all('cat/pkg-1')
19308 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
19309 - self.child_repo.create_ebuild('cat/pkg-2', properties='live')
19310 + self.parent_repo.create_ebuild("cat/pkg-1", properties="live")
19311 + self.parent_git_repo.add_all("cat/pkg-1")
19312 + self.child_git_repo.run(["git", "pull", "origin", "main"])
19313 + self.child_repo.create_ebuild("cat/pkg-2", properties="live")
19314 self.init_check(future=180)
19315 self.assertNoReport(self.check, self.source)
19316
19317 diff --git a/tests/checks/test_python.py b/tests/checks/test_python.py
19318 index 843975b5..eb4c44fb 100644
19319 --- a/tests/checks/test_python.py
19320 +++ b/tests/checks/test_python.py
19321 @@ -9,180 +9,201 @@ class TestPythonCheck(misc.ReportTestCase):
19322 check_kls = python.PythonCheck
19323
19324 def mk_pkg(self, cpv="app-foo/bar-1", **kwargs):
19325 - kwargs.setdefault('EAPI', '7')
19326 + kwargs.setdefault("EAPI", "7")
19327 return misc.FakePkg(cpv, data=kwargs)
19328
19329 def test_multiple_eclasses(self):
19330 r = self.assertReport(
19331 self.check,
19332 - self.mk_pkg(_eclasses_=['python-any-r1', 'python-single-r1'],
19333 - DEPEND='dev-lang/python'))
19334 + self.mk_pkg(_eclasses_=["python-any-r1", "python-single-r1"], DEPEND="dev-lang/python"),
19335 + )
19336 assert isinstance(r, python.PythonEclassError)
19337
19338 def test_missing_eclass_depend(self):
19339 self.assertNoReport(
19340 - self.check,
19341 - self.mk_pkg(_eclasses_=['python-any-r1'], DEPEND='dev-lang/python'))
19342 - self.assertNoReport(self.check, self.mk_pkg(DEPEND='dev-foo/frobnicate'))
19343 + self.check, self.mk_pkg(_eclasses_=["python-any-r1"], DEPEND="dev-lang/python")
19344 + )
19345 + self.assertNoReport(self.check, self.mk_pkg(DEPEND="dev-foo/frobnicate"))
19346
19347 - r = self.assertReport(self.check, self.mk_pkg(DEPEND='dev-lang/python'))
19348 + r = self.assertReport(self.check, self.mk_pkg(DEPEND="dev-lang/python"))
19349 assert isinstance(r, python.MissingPythonEclass)
19350 assert 'missing python-any-r1 eclass usage for DEPEND="dev-lang/python"' in str(r)
19351
19352 - self.assertNoReport(self.check, self.mk_pkg(DEPEND='dev-lang/python:2.7'))
19353 + self.assertNoReport(self.check, self.mk_pkg(DEPEND="dev-lang/python:2.7"))
19354 assert isinstance(
19355 - self.assertReport(self.check, self.mk_pkg(DEPEND='dev-lang/python:*')),
19356 - python.MissingPythonEclass)
19357 + self.assertReport(self.check, self.mk_pkg(DEPEND="dev-lang/python:*")),
19358 + python.MissingPythonEclass,
19359 + )
19360 assert isinstance(
19361 - self.assertReport(self.check, self.mk_pkg(DEPEND='=dev-lang/python-2*')),
19362 - python.MissingPythonEclass)
19363 + self.assertReport(self.check, self.mk_pkg(DEPEND="=dev-lang/python-2*")),
19364 + python.MissingPythonEclass,
19365 + )
19366 assert isinstance(
19367 self.assertReport(
19368 - self.check,
19369 - self.mk_pkg(DEPEND='|| ( dev-lang/python:2.7 dev-lang/python:3.6 )')),
19370 - python.MissingPythonEclass)
19371 + self.check, self.mk_pkg(DEPEND="|| ( dev-lang/python:2.7 dev-lang/python:3.6 )")
19372 + ),
19373 + python.MissingPythonEclass,
19374 + )
19375
19376 def test_missing_eclass_bdepend(self):
19377 self.assertNoReport(
19378 - self.check,
19379 - self.mk_pkg(_eclasses_=['python-any-r1'], BDEPEND='dev-lang/python'))
19380 - self.assertNoReport(self.check, self.mk_pkg(BDEPEND='dev-foo/frobnicate'))
19381 + self.check, self.mk_pkg(_eclasses_=["python-any-r1"], BDEPEND="dev-lang/python")
19382 + )
19383 + self.assertNoReport(self.check, self.mk_pkg(BDEPEND="dev-foo/frobnicate"))
19384
19385 assert isinstance(
19386 - self.assertReport(self.check, self.mk_pkg(BDEPEND='dev-lang/python')),
19387 - python.MissingPythonEclass)
19388 - self.assertNoReport(self.check, self.mk_pkg(BDEPEND='dev-lang/python:2.7'))
19389 + self.assertReport(self.check, self.mk_pkg(BDEPEND="dev-lang/python")),
19390 + python.MissingPythonEclass,
19391 + )
19392 + self.assertNoReport(self.check, self.mk_pkg(BDEPEND="dev-lang/python:2.7"))
19393 assert isinstance(
19394 - self.assertReport(self.check, self.mk_pkg(BDEPEND='dev-lang/python:*')),
19395 - python.MissingPythonEclass)
19396 + self.assertReport(self.check, self.mk_pkg(BDEPEND="dev-lang/python:*")),
19397 + python.MissingPythonEclass,
19398 + )
19399 assert isinstance(
19400 - self.assertReport(self.check, self.mk_pkg(BDEPEND='=dev-lang/python-2*')),
19401 - python.MissingPythonEclass)
19402 + self.assertReport(self.check, self.mk_pkg(BDEPEND="=dev-lang/python-2*")),
19403 + python.MissingPythonEclass,
19404 + )
19405 assert isinstance(
19406 self.assertReport(
19407 - self.check,
19408 - self.mk_pkg(BDEPEND='|| ( dev-lang/python:2.7 dev-lang/python:3.6 )')),
19409 - python.MissingPythonEclass)
19410 + self.check, self.mk_pkg(BDEPEND="|| ( dev-lang/python:2.7 dev-lang/python:3.6 )")
19411 + ),
19412 + python.MissingPythonEclass,
19413 + )
19414
19415 def test_missing_eclass_rdepend(self):
19416 self.assertNoReport(
19417 - self.check,
19418 - self.mk_pkg(_eclasses_=['python-r1'], RDEPEND='dev-lang/python:3.7'))
19419 + self.check, self.mk_pkg(_eclasses_=["python-r1"], RDEPEND="dev-lang/python:3.7")
19420 + )
19421 self.assertNoReport(
19422 - self.check,
19423 - self.mk_pkg(_eclasses_=['python-single-r1'], RDEPEND='dev-lang/python:3.7'))
19424 - self.assertNoReport(self.check, self.mk_pkg(RDEPEND='dev-foo/frobnicate'))
19425 + self.check, self.mk_pkg(_eclasses_=["python-single-r1"], RDEPEND="dev-lang/python:3.7")
19426 + )
19427 + self.assertNoReport(self.check, self.mk_pkg(RDEPEND="dev-foo/frobnicate"))
19428
19429 - r = self.assertReport(self.check, self.mk_pkg(RDEPEND='dev-lang/python'))
19430 + r = self.assertReport(self.check, self.mk_pkg(RDEPEND="dev-lang/python"))
19431 assert isinstance(r, python.MissingPythonEclass)
19432 - assert 'missing python-r1 or python-single-r1 eclass' in str(r)
19433 + assert "missing python-r1 or python-single-r1 eclass" in str(r)
19434
19435 - self.assertNoReport(self.check, self.mk_pkg(RDEPEND='dev-lang/python:2.7'))
19436 + self.assertNoReport(self.check, self.mk_pkg(RDEPEND="dev-lang/python:2.7"))
19437 assert isinstance(
19438 - self.assertReport(self.check, self.mk_pkg(RDEPEND='dev-lang/python:=')),
19439 - python.MissingPythonEclass)
19440 + self.assertReport(self.check, self.mk_pkg(RDEPEND="dev-lang/python:=")),
19441 + python.MissingPythonEclass,
19442 + )
19443 assert isinstance(
19444 - self.assertReport(self.check, self.mk_pkg(RDEPEND='=dev-lang/python-2*')),
19445 - python.MissingPythonEclass)
19446 + self.assertReport(self.check, self.mk_pkg(RDEPEND="=dev-lang/python-2*")),
19447 + python.MissingPythonEclass,
19448 + )
19449 assert isinstance(
19450 self.assertReport(
19451 - self.check,
19452 - self.mk_pkg(RDEPEND='|| ( dev-lang/python:2.7 dev-lang/python:3.6 )')),
19453 - python.MissingPythonEclass)
19454 + self.check, self.mk_pkg(RDEPEND="|| ( dev-lang/python:2.7 dev-lang/python:3.6 )")
19455 + ),
19456 + python.MissingPythonEclass,
19457 + )
19458
19459 def test_missing_eclass_pdepend(self):
19460 self.assertNoReport(
19461 - self.check,
19462 - self.mk_pkg(_eclasses_=['python-r1'], PDEPEND='dev-lang/python:3.7'))
19463 + self.check, self.mk_pkg(_eclasses_=["python-r1"], PDEPEND="dev-lang/python:3.7")
19464 + )
19465 self.assertNoReport(
19466 - self.check,
19467 - self.mk_pkg(_eclasses_=['python-single-r1'], PDEPEND='dev-lang/python:3.7'))
19468 - self.assertNoReport(self.check, self.mk_pkg(PDEPEND='dev-foo/frobnicate'))
19469 + self.check, self.mk_pkg(_eclasses_=["python-single-r1"], PDEPEND="dev-lang/python:3.7")
19470 + )
19471 + self.assertNoReport(self.check, self.mk_pkg(PDEPEND="dev-foo/frobnicate"))
19472
19473 assert isinstance(
19474 - self.assertReport(self.check, self.mk_pkg(PDEPEND='dev-lang/python')),
19475 - python.MissingPythonEclass)
19476 - self.assertNoReport(self.check, self.mk_pkg(PDEPEND='dev-lang/python:2.7'))
19477 + self.assertReport(self.check, self.mk_pkg(PDEPEND="dev-lang/python")),
19478 + python.MissingPythonEclass,
19479 + )
19480 + self.assertNoReport(self.check, self.mk_pkg(PDEPEND="dev-lang/python:2.7"))
19481 assert isinstance(
19482 - self.assertReport(self.check, self.mk_pkg(PDEPEND='dev-lang/python:=')),
19483 - python.MissingPythonEclass)
19484 + self.assertReport(self.check, self.mk_pkg(PDEPEND="dev-lang/python:=")),
19485 + python.MissingPythonEclass,
19486 + )
19487 assert isinstance(
19488 - self.assertReport(self.check, self.mk_pkg(PDEPEND='=dev-lang/python-2*')),
19489 - python.MissingPythonEclass)
19490 + self.assertReport(self.check, self.mk_pkg(PDEPEND="=dev-lang/python-2*")),
19491 + python.MissingPythonEclass,
19492 + )
19493 assert isinstance(
19494 self.assertReport(
19495 - self.check,
19496 - self.mk_pkg(PDEPEND='|| ( dev-lang/python:2.7 dev-lang/python:3.6 )')),
19497 - python.MissingPythonEclass)
19498 + self.check, self.mk_pkg(PDEPEND="|| ( dev-lang/python:2.7 dev-lang/python:3.6 )")
19499 + ),
19500 + python.MissingPythonEclass,
19501 + )
19502
19503 def test_valid_packages(self):
19504 self.assertNoReport(
19505 self.check,
19506 self.mk_pkg(
19507 - _eclasses_=['python-r1'],
19508 - IUSE='python_targets_python3_5 '
19509 - 'python_targets_python3_6',
19510 - RDEPEND='python_targets_python3_5? ( '
19511 - ' dev-lang/python:3.5 ) '
19512 - 'python_targets_python3_6? ( '
19513 - ' dev-lang/python:3.6 )',
19514 - REQUIRED_USE='|| ( python_targets_python3_5 '
19515 - ' python_targets_python3_6 )'))
19516 + _eclasses_=["python-r1"],
19517 + IUSE="python_targets_python3_5 " "python_targets_python3_6",
19518 + RDEPEND="python_targets_python3_5? ( "
19519 + " dev-lang/python:3.5 ) "
19520 + "python_targets_python3_6? ( "
19521 + " dev-lang/python:3.6 )",
19522 + REQUIRED_USE="|| ( python_targets_python3_5 " " python_targets_python3_6 )",
19523 + ),
19524 + )
19525
19526 # python-single-r1 with one implementation does not use PST
19527 self.assertNoReport(
19528 self.check,
19529 - self.mk_pkg(_eclasses_=['python-single-r1'],
19530 - IUSE='python_targets_python3_5',
19531 - RDEPEND='python_targets_python3_5? ( '
19532 - ' dev-lang/python:3.5 )',
19533 - REQUIRED_USE='python_targets_python3_5'))
19534 + self.mk_pkg(
19535 + _eclasses_=["python-single-r1"],
19536 + IUSE="python_targets_python3_5",
19537 + RDEPEND="python_targets_python3_5? ( " " dev-lang/python:3.5 )",
19538 + REQUIRED_USE="python_targets_python3_5",
19539 + ),
19540 + )
19541 self.assertNoReport(
19542 self.check,
19543 self.mk_pkg(
19544 - _eclasses_=['python-single-r1'],
19545 - IUSE='python_targets_python3_5 '
19546 - 'python_targets_python3_6 '
19547 - 'python_single_target_python3_5 '
19548 - 'python_single_target_python3_6',
19549 - RDEPEND='python_single_target_python3_5? ( '
19550 - ' dev-lang/python:3.5 ) '
19551 - 'python_single_target_python3_6? ( '
19552 - ' dev-lang/python:3.6 )',
19553 - REQUIRED_USE='^^ ( python_single_target_python3_5 '
19554 - ' python_single_target_python3_6 ) '
19555 - 'python_single_target_python3_5? ( '
19556 - ' python_targets_python3_5 ) '
19557 - 'python_single_target_python3_6? ( '
19558 - ' python_targets_python3_6 )'))
19559 + _eclasses_=["python-single-r1"],
19560 + IUSE="python_targets_python3_5 "
19561 + "python_targets_python3_6 "
19562 + "python_single_target_python3_5 "
19563 + "python_single_target_python3_6",
19564 + RDEPEND="python_single_target_python3_5? ( "
19565 + " dev-lang/python:3.5 ) "
19566 + "python_single_target_python3_6? ( "
19567 + " dev-lang/python:3.6 )",
19568 + REQUIRED_USE="^^ ( python_single_target_python3_5 "
19569 + " python_single_target_python3_6 ) "
19570 + "python_single_target_python3_5? ( "
19571 + " python_targets_python3_5 ) "
19572 + "python_single_target_python3_6? ( "
19573 + " python_targets_python3_6 )",
19574 + ),
19575 + )
19576
19577 self.assertNoReport(
19578 self.check,
19579 - self.mk_pkg(_eclasses_=['python-any-r1'],
19580 - DEPEND='|| ( '
19581 - ' dev-lang/python:3.5 '
19582 - ' dev-lang/python:3.6 )'))
19583 + self.mk_pkg(
19584 + _eclasses_=["python-any-r1"],
19585 + DEPEND="|| ( " " dev-lang/python:3.5 " " dev-lang/python:3.6 )",
19586 + ),
19587 + )
19588 self.assertNoReport(
19589 - self.check,
19590 - self.mk_pkg(_eclasses_=['python-any-r1'], DEPEND='dev-lang/python:3.5'))
19591 + self.check, self.mk_pkg(_eclasses_=["python-any-r1"], DEPEND="dev-lang/python:3.5")
19592 + )
19593 self.assertNoReport(
19594 self.check,
19595 - self.mk_pkg(_eclasses_=['python-any-r1'],
19596 - BDEPEND='|| ( '
19597 - ' dev-lang/python:3.5 '
19598 - ' dev-lang/python:3.6 )'))
19599 + self.mk_pkg(
19600 + _eclasses_=["python-any-r1"],
19601 + BDEPEND="|| ( " " dev-lang/python:3.5 " " dev-lang/python:3.6 )",
19602 + ),
19603 + )
19604
19605 def test_missing_required_use(self):
19606 r = self.assertReport(
19607 self.check,
19608 self.mk_pkg(
19609 - _eclasses_=['python-r1'],
19610 - IUSE='python_targets_python3_5 '
19611 - 'python_targets_python3_6',
19612 - RDEPEND='python_targets_python3_5? ( '
19613 - ' dev-lang/python:3.5 ) '
19614 - 'python_targets_python3_6? ( '
19615 - ' dev-lang/python:3.6 )'))
19616 + _eclasses_=["python-r1"],
19617 + IUSE="python_targets_python3_5 " "python_targets_python3_6",
19618 + RDEPEND="python_targets_python3_5? ( "
19619 + " dev-lang/python:3.5 ) "
19620 + "python_targets_python3_6? ( "
19621 + " dev-lang/python:3.6 )",
19622 + ),
19623 + )
19624 assert isinstance(r, python.PythonMissingRequiredUse)
19625 assert 'missing REQUIRED_USE="${PYTHON_REQUIRED_USE}"' in str(r)
19626
19627 @@ -191,93 +212,105 @@ class TestPythonCheck(misc.ReportTestCase):
19628 self.assertReport(
19629 self.check,
19630 self.mk_pkg(
19631 - _eclasses_=['python-r1'],
19632 - IUSE='python_targets_python3_5 '
19633 - 'python_targets_python3_6',
19634 - RDEPEND='python_targets_python3_5? ( '
19635 - ' dev-lang/python:3.5 ) '
19636 - 'python_targets_python3_6? ( '
19637 - ' dev-lang/python:3.6 )',
19638 - REQUIRED_USE='|| ( python_targets_python3_5 )')),
19639 - python.PythonMissingRequiredUse)
19640 + _eclasses_=["python-r1"],
19641 + IUSE="python_targets_python3_5 " "python_targets_python3_6",
19642 + RDEPEND="python_targets_python3_5? ( "
19643 + " dev-lang/python:3.5 ) "
19644 + "python_targets_python3_6? ( "
19645 + " dev-lang/python:3.6 )",
19646 + REQUIRED_USE="|| ( python_targets_python3_5 )",
19647 + ),
19648 + ),
19649 + python.PythonMissingRequiredUse,
19650 + )
19651
19652 assert isinstance(
19653 self.assertReport(
19654 self.check,
19655 self.mk_pkg(
19656 - _eclasses_=['python-r1'],
19657 - IUSE='python_targets_python3_5 '
19658 - 'python_targets_python3_6 '
19659 - 'python_targets_python3_7',
19660 - RDEPEND='python_targets_python3_5? ( '
19661 - ' dev-lang/python:3.5 ) '
19662 - 'python_targets_python3_6? ( '
19663 - ' dev-lang/python:3.6 ) '
19664 - 'python_targets_python3_7? ( '
19665 - ' dev-lang/python:3.7 )',
19666 - REQUIRED_USE='|| ( python_targets_python3_6 '
19667 - ' python_targets_python3_7 )')),
19668 - python.PythonMissingRequiredUse)
19669 + _eclasses_=["python-r1"],
19670 + IUSE="python_targets_python3_5 "
19671 + "python_targets_python3_6 "
19672 + "python_targets_python3_7",
19673 + RDEPEND="python_targets_python3_5? ( "
19674 + " dev-lang/python:3.5 ) "
19675 + "python_targets_python3_6? ( "
19676 + " dev-lang/python:3.6 ) "
19677 + "python_targets_python3_7? ( "
19678 + " dev-lang/python:3.7 )",
19679 + REQUIRED_USE="|| ( python_targets_python3_6 " " python_targets_python3_7 )",
19680 + ),
19681 + ),
19682 + python.PythonMissingRequiredUse,
19683 + )
19684
19685 assert isinstance(
19686 self.assertReport(
19687 self.check,
19688 self.mk_pkg(
19689 - _eclasses_=['python-single-r1'],
19690 - IUSE='python_targets_python3_5 '
19691 - 'python_targets_python3_6 '
19692 - 'python_single_target_python3_5 '
19693 - 'python_single_target_python3_6',
19694 - RDEPEND='python_single_target_python3_5? ( '
19695 - ' dev-lang/python:3.5 ) '
19696 - 'python_single_target_python3_6? ( '
19697 - ' dev-lang/python:3.6 )')),
19698 - python.PythonMissingRequiredUse)
19699 + _eclasses_=["python-single-r1"],
19700 + IUSE="python_targets_python3_5 "
19701 + "python_targets_python3_6 "
19702 + "python_single_target_python3_5 "
19703 + "python_single_target_python3_6",
19704 + RDEPEND="python_single_target_python3_5? ( "
19705 + " dev-lang/python:3.5 ) "
19706 + "python_single_target_python3_6? ( "
19707 + " dev-lang/python:3.6 )",
19708 + ),
19709 + ),
19710 + python.PythonMissingRequiredUse,
19711 + )
19712
19713 # incomplete REQUIRED_USE
19714 assert isinstance(
19715 self.assertReport(
19716 self.check,
19717 self.mk_pkg(
19718 - _eclasses_=['python-single-r1'],
19719 - IUSE='python_targets_python3_5 '
19720 - 'python_targets_python3_6 '
19721 - 'python_single_target_python3_5 '
19722 - 'python_single_target_python3_6',
19723 - RDEPEND='python_single_target_python3_5? ( '
19724 - ' dev-lang/python:3.5 ) '
19725 - 'python_single_target_python3_6? ( '
19726 - ' dev-lang/python:3.6 )',
19727 - REQUIRED_USE='^^ ( python_single_target_python3_5 )')),
19728 - python.PythonMissingRequiredUse)
19729 + _eclasses_=["python-single-r1"],
19730 + IUSE="python_targets_python3_5 "
19731 + "python_targets_python3_6 "
19732 + "python_single_target_python3_5 "
19733 + "python_single_target_python3_6",
19734 + RDEPEND="python_single_target_python3_5? ( "
19735 + " dev-lang/python:3.5 ) "
19736 + "python_single_target_python3_6? ( "
19737 + " dev-lang/python:3.6 )",
19738 + REQUIRED_USE="^^ ( python_single_target_python3_5 )",
19739 + ),
19740 + ),
19741 + python.PythonMissingRequiredUse,
19742 + )
19743
19744 # || instead of ^^ in python-single-r1
19745 assert isinstance(
19746 self.assertReport(
19747 self.check,
19748 self.mk_pkg(
19749 - _eclasses_=['python-single-r1'],
19750 - IUSE='python_targets_python3_5 '
19751 - 'python_targets_python3_6 '
19752 - 'python_single_target_python3_5 '
19753 - 'python_single_target_python3_6',
19754 - RDEPEND='python_single_target_python3_5? ( '
19755 - ' dev-lang/python:3.5 ) '
19756 - 'python_single_target_python3_6? ( '
19757 - ' dev-lang/python:3.6 )',
19758 - REQUIRED_USE='|| ( python_targets_python3_5 '
19759 - ' python_targets_python3_6 )')),
19760 - python.PythonMissingRequiredUse)
19761 + _eclasses_=["python-single-r1"],
19762 + IUSE="python_targets_python3_5 "
19763 + "python_targets_python3_6 "
19764 + "python_single_target_python3_5 "
19765 + "python_single_target_python3_6",
19766 + RDEPEND="python_single_target_python3_5? ( "
19767 + " dev-lang/python:3.5 ) "
19768 + "python_single_target_python3_6? ( "
19769 + " dev-lang/python:3.6 )",
19770 + REQUIRED_USE="|| ( python_targets_python3_5 " " python_targets_python3_6 )",
19771 + ),
19772 + ),
19773 + python.PythonMissingRequiredUse,
19774 + )
19775
19776 def test_missing_deps(self):
19777 r = self.assertReport(
19778 self.check,
19779 self.mk_pkg(
19780 - _eclasses_=['python-r1'],
19781 - IUSE='python_targets_python3_5 '
19782 - 'python_targets_python3_6',
19783 - REQUIRED_USE='|| ( python_targets_python3_5 '
19784 - ' python_targets_python3_6 )'))
19785 + _eclasses_=["python-r1"],
19786 + IUSE="python_targets_python3_5 " "python_targets_python3_6",
19787 + REQUIRED_USE="|| ( python_targets_python3_5 " " python_targets_python3_6 )",
19788 + ),
19789 + )
19790 assert isinstance(r, python.PythonMissingDeps)
19791 assert 'missing RDEPEND="${PYTHON_DEPS}"' in str(r)
19792
19793 @@ -285,13 +318,12 @@ class TestPythonCheck(misc.ReportTestCase):
19794 r = self.assertReport(
19795 self.check,
19796 self.mk_pkg(
19797 - _eclasses_=['python-r1'],
19798 - IUSE='python_targets_python3_5 '
19799 - 'python_targets_python3_6',
19800 - RDEPEND='python_targets_python3_5? ( '
19801 - ' dev-lang/python:3.5 )',
19802 - REQUIRED_USE='|| ( python_targets_python3_5 '
19803 - ' python_targets_python3_6 )'))
19804 + _eclasses_=["python-r1"],
19805 + IUSE="python_targets_python3_5 " "python_targets_python3_6",
19806 + RDEPEND="python_targets_python3_5? ( " " dev-lang/python:3.5 )",
19807 + REQUIRED_USE="|| ( python_targets_python3_5 " " python_targets_python3_6 )",
19808 + ),
19809 + )
19810 assert isinstance(r, python.PythonMissingDeps)
19811 assert 'missing RDEPEND="${PYTHON_DEPS}"' in str(r)
19812
19813 @@ -301,69 +333,76 @@ class TestPythonCheck(misc.ReportTestCase):
19814 self.assertReport(
19815 self.check,
19816 self.mk_pkg(
19817 - _eclasses_=['python-r1'],
19818 - IUSE='python_targets_python3_5 '
19819 - 'python_targets_python3_6',
19820 - RDEPEND='python_targets_python3_5? ( '
19821 - ' dev-foo/bar ) '
19822 - 'python_targets_python3_6? ( '
19823 - ' dev-lang/python:3.6 )',
19824 - REQUIRED_USE='|| ( python_targets_python3_5 '
19825 - ' python_targets_python3_6 )')),
19826 - python.PythonMissingDeps)
19827 + _eclasses_=["python-r1"],
19828 + IUSE="python_targets_python3_5 " "python_targets_python3_6",
19829 + RDEPEND="python_targets_python3_5? ( "
19830 + " dev-foo/bar ) "
19831 + "python_targets_python3_6? ( "
19832 + " dev-lang/python:3.6 )",
19833 + REQUIRED_USE="|| ( python_targets_python3_5 " " python_targets_python3_6 )",
19834 + ),
19835 + ),
19836 + python.PythonMissingDeps,
19837 + )
19838
19839 # DEPEND only, RDEPEND missing
19840 assert isinstance(
19841 self.assertReport(
19842 self.check,
19843 self.mk_pkg(
19844 - _eclasses_=['python-r1'],
19845 - IUSE='python_targets_python3_5 '
19846 - 'python_targets_python3_6',
19847 - DEPEND='python_targets_python3_5? ( '
19848 - ' dev-lang/python:3.5 ) '
19849 - 'python_targets_python3_6? ( '
19850 - ' dev-lang/python:3.6 )',
19851 - REQUIRED_USE='|| ( python_targets_python3_5 '
19852 - ' python_targets_python3_6 )')),
19853 - python.PythonMissingDeps)
19854 + _eclasses_=["python-r1"],
19855 + IUSE="python_targets_python3_5 " "python_targets_python3_6",
19856 + DEPEND="python_targets_python3_5? ( "
19857 + " dev-lang/python:3.5 ) "
19858 + "python_targets_python3_6? ( "
19859 + " dev-lang/python:3.6 )",
19860 + REQUIRED_USE="|| ( python_targets_python3_5 " " python_targets_python3_6 )",
19861 + ),
19862 + ),
19863 + python.PythonMissingDeps,
19864 + )
19865
19866 assert isinstance(
19867 self.assertReport(
19868 self.check,
19869 self.mk_pkg(
19870 - _eclasses_=['python-single-r1'],
19871 - IUSE='python_targets_python3_5 '
19872 - 'python_targets_python3_6 '
19873 - 'python_single_target_python3_5 '
19874 - 'python_single_target_python3_6',
19875 - REQUIRED_USE='^^ ( python_single_target_python3_5 '
19876 - ' python_single_target_python3_6 ) '
19877 - 'python_single_target_python3_5? ( '
19878 - ' python_targets_python3_5 ) '
19879 - 'python_single_target_python3_6? ( '
19880 - ' python_targets_python3_6 )')),
19881 - python.PythonMissingDeps)
19882 + _eclasses_=["python-single-r1"],
19883 + IUSE="python_targets_python3_5 "
19884 + "python_targets_python3_6 "
19885 + "python_single_target_python3_5 "
19886 + "python_single_target_python3_6",
19887 + REQUIRED_USE="^^ ( python_single_target_python3_5 "
19888 + " python_single_target_python3_6 ) "
19889 + "python_single_target_python3_5? ( "
19890 + " python_targets_python3_5 ) "
19891 + "python_single_target_python3_6? ( "
19892 + " python_targets_python3_6 )",
19893 + ),
19894 + ),
19895 + python.PythonMissingDeps,
19896 + )
19897
19898 # incomplete deps
19899 assert isinstance(
19900 self.assertReport(
19901 self.check,
19902 self.mk_pkg(
19903 - _eclasses_=['python-single-r1'],
19904 - IUSE='python_targets_python3_5 '
19905 - 'python_targets_python3_6 '
19906 - 'python_single_target_python3_5 '
19907 - 'python_single_target_python3_6',
19908 - RDEPEND='python_single_target_python3_5? ( '
19909 - ' dev-lang/python:3.5 )',
19910 - REQUIRED_USE='^^ ( python_single_target_python3_5 '
19911 - ' python_single_target_python3_6 ) '
19912 - 'python_single_target_python3_5? ( '
19913 - ' python_targets_python3_5 ) '
19914 - 'python_single_target_python3_6? ( '
19915 - ' python_targets_python3_6 )')),
19916 - python.PythonMissingDeps)
19917 + _eclasses_=["python-single-r1"],
19918 + IUSE="python_targets_python3_5 "
19919 + "python_targets_python3_6 "
19920 + "python_single_target_python3_5 "
19921 + "python_single_target_python3_6",
19922 + RDEPEND="python_single_target_python3_5? ( " " dev-lang/python:3.5 )",
19923 + REQUIRED_USE="^^ ( python_single_target_python3_5 "
19924 + " python_single_target_python3_6 ) "
19925 + "python_single_target_python3_5? ( "
19926 + " python_targets_python3_5 ) "
19927 + "python_single_target_python3_6? ( "
19928 + " python_targets_python3_6 )",
19929 + ),
19930 + ),
19931 + python.PythonMissingDeps,
19932 + )
19933
19934 # check that irrelevant dep with same USE conditional does not wrongly
19935 # satisfy the check
19936 @@ -371,44 +410,50 @@ class TestPythonCheck(misc.ReportTestCase):
19937 self.assertReport(
19938 self.check,
19939 self.mk_pkg(
19940 - _eclasses_=['python-single-r1'],
19941 - IUSE='python_targets_python3_5 '
19942 - 'python_targets_python3_6 '
19943 - 'python_single_target_python3_5 '
19944 - 'python_single_target_python3_6',
19945 - RDEPEND='python_single_target_python3_5? ( '
19946 - ' dev-foo/bar ) '
19947 - 'python_single_target_python3_6? ( '
19948 - ' dev-lang/python:3.6 )',
19949 - REQUIRED_USE='^^ ( python_single_target_python3_5 '
19950 - ' python_single_target_python3_6 ) '
19951 - 'python_single_target_python3_5? ( '
19952 - ' python_targets_python3_5 ) '
19953 - 'python_single_target_python3_6? ( '
19954 - ' python_targets_python3_6 )')),
19955 - python.PythonMissingDeps)
19956 + _eclasses_=["python-single-r1"],
19957 + IUSE="python_targets_python3_5 "
19958 + "python_targets_python3_6 "
19959 + "python_single_target_python3_5 "
19960 + "python_single_target_python3_6",
19961 + RDEPEND="python_single_target_python3_5? ( "
19962 + " dev-foo/bar ) "
19963 + "python_single_target_python3_6? ( "
19964 + " dev-lang/python:3.6 )",
19965 + REQUIRED_USE="^^ ( python_single_target_python3_5 "
19966 + " python_single_target_python3_6 ) "
19967 + "python_single_target_python3_5? ( "
19968 + " python_targets_python3_5 ) "
19969 + "python_single_target_python3_6? ( "
19970 + " python_targets_python3_6 )",
19971 + ),
19972 + ),
19973 + python.PythonMissingDeps,
19974 + )
19975
19976 # DEPEND only, RDEPEND missing
19977 assert isinstance(
19978 self.assertReport(
19979 self.check,
19980 self.mk_pkg(
19981 - _eclasses_=['python-single-r1'],
19982 - IUSE='python_targets_python3_5 '
19983 - 'python_targets_python3_6 '
19984 - 'python_single_target_python3_5 '
19985 - 'python_single_target_python3_6',
19986 - DEPEND='python_single_target_python3_5? ( '
19987 - ' dev-lang/python:3.5 ) '
19988 - 'python_single_target_python3_6? ( '
19989 - ' dev-lang/python:3.6 )',
19990 - REQUIRED_USE='^^ ( python_single_target_python3_5 '
19991 - ' python_single_target_python3_6 ) '
19992 - 'python_single_target_python3_5? ( '
19993 - ' python_targets_python3_5 ) '
19994 - 'python_single_target_python3_6? ( '
19995 - ' python_targets_python3_6 )')),
19996 - python.PythonMissingDeps)
19997 + _eclasses_=["python-single-r1"],
19998 + IUSE="python_targets_python3_5 "
19999 + "python_targets_python3_6 "
20000 + "python_single_target_python3_5 "
20001 + "python_single_target_python3_6",
20002 + DEPEND="python_single_target_python3_5? ( "
20003 + " dev-lang/python:3.5 ) "
20004 + "python_single_target_python3_6? ( "
20005 + " dev-lang/python:3.6 )",
20006 + REQUIRED_USE="^^ ( python_single_target_python3_5 "
20007 + " python_single_target_python3_6 ) "
20008 + "python_single_target_python3_5? ( "
20009 + " python_targets_python3_5 ) "
20010 + "python_single_target_python3_6? ( "
20011 + " python_targets_python3_6 )",
20012 + ),
20013 + ),
20014 + python.PythonMissingDeps,
20015 + )
20016
20017 # check that the check isn't wrongly satisfied by PYTHON_TARGETS
20018 # in python-single-r1 (PYTHON_SINGLE_TARGET expected)
20019 @@ -416,38 +461,40 @@ class TestPythonCheck(misc.ReportTestCase):
20020 self.assertReport(
20021 self.check,
20022 self.mk_pkg(
20023 - _eclasses_=['python-single-r1'],
20024 - IUSE='python_targets_python3_5 '
20025 - 'python_targets_python3_6 '
20026 - 'python_single_target_python3_5 '
20027 - 'python_single_target_python3_6',
20028 - RDEPEND='python_targets_python3_5? ( '
20029 - ' dev-lang/python:3.5 ) '
20030 - 'python_targets_python3_6? ( '
20031 - ' dev-lang/python:3.6 )',
20032 - REQUIRED_USE='^^ ( python_single_target_python3_5 '
20033 - ' python_single_target_python3_6 ) '
20034 - 'python_single_target_python3_5? ( '
20035 - ' python_targets_python3_5 ) '
20036 - 'python_single_target_python3_6? ( '
20037 - ' python_targets_python3_6 )')),
20038 - python.PythonMissingDeps)
20039 + _eclasses_=["python-single-r1"],
20040 + IUSE="python_targets_python3_5 "
20041 + "python_targets_python3_6 "
20042 + "python_single_target_python3_5 "
20043 + "python_single_target_python3_6",
20044 + RDEPEND="python_targets_python3_5? ( "
20045 + " dev-lang/python:3.5 ) "
20046 + "python_targets_python3_6? ( "
20047 + " dev-lang/python:3.6 )",
20048 + REQUIRED_USE="^^ ( python_single_target_python3_5 "
20049 + " python_single_target_python3_6 ) "
20050 + "python_single_target_python3_5? ( "
20051 + " python_targets_python3_5 ) "
20052 + "python_single_target_python3_6? ( "
20053 + " python_targets_python3_6 )",
20054 + ),
20055 + ),
20056 + python.PythonMissingDeps,
20057 + )
20058
20059 assert isinstance(
20060 - self.assertReport(self.check, self.mk_pkg(_eclasses_=['python-any-r1'])),
20061 - python.PythonMissingDeps)
20062 + self.assertReport(self.check, self.mk_pkg(_eclasses_=["python-any-r1"])),
20063 + python.PythonMissingDeps,
20064 + )
20065
20066 def test_runtime_dep_in_any_r1(self):
20067 r = self.assertReport(
20068 self.check,
20069 self.mk_pkg(
20070 - _eclasses_=['python-any-r1'],
20071 - DEPEND='|| ( '
20072 - ' dev-lang/python:3.5 '
20073 - ' dev-lang/python:3.6 )',
20074 - RDEPEND='|| ( '
20075 - ' dev-lang/python:3.5 '
20076 - ' dev-lang/python:3.6 )'))
20077 + _eclasses_=["python-any-r1"],
20078 + DEPEND="|| ( " " dev-lang/python:3.5 " " dev-lang/python:3.6 )",
20079 + RDEPEND="|| ( " " dev-lang/python:3.5 " " dev-lang/python:3.6 )",
20080 + ),
20081 + )
20082 assert isinstance(r, python.PythonRuntimeDepInAnyR1)
20083 assert 'inherits python-any-r1 with RDEPEND="dev-lang/python:3.5"' in str(r)
20084
20085 @@ -455,6 +502,8 @@ class TestPythonCheck(misc.ReportTestCase):
20086 self.assertNoReport(
20087 self.check,
20088 self.mk_pkg(
20089 - _eclasses_=['python-any-r1'],
20090 - DEPEND='dev-lang/python:3.5',
20091 - RDEPEND='!dev-python/pypy3-bin:0'))
20092 + _eclasses_=["python-any-r1"],
20093 + DEPEND="dev-lang/python:3.5",
20094 + RDEPEND="!dev-python/pypy3-bin:0",
20095 + ),
20096 + )
20097
20098 diff --git a/tests/checks/test_repo.py b/tests/checks/test_repo.py
20099 index cd685a52..4220e055 100644
20100 --- a/tests/checks/test_repo.py
20101 +++ b/tests/checks/test_repo.py
20102 @@ -17,15 +17,14 @@ class TestRepoDirCheck(misc.Tmpdir, misc.ReportTestCase):
20103 check_kls = repo.RepoDirCheck
20104
20105 def mk_check(self):
20106 - self.repo = FakeRepo(repo_id='repo', location=self.dir)
20107 - options = arghparse.Namespace(
20108 - target_repo=self.repo, cache={'git': False}, gentoo_repo=True)
20109 + self.repo = FakeRepo(repo_id="repo", location=self.dir)
20110 + options = arghparse.Namespace(target_repo=self.repo, cache={"git": False}, gentoo_repo=True)
20111 git_addon = addons.git.GitAddon(options)
20112 return repo.RepoDirCheck(options, git_addon=git_addon)
20113
20114 def mk_pkg(self, cpvstr):
20115 pkg = atom.atom(cpvstr)
20116 - filesdir = pjoin(self.repo.location, pkg.category, pkg.package, 'files')
20117 + filesdir = pjoin(self.repo.location, pkg.category, pkg.package, "files")
20118 os.makedirs(filesdir, exist_ok=True)
20119 return filesdir
20120
20121 @@ -34,100 +33,100 @@ class TestRepoDirCheck(misc.Tmpdir, misc.ReportTestCase):
20122
20123 def test_empty_file(self):
20124 check = self.mk_check()
20125 - bin_path = pjoin(self.repo.location, 'foo')
20126 + bin_path = pjoin(self.repo.location, "foo")
20127 touch(bin_path)
20128 self.assertNoReport(check, [])
20129
20130 def test_regular_file(self):
20131 check = self.mk_check()
20132 - with open(pjoin(self.repo.location, 'foo'), 'w') as f:
20133 - f.write('bar')
20134 + with open(pjoin(self.repo.location, "foo"), "w") as f:
20135 + f.write("bar")
20136 self.assertNoReport(check, [])
20137
20138 def test_unreadable_file(self):
20139 check = self.mk_check()
20140 - with open(pjoin(self.repo.location, 'foo'), 'w') as f:
20141 - f.write('bar')
20142 - with mock.patch('pkgcheck.open') as mocked_open:
20143 - mocked_open.side_effect = IOError('fake exception')
20144 + with open(pjoin(self.repo.location, "foo"), "w") as f:
20145 + f.write("bar")
20146 + with mock.patch("pkgcheck.open") as mocked_open:
20147 + mocked_open.side_effect = IOError("fake exception")
20148 self.assertNoReport(check, [])
20149
20150 def test_ignored_root_dirs(self):
20151 for d in self.check_kls.ignored_root_dirs:
20152 check = self.mk_check()
20153 - bin_path = pjoin(self.repo.location, d, 'foo')
20154 + bin_path = pjoin(self.repo.location, d, "foo")
20155 os.makedirs(os.path.dirname(bin_path))
20156 - with open(bin_path, 'wb') as f:
20157 - f.write(b'\xd3\xad\xbe\xef')
20158 + with open(bin_path, "wb") as f:
20159 + f.write(b"\xd3\xad\xbe\xef")
20160 self.assertNoReport(check, [])
20161
20162 def test_null_bytes(self):
20163 check = self.mk_check()
20164 - with open(pjoin(self.repo.location, 'foo'), 'wb') as f:
20165 - f.write(b'foo\x00\xffbar')
20166 + with open(pjoin(self.repo.location, "foo"), "wb") as f:
20167 + f.write(b"foo\x00\xffbar")
20168 r = self.assertReport(check, [])
20169 assert isinstance(r, repo.BinaryFile)
20170 - assert r.path == 'foo'
20171 + assert r.path == "foo"
20172 assert "'foo'" in str(r)
20173
20174 def test_root_dir_binary(self):
20175 check = self.mk_check()
20176 - bin_path = pjoin(self.repo.location, 'foo')
20177 - with open(bin_path, 'wb') as f:
20178 - f.write(b'\xd3\xad\xbe\xef')
20179 + bin_path = pjoin(self.repo.location, "foo")
20180 + with open(bin_path, "wb") as f:
20181 + f.write(b"\xd3\xad\xbe\xef")
20182 r = self.assertReport(check, [])
20183 assert isinstance(r, repo.BinaryFile)
20184 - assert r.path == 'foo'
20185 + assert r.path == "foo"
20186 assert "'foo'" in str(r)
20187
20188 def test_ebuild_filesdir_binary(self):
20189 check = self.mk_check()
20190 - filesdir = self.mk_pkg('dev-util/foo')
20191 - with open(pjoin(filesdir, 'foo'), 'wb') as f:
20192 - f.write(b'\xd3\xad\xbe\xef')
20193 + filesdir = self.mk_pkg("dev-util/foo")
20194 + with open(pjoin(filesdir, "foo"), "wb") as f:
20195 + f.write(b"\xd3\xad\xbe\xef")
20196 r = self.assertReport(check, [])
20197 assert isinstance(r, repo.BinaryFile)
20198 - assert r.path == 'dev-util/foo/files/foo'
20199 + assert r.path == "dev-util/foo/files/foo"
20200 assert "'dev-util/foo/files/foo'" in str(r)
20201
20202 def test_gitignore(self):
20203 # distfiles located in deprecated in-tree location are reported by default
20204 check = self.mk_check()
20205 - distfiles = pjoin(self.repo.location, 'distfiles')
20206 + distfiles = pjoin(self.repo.location, "distfiles")
20207 os.mkdir(distfiles)
20208 - with open(pjoin(distfiles, 'foo-0.tar.gz'), 'wb') as f:
20209 - f.write(b'\xd3\xad\xbe\xef')
20210 + with open(pjoin(distfiles, "foo-0.tar.gz"), "wb") as f:
20211 + f.write(b"\xd3\xad\xbe\xef")
20212 r = self.assertReport(check, [])
20213 assert isinstance(r, repo.BinaryFile)
20214 assert "distfiles/foo-0.tar.gz" in str(r)
20215
20216 # but results are suppressed if a matching git ignore entry exists
20217 - for ignore_file in ('.gitignore', '.git/info/exclude'):
20218 + for ignore_file in (".gitignore", ".git/info/exclude"):
20219 path = pjoin(self.repo.location, ignore_file)
20220 ensure_dirs(os.path.dirname(path))
20221 - with open(path, 'w') as f:
20222 - f.write('/distfiles/')
20223 + with open(path, "w") as f:
20224 + f.write("/distfiles/")
20225 self.assertNoReport(self.mk_check(), [])
20226 os.unlink(path)
20227
20228 def test_non_utf8_encodings(self):
20229 # non-english languages courtesy of google translate mangling
20230 langs = (
20231 - ("example text that shouldn't trigger", 'ascii'),
20232 - ('نص المثال الذي لا ينبغي أن يؤدي', 'cp1256'), # arabic
20233 - ('пример текста, который не должен срабатывать', 'koi8_r'), # russian
20234 - ('उदाहरण पाठ जो ट्रिगर नहीं होना चाहिए', 'utf-16'), # hindi
20235 - ('مثال کے متن جو ٹرگر نہ ہوں۔', 'utf-16'), # urdu
20236 - ('ဖြစ်ပေါ်မပေးသင့်ကြောင်းဥပမာစာသား', 'utf-32'), # burmese
20237 - ('उदाहरण पाठ जुन ट्रिगर हुँदैन', 'utf-32'), # nepali
20238 - ('トリガーするべきではないテキストの例', 'shift_jis'), # japanese
20239 - ('트리거해서는 안되는 예제 텍스트', 'cp949'), # korean
20240 - ('不应触发的示例文本', 'gb2312'), # simplified chinese
20241 - ('不應觸發的示例文本', 'gb18030'), # traditional chinese
20242 + ("example text that shouldn't trigger", "ascii"),
20243 + ("نص المثال الذي لا ينبغي أن يؤدي", "cp1256"), # arabic
20244 + ("пример текста, который не должен срабатывать", "koi8_r"), # russian
20245 + ("उदाहरण पाठ जो ट्रिगर नहीं होना चाहिए", "utf-16"), # hindi
20246 + ("مثال کے متن جو ٹرگر نہ ہوں۔", "utf-16"), # urdu
20247 + ("ဖြစ်ပေါ်မပေးသင့်ကြောင်းဥပမာစာသား", "utf-32"), # burmese
20248 + ("उदाहरण पाठ जुन ट्रिगर हुँदैन", "utf-32"), # nepali
20249 + ("トリガーするべきではないテキストの例", "shift_jis"), # japanese
20250 + ("트리거해서는 안되는 예제 텍스트", "cp949"), # korean
20251 + ("不应触发的示例文本", "gb2312"), # simplified chinese
20252 + ("不應觸發的示例文本", "gb18030"), # traditional chinese
20253 )
20254 for text, encoding in langs:
20255 check = self.mk_check()
20256 - with open(pjoin(self.repo.location, 'foo'), 'wb') as f:
20257 + with open(pjoin(self.repo.location, "foo"), "wb") as f:
20258 data = text.encode(encoding)
20259 f.write(data)
20260 self.assertNoReport(check, [])
20261
20262 diff --git a/tests/checks/test_repo_metadata.py b/tests/checks/test_repo_metadata.py
20263 index 2221e283..ff550d7d 100644
20264 --- a/tests/checks/test_repo_metadata.py
20265 +++ b/tests/checks/test_repo_metadata.py
20266 @@ -16,24 +16,25 @@ class TestPackageUpdatesCheck(misc.Tmpdir, misc.ReportTestCase):
20267 def mk_check(self, pkgs=(), **kwargs):
20268 # TODO: switch to using a repo fixture when available
20269 repo_dir = pjoin(self.dir, misc.random_str())
20270 - os.makedirs(pjoin(repo_dir, 'metadata'))
20271 - with open(pjoin(repo_dir, 'metadata', 'layout.conf'), 'w') as f:
20272 - f.write('masters =\n')
20273 + os.makedirs(pjoin(repo_dir, "metadata"))
20274 + with open(pjoin(repo_dir, "metadata", "layout.conf"), "w") as f:
20275 + f.write("masters =\n")
20276
20277 - os.makedirs(pjoin(repo_dir, 'profiles', 'updates'))
20278 - with open(pjoin(repo_dir, 'profiles', 'repo_name'), 'w') as f:
20279 - f.write('fake\n')
20280 + os.makedirs(pjoin(repo_dir, "profiles", "updates"))
20281 + with open(pjoin(repo_dir, "profiles", "repo_name"), "w") as f:
20282 + f.write("fake\n")
20283 for filename, updates in kwargs.items():
20284 - with open(pjoin(repo_dir, 'profiles', 'updates', filename), 'w') as f:
20285 - f.write('\n'.join(updates))
20286 + with open(pjoin(repo_dir, "profiles", "updates", filename), "w") as f:
20287 + f.write("\n".join(updates))
20288
20289 for pkg in pkgs:
20290 pkg = FakePkg(pkg)
20291 pkg_path = pjoin(
20292 - repo_dir, pkg.category, pkg.package, f'{pkg.package}-{pkg.fullver}.ebuild')
20293 + repo_dir, pkg.category, pkg.package, f"{pkg.package}-{pkg.fullver}.ebuild"
20294 + )
20295 os.makedirs(os.path.dirname(pkg_path), exist_ok=True)
20296 - with open(pkg_path, 'w') as f:
20297 - f.write('SLOT=0\n')
20298 + with open(pkg_path, "w") as f:
20299 + f.write("SLOT=0\n")
20300
20301 repo = UnconfiguredTree(repo_dir)
20302 options = arghparse.Namespace(target_repo=repo, search_repo=repo)
20303 @@ -44,87 +45,91 @@ class TestPackageUpdatesCheck(misc.Tmpdir, misc.ReportTestCase):
20304 self.assertNoReport(self.mk_check(), [])
20305
20306 # empty file
20307 - updates = {'1Q-2020': []}
20308 + updates = {"1Q-2020": []}
20309 self.assertNoReport(self.mk_check(**updates), [])
20310
20311 def test_bad_update_filenames(self):
20312 # only files named using the format [1-4]Q-[YYYY] are allowed
20313 - updates = {'foobar': ['blah']}
20314 + updates = {"foobar": ["blah"]}
20315 r = self.assertReport(self.mk_check(**updates), [])
20316 assert isinstance(r, repo_metadata.BadPackageUpdate)
20317 assert "incorrectly named update file: 'foobar'" in str(r)
20318
20319 - updates = {'5Q-2020': ['blah']}
20320 + updates = {"5Q-2020": ["blah"]}
20321 r = self.assertReport(self.mk_check(**updates), [])
20322 assert isinstance(r, repo_metadata.BadPackageUpdate)
20323 assert "incorrectly named update file: '5Q-2020'" in str(r)
20324
20325 # hidden files will be flagged
20326 - updates = {'.1Q-2020.swp': ['blah']}
20327 + updates = {".1Q-2020.swp": ["blah"]}
20328 r = self.assertReport(self.mk_check(**updates), [])
20329 assert isinstance(r, repo_metadata.BadPackageUpdate)
20330 assert "incorrectly named update file: '.1Q-2020.swp'" in str(r)
20331
20332 def test_empty_line(self):
20333 - updates = {'1Q-2020': [' ']}
20334 + updates = {"1Q-2020": [" "]}
20335 r = self.assertReport(self.mk_check(**updates), [])
20336 assert isinstance(r, repo_metadata.BadPackageUpdate)
20337 assert "file '1Q-2020': empty line 1" in str(r)
20338
20339 def test_extra_whitespace(self):
20340 - pkgs = ('dev-util/foo-0', 'dev-util/bar-1')
20341 - for update in (' move dev-util/foo dev-util/bar', # prefix
20342 - 'move dev-util/foo dev-util/bar '): # suffix
20343 - updates = {'1Q-2020': [update]}
20344 + pkgs = ("dev-util/foo-0", "dev-util/bar-1")
20345 + for update in (
20346 + " move dev-util/foo dev-util/bar", # prefix
20347 + "move dev-util/foo dev-util/bar ",
20348 + ): # suffix
20349 + updates = {"1Q-2020": [update]}
20350 r = self.assertReport(self.mk_check(pkgs=pkgs, **updates), [])
20351 assert isinstance(r, repo_metadata.BadPackageUpdate)
20352 - assert 'extra whitespace' in str(r)
20353 - assert 'on line 1' in str(r)
20354 + assert "extra whitespace" in str(r)
20355 + assert "on line 1" in str(r)
20356
20357 def test_old_pkg_update(self):
20358 - pkgs = ('dev-util/blah-0', 'dev-libs/foon-1')
20359 - for update in ('move dev-util/foo dev-util/bar', # old pkg move
20360 - 'slotmove dev-util/bar 0 1'): # old slot move
20361 - updates = {'1Q-2020': [update]}
20362 + pkgs = ("dev-util/blah-0", "dev-libs/foon-1")
20363 + for update in (
20364 + "move dev-util/foo dev-util/bar", # old pkg move
20365 + "slotmove dev-util/bar 0 1",
20366 + ): # old slot move
20367 + updates = {"1Q-2020": [update]}
20368 r = self.assertReport(self.mk_check(pkgs=pkgs, **updates), [])
20369 assert isinstance(r, repo_metadata.OldPackageUpdate)
20370 - assert r.pkg == 'dev-util/bar'
20371 + assert r.pkg == "dev-util/bar"
20372 assert "'dev-util/bar' unavailable" in str(r)
20373
20374 def test_old_multimove_pkg_update(self):
20375 - update = ['move dev-util/foo dev-util/bar', 'move dev-util/bar dev-util/blah']
20376 - pkgs = ('dev-util/blaz-0', 'dev-libs/foon-1')
20377 - updates = {'1Q-2020': update}
20378 + update = ["move dev-util/foo dev-util/bar", "move dev-util/bar dev-util/blah"]
20379 + pkgs = ("dev-util/blaz-0", "dev-libs/foon-1")
20380 + updates = {"1Q-2020": update}
20381 r = self.assertReport(self.mk_check(pkgs=pkgs, **updates), [])
20382 assert isinstance(r, repo_metadata.OldMultiMovePackageUpdate)
20383 - assert r.pkg == 'dev-util/blah'
20384 - assert r.moves == ('dev-util/foo', 'dev-util/bar', 'dev-util/blah')
20385 + assert r.pkg == "dev-util/blah"
20386 + assert r.moves == ("dev-util/foo", "dev-util/bar", "dev-util/blah")
20387 assert "'dev-util/blah' unavailable" in str(r)
20388
20389 def test_multimove_pkg_update(self):
20390 - update = ['move dev-util/foo dev-util/bar', 'move dev-util/bar dev-util/blah']
20391 - pkgs = ('dev-util/blah-0', 'dev-libs/foon-1')
20392 - updates = {'1Q-2020': update}
20393 + update = ["move dev-util/foo dev-util/bar", "move dev-util/bar dev-util/blah"]
20394 + pkgs = ("dev-util/blah-0", "dev-libs/foon-1")
20395 + updates = {"1Q-2020": update}
20396 r = self.assertReport(self.mk_check(pkgs=pkgs, **updates), [])
20397 assert isinstance(r, repo_metadata.MultiMovePackageUpdate)
20398 - assert r.pkg == 'dev-util/foo'
20399 - assert r.moves == ('dev-util/foo', 'dev-util/bar', 'dev-util/blah')
20400 + assert r.pkg == "dev-util/foo"
20401 + assert r.moves == ("dev-util/foo", "dev-util/bar", "dev-util/blah")
20402 assert "'dev-util/foo': multi-move update" in str(r)
20403
20404 def test_move_to_self_pkg_update(self):
20405 - update = ['move dev-util/foo dev-util/foo']
20406 - pkgs = ('dev-util/foo-0',)
20407 - updates = {'1Q-2020': update}
20408 + update = ["move dev-util/foo dev-util/foo"]
20409 + pkgs = ("dev-util/foo-0",)
20410 + updates = {"1Q-2020": update}
20411 r = self.assertReport(self.mk_check(pkgs=pkgs, **updates), [])
20412 assert isinstance(r, repo_metadata.RedundantPackageUpdate)
20413 - assert r.updates == ('move', 'dev-util/foo', 'dev-util/foo')
20414 + assert r.updates == ("move", "dev-util/foo", "dev-util/foo")
20415 assert "update line moves to the same package/slot" in str(r)
20416
20417 def test_slot_move_to_self_pkg_update(self):
20418 - update = ['slotmove dev-util/foo 0 0']
20419 - pkgs = ('dev-util/foo-0',)
20420 - updates = {'1Q-2020': update}
20421 + update = ["slotmove dev-util/foo 0 0"]
20422 + pkgs = ("dev-util/foo-0",)
20423 + updates = {"1Q-2020": update}
20424 r = self.assertReport(self.mk_check(pkgs=pkgs, **updates), [])
20425 assert isinstance(r, repo_metadata.RedundantPackageUpdate)
20426 - assert r.updates == ('slotmove', 'dev-util/foo', '0', '0')
20427 + assert r.updates == ("slotmove", "dev-util/foo", "0", "0")
20428 assert "update line moves to the same package/slot" in str(r)
20429
20430 diff --git a/tests/checks/test_stablereq.py b/tests/checks/test_stablereq.py
20431 index b51bf9bc..2e181e57 100644
20432 --- a/tests/checks/test_stablereq.py
20433 +++ b/tests/checks/test_stablereq.py
20434 @@ -21,17 +21,17 @@ class TestStableRequestCheck(ReportTestCase):
20435
20436 # initialize parent repo
20437 self.parent_git_repo = make_git_repo()
20438 - self.parent_repo = make_repo(self.parent_git_repo.path, repo_id='gentoo')
20439 - self.parent_git_repo.add_all('initial commit')
20440 + self.parent_repo = make_repo(self.parent_git_repo.path, repo_id="gentoo")
20441 + self.parent_git_repo.add_all("initial commit")
20442 # create a stub pkg and commit it
20443 - self.parent_repo.create_ebuild('cat/pkg-0')
20444 - self.parent_git_repo.add_all('cat/pkg-0')
20445 + self.parent_repo.create_ebuild("cat/pkg-0")
20446 + self.parent_git_repo.add_all("cat/pkg-0")
20447
20448 # initialize child repo
20449 self.child_git_repo = make_git_repo()
20450 - self.child_git_repo.run(['git', 'remote', 'add', 'origin', self.parent_git_repo.path])
20451 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
20452 - self.child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
20453 + self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
20454 + self.child_git_repo.run(["git", "pull", "origin", "main"])
20455 + self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
20456 self.child_repo = make_repo(self.child_git_repo.path)
20457
20458 def init_check(self, options=None, future=0, stable_time=None):
20459 @@ -44,50 +44,57 @@ class TestStableRequestCheck(ReportTestCase):
20460
20461 def _options(self, stable_time=None, **kwargs):
20462 args = [
20463 - 'scan', '-q', '--cache-dir', self.cache_dir,
20464 - '--repo', self.child_repo.location,
20465 + "scan",
20466 + "-q",
20467 + "--cache-dir",
20468 + self.cache_dir,
20469 + "--repo",
20470 + self.child_repo.location,
20471 ]
20472 if stable_time is not None:
20473 - args.extend(['--stabletime', str(stable_time)])
20474 + args.extend(["--stabletime", str(stable_time)])
20475 options, _ = self._tool.parse_args(args)
20476 return options
20477
20478 def test_no_git_support(self):
20479 options = self._options()
20480 - options.cache['git'] = False
20481 - with pytest.raises(SkipCheck, match='git cache support required'):
20482 + options.cache["git"] = False
20483 + with pytest.raises(SkipCheck, match="git cache support required"):
20484 self.init_check(options)
20485
20486 def test_no_stable_keywords(self):
20487 - self.parent_repo.create_ebuild('cat/pkg-1', keywords=['~amd64'])
20488 - self.parent_git_repo.add_all('cat/pkg-1')
20489 - self.parent_repo.create_ebuild('cat/pkg-2', keywords=['~amd64'])
20490 - self.parent_git_repo.add_all('cat/pkg-2')
20491 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
20492 + self.parent_repo.create_ebuild("cat/pkg-1", keywords=["~amd64"])
20493 + self.parent_git_repo.add_all("cat/pkg-1")
20494 + self.parent_repo.create_ebuild("cat/pkg-2", keywords=["~amd64"])
20495 + self.parent_git_repo.add_all("cat/pkg-2")
20496 + self.child_git_repo.run(["git", "pull", "origin", "main"])
20497 self.init_check()
20498 self.assertNoReport(self.check, self.source)
20499
20500 def test_uncommitted_local_ebuild(self):
20501 - self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
20502 - self.parent_git_repo.add_all('cat/pkg-1')
20503 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
20504 - self.child_repo.create_ebuild('cat/pkg-2', keywords=['~amd64'])
20505 + self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
20506 + self.parent_git_repo.add_all("cat/pkg-1")
20507 + self.child_git_repo.run(["git", "pull", "origin", "main"])
20508 + self.child_repo.create_ebuild("cat/pkg-2", keywords=["~amd64"])
20509 self.init_check(future=30)
20510 self.assertNoReport(self.check, self.source)
20511
20512 - @pytest.mark.parametrize(("stable_time", "less_days", "more_days"), (
20513 - pytest.param(None, (0, 1, 10, 20, 29), (30, 31), id="stable_time=unset"),
20514 - pytest.param(1, (0,), (1, 10), id="stable_time=1"),
20515 - pytest.param(14, (0, 1, 10, 13), (14, 15, 30), id="stable_time=14"),
20516 - pytest.param(30, (0, 1, 10, 20, 29), (30, 31), id="stable_time=30"),
20517 - pytest.param(100, (98, 99), (100, 101), id="stable_time=100"),
20518 - ))
20519 + @pytest.mark.parametrize(
20520 + ("stable_time", "less_days", "more_days"),
20521 + (
20522 + pytest.param(None, (0, 1, 10, 20, 29), (30, 31), id="stable_time=unset"),
20523 + pytest.param(1, (0,), (1, 10), id="stable_time=1"),
20524 + pytest.param(14, (0, 1, 10, 13), (14, 15, 30), id="stable_time=14"),
20525 + pytest.param(30, (0, 1, 10, 20, 29), (30, 31), id="stable_time=30"),
20526 + pytest.param(100, (98, 99), (100, 101), id="stable_time=100"),
20527 + ),
20528 + )
20529 def test_existing_stable_keywords(self, stable_time, less_days, more_days):
20530 - self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
20531 - self.parent_git_repo.add_all('cat/pkg-1')
20532 - self.parent_repo.create_ebuild('cat/pkg-2', keywords=['~amd64'])
20533 - self.parent_git_repo.add_all('cat/pkg-2')
20534 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
20535 + self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
20536 + self.parent_git_repo.add_all("cat/pkg-1")
20537 + self.parent_repo.create_ebuild("cat/pkg-2", keywords=["~amd64"])
20538 + self.parent_git_repo.add_all("cat/pkg-2")
20539 + self.child_git_repo.run(["git", "pull", "origin", "main"])
20540
20541 # packages are not old enough to trigger any results
20542 for future in less_days:
20543 @@ -98,74 +105,74 @@ class TestStableRequestCheck(ReportTestCase):
20544 for future in more_days:
20545 self.init_check(future=future, stable_time=stable_time)
20546 r = self.assertReport(self.check, self.source)
20547 - expected = StableRequest('0', ['~amd64'], future, pkg=VersionedCPV('cat/pkg-2'))
20548 + expected = StableRequest("0", ["~amd64"], future, pkg=VersionedCPV("cat/pkg-2"))
20549 assert r == expected
20550
20551 def test_multislot_with_unstable_slot(self):
20552 - self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
20553 - self.parent_git_repo.add_all('cat/pkg-1')
20554 - self.parent_repo.create_ebuild('cat/pkg-2', keywords=['~amd64'], slot='1')
20555 - self.parent_git_repo.add_all('cat/pkg-2')
20556 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
20557 + self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
20558 + self.parent_git_repo.add_all("cat/pkg-1")
20559 + self.parent_repo.create_ebuild("cat/pkg-2", keywords=["~amd64"], slot="1")
20560 + self.parent_git_repo.add_all("cat/pkg-2")
20561 + self.child_git_repo.run(["git", "pull", "origin", "main"])
20562 self.init_check(future=30)
20563 r = self.assertReport(self.check, self.source)
20564 - expected = StableRequest('1', ['~amd64'], 30, pkg=VersionedCPV('cat/pkg-2'))
20565 + expected = StableRequest("1", ["~amd64"], 30, pkg=VersionedCPV("cat/pkg-2"))
20566 assert r == expected
20567
20568 def test_moved_category(self):
20569 - self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
20570 - self.parent_git_repo.add_all('cat/pkg-1')
20571 - self.parent_repo.create_ebuild('cat/pkg-2', keywords=['~amd64'])
20572 - self.parent_git_repo.add_all('cat/pkg-2')
20573 - self.parent_git_repo.move('cat', 'newcat')
20574 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
20575 + self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
20576 + self.parent_git_repo.add_all("cat/pkg-1")
20577 + self.parent_repo.create_ebuild("cat/pkg-2", keywords=["~amd64"])
20578 + self.parent_git_repo.add_all("cat/pkg-2")
20579 + self.parent_git_repo.move("cat", "newcat")
20580 + self.child_git_repo.run(["git", "pull", "origin", "main"])
20581 self.init_check(future=30)
20582 r = self.assertReport(self.check, self.source)
20583 - expected = StableRequest('0', ['~amd64'], 30, pkg=VersionedCPV('newcat/pkg-2'))
20584 + expected = StableRequest("0", ["~amd64"], 30, pkg=VersionedCPV("newcat/pkg-2"))
20585 assert r == expected
20586
20587 def test_moved_package(self):
20588 - self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
20589 - self.parent_git_repo.add_all('cat/pkg-1')
20590 - self.parent_repo.create_ebuild('cat/pkg-2', keywords=['~amd64'])
20591 - self.parent_git_repo.add_all('cat/pkg-2')
20592 + self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
20593 + self.parent_git_repo.add_all("cat/pkg-1")
20594 + self.parent_repo.create_ebuild("cat/pkg-2", keywords=["~amd64"])
20595 + self.parent_git_repo.add_all("cat/pkg-2")
20596
20597 # rename pkg and commit results
20598 path = self.parent_git_repo.path
20599 - new_pkg_dir = pjoin(path, 'cat/newpkg')
20600 - os.rename(pjoin(path, 'cat/pkg'), new_pkg_dir)
20601 + new_pkg_dir = pjoin(path, "cat/newpkg")
20602 + os.rename(pjoin(path, "cat/pkg"), new_pkg_dir)
20603 for i, f in enumerate(sorted(os.listdir(new_pkg_dir))):
20604 - os.rename(pjoin(new_pkg_dir, f), pjoin(new_pkg_dir, f'newpkg-{i}.ebuild'))
20605 + os.rename(pjoin(new_pkg_dir, f), pjoin(new_pkg_dir, f"newpkg-{i}.ebuild"))
20606 self.parent_git_repo.add_all()
20607 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
20608 + self.child_git_repo.run(["git", "pull", "origin", "main"])
20609
20610 self.init_check(future=30)
20611 r = self.assertReport(self.check, self.source)
20612 - expected = StableRequest('0', ['~amd64'], 30, pkg=VersionedCPV('cat/newpkg-2'))
20613 + expected = StableRequest("0", ["~amd64"], 30, pkg=VersionedCPV("cat/newpkg-2"))
20614 assert r == expected
20615
20616 def test_renamed_ebuild(self):
20617 - self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
20618 - self.parent_git_repo.add_all('cat/pkg-1')
20619 - self.parent_repo.create_ebuild('cat/pkg-2_rc1', keywords=['~amd64'])
20620 - self.parent_git_repo.add_all('cat/pkg-2_rc1')
20621 - self.parent_git_repo.move('cat/pkg/pkg-2_rc1.ebuild', 'cat/pkg/pkg-2.ebuild')
20622 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
20623 + self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
20624 + self.parent_git_repo.add_all("cat/pkg-1")
20625 + self.parent_repo.create_ebuild("cat/pkg-2_rc1", keywords=["~amd64"])
20626 + self.parent_git_repo.add_all("cat/pkg-2_rc1")
20627 + self.parent_git_repo.move("cat/pkg/pkg-2_rc1.ebuild", "cat/pkg/pkg-2.ebuild")
20628 + self.child_git_repo.run(["git", "pull", "origin", "main"])
20629 self.init_check(future=30)
20630 r = self.assertReport(self.check, self.source)
20631 - expected = StableRequest('0', ['~amd64'], 30, pkg=VersionedCPV('cat/pkg-2'))
20632 + expected = StableRequest("0", ["~amd64"], 30, pkg=VersionedCPV("cat/pkg-2"))
20633 assert r == expected
20634
20635 def test_modified_ebuild(self):
20636 - self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
20637 - self.parent_git_repo.add_all('cat/pkg-1')
20638 - self.parent_repo.create_ebuild('cat/pkg-2', keywords=['~amd64'])
20639 - self.parent_git_repo.add_all('cat/pkg-2')
20640 - with open(pjoin(self.parent_git_repo.path, 'cat/pkg/pkg-2.ebuild'), 'a') as f:
20641 - f.write('# comment\n')
20642 - self.parent_git_repo.add_all('cat/pkg-2: add comment')
20643 - self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
20644 + self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
20645 + self.parent_git_repo.add_all("cat/pkg-1")
20646 + self.parent_repo.create_ebuild("cat/pkg-2", keywords=["~amd64"])
20647 + self.parent_git_repo.add_all("cat/pkg-2")
20648 + with open(pjoin(self.parent_git_repo.path, "cat/pkg/pkg-2.ebuild"), "a") as f:
20649 + f.write("# comment\n")
20650 + self.parent_git_repo.add_all("cat/pkg-2: add comment")
20651 + self.child_git_repo.run(["git", "pull", "origin", "main"])
20652 self.init_check(future=30)
20653 r = self.assertReport(self.check, self.source)
20654 - expected = StableRequest('0', ['~amd64'], 30, pkg=VersionedCPV('cat/pkg-2'))
20655 + expected = StableRequest("0", ["~amd64"], 30, pkg=VersionedCPV("cat/pkg-2"))
20656 assert r == expected
20657
20658 diff --git a/tests/checks/test_whitespace.py b/tests/checks/test_whitespace.py
20659 index bc61998c..f90495b6 100644
20660 --- a/tests/checks/test_whitespace.py
20661 +++ b/tests/checks/test_whitespace.py
20662 @@ -15,7 +15,6 @@ class WhitespaceCheckTest(misc.ReportTestCase):
20663
20664
20665 class TestWhitespaceFound(WhitespaceCheckTest):
20666 -
20667 def test_leading(self):
20668 fake_src = [
20669 "# This is our first fake ebuild\n",
20670 @@ -27,7 +26,7 @@ class TestWhitespaceFound(WhitespaceCheckTest):
20671 r = self.assertReport(self.check, fake_pkg)
20672 assert isinstance(r, whitespace.WhitespaceFound)
20673 assert r.lines == (2,)
20674 - assert 'leading whitespace' in str(r)
20675 + assert "leading whitespace" in str(r)
20676
20677 def test_trailing(self):
20678 fake_src = [
20679 @@ -40,11 +39,10 @@ class TestWhitespaceFound(WhitespaceCheckTest):
20680 r = self.assertReport(self.check, fake_pkg)
20681 assert isinstance(r, whitespace.WhitespaceFound)
20682 assert r.lines == (2,)
20683 - assert 'trailing whitespace' in str(r)
20684 + assert "trailing whitespace" in str(r)
20685
20686
20687 class TestWrongIndentFound(WhitespaceCheckTest):
20688 -
20689 def test_it(self):
20690 fake_src = [
20691 "# This is our first fake ebuild\n",
20692 @@ -56,11 +54,10 @@ class TestWrongIndentFound(WhitespaceCheckTest):
20693 r = self.assertReport(self.check, fake_pkg)
20694 assert isinstance(r, whitespace.WrongIndentFound)
20695 assert r.lines == (2,)
20696 - assert 'whitespace in indentation' in str(r)
20697 + assert "whitespace in indentation" in str(r)
20698
20699
20700 class TestDoubleEmptyLine(WhitespaceCheckTest):
20701 -
20702 def test_it(self):
20703 fake_src = [
20704 "# This is our first fake ebuild\n",
20705 @@ -73,11 +70,10 @@ class TestDoubleEmptyLine(WhitespaceCheckTest):
20706 r = self.assertReport(self.check, fake_pkg)
20707 assert isinstance(r, whitespace.DoubleEmptyLine)
20708 assert r.lines == (3,)
20709 - assert 'unneeded empty line' in str(r)
20710 + assert "unneeded empty line" in str(r)
20711
20712
20713 class TestNoNewLineOnEnd(WhitespaceCheckTest):
20714 -
20715 def test_it(self):
20716 fake_src = [
20717 "# This is our first fake ebuild\n",
20718 @@ -87,11 +83,10 @@ class TestNoNewLineOnEnd(WhitespaceCheckTest):
20719
20720 r = self.assertReport(self.check, fake_pkg)
20721 assert isinstance(r, whitespace.NoFinalNewline)
20722 - assert 'lacks an ending newline' in str(r)
20723 + assert "lacks an ending newline" in str(r)
20724
20725
20726 class TestTrailingNewLineOnEnd(WhitespaceCheckTest):
20727 -
20728 def test_it(self):
20729 fake_src = [
20730 "# This is our first fake ebuild\n",
20731 @@ -102,43 +97,43 @@ class TestTrailingNewLineOnEnd(WhitespaceCheckTest):
20732
20733 r = self.assertReport(self.check, fake_pkg)
20734 assert isinstance(r, whitespace.TrailingEmptyLine)
20735 - assert 'trailing blank line(s)' in str(r)
20736 + assert "trailing blank line(s)" in str(r)
20737
20738
20739 def generate_whitespace_data():
20740 """Generate bad whitespace list for the current python version."""
20741 all_whitespace_chars = set(
20742 - re.findall(r'\s', ''.join(chr(c) for c in range(sys.maxunicode + 1))))
20743 - allowed_whitespace_chars = {'\t', '\n', ' '}
20744 + re.findall(r"\s", "".join(chr(c) for c in range(sys.maxunicode + 1)))
20745 + )
20746 + allowed_whitespace_chars = {"\t", "\n", " "}
20747 bad_whitespace_chars = tuple(sorted(all_whitespace_chars - allowed_whitespace_chars))
20748 return whitespace.WhitespaceData(unicodedata.unidata_version, bad_whitespace_chars)
20749
20750
20751 class TestBadWhitespaceCharacter(WhitespaceCheckTest):
20752 -
20753 def test_outdated_bad_whitespace_chars(self):
20754 """Check if the hardcoded bad whitespace character list is outdated."""
20755 updated_whitespace_data = generate_whitespace_data()
20756 if updated_whitespace_data.unicode_version != whitespace.whitespace_data.unicode_version:
20757 - assert updated_whitespace_data.chars == whitespace.whitespace_data.chars, \
20758 - f'outdated character list for Unicode version {unicodedata.unidata_version}'
20759 + assert (
20760 + updated_whitespace_data.chars == whitespace.whitespace_data.chars
20761 + ), f"outdated character list for Unicode version {unicodedata.unidata_version}"
20762
20763 def test_bad_whitespace_chars(self):
20764 for char in whitespace.whitespace_data.chars:
20765 fake_src = [
20766 - 'src_prepare() {\n',
20767 + "src_prepare() {\n",
20768 f'\tcd "${{S}}"/cpp ||{char}die\n',
20769 - '}\n',
20770 + "}\n",
20771 ]
20772 fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
20773
20774 r = self.assertReport(self.check, fake_pkg)
20775 assert isinstance(r, whitespace.BadWhitespaceCharacter)
20776 - assert f'bad whitespace character {repr(char)} on line 2' in str(r)
20777 + assert f"bad whitespace character {repr(char)} on line 2" in str(r)
20778
20779
20780 class TestMultipleChecks(WhitespaceCheckTest):
20781 -
20782 def test_it(self):
20783 fake_src = [
20784 "# This is our first fake ebuild\n",
20785
20786 diff --git a/tests/conftest.py b/tests/conftest.py
20787 index 675110a6..a836e3ba 100644
20788 --- a/tests/conftest.py
20789 +++ b/tests/conftest.py
20790 @@ -17,7 +17,7 @@ from snakeoil.contexts import os_environ
20791 from snakeoil.formatters import PlainTextFormatter
20792 from snakeoil.osutils import pjoin
20793
20794 -pytest_plugins = ['pkgcore']
20795 +pytest_plugins = ["pkgcore"]
20796 REPO_ROOT = Path(__file__).parent.parent
20797
20798
20799 @@ -43,7 +43,7 @@ def default_session_fixture(request):
20800 """Fixture run globally for the entire test session."""
20801 stack = ExitStack()
20802 # don't load the default system or user config files
20803 - stack.enter_context(patch('pkgcheck.cli.ConfigFileParser.default_configs', ()))
20804 + stack.enter_context(patch("pkgcheck.cli.ConfigFileParser.default_configs", ()))
20805 stack.enter_context(os_environ(**(git_config := GitConfig()).config_env))
20806
20807 def unpatch():
20808 @@ -59,40 +59,44 @@ def testconfig(tmp_path_factory):
20809
20810 Also, repo entries for all the bundled test repos.
20811 """
20812 - config = tmp_path_factory.mktemp('testconfig')
20813 - repos_conf = config / 'repos.conf'
20814 - stubrepo = pjoin(pkgcore_const.DATA_PATH, 'stubrepo')
20815 - testdir = REPO_ROOT / 'testdata/repos'
20816 - with open(repos_conf, 'w') as f:
20817 - f.write(textwrap.dedent(f"""\
20818 - [DEFAULT]
20819 - main-repo = standalone
20820 - [stubrepo]
20821 - location = {stubrepo}
20822 - """))
20823 + config = tmp_path_factory.mktemp("testconfig")
20824 + repos_conf = config / "repos.conf"
20825 + stubrepo = pjoin(pkgcore_const.DATA_PATH, "stubrepo")
20826 + testdir = REPO_ROOT / "testdata/repos"
20827 + with open(repos_conf, "w") as f:
20828 + f.write(
20829 + textwrap.dedent(
20830 + f"""\
20831 + [DEFAULT]
20832 + main-repo = standalone
20833 + [stubrepo]
20834 + location = {stubrepo}
20835 + """
20836 + )
20837 + )
20838 for repo in testdir.iterdir():
20839 - f.write(f'[{repo.name}]\n')
20840 - f.write(f'location = {repo}\n')
20841 - profile_path = pjoin(stubrepo, 'profiles', 'default')
20842 - os.symlink(profile_path, str(config / 'make.profile'))
20843 + f.write(f"[{repo.name}]\n")
20844 + f.write(f"location = {repo}\n")
20845 + profile_path = pjoin(stubrepo, "profiles", "default")
20846 + os.symlink(profile_path, str(config / "make.profile"))
20847 return str(config)
20848
20849
20850 @pytest.fixture(scope="session")
20851 def cache_dir(tmp_path_factory):
20852 """Generate a cache directory for pkgcheck."""
20853 - cache_dir = tmp_path_factory.mktemp('cache')
20854 + cache_dir = tmp_path_factory.mktemp("cache")
20855 return str(cache_dir)
20856
20857
20858 @pytest.fixture
20859 def fakerepo(tmp_path_factory):
20860 """Generate a stub repo."""
20861 - fakerepo = tmp_path_factory.mktemp('fakerepo')
20862 - (profiles := fakerepo / 'profiles').mkdir(parents=True)
20863 - (profiles / 'repo_name').write_text('fakerepo\n')
20864 - (metadata := fakerepo / 'metadata').mkdir(parents=True)
20865 - (metadata / 'layout.conf').write_text('masters =\n')
20866 + fakerepo = tmp_path_factory.mktemp("fakerepo")
20867 + (profiles := fakerepo / "profiles").mkdir(parents=True)
20868 + (profiles / "repo_name").write_text("fakerepo\n")
20869 + (metadata := fakerepo / "metadata").mkdir(parents=True)
20870 + (metadata / "layout.conf").write_text("masters =\n")
20871 return fakerepo
20872
20873
20874
20875 diff --git a/tests/misc.py b/tests/misc.py
20876 index cf317e4e..92d25721 100644
20877 --- a/tests/misc.py
20878 +++ b/tests/misc.py
20879 @@ -26,18 +26,18 @@ from snakeoil.sequences import split_negations
20880 @dataclass
20881 class Profile:
20882 """Profile record used to create profiles in a repository."""
20883 +
20884 path: str
20885 arch: str
20886 - status: str = 'stable'
20887 + status: str = "stable"
20888 deprecated: bool = False
20889 defaults: List[str] = None
20890 - eapi: str = '5'
20891 + eapi: str = "5"
20892
20893
20894 # TODO: merge this with the pkgcore-provided equivalent
20895 class FakePkg(package):
20896 -
20897 - def __init__(self, cpvstr, data=None, parent=None, ebuild='', **kwargs):
20898 + def __init__(self, cpvstr, data=None, parent=None, ebuild="", **kwargs):
20899 if data is None:
20900 data = {}
20901
20902 @@ -46,7 +46,7 @@ class FakePkg(package):
20903
20904 cpv = VersionedCPV(cpvstr)
20905 # TODO: make pkgcore generate empty shared pkg data when None is passed
20906 - mxml = repo_objs.LocalMetadataXml('')
20907 + mxml = repo_objs.LocalMetadataXml("")
20908 shared = repo_objs.SharedPkgData(metadata_xml=mxml, manifest=None)
20909 super().__init__(shared, parent, cpv.category, cpv.package, cpv.fullver)
20910 object.__setattr__(self, "data", data)
20911 @@ -58,7 +58,7 @@ class FakePkg(package):
20912
20913 @property
20914 def eapi(self):
20915 - return get_eapi(self.data.get('EAPI', '0'))
20916 + return get_eapi(self.data.get("EAPI", "0"))
20917
20918 @property
20919 def ebuild(self):
20920 @@ -88,8 +88,11 @@ class FakeFilesDirPkg(package):
20921 cpv = VersionedCPV(cpvstr)
20922 super().__init__(shared, factory(repo), cpv.category, cpv.package, cpv.fullver)
20923 object.__setattr__(self, "data", data)
20924 - object.__setattr__(self, "path", pjoin(
20925 - repo.location, cpv.category, cpv.package, f'{cpv.package}-{cpv.fullver}.ebuild'))
20926 + object.__setattr__(
20927 + self,
20928 + "path",
20929 + pjoin(repo.location, cpv.category, cpv.package, f"{cpv.package}-{cpv.fullver}.ebuild"),
20930 + )
20931
20932
20933 class ReportTestCase:
20934 @@ -133,7 +136,7 @@ class ReportTestCase:
20935
20936 def assertReport(self, check, data):
20937 results = self.assertReports(check, data)
20938 - results_str = '\n'.join(map(str, results))
20939 + results_str = "\n".join(map(str, results))
20940 assert len(results) == 1, f"expected one report, got {len(results)}:\n{results_str}"
20941 self._assertReportSanity(*results)
20942 result = results[0]
20943 @@ -141,40 +144,51 @@ class ReportTestCase:
20944
20945
20946 class FakeProfile:
20947 -
20948 - def __init__(self, masked_use={}, stable_masked_use={}, forced_use={},
20949 - stable_forced_use={}, pkg_use={}, provides={}, iuse_effective=[],
20950 - use=[], masks=[], unmasks=[], arch='x86', name='none'):
20951 + def __init__(
20952 + self,
20953 + masked_use={},
20954 + stable_masked_use={},
20955 + forced_use={},
20956 + stable_forced_use={},
20957 + pkg_use={},
20958 + provides={},
20959 + iuse_effective=[],
20960 + use=[],
20961 + masks=[],
20962 + unmasks=[],
20963 + arch="x86",
20964 + name="none",
20965 + ):
20966 self.provides_repo = SimpleTree(provides)
20967
20968 self.masked_use = ChunkedDataDict()
20969 self.masked_use.update_from_stream(
20970 - chunked_data(atom(k), *split_negations(v))
20971 - for k, v in masked_use.items())
20972 + chunked_data(atom(k), *split_negations(v)) for k, v in masked_use.items()
20973 + )
20974 self.masked_use.freeze()
20975
20976 self.stable_masked_use = ChunkedDataDict()
20977 self.stable_masked_use.update_from_stream(
20978 - chunked_data(atom(k), *split_negations(v))
20979 - for k, v in stable_masked_use.items())
20980 + chunked_data(atom(k), *split_negations(v)) for k, v in stable_masked_use.items()
20981 + )
20982 self.stable_masked_use.freeze()
20983
20984 self.forced_use = ChunkedDataDict()
20985 self.forced_use.update_from_stream(
20986 - chunked_data(atom(k), *split_negations(v))
20987 - for k, v in forced_use.items())
20988 + chunked_data(atom(k), *split_negations(v)) for k, v in forced_use.items()
20989 + )
20990 self.forced_use.freeze()
20991
20992 self.stable_forced_use = ChunkedDataDict()
20993 self.stable_forced_use.update_from_stream(
20994 - chunked_data(atom(k), *split_negations(v))
20995 - for k, v in stable_forced_use.items())
20996 + chunked_data(atom(k), *split_negations(v)) for k, v in stable_forced_use.items()
20997 + )
20998 self.stable_forced_use.freeze()
20999
21000 self.pkg_use = ChunkedDataDict()
21001 self.pkg_use.update_from_stream(
21002 - chunked_data(atom(k), *split_negations(v))
21003 - for k, v in pkg_use.items())
21004 + chunked_data(atom(k), *split_negations(v)) for k, v in pkg_use.items()
21005 + )
21006 self.pkg_use.freeze()
21007
21008 self.masks = tuple(map(atom, masks))
21009 @@ -199,7 +213,7 @@ class Tmpdir:
21010
21011 def random_str(length=10):
21012 """Generate a random string of ASCII characters of a given length."""
21013 - return ''.join(random.choice(string.ascii_letters) for _ in range(length))
21014 + return "".join(random.choice(string.ascii_letters) for _ in range(length))
21015
21016
21017 # TODO: combine this with pkgcheck.checks.init_checks()
21018 @@ -224,6 +238,5 @@ def init_check(check_cls, options):
21019 except CacheDisabled as e:
21020 raise SkipCheck(cls, e)
21021
21022 - required_addons = {
21023 - base.param_name(x): addons_map[x] for x in addon.required_addons}
21024 + required_addons = {base.param_name(x): addons_map[x] for x in addon.required_addons}
21025 return addon, required_addons, source
21026
21027 diff --git a/tests/scripts/test_argparse_actions.py b/tests/scripts/test_argparse_actions.py
21028 index d46d4560..283eb716 100644
21029 --- a/tests/scripts/test_argparse_actions.py
21030 +++ b/tests/scripts/test_argparse_actions.py
21031 @@ -11,61 +11,59 @@ from snakeoil.cli import arghparse
21032
21033
21034 class TestConfigArg:
21035 -
21036 @pytest.fixture(autouse=True)
21037 def _create_argparser(self):
21038 self.parser = arghparse.ArgumentParser()
21039 - self.parser.add_argument('--config', action=argparse_actions.ConfigArg)
21040 + self.parser.add_argument("--config", action=argparse_actions.ConfigArg)
21041
21042 def test_none(self):
21043 options = self.parser.parse_args([])
21044 assert options.config is None
21045
21046 def test_enabled(self):
21047 - for arg in ('config_file', '/path/to/config/file'):
21048 - options = self.parser.parse_args(['--config', arg])
21049 + for arg in ("config_file", "/path/to/config/file"):
21050 + options = self.parser.parse_args(["--config", arg])
21051 assert options.config == arg
21052
21053 def test_disabled(self):
21054 - for arg in ('False', 'false', 'No', 'no', 'N', 'n'):
21055 - options = self.parser.parse_args(['--config', arg])
21056 + for arg in ("False", "false", "No", "no", "N", "n"):
21057 + options = self.parser.parse_args(["--config", arg])
21058 assert options.config is False
21059
21060
21061 class TestFilterArgs:
21062 -
21063 @pytest.fixture(autouse=True)
21064 def _create_argparser(self):
21065 self.parser = arghparse.ArgumentParser()
21066 - self.parser.set_defaults(config_checksets={'cset': ['StableRequestCheck']})
21067 - self.parser.add_argument('--filter', action=argparse_actions.FilterArgs)
21068 + self.parser.set_defaults(config_checksets={"cset": ["StableRequestCheck"]})
21069 + self.parser.add_argument("--filter", action=argparse_actions.FilterArgs)
21070
21071 def test_none(self):
21072 options = self.parser.parse_args([])
21073 assert options.filter is None
21074
21075 def test_unknown_filter(self, capsys):
21076 - for arg in ('foo', 'foo:PkgDirCheck'):
21077 + for arg in ("foo", "foo:PkgDirCheck"):
21078 with pytest.raises(SystemExit) as excinfo:
21079 - self.parser.parse_args(['--filter', arg])
21080 + self.parser.parse_args(["--filter", arg])
21081 out, err = capsys.readouterr()
21082 assert not out
21083 assert "unknown filter: 'foo'" in err
21084 assert excinfo.value.code == 2
21085
21086 def test_disabled(self):
21087 - for arg in ('False', 'false', 'No', 'no', 'N', 'n'):
21088 - options = self.parser.parse_args(['--filter', arg])
21089 + for arg in ("False", "false", "No", "no", "N", "n"):
21090 + options = self.parser.parse_args(["--filter", arg])
21091 assert options.filter == {}
21092
21093 def test_enabled(self):
21094 - for arg in ('latest', 'latest:StableRequest', 'latest:StableRequestCheck', 'latest:cset'):
21095 - options = self.parser.parse_args(['--filter', arg])
21096 - assert objects.KEYWORDS['StableRequest'] in options.filter
21097 + for arg in ("latest", "latest:StableRequest", "latest:StableRequestCheck", "latest:cset"):
21098 + options = self.parser.parse_args(["--filter", arg])
21099 + assert objects.KEYWORDS["StableRequest"] in options.filter
21100
21101 def test_unknown_value(self, capsys):
21102 with pytest.raises(SystemExit) as excinfo:
21103 - self.parser.parse_args(['--filter', 'latest:foo'])
21104 + self.parser.parse_args(["--filter", "latest:foo"])
21105 out, err = capsys.readouterr()
21106 assert not out
21107 assert "unknown checkset, check, or keyword: 'foo'" in err
21108 @@ -73,11 +71,10 @@ class TestFilterArgs:
21109
21110
21111 class TestCacheNegations:
21112 -
21113 @pytest.fixture(autouse=True)
21114 def _create_argparser(self):
21115 self.parser = arghparse.ArgumentParser()
21116 - self.parser.add_argument('--cache', action=argparse_actions.CacheNegations)
21117 + self.parser.add_argument("--cache", action=argparse_actions.CacheNegations)
21118 self.caches = [x.type for x in CachedAddon.caches.values()]
21119
21120 def test_defaults(self):
21121 @@ -86,27 +83,27 @@ class TestCacheNegations:
21122
21123 def test_unknown(self, capsys):
21124 with pytest.raises(SystemExit) as excinfo:
21125 - self.parser.parse_args(['--cache', 'foo'])
21126 + self.parser.parse_args(["--cache", "foo"])
21127 out, err = capsys.readouterr()
21128 assert not out
21129 assert "unknown cache type: 'foo'" in err
21130 assert excinfo.value.code == 2
21131
21132 def test_all(self):
21133 - for arg in ('True', 'true', 'Yes', 'yes', 'Y', 'y'):
21134 - options = self.parser.parse_args(['--cache', arg])
21135 + for arg in ("True", "true", "Yes", "yes", "Y", "y"):
21136 + options = self.parser.parse_args(["--cache", arg])
21137 for k, v in options.cache.items():
21138 assert v is True
21139
21140 def test_none(self):
21141 - for arg in ('False', 'false', 'No', 'no', 'N', 'n'):
21142 - options = self.parser.parse_args(['--cache', arg])
21143 + for arg in ("False", "false", "No", "no", "N", "n"):
21144 + options = self.parser.parse_args(["--cache", arg])
21145 for k, v in options.cache.items():
21146 assert v is False
21147
21148 def test_enabled(self):
21149 cache = self.caches[random.randrange(len(self.caches))]
21150 - options = self.parser.parse_args(['--cache', cache])
21151 + options = self.parser.parse_args(["--cache", cache])
21152 for k, v in options.cache.items():
21153 if k == cache:
21154 assert v is True
21155 @@ -115,7 +112,7 @@ class TestCacheNegations:
21156
21157 def test_disabled(self):
21158 cache = self.caches[random.randrange(len(self.caches))]
21159 - options = self.parser.parse_args([f'--cache=-{cache}'])
21160 + options = self.parser.parse_args([f"--cache=-{cache}"])
21161 for k, v in options.cache.items():
21162 if k == cache:
21163 assert v is False
21164 @@ -124,66 +121,70 @@ class TestCacheNegations:
21165
21166
21167 class TestChecksetArgs:
21168 -
21169 @pytest.fixture(autouse=True)
21170 def _setup(self, tool, tmp_path):
21171 self.tool = tool
21172 - self.cache_dir = str(tmp_path / '.cache')
21173 - self.config = str(tmp_path / 'config')
21174 - self.args = ['scan', '--cache-dir', self.cache_dir]
21175 + self.cache_dir = str(tmp_path / ".cache")
21176 + self.config = str(tmp_path / "config")
21177 + self.args = ["scan", "--cache-dir", self.cache_dir]
21178
21179 def test_unknown(self, capsys):
21180 - for opt in ('-C', '--checksets'):
21181 + for opt in ("-C", "--checksets"):
21182 with pytest.raises(SystemExit) as excinfo:
21183 - self.tool.parse_args(self.args + [opt, 'foo'])
21184 + self.tool.parse_args(self.args + [opt, "foo"])
21185 out, err = capsys.readouterr()
21186 assert not out
21187 assert "unknown checkset: 'foo'" in err
21188 assert excinfo.value.code == 2
21189
21190 def test_aliases(self):
21191 - for opt in ('-C', '--checksets'):
21192 + for opt in ("-C", "--checksets"):
21193 # net
21194 - options, _ = self.tool.parse_args(self.args + [opt, 'net'])
21195 + options, _ = self.tool.parse_args(self.args + [opt, "net"])
21196 network_checks = [
21197 - c for c, v in objects.CHECKS.items() if issubclass(v, checks.NetworkCheck)]
21198 + c for c, v in objects.CHECKS.items() if issubclass(v, checks.NetworkCheck)
21199 + ]
21200 assert options.selected_checks == set(network_checks)
21201
21202 # all
21203 - options, _ = self.tool.parse_args(self.args + [opt, 'all'])
21204 + options, _ = self.tool.parse_args(self.args + [opt, "all"])
21205 assert options.selected_checks == set(objects.CHECKS)
21206
21207 def test_sets(self, capsys):
21208 - with open(self.config, 'w') as f:
21209 - f.write(textwrap.dedent("""\
21210 - [CHECKSETS]
21211 - set1=StableRequest
21212 - set2=-StableRequest
21213 - set3=SourcingCheck,-InvalidEapi,-InvalidSlot
21214 - bad=foo
21215 - """))
21216 + with open(self.config, "w") as f:
21217 + f.write(
21218 + textwrap.dedent(
21219 + """\
21220 + [CHECKSETS]
21221 + set1=StableRequest
21222 + set2=-StableRequest
21223 + set3=SourcingCheck,-InvalidEapi,-InvalidSlot
21224 + bad=foo
21225 + """
21226 + )
21227 + )
21228 configs = [self.config]
21229 - with patch('pkgcheck.cli.ConfigFileParser.default_configs', configs):
21230 - for opt in ('-C', '--checksets'):
21231 + with patch("pkgcheck.cli.ConfigFileParser.default_configs", configs):
21232 + for opt in ("-C", "--checksets"):
21233 # enabled keyword
21234 - for arg in ('set1', '-set2'):
21235 - options, _ = self.tool.parse_args(self.args + [f'{opt}={arg}'])
21236 - assert options.filtered_keywords == {objects.KEYWORDS['StableRequest']}
21237 - assert options.enabled_checks == {objects.CHECKS['StableRequestCheck']}
21238 + for arg in ("set1", "-set2"):
21239 + options, _ = self.tool.parse_args(self.args + [f"{opt}={arg}"])
21240 + assert options.filtered_keywords == {objects.KEYWORDS["StableRequest"]}
21241 + assert options.enabled_checks == {objects.CHECKS["StableRequestCheck"]}
21242
21243 # disabled keyword
21244 - for arg in ('-set1', 'set2'):
21245 - options, _ = self.tool.parse_args(self.args + [f'{opt}={arg}'])
21246 - assert objects.KEYWORDS['StableRequest'] not in options.filtered_keywords
21247 + for arg in ("-set1", "set2"):
21248 + options, _ = self.tool.parse_args(self.args + [f"{opt}={arg}"])
21249 + assert objects.KEYWORDS["StableRequest"] not in options.filtered_keywords
21250
21251 # check/keywords mixture
21252 - options, _ = self.tool.parse_args(self.args + [f'{opt}=set3'])
21253 - assert options.filtered_keywords == {objects.KEYWORDS['SourcingError']}
21254 - assert options.enabled_checks == {objects.CHECKS['SourcingCheck']}
21255 + options, _ = self.tool.parse_args(self.args + [f"{opt}=set3"])
21256 + assert options.filtered_keywords == {objects.KEYWORDS["SourcingError"]}
21257 + assert options.enabled_checks == {objects.CHECKS["SourcingCheck"]}
21258
21259 # unknown value
21260 with pytest.raises(SystemExit) as excinfo:
21261 - self.tool.parse_args(self.args + [f'{opt}=bad'])
21262 + self.tool.parse_args(self.args + [f"{opt}=bad"])
21263 out, err = capsys.readouterr()
21264 assert not out
21265 assert "'bad' checkset, unknown check or keyword: 'foo'" in err
21266 @@ -191,173 +192,167 @@ class TestChecksetArgs:
21267
21268
21269 class TestScopeArgs:
21270 -
21271 @pytest.fixture(autouse=True)
21272 def _setup(self, tool, tmp_path):
21273 self.tool = tool
21274 self.cache_dir = str(tmp_path)
21275 - self.args = ['scan', '--cache-dir', self.cache_dir]
21276 + self.args = ["scan", "--cache-dir", self.cache_dir]
21277
21278 def test_unknown_scope(self, capsys):
21279 - for opt in ('-s', '--scopes'):
21280 + for opt in ("-s", "--scopes"):
21281 with pytest.raises(SystemExit) as excinfo:
21282 - options, _ = self.tool.parse_args(self.args + [opt, 'foo'])
21283 + options, _ = self.tool.parse_args(self.args + [opt, "foo"])
21284 assert excinfo.value.code == 2
21285 out, err = capsys.readouterr()
21286 - err = err.strip().split('\n')
21287 + err = err.strip().split("\n")
21288 assert "unknown scope: 'foo'" in err[-1]
21289
21290 def test_missing_scope(self, capsys):
21291 - for opt in ('-s', '--scopes'):
21292 + for opt in ("-s", "--scopes"):
21293 with pytest.raises(SystemExit) as excinfo:
21294 options, _ = self.tool.parse_args(self.args + [opt])
21295 assert excinfo.value.code == 2
21296 out, err = capsys.readouterr()
21297 - err = err.strip().split('\n')
21298 - assert err[0] == (
21299 - 'pkgcheck scan: error: argument -s/--scopes: expected one argument')
21300 + err = err.strip().split("\n")
21301 + assert err[0] == ("pkgcheck scan: error: argument -s/--scopes: expected one argument")
21302
21303 def test_disabled(self):
21304 - options, _ = self.tool.parse_args(self.args + ['--scopes=-eclass'])
21305 + options, _ = self.tool.parse_args(self.args + ["--scopes=-eclass"])
21306 assert options.selected_scopes == frozenset()
21307
21308 def test_enabled(self):
21309 - options, _ = self.tool.parse_args(self.args + ['--scopes', 'repo'])
21310 - assert options.selected_scopes == frozenset([base.scopes['repo']])
21311 + options, _ = self.tool.parse_args(self.args + ["--scopes", "repo"])
21312 + assert options.selected_scopes == frozenset([base.scopes["repo"]])
21313
21314
21315 class TestCheckArgs:
21316 -
21317 @pytest.fixture(autouse=True)
21318 def _setup(self, tool, tmp_path):
21319 self.tool = tool
21320 self.cache_dir = str(tmp_path)
21321 - self.args = ['scan', '--cache-dir', self.cache_dir]
21322 + self.args = ["scan", "--cache-dir", self.cache_dir]
21323
21324 def test_unknown_check(self, capsys):
21325 - for opt in ('-c', '--checks'):
21326 + for opt in ("-c", "--checks"):
21327 with pytest.raises(SystemExit) as excinfo:
21328 - options, _ = self.tool.parse_args(self.args + [opt, 'foo'])
21329 + options, _ = self.tool.parse_args(self.args + [opt, "foo"])
21330 assert excinfo.value.code == 2
21331 out, err = capsys.readouterr()
21332 - err = err.strip().split('\n')
21333 + err = err.strip().split("\n")
21334 assert "unknown check: 'foo'" in err[-1]
21335
21336 def test_token_errors(self):
21337 - for opt in ('-c', '--checks'):
21338 - for operation in ('-', '+'):
21339 + for opt in ("-c", "--checks"):
21340 + for operation in ("-", "+"):
21341 with pytest.raises(argparse.ArgumentTypeError) as excinfo:
21342 - options, _ = self.tool.parse_args(self.args + [f'{opt}={operation}'])
21343 - assert 'without a token' in str(excinfo.value)
21344 + options, _ = self.tool.parse_args(self.args + [f"{opt}={operation}"])
21345 + assert "without a token" in str(excinfo.value)
21346
21347 def test_missing_check(self, capsys):
21348 - for opt in ('-c', '--checks'):
21349 + for opt in ("-c", "--checks"):
21350 with pytest.raises(SystemExit) as excinfo:
21351 options, _ = self.tool.parse_args(self.args + [opt])
21352 assert excinfo.value.code == 2
21353 out, err = capsys.readouterr()
21354 - err = err.strip().split('\n')
21355 - assert err[0] == (
21356 - 'pkgcheck scan: error: argument -c/--checks: expected one argument')
21357 + err = err.strip().split("\n")
21358 + assert err[0] == ("pkgcheck scan: error: argument -c/--checks: expected one argument")
21359
21360 def test_neutral(self):
21361 - for opt in ('-c', '--checks'):
21362 - options, _ = self.tool.parse_args(self.args + [opt, 'UnusedLicensesCheck'])
21363 - assert options.selected_checks == frozenset(['UnusedLicensesCheck'])
21364 + for opt in ("-c", "--checks"):
21365 + options, _ = self.tool.parse_args(self.args + [opt, "UnusedLicensesCheck"])
21366 + assert options.selected_checks == frozenset(["UnusedLicensesCheck"])
21367
21368 def test_subtractive(self):
21369 - for opt in ('-c', '--checks'):
21370 + for opt in ("-c", "--checks"):
21371 check = list(objects.CHECKS)[random.randrange(len(objects.CHECKS))]
21372 - options, _ = self.tool.parse_args(self.args + [f'{opt}=-{check}'])
21373 + options, _ = self.tool.parse_args(self.args + [f"{opt}=-{check}"])
21374 assert options.selected_checks == frozenset()
21375
21376 def test_additive(self):
21377 - for opt in ('-c', '--checks'):
21378 + for opt in ("-c", "--checks"):
21379 options, _ = self.tool.parse_args(self.args)
21380 assert issubclass(checks.perl.PerlCheck, checks.OptionalCheck)
21381 assert checks.perl.PerlCheck not in set(options.enabled_checks)
21382 - options, _ = self.tool.parse_args(self.args + [f'{opt}=+PerlCheck'])
21383 + options, _ = self.tool.parse_args(self.args + [f"{opt}=+PerlCheck"])
21384 assert checks.perl.PerlCheck in set(options.enabled_checks)
21385 - assert options.selected_checks == frozenset(['PerlCheck'])
21386 + assert options.selected_checks == frozenset(["PerlCheck"])
21387
21388
21389 class TestKeywordArgs:
21390 -
21391 @pytest.fixture(autouse=True)
21392 def _setup(self, tool, tmp_path):
21393 self.tool = tool
21394 self.cache_dir = str(tmp_path)
21395 - self.args = ['scan', '--cache-dir', self.cache_dir]
21396 + self.args = ["scan", "--cache-dir", self.cache_dir]
21397
21398 def test_unknown_keyword(self, capsys):
21399 - for opt in ('-k', '--keywords'):
21400 + for opt in ("-k", "--keywords"):
21401 with pytest.raises(SystemExit) as excinfo:
21402 - options, _ = self.tool.parse_args(self.args + [opt, 'foo'])
21403 + options, _ = self.tool.parse_args(self.args + [opt, "foo"])
21404 assert excinfo.value.code == 2
21405 out, err = capsys.readouterr()
21406 - err = err.strip().split('\n')
21407 + err = err.strip().split("\n")
21408 assert "unknown keyword: 'foo'" in err[-1]
21409
21410 def test_missing_keyword(self, capsys):
21411 - for opt in ('-k', '--keywords'):
21412 + for opt in ("-k", "--keywords"):
21413 with pytest.raises(SystemExit) as excinfo:
21414 options, _ = self.tool.parse_args(self.args + [opt])
21415 assert excinfo.value.code == 2
21416 out, err = capsys.readouterr()
21417 - err = err.strip().split('\n')
21418 - assert err[0] == (
21419 - 'pkgcheck scan: error: argument -k/--keywords: expected one argument')
21420 + err = err.strip().split("\n")
21421 + assert err[0] == ("pkgcheck scan: error: argument -k/--keywords: expected one argument")
21422
21423 def test_enabled(self):
21424 - for opt in ('-k', '--keywords'):
21425 - options, _ = self.tool.parse_args(self.args + [opt, 'UnusedLicenses'])
21426 - assert options.selected_keywords == frozenset(['UnusedLicenses'])
21427 - assert options.filtered_keywords == frozenset([objects.KEYWORDS['UnusedLicenses']])
21428 + for opt in ("-k", "--keywords"):
21429 + options, _ = self.tool.parse_args(self.args + [opt, "UnusedLicenses"])
21430 + assert options.selected_keywords == frozenset(["UnusedLicenses"])
21431 + assert options.filtered_keywords == frozenset([objects.KEYWORDS["UnusedLicenses"]])
21432 assert options.enabled_checks == {checks.repo_metadata.UnusedLicensesCheck}
21433
21434 def test_disabled_check(self):
21435 """Disabling all keywords for a given check also disables the check."""
21436 - for opt in ('-k', '--keywords'):
21437 + for opt in ("-k", "--keywords"):
21438 default_checks = set(objects.CHECKS.default.values())
21439 default_keywords = set().union(*(v.known_results for v in default_checks))
21440 keyword = checks.repo_metadata.UnusedLicenses
21441 check = checks.repo_metadata.UnusedLicensesCheck
21442 assert check in default_checks
21443 assert check.known_results == frozenset([keyword])
21444 - options, _ = self.tool.parse_args(self.args + [f'{opt}=-UnusedLicenses'])
21445 + options, _ = self.tool.parse_args(self.args + [f"{opt}=-UnusedLicenses"])
21446 assert options.selected_keywords == frozenset()
21447 assert options.filtered_keywords == frozenset(default_keywords - {keyword})
21448 assert check not in set(options.enabled_checks)
21449
21450 def test_disabled(self):
21451 - for opt in ('-k', '--keywords'):
21452 + for opt in ("-k", "--keywords"):
21453 default_keywords = set().union(
21454 - *(v.known_results for v in objects.CHECKS.default.values()))
21455 + *(v.known_results for v in objects.CHECKS.default.values())
21456 + )
21457 keyword_cls = list(default_keywords)[random.randrange(len(default_keywords))]
21458 keyword = keyword_cls.__name__
21459 - options, _ = self.tool.parse_args(self.args + [f'{opt}=-{keyword}'])
21460 + options, _ = self.tool.parse_args(self.args + [f"{opt}=-{keyword}"])
21461 assert options.selected_keywords == frozenset()
21462 assert options.filtered_keywords == frozenset(default_keywords - {keyword_cls})
21463
21464 def test_aliases(self):
21465 - for opt in ('-k', '--keywords'):
21466 - for alias in ('error', 'warning', 'info'):
21467 + for opt in ("-k", "--keywords"):
21468 + for alias in ("error", "warning", "info"):
21469 options, _ = self.tool.parse_args(self.args + [opt, alias])
21470 alias_keywords = list(getattr(objects.KEYWORDS, alias))
21471 assert options.selected_keywords == frozenset(alias_keywords)
21472
21473
21474 class TestExitArgs:
21475 -
21476 @pytest.fixture(autouse=True)
21477 def _setup(self, tool, tmp_path):
21478 self.tool = tool
21479 self.cache_dir = str(tmp_path)
21480 - self.args = ['scan', '--cache-dir', self.cache_dir]
21481 + self.args = ["scan", "--cache-dir", self.cache_dir]
21482
21483 def test_unknown(self, capsys):
21484 with pytest.raises(SystemExit) as excinfo:
21485 - self.tool.parse_args(self.args + ['--exit', 'foo'])
21486 + self.tool.parse_args(self.args + ["--exit", "foo"])
21487 out, err = capsys.readouterr()
21488 assert not out
21489 assert "unknown checkset, check, or keyword: 'foo'" in err
21490 @@ -368,22 +363,22 @@ class TestExitArgs:
21491 assert options.exit_keywords == ()
21492
21493 def test_default(self):
21494 - options, _ = self.tool.parse_args(self.args + ['--exit'])
21495 + options, _ = self.tool.parse_args(self.args + ["--exit"])
21496 assert options.exit_keywords == frozenset(objects.KEYWORDS.error.values())
21497
21498 def test_enabled(self):
21499 keyword = list(objects.KEYWORDS)[random.randrange(len(objects.KEYWORDS))]
21500 objs = (objects.KEYWORDS[x] for x in objects.KEYWORDS.aliases.get(keyword, [keyword]))
21501 - options, _ = self.tool.parse_args(self.args + ['--exit', keyword])
21502 + options, _ = self.tool.parse_args(self.args + ["--exit", keyword])
21503 assert options.exit_keywords == frozenset(objs)
21504
21505 def test_disabled(self):
21506 keyword = list(objects.KEYWORDS)[random.randrange(len(objects.KEYWORDS))]
21507 objs = (objects.KEYWORDS[x] for x in objects.KEYWORDS.aliases.get(keyword, [keyword]))
21508 - options, _ = self.tool.parse_args(self.args + [f'--exit=-{keyword}'])
21509 + options, _ = self.tool.parse_args(self.args + [f"--exit=-{keyword}"])
21510 assert options.exit_keywords == frozenset(objects.KEYWORDS.error.values()) - frozenset(objs)
21511
21512 def test_aliases(self):
21513 - for alias in ('error', 'warning', 'info'):
21514 - options, _ = self.tool.parse_args(self.args + [f'--exit={alias}'])
21515 + for alias in ("error", "warning", "info"):
21516 + options, _ = self.tool.parse_args(self.args + [f"--exit={alias}"])
21517 assert options.exit_keywords == frozenset(getattr(objects.KEYWORDS, alias).values())
21518
21519 diff --git a/tests/scripts/test_pkgcheck.py b/tests/scripts/test_pkgcheck.py
21520 index 8478a746..49e2f8b6 100644
21521 --- a/tests/scripts/test_pkgcheck.py
21522 +++ b/tests/scripts/test_pkgcheck.py
21523 @@ -11,27 +11,27 @@ def test_script_run(capsys):
21524 """Test regular code path for running scripts."""
21525 script = partial(run, project)
21526
21527 - with patch(f'{project}.scripts.import_module') as import_module:
21528 + with patch(f"{project}.scripts.import_module") as import_module:
21529 import_module.side_effect = ImportError("baz module doesn't exist")
21530
21531 # default error path when script import fails
21532 - with patch('sys.argv', [project]):
21533 + with patch("sys.argv", [project]):
21534 with pytest.raises(SystemExit) as excinfo:
21535 script()
21536 assert excinfo.value.code == 1
21537 out, err = capsys.readouterr()
21538 - err = err.strip().split('\n')
21539 + err = err.strip().split("\n")
21540 assert len(err) == 3
21541 assert err[0] == "Failed importing: baz module doesn't exist!"
21542 assert err[1].startswith(f"Verify that {project} and its deps")
21543 assert err[2] == "Add --debug to the commandline for a traceback."
21544
21545 # running with --debug should raise an ImportError when there are issues
21546 - with patch('sys.argv', [project, '--debug']):
21547 + with patch("sys.argv", [project, "--debug"]):
21548 with pytest.raises(ImportError):
21549 script()
21550 out, err = capsys.readouterr()
21551 - err = err.strip().split('\n')
21552 + err = err.strip().split("\n")
21553 assert len(err) == 2
21554 assert err[0] == "Failed importing: baz module doesn't exist!"
21555 assert err[1].startswith(f"Verify that {project} and its deps")
21556 @@ -44,7 +44,7 @@ class TestPkgcheck:
21557 script = partial(run, project)
21558
21559 def test_version(self, capsys):
21560 - with patch('sys.argv', [project, '--version']):
21561 + with patch("sys.argv", [project, "--version"]):
21562 with pytest.raises(SystemExit) as excinfo:
21563 self.script()
21564 assert excinfo.value.code == 0
21565
21566 diff --git a/tests/scripts/test_pkgcheck_cache.py b/tests/scripts/test_pkgcheck_cache.py
21567 index 0414b4ec..023a3161 100644
21568 --- a/tests/scripts/test_pkgcheck_cache.py
21569 +++ b/tests/scripts/test_pkgcheck_cache.py
21570 @@ -14,95 +14,95 @@ class TestPkgcheckCache:
21571 @pytest.fixture(autouse=True)
21572 def _setup(self, testconfig, tmp_path):
21573 self.cache_dir = str(tmp_path)
21574 - self.args = [
21575 - project, '--config', testconfig,
21576 - 'cache', '--cache-dir', self.cache_dir]
21577 + self.args = [project, "--config", testconfig, "cache", "--cache-dir", self.cache_dir]
21578
21579 def test_cache_profiles(self, capsys):
21580 # force standalone repo profiles cache regen
21581 - for args in (['-u', '-f'], ['--update', '--force']):
21582 - with patch('sys.argv', self.args + args + ['-t', 'profiles']):
21583 + for args in (["-u", "-f"], ["--update", "--force"]):
21584 + with patch("sys.argv", self.args + args + ["-t", "profiles"]):
21585 with pytest.raises(SystemExit):
21586 self.script()
21587
21588 # verify the profiles cache shows up
21589 - with patch('sys.argv', self.args):
21590 + with patch("sys.argv", self.args):
21591 with pytest.raises(SystemExit) as excinfo:
21592 self.script()
21593 out, err = capsys.readouterr()
21594 assert not err
21595 out = out.strip().splitlines()
21596 - assert out[-1].startswith('standalone-')
21597 + assert out[-1].startswith("standalone-")
21598 assert excinfo.value.code == 0
21599
21600 # pretend to remove it
21601 - for arg in ('-n', '--dry-run'):
21602 - with patch('sys.argv', self.args + [arg] + ['-Rt', 'profiles']):
21603 + for arg in ("-n", "--dry-run"):
21604 + with patch("sys.argv", self.args + [arg] + ["-Rt", "profiles"]):
21605 with pytest.raises(SystemExit):
21606 self.script()
21607 out, err = capsys.readouterr()
21608 - assert err == ''
21609 - assert out.startswith(f'Would remove {self.cache_dir}')
21610 + assert err == ""
21611 + assert out.startswith(f"Would remove {self.cache_dir}")
21612
21613 # fail to remove it
21614 - for arg in ('-R', '--remove'):
21615 - with patch('pkgcheck.addons.caches.os.unlink') as unlink, \
21616 - patch('sys.argv', self.args + [arg] + ['-t', 'profiles']):
21617 - unlink.side_effect = IOError('bad perms')
21618 + for arg in ("-R", "--remove"):
21619 + with patch("pkgcheck.addons.caches.os.unlink") as unlink, patch(
21620 + "sys.argv", self.args + [arg] + ["-t", "profiles"]
21621 + ):
21622 + unlink.side_effect = IOError("bad perms")
21623 with pytest.raises(SystemExit) as excinfo:
21624 self.script()
21625 out, err = capsys.readouterr()
21626 assert not out
21627 assert os.listdir(self.cache_dir)
21628 - assert err.startswith('pkgcheck cache: error: failed removing profiles cache')
21629 + assert err.startswith("pkgcheck cache: error: failed removing profiles cache")
21630 assert excinfo.value.code == 2
21631
21632 # actually remove it
21633 - for arg in ('-R', '--remove'):
21634 - with patch('sys.argv', self.args + [arg] + ['-t', 'profiles']):
21635 + for arg in ("-R", "--remove"):
21636 + with patch("sys.argv", self.args + [arg] + ["-t", "profiles"]):
21637 with pytest.raises(SystemExit):
21638 self.script()
21639
21640 # verify it's gone
21641 - with patch('sys.argv', self.args):
21642 + with patch("sys.argv", self.args):
21643 with pytest.raises(SystemExit) as excinfo:
21644 self.script()
21645 out, err = capsys.readouterr()
21646 - assert (out, err) == ('', '')
21647 + assert (out, err) == ("", "")
21648 assert excinfo.value.code == 0
21649
21650 def test_cache_forced_removal(self, capsys):
21651 # force standalone repo profiles cache regen
21652 - with patch('sys.argv', self.args + ['-uf']):
21653 + with patch("sys.argv", self.args + ["-uf"]):
21654 with pytest.raises(SystemExit):
21655 self.script()
21656
21657 # fail to forcibly remove all
21658 - with patch('pkgcheck.addons.caches.shutil.rmtree') as rmtree, \
21659 - patch('sys.argv', self.args + ['-Rf']):
21660 - rmtree.side_effect = IOError('bad perms')
21661 + with patch("pkgcheck.addons.caches.shutil.rmtree") as rmtree, patch(
21662 + "sys.argv", self.args + ["-Rf"]
21663 + ):
21664 + rmtree.side_effect = IOError("bad perms")
21665 with pytest.raises(SystemExit) as excinfo:
21666 self.script()
21667 out, err = capsys.readouterr()
21668 assert not out
21669 - assert err.strip() == 'pkgcheck cache: error: failed removing cache dir: bad perms'
21670 + assert err.strip() == "pkgcheck cache: error: failed removing cache dir: bad perms"
21671 assert excinfo.value.code == 2
21672
21673 # actually forcibly remove all
21674 - with patch('sys.argv', self.args + ['-Rf']):
21675 + with patch("sys.argv", self.args + ["-Rf"]):
21676 with pytest.raises(SystemExit) as excinfo:
21677 self.script()
21678 out, err = capsys.readouterr()
21679 - assert (out, err) == ('', '')
21680 + assert (out, err) == ("", "")
21681 assert excinfo.value.code == 0
21682
21683 # cache dir has been entirely blown away
21684 assert not os.path.exists(self.cache_dir)
21685
21686 # forcing removal again does nothing
21687 - with patch('sys.argv', self.args + ['-Rf']):
21688 + with patch("sys.argv", self.args + ["-Rf"]):
21689 with pytest.raises(SystemExit) as excinfo:
21690 self.script()
21691 out, err = capsys.readouterr()
21692 - assert (out, err) == ('', '')
21693 + assert (out, err) == ("", "")
21694 assert excinfo.value.code == 0
21695
21696 diff --git a/tests/scripts/test_pkgcheck_ci.py b/tests/scripts/test_pkgcheck_ci.py
21697 index 2ac21d7c..bc0d9cbf 100644
21698 --- a/tests/scripts/test_pkgcheck_ci.py
21699 +++ b/tests/scripts/test_pkgcheck_ci.py
21700 @@ -10,61 +10,61 @@ from pkgcore.ebuild.cpv import VersionedCPV
21701
21702 class TestPkgcheckCi:
21703
21704 - script = partial(run, 'pkgcheck')
21705 + script = partial(run, "pkgcheck")
21706
21707 @pytest.fixture(autouse=True)
21708 def _setup(self, testconfig, tmp_path):
21709 self.cache_dir = str(tmp_path)
21710 - base_args = ['--config', testconfig]
21711 - self.scan_args = ['--config', 'no', '--cache-dir', self.cache_dir]
21712 + base_args = ["--config", testconfig]
21713 + self.scan_args = ["--config", "no", "--cache-dir", self.cache_dir]
21714 # args for running pkgcheck like a script
21715 - self.args = ['pkgcheck'] + base_args + ['ci'] + self.scan_args
21716 + self.args = ["pkgcheck"] + base_args + ["ci"] + self.scan_args
21717
21718 def test_empty_repo(self, capsys, repo):
21719 - with patch('sys.argv', self.args + [repo.location]):
21720 + with patch("sys.argv", self.args + [repo.location]):
21721 with pytest.raises(SystemExit) as excinfo:
21722 self.script()
21723 assert excinfo.value.code == 0
21724 out, err = capsys.readouterr()
21725 - assert out == err == ''
21726 + assert out == err == ""
21727
21728 def test_exit_status(self, repo):
21729 # create good ebuild and another with an invalid EAPI
21730 - repo.create_ebuild('cat/pkg-0')
21731 - repo.create_ebuild('cat/pkg-1', eapi='-1')
21732 + repo.create_ebuild("cat/pkg-0")
21733 + repo.create_ebuild("cat/pkg-1", eapi="-1")
21734 # exit status isn't enabled by default
21735 - args = ['-r', repo.location]
21736 - with patch('sys.argv', self.args + args):
21737 + args = ["-r", repo.location]
21738 + with patch("sys.argv", self.args + args):
21739 with pytest.raises(SystemExit) as excinfo:
21740 self.script()
21741 assert excinfo.value.code == 0
21742
21743 # all error level results are flagged by default when enabled
21744 - with patch('sys.argv', self.args + args + ['--exit']):
21745 + with patch("sys.argv", self.args + args + ["--exit"]):
21746 with pytest.raises(SystemExit) as excinfo:
21747 self.script()
21748 assert excinfo.value.code == 1
21749
21750 # selective error results will only flag those specified
21751 - with patch('sys.argv', self.args + args + ['--exit', 'InvalidSlot']):
21752 + with patch("sys.argv", self.args + args + ["--exit", "InvalidSlot"]):
21753 with pytest.raises(SystemExit) as excinfo:
21754 self.script()
21755 assert excinfo.value.code == 0
21756 - with patch('sys.argv', self.args + args + ['--exit', 'InvalidEapi']):
21757 + with patch("sys.argv", self.args + args + ["--exit", "InvalidEapi"]):
21758 with pytest.raises(SystemExit) as excinfo:
21759 self.script()
21760 assert excinfo.value.code == 1
21761
21762 def test_failures(self, tmp_path, repo):
21763 - repo.create_ebuild('cat/pkg-1', slot='')
21764 - failures = str(tmp_path / 'failures.json')
21765 - args = ['--failures', failures, '--exit', '-r', repo.location]
21766 - with patch('sys.argv', self.args + args):
21767 + repo.create_ebuild("cat/pkg-1", slot="")
21768 + failures = str(tmp_path / "failures.json")
21769 + args = ["--failures", failures, "--exit", "-r", repo.location]
21770 + with patch("sys.argv", self.args + args):
21771 with pytest.raises(SystemExit) as excinfo:
21772 self.script()
21773 assert excinfo.value.code == 1
21774
21775 with open(str(failures)) as f:
21776 results = list(JsonStream.from_iter(f))
21777 - pkg = VersionedCPV('cat/pkg-1')
21778 - assert results == [InvalidSlot('slot', 'SLOT cannot be unset or empty', pkg=pkg)]
21779 + pkg = VersionedCPV("cat/pkg-1")
21780 + assert results == [InvalidSlot("slot", "SLOT cannot be unset or empty", pkg=pkg)]
21781
21782 diff --git a/tests/scripts/test_pkgcheck_replay.py b/tests/scripts/test_pkgcheck_replay.py
21783 index 67ad3486..c2aeda66 100644
21784 --- a/tests/scripts/test_pkgcheck_replay.py
21785 +++ b/tests/scripts/test_pkgcheck_replay.py
21786 @@ -18,70 +18,69 @@ class TestPkgcheckReplay:
21787
21788 @pytest.fixture(autouse=True)
21789 def _setup(self, testconfig):
21790 - self.args = [project, '--config', testconfig, 'replay']
21791 + self.args = [project, "--config", testconfig, "replay"]
21792
21793 def test_missing_file_arg(self, capsys):
21794 - with patch('sys.argv', self.args):
21795 + with patch("sys.argv", self.args):
21796 with pytest.raises(SystemExit) as excinfo:
21797 self.script()
21798 out, err = capsys.readouterr()
21799 assert not out
21800 - err = err.strip().split('\n')
21801 + err = err.strip().split("\n")
21802 assert len(err) == 1
21803 - assert err[0] == (
21804 - 'pkgcheck replay: error: the following arguments are required: FILE')
21805 + assert err[0] == ("pkgcheck replay: error: the following arguments are required: FILE")
21806 assert excinfo.value.code == 2
21807
21808 def test_replay(self, capsys):
21809 - result = ProfileWarning('profile warning: foo')
21810 + result = ProfileWarning("profile warning: foo")
21811 with tempfile.NamedTemporaryFile() as f:
21812 out = PlainTextFormatter(f)
21813 with JsonStream(out) as reporter:
21814 reporter.report(result)
21815 - with patch('sys.argv', self.args + ['-R', 'StrReporter', f.name]):
21816 + with patch("sys.argv", self.args + ["-R", "StrReporter", f.name]):
21817 with pytest.raises(SystemExit) as excinfo:
21818 self.script()
21819 out, err = capsys.readouterr()
21820 assert not err
21821 - assert out == 'profile warning: foo\n'
21822 + assert out == "profile warning: foo\n"
21823 assert excinfo.value.code == 0
21824
21825 def test_corrupted_resuts(self, capsys):
21826 - result = ProfileWarning('profile warning: foo')
21827 + result = ProfileWarning("profile warning: foo")
21828 with tempfile.NamedTemporaryFile() as f:
21829 out = PlainTextFormatter(f)
21830 with JsonStream(out) as reporter:
21831 reporter.report(result)
21832 - f.write(b'corrupted')
21833 + f.write(b"corrupted")
21834 f.seek(0)
21835 - with patch('sys.argv', self.args + ['-R', 'StrReporter', f.name]):
21836 + with patch("sys.argv", self.args + ["-R", "StrReporter", f.name]):
21837 with pytest.raises(SystemExit) as excinfo:
21838 self.script()
21839 out, err = capsys.readouterr()
21840 - assert 'corrupted results file' in err
21841 + assert "corrupted results file" in err
21842 assert excinfo.value.code == 2
21843
21844 def test_invalid_file(self, capsys):
21845 - with tempfile.NamedTemporaryFile(mode='wt') as f:
21846 - f.write('invalid file')
21847 + with tempfile.NamedTemporaryFile(mode="wt") as f:
21848 + f.write("invalid file")
21849 f.seek(0)
21850 - with patch('sys.argv', self.args + ['-R', 'StrReporter', f.name]):
21851 + with patch("sys.argv", self.args + ["-R", "StrReporter", f.name]):
21852 with pytest.raises(SystemExit) as excinfo:
21853 self.script()
21854 out, err = capsys.readouterr()
21855 - assert err.strip() == 'pkgcheck replay: error: invalid or unsupported replay file'
21856 + assert err.strip() == "pkgcheck replay: error: invalid or unsupported replay file"
21857 assert excinfo.value.code == 2
21858
21859 def test_replay_pipe_stdin(self):
21860 - script = pytest.REPO_ROOT / 'bin/pkgcheck'
21861 - result = ProfileWarning('profile warning: foo')
21862 + script = pytest.REPO_ROOT / "bin/pkgcheck"
21863 + result = ProfileWarning("profile warning: foo")
21864 with tempfile.NamedTemporaryFile() as f:
21865 out = PlainTextFormatter(f)
21866 with JsonStream(out) as reporter:
21867 reporter.report(result)
21868 f.seek(0)
21869 p = subprocess.run(
21870 - [script, 'replay', '-R', 'StrReporter', '-'],
21871 - stdin=f, stdout=subprocess.PIPE)
21872 - assert p.stdout.decode() == 'profile warning: foo\n'
21873 + [script, "replay", "-R", "StrReporter", "-"], stdin=f, stdout=subprocess.PIPE
21874 + )
21875 + assert p.stdout.decode() == "profile warning: foo\n"
21876 assert p.returncode == 0
21877
21878 diff --git a/tests/scripts/test_pkgcheck_scan.py b/tests/scripts/test_pkgcheck_scan.py
21879 index c224d83a..bba0547d 100644
21880 --- a/tests/scripts/test_pkgcheck_scan.py
21881 +++ b/tests/scripts/test_pkgcheck_scan.py
21882 @@ -28,36 +28,35 @@ from ..misc import Profile
21883
21884
21885 class TestPkgcheckScanParseArgs:
21886 -
21887 def test_skipped_checks(self, tool):
21888 - options, _ = tool.parse_args(['scan'])
21889 + options, _ = tool.parse_args(["scan"])
21890 assert options.enabled_checks
21891 # some checks should always be skipped by default
21892 assert set(options.enabled_checks) != set(objects.CHECKS.values())
21893
21894 def test_enabled_check(self, tool):
21895 - options, _ = tool.parse_args(['scan', '-c', 'PkgDirCheck'])
21896 + options, _ = tool.parse_args(["scan", "-c", "PkgDirCheck"])
21897 assert options.enabled_checks == {checks_mod.pkgdir.PkgDirCheck}
21898
21899 def test_disabled_check(self, tool):
21900 - options, _ = tool.parse_args(['scan'])
21901 + options, _ = tool.parse_args(["scan"])
21902 assert checks_mod.pkgdir.PkgDirCheck in options.enabled_checks
21903 - options, _ = tool.parse_args(['scan', '-c=-PkgDirCheck'])
21904 + options, _ = tool.parse_args(["scan", "-c=-PkgDirCheck"])
21905 assert options.enabled_checks
21906 assert checks_mod.pkgdir.PkgDirCheck not in options.enabled_checks
21907
21908 def test_targets(self, tool):
21909 - options, _ = tool.parse_args(['scan', 'dev-util/foo'])
21910 - assert list(options.restrictions) == [(base.package_scope, atom.atom('dev-util/foo'))]
21911 + options, _ = tool.parse_args(["scan", "dev-util/foo"])
21912 + assert list(options.restrictions) == [(base.package_scope, atom.atom("dev-util/foo"))]
21913
21914 def test_stdin_targets(self, tool):
21915 - with patch('sys.stdin', StringIO('dev-util/foo')):
21916 - options, _ = tool.parse_args(['scan', '-'])
21917 - assert list(options.restrictions) == [(base.package_scope, atom.atom('dev-util/foo'))]
21918 + with patch("sys.stdin", StringIO("dev-util/foo")):
21919 + options, _ = tool.parse_args(["scan", "-"])
21920 + assert list(options.restrictions) == [(base.package_scope, atom.atom("dev-util/foo"))]
21921
21922 def test_invalid_targets(self, tool, capsys):
21923 with pytest.raises(SystemExit) as excinfo:
21924 - options, _ = tool.parse_args(['scan', 'dev-util/f$o'])
21925 + options, _ = tool.parse_args(["scan", "dev-util/f$o"])
21926 assert excinfo.value.code == 2
21927 out, err = capsys.readouterr()
21928 err = err.strip()
21929 @@ -65,91 +64,97 @@ class TestPkgcheckScanParseArgs:
21930
21931 def test_unknown_path_target(self, tool, capsys):
21932 with pytest.raises(SystemExit) as excinfo:
21933 - tool.parse_args(['scan', '/foo/bar'])
21934 + tool.parse_args(["scan", "/foo/bar"])
21935 assert excinfo.value.code == 2
21936 out, err = capsys.readouterr()
21937 - err = err.strip().split('\n')
21938 + err = err.strip().split("\n")
21939 assert err[-1].startswith(
21940 - "pkgcheck scan: error: 'standalone' repo doesn't contain: '/foo/bar'")
21941 + "pkgcheck scan: error: 'standalone' repo doesn't contain: '/foo/bar'"
21942 + )
21943
21944 def test_target_repo_id(self, tool):
21945 - options, _ = tool.parse_args(['scan', 'standalone'])
21946 - assert options.target_repo.repo_id == 'standalone'
21947 + options, _ = tool.parse_args(["scan", "standalone"])
21948 + assert options.target_repo.repo_id == "standalone"
21949 assert list(options.restrictions) == [(base.repo_scope, packages.AlwaysTrue)]
21950
21951 def test_target_dir_path(self, repo, tool):
21952 - options, _ = tool.parse_args(['scan', repo.location])
21953 - assert options.target_repo.repo_id == 'fake'
21954 + options, _ = tool.parse_args(["scan", repo.location])
21955 + assert options.target_repo.repo_id == "fake"
21956 assert list(options.restrictions) == [(base.repo_scope, packages.AlwaysTrue)]
21957
21958 def test_target_dir_path_in_repo(self, repo, tool):
21959 - path = pjoin(repo.location, 'profiles')
21960 - options, _ = tool.parse_args(['scan', path])
21961 - assert options.target_repo.repo_id == 'fake'
21962 + path = pjoin(repo.location, "profiles")
21963 + options, _ = tool.parse_args(["scan", path])
21964 + assert options.target_repo.repo_id == "fake"
21965 assert list(options.restrictions) == [(base.profiles_scope, packages.AlwaysTrue)]
21966
21967 def test_target_dir_path_in_configured_repo(self, tool):
21968 - options, _ = tool.parse_args(['scan', 'standalone'])
21969 - path = pjoin(options.target_repo.location, 'profiles')
21970 - options, _ = tool.parse_args(['scan', path])
21971 - assert options.target_repo.repo_id == 'standalone'
21972 + options, _ = tool.parse_args(["scan", "standalone"])
21973 + path = pjoin(options.target_repo.location, "profiles")
21974 + options, _ = tool.parse_args(["scan", path])
21975 + assert options.target_repo.repo_id == "standalone"
21976 assert list(options.restrictions) == [(base.profiles_scope, packages.AlwaysTrue)]
21977
21978 def test_target_non_repo_path(self, tool, capsys, tmp_path):
21979 with pytest.raises(SystemExit) as excinfo:
21980 - tool.parse_args(['scan', str(tmp_path)])
21981 + tool.parse_args(["scan", str(tmp_path)])
21982 assert excinfo.value.code == 2
21983 out, err = capsys.readouterr()
21984 assert not out
21985 assert err.startswith(
21986 - f"pkgcheck scan: error: 'standalone' repo doesn't contain: '{str(tmp_path)}'")
21987 + f"pkgcheck scan: error: 'standalone' repo doesn't contain: '{str(tmp_path)}'"
21988 + )
21989
21990 def test_target_invalid_repo(self, tool, capsys, make_repo):
21991 - repo = make_repo(masters=['unknown'])
21992 + repo = make_repo(masters=["unknown"])
21993 with pytest.raises(SystemExit) as excinfo:
21994 - tool.parse_args(['scan', repo.location])
21995 + tool.parse_args(["scan", repo.location])
21996 assert excinfo.value.code == 2
21997 out, err = capsys.readouterr()
21998 assert not out
21999 err = err.strip()
22000 - assert err.startswith('pkgcheck scan: error: repo init failed')
22001 + assert err.startswith("pkgcheck scan: error: repo init failed")
22002 assert err.endswith("has missing masters: 'unknown'")
22003
22004 def test_target_file_path(self, repo, tool):
22005 - os.makedirs(pjoin(repo.location, 'dev-util', 'foo'))
22006 - ebuild_path = pjoin(repo.location, 'dev-util', 'foo', 'foo-0.ebuild')
22007 + os.makedirs(pjoin(repo.location, "dev-util", "foo"))
22008 + ebuild_path = pjoin(repo.location, "dev-util", "foo", "foo-0.ebuild")
22009 touch(ebuild_path)
22010 - options, _ = tool.parse_args(['scan', ebuild_path])
22011 + options, _ = tool.parse_args(["scan", ebuild_path])
22012 restrictions = [
22013 - restricts.CategoryDep('dev-util'),
22014 - restricts.PackageDep('foo'),
22015 - restricts.VersionMatch('=', '0'),
22016 + restricts.CategoryDep("dev-util"),
22017 + restricts.PackageDep("foo"),
22018 + restricts.VersionMatch("=", "0"),
22019 + ]
22020 + assert list(options.restrictions) == [
22021 + (base.version_scope, packages.AndRestriction(*restrictions))
22022 ]
22023 - assert list(options.restrictions) == [(base.version_scope, packages.AndRestriction(*restrictions))]
22024 - assert options.target_repo.repo_id == 'fake'
22025 + assert options.target_repo.repo_id == "fake"
22026
22027 def test_target_package_dir_cwd(self, repo, tool):
22028 - os.makedirs(pjoin(repo.location, 'dev-util', 'foo'))
22029 - with chdir(pjoin(repo.location, 'dev-util', 'foo')):
22030 - options, _ = tool.parse_args(['scan'])
22031 - assert options.target_repo.repo_id == 'fake'
22032 + os.makedirs(pjoin(repo.location, "dev-util", "foo"))
22033 + with chdir(pjoin(repo.location, "dev-util", "foo")):
22034 + options, _ = tool.parse_args(["scan"])
22035 + assert options.target_repo.repo_id == "fake"
22036 restrictions = [
22037 - restricts.CategoryDep('dev-util'),
22038 - restricts.PackageDep('foo'),
22039 + restricts.CategoryDep("dev-util"),
22040 + restricts.PackageDep("foo"),
22041 + ]
22042 + assert list(options.restrictions) == [
22043 + (base.package_scope, packages.AndRestriction(*restrictions))
22044 ]
22045 - assert list(options.restrictions) == [(base.package_scope, packages.AndRestriction(*restrictions))]
22046
22047 def test_target_repo_dir_cwd(self, repo, tool):
22048 with chdir(repo.location):
22049 - options, _ = tool.parse_args(['scan'])
22050 - assert options.target_repo.repo_id == 'fake'
22051 + options, _ = tool.parse_args(["scan"])
22052 + assert options.target_repo.repo_id == "fake"
22053 assert list(options.restrictions) == [(base.repo_scope, packages.AlwaysTrue)]
22054
22055 def test_unknown_repo(self, tmp_path, capsys, tool):
22056 - for opt in ('-r', '--repo'):
22057 + for opt in ("-r", "--repo"):
22058 with pytest.raises(SystemExit) as excinfo:
22059 with chdir(str(tmp_path)):
22060 - options, _ = tool.parse_args(['scan', opt, 'foo'])
22061 + options, _ = tool.parse_args(["scan", opt, "foo"])
22062 assert excinfo.value.code == 2
22063 out, err = capsys.readouterr()
22064 assert not out
22065 @@ -158,27 +163,26 @@ class TestPkgcheckScanParseArgs:
22066 )
22067
22068 def test_invalid_repo(self, tmp_path, capsys, tool):
22069 - (tmp_path / 'foo').touch()
22070 - for opt in ('-r', '--repo'):
22071 + (tmp_path / "foo").touch()
22072 + for opt in ("-r", "--repo"):
22073 with pytest.raises(SystemExit) as excinfo:
22074 with chdir(str(tmp_path)):
22075 - options, _ = tool.parse_args(['scan', opt, 'foo'])
22076 + options, _ = tool.parse_args(["scan", opt, "foo"])
22077 assert excinfo.value.code == 2
22078 out, err = capsys.readouterr()
22079 assert not out
22080 - assert err.startswith(
22081 - "pkgcheck scan: error: argument -r/--repo: repo init failed:")
22082 + assert err.startswith("pkgcheck scan: error: argument -r/--repo: repo init failed:")
22083
22084 def test_valid_repo(self, tool):
22085 - for opt in ('-r', '--repo'):
22086 - options, _ = tool.parse_args(['scan', opt, 'standalone'])
22087 - assert options.target_repo.repo_id == 'standalone'
22088 + for opt in ("-r", "--repo"):
22089 + options, _ = tool.parse_args(["scan", opt, "standalone"])
22090 + assert options.target_repo.repo_id == "standalone"
22091 assert list(options.restrictions) == [(base.repo_scope, packages.AlwaysTrue)]
22092
22093 def test_unknown_reporter(self, capsys, tool):
22094 - for opt in ('-R', '--reporter'):
22095 + for opt in ("-R", "--reporter"):
22096 with pytest.raises(SystemExit) as excinfo:
22097 - options, _ = tool.parse_args(['scan', opt, 'foo'])
22098 + options, _ = tool.parse_args(["scan", opt, "foo"])
22099 assert excinfo.value.code == 2
22100 out, err = capsys.readouterr()
22101 assert not out
22102 @@ -187,161 +191,185 @@ class TestPkgcheckScanParseArgs:
22103 def test_format_reporter(self, capsys, tool):
22104 # missing --format
22105 with pytest.raises(SystemExit) as excinfo:
22106 - tool.parse_args(['scan', '-R', 'FormatReporter'])
22107 + tool.parse_args(["scan", "-R", "FormatReporter"])
22108 assert excinfo.value.code == 2
22109 out, err = capsys.readouterr()
22110 - err = err.strip().split('\n')
22111 - assert err[-1].endswith(
22112 - "missing or empty --format option required by FormatReporter")
22113 + err = err.strip().split("\n")
22114 + assert err[-1].endswith("missing or empty --format option required by FormatReporter")
22115
22116 # missing -R FormatReporter
22117 with pytest.raises(SystemExit) as excinfo:
22118 - tool.parse_args(['scan', '--format', 'foo'])
22119 + tool.parse_args(["scan", "--format", "foo"])
22120 assert excinfo.value.code == 2
22121 out, err = capsys.readouterr()
22122 - err = err.strip().split('\n')
22123 - assert err[-1].endswith(
22124 - "--format option is only valid when using FormatReporter")
22125 + err = err.strip().split("\n")
22126 + assert err[-1].endswith("--format option is only valid when using FormatReporter")
22127
22128 # properly set
22129 - options, _ = tool.parse_args(
22130 - ['scan', '-R', 'FormatReporter', '--format', 'foo'])
22131 + options, _ = tool.parse_args(["scan", "-R", "FormatReporter", "--format", "foo"])
22132
22133 def test_cwd(self, capsys, tool):
22134 # regularly working
22135 - options, _ = tool.parse_args(['scan'])
22136 + options, _ = tool.parse_args(["scan"])
22137 assert options.cwd == os.getcwd()
22138
22139 # pretend the CWD was removed out from under us
22140 - with patch('os.getcwd') as getcwd:
22141 - getcwd.side_effect = FileNotFoundError('CWD is gone')
22142 - options, _ = tool.parse_args(['scan'])
22143 + with patch("os.getcwd") as getcwd:
22144 + getcwd.side_effect = FileNotFoundError("CWD is gone")
22145 + options, _ = tool.parse_args(["scan"])
22146 assert options.cwd == const.DATA_PATH
22147
22148 def test_eclass_target(self, fakerepo, tool):
22149 - (eclass_dir := fakerepo / 'eclass').mkdir()
22150 - (eclass_path := eclass_dir / 'foo.eclass').touch()
22151 - options, _ = tool.parse_args(['scan', str(eclass_path)])
22152 - assert list(options.restrictions) == [(base.eclass_scope, 'foo')]
22153 + (eclass_dir := fakerepo / "eclass").mkdir()
22154 + (eclass_path := eclass_dir / "foo.eclass").touch()
22155 + options, _ = tool.parse_args(["scan", str(eclass_path)])
22156 + assert list(options.restrictions) == [(base.eclass_scope, "foo")]
22157
22158 def test_profiles_target(self, fakerepo, tool):
22159 - profiles_path = str(fakerepo / 'profiles')
22160 - options, _ = tool.parse_args(['scan', profiles_path])
22161 + profiles_path = str(fakerepo / "profiles")
22162 + options, _ = tool.parse_args(["scan", profiles_path])
22163 assert list(options.restrictions) == [(base.profiles_scope, packages.AlwaysTrue)]
22164
22165 def test_profiles_path_target_file(self, fakerepo, tool):
22166 - (pkg_mask_path := fakerepo / 'profiles/package.mask').touch()
22167 - options, _ = tool.parse_args(['scan', str(pkg_mask_path)])
22168 + (pkg_mask_path := fakerepo / "profiles/package.mask").touch()
22169 + options, _ = tool.parse_args(["scan", str(pkg_mask_path)])
22170 assert list(options.restrictions) == [(base.profile_node_scope, str(pkg_mask_path))]
22171
22172 def test_profiles_path_target_dir(self, fakerepo, tool):
22173 - (profile_dir := fakerepo / 'profiles/default').mkdir(parents=True)
22174 - (pkg_mask_path := profile_dir / 'package.mask').touch()
22175 - (pkg_use_path := profile_dir / 'package.use').touch()
22176 - options, _ = tool.parse_args(['scan', str(profile_dir)])
22177 - assert list(options.restrictions) == [(base.profile_node_scope, {str(pkg_mask_path), str(pkg_use_path)})]
22178 + (profile_dir := fakerepo / "profiles/default").mkdir(parents=True)
22179 + (pkg_mask_path := profile_dir / "package.mask").touch()
22180 + (pkg_use_path := profile_dir / "package.use").touch()
22181 + options, _ = tool.parse_args(["scan", str(profile_dir)])
22182 + assert list(options.restrictions) == [
22183 + (base.profile_node_scope, {str(pkg_mask_path), str(pkg_use_path)})
22184 + ]
22185
22186 def test_no_default_repo(self, tool, capsys):
22187 - stubconfig = pjoin(pkgcore_const.DATA_PATH, 'stubconfig')
22188 + stubconfig = pjoin(pkgcore_const.DATA_PATH, "stubconfig")
22189 with pytest.raises(SystemExit) as excinfo:
22190 - tool.parse_args(['--config', stubconfig, 'scan'])
22191 + tool.parse_args(["--config", stubconfig, "scan"])
22192 assert excinfo.value.code == 2
22193 out, err = capsys.readouterr()
22194 assert not out
22195 assert err.strip() == "pkgcheck scan: error: no default repo found"
22196
22197 - @pytest.mark.parametrize(('makeopts', 'expected_jobs'), (
22198 - ('', 4),
22199 - ('-j1', 1),
22200 - ('--jobs=6 -l 1', 6),
22201 - ('--load 1', 4),
22202 - ))
22203 + @pytest.mark.parametrize(
22204 + ("makeopts", "expected_jobs"),
22205 + (
22206 + ("", 4),
22207 + ("-j1", 1),
22208 + ("--jobs=6 -l 1", 6),
22209 + ("--load 1", 4),
22210 + ),
22211 + )
22212 def test_makeopts_parsing(self, parser, makeopts, expected_jobs):
22213 - with patch('os.cpu_count', return_value=4), \
22214 - os_environ(MAKEOPTS=makeopts):
22215 + with patch("os.cpu_count", return_value=4), os_environ(MAKEOPTS=makeopts):
22216
22217 - options = parser.parse_args(['scan'])
22218 + options = parser.parse_args(["scan"])
22219 assert options.jobs == expected_jobs
22220 assert options.tasks == 5 * expected_jobs
22221
22222 def test_no_color(self, parser, tmp_path):
22223 - (config_file := tmp_path / 'config').write_text(textwrap.dedent('''\
22224 - [DEFAULT]
22225 - color = true
22226 - '''))
22227 + (config_file := tmp_path / "config").write_text(
22228 + textwrap.dedent(
22229 + """\
22230 + [DEFAULT]
22231 + color = true
22232 + """
22233 + )
22234 + )
22235
22236 - args = ('scan', '--config', str(config_file))
22237 - with os_environ('NOCOLOR'):
22238 + args = ("scan", "--config", str(config_file))
22239 + with os_environ("NOCOLOR"):
22240 assert parser.parse_args(args).color is True
22241 - with os_environ(NOCOLOR='1'):
22242 + with os_environ(NOCOLOR="1"):
22243 # NOCOLOR overrides config file
22244 assert parser.parse_args(args).color is False
22245 # cmd line option overrides NOCOLOR
22246 - assert parser.parse_args([*args, '--color', 'n']).color is False
22247 - assert parser.parse_args([*args, '--color', 'y']).color is True
22248 + assert parser.parse_args([*args, "--color", "n"]).color is False
22249 + assert parser.parse_args([*args, "--color", "y"]).color is True
22250
22251
22252 class TestPkgcheckScanParseConfigArgs:
22253 -
22254 @pytest.fixture(autouse=True)
22255 def _setup(self, parser, tmp_path, repo):
22256 self.parser = parser
22257 self.repo = repo
22258 - self.args = ['scan', '-r', repo.location]
22259 + self.args = ["scan", "-r", repo.location]
22260 self.system_config = str(tmp_path / "system-config")
22261 self.user_config = str(tmp_path / "user-config")
22262 self.config = str(tmp_path / "custom-config")
22263
22264 def test_config_precedence(self):
22265 configs = [self.system_config, self.user_config]
22266 - with patch('pkgcheck.cli.ConfigFileParser.default_configs', configs):
22267 - with open(self.system_config, 'w') as f:
22268 - f.write(textwrap.dedent("""\
22269 - [DEFAULT]
22270 - jobs=1000
22271 - """))
22272 + with patch("pkgcheck.cli.ConfigFileParser.default_configs", configs):
22273 + with open(self.system_config, "w") as f:
22274 + f.write(
22275 + textwrap.dedent(
22276 + """\
22277 + [DEFAULT]
22278 + jobs=1000
22279 + """
22280 + )
22281 + )
22282 options = self.parser.parse_args(self.args)
22283 assert options.jobs == 1000
22284
22285 # user config overrides system config
22286 - with open(self.user_config, 'w') as f:
22287 - f.write(textwrap.dedent("""\
22288 - [DEFAULT]
22289 - jobs=1001
22290 - """))
22291 + with open(self.user_config, "w") as f:
22292 + f.write(
22293 + textwrap.dedent(
22294 + """\
22295 + [DEFAULT]
22296 + jobs=1001
22297 + """
22298 + )
22299 + )
22300 options = self.parser.parse_args(self.args)
22301 assert options.jobs == 1001
22302
22303 # repo config overrides user config
22304 - with open(pjoin(self.repo.location, 'metadata', 'pkgcheck.conf'), 'w') as f:
22305 - f.write(textwrap.dedent("""\
22306 - [DEFAULT]
22307 - jobs=1002
22308 - """))
22309 + with open(pjoin(self.repo.location, "metadata", "pkgcheck.conf"), "w") as f:
22310 + f.write(
22311 + textwrap.dedent(
22312 + """\
22313 + [DEFAULT]
22314 + jobs=1002
22315 + """
22316 + )
22317 + )
22318 options = self.parser.parse_args(self.args)
22319 assert options.jobs == 1002
22320
22321 # custom config overrides user config
22322 - with open(self.config, 'w') as f:
22323 - f.write(textwrap.dedent("""\
22324 - [DEFAULT]
22325 - jobs=1003
22326 - """))
22327 - config_args = self.args + ['--config', self.config]
22328 + with open(self.config, "w") as f:
22329 + f.write(
22330 + textwrap.dedent(
22331 + """\
22332 + [DEFAULT]
22333 + jobs=1003
22334 + """
22335 + )
22336 + )
22337 + config_args = self.args + ["--config", self.config]
22338 options = self.parser.parse_args(config_args)
22339 assert options.jobs == 1003
22340
22341 # repo defaults override general defaults
22342 - with open(self.config, 'a') as f:
22343 - f.write(textwrap.dedent(f"""\
22344 - [{self.repo.repo_id}]
22345 - jobs=1004
22346 - """))
22347 + with open(self.config, "a") as f:
22348 + f.write(
22349 + textwrap.dedent(
22350 + f"""\
22351 + [{self.repo.repo_id}]
22352 + jobs=1004
22353 + """
22354 + )
22355 + )
22356 options = self.parser.parse_args(config_args)
22357 assert options.jobs == 1004
22358
22359 # command line options override all config settings
22360 - options = self.parser.parse_args(config_args + ['--jobs', '9999'])
22361 + options = self.parser.parse_args(config_args + ["--jobs", "9999"])
22362 assert options.jobs == 9999
22363
22364
22365 @@ -349,143 +377,146 @@ class TestPkgcheckScan:
22366
22367 script = staticmethod(partial(run, project))
22368
22369 - repos_data = pytest.REPO_ROOT / 'testdata/data/repos'
22370 - repos_dir = pytest.REPO_ROOT / 'testdata/repos'
22371 - repos = tuple(sorted(x.name for x in repos_data.iterdir() if x.name != 'network'))
22372 + repos_data = pytest.REPO_ROOT / "testdata/data/repos"
22373 + repos_dir = pytest.REPO_ROOT / "testdata/repos"
22374 + repos = tuple(sorted(x.name for x in repos_data.iterdir() if x.name != "network"))
22375
22376 _all_results = [
22377 (cls, result)
22378 for name, cls in sorted(objects.CHECKS.items())
22379 if not issubclass(cls, checks_mod.NetworkCheck)
22380 - for result in sorted(cls.known_results, key=attrgetter('__name__'))
22381 + for result in sorted(cls.known_results, key=attrgetter("__name__"))
22382 ]
22383
22384 @pytest.fixture(autouse=True)
22385 def _setup(self, testconfig, tmp_path):
22386 self.cache_dir = str(tmp_path)
22387 - base_args = ['--config', testconfig]
22388 + base_args = ["--config", testconfig]
22389 self.scan = partial(scan, base_args=base_args)
22390 # args for running `pkgcheck scan` via API call
22391 - self.scan_args = ['--config', 'no', '--cache-dir', self.cache_dir]
22392 + self.scan_args = ["--config", "no", "--cache-dir", self.cache_dir]
22393 # args for running pkgcheck like a script
22394 - self.args = [project] + base_args + ['scan'] + self.scan_args
22395 + self.args = [project] + base_args + ["scan"] + self.scan_args
22396
22397 def test_empty_repo(self, capsys, repo):
22398 - with patch('sys.argv', self.args + [repo.location]):
22399 + with patch("sys.argv", self.args + [repo.location]):
22400 with pytest.raises(SystemExit) as excinfo:
22401 self.script()
22402 assert excinfo.value.code == 0
22403 out, err = capsys.readouterr()
22404 - assert out == err == ''
22405 + assert out == err == ""
22406
22407 def test_no_matching_checks_scope(self, tool):
22408 - options, _ = tool.parse_args(['scan', 'standalone'])
22409 - path = pjoin(options.target_repo.location, 'profiles')
22410 - error = 'no matching checks available for profiles scope'
22411 + options, _ = tool.parse_args(["scan", "standalone"])
22412 + path = pjoin(options.target_repo.location, "profiles")
22413 + error = "no matching checks available for profiles scope"
22414 with pytest.raises(base.PkgcheckUserException, match=error):
22415 - self.scan(self.scan_args + ['-c', 'PkgDirCheck', path])
22416 + self.scan(self.scan_args + ["-c", "PkgDirCheck", path])
22417
22418 def test_stdin_targets_with_no_args(self):
22419 - with patch('sys.stdin', StringIO()):
22420 - with pytest.raises(base.PkgcheckUserException, match='no targets'):
22421 - self.scan(self.scan_args + ['-'])
22422 + with patch("sys.stdin", StringIO()):
22423 + with pytest.raises(base.PkgcheckUserException, match="no targets"):
22424 + self.scan(self.scan_args + ["-"])
22425
22426 def test_exit_status(self, repo):
22427 # create good ebuild and another with an invalid EAPI
22428 - repo.create_ebuild('newcat/pkg-0')
22429 - repo.create_ebuild('newcat/pkg-1', eapi='-1')
22430 + repo.create_ebuild("newcat/pkg-0")
22431 + repo.create_ebuild("newcat/pkg-1", eapi="-1")
22432 # exit status isn't enabled by default
22433 - args = ['-r', repo.location]
22434 - with patch('sys.argv', self.args + args):
22435 + args = ["-r", repo.location]
22436 + with patch("sys.argv", self.args + args):
22437 with pytest.raises(SystemExit) as excinfo:
22438 self.script()
22439 assert excinfo.value.code == 0
22440
22441 # all error level results are flagged by default when enabled
22442 - with patch('sys.argv', self.args + args + ['--exit']):
22443 + with patch("sys.argv", self.args + args + ["--exit"]):
22444 with pytest.raises(SystemExit) as excinfo:
22445 self.script()
22446 assert excinfo.value.code == 1
22447
22448 # selective error results will only flag those specified
22449 - with patch('sys.argv', self.args + args + ['--exit', 'InvalidSlot']):
22450 + with patch("sys.argv", self.args + args + ["--exit", "InvalidSlot"]):
22451 with pytest.raises(SystemExit) as excinfo:
22452 self.script()
22453 assert excinfo.value.code == 0
22454 - with patch('sys.argv', self.args + args + ['--exit', 'InvalidEapi']):
22455 + with patch("sys.argv", self.args + args + ["--exit", "InvalidEapi"]):
22456 with pytest.raises(SystemExit) as excinfo:
22457 self.script()
22458 assert excinfo.value.code == 1
22459
22460 def test_filter_latest(self, make_repo):
22461 - repo = make_repo(arches=['amd64'])
22462 + repo = make_repo(arches=["amd64"])
22463 # create stub profile to suppress ArchesWithoutProfiles result
22464 - repo.create_profiles([Profile('stub', 'amd64')])
22465 + repo.create_profiles([Profile("stub", "amd64")])
22466 # create ebuild with unknown keywords
22467 - repo.create_ebuild('cat/pkg-0', keywords=['unknown'], homepage='https://example.com')
22468 + repo.create_ebuild("cat/pkg-0", keywords=["unknown"], homepage="https://example.com")
22469 # and a good ebuild for the latest version
22470 - repo.create_ebuild('cat/pkg-1', keywords=['amd64'], homepage='https://example.com')
22471 + repo.create_ebuild("cat/pkg-1", keywords=["amd64"], homepage="https://example.com")
22472
22473 # results for old pkgs will be shown by default
22474 - args = ['-r', repo.location]
22475 - with patch('sys.argv', self.args + args):
22476 + args = ["-r", repo.location]
22477 + with patch("sys.argv", self.args + args):
22478 results = list(self.scan(self.scan_args + args))
22479 assert len(results) == 1
22480
22481 # but are ignored when running using the 'latest' filter
22482 - for opt in ('-f', '--filter'):
22483 - for arg in ('latest', 'latest:KeywordsCheck', 'latest:UnknownKeywords'):
22484 + for opt in ("-f", "--filter"):
22485 + for arg in ("latest", "latest:KeywordsCheck", "latest:UnknownKeywords"):
22486 assert not list(self.scan(self.scan_args + args + [opt, arg]))
22487
22488 def test_scan_restrictions(self, repo):
22489 # create two ebuilds with bad EAPIs
22490 - repo.create_ebuild('cat/pkg-0', eapi='-1')
22491 - repo.create_ebuild('cat/pkg-1', eapi='-1')
22492 + repo.create_ebuild("cat/pkg-0", eapi="-1")
22493 + repo.create_ebuild("cat/pkg-1", eapi="-1")
22494
22495 # matching version restriction returns a single result
22496 - results = list(self.scan(self.scan_args + ['-r', repo.location, '=cat/pkg-0']))
22497 - assert [x.version for x in results] == ['0']
22498 + results = list(self.scan(self.scan_args + ["-r", repo.location, "=cat/pkg-0"]))
22499 + assert [x.version for x in results] == ["0"]
22500
22501 # unmatching version restriction returns no results
22502 - results = list(self.scan(self.scan_args + ['-r', repo.location, '=cat/pkg-2']))
22503 + results = list(self.scan(self.scan_args + ["-r", repo.location, "=cat/pkg-2"]))
22504 assert not results
22505
22506 # matching package restriction returns two sorted results
22507 - results = list(self.scan(self.scan_args + ['-r', repo.location, 'cat/pkg']))
22508 - assert [x.version for x in results] == ['0', '1']
22509 + results = list(self.scan(self.scan_args + ["-r", repo.location, "cat/pkg"]))
22510 + assert [x.version for x in results] == ["0", "1"]
22511
22512 # unmatching package restriction returns no results
22513 - results = list(self.scan(self.scan_args + ['-r', repo.location, 'cat/unknown']))
22514 + results = list(self.scan(self.scan_args + ["-r", repo.location, "cat/unknown"]))
22515 assert not results
22516
22517 def test_explict_skip_check(self):
22518 """SkipCheck exceptions are raised when triggered for explicitly enabled checks."""
22519 - error = 'network checks not enabled'
22520 + error = "network checks not enabled"
22521 with pytest.raises(base.PkgcheckException, match=error):
22522 - self.scan(self.scan_args + ['-C', 'net'])
22523 + self.scan(self.scan_args + ["-C", "net"])
22524
22525 def test_cache_disabled_skip_check(self):
22526 """SkipCheck exceptions are raised when enabled checks require disabled cache types."""
22527 - args = ['--cache=-git', '-c', 'StableRequestCheck']
22528 - error = 'StableRequestCheck: git cache support required'
22529 + args = ["--cache=-git", "-c", "StableRequestCheck"]
22530 + error = "StableRequestCheck: git cache support required"
22531 with pytest.raises(base.PkgcheckException, match=error):
22532 self.scan(self.scan_args + args)
22533
22534 - @pytest.mark.parametrize('module', (
22535 - pytest.param('pkgcheck.pipeline.UnversionedSource', id='producer'),
22536 - pytest.param('pkgcheck.runners.SyncCheckRunner.run', id='consumer'),
22537 - ))
22538 + @pytest.mark.parametrize(
22539 + "module",
22540 + (
22541 + pytest.param("pkgcheck.pipeline.UnversionedSource", id="producer"),
22542 + pytest.param("pkgcheck.runners.SyncCheckRunner.run", id="consumer"),
22543 + ),
22544 + )
22545 def test_pipeline_exceptions(self, module):
22546 """Test checkrunner pipeline against unhandled exceptions."""
22547 with patch(module) as faked:
22548 - faked.side_effect = Exception('pipeline failed')
22549 - with pytest.raises(base.PkgcheckException, match='Exception: pipeline failed'):
22550 + faked.side_effect = Exception("pipeline failed")
22551 + with pytest.raises(base.PkgcheckException, match="Exception: pipeline failed"):
22552 list(self.scan(self.scan_args))
22553
22554 # nested mapping of repos to checks/keywords they cover
22555 _checks = defaultdict(lambda: defaultdict(set))
22556
22557 - @pytest.mark.parametrize('repo', repos)
22558 + @pytest.mark.parametrize("repo", repos)
22559 def test_scan_repo_data(self, repo):
22560 """Make sure the test data is up to date check/result naming wise."""
22561 for check in (self.repos_data / repo).iterdir():
22562 @@ -506,19 +537,19 @@ class TestPkgcheckScan:
22563 _results = {}
22564 _verbose_results = {}
22565
22566 - @pytest.mark.parametrize('repo', repos)
22567 + @pytest.mark.parametrize("repo", repos)
22568 def test_scan_repo(self, repo, tmp_path, verbosity=0):
22569 """Scan a target repo, saving results for verification."""
22570 repo_dir = self.repos_dir / repo
22571
22572 # run all existing triggers
22573 triggers = [
22574 - pjoin(root, 'trigger.sh')
22575 + pjoin(root, "trigger.sh")
22576 for root, _dirs, files in os.walk(self.repos_data / repo)
22577 - if 'trigger.sh' in files
22578 + if "trigger.sh" in files
22579 ]
22580 if triggers:
22581 - triggered_repo = tmp_path / f'triggered-{repo}'
22582 + triggered_repo = tmp_path / f"triggered-{repo}"
22583 shutil.copytree(repo_dir, triggered_repo)
22584 for trigger in triggers:
22585 self._script(trigger, triggered_repo)
22586 @@ -526,19 +557,19 @@ class TestPkgcheckScan:
22587
22588 if repo not in self._checks:
22589 self.test_scan_repo_data(repo)
22590 - args = (['-v'] * verbosity) + ['-r', str(repo_dir), '-c', ','.join(self._checks[repo])]
22591 + args = (["-v"] * verbosity) + ["-r", str(repo_dir), "-c", ",".join(self._checks[repo])]
22592
22593 # add any defined extra repo args
22594 try:
22595 - args.extend(shlex.split((repo_dir / 'metadata/pkgcheck-args').read_text()))
22596 + args.extend(shlex.split((repo_dir / "metadata/pkgcheck-args").read_text()))
22597 except FileNotFoundError:
22598 pass
22599
22600 results = []
22601 for result in self.scan(self.scan_args + args):
22602 # ignore results generated from stubs
22603 - stubs = (getattr(result, x, '') for x in ('category', 'package'))
22604 - if any(x.startswith('stub') for x in stubs):
22605 + stubs = (getattr(result, x, "") for x in ("category", "package"))
22606 + if any(x.startswith("stub") for x in stubs):
22607 continue
22608 results.append(result)
22609
22610 @@ -549,7 +580,7 @@ class TestPkgcheckScan:
22611 self._results[repo] = set(results)
22612 assert len(results) == len(self._results[repo])
22613
22614 - @pytest.mark.parametrize('repo', repos)
22615 + @pytest.mark.parametrize("repo", repos)
22616 def test_scan_repo_verbose(self, repo, tmp_path):
22617 """Scan a target repo in verbose mode, saving results for verification."""
22618 return self.test_scan_repo(repo, tmp_path, verbosity=1)
22619 @@ -572,7 +603,7 @@ class TestPkgcheckScan:
22620 output = f.read().decode()
22621 return output
22622
22623 - @pytest.mark.parametrize('repo', repos)
22624 + @pytest.mark.parametrize("repo", repos)
22625 def test_scan_verify(self, repo, tmp_path):
22626 """Run pkgcheck against test pkgs in bundled repo, verifying result output."""
22627 results = set()
22628 @@ -584,15 +615,19 @@ class TestPkgcheckScan:
22629 for check, keywords in self._checks[repo].items():
22630 for keyword in keywords:
22631 # verify the expected results were seen during the repo scans
22632 - expected_results = self._get_results(f'{repo}/{check}/{keyword}/expected.json')
22633 - assert expected_results, 'regular results must always exist'
22634 - assert self._render_results(expected_results), 'failed rendering results'
22635 + expected_results = self._get_results(f"{repo}/{check}/{keyword}/expected.json")
22636 + assert expected_results, "regular results must always exist"
22637 + assert self._render_results(expected_results), "failed rendering results"
22638 results.update(expected_results)
22639
22640 # when expected verbose results exist use them, otherwise fallback to using the regular ones
22641 - expected_verbose_results = self._get_results(f'{repo}/{check}/{keyword}/expected-verbose.json')
22642 + expected_verbose_results = self._get_results(
22643 + f"{repo}/{check}/{keyword}/expected-verbose.json"
22644 + )
22645 if expected_verbose_results:
22646 - assert self._render_results(expected_verbose_results), 'failed rendering verbose results'
22647 + assert self._render_results(
22648 + expected_verbose_results
22649 + ), "failed rendering verbose results"
22650 verbose_results.update(expected_verbose_results)
22651 else:
22652 verbose_results.update(expected_results)
22653 @@ -600,34 +635,39 @@ class TestPkgcheckScan:
22654 if results != self._results[repo]:
22655 missing = self._render_results(results - self._results[repo])
22656 unknown = self._render_results(self._results[repo] - results)
22657 - error = ['unmatched repo scan results:']
22658 + error = ["unmatched repo scan results:"]
22659 if missing:
22660 - error.append(f'{repo} repo missing expected results:\n{missing}')
22661 + error.append(f"{repo} repo missing expected results:\n{missing}")
22662 if unknown:
22663 - error.append(f'{repo} repo unknown results:\n{unknown}')
22664 - pytest.fail('\n'.join(error))
22665 + error.append(f"{repo} repo unknown results:\n{unknown}")
22666 + pytest.fail("\n".join(error))
22667 if verbose_results != self._verbose_results[repo]:
22668 missing = self._render_results(verbose_results - self._verbose_results[repo])
22669 unknown = self._render_results(self._verbose_results[repo] - verbose_results)
22670 - error = ['unmatched verbose repo scan results:']
22671 + error = ["unmatched verbose repo scan results:"]
22672 if missing:
22673 - error.append(f'{repo} repo missing expected results:\n{missing}')
22674 + error.append(f"{repo} repo missing expected results:\n{missing}")
22675 if unknown:
22676 - error.append(f'{repo} repo unknown results:\n{unknown}')
22677 - pytest.fail('\n'.join(error))
22678 + error.append(f"{repo} repo unknown results:\n{unknown}")
22679 + pytest.fail("\n".join(error))
22680
22681 @staticmethod
22682 def _patch(fix, repo_path):
22683 with fix.open() as fix_file:
22684 try:
22685 subprocess.run(
22686 - ['patch', '-p1'], cwd=repo_path, stdin=fix_file,
22687 - capture_output=True, check=True, text=True)
22688 + ["patch", "-p1"],
22689 + cwd=repo_path,
22690 + stdin=fix_file,
22691 + capture_output=True,
22692 + check=True,
22693 + text=True,
22694 + )
22695 except subprocess.CalledProcessError as exc:
22696 error = exc.stderr if exc.stderr else exc.stdout
22697 pytest.fail(error)
22698
22699 - @pytest.mark.parametrize('check, result', _all_results)
22700 + @pytest.mark.parametrize("check, result", _all_results)
22701 def test_fix(self, check, result, tmp_path):
22702 """Apply fixes to pkgs, verifying the related results are fixed."""
22703 check_name = check.__name__
22704 @@ -635,36 +675,36 @@ class TestPkgcheckScan:
22705 tested = False
22706 for repo in self.repos:
22707 keyword_dir = self.repos_data / repo / check_name / keyword
22708 - if (fix := keyword_dir / 'fix.patch').exists():
22709 + if (fix := keyword_dir / "fix.patch").exists():
22710 func = self._patch
22711 - elif (fix := keyword_dir / 'fix.sh').exists():
22712 + elif (fix := keyword_dir / "fix.sh").exists():
22713 func = self._script
22714 else:
22715 continue
22716
22717 # apply a fix if one exists and make sure the related result doesn't appear
22718 repo_dir = self.repos_dir / repo
22719 - fixed_repo = tmp_path / f'fixed-{repo}'
22720 + fixed_repo = tmp_path / f"fixed-{repo}"
22721 shutil.copytree(repo_dir, fixed_repo)
22722 func(fix, fixed_repo)
22723
22724 - args = ['-r', str(fixed_repo), '-c', check_name, '-k', keyword]
22725 + args = ["-r", str(fixed_repo), "-c", check_name, "-k", keyword]
22726
22727 # add any defined extra repo args
22728 try:
22729 - with open(f'{repo_dir}/metadata/pkgcheck-args') as f:
22730 + with open(f"{repo_dir}/metadata/pkgcheck-args") as f:
22731 args.extend(shlex.split(f.read()))
22732 except FileNotFoundError:
22733 pass
22734
22735 results = list(self.scan(self.scan_args + args))
22736 if results:
22737 - error = ['unexpected repo scan results:']
22738 + error = ["unexpected repo scan results:"]
22739 error.append(self._render_results(results))
22740 - pytest.fail('\n'.join(error))
22741 + pytest.fail("\n".join(error))
22742
22743 shutil.rmtree(fixed_repo)
22744 tested = True
22745
22746 if not tested:
22747 - pytest.skip('fix not available')
22748 + pytest.skip("fix not available")
22749
22750 diff --git a/tests/scripts/test_pkgcheck_show.py b/tests/scripts/test_pkgcheck_show.py
22751 index 4557ecfd..ee40f7cf 100644
22752 --- a/tests/scripts/test_pkgcheck_show.py
22753 +++ b/tests/scripts/test_pkgcheck_show.py
22754 @@ -15,39 +15,39 @@ class TestPkgcheckShow:
22755
22756 @pytest.fixture(autouse=True)
22757 def _setup(self, testconfig):
22758 - self.args = [project, '--config', testconfig, 'show']
22759 + self.args = [project, "--config", testconfig, "show"]
22760
22761 def test_show_no_args(self, capsys):
22762 # defaults to outputting keywords list if no option is passed
22763 - with patch('sys.argv', self.args):
22764 + with patch("sys.argv", self.args):
22765 with pytest.raises(SystemExit) as excinfo:
22766 self.script()
22767 out, err = capsys.readouterr()
22768 assert not err
22769 - out = out.strip().split('\n')
22770 + out = out.strip().split("\n")
22771 assert out == sorted(objects.KEYWORDS.keys())
22772 assert excinfo.value.code == 0
22773
22774 def test_show_keywords(self, capsys):
22775 - for arg in ('-k', '--keywords'):
22776 + for arg in ("-k", "--keywords"):
22777 # regular mode
22778 - with patch('sys.argv', self.args + [arg]):
22779 + with patch("sys.argv", self.args + [arg]):
22780 with pytest.raises(SystemExit) as excinfo:
22781 self.script()
22782 out, err = capsys.readouterr()
22783 assert not err
22784 - out = out.strip().split('\n')
22785 + out = out.strip().split("\n")
22786 regular_output = out
22787 assert out == sorted(objects.KEYWORDS.keys())
22788 assert excinfo.value.code == 0
22789
22790 # verbose mode
22791 - with patch('sys.argv', self.args + [arg, '-v']):
22792 + with patch("sys.argv", self.args + [arg, "-v"]):
22793 with pytest.raises(SystemExit) as excinfo:
22794 self.script()
22795 out, err = capsys.readouterr()
22796 assert not err
22797 - out = out.strip().split('\n')
22798 + out = out.strip().split("\n")
22799 verbose_output = out
22800 assert excinfo.value.code == 0
22801
22802 @@ -55,25 +55,25 @@ class TestPkgcheckShow:
22803 assert len(regular_output) < len(verbose_output)
22804
22805 def test_show_checks(self, capsys):
22806 - for arg in ('-c', '--checks'):
22807 + for arg in ("-c", "--checks"):
22808 # regular mode
22809 - with patch('sys.argv', self.args + [arg]):
22810 + with patch("sys.argv", self.args + [arg]):
22811 with pytest.raises(SystemExit) as excinfo:
22812 self.script()
22813 out, err = capsys.readouterr()
22814 assert not err
22815 - out = out.strip().split('\n')
22816 + out = out.strip().split("\n")
22817 regular_output = out
22818 assert out == sorted(objects.CHECKS.keys())
22819 assert excinfo.value.code == 0
22820
22821 # verbose mode
22822 - with patch('sys.argv', self.args + [arg, '-v']):
22823 + with patch("sys.argv", self.args + [arg, "-v"]):
22824 with pytest.raises(SystemExit) as excinfo:
22825 self.script()
22826 out, err = capsys.readouterr()
22827 assert not err
22828 - out = out.strip().split('\n')
22829 + out = out.strip().split("\n")
22830 verbose_output = out
22831 assert excinfo.value.code == 0
22832
22833 @@ -81,50 +81,50 @@ class TestPkgcheckShow:
22834 assert len(regular_output) < len(verbose_output)
22835
22836 def test_show_scopes(self, capsys):
22837 - for arg in ('-s', '--scopes'):
22838 - with patch('sys.argv', self.args + [arg]):
22839 + for arg in ("-s", "--scopes"):
22840 + with patch("sys.argv", self.args + [arg]):
22841 with pytest.raises(SystemExit) as excinfo:
22842 self.script()
22843 out, err = capsys.readouterr()
22844 assert not err
22845 - out = out.strip().split('\n')
22846 + out = out.strip().split("\n")
22847 assert out == list(base.scopes)
22848 assert excinfo.value.code == 0
22849 - regular_output = '\n'.join(itertools.chain(out))
22850 + regular_output = "\n".join(itertools.chain(out))
22851
22852 # verbose mode
22853 - with patch('sys.argv', self.args + [arg, '-v']):
22854 + with patch("sys.argv", self.args + [arg, "-v"]):
22855 with pytest.raises(SystemExit) as excinfo:
22856 self.script()
22857 out, err = capsys.readouterr()
22858 assert not err
22859 - out = out.strip().split('\n')
22860 + out = out.strip().split("\n")
22861 assert excinfo.value.code == 0
22862 - verbose_output = '\n'.join(itertools.chain(out))
22863 + verbose_output = "\n".join(itertools.chain(out))
22864
22865 # verbose output shows more info
22866 assert len(regular_output) < len(verbose_output)
22867
22868 def test_show_reporters(self, capsys):
22869 - for arg in ('-r', '--reporters'):
22870 + for arg in ("-r", "--reporters"):
22871 # regular mode
22872 - with patch('sys.argv', self.args + [arg]):
22873 + with patch("sys.argv", self.args + [arg]):
22874 with pytest.raises(SystemExit) as excinfo:
22875 self.script()
22876 out, err = capsys.readouterr()
22877 assert not err
22878 - out = out.strip().split('\n')
22879 + out = out.strip().split("\n")
22880 regular_output = out
22881 assert out == sorted(objects.REPORTERS.keys())
22882 assert excinfo.value.code == 0
22883
22884 # verbose mode
22885 - with patch('sys.argv', self.args + [arg, '-v']):
22886 + with patch("sys.argv", self.args + [arg, "-v"]):
22887 with pytest.raises(SystemExit) as excinfo:
22888 self.script()
22889 out, err = capsys.readouterr()
22890 assert not err
22891 - out = out.strip().split('\n')
22892 + out = out.strip().split("\n")
22893 verbose_output = out
22894 assert excinfo.value.code == 0
22895
22896 @@ -132,27 +132,27 @@ class TestPkgcheckShow:
22897 assert len(regular_output) < len(verbose_output)
22898
22899 def test_show_caches(self, capsys):
22900 - for arg in ('-C', '--caches'):
22901 - with patch('sys.argv', self.args + [arg]):
22902 + for arg in ("-C", "--caches"):
22903 + with patch("sys.argv", self.args + [arg]):
22904 with pytest.raises(SystemExit) as excinfo:
22905 self.script()
22906 out, err = capsys.readouterr()
22907 assert not err
22908 - out = out.strip().split('\n')
22909 + out = out.strip().split("\n")
22910 cache_objs = caches.CachedAddon.caches.values()
22911 assert out == sorted(x.type for x in cache_objs)
22912 assert excinfo.value.code == 0
22913 - regular_output = '\n'.join(itertools.chain(out))
22914 + regular_output = "\n".join(itertools.chain(out))
22915
22916 # verbose mode
22917 - with patch('sys.argv', self.args + [arg, '-v']):
22918 + with patch("sys.argv", self.args + [arg, "-v"]):
22919 with pytest.raises(SystemExit) as excinfo:
22920 self.script()
22921 out, err = capsys.readouterr()
22922 assert not err
22923 - out = out.strip().split('\n')
22924 + out = out.strip().split("\n")
22925 assert excinfo.value.code == 0
22926 - verbose_output = '\n'.join(itertools.chain(out))
22927 + verbose_output = "\n".join(itertools.chain(out))
22928
22929 # verbose output shows more info
22930 assert len(regular_output) < len(verbose_output)
22931
22932 diff --git a/tests/test_api.py b/tests/test_api.py
22933 index f3db534b..cf546ee5 100644
22934 --- a/tests/test_api.py
22935 +++ b/tests/test_api.py
22936 @@ -8,31 +8,32 @@ from pkgcheck import objects
22937
22938
22939 class TestScanApi:
22940 -
22941 @pytest.fixture(autouse=True)
22942 def _setup(self, testconfig):
22943 - self.base_args = ['--config', testconfig]
22944 - self.scan_args = ['--config', 'no', '--cache', 'no']
22945 + self.base_args = ["--config", testconfig]
22946 + self.scan_args = ["--config", "no", "--cache", "no"]
22947
22948 def test_argparse_error(self, repo):
22949 - with pytest.raises(PkgcheckException, match='unrecognized arguments'):
22950 - scan(['-r', repo.location, '--foo'])
22951 + with pytest.raises(PkgcheckException, match="unrecognized arguments"):
22952 + scan(["-r", repo.location, "--foo"])
22953
22954 def test_no_scan_args(self):
22955 pipe = scan(base_args=self.base_args)
22956 - assert pipe.options.target_repo.repo_id == 'standalone'
22957 + assert pipe.options.target_repo.repo_id == "standalone"
22958
22959 def test_no_base_args(self, repo):
22960 - assert [] == list(scan(self.scan_args + ['-r', repo.location]))
22961 + assert [] == list(scan(self.scan_args + ["-r", repo.location]))
22962
22963 def test_keyword_import(self):
22964 """Keyword classes are importable from the top-level module."""
22965 from pkgcheck import NonsolvableDeps, Result
22966 +
22967 assert issubclass(NonsolvableDeps, Result)
22968
22969 def test_module_attributes(self):
22970 """All keyword class names are shown for the top-level module."""
22971 import pkgcheck
22972 +
22973 assert set(objects.KEYWORDS) < set(dir(pkgcheck))
22974
22975 def test_sigint_handling(self, repo):
22976 @@ -49,10 +50,10 @@ class TestScanApi:
22977
22978 def sleep():
22979 """Notify testing process then sleep."""
22980 - queue.put('ready')
22981 + queue.put("ready")
22982 time.sleep(100)
22983
22984 - with patch('pkgcheck.pipeline.Pipeline.__iter__') as fake_iter:
22985 + with patch("pkgcheck.pipeline.Pipeline.__iter__") as fake_iter:
22986 fake_iter.side_effect = partial(sleep)
22987 try:
22988 iter(scan([repo.location]))
22989 @@ -62,7 +63,7 @@ class TestScanApi:
22990 queue.put(None)
22991 sys.exit(1)
22992
22993 - mp_ctx = multiprocessing.get_context('fork')
22994 + mp_ctx = multiprocessing.get_context("fork")
22995 queue = mp_ctx.SimpleQueue()
22996 p = mp_ctx.Process(target=run, args=(queue,))
22997 p.start()
22998
22999 diff --git a/tests/test_base.py b/tests/test_base.py
23000 index 08acaf8d..7c6aa905 100644
23001 --- a/tests/test_base.py
23002 +++ b/tests/test_base.py
23003 @@ -6,7 +6,6 @@ from pkgcheck.base import ProgressManager
23004
23005
23006 class TestScope:
23007 -
23008 def test_rich_comparisons(self):
23009 assert base.commit_scope < base.repo_scope
23010 assert base.commit_scope < 0
23011 @@ -32,15 +31,14 @@ class TestScope:
23012
23013
23014 class TestProgressManager:
23015 -
23016 def test_no_output(self, capsys):
23017 # output disabled due to lower verbosity setting
23018 - with patch('sys.stdout.isatty', return_value=True):
23019 + with patch("sys.stdout.isatty", return_value=True):
23020 with ProgressManager(verbosity=-1) as progress:
23021 for x in range(10):
23022 progress(x)
23023 # output disabled due to non-tty output
23024 - with patch('sys.stdout.isatty', return_value=False):
23025 + with patch("sys.stdout.isatty", return_value=False):
23026 with ProgressManager(verbosity=1) as progress:
23027 for x in range(10):
23028 progress(x)
23029 @@ -49,20 +47,20 @@ class TestProgressManager:
23030 assert not err
23031
23032 def test_output(self, capsys):
23033 - with patch('sys.stdout.isatty', return_value=True):
23034 + with patch("sys.stdout.isatty", return_value=True):
23035 with ProgressManager(verbosity=0) as progress:
23036 for x in range(10):
23037 progress(x)
23038 out, err = capsys.readouterr()
23039 assert not out
23040 - assert not err.strip().split('\r') == list(range(10))
23041 + assert not err.strip().split("\r") == list(range(10))
23042
23043 def test_cached_output(self, capsys):
23044 - with patch('sys.stdout.isatty', return_value=True):
23045 + with patch("sys.stdout.isatty", return_value=True):
23046 with ProgressManager(verbosity=0) as progress:
23047 data = list(range(10))
23048 for x in chain.from_iterable(zip(data, data)):
23049 progress(x)
23050 out, err = capsys.readouterr()
23051 assert not out
23052 - assert not err.strip().split('\r') == list(range(10))
23053 + assert not err.strip().split("\r") == list(range(10))
23054
23055 diff --git a/tests/test_cli.py b/tests/test_cli.py
23056 index 4ad8011d..b2935b28 100644
23057 --- a/tests/test_cli.py
23058 +++ b/tests/test_cli.py
23059 @@ -6,10 +6,9 @@ from snakeoil.cli import arghparse
23060
23061
23062 class TestConfigFileParser:
23063 -
23064 @pytest.fixture(autouse=True)
23065 def _create_argparser(self, tmp_path):
23066 - self.config_file = str(tmp_path / 'config')
23067 + self.config_file = str(tmp_path / "config")
23068 self.parser = arghparse.ArgumentParser()
23069 self.namespace = arghparse.Namespace()
23070 self.config_parser = cli.ConfigFileParser(self.parser)
23071 @@ -22,65 +21,81 @@ class TestConfigFileParser:
23072
23073 def test_ignored_configs(self):
23074 # nonexistent config files are ignored
23075 - config = self.config_parser.parse_config(('foo', 'bar'))
23076 + config = self.config_parser.parse_config(("foo", "bar"))
23077 assert config.sections() == []
23078
23079 def test_bad_config_format(self, capsys):
23080 - with open(self.config_file, 'w') as f:
23081 - f.write('foobar\n')
23082 + with open(self.config_file, "w") as f:
23083 + f.write("foobar\n")
23084 with pytest.raises(SystemExit) as excinfo:
23085 self.config_parser.parse_config((self.config_file,))
23086 out, err = capsys.readouterr()
23087 assert not out
23088 - assert 'parsing config file failed:' in err
23089 + assert "parsing config file failed:" in err
23090 assert excinfo.value.code == 2
23091
23092 def test_nonexistent_config_options(self, capsys):
23093 """Nonexistent parser arguments cause errors."""
23094 - with open(self.config_file, 'w') as f:
23095 - f.write(textwrap.dedent("""
23096 - [DEFAULT]
23097 - foo=bar
23098 - """))
23099 + with open(self.config_file, "w") as f:
23100 + f.write(
23101 + textwrap.dedent(
23102 + """
23103 + [DEFAULT]
23104 + foo=bar
23105 + """
23106 + )
23107 + )
23108 with pytest.raises(SystemExit) as excinfo:
23109 self.config_parser.parse_config_options(self.namespace, configs=[self.config_file])
23110 out, err = capsys.readouterr()
23111 assert not out
23112 - assert 'failed loading config: unknown arguments: --foo=bar' in err
23113 + assert "failed loading config: unknown arguments: --foo=bar" in err
23114 assert excinfo.value.code == 2
23115
23116 def test_config_options(self):
23117 - self.parser.add_argument('--foo')
23118 - with open(self.config_file, 'w') as f:
23119 - f.write(textwrap.dedent("""
23120 - [DEFAULT]
23121 - foo=bar
23122 - """))
23123 - namespace = self.parser.parse_args(['--foo', 'foo'])
23124 - assert namespace.foo == 'foo'
23125 + self.parser.add_argument("--foo")
23126 + with open(self.config_file, "w") as f:
23127 + f.write(
23128 + textwrap.dedent(
23129 + """
23130 + [DEFAULT]
23131 + foo=bar
23132 + """
23133 + )
23134 + )
23135 + namespace = self.parser.parse_args(["--foo", "foo"])
23136 + assert namespace.foo == "foo"
23137 # config args override matching namespace attrs
23138 namespace = self.config_parser.parse_config_options(namespace, configs=[self.config_file])
23139 - assert namespace.foo == 'bar'
23140 + assert namespace.foo == "bar"
23141
23142 def test_config_checksets(self):
23143 namespace = self.parser.parse_args([])
23144 namespace.config_checksets = {}
23145
23146 # checksets section exists with no entries
23147 - with open(self.config_file, 'w') as f:
23148 - f.write(textwrap.dedent("""
23149 - [CHECKSETS]
23150 - """))
23151 + with open(self.config_file, "w") as f:
23152 + f.write(
23153 + textwrap.dedent(
23154 + """
23155 + [CHECKSETS]
23156 + """
23157 + )
23158 + )
23159 namespace = self.config_parser.parse_config_options(namespace, configs=[self.config_file])
23160 assert namespace.config_checksets == {}
23161
23162 # checksets section with entries including empty set
23163 - with open(self.config_file, 'w') as f:
23164 - f.write(textwrap.dedent("""
23165 - [CHECKSETS]
23166 - set1=keyword
23167 - set2=check,-keyword
23168 - set3=
23169 - """))
23170 + with open(self.config_file, "w") as f:
23171 + f.write(
23172 + textwrap.dedent(
23173 + """
23174 + [CHECKSETS]
23175 + set1=keyword
23176 + set2=check,-keyword
23177 + set3=
23178 + """
23179 + )
23180 + )
23181 namespace = self.config_parser.parse_config_options(namespace, configs=[self.config_file])
23182 - assert namespace.config_checksets == {'set1': ['keyword'], 'set2': ['check', '-keyword']}
23183 + assert namespace.config_checksets == {"set1": ["keyword"], "set2": ["check", "-keyword"]}
23184
23185 diff --git a/tests/test_feeds.py b/tests/test_feeds.py
23186 index a1540048..c098f060 100644
23187 --- a/tests/test_feeds.py
23188 +++ b/tests/test_feeds.py
23189 @@ -6,54 +6,52 @@ from .misc import FakePkg, Profile
23190
23191
23192 class TestQueryCacheAddon:
23193 -
23194 @pytest.fixture(autouse=True)
23195 def _setup(self, tool):
23196 self.tool = tool
23197 - self.args = ['scan']
23198 + self.args = ["scan"]
23199
23200 def test_opts(self):
23201 - for val in ('version', 'package', 'category'):
23202 - options, _ = self.tool.parse_args(self.args + ['--reset-caching-per', val])
23203 + for val in ("version", "package", "category"):
23204 + options, _ = self.tool.parse_args(self.args + ["--reset-caching-per", val])
23205 assert options.query_caching_freq == val
23206
23207 def test_default(self):
23208 options, _ = self.tool.parse_args(self.args)
23209 - assert options.query_caching_freq == 'package'
23210 + assert options.query_caching_freq == "package"
23211
23212 def test_feed(self):
23213 options, _ = self.tool.parse_args(self.args)
23214 addon = feeds.QueryCache(options)
23215 - assert addon.options.query_caching_freq == 'package'
23216 - addon.query_cache['foo'] = 'bar'
23217 - pkg = FakePkg('dev-util/diffball-0.5')
23218 + assert addon.options.query_caching_freq == "package"
23219 + addon.query_cache["foo"] = "bar"
23220 + pkg = FakePkg("dev-util/diffball-0.5")
23221 addon.feed(pkg)
23222 assert not addon.query_cache
23223
23224
23225 class TestEvaluateDepSet:
23226 -
23227 @pytest.fixture(autouse=True)
23228 def _setup(self, tool, repo, tmp_path):
23229 self.tool = tool
23230 self.repo = repo
23231 - self.args = ['scan', '--cache-dir', str(tmp_path), '--repo', repo.location]
23232 + self.args = ["scan", "--cache-dir", str(tmp_path), "--repo", repo.location]
23233 profiles = [
23234 - Profile('1', 'x86'),
23235 - Profile('2', 'x86'),
23236 - Profile('3', 'ppc'),
23237 + Profile("1", "x86"),
23238 + Profile("2", "x86"),
23239 + Profile("3", "ppc"),
23240 ]
23241 self.repo.create_profiles(profiles)
23242 - self.repo.arches.update(['amd64', 'ppc', 'x86'])
23243 + self.repo.arches.update(["amd64", "ppc", "x86"])
23244
23245 - with open(pjoin(self.repo.path, 'profiles', '1', 'package.use.stable.mask'), 'w') as f:
23246 - f.write('dev-util/diffball foo')
23247 - with open(pjoin(self.repo.path, 'profiles', '2', 'package.use.stable.force'), 'w') as f:
23248 - f.write('=dev-util/diffball-0.1 bar foo')
23249 - with open(pjoin(self.repo.path, 'profiles', '3', 'package.use.stable.force'), 'w') as f:
23250 - f.write('dev-util/diffball bar foo')
23251 + with open(pjoin(self.repo.path, "profiles", "1", "package.use.stable.mask"), "w") as f:
23252 + f.write("dev-util/diffball foo")
23253 + with open(pjoin(self.repo.path, "profiles", "2", "package.use.stable.force"), "w") as f:
23254 + f.write("=dev-util/diffball-0.1 bar foo")
23255 + with open(pjoin(self.repo.path, "profiles", "3", "package.use.stable.force"), "w") as f:
23256 + f.write("dev-util/diffball bar foo")
23257
23258 - options, _ = self.tool.parse_args(self.args + ['--profiles=1,2,3'])
23259 + options, _ = self.tool.parse_args(self.args + ["--profiles=1,2,3"])
23260 profile_addon = addons.init_addon(addons.profiles.ProfileAddon, options)
23261 self.addon = feeds.EvaluateDepSet(options, profile_addon=profile_addon)
23262
23263 @@ -72,35 +70,45 @@ class TestEvaluateDepSet:
23264 l = get_rets("0.0.2", "depend")
23265 assert len(l) == 1, f"must collapse all profiles down to one run: got {l!r}"
23266 assert len(l[0][1]) == 4, "must have four runs, (arch and ~arch for each profile)"
23267 - assert sorted(set(x.name for x in l[0][1])) == ['1', '2'], f"must have two profiles: got {l!r}"
23268 - assert l[0][1][0].key == 'x86'
23269 - assert l[0][1][1].key == 'x86'
23270 + assert sorted(set(x.name for x in l[0][1])) == [
23271 + "1",
23272 + "2",
23273 + ], f"must have two profiles: got {l!r}"
23274 + assert l[0][1][0].key == "x86"
23275 + assert l[0][1][1].key == "x86"
23276
23277 l = get_rets(
23278 - "0.1", "rdepend",
23279 + "0.1",
23280 + "rdepend",
23281 RDEPEND="x? ( dev-util/confcache ) foo? ( dev-util/foo ) "
23282 - "bar? ( dev-util/bar ) !bar? ( dev-util/nobar ) x11-libs/xserver"
23283 + "bar? ( dev-util/bar ) !bar? ( dev-util/nobar ) x11-libs/xserver",
23284 )
23285
23286 assert len(l) == 3, f"must collapse all profiles down to 3 runs: got {l!r}"
23287
23288 # ordering is potentially random; thus pull out which depset result is
23289 # which based upon profile
23290 - l1 = [x for x in l if x[1][0].name == '1'][0]
23291 - l2 = [x for x in l if x[1][0].name == '2'][0]
23292 -
23293 - assert (
23294 - set(str(l1[0]).split()) ==
23295 - {'dev-util/confcache', 'dev-util/bar', 'dev-util/nobar', 'x11-libs/xserver'})
23296 -
23297 - assert (
23298 - set(str(l2[0]).split()) ==
23299 - {'dev-util/confcache', 'dev-util/foo', 'dev-util/bar', 'x11-libs/xserver'})
23300 + l1 = [x for x in l if x[1][0].name == "1"][0]
23301 + l2 = [x for x in l if x[1][0].name == "2"][0]
23302 +
23303 + assert set(str(l1[0]).split()) == {
23304 + "dev-util/confcache",
23305 + "dev-util/bar",
23306 + "dev-util/nobar",
23307 + "x11-libs/xserver",
23308 + }
23309 +
23310 + assert set(str(l2[0]).split()) == {
23311 + "dev-util/confcache",
23312 + "dev-util/foo",
23313 + "dev-util/bar",
23314 + "x11-libs/xserver",
23315 + }
23316
23317 # test feed wiping, using an empty depset; if it didn't clear, then
23318 # results from a pkg/attr tuple from above would come through rather
23319 # then an empty.
23320 - pkg = FakePkg('dev-util/diffball-0.5')
23321 + pkg = FakePkg("dev-util/diffball-0.5")
23322 self.addon.feed(pkg)
23323 l = get_rets("0.1", "rdepend")
23324 assert len(l) == 1, f"feed didn't clear the cache- should be len 1: {l!r}"
23325 @@ -110,20 +118,25 @@ class TestEvaluateDepSet:
23326 # ensure it handles arch right.
23327 l = get_rets("0", "depend", KEYWORDS="ppc x86")
23328 assert len(l) == 1, f"should be len 1, got {l!r}"
23329 - assert sorted(set(x.name for x in l[0][1])) == ["1", "2", "3"], (
23330 - f"should have three profiles of 1-3, got {l[0][1]!r}")
23331 + assert sorted(set(x.name for x in l[0][1])) == [
23332 + "1",
23333 + "2",
23334 + "3",
23335 + ], f"should have three profiles of 1-3, got {l[0][1]!r}"
23336
23337 # ensure it's caching profile collapsing, iow, keywords for same ver
23338 # that's partially cached (single attr at least) should *not* change
23339 # things.
23340
23341 l = get_rets("0", "depend", KEYWORDS="ppc")
23342 - assert sorted(set(x.name for x in l[0][1])) == ['1', '2', '3'], (
23343 + assert sorted(set(x.name for x in l[0][1])) == ["1", "2", "3"], (
23344 f"should have 3 profiles, got {l[0][1]!r}\nthis indicates it's "
23345 - "re-identifying profiles every invocation, which is unwarranted ")
23346 + "re-identifying profiles every invocation, which is unwarranted "
23347 + )
23348
23349 - l = get_rets("1", "depend", KEYWORDS="ppc x86",
23350 - DEPEND="ppc? ( dev-util/ppc ) !ppc? ( dev-util/x86 )")
23351 + l = get_rets(
23352 + "1", "depend", KEYWORDS="ppc x86", DEPEND="ppc? ( dev-util/ppc ) !ppc? ( dev-util/x86 )"
23353 + )
23354 assert len(l) == 2, f"should be len 2, got {l!r}"
23355
23356 # same issue, figure out what is what
23357
23358 diff --git a/tests/test_reporters.py b/tests/test_reporters.py
23359 index 462cc44e..4a0cda39 100644
23360 --- a/tests/test_reporters.py
23361 +++ b/tests/test_reporters.py
23362 @@ -16,15 +16,15 @@ class BaseReporter:
23363
23364 @pytest.fixture(autouse=True)
23365 def _setup(self):
23366 - self.log_warning = profiles.ProfileWarning(Exception('profile warning'))
23367 - self.log_error = profiles.ProfileError(Exception('profile error'))
23368 - pkg = FakePkg('dev-libs/foo-0')
23369 - self.commit_result = git.InvalidCommitMessage('no commit message', commit='8d86269bb4c7')
23370 - self.category_result = metadata_xml.CatMissingMetadataXml('metadata.xml', pkg=pkg)
23371 - self.package_result = pkgdir.InvalidPN(('bar', 'baz'), pkg=pkg)
23372 - self.versioned_result = metadata.BadFilename(('0.tar.gz', 'foo.tar.gz'), pkg=pkg)
23373 - self.line_result = codingstyle.ReadonlyVariable('P', line='P=6', lineno=7, pkg=pkg)
23374 - self.lines_result = codingstyle.EbuildUnquotedVariable('D', lines=(5, 7), pkg=pkg)
23375 + self.log_warning = profiles.ProfileWarning(Exception("profile warning"))
23376 + self.log_error = profiles.ProfileError(Exception("profile error"))
23377 + pkg = FakePkg("dev-libs/foo-0")
23378 + self.commit_result = git.InvalidCommitMessage("no commit message", commit="8d86269bb4c7")
23379 + self.category_result = metadata_xml.CatMissingMetadataXml("metadata.xml", pkg=pkg)
23380 + self.package_result = pkgdir.InvalidPN(("bar", "baz"), pkg=pkg)
23381 + self.versioned_result = metadata.BadFilename(("0.tar.gz", "foo.tar.gz"), pkg=pkg)
23382 + self.line_result = codingstyle.ReadonlyVariable("P", line="P=6", lineno=7, pkg=pkg)
23383 + self.lines_result = codingstyle.EbuildUnquotedVariable("D", lines=(5, 7), pkg=pkg)
23384
23385 def mk_reporter(self, **kwargs):
23386 out = PlainTextFormatter(sys.stdout)
23387 @@ -49,106 +49,121 @@ class BaseReporter:
23388 class TestStrReporter(BaseReporter):
23389
23390 reporter_cls = reporters.StrReporter
23391 - add_report_output = dedent("""\
23392 - commit 8d86269bb4c7: no commit message
23393 - profile warning
23394 - dev-libs: category is missing metadata.xml
23395 - dev-libs/foo: invalid package names: [ bar, baz ]
23396 - dev-libs/foo-0: bad filenames: [ 0.tar.gz, foo.tar.gz ]
23397 - dev-libs/foo-0: read-only variable 'P' assigned, line 7: P=6
23398 - dev-libs/foo-0: unquoted variable D on lines: 5, 7
23399 - """)
23400 + add_report_output = dedent(
23401 + """\
23402 + commit 8d86269bb4c7: no commit message
23403 + profile warning
23404 + dev-libs: category is missing metadata.xml
23405 + dev-libs/foo: invalid package names: [ bar, baz ]
23406 + dev-libs/foo-0: bad filenames: [ 0.tar.gz, foo.tar.gz ]
23407 + dev-libs/foo-0: read-only variable 'P' assigned, line 7: P=6
23408 + dev-libs/foo-0: unquoted variable D on lines: 5, 7
23409 + """
23410 + )
23411
23412
23413 class TestFancyReporter(BaseReporter):
23414
23415 reporter_cls = reporters.FancyReporter
23416 - add_report_output = dedent("""\
23417 - commit
23418 - InvalidCommitMessage: commit 8d86269bb4c7: no commit message
23419 + add_report_output = dedent(
23420 + """\
23421 + commit
23422 + InvalidCommitMessage: commit 8d86269bb4c7: no commit message
23423
23424 - profiles
23425 - ProfileWarning: profile warning
23426 + profiles
23427 + ProfileWarning: profile warning
23428
23429 - dev-libs
23430 - CatMissingMetadataXml: category is missing metadata.xml
23431 + dev-libs
23432 + CatMissingMetadataXml: category is missing metadata.xml
23433
23434 - dev-libs/foo
23435 - InvalidPN: invalid package names: [ bar, baz ]
23436 - BadFilename: version 0: bad filenames: [ 0.tar.gz, foo.tar.gz ]
23437 - ReadonlyVariable: version 0: read-only variable 'P' assigned, line 7: P=6
23438 - UnquotedVariable: version 0: unquoted variable D on lines: 5, 7
23439 - """)
23440 + dev-libs/foo
23441 + InvalidPN: invalid package names: [ bar, baz ]
23442 + BadFilename: version 0: bad filenames: [ 0.tar.gz, foo.tar.gz ]
23443 + ReadonlyVariable: version 0: read-only variable 'P' assigned, line 7: P=6
23444 + UnquotedVariable: version 0: unquoted variable D on lines: 5, 7
23445 + """
23446 + )
23447
23448
23449 class TestJsonReporter(BaseReporter):
23450
23451 reporter_cls = reporters.JsonReporter
23452 - add_report_output = dedent("""\
23453 - {"_style": {"InvalidCommitMessage": "commit 8d86269bb4c7: no commit message"}}
23454 - {"_warning": {"ProfileWarning": "profile warning"}}
23455 - {"dev-libs": {"_error": {"CatMissingMetadataXml": "category is missing metadata.xml"}}}
23456 - {"dev-libs": {"foo": {"_error": {"InvalidPN": "invalid package names: [ bar, baz ]"}}}}
23457 - {"dev-libs": {"foo": {"0": {"_warning": {"BadFilename": "bad filenames: [ 0.tar.gz, foo.tar.gz ]"}}}}}
23458 - {"dev-libs": {"foo": {"0": {"_warning": {"ReadonlyVariable": "read-only variable 'P' assigned, line 7: P=6"}}}}}
23459 - {"dev-libs": {"foo": {"0": {"_warning": {"UnquotedVariable": "unquoted variable D on lines: 5, 7"}}}}}
23460 - """)
23461 + add_report_output = dedent(
23462 + """\
23463 + {"_style": {"InvalidCommitMessage": "commit 8d86269bb4c7: no commit message"}}
23464 + {"_warning": {"ProfileWarning": "profile warning"}}
23465 + {"dev-libs": {"_error": {"CatMissingMetadataXml": "category is missing metadata.xml"}}}
23466 + {"dev-libs": {"foo": {"_error": {"InvalidPN": "invalid package names: [ bar, baz ]"}}}}
23467 + {"dev-libs": {"foo": {"0": {"_warning": {"BadFilename": "bad filenames: [ 0.tar.gz, foo.tar.gz ]"}}}}}
23468 + {"dev-libs": {"foo": {"0": {"_warning": {"ReadonlyVariable": "read-only variable 'P' assigned, line 7: P=6"}}}}}
23469 + {"dev-libs": {"foo": {"0": {"_warning": {"UnquotedVariable": "unquoted variable D on lines: 5, 7"}}}}}
23470 + """
23471 + )
23472
23473
23474 class TestXmlReporter(BaseReporter):
23475
23476 reporter_cls = reporters.XmlReporter
23477 - add_report_output = dedent("""\
23478 - <checks>
23479 - <result><class>InvalidCommitMessage</class><msg>commit 8d86269bb4c7: no commit message</msg></result>
23480 - <result><class>ProfileWarning</class><msg>profile warning</msg></result>
23481 - <result><category>dev-libs</category><class>CatMissingMetadataXml</class><msg>category is missing metadata.xml</msg></result>
23482 - <result><category>dev-libs</category><package>foo</package><class>InvalidPN</class><msg>invalid package names: [ bar, baz ]</msg></result>
23483 - <result><category>dev-libs</category><package>foo</package><version>0</version><class>BadFilename</class><msg>bad filenames: [ 0.tar.gz, foo.tar.gz ]</msg></result>
23484 - <result><category>dev-libs</category><package>foo</package><version>0</version><class>ReadonlyVariable</class><msg>read-only variable 'P' assigned, line 7: P=6</msg></result>
23485 - <result><category>dev-libs</category><package>foo</package><version>0</version><class>UnquotedVariable</class><msg>unquoted variable D on lines: 5, 7</msg></result>
23486 - </checks>
23487 - """)
23488 + add_report_output = dedent(
23489 + """\
23490 + <checks>
23491 + <result><class>InvalidCommitMessage</class><msg>commit 8d86269bb4c7: no commit message</msg></result>
23492 + <result><class>ProfileWarning</class><msg>profile warning</msg></result>
23493 + <result><category>dev-libs</category><class>CatMissingMetadataXml</class><msg>category is missing metadata.xml</msg></result>
23494 + <result><category>dev-libs</category><package>foo</package><class>InvalidPN</class><msg>invalid package names: [ bar, baz ]</msg></result>
23495 + <result><category>dev-libs</category><package>foo</package><version>0</version><class>BadFilename</class><msg>bad filenames: [ 0.tar.gz, foo.tar.gz ]</msg></result>
23496 + <result><category>dev-libs</category><package>foo</package><version>0</version><class>ReadonlyVariable</class><msg>read-only variable 'P' assigned, line 7: P=6</msg></result>
23497 + <result><category>dev-libs</category><package>foo</package><version>0</version><class>UnquotedVariable</class><msg>unquoted variable D on lines: 5, 7</msg></result>
23498 + </checks>
23499 + """
23500 + )
23501
23502
23503 class TestCsvReporter(BaseReporter):
23504
23505 reporter_cls = reporters.CsvReporter
23506 - add_report_output = dedent("""\
23507 - ,,,commit 8d86269bb4c7: no commit message
23508 - ,,,profile warning
23509 - dev-libs,,,category is missing metadata.xml
23510 - dev-libs,foo,,"invalid package names: [ bar, baz ]"
23511 - dev-libs,foo,0,"bad filenames: [ 0.tar.gz, foo.tar.gz ]"
23512 - dev-libs,foo,0,"read-only variable 'P' assigned, line 7: P=6"
23513 - dev-libs,foo,0,"unquoted variable D on lines: 5, 7"
23514 - """)
23515 + add_report_output = dedent(
23516 + """\
23517 + ,,,commit 8d86269bb4c7: no commit message
23518 + ,,,profile warning
23519 + dev-libs,,,category is missing metadata.xml
23520 + dev-libs,foo,,"invalid package names: [ bar, baz ]"
23521 + dev-libs,foo,0,"bad filenames: [ 0.tar.gz, foo.tar.gz ]"
23522 + dev-libs,foo,0,"read-only variable 'P' assigned, line 7: P=6"
23523 + dev-libs,foo,0,"unquoted variable D on lines: 5, 7"
23524 + """
23525 + )
23526
23527
23528 class TestFormatReporter(BaseReporter):
23529
23530 - reporter_cls = partial(reporters.FormatReporter, '')
23531 + reporter_cls = partial(reporters.FormatReporter, "")
23532
23533 def test_add_report(self, capsys):
23534 for format_str, expected in (
23535 - ('r', 'r\n' * 7),
23536 - ('{category}', 'dev-libs\n' * 5),
23537 - ('{category}/{package}', '/\n/\ndev-libs/\n' + 'dev-libs/foo\n' * 4),
23538 - ('{category}/{package}-{version}', '/-\n/-\ndev-libs/-\ndev-libs/foo-\n' + 'dev-libs/foo-0\n' * 3),
23539 - ('{name}',
23540 - 'InvalidCommitMessage\nProfileWarning\nCatMissingMetadataXml\nInvalidPN\nBadFilename\nReadonlyVariable\nUnquotedVariable\n'),
23541 - ('{foo}', ''),
23542 - ):
23543 + ("r", "r\n" * 7),
23544 + ("{category}", "dev-libs\n" * 5),
23545 + ("{category}/{package}", "/\n/\ndev-libs/\n" + "dev-libs/foo\n" * 4),
23546 + (
23547 + "{category}/{package}-{version}",
23548 + "/-\n/-\ndev-libs/-\ndev-libs/foo-\n" + "dev-libs/foo-0\n" * 3,
23549 + ),
23550 + (
23551 + "{name}",
23552 + "InvalidCommitMessage\nProfileWarning\nCatMissingMetadataXml\nInvalidPN\nBadFilename\nReadonlyVariable\nUnquotedVariable\n",
23553 + ),
23554 + ("{foo}", ""),
23555 + ):
23556 self.reporter_cls = partial(reporters.FormatReporter, format_str)
23557 self.add_report_output = expected
23558 super().test_add_report(capsys)
23559
23560 def test_unsupported_index(self, capsys):
23561 - self.reporter_cls = partial(reporters.FormatReporter, '{0}')
23562 + self.reporter_cls = partial(reporters.FormatReporter, "{0}")
23563 with self.mk_reporter() as reporter:
23564 with pytest.raises(base.PkgcheckUserException) as excinfo:
23565 reporter.report(self.versioned_result)
23566 - assert 'integer indexes are not supported' in str(excinfo.value)
23567 + assert "integer indexes are not supported" in str(excinfo.value)
23568
23569
23570 class TestJsonStream(BaseReporter):
23571 @@ -158,8 +173,13 @@ class TestJsonStream(BaseReporter):
23572 def test_add_report(self, capsys):
23573 with self.mk_reporter() as reporter:
23574 for result in (
23575 - self.log_warning, self.log_error, self.commit_result,
23576 - self.category_result, self.package_result, self.versioned_result):
23577 + self.log_warning,
23578 + self.log_error,
23579 + self.commit_result,
23580 + self.category_result,
23581 + self.package_result,
23582 + self.versioned_result,
23583 + ):
23584 reporter.report(result)
23585 out, err = capsys.readouterr()
23586 assert not err
23587 @@ -169,28 +189,30 @@ class TestJsonStream(BaseReporter):
23588 def test_deserialize_error(self):
23589 with self.mk_reporter() as reporter:
23590 # deserializing non-result objects raises exception
23591 - obj = reporter.to_json(['result'])
23592 - with pytest.raises(reporters.DeserializationError, match='failed loading'):
23593 + obj = reporter.to_json(["result"])
23594 + with pytest.raises(reporters.DeserializationError, match="failed loading"):
23595 next(reporter.from_iter([obj]))
23596
23597 # deserializing mangled JSON result objects raises exception
23598 obj = reporter.to_json(self.versioned_result)
23599 - del obj['__class__']
23600 + del obj["__class__"]
23601 json_obj = json.dumps(obj)
23602 - with pytest.raises(reporters.DeserializationError, match='unknown result'):
23603 + with pytest.raises(reporters.DeserializationError, match="unknown result"):
23604 next(reporter.from_iter([json_obj]))
23605
23606
23607 class TestFlycheckReporter(BaseReporter):
23608
23609 reporter_cls = reporters.FlycheckReporter
23610 - add_report_output = dedent("""\
23611 - -.ebuild:0:style:InvalidCommitMessage: commit 8d86269bb4c7: no commit message
23612 - -.ebuild:0:warning:ProfileWarning: profile warning
23613 - -.ebuild:0:error:CatMissingMetadataXml: category is missing metadata.xml
23614 - foo-.ebuild:0:error:InvalidPN: invalid package names: [ bar, baz ]
23615 - foo-0.ebuild:0:warning:BadFilename: bad filenames: [ 0.tar.gz, foo.tar.gz ]
23616 - foo-0.ebuild:7:warning:ReadonlyVariable: read-only variable 'P' assigned, line 7: P=6
23617 - foo-0.ebuild:5:warning:UnquotedVariable: unquoted variable D
23618 - foo-0.ebuild:7:warning:UnquotedVariable: unquoted variable D
23619 - """)
23620 + add_report_output = dedent(
23621 + """\
23622 + -.ebuild:0:style:InvalidCommitMessage: commit 8d86269bb4c7: no commit message
23623 + -.ebuild:0:warning:ProfileWarning: profile warning
23624 + -.ebuild:0:error:CatMissingMetadataXml: category is missing metadata.xml
23625 + foo-.ebuild:0:error:InvalidPN: invalid package names: [ bar, baz ]
23626 + foo-0.ebuild:0:warning:BadFilename: bad filenames: [ 0.tar.gz, foo.tar.gz ]
23627 + foo-0.ebuild:7:warning:ReadonlyVariable: read-only variable 'P' assigned, line 7: P=6
23628 + foo-0.ebuild:5:warning:UnquotedVariable: unquoted variable D
23629 + foo-0.ebuild:7:warning:UnquotedVariable: unquoted variable D
23630 + """
23631 + )