author     Arthur Zamarin <arthurzam@gentoo.org>  2022-12-29 19:51:23 +0200
committer  Arthur Zamarin <arthurzam@gentoo.org>  2022-12-29 19:51:23 +0200
commit     ae7dd4184f63185880738c5133f326fe47c6606a (patch)
tree       d6d0e55d9684fef5bca6a9035a37763dca8b8402
parent     bump snakeoil minimal version (diff)
download   pkgcheck-ae7dd4184f63185880738c5133f326fe47c6606a.tar.gz
           pkgcheck-ae7dd4184f63185880738c5133f326fe47c6606a.tar.bz2
           pkgcheck-ae7dd4184f63185880738c5133f326fe47c6606a.zip
format using black
Signed-off-by: Arthur Zamarin <arthurzam@gentoo.org>
-rw-r--r--  Makefile  4
-rwxr-xr-x  data/share/pkgcheck/ci.py  10
-rw-r--r--  doc/conf.py  193
-rwxr-xr-x  doc/generate/pkgcheck/checks.py  37
-rwxr-xr-x  doc/generate/pkgcheck/keywords.py  25
-rwxr-xr-x  doc/generate/pkgcheck/reporters.py  16
-rw-r--r--  pyproject.toml  3
-rw-r--r--  setup.py  139
-rw-r--r--  src/pkgcheck/__init__.py  10
-rw-r--r--  src/pkgcheck/addons/__init__.py  125
-rw-r--r--  src/pkgcheck/addons/caches.py  33
-rw-r--r--  src/pkgcheck/addons/eclass.py  15
-rw-r--r--  src/pkgcheck/addons/git.py  253
-rw-r--r--  src/pkgcheck/addons/net.py  12
-rw-r--r--  src/pkgcheck/addons/profiles.py  236
-rw-r--r--  src/pkgcheck/api.py  4
-rw-r--r--  src/pkgcheck/base.py  52
-rw-r--r--  src/pkgcheck/bash/__init__.py  32
-rw-r--r--  src/pkgcheck/checks/__init__.py  26
-rw-r--r--  src/pkgcheck/checks/acct.py  53
-rw-r--r--  src/pkgcheck/checks/cleanup.py  44
-rw-r--r--  src/pkgcheck/checks/codingstyle.py  554
-rw-r--r--  src/pkgcheck/checks/dropped_keywords.py  6
-rw-r--r--  src/pkgcheck/checks/eclass.py  132
-rw-r--r--  src/pkgcheck/checks/git.py  256
-rw-r--r--  src/pkgcheck/checks/glsa.py  20
-rw-r--r--  src/pkgcheck/checks/header.py  78
-rw-r--r--  src/pkgcheck/checks/imlate.py  53
-rw-r--r--  src/pkgcheck/checks/metadata.py  540
-rw-r--r--  src/pkgcheck/checks/metadata_xml.py  249
-rw-r--r--  src/pkgcheck/checks/network.py  144
-rw-r--r--  src/pkgcheck/checks/overlays.py  36
-rw-r--r--  src/pkgcheck/checks/perl.py  36
-rw-r--r--  src/pkgcheck/checks/pkgdir.py  89
-rw-r--r--  src/pkgcheck/checks/profiles.py  208
-rw-r--r--  src/pkgcheck/checks/python.py  238
-rw-r--r--  src/pkgcheck/checks/repo.py  11
-rw-r--r--  src/pkgcheck/checks/repo_metadata.py  165
-rw-r--r--  src/pkgcheck/checks/reserved.py  46
-rw-r--r--  src/pkgcheck/checks/stablereq.py  22
-rw-r--r--  src/pkgcheck/checks/unstable_only.py  14
-rw-r--r--  src/pkgcheck/checks/visibility.py  134
-rw-r--r--  src/pkgcheck/checks/whitespace.py  81
-rw-r--r--  src/pkgcheck/cli.py  21
-rw-r--r--  src/pkgcheck/const.py  29
-rw-r--r--  src/pkgcheck/feeds.py  24
-rw-r--r--  src/pkgcheck/log.py  2
-rw-r--r--  src/pkgcheck/objects.py  46
-rw-r--r--  src/pkgcheck/packages.py  25
-rw-r--r--  src/pkgcheck/pipeline.py  42
-rw-r--r--  src/pkgcheck/reporters.py  103
-rw-r--r--  src/pkgcheck/results.py  57
-rw-r--r--  src/pkgcheck/runners.py  11
-rwxr-xr-x  src/pkgcheck/scripts/__init__.py  22
-rw-r--r--  src/pkgcheck/scripts/argparse_actions.py  104
-rw-r--r--  src/pkgcheck/scripts/argparsers.py  49
-rw-r--r--  src/pkgcheck/scripts/pkgcheck.py  3
-rw-r--r--  src/pkgcheck/scripts/pkgcheck_cache.py  51
-rw-r--r--  src/pkgcheck/scripts/pkgcheck_ci.py  6
-rw-r--r--  src/pkgcheck/scripts/pkgcheck_replay.py  18
-rw-r--r--  src/pkgcheck/scripts/pkgcheck_scan.py  212
-rw-r--r--  src/pkgcheck/scripts/pkgcheck_show.py  117
-rw-r--r--  src/pkgcheck/sources.py  60
-rw-r--r--  src/pkgcheck/utils.py  15
-rw-r--r--  testdata/repos/network/FetchablesUrlCheck/DeadUrl/responses.py  4
-rw-r--r--  testdata/repos/network/FetchablesUrlCheck/HttpsUrlAvailable/responses.py  8
-rw-r--r--  testdata/repos/network/FetchablesUrlCheck/RedirectedUrl/responses.py  10
-rw-r--r--  testdata/repos/network/FetchablesUrlCheck/SSLCertificateError/responses.py  2
-rw-r--r--  testdata/repos/network/HomepageUrlCheck/DeadUrl-connection-error/responses.py  2
-rw-r--r--  testdata/repos/network/HomepageUrlCheck/DeadUrl/responses.py  4
-rw-r--r--  testdata/repos/network/HomepageUrlCheck/HttpsUrlAvailable/responses.py  8
-rw-r--r--  testdata/repos/network/HomepageUrlCheck/RedirectedUrl/responses.py  10
-rw-r--r--  testdata/repos/network/HomepageUrlCheck/SSLCertificateError/responses.py  2
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-bitbucket/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-cpan/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-cran/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-ctan/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-freedesktop-gitlab/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-gentoo/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-gnome-gitlab/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-hackage/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-launchpad/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-osdn/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-pecl/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-pypi/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-rubygems/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah-nongnu/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-sourceforge/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-sourcehut/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl-vim/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/DeadUrl/responses.py  4
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/HttpsUrlAvailable/responses.py  8
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/RedirectedUrl/responses.py  11
-rw-r--r--  testdata/repos/network/MetadataUrlCheck/SSLCertificateError/responses.py  2
-rw-r--r--  tests/addons/test_addons.py  271
-rw-r--r--  tests/addons/test_eclass.py  122
-rw-r--r--  tests/addons/test_git.py  525
-rw-r--r--  tests/checks/test_acct.py  118
-rw-r--r--  tests/checks/test_all.py  61
-rw-r--r--  tests/checks/test_cleanup.py  70
-rw-r--r--  tests/checks/test_codingstyle.py  340
-rw-r--r--  tests/checks/test_dropped_keywords.py  50
-rw-r--r--  tests/checks/test_git.py  559
-rw-r--r--  tests/checks/test_glsa.py  36
-rw-r--r--  tests/checks/test_header.py  48
-rw-r--r--  tests/checks/test_imlate.py  111
-rw-r--r--  tests/checks/test_metadata.py  839
-rw-r--r--  tests/checks/test_network.py  85
-rw-r--r--  tests/checks/test_perl.py  50
-rw-r--r--  tests/checks/test_pkgdir.py  335
-rw-r--r--  tests/checks/test_python.py  603
-rw-r--r--  tests/checks/test_repo.py  85
-rw-r--r--  tests/checks/test_repo_metadata.py  95
-rw-r--r--  tests/checks/test_stablereq.py  151
-rw-r--r--  tests/checks/test_whitespace.py  35
-rw-r--r--  tests/conftest.py  50
-rw-r--r--  tests/misc.py  65
-rw-r--r--  tests/scripts/test_argparse_actions.py  231
-rw-r--r--  tests/scripts/test_pkgcheck.py  12
-rw-r--r--  tests/scripts/test_pkgcheck_cache.py  58
-rw-r--r--  tests/scripts/test_pkgcheck_ci.py  38
-rw-r--r--  tests/scripts/test_pkgcheck_replay.py  41
-rw-r--r--  tests/scripts/test_pkgcheck_scan.py  480
-rw-r--r--  tests/scripts/test_pkgcheck_show.py  64
-rw-r--r--  tests/test_api.py  21
-rw-r--r--  tests/test_base.py  14
-rw-r--r--  tests/test_cli.py  81
-rw-r--r--  tests/test_feeds.py  99
-rw-r--r--  tests/test_reporters.py  196
130 files changed, 6347 insertions, 5290 deletions
diff --git a/Makefile b/Makefile
index 736ed18b..1d48fcb2 100644
--- a/Makefile
+++ b/Makefile
@@ -12,3 +12,7 @@ sdist wheel:
.PHONY: clean
clean:
$(RM) -r build doc/man/pkgcheck doc/generated dist
+
+.PHONY: format
+format:
+ $(PYTHON) -m black .
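(The new target just runs black over the whole tree; the equivalent direct invocation from the repository root, a minimal sketch assuming black is installed in the active environment, is:

    python -m black .

as the target body above shows.)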
diff --git a/data/share/pkgcheck/ci.py b/data/share/pkgcheck/ci.py
index c9f438cd..9920a910 100755
--- a/data/share/pkgcheck/ci.py
+++ b/data/share/pkgcheck/ci.py
@@ -3,13 +3,13 @@
import json
import urllib.request
-JSON_URL = 'https://raw.githubusercontent.com/mgorny/pkgcheck2html/master/pkgcheck2html.conf.json'
+JSON_URL = "https://raw.githubusercontent.com/mgorny/pkgcheck2html/master/pkgcheck2html.conf.json"
with urllib.request.urlopen(JSON_URL) as f:
ci_data = json.loads(f.read())
-with open('pkgcheck.conf', 'w') as f:
- f.write('[CHECKSETS]\nGentooCI =\n')
+with open("pkgcheck.conf", "w") as f:
+ f.write("[CHECKSETS]\nGentooCI =\n")
for k, v in sorted(ci_data.items()):
- if v == 'err':
- f.write(f' {k}\n')
+ if v == "err":
+ f.write(f" {k}\n")
diff --git a/doc/conf.py b/doc/conf.py
index 440c6603..84aae9ea 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -16,208 +16,205 @@
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
+# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
- 'sphinx.ext.autodoc',
- 'sphinx.ext.autosummary',
- 'sphinx.ext.autosectionlabel',
- 'sphinx.ext.doctest',
- 'sphinx.ext.extlinks',
- 'sphinx.ext.intersphinx',
- 'sphinx.ext.todo',
- 'sphinx.ext.coverage',
- 'sphinx.ext.ifconfig',
- 'sphinx.ext.viewcode',
- 'snakeoil.dist.sphinxext',
+ "sphinx.ext.autodoc",
+ "sphinx.ext.autosummary",
+ "sphinx.ext.autosectionlabel",
+ "sphinx.ext.doctest",
+ "sphinx.ext.extlinks",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.todo",
+ "sphinx.ext.coverage",
+ "sphinx.ext.ifconfig",
+ "sphinx.ext.viewcode",
+ "snakeoil.dist.sphinxext",
]
# Add any paths that contain templates here, relative to this directory.
-#templates_path = ['_templates']
+# templates_path = ['_templates']
# The suffix of source filenames.
-source_suffix = '.rst'
+source_suffix = ".rst"
# The encoding of source files.
-#source_encoding = 'utf-8-sig'
+# source_encoding = 'utf-8-sig'
# The master toctree document.
-master_doc = 'index'
+master_doc = "index"
# General information about the project.
-project = 'pkgcheck'
-authors = ''
-copyright = '2006-2022, pkgcheck contributors'
+project = "pkgcheck"
+authors = ""
+copyright = "2006-2022, pkgcheck contributors"
# version is set by snakeoil extension
-release = 'master'
+release = "master"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
-#language = None
+# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
-#today = ''
+# today = ''
# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
-exclude_patterns = ['_build', 'generated']
+exclude_patterns = ["_build", "generated"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
-#default_role = None
+# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
+# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
-#add_module_names = True
+# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
-#show_authors = False
+# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
+# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
-#keep_warnings = False
+# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
-html_theme = 'default'
+html_theme = "default"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
-#html_theme_options = {}
+# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
+# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
-#html_title = None
+# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
+# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
-#html_logo = None
+# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
-#html_favicon = None
+# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-#html_static_path = ['_static']
+# html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
-#html_extra_path = []
+# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
-#html_use_smartypants = True
+# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
-#html_additional_pages = {}
+# html_additional_pages = {}
# If false, no module index is generated.
-#html_domain_indices = True
+# html_domain_indices = True
# If false, no index is generated.
-#html_use_index = True
+# html_use_index = True
# If true, the index is split into individual pages for each letter.
-#html_split_index = False
+# html_split_index = False
# If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
+# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
+# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
+# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
+# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
+# html_file_suffix = None
# Output file base name for HTML help builder.
-htmlhelp_basename = 'pkgcheckdoc'
+htmlhelp_basename = "pkgcheckdoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-#'papersize': 'letterpaper',
-
-# The font size ('10pt', '11pt' or '12pt').
-#'pointsize': '10pt',
-
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
+ # The paper size ('letterpaper' or 'a4paper').
+ #'papersize': 'letterpaper',
+ # The font size ('10pt', '11pt' or '12pt').
+ #'pointsize': '10pt',
+ # Additional stuff for the LaTeX preamble.
+ #'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
- ('index', 'pkgcheck.tex', 'pkgcheck Documentation',
- authors, 'manual'),
+ ("index", "pkgcheck.tex", "pkgcheck Documentation", authors, "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
-#latex_logo = None
+# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
-#latex_use_parts = False
+# latex_use_parts = False
# If true, show page references after internal links.
-#latex_show_pagerefs = False
+# latex_show_pagerefs = False
# If true, show URL addresses after external links.
-#latex_show_urls = False
+# latex_show_urls = False
# Documents to append as an appendix to all manuals.
-#latex_appendices = []
+# latex_appendices = []
# If false, no module index is generated.
-#latex_domain_indices = True
+# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
@@ -227,7 +224,7 @@ latex_documents = [
man_pages = []
# If true, show URL addresses after external links.
-#man_show_urls = False
+# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
@@ -235,22 +232,28 @@ man_pages = []
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
- ('index', 'pkgcheck', 'pkgcheck Documentation',
- authors, 'pkgcheck', 'One line description of project.',
- 'Miscellaneous'),
+ (
+ "index",
+ "pkgcheck",
+ "pkgcheck Documentation",
+ authors,
+ "pkgcheck",
+ "One line description of project.",
+ "Miscellaneous",
+ ),
]
# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
+# texinfo_appendices = []
# If false, no module index is generated.
-#texinfo_domain_indices = True
+# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
+# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
-#texinfo_no_detailmenu = False
+# texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
@@ -262,62 +265,62 @@ epub_publisher = authors
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
-#epub_basename = 'pkgcheck'
+# epub_basename = 'pkgcheck'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
-#epub_theme = 'epub'
+# epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
-#epub_language = ''
+# epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
-#epub_scheme = ''
+# epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
-#epub_identifier = ''
+# epub_identifier = ''
# A unique identification for the text.
-#epub_uid = ''
+# epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
-#epub_cover = ()
+# epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
-#epub_guide = ()
+# epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
-#epub_pre_files = []
+# epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
-#epub_post_files = []
+# epub_post_files = []
# A list of files that should not be packed into the epub file.
-epub_exclude_files = ['search.html']
+epub_exclude_files = ["search.html"]
# The depth of the table of contents in toc.ncx.
-#epub_tocdepth = 3
+# epub_tocdepth = 3
# Allow duplicate toc entries.
-#epub_tocdup = True
+# epub_tocdup = True
# Choose between 'default' and 'includehidden'.
-#epub_tocscope = 'default'
+# epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
-#epub_fix_images = False
+# epub_fix_images = False
# Scale large images.
-#epub_max_image_width = 0
+# epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#epub_show_urls = 'inline'
+# epub_show_urls = 'inline'
# If false, no index is generated.
-#epub_use_index = True
+# epub_use_index = True
diff --git a/doc/generate/pkgcheck/checks.py b/doc/generate/pkgcheck/checks.py
index 171186fe..429a17e3 100755
--- a/doc/generate/pkgcheck/checks.py
+++ b/doc/generate/pkgcheck/checks.py
@@ -30,7 +30,7 @@ def main(f=sys.stdout, **kwargs):
def _rst_header(char, text, newline=True, leading=False):
if newline:
- out('\n', end='')
+ out("\n", end="")
if leading:
out(char * len(text))
out(text)
@@ -43,33 +43,38 @@ def main(f=sys.stdout, **kwargs):
wrapper = TextWrapper(width=85)
for scope in base.scopes.values():
- _rst_header('-', scope.desc.capitalize() + ' scope', leading=True)
+ _rst_header("-", scope.desc.capitalize() + " scope", leading=True)
checks = (x for x in objects.CHECKS.values() if x.scope == scope)
for check in checks:
if check.__doc__ is not None:
try:
- summary, explanation = check.__doc__.split('\n', 1)
+ summary, explanation = check.__doc__.split("\n", 1)
except ValueError:
summary = check.__doc__
explanation = None
else:
summary = None
- _rst_header('-', check.__name__)
+ _rst_header("-", check.__name__)
if summary:
- out('\n' + dedent(summary).strip())
+ out("\n" + dedent(summary).strip())
if explanation:
- explanation = '\n'.join(dedent(explanation).strip().split('\n'))
- out('\n' + explanation)
+ explanation = "\n".join(dedent(explanation).strip().split("\n"))
+ out("\n" + explanation)
if issubclass(check, GentooRepoCheck):
- out('\n\n- Gentoo repo specific')
- known_results = ', '.join(
- f'`{r.__name__}`_' for r in
- sorted(check.known_results, key=attrgetter('__name__')))
- out('\n' + '\n'.join(wrapper.wrap(
- f"(known result{_pl(check.known_results)}: {known_results})")))
-
-
-if __name__ == '__main__':
+ out("\n\n- Gentoo repo specific")
+ known_results = ", ".join(
+ f"`{r.__name__}`_"
+ for r in sorted(check.known_results, key=attrgetter("__name__"))
+ )
+ out(
+ "\n"
+ + "\n".join(
+ wrapper.wrap(f"(known result{_pl(check.known_results)}: {known_results})")
+ )
+ )
+
+
+if __name__ == "__main__":
main()
diff --git a/doc/generate/pkgcheck/keywords.py b/doc/generate/pkgcheck/keywords.py
index 628d4c8f..c6cfe432 100755
--- a/doc/generate/pkgcheck/keywords.py
+++ b/doc/generate/pkgcheck/keywords.py
@@ -25,7 +25,7 @@ def main(f=sys.stdout, **kwargs):
def _rst_header(char, text, newline=True, leading=False):
if newline:
- out('\n', end='')
+ out("\n", end="")
if leading:
out(char * len(text))
out(text)
@@ -41,32 +41,31 @@ def main(f=sys.stdout, **kwargs):
related_checks[keyword].add(check)
for scope in base.scopes.values():
- _rst_header('-', scope.desc.capitalize() + ' scope', leading=True)
+ _rst_header("-", scope.desc.capitalize() + " scope", leading=True)
keywords = (x for x in objects.KEYWORDS.values() if x.scope == scope)
for keyword in keywords:
if keyword.__doc__ is not None:
try:
- summary, explanation = keyword.__doc__.split('\n', 1)
+ summary, explanation = keyword.__doc__.split("\n", 1)
except ValueError:
summary = keyword.__doc__
explanation = None
else:
summary = None
- _rst_header('-', keyword.__name__)
+ _rst_header("-", keyword.__name__)
if summary:
- out('\n' + dedent(summary).strip())
+ out("\n" + dedent(summary).strip())
if explanation:
- explanation = '\n'.join(dedent(explanation).strip().split('\n'))
- out('\n' + explanation)
+ explanation = "\n".join(dedent(explanation).strip().split("\n"))
+ out("\n" + explanation)
if all(issubclass(x, GentooRepoCheck) for x in related_checks[keyword]):
- out(f'\n- Gentoo repo specific')
- out('\n' + f'- level: {keyword.level}')
- checks = ', '.join(sorted(
- f'`{c.__name__}`_' for c in related_checks[keyword]))
- out(f'- related check{_pl(related_checks[keyword])}: {checks}')
+ out("\n- Gentoo repo specific")
+ out("\n" + f"- level: {keyword.level}")
+ checks = ", ".join(sorted(f"`{c.__name__}`_" for c in related_checks[keyword]))
+ out(f"- related check{_pl(related_checks[keyword])}: {checks}")
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/doc/generate/pkgcheck/reporters.py b/doc/generate/pkgcheck/reporters.py
index e0debc35..9f503811 100755
--- a/doc/generate/pkgcheck/reporters.py
+++ b/doc/generate/pkgcheck/reporters.py
@@ -14,7 +14,7 @@ def main(f=sys.stdout, **kwargs):
def _rst_header(char, text, newline=True, leading=False):
if newline:
- out('\n', end='')
+ out("\n", end="")
if leading:
out(char * len(text))
out(text)
@@ -24,25 +24,25 @@ def main(f=sys.stdout, **kwargs):
if __doc__ is not None:
out(__doc__.strip())
- _rst_header('=', 'Reporters', newline=False)
+ _rst_header("=", "Reporters", newline=False)
for reporter in objects.REPORTERS.values():
if reporter.__doc__ is not None:
try:
- summary, explanation = reporter.__doc__.split('\n', 1)
+ summary, explanation = reporter.__doc__.split("\n", 1)
except ValueError:
summary = reporter.__doc__
explanation = None
else:
summary = None
- _rst_header('-', reporter.__name__, leading=True)
+ _rst_header("-", reporter.__name__, leading=True)
if summary:
- out('\n' + dedent(summary).strip())
+ out("\n" + dedent(summary).strip())
if explanation:
- explanation = '\n'.join(dedent(explanation).strip().split('\n'))
- out('\n' + explanation)
+ explanation = "\n".join(dedent(explanation).strip().split("\n"))
+ out("\n" + explanation)
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/pyproject.toml b/pyproject.toml
index 74945a14..da1db2bc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -73,6 +73,9 @@ zip-safe = false
[tool.setuptools.dynamic]
version = {attr = "pkgcheck.__version__"}
+[tool.black]
+line-length = 100
+
[tool.pytest.ini_options]
minversion = "6.0"
addopts = "-vv -ra -l"
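(The added [tool.black] section pins black's line length at 100 columns; black discovers this configuration automatically from pyproject.toml when run inside the repo. A minimal before/after sketch of the rewrite this produces, taken from the ci.py hunk earlier, is:

    # before: single-quoted string, as in the old ci.py
    JSON_URL = 'https://raw.githubusercontent.com/mgorny/pkgcheck2html/master/pkgcheck2html.conf.json'

    # after black: quotes normalized to double quotes; by default black does
    # not split over-long string literals, so the line stays intact
    JSON_URL = "https://raw.githubusercontent.com/mgorny/pkgcheck2html/master/pkgcheck2html.conf.json"

which matches the quote-normalization and call-wrapping changes throughout this diff.)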
diff --git a/setup.py b/setup.py
index 4149fae3..f6e96a36 100644
--- a/setup.py
+++ b/setup.py
@@ -13,14 +13,13 @@ from setuptools.command.sdist import sdist as orig_sdist
from wheel.bdist_wheel import bdist_wheel as orig_bdist_wheel
-use_system_tree_sitter_bash = bool(os.environ.get(
- 'USE_SYSTEM_TREE_SITTER_BASH', False))
+use_system_tree_sitter_bash = bool(os.environ.get("USE_SYSTEM_TREE_SITTER_BASH", False))
@contextmanager
def sys_path():
orig_path = sys.path[:]
- sys.path.insert(0, str(Path.cwd() / 'src'))
+ sys.path.insert(0, str(Path.cwd() / "src"))
try:
yield
finally:
@@ -28,7 +27,7 @@ def sys_path():
class build_treesitter(Command, SubCommand):
- description = 'build tree-sitter-bash library'
+ description = "build tree-sitter-bash library"
def initialize_options(self):
pass
@@ -37,29 +36,30 @@ class build_treesitter(Command, SubCommand):
pass
def get_source_files(self):
- cwd = Path(__file__).parent / 'tree-sitter-bash/src'
+ cwd = Path(__file__).parent / "tree-sitter-bash/src"
return [
- str(cwd / 'GNUmakefile'), str(cwd / 'tree_sitter/parser.h'),
- str(cwd / 'parser.c'), str(cwd / 'scanner.cc'),
+ str(cwd / "GNUmakefile"),
+ str(cwd / "tree_sitter/parser.h"),
+ str(cwd / "parser.c"),
+ str(cwd / "scanner.cc"),
]
- library_path = Path(__file__).parent / 'src/pkgcheck/bash/lang.so'
+ library_path = Path(__file__).parent / "src/pkgcheck/bash/lang.so"
def run(self):
if not use_system_tree_sitter_bash:
if not self.library_path.exists():
- logging.info('building tree-sitter-bash library')
+ logging.info("building tree-sitter-bash library")
with sys_path():
from pkgcheck.bash import build_library
- build_library(self.library_path, ['tree-sitter-bash'])
+ build_library(self.library_path, ["tree-sitter-bash"])
class build(orig_build):
- sub_commands = orig_build.sub_commands + [('build_treesitter', None)]
+ sub_commands = orig_build.sub_commands + [("build_treesitter", None)]
class install(orig_install):
-
def finalize_options(self):
"""Force platlib install since non-python libraries are included."""
super().finalize_options()
@@ -70,14 +70,18 @@ class install(orig_install):
self.write_obj_lists()
self.generate_files()
- self.copy_tree('data', self.install_data)
+ self.copy_tree("data", self.install_data)
install_dir = Path(self.install_lib)
if not use_system_tree_sitter_bash:
- self.reinitialize_command('build').ensure_finalized()
- (dst := install_dir / 'pkgcheck/bash').mkdir(parents=True, exist_ok=True)
- self.copy_file(build_treesitter.library_path, dst / 'lang.so',
- preserve_mode=True, preserve_times=False)
+ self.reinitialize_command("build").ensure_finalized()
+ (dst := install_dir / "pkgcheck/bash").mkdir(parents=True, exist_ok=True)
+ self.copy_file(
+ build_treesitter.library_path,
+ dst / "lang.so",
+ preserve_mode=True,
+ preserve_times=False,
+ )
def write_obj_lists(self):
"""Generate config file of keyword, check, and other object lists."""
@@ -88,7 +92,6 @@ class install(orig_install):
# hack to drop quotes on modules in generated files
class _kls:
-
def __init__(self, module):
self.module = module
@@ -100,41 +103,50 @@ class install(orig_install):
modules = defaultdict(set)
objs = defaultdict(list)
- for obj in ('KEYWORDS', 'CHECKS', 'REPORTERS'):
+ for obj in ("KEYWORDS", "CHECKS", "REPORTERS"):
for name, cls in getattr(objects, obj).items():
- parent, module = cls.__module__.rsplit('.', 1)
+ parent, module = cls.__module__.rsplit(".", 1)
modules[parent].add(module)
- objs[obj].append((name, _kls(f'{module}.{name}')))
+ objs[obj].append((name, _kls(f"{module}.{name}")))
- keywords = tuple(objs['KEYWORDS'])
- checks = tuple(objs['CHECKS'])
- reporters = tuple(objs['REPORTERS'])
+ keywords = tuple(objs["KEYWORDS"])
+ checks = tuple(objs["CHECKS"])
+ reporters = tuple(objs["REPORTERS"])
- logging.info(f'writing objects to {objects_path!r}')
- with objects_path.open('w') as f:
+ logging.info(f"writing objects to {objects_path!r}")
+ with objects_path.open("w") as f:
objects_path.chmod(0o644)
for k, v in sorted(modules.items()):
f.write(f"from {k} import {', '.join(sorted(v))}\n")
- f.write(dedent(f"""\
- KEYWORDS = {keywords}
- CHECKS = {checks}
- REPORTERS = {reporters}
- """))
-
- logging.info(f'writing path constants to {const_path!r}')
- with const_path.open('w') as f:
+ f.write(
+ dedent(
+ f"""\
+ KEYWORDS = {keywords}
+ CHECKS = {checks}
+ REPORTERS = {reporters}
+ """
+ )
+ )
+
+ logging.info(f"writing path constants to {const_path!r}")
+ with const_path.open("w") as f:
const_path.chmod(0o644)
- f.write(dedent("""\
- from os.path import abspath, exists, join
- import sys
- INSTALL_PREFIX = abspath(sys.prefix)
- if not exists(join(INSTALL_PREFIX, 'lib/pkgcore')):
- INSTALL_PREFIX = abspath(sys.base_prefix)
- DATA_PATH = join(INSTALL_PREFIX, 'share/pkgcheck')
- """))
+ f.write(
+ dedent(
+ """\
+ from os.path import abspath, exists, join
+ import sys
+ INSTALL_PREFIX = abspath(sys.prefix)
+ if not exists(join(INSTALL_PREFIX, 'lib/pkgcore')):
+ INSTALL_PREFIX = abspath(sys.base_prefix)
+ DATA_PATH = join(INSTALL_PREFIX, 'share/pkgcheck')
+ """
+ )
+ )
logging.info("generating version info")
from snakeoil.version import get_git_version
+
verinfo_path.write_text(f"version_info={get_git_version(Path(__file__).parent)!r}")
def generate_files(self):
@@ -142,22 +154,21 @@ class install(orig_install):
from pkgcheck import base, objects
from pkgcheck.addons import caches
- (dst := Path(self.install_data) / 'share/pkgcheck').mkdir(parents=True, exist_ok=True)
+ (dst := Path(self.install_data) / "share/pkgcheck").mkdir(parents=True, exist_ok=True)
- logging.info('Generating available scopes')
- (dst / 'scopes').write_text('\n'.join(base.scopes) + '\n')
+ logging.info("Generating available scopes")
+ (dst / "scopes").write_text("\n".join(base.scopes) + "\n")
- logging.info('Generating available cache types')
+ logging.info("Generating available cache types")
cache_objs = caches.CachedAddon.caches.values()
- (dst / 'caches').write_text('\n'.join(x.type for x in cache_objs) + '\n')
+ (dst / "caches").write_text("\n".join(x.type for x in cache_objs) + "\n")
- for obj in ('KEYWORDS', 'CHECKS', 'REPORTERS'):
- logging.info(f'Generating {obj.lower()} list')
- (dst / obj.lower()).write_text('\n'.join(getattr(objects, obj)) + '\n')
+ for obj in ("KEYWORDS", "CHECKS", "REPORTERS"):
+ logging.info(f"Generating {obj.lower()} list")
+ (dst / obj.lower()).write_text("\n".join(getattr(objects, obj)) + "\n")
class bdist_wheel(orig_bdist_wheel):
-
def finalize_options(self):
super().finalize_options()
self.root_is_pure = False # Mark us as not a pure python package
@@ -165,30 +176,34 @@ class bdist_wheel(orig_bdist_wheel):
def get_tag(self):
_, _, plat = super().get_tag()
# We don't contain any python source, nor any python extensions
- return 'py3', 'none', plat
+ return "py3", "none", plat
class sdist(orig_sdist):
-
def make_release_tree(self, base_dir, files):
super().make_release_tree(base_dir, files)
base_dir = Path(base_dir)
- if (man_page := Path(__file__).parent / 'build/sphinx/man/pkgcheck.1').exists():
- (base_dir / 'man').mkdir(parents=True, exist_ok=True)
- self.copy_file(man_page, base_dir / 'man/pkgcheck.1', preserve_mode=False, preserve_times=False)
+ if (man_page := Path(__file__).parent / "build/sphinx/man/pkgcheck.1").exists():
+ (base_dir / "man").mkdir(parents=True, exist_ok=True)
+ self.copy_file(
+ man_page, base_dir / "man/pkgcheck.1", preserve_mode=False, preserve_times=False
+ )
logging.info("generating version info")
from snakeoil.version import get_git_version
- (base_dir / 'src/pkgcheck/_verinfo.py').write_text(f"version_info={get_git_version(Path(__file__).parent)!r}")
+
+ (base_dir / "src/pkgcheck/_verinfo.py").write_text(
+ f"version_info={get_git_version(Path(__file__).parent)!r}"
+ )
setup(
cmdclass={
- 'bdist_wheel': bdist_wheel,
- 'build': build,
- 'build_treesitter': build_treesitter,
- 'install': install,
- 'sdist': sdist,
+ "bdist_wheel": bdist_wheel,
+ "build": build,
+ "build_treesitter": build_treesitter,
+ "install": install,
+ "sdist": sdist,
}
)
diff --git a/src/pkgcheck/__init__.py b/src/pkgcheck/__init__.py
index 699538a1..8e50bdfc 100644
--- a/src/pkgcheck/__init__.py
+++ b/src/pkgcheck/__init__.py
@@ -4,9 +4,9 @@ from .api import keywords, scan
from .base import PkgcheckException
from .results import Result
-__all__ = ('keywords', 'scan', 'PkgcheckException', 'Result')
-__title__ = 'pkgcheck'
-__version__ = '0.10.20'
+__all__ = ("keywords", "scan", "PkgcheckException", "Result")
+__title__ = "pkgcheck"
+__version__ = "0.10.20"
def __getattr__(name):
@@ -15,9 +15,9 @@ def __getattr__(name):
return keywords[name]
try:
- return _import('.' + name, __name__)
+ return _import("." + name, __name__)
except ImportError:
- raise AttributeError(f'module {__name__} has no attribute {name}')
+ raise AttributeError(f"module {__name__} has no attribute {name}")
def __dir__():
diff --git a/src/pkgcheck/addons/__init__.py b/src/pkgcheck/addons/__init__.py
index 4d7aff48..5a5be2d0 100644
--- a/src/pkgcheck/addons/__init__.py
+++ b/src/pkgcheck/addons/__init__.py
@@ -26,18 +26,18 @@ class ArchesArgs(arghparse.CommaSeparatedNegations):
if not enabled:
# enable all non-prefix arches
- enabled = set(arch for arch in all_arches if '-' not in arch)
+ enabled = set(arch for arch in all_arches if "-" not in arch)
arches = set(enabled).difference(disabled)
if all_arches and (unknown_arches := arches.difference(all_arches)):
- es = pluralism(unknown_arches, plural='es')
- unknown = ', '.join(unknown_arches)
- valid = ', '.join(sorted(all_arches))
- parser.error(f'unknown arch{es}: {unknown} (valid arches: {valid})')
+ es = pluralism(unknown_arches, plural="es")
+ unknown = ", ".join(unknown_arches)
+ valid = ", ".join(sorted(all_arches))
+ parser.error(f"unknown arch{es}: {unknown} (valid arches: {valid})")
# check if any selected arch only has experimental profiles
for arch in arches:
- if all(p.status == 'exp' for p in namespace.target_repo.profiles if p.arch == arch):
+ if all(p.status == "exp" for p in namespace.target_repo.profiles if p.arch == arch):
namespace.exp_profiles_required = True
break
@@ -51,11 +51,17 @@ class ArchesAddon(base.Addon):
@classmethod
def mangle_argparser(cls, parser):
- group = parser.add_argument_group('arches')
+ group = parser.add_argument_group("arches")
group.add_argument(
- '-a', '--arches', dest='selected_arches', metavar='ARCH', default=(),
- action=arghparse.Delayed, target=ArchesArgs, priority=100,
- help='comma separated list of arches to enable/disable',
+ "-a",
+ "--arches",
+ dest="selected_arches",
+ metavar="ARCH",
+ default=(),
+ action=arghparse.Delayed,
+ target=ArchesArgs,
+ priority=100,
+ help="comma separated list of arches to enable/disable",
docs="""
Comma separated list of arches to enable and disable.
@@ -67,8 +73,9 @@ class ArchesAddon(base.Addon):
By default all repo defined arches are used; however,
stable-related checks (e.g. UnstableOnly) default to the set of
arches having stable profiles in the target repo.
- """)
- parser.bind_delayed_default(1000, 'arches')(cls._default_arches)
+ """,
+ )
+ parser.bind_delayed_default(1000, "arches")(cls._default_arches)
@staticmethod
def _default_arches(namespace, attr):
@@ -81,14 +88,14 @@ class KeywordsAddon(base.Addon):
def __init__(self, *args):
super().__init__(*args)
- special = {'-*'}
+ special = {"-*"}
self.arches = self.options.target_repo.known_arches
- unstable = {'~' + x for x in self.arches}
- disabled = {'-' + x for x in chain(self.arches, unstable)}
+ unstable = {"~" + x for x in self.arches}
+ disabled = {"-" + x for x in chain(self.arches, unstable)}
self.valid = special | self.arches | unstable | disabled
# Note: '*' and '~*' are portage-only, i.e. not in the spec, so they
# don't belong in the main tree.
- self.portage = {'*', '~*'}
+ self.portage = {"*", "~*"}
class StableArchesAddon(base.Addon):
@@ -98,7 +105,7 @@ class StableArchesAddon(base.Addon):
@classmethod
def mangle_argparser(cls, parser):
- parser.bind_delayed_default(1001, 'stable_arches')(cls._default_stable_arches)
+ parser.bind_delayed_default(1001, "stable_arches")(cls._default_stable_arches)
@staticmethod
def _default_stable_arches(namespace, attr):
@@ -106,11 +113,12 @@ class StableArchesAddon(base.Addon):
target_repo = namespace.target_repo
if not namespace.selected_arches:
# use known stable arches (GLEP 72) if arches aren't specified
- stable_arches = target_repo.config.arches_desc['stable']
+ stable_arches = target_repo.config.arches_desc["stable"]
# fallback to determining stable arches from profiles.desc if arches.desc doesn't exist
if not stable_arches:
- stable_arches = set().union(*(
- repo.profiles.arches('stable') for repo in target_repo.trees))
+ stable_arches = set().union(
+ *(repo.profiles.arches("stable") for repo in target_repo.trees)
+ )
else:
stable_arches = namespace.arches
@@ -129,17 +137,17 @@ class UnstatedIuse(results.VersionResult, results.Error):
@property
def desc(self):
- msg = [f'attr({self.attr})']
+ msg = [f"attr({self.attr})"]
if self.profile is not None:
if self.num_profiles is not None:
- num_profiles = f' ({self.num_profiles} total)'
+ num_profiles = f" ({self.num_profiles} total)"
else:
- num_profiles = ''
- msg.append(f'profile {self.profile!r}{num_profiles}')
- flags = ', '.join(self.flags)
+ num_profiles = ""
+ msg.append(f"profile {self.profile!r}{num_profiles}")
+ flags = ", ".join(self.flags)
s = pluralism(self.flags)
- msg.extend([f'unstated flag{s}', f'[ {flags} ]'])
- return ': '.join(msg)
+ msg.extend([f"unstated flag{s}", f"[ {flags} ]"])
+ return ": ".join(msg)
class UseAddon(base.Addon):
@@ -153,7 +161,8 @@ class UseAddon(base.Addon):
for p in target_repo.profiles:
try:
self.profiles.append(
- target_repo.profiles.create_profile(p, load_profile_base=False))
+ target_repo.profiles.create_profile(p, load_profile_base=False)
+ )
except profiles_mod.ProfileError:
continue
@@ -173,8 +182,8 @@ class UseAddon(base.Addon):
for repo in target_repo.trees:
known_iuse.update(flag for matcher, (flag, desc) in repo.config.use_desc)
known_iuse_expand.update(
- flag for flags in repo.config.use_expand_desc.values()
- for flag, desc in flags)
+ flag for flags in repo.config.use_expand_desc.values() for flag, desc in flags
+ )
self.collapsed_iuse = misc.non_incremental_collapsed_restrict_to_data(
((packages.AlwaysTrue, known_iuse),),
@@ -186,8 +195,9 @@ class UseAddon(base.Addon):
self.ignore = not (c_implicit_iuse or known_iuse or known_iuse_expand)
if self.ignore:
logger.debug(
- 'disabling use/iuse validity checks since no usable '
- 'use.desc and use.local.desc were found')
+ "disabling use/iuse validity checks since no usable "
+ "use.desc and use.local.desc were found"
+ )
def allowed_iuse(self, pkg):
return self.collapsed_iuse.pull_data(pkg).union(pkg.local_use)
@@ -213,9 +223,14 @@ class UseAddon(base.Addon):
v.append(node.restriction)
yield from self._flatten_restricts(
iflatten_instance(node.payload, skip_filter),
- skip_filter, stated, unstated, attr, v)
+ skip_filter,
+ stated,
+ unstated,
+ attr,
+ v,
+ )
continue
- elif attr == 'required_use':
+ elif attr == "required_use":
unstated.update(filterfalse(stated.__contains__, node.vals))
yield k, tuple(v)
@@ -248,8 +263,11 @@ class UseAddon(base.Addon):
skip_filter = (packages.Conditional,) + klasses
nodes = iflatten_instance(seq, skip_filter)
unstated = set()
- vals = dict(self._flatten_restricts(
- nodes, skip_filter, stated=pkg.iuse_stripped, unstated=unstated, attr=attr))
+ vals = dict(
+ self._flatten_restricts(
+ nodes, skip_filter, stated=pkg.iuse_stripped, unstated=unstated, attr=attr
+ )
+ )
return vals, self._unstated_iuse(pkg, attr, unstated)
@@ -258,24 +276,27 @@ class NetAddon(base.Addon):
@classmethod
def mangle_argparser(cls, parser):
- group = parser.add_argument_group('network')
+ group = parser.add_argument_group("network")
group.add_argument(
- '--timeout', type=float, default='5',
- help='timeout used for network checks')
+ "--timeout", type=float, default="5", help="timeout used for network checks"
+ )
group.add_argument(
- '--user-agent', default='Wget/1.20.3 (linux-gnu)',
- help='custom user agent spoofing')
+ "--user-agent", default="Wget/1.20.3 (linux-gnu)", help="custom user agent spoofing"
+ )
@property
def session(self):
try:
from .net import Session
+
return Session(
- concurrent=self.options.tasks, timeout=self.options.timeout,
- user_agent=self.options.user_agent)
+ concurrent=self.options.tasks,
+ timeout=self.options.timeout,
+ user_agent=self.options.user_agent,
+ )
except ImportError as e:
- if e.name == 'requests':
- raise PkgcheckUserException('network checks require requests to be installed')
+ if e.name == "requests":
+ raise PkgcheckUserException("network checks require requests to be installed")
raise
@@ -290,10 +311,14 @@ def init_addon(cls, options, addons_map=None, **kwargs):
# initialize and inject all required addons for a given addon's inheritance
# tree as kwargs
required_addons = chain.from_iterable(
- x.required_addons for x in cls.__mro__ if issubclass(x, base.Addon))
- kwargs.update({
- base.param_name(addon): init_addon(addon, options, addons_map)
- for addon in required_addons})
+ x.required_addons for x in cls.__mro__ if issubclass(x, base.Addon)
+ )
+ kwargs.update(
+ {
+ base.param_name(addon): init_addon(addon, options, addons_map)
+ for addon in required_addons
+ }
+ )
# verify the cache type is enabled
if issubclass(cls, caches.CachedAddon) and not options.cache[cls.cache.type]:
@@ -302,7 +327,7 @@ def init_addon(cls, options, addons_map=None, **kwargs):
addon = addons_map[cls] = cls(options, **kwargs)
# force cache updates
- force_cache = getattr(options, 'force_cache', False)
+ force_cache = getattr(options, "force_cache", False)
if isinstance(addon, caches.CachedAddon):
addon.update_cache(force=force_cache)
diff --git a/src/pkgcheck/addons/caches.py b/src/pkgcheck/addons/caches.py
index 665efc4f..9cd13e58 100644
--- a/src/pkgcheck/addons/caches.py
+++ b/src/pkgcheck/addons/caches.py
@@ -23,6 +23,7 @@ from ..log import logger
@dataclass(frozen=True)
class CacheData:
"""Cache registry data."""
+
type: str
file: str
version: int
@@ -31,7 +32,7 @@ class CacheData:
class Cache:
"""Mixin for data caches."""
- __getattr__ = klass.GetAttrProxy('_cache')
+ __getattr__ = klass.GetAttrProxy("_cache")
class DictCache(UserDict, Cache):
@@ -46,7 +47,7 @@ class CacheDisabled(PkgcheckException):
"""Exception flagging that a requested cache type is disabled."""
def __init__(self, cache):
- super().__init__(f'{cache.type} cache support required')
+ super().__init__(f"{cache.type} cache support required")
class CachedAddon(Addon):
@@ -61,7 +62,7 @@ class CachedAddon(Addon):
"""Register available caches."""
super().__init_subclass__(**kwargs)
if cls.cache is None:
- raise ValueError(f'invalid cache registry: {cls!r}')
+ raise ValueError(f"invalid cache registry: {cls!r}")
cls.caches[cls] = cls.cache
def update_cache(self, repo=None, force=False):
@@ -75,17 +76,16 @@ class CachedAddon(Addon):
using the same identifier don't use the same cache file.
"""
token = blake2b(repo.location.encode()).hexdigest()[:10]
- dirname = f'{repo.repo_id.lstrip(os.sep)}-{token}'
- return pjoin(self.options.cache_dir, 'repos', dirname, self.cache.file)
+ dirname = f"{repo.repo_id.lstrip(os.sep)}-{token}"
+ return pjoin(self.options.cache_dir, "repos", dirname, self.cache.file)
def load_cache(self, path, fallback=None):
cache = fallback
try:
- with open(path, 'rb') as f:
+ with open(path, "rb") as f:
cache = pickle.load(f)
if cache.version != self.cache.version:
- logger.debug(
- 'forcing %s cache regen due to outdated version', self.cache.type)
+ logger.debug("forcing %s cache regen due to outdated version", self.cache.type)
os.remove(path)
cache = fallback
except IGNORED_EXCEPTIONS:
@@ -93,7 +93,7 @@ class CachedAddon(Addon):
except FileNotFoundError:
pass
except Exception as e:
- logger.debug('forcing %s cache regen: %s', self.cache.type, e)
+ logger.debug("forcing %s cache regen: %s", self.cache.type, e)
os.remove(path)
cache = fallback
return cache
@@ -104,17 +104,16 @@ class CachedAddon(Addon):
with AtomicWriteFile(path, binary=True) as f:
pickle.dump(data, f, protocol=-1)
except IOError as e:
- msg = f'failed dumping {self.cache.type} cache: {path!r}: {e.strerror}'
+ msg = f"failed dumping {self.cache.type} cache: {path!r}: {e.strerror}"
raise PkgcheckUserException(msg)
@klass.jit_attr
def existing_caches(self):
"""Mapping of all existing cache types to file paths."""
caches_map = {}
- repos_dir = pjoin(self.options.cache_dir, 'repos')
- for cache in sorted(self.caches.values(), key=attrgetter('type')):
- caches_map[cache.type] = tuple(sorted(
- pathlib.Path(repos_dir).rglob(cache.file)))
+ repos_dir = pjoin(self.options.cache_dir, "repos")
+ for cache in sorted(self.caches.values(), key=attrgetter("type")):
+ caches_map[cache.type] = tuple(sorted(pathlib.Path(repos_dir).rglob(cache.file)))
return ImmutableDict(caches_map)
def remove_caches(self):
@@ -125,14 +124,14 @@ class CachedAddon(Addon):
except FileNotFoundError:
pass
except IOError as e:
- raise PkgcheckUserException(f'failed removing cache dir: {e}')
+ raise PkgcheckUserException(f"failed removing cache dir: {e}")
else:
try:
for cache_type, paths in self.existing_caches.items():
if self.options.cache.get(cache_type, False):
for path in paths:
if self.options.dry_run:
- print(f'Would remove {path}')
+ print(f"Would remove {path}")
else:
os.unlink(path)
# remove empty cache dirs
@@ -145,4 +144,4 @@ class CachedAddon(Addon):
continue
raise
except IOError as e:
- raise PkgcheckUserException(f'failed removing {cache_type} cache: {path!r}: {e}')
+ raise PkgcheckUserException(f"failed removing {cache_type} cache: {path!r}: {e}")
diff --git a/src/pkgcheck/addons/eclass.py b/src/pkgcheck/addons/eclass.py
index fd4d085e..5e5e77ee 100644
--- a/src/pkgcheck/addons/eclass.py
+++ b/src/pkgcheck/addons/eclass.py
@@ -49,8 +49,7 @@ class EclassAddon(caches.CachedAddon):
"""Eclass support for various checks."""
# cache registry
- cache = caches.CacheData(type='eclass', file='eclass.pickle',
- version=EclassDoc.ABI_VERSION)
+ cache = caches.CacheData(type="eclass", file="eclass.pickle", version=EclassDoc.ABI_VERSION)
def __init__(self, *args):
super().__init__(*args)
@@ -81,7 +80,7 @@ class EclassAddon(caches.CachedAddon):
def update_cache(self, force=False):
"""Update related cache and push updates to disk."""
for repo in self.options.target_repo.trees:
- eclass_dir = pjoin(repo.location, 'eclass')
+ eclass_dir = pjoin(repo.location, "eclass")
cache_file = self.cache_file(repo)
cache_eclasses = False
eclasses = {}
@@ -91,15 +90,17 @@ class EclassAddon(caches.CachedAddon):
# check for eclass removals
for name in list(eclasses):
- if not os.path.exists(pjoin(eclass_dir, f'{name}.eclass')):
+ if not os.path.exists(pjoin(eclass_dir, f"{name}.eclass")):
del eclasses[name]
cache_eclasses = True
# verify the repo has eclasses
try:
repo_eclasses = sorted(
- (x[:-7], pjoin(eclass_dir, x)) for x in os.listdir(eclass_dir)
- if x.endswith('.eclass'))
+ (x[:-7], pjoin(eclass_dir, x))
+ for x in os.listdir(eclass_dir)
+ if x.endswith(".eclass")
+ )
except FileNotFoundError:
repo_eclasses = []
@@ -115,7 +116,7 @@ class EclassAddon(caches.CachedAddon):
raise KeyError
except (KeyError, AttributeError):
try:
- progress(f'{repo} -- updating eclass cache: {name:<{padding}}')
+ progress(f"{repo} -- updating eclass cache: {name:<{padding}}")
eclasses[name] = EclassDoc(path, sourced=True, repo=repo)
cache_eclasses = True
except IOError:
diff --git a/src/pkgcheck/addons/git.py b/src/pkgcheck/addons/git.py
index 02ec96ea..26c1d06d 100644
--- a/src/pkgcheck/addons/git.py
+++ b/src/pkgcheck/addons/git.py
@@ -37,6 +37,7 @@ from . import caches
@dataclass(frozen=True, eq=False)
class GitCommit:
"""Git commit objects."""
+
hash: str
commit_time: int
author: str
@@ -57,6 +58,7 @@ class GitCommit:
@dataclass(frozen=True)
class GitPkgChange:
"""Git package change objects."""
+
atom: atom_cls
status: str
commit: str
@@ -82,16 +84,18 @@ class GitConfig:
def __init__(self):
fd, self.path = tempfile.mkstemp()
- os.write(fd, b'[safe]\n\tdirectory = *\n')
+ os.write(fd, b"[safe]\n\tdirectory = *\n")
os.close(fd)
@property
def config_env(self):
# ignore global user and system git config, but disable safe.directory
- return ImmutableDict({
- 'GIT_CONFIG_GLOBAL': self.path,
- 'GIT_CONFIG_SYSTEM': '',
- })
+ return ImmutableDict(
+ {
+ "GIT_CONFIG_GLOBAL": self.path,
+ "GIT_CONFIG_SYSTEM": "",
+ }
+ )
def close(self):
os.unlink(self.path)
@@ -104,21 +108,25 @@ class GitLog:
self._running = False
self.git_config = GitConfig()
self.proc = subprocess.Popen(
- cmd, cwd=path,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=self.git_config.config_env)
+ cmd,
+ cwd=path,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ env=self.git_config.config_env,
+ )
def __iter__(self):
return self
def __next__(self):
# use replacement character for non-UTF8 decoding issues (issue #166)
- line = self.proc.stdout.readline().decode('utf-8', 'replace')
+ line = self.proc.stdout.readline().decode("utf-8", "replace")
# verify git log is running as expected after pulling the first line
if not self._running:
if self.proc.poll() or not line:
error = self.proc.stderr.read().decode().strip()
- raise GitError(f'failed running git log: {error}')
+ raise GitError(f"failed running git log: {error}")
self._running = True
self.git_config.close()
@@ -133,14 +141,14 @@ class _ParseGitRepo:
"""Generic iterator for custom git log output parsing support."""
# git command to run on the targeted repo
- _git_cmd = 'git log --name-status --diff-filter=ARMD -z'
+ _git_cmd = "git log --name-status --diff-filter=ARMD -z"
# custom git log format lines, see the "PRETTY FORMATS" section of
# the git log man page for details
_format = ()
# path regexes for git log parsing, validation is handled on instantiation
- _ebuild_re = re.compile(r'^(?P<category>[^/]+)/[^/]+/(?P<package>[^/]+)\.ebuild$')
+ _ebuild_re = re.compile(r"^(?P<category>[^/]+)/[^/]+/(?P<package>[^/]+)\.ebuild$")
def __init__(self, path, commit_range):
self.path = os.path.realpath(path)
@@ -161,12 +169,12 @@ class _ParseGitRepo:
@property
def changes(self):
"""Generator of file change status with changed packages."""
- changes = deque(next(self.git_log).strip('\x00').split('\x00'))
+ changes = deque(next(self.git_log).strip("\x00").split("\x00"))
while changes:
status = changes.popleft()
- if status.startswith('R'):
+ if status.startswith("R"):
# matched R status change
- status = 'R'
+ status = "R"
old = changes.popleft()
new = changes.popleft()
if (mo := self._ebuild_re.match(old)) and (mn := self._ebuild_re.match(new)):
@@ -191,11 +199,11 @@ class GitRepoCommits(_ParseGitRepo):
"""Parse git log output into an iterator of commit objects."""
_format = (
- '%h', # abbreviated commit hash
- '%ct', # commit timestamp
- '%an <%ae>', # Author Name <author@email.com>
- '%cn <%ce>', # Committer Name <committer@email.com>
- '%B', # commit message
+ "%h", # abbreviated commit hash
+ "%ct", # commit timestamp
+ "%an <%ae>", # Author Name <author@email.com>
+ "%cn <%ce>", # Committer Name <committer@email.com>
+ "%B", # commit message
)
def __next__(self):
@@ -203,13 +211,13 @@ class GitRepoCommits(_ParseGitRepo):
commit_time = int(next(self.git_log))
author = next(self.git_log)
committer = next(self.git_log)
- message = list(takewhile(lambda x: x != '\x00', self.git_log))
+ message = list(takewhile(lambda x: x != "\x00", self.git_log))
pkgs = defaultdict(set)
for status, atoms in self.changes:
- if status == 'R':
+ if status == "R":
old, new = atoms
- pkgs['A'].add(new)
- pkgs['D'].add(old)
+ pkgs["A"].add(new)
+ pkgs["D"].add(old)
else:
pkgs[status].update(atoms)
return GitCommit(commit_hash, commit_time, author, committer, message, ImmutableDict(pkgs))
@@ -219,8 +227,8 @@ class GitRepoPkgs(_ParseGitRepo):
"""Parse git log output into an iterator of package change objects."""
_format = (
- '%h', # abbreviated commit hash
- '%ct', # commit time
+ "%h", # abbreviated commit hash
+ "%ct", # commit time
)
def __init__(self, *args, local=False):
@@ -234,24 +242,21 @@ class GitRepoPkgs(_ParseGitRepo):
return self._pkgs.popleft()
except IndexError:
commit_hash = next(self.git_log)
- commit_time = int(next(self.git_log).rstrip('\x00'))
+ commit_time = int(next(self.git_log).rstrip("\x00"))
self._pkg_changes(commit_hash, commit_time)
def _pkg_changes(self, commit_hash, commit_time):
"""Queue package change objects from git log file changes."""
for status, pkgs in self.changes:
- if status == 'R':
+ if status == "R":
old, new = pkgs
if not self.local: # treat rename as addition and removal
- self._pkgs.append(
- GitPkgChange(new, 'A', commit_hash, commit_time))
- self._pkgs.append(
- GitPkgChange(old, 'D', commit_hash, commit_time))
+ self._pkgs.append(GitPkgChange(new, "A", commit_hash, commit_time))
+ self._pkgs.append(GitPkgChange(old, "D", commit_hash, commit_time))
else:
# renames are split into add/remove ops at
# the check level for the local commits repo
- self._pkgs.append(GitPkgChange(
- new, 'R', commit_hash, commit_time, old))
+ self._pkgs.append(GitPkgChange(new, "R", commit_hash, commit_time, old))
else:
self._pkgs.append(GitPkgChange(pkgs[0], status, commit_hash, commit_time))
@@ -264,26 +269,31 @@ class _GitCommitPkg(cpv.VersionedCPV):
# add additional attrs
sf = object.__setattr__
- sf(self, 'time', time)
- sf(self, 'status', status)
- sf(self, 'commit', commit)
- sf(self, 'old', old)
+ sf(self, "time", time)
+ sf(self, "status", status)
+ sf(self, "commit", commit)
+ sf(self, "old", old)
def old_pkg(self):
"""Create a new object from a rename commit's old atom."""
return self.__class__(
- self.old.category, self.old.package, self.status, self.old.version,
- self.time, self.commit)
+ self.old.category,
+ self.old.package,
+ self.status,
+ self.old.version,
+ self.time,
+ self.commit,
+ )
class GitChangedRepo(SimpleTree):
"""Historical git repo consisting of the latest changed packages."""
# selected pkg status filter
- _status_filter = {'A', 'R', 'M', 'D'}
+ _status_filter = {"A", "R", "M", "D"}
def __init__(self, *args, **kwargs):
- kwargs.setdefault('pkg_klass', _GitCommitPkg)
+ kwargs.setdefault("pkg_klass", _GitCommitPkg)
super().__init__(*args, **kwargs)
def _get_versions(self, cp):
@@ -298,25 +308,26 @@ class GitChangedRepo(SimpleTree):
for cp in sorter(candidates):
yield from sorter(
raw_pkg_cls(cp[0], cp[1], status, *commit)
- for status, commit in self.versions.get(cp, ()))
+ for status, commit in self.versions.get(cp, ())
+ )
class GitModifiedRepo(GitChangedRepo):
"""Historical git repo consisting of the latest modified packages."""
- _status_filter = {'A', 'M'}
+ _status_filter = {"A", "M"}
class GitAddedRepo(GitChangedRepo):
"""Historical git repo consisting of added packages."""
- _status_filter = {'A'}
+ _status_filter = {"A"}
class GitRemovedRepo(GitChangedRepo):
"""Historical git repo consisting of removed packages."""
- _status_filter = {'D'}
+ _status_filter = {"D"}
class _ScanGit(argparse.Action):
@@ -325,11 +336,11 @@ class _ScanGit(argparse.Action):
def __init__(self, *args, staged=False, **kwargs):
super().__init__(*args, **kwargs)
if staged:
- default_ref = 'HEAD'
- diff_cmd = ['git', 'diff-index', '--name-only', '--cached', '-z']
+ default_ref = "HEAD"
+ diff_cmd = ["git", "diff-index", "--name-only", "--cached", "-z"]
else:
- default_ref = 'origin..HEAD'
- diff_cmd = ['git', 'diff-tree', '-r', '--name-only', '-z']
+ default_ref = "origin..HEAD"
+ diff_cmd = ["git", "diff-tree", "-r", "--name-only", "-z"]
self.staged = staged
self.default_ref = default_ref
@@ -340,26 +351,30 @@ class _ScanGit(argparse.Action):
try:
p = subprocess.run(
self.diff_cmd + [ref],
- stdout=subprocess.PIPE, stderr=subprocess.PIPE,
- cwd=namespace.target_repo.location, check=True, encoding='utf8')
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=namespace.target_repo.location,
+ check=True,
+ encoding="utf8",
+ )
except FileNotFoundError as e:
parser.error(str(e))
except subprocess.CalledProcessError as e:
error = e.stderr.splitlines()[0]
- parser.error(f'failed running git: {error}')
+ parser.error(f"failed running git: {error}")
if not p.stdout:
# no changes exist, exit early
parser.exit()
- eclass_re = re.compile(r'^eclass/(?P<eclass>\S+)\.eclass$')
+ eclass_re = re.compile(r"^eclass/(?P<eclass>\S+)\.eclass$")
eclasses, profiles, pkgs = OrderedSet(), OrderedSet(), OrderedSet()
- for path in p.stdout.strip('\x00').split('\x00'):
+ for path in p.stdout.strip("\x00").split("\x00"):
path_components = path.split(os.sep)
if mo := eclass_re.match(path):
- eclasses.add(mo.group('eclass'))
- elif path_components[0] == 'profiles':
+ eclasses.add(mo.group("eclass"))
+ elif path_components[0] == "profiles":
profiles.add(path)
elif path_components[0] in namespace.target_repo.categories:
try:
@@ -384,13 +399,14 @@ class _ScanGit(argparse.Action):
def __call__(self, parser, namespace, value, option_string=None):
if namespace.targets:
- targets = ' '.join(namespace.targets)
+ targets = " ".join(namespace.targets)
s = pluralism(namespace.targets)
- parser.error(f'{option_string} is mutually exclusive with target{s}: {targets}')
+ parser.error(f"{option_string} is mutually exclusive with target{s}: {targets}")
if not self.staged:
# avoid circular import issues
from .. import objects
+
# enable git checks
namespace.enabled_checks.update(objects.CHECKS.select(GitCommitsCheck).values())
@@ -422,16 +438,21 @@ class GitAddon(caches.CachedAddon):
"""
# cache registry
- cache = caches.CacheData(type='git', file='git.pickle', version=5)
+ cache = caches.CacheData(type="git", file="git.pickle", version=5)
@classmethod
def mangle_argparser(cls, parser):
- group = parser.add_argument_group('git', docs=cls.__doc__)
+ group = parser.add_argument_group("git", docs=cls.__doc__)
git_opts = group.add_mutually_exclusive_group()
git_opts.add_argument(
- '--commits', nargs='?', default=False, metavar='tree-ish',
- action=arghparse.Delayed, target=_ScanGit, priority=10,
- help='determine scan targets from unpushed commits',
+ "--commits",
+ nargs="?",
+ default=False,
+ metavar="tree-ish",
+ action=arghparse.Delayed,
+ target=_ScanGit,
+ priority=10,
+ help="determine scan targets from unpushed commits",
docs="""
Targets are determined from the committed changes compared to a
given reference that defaults to the repo's origin.
@@ -440,21 +461,28 @@ class GitAddon(caches.CachedAddon):
the current branch compared to the branch named 'old' use
``pkgcheck scan --commits old``. For two separate branches
named 'old' and 'new' use ``pkgcheck scan --commits old..new``.
- """)
+ """,
+ )
git_opts.add_argument(
- '--staged', nargs='?', default=False, metavar='tree-ish',
- action=arghparse.Delayed, target=partial(_ScanGit, staged=True), priority=10,
- help='determine scan targets from staged changes',
+ "--staged",
+ nargs="?",
+ default=False,
+ metavar="tree-ish",
+ action=arghparse.Delayed,
+ target=partial(_ScanGit, staged=True),
+ priority=10,
+ help="determine scan targets from staged changes",
docs="""
Targets are determined using all staged changes for the git
repo. Unstaged changes and untracked files are ignored by
temporarily stashing them during the scanning process.
- """)
+ """,
+ )
def __init__(self, *args):
super().__init__(*args)
try:
- find_binary('git')
+ find_binary("git")
except CommandNotFound:
raise caches.CacheDisabled(self.cache)
@@ -465,14 +493,14 @@ class GitAddon(caches.CachedAddon):
def _gitignore(self):
"""Load a repo's .gitignore and .git/info/exclude files for path matching."""
patterns = []
- for path in ('.gitignore', '.git/info/exclude'):
+ for path in (".gitignore", ".git/info/exclude"):
try:
with open(pjoin(self.options.target_repo.location, path)) as f:
patterns.extend(f)
except (FileNotFoundError, IOError):
pass
if patterns:
- return PathSpec.from_lines('gitwildmatch', patterns)
+ return PathSpec.from_lines("gitwildmatch", patterns)
return None
def gitignored(self, path):
@@ -489,23 +517,31 @@ class GitAddon(caches.CachedAddon):
"""Retrieve a git repo's commit hash for a specific commit object."""
try:
p = subprocess.run(
- ['git', 'rev-parse', commit],
- stdout=subprocess.PIPE, stderr=subprocess.DEVNULL,
- cwd=path, check=True, encoding='utf8')
+ ["git", "rev-parse", commit],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ cwd=path,
+ check=True,
+ encoding="utf8",
+ )
except subprocess.CalledProcessError:
- raise GitError(f'failed retrieving commit hash for git repo: {path!r}')
+ raise GitError(f"failed retrieving commit hash for git repo: {path!r}")
return p.stdout.strip()
@staticmethod
- def _get_current_branch(path, commit='HEAD'):
+ def _get_current_branch(path, commit="HEAD"):
"""Retrieve a git repo's current branch for a specific commit object."""
try:
p = subprocess.run(
- ['git', 'rev-parse', '--abbrev-ref', commit],
- stdout=subprocess.PIPE, stderr=subprocess.DEVNULL,
- cwd=path, check=True, encoding='utf8')
+ ["git", "rev-parse", "--abbrev-ref", commit],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ cwd=path,
+ check=True,
+ encoding="utf8",
+ )
except subprocess.CalledProcessError:
- raise GitError(f'failed retrieving branch for git repo: {path!r}')
+ raise GitError(f"failed retrieving branch for git repo: {path!r}")
return p.stdout.strip()
@staticmethod
@@ -513,12 +549,16 @@ class GitAddon(caches.CachedAddon):
"""Retrieve a git repo's default branch used with origin remote."""
try:
p = subprocess.run(
- ['git', 'symbolic-ref', 'refs/remotes/origin/HEAD'],
- stdout=subprocess.PIPE, stderr=subprocess.DEVNULL,
- cwd=path, check=True, encoding='utf8')
+ ["git", "symbolic-ref", "refs/remotes/origin/HEAD"],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ cwd=path,
+ check=True,
+ encoding="utf8",
+ )
except subprocess.CalledProcessError:
- raise GitError(f'failed retrieving branch for git repo: {path!r}')
- return p.stdout.strip().split('/')[-1]
+ raise GitError(f"failed retrieving branch for git repo: {path!r}")
+ return p.stdout.strip().split("/")[-1]
@staticmethod
def pkg_history(repo, commit_range, data=None, local=False, verbosity=-1):
@@ -535,11 +575,12 @@ class GitAddon(caches.CachedAddon):
if local:
commit = (atom.fullver, pkg.commit_time, pkg.commit, pkg.old)
else:
- date = datetime.fromtimestamp(pkg.commit_time).strftime('%Y-%m-%d')
- progress(f'{repo} -- updating git cache: commit date: {date}')
+ date = datetime.fromtimestamp(pkg.commit_time).strftime("%Y-%m-%d")
+ progress(f"{repo} -- updating git cache: commit date: {date}")
commit = (atom.fullver, pkg.commit_time, pkg.commit)
- data.setdefault(atom.category, {}).setdefault(
- atom.package, {}).setdefault(pkg.status, []).append(commit)
+ data.setdefault(atom.category, {}).setdefault(atom.package, {}).setdefault(
+ pkg.status, []
+ ).append(commit)
return data
def update_cache(self, force=False):
@@ -551,10 +592,12 @@ class GitAddon(caches.CachedAddon):
# skip cache usage when not running on the default branch
if branch != default_branch:
logger.debug(
- 'skipping %s git repo cache update on '
- 'non-default branch %r', repo, branch)
+ "skipping %s git repo cache update on non-default branch %r",
+ repo,
+ branch,
+ )
continue
- commit = self._get_commit_hash(repo.location, 'origin/HEAD')
+ commit = self._get_commit_hash(repo.location, "origin/HEAD")
except GitError:
continue
@@ -567,18 +610,18 @@ class GitAddon(caches.CachedAddon):
git_cache = self.load_cache(cache_file)
if git_cache is None or commit != git_cache.commit:
- logger.debug('updating %s git repo cache to %s', repo, commit[:13])
+ logger.debug("updating %s git repo cache to %s", repo, commit[:13])
if git_cache is None:
data = {}
- commit_range = 'origin/HEAD'
+ commit_range = "origin/HEAD"
else:
data = git_cache.data
- commit_range = f'{git_cache.commit}..origin/HEAD'
+ commit_range = f"{git_cache.commit}..origin/HEAD"
try:
self.pkg_history(
- repo, commit_range, data=data,
- verbosity=self.options.verbosity)
+ repo, commit_range, data=data, verbosity=self.options.verbosity
+ )
except GitError as e:
raise PkgcheckUserException(str(e))
git_cache = GitCache(data, self.cache, commit=commit)
@@ -595,7 +638,7 @@ class GitAddon(caches.CachedAddon):
git_repos = []
for repo in self.options.target_repo.trees:
git_cache = self._cached_repos.get(repo.location, {})
- git_repos.append(repo_cls(git_cache, repo_id=f'{repo.repo_id}-history'))
+ git_repos.append(repo_cls(git_cache, repo_id=f"{repo.repo_id}-history"))
if len(git_repos) > 1:
return multiplex.tree(*git_repos)
@@ -606,14 +649,14 @@ class GitAddon(caches.CachedAddon):
data = {}
try:
- origin = self._get_commit_hash(target_repo.location, 'origin/HEAD')
- head = self._get_commit_hash(target_repo.location, 'HEAD')
+ origin = self._get_commit_hash(target_repo.location, "origin/HEAD")
+ head = self._get_commit_hash(target_repo.location, "HEAD")
if origin != head:
- data = self.pkg_history(target_repo, 'origin/HEAD..HEAD', local=True)
+ data = self.pkg_history(target_repo, "origin/HEAD..HEAD", local=True)
except GitError as e:
raise PkgcheckUserException(str(e))
- repo_id = f'{target_repo.repo_id}-commits'
+ repo_id = f"{target_repo.repo_id}-commits"
return repo_cls(data, repo_id=repo_id)
def commits(self):
@@ -621,10 +664,10 @@ class GitAddon(caches.CachedAddon):
commits = ()
try:
- origin = self._get_commit_hash(target_repo.location, 'origin/HEAD')
- head = self._get_commit_hash(target_repo.location, 'HEAD')
+ origin = self._get_commit_hash(target_repo.location, "origin/HEAD")
+ head = self._get_commit_hash(target_repo.location, "HEAD")
if origin != head:
- commits = GitRepoCommits(target_repo.location, 'origin/HEAD..HEAD')
+ commits = GitRepoCommits(target_repo.location, "origin/HEAD..HEAD")
except GitError as e:
raise PkgcheckUserException(str(e))
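
For reference, a minimal standalone sketch of the NUL-delimited ``git log`` stream the parsers above consume; the ``%h``/``%ct`` field order mirrors GitRepoPkgs._format, and the revision range and helper name are placeholders, not part of this commit:

    import subprocess

    def iter_commits(path, commit_range="origin/HEAD..HEAD"):
        """Yield (abbreviated hash, commit time) pairs from NUL-delimited git log output."""
        p = subprocess.run(
            ["git", "log", "-z", "--format=%h%n%ct", commit_range],
            stdout=subprocess.PIPE,
            cwd=path,
            check=True,
            encoding="utf8",
        )
        for record in filter(None, p.stdout.split("\x00")):
            commit_hash, commit_time = record.splitlines()[:2]
            yield commit_hash, int(commit_time)
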
diff --git a/src/pkgcheck/addons/net.py b/src/pkgcheck/addons/net.py
index 0fad98b0..6db5432d 100644
--- a/src/pkgcheck/addons/net.py
+++ b/src/pkgcheck/addons/net.py
@@ -8,7 +8,7 @@ import requests
from ..checks.network import RequestError, SSLError
# suppress all urllib3 log messages
-logging.getLogger('urllib3').propagate = False
+logging.getLogger("urllib3").propagate = False
class Session(requests.Session):
@@ -26,15 +26,15 @@ class Session(requests.Session):
# block when urllib3 connection pool is full
concurrent = concurrent if concurrent is not None else os.cpu_count() * 5
a = requests.adapters.HTTPAdapter(pool_maxsize=concurrent, pool_block=True)
- self.mount('https://', a)
- self.mount('http://', a)
+ self.mount("https://", a)
+ self.mount("http://", a)
# spoof user agent
- self.headers['User-Agent'] = user_agent
+ self.headers["User-Agent"] = user_agent
def send(self, req, **kwargs):
# forcibly use the session timeout
- kwargs['timeout'] = self.timeout
+ kwargs["timeout"] = self.timeout
try:
with super().send(req, **kwargs) as r:
r.raise_for_status()
@@ -42,6 +42,6 @@ class Session(requests.Session):
except requests.exceptions.SSLError as e:
raise SSLError(e)
except requests.exceptions.ConnectionError as e:
- raise RequestError(e, 'connection failed')
+ raise RequestError(e, "connection failed")
except requests.exceptions.RequestException as e:
raise RequestError(e)
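
A hedged usage sketch of the pooled-adapter pattern the Session subclass above builds on; the pool size, user agent string, and URL are illustrative values only:

    import requests

    session = requests.Session()
    adapter = requests.adapters.HTTPAdapter(pool_maxsize=10, pool_block=True)
    session.mount("https://", adapter)
    session.mount("http://", adapter)
    session.headers["User-Agent"] = "pkgcheck-example"
    # a per-request timeout stands in for the session-wide one forced in send()
    resp = session.get("https://example.com", timeout=5)
    resp.raise_for_status()
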
diff --git a/src/pkgcheck/addons/profiles.py b/src/pkgcheck/addons/profiles.py
index 02b31eda..799cd94a 100644
--- a/src/pkgcheck/addons/profiles.py
+++ b/src/pkgcheck/addons/profiles.py
@@ -22,15 +22,28 @@ from . import ArchesAddon, caches
class ProfileData:
-
- def __init__(self, repo, profile_name, key, provides, vfilter,
- iuse_effective, use, pkg_use, masked_use, forced_use, lookup_cache, insoluble,
- status, deprecated):
+ def __init__(
+ self,
+ repo,
+ profile_name,
+ key,
+ provides,
+ vfilter,
+ iuse_effective,
+ use,
+ pkg_use,
+ masked_use,
+ forced_use,
+ lookup_cache,
+ insoluble,
+ status,
+ deprecated,
+ ):
self.repo = repo
self.name = profile_name
self.key = key
self.provides_repo = provides
- self.provides_has_match = getattr(provides, 'has_match', provides.match)
+ self.provides_has_match = getattr(provides, "has_match", provides.match)
self.iuse_effective = iuse_effective
self.use = use
self.pkg_use = pkg_use
@@ -47,8 +60,7 @@ class ProfileData:
# pointless intermediate sets unless required
# kindly don't change that in any modifications, it adds up.
enabled = known_flags.intersection(self.forced_use.pull_data(pkg))
- immutable = enabled.union(
- filter(known_flags.__contains__, self.masked_use.pull_data(pkg)))
+ immutable = enabled.union(filter(known_flags.__contains__, self.masked_use.pull_data(pkg)))
if force_disabled := self.masked_use.pull_data(pkg):
enabled = enabled.difference(force_disabled)
return immutable, enabled
@@ -64,19 +76,19 @@ class ProfilesArgs(arghparse.CommaSeparatedNegations):
@staticmethod
def norm_name(repo, s):
"""Expand status keywords and format paths."""
- if s in ('dev', 'exp', 'stable', 'deprecated'):
+ if s in ("dev", "exp", "stable", "deprecated"):
yield from repo.profiles.get_profiles(status=s)
- elif s == 'all':
+ elif s == "all":
yield from repo.profiles
else:
try:
yield repo.profiles[os.path.normpath(s)]
except KeyError:
- raise ValueError(f'nonexistent profile: {s!r}')
+ raise ValueError(f"nonexistent profile: {s!r}")
def __call__(self, parser, namespace, values, option_string=None):
disabled, enabled = self.parse_values(values)
- namespace.ignore_deprecated_profiles = 'deprecated' not in enabled
+ namespace.ignore_deprecated_profiles = "deprecated" not in enabled
# Expand status keywords, e.g. 'stable' -> set of stable profiles, and
# translate selections into profile objs.
@@ -104,18 +116,23 @@ class ProfileAddon(caches.CachedAddon):
# non-profile dirs found in the profiles directory, generally only in
# the gentoo repo, but could be in overlays as well
- non_profile_dirs = frozenset(['desc', 'updates'])
+ non_profile_dirs = frozenset(["desc", "updates"])
# cache registry
- cache = caches.CacheData(type='profiles', file='profiles.pickle', version=2)
+ cache = caches.CacheData(type="profiles", file="profiles.pickle", version=2)
@classmethod
def mangle_argparser(cls, parser):
- group = parser.add_argument_group('profiles')
+ group = parser.add_argument_group("profiles")
group.add_argument(
- '-p', '--profiles', metavar='PROFILE', dest='selected_profiles',
- action=arghparse.Delayed, target=ProfilesArgs, priority=101,
- help='comma separated list of profiles to enable/disable',
+ "-p",
+ "--profiles",
+ metavar="PROFILE",
+ dest="selected_profiles",
+ action=arghparse.Delayed,
+ target=ProfilesArgs,
+ priority=101,
+ help="comma separated list of profiles to enable/disable",
docs="""
Comma separated list of profiles to enable and disable for
scanning. Any profiles specified in this fashion will be the
@@ -137,8 +154,9 @@ class ProfileAddon(caches.CachedAddon):
to only scan all stable profiles pass the ``stable`` argument
to --profiles. Additionally the keyword ``all`` can be used to
scan all defined profiles in the target repo.
- """)
- parser.bind_delayed_default(1001, 'profiles')(cls._default_profiles)
+ """,
+ )
+ parser.bind_delayed_default(1001, "profiles")(cls._default_profiles)
@staticmethod
def _default_profiles(namespace, attr):
@@ -148,8 +166,8 @@ class ProfileAddon(caches.CachedAddon):
# that require them to operate properly.
target_repo = namespace.target_repo
profiles = set(target_repo.profiles)
- if not getattr(namespace, 'exp_profiles_required', False):
- profiles -= set(ProfilesArgs.norm_name(target_repo, 'exp'))
+ if not getattr(namespace, "exp_profiles_required", False):
+ profiles -= set(ProfilesArgs.norm_name(target_repo, "exp"))
setattr(namespace, attr, profiles)
def __init__(self, *args, arches_addon):
@@ -160,7 +178,7 @@ class ProfileAddon(caches.CachedAddon):
self.arch_profiles = defaultdict(list)
self.target_repo = self.options.target_repo
- ignore_deprecated = getattr(self.options, 'ignore_deprecated_profiles', True)
+ ignore_deprecated = getattr(self.options, "ignore_deprecated_profiles", True)
for p in sorted(self.options.profiles):
if p.deprecated and ignore_deprecated:
@@ -171,7 +189,7 @@ class ProfileAddon(caches.CachedAddon):
# Only throw errors if the profile was selected by the user, bad
# repo profiles will be caught during repo metadata scans.
if self.options.selected_profiles is not None:
- raise PkgcheckUserException(f'invalid profile: {e.path!r}: {e.error}')
+ raise PkgcheckUserException(f"invalid profile: {e.path!r}: {e.error}")
continue
self.arch_profiles[p.arch].append((profile, p))
@@ -180,7 +198,7 @@ class ProfileAddon(caches.CachedAddon):
"""Given a profile object, return its file set and most recent mtime."""
cache = {}
while True:
- profile = (yield)
+ profile = yield
profile_mtime = 0
profile_files = []
for node in profile.stack:
@@ -204,8 +222,7 @@ class ProfileAddon(caches.CachedAddon):
"""Mapping of profile age and file sets used to check cache viability."""
data = {}
gen_profile_data = self._profile_files()
- for profile_obj, profile in chain.from_iterable(
- self.arch_profiles.values()):
+ for profile_obj, profile in chain.from_iterable(self.arch_profiles.values()):
mtime, files = gen_profile_data.send(profile_obj)
data[profile] = (mtime, files)
next(gen_profile_data)
@@ -220,7 +237,7 @@ class ProfileAddon(caches.CachedAddon):
for repo in self.target_repo.trees:
cache_file = self.cache_file(repo)
# add profiles-base -> repo mapping to ease storage procedure
- cached_profiles[repo.config.profiles_base]['repo'] = repo
+ cached_profiles[repo.config.profiles_base]["repo"] = repo
if not force:
cache = self.load_cache(cache_file, fallback={})
cached_profiles[repo.config.profiles_base].update(cache)
@@ -228,14 +245,21 @@ class ProfileAddon(caches.CachedAddon):
chunked_data_cache = {}
for arch in sorted(self.options.arches):
- stable_key, unstable_key = arch, f'~{arch}'
+ stable_key, unstable_key = arch, f"~{arch}"
stable_r = packages.PackageRestriction(
- "keywords", values.ContainmentMatch2((stable_key,)))
+ "keywords", values.ContainmentMatch2((stable_key,))
+ )
unstable_r = packages.PackageRestriction(
- "keywords", values.ContainmentMatch2((stable_key, unstable_key,)))
+ "keywords",
+ values.ContainmentMatch2(
+ (
+ stable_key,
+ unstable_key,
+ )
+ ),
+ )
- default_masked_use = tuple(set(
- x for x in official_arches if x != stable_key))
+ default_masked_use = tuple(set(x for x in official_arches if x != stable_key))
# padding for progress output
padding = max(len(x) for x in self.options.arches)
@@ -244,23 +268,25 @@ class ProfileAddon(caches.CachedAddon):
files = self.profile_data.get(profile)
try:
cached_profile = cached_profiles[profile.base][profile.path]
- if files != cached_profile['files']:
+ if files != cached_profile["files"]:
# force refresh of outdated cache entry
raise KeyError
- masks = cached_profile['masks']
- unmasks = cached_profile['unmasks']
- immutable_flags = cached_profile['immutable_flags']
- stable_immutable_flags = cached_profile['stable_immutable_flags']
- enabled_flags = cached_profile['enabled_flags']
- stable_enabled_flags = cached_profile['stable_enabled_flags']
- pkg_use = cached_profile['pkg_use']
- iuse_effective = cached_profile['iuse_effective']
- use = cached_profile['use']
- provides_repo = cached_profile['provides_repo']
+ masks = cached_profile["masks"]
+ unmasks = cached_profile["unmasks"]
+ immutable_flags = cached_profile["immutable_flags"]
+ stable_immutable_flags = cached_profile["stable_immutable_flags"]
+ enabled_flags = cached_profile["enabled_flags"]
+ stable_enabled_flags = cached_profile["stable_enabled_flags"]
+ pkg_use = cached_profile["pkg_use"]
+ iuse_effective = cached_profile["iuse_effective"]
+ use = cached_profile["use"]
+ provides_repo = cached_profile["provides_repo"]
except KeyError:
try:
- progress(f'{repo} -- updating profiles cache: {profile.arch:<{padding}}')
+ progress(
+ f"{repo} -- updating profiles cache: {profile.arch:<{padding}}"
+ )
masks = profile_obj.masks
unmasks = profile_obj.unmasks
@@ -270,7 +296,9 @@ class ProfileAddon(caches.CachedAddon):
immutable_flags.optimize(cache=chunked_data_cache)
immutable_flags.freeze()
- stable_immutable_flags = profile_obj.stable_masked_use.clone(unfreeze=True)
+ stable_immutable_flags = profile_obj.stable_masked_use.clone(
+ unfreeze=True
+ )
stable_immutable_flags.add_bare_global((), default_masked_use)
stable_immutable_flags.optimize(cache=chunked_data_cache)
stable_immutable_flags.freeze()
@@ -280,7 +308,9 @@ class ProfileAddon(caches.CachedAddon):
enabled_flags.optimize(cache=chunked_data_cache)
enabled_flags.freeze()
- stable_enabled_flags = profile_obj.stable_forced_use.clone(unfreeze=True)
+ stable_enabled_flags = profile_obj.stable_forced_use.clone(
+ unfreeze=True
+ )
stable_enabled_flags.add_bare_global((), (stable_key,))
stable_enabled_flags.optimize(cache=chunked_data_cache)
stable_enabled_flags.freeze()
@@ -290,25 +320,28 @@ class ProfileAddon(caches.CachedAddon):
provides_repo = profile_obj.provides_repo
# finalize enabled USE flags
- use = frozenset(misc.incremental_expansion(
- profile_obj.use, msg_prefix='while expanding USE'))
+ use = frozenset(
+ misc.incremental_expansion(
+ profile_obj.use, msg_prefix="while expanding USE"
+ )
+ )
except profiles_mod.ProfileError:
# unsupported EAPI or other issue, profile checks will catch this
continue
- cached_profiles[profile.base]['update'] = True
+ cached_profiles[profile.base]["update"] = True
cached_profiles[profile.base][profile.path] = {
- 'files': files,
- 'masks': masks,
- 'unmasks': unmasks,
- 'immutable_flags': immutable_flags,
- 'stable_immutable_flags': stable_immutable_flags,
- 'enabled_flags': enabled_flags,
- 'stable_enabled_flags': stable_enabled_flags,
- 'pkg_use': pkg_use,
- 'iuse_effective': iuse_effective,
- 'use': use,
- 'provides_repo': provides_repo,
+ "files": files,
+ "masks": masks,
+ "unmasks": unmasks,
+ "immutable_flags": immutable_flags,
+ "stable_immutable_flags": stable_immutable_flags,
+ "enabled_flags": enabled_flags,
+ "stable_enabled_flags": stable_enabled_flags,
+ "pkg_use": pkg_use,
+ "iuse_effective": iuse_effective,
+ "use": use,
+ "provides_repo": provides_repo,
}
# used to interlink stable/unstable lookups so that if
@@ -323,50 +356,63 @@ class ProfileAddon(caches.CachedAddon):
        # note that the cache/insoluble are inversely paired;
# stable cache is usable for unstable, but not vice versa.
# unstable insoluble is usable for stable, but not vice versa
- vfilter = domain.generate_filter(self.target_repo.pkg_masks | masks, unmasks)
- self.profile_filters.setdefault(stable_key, []).append(ProfileData(
- repo.repo_id,
- profile.path, stable_key,
- provides_repo,
- packages.AndRestriction(vfilter, stable_r),
- iuse_effective,
- use,
- pkg_use,
- stable_immutable_flags, stable_enabled_flags,
- stable_cache,
- ProtectedSet(unstable_insoluble),
- profile.status,
- profile.deprecated))
-
- self.profile_filters.setdefault(unstable_key, []).append(ProfileData(
- repo.repo_id,
- profile.path, unstable_key,
- provides_repo,
- packages.AndRestriction(vfilter, unstable_r),
- iuse_effective,
- use,
- pkg_use,
- immutable_flags, enabled_flags,
- ProtectedSet(stable_cache),
- unstable_insoluble,
- profile.status,
- profile.deprecated))
+ vfilter = domain.generate_filter(
+ self.target_repo.pkg_masks | masks, unmasks
+ )
+ self.profile_filters.setdefault(stable_key, []).append(
+ ProfileData(
+ repo.repo_id,
+ profile.path,
+ stable_key,
+ provides_repo,
+ packages.AndRestriction(vfilter, stable_r),
+ iuse_effective,
+ use,
+ pkg_use,
+ stable_immutable_flags,
+ stable_enabled_flags,
+ stable_cache,
+ ProtectedSet(unstable_insoluble),
+ profile.status,
+ profile.deprecated,
+ )
+ )
+
+ self.profile_filters.setdefault(unstable_key, []).append(
+ ProfileData(
+ repo.repo_id,
+ profile.path,
+ unstable_key,
+ provides_repo,
+ packages.AndRestriction(vfilter, unstable_r),
+ iuse_effective,
+ use,
+ pkg_use,
+ immutable_flags,
+ enabled_flags,
+ ProtectedSet(stable_cache),
+ unstable_insoluble,
+ profile.status,
+ profile.deprecated,
+ )
+ )
# dump updated profile filters
for k, v in cached_profiles.items():
- if v.pop('update', False):
- repo = v.pop('repo')
+ if v.pop("update", False):
+ repo = v.pop("repo")
cache_file = self.cache_file(repo)
- cache = caches.DictCache(
- cached_profiles[repo.config.profiles_base], self.cache)
+ cache = caches.DictCache(cached_profiles[repo.config.profiles_base], self.cache)
self.save_cache(cache, cache_file)
for key, profile_list in self.profile_filters.items():
similar = self.profile_evaluate_dict[key] = []
for profile in profile_list:
for existing in similar:
- if (existing[0].masked_use == profile.masked_use and
- existing[0].forced_use == profile.forced_use):
+ if (
+ existing[0].masked_use == profile.masked_use
+ and existing[0].forced_use == profile.forced_use
+ ):
existing.append(profile)
break
else:
@@ -377,7 +423,7 @@ class ProfileAddon(caches.CachedAddon):
# the use processing across each of 'em.
groups = []
keywords = pkg.keywords
- unstable_keywords = (f'~{x}' for x in keywords if x[0] != '~')
+ unstable_keywords = (f"~{x}" for x in keywords if x[0] != "~")
for key in chain(keywords, unstable_keywords):
if profile_grps := self.profile_evaluate_dict.get(key):
for profiles in profile_grps:
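
Illustrative only: the keyword expansion above probes profile_evaluate_dict with both the declared KEYWORDS and their derived unstable variants; the sample keywords are made up:

    from itertools import chain

    keywords = ("amd64", "~arm64")
    unstable_keywords = (f"~{x}" for x in keywords if x[0] != "~")
    print(list(chain(keywords, unstable_keywords)))
    # ['amd64', '~arm64', '~amd64']
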
diff --git a/src/pkgcheck/api.py b/src/pkgcheck/api.py
index bcf30234..c704f8c6 100644
--- a/src/pkgcheck/api.py
+++ b/src/pkgcheck/api.py
@@ -39,8 +39,8 @@ def scan(args=None, /, *, base_args=None):
if base_args is None:
base_args = []
- with patch('argparse.ArgumentParser.exit', parser_exit):
- options = pkgcheck.argparser.parse_args(base_args + ['scan'] + args)
+ with patch("argparse.ArgumentParser.exit", parser_exit):
+ options = pkgcheck.argparser.parse_args(base_args + ["scan"] + args)
return Pipeline(options)
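
A minimal sketch of driving the scan() wrapper above, assuming the package exports it as ``pkgcheck.scan``; the repo path is a placeholder:

    import pkgcheck

    for result in pkgcheck.scan(["--repo", "/path/to/repo"]):
        print(result)
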
diff --git a/src/pkgcheck/base.py b/src/pkgcheck/base.py
index fc77dee6..ac49dbe4 100644
--- a/src/pkgcheck/base.py
+++ b/src/pkgcheck/base.py
@@ -26,12 +26,13 @@ from snakeoil.mappings import ImmutableDict
@dataclass(frozen=True, eq=False)
class Scope:
"""Generic scope for scans, checks, and results."""
+
desc: str
level: int
_children: tuple = ()
def __str__(self):
- return f'{self.__class__.__name__}({self.desc!r})'
+ return f"{self.__class__.__name__}({self.desc!r})"
def __lt__(self, other):
if isinstance(other, Scope):
@@ -62,8 +63,8 @@ class Scope:
return hash(self.desc)
def __repr__(self):
- address = '@%#8x' % (id(self),)
- return f'<{self.__class__.__name__} desc={self.desc!r} {address}>'
+ address = "@%#8x" % (id(self),)
+ return f"<{self.__class__.__name__} desc={self.desc!r} {address}>"
def __contains__(self, key):
return self == key or key in self._children
@@ -80,37 +81,41 @@ class PackageScope(Scope):
@dataclass(repr=False, frozen=True, eq=False)
class ConditionalScope(Scope):
"""Scope for checks run only in certain circumstances."""
+
level: int = -99
@dataclass(repr=False, frozen=True, eq=False)
class LocationScope(Scope):
"""Scope for location-specific checks."""
+
level: int = 0
# pkg-related scopes (level increasing by granularity)
-repo_scope = PackageScope('repo', 1)
-category_scope = PackageScope('category', 2)
-package_scope = PackageScope('package', 3)
-version_scope = PackageScope('version', 4)
+repo_scope = PackageScope("repo", 1)
+category_scope = PackageScope("category", 2)
+package_scope = PackageScope("package", 3)
+version_scope = PackageScope("version", 4)
# conditional (negative level) and location-specific scopes (zero level)
-commit_scope = ConditionalScope('commit')
-profile_node_scope = LocationScope('profile_node')
-profiles_scope = LocationScope('profiles', 0, (profile_node_scope,))
-eclass_scope = LocationScope('eclass')
+commit_scope = ConditionalScope("commit")
+profile_node_scope = LocationScope("profile_node")
+profiles_scope = LocationScope("profiles", 0, (profile_node_scope,))
+eclass_scope = LocationScope("eclass")
# mapping for -S/--scopes option, ordered for sorted output in the case of unknown scopes
-scopes = ImmutableDict({
- 'git': commit_scope,
- 'profiles': profiles_scope,
- 'eclass': eclass_scope,
- 'repo': repo_scope,
- 'cat': category_scope,
- 'pkg': package_scope,
- 'ver': version_scope,
-})
+scopes = ImmutableDict(
+ {
+ "git": commit_scope,
+ "profiles": profiles_scope,
+ "eclass": eclass_scope,
+ "repo": repo_scope,
+ "cat": category_scope,
+ "pkg": package_scope,
+ "ver": version_scope,
+ }
+)
class PkgcheckException(Exception):
@@ -182,12 +187,13 @@ def param_name(cls):
For example, GitAddon -> git_addon and GitCache -> git_cache.
"""
- return re.sub(r'([a-z])([A-Z])', r'\1_\2', cls.__name__).lower()
+ return re.sub(r"([a-z])([A-Z])", r"\1_\2", cls.__name__).lower()
@dataclass(frozen=True)
class LogMap:
"""Log function to callable mapping."""
+
func: str
call: typing.Callable
@@ -223,7 +229,7 @@ class ProgressManager(AbstractContextManager):
"""Callback used for progressive output."""
# avoid rewriting the same output
if s != self._cached:
- sys.stderr.write(f'{s}\r')
+ sys.stderr.write(f"{s}\r")
self._cached = s
def __enter__(self):
@@ -233,4 +239,4 @@ class ProgressManager(AbstractContextManager):
def __exit__(self, _exc_type, _exc_value, _traceback):
if self._cached is not None:
- sys.stderr.write('\n')
+ sys.stderr.write("\n")
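
For illustration, how the Scope ordering and containment defined above behave when pkgcheck.base is imported directly (an assumption, not shown in this diff):

    from pkgcheck import base

    assert base.repo_scope < base.version_scope  # compares levels: 1 < 4
    assert base.profile_node_scope in base.profiles_scope  # via _children
    print(base.scopes["ver"])  # PackageScope('version')
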
diff --git a/src/pkgcheck/bash/__init__.py b/src/pkgcheck/bash/__init__.py
index 6faf2bb5..38f9424d 100644
--- a/src/pkgcheck/bash/__init__.py
+++ b/src/pkgcheck/bash/__init__.py
@@ -11,10 +11,10 @@ from .. import const
from ctypes.util import find_library
# path to bash parsing library on the system (may be None)
-syslib = find_library('tree-sitter-bash')
+syslib = find_library("tree-sitter-bash")
# path to bash parsing library (vendored)
-lib = pjoin(os.path.dirname(__file__), 'lang.so')
+lib = pjoin(os.path.dirname(__file__), "lang.so")
# copied from tree-sitter with the following changes:
# - prefer stdc++ over c++ when linking
@@ -50,9 +50,7 @@ def build_library(output_path, repo_paths): # pragma: no cover
source_paths.append(path.join(src_path, "scanner.cc"))
elif path.exists(path.join(src_path, "scanner.c")):
source_paths.append(path.join(src_path, "scanner.c"))
- source_mtimes = [path.getmtime(__file__)] + [
- path.getmtime(path_) for path_ in source_paths
- ]
+ source_mtimes = [path.getmtime(__file__)] + [path.getmtime(path_) for path_ in source_paths]
compiler = new_compiler()
# force `c++` compiler so the appropriate standard library is used
@@ -91,21 +89,25 @@ try:
from .. import _const
except ImportError: # pragma: no cover
# build library when running from git repo or tarball
- if syslib is None and not os.path.exists(lib) and 'tree-sitter-bash' in os.listdir(const.REPO_PATH):
- bash_src = pjoin(const.REPO_PATH, 'tree-sitter-bash')
+ if (
+ syslib is None
+ and not os.path.exists(lib)
+ and "tree-sitter-bash" in os.listdir(const.REPO_PATH)
+ ):
+ bash_src = pjoin(const.REPO_PATH, "tree-sitter-bash")
build_library(lib, [bash_src])
if syslib is not None or os.path.exists(lib):
- lang = Language(syslib or lib, 'bash')
+ lang = Language(syslib or lib, "bash")
query = partial(lang.query)
parser = Parser()
parser.set_language(lang)
# various parse tree queries
- cmd_query = query('(command) @call')
- func_query = query('(function_definition) @func')
- var_assign_query = query('(variable_assignment) @assign')
- var_query = query('(variable_name) @var')
+ cmd_query = query("(command) @call")
+ func_query = query("(function_definition) @func")
+ var_assign_query = query("(variable_assignment) @assign")
+ var_query = query("(variable_name) @var")
class ParseTree:
@@ -118,13 +120,13 @@ class ParseTree:
def node_str(self, node):
"""Return the ebuild string associated with a given parse tree node."""
- return self.data[node.start_byte:node.end_byte].decode('utf8')
+ return self.data[node.start_byte : node.end_byte].decode("utf8")
def global_query(self, query):
"""Run a given parse tree query returning only those nodes in global scope."""
for x in self.tree.root_node.children:
# skip nodes in function scope
- if x.type != 'function_definition':
+ if x.type != "function_definition":
for node, _ in query.captures(x):
yield node
@@ -132,6 +134,6 @@ class ParseTree:
"""Run a given parse tree query returning only those nodes in function scope."""
for x in self.tree.root_node.children:
# only return nodes in function scope
- if x.type == 'function_definition':
+ if x.type == "function_definition":
for node, _ in query.captures(x):
yield node
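
A hedged sketch of exercising the queries above; it assumes the bash grammar library was found or built, so ``parser`` and the query objects exist:

    from pkgcheck import bash

    data = b'DESCRIPTION="demo"\nsrc_install() { dodoc README; }\n'
    tree = bash.parser.parse(data)
    for node, _ in bash.var_assign_query.captures(tree.root_node):
        print(data[node.start_byte : node.end_byte].decode("utf8"))
    # DESCRIPTION="demo"
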
diff --git a/src/pkgcheck/checks/__init__.py b/src/pkgcheck/checks/__init__.py
index f0959257..b5caa244 100644
--- a/src/pkgcheck/checks/__init__.py
+++ b/src/pkgcheck/checks/__init__.py
@@ -42,13 +42,13 @@ class Check(feeds.Feed):
return (
sources.FilteredRepoSource,
(sources.LatestVersionsFilter,),
- (('source', self._source),)
+ (("source", self._source),),
)
elif max(x.scope for x in self.known_results) >= base.version_scope:
return (
sources.FilteredPackageRepoSource,
(sources.LatestPkgsFilter,),
- (('source', self._source),)
+ (("source", self._source),),
)
return self._source
@@ -79,9 +79,9 @@ class GentooRepoCheck(Check):
if not self.options.gentoo_repo:
check = self.__class__.__name__
if check in self.options.selected_checks:
- self.options.override_skip['gentoo'].append(check)
+ self.options.override_skip["gentoo"].append(check)
else:
- raise SkipCheck(self, 'not running against gentoo repo')
+ raise SkipCheck(self, "not running against gentoo repo")
class OverlayRepoCheck(Check):
@@ -90,7 +90,7 @@ class OverlayRepoCheck(Check):
def __init__(self, *args):
super().__init__(*args)
if not self.options.target_repo.masters:
- raise SkipCheck(self, 'not running against overlay')
+ raise SkipCheck(self, "not running against overlay")
class OptionalCheck(Check):
@@ -105,7 +105,7 @@ class GitCommitsCheck(OptionalCheck):
def __init__(self, *args):
super().__init__(*args)
if not self.options.commits:
- raise SkipCheck(self, 'not scanning against git commits')
+ raise SkipCheck(self, "not scanning against git commits")
class AsyncCheck(Check):
@@ -126,7 +126,7 @@ class NetworkCheck(AsyncCheck, OptionalCheck):
def __init__(self, *args, net_addon, **kwargs):
super().__init__(*args, **kwargs)
if not self.options.net:
- raise SkipCheck(self, 'network checks not enabled')
+ raise SkipCheck(self, "network checks not enabled")
self.timeout = self.options.timeout
self.session = net_addon.session
@@ -138,13 +138,15 @@ class MirrorsCheck(Check):
def __init__(self, *args, use_addon):
super().__init__(*args)
- self.iuse_filter = use_addon.get_filter('fetchables')
+ self.iuse_filter = use_addon.get_filter("fetchables")
def get_mirrors(self, pkg):
mirrors = []
fetchables, _ = self.iuse_filter(
- (fetch.fetchable,), pkg,
- pkg.generate_fetchables(allow_missing_checksums=True, ignore_unknown_mirrors=True))
+ (fetch.fetchable,),
+ pkg,
+ pkg.generate_fetchables(allow_missing_checksums=True, ignore_unknown_mirrors=True),
+ )
for f in fetchables:
for m in f.uri.visit_mirrors(treat_default_as_mirror=False):
mirrors.append(m[0].mirror_name)
@@ -164,7 +166,7 @@ class SkipCheck(base.PkgcheckUserException):
else:
# assume the check param is a raw class object
check_name = check.__name__
- super().__init__(f'{check_name}: {msg}')
+ super().__init__(f"{check_name}: {msg}")
def init_checks(enabled_addons, options, results_q, *, addons_map=None, source_map=None):
@@ -205,7 +207,7 @@ def init_checks(enabled_addons, options, results_q, *, addons_map=None, source_m
# report which check skips were overridden
for skip_type, checks in sorted(options.override_skip.items()):
s = pluralism(checks)
- checks_str = ', '.join(sorted(checks))
+ checks_str = ", ".join(sorted(checks))
logger.warning(f"running {skip_type} specific check{s}: {checks_str}")
return enabled
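
The skeleton every concrete check below follows, shown with invented MyResult/MyCheck names as a hypothetical example:

    from pkgcheck import results, sources
    from pkgcheck.checks import Check

    class MyResult(results.VersionResult, results.Warning):
        """Hypothetical result."""

        @property
        def desc(self):
            return "example issue"

    class MyCheck(Check):
        """Hypothetical check."""

        _source = sources.EbuildFileRepoSource
        known_results = frozenset([MyResult])

        def feed(self, pkg):
            yield MyResult(pkg=pkg)
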
diff --git a/src/pkgcheck/checks/acct.py b/src/pkgcheck/checks/acct.py
index 4f144023..30953c89 100644
--- a/src/pkgcheck/checks/acct.py
+++ b/src/pkgcheck/checks/acct.py
@@ -37,7 +37,7 @@ class ConflictingAccountIdentifiers(results.Error):
@property
def desc(self):
- pkgs = ', '.join(self.pkgs)
+ pkgs = ", ".join(self.pkgs)
return f"conflicting {self.kind} id {self.identifier} usage: [ {pkgs} ]"
@@ -55,8 +55,7 @@ class OutsideRangeAccountIdentifier(results.VersionResult, results.Error):
@property
def desc(self):
- return (f"{self.kind} id {self.identifier} outside permitted "
- f"static allocation range")
+ return f"{self.kind} id {self.identifier} outside permitted static allocation range"
class AcctCheck(GentooRepoCheck, RepoCheck):
@@ -71,33 +70,43 @@ class AcctCheck(GentooRepoCheck, RepoCheck):
exist or is wrongly defined, this check is skipped.
"""
- _restricted_source = (sources.RestrictionRepoSource, (packages.OrRestriction(*(
- restricts.CategoryDep('acct-user'), restricts.CategoryDep('acct-group'))),))
- _source = (sources.RepositoryRepoSource, (), (('source', _restricted_source),))
- known_results = frozenset([
- MissingAccountIdentifier, ConflictingAccountIdentifiers,
- OutsideRangeAccountIdentifier,
- ])
+ _restricted_source = (
+ sources.RestrictionRepoSource,
+ (
+ packages.OrRestriction(
+ *(restricts.CategoryDep("acct-user"), restricts.CategoryDep("acct-group"))
+ ),
+ ),
+ )
+ _source = (sources.RepositoryRepoSource, (), (("source", _restricted_source),))
+ known_results = frozenset(
+ [
+ MissingAccountIdentifier,
+ ConflictingAccountIdentifiers,
+ OutsideRangeAccountIdentifier,
+ ]
+ )
def __init__(self, *args):
super().__init__(*args)
self.id_re = re.compile(
- r'ACCT_(?P<var>USER|GROUP)_ID=(?P<quot>[\'"]?)(?P<id>[0-9]+)(?P=quot)')
+ r'ACCT_(?P<var>USER|GROUP)_ID=(?P<quot>[\'"]?)(?P<id>[0-9]+)(?P=quot)'
+ )
self.seen_uids = defaultdict(partial(defaultdict, list))
self.seen_gids = defaultdict(partial(defaultdict, list))
uid_range, gid_range = self.load_ids_from_configuration(self.options.target_repo)
self.category_map = {
- 'acct-user': (self.seen_uids, 'USER', tuple(uid_range)),
- 'acct-group': (self.seen_gids, 'GROUP', tuple(gid_range)),
+ "acct-user": (self.seen_uids, "USER", tuple(uid_range)),
+ "acct-group": (self.seen_gids, "GROUP", tuple(gid_range)),
}
def parse_config_id_range(self, config: ConfigParser, config_key: str):
- id_ranges = config['user-group-ids'].get(config_key, None)
+ id_ranges = config["user-group-ids"].get(config_key, None)
if not id_ranges:
raise SkipCheck(self, f"metadata/qa-policy.conf: missing value for {config_key}")
try:
- for id_range in map(str.strip, id_ranges.split(',')):
- start, *end = map(int, id_range.split('-', maxsplit=1))
+ for id_range in map(str.strip, id_ranges.split(",")):
+ start, *end = map(int, id_range.split("-", maxsplit=1))
if len(end) == 0:
yield range(start, start + 1)
else:
@@ -107,11 +116,13 @@ class AcctCheck(GentooRepoCheck, RepoCheck):
def load_ids_from_configuration(self, repo):
config = ConfigParser()
- if not config.read(pjoin(repo.location, 'metadata', 'qa-policy.conf')):
+ if not config.read(pjoin(repo.location, "metadata", "qa-policy.conf")):
raise SkipCheck(self, "failed loading 'metadata/qa-policy.conf'")
- if 'user-group-ids' not in config:
+ if "user-group-ids" not in config:
raise SkipCheck(self, "metadata/qa-policy.conf: missing section user-group-ids")
- return self.parse_config_id_range(config, 'uid-range'), self.parse_config_id_range(config, 'gid-range')
+ return self.parse_config_id_range(config, "uid-range"), self.parse_config_id_range(
+ config, "gid-range"
+ )
def feed(self, pkg):
try:
@@ -121,8 +132,8 @@ class AcctCheck(GentooRepoCheck, RepoCheck):
for line in pkg.ebuild.text_fileobj():
m = self.id_re.match(line)
- if m is not None and m.group('var') == expected_var:
- found_id = int(m.group('id'))
+ if m is not None and m.group("var") == expected_var:
+ found_id = int(m.group("id"))
break
else:
yield MissingAccountIdentifier(f"ACCT_{expected_var}_ID", pkg=pkg)
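
A standalone illustration of the ACCT_*_ID matching and the ``user-group-ids`` range syntax handled above; the sample values are invented:

    import re

    id_re = re.compile(
        r'ACCT_(?P<var>USER|GROUP)_ID=(?P<quot>[\'"]?)(?P<id>[0-9]+)(?P=quot)'
    )
    m = id_re.match('ACCT_USER_ID="123"')
    print(m.group("var"), m.group("id"))  # USER 123

    # qa-policy.conf style ranges, e.g. uid-range = 0-499,65534
    for id_range in map(str.strip, "0-499,65534".split(",")):
        start, *end = map(int, id_range.split("-", maxsplit=1))
        print(range(start, (end[0] + 1) if end else start + 1))
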
diff --git a/src/pkgcheck/checks/cleanup.py b/src/pkgcheck/checks/cleanup.py
index 076d56be..6a42a42f 100644
--- a/src/pkgcheck/checks/cleanup.py
+++ b/src/pkgcheck/checks/cleanup.py
@@ -18,8 +18,8 @@ class RedundantVersion(results.VersionResult, results.Info):
@property
def desc(self):
s = pluralism(self.later_versions)
- versions = ', '.join(self.later_versions)
- return f'slot({self.slot}) keywords are overshadowed by version{s}: {versions}'
+ versions = ", ".join(self.later_versions)
+ return f"slot({self.slot}) keywords are overshadowed by version{s}: {versions}"
class RedundantVersionCheck(Check):
@@ -40,38 +40,45 @@ class RedundantVersionCheck(Check):
@staticmethod
def mangle_argparser(parser):
parser.plugin.add_argument(
- '--stable-only', action='store_true',
- help='consider redundant versions only within stable',
+ "--stable-only",
+ action="store_true",
+ help="consider redundant versions only within stable",
docs="""
If enabled, for each slot, only consider redundant versions
with stable keywords. This is useful for cases of cleanup after
successful stabilization.
- """)
+ """,
+ )
def __init__(self, *args, profile_addon):
super().__init__(*args)
self.keywords_profiles = {
- keyword: sorted(profiles, key=attrgetter('name'))
- for keyword, profiles in profile_addon.items()}
+ keyword: sorted(profiles, key=attrgetter("name"))
+ for keyword, profiles in profile_addon.items()
+ }
def filter_later_profiles_masks(self, visible_cache, pkg, later_versions):
# check both stable/unstable profiles for stable KEYWORDS and only
# unstable profiles for unstable KEYWORDS
keywords = []
for keyword in pkg.sorted_keywords:
- if keyword[0] != '~':
- keywords.append('~' + keyword)
+ if keyword[0] != "~":
+ keywords.append("~" + keyword)
keywords.append(keyword)
# if a profile exists, where the package is visible, but the later aren't
# then it isn't redundant
- visible_profiles = tuple(profile
+ visible_profiles = tuple(
+ profile
for keyword in keywords
for profile in self.keywords_profiles.get(keyword, ())
- if visible_cache[(profile, pkg)])
+ if visible_cache[(profile, pkg)]
+ )
return tuple(
- later for later in later_versions
- if all(visible_cache[(profile, later)] for profile in visible_profiles))
+ later
+ for later in later_versions
+ if all(visible_cache[(profile, later)] for profile in visible_profiles)
+ )
def feed(self, pkgset):
if len(pkgset) == 1:
@@ -91,8 +98,9 @@ class RedundantVersionCheck(Check):
if not curr_set:
continue
- matches = [ver for ver, keys in stack if ver.slot == pkg.slot and
- not curr_set.difference(keys)]
+ matches = [
+ ver for ver, keys in stack if ver.slot == pkg.slot and not curr_set.difference(keys)
+ ]
# we've done our checks; now we inject unstable for any stable
# via this, earlier versions that are unstable only get flagged
@@ -100,7 +108,7 @@ class RedundantVersionCheck(Check):
# stable.
        # also, yes, have to use list comp here - we're adding as we go
- curr_set.update([f'~{x}' for x in curr_set if not x.startswith('~')])
+ curr_set.update([f"~{x}" for x in curr_set if not x.startswith("~")])
stack.append((pkg, curr_set))
if matches:
@@ -108,7 +116,9 @@ class RedundantVersionCheck(Check):
visible_cache = defaultdictkey(lambda profile_pkg: profile_pkg[0].visible(profile_pkg[1]))
for pkg, matches in reversed(bad):
- if self.options.stable_only and all(key.startswith('~') for x in matches for key in x.keywords):
+ if self.options.stable_only and all(
+ key.startswith("~") for x in matches for key in x.keywords
+ ):
continue
if matches := self.filter_later_profiles_masks(visible_cache, pkg, matches):
later_versions = (x.fullver for x in sorted(matches))
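
Illustrative only: the unstable-keyword injection used in feed() above, run on a made-up keyword set:

    curr_set = {"amd64", "~arm64"}
    curr_set.update([f"~{x}" for x in curr_set if not x.startswith("~")])
    print(sorted(curr_set))  # ['amd64', '~amd64', '~arm64']
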
diff --git a/src/pkgcheck/checks/codingstyle.py b/src/pkgcheck/checks/codingstyle.py
index a7d64aca..6d3e53ca 100644
--- a/src/pkgcheck/checks/codingstyle.py
+++ b/src/pkgcheck/checks/codingstyle.py
@@ -12,8 +12,8 @@ from .. import addons, bash
from .. import results, sources
from . import Check
-PREFIX_VARIABLES = ('EROOT', 'ED', 'EPREFIX')
-PATH_VARIABLES = ('BROOT', 'ROOT', 'D') + PREFIX_VARIABLES
+PREFIX_VARIABLES = ("EROOT", "ED", "EPREFIX")
+PATH_VARIABLES = ("BROOT", "ROOT", "D") + PREFIX_VARIABLES
class _CommandResult(results.LineResult):
@@ -25,13 +25,13 @@ class _CommandResult(results.LineResult):
@property
def usage_desc(self):
- return f'{self.command!r}'
+ return f"{self.command!r}"
@property
def desc(self):
- s = f'{self.usage_desc}, used on line {self.lineno}'
+ s = f"{self.usage_desc}, used on line {self.lineno}"
if self.line != self.command:
- s += f': {self.line!r}'
+ s += f": {self.line!r}"
return s
@@ -46,19 +46,19 @@ class _EapiCommandResult(_CommandResult):
@property
def usage_desc(self):
- return f'{self.command!r} {self._status} in EAPI {self.eapi}'
+ return f"{self.command!r} {self._status} in EAPI {self.eapi}"
class DeprecatedEapiCommand(_EapiCommandResult, results.Warning):
"""Ebuild uses a deprecated EAPI command."""
- _status = 'deprecated'
+ _status = "deprecated"
class BannedEapiCommand(_EapiCommandResult, results.Error):
"""Ebuild uses a banned EAPI command."""
- _status = 'banned'
+ _status = "banned"
class BadCommandsCheck(Check):
@@ -71,12 +71,16 @@ class BadCommandsCheck(Check):
for func_node, _ in bash.func_query.captures(pkg.tree.root_node):
for node, _ in bash.cmd_query.captures(func_node):
call = pkg.node_str(node)
- name = pkg.node_str(node.child_by_field_name('name'))
+ name = pkg.node_str(node.child_by_field_name("name"))
lineno, colno = node.start_point
if name in pkg.eapi.bash_cmds_banned:
- yield BannedEapiCommand(name, line=call, lineno=lineno+1, eapi=pkg.eapi, pkg=pkg)
+ yield BannedEapiCommand(
+ name, line=call, lineno=lineno + 1, eapi=pkg.eapi, pkg=pkg
+ )
elif name in pkg.eapi.bash_cmds_deprecated:
- yield DeprecatedEapiCommand(name, line=call, lineno=lineno+1, eapi=pkg.eapi, pkg=pkg)
+ yield DeprecatedEapiCommand(
+ name, line=call, lineno=lineno + 1, eapi=pkg.eapi, pkg=pkg
+ )
class EendMissingArg(results.LineResult, results.Warning):
@@ -84,7 +88,7 @@ class EendMissingArg(results.LineResult, results.Warning):
@property
def desc(self):
- return f'eend with no arguments, on line {self.lineno}'
+ return f"eend with no arguments, on line {self.lineno}"
class EendMissingArgCheck(Check):
@@ -99,7 +103,7 @@ class EendMissingArgCheck(Check):
line = pkg.node_str(node)
if line == "eend":
lineno, _ = node.start_point
- yield EendMissingArg(line=line, lineno=lineno+1, pkg=pkg)
+ yield EendMissingArg(line=line, lineno=lineno + 1, pkg=pkg)
class MissingSlash(results.LinesResult, results.Error):
@@ -111,7 +115,7 @@ class MissingSlash(results.LinesResult, results.Error):
@property
def desc(self):
- return f'{self.match} missing trailing slash {self.lines_str}'
+ return f"{self.match} missing trailing slash {self.lines_str}"
class UnnecessarySlashStrip(results.LinesResult, results.Style):
@@ -123,7 +127,7 @@ class UnnecessarySlashStrip(results.LinesResult, results.Style):
@property
def desc(self):
- return f'{self.match} unnecessary slash strip {self.lines_str}'
+ return f"{self.match} unnecessary slash strip {self.lines_str}"
class DoublePrefixInPath(results.LinesResult, results.Error):
@@ -143,7 +147,7 @@ class DoublePrefixInPath(results.LinesResult, results.Error):
@property
def desc(self):
- return f'{self.match}: concatenates two paths containing EPREFIX {self.lines_str}'
+ return f"{self.match}: concatenates two paths containing EPREFIX {self.lines_str}"
class PathVariablesCheck(Check):
@@ -152,63 +156,84 @@ class PathVariablesCheck(Check):
_source = sources.EbuildFileRepoSource
known_results = frozenset([MissingSlash, UnnecessarySlashStrip, DoublePrefixInPath])
prefixed_dir_functions = (
- 'insinto', 'exeinto',
- 'dodir', 'keepdir',
- 'fowners', 'fperms',
+ "insinto",
+ "exeinto",
+ "dodir",
+ "keepdir",
+ "fowners",
+ "fperms",
# java-pkg-2
- 'java-pkg_jarinto', 'java-pkg_sointo',
+ "java-pkg_jarinto",
+ "java-pkg_sointo",
# python-utils-r1
- 'python_scriptinto', 'python_moduleinto',
+ "python_scriptinto",
+ "python_moduleinto",
)
# TODO: add variables to mark this status in the eclasses in order to pull
# this data from parsed eclass docs
prefixed_getters = (
# bash-completion-r1.eclass
- 'get_bashcompdir', 'get_bashhelpersdir',
+ "get_bashcompdir",
+ "get_bashhelpersdir",
# db-use.eclass
- 'db_includedir',
+ "db_includedir",
# golang-base.eclass
- 'get_golibdir_gopath',
+ "get_golibdir_gopath",
# llvm.eclass
- 'get_llvm_prefix',
+ "get_llvm_prefix",
# python-utils-r1.eclass
- 'python_get_sitedir', 'python_get_includedir',
- 'python_get_library_path', 'python_get_scriptdir',
+ "python_get_sitedir",
+ "python_get_includedir",
+ "python_get_library_path",
+ "python_get_scriptdir",
# qmake-utils.eclass
- 'qt4_get_bindir', 'qt5_get_bindir',
+ "qt4_get_bindir",
+ "qt5_get_bindir",
# s6.eclass
- 's6_get_servicedir',
+ "s6_get_servicedir",
# systemd.eclass
- 'systemd_get_systemunitdir', 'systemd_get_userunitdir',
- 'systemd_get_utildir', 'systemd_get_systemgeneratordir',
+ "systemd_get_systemunitdir",
+ "systemd_get_userunitdir",
+ "systemd_get_utildir",
+ "systemd_get_systemgeneratordir",
)
prefixed_rhs_variables = (
# catch silly ${ED}${EPREFIX} mistake ;-)
- 'EPREFIX',
+ "EPREFIX",
# python-utils-r1.eclass
- 'PYTHON', 'PYTHON_SITEDIR', 'PYTHON_INCLUDEDIR', 'PYTHON_LIBPATH',
- 'PYTHON_CONFIG', 'PYTHON_SCRIPTDIR',
+ "PYTHON",
+ "PYTHON_SITEDIR",
+ "PYTHON_INCLUDEDIR",
+ "PYTHON_LIBPATH",
+ "PYTHON_CONFIG",
+ "PYTHON_SCRIPTDIR",
)
def __init__(self, *args):
super().__init__(*args)
- self.missing_regex = re.compile(r'(\${(%s)})"?\w+/' % r'|'.join(PATH_VARIABLES))
- self.unnecessary_regex = re.compile(r'(\${(%s)%%/})' % r'|'.join(PATH_VARIABLES))
+ self.missing_regex = re.compile(r'(\${(%s)})"?\w+/' % r"|".join(PATH_VARIABLES))
+ self.unnecessary_regex = re.compile(r"(\${(%s)%%/})" % r"|".join(PATH_VARIABLES))
self.double_prefix_regex = re.compile(
- r'(\${(%s)(%%/)?}/?\$(\((%s)\)|{(%s)}))' % (
- r'|'.join(PREFIX_VARIABLES),
- r'|'.join(self.prefixed_getters),
- r'|'.join(self.prefixed_rhs_variables)))
+ r"(\${(%s)(%%/)?}/?\$(\((%s)\)|{(%s)}))"
+ % (
+ r"|".join(PREFIX_VARIABLES),
+ r"|".join(self.prefixed_getters),
+ r"|".join(self.prefixed_rhs_variables),
+ )
+ )
self.double_prefix_func_regex = re.compile(
- r'\b(%s)\s[^&|;]*\$(\((%s)\)|{(%s)})' % (
- r'|'.join(self.prefixed_dir_functions),
- r'|'.join(self.prefixed_getters),
- r'|'.join(self.prefixed_rhs_variables)))
+ r"\b(%s)\s[^&|;]*\$(\((%s)\)|{(%s)})"
+ % (
+ r"|".join(self.prefixed_dir_functions),
+ r"|".join(self.prefixed_getters),
+ r"|".join(self.prefixed_rhs_variables),
+ )
+ )
# do not catch ${foo#${EPREFIX}} and similar
self.double_prefix_func_false_positive_regex = re.compile(
- r'.*?[#]["]?\$(\((%s)\)|{(%s)})' % (
- r'|'.join(self.prefixed_getters),
- r'|'.join(self.prefixed_rhs_variables)))
+ r'.*?[#]["]?\$(\((%s)\)|{(%s)})'
+ % (r"|".join(self.prefixed_getters), r"|".join(self.prefixed_rhs_variables))
+ )
def feed(self, pkg):
missing = defaultdict(list)
@@ -221,7 +246,7 @@ class PathVariablesCheck(Check):
continue
# flag double path prefix usage on uncommented lines only
- if line[0] != '#':
+ if line[0] != "#":
if mo := self.double_prefix_regex.search(line):
double_prefix[mo.group(1)].append(lineno)
if mo := self.double_prefix_func_regex.search(line):
@@ -262,22 +287,22 @@ class AbsoluteSymlinkCheck(Check):
_source = sources.EbuildFileRepoSource
known_results = frozenset([AbsoluteSymlink])
- DIRS = ('bin', 'etc', 'lib', 'opt', 'sbin', 'srv', 'usr', 'var')
+ DIRS = ("bin", "etc", "lib", "opt", "sbin", "srv", "usr", "var")
def __init__(self, *args):
super().__init__(*args)
- dirs = '|'.join(self.DIRS)
- path_vars = '|'.join(PATH_VARIABLES)
+ dirs = "|".join(self.DIRS)
+ path_vars = "|".join(PATH_VARIABLES)
prefixed_regex = rf'"\${{({path_vars})(%/)?}}(?P<cp>")?(?(cp)\S*|.*?")'
non_prefixed_regex = rf'(?P<op>["\'])?/({dirs})(?(op).*?(?P=op)|\S*)'
- self.regex = re.compile(rf'^\s*(?P<cmd>dosym\s+({prefixed_regex}|{non_prefixed_regex}))')
+ self.regex = re.compile(rf"^\s*(?P<cmd>dosym\s+({prefixed_regex}|{non_prefixed_regex}))")
def feed(self, pkg):
for lineno, line in enumerate(pkg.lines, 1):
if not line.strip():
continue
if mo := self.regex.match(line):
- yield AbsoluteSymlink(mo.group('cmd'), line=line, lineno=lineno, pkg=pkg)
+ yield AbsoluteSymlink(mo.group("cmd"), line=line, lineno=lineno, pkg=pkg)
class DeprecatedInsinto(results.LineResult, results.Warning):
@@ -290,8 +315,8 @@ class DeprecatedInsinto(results.LineResult, results.Warning):
@property
def desc(self):
return (
- f'deprecated insinto usage (use {self.cmd} instead), '
- f'line {self.lineno}: {self.line}'
+ f"deprecated insinto usage (use {self.cmd} instead), "
+ f"line {self.lineno}: {self.line}"
)
@@ -301,21 +326,25 @@ class InsintoCheck(Check):
_source = sources.EbuildFileRepoSource
known_results = frozenset([DeprecatedInsinto])
- path_mapping = ImmutableDict({
- '/etc/conf.d': 'doconfd or newconfd',
- '/etc/env.d': 'doenvd or newenvd',
- '/etc/init.d': 'doinitd or newinitd',
- '/etc/pam.d': 'dopamd or newpamd from pam.eclass',
- '/usr/share/applications': 'domenu or newmenu from desktop.eclass',
- })
+ path_mapping = ImmutableDict(
+ {
+ "/etc/conf.d": "doconfd or newconfd",
+ "/etc/env.d": "doenvd or newenvd",
+ "/etc/init.d": "doinitd or newinitd",
+ "/etc/pam.d": "dopamd or newpamd from pam.eclass",
+ "/usr/share/applications": "domenu or newmenu from desktop.eclass",
+ }
+ )
def __init__(self, *args):
super().__init__(*args)
- paths = '|'.join(s.replace('/', '/+') + '/?' for s in self.path_mapping)
+ paths = "|".join(s.replace("/", "/+") + "/?" for s in self.path_mapping)
self._insinto_re = re.compile(
- rf'(?P<insinto>insinto[ \t]+(?P<path>{paths})(?!/\w+))(?:$|[/ \t])')
+ rf"(?P<insinto>insinto[ \t]+(?P<path>{paths})(?!/\w+))(?:$|[/ \t])"
+ )
self._insinto_doc_re = re.compile(
- r'(?P<insinto>insinto[ \t]+/usr/share/doc/(")?\$\{PF?\}(?(2)\2)(/\w+)*)(?:$|[/ \t])')
+ r'(?P<insinto>insinto[ \t]+/usr/share/doc/(")?\$\{PF?\}(?(2)\2)(/\w+)*)(?:$|[/ \t])'
+ )
def feed(self, pkg):
for lineno, line in enumerate(pkg.lines, 1):
@@ -323,10 +352,9 @@ class InsintoCheck(Check):
continue
matches = self._insinto_re.search(line)
if matches is not None:
- path = re.sub('//+', '/', matches.group('path'))
- cmd = self.path_mapping[path.rstrip('/')]
- yield DeprecatedInsinto(
- cmd, line=matches.group('insinto'), lineno=lineno, pkg=pkg)
+ path = re.sub("//+", "/", matches.group("path"))
+ cmd = self.path_mapping[path.rstrip("/")]
+ yield DeprecatedInsinto(cmd, line=matches.group("insinto"), lineno=lineno, pkg=pkg)
continue
# Check for insinto usage that should be replaced with
# docinto/dodoc [-r] under supported EAPIs.
@@ -334,8 +362,8 @@ class InsintoCheck(Check):
matches = self._insinto_doc_re.search(line)
if matches is not None:
yield DeprecatedInsinto(
- 'docinto/dodoc', line=matches.group('insinto'),
- lineno=lineno, pkg=pkg)
+ "docinto/dodoc", line=matches.group("insinto"), lineno=lineno, pkg=pkg
+ )
class ObsoleteUri(results.VersionResult, results.Style):
@@ -356,8 +384,10 @@ class ObsoleteUri(results.VersionResult, results.Style):
@property
def desc(self):
- return (f"obsolete fetch URI: {self.uri} on line "
- f"{self.line}, should be replaced by: {self.replacement}")
+ return (
+ f"obsolete fetch URI: {self.uri} on line "
+ f"{self.line}, should be replaced by: {self.replacement}"
+ )
class ObsoleteUriCheck(Check):
@@ -367,13 +397,17 @@ class ObsoleteUriCheck(Check):
known_results = frozenset([ObsoleteUri])
REGEXPS = (
- (r'.*\b(?P<uri>(?P<prefix>https?://github\.com/.*?/.*?/)'
- r'(?:tar|zip)ball(?P<ref>\S*))',
- r'\g<prefix>archive\g<ref>.tar.gz'),
- (r'.*\b(?P<uri>(?P<prefix>https?://gitlab\.com/.*?/(?P<pkg>.*?)/)'
- r'repository/archive\.(?P<format>tar|tar\.gz|tar\.bz2|zip)'
- r'\?ref=(?P<ref>\S*))',
- r'\g<prefix>-/archive/\g<ref>/\g<pkg>-\g<ref>.\g<format>'),
+ (
+ r".*\b(?P<uri>(?P<prefix>https?://github\.com/.*?/.*?/)"
+ r"(?:tar|zip)ball(?P<ref>\S*))",
+ r"\g<prefix>archive\g<ref>.tar.gz",
+ ),
+ (
+ r".*\b(?P<uri>(?P<prefix>https?://gitlab\.com/.*?/(?P<pkg>.*?)/)"
+ r"repository/archive\.(?P<format>tar|tar\.gz|tar\.bz2|zip)"
+ r"\?ref=(?P<ref>\S*))",
+ r"\g<prefix>-/archive/\g<ref>/\g<pkg>-\g<ref>.\g<format>",
+ ),
)
def __init__(self, *args):
@@ -382,12 +416,12 @@ class ObsoleteUriCheck(Check):
def feed(self, pkg):
for lineno, line in enumerate(pkg.lines, 1):
- if not line.strip() or line.startswith('#'):
+ if not line.strip() or line.startswith("#"):
continue
# searching for multiple matches on a single line is too slow
for regexp, repl in self.regexes:
if mo := regexp.match(line):
- uri = mo.group('uri')
+ uri = mo.group("uri")
yield ObsoleteUri(lineno, uri, regexp.sub(repl, uri), pkg=pkg)
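
As a worked example of the first REGEXPS entry: the named groups carry the stable parts of the URI into the replacement template. This sketch reuses the pattern and template verbatim on a hypothetical GitHub URI:

    import re

    pattern = re.compile(
        r".*\b(?P<uri>(?P<prefix>https?://github\.com/.*?/.*?/)"
        r"(?:tar|zip)ball(?P<ref>\S*))"
    )
    repl = r"\g<prefix>archive\g<ref>.tar.gz"
    uri = "https://github.com/foo/bar/tarball/v1.2.3"
    print(pattern.sub(repl, uri))
    # https://github.com/foo/bar/archive/v1.2.3.tar.gz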
@@ -405,8 +439,10 @@ class BetterCompressionUri(results.LineResult, results.Style):
@property
def desc(self):
- return (f"line {self.lineno}: better compression URI using extension "
- f"{self.replacement!r} for {self.line!r}")
+ return (
+ f"line {self.lineno}: better compression URI using extension "
+ f"{self.replacement!r} for {self.line!r}"
+ )
class BetterCompressionCheck(Check):
@@ -416,8 +452,10 @@ class BetterCompressionCheck(Check):
known_results = frozenset([BetterCompressionUri])
REGEXPS = (
- (r'.*\b(?P<uri>https?://[^/]*?gitlab[^/]*?/.*/-/archive/.*?/\S*\.(?:tar\.gz|tar(?!.bz2)|zip))',
- '.tar.bz2'),
+ (
+ r".*\b(?P<uri>https?://[^/]*?gitlab[^/]*?/.*/-/archive/.*?/\S*\.(?:tar\.gz|tar(?!.bz2)|zip))",
+ ".tar.bz2",
+ ),
)
def __init__(self, *args):
@@ -426,12 +464,12 @@ class BetterCompressionCheck(Check):
def feed(self, pkg):
for lineno, line in enumerate(pkg.lines, 1):
- if not line.strip() or line.startswith('#'):
+ if not line.strip() or line.startswith("#"):
continue
# searching for multiple matches on a single line is too slow
for regexp, replacement in self.regexes:
if mo := regexp.match(line):
- uri = mo.group('uri')
+ uri = mo.group("uri")
yield BetterCompressionUri(replacement, lineno=lineno, line=uri, pkg=pkg)
@@ -445,7 +483,7 @@ class HomepageInSrcUri(results.VersionResult, results.Style):
@property
def desc(self):
- return '${HOMEPAGE} in SRC_URI'
+ return "${HOMEPAGE} in SRC_URI"
class StaticSrcUri(results.VersionResult, results.Style):
@@ -462,7 +500,7 @@ class StaticSrcUri(results.VersionResult, results.Style):
@property
def desc(self):
- return f'{self.static_str!r} in SRC_URI, replace with {self.replacement}'
+ return f"{self.static_str!r} in SRC_URI, replace with {self.replacement}"
class ReferenceInMetadataVar(results.VersionResult, results.Style):
@@ -491,8 +529,8 @@ class ReferenceInMetadataVar(results.VersionResult, results.Style):
@property
def desc(self):
s = pluralism(self.refs)
- refs = ', '.join(self.refs)
- return f'{self.variable} includes variable{s}: {refs}'
+ refs = ", ".join(self.refs)
+ return f"{self.variable} includes variable{s}: {refs}"
class MultipleKeywordsLines(results.LinesResult, results.Style):
@@ -530,13 +568,14 @@ class MetadataVarCheck(Check):
"""Scan various globally assigned metadata variables for issues."""
_source = sources.EbuildParseRepoSource
- known_results = frozenset([
- HomepageInSrcUri, StaticSrcUri, ReferenceInMetadataVar, MultipleKeywordsLines])
+ known_results = frozenset(
+ [HomepageInSrcUri, StaticSrcUri, ReferenceInMetadataVar, MultipleKeywordsLines]
+ )
# mapping between registered variables and verification methods
known_variables = {}
- @verify_vars('HOMEPAGE', 'KEYWORDS')
+ @verify_vars("HOMEPAGE", "KEYWORDS")
def _raw_text(self, var, node, value, pkg):
matches = []
for var_node, _ in bash.var_query.captures(node):
@@ -544,12 +583,12 @@ class MetadataVarCheck(Check):
if matches:
yield ReferenceInMetadataVar(var, stable_unique(matches), pkg=pkg)
- @verify_vars('LICENSE')
+ @verify_vars("LICENSE")
def _raw_text_license(self, var, node, value, pkg):
matches = []
for var_node, _ in bash.var_query.captures(node):
var_str = pkg.node_str(var_node.parent).strip()
- if var_str in ['$LICENSE', '${LICENSE}']:
+ if var_str in ["$LICENSE", "${LICENSE}"]:
continue # LICENSE in LICENSE is ok
matches.append(var_str)
if matches:
@@ -557,47 +596,43 @@ class MetadataVarCheck(Check):
def build_src_uri_variants_regex(self, pkg):
p, pv = pkg.P, pkg.PV
- replacements = {
- p: '${P}',
- pv: '${PV}'
- }
+ replacements = {p: "${P}", pv: "${PV}"}
replacements.setdefault(p.capitalize(), "${P^}")
replacements.setdefault(p.upper(), "${P^^}")
for value, replacement in tuple(replacements.items()):
- replacements.setdefault(value.replace('.', ''), replacement.replace('}', '//.}'))
- replacements.setdefault(value.replace('.', '_'), replacement.replace('}', '//./_}'))
- replacements.setdefault(value.replace('.', '-'), replacement.replace('}', '//./-}'))
+ replacements.setdefault(value.replace(".", ""), replacement.replace("}", "//.}"))
+ replacements.setdefault(value.replace(".", "_"), replacement.replace("}", "//./_}"))
+ replacements.setdefault(value.replace(".", "-"), replacement.replace("}", "//./-}"))
pos = 0
- positions = [pos := pv.find('.', pos+1) for _ in range(pv.count('.'))]
+ positions = [pos := pv.find(".", pos + 1) for _ in range(pv.count("."))]
- for sep in ('', '-', '_'):
- replacements.setdefault(pv.replace('.', sep, 1), f"$(ver_rs 1 {sep!r})")
- for count in range(2, pv.count('.')):
- replacements.setdefault(pv.replace('.', sep, count), f"$(ver_rs 1-{count} {sep!r})")
+ for sep in ("", "-", "_"):
+ replacements.setdefault(pv.replace(".", sep, 1), f"$(ver_rs 1 {sep!r})")
+ for count in range(2, pv.count(".")):
+ replacements.setdefault(pv.replace(".", sep, count), f"$(ver_rs 1-{count} {sep!r})")
for pos, index in enumerate(positions[1:], start=2):
replacements.setdefault(pv[:index], f"$(ver_cut 1-{pos})")
replacements = sorted(replacements.items(), key=lambda x: -len(x[0]))
- return tuple(zip(*replacements))[1], '|'.join(
- rf'(?P<r{index}>{re.escape(s)})'
- for index, (s, _) in enumerate(replacements)
+ return tuple(zip(*replacements))[1], "|".join(
+ rf"(?P<r{index}>{re.escape(s)})" for index, (s, _) in enumerate(replacements)
)
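
A trimmed rerun of the table construction above for a hypothetical PV="1.2.3" (the P-based entries are omitted for brevity) shows how the walrus-driven positions list feeds the ver_cut candidates while the separator loop feeds ver_rs:

    pv = "1.2.3"
    replacements = {pv: "${PV}"}
    for sep in ("", "-", "_"):
        replacements.setdefault(pv.replace(".", sep, 1), f"$(ver_rs 1 {sep!r})")
    pos = 0
    positions = [pos := pv.find(".", pos + 1) for _ in range(pv.count("."))]
    for pos, index in enumerate(positions[1:], start=2):
        replacements.setdefault(pv[:index], f"$(ver_cut 1-{pos})")
    print(replacements)
    # {'1.2.3': '${PV}', '12.3': "$(ver_rs 1 '')", '1-2.3': "$(ver_rs 1 '-')",
    #  '1_2.3': "$(ver_rs 1 '_')", '1.2': '$(ver_cut 1-2)'}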
- @verify_vars('SRC_URI')
+ @verify_vars("SRC_URI")
def _src_uri(self, var, node, value, pkg):
- if '${HOMEPAGE}' in value:
+ if "${HOMEPAGE}" in value:
yield HomepageInSrcUri(pkg=pkg)
replacements, regex = self.build_src_uri_variants_regex(pkg)
- static_src_uri_re = rf'(?:/|{re.escape(pkg.PN)}[-._]?|->\s*)[v]?(?P<static_str>({regex}))'
+ static_src_uri_re = rf"(?:/|{re.escape(pkg.PN)}[-._]?|->\s*)[v]?(?P<static_str>({regex}))"
static_urls = {}
for match in re.finditer(static_src_uri_re, value):
relevant = {key: value for key, value in match.groupdict().items() if value is not None}
- static_str = relevant.pop('static_str')
+ static_str = relevant.pop("static_str")
assert len(relevant) == 1
key = int(tuple(relevant.keys())[0][1:])
static_urls[static_str] = replacements[key]
@@ -608,12 +643,12 @@ class MetadataVarCheck(Check):
def feed(self, pkg):
keywords_lines = set()
for node in pkg.global_query(bash.var_assign_query):
- name = pkg.node_str(node.child_by_field_name('name'))
+ name = pkg.node_str(node.child_by_field_name("name"))
if name in self.known_variables:
# RHS value node should be last
val_node = node.children[-1]
val_str = pkg.node_str(val_node)
- if name == 'KEYWORDS':
+ if name == "KEYWORDS":
keywords_lines.add(node.start_point[0] + 1)
keywords_lines.add(node.end_point[0] + 1)
yield from self.known_variables[name](self, name, val_node, val_str, pkg)
@@ -633,7 +668,7 @@ class MissingInherits(results.VersionResult, results.Warning):
@property
def desc(self):
- return f'{self.eclass}: missing inherit usage: {repr(self.usage)}, line {self.lineno}'
+ return f"{self.eclass}: missing inherit usage: {repr(self.usage)}, line {self.lineno}"
class IndirectInherits(results.VersionResult, results.Warning):
@@ -651,7 +686,7 @@ class IndirectInherits(results.VersionResult, results.Warning):
@property
def desc(self):
- return f'{self.eclass}: indirect inherit usage: {repr(self.usage)}, line {self.lineno}'
+ return f"{self.eclass}: indirect inherit usage: {repr(self.usage)}, line {self.lineno}"
class UnusedInherits(results.VersionResult, results.Warning):
@@ -663,9 +698,9 @@ class UnusedInherits(results.VersionResult, results.Warning):
@property
def desc(self):
- es = pluralism(self.eclasses, plural='es')
- eclasses = ', '.join(self.eclasses)
- return f'unused eclass{es}: {eclasses}'
+ es = pluralism(self.eclasses, plural="es")
+ eclasses = ", ".join(self.eclasses)
+ return f"unused eclass{es}: {eclasses}"
class InternalEclassUsage(results.VersionResult, results.Warning):
@@ -679,7 +714,7 @@ class InternalEclassUsage(results.VersionResult, results.Warning):
@property
def desc(self):
- return f'{self.eclass}: internal usage: {repr(self.usage)}, line {self.lineno}'
+ return f"{self.eclass}: internal usage: {repr(self.usage)}, line {self.lineno}"
class InheritsCheck(Check):
@@ -690,8 +725,9 @@ class InheritsCheck(Check):
"""
_source = sources.EbuildParseRepoSource
- known_results = frozenset([
- MissingInherits, IndirectInherits, UnusedInherits, InternalEclassUsage])
+ known_results = frozenset(
+ [MissingInherits, IndirectInherits, UnusedInherits, InternalEclassUsage]
+ )
required_addons = (addons.eclass.EclassAddon,)
def __init__(self, *args, eclass_addon):
@@ -703,7 +739,8 @@ class InheritsCheck(Check):
# register internal and exported funcs/vars for all eclasses
for eclass, eclass_obj in self.eclass_cache.items():
self.internals[eclass] = (
- eclass_obj.internal_function_names | eclass_obj.internal_variable_names)
+ eclass_obj.internal_function_names | eclass_obj.internal_variable_names
+ )
for name in eclass_obj.exported_function_names:
self.exported.setdefault(name, set()).add(eclass)
# Don't use all exported vars in order to avoid
@@ -716,9 +753,7 @@ class InheritsCheck(Check):
self.eapi_funcs = {}
for eapi in EAPI.known_eapis.values():
s = set(eapi.bash_cmds_internal | eapi.bash_cmds_deprecated)
- s.update(
- x for x in (eapi.bash_funcs | eapi.bash_funcs_global)
- if not x.startswith('_'))
+ s.update(x for x in (eapi.bash_funcs | eapi.bash_funcs_global) if not x.startswith("_"))
self.eapi_funcs[eapi] = frozenset(s)
# register EAPI-related vars to ignore
@@ -751,7 +786,7 @@ class InheritsCheck(Check):
# register variables assigned in ebuilds
assigned_vars = dict()
for node, _ in bash.var_assign_query.captures(pkg.tree.root_node):
- name = pkg.node_str(node.child_by_field_name('name'))
+ name = pkg.node_str(node.child_by_field_name("name"))
if eclass := self.get_eclass(name, pkg):
assigned_vars[name] = eclass
@@ -759,8 +794,8 @@ class InheritsCheck(Check):
used = defaultdict(list)
for node, _ in bash.cmd_query.captures(pkg.tree.root_node):
call = pkg.node_str(node)
- name = pkg.node_str(node.child_by_field_name('name'))
- if name == 'inherit':
+ name = pkg.node_str(node.child_by_field_name("name"))
+ if name == "inherit":
# register conditional eclasses
eclasses = call.split()[1:]
if not pkg.inherited.intersection(eclasses):
@@ -770,12 +805,12 @@ class InheritsCheck(Check):
elif name not in self.eapi_funcs[pkg.eapi] | assigned_vars.keys():
lineno, colno = node.start_point
if eclass := self.get_eclass(name, pkg):
- used[eclass].append((lineno + 1, name, call.split('\n', 1)[0]))
+ used[eclass].append((lineno + 1, name, call.split("\n", 1)[0]))
# match captured variables with eclasses
for node, _ in bash.var_query.captures(pkg.tree.root_node):
name = pkg.node_str(node)
- if node.parent.type == 'unset_command':
+ if node.parent.type == "unset_command":
continue
if name not in self.eapi_vars[pkg.eapi] | assigned_vars.keys():
lineno, colno = node.start_point
@@ -793,7 +828,8 @@ class InheritsCheck(Check):
phases = [pkg.eapi.phases[x] for x in pkg.defined_phases]
for eclass in list(unused):
if self.eclass_cache[eclass].exported_function_names.intersection(
- f'{eclass}_{phase}' for phase in phases):
+ f"{eclass}_{phase}" for phase in phases
+ ):
unused.discard(eclass)
for eclass in list(unused):
@@ -802,7 +838,8 @@ class InheritsCheck(Check):
unused.discard(eclass)
else:
exported_eclass_keys = pkg.eapi.eclass_keys.intersection(
- self.eclass_cache[eclass].exported_variable_names)
+ self.eclass_cache[eclass].exported_variable_names
+ )
if not self.eclass_cache[eclass].exported_function_names and exported_eclass_keys:
# ignore eclasses that export ebuild metadata (e.g.
# SRC_URI, S, ...) and no functions
@@ -844,15 +881,38 @@ class ReadonlyVariableCheck(Check):
known_results = frozenset([ReadonlyVariable])
# https://devmanual.gentoo.org/ebuild-writing/variables/#predefined-read-only-variables
- readonly_vars = frozenset([
- 'P', 'PN', 'PV', 'PR', 'PVR', 'PF', 'A', 'CATEGORY', 'FILESDIR', 'WORKDIR',
- 'T', 'D', 'HOME', 'ROOT', 'DISTDIR', 'EPREFIX', 'ED', 'EROOT', 'SYSROOT',
- 'ESYSROOT', 'BROOT', 'MERGE_TYPE', 'REPLACING_VERSIONS', 'REPLACED_BY_VERSION',
- ])
+ readonly_vars = frozenset(
+ [
+ "P",
+ "PN",
+ "PV",
+ "PR",
+ "PVR",
+ "PF",
+ "A",
+ "CATEGORY",
+ "FILESDIR",
+ "WORKDIR",
+ "T",
+ "D",
+ "HOME",
+ "ROOT",
+ "DISTDIR",
+ "EPREFIX",
+ "ED",
+ "EROOT",
+ "SYSROOT",
+ "ESYSROOT",
+ "BROOT",
+ "MERGE_TYPE",
+ "REPLACING_VERSIONS",
+ "REPLACED_BY_VERSION",
+ ]
+ )
def feed(self, pkg):
for node in pkg.global_query(bash.var_assign_query):
- name = pkg.node_str(node.child_by_field_name('name'))
+ name = pkg.node_str(node.child_by_field_name("name"))
if name in self.readonly_vars:
call = pkg.node_str(node)
lineno, colno = node.start_point
@@ -862,7 +922,7 @@ class ReadonlyVariableCheck(Check):
class VariableScope(results.BaseLinesResult, results.AliasResult, results.Warning):
"""Variable used outside its defined scope."""
- _name = 'VariableScope'
+ _name = "VariableScope"
def __init__(self, variable, func, **kwargs):
super().__init__(**kwargs)
@@ -871,7 +931,7 @@ class VariableScope(results.BaseLinesResult, results.AliasResult, results.Warnin
@property
def desc(self):
- return f'variable {self.variable!r} used in {self.func!r} {self.lines_str}'
+ return f"variable {self.variable!r} used in {self.func!r} {self.lines_str}"
class EbuildVariableScope(VariableScope, results.VersionResult):
@@ -885,28 +945,30 @@ class VariableScopeCheck(Check):
known_results = frozenset([EbuildVariableScope])
# see https://projects.gentoo.org/pms/7/pms.html#x1-10900011.1
- variable_map = ImmutableDict({
- 'A': ('src_', 'pkg_nofetch'),
- 'AA': ('src_', 'pkg_nofetch'),
- 'FILESDIR': 'src_',
- 'DISTDIR': 'src_',
- 'WORKDIR': 'src_',
- 'S': 'src_',
- 'PORTDIR': 'src_',
- 'ECLASSDIR': 'src_',
- 'ROOT': 'pkg_',
- 'EROOT': 'pkg_',
- 'SYSROOT': ('src_', 'pkg_setup'),
- 'ESYSROOT': ('src_', 'pkg_setup'),
- 'BROOT': ('src_', 'pkg_setup'),
- 'D': ('src_install', 'pkg_preinst', 'pkg_postinst'),
- 'ED': ('src_install', 'pkg_preinst', 'pkg_postinst'),
- 'DESTTREE': 'src_install',
- 'INSDESTTREE': 'src_install',
- 'MERGE_TYPE': 'pkg_',
- 'REPLACING_VERSIONS': 'pkg_',
- 'REPLACED_BY_VERSION': ('pkg_prerm', 'pkg_postrm'),
- })
+ variable_map = ImmutableDict(
+ {
+ "A": ("src_", "pkg_nofetch"),
+ "AA": ("src_", "pkg_nofetch"),
+ "FILESDIR": "src_",
+ "DISTDIR": "src_",
+ "WORKDIR": "src_",
+ "S": "src_",
+ "PORTDIR": "src_",
+ "ECLASSDIR": "src_",
+ "ROOT": "pkg_",
+ "EROOT": "pkg_",
+ "SYSROOT": ("src_", "pkg_setup"),
+ "ESYSROOT": ("src_", "pkg_setup"),
+ "BROOT": ("src_", "pkg_setup"),
+ "D": ("src_install", "pkg_preinst", "pkg_postint"),
+ "ED": ("src_install", "pkg_preinst", "pkg_postint"),
+ "DESTTREE": "src_install",
+ "INSDESTTREE": "src_install",
+ "MERGE_TYPE": "pkg_",
+ "REPLACING_VERSIONS": "pkg_",
+ "REPLACED_BY_VERSION": ("pkg_prerm", "pkg_postrm"),
+ }
+ )
# mapping of bad variables for each EAPI phase function
scoped_vars = {}
@@ -919,7 +981,7 @@ class VariableScopeCheck(Check):
def feed(self, pkg):
for func_node, _ in bash.func_query.captures(pkg.tree.root_node):
- func_name = pkg.node_str(func_node.child_by_field_name('name'))
+ func_name = pkg.node_str(func_node.child_by_field_name("name"))
if variables := self.scoped_vars[pkg.eapi].get(func_name):
usage = defaultdict(set)
for var_node, _ in bash.var_query.captures(func_node):
@@ -951,23 +1013,23 @@ class RedundantDodirCheck(Check):
def __init__(self, *args):
super().__init__(*args)
- cmds = r'|'.join(('insinto', 'exeinto', 'docinto'))
- self.cmds_regex = re.compile(rf'^\s*(?P<cmd>({cmds}))\s+(?P<path>\S+)')
- self.dodir_regex = re.compile(r'^\s*(?P<call>dodir\s+(?P<path>\S+))')
+ cmds = r"|".join(("insinto", "exeinto", "docinto"))
+ self.cmds_regex = re.compile(rf"^\s*(?P<cmd>({cmds}))\s+(?P<path>\S+)")
+ self.dodir_regex = re.compile(r"^\s*(?P<call>dodir\s+(?P<path>\S+))")
def feed(self, pkg):
lines = enumerate(pkg.lines, 1)
for lineno, line in lines:
line = line.strip()
- if not line or line[0] == '#':
+ if not line or line[0] == "#":
continue
if dodir := self.dodir_regex.match(line):
lineno, line = next(lines)
if cmd := self.cmds_regex.match(line):
- if dodir.group('path') == cmd.group('path'):
+ if dodir.group("path") == cmd.group("path"):
yield RedundantDodir(
- cmd.group('cmd'), line=dodir.group('call'),
- lineno=lineno - 1, pkg=pkg)
+ cmd.group("cmd"), line=dodir.group("call"), lineno=lineno - 1, pkg=pkg
+ )
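
A minimal sketch of the two regexes above against a hypothetical two-line ebuild fragment; the dodir is redundant because insinto/exeinto/docinto create the directory themselves:

    import re

    cmds_regex = re.compile(r"^\s*(?P<cmd>(insinto|exeinto|docinto))\s+(?P<path>\S+)")
    dodir_regex = re.compile(r"^\s*(?P<call>dodir\s+(?P<path>\S+))")

    dodir = dodir_regex.match("dodir /usr/share/foo")
    cmd = cmds_regex.match("insinto /usr/share/foo")
    if dodir and cmd and dodir.group("path") == cmd.group("path"):
        print(f"redundant {dodir.group('call')!r} before {cmd.group('cmd')!r}")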
class UnquotedVariable(results.BaseLinesResult, results.AliasResult, results.Warning):
@@ -977,7 +1039,7 @@ class UnquotedVariable(results.BaseLinesResult, results.AliasResult, results.War
contexts.
"""
- _name = 'UnquotedVariable'
+ _name = "UnquotedVariable"
def __init__(self, variable, **kwargs):
super().__init__(**kwargs)
@@ -985,7 +1047,7 @@ class UnquotedVariable(results.BaseLinesResult, results.AliasResult, results.War
@property
def desc(self):
- return f'unquoted variable {self.variable} {self.lines_str}'
+ return f"unquoted variable {self.variable} {self.lines_str}"
class EbuildUnquotedVariable(UnquotedVariable, results.VersionResult):
@@ -997,48 +1059,65 @@ class EclassUnquotedVariable(UnquotedVariable, results.EclassResult):
@property
def desc(self):
- return f'{self.eclass}: {super().desc}'
+ return f"{self.eclass}: {super().desc}"
class _UnquotedVariablesCheck(Check):
"""Scan files for variables that should be quoted like D, FILESDIR, etc."""
- message_commands = frozenset({
- "die", "echo", "eerror", "einfo", "elog", "eqawarn", "ewarn", ":"
- })
- var_names = frozenset({
- "D", "DISTDIR", "FILESDIR", "S", "T", "ROOT", "BROOT", "WORKDIR", "ED",
- "EPREFIX", "EROOT", "SYSROOT", "ESYSROOT", "TMPDIR", "HOME",
- # variables for multibuild.eclass
- "BUILD_DIR",
- })
-
- node_types_ok = frozenset({
- # Variable is sitting in a string, all good
- 'string',
- # Variable is part of a shell assignment, and does not need to be
- # quoted. For example, S=${WORKDIR}/${PN} is ok.
- 'variable_assignment',
- # Variable is being used in an unset command.
- 'unset_command',
- # Variable is part of declaring variables, and does not need to be
- # quoted. For example, local TMPDIR is ok.
- 'declaration_command',
- # Variable sits inside a [[ ]] test command and it's OK not to be quoted
- 'test_command',
- # Variable is being used in a heredoc body, no need to specify quotes.
- 'heredoc_body',
- })
+ message_commands = frozenset(
+ {"die", "echo", "eerror", "einfo", "elog", "eqawarn", "ewarn", ":"}
+ )
+ var_names = frozenset(
+ {
+ "D",
+ "DISTDIR",
+ "FILESDIR",
+ "S",
+ "T",
+ "ROOT",
+ "BROOT",
+ "WORKDIR",
+ "ED",
+ "EPREFIX",
+ "EROOT",
+ "SYSROOT",
+ "ESYSROOT",
+ "TMPDIR",
+ "HOME",
+ # variables for multibuild.eclass
+ "BUILD_DIR",
+ }
+ )
+
+ node_types_ok = frozenset(
+ {
+ # Variable is sitting in a string, all good
+ "string",
+ # Variable is part of a shell assignment, and does not need to be
+ # quoted. For example, S=${WORKDIR}/${PN} is ok.
+ "variable_assignment",
+ # Variable is being used in an unset command.
+ "unset_command",
+ # Variable is part of declaring variables, and does not need to be
+ # quoted. For example, local TMPDIR is ok.
+ "declaration_command",
+ # Variable sits inside a [[ ]] test command and it's OK not to be quoted
+ "test_command",
+ # Variable is being used in a heredoc body, no need to specify quotes.
+ "heredoc_body",
+ }
+ )
def _var_needs_quotes(self, pkg, node):
pnode = node.parent
while pnode != node:
if pnode.type in self.node_types_ok:
return False
- elif pnode.type == 'command':
- cmd = pkg.node_str(pnode.child_by_field_name('name'))
+ elif pnode.type == "command":
+ cmd = pkg.node_str(pnode.child_by_field_name("name"))
return cmd not in self.message_commands
- elif pnode.type == 'array':
+ elif pnode.type == "array":
# Variable is sitting unquoted in an array
return True
pnode = pnode.parent
@@ -1058,7 +1137,7 @@ class _UnquotedVariablesCheck(Check):
if var_name in self.var_names:
if self._var_needs_quotes(item, var_node):
lineno, _ = var_node.start_point
- hits[var_name].add(lineno+1)
+ hits[var_name].add(lineno + 1)
for var_name, lines in hits.items():
yield var_name, sorted(lines)
@@ -1094,7 +1173,7 @@ class ExcessiveLineLength(results.LinesResult, results.Style):
@property
def desc(self):
- return f'excessive line length (over {self.line_length} characters) {self.lines_str}'
+ return f"excessive line length (over {self.line_length} characters) {self.lines_str}"
class LineLengthCheck(Check):
@@ -1105,8 +1184,8 @@ class LineLengthCheck(Check):
def __init__(self, options, **kwargs):
super().__init__(options, **kwargs)
- self.exception = re.compile(r'\s*(?:DESCRIPTION|KEYWORDS|IUSE)=')
- str_length = f'[^\'\"]{{{ExcessiveLineLength.word_length},}}'
+ self.exception = re.compile(r"\s*(?:DESCRIPTION|KEYWORDS|IUSE)=")
+ str_length = f"[^'\"]{{{ExcessiveLineLength.word_length},}}"
self.long_string = re.compile(rf'"{str_length}"|\'{str_length}\'')
def feed(self, pkg):
@@ -1115,11 +1194,11 @@ class LineLengthCheck(Check):
if len(line) <= ExcessiveLineLength.line_length:
continue
if self.exception.match(line):
- continue # excepted variables that are fine to be long
+ continue  # excepted variables that are fine to be long
if max(map(len, line.split())) > ExcessiveLineLength.word_length:
- continue # one part of the line is a single very long word
+ continue  # one part of the line is a single very long word
if self.long_string.search(line):
- continue # skip lines with a long quoted string
+ continue  # skip lines with a long quoted string
lines.append(lineno)
if lines:
yield ExcessiveLineLength(lines=lines, pkg=pkg)
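
To make the long-string exemption concrete, a sketch with hypothetical limits standing in for ExcessiveLineLength.line_length and .word_length (the real values live on the result class):

    import re

    line_length, word_length = 120, 110  # hypothetical stand-ins
    str_length = f"[^'\"]{{{word_length},}}"
    long_string = re.compile(rf'"{str_length}"|\'{str_length}\'')

    line = 'SRC_URI="' + "x" * 115 + '"'
    print(len(line) > line_length)         # True: over the length limit...
    print(bool(long_string.search(line)))  # ...but skipped: one long quoted string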
@@ -1134,7 +1213,7 @@ class InstallCompressedManpage(results.LineResult, results.Warning):
@property
def desc(self):
- return f'line {self.lineno}: compressed manpage {self.line!r} passed to {self.func}'
+ return f"line {self.lineno}: compressed manpage {self.line!r} passed to {self.func}"
class InstallCompressedInfo(results.LineResult, results.Warning):
@@ -1146,7 +1225,7 @@ class InstallCompressedInfo(results.LineResult, results.Warning):
@property
def desc(self):
- return f'line {self.lineno}: compressed info {self.line!r} passed to {self.func}'
+ return f"line {self.lineno}: compressed info {self.line!r} passed to {self.func}"
class DoCompressedFilesCheck(Check):
@@ -1155,23 +1234,27 @@ class DoCompressedFilesCheck(Check):
_source = sources.EbuildParseRepoSource
known_results = frozenset([InstallCompressedManpage, InstallCompressedInfo])
- compression_extensions = ('.Z', '.gz', '.bz2', '.lzma', '.lz', '.lzo', '.lz4', '.xz', '.zst')
- functions = ImmutableDict({
- 'doman': InstallCompressedManpage,
- 'newman': InstallCompressedManpage,
- 'doinfo': InstallCompressedInfo,
- })
+ compression_extensions = (".Z", ".gz", ".bz2", ".lzma", ".lz", ".lzo", ".lz4", ".xz", ".zst")
+ functions = ImmutableDict(
+ {
+ "doman": InstallCompressedManpage,
+ "newman": InstallCompressedManpage,
+ "doinfo": InstallCompressedInfo,
+ }
+ )
def feed(self, pkg):
for node, _ in bash.cmd_query.captures(pkg.tree.root_node):
- call_name = pkg.node_str(node.child_by_field_name('name'))
+ call_name = pkg.node_str(node.child_by_field_name("name"))
if call_name not in self.functions:
continue
for arg in node.children[1:]:
- arg_name = pkg.node_str(arg).strip('\'\"')
+ arg_name = pkg.node_str(arg).strip("'\"")
lineno, _ = arg.start_point
if arg_name.endswith(self.compression_extensions):
- yield self.functions[call_name](call_name, lineno=lineno+1, line=arg_name, pkg=pkg)
+ yield self.functions[call_name](
+ call_name, lineno=lineno + 1, line=arg_name, pkg=pkg
+ )
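
Since str.endswith accepts a tuple, a single call covers every registered suffix; a one-line sketch with a hypothetical quoted argument:

    compression_extensions = (".Z", ".gz", ".bz2", ".lzma", ".lz", ".lzo", ".lz4", ".xz", ".zst")
    print('"foo.1.gz"'.strip("'\"").endswith(compression_extensions))  # True -> InstallCompressedManpage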
class NonPosixHeadTailUsage(results.LineResult, results.Warning):
@@ -1183,13 +1266,14 @@ class NonPosixHeadTailUsage(results.LineResult, results.Warning):
.. [#] https://devmanual.gentoo.org/tools-reference/head-and-tail/index.html
"""
+
def __init__(self, command, **kwargs):
super().__init__(**kwargs)
self.command = command
@property
def desc(self):
- return f'line {self.lineno}: non-posix usage of {self.command!r}: {self.line!r}'
+ return f"line {self.lineno}: non-posix usage of {self.command!r}: {self.line!r}"
class NonPosixCheck(Check):
@@ -1200,21 +1284,23 @@ class NonPosixCheck(Check):
def __init__(self, options, **kwargs):
super().__init__(options, **kwargs)
- self.re_head_tail = re.compile(r'[+-]\d+')
+ self.re_head_tail = re.compile(r"[+-]\d+")
def check_head_tail(self, pkg, call_node, call_name):
- prev_arg = ''
+ prev_arg = ""
for arg in map(pkg.node_str, call_node.children[1:]):
- if (self.re_head_tail.match(arg) and
- not (prev_arg.startswith('-') and prev_arg.endswith(('n', 'c')))):
+ if self.re_head_tail.match(arg) and not (
+ prev_arg.startswith("-") and prev_arg.endswith(("n", "c"))
+ ):
lineno, _ = call_node.start_point
- yield NonPosixHeadTailUsage(f'{call_name} {arg}',
- lineno=lineno+1, line=pkg.node_str(call_node), pkg=pkg)
+ yield NonPosixHeadTailUsage(
+ f"{call_name} {arg}", lineno=lineno + 1, line=pkg.node_str(call_node), pkg=pkg
+ )
break
prev_arg = arg
def feed(self, pkg):
for call_node, _ in bash.cmd_query.captures(pkg.tree.root_node):
- call_name = pkg.node_str(call_node.child_by_field_name('name'))
- if call_name in ('head', 'tail'):
+ call_name = pkg.node_str(call_node.child_by_field_name("name"))
+ if call_name in ("head", "tail"):
yield from self.check_head_tail(pkg, call_node, call_name)
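
A standalone sketch of the option scan in check_head_tail, with tree-sitter nodes replaced by plain argument lists (both vectors are hypothetical):

    import re

    re_head_tail = re.compile(r"[+-]\d+")

    def non_posix(args):
        prev_arg = ""
        for arg in args:
            if re_head_tail.match(arg) and not (
                prev_arg.startswith("-") and prev_arg.endswith(("n", "c"))
            ):
                return True
            prev_arg = arg
        return False

    print(non_posix(["-5", "file"]))        # True: `head -5` is the historical form
    print(non_posix(["-n", "-5", "file"]))  # False: the count follows an -n option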
diff --git a/src/pkgcheck/checks/dropped_keywords.py b/src/pkgcheck/checks/dropped_keywords.py
index 87ec8cf1..613076ef 100644
--- a/src/pkgcheck/checks/dropped_keywords.py
+++ b/src/pkgcheck/checks/dropped_keywords.py
@@ -13,7 +13,7 @@ class DroppedKeywords(results.VersionResult, results.Warning):
@property
def desc(self):
- return ', '.join(self.arches)
+ return ", ".join(self.arches)
class DroppedKeywordsCheck(Check):
@@ -36,7 +36,7 @@ class DroppedKeywordsCheck(Check):
for pkg in pkgset:
pkg_arches = {x.lstrip("~-") for x in pkg.keywords}
# special keywords -*, *, and ~* override all dropped keywords
- if '*' in pkg_arches:
+ if "*" in pkg_arches:
drops = set()
else:
drops = previous_arches.difference(pkg_arches) | seen_arches.difference(pkg_arches)
@@ -45,7 +45,7 @@ class DroppedKeywordsCheck(Check):
changes[key].append(pkg)
if changes:
# ignore missing arches on previous versions that were re-enabled
- disabled_arches = {x.lstrip("-") for x in pkg.keywords if x.startswith('-')}
+ disabled_arches = {x.lstrip("-") for x in pkg.keywords if x.startswith("-")}
adds = pkg_arches.difference(previous_arches) - disabled_arches
for key in adds:
if key in changes:
diff --git a/src/pkgcheck/checks/eclass.py b/src/pkgcheck/checks/eclass.py
index d48df115..5c4f205f 100644
--- a/src/pkgcheck/checks/eclass.py
+++ b/src/pkgcheck/checks/eclass.py
@@ -24,10 +24,10 @@ class DeprecatedEclass(results.VersionResult, results.Warning):
@property
def desc(self):
if self.replacement is not None:
- replacement = f'migrate to {self.replacement}'
+ replacement = f"migrate to {self.replacement}"
else:
- replacement = 'no replacement'
- return f'uses deprecated eclass: {self.eclass} ({replacement})'
+ replacement = "no replacement"
+ return f"uses deprecated eclass: {self.eclass} ({replacement})"
class DeprecatedEclassVariable(results.LineResult, results.Warning):
@@ -41,10 +41,10 @@ class DeprecatedEclassVariable(results.LineResult, results.Warning):
@property
def desc(self):
if self.replacement is not None:
- replacement = f'migrate to {self.replacement}'
+ replacement = f"migrate to {self.replacement}"
else:
- replacement = 'no replacement'
- return f'uses deprecated variable on line {self.lineno}: {self.variable} ({replacement})'
+ replacement = "no replacement"
+ return f"uses deprecated variable on line {self.lineno}: {self.variable} ({replacement})"
class DeprecatedEclassFunction(results.LineResult, results.Warning):
@@ -58,10 +58,10 @@ class DeprecatedEclassFunction(results.LineResult, results.Warning):
@property
def desc(self):
if self.replacement is not None:
- replacement = f'migrate to {self.replacement}'
+ replacement = f"migrate to {self.replacement}"
else:
- replacement = 'no replacement'
- return f'uses deprecated function on line {self.lineno}: {self.function} ({replacement})'
+ replacement = "no replacement"
+ return f"uses deprecated function on line {self.lineno}: {self.function} ({replacement})"
class DuplicateEclassInherit(results.LineResult, results.Style):
@@ -79,7 +79,7 @@ class DuplicateEclassInherit(results.LineResult, results.Style):
@property
def desc(self):
- return f'duplicate eclass inherit {self.eclass!r}, line {self.lineno}'
+ return f"duplicate eclass inherit {self.eclass!r}, line {self.lineno}"
class MisplacedEclassVar(results.LineResult, results.Error):
@@ -95,17 +95,22 @@ class MisplacedEclassVar(results.LineResult, results.Error):
@property
def desc(self):
- return f'invalid pre-inherit placement, line {self.lineno}: {self.line!r}'
+ return f"invalid pre-inherit placement, line {self.lineno}: {self.line!r}"
class EclassUsageCheck(Check):
"""Scan packages for various eclass-related issues."""
_source = sources.EbuildParseRepoSource
- known_results = frozenset([
- DeprecatedEclass, DeprecatedEclassVariable, DeprecatedEclassFunction,
- DuplicateEclassInherit, MisplacedEclassVar,
- ])
+ known_results = frozenset(
+ [
+ DeprecatedEclass,
+ DeprecatedEclassVariable,
+ DeprecatedEclassFunction,
+ DuplicateEclassInherit,
+ MisplacedEclassVar,
+ ]
+ )
required_addons = (addons.eclass.EclassAddon,)
def __init__(self, *args, eclass_addon):
@@ -126,12 +131,11 @@ class EclassUsageCheck(Check):
# scan for any misplaced @PRE_INHERIT variables
if pre_inherits:
for node, _ in bash.var_assign_query.captures(pkg.tree.root_node):
- var_name = pkg.node_str(node.child_by_field_name('name'))
+ var_name = pkg.node_str(node.child_by_field_name("name"))
lineno, _colno = node.start_point
if var_name in pre_inherits and lineno > pre_inherits[var_name]:
line = pkg.node_str(node)
- yield MisplacedEclassVar(
- var_name, line=line, lineno=lineno+1, pkg=pkg)
+ yield MisplacedEclassVar(var_name, line=line, lineno=lineno + 1, pkg=pkg)
def check_deprecated_variables(self, pkg, inherits):
"""Check for usage of @DEPRECATED variables or functions."""
@@ -154,7 +158,8 @@ class EclassUsageCheck(Check):
if not isinstance(replacement, str):
replacement = None
yield DeprecatedEclassVariable(
- var_name, replacement, line=line, lineno=lineno+1, pkg=pkg)
+ var_name, replacement, line=line, lineno=lineno + 1, pkg=pkg
+ )
def check_deprecated_functions(self, pkg, inherits):
"""Check for usage of @DEPRECATED variables or functions."""
@@ -169,7 +174,7 @@ class EclassUsageCheck(Check):
# scan for usage of @DEPRECATED functions
if deprecated:
for node, _ in bash.cmd_query.captures(pkg.tree.root_node):
- func_name = pkg.node_str(node.child_by_field_name('name'))
+ func_name = pkg.node_str(node.child_by_field_name("name"))
lineno, _colno = node.start_point
if func_name in deprecated:
line = pkg.node_str(node)
@@ -177,15 +182,16 @@ class EclassUsageCheck(Check):
if not isinstance(replacement, str):
replacement = None
yield DeprecatedEclassFunction(
- func_name, replacement, line=line, lineno=lineno+1, pkg=pkg)
+ func_name, replacement, line=line, lineno=lineno + 1, pkg=pkg
+ )
def feed(self, pkg):
if pkg.inherit:
inherited = set()
inherits = []
for node, _ in bash.cmd_query.captures(pkg.tree.root_node):
- name = pkg.node_str(node.child_by_field_name('name'))
- if name == 'inherit':
+ name = pkg.node_str(node.child_by_field_name("name"))
+ if name == "inherit":
call = pkg.node_str(node)
# filter out line continuations and conditional inherits
if eclasses := [x for x in call.split()[1:] if x in pkg.inherit]:
@@ -198,7 +204,8 @@ class EclassUsageCheck(Check):
inherited.add(eclass)
else:
yield DuplicateEclassInherit(
- eclass, line=call, lineno=lineno+1, pkg=pkg)
+ eclass, line=call, lineno=lineno + 1, pkg=pkg
+ )
# verify @PRE_INHERIT variable placement
yield from self.check_pre_inherits(pkg, inherits)
@@ -218,7 +225,7 @@ class EclassVariableScope(VariableScope, results.EclassResult):
@property
def desc(self):
- return f'{self.eclass}: {super().desc}'
+ return f"{self.eclass}: {super().desc}"
class EclassExportFuncsBeforeInherit(results.EclassResult, results.Error):
@@ -235,8 +242,10 @@ class EclassExportFuncsBeforeInherit(results.EclassResult, results.Error):
@property
def desc(self):
- return (f'{self.eclass}: EXPORT_FUNCTIONS (line {self.export_line}) called before inherit (line '
- f'{self.inherit_line})')
+ return (
+ f"{self.eclass}: EXPORT_FUNCTIONS (line {self.export_line}) called before inherit (line "
+ f"{self.inherit_line})"
+ )
class EclassParseCheck(Check):
@@ -261,12 +270,12 @@ class EclassParseCheck(Check):
return variables
def feed(self, eclass):
- func_prefix = f'{eclass.name}_'
+ func_prefix = f"{eclass.name}_"
for func_node, _ in bash.func_query.captures(eclass.tree.root_node):
- func_name = eclass.node_str(func_node.child_by_field_name('name'))
+ func_name = eclass.node_str(func_node.child_by_field_name("name"))
if not func_name.startswith(func_prefix):
continue
- phase = func_name[len(func_prefix):]
+ phase = func_name[len(func_prefix) :]
if variables := self.eclass_phase_vars(eclass, phase):
usage = defaultdict(set)
for var_node, _ in bash.var_query.captures(func_node):
@@ -275,17 +284,20 @@ class EclassParseCheck(Check):
lineno, colno = var_node.start_point
usage[var_name].add(lineno + 1)
for var, lines in sorted(usage.items()):
- yield EclassVariableScope(var, func_name, lines=sorted(lines), eclass=eclass.name)
+ yield EclassVariableScope(
+ var, func_name, lines=sorted(lines), eclass=eclass.name
+ )
export_funcs_called = None
for node in eclass.global_query(bash.cmd_query):
call = eclass.node_str(node)
- if call.startswith('EXPORT_FUNCTIONS'):
+ if call.startswith("EXPORT_FUNCTIONS"):
export_funcs_called = node.start_point[0] + 1
- elif call.startswith('inherit'):
+ elif call.startswith("inherit"):
if export_funcs_called is not None:
- yield EclassExportFuncsBeforeInherit(export_funcs_called, node.start_point[0] + 1,
- eclass=eclass.name)
+ yield EclassExportFuncsBeforeInherit(
+ export_funcs_called, node.start_point[0] + 1, eclass=eclass.name
+ )
break
@@ -299,7 +311,7 @@ class EclassBashSyntaxError(results.EclassResult, results.Error):
@property
def desc(self):
- return f'{self.eclass}: bash syntax error, line {self.lineno}: {self.error}'
+ return f"{self.eclass}: bash syntax error, line {self.lineno}: {self.error}"
class EclassDocError(results.EclassResult, results.Warning):
@@ -316,7 +328,7 @@ class EclassDocError(results.EclassResult, results.Warning):
@property
def desc(self):
- return f'{self.eclass}: failed parsing eclass docs: {self.error}'
+ return f"{self.eclass}: failed parsing eclass docs: {self.error}"
class EclassDocMissingFunc(results.EclassResult, results.Warning):
@@ -329,8 +341,8 @@ class EclassDocMissingFunc(results.EclassResult, results.Warning):
@property
def desc(self):
s = pluralism(self.functions)
- funcs = ', '.join(self.functions)
- return f'{self.eclass}: undocumented function{s}: {funcs}'
+ funcs = ", ".join(self.functions)
+ return f"{self.eclass}: undocumented function{s}: {funcs}"
class EclassDocMissingVar(results.EclassResult, results.Warning):
@@ -348,16 +360,17 @@ class EclassDocMissingVar(results.EclassResult, results.Warning):
@property
def desc(self):
s = pluralism(self.variables)
- variables = ', '.join(self.variables)
- return f'{self.eclass}: undocumented variable{s}: {variables}'
+ variables = ", ".join(self.variables)
+ return f"{self.eclass}: undocumented variable{s}: {variables}"
class EclassCheck(Check):
"""Scan eclasses for various issues."""
_source = sources.EclassRepoSource
- known_results = frozenset([
- EclassBashSyntaxError, EclassDocError, EclassDocMissingFunc, EclassDocMissingVar])
+ known_results = frozenset(
+ [EclassBashSyntaxError, EclassDocError, EclassDocMissingFunc, EclassDocMissingVar]
+ )
def __init__(self, *args):
super().__init__(*args)
@@ -370,37 +383,44 @@ class EclassCheck(Check):
def feed(self, eclass):
# check for eclass bash syntax errors
p = subprocess.run(
- ['bash', '-n', shlex.quote(eclass.path)],
- stderr=subprocess.PIPE, stdout=subprocess.DEVNULL,
- env={'LC_ALL': 'C'}, encoding='utf8')
+ ["bash", "-n", shlex.quote(eclass.path)],
+ stderr=subprocess.PIPE,
+ stdout=subprocess.DEVNULL,
+ env={"LC_ALL": "C"},
+ encoding="utf8",
+ )
if p.returncode != 0 and p.stderr:
lineno = 0
error = []
for line in p.stderr.splitlines():
- path, line, msg = line.split(': ', 2)
+ path, line, msg = line.split(": ", 2)
lineno = line[5:]
- error.append(msg.strip('\n'))
- error = ': '.join(error)
+ error.append(msg.strip("\n"))
+ error = ": ".join(error)
yield EclassBashSyntaxError(lineno, error, eclass=eclass)
report_logs = (
- LogMap('pkgcore.log.logger.error', partial(EclassDocError, eclass=eclass)),
- LogMap('pkgcore.log.logger.warning', partial(EclassDocError, eclass=eclass)),
+ LogMap("pkgcore.log.logger.error", partial(EclassDocError, eclass=eclass)),
+ LogMap("pkgcore.log.logger.warning", partial(EclassDocError, eclass=eclass)),
)
with LogReports(*report_logs) as log_reports:
eclass_obj = EclassDoc(eclass.path, sourced=True)
yield from log_reports
- phase_funcs = {f'{eclass}_{phase}' for phase in self.known_phases}
+ phase_funcs = {f"{eclass}_{phase}" for phase in self.known_phases}
funcs_missing_docs = (
- eclass_obj.exported_function_names - phase_funcs - eclass_obj.function_names)
+ eclass_obj.exported_function_names - phase_funcs - eclass_obj.function_names
+ )
if funcs_missing_docs:
yield EclassDocMissingFunc(sorted(funcs_missing_docs), eclass=eclass)
# ignore underscore-prefixed vars (mostly used for avoiding multiple inherits)
- exported_vars = {x for x in eclass_obj.exported_variable_names if not x.startswith('_')}
+ exported_vars = {x for x in eclass_obj.exported_variable_names if not x.startswith("_")}
vars_missing_docs = (
- exported_vars - self.eclass_keys
- - eclass_obj.variable_names - eclass_obj.function_variable_names)
+ exported_vars
+ - self.eclass_keys
+ - eclass_obj.variable_names
+ - eclass_obj.function_variable_names
+ )
if vars_missing_docs:
yield EclassDocMissingVar(sorted(vars_missing_docs), eclass=eclass)
diff --git a/src/pkgcheck/checks/git.py b/src/pkgcheck/checks/git.py
index c06c8278..a54ce61e 100644
--- a/src/pkgcheck/checks/git.py
+++ b/src/pkgcheck/checks/git.py
@@ -56,7 +56,7 @@ class GitCommitsSource(sources.Source):
class IncorrectCopyright(results.AliasResult, results.Warning):
"""Changed file with incorrect copyright date."""
- _name = 'IncorrectCopyright'
+ _name = "IncorrectCopyright"
def __init__(self, year, line, **kwargs):
super().__init__(**kwargs)
@@ -65,7 +65,7 @@ class IncorrectCopyright(results.AliasResult, results.Warning):
@property
def desc(self):
- return f'incorrect copyright year {self.year}: {self.line!r}'
+ return f"incorrect copyright year {self.year}: {self.line!r}"
class EbuildIncorrectCopyright(IncorrectCopyright, results.VersionResult):
@@ -82,8 +82,8 @@ class DirectStableKeywords(results.VersionResult, results.Error):
@property
def desc(self):
s = pluralism(self.keywords)
- keywords = ', '.join(self.keywords)
- return f'directly committed with stable keyword{s}: [ {keywords} ]'
+ keywords = ", ".join(self.keywords)
+ return f"directly committed with stable keyword{s}: [ {keywords} ]"
class _DroppedKeywords(results.PackageResult):
@@ -99,23 +99,22 @@ class _DroppedKeywords(results.PackageResult):
@property
def desc(self):
s = pluralism(self.keywords)
- keywords = ', '.join(self.keywords)
+ keywords = ", ".join(self.keywords)
return (
- f'commit {self.commit} (or later) dropped {self._status} '
- f'keyword{s}: [ {keywords} ]'
+ f"commit {self.commit} (or later) dropped {self._status} " f"keyword{s}: [ {keywords} ]"
)
class DroppedUnstableKeywords(_DroppedKeywords, results.Error):
"""Unstable keywords dropped from package."""
- _status = 'unstable'
+ _status = "unstable"
class DroppedStableKeywords(_DroppedKeywords, results.Error):
"""Stable keywords dropped from package."""
- _status = 'stable'
+ _status = "stable"
class DirectNoMaintainer(results.PackageResult, results.Error):
@@ -123,7 +122,7 @@ class DirectNoMaintainer(results.PackageResult, results.Error):
@property
def desc(self):
- return 'directly committed with no package maintainer'
+ return "directly committed with no package maintainer"
class RdependChange(results.VersionResult, results.Warning):
@@ -131,7 +130,7 @@ class RdependChange(results.VersionResult, results.Warning):
@property
def desc(self):
- return 'RDEPEND modified without revbump'
+ return "RDEPEND modified without revbump"
class MissingSlotmove(results.VersionResult, results.Error):
@@ -150,7 +149,7 @@ class MissingSlotmove(results.VersionResult, results.Error):
@property
def desc(self):
- return f'changed SLOT: {self.old} -> {self.new}'
+ return f"changed SLOT: {self.old} -> {self.new}"
class MissingMove(results.PackageResult, results.Error):
@@ -169,7 +168,7 @@ class MissingMove(results.PackageResult, results.Error):
@property
def desc(self):
- return f'renamed package: {self.old} -> {self.new}'
+ return f"renamed package: {self.old} -> {self.new}"
class _RemovalRepo(UnconfiguredTree):
@@ -177,17 +176,17 @@ class _RemovalRepo(UnconfiguredTree):
def __init__(self, repo):
self.__parent_repo = repo
- self.__tmpdir = TemporaryDirectory(prefix='tmp-pkgcheck-', suffix='.repo')
+ self.__tmpdir = TemporaryDirectory(prefix="tmp-pkgcheck-", suffix=".repo")
self.__created = False
repo_dir = self.__tmpdir.name
# set up some basic repo files so pkgcore doesn't complain
- os.makedirs(pjoin(repo_dir, 'metadata'))
- with open(pjoin(repo_dir, 'metadata', 'layout.conf'), 'w') as f:
+ os.makedirs(pjoin(repo_dir, "metadata"))
+ with open(pjoin(repo_dir, "metadata", "layout.conf"), "w") as f:
f.write(f"masters = {' '.join(x.repo_id for x in repo.trees)}\n")
- os.makedirs(pjoin(repo_dir, 'profiles'))
- with open(pjoin(repo_dir, 'profiles', 'repo_name'), 'w') as f:
- f.write('old-repo\n')
+ os.makedirs(pjoin(repo_dir, "profiles"))
+ with open(pjoin(repo_dir, "profiles", "repo_name"), "w") as f:
+ f.write("old-repo\n")
super().__init__(repo_dir)
def cleanup(self):
@@ -205,34 +204,44 @@ class _RemovalRepo(UnconfiguredTree):
def _populate(self, pkgs):
"""Populate the repo with a given sequence of historical packages."""
- pkg = min(pkgs, key=attrgetter('time'))
+ pkg = min(pkgs, key=attrgetter("time"))
paths = [pjoin(pkg.category, pkg.package)]
- for subdir in ('eclass', 'profiles'):
+ for subdir in ("eclass", "profiles"):
if os.path.exists(pjoin(self.__parent_repo.location, subdir)):
paths.append(subdir)
old_files = subprocess.Popen(
- ['git', 'archive', f'{pkg.commit}~1'] + paths,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE,
- cwd=self.__parent_repo.location)
+ ["git", "archive", f"{pkg.commit}~1"] + paths,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=self.__parent_repo.location,
+ )
if old_files.poll():
error = old_files.stderr.read().decode().strip()
- raise PkgcheckUserException(f'failed populating archive repo: {error}')
- with tarfile.open(mode='r|', fileobj=old_files.stdout) as tar:
+ raise PkgcheckUserException(f"failed populating archive repo: {error}")
+ with tarfile.open(mode="r|", fileobj=old_files.stdout) as tar:
tar.extractall(path=self.location)
class GitPkgCommitsCheck(GentooRepoCheck, GitCommitsCheck):
"""Check unpushed git package commits for various issues."""
- _source = (sources.PackageRepoSource, (), (('source', GitCommitsRepoSource),))
+ _source = (sources.PackageRepoSource, (), (("source", GitCommitsRepoSource),))
required_addons = (git.GitAddon,)
- known_results = frozenset([
- DirectStableKeywords, DirectNoMaintainer, RdependChange, EbuildIncorrectCopyright,
- DroppedStableKeywords, DroppedUnstableKeywords, MissingSlotmove, MissingMove,
- ])
+ known_results = frozenset(
+ [
+ DirectStableKeywords,
+ DirectNoMaintainer,
+ RdependChange,
+ EbuildIncorrectCopyright,
+ DroppedStableKeywords,
+ DroppedUnstableKeywords,
+ MissingSlotmove,
+ MissingMove,
+ ]
+ )
# package categories that are committed with stable keywords
- allowed_direct_stable = frozenset(['acct-user', 'acct-group'])
+ allowed_direct_stable = frozenset(["acct-user", "acct-group"])
def __init__(self, *args, git_addon):
super().__init__(*args)
@@ -268,25 +277,23 @@ class GitPkgCommitsCheck(GentooRepoCheck, GitCommitsCheck):
pkg = pkgs[0]
removal_repo = self.removal_repo(pkgs)
- old_keywords = set().union(*(
- p.keywords for p in removal_repo.match(pkg.unversioned_atom)))
- new_keywords = set().union(*(
- p.keywords for p in self.repo.match(pkg.unversioned_atom)))
+ old_keywords = set().union(*(p.keywords for p in removal_repo.match(pkg.unversioned_atom)))
+ new_keywords = set().union(*(p.keywords for p in self.repo.match(pkg.unversioned_atom)))
dropped_keywords = old_keywords - new_keywords
dropped_stable_keywords = dropped_keywords & self.valid_arches
dropped_unstable_keywords = set()
- for keyword in (x for x in dropped_keywords if x[0] == '~'):
+ for keyword in (x for x in dropped_keywords if x[0] == "~"):
arch = keyword[1:]
if arch in self.valid_arches and arch not in new_keywords:
dropped_unstable_keywords.add(keyword)
if dropped_stable_keywords:
- yield DroppedStableKeywords(
- sort_keywords(dropped_stable_keywords), pkg.commit, pkg=pkg)
+ yield DroppedStableKeywords(sort_keywords(dropped_stable_keywords), pkg.commit, pkg=pkg)
if dropped_unstable_keywords:
yield DroppedUnstableKeywords(
- sort_keywords(dropped_unstable_keywords), pkg.commit, pkg=pkg)
+ sort_keywords(dropped_unstable_keywords), pkg.commit, pkg=pkg
+ )
def rename_checks(self, pkgs):
"""Check for issues due to package modifications."""
@@ -297,9 +304,7 @@ class GitPkgCommitsCheck(GentooRepoCheck, GitCommitsCheck):
if old_key == new_key:
return
- pkgmoves = (
- x[1:] for x in self.repo.config.updates.get(old_key, ())
- if x[0] == 'move')
+ pkgmoves = (x[1:] for x in self.repo.config.updates.get(old_key, ()) if x[0] == "move")
for old, new in pkgmoves:
if old.key == old_key and new.key == new_key:
@@ -334,8 +339,8 @@ class GitPkgCommitsCheck(GentooRepoCheck, GitCommitsCheck):
old_slot, new_slot = old_pkg.slot, new_pkg.slot
if old_slot != new_slot:
slotmoves = (
- x[1:] for x in self.repo.config.updates.get(new_pkg.key, ())
- if x[0] == 'slotmove')
+ x[1:] for x in self.repo.config.updates.get(new_pkg.key, ()) if x[0] == "slotmove"
+ )
for atom, moved_slot in slotmoves:
if atom.match(old_pkg) and new_slot == moved_slot:
break
@@ -347,33 +352,33 @@ class GitPkgCommitsCheck(GentooRepoCheck, GitCommitsCheck):
# under the --diff-filter option in git log parsing support and are
# disambiguated as follows:
# A -> added, R -> renamed, M -> modified, D -> deleted
- pkg_map = {'A': set(), 'R': set(), 'M': set(), 'D': set()}
+ pkg_map = {"A": set(), "R": set(), "M": set(), "D": set()}
# Iterate over pkg commits in chronological order (git log defaults to
# the reverse) discarding matching pkg commits where relevant.
for pkg in reversed(pkgset):
pkg_map[pkg.status].add(pkg)
- if pkg.status == 'A':
- pkg_map['D'].discard(pkg)
- elif pkg.status == 'D':
- pkg_map['A'].discard(pkg)
- elif pkg.status == 'R':
+ if pkg.status == "A":
+ pkg_map["D"].discard(pkg)
+ elif pkg.status == "D":
+ pkg_map["A"].discard(pkg)
+ elif pkg.status == "R":
# create pkg add/removal for rename operation
- pkg_map['A'].add(pkg)
- pkg_map['D'].add(pkg.old_pkg())
+ pkg_map["A"].add(pkg)
+ pkg_map["D"].add(pkg.old_pkg())
# run removed package checks
- if pkg_map['D']:
- yield from self.removal_checks(list(pkg_map['D']))
+ if pkg_map["D"]:
+ yield from self.removal_checks(list(pkg_map["D"]))
# run renamed package checks
- if pkg_map['R']:
- yield from self.rename_checks(list(pkg_map['R']))
+ if pkg_map["R"]:
+ yield from self.rename_checks(list(pkg_map["R"]))
# run modified package checks
- if modified := [pkg for pkg in pkg_map['M'] if pkg not in pkg_map['D']]:
+ if modified := [pkg for pkg in pkg_map["M"] if pkg not in pkg_map["D"]]:
yield from self.modified_checks(modified)
for git_pkg in pkgset:
# remaining checks are irrelevant for removed packages
- if git_pkg in pkg_map['D']:
+ if git_pkg in pkg_map["D"]:
continue
# pull actual package object from repo
@@ -386,15 +391,15 @@ class GitPkgCommitsCheck(GentooRepoCheck, GitCommitsCheck):
# check copyright on new/modified ebuilds
if mo := copyright_regex.match(line):
- year = mo.group('end')
+ year = mo.group("end")
if int(year) != self.today.year:
- yield EbuildIncorrectCopyright(year, line.strip('\n'), pkg=pkg)
+ yield EbuildIncorrectCopyright(year, line.strip("\n"), pkg=pkg)
# checks for newly added ebuilds
- if git_pkg.status == 'A':
+ if git_pkg.status == "A":
# check for directly added stable ebuilds
if pkg.category not in self.allowed_direct_stable:
- if stable_keywords := sorted(x for x in pkg.keywords if x[0] not in '~-'):
+ if stable_keywords := sorted(x for x in pkg.keywords if x[0] not in "~-"):
yield DirectStableKeywords(stable_keywords, pkg=pkg)
# pkg was just added to the tree
@@ -422,8 +427,8 @@ class MissingSignOff(results.CommitResult, results.Error):
@property
def desc(self):
s = pluralism(self.missing_sign_offs)
- sign_offs = ', '.join(self.missing_sign_offs)
- return f'commit {self.commit}, missing sign-off{s}: {sign_offs}'
+ sign_offs = ", ".join(self.missing_sign_offs)
+ return f"commit {self.commit}, missing sign-off{s}: {sign_offs}"
class InvalidCommitTag(results.CommitResult, results.Style):
@@ -453,7 +458,7 @@ class InvalidCommitMessage(results.CommitResult, results.Style):
@property
def desc(self):
- return f'commit {self.commit}: {self.error}'
+ return f"commit {self.commit}: {self.error}"
class BadCommitSummary(results.CommitResult, results.Style):
@@ -474,7 +479,7 @@ class BadCommitSummary(results.CommitResult, results.Style):
@property
def desc(self):
- return f'commit {self.commit}, {self.error}: {self.summary!r}'
+ return f"commit {self.commit}, {self.error}: {self.summary!r}"
def verify_tags(*tags, required=False):
@@ -498,28 +503,37 @@ class GitCommitMessageCheck(GentooRepoCheck, GitCommitsCheck):
"""Check unpushed git commit messages for various issues."""
_source = GitCommitsSource
- known_results = frozenset([
- MissingSignOff, InvalidCommitTag, InvalidCommitMessage, BadCommitSummary,
- ])
+ known_results = frozenset(
+ [
+ MissingSignOff,
+ InvalidCommitTag,
+ InvalidCommitMessage,
+ BadCommitSummary,
+ ]
+ )
# mapping between known commit tags and verification methods
known_tags = {}
- _commit_footer_regex = re.compile(r'^(?P<tag>[a-zA-Z0-9_-]+): (?P<value>.*)$')
- _git_cat_file_regex = re.compile(r'^(?P<object>.+?) (?P<status>.+)$')
+ _commit_footer_regex = re.compile(r"^(?P<tag>[a-zA-Z0-9_-]+): (?P<value>.*)$")
+ _git_cat_file_regex = re.compile(r"^(?P<object>.+?) (?P<status>.+)$")
# categories exempted from the rule that the summary include the package version
- skipped_categories = frozenset({
- 'acct-group', 'acct-user', 'virtual',
- })
+ skipped_categories = frozenset(
+ {
+ "acct-group",
+ "acct-user",
+ "virtual",
+ }
+ )
def __init__(self, *args):
super().__init__(*args)
# mapping of required tags to forcibly-run verification methods
self._required_tags = ImmutableDict(
- ((tag, verify), [])
- for tag, (verify, required) in self.known_tags.items() if required)
+ ((tag, verify), []) for tag, (verify, required) in self.known_tags.items() if required
+ )
- @verify_tags('Signed-off-by', required=True)
+ @verify_tags("Signed-off-by", required=True)
def _signed_off_by_tag(self, tag, values, commit):
"""Verify commit contains all required sign offs in accordance with GLEP 76."""
required_sign_offs = {commit.author, commit.committer}
@@ -527,14 +541,13 @@ class GitCommitMessageCheck(GentooRepoCheck, GitCommitsCheck):
if missing_sign_offs:
yield MissingSignOff(sorted(missing_sign_offs), commit=commit)
- @verify_tags('Gentoo-Bug')
+ @verify_tags("Gentoo-Bug")
def _deprecated_tag(self, tag, values, commit):
"""Flag deprecated tags that shouldn't be used."""
for value in values:
- yield InvalidCommitTag(
- tag, value, f"{tag} tag is no longer valid", commit=commit)
+ yield InvalidCommitTag(tag, value, f"{tag} tag is no longer valid", commit=commit)
- @verify_tags('Bug', 'Closes')
+ @verify_tags("Bug", "Closes")
def _bug_tag(self, tag, values, commit):
"""Verify values are URLs for Bug/Closes tags."""
for value in values:
@@ -544,40 +557,44 @@ class GitCommitMessageCheck(GentooRepoCheck, GitCommitsCheck):
continue
if parsed.scheme.lower() not in ("http", "https"):
yield InvalidCommitTag(
- tag, value, "invalid protocol; should be http or https", commit=commit)
+ tag, value, "invalid protocol; should be http or https", commit=commit
+ )
@klass.jit_attr_none
def git_cat_file(self):
"""Start a `git cat-file` process to verify git repo hashes."""
return subprocess.Popen(
- ['git', 'cat-file', '--batch-check'],
+ ["git", "cat-file", "--batch-check"],
cwd=self.options.target_repo.location,
- stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL,
- encoding='utf8', bufsize=1)
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ encoding="utf8",
+ bufsize=1,
+ )
- @verify_tags('Fixes', 'Reverts')
+ @verify_tags("Fixes", "Reverts")
def _commit_tag(self, tag, values, commit):
"""Verify referenced commits exist for Fixes/Reverts tags."""
- self.git_cat_file.stdin.write('\n'.join(values) + '\n')
+ self.git_cat_file.stdin.write("\n".join(values) + "\n")
if self.git_cat_file.poll() is None:
for _ in range(len(values)):
line = self.git_cat_file.stdout.readline().strip()
if mo := self._git_cat_file_regex.match(line):
- value = mo.group('object')
- status = mo.group('status')
- if not status.startswith('commit '):
- yield InvalidCommitTag(
- tag, value, f'{status} commit', commit=commit)
+ value = mo.group("object")
+ status = mo.group("status")
+ if not status.startswith("commit "):
+ yield InvalidCommitTag(tag, value, f"{status} commit", commit=commit)
def feed(self, commit):
if len(commit.message) == 0:
- yield InvalidCommitMessage('no commit message', commit=commit)
+ yield InvalidCommitMessage("no commit message", commit=commit)
return
# drop leading '*: ' prefix assuming it's a package/eclass/file/path
summary = commit.message[0]
- if len(summary.split(': ', 1)[-1]) > 69:
- yield InvalidCommitMessage('summary is too long', commit=commit)
+ if len(summary.split(": ", 1)[-1]) > 69:
+ yield InvalidCommitMessage("summary is too long", commit=commit)
# categorize package changes
pkg_changes = defaultdict(set)
@@ -590,19 +607,21 @@ class GitCommitMessageCheck(GentooRepoCheck, GitCommitsCheck):
if len({x.package for x in atoms}) == 1:
# changes to a single cat/pn
atom = next(iter(atoms))
- if not re.match(rf'^{re.escape(atom.key)}: ', summary):
- error = f'summary missing {atom.key!r} package prefix'
+ if not re.match(rf"^{re.escape(atom.key)}: ", summary):
+ error = f"summary missing {atom.key!r} package prefix"
yield BadCommitSummary(error, summary, commit=commit)
# check for version in summary for singular, non-revision bumps
- if len(commit.pkgs['A']) == 1 and category not in self.skipped_categories:
- atom = next(iter(commit.pkgs['A']))
- if not atom.revision and not re.match(rf'^.+\bv?{re.escape(atom.version)}\b.*$', summary):
- error = f'summary missing package version {atom.version!r}'
+ if len(commit.pkgs["A"]) == 1 and category not in self.skipped_categories:
+ atom = next(iter(commit.pkgs["A"]))
+ if not atom.revision and not re.match(
+ rf"^.+\bv?{re.escape(atom.version)}\b.*$", summary
+ ):
+ error = f"summary missing package version {atom.version!r}"
yield BadCommitSummary(error, summary, commit=commit)
else:
# multiple pkg changes in the same category
- if not re.match(rf'^{re.escape(category)}: ', summary):
- error = f'summary missing {category!r} category prefix'
+ if not re.match(rf"^{re.escape(category)}: ", summary):
+ error = f"summary missing {category!r} category prefix"
yield BadCommitSummary(error, summary, commit=commit)
# verify message body
@@ -613,18 +632,17 @@ class GitCommitMessageCheck(GentooRepoCheck, GitCommitsCheck):
if not line.strip():
continue
if self._commit_footer_regex.match(line) is None:
- if not body and commit.message[1] != '':
- yield InvalidCommitMessage(
- 'missing empty line before body', commit=commit)
+ if not body and commit.message[1] != "":
+ yield InvalidCommitMessage("missing empty line before body", commit=commit)
# still processing the body
body = True
if len(line.split()) > 1 and len(line) > 80:
yield InvalidCommitMessage(
- f'line {lineno} greater than 80 chars: {line!r}', commit=commit)
+ f"line {lineno} greater than 80 chars: {line!r}", commit=commit
+ )
else:
- if commit.message[lineno - 1] != '':
- yield InvalidCommitMessage(
- 'missing empty line before tags', commit=commit)
+ if commit.message[lineno - 1] != "":
+ yield InvalidCommitMessage("missing empty line before tags", commit=commit)
# push it back on the stack
i = chain([line], i)
break
@@ -637,20 +655,20 @@ class GitCommitMessageCheck(GentooRepoCheck, GitCommitsCheck):
if not line.strip():
# single empty end line is ignored
if lineno != len(commit.message):
- yield InvalidCommitMessage(
- f'empty line {lineno} in footer', commit=commit)
+ yield InvalidCommitMessage(f"empty line {lineno} in footer", commit=commit)
continue
if mo := self._commit_footer_regex.match(line):
# register known tags for verification
- tag = mo.group('tag')
+ tag = mo.group("tag")
try:
func, required = self.known_tags[tag]
- tags.setdefault((tag, func), []).append(mo.group('value'))
+ tags.setdefault((tag, func), []).append(mo.group("value"))
except KeyError:
continue
else:
yield InvalidCommitMessage(
- f'non-tag in footer, line {lineno}: {line!r}', commit=commit)
+ f"non-tag in footer, line {lineno}: {line!r}", commit=commit
+ )
# run tag verification methods
for (tag, func), values in tags.items():
@@ -662,7 +680,7 @@ class EclassIncorrectCopyright(IncorrectCopyright, results.EclassResult):
@property
def desc(self):
- return f'{self.eclass}: {super().desc}'
+ return f"{self.eclass}: {super().desc}"
class GitEclassCommitsCheck(GentooRepoCheck, GitCommitsCheck):
@@ -679,6 +697,6 @@ class GitEclassCommitsCheck(GentooRepoCheck, GitCommitsCheck):
# check copyright on new/modified eclasses
line = next(iter(eclass.lines))
if mo := copyright_regex.match(line):
- year = mo.group('end')
+ year = mo.group("end")
if int(year) != self.today.year:
- yield EclassIncorrectCopyright(year, line.strip('\n'), eclass=eclass)
+ yield EclassIncorrectCopyright(year, line.strip("\n"), eclass=eclass)
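The Fixes/Reverts verification in _commit_tag above streams revisions to a long-running git process instead of forking once per tag. A minimal sketch of that round trip, assuming a `git cat-file --batch-check` subprocess and an illustrative regex (the real check keeps both on the check instance):

import re
import subprocess

# one persistent process for the whole scan; must run inside a git repo
git_cat_file = subprocess.Popen(
    ["git", "cat-file", "--batch-check"],
    stdin=subprocess.PIPE,
    stdout=subprocess.PIPE,
    text=True,
)
# --batch-check prints "<oid> <type> <size>" per object, or "<rev> missing"
_git_cat_file_regex = re.compile(r"^(?P<object>\S+) (?P<status>.+)$")

def object_statuses(values):
    """Yield one (object, status) pair per requested revision."""
    git_cat_file.stdin.write("\n".join(values) + "\n")
    git_cat_file.stdin.flush()
    for _ in values:
        line = git_cat_file.stdout.readline().strip()
        if mo := _git_cat_file_regex.match(line):
            yield mo.group("object"), mo.group("status")

Any value whose status does not start with "commit " is then reported as an InvalidCommitTag, exactly as in the hunk above.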
diff --git a/src/pkgcheck/checks/glsa.py b/src/pkgcheck/checks/glsa.py
index 2f869099..79d4ec65 100644
--- a/src/pkgcheck/checks/glsa.py
+++ b/src/pkgcheck/checks/glsa.py
@@ -23,8 +23,8 @@ class VulnerablePackage(results.VersionResult, results.Error):
@property
def desc(self):
s = pluralism(self.arches)
- arches = ', '.join(self.arches)
- return f'vulnerable via {self.glsa}, keyword{s}: {arches}'
+ arches = ", ".join(self.arches)
+ return f"vulnerable via {self.glsa}, keyword{s}: {arches}"
class GlsaCheck(GentooRepoCheck):
@@ -37,8 +37,7 @@ class GlsaCheck(GentooRepoCheck):
@staticmethod
def mangle_argparser(parser):
- parser.plugin.add_argument(
- "--glsa-dir", type=existent_dir, help="custom glsa directory")
+ parser.plugin.add_argument("--glsa-dir", type=existent_dir, help="custom glsa directory")
def __init__(self, *args):
super().__init__(*args)
@@ -46,12 +45,12 @@ class GlsaCheck(GentooRepoCheck):
if glsa_dir is None:
# search for glsa dir in target repo and then any masters
for repo in reversed(self.options.target_repo.trees):
- path = pjoin(repo.location, 'metadata', 'glsa')
+ path = pjoin(repo.location, "metadata", "glsa")
if os.path.isdir(path):
glsa_dir = path
break
else:
- raise SkipCheck(self, 'no available glsa source')
+ raise SkipCheck(self, "no available glsa source")
# this is a bit brittle
self.vulns = defaultdict(list)
@@ -63,13 +62,14 @@ class GlsaCheck(GentooRepoCheck):
for vuln in self.vulns.get(pkg.key, ()):
if vuln.match(pkg):
arches = set()
- for v in collect_package_restrictions(vuln, ['keywords']):
+ for v in collect_package_restrictions(vuln, ["keywords"]):
if isinstance(v.restriction, values.ContainmentMatch2):
- arches.update(x.lstrip('~') for x in v.restriction.vals)
+ arches.update(x.lstrip("~") for x in v.restriction.vals)
else:
raise Exception(
- f'unexpected restriction sequence- {v.restriction} in {vuln}')
- keys = {x.lstrip('~') for x in pkg.keywords if not x.startswith('-')}
+ f"unexpected restriction sequence- {v.restriction} in {vuln}"
+ )
+ keys = {x.lstrip("~") for x in pkg.keywords if not x.startswith("-")}
if arches:
arches = sorted(arches.intersection(keys))
assert arches
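Nearly every desc property in these checks leans on snakeoil's pluralism() helper to pick a suffix from sequence length, as the keyword{s} interpolation above does. A hedged usage sketch (assuming the snakeoil.strings import pkgcheck uses for this helper; the GLSA id is made up):

from snakeoil.strings import pluralism

arches = ["amd64", "x86"]
s = pluralism(arches)  # "s" for multi-item sequences, "" for a single item
print(f"vulnerable via glsa-202301-01, keyword{s}: {', '.join(arches)}")

# irregular plurals pass explicit forms, as DeprecatedDep.desc does below:
ies = pluralism(["dev-foo/bar"], singular="y", plural="ies")  # -> "y"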
diff --git a/src/pkgcheck/checks/header.py b/src/pkgcheck/checks/header.py
index c08c4c8b..429b26ce 100644
--- a/src/pkgcheck/checks/header.py
+++ b/src/pkgcheck/checks/header.py
@@ -5,8 +5,7 @@ import re
from .. import results, sources
from . import GentooRepoCheck
-copyright_regex = re.compile(
- r'^# Copyright (?P<begin>\d{4}-)?(?P<end>\d{4}) (?P<holder>.+)$')
+copyright_regex = re.compile(r"^# Copyright (?P<begin>\d{4}-)?(?P<end>\d{4}) (?P<holder>.+)$")
class _FileHeaderResult(results.Result):
@@ -30,11 +29,11 @@ class InvalidCopyright(_FileHeaderResult, results.AliasResult, results.Error):
# Copyright YEARS Gentoo Authors
"""
- _name = 'InvalidCopyright'
+ _name = "InvalidCopyright"
@property
def desc(self):
- return f'invalid copyright: {self.line!r}'
+ return f"invalid copyright: {self.line!r}"
class OldGentooCopyright(_FileHeaderResult, results.AliasResult, results.Warning):
@@ -49,7 +48,7 @@ class OldGentooCopyright(_FileHeaderResult, results.AliasResult, results.Warning
holder instead.
"""
- _name = 'OldGentooCopyright'
+ _name = "OldGentooCopyright"
@property
def desc(self):
@@ -65,7 +64,7 @@ class NonGentooAuthorsCopyright(_FileHeaderResult, results.AliasResult, results.
via bugs.gentoo.org.
"""
- _name = 'NonGentooAuthorsCopyright'
+ _name = "NonGentooAuthorsCopyright"
@property
def desc(self):
@@ -82,13 +81,13 @@ class InvalidLicenseHeader(_FileHeaderResult, results.AliasResult, results.Error
# Distributed under the terms of the GNU General Public License v2
"""
- _name = 'InvalidLicenseHeader'
+ _name = "InvalidLicenseHeader"
@property
def desc(self):
if self.line:
- return f'invalid license header: {self.line!r}'
- return 'missing license header'
+ return f"invalid license header: {self.line!r}"
+ return "missing license header"
class _HeaderCheck(GentooRepoCheck):
@@ -98,12 +97,17 @@ class _HeaderCheck(GentooRepoCheck):
_old_copyright = OldGentooCopyright
_non_gentoo_authors = NonGentooAuthorsCopyright
_invalid_license = InvalidLicenseHeader
- known_results = frozenset([
- _invalid_copyright, _old_copyright, _non_gentoo_authors, _invalid_license,
- ])
- _item_attr = 'pkg'
-
- license_header = '# Distributed under the terms of the GNU General Public License v2'
+ known_results = frozenset(
+ [
+ _invalid_copyright,
+ _old_copyright,
+ _non_gentoo_authors,
+ _invalid_license,
+ ]
+ )
+ _item_attr = "pkg"
+
+ license_header = "# Distributed under the terms of the GNU General Public License v2"
def args(self, item):
return {self._item_attr: item}
@@ -114,19 +118,19 @@ class _HeaderCheck(GentooRepoCheck):
if mo := copyright_regex.match(line):
# Copyright policy is active since 2018-10-21, so it applies
# to all ebuilds committed in 2019 and later
- if int(mo.group('end')) >= 2019:
- if mo.group('holder') == 'Gentoo Foundation':
+ if int(mo.group("end")) >= 2019:
+ if mo.group("holder") == "Gentoo Foundation":
yield self._old_copyright(line, **self.args(item))
# Gentoo policy requires 'Gentoo Authors'
- elif mo.group('holder') != 'Gentoo Authors':
+ elif mo.group("holder") != "Gentoo Authors":
yield self._non_gentoo_authors(line, **self.args(item))
else:
yield self._invalid_copyright(line, **self.args(item))
try:
- line = item.lines[1].strip('\n')
+ line = item.lines[1].strip("\n")
except IndexError:
- line = ''
+ line = ""
if line != self.license_header:
yield self._invalid_license(line, **self.args(item))
@@ -156,10 +160,15 @@ class EbuildHeaderCheck(_HeaderCheck):
_old_copyright = EbuildOldGentooCopyright
_non_gentoo_authors = EbuildNonGentooAuthorsCopyright
_invalid_license = EbuildInvalidLicenseHeader
- known_results = frozenset([
- _invalid_copyright, _old_copyright, _non_gentoo_authors, _invalid_license,
- ])
- _item_attr = 'pkg'
+ known_results = frozenset(
+ [
+ _invalid_copyright,
+ _old_copyright,
+ _non_gentoo_authors,
+ _invalid_license,
+ ]
+ )
+ _item_attr = "pkg"
class EclassInvalidCopyright(InvalidCopyright, results.EclassResult):
@@ -167,7 +176,7 @@ class EclassInvalidCopyright(InvalidCopyright, results.EclassResult):
@property
def desc(self):
- return f'{self.eclass}: {super().desc}'
+ return f"{self.eclass}: {super().desc}"
class EclassOldGentooCopyright(OldGentooCopyright, results.EclassResult):
@@ -175,7 +184,7 @@ class EclassOldGentooCopyright(OldGentooCopyright, results.EclassResult):
@property
def desc(self):
- return f'{self.eclass}: {super().desc}'
+ return f"{self.eclass}: {super().desc}"
class EclassNonGentooAuthorsCopyright(NonGentooAuthorsCopyright, results.EclassResult):
@@ -183,7 +192,7 @@ class EclassNonGentooAuthorsCopyright(NonGentooAuthorsCopyright, results.EclassR
@property
def desc(self):
- return f'{self.eclass}: {super().desc}'
+ return f"{self.eclass}: {super().desc}"
class EclassInvalidLicenseHeader(InvalidLicenseHeader, results.EclassResult):
@@ -191,7 +200,7 @@ class EclassInvalidLicenseHeader(InvalidLicenseHeader, results.EclassResult):
@property
def desc(self):
- return f'{self.eclass}: {super().desc}'
+ return f"{self.eclass}: {super().desc}"
class EclassHeaderCheck(_HeaderCheck):
@@ -203,7 +212,12 @@ class EclassHeaderCheck(_HeaderCheck):
_old_copyright = EclassOldGentooCopyright
_non_gentoo_authors = EclassNonGentooAuthorsCopyright
_invalid_license = EclassInvalidLicenseHeader
- known_results = frozenset([
- _invalid_copyright, _old_copyright, _non_gentoo_authors, _invalid_license,
- ])
- _item_attr = 'eclass'
+ known_results = frozenset(
+ [
+ _invalid_copyright,
+ _old_copyright,
+ _non_gentoo_authors,
+ _invalid_license,
+ ]
+ )
+ _item_attr = "eclass"
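The copyright_regex reformatted in this file captures an optional begin year (dash included), the end year that the policy checks compare against, and the holder. A quick demonstration of the match groups against a well-formed header line:

import re

copyright_regex = re.compile(
    r"^# Copyright (?P<begin>\d{4}-)?(?P<end>\d{4}) (?P<holder>.+)$"
)

mo = copyright_regex.match("# Copyright 1999-2023 Gentoo Authors")
assert mo.group("begin") == "1999-"  # optional group, keeps the trailing dash
assert mo.group("end") == "2023"     # compared against the 2019 policy cutoff
assert mo.group("holder") == "Gentoo Authors"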
diff --git a/src/pkgcheck/checks/imlate.py b/src/pkgcheck/checks/imlate.py
index a9688873..ee0da6d8 100644
--- a/src/pkgcheck/checks/imlate.py
+++ b/src/pkgcheck/checks/imlate.py
@@ -18,11 +18,11 @@ class PotentialStable(results.VersionResult, results.Info):
@property
def desc(self):
- es = pluralism(self.stable, plural='es')
- stable = ', '.join(self.stable)
+ es = pluralism(self.stable, plural="es")
+ stable = ", ".join(self.stable)
s = pluralism(self.keywords)
- keywords = ', '.join(self.keywords)
- return f'slot({self.slot}), stabled arch{es}: [ {stable} ], potential{s}: [ {keywords} ]'
+ keywords = ", ".join(self.keywords)
+ return f"slot({self.slot}), stabled arch{es}: [ {stable} ], potential{s}: [ {keywords} ]"
class LaggingStable(results.VersionResult, results.Info):
@@ -36,10 +36,10 @@ class LaggingStable(results.VersionResult, results.Info):
@property
def desc(self):
- es = pluralism(self.stable, plural='es')
- stable = ', '.join(self.stable)
- keywords = ', '.join(self.keywords)
- return f'slot({self.slot}), stabled arch{es}: [ {stable} ], lagging: [ {keywords} ]'
+ es = pluralism(self.stable, plural="es")
+ stable = ", ".join(self.stable)
+ keywords = ", ".join(self.keywords)
+ return f"slot({self.slot}), stabled arch{es}: [ {stable} ], lagging: [ {keywords} ]"
class ImlateCheck(Check):
@@ -52,28 +52,33 @@ class ImlateCheck(Check):
@staticmethod
def mangle_argparser(parser):
parser.plugin.add_argument(
- "--source-arches", action='csv', metavar='ARCH',
+ "--source-arches",
+ action="csv",
+ metavar="ARCH",
help="comma separated list of arches to compare against for lagging stabilization",
docs="""
Comma separated list of arches to compare against for
lagging stabilization.
The default arches are all stable arches (unless --arches is specified).
- """)
+ """,
+ )
def __init__(self, *args, stable_arches_addon=None):
super().__init__(*args)
self.all_arches = frozenset(self.options.arches)
- self.stable_arches = frozenset(arch.strip().lstrip("~") for arch in self.options.stable_arches)
- self.target_arches = frozenset(f'~{arch}' for arch in self.stable_arches)
+ self.stable_arches = frozenset(
+ arch.strip().lstrip("~") for arch in self.options.stable_arches
+ )
+ self.target_arches = frozenset(f"~{arch}" for arch in self.stable_arches)
source_arches = self.options.source_arches
if source_arches is None:
source_arches = self.options.stable_arches
- self.source_arches = frozenset(
- arch.lstrip("~") for arch in source_arches)
+ self.source_arches = frozenset(arch.lstrip("~") for arch in source_arches)
self.source_filter = packages.PackageRestriction(
- "keywords", values.ContainmentMatch2(self.source_arches))
+ "keywords", values.ContainmentMatch2(self.source_arches)
+ )
def feed(self, pkgset):
pkg_slotted = defaultdict(list)
@@ -84,7 +89,7 @@ class ImlateCheck(Check):
for slot, pkgs in sorted(pkg_slotted.items()):
slot_keywords = set().union(*(pkg.keywords for pkg in pkgs))
stable_slot_keywords = self.all_arches.intersection(slot_keywords)
- potential_slot_stables = {'~' + x for x in stable_slot_keywords}
+ potential_slot_stables = {"~" + x for x in stable_slot_keywords}
newer_slot_stables = set()
for pkg in reversed(pkgs):
# only consider pkgs with keywords that contain the targeted arches
@@ -93,23 +98,21 @@ class ImlateCheck(Check):
continue
# current pkg stable keywords
- stable = {'~' + x for x in self.source_arches.intersection(pkg.keywords)}
+ stable = {"~" + x for x in self.source_arches.intersection(pkg.keywords)}
lagging = potential_slot_stables.intersection(pkg.keywords)
# skip keywords that have newer stable versions
- lagging -= {'~' + x for x in newer_slot_stables}
+ lagging -= {"~" + x for x in newer_slot_stables}
lagging -= stable
if lagging:
- stable_kwds = (x for x in pkg.keywords if not x[0] in ('~', '-'))
- yield LaggingStable(
- slot, sorted(stable_kwds), sorted(lagging), pkg=pkg)
+ stable_kwds = (x for x in pkg.keywords if not x[0] in ("~", "-"))
+ yield LaggingStable(slot, sorted(stable_kwds), sorted(lagging), pkg=pkg)
- unstable_keywords = {x for x in pkg.keywords if x[0] == '~'}
+ unstable_keywords = {x for x in pkg.keywords if x[0] == "~"}
potential = self.target_arches.intersection(unstable_keywords)
potential -= lagging | stable
if potential:
- stable_kwds = (x for x in pkg.keywords if not x[0] in ('~', '-'))
- yield PotentialStable(
- slot, sorted(stable_kwds), sorted(potential), pkg=pkg)
+ stable_kwds = (x for x in pkg.keywords if not x[0] in ("~", "-"))
+ yield PotentialStable(slot, sorted(stable_kwds), sorted(potential), pkg=pkg)
break
diff --git a/src/pkgcheck/checks/metadata.py b/src/pkgcheck/checks/metadata.py
index 56d54529..8a26024c 100644
--- a/src/pkgcheck/checks/metadata.py
+++ b/src/pkgcheck/checks/metadata.py
@@ -37,32 +37,32 @@ class _LicenseResult(results.VersionResult):
@property
def desc(self):
s = pluralism(self.licenses)
- licenses = ', '.join(self.licenses)
- return f'{self.license_type} license{s}: {licenses}'
+ licenses = ", ".join(self.licenses)
+ return f"{self.license_type} license{s}: {licenses}"
class UnknownLicense(_LicenseResult, results.Error):
"""License usage with no matching license file."""
- license_type = 'unknown'
+ license_type = "unknown"
class DeprecatedLicense(_LicenseResult, results.Warning):
"""Deprecated license usage."""
- license_type = 'deprecated'
+ license_type = "deprecated"
class MissingLicense(results.VersionResult, results.Error):
"""Package has no LICENSE defined."""
- desc = 'no license defined'
+ desc = "no license defined"
class InvalidLicense(results.MetadataError, results.VersionResult):
"""Package's LICENSE is invalid."""
- attr = 'license'
+ attr = "license"
class MissingLicenseRestricts(results.VersionResult, results.Warning):
@@ -76,10 +76,9 @@ class MissingLicenseRestricts(results.VersionResult, results.Warning):
@property
def desc(self):
- restrictions = ' '.join(self.restrictions)
+ restrictions = " ".join(self.restrictions)
return (
- f'{self.license_group} license {self.license!r} '
- f'requires RESTRICT="{restrictions}"'
+ f"{self.license_group} license {self.license!r} " f'requires RESTRICT="{restrictions}"'
)
@@ -94,23 +93,30 @@ class UnnecessaryLicense(results.VersionResult, results.Warning):
class LicenseCheck(Check):
"""LICENSE validity checks."""
- known_results = frozenset([
- InvalidLicense, MissingLicense, UnknownLicense, DeprecatedLicense,
- UnnecessaryLicense, UnstatedIuse, MissingLicenseRestricts,
- ])
+ known_results = frozenset(
+ [
+ InvalidLicense,
+ MissingLicense,
+ UnknownLicense,
+ DeprecatedLicense,
+ UnnecessaryLicense,
+ UnstatedIuse,
+ MissingLicenseRestricts,
+ ]
+ )
# categories for ebuilds that can lack LICENSE settings
- unlicensed_categories = frozenset(['virtual', 'acct-group', 'acct-user'])
+ unlicensed_categories = frozenset(["virtual", "acct-group", "acct-user"])
required_addons = (addons.UseAddon,)
def __init__(self, *args, use_addon):
super().__init__(*args)
repo = self.options.target_repo
- self.iuse_filter = use_addon.get_filter('license')
- self.deprecated = repo.licenses.groups.get('DEPRECATED', frozenset())
- self.eula = repo.licenses.groups.get('EULA', frozenset())
- self.mirror_restricts = frozenset(['fetch', 'mirror'])
+ self.iuse_filter = use_addon.get_filter("license")
+ self.deprecated = repo.licenses.groups.get("DEPRECATED", frozenset())
+ self.eula = repo.licenses.groups.get("EULA", frozenset())
+ self.mirror_restricts = frozenset(["fetch", "mirror"])
def _required_licenses(self, license_group, nodes, restricts=None):
"""Determine required licenses from a given license group."""
@@ -140,14 +146,13 @@ class LicenseCheck(Check):
restricts = set().union(*(x.vals for x in restrictions if not x.negate))
license_restrictions = pkg.restrict.evaluate_depset(restricts)
missing_restricts = []
- if 'bindist' not in license_restrictions:
- missing_restricts.append('bindist')
+ if "bindist" not in license_restrictions:
+ missing_restricts.append("bindist")
if not self.mirror_restricts.intersection(license_restrictions):
if pkg.fetchables:
- missing_restricts.append('mirror')
+ missing_restricts.append("mirror")
if missing_restricts:
- yield MissingLicenseRestricts(
- 'EULA', license, missing_restricts, pkg=pkg)
+ yield MissingLicenseRestricts("EULA", license, missing_restricts, pkg=pkg)
# flatten license depset
licenses, unstated = self.iuse_filter((str,), pkg, pkg.license)
@@ -178,26 +183,26 @@ class _UseFlagsResult(results.VersionResult):
@property
def desc(self):
s = pluralism(self.flags)
- flags = ', '.join(map(repr, sorted(self.flags)))
- return f'{self.flag_type} USE flag{s}: {flags}'
+ flags = ", ".join(map(repr, sorted(self.flags)))
+ return f"{self.flag_type} USE flag{s}: {flags}"
class InvalidUseFlags(_UseFlagsResult, results.Error):
"""Package IUSE contains invalid USE flags."""
- flag_type = 'invalid'
+ flag_type = "invalid"
class UnknownUseFlags(_UseFlagsResult, results.Error):
"""Package IUSE contains unknown USE flags."""
- flag_type = 'unknown'
+ flag_type = "unknown"
class BadDefaultUseFlags(_UseFlagsResult, results.Error):
"""Package IUSE contains bad default USE flags."""
- flag_type = 'bad default'
+ flag_type = "bad default"
class IuseCheck(Check):
@@ -205,19 +210,22 @@ class IuseCheck(Check):
required_addons = (addons.UseAddon,)
known_results = frozenset([InvalidUseFlags, UnknownUseFlags, BadDefaultUseFlags])
- use_expand_groups = ('cpu_flags',)
+ use_expand_groups = ("cpu_flags",)
def __init__(self, *args, use_addon):
super().__init__(*args)
self.iuse_handler = use_addon
- self.bad_defaults = tuple(['-'] + [f'+{x}_' for x in self.use_expand_groups])
+ self.bad_defaults = tuple(["-"] + [f"+{x}_" for x in self.use_expand_groups])
def feed(self, pkg):
if invalid := sorted(x for x in pkg.iuse_stripped if not pkg.eapi.is_valid_use_flag(x)):
yield InvalidUseFlags(invalid, pkg=pkg)
- if pkg.eapi.options.iuse_defaults and (bad_defaults := sorted(
- x for x in pkg.iuse if x.startswith(self.bad_defaults) and len(x) > 1)):
+ if pkg.eapi.options.iuse_defaults and (
+ bad_defaults := sorted(
+ x for x in pkg.iuse if x.startswith(self.bad_defaults) and len(x) > 1
+ )
+ ):
yield BadDefaultUseFlags(bad_defaults, pkg=pkg)
if not self.iuse_handler.ignore:
@@ -243,13 +251,13 @@ class _EapiResult(results.VersionResult):
class DeprecatedEapi(_EapiResult, results.Warning):
"""Package's EAPI is deprecated according to repo metadata."""
- _type = 'deprecated'
+ _type = "deprecated"
class BannedEapi(_EapiResult, results.Error):
"""Package's EAPI is banned according to repo metadata."""
- _type = 'banned'
+ _type = "banned"
class StableKeywordsOnTestingEapi(results.VersionResult, results.Error):
@@ -281,8 +289,9 @@ class UnsupportedEclassEapi(results.VersionResult, results.Warning):
class EapiCheck(Check):
"""Scan for packages with banned or deprecated EAPIs."""
- known_results = frozenset([DeprecatedEapi, BannedEapi, UnsupportedEclassEapi,
- StableKeywordsOnTestingEapi])
+ known_results = frozenset(
+ [DeprecatedEapi, BannedEapi, UnsupportedEclassEapi, StableKeywordsOnTestingEapi]
+ )
required_addons = (addons.eclass.EclassAddon,)
def __init__(self, *args, eclass_addon):
@@ -297,7 +306,7 @@ class EapiCheck(Check):
yield DeprecatedEapi(pkg.eapi, pkg=pkg)
if eapi_str in self.options.target_repo.config.eapis_testing:
- stable_keywords_gen = (k for k in pkg.keywords if not k.startswith(('~', '-')))
+ stable_keywords_gen = (k for k in pkg.keywords if not k.startswith(("~", "-")))
if stable_keywords := sorted(stable_keywords_gen):
yield StableKeywordsOnTestingEapi(pkg.eapi, stable_keywords, pkg=pkg)
@@ -310,19 +319,19 @@ class EapiCheck(Check):
class InvalidEapi(results.MetadataError, results.VersionResult):
"""Package's EAPI is invalid."""
- attr = 'eapi'
+ attr = "eapi"
class InvalidSlot(results.MetadataError, results.VersionResult):
"""Package's SLOT is invalid."""
- attr = 'slot'
+ attr = "slot"
class SourcingError(results.MetadataError, results.VersionResult):
"""Failed sourcing ebuild."""
- attr = 'data'
+ attr = "data"
class SourcingCheck(Check):
@@ -346,8 +355,9 @@ class RequiredUseDefaults(results.VersionResult, results.Warning):
or modifying REQUIRED_USE.
"""
- def __init__(self, required_use, use=(), keyword=None,
- profile=None, num_profiles=None, **kwargs):
+ def __init__(
+ self, required_use, use=(), keyword=None, profile=None, num_profiles=None, **kwargs
+ ):
super().__init__(**kwargs)
self.required_use = required_use
self.use = tuple(use)
@@ -359,40 +369,48 @@ class RequiredUseDefaults(results.VersionResult, results.Warning):
def desc(self):
if not self.use:
if self.num_profiles is not None and self.num_profiles > 1:
- num_profiles = f' ({self.num_profiles} total)'
+ num_profiles = f" ({self.num_profiles} total)"
else:
- num_profiles = ''
+ num_profiles = ""
# collapsed version
return (
- f'profile: {self.profile!r}{num_profiles} '
- f'failed REQUIRED_USE: {self.required_use}'
+ f"profile: {self.profile!r}{num_profiles} "
+ f"failed REQUIRED_USE: {self.required_use}"
)
return (
- f'keyword: {self.keyword}, profile: {self.profile!r}, '
+ f"keyword: {self.keyword}, profile: {self.profile!r}, "
f"default USE: [{', '.join(self.use)}] "
- f'-- failed REQUIRED_USE: {self.required_use}'
+ f"-- failed REQUIRED_USE: {self.required_use}"
)
class InvalidRequiredUse(results.MetadataError, results.VersionResult):
"""Package's REQUIRED_USE is invalid."""
- attr = 'required_use'
+ attr = "required_use"
class RequiredUseCheck(Check):
"""REQUIRED_USE validity checks."""
# only run the check for EAPI 4 and above
- _source = (sources.RestrictionRepoSource, (
- packages.PackageRestriction('eapi', values.GetAttrRestriction(
- 'options.has_required_use', values.FunctionRestriction(bool))),))
+ _source = (
+ sources.RestrictionRepoSource,
+ (
+ packages.PackageRestriction(
+ "eapi",
+ values.GetAttrRestriction(
+ "options.has_required_use", values.FunctionRestriction(bool)
+ ),
+ ),
+ ),
+ )
required_addons = (addons.UseAddon, addons.profiles.ProfileAddon)
known_results = frozenset([InvalidRequiredUse, RequiredUseDefaults, UnstatedIuse])
def __init__(self, *args, use_addon, profile_addon):
super().__init__(*args)
- self.iuse_filter = use_addon.get_filter('required_use')
+ self.iuse_filter = use_addon.get_filter("required_use")
self.profiles = profile_addon
def feed(self, pkg):
@@ -404,15 +422,15 @@ class RequiredUseCheck(Check):
# unstable profiles for unstable KEYWORDS
keywords = []
for keyword in pkg.sorted_keywords:
- if keyword[0] != '~':
+ if keyword[0] != "~":
keywords.append(keyword)
- keywords.append('~' + keyword.lstrip('~'))
+ keywords.append("~" + keyword.lstrip("~"))
# check USE defaults (pkg IUSE defaults + profile USE) against
# REQUIRED_USE for all profiles matching a pkg's KEYWORDS
failures = defaultdict(list)
for keyword in keywords:
- for profile in sorted(self.profiles.get(keyword, ()), key=attrgetter('name')):
+ for profile in sorted(self.profiles.get(keyword, ()), key=attrgetter("name")):
# skip packages masked by the profile
if profile.visible(pkg):
src = FakeConfigurable(pkg, profile)
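The keyword loop above fans each stable keyword out into its testing variant so REQUIRED_USE gets evaluated against both profile sets. The same transformation in isolation, with a hypothetical keywords tuple:

sorted_keywords = ("amd64", "~arm64")  # stand-in for pkg.sorted_keywords
keywords = []
for keyword in sorted_keywords:
    if keyword[0] != "~":
        keywords.append(keyword)
    keywords.append("~" + keyword.lstrip("~"))
assert keywords == ["amd64", "~amd64", "~arm64"]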
@@ -424,15 +442,15 @@ class RequiredUseCheck(Check):
# report all failures with profile info in verbose mode
for node, profile_info in failures.items():
for use, keyword, profile in profile_info:
- yield RequiredUseDefaults(
- str(node), sorted(use), keyword, profile, pkg=pkg)
+ yield RequiredUseDefaults(str(node), sorted(use), keyword, profile, pkg=pkg)
else:
# only report one failure per REQUIRED_USE node in regular mode
for node, profile_info in failures.items():
num_profiles = len(profile_info)
_use, _keyword, profile = profile_info[0]
yield RequiredUseDefaults(
- str(node), profile=profile, num_profiles=num_profiles, pkg=pkg)
+ str(node), profile=profile, num_profiles=num_profiles, pkg=pkg
+ )
class UnusedLocalUse(results.PackageResult, results.Warning):
@@ -445,8 +463,8 @@ class UnusedLocalUse(results.PackageResult, results.Warning):
@property
def desc(self):
s = pluralism(self.flags)
- flags = ', '.join(self.flags)
- return f'unused local USE flag{s}: [ {flags} ]'
+ flags = ", ".join(self.flags)
+ return f"unused local USE flag{s}: [ {flags} ]"
class MatchingGlobalUse(results.PackageResult, results.Warning):
@@ -526,8 +544,8 @@ class MissingLocalUseDesc(results.PackageResult, results.Warning):
@property
def desc(self):
s = pluralism(self.flags)
- flags = ', '.join(self.flags)
- return f'local USE flag{s} missing description{s}: [ {flags} ]'
+ flags = ", ".join(self.flags)
+ return f"local USE flag{s} missing description{s}: [ {flags} ]"
class LocalUseCheck(Check):
@@ -535,23 +553,27 @@ class LocalUseCheck(Check):
_source = sources.PackageRepoSource
required_addons = (addons.UseAddon,)
- known_results = frozenset([
- UnusedLocalUse, MatchingGlobalUse, ProbableGlobalUse,
- ProbableUseExpand, UnderscoreInUseFlag, UnstatedIuse,
- MissingLocalUseDesc,
- ])
+ known_results = frozenset(
+ [
+ UnusedLocalUse,
+ MatchingGlobalUse,
+ ProbableGlobalUse,
+ ProbableUseExpand,
+ UnderscoreInUseFlag,
+ UnstatedIuse,
+ MissingLocalUseDesc,
+ ]
+ )
def __init__(self, *args, use_addon):
super().__init__(*args)
repo_config = self.options.target_repo.config
self.iuse_handler = use_addon
- self.global_use = {
- flag: desc for matcher, (flag, desc) in repo_config.use_desc}
+ self.global_use = {flag: desc for matcher, (flag, desc) in repo_config.use_desc}
self.use_expand = dict()
for group in repo_config.use_expand_desc.keys():
- self.use_expand[group] = {
- flag for flag, desc in repo_config.use_expand_desc[group]}
+ self.use_expand[group] = {flag for flag, desc in repo_config.use_expand_desc[group]}
def feed(self, pkgs):
pkg = pkgs[0]
@@ -568,9 +590,9 @@ class LocalUseCheck(Check):
yield MatchingGlobalUse(flag, pkg=pkg)
elif ratio >= 0.75:
yield ProbableGlobalUse(flag, pkg=pkg)
- elif '_' in flag:
+ elif "_" in flag:
for group, flags in self.use_expand.items():
- if flag.startswith(f'{group}_'):
+ if flag.startswith(f"{group}_"):
if flag not in flags:
yield ProbableUseExpand(flag, group.upper(), pkg=pkg)
break
@@ -608,23 +630,32 @@ class UseFlagWithoutDeps(results.VersionResult, results.Warning):
@property
def desc(self):
s = pluralism(self.flags)
- flags = ', '.join(self.flags)
- return f'special small-files USE flag{s} without effect on dependencies: [ {flags} ]'
+ flags = ", ".join(self.flags)
+ return f"special small-files USE flag{s} without effect on dependencies: [ {flags} ]"
class UseFlagsWithoutEffectsCheck(GentooRepoCheck):
"""Check for USE flags without effects."""
- known_results = frozenset({
- UseFlagWithoutDeps,
- })
+ known_results = frozenset(
+ {
+ UseFlagWithoutDeps,
+ }
+ )
- warn_use_small_files = frozenset({
- 'ipv6', 'logrotate', 'unicode',
- 'bash-completion', 'fish-completion', 'zsh-completion', 'vim-syntax',
- # TODO: enable those one day
- # 'systemd',
- })
+ warn_use_small_files = frozenset(
+ {
+ "ipv6",
+ "logrotate",
+ "unicode",
+ "bash-completion",
+ "fish-completion",
+ "zsh-completion",
+ "vim-syntax",
+ # TODO: enable those one day
+ # 'systemd',
+ }
+ )
def feed(self, pkg):
used_flags = set(pkg.local_use)
@@ -632,15 +663,18 @@ class UseFlagsWithoutEffectsCheck(GentooRepoCheck):
deps = getattr(pkg, attr.lower())
use_values = set()
- use_values.update(itertools.chain.from_iterable(
- atom.use or ()
- for atom in iflatten_instance(deps, atom_cls)
- ))
- use_values.update(itertools.chain.from_iterable(
- atom.restriction.vals
- for atom in iflatten_instance(deps, packages.Conditional)
- if isinstance(atom, packages.Conditional) and atom.attr == 'use'
- ))
+ use_values.update(
+ itertools.chain.from_iterable(
+ atom.use or () for atom in iflatten_instance(deps, atom_cls)
+ )
+ )
+ use_values.update(
+ itertools.chain.from_iterable(
+ atom.restriction.vals
+ for atom in iflatten_instance(deps, packages.Conditional)
+ if isinstance(atom, packages.Conditional) and atom.attr == "use"
+ )
+ )
for check_use in self.warn_use_small_files:
if any(check_use in use for use in use_values):
used_flags.add(check_use)
@@ -649,6 +683,7 @@ class UseFlagsWithoutEffectsCheck(GentooRepoCheck):
if flags:
yield UseFlagWithoutDeps(flags, pkg=pkg)
+
class MissingSlotDep(results.VersionResult, results.Warning):
"""Missing slot value in dependencies.
@@ -672,18 +707,22 @@ class MissingSlotDep(results.VersionResult, results.Warning):
@property
def desc(self):
- return (
- f"{self.dep!r} matches more than one slot: "
- f"[ {', '.join(self.dep_slots)} ]")
+ return f"{self.dep!r} matches more than one slot: " f"[ {', '.join(self.dep_slots)} ]"
class MissingSlotDepCheck(Check):
"""Check for missing slot dependencies."""
# only run the check for EAPI 5 and above
- _source = (sources.RestrictionRepoSource, (
- packages.PackageRestriction('eapi', values.GetAttrRestriction(
- 'options.sub_slotting', values.FunctionRestriction(bool))),))
+ _source = (
+ sources.RestrictionRepoSource,
+ (
+ packages.PackageRestriction(
+ "eapi",
+ values.GetAttrRestriction("options.sub_slotting", values.FunctionRestriction(bool)),
+ ),
+ ),
+ )
required_addons = (addons.UseAddon,)
known_results = frozenset([MissingSlotDep])
@@ -696,8 +735,11 @@ class MissingSlotDepCheck(Check):
depend, _ = self.iuse_filter((atom_cls,), pkg, pkg.depend)
# skip deps that are blockers or have explicit slots/slot operators
- for dep in (x for x in set(rdepend).intersection(depend) if not
- (x.blocks or x.slot is not None or x.slot_operator is not None)):
+ for dep in (
+ x
+ for x in set(rdepend).intersection(depend)
+ if not (x.blocks or x.slot is not None or x.slot_operator is not None)
+ ):
dep_slots = {x.slot for x in pkg.repo.itermatch(dep.no_usedeps)}
if len(dep_slots) > 1:
yield MissingSlotDep(str(dep), sorted(dep_slots), pkg=pkg)
@@ -738,10 +780,10 @@ class MissingUseDepDefault(results.VersionResult, results.Warning):
@property
def desc(self):
s = pluralism(self.pkgs)
- pkgs = ', '.join(self.pkgs)
+ pkgs = ", ".join(self.pkgs)
return (
f'{self.attr}="{self.atom}": USE flag {self.flag!r} missing from '
- f'package{s}: [ {pkgs} ]'
+ f"package{s}: [ {pkgs} ]"
)
@@ -755,7 +797,7 @@ class DeprecatedDep(results.VersionResult, results.Warning):
@property
def desc(self):
- ies = pluralism(self.atoms, singular='y', plural='ies')
+ ies = pluralism(self.atoms, singular="y", plural="ies")
return f"{self.attr}: deprecated dependenc{ies}: {' '.join(self.atoms)}"
@@ -776,31 +818,31 @@ class BadDependency(results.VersionResult, results.Error):
class InvalidDepend(results.MetadataError, results.VersionResult):
"""Package has invalid DEPEND."""
- attr = 'depend'
+ attr = "depend"
class InvalidRdepend(results.MetadataError, results.VersionResult):
"""Package has invalid RDEPEND."""
- attr = 'rdepend'
+ attr = "rdepend"
class InvalidPdepend(results.MetadataError, results.VersionResult):
"""Package has invalid PDEPEND."""
- attr = 'pdepend'
+ attr = "pdepend"
class InvalidBdepend(results.MetadataError, results.VersionResult):
"""Package has invalid BDEPEND."""
- attr = 'bdepend'
+ attr = "bdepend"
class InvalidIdepend(results.MetadataError, results.VersionResult):
"""Package has invalid IDEPEND."""
- attr = 'idepend'
+ attr = "idepend"
class MisplacedWeakBlocker(results.Warning, results.VersionResult):
@@ -821,25 +863,35 @@ class MisplacedWeakBlocker(results.Warning, results.VersionResult):
@property
def desc(self):
- return f'{self.attr}: misplaced weak blocker: {self.atom}'
+ return f"{self.attr}: misplaced weak blocker: {self.atom}"
class DependencyCheck(Check):
"""Verify dependency attributes (e.g. RDEPEND)."""
required_addons = (addons.UseAddon,)
- known_results = frozenset([
- BadDependency, MissingPackageRevision, MissingUseDepDefault,
- UnstatedIuse, DeprecatedDep, InvalidDepend, InvalidRdepend,
- InvalidPdepend, InvalidBdepend, InvalidIdepend, MisplacedWeakBlocker,
- ])
+ known_results = frozenset(
+ [
+ BadDependency,
+ MissingPackageRevision,
+ MissingUseDepDefault,
+ UnstatedIuse,
+ DeprecatedDep,
+ InvalidDepend,
+ InvalidRdepend,
+ InvalidPdepend,
+ InvalidBdepend,
+ InvalidIdepend,
+ MisplacedWeakBlocker,
+ ]
+ )
def __init__(self, *args, use_addon):
super().__init__(*args)
self.deprecated = self.options.target_repo.deprecated.match
self.iuse_filter = use_addon.get_filter()
- self.conditional_ops = {'?', '='}
- self.use_defaults = {'(+)', '(-)'}
+ self.conditional_ops = {"?", "="}
+ self.use_defaults = {"(+)", "(-)"}
def _check_use_deps(self, attr, atom):
"""Check dependencies for missing USE dep defaults."""
@@ -849,7 +901,7 @@ class DependencyCheck(Check):
x = x[:-1]
if x[-3:] in self.use_defaults:
continue
- stripped_use.append(x.lstrip('!-'))
+ stripped_use.append(x.lstrip("!-"))
if stripped_use:
missing_use_deps = defaultdict(set)
for pkg in self.options.search_repo.match(atom.no_usedeps):
@@ -868,12 +920,13 @@ class DependencyCheck(Check):
try:
deps = getattr(pkg, attr)
except MetadataException as e:
- cls = globals()[f'Invalid{attr.capitalize()}']
+ cls = globals()[f"Invalid{attr.capitalize()}"]
yield cls(attr, e.msg(), pkg=pkg)
continue
nodes, unstated = self.iuse_filter(
- (atom_cls, boolean.OrRestriction), pkg, deps, attr=attr)
+ (atom_cls, boolean.OrRestriction), pkg, deps, attr=attr
+ )
yield from unstated
for node in nodes:
@@ -892,9 +945,10 @@ class DependencyCheck(Check):
if all(self.deprecated(x.versioned_atom) for x in pkgs):
deprecated[attr].add(atom)
- if in_or_restriction and atom.slot_operator == '=':
+ if in_or_restriction and atom.slot_operator == "=":
yield BadDependency(
- attr, atom, '= slot operator used inside || block', pkg=pkg)
+ attr, atom, "= slot operator used inside || block", pkg=pkg
+ )
if pkg.eapi.options.has_use_dep_defaults and atom.use is not None:
missing_use_deps = self._check_use_deps(attr, atom)
@@ -902,22 +956,23 @@ class DependencyCheck(Check):
pkgs = (x.cpvstr for x in sorted(atoms))
yield MissingUseDepDefault(attr, str(atom), use, pkgs, pkg=pkg)
- if atom.op == '=' and not atom.revision:
+ if atom.op == "=" and not atom.revision:
yield MissingPackageRevision(attr, str(atom), pkg=pkg)
if atom.blocks:
if atom.match(pkg):
yield BadDependency(attr, atom, "package blocks itself", pkg=pkg)
- elif atom.slot_operator == '=':
+ elif atom.slot_operator == "=":
yield BadDependency(
- attr, atom, '= slot operator used in blocker', pkg=pkg)
+ attr, atom, "= slot operator used in blocker", pkg=pkg
+ )
elif not atom.blocks_strongly:
weak_blocks[attr].add(atom)
- for attr in ('depend', 'bdepend'):
- weak_blocks[attr].difference_update(weak_blocks['rdepend'])
- weak_blocks['idepend'].difference_update(weak_blocks['rdepend'], weak_blocks['depend'])
- for attr in ('depend', 'bdepend', 'idepend', 'pdepend'):
+ for attr in ("depend", "bdepend"):
+ weak_blocks[attr].difference_update(weak_blocks["rdepend"])
+ weak_blocks["idepend"].difference_update(weak_blocks["rdepend"], weak_blocks["depend"])
+ for attr in ("depend", "bdepend", "idepend", "pdepend"):
for atom in weak_blocks[attr]:
yield MisplacedWeakBlocker(attr, atom, pkg=pkg)
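The difference_update calls above deduplicate weak blockers across dependency attributes: anything already reported for RDEPEND is dropped from DEPEND and BDEPEND, and IDEPEND additionally defers to DEPEND. The set arithmetic in isolation, with hypothetical atoms as plain strings:

from collections import defaultdict

weak_blocks = defaultdict(set)
weak_blocks["rdepend"].add("!foo/bar")
weak_blocks["depend"].update({"!foo/bar", "!foo/baz"})

for attr in ("depend", "bdepend"):
    weak_blocks[attr].difference_update(weak_blocks["rdepend"])
weak_blocks["idepend"].difference_update(weak_blocks["rdepend"], weak_blocks["depend"])

assert weak_blocks["depend"] == {"!foo/baz"}  # only the novel blocker remains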
@@ -941,7 +996,7 @@ class OutdatedBlocker(results.VersionResult, results.Info):
def desc(self):
return (
f'outdated blocker {self.attr}="{self.atom}": '
- f'last match removed {self.age} years ago'
+ f"last match removed {self.age} years ago"
)
@@ -961,10 +1016,7 @@ class NonexistentBlocker(results.VersionResult, results.Warning):
@property
def desc(self):
- return (
- f'nonexistent blocker {self.attr}="{self.atom}": '
- 'no matches in repo history'
- )
+ return f'nonexistent blocker {self.attr}="{self.atom}": ' "no matches in repo history"
class OutdatedBlockersCheck(Check):
@@ -985,7 +1037,7 @@ class OutdatedBlockersCheck(Check):
for attr in sorted(x.lower() for x in pkg.eapi.dep_keys):
blockers = (x for x in iflatten_instance(getattr(pkg, attr), atom_cls) if x.blocks)
for atom in blockers:
- if atom.op == '=*':
+ if atom.op == "=*":
atom_str = f"={atom.cpvstr}*"
else:
atom_str = atom.op + atom.cpvstr
@@ -1084,7 +1136,7 @@ class VirtualKeywordsUpdate(results.VersionResult, results.Info):
@property
def desc(self):
s = pluralism(self.keywords)
- keywords = ', '.join(self.keywords)
+ keywords = ", ".join(self.keywords)
return f"KEYWORDS update{s} available: {keywords}"
@@ -1092,10 +1144,16 @@ class KeywordsCheck(Check):
"""Check package keywords for sanity; empty keywords, and -* are flagged."""
required_addons = (addons.UseAddon, addons.KeywordsAddon)
- known_results = frozenset([
- BadKeywords, UnknownKeywords, OverlappingKeywords, DuplicateKeywords,
- UnsortedKeywords, VirtualKeywordsUpdate,
- ])
+ known_results = frozenset(
+ [
+ BadKeywords,
+ UnknownKeywords,
+ OverlappingKeywords,
+ DuplicateKeywords,
+ UnsortedKeywords,
+ VirtualKeywordsUpdate,
+ ]
+ )
def __init__(self, *args, use_addon, keywords_addon):
super().__init__(*args)
@@ -1103,7 +1161,7 @@ class KeywordsCheck(Check):
self.keywords = keywords_addon
def feed(self, pkg):
- if pkg.keywords == ('-*',):
+ if pkg.keywords == ("-*",):
yield BadKeywords(pkg)
else:
# check for unknown keywords
@@ -1115,11 +1173,12 @@ class KeywordsCheck(Check):
yield UnknownKeywords(sorted(unknown), pkg=pkg)
# check for overlapping keywords
- unstable = {x[1:] for x in pkg.keywords if x[0] == '~'}
- stable = {x for x in pkg.keywords if x[0] != '~'}
+ unstable = {x[1:] for x in pkg.keywords if x[0] == "~"}
+ stable = {x for x in pkg.keywords if x[0] != "~"}
if overlapping := unstable & stable:
- keywords = ', '.join(map(
- str, sorted(zip(overlapping, ('~' + x for x in overlapping)))))
+ keywords = ", ".join(
+ map(str, sorted(zip(overlapping, ("~" + x for x in overlapping))))
+ )
yield OverlappingKeywords(keywords, pkg=pkg)
# check for duplicate keywords
@@ -1139,19 +1198,21 @@ class KeywordsCheck(Check):
yield UnsortedKeywords(pkg.keywords, pkg=pkg)
else:
yield UnsortedKeywords(
- pkg.keywords, sorted_keywords=pkg.sorted_keywords, pkg=pkg)
+ pkg.keywords, sorted_keywords=pkg.sorted_keywords, pkg=pkg
+ )
- if pkg.category == 'virtual':
+ if pkg.category == "virtual":
dep_keywords = defaultdict(set)
rdepend, _ = self.iuse_filter((atom_cls,), pkg, pkg.rdepend)
for dep in set(rdepend):
for p in self.options.search_repo.match(dep.no_usedeps):
dep_keywords[dep].update(
- x for x in p.keywords if x.lstrip('~') in self.keywords.arches)
+ x for x in p.keywords if x.lstrip("~") in self.keywords.arches
+ )
if dep_keywords:
dep_keywords = set.intersection(*dep_keywords.values())
pkg_keywords = set(pkg.keywords)
- pkg_keywords.update(f'~{x}' for x in pkg.keywords if x[0] != '~')
+ pkg_keywords.update(f"~{x}" for x in pkg.keywords if x[0] != "~")
if keywords := dep_keywords - pkg_keywords:
yield VirtualKeywordsUpdate(sort_keywords(keywords), pkg=pkg)
@@ -1166,8 +1227,8 @@ class MissingUri(results.VersionResult, results.Warning):
@property
def desc(self):
s = pluralism(self.filenames)
- filenames = ', '.join(map(repr, self.filenames))
- return f'unfetchable file{s}: {filenames}'
+ filenames = ", ".join(map(repr, self.filenames))
+ return f"unfetchable file{s}: {filenames}"
class UnknownMirror(results.VersionResult, results.Error):
@@ -1180,7 +1241,7 @@ class UnknownMirror(results.VersionResult, results.Error):
@property
def desc(self):
- return f'unknown mirror {self.mirror!r} from URI {self.uri!r}'
+ return f"unknown mirror {self.mirror!r} from URI {self.uri!r}"
class BadProtocol(results.VersionResult, results.Error):
@@ -1197,8 +1258,8 @@ class BadProtocol(results.VersionResult, results.Error):
@property
def desc(self):
s = pluralism(self.uris)
- uris = ', '.join(map(repr, self.uris))
- return f'bad protocol {self.protocol!r} in URI{s}: {uris}'
+ uris = ", ".join(map(repr, self.uris))
+ return f"bad protocol {self.protocol!r} in URI{s}: {uris}"
class RedundantUriRename(results.VersionResult, results.Style):
@@ -1226,8 +1287,8 @@ class BadFilename(results.VersionResult, results.Warning):
@property
def desc(self):
s = pluralism(self.filenames)
- filenames = ', '.join(self.filenames)
- return f'bad filename{s}: [ {filenames} ]'
+ filenames = ", ".join(self.filenames)
+ return f"bad filename{s}: [ {filenames} ]"
class TarballAvailable(results.VersionResult, results.Style):
@@ -1244,14 +1305,14 @@ class TarballAvailable(results.VersionResult, results.Style):
@property
def desc(self):
s = pluralism(self.uris)
- uris = ' '.join(self.uris)
- return f'zip archive{s} used when tarball available: [ {uris} ]'
+ uris = " ".join(self.uris)
+ return f"zip archive{s} used when tarball available: [ {uris} ]"
class InvalidSrcUri(results.MetadataError, results.VersionResult):
"""Package's SRC_URI is invalid."""
- attr = 'fetchables'
+ attr = "fetchables"
class SrcUriCheck(Check):
@@ -1262,19 +1323,28 @@ class SrcUriCheck(Check):
"""
required_addons = (addons.UseAddon,)
- known_results = frozenset([
- BadFilename, BadProtocol, MissingUri, InvalidSrcUri,
- RedundantUriRename, TarballAvailable, UnknownMirror, UnstatedIuse,
- ])
+ known_results = frozenset(
+ [
+ BadFilename,
+ BadProtocol,
+ MissingUri,
+ InvalidSrcUri,
+ RedundantUriRename,
+ TarballAvailable,
+ UnknownMirror,
+ UnstatedIuse,
+ ]
+ )
valid_protos = frozenset(["http", "https", "ftp"])
def __init__(self, *args, use_addon):
super().__init__(*args)
- self.iuse_filter = use_addon.get_filter('fetchables')
+ self.iuse_filter = use_addon.get_filter("fetchables")
self.zip_to_tar_re = re.compile(
- r'https?://(github\.com/.*?/.*?/archive/.+\.zip|'
- r'gitlab\.com/.*?/.*?/-/archive/.+\.zip)')
+ r"https?://(github\.com/.*?/.*?/archive/.+\.zip|"
+ r"gitlab\.com/.*?/.*?/-/archive/.+\.zip)"
+ )
def feed(self, pkg):
lacks_uri = set()
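The zip_to_tar_re pattern compiled above backs TarballAvailable: GitHub and GitLab autogenerated .zip archives always have a tarball counterpart at the same path. A match check against hypothetical upstream URLs:

import re

zip_to_tar_re = re.compile(
    r"https?://(github\.com/.*?/.*?/archive/.+\.zip|"
    r"gitlab\.com/.*?/.*?/-/archive/.+\.zip)"
)
assert zip_to_tar_re.match("https://github.com/foo/bar/archive/v1.0.zip")
assert not zip_to_tar_re.match("https://github.com/foo/bar/archive/v1.0.tar.gz")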
@@ -1283,13 +1353,17 @@ class SrcUriCheck(Check):
bad_filenames = set()
tarball_available = set()
- report_uris = LogMap('pkgcore.log.logger.info', partial(RedundantUriRename, pkg))
+ report_uris = LogMap("pkgcore.log.logger.info", partial(RedundantUriRename, pkg))
with LogReports(report_uris) as log_reports:
fetchables, unstated = self.iuse_filter(
- (fetchable,), pkg,
+ (fetchable,),
+ pkg,
pkg.generate_fetchables(
- allow_missing_checksums=True, ignore_unknown_mirrors=True,
- skip_default_mirrors=True))
+ allow_missing_checksums=True,
+ ignore_unknown_mirrors=True,
+ skip_default_mirrors=True,
+ ),
+ )
yield from log_reports
yield from unstated
@@ -1300,7 +1374,8 @@ class SrcUriCheck(Check):
mirrors = f_inst.uri.visit_mirrors(treat_default_as_mirror=False)
unknown_mirrors = [
- (m, sub_uri) for m, sub_uri in mirrors if isinstance(m, unknown_mirror)]
+ (m, sub_uri) for m, sub_uri in mirrors if isinstance(m, unknown_mirror)
+ ]
for mirror, sub_uri in unknown_mirrors:
uri = f"{mirror}/{sub_uri}"
yield UnknownMirror(mirror.mirror_name, uri, pkg=pkg)
@@ -1311,12 +1386,12 @@ class SrcUriCheck(Check):
PN = re.escape(pkg.PN)
PV = re.escape(pkg.PV)
exts = pkg.eapi.archive_exts_regex_pattern
- bad_filenames_re = rf'^({PN}|v?{PV}|[0-9a-f]{{40}}){exts}$'
+ bad_filenames_re = rf"^({PN}|v?{PV}|[0-9a-f]{{40}}){exts}$"
if re.match(bad_filenames_re, f_inst.filename):
bad_filenames.add(f_inst.filename)
restricts = set().union(*(x.vals for x in restrictions if not x.negate))
- if not f_inst.uri and 'fetch' not in pkg.restrict.evaluate_depset(restricts):
+ if not f_inst.uri and "fetch" not in pkg.restrict.evaluate_depset(restricts):
lacks_uri.add(f_inst.filename)
else:
bad_protocols = defaultdict(set)
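The bad-filename test above flags distfiles named after only the package name, the bare (optionally v-prefixed) version, or a 40-character commit hash, since such names collide in a shared distfiles directory. A sketch with a hypothetical package and a stand-in for the EAPI's archive-extension pattern:

import re

PN, PV = re.escape("foo"), re.escape("1.0")  # hypothetical pkg.PN / pkg.PV
exts = r"\.(?:tar\.gz|zip)"  # stand-in for pkg.eapi.archive_exts_regex_pattern
bad_filenames_re = rf"^({PN}|v?{PV}|[0-9a-f]{{40}}){exts}$"

assert re.match(bad_filenames_re, "1.0.tar.gz")          # too generic
assert re.match(bad_filenames_re, "v1.0.zip")            # still too generic
assert not re.match(bad_filenames_re, "foo-1.0.tar.gz")  # properly namespaced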
@@ -1349,8 +1424,8 @@ class BadDescription(results.VersionResult, results.Style):
@property
def desc(self):
- pkg_desc = f'DESCRIPTION="{self.pkg_desc}" ' if self.pkg_desc else ''
- return f'{pkg_desc}{self.msg}'
+ pkg_desc = f'DESCRIPTION="{self.pkg_desc}" ' if self.pkg_desc else ""
+ return f"{pkg_desc}{self.msg}"
class DescriptionCheck(Check):
@@ -1403,31 +1478,33 @@ class HomepageCheck(Check):
known_results = frozenset([BadHomepage])
# categories for ebuilds that should lack HOMEPAGE
- missing_categories = frozenset(['virtual', 'acct-group', 'acct-user'])
+ missing_categories = frozenset(["virtual", "acct-group", "acct-user"])
# generic sites that shouldn't be used for HOMEPAGE
- generic_sites = frozenset(['https://www.gentoo.org', 'https://gentoo.org'])
+ generic_sites = frozenset(["https://www.gentoo.org", "https://gentoo.org"])
def feed(self, pkg):
if not pkg.homepage:
if pkg.category not in self.missing_categories:
- yield BadHomepage('HOMEPAGE empty/unset', pkg=pkg)
+ yield BadHomepage("HOMEPAGE empty/unset", pkg=pkg)
else:
if pkg.category in self.missing_categories:
yield BadHomepage(
- f'HOMEPAGE should be undefined for {pkg.category!r} packages', pkg=pkg)
+ f"HOMEPAGE should be undefined for {pkg.category!r} packages", pkg=pkg
+ )
else:
for homepage in pkg.homepage:
- if homepage.rstrip('/') in self.generic_sites:
- yield BadHomepage(f'unspecific HOMEPAGE: {homepage}', pkg=pkg)
+ if homepage.rstrip("/") in self.generic_sites:
+ yield BadHomepage(f"unspecific HOMEPAGE: {homepage}", pkg=pkg)
else:
- i = homepage.find('://')
+ i = homepage.find("://")
if i == -1:
- yield BadHomepage(f'HOMEPAGE={homepage!r} lacks protocol', pkg=pkg)
+ yield BadHomepage(f"HOMEPAGE={homepage!r} lacks protocol", pkg=pkg)
elif homepage[:i] not in SrcUriCheck.valid_protos:
yield BadHomepage(
- f'HOMEPAGE={homepage!r} uses unsupported '
- f'protocol {homepage[:i]!r}',
- pkg=pkg)
+ f"HOMEPAGE={homepage!r} uses unsupported "
+ f"protocol {homepage[:i]!r}",
+ pkg=pkg,
+ )
class UnknownRestrict(results.VersionResult, results.Warning):
@@ -1439,7 +1516,7 @@ class UnknownRestrict(results.VersionResult, results.Warning):
@property
def desc(self):
- restricts = ' '.join(self.restricts)
+ restricts = " ".join(self.restricts)
return f'unknown RESTRICT="{restricts}"'
@@ -1452,20 +1529,20 @@ class UnknownProperties(results.VersionResult, results.Warning):
@property
def desc(self):
- properties = ' '.join(self.properties)
+ properties = " ".join(self.properties)
return f'unknown PROPERTIES="{properties}"'
class InvalidRestrict(results.MetadataError, results.VersionResult):
"""Package's RESTRICT is invalid."""
- attr = 'restrict'
+ attr = "restrict"
class InvalidProperties(results.MetadataError, results.VersionResult):
"""Package's PROPERTIES is invalid."""
- attr = 'properties'
+ attr = "properties"
class _RestrictPropertiesCheck(Check):
@@ -1482,7 +1559,7 @@ class _RestrictPropertiesCheck(Check):
# pull allowed values from a repo and its masters
allowed = []
for repo in self.options.target_repo.trees:
- allowed.extend(getattr(repo.config, f'{self._attr}_allowed'))
+ allowed.extend(getattr(repo.config, f"{self._attr}_allowed"))
self.allowed = frozenset(allowed)
def feed(self, pkg):
@@ -1499,7 +1576,7 @@ class RestrictCheck(_RestrictPropertiesCheck):
"""RESTRICT related checks."""
known_results = frozenset([UnknownRestrict, UnstatedIuse, InvalidRestrict])
- _attr = 'restrict'
+ _attr = "restrict"
_unknown_result_cls = UnknownRestrict
@@ -1507,7 +1584,7 @@ class PropertiesCheck(_RestrictPropertiesCheck):
"""PROPERTIES related checks."""
known_results = frozenset([UnknownProperties, UnstatedIuse, InvalidProperties])
- _attr = 'properties'
+ _attr = "properties"
_unknown_result_cls = UnknownProperties
@@ -1536,15 +1613,16 @@ class RestrictTestCheck(Check):
super().__init__(*args)
# create "!test? ( test )" conditional to match restrictions against
self.test_restrict = packages.Conditional(
- 'use', values.ContainmentMatch2('test', negate=True), ['test'])
+ "use", values.ContainmentMatch2("test", negate=True), ["test"]
+ )
def feed(self, pkg):
- if 'test' not in pkg.iuse:
+ if "test" not in pkg.iuse:
return
# conditional is unnecessary if it already exists or is in unconditional form
for r in pkg.restrict:
- if r in ('test', self.test_restrict):
+ if r in ("test", self.test_restrict):
return
yield MissingTestRestrict(pkg=pkg)
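RestrictTestCheck accepts either the bare "test" token or exactly the "!test? ( test )" conditional constructed above; any other RESTRICT on a package with IUSE=test triggers MissingTestRestrict. A sketch of that acceptance test using the pkgcore restriction classes named in the hunk (constructor shapes copied from the diff; the helper itself is illustrative):

from pkgcore.restrictions import packages, values

# what RESTRICT="!test? ( test )" parses to, per the check's __init__ above
test_restrict = packages.Conditional(
    "use", values.ContainmentMatch2("test", negate=True), ["test"]
)

def restrict_covers_test(restrict_tokens):
    """True if RESTRICT already handles test, conditionally or not."""
    return any(r in ("test", test_restrict) for r in restrict_tokens)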
@@ -1567,7 +1645,7 @@ class MissingUnpackerDep(results.VersionResult, results.Warning):
def desc(self):
# determine proper dep type from pkg EAPI
eapi_obj = get_eapi(self.eapi)
- dep_type = 'BDEPEND' if 'BDEPEND' in eapi_obj.metadata_keys else 'DEPEND'
+ dep_type = "BDEPEND" if "BDEPEND" in eapi_obj.metadata_keys else "DEPEND"
if len(self.unpackers) == 1:
dep = self.unpackers[0]
@@ -1575,7 +1653,7 @@ class MissingUnpackerDep(results.VersionResult, results.Warning):
dep = f"|| ( {' '.join(self.unpackers)} )"
s = pluralism(self.filenames)
- filenames = ', '.join(self.filenames)
+ filenames = ", ".join(self.filenames)
return f'missing {dep_type}="{dep}" for SRC_URI archive{s}: [ {filenames} ]'
@@ -1585,26 +1663,30 @@ class MissingUnpackerDepCheck(Check):
known_results = frozenset([MissingUnpackerDep])
required_addons = (addons.UseAddon,)
- non_system_unpackers = ImmutableDict({
- '.zip': frozenset(['app-arch/unzip']),
- '.7z': frozenset(['app-arch/p7zip']),
- '.rar': frozenset(['app-arch/rar', 'app-arch/unrar']),
- '.lha': frozenset(['app-arch/lha']),
- '.lzh': frozenset(['app-arch/lha']),
- })
+ non_system_unpackers = ImmutableDict(
+ {
+ ".zip": frozenset(["app-arch/unzip"]),
+ ".7z": frozenset(["app-arch/p7zip"]),
+ ".rar": frozenset(["app-arch/rar", "app-arch/unrar"]),
+ ".lha": frozenset(["app-arch/lha"]),
+ ".lzh": frozenset(["app-arch/lha"]),
+ }
+ )
def __init__(self, *args, use_addon):
super().__init__(*args)
self.dep_filter = use_addon.get_filter()
- self.fetch_filter = use_addon.get_filter('fetchables')
+ self.fetch_filter = use_addon.get_filter("fetchables")
def feed(self, pkg):
# ignore conditionals
fetchables, _ = self.fetch_filter(
- (fetchable,), pkg,
+ (fetchable,),
+ pkg,
pkg.generate_fetchables(
- allow_missing_checksums=True, ignore_unknown_mirrors=True,
- skip_default_mirrors=True))
+ allow_missing_checksums=True, ignore_unknown_mirrors=True, skip_default_mirrors=True
+ ),
+ )
missing_unpackers = defaultdict(set)
@@ -1616,7 +1698,7 @@ class MissingUnpackerDepCheck(Check):
# toss all the potentially missing unpackers that properly include deps
if missing_unpackers:
- for dep_type in ('bdepend', 'depend'):
+ for dep_type in ("bdepend", "depend"):
deps, _ = self.dep_filter((atom_cls,), pkg, getattr(pkg, dep_type))
deps = {x.key for x in deps}
for unpackers in list(missing_unpackers.keys()):
@@ -1624,8 +1706,7 @@ class MissingUnpackerDepCheck(Check):
missing_unpackers.pop(unpackers, None)
for unpackers, filenames in missing_unpackers.items():
- yield MissingUnpackerDep(
- str(pkg.eapi), sorted(filenames), sorted(unpackers), pkg=pkg)
+ yield MissingUnpackerDep(str(pkg.eapi), sorted(filenames), sorted(unpackers), pkg=pkg)
class VirtualWithSingleProvider(results.PackageResult, results.Warning):
@@ -1644,32 +1725,31 @@ class VirtualWithSingleProvider(results.PackageResult, results.Warning):
@property
def desc(self):
- return f'virtual package with a single provider: {self.provider}'
+ return f"virtual package with a single provider: {self.provider}"
class VirtualWithBdepend(results.VersionResult, results.Warning):
"""Virtual package with a BDEPEND defined."""
- desc = 'virtual package with a BDEPEND defined'
+ desc = "virtual package with a BDEPEND defined"
class VirtualWithDepend(results.VersionResult, results.Warning):
"""Virtual package with a BDEPEND defined."""
- desc = 'virtual package with a DEPEND defined'
+ desc = "virtual package with a DEPEND defined"
class VirtualProvidersCheck(Check):
"""Check providers of virtual packages."""
- _restricted_source = (sources.RestrictionRepoSource, (restricts.CategoryDep('virtual'), ))
- _source = (sources.PackageRepoSource, (), (('source', _restricted_source),))
- known_results = frozenset([VirtualWithSingleProvider,
- VirtualWithBdepend, VirtualWithDepend])
+ _restricted_source = (sources.RestrictionRepoSource, (restricts.CategoryDep("virtual"),))
+ _source = (sources.PackageRepoSource, (), (("source", _restricted_source),))
+ known_results = frozenset([VirtualWithSingleProvider, VirtualWithBdepend, VirtualWithDepend])
useless_depends = (
- ('depend', VirtualWithDepend),
- ('bdepend', VirtualWithBdepend),
+ ("depend", VirtualWithDepend),
+ ("bdepend", VirtualWithBdepend),
)
def __init__(self, options, **kwargs):
@@ -1678,10 +1758,13 @@ class VirtualProvidersCheck(Check):
self.deprecated = self.options.target_repo.deprecated
def pkg_has_conditional_exception(self, pkgs):
- return any(use.startswith(('elibc', 'kernel'))
+ return any(
+ use.startswith(("elibc", "kernel"))
for pkg in pkgs
for dep in iflatten_instance(pkg.rdepend, (atom_cls, packages.Conditional))
- if isinstance(dep, packages.Conditional) and dep.attr == 'use' and isinstance(dep.restriction, values.ContainmentMatch)
+ if isinstance(dep, packages.Conditional)
+ and dep.attr == "use"
+ and isinstance(dep.restriction, values.ContainmentMatch)
for use in dep.restriction.vals
)
@@ -1692,15 +1775,10 @@ class VirtualProvidersCheck(Check):
yield cls(pkg=pkg)
if not any(self.deprecated.match(pkg) for pkg in pkgs):
- pkgs_rdepends = tuple(
- tuple(iflatten_instance(pkg.rdepend, atom_cls))
- for pkg in pkgs
- )
+ pkgs_rdepends = tuple(tuple(iflatten_instance(pkg.rdepend, atom_cls)) for pkg in pkgs)
if max(map(len, pkgs_rdepends)) == 1:
unversioned_rdepends = {
- deps[0].unversioned_atom
- for deps in pkgs_rdepends
- if len(deps) == 1
+ deps[0].unversioned_atom for deps in pkgs_rdepends if len(deps) == 1
}
if len(unversioned_rdepends) == 1 and not self.pkg_has_conditional_exception(pkgs):
yield VirtualWithSingleProvider(unversioned_rdepends.pop(), pkg=pkgs[0])
diff --git a/src/pkgcheck/checks/metadata_xml.py b/src/pkgcheck/checks/metadata_xml.py
index 2182585b..0fcc31ac 100644
--- a/src/pkgcheck/checks/metadata_xml.py
+++ b/src/pkgcheck/checks/metadata_xml.py
@@ -25,7 +25,7 @@ class _MissingXml(results.Error):
@property
def desc(self):
- return f'{self._attr} is missing {self.filename}'
+ return f"{self._attr} is missing {self.filename}"
class _BadlyFormedXml(results.Warning):
@@ -38,7 +38,7 @@ class _BadlyFormedXml(results.Warning):
@property
def desc(self):
- return f'{self._attr} {self.filename} is not well formed xml: {self.error}'
+ return f"{self._attr} {self.filename} is not well formed xml: {self.error}"
class _InvalidXml(results.Error):
@@ -51,7 +51,7 @@ class _InvalidXml(results.Error):
@property
def desc(self):
- return f'{self._attr} {self.filename} violates metadata.xsd:\n{self.message}'
+ return f"{self._attr} {self.filename} violates metadata.xsd:\n{self.message}"
class _MetadataXmlInvalidPkgRef(results.Error):
@@ -65,8 +65,8 @@ class _MetadataXmlInvalidPkgRef(results.Error):
@property
def desc(self):
return (
- f'{self._attr} {self.filename} <pkg/> '
- f'references unknown/invalid package: {self.pkgtext!r}'
+ f"{self._attr} {self.filename} <pkg/> "
+ f"references unknown/invalid package: {self.pkgtext!r}"
)
@@ -81,8 +81,8 @@ class _MetadataXmlInvalidCatRef(results.Error):
@property
def desc(self):
return (
- f'{self._attr} {self.filename} <cat/> references '
- f'unknown/invalid category: {self.cattext!r}'
+ f"{self._attr} {self.filename} <cat/> references "
+ f"unknown/invalid category: {self.cattext!r}"
)
@@ -97,8 +97,8 @@ class MaintainerNeeded(results.PackageResult, results.Warning):
@property
def desc(self):
if not self.needed:
- return f'{self.filename}: missing maintainer-needed comment'
- return f'{self.filename}: invalid maintainer-needed comment'
+ return f"{self.filename}: missing maintainer-needed comment"
+ return f"{self.filename}: invalid maintainer-needed comment"
class MaintainerWithoutProxy(results.PackageResult, results.Warning):
@@ -119,8 +119,8 @@ class MaintainerWithoutProxy(results.PackageResult, results.Warning):
@property
def desc(self):
s = pluralism(self.maintainers)
- maintainers = ', '.join(self.maintainers)
- return f'{self.filename}: proxied maintainer{s} missing proxy dev/project: {maintainers}'
+ maintainers = ", ".join(self.maintainers)
+ return f"{self.filename}: proxied maintainer{s} missing proxy dev/project: {maintainers}"
class ProxyWithoutProxied(results.PackageResult, results.Warning):
@@ -137,7 +137,7 @@ class ProxyWithoutProxied(results.PackageResult, results.Warning):
@property
def desc(self):
- return f'{self.filename}: proxy with no proxied maintainer'
+ return f"{self.filename}: proxy with no proxied maintainer"
class NonexistentProjectMaintainer(results.PackageResult, results.Warning):
@@ -151,8 +151,8 @@ class NonexistentProjectMaintainer(results.PackageResult, results.Warning):
@property
def desc(self):
s = pluralism(self.emails)
- emails = ', '.join(self.emails)
- return f'{self.filename}: nonexistent project maintainer{s}: {emails}'
+ emails = ", ".join(self.emails)
+ return f"{self.filename}: nonexistent project maintainer{s}: {emails}"
class WrongMaintainerType(results.PackageResult, results.Warning):
@@ -166,7 +166,7 @@ class WrongMaintainerType(results.PackageResult, results.Warning):
@property
def desc(self):
s = pluralism(self.emails)
- emails = ', '.join(self.emails)
+ emails = ", ".join(self.emails)
return f'{self.filename}: project maintainer{s} with type="person": {emails}'
@@ -222,7 +222,7 @@ class _MetadataXmlIndentation(results.BaseLinesResult, results.Style):
@property
def desc(self):
- return f'{self.filename}: metadata.xml has inconsistent indentation {self.lines_str}'
+ return f"{self.filename}: metadata.xml has inconsistent indentation {self.lines_str}"
class CatMetadataXmlIndentation(_MetadataXmlIndentation, results.CategoryResult):
@@ -250,7 +250,7 @@ class _MetadataXmlEmptyElement(results.Style):
@property
def desc(self):
- return f'{self.filename}: empty element {self.element!r} on line {self.line}'
+ return f"{self.filename}: empty element {self.element!r} on line {self.line}"
class CatMetadataXmlEmptyElement(_MetadataXmlEmptyElement, results.CategoryResult):
@@ -288,8 +288,10 @@ class InvalidRemoteID(results.PackageResult, results.Warning):
@property
def desc(self):
- return (f"remote-id value {self.id_value!r} invalid for "
- f"type={self.id_type!r}, expected: {self.expected!r}")
+ return (
+ f"remote-id value {self.id_value!r} invalid for "
+ f"type={self.id_type!r}, expected: {self.expected!r}"
+ )
class _XmlBaseCheck(Check):
@@ -306,13 +308,12 @@ class _XmlBaseCheck(Check):
self.repo_base = self.options.target_repo.location
self.pkgref_cache = {}
# content validation checks to run after parsing XML doc
- self._checks = tuple(
- getattr(self, x) for x in dir(self) if x.startswith('_check_'))
+ self._checks = tuple(getattr(self, x) for x in dir(self) if x.startswith("_check_"))
# Prefer xsd file from the target repository or its masters, falling
# back to the file installed with pkgcore.
for repo in reversed(self.options.target_repo.trees):
- metadata_xsd = pjoin(repo.location, 'metadata', 'xml-schema', 'metadata.xsd')
+ metadata_xsd = pjoin(repo.location, "metadata", "xml-schema", "metadata.xsd")
if os.path.isfile(metadata_xsd):
try:
self.schema = etree.XMLSchema(etree.parse(metadata_xsd))
@@ -321,7 +322,7 @@ class _XmlBaseCheck(Check):
# ignore invalid xsd files
pass
else:
- metadata_xsd = pjoin(pkgcore_const.DATA_PATH, 'xml-schema', 'metadata.xsd')
+ metadata_xsd = pjoin(pkgcore_const.DATA_PATH, "xml-schema", "metadata.xsd")
self.schema = etree.XMLSchema(etree.parse(metadata_xsd))
def _check_doc(self, pkg, loc, doc):
@@ -330,16 +331,19 @@ class _XmlBaseCheck(Check):
# 'stabilize-allarches' which is allowed to be empty and 'flag' which
# is caught by MissingLocalUseDesc.
for el in doc.getroot().iterdescendants():
- if (not el.getchildren() and (el.text is None or not el.text.strip())
- and el.tag not in ('flag', 'stabilize-allarches')):
+ if (
+ not el.getchildren()
+ and (el.text is None or not el.text.strip())
+ and el.tag not in ("flag", "stabilize-allarches")
+ ):
yield self.empty_element(os.path.basename(loc), el.tag, el.sourceline, pkg=pkg)
- for el in doc.findall('.//cat'):
+ for el in doc.findall(".//cat"):
c = el.text.strip()
if c not in self.options.search_repo.categories:
yield self.catref_error(os.path.basename(loc), c, pkg=pkg)
- for el in doc.findall('.//pkg'):
+ for el in doc.findall(".//pkg"):
p = el.text.strip()
if p not in self.pkgref_cache:
try:
@@ -358,7 +362,7 @@ class _XmlBaseCheck(Check):
indents = set()
with open(loc) as f:
for lineno, line in enumerate(f, 1):
- for i in line[:-len(line.lstrip())]:
+ for i in line[: -len(line.lstrip())]:
if i != orig_indent:
if orig_indent is None:
orig_indent = i
@@ -370,7 +374,7 @@ class _XmlBaseCheck(Check):
@staticmethod
def _format_lxml_errors(error_log):
for x in error_log:
- yield f'line {x.line}, col {x.column}: ({x.type_name}) {x.message}'
+ yield f"line {x.line}, col {x.column}: ({x.type_name}) {x.message}"
def _parse_xml(self, pkg, loc):
try:
@@ -387,7 +391,7 @@ class _XmlBaseCheck(Check):
# note: while doc is available, do not pass it here as it may
# trigger undefined behavior due to incorrect structure
if self.schema is not None and not self.schema.validate(doc):
- message = '\n'.join(self._format_lxml_errors(self.schema.error_log))
+ message = "\n".join(self._format_lxml_errors(self.schema.error_log))
yield self.invalid_error(os.path.basename(loc), message, pkg=pkg)
return
@@ -413,64 +417,74 @@ class PackageMetadataXmlCheck(_XmlBaseCheck):
indent_error = PkgMetadataXmlIndentation
empty_element = PkgMetadataXmlEmptyElement
- known_results = frozenset([
- PkgBadlyFormedXml, PkgInvalidXml, PkgMissingMetadataXml,
- PkgMetadataXmlInvalidPkgRef, PkgMetadataXmlInvalidCatRef,
- PkgMetadataXmlIndentation, PkgMetadataXmlEmptyElement, MaintainerNeeded,
- MaintainerWithoutProxy, ProxyWithoutProxied, RedundantLongDescription,
- NonexistentProjectMaintainer, WrongMaintainerType, InvalidRemoteID,
- ])
+ known_results = frozenset(
+ [
+ PkgBadlyFormedXml,
+ PkgInvalidXml,
+ PkgMissingMetadataXml,
+ PkgMetadataXmlInvalidPkgRef,
+ PkgMetadataXmlInvalidCatRef,
+ PkgMetadataXmlIndentation,
+ PkgMetadataXmlEmptyElement,
+ MaintainerNeeded,
+ MaintainerWithoutProxy,
+ ProxyWithoutProxied,
+ RedundantLongDescription,
+ NonexistentProjectMaintainer,
+ WrongMaintainerType,
+ InvalidRemoteID,
+ ]
+ )
- _one_component_validator_re = re.compile(r'^[^/]+$')
- _two_components_validator_re = re.compile(r'^[^/]+/[^/]+$')
- _gitlab_validator_re = re.compile(r'^([^/]+/)*[^/]+/[^/]+$')
+ _one_component_validator_re = re.compile(r"^[^/]+$")
+ _two_components_validator_re = re.compile(r"^[^/]+/[^/]+$")
+ _gitlab_validator_re = re.compile(r"^([^/]+/)*[^/]+/[^/]+$")
remote_id_validators = {
# {name}-style remotes
- 'cpan': (_one_component_validator_re, '{project}'),
- 'cpan-module': (_one_component_validator_re, '{module}'),
- 'cran': (_one_component_validator_re, '{project}'),
- 'ctan': (_one_component_validator_re, '{project}'),
- 'google-code': (_one_component_validator_re, '{project}'),
- 'osdn': (_one_component_validator_re, '{project}'),
- 'pear': (_one_component_validator_re, '{project}'),
- 'pecl': (_one_component_validator_re, '{project}'),
- 'pypi': (_one_component_validator_re, '{project}'),
- 'rubygems': (_one_component_validator_re, '{project}'),
- 'sourceforge': (_one_component_validator_re, '{project}'),
+ "cpan": (_one_component_validator_re, "{project}"),
+ "cpan-module": (_one_component_validator_re, "{module}"),
+ "cran": (_one_component_validator_re, "{project}"),
+ "ctan": (_one_component_validator_re, "{project}"),
+ "google-code": (_one_component_validator_re, "{project}"),
+ "osdn": (_one_component_validator_re, "{project}"),
+ "pear": (_one_component_validator_re, "{project}"),
+ "pecl": (_one_component_validator_re, "{project}"),
+ "pypi": (_one_component_validator_re, "{project}"),
+ "rubygems": (_one_component_validator_re, "{project}"),
+ "sourceforge": (_one_component_validator_re, "{project}"),
# {name} with a special check for lp: prefix
- 'launchpad': (re.compile(r'^(?!lp:)[^/]+$'), '{project}'),
+ "launchpad": (re.compile(r"^(?!lp:)[^/]+$"), "{project}"),
# {owner}/{name}-style remotes
- 'bitbucket': (_two_components_validator_re, '{username}/{project}'),
- 'github': (_two_components_validator_re, '{username}/{project}'),
+ "bitbucket": (_two_components_validator_re, "{username}/{project}"),
+ "github": (_two_components_validator_re, "{username}/{project}"),
# gitlab (2+ components)
- 'gitlab': (_gitlab_validator_re, '{username}/[{group}/...]{repo}'),
- 'heptapod': (_gitlab_validator_re, '{username}/[{group}/...]{repo}'),
+ "gitlab": (_gitlab_validator_re, "{username}/[{group}/...]{repo}"),
+ "heptapod": (_gitlab_validator_re, "{username}/[{group}/...]{repo}"),
# cpe
- 'cpe': (re.compile(r'^cpe:/[aho]:[^:]+:[^:]+$'),
- 'cpe:/[aho]:{vendor}:{product}'),
+ "cpe": (re.compile(r"^cpe:/[aho]:[^:]+:[^:]+$"), "cpe:/[aho]:{vendor}:{product}"),
# 1+ component + no ".git" suffix
- 'gentoo': (re.compile(r'^([^/]+/)*[^/]+(?<!\.git)$'),
- '[{group}/...]{repo}'),
+ "gentoo": (re.compile(r"^([^/]+/)*[^/]+(?<!\.git)$"), "[{group}/...]{repo}"),
# a positive decimal number
- 'vim': (re.compile(r'^[1-9]\d*$'), '{script_id}'),
+ "vim": (re.compile(r"^[1-9]\d*$"), "{script_id}"),
}
@staticmethod
def _maintainer_proxied_key(m):
if m.proxied is not None:
return m.proxied
- if m.email == 'proxy-maint@gentoo.org':
- return 'proxy'
- if m.email.endswith('@gentoo.org'):
- return 'no'
- return 'yes'
+ if m.email == "proxy-maint@gentoo.org":
+ return "proxy"
+ if m.email.endswith("@gentoo.org"):
+ return "no"
+ return "yes"
def _check_maintainers(self, pkg, loc, doc):
"""Validate maintainers in package metadata for the gentoo repo."""
if self.options.gentoo_repo:
maintainer_needed = any(
- c.text.strip() == 'maintainer-needed' for c in doc.xpath('//comment()'))
+ c.text.strip() == "maintainer-needed" for c in doc.xpath("//comment()")
+ )
if pkg.maintainers:
# check for invalid maintainer-needed comment
if maintainer_needed:
@@ -478,15 +492,14 @@ class PackageMetadataXmlCheck(_XmlBaseCheck):
# determine proxy maintainer status
proxied, devs, proxies = [], [], []
- proxy_map = {'yes': proxied, 'no': devs, 'proxy': proxies}
+ proxy_map = {"yes": proxied, "no": devs, "proxy": proxies}
for m in pkg.maintainers:
proxy_map[self._maintainer_proxied_key(m)].append(m)
# check proxy maintainers
if not devs and not proxies:
maintainers = sorted(map(str, pkg.maintainers))
- yield MaintainerWithoutProxy(
- os.path.basename(loc), maintainers, pkg=pkg)
+ yield MaintainerWithoutProxy(os.path.basename(loc), maintainers, pkg=pkg)
elif not proxied and proxies:
yield ProxyWithoutProxied(os.path.basename(loc), pkg=pkg)
elif not maintainer_needed:
@@ -498,25 +511,27 @@ class PackageMetadataXmlCheck(_XmlBaseCheck):
nonexistent = []
wrong_maintainers = []
for m in pkg.maintainers:
- if m.maint_type == 'project' and m.email not in projects:
+ if m.maint_type == "project" and m.email not in projects:
nonexistent.append(m.email)
- elif m.maint_type == 'person' and m.email in projects:
+ elif m.maint_type == "person" and m.email in projects:
wrong_maintainers.append(m.email)
if nonexistent:
yield NonexistentProjectMaintainer(
- os.path.basename(loc), sorted(nonexistent), pkg=pkg)
+ os.path.basename(loc), sorted(nonexistent), pkg=pkg
+ )
if wrong_maintainers:
yield WrongMaintainerType(
- os.path.basename(loc), sorted(wrong_maintainers), pkg=pkg)
+ os.path.basename(loc), sorted(wrong_maintainers), pkg=pkg
+ )
def _check_longdescription(self, pkg, loc, doc):
if pkg.longdescription is not None:
match_ratio = SequenceMatcher(None, pkg.description, pkg.longdescription).ratio()
if match_ratio > 0.75:
- msg = 'metadata.xml longdescription closely matches DESCRIPTION'
+ msg = "metadata.xml longdescription closely matches DESCRIPTION"
yield RedundantLongDescription(msg, pkg=pkg)
elif len(pkg.longdescription) < 100:
- msg = 'metadata.xml longdescription is too short'
+ msg = "metadata.xml longdescription is too short"
yield RedundantLongDescription(msg, pkg=pkg)
def _check_remote_id(self, pkg, loc, doc):
@@ -533,13 +548,13 @@ class PackageMetadataXmlCheck(_XmlBaseCheck):
def _get_xml_location(self, pkg):
"""Return the metadata.xml location for a given package."""
- return pjoin(os.path.dirname(pkg.ebuild.path), 'metadata.xml')
+ return pjoin(os.path.dirname(pkg.ebuild.path), "metadata.xml")
class CategoryMetadataXmlCheck(_XmlBaseCheck):
"""Category level metadata.xml scans."""
- _source = (sources.CategoryRepoSource, (), (('source', sources.RawRepoSource),))
+ _source = (sources.CategoryRepoSource, (), (("source", sources.RawRepoSource),))
misformed_error = CatBadlyFormedXml
invalid_error = CatInvalidXml
missing_error = CatMissingMetadataXml
@@ -548,15 +563,21 @@ class CategoryMetadataXmlCheck(_XmlBaseCheck):
indent_error = CatMetadataXmlIndentation
empty_element = CatMetadataXmlEmptyElement
- known_results = frozenset([
- CatBadlyFormedXml, CatInvalidXml, CatMissingMetadataXml,
- CatMetadataXmlInvalidPkgRef, CatMetadataXmlInvalidCatRef,
- CatMetadataXmlIndentation, CatMetadataXmlEmptyElement,
- ])
+ known_results = frozenset(
+ [
+ CatBadlyFormedXml,
+ CatInvalidXml,
+ CatMissingMetadataXml,
+ CatMetadataXmlInvalidPkgRef,
+ CatMetadataXmlInvalidCatRef,
+ CatMetadataXmlIndentation,
+ CatMetadataXmlEmptyElement,
+ ]
+ )
def _get_xml_location(self, pkg):
"""Return the metadata.xml location for a given package's category."""
- return pjoin(self.repo_base, pkg.category, 'metadata.xml')
+ return pjoin(self.repo_base, pkg.category, "metadata.xml")
class MissingRemoteId(results.PackageResult, results.Info):
@@ -577,8 +598,10 @@ class MissingRemoteId(results.PackageResult, results.Info):
@property
def desc(self):
- return (f'missing <remote-id type="{self.remote_type}">'
- f'{self.value}</remote-id> (inferred from URI {self.uri!r})')
+ return (
+ f'missing <remote-id type="{self.remote_type}">'
+ f"{self.value}</remote-id> (inferred from URI {self.uri!r})"
+ )
class MissingRemoteIdCheck(Check):
@@ -587,37 +610,47 @@ class MissingRemoteIdCheck(Check):
_source = sources.PackageRepoSource
known_results = frozenset([MissingRemoteId])
- _gitlab_match = r'(?P<value>(\w[^/]*/)*\w[^/]*/\w[^/]*)'
+ _gitlab_match = r"(?P<value>(\w[^/]*/)*\w[^/]*/\w[^/]*)"
remotes_map = (
- ('bitbucket', r'https://bitbucket.org/(?P<value>[^/]+/[^/]+)'),
- ('freedesktop-gitlab', rf'https://gitlab.freedesktop.org/{_gitlab_match}'),
- ('github', r'https://github.com/(?P<value>[^/]+/[^/]+)'),
- ('gitlab', rf'https://gitlab.com/{_gitlab_match}'),
- ('gnome-gitlab', rf'https://gitlab.gnome.org/{_gitlab_match}'),
- ('heptapod', rf'https://foss.heptapod.net/{_gitlab_match}'),
- ('launchpad', r'https://launchpad.net/(?P<value>[^/]+)'),
- ('pypi', r'https://pypi.org/project/(?P<value>[^/]+)'),
- ('pypi', r'https://files.pythonhosted.org/packages/source/\S/(?P<value>[^/]+)'),
- ('savannah', r'https://savannah.gnu.org/projects/(?P<value>[^/]+)'),
- ('savannah-nongnu', r'https://savannah.nongnu.org/projects/(?P<value>[^/]+)'),
- ('sourceforge', r'https://downloads.sourceforge.(net|io)/(?:project/)?(?P<value>[^/]+)'),
- ('sourceforge', r'https://sourceforge.(net|io)/projects/(?P<value>[^/]+)'),
- ('sourceforge', r'https://(?P<value>[^/]+).sourceforge.(net|io)/'),
- ('sourcehut', r'https://sr.ht/(?P<value>[^/]+/[^/]+)'),
+ ("bitbucket", r"https://bitbucket.org/(?P<value>[^/]+/[^/]+)"),
+ ("freedesktop-gitlab", rf"https://gitlab.freedesktop.org/{_gitlab_match}"),
+ ("github", r"https://github.com/(?P<value>[^/]+/[^/]+)"),
+ ("gitlab", rf"https://gitlab.com/{_gitlab_match}"),
+ ("gnome-gitlab", rf"https://gitlab.gnome.org/{_gitlab_match}"),
+ ("heptapod", rf"https://foss.heptapod.net/{_gitlab_match}"),
+ ("launchpad", r"https://launchpad.net/(?P<value>[^/]+)"),
+ ("pypi", r"https://pypi.org/project/(?P<value>[^/]+)"),
+ ("pypi", r"https://files.pythonhosted.org/packages/source/\S/(?P<value>[^/]+)"),
+ ("savannah", r"https://savannah.gnu.org/projects/(?P<value>[^/]+)"),
+ ("savannah-nongnu", r"https://savannah.nongnu.org/projects/(?P<value>[^/]+)"),
+ ("sourceforge", r"https://downloads.sourceforge.(net|io)/(?:project/)?(?P<value>[^/]+)"),
+ ("sourceforge", r"https://sourceforge.(net|io)/projects/(?P<value>[^/]+)"),
+ ("sourceforge", r"https://(?P<value>[^/]+).sourceforge.(net|io)/"),
+ ("sourcehut", r"https://sr.ht/(?P<value>[^/]+/[^/]+)"),
)
def __init__(self, options, **kwargs):
super().__init__(options, **kwargs)
- self.remotes_map = tuple((remote_type, re.compile(regex)) for remote_type, regex in self.remotes_map)
+ self.remotes_map = tuple(
+ (remote_type, re.compile(regex)) for remote_type, regex in self.remotes_map
+ )
def feed(self, pkgset):
remotes = {u.type: (None, None) for u in pkgset[0].upstreams}
for pkg in sorted(pkgset, reverse=True):
- fetchables = iflatten_instance(pkg.generate_fetchables(allow_missing_checksums=True,
- ignore_unknown_mirrors=True, skip_default_mirrors=True), (fetchable, Conditional))
- all_urls = set(chain.from_iterable(f.uri for f in fetchables if isinstance(f, fetchable)))
- urls = {url for url in all_urls if not url.endswith(('.patch', '.diff'))}
+ fetchables = iflatten_instance(
+ pkg.generate_fetchables(
+ allow_missing_checksums=True,
+ ignore_unknown_mirrors=True,
+ skip_default_mirrors=True,
+ ),
+ (fetchable, Conditional),
+ )
+ all_urls = set(
+ chain.from_iterable(f.uri for f in fetchables if isinstance(f, fetchable))
+ )
+ urls = {url for url in all_urls if not url.endswith((".patch", ".diff"))}
urls = sorted(urls.union(pkg.homepage), key=len)
for remote_type, regex in self.remotes_map:
@@ -625,7 +658,7 @@ class MissingRemoteIdCheck(Check):
continue
for url in urls:
if mo := regex.match(url):
- remotes[remote_type] = (mo.group('value'), url)
+ remotes[remote_type] = (mo.group("value"), url)
break
for remote_type, (value, url) in remotes.items():
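
The _XmlBaseCheck plumbing above resolves metadata.xsd from the target repo or its masters (falling back to pkgcore's bundled copy), validates each parsed document, and folds schema.error_log entries into a single report message. The lxml pattern in isolation, with placeholder paths:

    from lxml import etree

    def validate(xml_path, xsd_path):
        # both paths are placeholders; pkgcheck derives them per repo/package
        schema = etree.XMLSchema(etree.parse(xsd_path))
        doc = etree.parse(xml_path)
        if schema.validate(doc):
            return True
        for err in schema.error_log:
            print(f"line {err.line}, col {err.column}: ({err.type_name}) {err.message}")
        return False
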
diff --git a/src/pkgcheck/checks/network.py b/src/pkgcheck/checks/network.py
index cad8e536..f51796e2 100644
--- a/src/pkgcheck/checks/network.py
+++ b/src/pkgcheck/checks/network.py
@@ -24,8 +24,8 @@ class _UrlResult(results.VersionResult, results.Warning):
@property
def desc(self):
if self.url in self.message:
- return f'{self.attr}: {self.message}'
- return f'{self.attr}: {self.message}: {self.url}'
+ return f"{self.attr}: {self.message}"
+ return f"{self.attr}: {self.message}: {self.url}"
class DeadUrl(_UrlResult):
@@ -38,8 +38,8 @@ class SSLCertificateError(_UrlResult):
@property
def desc(self):
if self.url in self.message:
- return f'{self.attr}: SSL cert error: {self.message}'
- return f'{self.attr}: SSL cert error: {self.message}: {self.url}'
+ return f"{self.attr}: SSL cert error: {self.message}"
+ return f"{self.attr}: SSL cert error: {self.message}: {self.url}"
class _UpdatedUrlResult(results.VersionResult, results.Warning):
@@ -58,20 +58,20 @@ class _UpdatedUrlResult(results.VersionResult, results.Warning):
msg = [self.attr]
if self.message is not None:
msg.append(self.message)
- msg.append(f'{self.url} -> {self.new_url}')
- return ': '.join(msg)
+ msg.append(f"{self.url} -> {self.new_url}")
+ return ": ".join(msg)
class RedirectedUrl(_UpdatedUrlResult):
"""Package with a URL that permanently redirects to a different site."""
- message = 'permanently redirected'
+ message = "permanently redirected"
class HttpsUrlAvailable(_UpdatedUrlResult):
"""URL uses http:// when https:// is available."""
- message = 'HTTPS url available'
+ message = "HTTPS url available"
class _RequestException(Exception):
@@ -100,9 +100,14 @@ class _UrlCheck(NetworkCheck):
_source = sources.LatestVersionRepoSource
- known_results = frozenset([
- DeadUrl, RedirectedUrl, HttpsUrlAvailable, SSLCertificateError,
- ])
+ known_results = frozenset(
+ [
+ DeadUrl,
+ RedirectedUrl,
+ HttpsUrlAvailable,
+ SSLCertificateError,
+ ]
+ )
def _http_check(self, attr, url, *, pkg):
"""Verify http:// and https:// URLs."""
@@ -113,14 +118,14 @@ class _UrlCheck(NetworkCheck):
for response in r.history:
if not response.is_permanent_redirect:
break
- redirected_url = response.headers['location']
- hsts = 'strict-transport-security' in response.headers
+ redirected_url = response.headers["location"]
+ hsts = "strict-transport-security" in response.headers
if redirected_url:
- if redirected_url.startswith('https://') and url.startswith('http://'):
+ if redirected_url.startswith("https://") and url.startswith("http://"):
result = HttpsUrlAvailable(attr, url, redirected_url, pkg=pkg)
- elif redirected_url.startswith('http://') and hsts:
- redirected_url = f'https://{redirected_url[7:]}'
+ elif redirected_url.startswith("http://") and hsts:
+ redirected_url = f"https://{redirected_url[7:]}"
result = RedirectedUrl(attr, url, redirected_url, pkg=pkg)
else:
result = RedirectedUrl(attr, url, redirected_url, pkg=pkg)
@@ -139,16 +144,16 @@ class _UrlCheck(NetworkCheck):
for response in r.history:
if not response.is_permanent_redirect:
break
- redirected_url = response.headers['location']
- hsts = 'strict-transport-security' in response.headers
+ redirected_url = response.headers["location"]
+ hsts = "strict-transport-security" in response.headers
# skip result if http:// URL check was redirected to https://
if not isinstance(future.result(), HttpsUrlAvailable):
if redirected_url:
- if redirected_url.startswith('https://'):
+ if redirected_url.startswith("https://"):
result = HttpsUrlAvailable(attr, orig_url, redirected_url, pkg=pkg)
- elif redirected_url.startswith('http://') and hsts:
- redirected_url = f'https://{redirected_url[7:]}'
+ elif redirected_url.startswith("http://") and hsts:
+ redirected_url = f"https://{redirected_url[7:]}"
result = HttpsUrlAvailable(attr, orig_url, redirected_url, pkg=pkg)
else:
result = HttpsUrlAvailable(attr, orig_url, url, pkg=pkg)
@@ -182,7 +187,7 @@ class _UrlCheck(NetworkCheck):
if pkg is not None:
# recreate result object with different pkg target and attr
attrs = result._attrs.copy()
- attrs['attr'] = attr
+ attrs["attr"] = attr
result = result._create(**attrs, pkg=pkg)
self.results_q.put([result])
@@ -203,29 +208,36 @@ class _UrlCheck(NetworkCheck):
future.add_done_callback(partial(self.task_done, None, None))
futures[url] = future
else:
- future.add_done_callback(partial(self.task_done, kwargs['pkg'], attr))
+ future.add_done_callback(partial(self.task_done, kwargs["pkg"], attr))
def schedule(self, pkg, executor, futures):
"""Schedule verification methods to run in separate threads for all flagged URLs."""
http_urls = []
for attr, url in self._get_urls(pkg):
- if url.startswith('ftp://'):
- self._schedule_check(
- self._ftp_check, attr, url, executor, futures, pkg=pkg)
- elif url.startswith(('https://', 'http://')):
- self._schedule_check(
- self._http_check, attr, url, executor, futures, pkg=pkg)
+ if url.startswith("ftp://"):
+ self._schedule_check(self._ftp_check, attr, url, executor, futures, pkg=pkg)
+ elif url.startswith(("https://", "http://")):
+ self._schedule_check(self._http_check, attr, url, executor, futures, pkg=pkg)
http_urls.append((attr, url))
http_urls = tuple(http_urls)
http_to_https_urls = (
- (attr, url, f'https://{url[7:]}') for (attr, url) in http_urls
- if url.startswith('http://'))
+ (attr, url, f"https://{url[7:]}")
+ for (attr, url) in http_urls
+ if url.startswith("http://")
+ )
for attr, orig_url, url in http_to_https_urls:
future = futures[orig_url]
self._schedule_check(
- self._https_available_check, attr, url, executor, futures,
- future=future, orig_url=orig_url, pkg=pkg)
+ self._https_available_check,
+ attr,
+ url,
+ executor,
+ futures,
+ future=future,
+ orig_url=orig_url,
+ pkg=pkg,
+ )
class HomepageUrlCheck(_UrlCheck):
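
The schedule()/task_done() plumbing reformatted above fans URL checks out to a thread pool, deduplicating futures per URL and binding per-package context onto each shared future with functools.partial. A self-contained sketch of that callback pattern; check_url and the sample data are illustrative:

    from concurrent.futures import ThreadPoolExecutor
    from functools import partial

    def check_url(url):
        return f"checked {url}"

    def task_done(pkg, attr, future):
        # add_done_callback passes the finished future as the last argument
        print(pkg, attr, future.result())

    futures = {}
    with ThreadPoolExecutor() as executor:
        for pkg, attr, url in [("cat/pkg-a", "HOMEPAGE", "https://example.com")]:
            if (future := futures.get(url)) is None:
                future = executor.submit(check_url, url)
                futures[url] = future
            future.add_done_callback(partial(task_done, pkg, attr))
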
@@ -233,7 +245,7 @@ class HomepageUrlCheck(_UrlCheck):
def _get_urls(self, pkg):
for url in pkg.homepage:
- yield 'HOMEPAGE', url
+ yield "HOMEPAGE", url
class FetchablesUrlCheck(_UrlCheck):
@@ -243,18 +255,20 @@ class FetchablesUrlCheck(_UrlCheck):
def __init__(self, *args, use_addon, **kwargs):
super().__init__(*args, **kwargs)
- self.fetch_filter = use_addon.get_filter('fetchables')
+ self.fetch_filter = use_addon.get_filter("fetchables")
def _get_urls(self, pkg):
# ignore conditionals
fetchables, _ = self.fetch_filter(
- (fetchable,), pkg,
+ (fetchable,),
+ pkg,
pkg.generate_fetchables(
- allow_missing_checksums=True, ignore_unknown_mirrors=True,
- skip_default_mirrors=True))
+ allow_missing_checksums=True, ignore_unknown_mirrors=True, skip_default_mirrors=True
+ ),
+ )
for f in fetchables.keys():
for url in f.uri:
- yield 'SRC_URI', url
+ yield "SRC_URI", url
class MetadataUrlCheck(_UrlCheck):
@@ -264,31 +278,31 @@ class MetadataUrlCheck(_UrlCheck):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.protocols = ('http://', 'https://', 'ftp://')
+ self.protocols = ("http://", "https://", "ftp://")
self.remote_map = {
- 'bitbucket': 'https://bitbucket.org/{project}',
- 'cpan': 'https://metacpan.org/dist/{project}',
+ "bitbucket": "https://bitbucket.org/{project}",
+ "cpan": "https://metacpan.org/dist/{project}",
# some packages include a lot of modules, and scanning them
# DoS-es metacpan
# 'cpan-module': 'https://metacpan.org/pod/{project}',
- 'cran': 'https://cran.r-project.org/web/packages/{project}/',
- 'ctan': 'https://ctan.org/pkg/{project}',
- 'freedesktop-gitlab': 'https://gitlab.freedesktop.org/{project}.git/',
- 'gentoo': 'https://gitweb.gentoo.org/{project}.git/',
- 'github': 'https://github.com/{project}',
- 'gitlab': 'https://gitlab.com/{project}',
- 'gnome-gitlab': 'https://gitlab.gnome.org/{project}.git/',
- 'hackage': 'https://hackage.haskell.org/package/{project}',
- 'launchpad': 'https://launchpad.net/{project}',
- 'osdn': 'https://osdn.net/projects/{project}/',
- 'pecl': 'https://pecl.php.net/package/{project}',
- 'pypi': 'https://pypi.org/project/{project}/',
- 'rubygems': 'https://rubygems.org/gems/{project}',
- 'savannah': 'https://savannah.gnu.org/projects/{project}',
- 'savannah-nongnu': 'https://savannah.nongnu.org/projects/{project}',
- 'sourceforge': 'https://sourceforge.net/projects/{project}/',
- 'sourcehut': 'https://sr.ht/{project}/',
- 'vim': 'https://vim.org/scripts/script.php?script_id={project}',
+ "cran": "https://cran.r-project.org/web/packages/{project}/",
+ "ctan": "https://ctan.org/pkg/{project}",
+ "freedesktop-gitlab": "https://gitlab.freedesktop.org/{project}.git/",
+ "gentoo": "https://gitweb.gentoo.org/{project}.git/",
+ "github": "https://github.com/{project}",
+ "gitlab": "https://gitlab.com/{project}",
+ "gnome-gitlab": "https://gitlab.gnome.org/{project}.git/",
+ "hackage": "https://hackage.haskell.org/package/{project}",
+ "launchpad": "https://launchpad.net/{project}",
+ "osdn": "https://osdn.net/projects/{project}/",
+ "pecl": "https://pecl.php.net/package/{project}",
+ "pypi": "https://pypi.org/project/{project}/",
+ "rubygems": "https://rubygems.org/gems/{project}",
+ "savannah": "https://savannah.gnu.org/projects/{project}",
+ "savannah-nongnu": "https://savannah.nongnu.org/projects/{project}",
+ "sourceforge": "https://sourceforge.net/projects/{project}/",
+ "sourcehut": "https://sr.ht/{project}/",
+ "vim": "https://vim.org/scripts/script.php?script_id={project}",
# these platforms return 200 for errors, so no point in trying
# 'google-code': 'https://code.google.com/archive/p/{project}/',
# 'heptapod': 'https://foss.heptapod.net/{project}',
@@ -302,20 +316,20 @@ class MetadataUrlCheck(_UrlCheck):
return
# TODO: move upstream parsing to a pkgcore attribute?
- for element in ('changelog', 'doc', 'bugs-to', 'remote-id'):
- for x in tree.xpath(f'//upstream/{element}'):
+ for element in ("changelog", "doc", "bugs-to", "remote-id"):
+ for x in tree.xpath(f"//upstream/{element}"):
if x.text:
url = x.text
- if element == 'remote-id':
+ if element == "remote-id":
# Use remote-id -> URL map to determine actual URL,
# skipping verification for unmapped remote-ids.
try:
- url = self.remote_map[x.attrib['type']].format(project=url)
+ url = self.remote_map[x.attrib["type"]].format(project=url)
except KeyError:
continue
# skip unsupported protocols, e.g. mailto URLs from bugs-to
if url.startswith(self.protocols):
- yield f'metadata.xml: {element}', url
+ yield f"metadata.xml: {element}", url
def schedule(self, pkgs, *args, **kwargs):
super().schedule(pkgs[-1], *args, **kwargs)
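
_http_check above walks response.history, records the location of the last permanent redirect, and upgrades an http:// target to https:// when the server advertises Strict-Transport-Security. A rough standalone sketch with requests; the timeout and control flow are simplified relative to the check:

    import requests

    def final_permanent_redirect(url):
        r = requests.get(url, timeout=10)
        redirected, hsts = None, False
        for resp in r.history:
            if not resp.is_permanent_redirect:
                break
            redirected = resp.headers["location"]
            hsts = "strict-transport-security" in resp.headers
        if redirected and redirected.startswith("http://") and hsts:
            # the server promises HTTPS, so report the upgraded URL
            redirected = f"https://{redirected[7:]}"
        return redirected
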
diff --git a/src/pkgcheck/checks/overlays.py b/src/pkgcheck/checks/overlays.py
index 1268542a..cbe3d5b6 100644
--- a/src/pkgcheck/checks/overlays.py
+++ b/src/pkgcheck/checks/overlays.py
@@ -18,8 +18,8 @@ class UnusedInMastersLicenses(results.VersionResult, results.Warning):
@property
def desc(self):
s = pluralism(self.licenses)
- licenses = ', '.join(self.licenses)
- return f'unused license{s} in master repo(s): {licenses}'
+ licenses = ", ".join(self.licenses)
+ return f"unused license{s} in master repo(s): {licenses}"
class UnusedInMastersMirrors(results.VersionResult, results.Warning):
@@ -35,8 +35,8 @@ class UnusedInMastersMirrors(results.VersionResult, results.Warning):
@property
def desc(self):
s = pluralism(self.mirrors)
- mirrors = ', '.join(self.mirrors)
- return f'unused mirror{s} in master repo(s): {mirrors}'
+ mirrors = ", ".join(self.mirrors)
+ return f"unused mirror{s} in master repo(s): {mirrors}"
class UnusedInMastersEclasses(results.VersionResult, results.Warning):
@@ -51,9 +51,9 @@ class UnusedInMastersEclasses(results.VersionResult, results.Warning):
@property
def desc(self):
- es = pluralism(self.eclasses, plural='es')
- eclasses = ', '.join(self.eclasses)
- return f'unused eclass{es} in master repo(s): {eclasses}'
+ es = pluralism(self.eclasses, plural="es")
+ eclasses = ", ".join(self.eclasses)
+ return f"unused eclass{es} in master repo(s): {eclasses}"
class UnusedInMastersGlobalUse(results.VersionResult, results.Warning):
@@ -69,18 +69,22 @@ class UnusedInMastersGlobalUse(results.VersionResult, results.Warning):
@property
def desc(self):
s = pluralism(self.flags)
- flags = ', '.join(self.flags)
- return f'use.desc unused flag{s} in master repo(s): {flags}'
+ flags = ", ".join(self.flags)
+ return f"use.desc unused flag{s} in master repo(s): {flags}"
class UnusedInMastersCheck(MirrorsCheck, OverlayRepoCheck, RepoCheck, OptionalCheck):
"""Check for various metadata that may be removed from master repos."""
_source = sources.RepositoryRepoSource
- known_results = frozenset([
- UnusedInMastersLicenses, UnusedInMastersMirrors, UnusedInMastersEclasses,
- UnusedInMastersGlobalUse,
- ])
+ known_results = frozenset(
+ [
+ UnusedInMastersLicenses,
+ UnusedInMastersMirrors,
+ UnusedInMastersEclasses,
+ UnusedInMastersGlobalUse,
+ ]
+ )
def start(self):
self.unused_master_licenses = set()
@@ -93,8 +97,7 @@ class UnusedInMastersCheck(MirrorsCheck, OverlayRepoCheck, RepoCheck, OptionalCh
self.unused_master_licenses.update(repo.licenses)
self.unused_master_mirrors.update(repo.mirrors.keys())
self.unused_master_eclasses.update(repo.eclass_cache.eclasses.keys())
- self.unused_master_flags.update(
- flag for matcher, (flag, desc) in repo.config.use_desc)
+ self.unused_master_flags.update(flag for matcher, (flag, desc) in repo.config.use_desc)
# determine unused licenses/mirrors/eclasses/flags across all master repos
for repo in self.options.target_repo.masters:
@@ -103,7 +106,8 @@ class UnusedInMastersCheck(MirrorsCheck, OverlayRepoCheck, RepoCheck, OptionalCh
self.unused_master_mirrors.difference_update(self.get_mirrors(pkg))
self.unused_master_eclasses.difference_update(pkg.inherited)
self.unused_master_flags.difference_update(
- pkg.iuse_stripped.difference(pkg.local_use.keys()))
+ pkg.iuse_stripped.difference(pkg.local_use.keys())
+ )
def feed(self, pkg):
# report licenses used in the pkg but not in any pkg from the master repo(s)
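
UnusedInMastersCheck seeds its sets from every master repo in start(), strips out whatever the master packages actually use, and feed() then flags overlay packages that reference the leftovers. The bookkeeping reduces to set.difference_update, as in this toy example with made-up data:

    unused_master_licenses = {"MIT", "BSD", "Apache-2.0"}
    master_pkgs = [{"MIT"}, {"BSD"}]

    # anything still present after subtracting real usage is reportable
    for licenses in master_pkgs:
        unused_master_licenses.difference_update(licenses)

    print(sorted(unused_master_licenses))  # ['Apache-2.0']
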
diff --git a/src/pkgcheck/checks/perl.py b/src/pkgcheck/checks/perl.py
index 3f6c2dde..f2c3bd25 100644
--- a/src/pkgcheck/checks/perl.py
+++ b/src/pkgcheck/checks/perl.py
@@ -19,7 +19,7 @@ class MismatchedPerlVersion(results.VersionResult, results.Warning):
@property
def desc(self):
- return f'DIST_VERSION={self.dist_version} normalizes to {self.normalized}'
+ return f"DIST_VERSION={self.dist_version} normalizes to {self.normalized}"
class _PerlException(Exception):
@@ -36,25 +36,29 @@ class _PerlConnection:
# start perl client for normalizing perl module versions into package versions
try:
self.perl_client = subprocess.Popen(
- ['perl', pjoin(const.DATA_PATH, 'perl-version.pl')],
- text=True, bufsize=1,
- stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ ["perl", pjoin(const.DATA_PATH, "perl-version.pl")],
+ text=True,
+ bufsize=1,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
except FileNotFoundError:
- raise _PerlException('perl not installed on system')
+ raise _PerlException("perl not installed on system")
# check if the script is running
ready = self.perl_client.stdout.readline().strip()
- if ready != 'ready' or self.perl_client.poll():
- err_msg = 'failed to run perl script'
+ if ready != "ready" or self.perl_client.poll():
+ err_msg = "failed to run perl script"
if options.verbosity > 0:
stderr = self.perl_client.stderr.read().strip()
- err_msg += f': {stderr}'
+ err_msg += f": {stderr}"
raise _PerlException(err_msg)
def normalize(self, version):
"""Normalize a given version number to its perl equivalent."""
with self.process_lock:
- self.perl_client.stdin.write(version + '\n')
+ self.perl_client.stdin.write(version + "\n")
return self.perl_client.stdout.readline().strip()
def __del__(self):
@@ -66,14 +70,16 @@ class _PerlConnection:
class PerlCheck(OptionalCheck):
"""Perl ebuild related checks."""
- _restricted_source = (sources.RestrictionRepoSource, (
- packages.PackageRestriction('inherited', values.ContainmentMatch2('perl-module')),))
- _source = (sources.EbuildFileRepoSource, (), (('source', _restricted_source),))
+ _restricted_source = (
+ sources.RestrictionRepoSource,
+ (packages.PackageRestriction("inherited", values.ContainmentMatch2("perl-module")),),
+ )
+ _source = (sources.EbuildFileRepoSource, (), (("source", _restricted_source),))
known_results = frozenset([MismatchedPerlVersion])
def __init__(self, *args):
super().__init__(*args)
- self.dist_version_re = re.compile(r'DIST_VERSION=(?P<dist_version>\d+(\.\d+)*)\s*\n')
+ self.dist_version_re = re.compile(r"DIST_VERSION=(?P<dist_version>\d+(\.\d+)*)\s*\n")
# Initialize connection with perl script. This is done during
# __init__() since only one running version of the script is shared
# between however many scanning processes will be run. Also, it makes
@@ -84,8 +90,8 @@ class PerlCheck(OptionalCheck):
raise SkipCheck(self, str(e))
def feed(self, pkg):
- if mo := self.dist_version_re.search(''.join(pkg.lines)):
- dist_version = mo.group('dist_version')
+ if mo := self.dist_version_re.search("".join(pkg.lines)):
+ dist_version = mo.group("dist_version")
normalized = self.perl.normalize(dist_version)
if normalized != pkg.version:
yield MismatchedPerlVersion(dist_version, normalized, pkg=pkg)
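
_PerlConnection keeps a single line-buffered perl helper alive for the whole scan and round-trips one version string per line under a process lock. The coprocess pattern in isolation, with cat standing in for the real perl-version.pl helper:

    import subprocess

    proc = subprocess.Popen(
        ["cat"],  # placeholder; pkgcheck launches perl with perl-version.pl here
        text=True,
        bufsize=1,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
    )
    proc.stdin.write("1.2.3\n")
    proc.stdin.flush()  # bufsize=1 line-buffers, but an explicit flush is safer
    print(proc.stdout.readline().strip())  # 1.2.3
    proc.terminate()
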
diff --git a/src/pkgcheck/checks/pkgdir.py b/src/pkgcheck/checks/pkgdir.py
index cc82c7c8..ef90baac 100644
--- a/src/pkgcheck/checks/pkgdir.py
+++ b/src/pkgcheck/checks/pkgdir.py
@@ -14,9 +14,9 @@ from . import Check, GentooRepoCheck
# allowed filename characters: "a-zA-Z0-9._-+:"
allowed_filename_chars = set()
-allowed_filename_chars.update(chr(x) for x in range(ord('a'), ord('z') + 1))
-allowed_filename_chars.update(chr(x) for x in range(ord('A'), ord('Z') + 1))
-allowed_filename_chars.update(chr(x) for x in range(ord('0'), ord('9') + 1))
+allowed_filename_chars.update(chr(x) for x in range(ord("a"), ord("z") + 1))
+allowed_filename_chars.update(chr(x) for x in range(ord("A"), ord("Z") + 1))
+allowed_filename_chars.update(chr(x) for x in range(ord("0"), ord("9") + 1))
allowed_filename_chars.update([".", "-", "_", "+", ":"])
@@ -30,8 +30,8 @@ class MismatchedPN(results.PackageResult, results.Error):
@property
def desc(self):
s = pluralism(self.ebuilds)
- ebuilds = ', '.join(self.ebuilds)
- return f'mismatched package name{s}: [ {ebuilds} ]'
+ ebuilds = ", ".join(self.ebuilds)
+ return f"mismatched package name{s}: [ {ebuilds} ]"
class InvalidPN(results.PackageResult, results.Error):
@@ -44,8 +44,8 @@ class InvalidPN(results.PackageResult, results.Error):
@property
def desc(self):
s = pluralism(self.ebuilds)
- ebuilds = ', '.join(self.ebuilds)
- return f'invalid package name{s}: [ {ebuilds} ]'
+ ebuilds = ", ".join(self.ebuilds)
+ return f"invalid package name{s}: [ {ebuilds} ]"
class EqualVersions(results.PackageResult, results.Error):
@@ -74,8 +74,8 @@ class DuplicateFiles(results.PackageResult, results.Warning):
@property
def desc(self):
- files = ', '.join(map(repr, self.files))
- return f'duplicate identical files in FILESDIR: {files}'
+ files = ", ".join(map(repr, self.files))
+ return f"duplicate identical files in FILESDIR: {files}"
class EmptyFile(results.PackageResult, results.Warning):
@@ -87,7 +87,7 @@ class EmptyFile(results.PackageResult, results.Warning):
@property
def desc(self):
- return f'empty file in FILESDIR: {self.filename!r}'
+ return f"empty file in FILESDIR: {self.filename!r}"
class ExecutableFile(results.PackageResult, results.Warning):
@@ -99,7 +99,7 @@ class ExecutableFile(results.PackageResult, results.Warning):
@property
def desc(self):
- return f'unnecessary executable bit: {self.filename!r}'
+ return f"unnecessary executable bit: {self.filename!r}"
class UnknownPkgDirEntry(results.PackageResult, results.Warning):
@@ -115,9 +115,9 @@ class UnknownPkgDirEntry(results.PackageResult, results.Warning):
@property
def desc(self):
- files = ', '.join(map(repr, self.filenames))
- y = pluralism(self.filenames, singular='y', plural='ies')
- return f'unknown entr{y}: {files}'
+ files = ", ".join(map(repr, self.filenames))
+ y = pluralism(self.filenames, singular="y", plural="ies")
+ return f"unknown entr{y}: {files}"
class SizeViolation(results.PackageResult, results.Warning):
@@ -132,8 +132,10 @@ class SizeViolation(results.PackageResult, results.Warning):
@property
def desc(self):
- return (f'{self.filename!r} exceeds {sizeof_fmt(self.limit)} in size; '
- f'{sizeof_fmt(self.size)} total')
+ return (
+ f"{self.filename!r} exceeds {sizeof_fmt(self.limit)} in size; "
+ f"{sizeof_fmt(self.size)} total"
+ )
class TotalSizeViolation(results.PackageResult, results.Warning):
@@ -147,8 +149,10 @@ class TotalSizeViolation(results.PackageResult, results.Warning):
@property
def desc(self):
- return (f'files/ directory exceeds {sizeof_fmt(self.limit)} in size; '
- f'{sizeof_fmt(self.size)} total')
+ return (
+ f"files/ directory exceeds {sizeof_fmt(self.limit)} in size; "
+ f"{sizeof_fmt(self.size)} total"
+ )
class BannedCharacter(results.PackageResult, results.Error):
@@ -167,8 +171,8 @@ class BannedCharacter(results.PackageResult, results.Error):
@property
def desc(self):
s = pluralism(self.chars)
- chars = ', '.join(map(repr, self.chars))
- return f'filename {self.filename!r} character{s} outside allowed set: {chars}'
+ chars = ", ".join(map(repr, self.chars))
+ return f"filename {self.filename!r} character{s} outside allowed set: {chars}"
class InvalidUTF8(results.PackageResult, results.Error):
@@ -187,17 +191,27 @@ class InvalidUTF8(results.PackageResult, results.Error):
class PkgDirCheck(Check):
"""Scan ebuild directory for various file-related issues."""
- _source = (sources.PackageRepoSource, (), (('source', sources.RawRepoSource),))
+ _source = (sources.PackageRepoSource, (), (("source", sources.RawRepoSource),))
ignore_dirs = frozenset(["cvs", ".svn", ".bzr"])
required_addons = (addons.git.GitAddon,)
- known_results = frozenset([
- DuplicateFiles, EmptyFile, ExecutableFile, UnknownPkgDirEntry, SizeViolation,
- BannedCharacter, InvalidUTF8, MismatchedPN, InvalidPN, TotalSizeViolation,
- ])
+ known_results = frozenset(
+ [
+ DuplicateFiles,
+ EmptyFile,
+ ExecutableFile,
+ UnknownPkgDirEntry,
+ SizeViolation,
+ BannedCharacter,
+ InvalidUTF8,
+ MismatchedPN,
+ InvalidPN,
+ TotalSizeViolation,
+ ]
+ )
# TODO: put some 'preferred algorithms by purpose' into snakeoil?
- digest_algo = 'sha256'
+ digest_algo = "sha256"
def __init__(self, *args, git_addon):
super().__init__(*args)
@@ -206,7 +220,7 @@ class PkgDirCheck(Check):
def feed(self, pkgset):
pkg = pkgset[0]
pkg_path = pjoin(self.options.target_repo.location, pkg.category, pkg.package)
- ebuild_ext = '.ebuild'
+ ebuild_ext = ".ebuild"
mismatched = []
invalid = []
unknown = []
@@ -228,20 +242,19 @@ class PkgDirCheck(Check):
if filename.endswith(ebuild_ext):
try:
- with open(path, mode='rb') as f:
+ with open(path, mode="rb") as f:
f.read(8192).decode()
except UnicodeDecodeError as e:
yield InvalidUTF8(filename, str(e), pkg=pkg)
- pkg_name = os.path.basename(filename[:-len(ebuild_ext)])
+ pkg_name = os.path.basename(filename[: -len(ebuild_ext)])
try:
- pkg_atom = atom_cls(f'={pkg.category}/{pkg_name}')
+ pkg_atom = atom_cls(f"={pkg.category}/{pkg_name}")
if pkg_atom.package != os.path.basename(pkg_path):
mismatched.append(pkg_name)
except MalformedAtom:
invalid.append(pkg_name)
- elif (self.options.gentoo_repo and
- filename not in ('Manifest', 'metadata.xml', 'files')):
+ elif self.options.gentoo_repo and filename not in ("Manifest", "metadata.xml", "files"):
unknown.append(filename)
if mismatched:
@@ -254,7 +267,7 @@ class PkgDirCheck(Check):
files_by_size = defaultdict(list)
pkg_path_len = len(pkg_path) + 1
total_size = 0
- for root, dirs, files in os.walk(pjoin(pkg_path, 'files')):
+ for root, dirs, files in os.walk(pjoin(pkg_path, "files")):
# don't visit any ignored directories
for d in self.ignore_dirs.intersection(dirs):
dirs.remove(d)
@@ -274,10 +287,12 @@ class PkgDirCheck(Check):
total_size += file_stat.st_size
if file_stat.st_size > SizeViolation.limit:
yield SizeViolation(
- pjoin(base_dir, filename), file_stat.st_size, pkg=pkg)
+ pjoin(base_dir, filename), file_stat.st_size, pkg=pkg
+ )
if banned_chars := set(filename) - allowed_filename_chars:
yield BannedCharacter(
- pjoin(base_dir, filename), sorted(banned_chars), pkg=pkg)
+ pjoin(base_dir, filename), sorted(banned_chars), pkg=pkg
+ )
if total_size > TotalSizeViolation.limit:
yield TotalSizeViolation(total_size, pkg=pkg)
@@ -324,9 +339,9 @@ class LiveOnlyPackage(results.PackageResult, results.Warning):
@property
def desc(self):
if self.age < 365:
- return f'all versions are VCS-based added over {self.age} days ago'
+ return f"all versions are VCS-based added over {self.age} days ago"
years = round(self.age / 365, 2)
- return f'all versions are VCS-based added over {years} years ago'
+ return f"all versions are VCS-based added over {years} years ago"
class LiveOnlyCheck(GentooRepoCheck):
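
PkgDirCheck's FILESDIR walk prunes ignored VCS directories in place so os.walk never descends into them, then tallies per-file and total sizes against the SizeViolation limits. A compact sketch; the 20 KiB limit is an assumption for illustration, not necessarily the check's real threshold:

    import os

    IGNORE_DIRS = {"cvs", ".svn", ".bzr"}
    PER_FILE_LIMIT = 20 * 1024  # assumed limit, for the sketch only

    def oversized(files_dir):
        total = 0
        for root, dirs, files in os.walk(files_dir):
            dirs[:] = [d for d in dirs if d not in IGNORE_DIRS]  # in-place prune
            for name in files:
                size = os.lstat(os.path.join(root, name)).st_size
                total += size
                if size > PER_FILE_LIMIT:
                    yield os.path.join(root, name), size
        print(f"files/ total: {total} bytes")
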
diff --git a/src/pkgcheck/checks/profiles.py b/src/pkgcheck/checks/profiles.py
index db49cf38..8673ed7d 100644
--- a/src/pkgcheck/checks/profiles.py
+++ b/src/pkgcheck/checks/profiles.py
@@ -25,7 +25,7 @@ class UnknownProfilePackage(results.ProfilesResult, results.Warning):
@property
def desc(self):
- return f'{self.path!r}: unknown package: {self.atom!r}'
+ return f"{self.path!r}: unknown package: {self.atom!r}"
class UnmatchedProfilePackageUnmask(results.ProfilesResult, results.Warning):
@@ -42,7 +42,7 @@ class UnmatchedProfilePackageUnmask(results.ProfilesResult, results.Warning):
@property
def desc(self):
- return f'{self.path!r}: unmask of not masked package: {self.atom!r}'
+ return f"{self.path!r}: unmask of not masked package: {self.atom!r}"
class UnknownProfilePackageUse(results.ProfilesResult, results.Warning):
@@ -57,9 +57,9 @@ class UnknownProfilePackageUse(results.ProfilesResult, results.Warning):
@property
def desc(self):
s = pluralism(self.flags)
- flags = ', '.join(self.flags)
- atom = f'{self.atom}[{flags}]'
- return f'{self.path!r}: unknown package USE flag{s}: {atom!r}'
+ flags = ", ".join(self.flags)
+ atom = f"{self.atom}[{flags}]"
+ return f"{self.path!r}: unknown package USE flag{s}: {atom!r}"
class UnknownProfileUse(results.ProfilesResult, results.Warning):
@@ -73,8 +73,8 @@ class UnknownProfileUse(results.ProfilesResult, results.Warning):
@property
def desc(self):
s = pluralism(self.flags)
- flags = ', '.join(map(repr, self.flags))
- return f'{self.path!r}: unknown USE flag{s}: {flags}'
+ flags = ", ".join(map(repr, self.flags))
+ return f"{self.path!r}: unknown USE flag{s}: {flags}"
class UnknownProfilePackageKeywords(results.ProfilesResult, results.Warning):
@@ -89,8 +89,8 @@ class UnknownProfilePackageKeywords(results.ProfilesResult, results.Warning):
@property
def desc(self):
s = pluralism(self.keywords)
- keywords = ', '.join(map(repr, self.keywords))
- return f'{self.path!r}: unknown package keyword{s}: {self.atom}: {keywords}'
+ keywords = ", ".join(map(repr, self.keywords))
+ return f"{self.path!r}: unknown package keyword{s}: {self.atom}: {keywords}"
class UnknownProfileUseExpand(results.ProfilesResult, results.Warning):
@@ -104,8 +104,8 @@ class UnknownProfileUseExpand(results.ProfilesResult, results.Warning):
@property
def desc(self):
s = pluralism(self.groups)
- groups = ', '.join(self.groups)
- return f'{self.path!r}: unknown USE_EXPAND group{s}: {groups}'
+ groups = ", ".join(self.groups)
+ return f"{self.path!r}: unknown USE_EXPAND group{s}: {groups}"
class ProfileWarning(results.ProfilesResult, results.LogWarning):
@@ -118,8 +118,8 @@ class ProfileError(results.ProfilesResult, results.LogError):
# mapping of profile log levels to result classes
_logmap = (
- base.LogMap('pkgcore.log.logger.warning', ProfileWarning),
- base.LogMap('pkgcore.log.logger.error', ProfileError),
+ base.LogMap("pkgcore.log.logger.warning", ProfileWarning),
+ base.LogMap("pkgcore.log.logger.error", ProfileError),
)
@@ -145,12 +145,18 @@ class ProfilesCheck(Check):
_source = sources.ProfilesRepoSource
required_addons = (addons.UseAddon, addons.KeywordsAddon)
- known_results = frozenset([
- UnknownProfilePackage, UnmatchedProfilePackageUnmask,
- UnknownProfilePackageUse, UnknownProfileUse,
- UnknownProfilePackageKeywords, UnknownProfileUseExpand,
- ProfileWarning, ProfileError,
- ])
+ known_results = frozenset(
+ [
+ UnknownProfilePackage,
+ UnmatchedProfilePackageUnmask,
+ UnknownProfilePackageUse,
+ UnknownProfileUse,
+ UnknownProfilePackageKeywords,
+ UnknownProfileUseExpand,
+ ProfileWarning,
+ ProfileError,
+ ]
+ )
# mapping between known files and verification methods
known_files = {}
@@ -165,16 +171,18 @@ class ProfilesCheck(Check):
local_iuse = {use for _pkg, (use, _desc) in repo.config.use_local_desc}
self.available_iuse = frozenset(
- local_iuse | use_addon.global_iuse |
- use_addon.global_iuse_expand | use_addon.global_iuse_implicit)
+ local_iuse
+ | use_addon.global_iuse
+ | use_addon.global_iuse_expand
+ | use_addon.global_iuse_implicit
+ )
- @verify_files(('parent', 'parents'),
- ('eapi', 'eapi'))
+ @verify_files(("parent", "parents"), ("eapi", "eapi"))
def _pull_attr(self, *args):
"""Verification only needs to pull the profile attr."""
yield from ()
- @verify_files(('deprecated', 'deprecated'))
+ @verify_files(("deprecated", "deprecated"))
def _deprecated(self, filename, node, vals):
# make sure replacement profile exists
if vals is not None:
@@ -183,47 +191,51 @@ class ProfilesCheck(Check):
addons.profiles.ProfileNode(pjoin(self.profiles_dir, replacement))
except profiles_mod.ProfileError:
yield ProfileError(
- f'nonexistent replacement {replacement!r} '
- f'for deprecated profile: {node.name!r}')
+ f"nonexistent replacement {replacement!r} "
+ f"for deprecated profile: {node.name!r}"
+ )
# non-spec files
- @verify_files(('package.keywords', 'keywords'),
- ('package.accept_keywords', 'accept_keywords'))
+ @verify_files(("package.keywords", "keywords"), ("package.accept_keywords", "accept_keywords"))
def _pkg_keywords(self, filename, node, vals):
for atom, keywords in vals:
if invalid := sorted(set(keywords) - self.keywords.valid):
- yield UnknownProfilePackageKeywords(
- pjoin(node.name, filename), atom, invalid)
-
- @verify_files(('use.force', 'use_force'),
- ('use.stable.force', 'use_stable_force'),
- ('use.mask', 'use_mask'),
- ('use.stable.mask', 'use_stable_mask'))
+ yield UnknownProfilePackageKeywords(pjoin(node.name, filename), atom, invalid)
+
+ @verify_files(
+ ("use.force", "use_force"),
+ ("use.stable.force", "use_stable_force"),
+ ("use.mask", "use_mask"),
+ ("use.stable.mask", "use_stable_mask"),
+ )
def _use(self, filename, node, vals):
# TODO: give ChunkedDataDict some dict view methods
d = vals.render_to_dict()
for _, entries in d.items():
for _, disabled, enabled in entries:
if unknown_disabled := set(disabled) - self.available_iuse:
- flags = ('-' + u for u in unknown_disabled)
- yield UnknownProfileUse(
- pjoin(node.name, filename), flags)
+ flags = ("-" + u for u in unknown_disabled)
+ yield UnknownProfileUse(pjoin(node.name, filename), flags)
if unknown_enabled := set(enabled) - self.available_iuse:
- yield UnknownProfileUse(
- pjoin(node.name, filename), unknown_enabled)
+ yield UnknownProfileUse(pjoin(node.name, filename), unknown_enabled)
- @verify_files(('packages', 'packages'),
- ('package.unmask', 'unmasks'),
- ('package.deprecated', 'pkg_deprecated'))
+ @verify_files(
+ ("packages", "packages"),
+ ("package.unmask", "unmasks"),
+ ("package.deprecated", "pkg_deprecated"),
+ )
def _pkg_atoms(self, filename, node, vals):
for x in iflatten_instance(vals, atom_cls):
if not self.search_repo.match(x):
yield UnknownProfilePackage(pjoin(node.name, filename), x)
- @verify_files(('package.mask', 'masks'),)
+ @verify_files(
+ ("package.mask", "masks"),
+ )
def _pkg_masks(self, filename, node, vals):
- all_masked = set().union(*(masked[1]
- for p in profiles_mod.ProfileStack(node.path).stack if (masked := p.masks)))
+ all_masked = set().union(
+ *(masked[1] for p in profiles_mod.ProfileStack(node.path).stack if (masked := p.masks))
+ )
unmasked, masked = vals
for x in masked:
@@ -235,11 +247,13 @@ class ProfilesCheck(Check):
elif x not in all_masked:
yield UnmatchedProfilePackageUnmask(pjoin(node.name, filename), x)
- @verify_files(('package.use', 'pkg_use'),
- ('package.use.force', 'pkg_use_force'),
- ('package.use.stable.force', 'pkg_use_stable_force'),
- ('package.use.mask', 'pkg_use_mask'),
- ('package.use.stable.mask', 'pkg_use_stable_mask'))
+ @verify_files(
+ ("package.use", "pkg_use"),
+ ("package.use.force", "pkg_use_force"),
+ ("package.use.stable.force", "pkg_use_stable_force"),
+ ("package.use.mask", "pkg_use_mask"),
+ ("package.use.stable.mask", "pkg_use_stable_mask"),
+ )
def _pkg_use(self, filename, node, vals):
# TODO: give ChunkedDataDict some dict view methods
d = vals
@@ -251,19 +265,18 @@ class ProfilesCheck(Check):
if pkgs := self.search_repo.match(a):
available = {u for pkg in pkgs for u in pkg.iuse_stripped}
if unknown_disabled := set(disabled) - available:
- flags = ('-' + u for u in unknown_disabled)
- yield UnknownProfilePackageUse(
- pjoin(node.name, filename), a, flags)
+ flags = ("-" + u for u in unknown_disabled)
+ yield UnknownProfilePackageUse(pjoin(node.name, filename), a, flags)
if unknown_enabled := set(enabled) - available:
yield UnknownProfilePackageUse(
- pjoin(node.name, filename), a, unknown_enabled)
+ pjoin(node.name, filename), a, unknown_enabled
+ )
else:
- yield UnknownProfilePackage(
- pjoin(node.name, filename), a)
+ yield UnknownProfilePackage(pjoin(node.name, filename), a)
- @verify_files(('make.defaults', 'make_defaults'))
+ @verify_files(("make.defaults", "make_defaults"))
def _make_defaults(self, filename, node, vals):
- if defined := set(vals.get('USE_EXPAND', '').split()):
+ if defined := set(vals.get("USE_EXPAND", "").split()):
if unknown := defined - self.use_expand_groups:
yield UnknownProfileUseExpand(pjoin(node.name, filename), sorted(unknown))
@@ -286,8 +299,8 @@ class UnusedProfileDirs(results.ProfilesResult, results.Warning):
@property
def desc(self):
s = pluralism(self.dirs)
- dirs = ', '.join(map(repr, self.dirs))
- return f'unused profile dir{s}: {dirs}'
+ dirs = ", ".join(map(repr, self.dirs))
+ return f"unused profile dir{s}: {dirs}"
class ArchesWithoutProfiles(results.ProfilesResult, results.Warning):
@@ -299,9 +312,9 @@ class ArchesWithoutProfiles(results.ProfilesResult, results.Warning):
@property
def desc(self):
- es = pluralism(self.arches, plural='es')
- arches = ', '.join(self.arches)
- return f'arch{es} without profiles: {arches}'
+ es = pluralism(self.arches, plural="es")
+ arches = ", ".join(self.arches)
+ return f"arch{es} without profiles: {arches}"
class NonexistentProfilePath(results.ProfilesResult, results.Error):
@@ -313,7 +326,7 @@ class NonexistentProfilePath(results.ProfilesResult, results.Error):
@property
def desc(self):
- return f'nonexistent profile path: {self.path!r}'
+ return f"nonexistent profile path: {self.path!r}"
class LaggingProfileEapi(results.ProfilesResult, results.Warning):
@@ -329,8 +342,8 @@ class LaggingProfileEapi(results.ProfilesResult, results.Warning):
@property
def desc(self):
return (
- f'{self.profile!r} profile has EAPI {self.eapi}, '
- f'{self.parent!r} parent has EAPI {self.parent_eapi}'
+ f"{self.profile!r} profile has EAPI {self.eapi}, "
+ f"{self.parent!r} parent has EAPI {self.parent_eapi}"
)
@@ -352,13 +365,13 @@ class _ProfileEapiResult(results.ProfilesResult):
class BannedProfileEapi(_ProfileEapiResult, results.Error):
"""Profile has an EAPI that is banned in the repository."""
- _type = 'banned'
+ _type = "banned"
class DeprecatedProfileEapi(_ProfileEapiResult, results.Warning):
"""Profile has an EAPI that is deprecated in the repository."""
- _type = 'deprecated'
+ _type = "deprecated"
class UnknownCategoryDirs(results.ProfilesResult, results.Warning):
@@ -373,9 +386,9 @@ class UnknownCategoryDirs(results.ProfilesResult, results.Warning):
@property
def desc(self):
- dirs = ', '.join(self.dirs)
+ dirs = ", ".join(self.dirs)
s = pluralism(self.dirs)
- return f'unknown category dir{s}: {dirs}'
+ return f"unknown category dir{s}: {dirs}"
class NonexistentCategories(results.ProfilesResult, results.Warning):
@@ -387,9 +400,9 @@ class NonexistentCategories(results.ProfilesResult, results.Warning):
@property
def desc(self):
- categories = ', '.join(self.categories)
- ies = pluralism(self.categories, singular='y', plural='ies')
- return f'nonexistent profiles/categories entr{ies}: {categories}'
+ categories = ", ".join(self.categories)
+ ies = pluralism(self.categories, singular="y", plural="ies")
+ return f"nonexistent profiles/categories entr{ies}: {categories}"
def dir_parents(path):
@@ -399,11 +412,11 @@ def dir_parents(path):
>>> list(dir_parents('/root/foo/bar/baz'))
['root/foo/bar', 'root/foo', 'root']
"""
- path = os.path.normpath(path.strip('/'))
+ path = os.path.normpath(path.strip("/"))
while path:
yield path
dirname, _basename = os.path.split(path)
- path = dirname.rstrip('/')
+ path = dirname.rstrip("/")
class RepoProfilesCheck(RepoCheck):
@@ -415,14 +428,23 @@ class RepoProfilesCheck(RepoCheck):
_source = (sources.EmptySource, (base.profiles_scope,))
required_addons = (addons.profiles.ProfileAddon,)
- known_results = frozenset([
- ArchesWithoutProfiles, UnusedProfileDirs, NonexistentProfilePath,
- UnknownCategoryDirs, NonexistentCategories, LaggingProfileEapi,
- ProfileError, ProfileWarning, BannedProfileEapi, DeprecatedProfileEapi,
- ])
+ known_results = frozenset(
+ [
+ ArchesWithoutProfiles,
+ UnusedProfileDirs,
+ NonexistentProfilePath,
+ UnknownCategoryDirs,
+ NonexistentCategories,
+ LaggingProfileEapi,
+ ProfileError,
+ ProfileWarning,
+ BannedProfileEapi,
+ DeprecatedProfileEapi,
+ ]
+ )
# known profile status types for the gentoo repo
- known_profile_statuses = frozenset(['stable', 'dev', 'exp'])
+ known_profile_statuses = frozenset(["stable", "dev", "exp"])
def __init__(self, *args, profile_addon):
super().__init__(*args)
@@ -433,17 +455,21 @@ class RepoProfilesCheck(RepoCheck):
def finish(self):
if self.options.gentoo_repo:
- if unknown_category_dirs := set(self.repo.category_dirs).difference(self.repo.categories):
+ if unknown_category_dirs := set(self.repo.category_dirs).difference(
+ self.repo.categories
+ ):
yield UnknownCategoryDirs(sorted(unknown_category_dirs))
- if nonexistent_categories := set(self.repo.config.categories).difference(self.repo.category_dirs):
+ if nonexistent_categories := set(self.repo.config.categories).difference(
+ self.repo.category_dirs
+ ):
yield NonexistentCategories(sorted(nonexistent_categories))
if arches_without_profiles := set(self.arches) - set(self.repo.profiles.arches()):
yield ArchesWithoutProfiles(sorted(arches_without_profiles))
- root_profile_dirs = {'embedded'}
+ root_profile_dirs = {"embedded"}
available_profile_dirs = set()
for root, _dirs, _files in os.walk(self.profiles_dir):
- if d := root[len(self.profiles_dir):].lstrip('/'):
+ if d := root[len(self.profiles_dir) :].lstrip("/"):
available_profile_dirs.add(d)
available_profile_dirs -= self.non_profile_dirs | root_profile_dirs
@@ -456,8 +482,11 @@ class RepoProfilesCheck(RepoCheck):
# forcibly parse profiles.desc and convert log warnings/errors into reports
with base.LogReports(*_logmap) as log_reports:
profiles = Profiles.parse(
- self.profiles_dir, self.repo.repo_id,
- known_status=known_profile_statuses, known_arch=self.arches)
+ self.profiles_dir,
+ self.repo.repo_id,
+ known_status=known_profile_statuses,
+ known_arch=self.arches,
+ )
yield from log_reports
banned_eapis = self.repo.config.profile_eapis_banned
@@ -484,8 +513,7 @@ class RepoProfilesCheck(RepoCheck):
for profile, parents in lagging_profile_eapi.items():
parent = parents[-1]
- yield LaggingProfileEapi(
- profile.name, str(profile.eapi), parent.name, str(parent.eapi))
+ yield LaggingProfileEapi(profile.name, str(profile.eapi), parent.name, str(parent.eapi))
for profile in banned_profile_eapi:
yield BannedProfileEapi(profile.name, profile.eapi)
for profile in deprecated_profile_eapi:
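
The desc properties above lean on snakeoil's pluralism helper to pick a suffix. A minimal stand-in for the calling convention used throughout these hunks (the body below is an assumption, not the vendored implementation):

def pluralism(seq, singular="", plural="s"):
    # stand-in: plural suffix unless the sequence has exactly one item
    return singular if len(seq) == 1 else plural

dirs = ["foo", "bar"]
print(f"unknown category dir{pluralism(dirs)}: {', '.join(dirs)}")
# -> unknown category dirs: foo, bar

categories = ["virtual"]
ies = pluralism(categories, singular="y", plural="ies")
print(f"nonexistent profiles/categories entr{ies}: {', '.join(categories)}")
# -> nonexistent profiles/categories entry: virtual
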
diff --git a/src/pkgcheck/checks/python.py b/src/pkgcheck/checks/python.py
index d8f7eb0b..510689bb 100644
--- a/src/pkgcheck/checks/python.py
+++ b/src/pkgcheck/checks/python.py
@@ -15,14 +15,14 @@ from .. import addons, bash, results, sources
from . import Check
# NB: distutils-r1 inherits one of the first two
-ECLASSES = frozenset(['python-r1', 'python-single-r1', 'python-any-r1'])
+ECLASSES = frozenset(["python-r1", "python-single-r1", "python-any-r1"])
-IUSE_PREFIX = 'python_targets_'
-IUSE_PREFIX_S = 'python_single_target_'
+IUSE_PREFIX = "python_targets_"
+IUSE_PREFIX_S = "python_single_target_"
-GITHUB_ARCHIVE_RE = re.compile(r'^https://github\.com/[^/]+/[^/]+/archive/')
-SNAPSHOT_RE = re.compile(r'[a-fA-F0-9]{40}\.tar\.gz$')
-USE_FLAGS_PYTHON_USEDEP = re.compile(r'\[(.+,)?\$\{PYTHON_USEDEP\}(,.+)?\]$')
+GITHUB_ARCHIVE_RE = re.compile(r"^https://github\.com/[^/]+/[^/]+/archive/")
+SNAPSHOT_RE = re.compile(r"[a-fA-F0-9]{40}\.tar\.gz$")
+USE_FLAGS_PYTHON_USEDEP = re.compile(r"\[(.+,)?\$\{PYTHON_USEDEP\}(,.+)?\]$")
def get_python_eclass(pkg):
@@ -30,8 +30,7 @@ def get_python_eclass(pkg):
# All three eclasses block one another, but check and throw an error
# just in case it isn't caught when sourcing the ebuild.
if len(eclasses) > 1:
- raise ValueError(
- f"python eclasses are mutually exclusive: [ {', '.join(eclasses)} ]")
+ raise ValueError(f"python eclasses are mutually exclusive: [ {', '.join(eclasses)} ]")
return next(iter(eclasses)) if eclasses else None
@@ -139,10 +138,7 @@ class DistutilsNonPEP517Build(results.VersionResult, results.Warning):
@property
def desc(self):
- return (
- "uses deprecated non-PEP517 build mode, please switch to "
- "DISTUTILS_USE_PEP517=..."
- )
+ return "uses deprecated non-PEP517 build mode, please switch to " "DISTUTILS_USE_PEP517=..."
class PythonHasVersionUsage(results.LinesResult, results.Style):
@@ -158,7 +154,7 @@ class PythonHasVersionUsage(results.LinesResult, results.Style):
@property
def desc(self):
- return f'usage of has_version {self.lines_str}, replace with python_has_version'
+ return f"usage of has_version {self.lines_str}, replace with python_has_version"
class PythonHasVersionMissingPythonUseDep(results.LineResult, results.Error):
@@ -174,7 +170,9 @@ class PythonHasVersionMissingPythonUseDep(results.LineResult, results.Error):
@property
def desc(self):
- return f'line: {self.lineno}: missing [${{PYTHON_USEDEP}}] suffix for argument "{self.line}"'
+ return (
+ f'line: {self.lineno}: missing [${{PYTHON_USEDEP}}] suffix for argument "{self.line}"'
+ )
class PythonAnyMismatchedUseHasVersionCheck(results.VersionResult, results.Warning):
@@ -198,8 +196,8 @@ class PythonAnyMismatchedUseHasVersionCheck(results.VersionResult, results.Warni
@property
def desc(self):
s = pluralism(self.use_flags)
- use_flags = ', '.join(map(str, self.use_flags))
- return f'{self.dep_category}: mismatch for {self.dep_atom} check use flag{s} [{use_flags}] in {self.location}'
+ use_flags = ", ".join(map(str, self.use_flags))
+ return f"{self.dep_category}: mismatch for {self.dep_atom} check use flag{s} [{use_flags}] in {self.location}"
class PythonAnyMismatchedDepHasVersionCheck(results.VersionResult, results.Warning):
@@ -222,8 +220,9 @@ class PythonAnyMismatchedDepHasVersionCheck(results.VersionResult, results.Warni
@property
def desc(self):
- use_flags = ', '.join(map(str, self.use_flags))
- return f'{self.dep_category}: missing check for {self.dep_atom}[{use_flags}] in {self.location!r}'
+ use_flags = ", ".join(map(str, self.use_flags))
+ return f"{self.dep_category}: missing check for {self.dep_atom}[{use_flags}] in {self.location!r}"
+
class PythonCheck(Check):
"""Python eclass checks.
@@ -233,32 +232,37 @@ class PythonCheck(Check):
"""
_source = sources.EbuildParseRepoSource
- known_results = frozenset([
- MissingPythonEclass, PythonMissingRequiredUse,
- PythonMissingDeps, PythonRuntimeDepInAnyR1, PythonEclassError,
- DistutilsNonPEP517Build,
- PythonHasVersionUsage,
- PythonHasVersionMissingPythonUseDep,
- PythonAnyMismatchedUseHasVersionCheck,
- PythonAnyMismatchedDepHasVersionCheck,
- ])
+ known_results = frozenset(
+ [
+ MissingPythonEclass,
+ PythonMissingRequiredUse,
+ PythonMissingDeps,
+ PythonRuntimeDepInAnyR1,
+ PythonEclassError,
+ DistutilsNonPEP517Build,
+ PythonHasVersionUsage,
+ PythonHasVersionMissingPythonUseDep,
+ PythonAnyMismatchedUseHasVersionCheck,
+ PythonAnyMismatchedDepHasVersionCheck,
+ ]
+ )
has_version_known_flags = {
- '-b': 'BDEPEND',
- '-r': 'RDEPEND',
- '-d': 'DEPEND',
- '--host-root': 'BDEPEND',
+ "-b": "BDEPEND",
+ "-r": "RDEPEND",
+ "-d": "DEPEND",
+ "--host-root": "BDEPEND",
}
has_version_default = {
- 'has_version': 'DEPEND',
- 'python_has_version': 'BDEPEND',
+ "has_version": "DEPEND",
+ "python_has_version": "BDEPEND",
}
eclass_any_dep_func = {
- 'python-single-r1': 'python_gen_cond_dep',
- 'python-any-r1': 'python_gen_any_dep',
- 'python-r1': 'python_gen_any_dep',
+ "python-single-r1": "python_gen_cond_dep",
+ "python-any-r1": "python_gen_any_dep",
+ "python-r1": "python_gen_any_dep",
}
def scan_tree_recursively(self, deptree, expected_cls):
@@ -269,8 +273,7 @@ class PythonCheck(Check):
yield deptree
def check_required_use(self, requse, flags, prefix, container_cls):
- for token in self.scan_tree_recursively(requse,
- values.ContainmentMatch2):
+ for token in self.scan_tree_recursively(requse, values.ContainmentMatch2):
# pkgcore collapses single flag in ||/^^, so expect top-level flags
# when len(flags) == 1
if len(flags) > 1 and not isinstance(token, container_cls):
@@ -281,7 +284,7 @@ class PythonCheck(Check):
continue
name = next(iter(x.vals))
if name.startswith(prefix):
- matched.add(name[len(prefix):])
+ matched.add(name[len(prefix) :])
elif isinstance(token, container_cls):
# skip the ||/^^ if it contains at least one foreign flag
break
@@ -304,7 +307,7 @@ class PythonCheck(Check):
continue
if not any(is_python_interpreter(y) for y in x if isinstance(y, atom)):
continue
- matched.add(flag[len(prefix):])
+ matched.add(flag[len(prefix) :])
if matched == flags:
return True
return False
@@ -322,7 +325,7 @@ class PythonCheck(Check):
pep517_value = None
for var_node, _ in bash.var_assign_query.captures(pkg.tree.root_node):
- var_name = pkg.node_str(var_node.child_by_field_name('name'))
+ var_name = pkg.node_str(var_node.child_by_field_name("name"))
if var_name == "DISTUTILS_OPTIONAL":
has_distutils_optional = True
@@ -334,7 +337,6 @@ class PythonCheck(Check):
# there's nothing for us to do anyway.
has_distutils_deps = True
-
if pep517_value is None:
yield DistutilsNonPEP517Build(pkg=pkg)
elif has_distutils_optional and not has_distutils_deps and pep517_value != "no":
@@ -344,11 +346,14 @@ class PythonCheck(Check):
if "dev-python/gpep517" not in iflatten_instance(pkg.bdepend, atom):
yield PythonMissingDeps("BDEPEND", pkg=pkg, dep_value="DISTUTILS_DEPS")
-
@staticmethod
def _prepare_deps(deps: str):
try:
- deps_str = deps.strip('\"\'').replace('\\$', '$').replace('${PYTHON_USEDEP}', 'pkgcheck_python_usedep')
+ deps_str = (
+ deps.strip("\"'")
+ .replace("\\$", "$")
+ .replace("${PYTHON_USEDEP}", "pkgcheck_python_usedep")
+ )
return iflatten_instance(DepSet.parse(deps_str, atom), atom)
except DepsetParseError:
# if we are unable to parse that dep's string, skip it
@@ -357,18 +362,20 @@ class PythonCheck(Check):
def build_python_gen_any_dep_calls(self, pkg, any_dep_func):
check_deps = defaultdict(set)
for var_node in pkg.global_query(bash.var_assign_query):
- name = pkg.node_str(var_node.child_by_field_name('name'))
- if name in {'DEPEND', 'BDEPEND'}:
+ name = pkg.node_str(var_node.child_by_field_name("name"))
+ if name in {"DEPEND", "BDEPEND"}:
for call_node, _ in bash.cmd_query.captures(var_node):
- call_name = pkg.node_str(call_node.child_by_field_name('name'))
+ call_name = pkg.node_str(call_node.child_by_field_name("name"))
if call_name == any_dep_func and len(call_node.children) > 1:
- check_deps[name].update(self._prepare_deps(
- pkg.node_str(call_node.children[1])))
+ check_deps[name].update(
+ self._prepare_deps(pkg.node_str(call_node.children[1]))
+ )
return {dep: frozenset(atoms) for dep, atoms in check_deps.items()}
- def report_mismatch_check_deps(self, pkg, python_check_deps, has_version_checked_deps, any_dep_func):
- for dep_type in frozenset(python_check_deps.keys()).union(
- has_version_checked_deps.keys()):
+ def report_mismatch_check_deps(
+ self, pkg, python_check_deps, has_version_checked_deps, any_dep_func
+ ):
+ for dep_type in frozenset(python_check_deps.keys()).union(has_version_checked_deps.keys()):
extra = has_version_checked_deps[dep_type] - python_check_deps.get(dep_type, set())
missing = python_check_deps.get(dep_type, set()) - has_version_checked_deps[dep_type]
for diff, other, location in (
@@ -380,28 +387,35 @@ class PythonCheck(Check):
for other_dep in other:
if dep_atom == str(other_dep.versioned_atom):
if diff_flags := set(other_dep.use) - set(dep.use):
- yield PythonAnyMismatchedUseHasVersionCheck(pkg=pkg,
- dep_category=dep_type, dep_atom=dep_atom,
- use_flags=diff_flags, location=location)
+ yield PythonAnyMismatchedUseHasVersionCheck(
+ pkg=pkg,
+ dep_category=dep_type,
+ dep_atom=dep_atom,
+ use_flags=diff_flags,
+ location=location,
+ )
break
else:
- use_flags = {'${PYTHON_USEDEP}'} | set(dep.use) \
- - {'pkgcheck_python_usedep'}
- yield PythonAnyMismatchedDepHasVersionCheck(pkg=pkg,
- dep_category=dep_type, dep_atom=dep_atom,
- use_flags=use_flags, location=location)
+ use_flags = {"${PYTHON_USEDEP}"} | set(dep.use) - {"pkgcheck_python_usedep"}
+ yield PythonAnyMismatchedDepHasVersionCheck(
+ pkg=pkg,
+ dep_category=dep_type,
+ dep_atom=dep_atom,
+ use_flags=use_flags,
+ location=location,
+ )
@staticmethod
def _prepare_dep_type(pkg, dep_type: str) -> str:
- if dep_type == 'BDEPEND' not in pkg.eapi.dep_keys:
- return 'DEPEND'
+ if dep_type == "BDEPEND" not in pkg.eapi.dep_keys:
+ return "DEPEND"
return dep_type
def check_python_check_deps(self, pkg, func_node, python_check_deps, any_dep_func):
has_version_checked_deps = defaultdict(set)
has_version_lines = set()
for node, _ in bash.cmd_query.captures(func_node):
- call_name = pkg.node_str(node.child_by_field_name('name'))
+ call_name = pkg.node_str(node.child_by_field_name("name"))
if call_name == "has_version":
lineno, _ = node.start_point
has_version_lines.add(lineno + 1)
@@ -412,19 +426,21 @@ class PythonCheck(Check):
if new_dep_mode := self.has_version_known_flags.get(arg_name, None):
dep_mode = self._prepare_dep_type(pkg, new_dep_mode)
else:
- arg_name = arg_name.strip('\"\'')
+ arg_name = arg_name.strip("\"'")
if not USE_FLAGS_PYTHON_USEDEP.search(arg_name):
lineno, _ = arg.start_point
yield PythonHasVersionMissingPythonUseDep(
- lineno=lineno+1, line=arg_name, pkg=pkg)
+ lineno=lineno + 1, line=arg_name, pkg=pkg
+ )
else:
- has_version_checked_deps[dep_mode].update(
- self._prepare_deps(arg_name))
+ has_version_checked_deps[dep_mode].update(self._prepare_deps(arg_name))
if has_version_lines:
yield PythonHasVersionUsage(lines=sorted(has_version_lines), pkg=pkg)
- yield from self.report_mismatch_check_deps(pkg, python_check_deps, has_version_checked_deps, any_dep_func)
+ yield from self.report_mismatch_check_deps(
+ pkg, python_check_deps, has_version_checked_deps, any_dep_func
+ )
def feed(self, pkg):
try:
@@ -450,21 +466,21 @@ class PythonCheck(Check):
else:
recommendation = "python-any-r1"
yield MissingPythonEclass(recommendation, attr.upper(), str(p), pkg=pkg)
- elif eclass in ('python-r1', 'python-single-r1'):
+ elif eclass in ("python-r1", "python-single-r1"):
# grab Python implementations from IUSE
- iuse = {x.lstrip('+-') for x in pkg.iuse}
+ iuse = {x.lstrip("+-") for x in pkg.iuse}
- if eclass == 'python-r1':
- flags = {x[len(IUSE_PREFIX):] for x in iuse if x.startswith(IUSE_PREFIX)}
+ if eclass == "python-r1":
+ flags = {x[len(IUSE_PREFIX) :] for x in iuse if x.startswith(IUSE_PREFIX)}
req_use_args = (flags, IUSE_PREFIX, OrRestriction)
else:
- flags = {x[len(IUSE_PREFIX_S):] for x in iuse if x.startswith(IUSE_PREFIX_S)}
+ flags = {x[len(IUSE_PREFIX_S) :] for x in iuse if x.startswith(IUSE_PREFIX_S)}
req_use_args = (flags, IUSE_PREFIX_S, JustOneRestriction)
if not self.check_required_use(pkg.required_use, *req_use_args):
yield PythonMissingRequiredUse(pkg=pkg)
if not self.check_depend(pkg.rdepend, *(req_use_args[:2])):
- yield PythonMissingDeps('RDEPEND', pkg=pkg)
+ yield PythonMissingDeps("RDEPEND", pkg=pkg)
else: # python-any-r1
for attr in ("rdepend", "pdepend"):
for p in iflatten_instance(getattr(pkg, attr), atom):
@@ -476,10 +492,12 @@ class PythonCheck(Check):
for attr in ("depend", "bdepend")
for p in iflatten_instance(getattr(pkg, attr), atom)
):
- yield PythonMissingDeps('DEPEND', pkg=pkg)
+ yield PythonMissingDeps("DEPEND", pkg=pkg)
# We're not interested in testing fake objects from TestPythonCheck
- if eclass is None or not isinstance(pkg, sources._ParsedPkg) or not hasattr(pkg, 'tree'): # pragma: no cover
+ if (
+ eclass is None or not isinstance(pkg, sources._ParsedPkg) or not hasattr(pkg, "tree")
+ ): # pragma: no cover
return
if "distutils-r1" in pkg.inherited:
@@ -488,9 +506,11 @@ class PythonCheck(Check):
any_dep_func = self.eclass_any_dep_func[eclass]
python_check_deps = self.build_python_gen_any_dep_calls(pkg, any_dep_func)
for func_node, _ in bash.func_query.captures(pkg.tree.root_node):
- func_name = pkg.node_str(func_node.child_by_field_name('name'))
+ func_name = pkg.node_str(func_node.child_by_field_name("name"))
if func_name == "python_check_deps":
- yield from self.check_python_check_deps(pkg, func_node, python_check_deps, any_dep_func)
+ yield from self.check_python_check_deps(
+ pkg, func_node, python_check_deps, any_dep_func
+ )
class PythonCompatUpdate(results.VersionResult, results.Info):
@@ -503,8 +523,8 @@ class PythonCompatUpdate(results.VersionResult, results.Info):
@property
def desc(self):
s = pluralism(self.updates)
- updates = ', '.join(self.updates)
- return f'PYTHON_COMPAT update{s} available: {updates}'
+ updates = ", ".join(self.updates)
+ return f"PYTHON_COMPAT update{s} available: {updates}"
class PythonCompatCheck(Check):
@@ -520,32 +540,32 @@ class PythonCompatCheck(Check):
super().__init__(*args)
repo = self.options.target_repo
# sorter for python targets leveraging USE_EXPAND flag ordering from repo
- self.sorter = repo.use_expand_sorter('python_targets')
+ self.sorter = repo.use_expand_sorter("python_targets")
# determine available PYTHON_TARGET use flags
targets = []
for target, _desc in repo.use_expand_desc.get(IUSE_PREFIX[:-1], ()):
- if target[len(IUSE_PREFIX):].startswith('python'):
- targets.append(target[len(IUSE_PREFIX):])
+ if target[len(IUSE_PREFIX) :].startswith("python"):
+ targets.append(target[len(IUSE_PREFIX) :])
multi_targets = tuple(sorted(targets, key=self.sorter))
# determine available PYTHON_SINGLE_TARGET use flags
targets = []
for target, _desc in repo.use_expand_desc.get(IUSE_PREFIX_S[:-1], ()):
- if target[len(IUSE_PREFIX_S):].startswith('python'):
- targets.append(target[len(IUSE_PREFIX_S):])
+ if target[len(IUSE_PREFIX_S) :].startswith("python"):
+ targets.append(target[len(IUSE_PREFIX_S) :])
single_targets = tuple(sorted(targets, key=self.sorter))
self.params = {
- 'python-r1': (multi_targets, IUSE_PREFIX, None),
- 'python-single-r1': (single_targets, (IUSE_PREFIX, IUSE_PREFIX_S), None),
- 'python-any-r1': (multi_targets, (IUSE_PREFIX, IUSE_PREFIX_S), ('depend', 'bdepend')),
+ "python-r1": (multi_targets, IUSE_PREFIX, None),
+ "python-single-r1": (single_targets, (IUSE_PREFIX, IUSE_PREFIX_S), None),
+ "python-any-r1": (multi_targets, (IUSE_PREFIX, IUSE_PREFIX_S), ("depend", "bdepend")),
}
def python_deps(self, deps, prefix):
for dep in (x for x in deps if x.use):
for x in dep.use:
- if x.startswith(('-', '!')):
+ if x.startswith(("-", "!")):
continue
if x.startswith(prefix):
yield dep.no_usedeps
@@ -573,19 +593,25 @@ class PythonCompatCheck(Check):
try:
# determine the latest supported python version
latest_target = sorted(
- (f"python{x.slot.replace('.', '_')}" for x in deps
- if x.key == 'dev-lang/python' and x.slot is not None), key=self.sorter)[-1]
+ (
+ f"python{x.slot.replace('.', '_')}"
+ for x in deps
+ if x.key == "dev-lang/python" and x.slot is not None
+ ),
+ key=self.sorter,
+ )[-1]
except IndexError:
# should be flagged by PythonMissingDeps
return
# ignore pkgs that probably aren't py3 compatible
- if latest_target == 'python2_7':
+ if latest_target == "python2_7":
return
# determine python impls to target
- targets = set(itertools.takewhile(
- lambda x: x != latest_target, reversed(available_targets)))
+ targets = set(
+ itertools.takewhile(lambda x: x != latest_target, reversed(available_targets))
+ )
if targets:
try:
@@ -595,7 +621,9 @@ class PythonCompatCheck(Check):
latest = sorted(self.options.search_repo.match(dep))[-1]
targets.intersection_update(
f"python{x.rsplit('python', 1)[-1]}"
- for x in latest.iuse_stripped if x.startswith(prefix))
+ for x in latest.iuse_stripped
+ if x.startswith(prefix)
+ )
if not targets:
return
except IndexError:
@@ -624,20 +652,20 @@ class PythonGHDistfileSuffix(results.VersionResult, results.Warning):
@property
def desc(self):
- return (f"GitHub archive {self.filename!r} ({self.uri!r}) is not "
- "using '.gh.tar.gz' suffix")
+ return (
+ f"GitHub archive {self.filename!r} ({self.uri!r}) is not " "using '.gh.tar.gz' suffix"
+ )
class PythonGHDistfileSuffixCheck(Check):
- """Check ebuilds with PyPI remotes for missing ".gh.tar.gz" suffixes.
- """
+ """Check ebuilds with PyPI remotes for missing ".gh.tar.gz" suffixes."""
required_addons = (addons.UseAddon,)
known_results = frozenset([PythonGHDistfileSuffix])
def __init__(self, *args, use_addon):
super().__init__(*args)
- self.iuse_filter = use_addon.get_filter('fetchables')
+ self.iuse_filter = use_addon.get_filter("fetchables")
def feed(self, pkg):
# consider only packages with pypi remote-id
@@ -646,10 +674,12 @@ class PythonGHDistfileSuffixCheck(Check):
# look for GitHub archives
fetchables, _ = self.iuse_filter(
- (fetch.fetchable,), pkg,
- pkg.generate_fetchables(allow_missing_checksums=True,
- ignore_unknown_mirrors=True,
- skip_default_mirrors=True))
+ (fetch.fetchable,),
+ pkg,
+ pkg.generate_fetchables(
+ allow_missing_checksums=True, ignore_unknown_mirrors=True, skip_default_mirrors=True
+ ),
+ )
for f in fetchables:
# skip files that have the correct suffix already
if f.filename.endswith(".gh.tar.gz"):
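
Two details in the python.py hunks above are easy to misread; a short sketch with invented sample data:

IUSE_PREFIX = "python_targets_"

# 1) black's `x[len(IUSE_PREFIX) :]` is the same slice as
#    `x[len(IUSE_PREFIX):]`; PEP 8 asks for symmetric spacing around the
#    colon when a slice bound is a complex expression.
iuse = {"+python_targets_python3_10", "python_targets_python3_11", "doc"}
stripped = {x.lstrip("+-") for x in iuse}
flags = {x[len(IUSE_PREFIX) :] for x in stripped if x.startswith(IUSE_PREFIX)}
print(sorted(flags))  # -> ['python3_10', 'python3_11']

# 2) _prepare_dep_type's test works via comparison chaining:
#    `dep_type == "BDEPEND" not in dep_keys` parses as
#    `dep_type == "BDEPEND" and "BDEPEND" not in dep_keys`.
dep_keys = ("DEPEND", "RDEPEND")  # an EAPI without BDEPEND (assumed)
dep_type = "BDEPEND"
print(dep_type == "BDEPEND" not in dep_keys)  # -> True, so "DEPEND" is used
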
diff --git a/src/pkgcheck/checks/repo.py b/src/pkgcheck/checks/repo.py
index b12eb352..8b12f68d 100644
--- a/src/pkgcheck/checks/repo.py
+++ b/src/pkgcheck/checks/repo.py
@@ -28,14 +28,13 @@ class RepoDirCheck(GentooRepoCheck, RepoCheck):
known_results = frozenset([BinaryFile])
# repo root level directories that are ignored
- ignored_root_dirs = frozenset(['.git'])
+ ignored_root_dirs = frozenset([".git"])
def __init__(self, *args, git_addon):
super().__init__(*args)
self.gitignored = git_addon.gitignored
self.repo = self.options.target_repo
- self.ignored_paths = {
- pjoin(self.repo.location, x) for x in self.ignored_root_dirs}
+ self.ignored_paths = {pjoin(self.repo.location, x) for x in self.ignored_root_dirs}
self.dirs = [self.repo.location]
def finish(self):
@@ -47,7 +46,7 @@ class RepoDirCheck(GentooRepoCheck, RepoCheck):
self.dirs.append(entry.path)
elif is_binary(entry.path):
if not self.gitignored(entry.path):
- rel_path = entry.path[len(self.repo.location) + 1:]
+ rel_path = entry.path[len(self.repo.location) + 1 :]
yield BinaryFile(rel_path)
@@ -58,7 +57,7 @@ class EmptyCategoryDir(results.CategoryResult, results.Warning):
@property
def desc(self):
- return f'empty category directory: {self.category}'
+ return f"empty category directory: {self.category}"
class EmptyPackageDir(results.PackageResult, results.Warning):
@@ -68,7 +67,7 @@ class EmptyPackageDir(results.PackageResult, results.Warning):
@property
def desc(self):
- return f'empty package directory: {self.category}/{self.package}'
+ return f"empty package directory: {self.category}/{self.package}"
class EmptyDirsCheck(GentooRepoCheck, RepoCheck):
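
The rel_path slice in RepoDirCheck.finish trims the repository root plus its trailing separator; in isolation, with invented paths:

repo_location = "/var/db/repos/gentoo"
entry_path = repo_location + "/app-misc/hello/files/logo.png"
# `+ 1` also drops the '/' joining the root and the relative part
rel_path = entry_path[len(repo_location) + 1 :]
print(rel_path)  # -> app-misc/hello/files/logo.png
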
diff --git a/src/pkgcheck/checks/repo_metadata.py b/src/pkgcheck/checks/repo_metadata.py
index a8466f0f..003ff891 100644
--- a/src/pkgcheck/checks/repo_metadata.py
+++ b/src/pkgcheck/checks/repo_metadata.py
@@ -79,11 +79,16 @@ class PackageUpdatesCheck(RepoCheck):
"""Scan profiles/updates/* for outdated entries and other issues."""
_source = (sources.EmptySource, (base.profiles_scope,))
- known_results = frozenset([
- MultiMovePackageUpdate, OldMultiMovePackageUpdate,
- OldPackageUpdate, MovedPackageUpdate, BadPackageUpdate,
- RedundantPackageUpdate,
- ])
+ known_results = frozenset(
+ [
+ MultiMovePackageUpdate,
+ OldMultiMovePackageUpdate,
+ OldPackageUpdate,
+ MovedPackageUpdate,
+ BadPackageUpdate,
+ RedundantPackageUpdate,
+ ]
+ )
def __init__(self, *args):
super().__init__(*args)
@@ -92,8 +97,8 @@ class PackageUpdatesCheck(RepoCheck):
def finish(self):
logmap = (
- base.LogMap('pkgcore.log.logger.warning', MovedPackageUpdate),
- base.LogMap('pkgcore.log.logger.error', BadPackageUpdate),
+ base.LogMap("pkgcore.log.logger.warning", MovedPackageUpdate),
+ base.LogMap("pkgcore.log.logger.error", BadPackageUpdate),
)
# convert log warnings/errors into reports
@@ -106,8 +111,8 @@ class PackageUpdatesCheck(RepoCheck):
old_slotmove_updates = {}
for pkg, updates in repo_updates.items():
- move_updates = [x for x in updates if x[0] == 'move']
- slotmove_updates = [x for x in updates if x[0] == 'slotmove']
+ move_updates = [x for x in updates if x[0] == "move"]
+ slotmove_updates = [x for x in updates if x[0] == "slotmove"]
# check for multi-updates, a -> b, b -> c, ...
if len(move_updates) > 1:
@@ -126,7 +131,7 @@ class PackageUpdatesCheck(RepoCheck):
# scan updates for old entries with removed packages
for x in slotmove_updates:
_, pkg, newslot = x
- orig_line = ('slotmove', str(pkg)[:-(len(pkg.slot) + 1)], pkg.slot, newslot)
+ orig_line = ("slotmove", str(pkg)[: -(len(pkg.slot) + 1)], pkg.slot, newslot)
if not self.search_repo.match(pkg.unversioned_atom):
# reproduce updates file line data for result output
old_slotmove_updates[pkg.key] = orig_line
@@ -160,8 +165,8 @@ class UnusedLicenses(results.Warning):
@property
def desc(self):
s = pluralism(self.licenses)
- licenses = ', '.join(self.licenses)
- return f'unused license{s}: {licenses}'
+ licenses = ", ".join(self.licenses)
+ return f"unused license{s}: {licenses}"
class UnusedLicensesCheck(RepoCheck):
@@ -199,8 +204,8 @@ class UnusedMirrors(results.Warning):
@property
def desc(self):
s = pluralism(self.mirrors)
- mirrors = ', '.join(self.mirrors)
- return f'unused mirror{s}: {mirrors}'
+ mirrors = ", ".join(self.mirrors)
+ return f"unused mirror{s}: {mirrors}"
class UnusedMirrorsCheck(MirrorsCheck, RepoCheck):
@@ -234,9 +239,9 @@ class UnusedEclasses(results.Warning):
@property
def desc(self):
- es = pluralism(self.eclasses, plural='es')
- eclasses = ', '.join(self.eclasses)
- return f'unused eclass{es}: {eclasses}'
+ es = pluralism(self.eclasses, plural="es")
+ eclasses = ", ".join(self.eclasses)
+ return f"unused eclass{es}: {eclasses}"
class UnusedEclassesCheck(RepoCheck):
@@ -253,8 +258,9 @@ class UnusedEclassesCheck(RepoCheck):
master_eclasses = set()
for repo in self.options.target_repo.masters:
master_eclasses.update(repo.eclass_cache.eclasses.keys())
- self.unused_eclasses = set(
- self.options.target_repo.eclass_cache.eclasses.keys()) - master_eclasses
+ self.unused_eclasses = (
+ set(self.options.target_repo.eclass_cache.eclasses.keys()) - master_eclasses
+ )
def feed(self, pkg):
self.unused_eclasses.difference_update(pkg.inherited)
@@ -276,8 +282,8 @@ class UnknownLicenses(results.Warning):
@property
def desc(self):
s = pluralism(self.licenses)
- licenses = ', '.join(self.licenses)
- return f'license group {self.group!r} has unknown license{s}: [ {licenses} ]'
+ licenses = ", ".join(self.licenses)
+ return f"license group {self.group!r} has unknown license{s}: [ {licenses} ]"
class LicenseGroupsCheck(RepoCheck):
@@ -307,10 +313,10 @@ class PotentialLocalUse(results.Info):
@property
def desc(self):
s = pluralism(self.pkgs)
- pkgs = ', '.join(self.pkgs)
+ pkgs = ", ".join(self.pkgs)
return (
- f'global USE flag {self.flag!r} is a potential local, '
- f'used by {len(self.pkgs)} package{s}: {pkgs}'
+ f"global USE flag {self.flag!r} is a potential local, "
+ f"used by {len(self.pkgs)} package{s}: {pkgs}"
)
@@ -324,8 +330,8 @@ class UnusedGlobalUse(results.Warning):
@property
def desc(self):
s = pluralism(self.flags)
- flags = ', '.join(self.flags)
- return f'use.desc unused flag{s}: {flags}'
+ flags = ", ".join(self.flags)
+ return f"use.desc unused flag{s}: {flags}"
class UnusedGlobalUseExpand(results.Warning):
@@ -338,8 +344,8 @@ class UnusedGlobalUseExpand(results.Warning):
@property
def desc(self):
s = pluralism(self.flags)
- flags = ', '.join(self.flags)
- return f'unused flag{s}: {flags}'
+ flags = ", ".join(self.flags)
+ return f"unused flag{s}: {flags}"
class PotentialGlobalUse(results.Info):
@@ -354,7 +360,8 @@ class PotentialGlobalUse(results.Info):
def desc(self):
return (
f"local USE flag {self.flag!r} is a potential global "
- f"used by {len(self.pkgs)} packages: {', '.join(self.pkgs)}")
+ f"used by {len(self.pkgs)} packages: {', '.join(self.pkgs)}"
+ )
def _dfs(graph, start, visited=None):
@@ -369,11 +376,16 @@ def _dfs(graph, start, visited=None):
class GlobalUseCheck(RepoCheck):
"""Check global USE and USE_EXPAND flags for various issues."""
- _source = (sources.RepositoryRepoSource, (), (('source', sources.PackageRepoSource),))
+ _source = (sources.RepositoryRepoSource, (), (("source", sources.PackageRepoSource),))
required_addons = (addons.UseAddon,)
- known_results = frozenset([
- PotentialLocalUse, PotentialGlobalUse, UnusedGlobalUse, UnusedGlobalUseExpand,
- ])
+ known_results = frozenset(
+ [
+ PotentialLocalUse,
+ PotentialGlobalUse,
+ UnusedGlobalUse,
+ UnusedGlobalUseExpand,
+ ]
+ )
def __init__(self, *args, use_addon):
super().__init__(*args)
@@ -394,7 +406,7 @@ class GlobalUseCheck(RepoCheck):
# calculate USE flag description difference ratios
diffs = {}
for i, (i_pkg, i_desc) in enumerate(pkgs):
- for j, (j_pkg, j_desc) in enumerate(pkgs[i + 1:]):
+ for j, (j_pkg, j_desc) in enumerate(pkgs[i + 1 :]):
diffs[(i, i + j + 1)] = SequenceMatcher(None, i_desc, j_desc).ratio()
# create an adjacency list using all closely matching flag pairs
@@ -424,11 +436,12 @@ class GlobalUseCheck(RepoCheck):
yield [pkgs[i][0] for i in component]
def finish(self):
- repo_global_use = {
- flag for matcher, (flag, desc) in self.repo.config.use_desc}
+ repo_global_use = {flag for matcher, (flag, desc) in self.repo.config.use_desc}
repo_global_use_expand = {
- flag for use_expand in self.repo.config.use_expand_desc.values()
- for flag, desc in use_expand}
+ flag
+ for use_expand in self.repo.config.use_expand_desc.values()
+ for flag, desc in use_expand
+ }
repo_local_use = self.repo.config.use_local_desc
unused_global_use = []
unused_global_use_expand = []
@@ -481,7 +494,8 @@ class MissingChksum(results.VersionResult, results.Warning):
def desc(self):
return (
f"{self.filename!r} missing required chksums: "
- f"{', '.join(self.missing)}; has chksums: {', '.join(self.existing)}")
+ f"{', '.join(self.missing)}; has chksums: {', '.join(self.existing)}"
+ )
class DeprecatedChksum(results.VersionResult, results.Warning):
@@ -495,8 +509,8 @@ class DeprecatedChksum(results.VersionResult, results.Warning):
@property
def desc(self):
s = pluralism(self.deprecated)
- deprecated = ', '.join(self.deprecated)
- return f'{self.filename!r} has deprecated checksum{s}: {deprecated}'
+ deprecated = ", ".join(self.deprecated)
+ return f"{self.filename!r} has deprecated checksum{s}: {deprecated}"
class MissingManifest(results.VersionResult, results.Error):
@@ -509,8 +523,8 @@ class MissingManifest(results.VersionResult, results.Error):
@property
def desc(self):
s = pluralism(self.files)
- files = ', '.join(self.files)
- return f'distfile{s} missing from Manifest: [ {files} ]'
+ files = ", ".join(self.files)
+ return f"distfile{s} missing from Manifest: [ {files} ]"
class UnknownManifest(results.PackageResult, results.Warning):
@@ -523,8 +537,8 @@ class UnknownManifest(results.PackageResult, results.Warning):
@property
def desc(self):
s = pluralism(self.files)
- files = ', '.join(self.files)
- return f'unknown distfile{s} in Manifest: [ {files} ]'
+ files = ", ".join(self.files)
+ return f"unknown distfile{s} in Manifest: [ {files} ]"
class UnnecessaryManifest(results.PackageResult, results.Warning):
@@ -537,14 +551,14 @@ class UnnecessaryManifest(results.PackageResult, results.Warning):
@property
def desc(self):
s = pluralism(self.files)
- files = ', '.join(self.files)
- return f'unnecessary file{s} in Manifest: [ {files} ]'
+ files = ", ".join(self.files)
+ return f"unnecessary file{s} in Manifest: [ {files} ]"
class InvalidManifest(results.MetadataError, results.PackageResult):
"""Package's Manifest file is invalid."""
- attr = 'manifest'
+ attr = "manifest"
class ManifestCheck(Check):
@@ -556,19 +570,27 @@ class ManifestCheck(Check):
required_addons = (addons.UseAddon,)
_source = sources.PackageRepoSource
- known_results = frozenset([
- MissingChksum, MissingManifest, UnknownManifest, UnnecessaryManifest,
- DeprecatedChksum, InvalidManifest,
- ])
+ known_results = frozenset(
+ [
+ MissingChksum,
+ MissingManifest,
+ UnknownManifest,
+ UnnecessaryManifest,
+ DeprecatedChksum,
+ InvalidManifest,
+ ]
+ )
def __init__(self, *args, use_addon):
super().__init__(*args)
repo = self.options.target_repo
self.preferred_checksums = frozenset(
- repo.config.manifests.hashes if hasattr(repo, 'config') else ())
+ repo.config.manifests.hashes if hasattr(repo, "config") else ()
+ )
self.required_checksums = frozenset(
- repo.config.manifests.required_hashes if hasattr(repo, 'config') else ())
- self.iuse_filter = use_addon.get_filter('fetchables')
+ repo.config.manifests.required_hashes if hasattr(repo, "config") else ()
+ )
+ self.iuse_filter = use_addon.get_filter("fetchables")
def feed(self, pkgset):
pkg_manifest = pkgset[0].manifest
@@ -577,8 +599,10 @@ class ManifestCheck(Check):
for pkg in pkgset:
pkg.release_cached_data()
fetchables, _ = self.iuse_filter(
- (fetch.fetchable,), pkg,
- pkg.generate_fetchables(allow_missing_checksums=True, ignore_unknown_mirrors=True))
+ (fetch.fetchable,),
+ pkg,
+ pkg.generate_fetchables(allow_missing_checksums=True, ignore_unknown_mirrors=True),
+ )
fetchables = set(fetchables)
pkg.release_cached_data()
@@ -593,8 +617,8 @@ class ManifestCheck(Check):
missing = self.required_checksums.difference(f_inst.chksums)
if f_inst.filename not in missing_manifests and missing:
yield MissingChksum(
- f_inst.filename, sorted(missing),
- sorted(f_inst.chksums), pkg=pkg)
+ f_inst.filename, sorted(missing), sorted(f_inst.chksums), pkg=pkg
+ )
elif f_inst.chksums and self.preferred_checksums != frozenset(f_inst.chksums):
deprecated = set(f_inst.chksums).difference(self.preferred_checksums)
yield DeprecatedChksum(f_inst.filename, sorted(deprecated), pkg=pkg)
@@ -602,7 +626,7 @@ class ManifestCheck(Check):
if pkg_manifest.thin:
unnecessary_manifests = []
- for attr in ('aux_files', 'ebuilds', 'misc'):
+ for attr in ("aux_files", "ebuilds", "misc"):
unnecessary_manifests.extend(getattr(pkg_manifest, attr, []))
if unnecessary_manifests:
yield UnnecessaryManifest(sorted(unnecessary_manifests), pkg=pkgset[0])
@@ -624,12 +648,12 @@ class ConflictingChksums(results.VersionResult, results.Error):
@property
def desc(self):
s = pluralism(self.chksums)
- chksums = ', '.join(self.chksums)
+ chksums = ", ".join(self.chksums)
pkgs_s = pluralism(self.pkgs)
- pkgs = ', '.join(self.pkgs)
+ pkgs = ", ".join(self.pkgs)
return (
- f'distfile {self.filename!r} has different checksum{s} '
- f'({chksums}) for package{pkgs_s}: {pkgs}'
+ f"distfile {self.filename!r} has different checksum{s} "
+ f"({chksums}) for package{pkgs_s}: {pkgs}"
)
@@ -644,9 +668,9 @@ class MatchingChksums(results.VersionResult, results.Warning):
@property
def desc(self):
- msg = f'distfile {self.filename!r} matches checksums for {self.orig_file!r}'
- if f'{self.category}/{self.package}' != self.orig_pkg:
- msg += f' from {self.orig_pkg}'
+ msg = f"distfile {self.filename!r} matches checksums for {self.orig_file!r}"
+ if f"{self.category}/{self.package}" != self.orig_pkg:
+ msg += f" from {self.orig_pkg}"
return msg
@@ -657,7 +681,7 @@ class ManifestCollisionCheck(Check):
different filenames with matching checksums.
"""
- _source = (sources.RepositoryRepoSource, (), (('source', sources.PackageRepoSource),))
+ _source = (sources.RepositoryRepoSource, (), (("source", sources.PackageRepoSource),))
known_results = frozenset([ConflictingChksums, MatchingChksums])
def __init__(self, *args):
@@ -665,15 +689,14 @@ class ManifestCollisionCheck(Check):
self.seen_files = {}
self.seen_chksums = {}
# ignore go.mod false positives (issue #228)
- self._ignored_files_re = re.compile(r'^.*%2F@v.*\.mod$')
+ self._ignored_files_re = re.compile(r"^.*%2F@v.*\.mod$")
def _conflicts(self, pkg):
"""Check for similarly named distfiles with different checksums."""
for filename, chksums in pkg.manifest.distfiles.items():
existing = self.seen_files.get(filename)
if existing is None:
- self.seen_files[filename] = (
- [pkg.key], dict(chksums.items()))
+ self.seen_files[filename] = ([pkg.key], dict(chksums.items()))
continue
seen_pkgs, seen_chksums = existing
conflicting_chksums = []
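
ManifestCollisionCheck._conflicts keys its cache on filename and compares stored checksums against each newcomer; a compressed sketch of that bookkeeping (package keys and hashes are invented):

seen_files = {}

def record(pkg_key, filename, chksums):
    existing = seen_files.get(filename)
    if existing is None:
        seen_files[filename] = ([pkg_key], dict(chksums))
        return None
    seen_pkgs, seen_chksums = existing
    # checksum types present on both sides must agree
    conflicting = sorted(
        k for k, v in chksums.items() if seen_chksums.get(k, v) != v
    )
    if conflicting:
        return (filename, conflicting, sorted(seen_pkgs))
    seen_pkgs.append(pkg_key)
    return None

record("dev-foo/a", "foo-1.tar.gz", {"BLAKE2B": "aa"})
print(record("dev-foo/b", "foo-1.tar.gz", {"BLAKE2B": "bb"}))
# -> ('foo-1.tar.gz', ['BLAKE2B'], ['dev-foo/a'])
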
diff --git a/src/pkgcheck/checks/reserved.py b/src/pkgcheck/checks/reserved.py
index 8448179a..a67d1683 100644
--- a/src/pkgcheck/checks/reserved.py
+++ b/src/pkgcheck/checks/reserved.py
@@ -7,12 +7,17 @@ from . import Check
class _ReservedNameCheck(Check):
- reserved_prefixes = ('__', 'abort', 'dyn', 'prep')
- reserved_substrings = ('hook', 'paludis', 'portage') # 'ebuild' is special case
- reserved_ebuild_regex = re.compile(r'(.*[^a-zA-Z])?ebuild.*')
+ reserved_prefixes = ("__", "abort", "dyn", "prep")
+ reserved_substrings = ("hook", "paludis", "portage") # 'ebuild' is special case
+ reserved_ebuild_regex = re.compile(r"(.*[^a-zA-Z])?ebuild.*")
"""Portage variables whose use is half-legitimate and harmless if the package manager doesn't support them."""
- special_whitelist = ('EBUILD_DEATH_HOOKS', 'EBUILD_SUCCESS_HOOKS', 'PORTAGE_QUIET', 'PORTAGE_ACTUAL_DISTDIR')
+ special_whitelist = (
+ "EBUILD_DEATH_HOOKS",
+ "EBUILD_SUCCESS_HOOKS",
+ "PORTAGE_QUIET",
+ "PORTAGE_ACTUAL_DISTDIR",
+ )
"""Approved good exceptions to using of variables."""
variables_usage_whitelist = {"EBUILD_PHASE", "EBUILD_PHASE_FUNC"}
@@ -24,32 +29,37 @@ class _ReservedNameCheck(Check):
test_name = used_name.lower()
for reserved in self.reserved_prefixes:
if test_name.startswith(reserved):
- yield used_name, used_type, reserved, 'prefix', lineno+1
+ yield used_name, used_type, reserved, "prefix", lineno + 1
for reserved in self.reserved_substrings:
if reserved in test_name:
- yield used_name, used_type, reserved, 'substring', lineno+1
+ yield used_name, used_type, reserved, "substring", lineno + 1
if self.reserved_ebuild_regex.match(test_name):
- yield used_name, used_type, 'ebuild', 'substring', lineno+1
+ yield used_name, used_type, "ebuild", "substring", lineno + 1
def _feed(self, item):
- yield from self._check('function', {
- item.node_str(node.child_by_field_name('name')): node.start_point
- for node, _ in bash.func_query.captures(item.tree.root_node)
- })
+ yield from self._check(
+ "function",
+ {
+ item.node_str(node.child_by_field_name("name")): node.start_point
+ for node, _ in bash.func_query.captures(item.tree.root_node)
+ },
+ )
used_variables = {
- item.node_str(node.child_by_field_name('name')): node.start_point
+ item.node_str(node.child_by_field_name("name")): node.start_point
for node, _ in bash.var_assign_query.captures(item.tree.root_node)
}
for node, _ in bash.var_query.captures(item.tree.root_node):
if (name := item.node_str(node)) not in self.variables_usage_whitelist:
used_variables.setdefault(name, node.start_point)
- yield from self._check('variable', used_variables)
+ yield from self._check("variable", used_variables)
class EclassReservedName(results.EclassResult, results.Warning):
"""Eclass uses reserved variable or function name for package manager."""
- def __init__(self, used_name: str, used_type: str, reserved_word: str, reserved_type: str, **kwargs):
+ def __init__(
+ self, used_name: str, used_type: str, reserved_word: str, reserved_type: str, **kwargs
+ ):
super().__init__(**kwargs)
self.used_name = used_name
self.used_type = used_type
@@ -101,7 +111,7 @@ class EbuildReservedCheck(_ReservedNameCheck):
super().__init__(options, **kwargs)
self.phases_hooks = {
eapi_name: {
- f'{prefix}_{phase}' for phase in eapi.phases.values() for prefix in ('pre', 'post')
+ f"{prefix}_{phase}" for phase in eapi.phases.values() for prefix in ("pre", "post")
}
for eapi_name, eapi in EAPI.known_eapis.items()
}
@@ -111,7 +121,9 @@ class EbuildReservedCheck(_ReservedNameCheck):
yield EbuildReservedName(*args, lineno=lineno, line=used_name, pkg=pkg)
for node, _ in bash.func_query.captures(pkg.tree.root_node):
- used_name = pkg.node_str(node.child_by_field_name('name'))
+ used_name = pkg.node_str(node.child_by_field_name("name"))
if used_name in self.phases_hooks[str(pkg.eapi)]:
lineno, _ = node.start_point
- yield EbuildReservedName('function', used_name, 'phase hook', lineno=lineno+1, line=used_name, pkg=pkg)
+ yield EbuildReservedName(
+ "function", used_name, "phase hook", lineno=lineno + 1, line=used_name, pkg=pkg
+ )
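
A stand-alone rendering of the reserved-name tests that _ReservedNameCheck._check applies above (sample names are invented):

import re

reserved_prefixes = ("__", "abort", "dyn", "prep")
reserved_substrings = ("hook", "paludis", "portage")
reserved_ebuild_regex = re.compile(r"(.*[^a-zA-Z])?ebuild.*")

def violations(name):
    test = name.lower()
    for word in reserved_prefixes:
        if test.startswith(word):
            yield (name, word, "prefix")
    for word in reserved_substrings:
        if word in test:
            yield (name, word, "substring")
    if reserved_ebuild_regex.match(test):
        yield (name, "ebuild", "substring")

print(list(violations("dyn_src_unpack")))    # -> [('dyn_src_unpack', 'dyn', 'prefix')]
print(list(violations("my_ebuild_helper")))  # -> [('my_ebuild_helper', 'ebuild', 'substring')]
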
diff --git a/src/pkgcheck/checks/stablereq.py b/src/pkgcheck/checks/stablereq.py
index 3f396e08..57e41a84 100644
--- a/src/pkgcheck/checks/stablereq.py
+++ b/src/pkgcheck/checks/stablereq.py
@@ -20,7 +20,7 @@ class StableRequest(results.VersionResult, results.Info):
@property
def desc(self):
s = pluralism(self.keywords)
- keywords = ', '.join(self.keywords)
+ keywords = ", ".join(self.keywords)
return (
f"slot({self.slot}) no change in {self.age} days "
f"for unstable keyword{s}: [ {keywords} ]"
@@ -37,19 +37,25 @@ class StableRequestCheck(GentooRepoCheck):
Note that packages with no stable keywords won't trigger this at all.
Instead they'll be caught by the UnstableOnly check.
"""
- _source = (sources.PackageRepoSource, (), (('source', sources.UnmaskedRepoSource),))
+
+ _source = (sources.PackageRepoSource, (), (("source", sources.UnmaskedRepoSource),))
required_addons = (addons.git.GitAddon,)
known_results = frozenset([StableRequest])
@staticmethod
def mangle_argparser(parser):
parser.plugin.add_argument(
- '--stabletime', metavar='DAYS', dest='stable_time', default=30,
- type=arghparse.positive_int, help='set number of days before stabilisation',
+ "--stabletime",
+ metavar="DAYS",
+ dest="stable_time",
+ default=30,
+ type=arghparse.positive_int,
+ help="set number of days before stabilisation",
docs="""
An integer number of days before a package version is flagged by
StableRequestCheck. Defaults to 30 days.
- """)
+ """,
+ )
def __init__(self, *args, git_addon):
super().__init__(*args)
@@ -64,7 +70,7 @@ class StableRequestCheck(GentooRepoCheck):
pkg_slotted[pkg.slot].append(pkg)
pkg_keywords.update(pkg.keywords)
- if stable_pkg_keywords := {x for x in pkg_keywords if x[0] not in {'-', '~'}}:
+ if stable_pkg_keywords := {x for x in pkg_keywords if x[0] not in {"-", "~"}}:
for slot, pkgs in sorted(pkg_slotted.items()):
slot_keywords = set().union(*(pkg.keywords for pkg in pkgs))
stable_slot_keywords = slot_keywords.intersection(stable_pkg_keywords)
@@ -82,11 +88,11 @@ class StableRequestCheck(GentooRepoCheck):
added = datetime.fromtimestamp(match.time)
days_old = (self.today - added).days
if days_old >= self.options.stable_time:
- pkg_stable_keywords = {x.lstrip('~') for x in pkg.keywords}
+ pkg_stable_keywords = {x.lstrip("~") for x in pkg.keywords}
if stable_slot_keywords:
keywords = stable_slot_keywords.intersection(pkg_stable_keywords)
else:
keywords = stable_pkg_keywords.intersection(pkg_stable_keywords)
- keywords = sorted('~' + x for x in keywords)
+ keywords = sorted("~" + x for x in keywords)
yield StableRequest(slot, keywords, days_old, pkg=pkg)
break
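
The keyword filtering in StableRequestCheck.feed reduces to set algebra over KEYWORDS; a sketch with invented keyword sets:

pkg_keywords = {"amd64", "~arm64", "-sparc", "~x86"}

# keywords that are neither masked ('-') nor unstable ('~')
stable_pkg_keywords = {x for x in pkg_keywords if x[0] not in {"-", "~"}}
print(stable_pkg_keywords)  # -> {'amd64'}

# a newer version's keywords, '~' stripped for comparison
pkg_stable_keywords = {x.lstrip("~") for x in {"~amd64", "~x86"}}
keywords = sorted("~" + x for x in stable_pkg_keywords & pkg_stable_keywords)
print(keywords)  # -> ['~amd64']
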
diff --git a/src/pkgcheck/checks/unstable_only.py b/src/pkgcheck/checks/unstable_only.py
index 2d08f635..0fc8b9f6 100644
--- a/src/pkgcheck/checks/unstable_only.py
+++ b/src/pkgcheck/checks/unstable_only.py
@@ -18,10 +18,10 @@ class UnstableOnly(results.PackageResult, results.Info):
@property
def desc(self):
- es = pluralism(self.arches, plural='es')
- arches = ', '.join(self.arches)
- versions = ', '.join(self.versions)
- return f'for arch{es}: [ {arches} ], all versions are unstable: [ {versions} ]'
+ es = pluralism(self.arches, plural="es")
+ arches = ", ".join(self.arches)
+ versions = ", ".join(self.versions)
+ return f"for arch{es}: [ {arches} ], all versions are unstable: [ {versions} ]"
class UnstableOnlyCheck(GentooRepoCheck):
@@ -39,10 +39,8 @@ class UnstableOnlyCheck(GentooRepoCheck):
self.arch_restricts = {}
for arch in arches:
self.arch_restricts[arch] = [
- packages.PackageRestriction(
- "keywords", values.ContainmentMatch2((arch,))),
- packages.PackageRestriction(
- "keywords", values.ContainmentMatch2((f"~{arch}",)))
+ packages.PackageRestriction("keywords", values.ContainmentMatch2((arch,))),
+ packages.PackageRestriction("keywords", values.ContainmentMatch2((f"~{arch}",))),
]
def feed(self, pkgset):
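
UnstableOnlyCheck pairs one stable and one unstable keyword restriction per arch; a rough stand-in where pkgcore's ContainmentMatch2 is approximated with set membership (data invented):

def restricts_for(arch):
    return (
        lambda keywords: arch in keywords,        # stable keyword present
        lambda keywords: f"~{arch}" in keywords,  # unstable keyword present
    )

stable, unstable = restricts_for("amd64")
keywords = {"~amd64", "~x86"}
# all versions unstable for amd64: no stable match, an unstable match
print(not stable(keywords) and unstable(keywords))  # -> True
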
diff --git a/src/pkgcheck/checks/visibility.py b/src/pkgcheck/checks/visibility.py
index 021a738f..5440db7f 100644
--- a/src/pkgcheck/checks/visibility.py
+++ b/src/pkgcheck/checks/visibility.py
@@ -12,30 +12,29 @@ from . import Check
class FakeConfigurable:
- "Package wrapper binding profile data."""
+ "Package wrapper binding profile data." ""
configurable = True
- __slots__ = ('use', 'iuse', '_forced_use', '_masked_use', '_pkg_use', '_raw_pkg', '_profile')
+ __slots__ = ("use", "iuse", "_forced_use", "_masked_use", "_pkg_use", "_raw_pkg", "_profile")
def __init__(self, pkg, profile):
- object.__setattr__(self, '_raw_pkg', pkg)
- object.__setattr__(self, '_profile', profile)
-
- object.__setattr__(
- self, '_forced_use', self._profile.forced_use.pull_data(self._raw_pkg))
- object.__setattr__(
- self, '_masked_use', self._profile.masked_use.pull_data(self._raw_pkg))
- object.__setattr__(
- self, '_pkg_use', self._profile.pkg_use.pull_data(self._raw_pkg))
- use_defaults = {x[1:] for x in pkg.iuse if x[0] == '+'}
- enabled_use = (use_defaults | profile.use | self._pkg_use | self._forced_use) - self._masked_use
- object.__setattr__(
- self, 'use', frozenset(enabled_use & (profile.iuse_effective | pkg.iuse_effective)))
+ object.__setattr__(self, "_raw_pkg", pkg)
+ object.__setattr__(self, "_profile", profile)
+
+ object.__setattr__(self, "_forced_use", self._profile.forced_use.pull_data(self._raw_pkg))
+ object.__setattr__(self, "_masked_use", self._profile.masked_use.pull_data(self._raw_pkg))
+ object.__setattr__(self, "_pkg_use", self._profile.pkg_use.pull_data(self._raw_pkg))
+ use_defaults = {x[1:] for x in pkg.iuse if x[0] == "+"}
+ enabled_use = (
+ use_defaults | profile.use | self._pkg_use | self._forced_use
+ ) - self._masked_use
object.__setattr__(
- self, 'iuse', frozenset(profile.iuse_effective.union(pkg.iuse_stripped)))
+ self, "use", frozenset(enabled_use & (profile.iuse_effective | pkg.iuse_effective))
+ )
+ object.__setattr__(self, "iuse", frozenset(profile.iuse_effective.union(pkg.iuse_stripped)))
def request_enable(self, attr, *vals):
- if attr != 'use':
+ if attr != "use":
return False
set_vals = frozenset(vals)
@@ -47,7 +46,7 @@ class FakeConfigurable:
return set_vals.isdisjoint(self._masked_use)
def request_disable(self, attr, *vals):
- if attr != 'use':
+ if attr != "use":
return False
set_vals = frozenset(vals)
@@ -70,7 +69,7 @@ class FakeConfigurable:
__getattr__ = klass.GetAttrProxy("_raw_pkg")
def __setattr__(self, attr, val):
- raise AttributeError(self, 'is immutable')
+ raise AttributeError(self, "is immutable")
class _BlockMemoryExhaustion(Exception):
@@ -78,10 +77,13 @@ class _BlockMemoryExhaustion(Exception):
# This is fast path code, hence the separated implementations.
-if getattr(atom, '_TRANSITIVE_USE_ATOM_BUG_IS_FIXED', False):
+if getattr(atom, "_TRANSITIVE_USE_ATOM_BUG_IS_FIXED", False):
+
def _eapi2_flatten(val):
return isinstance(val, atom) and not isinstance(val, transitive_use_atom)
+
else:
+
def _eapi2_flatten(val):
if isinstance(val, transitive_use_atom):
if len([x for x in val.use if x.endswith("?")]) > 16:
@@ -107,13 +109,13 @@ class VisibleVcsPkg(results.VersionResult, results.Warning):
@property
def desc(self):
if self.num_profiles is not None and self.num_profiles > 1:
- num_profiles = f' ({self.num_profiles} total)'
+ num_profiles = f" ({self.num_profiles} total)"
else:
- num_profiles = ''
+ num_profiles = ""
return (
f'VCS version visible for KEYWORDS="{self.arch}", '
- f'profile {self.profile}{num_profiles}'
+ f"profile {self.profile}{num_profiles}"
)
@@ -128,8 +130,8 @@ class NonexistentDeps(results.VersionResult, results.Warning):
@property
def desc(self):
s = pluralism(self.nonexistent)
- nonexistent = ', '.join(self.nonexistent)
- return f'{self.attr}: nonexistent package{s}: {nonexistent}'
+ nonexistent = ", ".join(self.nonexistent)
+ return f"{self.attr}: nonexistent package{s}: {nonexistent}"
class UncheckableDep(results.VersionResult, results.Warning):
@@ -147,8 +149,17 @@ class UncheckableDep(results.VersionResult, results.Warning):
class NonsolvableDeps(results.VersionResult, results.AliasResult, results.Error):
"""No potential solution for a depset attribute."""
- def __init__(self, attr, keyword, profile, deps, profile_status,
- profile_deprecated, num_profiles=None, **kwargs):
+ def __init__(
+ self,
+ attr,
+ keyword,
+ profile,
+ deps,
+ profile_status,
+ profile_deprecated,
+ num_profiles=None,
+ **kwargs,
+ ):
super().__init__(**kwargs)
self.attr = attr
self.keyword = keyword
@@ -160,12 +171,12 @@ class NonsolvableDeps(results.VersionResult, results.AliasResult, results.Error)
@property
def desc(self):
- profile_status = 'deprecated ' if self.profile_deprecated else ''
- profile_status += self.profile_status or 'custom'
+ profile_status = "deprecated " if self.profile_deprecated else ""
+ profile_status += self.profile_status or "custom"
if self.num_profiles is not None and self.num_profiles > 1:
- num_profiles = f' ({self.num_profiles} total)'
+ num_profiles = f" ({self.num_profiles} total)"
else:
- num_profiles = ''
+ num_profiles = ""
return (
f"nonsolvable depset({self.attr}) keyword({self.keyword}) "
@@ -186,7 +197,7 @@ class NonsolvableDepsInExp(NonsolvableDeps):
"""No potential solution for dependency on exp profile."""
# results require experimental profiles to be enabled
- _profile = 'exp'
+ _profile = "exp"
class VisibilityCheck(feeds.EvaluateDepSet, feeds.QueryCache, Check):
@@ -198,18 +209,24 @@ class VisibilityCheck(feeds.EvaluateDepSet, feeds.QueryCache, Check):
"""
required_addons = (addons.profiles.ProfileAddon,)
- known_results = frozenset([
- VisibleVcsPkg, NonexistentDeps, UncheckableDep,
- NonsolvableDepsInStable, NonsolvableDepsInDev, NonsolvableDepsInExp,
- ])
+ known_results = frozenset(
+ [
+ VisibleVcsPkg,
+ NonexistentDeps,
+ UncheckableDep,
+ NonsolvableDepsInStable,
+ NonsolvableDepsInDev,
+ NonsolvableDepsInExp,
+ ]
+ )
def __init__(self, *args, profile_addon):
super().__init__(*args, profile_addon=profile_addon)
self.profiles = profile_addon
self.report_cls_map = {
- 'stable': NonsolvableDepsInStable,
- 'dev': NonsolvableDepsInDev,
- 'exp': NonsolvableDepsInExp,
+ "stable": NonsolvableDepsInStable,
+ "dev": NonsolvableDepsInDev,
+ "exp": NonsolvableDepsInExp,
}
def feed(self, pkg):
@@ -238,8 +255,7 @@ class VisibilityCheck(feeds.EvaluateDepSet, feeds.QueryCache, Check):
# on don't have to use the slower get method
self.query_cache[node] = ()
else:
- matches = caching_iter(
- self.options.search_repo.itermatch(node))
+ matches = caching_iter(self.options.search_repo.itermatch(node))
if matches:
self.query_cache[node] = matches
if orig_node is not node:
@@ -263,10 +279,8 @@ class VisibilityCheck(feeds.EvaluateDepSet, feeds.QueryCache, Check):
continue
depset = getattr(pkg, attr)
profile_failures = defaultdict(lambda: defaultdict(set))
- for edepset, profiles in self.collapse_evaluate_depset(
- pkg, attr, depset):
- for profile, failures in self.process_depset(
- pkg, attr, depset, edepset, profiles):
+ for edepset, profiles in self.collapse_evaluate_depset(pkg, attr, depset):
+ for profile, failures in self.process_depset(pkg, attr, depset, edepset, profiles):
failures = tuple(map(str, sorted(stable_unique(failures))))
profile_failures[failures][profile.status].add(profile)
@@ -276,24 +290,36 @@ class VisibilityCheck(feeds.EvaluateDepSet, feeds.QueryCache, Check):
for failures, profiles in profile_failures.items():
for profile_status, cls in self.report_cls_map.items():
for profile in sorted(
- profiles.get(profile_status, ()),
- key=attrgetter('key', 'name')):
+ profiles.get(profile_status, ()), key=attrgetter("key", "name")
+ ):
yield cls(
- attr, profile.key, profile.name, failures,
- profile_status, profile.deprecated, pkg=pkg)
+ attr,
+ profile.key,
+ profile.name,
+ failures,
+ profile_status,
+ profile.deprecated,
+ pkg=pkg,
+ )
else:
# only report one failure per depset per profile type in regular mode
for failures, profiles in profile_failures.items():
for profile_status, cls in self.report_cls_map.items():
status_profiles = sorted(
- profiles.get(profile_status, ()),
- key=attrgetter('key', 'name'))
+ profiles.get(profile_status, ()), key=attrgetter("key", "name")
+ )
if status_profiles:
profile = status_profiles[0]
yield cls(
- attr, profile.key, profile.name,
- failures, profile_status,
- profile.deprecated, len(status_profiles), pkg=pkg)
+ attr,
+ profile.key,
+ profile.name,
+ failures,
+ profile_status,
+ profile.deprecated,
+ len(status_profiles),
+ pkg=pkg,
+ )
def check_visibility_vcs(self, pkg):
visible = []
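
Stripped of the profile plumbing, the effective-USE computation in FakeConfigurable.__init__ is a few set operations; a sketch with invented flag sets:

iuse = {"+ssl", "gtk", "-debug"}
use_defaults = {x[1:] for x in iuse if x[0] == "+"}  # {'ssl'}
profile_use, pkg_use, forced_use = {"gtk"}, set(), {"ssl"}
masked_use = {"debug"}

enabled_use = (use_defaults | profile_use | pkg_use | forced_use) - masked_use
print(sorted(enabled_use))  # -> ['gtk', 'ssl']
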
diff --git a/src/pkgcheck/checks/whitespace.py b/src/pkgcheck/checks/whitespace.py
index 356a3634..823a8cfd 100644
--- a/src/pkgcheck/checks/whitespace.py
+++ b/src/pkgcheck/checks/whitespace.py
@@ -68,25 +68,48 @@ class BadWhitespaceCharacter(results.LineResult, results.Warning):
@property
def desc(self):
return (
- f'bad whitespace character {self.char} on line {self.lineno}'
- f', char {self.position}: {self.line}'
+ f"bad whitespace character {self.char} on line {self.lineno}"
+ f", char {self.position}: {self.line}"
)
class WhitespaceData(NamedTuple):
"""Data format to register hardcoded list of bad whitespace characters."""
+
unicode_version: str
chars: tuple
whitespace_data = WhitespaceData(
- unicode_version='12.1.0',
+ unicode_version="12.1.0",
chars=(
- '\x0b', '\x0c', '\r', '\x1c', '\x1d', '\x1e', '\x1f', '\x85', '\xa0',
- '\u1680', '\u2000', '\u2001', '\u2002', '\u2003', '\u2004', '\u2005',
- '\u2006', '\u2007', '\u2008', '\u2009', '\u200a', '\u2028', '\u2029',
- '\u202f', '\u205f', '\u3000',
- )
+ "\x0b",
+ "\x0c",
+ "\r",
+ "\x1c",
+ "\x1d",
+ "\x1e",
+ "\x1f",
+ "\x85",
+ "\xa0",
+ "\u1680",
+ "\u2000",
+ "\u2001",
+ "\u2002",
+ "\u2003",
+ "\u2004",
+ "\u2005",
+ "\u2006",
+ "\u2007",
+ "\u2008",
+ "\u2009",
+ "\u200a",
+ "\u2028",
+ "\u2029",
+ "\u202f",
+ "\u205f",
+ "\u3000",
+ ),
)
@@ -94,17 +117,23 @@ class WhitespaceCheck(Check):
"""Scan ebuild for useless whitespace."""
_source = sources.EbuildFileRepoSource
- known_results = frozenset([
- WhitespaceFound, WrongIndentFound, DoubleEmptyLine,
- TrailingEmptyLine, NoFinalNewline, BadWhitespaceCharacter
- ])
+ known_results = frozenset(
+ [
+ WhitespaceFound,
+ WrongIndentFound,
+ DoubleEmptyLine,
+ TrailingEmptyLine,
+ NoFinalNewline,
+ BadWhitespaceCharacter,
+ ]
+ )
- _indent_regex = re.compile('^\t* \t+')
+ _indent_regex = re.compile("^\t* \t+")
def __init__(self, *args):
super().__init__(*args)
- bad_whitespace = ''.join(whitespace_data.chars)
- self.bad_whitespace_regex = re.compile(rf'(?P<char>[{bad_whitespace}])')
+ bad_whitespace = "".join(whitespace_data.chars)
+ self.bad_whitespace_regex = re.compile(rf"(?P<char>[{bad_whitespace}])")
def feed(self, pkg):
lastlineempty = False
@@ -116,14 +145,18 @@ class WhitespaceCheck(Check):
for lineno, line in enumerate(pkg.lines, 1):
for match in self.bad_whitespace_regex.finditer(line):
yield BadWhitespaceCharacter(
- repr(match.group('char')), match.end('char'),
- line=repr(line), lineno=lineno, pkg=pkg)
-
- if line != '\n':
+ repr(match.group("char")),
+ match.end("char"),
+ line=repr(line),
+ lineno=lineno,
+ pkg=pkg,
+ )
+
+ if line != "\n":
lastlineempty = False
- if line[-2:-1] == ' ' or line[-2:-1] == '\t':
+ if line[-2:-1] == " " or line[-2:-1] == "\t":
trailing.append(lineno)
- elif line[0] == ' ':
+ elif line[0] == " ":
leading.append(lineno)
if self._indent_regex.match(line):
indent.append(lineno)
@@ -132,9 +165,9 @@ class WhitespaceCheck(Check):
else:
lastlineempty = True
if trailing:
- yield WhitespaceFound('trailing', lines=trailing, pkg=pkg)
+ yield WhitespaceFound("trailing", lines=trailing, pkg=pkg)
if leading:
- yield WhitespaceFound('leading', lines=leading, pkg=pkg)
+ yield WhitespaceFound("leading", lines=leading, pkg=pkg)
if indent:
yield WrongIndentFound(indent, pkg=pkg)
if double_empty:
@@ -143,5 +176,5 @@ class WhitespaceCheck(Check):
yield TrailingEmptyLine(pkg=pkg)
# Dealing with empty ebuilds is just paranoia
- if pkg.lines and not pkg.lines[-1].endswith('\n'):
+ if pkg.lines and not pkg.lines[-1].endswith("\n"):
yield NoFinalNewline(pkg=pkg)
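
WhitespaceCheck folds the character table above into one regex character class; a cut-down sketch using a subset of the characters and an invented line:

import re

chars = ("\x0b", "\x0c", "\r", "\xa0", "\u2028")
bad_whitespace_regex = re.compile(rf"(?P<char>[{''.join(chars)}])")

line = "inherit foo\xa0bar\n"
for match in bad_whitespace_regex.finditer(line):
    # report the repr'd character and its 1-based end position,
    # as BadWhitespaceCharacter does
    print(repr(match.group("char")), match.end("char"))
# -> '\xa0' 12
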
diff --git a/src/pkgcheck/cli.py b/src/pkgcheck/cli.py
index 5450788e..55e9f30a 100644
--- a/src/pkgcheck/cli.py
+++ b/src/pkgcheck/cli.py
@@ -14,10 +14,9 @@ from . import const
class Tool(commandline.Tool):
-
def main(self):
# suppress all pkgcore log messages
- logging.getLogger('pkgcore').setLevel(100)
+ logging.getLogger("pkgcore").setLevel(100)
return super().main()
@@ -50,14 +49,16 @@ class ConfigFileParser:
for f in configs:
config.read(f)
except configparser.ParsingError as e:
- self.parser.error(f'parsing config file failed: {e}')
+ self.parser.error(f"parsing config file failed: {e}")
return config
def parse_config_sections(self, namespace, sections):
"""Parse options from a given iterable of config section names."""
- with patch('snakeoil.cli.arghparse.ArgumentParser.error', self._config_error):
+ with patch("snakeoil.cli.arghparse.ArgumentParser.error", self._config_error):
for section in (x for x in sections if x in self.config):
- config_args = [f'--{k}={v}' if v else f'--{k}' for k, v in self.config.items(section)]
+ config_args = [
+ f"--{k}={v}" if v else f"--{k}" for k, v in self.config.items(section)
+ ]
namespace, args = self.parser.parse_known_optionals(config_args, namespace)
if args:
self.parser.error(f"unknown arguments: {' '.join(args)}")
@@ -74,16 +75,16 @@ class ConfigFileParser:
self._config = None
# load default options
- namespace = self.parse_config_sections(namespace, ['DEFAULT'])
+ namespace = self.parse_config_sections(namespace, ["DEFAULT"])
# load any defined checksets -- empty checksets are ignored
- if 'CHECKSETS' in self.config:
- for k, v in self.config.items('CHECKSETS'):
+ if "CHECKSETS" in self.config:
+ for k, v in self.config.items("CHECKSETS"):
if v:
- namespace.config_checksets[k] = re.split('[,\n]', v.strip())
+ namespace.config_checksets[k] = re.split("[,\n]", v.strip())
return namespace
def _config_error(self, message, status=2):
"""Stub to replace error method that notes config failure."""
- self.parser.exit(status, f'{self.parser.prog}: failed loading config: {message}\n')
+ self.parser.exit(status, f"{self.parser.prog}: failed loading config: {message}\n")
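
ConfigFileParser.parse_config_sections rewrites each config key/value as a long option before re-parsing; the core transform in isolation (section contents invented):

items = {"keywords": "StableRequest", "verbose": ""}
config_args = [f"--{k}={v}" if v else f"--{k}" for k, v in items.items()]
print(config_args)  # -> ['--keywords=StableRequest', '--verbose']
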
diff --git a/src/pkgcheck/const.py b/src/pkgcheck/const.py
index 7e440ce4..61b0922f 100644
--- a/src/pkgcheck/const.py
+++ b/src/pkgcheck/const.py
@@ -25,17 +25,20 @@ def _GET_CONST(attr, default_value):
# determine XDG compatible paths
for xdg_var, var_name, fallback_dir in (
- ('XDG_CONFIG_HOME', 'USER_CONFIG_PATH', '~/.config'),
- ('XDG_CACHE_HOME', 'USER_CACHE_PATH', '~/.cache'),
- ('XDG_DATA_HOME', 'USER_DATA_PATH', '~/.local/share')):
+ ("XDG_CONFIG_HOME", "USER_CONFIG_PATH", "~/.config"),
+ ("XDG_CACHE_HOME", "USER_CACHE_PATH", "~/.cache"),
+ ("XDG_DATA_HOME", "USER_DATA_PATH", "~/.local/share"),
+):
setattr(
- _module, var_name,
- os.environ.get(xdg_var, os.path.join(os.path.expanduser(fallback_dir), 'pkgcheck')))
-
-REPO_PATH = _GET_CONST('REPO_PATH', _reporoot)
-DATA_PATH = _GET_CONST('DATA_PATH', '%(REPO_PATH)s/data/share/pkgcheck')
-
-USER_CACHE_DIR = getattr(_module, 'USER_CACHE_PATH')
-USER_CONF_FILE = os.path.join(getattr(_module, 'USER_CONFIG_PATH'), 'pkgcheck.conf')
-SYSTEM_CONF_FILE = '/etc/pkgcheck/pkgcheck.conf'
-BUNDLED_CONF_FILE = os.path.join(DATA_PATH, 'pkgcheck.conf')
+ _module,
+ var_name,
+ os.environ.get(xdg_var, os.path.join(os.path.expanduser(fallback_dir), "pkgcheck")),
+ )
+
+REPO_PATH = _GET_CONST("REPO_PATH", _reporoot)
+DATA_PATH = _GET_CONST("DATA_PATH", "%(REPO_PATH)s/data/share/pkgcheck")
+
+USER_CACHE_DIR = getattr(_module, "USER_CACHE_PATH")
+USER_CONF_FILE = os.path.join(getattr(_module, "USER_CONFIG_PATH"), "pkgcheck.conf")
+SYSTEM_CONF_FILE = "/etc/pkgcheck/pkgcheck.conf"
+BUNDLED_CONF_FILE = os.path.join(DATA_PATH, "pkgcheck.conf")
diff --git a/src/pkgcheck/feeds.py b/src/pkgcheck/feeds.py
index 0edffc2c..e09874dc 100644
--- a/src/pkgcheck/feeds.py
+++ b/src/pkgcheck/feeds.py
@@ -38,15 +38,16 @@ class Feed(base.Addon):
class QueryCache(Feed):
-
@staticmethod
def mangle_argparser(parser):
- group = parser.add_argument_group('query caching')
+ group = parser.add_argument_group("query caching")
group.add_argument(
- '--reset-caching-per', dest='query_caching_freq',
- choices=('version', 'package', 'category'), default='package',
- help='control how often the cache is cleared '
- '(version, package or category)')
+ "--reset-caching-per",
+ dest="query_caching_freq",
+ choices=("version", "package", "category"),
+ default="package",
+ help="control how often the cache is cleared (version, package or category)",
+ )
@staticmethod
def _version(item):
@@ -63,7 +64,7 @@ class QueryCache(Feed):
def __init__(self, options):
super().__init__(options)
self.query_cache = {}
- self._keyfunc = getattr(self, f'_{options.query_caching_freq}')
+ self._keyfunc = getattr(self, f"_{options.query_caching_freq}")
self._key = None
def feed(self, item):
@@ -76,7 +77,6 @@ class QueryCache(Feed):
class EvaluateDepSet(Feed):
-
def __init__(self, *args, profile_addon):
super().__init__(*args)
self.pkg_evaluate_depsets_cache = {}
@@ -95,15 +95,15 @@ class EvaluateDepSet(Feed):
self.pkg_profiles_cache[pkg] = profile_grps
# strip use dep defaults so known flags get identified correctly
- diuse = frozenset(
- x[:-3] if x[-1] == ')' else x for x in depset.known_conditionals)
+ diuse = frozenset(x[:-3] if x[-1] == ")" else x for x in depset.known_conditionals)
collapsed = {}
for profiles in profile_grps:
immutable, enabled = profiles[0].identify_use(pkg, diuse)
collapsed.setdefault((immutable, enabled), []).extend(profiles)
- return [(depset.evaluate_depset(k[1], tristate_filter=k[0]), v)
- for k, v in collapsed.items()]
+ return [
+ (depset.evaluate_depset(k[1], tristate_filter=k[0]), v) for k, v in collapsed.items()
+ ]
def collapse_evaluate_depset(self, pkg, attr, depset):
depset_profiles = self.pkg_evaluate_depsets_cache.get((pkg, attr))
diff --git a/src/pkgcheck/log.py b/src/pkgcheck/log.py
index 0bc11269..6db8441b 100644
--- a/src/pkgcheck/log.py
+++ b/src/pkgcheck/log.py
@@ -9,4 +9,4 @@ import logging
# overrides the root logger handler.
logging.basicConfig()
-logger = logging.getLogger('pkgcheck')
+logger = logging.getLogger("pkgcheck")
diff --git a/src/pkgcheck/objects.py b/src/pkgcheck/objects.py
index b91d07b6..51f2bed2 100644
--- a/src/pkgcheck/objects.py
+++ b/src/pkgcheck/objects.py
@@ -21,15 +21,15 @@ except ImportError: # pragma: no cover
def _find_modules(module): # pragma: no cover
"""Generator of all public modules under a given module."""
- if getattr(module, '__path__', False):
- for _imp, name, _ in pkgutil.walk_packages(module.__path__, module.__name__ + '.'):
+ if getattr(module, "__path__", False):
+ for _imp, name, _ in pkgutil.walk_packages(module.__path__, module.__name__ + "."):
# skip "private" modules
- if name.rsplit('.', 1)[1][0] == '_':
+ if name.rsplit(".", 1)[1][0] == "_":
continue
try:
yield import_module(name)
except ImportError as e:
- raise Exception(f'failed importing {name!r}: {e}')
+ raise Exception(f"failed importing {name!r}: {e}")
else:
yield module
@@ -37,27 +37,31 @@ def _find_modules(module): # pragma: no cover
def _find_classes(module, matching_cls, skip=()): # pragma: no cover
"""Generator of all subclasses of a selected class under a given module."""
for _name, cls in inspect.getmembers(module):
- if (inspect.isclass(cls) and issubclass(cls, matching_cls)
- and cls.__name__[0] != '_' and cls not in skip):
+ if (
+ inspect.isclass(cls)
+ and issubclass(cls, matching_cls)
+ and cls.__name__[0] != "_"
+ and cls not in skip
+ ):
yield cls
def _find_obj_classes(module_name, target_cls): # pragma: no cover
"""Determine mapping of object class names to class objects."""
- module = import_module(f'.{module_name}', 'pkgcheck')
- cls_module, cls_name = target_cls.rsplit('.', 1)
- matching_cls = getattr(import_module(f'.{cls_module}', 'pkgcheck'), cls_name)
+ module = import_module(f".{module_name}", "pkgcheck")
+ cls_module, cls_name = target_cls.rsplit(".", 1)
+ matching_cls = getattr(import_module(f".{cls_module}", "pkgcheck"), cls_name)
# skip top-level, base classes
base_classes = {matching_cls}
- if os.path.basename(module.__file__) == '__init__.py':
+ if os.path.basename(module.__file__) == "__init__.py":
base_classes.update(_find_classes(module, matching_cls))
classes = {}
for m in _find_modules(module):
for cls in _find_classes(m, matching_cls, skip=base_classes):
if cls.__name__ in classes and classes[cls.__name__] != cls:
- raise Exception(f'object name overlap: {cls} and {classes[cls.__name__]}')
+ raise Exception(f"object name overlap: {cls} and {classes[cls.__name__]}")
classes[cls.__name__] = cls
return classes
@@ -120,7 +124,7 @@ def _keyword_alias(alias=None):
def __set_name__(self, cls, name):
key = alias if alias is not None else name
cls._alias_keywords.add(key)
- jit_attr = klass.jit_attr_named(f'_{self.func.__name__}')
+ jit_attr = klass.jit_attr_named(f"_{self.func.__name__}")
func = jit_attr(partial(self.func))
setattr(cls, name, func)
@@ -136,6 +140,7 @@ class _KeywordsLazyDict(_LazyDict):
def aliases(self):
"""Mapping of aliases to their respective mappings."""
from . import results
+
alias_map = {x: getattr(self, x) for x in self._alias_keywords}
# support class-based aliasing
for k, v in self._dict.items():
@@ -147,24 +152,28 @@ class _KeywordsLazyDict(_LazyDict):
def error(self):
"""Mapping of all error level keywords."""
from . import results
+
return ImmutableDict(self.select(results.Error))
@_keyword_alias()
def warning(self):
"""Mapping of all warning level keywords."""
from . import results
+
return ImmutableDict(self.select(results.Warning))
@_keyword_alias()
def style(self):
"""Mapping of all style level keywords."""
from . import results
+
return ImmutableDict(self.select(results.Style))
@_keyword_alias()
def info(self):
"""Mapping of all info level keywords."""
from . import results
+
return ImmutableDict(self.select(results.Info))
@klass.jit_attr
@@ -180,11 +189,12 @@ class _ChecksLazyDict(_LazyDict):
def default(self):
"""Mapping of all default-enabled checks."""
from . import checks
- return ImmutableDict({
- k: v for k, v in self._dict.items()
- if not issubclass(v, checks.OptionalCheck)})
+
+ return ImmutableDict(
+ {k: v for k, v in self._dict.items() if not issubclass(v, checks.OptionalCheck)}
+ )
-KEYWORDS = _KeywordsLazyDict('KEYWORDS', ('checks', 'results.Result'))
-CHECKS = _ChecksLazyDict('CHECKS', ('checks', 'checks.Check'))
-REPORTERS = _LazyDict('REPORTERS', ('reporters', 'reporters.Reporter'))
+KEYWORDS = _KeywordsLazyDict("KEYWORDS", ("checks", "results.Result"))
+CHECKS = _ChecksLazyDict("CHECKS", ("checks", "checks.Check"))
+REPORTERS = _LazyDict("REPORTERS", ("reporters", "reporters.Reporter"))
diff --git a/src/pkgcheck/packages.py b/src/pkgcheck/packages.py
index e2a07aa1..195b9d19 100644
--- a/src/pkgcheck/packages.py
+++ b/src/pkgcheck/packages.py
@@ -11,6 +11,7 @@ from snakeoil import klass
@dataclass(frozen=True, eq=False)
class RawCPV:
"""Raw CPV objects supporting basic restrictions/sorting."""
+
category: str
package: str
fullver: str
@@ -19,18 +20,18 @@ class RawCPV:
def __post_init__(self):
if self.fullver is not None:
- version, _, revision = self.fullver.partition('-r')
- object.__setattr__(self, 'version', version)
- object.__setattr__(self, 'revision', cpv.Revision(revision))
+ version, _, revision = self.fullver.partition("-r")
+ object.__setattr__(self, "version", version)
+ object.__setattr__(self, "revision", cpv.Revision(revision))
@property
def key(self):
- return f'{self.category}/{self.package}'
+ return f"{self.category}/{self.package}"
@property
def versioned_atom(self):
if self.fullver:
- return atom.atom(f'={self}')
+ return atom.atom(f"={self}")
return atom.atom(str(self))
@property
@@ -45,19 +46,19 @@ class RawCPV:
def __str__(self):
if self.fullver:
- return f'{self.category}/{self.package}-{self.fullver}'
- return f'{self.category}/{self.package}'
+ return f"{self.category}/{self.package}-{self.fullver}"
+ return f"{self.category}/{self.package}"
def __repr__(self):
- address = '@%#8x' % (id(self),)
- return f'<{self.__class__.__name__} cpv={self.versioned_atom.cpvstr!r} {address}>'
+ address = "@%#8x" % (id(self),)
+ return f"<{self.__class__.__name__} cpv={self.versioned_atom.cpvstr!r} {address}>"
@total_ordering
class WrappedPkg:
"""Generic package wrapper used to inject attributes into package objects."""
- __slots__ = ('_pkg',)
+ __slots__ = ("_pkg",)
def __init__(self, pkg):
self._pkg = pkg
@@ -77,8 +78,8 @@ class WrappedPkg:
def __hash__(self):
return hash(self._pkg)
- __getattr__ = klass.GetAttrProxy('_pkg')
- __dir__ = klass.DirProxy('_pkg')
+ __getattr__ = klass.GetAttrProxy("_pkg")
+ __dir__ = klass.DirProxy("_pkg")
class FilteredPkg(WrappedPkg):
diff --git a/src/pkgcheck/pipeline.py b/src/pkgcheck/pipeline.py
index 0dd8f9b4..184f3454 100644
--- a/src/pkgcheck/pipeline.py
+++ b/src/pkgcheck/pipeline.py
@@ -29,7 +29,7 @@ class Pipeline:
self.errors = []
# pkgcheck currently requires the fork start method (#254)
- self._mp_ctx = multiprocessing.get_context('fork')
+ self._mp_ctx = multiprocessing.get_context("fork")
self._results_q = self._mp_ctx.SimpleQueue()
# create checkrunners
@@ -44,19 +44,19 @@ class Pipeline:
if self.options.pkg_scan:
# package level scans sort all returned results
self._ordered_results = {
- scope: [] for scope in base.scopes.values()
- if scope >= base.package_scope
+ scope: [] for scope in base.scopes.values() if scope >= base.package_scope
}
else:
# scoped mapping for caching repo and location specific results
self._ordered_results = {
- scope: [] for scope in reversed(list(base.scopes.values()))
+ scope: []
+ for scope in reversed(list(base.scopes.values()))
if scope <= base.repo_scope
}
def _filter_checks(self, scope):
"""Verify check scope against given scope to determine activation."""
- for check in sorted(self.options.enabled_checks, key=attrgetter('__name__')):
+ for check in sorted(self.options.enabled_checks, key=attrgetter("__name__")):
if isinstance(check.scope, base.ConditionalScope):
# conditionally enabled check
yield check
@@ -77,7 +77,7 @@ class Pipeline:
def _create_runners(self):
"""Initialize and categorize checkrunners for results pipeline."""
- pipes = {'async': [], 'sync': [], 'sequential': []}
+ pipes = {"async": [], "sync": [], "sequential": []}
# use addon/source caches to avoid re-initializing objects
addons_map = {}
@@ -88,15 +88,20 @@ class Pipeline:
addons = list(base.get_addons(self._filter_checks(scope)))
if not addons:
raise base.PkgcheckUserException(
- f'no matching checks available for {scope.desc} scope')
+ f"no matching checks available for {scope.desc} scope"
+ )
checks = init_checks(
- addons, self.options, self._results_q,
- addons_map=addons_map, source_map=source_map)
+ addons, self.options, self._results_q, addons_map=addons_map, source_map=source_map
+ )
# Initialize checkrunners per source type using a separate runner for
# async checks and categorize them for parallelization based on the
# scan and source scope.
- runners = {'async': defaultdict(list), 'sync': defaultdict(list), 'sequential': defaultdict(list)}
+ runners = {
+ "async": defaultdict(list),
+ "sync": defaultdict(list),
+ "sequential": defaultdict(list),
+ }
for (source, runner_cls), check_objs in checks.items():
runner = runner_cls(self.options, source, check_objs)
if not self.options.pkg_scan and source.scope >= base.package_scope:
@@ -183,8 +188,9 @@ class Pipeline:
"""Consumer that runs scanning tasks, queuing results for output."""
try:
for scope, restrict, i, runners in iter(work_q.get, None):
- if results := sorted(chain.from_iterable(
- pipes[i][-1][scope][j].run(restrict) for j in runners)):
+ if results := sorted(
+ chain.from_iterable(pipes[i][-1][scope][j].run(restrict) for j in runners)
+ ):
self._results_q.put(results)
except Exception: # pragma: no cover
# traceback can't be pickled so serialize it
@@ -213,21 +219,19 @@ class Pipeline:
# schedule asynchronous checks in a separate process
async_proc = None
- if async_pipes := self._pipes['async']:
- async_proc = self._mp_ctx.Process(
- target=self._schedule_async, args=(async_pipes,))
+ if async_pipes := self._pipes["async"]:
+ async_proc = self._mp_ctx.Process(target=self._schedule_async, args=(async_pipes,))
async_proc.start()
# run synchronous checks using a process pool
- if sync_pipes := self._pipes['sync']:
+ if sync_pipes := self._pipes["sync"]:
work_q = self._mp_ctx.SimpleQueue()
- pool = self._mp_ctx.Pool(
- self.options.jobs, self._run_checks, (sync_pipes, work_q))
+ pool = self._mp_ctx.Pool(self.options.jobs, self._run_checks, (sync_pipes, work_q))
pool.close()
self._queue_work(sync_pipes, work_q)
pool.join()
- if sequential_pipes := self._pipes['sequential']:
+ if sequential_pipes := self._pipes["sequential"]:
for _scope, restriction, pipes in sequential_pipes:
for runner in chain.from_iterable(pipes.values()):
if results := tuple(runner.run(restriction)):
diff --git a/src/pkgcheck/reporters.py b/src/pkgcheck/reporters.py
index 3696f5fd..089037d1 100644
--- a/src/pkgcheck/reporters.py
+++ b/src/pkgcheck/reporters.py
@@ -62,15 +62,15 @@ class StrReporter(Reporter):
def _process_report(self):
# scope to result prefix mapping
scope_prefix_map = {
- base.version_scope: '{category}/{package}-{version}: ',
- base.package_scope: '{category}/{package}: ',
- base.category_scope: '{category}: ',
+ base.version_scope: "{category}/{package}-{version}: ",
+ base.package_scope: "{category}/{package}: ",
+ base.category_scope: "{category}: ",
}
while True:
- result = (yield)
- prefix = scope_prefix_map.get(result.scope, '').format(**vars(result))
- self.out.write(f'{prefix}{result.desc}')
+ result = yield
+ prefix = scope_prefix_map.get(result.scope, "").format(**vars(result))
+ self.out.write(f"{prefix}{result.desc}")
self.out.stream.flush()
@@ -92,9 +92,9 @@ class FancyReporter(Reporter):
prev_key = None
while True:
- result = (yield)
+ result = yield
if result.scope in (base.version_scope, base.package_scope):
- key = f'{result.category}/{result.package}'
+ key = f"{result.category}/{result.package}"
elif result.scope == base.category_scope:
key = result.category
else:
@@ -103,17 +103,16 @@ class FancyReporter(Reporter):
if key != prev_key:
if prev_key is not None:
self.out.write()
- self.out.write(self.out.bold, self.out.fg('blue'), key, self.out.reset)
+ self.out.write(self.out.bold, self.out.fg("blue"), key, self.out.reset)
prev_key = key
- self.out.first_prefix.append(' ')
- self.out.later_prefix.append(' ')
- s = ''
+ self.out.first_prefix.append(" ")
+ self.out.later_prefix.append(" ")
+ s = ""
if result.scope == base.version_scope:
s = f"version {result.version}: "
self.out.write(
- self.out.fg(result.color),
- result.name, self.out.reset,
- ': ', s, result.desc)
+ self.out.fg(result.color), result.name, self.out.reset, ": ", s, result.desc
+ )
self.out.first_prefix.pop()
self.out.later_prefix.pop()
self.out.stream.flush()
@@ -145,10 +144,10 @@ class JsonReporter(Reporter):
}
while True:
- result = (yield)
+ result = yield
data = json_dict()
d = scope_map.get(result.scope, lambda x, y: x)(data, result)
- d['_' + result.level][result.name] = result.desc
+ d["_" + result.level][result.name] = result.desc
self.out.write(json.dumps(data))
# flush output so partial objects aren't written
self.out.stream.flush()
@@ -160,27 +159,28 @@ class XmlReporter(Reporter):
priority = -1000
def _start(self):
- self.out.write('<checks>')
+ self.out.write("<checks>")
def _finish(self):
- self.out.write('</checks>')
+ self.out.write("</checks>")
@coroutine
def _process_report(self):
- result_template = (
- "<result><class>%(class)s</class>"
- "<msg>%(msg)s</msg></result>")
+ result_template = "<result><class>%(class)s</class><msg>%(msg)s</msg></result>"
cat_template = (
"<result><category>%(category)s</category>"
- "<class>%(class)s</class><msg>%(msg)s</msg></result>")
+ "<class>%(class)s</class><msg>%(msg)s</msg></result>"
+ )
pkg_template = (
"<result><category>%(category)s</category>"
"<package>%(package)s</package><class>%(class)s</class>"
- "<msg>%(msg)s</msg></result>")
+ "<msg>%(msg)s</msg></result>"
+ )
ver_template = (
"<result><category>%(category)s</category>"
"<package>%(package)s</package><version>%(version)s</version>"
- "<class>%(class)s</class><msg>%(msg)s</msg></result>")
+ "<class>%(class)s</class><msg>%(msg)s</msg></result>"
+ )
scope_map = {
base.category_scope: cat_template,
@@ -189,10 +189,10 @@ class XmlReporter(Reporter):
}
while True:
- result = (yield)
- d = {k: getattr(result, k, '') for k in ('category', 'package', 'version')}
- d['class'] = xml_escape(result.name)
- d['msg'] = xml_escape(result.desc)
+ result = yield
+ d = {k: getattr(result, k, "") for k in ("category", "package", "version")}
+ d["class"] = xml_escape(result.name)
+ d["msg"] = xml_escape(result.desc)
self.out.write(scope_map.get(result.scope, result_template) % d)
@@ -211,19 +211,18 @@ class CsvReporter(Reporter):
@coroutine
def _process_report(self):
- writer = csv.writer(
- self.out,
- doublequote=False,
- escapechar='\\',
- lineterminator='')
+ writer = csv.writer(self.out, doublequote=False, escapechar="\\", lineterminator="")
while True:
- result = (yield)
- writer.writerow((
- getattr(result, 'category', ''),
- getattr(result, 'package', ''),
- getattr(result, 'version', ''),
- result.desc))
+ result = yield
+ writer.writerow(
+ (
+ getattr(result, "category", ""),
+ getattr(result, "package", ""),
+ getattr(result, "version", ""),
+ result.desc,
+ )
+ )
class _ResultFormatter(Formatter):
@@ -235,9 +234,8 @@ class _ResultFormatter(Formatter):
try:
return kwds[key]
except KeyError:
- return ''
- raise base.PkgcheckUserException(
- 'FormatReporter: integer indexes are not supported')
+ return ""
+ raise base.PkgcheckUserException("FormatReporter: integer indexes are not supported")
class FormatReporter(Reporter):
@@ -253,10 +251,10 @@ class FormatReporter(Reporter):
def _process_report(self):
formatter = _ResultFormatter()
# provide expansions for result desc, level, and output name properties
- properties = ('desc', 'level', 'name')
+ properties = ("desc", "level", "name")
while True:
- result = (yield)
+ result = yield
attrs = vars(result)
attrs.update((k, getattr(result, k)) for k in properties)
s = formatter.format(self.format_str, **attrs)
@@ -279,7 +277,7 @@ class JsonStream(Reporter):
def to_json(obj):
"""Serialize results and other objects to JSON."""
if isinstance(obj, Result):
- d = {'__class__': obj.__class__.__name__}
+ d = {"__class__": obj.__class__.__name__}
d.update(obj._attrs)
return d
return str(obj)
@@ -289,19 +287,20 @@ class JsonStream(Reporter):
"""Deserialize results from a given iterable."""
# avoid circular import issues
from . import objects
+
try:
for data in map(json.loads, iterable):
- cls = objects.KEYWORDS[data.pop('__class__')]
+ cls = objects.KEYWORDS[data.pop("__class__")]
yield cls._create(**data)
except (json.decoder.JSONDecodeError, UnicodeDecodeError, DeserializationError) as e:
- raise DeserializationError('failed loading') from e
+ raise DeserializationError("failed loading") from e
except (KeyError, InvalidResult):
- raise DeserializationError('unknown result')
+ raise DeserializationError("unknown result")
@coroutine
def _process_report(self):
while True:
- result = (yield)
+ result = yield
self.out.write(json.dumps(result, default=self.to_json))
@@ -316,11 +315,11 @@ class FlycheckReporter(Reporter):
@coroutine
def _process_report(self):
while True:
- result = (yield)
+ result = yield
file = f'{getattr(result, "package", "")}-{getattr(result, "version", "")}.ebuild'
message = f'{getattr(result, "name")}: {getattr(result, "desc")}'
if isinstance(result, BaseLinesResult):
- message = message.replace(result.lines_str, '').strip()
+ message = message.replace(result.lines_str, "").strip()
for lineno in result.lines:
self.out.write(f'{file}:{lineno}:{getattr(result, "level")}:{message}')
else:
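
All of the reporters above share the primed-generator pattern that the ``result = yield`` changes touch: _process_report is advanced to its first yield once, then fed results via send(). A self-contained sketch of the idea, using a stand-in priming decorator rather than snakeoil's actual coroutine helper (whose exact behavior is assumed here):

    import functools

    def coroutine(func):
        # Stand-in for snakeoil's @coroutine: create the generator and
        # advance it to the first `yield` so it can receive .send() calls.
        @functools.wraps(func)
        def primed(*args, **kwargs):
            gen = func(*args, **kwargs)
            next(gen)
            return gen
        return primed

    @coroutine
    def printing_reporter():
        # same shape as the _process_report coroutines above
        while True:
            result = yield  # suspends here until a result is sent in
            print(f"report: {result}")

    reporter = printing_reporter()
    reporter.send("NoFinalNewline: ebuild lacks a final newline")
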
diff --git a/src/pkgcheck/results.py b/src/pkgcheck/results.py
index cac8fbfa..23d639fc 100644
--- a/src/pkgcheck/results.py
+++ b/src/pkgcheck/results.py
@@ -34,7 +34,7 @@ class Result:
cls.name = cls._name if cls._name is not None else cls.__name__
def __str__(self):
- return f'{self.name}: {self.desc}'
+ return f"{self.name}: {self.desc}"
@property
def desc(self):
@@ -43,24 +43,24 @@ class Result:
@property
def _attrs(self):
"""Return all public result attributes."""
- return {k: v for k, v in self.__dict__.items() if not k.startswith('_')}
+ return {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
@classmethod
def _create(cls, **kwargs):
"""Create a new result object from a given attributes dict."""
if issubclass(cls, CategoryResult):
- category = kwargs.pop('category', None)
- package = kwargs.pop('package', None)
- version = kwargs.pop('version', None)
- if 'pkg' not in kwargs:
+ category = kwargs.pop("category", None)
+ package = kwargs.pop("package", None)
+ version = kwargs.pop("version", None)
+ if "pkg" not in kwargs:
# recreate pkg param from related, separated attributes
if category is None:
- raise InvalidResult('missing category')
+ raise InvalidResult("missing category")
if issubclass(cls, PackageResult) and package is None:
- raise InvalidResult('missing package')
+ raise InvalidResult("missing package")
if issubclass(cls, VersionResult) and version is None:
- raise InvalidResult('missing version')
- kwargs['pkg'] = RawCPV(category, package, version)
+ raise InvalidResult("missing version")
+ kwargs["pkg"] = RawCPV(category, package, version)
return cls(**kwargs)
def __eq__(self, other):
@@ -91,36 +91,36 @@ class BaseLinesResult:
@property
def lines_str(self):
s = pluralism(self.lines)
- lines = ', '.join(map(str, self.lines))
- return f'on line{s}: {lines}'
+ lines = ", ".join(map(str, self.lines))
+ return f"on line{s}: {lines}"
class Error(Result):
"""Result with an error priority level."""
- level = 'error'
- color = 'red'
+ level = "error"
+ color = "red"
class Warning(Result):
"""Result with a warning priority level."""
- level = 'warning'
- color = 'yellow'
+ level = "warning"
+ color = "yellow"
class Style(Result):
"""Result with a coding style priority level."""
- level = 'style'
- color = 'cyan'
+ level = "style"
+ color = "cyan"
class Info(Result):
"""Result with an info priority level."""
- level = 'info'
- color = 'green'
+ level = "info"
+ color = "green"
class CommitResult(Result):
@@ -131,7 +131,7 @@ class CommitResult(Result):
def __init__(self, commit, **kwargs):
super().__init__(**kwargs)
self.commit = str(commit)
- self._attr = 'commit'
+ self._attr = "commit"
def __lt__(self, other):
try:
@@ -159,7 +159,7 @@ class EclassResult(Result):
def __init__(self, eclass, **kwargs):
super().__init__(**kwargs)
self.eclass = str(eclass)
- self._attr = 'eclass'
+ self._attr = "eclass"
def __lt__(self, other):
try:
@@ -182,7 +182,7 @@ class CategoryResult(Result):
def __init__(self, pkg, **kwargs):
super().__init__(**kwargs)
self.category = pkg.category
- self._attr = 'category'
+ self._attr = "category"
def __lt__(self, other):
try:
@@ -201,7 +201,7 @@ class PackageResult(CategoryResult):
def __init__(self, pkg, **kwargs):
super().__init__(pkg, **kwargs)
self.package = pkg.package
- self._attr = 'package'
+ self._attr = "package"
def __lt__(self, other):
try:
@@ -223,11 +223,11 @@ class VersionResult(PackageResult):
pkg = pkg._pkg
super().__init__(pkg, **kwargs)
self.version = pkg.fullver
- self._attr = 'version'
+ self._attr = "version"
@klass.jit_attr
def ver_rev(self):
- version, _, revision = self.version.partition('-r')
+ version, _, revision = self.version.partition("-r")
revision = cpv.Revision(revision)
return version, revision
@@ -307,10 +307,9 @@ class MetadataError(Error):
if cls.attr is not None:
setting = cls.results.setdefault(cls.attr, cls)
if setting != cls:
- raise ValueError(
- f'metadata attribute {cls.attr!r} already registered: {setting!r}')
+ raise ValueError(f"metadata attribute {cls.attr!r} already registered: {setting!r}")
else:
- raise ValueError(f'class missing metadata attributes: {cls!r}')
+ raise ValueError(f"class missing metadata attributes: {cls!r}")
def __init__(self, attr, msg, **kwargs):
super().__init__(**kwargs)
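
The ver_rev property above relies on the same str.partition split that RawCPV.__post_init__ performs in the packages.py hunk; a quick illustration (pkgcheck additionally wraps the revision in cpv.Revision):

    # "-r" splits version from revision; revision is "" when no suffix exists
    for fullver in ("1.2.3-r1", "1.2.3"):
        version, _, revision = fullver.partition("-r")
        print(version, repr(revision))  # 1.2.3 '1', then 1.2.3 ''
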
diff --git a/src/pkgcheck/runners.py b/src/pkgcheck/runners.py
index b1aa8e64..86bdbe6e 100644
--- a/src/pkgcheck/runners.py
+++ b/src/pkgcheck/runners.py
@@ -31,7 +31,7 @@ class CheckRunner:
class SyncCheckRunner(CheckRunner):
"""Generic runner for synchronous checks."""
- type = 'sync'
+ type = "sync"
def __init__(self, *args):
super().__init__(*args)
@@ -43,7 +43,8 @@ class SyncCheckRunner(CheckRunner):
# only report metadata errors for version-scoped sources
if self.source.scope == base.version_scope:
self.source.itermatch = partial(
- self.source.itermatch, error_callback=self._metadata_error_cb)
+ self.source.itermatch, error_callback=self._metadata_error_cb
+ )
def _metadata_error_cb(self, e, check=None):
"""Callback handling MetadataError results."""
@@ -58,7 +59,7 @@ class SyncCheckRunner(CheckRunner):
# so they can be noticed and fixed.
result_cls = MetadataError.results[e.attr]
if result_cls in known_results:
- error_str = ': '.join(e.msg().split('\n'))
+ error_str = ": ".join(e.msg().split("\n"))
result = result_cls(e.attr, error_str, pkg=e.pkg)
self._metadata_errors.append((e.pkg, result))
@@ -98,7 +99,7 @@ class SequentialCheckRunner(SyncCheckRunner):
Checks that must not be run in parallel will be run on the main process.
"""
- type = 'sequential'
+ type = "sequential"
class AsyncCheckRunner(CheckRunner):
@@ -109,7 +110,7 @@ class AsyncCheckRunner(CheckRunner):
on completion.
"""
- type = 'async'
+ type = "async"
def schedule(self, executor, futures, restrict=packages.AlwaysTrue):
"""Schedule all checks to run via the given executor."""
diff --git a/src/pkgcheck/scripts/__init__.py b/src/pkgcheck/scripts/__init__.py
index 351cc7c9..7757a9c0 100755
--- a/src/pkgcheck/scripts/__init__.py
+++ b/src/pkgcheck/scripts/__init__.py
@@ -19,19 +19,21 @@ def run(script_name):
try:
from pkgcheck.cli import Tool
- script_module = '.'.join(
- os.path.realpath(__file__).split(os.path.sep)[-3:-1] +
- [script_name.replace('-', '_')])
+
+ script_module = ".".join(
+ os.path.realpath(__file__).split(os.path.sep)[-3:-1] + [script_name.replace("-", "_")]
+ )
script = import_module(script_module)
except ImportError as e:
- python_version = '.'.join(map(str, sys.version_info[:3]))
- sys.stderr.write(f'Failed importing: {e}!\n')
+ python_version = ".".join(map(str, sys.version_info[:3]))
+ sys.stderr.write(f"Failed importing: {e}!\n")
sys.stderr.write(
- 'Verify that pkgcheck and its deps are properly installed '
- f'and/or PYTHONPATH is set correctly for python {python_version}.\n')
- if '--debug' in sys.argv[1:]:
+ "Verify that pkgcheck and its deps are properly installed "
+ f"and/or PYTHONPATH is set correctly for python {python_version}.\n"
+ )
+ if "--debug" in sys.argv[1:]:
raise
- sys.stderr.write('Add --debug to the commandline for a traceback.\n')
+ sys.stderr.write("Add --debug to the commandline for a traceback.\n")
sys.exit(1)
tool = Tool(script.argparser)
@@ -46,5 +48,5 @@ def main():
run(os.path.basename(sys.argv[0]))
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/src/pkgcheck/scripts/argparse_actions.py b/src/pkgcheck/scripts/argparse_actions.py
index 8d6485f6..67a18f73 100644
--- a/src/pkgcheck/scripts/argparse_actions.py
+++ b/src/pkgcheck/scripts/argparse_actions.py
@@ -16,7 +16,7 @@ class ConfigArg(argparse._StoreAction):
"""Store config path string or False when explicitly disabled."""
def __call__(self, parser, namespace, values, option_string=None):
- if values.lower() in ('false', 'no', 'n'):
+ if values.lower() in ("false", "no", "n"):
values = False
setattr(namespace, self.dest, values)
@@ -30,13 +30,13 @@ def object_to_keywords(namespace, obj):
elif obj in namespace.config_checksets:
yield from chain(*ChecksetArgs.checksets_to_keywords(namespace, [obj]))
else:
- raise ValueError(f'unknown checkset, check, or keyword: {obj!r}')
+ raise ValueError(f"unknown checkset, check, or keyword: {obj!r}")
class FilterArgs(arghparse.CommaSeparatedValues):
"""Apply filters to an entire scan or specific checks/keywords."""
- known_filters = frozenset(['latest'])
+ known_filters = frozenset(["latest"])
def __call__(self, parser, namespace, values, option_string=None):
values = self.parse_values(values)
@@ -44,14 +44,14 @@ class FilterArgs(arghparse.CommaSeparatedValues):
disabled = False
for val in values:
- if ':' in val:
- filter_type, target = val.split(':')
+ if ":" in val:
+ filter_type, target = val.split(":")
try:
keywords = object_to_keywords(namespace, target)
filter_map.update({x: filter_type for x in keywords})
except ValueError as e:
raise argparse.ArgumentError(self, str(e))
- elif val.lower() in ('false', 'no', 'n'):
+ elif val.lower() in ("false", "no", "n"):
# disable all filters
disabled = True
break
@@ -63,19 +63,24 @@ class FilterArgs(arghparse.CommaSeparatedValues):
# validate selected filters
if unknown := set(filter_map.values()) - self.known_filters:
s = pluralism(unknown)
- unknown = ', '.join(map(repr, unknown))
- available = ', '.join(sorted(self.known_filters))
+ unknown = ", ".join(map(repr, unknown))
+ available = ", ".join(sorted(self.known_filters))
raise argparse.ArgumentError(
- self, f'unknown filter{s}: {unknown} (available: {available})')
+ self, f"unknown filter{s}: {unknown} (available: {available})"
+ )
filters = {}
if not disabled:
# pull default filters
filters.update(objects.KEYWORDS.filter)
# ignore invalid keywords -- only keywords of version scope and higher are affected
- filters.update({
- objects.KEYWORDS[k]: v for k, v in filter_map.items()
- if objects.KEYWORDS[k].scope >= base.version_scope})
+ filters.update(
+ {
+ objects.KEYWORDS[k]: v
+ for k, v in filter_map.items()
+ if objects.KEYWORDS[k].scope >= base.version_scope
+ }
+ )
setattr(namespace, self.dest, ImmutableDict(filters))
@@ -104,20 +109,21 @@ class CacheNegations(arghparse.CommaSeparatedNegations):
def parse_values(self, values):
all_cache_types = {cache.type for cache in CachedAddon.caches.values()}
disabled, enabled = [], list(all_cache_types)
- if values is None or values.lower() in ('y', 'yes', 'true'):
+ if values is None or values.lower() in ("y", "yes", "true"):
pass
- elif values.lower() in ('n', 'no', 'false'):
+ elif values.lower() in ("n", "no", "false"):
disabled = list(all_cache_types)
else:
disabled, enabled = super().parse_values(values)
disabled = set(disabled)
enabled = set(enabled) if enabled else all_cache_types
if unknown := (disabled | enabled) - all_cache_types:
- unknowns = ', '.join(map(repr, unknown))
- choices = ', '.join(map(repr, sorted(self.caches)))
+ unknowns = ", ".join(map(repr, unknown))
+ choices = ", ".join(map(repr, sorted(self.caches)))
s = pluralism(unknown)
raise argparse.ArgumentError(
- self, f'unknown cache type{s}: {unknowns} (choose from {choices})')
+ self, f"unknown cache type{s}: {unknowns} (choose from {choices})"
+ )
enabled = set(enabled).difference(disabled)
return enabled
@@ -135,8 +141,8 @@ class ChecksetArgs(arghparse.CommaSeparatedNegations):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.aliases = {
- 'all': list(objects.CHECKS.values()),
- 'net': list(objects.CHECKS.select(NetworkCheck).values()),
+ "all": list(objects.CHECKS.values()),
+ "net": list(objects.CHECKS.select(NetworkCheck).values()),
}
def expand_aliases(self, args):
@@ -157,7 +163,7 @@ class ChecksetArgs(arghparse.CommaSeparatedNegations):
for arg in args:
for x in namespace.config_checksets[arg]:
# determine if checkset item is disabled or enabled
- if x[0] == '-':
+ if x[0] == "-":
x = x[1:]
keywords = disabled
else:
@@ -168,7 +174,7 @@ class ChecksetArgs(arghparse.CommaSeparatedNegations):
elif x in objects.KEYWORDS:
keywords.append(x)
else:
- raise ValueError(f'{arg!r} checkset, unknown check or keyword: {x!r}')
+ raise ValueError(f"{arg!r} checkset, unknown check or keyword: {x!r}")
return disabled, enabled
def __call__(self, parser, namespace, values, option_string=None):
@@ -177,11 +183,12 @@ class ChecksetArgs(arghparse.CommaSeparatedNegations):
# validate selected checksets
if unknown := set(disabled + enabled) - set(self.aliases) - set(checksets):
- unknown_str = ', '.join(map(repr, unknown))
- available = ', '.join(sorted(chain(checksets, self.aliases)))
+ unknown_str = ", ".join(map(repr, unknown))
+ available = ", ".join(sorted(chain(checksets, self.aliases)))
s = pluralism(unknown)
raise argparse.ArgumentError(
- self, f'unknown checkset{s}: {unknown_str} (available: {available})')
+ self, f"unknown checkset{s}: {unknown_str} (available: {available})"
+ )
# expand aliases into keywords
disabled, disabled_aliases = self.expand_aliases(disabled)
@@ -203,12 +210,12 @@ class ChecksetArgs(arghparse.CommaSeparatedNegations):
args = []
if enabled_keywords:
keywords_set = {objects.KEYWORDS[x] for x in enabled_keywords}
- checks = ','.join(
- k for k, v in objects.CHECKS.items()
- if v.known_results.intersection(keywords_set))
- args.append(f'--checks={checks}')
- keywords = ','.join(enabled_keywords | {f'-{x}' for x in disabled_keywords})
- args.append(f'--keywords={keywords}')
+ checks = ",".join(
+ k for k, v in objects.CHECKS.items() if v.known_results.intersection(keywords_set)
+ )
+ args.append(f"--checks={checks}")
+ keywords = ",".join(enabled_keywords | {f"-{x}" for x in disabled_keywords})
+ args.append(f"--keywords={keywords}")
parser._parse_known_args(args, namespace)
@@ -220,21 +227,22 @@ class ScopeArgs(arghparse.CommaSeparatedNegations):
# validate selected scopes
if unknown_scopes := set(disabled + enabled) - set(base.scopes):
- unknown = ', '.join(map(repr, unknown_scopes))
- available = ', '.join(base.scopes)
+ unknown = ", ".join(map(repr, unknown_scopes))
+ available = ", ".join(base.scopes)
s = pluralism(unknown_scopes)
raise argparse.ArgumentError(
- self, f'unknown scope{s}: {unknown} (available: {available})')
+ self, f"unknown scope{s}: {unknown} (available: {available})"
+ )
disabled = set(chain.from_iterable(base.scopes[x] for x in disabled))
enabled = set(chain.from_iterable(base.scopes[x] for x in enabled))
if enabled:
- namespace.enabled_checks = {
- c for c in objects.CHECKS.values() if c.scope in enabled}
+ namespace.enabled_checks = {c for c in objects.CHECKS.values() if c.scope in enabled}
if disabled:
namespace.enabled_checks.difference_update(
- c for c in objects.CHECKS.values() if c.scope in disabled)
+ c for c in objects.CHECKS.values() if c.scope in disabled
+ )
setattr(namespace, self.dest, frozenset(enabled))
@@ -247,9 +255,9 @@ class CheckArgs(arghparse.CommaSeparatedElements):
# validate selected checks
if unknown_checks := set(subtractive + neutral + additive) - set(objects.CHECKS):
- unknown = ', '.join(map(repr, unknown_checks))
+ unknown = ", ".join(map(repr, unknown_checks))
s = pluralism(unknown_checks)
- raise argparse.ArgumentError(self, f'unknown check{s}: {unknown}')
+ raise argparse.ArgumentError(self, f"unknown check{s}: {unknown}")
if neutral:
# replace the default check set
@@ -259,8 +267,7 @@ class CheckArgs(arghparse.CommaSeparatedElements):
namespace.enabled_checks.update(objects.CHECKS[c] for c in additive)
if subtractive:
# remove from the default check set
- namespace.enabled_checks.difference_update(
- objects.CHECKS[c] for c in subtractive)
+ namespace.enabled_checks.difference_update(objects.CHECKS[c] for c in subtractive)
setattr(namespace, self.dest, frozenset(neutral + additive))
@@ -278,9 +285,9 @@ class KeywordArgs(arghparse.CommaSeparatedNegations):
# validate selected keywords
if unknown_keywords := set(disabled + enabled) - set(objects.KEYWORDS):
- unknown = ', '.join(map(repr, unknown_keywords))
+ unknown = ", ".join(map(repr, unknown_keywords))
s = pluralism(unknown_keywords)
- raise argparse.ArgumentError(self, f'unknown keyword{s}: {unknown}')
+ raise argparse.ArgumentError(self, f"unknown keyword{s}: {unknown}")
# create keyword instance sets
disabled_keywords = {objects.KEYWORDS[k] for k in disabled}
@@ -292,8 +299,7 @@ class KeywordArgs(arghparse.CommaSeparatedNegations):
for check in list(namespace.enabled_checks):
if check.known_results.issubset(disabled_keywords):
namespace.enabled_checks.discard(check)
- enabled_keywords = set().union(
- *(c.known_results for c in namespace.enabled_checks))
+ enabled_keywords = set().union(*(c.known_results for c in namespace.enabled_checks))
namespace.filtered_keywords = enabled_keywords - disabled_keywords
# restrict enabled checks if none have been selected
@@ -305,7 +311,7 @@ class KeywordArgs(arghparse.CommaSeparatedNegations):
# check if experimental profiles are required for explicitly selected keywords
for r in namespace.filtered_keywords:
- if r.name in enabled and r._profile == 'exp':
+ if r.name in enabled and r._profile == "exp":
namespace.exp_profiles_required = True
break
@@ -331,17 +337,19 @@ class ExitArgs(arghparse.CommaSeparatedElements):
def __call__(self, parser, namespace, values, option_string=None):
# default to using error results if no keywords are selected
if values is None:
- values = 'error'
+ values = "error"
subtractive, neutral, additive = self.parse_values(values)
# default to using error results if no neutral keywords are selected
if not neutral:
- neutral.append('error')
+ neutral.append("error")
# expand args to keyword objects
keywords = {objects.KEYWORDS[x] for x in self.args_to_keywords(namespace, neutral)}
keywords.update(objects.KEYWORDS[x] for x in self.args_to_keywords(namespace, additive))
- keywords.difference_update(objects.KEYWORDS[x] for x in self.args_to_keywords(namespace, subtractive))
+ keywords.difference_update(
+ objects.KEYWORDS[x] for x in self.args_to_keywords(namespace, subtractive)
+ )
setattr(namespace, self.dest, frozenset(keywords))
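
Several of the actions above (ScopeArgs, KeywordArgs, ChecksetArgs) build on comma-separated negation parsing, where a leading ``-`` disables an item. A rough re-creation of that split, assuming snakeoil's CommaSeparatedNegations behavior (check names are illustrative):

    def split_negations(value):
        # "-name" disables an item, anything else enables it
        disabled, enabled = [], []
        for token in value.split(","):
            if token.startswith("-"):
                disabled.append(token[1:])
            else:
                enabled.append(token)
        return disabled, enabled

    print(split_negations("ImlateCheck,-PythonCheck"))
    # (['PythonCheck'], ['ImlateCheck'])
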
diff --git a/src/pkgcheck/scripts/argparsers.py b/src/pkgcheck/scripts/argparsers.py
index d082cf36..0fd349d7 100644
--- a/src/pkgcheck/scripts/argparsers.py
+++ b/src/pkgcheck/scripts/argparsers.py
@@ -7,18 +7,25 @@ from snakeoil.cli import arghparse
from .. import objects, reporters
reporter_argparser = arghparse.ArgumentParser(suppress=True)
-reporter_options = reporter_argparser.add_argument_group('reporter options')
+reporter_options = reporter_argparser.add_argument_group("reporter options")
reporter_options.add_argument(
- '-R', '--reporter', action='store', default=None,
- help='use a non-default reporter',
+ "-R",
+ "--reporter",
+ action="store",
+ default=None,
+ help="use a non-default reporter",
docs="""
Select a reporter to use for output.
Use ``pkgcheck show --reporters`` to see available options.
- """)
+ """,
+)
reporter_options.add_argument(
- '--format', dest='format_str', action='store', default=None,
- help='format string used with FormatReporter',
+ "--format",
+ dest="format_str",
+ action="store",
+ default=None,
+ help="format string used with FormatReporter",
docs="""
Custom format string used to format output by FormatReporter.
@@ -34,34 +41,40 @@ reporter_options.add_argument(
requested attribute expansion in the format string. In other words,
``--format {foo}`` will never produce any output because no result has the
``foo`` attribute.
- """)
+ """,
+)
@reporter_argparser.bind_final_check
def _setup_reporter(parser, namespace):
if namespace.reporter is None:
namespace.reporter = sorted(
- objects.REPORTERS.values(), key=attrgetter('priority'), reverse=True)[0]
+ objects.REPORTERS.values(), key=attrgetter("priority"), reverse=True
+ )[0]
else:
try:
namespace.reporter = objects.REPORTERS[namespace.reporter]
except KeyError:
- available = ', '.join(objects.REPORTERS)
- parser.error(
- f"no reporter matches {namespace.reporter!r} "
- f"(available: {available})")
+ available = ", ".join(objects.REPORTERS)
+ parser.error(f"no reporter matches {namespace.reporter!r} (available: {available})")
if namespace.reporter is reporters.FormatReporter:
if not namespace.format_str:
- parser.error('missing or empty --format option required by FormatReporter')
+ parser.error("missing or empty --format option required by FormatReporter")
namespace.reporter = partial(namespace.reporter, namespace.format_str)
elif namespace.format_str is not None:
- parser.error('--format option is only valid when using FormatReporter')
+ parser.error("--format option is only valid when using FormatReporter")
repo_argparser = arghparse.ArgumentParser(suppress=True)
-repo_options = repo_argparser.add_argument_group('repo options')
+repo_options = repo_argparser.add_argument_group("repo options")
repo_options.add_argument(
- '-r', '--repo', metavar='REPO', dest='target_repo',
- action=commandline.StoreRepoObject, repo_type='ebuild-raw', allow_external_repos=True,
- help='target repo')
+ "-r",
+ "--repo",
+ metavar="REPO",
+ dest="target_repo",
+ action=commandline.StoreRepoObject,
+ repo_type="ebuild-raw",
+ allow_external_repos=True,
+ help="target repo",
+)
diff --git a/src/pkgcheck/scripts/pkgcheck.py b/src/pkgcheck/scripts/pkgcheck.py
index 9eb5c0c8..7ec3cf77 100644
--- a/src/pkgcheck/scripts/pkgcheck.py
+++ b/src/pkgcheck/scripts/pkgcheck.py
@@ -7,4 +7,5 @@ ebuild repositories for various issues.
from pkgcore.util import commandline
argparser = commandline.ArgumentParser(
- description=__doc__, help=False, subcmds=True, script=(__file__, __name__))
+ description=__doc__, help=False, subcmds=True, script=(__file__, __name__)
+)
diff --git a/src/pkgcheck/scripts/pkgcheck_cache.py b/src/pkgcheck/scripts/pkgcheck_cache.py
index a986bb2d..5787c65e 100644
--- a/src/pkgcheck/scripts/pkgcheck_cache.py
+++ b/src/pkgcheck/scripts/pkgcheck_cache.py
@@ -10,34 +10,37 @@ from .argparse_actions import CacheNegations
from .argparsers import repo_argparser
cache = arghparse.ArgumentParser(
- prog='pkgcheck cache', description='perform cache operations',
+ prog="pkgcheck cache",
+ description="perform cache operations",
parents=(repo_argparser,),
docs="""
Various types of caches are used by pkgcheck. This command supports
running operations on them including updates and removals.
- """)
+ """,
+)
cache.add_argument(
- '--cache-dir', type=arghparse.create_dir, default=const.USER_CACHE_DIR,
- help='directory to use for storing cache files')
+ "--cache-dir",
+ type=arghparse.create_dir,
+ default=const.USER_CACHE_DIR,
+ help="directory to use for storing cache files",
+)
cache_actions = cache.add_mutually_exclusive_group()
cache_actions.add_argument(
- '-l', '--list', dest='list_cache', action='store_true',
- help='list available caches')
+ "-l", "--list", dest="list_cache", action="store_true", help="list available caches"
+)
cache_actions.add_argument(
- '-u', '--update', dest='update_cache', action='store_true',
- help='update caches')
+ "-u", "--update", dest="update_cache", action="store_true", help="update caches"
+)
cache_actions.add_argument(
- '-R', '--remove', dest='remove_cache', action='store_true',
- help='forcibly remove caches')
+ "-R", "--remove", dest="remove_cache", action="store_true", help="forcibly remove caches"
+)
cache.add_argument(
- '-f', '--force', dest='force_cache', action='store_true',
- help='forcibly update/remove caches')
+ "-f", "--force", dest="force_cache", action="store_true", help="forcibly update/remove caches"
+)
cache.add_argument(
- '-n', '--dry-run', action='store_true',
- help='dry run without performing any changes')
-cache.add_argument(
- '-t', '--type', dest='cache', action=CacheNegations,
- help='target cache types')
+ "-n", "--dry-run", action="store_true", help="dry run without performing any changes"
+)
+cache.add_argument("-t", "--type", dest="cache", action=CacheNegations, help="target cache types")
@cache.bind_pre_parse
@@ -50,16 +53,14 @@ def _setup_cache_addons(parser, namespace):
@cache.bind_early_parse
def _setup_cache(parser, namespace, args):
if namespace.target_repo is None:
- namespace.target_repo = namespace.config.get_default('repo')
+ namespace.target_repo = namespace.config.get_default("repo")
return namespace, args
@cache.bind_final_check
def _validate_cache_args(parser, namespace):
enabled_caches = {k for k, v in namespace.cache.items() if v}
- cache_addons = (
- addon for addon in CachedAddon.caches
- if addon.cache.type in enabled_caches)
+ cache_addons = (addon for addon in CachedAddon.caches if addon.cache.type in enabled_caches)
# sort caches by type
namespace.cache_addons = sorted(cache_addons, key=lambda x: x.cache.type)
@@ -72,18 +73,18 @@ def _cache(options, out, err):
cache_obj = CachedAddon(options)
cache_obj.remove_caches()
elif options.update_cache:
- for addon_cls in options.pop('cache_addons'):
+ for addon_cls in options.pop("cache_addons"):
init_addon(addon_cls, options)
else:
# list existing caches
cache_obj = CachedAddon(options)
- repos_dir = pjoin(options.cache_dir, 'repos')
+ repos_dir = pjoin(options.cache_dir, "repos")
for cache_type in sorted(options.enabled_caches):
paths = cache_obj.existing_caches[cache_type]
if paths:
- out.write(out.fg('yellow'), f'{cache_type} caches: ', out.reset)
+ out.write(out.fg("yellow"), f"{cache_type} caches: ", out.reset)
for path in paths:
- repo = str(path.parent)[len(repos_dir):]
+ repo = str(path.parent)[len(repos_dir) :]
# non-path repo ids get path separator stripped
if repo.count(os.sep) == 1:
repo = repo.lstrip(os.sep)
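
One black artifact worth noting in the cache listing hunk above is the spaced slice ``[len(repos_dir) :]``: PEP 8 treats the slice colon as a binary operator, so a complex expression beside it gets symmetric spacing. A tiny demonstration with hypothetical paths:

    repos_dir = "/home/user/.cache/pkgcheck/repos"
    cache_parent = "/home/user/.cache/pkgcheck/repos/gentoo"
    print(cache_parent[len(repos_dir) :])  # /gentoo
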
diff --git a/src/pkgcheck/scripts/pkgcheck_ci.py b/src/pkgcheck/scripts/pkgcheck_ci.py
index 8db03e14..1a15f640 100644
--- a/src/pkgcheck/scripts/pkgcheck_ci.py
+++ b/src/pkgcheck/scripts/pkgcheck_ci.py
@@ -21,10 +21,10 @@ class ArgumentParser(arghparse.ArgumentParser):
return namespace, []
-ci = ArgumentParser(prog='pkgcheck ci', description='scan repo for CI')
+ci = ArgumentParser(prog="pkgcheck ci", description="scan repo for CI")
ci.add_argument(
- '--failures', type=argparse.FileType('w'),
- help='file path for storing failure results')
+ "--failures", type=argparse.FileType("w"), help="file path for storing failure results"
+)
@ci.bind_main_func
diff --git a/src/pkgcheck/scripts/pkgcheck_replay.py b/src/pkgcheck/scripts/pkgcheck_replay.py
index 2f025f5e..37e0024e 100644
--- a/src/pkgcheck/scripts/pkgcheck_replay.py
+++ b/src/pkgcheck/scripts/pkgcheck_replay.py
@@ -5,7 +5,8 @@ from ..base import PkgcheckUserException
from .argparsers import reporter_argparser
replay = arghparse.ArgumentParser(
- prog='pkgcheck replay', description='replay result streams',
+ prog="pkgcheck replay",
+ description="replay result streams",
parents=(reporter_argparser,),
docs="""
Replay previous json result streams, feeding the results into a reporter.
@@ -13,10 +14,14 @@ replay = arghparse.ArgumentParser(
Useful if you need to delay acting on results until it can be done in
one minimal window, e.g. updating a database, or want to generate
several different reports.
- """)
+ """,
+)
replay.add_argument(
- dest='results', metavar='FILE',
- type=arghparse.FileType('rb'), help='path to serialized results file')
+ dest="results",
+ metavar="FILE",
+ type=arghparse.FileType("rb"),
+ help="path to serialized results file",
+)
@replay.bind_main_func
@@ -30,8 +35,7 @@ def _replay(options, out, err):
processed += 1
except reporters.DeserializationError as e:
if not processed:
- raise PkgcheckUserException('invalid or unsupported replay file')
- raise PkgcheckUserException(
- f'corrupted results file {options.results.name!r}: {e}')
+ raise PkgcheckUserException("invalid or unsupported replay file")
+ raise PkgcheckUserException(f"corrupted results file {options.results.name!r}: {e}")
return 0
diff --git a/src/pkgcheck/scripts/pkgcheck_scan.py b/src/pkgcheck/scripts/pkgcheck_scan.py
index e5227bbf..1d583407 100644
--- a/src/pkgcheck/scripts/pkgcheck_scan.py
+++ b/src/pkgcheck/scripts/pkgcheck_scan.py
@@ -20,10 +20,12 @@ from . import argparse_actions
from .argparsers import repo_argparser, reporter_argparser
config_argparser = arghparse.ArgumentParser(suppress=True)
-config_options = config_argparser.add_argument_group('config options')
+config_options = config_argparser.add_argument_group("config options")
config_options.add_argument(
- '--config', action=argparse_actions.ConfigArg, dest='config_file',
- help='use custom pkgcheck scan settings file',
+ "--config",
+ action=argparse_actions.ConfigArg,
+ dest="config_file",
+ help="use custom pkgcheck scan settings file",
docs="""
Load custom pkgcheck scan settings from a given file.
@@ -32,21 +34,31 @@ config_options.add_argument(
It's also possible to disable all types of settings loading by
specifying an argument of 'false' or 'no'.
- """)
+ """,
+)
scan = arghparse.ArgumentParser(
- prog='pkgcheck scan', description='scan targets for QA issues',
- parents=(config_argparser, repo_argparser, reporter_argparser))
+ prog="pkgcheck scan",
+ description="scan targets for QA issues",
+ parents=(config_argparser, repo_argparser, reporter_argparser),
+)
scan.add_argument(
- 'targets', metavar='TARGET', nargs='*', action=arghparse.ParseNonblockingStdin,
- help='optional targets')
-
-main_options = scan.add_argument_group('main options')
+ "targets",
+ metavar="TARGET",
+ nargs="*",
+ action=arghparse.ParseNonblockingStdin,
+ help="optional targets",
+)
+
+main_options = scan.add_argument_group("main options")
main_options.add_argument(
- '-f', '--filter',
- action=arghparse.Delayed, target=argparse_actions.FilterArgs, priority=99,
- help='limit targeted packages for scanning',
+ "-f",
+ "--filter",
+ action=arghparse.Delayed,
+ target=argparse_actions.FilterArgs,
+ priority=99,
+ help="limit targeted packages for scanning",
docs="""
Support limiting targeted packages for scanning using a chosen filter.
@@ -62,23 +74,31 @@ main_options.add_argument(
network-related checks are filtered to avoid redundant or unnecessary
server requests. In order to forcibly disable all filtering, use the
'no' argument.
- """)
+ """,
+)
main_options.add_argument(
- '-j', '--jobs', type=arghparse.positive_int,
- help='number of checks to run in parallel',
+ "-j",
+ "--jobs",
+ type=arghparse.positive_int,
+ help="number of checks to run in parallel",
docs="""
Number of checks to run in parallel; defaults to using all available
processors.
- """)
+ """,
+)
main_options.add_argument(
- '-t', '--tasks', type=arghparse.positive_int,
- help='number of asynchronous tasks to run concurrently',
+ "-t",
+ "--tasks",
+ type=arghparse.positive_int,
+ help="number of asynchronous tasks to run concurrently",
docs="""
Number of asynchronous tasks to run concurrently (defaults to 5 * CPU count).
- """)
+ """,
+)
main_options.add_argument(
- '--cache', action=argparse_actions.CacheNegations,
- help='forcibly enable/disable caches',
+ "--cache",
+ action=argparse_actions.CacheNegations,
+ help="forcibly enable/disable caches",
docs="""
All cache types are enabled by default; this option explicitly sets
which caches will be generated and used during scanning.
@@ -97,14 +117,22 @@ main_options.add_argument(
When disabled, no caches will be saved to disk and results requiring
caches (e.g. git-related checks) will be skipped.
- """)
+ """,
+)
main_options.add_argument(
- '--cache-dir', type=arghparse.create_dir, default=const.USER_CACHE_DIR,
- help='directory to use for storing cache files')
+ "--cache-dir",
+ type=arghparse.create_dir,
+ default=const.USER_CACHE_DIR,
+ help="directory to use for storing cache files",
+)
main_options.add_argument(
- '--exit', metavar='ITEM', dest='exit_keywords',
- action=argparse_actions.ExitArgs, nargs='?', default=(),
- help='checksets, checks, or keywords that trigger an error exit status',
+ "--exit",
+ metavar="ITEM",
+ dest="exit_keywords",
+ action=argparse_actions.ExitArgs,
+ nargs="?",
+ default=(),
+ help="checksets, checks, or keywords that trigger an error exit status",
docs="""
Comma-separated list of checksets, checks, or keywords to enable and
disable as triggers for an error exit status. Checkset and check
@@ -116,17 +144,25 @@ main_options.add_argument(
To specify disabled keywords prefix them with ``-``. Also, the special
arguments of ``error``, ``warning``, ``style``, and ``info`` correspond
to the related keyword groups.
- """)
+ """,
+)
-check_options = scan.add_argument_group('check selection')
+check_options = scan.add_argument_group("check selection")
check_options.add_argument(
- '--net', nargs=0,
- action=arghparse.Delayed, target=argparse_actions.EnableNet, priority=-1,
- help='enable checks that require network access')
+ "--net",
+ nargs=0,
+ action=arghparse.Delayed,
+ target=argparse_actions.EnableNet,
+ priority=-1,
+ help="enable checks that require network access",
+)
check_options.add_argument(
- '-C', '--checksets', metavar='CHECKSET', action=argparse_actions.ChecksetArgs,
- help='scan using a configured set of check/keyword args',
+ "-C",
+ "--checksets",
+ metavar="CHECKSET",
+ action=argparse_actions.ChecksetArgs,
+ help="scan using a configured set of check/keyword args",
docs="""
Comma-separated list of checksets to enable and disable for
scanning.
@@ -137,11 +173,18 @@ check_options.add_argument(
All network-related checks (which are disabled by default)
can be enabled using ``-C net``. This allows for easily running only
network checks without having to explicitly list them.
- """)
+ """,
+)
check_options.add_argument(
- '-s', '--scopes', metavar='SCOPE', dest='selected_scopes', default=(),
- action=arghparse.Delayed, target=argparse_actions.ScopeArgs, priority=51,
- help='limit checks to run by scope',
+ "-s",
+ "--scopes",
+ metavar="SCOPE",
+ dest="selected_scopes",
+ default=(),
+ action=arghparse.Delayed,
+ target=argparse_actions.ScopeArgs,
+ priority=51,
+ help="limit checks to run by scope",
docs="""
Comma-separated list of scopes to enable and disable for scanning. Any
scopes specified in this fashion will affect the checks that get
@@ -149,11 +192,19 @@ check_options.add_argument(
enabled will cause only repo-level checks to run.
Available scopes: %s
- """ % (', '.join(base.scopes)))
+ """
+ % (", ".join(base.scopes)),
+)
check_options.add_argument(
- '-c', '--checks', metavar='CHECK', dest='selected_checks', default=(),
- action=arghparse.Delayed, target=argparse_actions.CheckArgs, priority=52,
- help='limit checks to run',
+ "-c",
+ "--checks",
+ metavar="CHECK",
+ dest="selected_checks",
+ default=(),
+ action=arghparse.Delayed,
+ target=argparse_actions.CheckArgs,
+ priority=52,
+ help="limit checks to run",
docs="""
Comma-separated list of checks to enable and disable for
scanning. Any checks specified in this fashion will be the
@@ -169,11 +220,18 @@ check_options.add_argument(
optional checks in addition to the default set.
Use ``pkgcheck show --checks`` to see all available checks.
- """)
+ """,
+)
check_options.add_argument(
- '-k', '--keywords', metavar='KEYWORD', dest='selected_keywords', default=(),
- action=arghparse.Delayed, target=argparse_actions.KeywordArgs, priority=53,
- help='limit keywords to scan for',
+ "-k",
+ "--keywords",
+ metavar="KEYWORD",
+ dest="selected_keywords",
+ default=(),
+ action=arghparse.Delayed,
+ target=argparse_actions.KeywordArgs,
+ priority=53,
+ help="limit keywords to scan for",
docs="""
Comma-separated list of keywords to enable and disable for
scanning. Any keywords specified in this fashion will be the
@@ -189,9 +247,10 @@ check_options.add_argument(
scan for errors, use ``-k error``.
Use ``pkgcheck show --keywords`` to see available options.
- """)
+ """,
+)
-scan.plugin = scan.add_argument_group('plugin options')
+scan.plugin = scan.add_argument_group("plugin options")
def _determine_target_repo(namespace):
@@ -225,17 +284,16 @@ def _determine_target_repo(namespace):
# determine if CWD is inside an unconfigured repo
try:
- repo = namespace.domain.find_repo(
- target_dir, config=namespace.config, configure=False)
+ repo = namespace.domain.find_repo(target_dir, config=namespace.config, configure=False)
except (repo_errors.InitializationError, IOError) as e:
raise argparse.ArgumentError(None, str(e))
# fallback to the default repo
if repo is None:
- repo = namespace.config.get_default('repo')
+ repo = namespace.config.get_default("repo")
# if the bundled stub repo is the default, no default repo exists
- if repo is None or repo.location == pjoin(pkgcore_const.DATA_PATH, 'stubrepo'):
- raise argparse.ArgumentError(None, 'no default repo found')
+ if repo is None or repo.location == pjoin(pkgcore_const.DATA_PATH, "stubrepo"):
+ raise argparse.ArgumentError(None, "no default repo found")
return repo
@@ -268,9 +326,10 @@ def _path_restrict(path, repo):
def _restrict_to_scope(restrict):
"""Determine a given restriction's scope level."""
for scope, attrs in (
- (base.version_scope, ['fullver', 'version', 'rev']),
- (base.package_scope, ['package']),
- (base.category_scope, ['category'])):
+ (base.version_scope, ["fullver", "version", "rev"]),
+ (base.package_scope, ["package"]),
+ (base.category_scope, ["category"]),
+ ):
if any(collect_package_restrictions(restrict, attrs)):
return scope
return base.repo_scope
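
Note: the scope probe above checks restriction attributes from most to least
specific, so a version-level match wins over a package- or category-level one.
A rough standalone approximation of that mapping (the hypothetical `scope_for`
helper uses plain attribute access; the real code walks the restriction tree
via `collect_package_restrictions`):

def scope_for(restrict):
    """Sketch: map a restriction's attrs to a scope, most specific first."""
    for scope, attrs in (
        ("version", ("fullver", "version", "rev")),
        ("package", ("package",)),
        ("category", ("category",)),
    ):
        # stand-in for collect_package_restrictions(restrict, attrs)
        if any(getattr(restrict, attr, None) is not None for attr in attrs):
            return scope
    return "repo"
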
@@ -299,13 +358,13 @@ def _setup_scan(parser, namespace, args):
# parser supporting config file options
config_parser = ConfigFileParser(parser)
# always load settings from bundled config
- namespace = config_parser.parse_config_options(
- namespace, configs=[const.BUNDLED_CONF_FILE])
+ namespace = config_parser.parse_config_options(namespace, configs=[const.BUNDLED_CONF_FILE])
# load default args from system/user configs if config-loading is allowed
if namespace.config_file is None:
namespace = config_parser.parse_config_options(
- namespace, configs=ConfigFileParser.default_configs)
+ namespace, configs=ConfigFileParser.default_configs
+ )
# TODO: Limit to parsing repo and targets options here so all args don't
# have to be parsed twice, will probably require a custom snakeoil
@@ -325,14 +384,14 @@ def _setup_scan(parser, namespace, args):
namespace.target_repo = _determine_target_repo(namespace)
# determine if we're running in the gentoo repo or a clone
- namespace.gentoo_repo = 'gentoo' in namespace.target_repo.aliases
+ namespace.gentoo_repo = "gentoo" in namespace.target_repo.aliases
# multiplex of target repo and its masters used for package existence queries
namespace.search_repo = multiplex.tree(*namespace.target_repo.trees)
if namespace.config_file is not False:
# support loading repo-specific config settings from metadata/pkgcheck.conf
- repo_config_file = os.path.join(namespace.target_repo.location, 'metadata', 'pkgcheck.conf')
+ repo_config_file = os.path.join(namespace.target_repo.location, "metadata", "pkgcheck.conf")
configs = [repo_config_file]
# custom user settings take precedence over previous configs
if namespace.config_file:
@@ -342,7 +401,7 @@ def _setup_scan(parser, namespace, args):
# load repo-specific args from config if they exist
namespace = config_parser.parse_config_sections(namespace, namespace.target_repo.aliases)
- if os.getenv('NOCOLOR'):
+ if os.getenv("NOCOLOR"):
namespace.color = False
return namespace, args
@@ -356,10 +415,10 @@ def generate_restricts(repo, targets):
path = os.path.realpath(target)
# prefer path restrictions if it's in the target repo
if os.path.exists(path) and path in repo:
- if path.endswith('.eclass'):
+ if path.endswith(".eclass"):
# direct eclass file targets
yield base.eclass_scope, os.path.basename(path)[:-7]
- elif path.startswith(profiles_base) and path[len(profiles_base):]:
+ elif path.startswith(profiles_base) and path[len(profiles_base) :]:
if os.path.isdir(path):
# descend into profiles dir targets
for root, _dirs, files in os.walk(path):
@@ -381,44 +440,45 @@ def generate_restricts(repo, targets):
# use path-based error for path-based targets
if os.path.exists(path) or os.path.isabs(target):
raise PkgcheckUserException(
- f"{repo.repo_id!r} repo doesn't contain: {target!r}")
+ f"{repo.repo_id!r} repo doesn't contain: {target!r}"
+ )
raise PkgcheckUserException(str(e))
-@scan.bind_delayed_default(1000, 'jobs')
+@scan.bind_delayed_default(1000, "jobs")
def _default_jobs(namespace, attr):
"""Extract jobs count from MAKEOPTS."""
parser = argparse.ArgumentParser()
- parser.add_argument('-j', '--jobs', type=arghparse.positive_int, default=os.cpu_count())
- makeopts, _ = parser.parse_known_args(shlex.split(os.getenv('MAKEOPTS', '')))
+ parser.add_argument("-j", "--jobs", type=arghparse.positive_int, default=os.cpu_count())
+ makeopts, _ = parser.parse_known_args(shlex.split(os.getenv("MAKEOPTS", "")))
setattr(namespace, attr, makeopts.jobs)
-@scan.bind_delayed_default(1001, 'tasks')
+@scan.bind_delayed_default(1001, "tasks")
def _default_tasks(namespace, attr):
"""Set based on jobs count."""
setattr(namespace, attr, namespace.jobs * 5)
-@scan.bind_delayed_default(1000, 'filter')
+@scan.bind_delayed_default(1000, "filter")
def _default_filter(namespace, attr):
"""Use source filtering for keywords requesting it by default."""
setattr(namespace, attr, objects.KEYWORDS.filter)
-@scan.bind_delayed_default(1000, 'enabled_checks')
+@scan.bind_delayed_default(1000, "enabled_checks")
def _default_enabled_checks(namespace, attr):
"""All non-optional checks are run by default."""
setattr(namespace, attr, set(objects.CHECKS.default.values()))
-@scan.bind_delayed_default(1000, 'filtered_keywords')
+@scan.bind_delayed_default(1000, "filtered_keywords")
def _default_filtered_keywords(namespace, attr):
"""Enable all keywords to be shown by default."""
setattr(namespace, attr, set(objects.KEYWORDS.values()))
-@scan.bind_delayed_default(9999, 'restrictions')
+@scan.bind_delayed_default(9999, "restrictions")
def _determine_restrictions(namespace, attr):
"""Determine restrictions for untargeted scans and generate collapsed restriction for targeted scans."""
if namespace.targets:
@@ -428,7 +488,7 @@ def _determine_restrictions(namespace, attr):
# running pipeline.
restrictions = list(generate_restricts(namespace.target_repo, namespace.targets))
if not restrictions:
- raise PkgcheckUserException('no targets')
+ raise PkgcheckUserException("no targets")
else:
if namespace.cwd in namespace.target_repo:
scope, restrict = _path_restrict(namespace.cwd, namespace.target_repo)
@@ -445,7 +505,7 @@ def _determine_restrictions(namespace, attr):
def _scan(options, out, err):
with ExitStack() as stack:
reporter = options.reporter(out)
- for c in options.pop('contexts') + [reporter]:
+ for c in options.pop("contexts") + [reporter]:
stack.enter_context(c)
pipe = Pipeline(options)
for result in pipe:
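
Note: `_default_jobs` above is the one delayed default that does real parsing:
it reuses argparse to scrape `-j`/`--jobs` out of ``$MAKEOPTS``, ignoring any
other make flags, and falls back to the CPU count. A minimal standalone sketch
of the same trick, assuming only the standard library (plain `int` stands in
for snakeoil's `positive_int`):

import argparse
import os
import shlex

def jobs_from_makeopts():
    """Sketch: pull the job count from MAKEOPTS, e.g. MAKEOPTS="-j8 -l4"."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-j", "--jobs", type=int, default=os.cpu_count())
    # parse_known_args tolerates unrelated make flags such as -l4 or --silent
    opts, _rest = parser.parse_known_args(shlex.split(os.getenv("MAKEOPTS", "")))
    return opts.jobs
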
diff --git a/src/pkgcheck/scripts/pkgcheck_show.py b/src/pkgcheck/scripts/pkgcheck_show.py
index 8273bf5e..4ea20e34 100644
--- a/src/pkgcheck/scripts/pkgcheck_show.py
+++ b/src/pkgcheck/scripts/pkgcheck_show.py
@@ -8,53 +8,72 @@ from snakeoil.formatters import decorate_forced_wrapping
from .. import base, objects
from ..addons.caches import CachedAddon
-show = arghparse.ArgumentParser(
- prog='pkgcheck show', description='show various pkgcheck info')
-list_options = show.add_argument_group('list options')
+show = arghparse.ArgumentParser(prog="pkgcheck show", description="show various pkgcheck info")
+list_options = show.add_argument_group("list options")
output_types = list_options.add_mutually_exclusive_group()
output_types.add_argument(
- '-k', '--keywords', action='store_true', default=False,
- help='show available warning/error keywords',
+ "-k",
+ "--keywords",
+ action="store_true",
+ default=False,
+ help="show available warning/error keywords",
docs="""
List all available keywords.
Use -v/--verbose to show keywords sorted into the scope they run at
(repository, category, package, or version) along with their
descriptions.
- """)
+ """,
+)
output_types.add_argument(
- '-c', '--checks', action='store_true', default=False,
- help='show available checks',
+ "-c",
+ "--checks",
+ action="store_true",
+ default=False,
+ help="show available checks",
docs="""
List all available checks.
Use -v/--verbose to show descriptions and possible keyword results for
each check.
- """)
+ """,
+)
output_types.add_argument(
- '-s', '--scopes', action='store_true', default=False,
- help='show available keyword/check scopes',
+ "-s",
+ "--scopes",
+ action="store_true",
+ default=False,
+ help="show available keyword/check scopes",
docs="""
List all available keyword and check scopes.
Use -v/--verbose to show scope descriptions.
- """)
+ """,
+)
output_types.add_argument(
- '-r', '--reporters', action='store_true', default=False,
- help='show available reporters',
+ "-r",
+ "--reporters",
+ action="store_true",
+ default=False,
+ help="show available reporters",
docs="""
List all available reporters.
Use -v/--verbose to show reporter descriptions.
- """)
+ """,
+)
output_types.add_argument(
- '-C', '--caches', action='store_true', default=False,
- help='show available caches',
+ "-C",
+ "--caches",
+ action="store_true",
+ default=False,
+ help="show available caches",
docs="""
List all available cache types.
Use -v/--verbose to show more cache information.
- """)
+ """,
+)
def dump_docstring(out, obj, prefix=None):
@@ -63,16 +82,16 @@ def dump_docstring(out, obj, prefix=None):
out.later_prefix.append(prefix)
try:
if obj.__doc__ is None:
- raise ValueError(f'no docs for {obj!r}')
+ raise ValueError(f"no docs for {obj!r}")
# Docstrings start with an unindented line; everything else is
# consistently indented.
- lines = obj.__doc__.split('\n')
+ lines = obj.__doc__.split("\n")
# some docstrings start on the second line
if firstline := lines[0].strip():
out.write(firstline)
if len(lines) > 1:
- for line in textwrap.dedent('\n'.join(lines[1:])).split('\n'):
+ for line in textwrap.dedent("\n".join(lines[1:])).split("\n"):
out.write(line)
else:
out.write()
@@ -85,23 +104,23 @@ def dump_docstring(out, obj, prefix=None):
@decorate_forced_wrapping()
def display_keywords(out, options):
if options.verbosity < 1:
- out.write('\n'.join(sorted(objects.KEYWORDS)), wrap=False)
+ out.write("\n".join(sorted(objects.KEYWORDS)), wrap=False)
else:
scopes = defaultdict(set)
for keyword in objects.KEYWORDS.values():
scopes[keyword.scope].add(keyword)
for scope in reversed(sorted(scopes)):
- out.write(out.bold, f'{scope.desc.capitalize()} scope:')
+ out.write(out.bold, f"{scope.desc.capitalize()} scope:")
out.write()
- keywords = sorted(scopes[scope], key=attrgetter('__name__'))
+ keywords = sorted(scopes[scope], key=attrgetter("__name__"))
try:
- out.first_prefix.append(' ')
- out.later_prefix.append(' ')
+ out.first_prefix.append(" ")
+ out.later_prefix.append(" ")
for keyword in keywords:
- out.write(out.fg(keyword.color), keyword.__name__, out.reset, ':')
- dump_docstring(out, keyword, prefix=' ')
+ out.write(out.fg(keyword.color), keyword.__name__, out.reset, ":")
+ dump_docstring(out, keyword, prefix=" ")
finally:
out.first_prefix.pop()
out.later_prefix.pop()
@@ -110,7 +129,7 @@ def display_keywords(out, options):
@decorate_forced_wrapping()
def display_checks(out, options):
if options.verbosity < 1:
- out.write('\n'.join(sorted(objects.CHECKS)), wrap=False)
+ out.write("\n".join(sorted(objects.CHECKS)), wrap=False)
else:
d = defaultdict(list)
for x in objects.CHECKS.values():
@@ -120,21 +139,21 @@ def display_checks(out, options):
out.write(out.bold, f"{module_name}:")
out.write()
checks = d[module_name]
- checks.sort(key=attrgetter('__name__'))
+ checks.sort(key=attrgetter("__name__"))
try:
- out.first_prefix.append(' ')
- out.later_prefix.append(' ')
+ out.first_prefix.append(" ")
+ out.later_prefix.append(" ")
for check in checks:
- out.write(out.fg('yellow'), check.__name__, out.reset, ':')
- dump_docstring(out, check, prefix=' ')
+ out.write(out.fg("yellow"), check.__name__, out.reset, ":")
+ dump_docstring(out, check, prefix=" ")
# output result types that each check can generate
keywords = []
- for r in sorted(check.known_results, key=attrgetter('__name__')):
- keywords.extend([out.fg(r.color), r.__name__, out.reset, ', '])
+ for r in sorted(check.known_results, key=attrgetter("__name__")):
+ keywords.extend([out.fg(r.color), r.__name__, out.reset, ", "])
keywords.pop()
- out.write(*([' (known results: '] + keywords + [')']))
+ out.write(*([" (known results: "] + keywords + [")"]))
out.write()
finally:
@@ -145,15 +164,15 @@ def display_checks(out, options):
@decorate_forced_wrapping()
def display_reporters(out, options):
if options.verbosity < 1:
- out.write('\n'.join(sorted(objects.REPORTERS)), wrap=False)
+ out.write("\n".join(sorted(objects.REPORTERS)), wrap=False)
else:
out.write("reporters:")
out.write()
- out.first_prefix.append(' ')
- out.later_prefix.append(' ')
- for reporter in sorted(objects.REPORTERS.values(), key=attrgetter('__name__')):
- out.write(out.bold, out.fg('yellow'), reporter.__name__)
- dump_docstring(out, reporter, prefix=' ')
+ out.first_prefix.append(" ")
+ out.later_prefix.append(" ")
+ for reporter in sorted(objects.REPORTERS.values(), key=attrgetter("__name__")):
+ out.write(out.bold, out.fg("yellow"), reporter.__name__)
+ dump_docstring(out, reporter, prefix=" ")
@show.bind_main_func
@@ -162,19 +181,19 @@ def _show(options, out, err):
display_checks(out, options)
elif options.scopes:
if options.verbosity < 1:
- out.write('\n'.join(base.scopes))
+ out.write("\n".join(base.scopes))
else:
for name, scope in base.scopes.items():
- out.write(f'{name} -- {scope.desc} scope')
+ out.write(f"{name} -- {scope.desc} scope")
elif options.reporters:
display_reporters(out, options)
elif options.caches:
if options.verbosity < 1:
- caches = sorted(map(attrgetter('type'), CachedAddon.caches.values()))
- out.write('\n'.join(caches))
+ caches = sorted(map(attrgetter("type"), CachedAddon.caches.values()))
+ out.write("\n".join(caches))
else:
- for cache in sorted(CachedAddon.caches.values(), key=attrgetter('type')):
- out.write(f'{cache.type} -- file: {cache.file}, version: {cache.version}')
+ for cache in sorted(CachedAddon.caches.values(), key=attrgetter("type")):
+ out.write(f"{cache.type} -- file: {cache.file}, version: {cache.version}")
else:
# default to showing keywords if no output option is selected
display_keywords(out, options)
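
Note: `dump_docstring` drives all of the verbose listings above: it prints the
first (unindented) docstring line, then dedents the remainder before emitting
it under the current prefix. A simplified sketch of that splitting logic,
ignoring the formatter prefix handling:

import textwrap

def docstring_lines(obj):
    """Sketch: yield display lines from obj.__doc__ as dump_docstring does."""
    if obj.__doc__ is None:
        raise ValueError(f"no docs for {obj!r}")
    first, *rest = obj.__doc__.split("\n")
    if first.strip():
        yield first.strip()  # some docstrings start on the second line instead
    if rest:
        # everything past the first line shares a common indent; strip it
        yield from textwrap.dedent("\n".join(rest)).split("\n")
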
diff --git a/src/pkgcheck/sources.py b/src/pkgcheck/sources.py
index e6230589..2d0832cd 100644
--- a/src/pkgcheck/sources.py
+++ b/src/pkgcheck/sources.py
@@ -64,8 +64,9 @@ class LatestVersionRepoSource(RepoSource):
"""Repo source that returns only the latest non-VCS and VCS slots"""
def itermatch(self, *args, **kwargs):
- for _, pkgs in groupby(super().itermatch(*args, **kwargs),
- key=lambda pkg: pkg.slotted_atom):
+ for _, pkgs in groupby(
+ super().itermatch(*args, **kwargs), key=lambda pkg: pkg.slotted_atom
+ ):
best_by_live = {pkg.live: pkg for pkg in pkgs}
yield from sorted(best_by_live.values())
@@ -94,7 +95,7 @@ class LatestVersionsFilter:
# determine the latest non-VCS and VCS pkgs for each slot
while key == pkg.key:
if pkg.live:
- selected_pkgs[f'vcs-{pkg.slot}'] = pkg
+ selected_pkgs[f"vcs-{pkg.slot}"] = pkg
else:
selected_pkgs[pkg.slot] = pkg
@@ -111,7 +112,8 @@ class LatestVersionsFilter:
selected_pkgs = set(selected_pkgs.values())
self._pkg_cache.extend(
- FilteredPkg(pkg=pkg) if pkg not in selected_pkgs else pkg for pkg in pkgs)
+ FilteredPkg(pkg=pkg) if pkg not in selected_pkgs else pkg for pkg in pkgs
+ )
return self._pkg_cache.popleft()
@@ -132,7 +134,7 @@ class LatestPkgsFilter:
# determine the latest non-VCS and VCS pkgs for each slot
for pkg in pkgs:
if pkg.live:
- selected_pkgs[f'vcs-{pkg.slot}'] = pkg
+ selected_pkgs[f"vcs-{pkg.slot}"] = pkg
else:
selected_pkgs[pkg.slot] = pkg
@@ -166,7 +168,7 @@ class EclassRepoSource(RepoSource):
def __init__(self, *args, eclass_addon, **kwargs):
super().__init__(*args, **kwargs)
self.eclasses = eclass_addon._eclass_repos[self.repo.location]
- self.eclass_dir = pjoin(self.repo.location, 'eclass')
+ self.eclass_dir = pjoin(self.repo.location, "eclass")
def itermatch(self, restrict, **kwargs):
if isinstance(restrict, str):
@@ -178,12 +180,13 @@ class EclassRepoSource(RepoSource):
eclasses = self.eclasses
for name in eclasses:
- yield Eclass(name, pjoin(self.eclass_dir, f'{name}.eclass'))
+ yield Eclass(name, pjoin(self.eclass_dir, f"{name}.eclass"))
@dataclass
class Profile:
"""Generic profile object."""
+
node: ProfileNode
files: set
@@ -196,8 +199,7 @@ class ProfilesRepoSource(RepoSource):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.profiles_dir = self.repo.config.profiles_base
- self.non_profile_dirs = {
- f'profiles/{x}' for x in ProfileAddon.non_profile_dirs}
+ self.non_profile_dirs = {f"profiles/{x}" for x in ProfileAddon.non_profile_dirs}
self._prefix_len = len(self.repo.location.rstrip(os.sep)) + 1
def itermatch(self, restrict, **kwargs):
@@ -221,7 +223,7 @@ class ProfilesRepoSource(RepoSource):
else:
# matching all profiles
for root, _dirs, files in os.walk(self.profiles_dir):
- if root[self._prefix_len:] not in self.non_profile_dirs:
+ if root[self._prefix_len :] not in self.non_profile_dirs:
yield Profile(ProfileNode(root), set(files))
@@ -234,17 +236,15 @@ class _RawRepo(UnconfiguredTree):
Deviates from parent in that no package name check is done.
"""
cppath = pjoin(self.base, catpkg[0], catpkg[1])
- pkg = f'{catpkg[-1]}-'
+ pkg = f"{catpkg[-1]}-"
lp = len(pkg)
extension = self.extension
ext_len = -len(extension)
try:
- return tuple(
- x[lp:ext_len] for x in listdir_files(cppath)
- if x[ext_len:] == extension)
+ return tuple(x[lp:ext_len] for x in listdir_files(cppath) if x[ext_len:] == extension)
except EnvironmentError as e:
path = pjoin(self.base, os.sep.join(catpkg))
- raise KeyError(f'failed fetching versions for package {path}: {e}') from e
+ raise KeyError(f"failed fetching versions for package {path}: {e}") from e
class RawRepoSource(RepoSource):
@@ -276,8 +276,14 @@ class UnmaskedRepoSource(RepoSource):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._filtered_repo = self.options.domain.filter_repo(
- self.repo, pkg_masks=(), pkg_unmasks=(), pkg_filters=(),
- pkg_accept_keywords=(), pkg_keywords=(), profile=False)
+ self.repo,
+ pkg_masks=(),
+ pkg_unmasks=(),
+ pkg_filters=(),
+ pkg_accept_keywords=(),
+ pkg_keywords=(),
+ profile=False,
+ )
def itermatch(self, restrict, **kwargs):
yield from self._filtered_repo.itermatch(restrict, **kwargs)
@@ -286,7 +292,7 @@ class UnmaskedRepoSource(RepoSource):
class _SourcePkg(WrappedPkg):
"""Package object with file contents injected as an attribute."""
- __slots__ = ('lines',)
+ __slots__ = ("lines",)
def __init__(self, pkg):
super().__init__(pkg)
@@ -323,8 +329,8 @@ class _ParsedEclass(ParseTree):
super().__init__(data)
self.eclass = eclass
- __getattr__ = klass.GetAttrProxy('eclass')
- __dir__ = klass.DirProxy('eclass')
+ __getattr__ = klass.GetAttrProxy("eclass")
+ __dir__ = klass.DirProxy("eclass")
class EclassParseRepoSource(EclassRepoSource):
@@ -332,7 +338,7 @@ class EclassParseRepoSource(EclassRepoSource):
def itermatch(self, restrict, **kwargs):
for eclass in super().itermatch(restrict, **kwargs):
- with open(eclass.path, 'rb') as f:
+ with open(eclass.path, "rb") as f:
data = f.read()
yield _ParsedEclass(data, eclass=eclass)
@@ -364,14 +370,14 @@ class PackageRepoSource(_CombinedSource):
"""Ebuild repository source yielding lists of versioned packages per package."""
scope = base.package_scope
- keyfunc = attrgetter('key')
+ keyfunc = attrgetter("key")
class CategoryRepoSource(_CombinedSource):
"""Ebuild repository source yielding lists of versioned packages per category."""
scope = base.category_scope
- keyfunc = attrgetter('category')
+ keyfunc = attrgetter("category")
class RepositoryRepoSource(RepoSource):
@@ -401,13 +407,13 @@ class _FilteredSource(RawRepoSource):
class UnversionedSource(_FilteredSource):
"""Source yielding unversioned atoms from matching packages."""
- keyfunc = attrgetter('unversioned_atom')
+ keyfunc = attrgetter("unversioned_atom")
class VersionedSource(_FilteredSource):
"""Source yielding versioned atoms from matching packages."""
- keyfunc = attrgetter('versioned_atom')
+ keyfunc = attrgetter("versioned_atom")
def init_source(source, options, addons_map=None):
@@ -417,8 +423,8 @@ def init_source(source, options, addons_map=None):
cls, args, kwargs = source
kwargs = dict(kwargs)
# initialize wrapped source
- if 'source' in kwargs:
- kwargs['source'] = init_source(kwargs['source'], options, addons_map)
+ if "source" in kwargs:
+ kwargs["source"] = init_source(kwargs["source"], options, addons_map)
else:
cls, args = source
kwargs = {}
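
Note: `LatestVersionRepoSource.itermatch` above leans on two details: the
wrapped `itermatch` yields versions in sorted order, and a dict keyed on
`pkg.live` keeps only the last (newest) entry per key, i.e. the newest VCS and
newest non-VCS version in each slot. An equivalent sketch, assuming input
objects with `slotted_atom` and `live` attributes:

from itertools import groupby

def latest_per_slot(pkgs):
    """Sketch: keep the newest non-live and newest live pkg for each slot."""
    for _slot, group in groupby(pkgs, key=lambda pkg: pkg.slotted_atom):
        best_by_live = {}
        for pkg in group:  # later (newer) versions overwrite earlier ones
            best_by_live[pkg.live] = pkg
        yield from sorted(best_by_live.values())
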
diff --git a/src/pkgcheck/utils.py b/src/pkgcheck/utils.py
index 22e50824..da716568 100644
--- a/src/pkgcheck/utils.py
+++ b/src/pkgcheck/utils.py
@@ -30,7 +30,7 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-_control_chars = b'\n\r\t\f\b'
+_control_chars = b"\n\r\t\f\b"
_printable_ascii = _control_chars + bytes(range(32, 127))
_printable_high_ascii = bytes(range(127, 256))
@@ -51,7 +51,7 @@ def is_binary(path, blocksize=1024):
:returns: True if appears to be a binary, otherwise False.
"""
try:
- with open(path, 'rb') as f:
+ with open(path, "rb") as f:
byte_str = f.read(blocksize)
except IOError:
return False
@@ -75,9 +75,8 @@ def is_binary(path, blocksize=1024):
high_chars = byte_str.translate(None, _printable_high_ascii)
nontext_ratio2 = len(high_chars) / len(byte_str)
- is_likely_binary = (
- (nontext_ratio1 > 0.3 and nontext_ratio2 < 0.05) or
- (nontext_ratio1 > 0.8 and nontext_ratio2 > 0.8)
+ is_likely_binary = (nontext_ratio1 > 0.3 and nontext_ratio2 < 0.05) or (
+ nontext_ratio1 > 0.8 and nontext_ratio2 > 0.8
)
decodable = False
@@ -91,9 +90,9 @@ def is_binary(path, blocksize=1024):
# guess character encoding using chardet
detected_encoding = chardet.detect(byte_str)
- if detected_encoding['confidence'] > 0.8:
+ if detected_encoding["confidence"] > 0.8:
try:
- byte_str.decode(encoding=detected_encoding['encoding'])
+ byte_str.decode(encoding=detected_encoding["encoding"])
decodable = True
except (UnicodeDecodeError, LookupError):
pass
@@ -101,6 +100,6 @@ def is_binary(path, blocksize=1024):
# finally use all the checks to decide binary or text
if decodable:
return False
- if is_likely_binary or b'\x00' in byte_str:
+ if is_likely_binary or b"\x00" in byte_str:
return True
return False
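
Note: `is_binary` combines a few signals: the share of bytes outside printable
ASCII (`nontext_ratio1`), the share outside the high-ASCII range
(`nontext_ratio2`), a confident chardet decode that can veto the heuristic,
and a hard NUL-byte check. A sketch of just the ratio computation, using the
same tables as above:

_control_chars = b"\n\r\t\f\b"
_printable_ascii = _control_chars + bytes(range(32, 127))
_printable_high_ascii = bytes(range(127, 256))

def nontext_ratios(byte_str):
    """Sketch: the two ratios is_binary weighs against its thresholds."""
    low_chars = byte_str.translate(None, _printable_ascii)  # non-printables left
    high_chars = byte_str.translate(None, _printable_high_ascii)  # bytes < 127 left
    return len(low_chars) / len(byte_str), len(high_chars) / len(byte_str)

# e.g. nontext_ratios(b"hello\x00world") == (1/11, 1.0): neither threshold pair
# trips, but the explicit b"\x00" check still classifies the data as binary
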
diff --git a/testdata/repos/network/FetchablesUrlCheck/DeadUrl/responses.py b/testdata/repos/network/FetchablesUrlCheck/DeadUrl/responses.py
index 6c48dfab..8c23ea65 100644
--- a/testdata/repos/network/FetchablesUrlCheck/DeadUrl/responses.py
+++ b/testdata/repos/network/FetchablesUrlCheck/DeadUrl/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://github.com/pkgcore/pkgcheck/foo.tar.gz'
+r.reason = "Not Found"
+r.url = "https://github.com/pkgcore/pkgcheck/foo.tar.gz"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/FetchablesUrlCheck/HttpsUrlAvailable/responses.py b/testdata/repos/network/FetchablesUrlCheck/HttpsUrlAvailable/responses.py
index 68b0e2de..215aadfa 100644
--- a/testdata/repos/network/FetchablesUrlCheck/HttpsUrlAvailable/responses.py
+++ b/testdata/repos/network/FetchablesUrlCheck/HttpsUrlAvailable/responses.py
@@ -5,14 +5,14 @@ from requests.models import Response
# initial URL check
r = Response()
r.status_code = 200
-r.reason = 'OK'
-r.url = 'http://github.com/pkgcore/pkgcheck/foo.tar.gz'
+r.reason = "OK"
+r.url = "http://github.com/pkgcore/pkgcheck/foo.tar.gz"
r.raw = io.StringIO()
# now checking if https:// exists
https_r = Response()
https_r.status_code = 200
-https_r.reason = 'OK'
-https_r.url = 'https://github.com/pkgcore/pkgcheck/foo.tar.gz'
+https_r.reason = "OK"
+https_r.url = "https://github.com/pkgcore/pkgcheck/foo.tar.gz"
https_r.raw = io.StringIO()
responses = [r, https_r]
diff --git a/testdata/repos/network/FetchablesUrlCheck/RedirectedUrl/responses.py b/testdata/repos/network/FetchablesUrlCheck/RedirectedUrl/responses.py
index b582b6db..14333d48 100644
--- a/testdata/repos/network/FetchablesUrlCheck/RedirectedUrl/responses.py
+++ b/testdata/repos/network/FetchablesUrlCheck/RedirectedUrl/responses.py
@@ -4,15 +4,15 @@ from requests.models import Response
r_hist = Response()
r_hist.status_code = 301
-r_hist.reason = 'Moved Permanently'
-r_hist.url = 'https://github.com/pkgcore/pkgcheck/foo.tar.gz'
-r_hist.headers = {'location': 'https://github.com/pkgcore/pkgcheck/foo-moved.tar.gz'}
+r_hist.reason = "Moved Permanently"
+r_hist.url = "https://github.com/pkgcore/pkgcheck/foo.tar.gz"
+r_hist.headers = {"location": "https://github.com/pkgcore/pkgcheck/foo-moved.tar.gz"}
r_hist.raw = io.StringIO()
r = Response()
r.status_code = 200
-r.reason = 'OK'
-r.url = 'https://github.com/pkgcore/pkgcheck/foo.tar.gz'
+r.reason = "OK"
+r.url = "https://github.com/pkgcore/pkgcheck/foo.tar.gz"
r.history = [r_hist]
r.raw = io.StringIO()
diff --git a/testdata/repos/network/FetchablesUrlCheck/SSLCertificateError/responses.py b/testdata/repos/network/FetchablesUrlCheck/SSLCertificateError/responses.py
index 95ed6778..b9d30062 100644
--- a/testdata/repos/network/FetchablesUrlCheck/SSLCertificateError/responses.py
+++ b/testdata/repos/network/FetchablesUrlCheck/SSLCertificateError/responses.py
@@ -1,3 +1,3 @@
from requests.exceptions import SSLError
-responses = [SSLError('Certificate verification failed')]
+responses = [SSLError("Certificate verification failed")]
diff --git a/testdata/repos/network/HomepageUrlCheck/DeadUrl-connection-error/responses.py b/testdata/repos/network/HomepageUrlCheck/DeadUrl-connection-error/responses.py
index ad3fd857..0a78a313 100644
--- a/testdata/repos/network/HomepageUrlCheck/DeadUrl-connection-error/responses.py
+++ b/testdata/repos/network/HomepageUrlCheck/DeadUrl-connection-error/responses.py
@@ -1,3 +1,3 @@
from requests.exceptions import ConnectionError
-responses = [ConnectionError('connection failed')]
+responses = [ConnectionError("connection failed")]
diff --git a/testdata/repos/network/HomepageUrlCheck/DeadUrl/responses.py b/testdata/repos/network/HomepageUrlCheck/DeadUrl/responses.py
index e490c7ed..31ad363c 100644
--- a/testdata/repos/network/HomepageUrlCheck/DeadUrl/responses.py
+++ b/testdata/repos/network/HomepageUrlCheck/DeadUrl/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://github.com/pkgcore/pkgcheck'
+r.reason = "Not Found"
+r.url = "https://github.com/pkgcore/pkgcheck"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/HomepageUrlCheck/HttpsUrlAvailable/responses.py b/testdata/repos/network/HomepageUrlCheck/HttpsUrlAvailable/responses.py
index 2f6f43d4..a43f15d4 100644
--- a/testdata/repos/network/HomepageUrlCheck/HttpsUrlAvailable/responses.py
+++ b/testdata/repos/network/HomepageUrlCheck/HttpsUrlAvailable/responses.py
@@ -5,14 +5,14 @@ from requests.models import Response
# initial URL check
r = Response()
r.status_code = 200
-r.reason = 'OK'
-r.url = 'http://github.com/pkgcore/pkgcheck'
+r.reason = "OK"
+r.url = "http://github.com/pkgcore/pkgcheck"
r.raw = io.StringIO()
# now checking if https:// exists
https_r = Response()
https_r.status_code = 200
-https_r.reason = 'OK'
-https_r.url = 'https://github.com/pkgcore/pkgcheck'
+https_r.reason = "OK"
+https_r.url = "https://github.com/pkgcore/pkgcheck"
https_r.raw = io.StringIO()
responses = [r, https_r]
diff --git a/testdata/repos/network/HomepageUrlCheck/RedirectedUrl/responses.py b/testdata/repos/network/HomepageUrlCheck/RedirectedUrl/responses.py
index 71360581..384a2466 100644
--- a/testdata/repos/network/HomepageUrlCheck/RedirectedUrl/responses.py
+++ b/testdata/repos/network/HomepageUrlCheck/RedirectedUrl/responses.py
@@ -4,15 +4,15 @@ from requests.models import Response
r_hist = Response()
r_hist.status_code = 301
-r_hist.reason = 'Moved Permanently'
-r_hist.url = 'https://github.com/pkgcore/pkgcheck'
-r_hist.headers = {'location': 'https://github.com/pkgcore/pkgcheck-moved'}
+r_hist.reason = "Moved Permanently"
+r_hist.url = "https://github.com/pkgcore/pkgcheck"
+r_hist.headers = {"location": "https://github.com/pkgcore/pkgcheck-moved"}
r_hist.raw = io.StringIO()
r = Response()
r.status_code = 200
-r.reason = 'OK'
-r.url = 'https://github.com/pkgcore/pkgcheck'
+r.reason = "OK"
+r.url = "https://github.com/pkgcore/pkgcheck"
r.raw = io.StringIO()
r.history = [r_hist]
diff --git a/testdata/repos/network/HomepageUrlCheck/SSLCertificateError/responses.py b/testdata/repos/network/HomepageUrlCheck/SSLCertificateError/responses.py
index 95ed6778..b9d30062 100644
--- a/testdata/repos/network/HomepageUrlCheck/SSLCertificateError/responses.py
+++ b/testdata/repos/network/HomepageUrlCheck/SSLCertificateError/responses.py
@@ -1,3 +1,3 @@
from requests.exceptions import SSLError
-responses = [SSLError('Certificate verification failed')]
+responses = [SSLError("Certificate verification failed")]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-bitbucket/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-bitbucket/responses.py
index ba4f7cd1..35f8f6bd 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-bitbucket/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-bitbucket/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://bitbucket.org/pkgcore/pkgcheck'
+r.reason = "Not Found"
+r.url = "https://bitbucket.org/pkgcore/pkgcheck"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-cpan/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-cpan/responses.py
index 02e637f4..84c21ff3 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-cpan/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-cpan/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://metacpan.org/dist/PkgCore-PkgCheck'
+r.reason = "Not Found"
+r.url = "https://metacpan.org/dist/PkgCore-PkgCheck"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-cran/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-cran/responses.py
index 7e6bef3e..63ee8e0e 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-cran/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-cran/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://cran.r-project.org/web/packages/PkgCheck/'
+r.reason = "Not Found"
+r.url = "https://cran.r-project.org/web/packages/PkgCheck/"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-ctan/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-ctan/responses.py
index ff9b152e..6297edda 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-ctan/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-ctan/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://ctan.org/pkg/pkgcheck'
+r.reason = "Not Found"
+r.url = "https://ctan.org/pkg/pkgcheck"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-freedesktop-gitlab/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-freedesktop-gitlab/responses.py
index d32f1ee0..9193e4d7 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-freedesktop-gitlab/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-freedesktop-gitlab/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://gitlab.freedesktop.org/pkgcore/pkgcheck.git/'
+r.reason = "Not Found"
+r.url = "https://gitlab.freedesktop.org/pkgcore/pkgcheck.git/"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-gentoo/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-gentoo/responses.py
index 61271075..d9c007a0 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-gentoo/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-gentoo/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://gitweb.gentoo.org/proj/pkgcheck.git/'
+r.reason = "Not Found"
+r.url = "https://gitweb.gentoo.org/proj/pkgcheck.git/"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-gnome-gitlab/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-gnome-gitlab/responses.py
index 694e4834..416016ed 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-gnome-gitlab/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-gnome-gitlab/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://gitlab.gnome.org/pkgcore/pkgcheck.git/'
+r.reason = "Not Found"
+r.url = "https://gitlab.gnome.org/pkgcore/pkgcheck.git/"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-hackage/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-hackage/responses.py
index 34770180..e2627295 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-hackage/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-hackage/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://hackage.haskell.org/package/pkgcheck'
+r.reason = "Not Found"
+r.url = "https://hackage.haskell.org/package/pkgcheck"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-launchpad/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-launchpad/responses.py
index 92455e24..e4327013 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-launchpad/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-launchpad/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://launchpad.net/pkgcheck'
+r.reason = "Not Found"
+r.url = "https://launchpad.net/pkgcheck"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-osdn/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-osdn/responses.py
index 02bcf124..f5be015d 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-osdn/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-osdn/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://osdn.net/projects/pkgcore/pkgcheck/'
+r.reason = "Not Found"
+r.url = "https://osdn.net/projects/pkgcore/pkgcheck/"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-pecl/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-pecl/responses.py
index f9585adf..fca18be6 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-pecl/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-pecl/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://pecl.php.net/package/pkgcheck'
+r.reason = "Not Found"
+r.url = "https://pecl.php.net/package/pkgcheck"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-pypi/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-pypi/responses.py
index 1a13d51b..3a164368 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-pypi/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-pypi/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://pypi.org/project/pkgcheck/'
+r.reason = "Not Found"
+r.url = "https://pypi.org/project/pkgcheck/"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-rubygems/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-rubygems/responses.py
index dd70699f..473bd566 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-rubygems/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-rubygems/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://rubygems.org/gems/pkgcheck'
+r.reason = "Not Found"
+r.url = "https://rubygems.org/gems/pkgcheck"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah-nongnu/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah-nongnu/responses.py
index d5aeb788..f1776c9c 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah-nongnu/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah-nongnu/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://savannah.nongnu.org/projects/pkgcheck'
+r.reason = "Not Found"
+r.url = "https://savannah.nongnu.org/projects/pkgcheck"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah/responses.py
index fb20f23f..eb9a56d8 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-savannah/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://savannah.gnu.org/projects/pkgcheck'
+r.reason = "Not Found"
+r.url = "https://savannah.gnu.org/projects/pkgcheck"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourceforge/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourceforge/responses.py
index 53fe194e..719a5958 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourceforge/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourceforge/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://sourceforge.net/projects/pkgcheck/'
+r.reason = "Not Found"
+r.url = "https://sourceforge.net/projects/pkgcheck/"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourcehut/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourcehut/responses.py
index a4bd454a..e79f9625 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourcehut/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-sourcehut/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://sr.ht/~pkgcore/pkgcheck/'
+r.reason = "Not Found"
+r.url = "https://sr.ht/~pkgcore/pkgcheck/"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl-vim/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl-vim/responses.py
index 9e184839..8ecfaf6d 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl-vim/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl-vim/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://vim.org/scripts/script.php?script_id=12345'
+r.reason = "Not Found"
+r.url = "https://vim.org/scripts/script.php?script_id=12345"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/DeadUrl/responses.py b/testdata/repos/network/MetadataUrlCheck/DeadUrl/responses.py
index e490c7ed..31ad363c 100644
--- a/testdata/repos/network/MetadataUrlCheck/DeadUrl/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/DeadUrl/responses.py
@@ -4,7 +4,7 @@ from requests.models import Response
r = Response()
r.status_code = 404
-r.reason = 'Not Found'
-r.url = 'https://github.com/pkgcore/pkgcheck'
+r.reason = "Not Found"
+r.url = "https://github.com/pkgcore/pkgcheck"
r.raw = io.StringIO()
responses = [r]
diff --git a/testdata/repos/network/MetadataUrlCheck/HttpsUrlAvailable/responses.py b/testdata/repos/network/MetadataUrlCheck/HttpsUrlAvailable/responses.py
index dacb475f..2c079574 100644
--- a/testdata/repos/network/MetadataUrlCheck/HttpsUrlAvailable/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/HttpsUrlAvailable/responses.py
@@ -5,14 +5,14 @@ from requests.models import Response
# initial URL check
r = Response()
r.status_code = 200
-r.reason = 'OK'
-r.url = 'http://github.com/pkgcore/pkgcheck/issues'
+r.reason = "OK"
+r.url = "http://github.com/pkgcore/pkgcheck/issues"
r.raw = io.StringIO()
# now checking if https:// exists
https_r = Response()
https_r.status_code = 200
-https_r.reason = 'OK'
-https_r.url = 'https://github.com/pkgcore/pkgcheck/issues'
+https_r.reason = "OK"
+https_r.url = "https://github.com/pkgcore/pkgcheck/issues"
https_r.raw = io.StringIO()
responses = [r, https_r]
diff --git a/testdata/repos/network/MetadataUrlCheck/RedirectedUrl/responses.py b/testdata/repos/network/MetadataUrlCheck/RedirectedUrl/responses.py
index 7567a614..39e182b7 100644
--- a/testdata/repos/network/MetadataUrlCheck/RedirectedUrl/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/RedirectedUrl/responses.py
@@ -1,17 +1,18 @@
import io
from requests.models import Response
+
r_hist = Response()
r_hist.status_code = 301
-r_hist.reason = 'Moved Permanently'
-r_hist.url = 'https://github.com/pkgcore/pkgcheck'
-r_hist.headers = {'location': 'https://github.com/pkgcore/pkgcheck/'}
+r_hist.reason = "Moved Permanently"
+r_hist.url = "https://github.com/pkgcore/pkgcheck"
+r_hist.headers = {"location": "https://github.com/pkgcore/pkgcheck/"}
r_hist.raw = io.StringIO()
r = Response()
r.status_code = 301
-r.reason = 'OK'
-r.url = 'https://github.com/pkgcore/pkgcheck'
+r.reason = "OK"
+r.url = "https://github.com/pkgcore/pkgcheck"
r.raw = io.StringIO()
r.history = [r_hist]
diff --git a/testdata/repos/network/MetadataUrlCheck/SSLCertificateError/responses.py b/testdata/repos/network/MetadataUrlCheck/SSLCertificateError/responses.py
index 95ed6778..b9d30062 100644
--- a/testdata/repos/network/MetadataUrlCheck/SSLCertificateError/responses.py
+++ b/testdata/repos/network/MetadataUrlCheck/SSLCertificateError/responses.py
@@ -1,3 +1,3 @@
from requests.exceptions import SSLError
-responses = [SSLError('Certificate verification failed')]
+responses = [SSLError("Certificate verification failed")]
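
Note: every responses.py fixture in this testdata tree follows the same
recipe: hand-build `requests.models.Response` objects (status, reason, URL,
optional redirect `history`/`headers`, and an empty `raw` stream) or bare
requests exceptions, then export them as a `responses` list for the
network-check harness to replay. A hedged helper capturing that shape
(`fake_response` and the example URL are illustrative, not part of the
testdata):

import io
from requests.models import Response

def fake_response(status_code, reason, url, headers=None, history=()):
    """Sketch: build a canned Response the way these fixtures do."""
    r = Response()
    r.status_code = status_code
    r.reason = reason
    r.url = url
    if headers is not None:
        r.headers = headers
    r.history = list(history)
    r.raw = io.StringIO()  # empty body stream, as in the fixtures
    return r

responses = [fake_response(404, "Not Found", "https://example.com/pkg")]
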
diff --git a/tests/addons/test_addons.py b/tests/addons/test_addons.py
index 32226927..87c59359 100644
--- a/tests/addons/test_addons.py
+++ b/tests/addons/test_addons.py
@@ -11,70 +11,68 @@ from ..misc import FakePkg, FakeProfile, Profile
class TestArchesAddon:
-
@pytest.fixture(autouse=True)
def _setup(self, tool, repo):
self.tool = tool
self.repo = repo
- self.args = ['scan', '--repo', repo.location]
+ self.args = ["scan", "--repo", repo.location]
def test_empty_default(self):
options, _ = self.tool.parse_args(self.args)
assert options.arches == frozenset()
def test_repo_default(self):
- with open(pjoin(self.repo.location, 'profiles', 'arch.list'), 'w') as f:
+ with open(pjoin(self.repo.location, "profiles", "arch.list"), "w") as f:
f.write("arm64\namd64\n")
options, _ = self.tool.parse_args(self.args)
- assert options.arches == frozenset(['amd64', 'arm64'])
+ assert options.arches == frozenset(["amd64", "arm64"])
def test_enabled(self):
data = (
- ('x86', ['x86']),
- ('ppc', ['ppc']),
- ('x86,ppc', ['ppc', 'x86']),
+ ("x86", ["x86"]),
+ ("ppc", ["ppc"]),
+ ("x86,ppc", ["ppc", "x86"]),
)
for arg, expected in data:
- for opt in ('-a', '--arches'):
- options, _ = self.tool.parse_args(self.args + [f'{opt}={arg}'])
+ for opt in ("-a", "--arches"):
+ options, _ = self.tool.parse_args(self.args + [f"{opt}={arg}"])
assert options.arches == frozenset(expected)
def test_disabled(self):
# set repo defaults
- with open(pjoin(self.repo.location, 'profiles', 'arch.list'), 'w') as f:
+ with open(pjoin(self.repo.location, "profiles", "arch.list"), "w") as f:
f.write("arm64\namd64\narm64-linux\n")
data = (
- ('-x86', ['amd64', 'arm64']),
- ('-x86,-amd64', ['arm64']),
+ ("-x86", ["amd64", "arm64"]),
+ ("-x86,-amd64", ["arm64"]),
)
for arg, expected in data:
- for opt in ('-a', '--arches'):
- options, _ = self.tool.parse_args(self.args + [f'{opt}={arg}'])
+ for opt in ("-a", "--arches"):
+ options, _ = self.tool.parse_args(self.args + [f"{opt}={arg}"])
assert options.arches == frozenset(expected)
def test_unknown(self, capsys):
# unknown arch checking requires repo defaults
- with open(pjoin(self.repo.location, 'profiles', 'arch.list'), 'w') as f:
+ with open(pjoin(self.repo.location, "profiles", "arch.list"), "w") as f:
f.write("arm64\namd64\narm64-linux\n")
- for arg in ('foo', 'bar'):
- for opt in ('-a', '--arches'):
+ for arg in ("foo", "bar"):
+ for opt in ("-a", "--arches"):
with pytest.raises(SystemExit) as excinfo:
- self.tool.parse_args(self.args + [f'{opt}={arg}'])
+ self.tool.parse_args(self.args + [f"{opt}={arg}"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
assert not out
- assert f'unknown arch: {arg}' in err
+ assert f"unknown arch: {arg}" in err
class TestStableArchesAddon:
-
@pytest.fixture(autouse=True)
def _setup(self, tool, repo):
self.tool = tool
self.repo = repo
- self.args = ['scan', '--repo', repo.location]
+ self.args = ["scan", "--repo", repo.location]
def test_empty_default(self):
options, _ = self.tool.parse_args(self.args)
@@ -82,40 +80,56 @@ class TestStableArchesAddon:
def test_repo_arches_default(self):
"""Use GLEP 72 arches.desc file if it exists."""
- with open(pjoin(self.repo.location, 'profiles', 'arch.list'), 'w') as f:
+ with open(pjoin(self.repo.location, "profiles", "arch.list"), "w") as f:
f.write("arm64\namd64\nriscv\n")
- with open(pjoin(self.repo.location, 'profiles', 'arches.desc'), 'w') as f:
+ with open(pjoin(self.repo.location, "profiles", "arches.desc"), "w") as f:
f.write("arm64 stable\namd64 stable\nriscv testing")
options, _ = self.tool.parse_args(self.args)
- assert options.stable_arches == {'amd64', 'arm64'}
+ assert options.stable_arches == {"amd64", "arm64"}
def test_repo_profiles_default(self):
"""Otherwise arch stability is determined from the profiles.desc file."""
- with open(pjoin(self.repo.location, 'profiles', 'arch.list'), 'w') as f:
+ with open(pjoin(self.repo.location, "profiles", "arch.list"), "w") as f:
f.write("arm64\namd64\nriscv\n")
- os.mkdir(pjoin(self.repo.location, 'profiles', 'default'))
- with open(pjoin(self.repo.location, 'profiles', 'profiles.desc'), 'w') as f:
+ os.mkdir(pjoin(self.repo.location, "profiles", "default"))
+ with open(pjoin(self.repo.location, "profiles", "profiles.desc"), "w") as f:
f.write("arm64 default dev\namd64 default stable\nriscv default exp")
options, _ = self.tool.parse_args(self.args)
- assert options.stable_arches == {'amd64'}
+ assert options.stable_arches == {"amd64"}
def test_selected_arches(self):
- for opt in ('-a', '--arches'):
- options, _ = self.tool.parse_args(self.args + [f'{opt}=amd64'])
- assert options.stable_arches == {'amd64'}
+ for opt in ("-a", "--arches"):
+ options, _ = self.tool.parse_args(self.args + [f"{opt}=amd64"])
+ assert options.stable_arches == {"amd64"}
class Test_profile_data:
-
- def assertResults(self, profile, known_flags, required_immutable,
- required_forced, cpv="dev-util/diffball-0.1",
- key_override=None, data_override=None):
+ def assertResults(
+ self,
+ profile,
+ known_flags,
+ required_immutable,
+ required_forced,
+ cpv="dev-util/diffball-0.1",
+ key_override=None,
+ data_override=None,
+ ):
profile_data = addons.profiles.ProfileData(
- "test-repo", "test-profile", key_override,
+ "test-repo",
+ "test-profile",
+ key_override,
profile.provides_repo,
- packages.AlwaysFalse, profile.iuse_effective,
- profile.use, profile.pkg_use, profile.masked_use, profile.forced_use, {}, set(),
- 'stable', False)
+ packages.AlwaysFalse,
+ profile.iuse_effective,
+ profile.use,
+ profile.pkg_use,
+ profile.masked_use,
+ profile.forced_use,
+ {},
+ set(),
+ "stable",
+ False,
+ )
pkg = FakePkg(cpv, data=data_override)
immutable, enabled = profile_data.identify_use(pkg, set(known_flags))
assert immutable == set(required_immutable)
@@ -140,15 +154,15 @@ class Test_profile_data:
self.assertResults(profile, ["lib", "bar"], ["lib"], ["lib"])
profile = FakeProfile(
- forced_use={"dev-util/diffball": ["lib"]},
- masked_use={"dev-util/diffball": ["lib"]})
+ forced_use={"dev-util/diffball": ["lib"]}, masked_use={"dev-util/diffball": ["lib"]}
+ )
self.assertResults(profile, [], [], [])
# check that masked use wins out over forced.
self.assertResults(profile, ["lib", "bar"], ["lib"], [])
profile = FakeProfile(
- forced_use={"dev-util/diffball": ["lib"]},
- masked_use={"dev-util/diffball": ["lib"]})
+ forced_use={"dev-util/diffball": ["lib"]}, masked_use={"dev-util/diffball": ["lib"]}
+ )
self.assertResults(profile, [], [], [])
# check that masked use wins out over forced.
self.assertResults(profile, ["lib", "bar"], ["lib"], [])
@@ -162,7 +176,7 @@ class TestProfileAddon:
def _setup(self, tool, repo, tmp_path):
self.tool = tool
self.repo = repo
- self.args = ['scan', '--cache-dir', str(tmp_path), '--repo', repo.location]
+ self.args = ["scan", "--cache-dir", str(tmp_path), "--repo", repo.location]
def assertProfiles(self, addon, key, *profile_names):
actual = sorted(x.name for y in addon.profile_evaluate_dict[key] for x in y)
@@ -171,34 +185,34 @@ class TestProfileAddon:
def test_defaults(self):
profiles = [
- Profile('profile1', 'x86'),
- Profile('profile1/2', 'x86'),
+ Profile("profile1", "x86"),
+ Profile("profile1/2", "x86"),
]
self.repo.create_profiles(profiles)
- self.repo.arches.add('x86')
+ self.repo.arches.add("x86")
options, _ = self.tool.parse_args(self.args)
addon = addons.init_addon(self.addon_kls, options)
- assert sorted(addon.profile_evaluate_dict) == ['x86', '~x86']
- self.assertProfiles(addon, 'x86', 'profile1', 'profile1/2')
+ assert sorted(addon.profile_evaluate_dict) == ["x86", "~x86"]
+ self.assertProfiles(addon, "x86", "profile1", "profile1/2")
def test_profiles_base(self):
profiles = [
- Profile('default-linux/dep', 'x86', deprecated=True),
- Profile('default-linux', 'x86', 'dev'),
- Profile('default-linux/x86', 'x86'),
+ Profile("default-linux/dep", "x86", deprecated=True),
+ Profile("default-linux", "x86", "dev"),
+ Profile("default-linux/x86", "x86"),
]
self.repo.create_profiles(profiles)
- self.repo.arches.add('x86')
+ self.repo.arches.add("x86")
options, _ = self.tool.parse_args(self.args)
addon = addons.init_addon(self.addon_kls, options)
- self.assertProfiles(addon, 'x86', 'default-linux', 'default-linux/x86')
+ self.assertProfiles(addon, "x86", "default-linux", "default-linux/x86")
def test_nonexistent(self, capsys):
- profile = Profile('x86', 'x86')
+ profile = Profile("x86", "x86")
self.repo.create_profiles([profile])
- for profiles in ('bar', '-bar', 'x86,bar', 'bar,x86', 'x86,-bar'):
+ for profiles in ("bar", "-bar", "x86,bar", "bar,x86", "x86,-bar"):
with pytest.raises(SystemExit) as excinfo:
- self.tool.parse_args(self.args + [f'--profiles={profiles}'])
+ self.tool.parse_args(self.args + [f"--profiles={profiles}"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
assert not out
@@ -206,149 +220,150 @@ class TestProfileAddon:
def test_profiles_args(self):
profiles = [
- Profile('default-linux/dep', 'x86', deprecated=True),
- Profile('default-linux/dev', 'x86', 'dev'),
- Profile('default-linux/exp', 'x86', 'exp'),
- Profile('default-linux', 'x86'),
+ Profile("default-linux/dep", "x86", deprecated=True),
+ Profile("default-linux/dev", "x86", "dev"),
+ Profile("default-linux/exp", "x86", "exp"),
+ Profile("default-linux", "x86"),
]
self.repo.create_profiles(profiles)
- self.repo.arches.add('x86')
+ self.repo.arches.add("x86")
# enable stable
- options, _ = self.tool.parse_args(self.args + ['--profiles=stable'])
+ options, _ = self.tool.parse_args(self.args + ["--profiles=stable"])
addon = addons.init_addon(self.addon_kls, options)
- self.assertProfiles(addon, 'x86', 'default-linux')
+ self.assertProfiles(addon, "x86", "default-linux")
# disable stable
- options, _ = self.tool.parse_args(self.args + ['--profiles=-stable'])
+ options, _ = self.tool.parse_args(self.args + ["--profiles=-stable"])
addon = addons.init_addon(self.addon_kls, options)
- self.assertProfiles(addon, 'x86', 'default-linux/dev', 'default-linux/exp')
+ self.assertProfiles(addon, "x86", "default-linux/dev", "default-linux/exp")
# enable dev
- options, _ = self.tool.parse_args(self.args + ['--profiles=dev'])
+ options, _ = self.tool.parse_args(self.args + ["--profiles=dev"])
addon = addons.init_addon(self.addon_kls, options)
- self.assertProfiles(addon, 'x86', 'default-linux/dev')
+ self.assertProfiles(addon, "x86", "default-linux/dev")
# disable dev
- options, _ = self.tool.parse_args(self.args + ['--profiles=-dev'])
+ options, _ = self.tool.parse_args(self.args + ["--profiles=-dev"])
addon = addons.init_addon(self.addon_kls, options)
- self.assertProfiles(addon, 'x86', 'default-linux', 'default-linux/exp')
+ self.assertProfiles(addon, "x86", "default-linux", "default-linux/exp")
# enable exp
- options, _ = self.tool.parse_args(self.args + ['--profiles=exp'])
+ options, _ = self.tool.parse_args(self.args + ["--profiles=exp"])
addon = addons.init_addon(self.addon_kls, options)
- self.assertProfiles(addon, 'x86', 'default-linux/exp')
+ self.assertProfiles(addon, "x86", "default-linux/exp")
# disable exp
- options, _ = self.tool.parse_args(self.args + ['--profiles=-exp'])
+ options, _ = self.tool.parse_args(self.args + ["--profiles=-exp"])
addon = addons.init_addon(self.addon_kls, options)
- self.assertProfiles(addon, 'x86', 'default-linux', 'default-linux/dev')
+ self.assertProfiles(addon, "x86", "default-linux", "default-linux/dev")
# enable deprecated
- options, _ = self.tool.parse_args(self.args + ['--profiles=deprecated'])
+ options, _ = self.tool.parse_args(self.args + ["--profiles=deprecated"])
addon = addons.init_addon(self.addon_kls, options)
- self.assertProfiles(addon, 'x86', 'default-linux/dep')
+ self.assertProfiles(addon, "x86", "default-linux/dep")
# disable deprecated
- options, _ = self.tool.parse_args(self.args + ['--profiles=-deprecated'])
+ options, _ = self.tool.parse_args(self.args + ["--profiles=-deprecated"])
addon = addons.init_addon(self.addon_kls, options)
- self.assertProfiles(addon, 'x86', 'default-linux', 'default-linux/dev', 'default-linux/exp')
+ self.assertProfiles(addon, "x86", "default-linux", "default-linux/dev", "default-linux/exp")
# enable specific profile
- options, _ = self.tool.parse_args(self.args + ['--profiles', 'default-linux/exp'])
+ options, _ = self.tool.parse_args(self.args + ["--profiles", "default-linux/exp"])
addon = addons.init_addon(self.addon_kls, options)
- self.assertProfiles(addon, 'x86', 'default-linux/exp')
+ self.assertProfiles(addon, "x86", "default-linux/exp")
# disable specific profile
- options, _ = self.tool.parse_args(self.args + ['--profiles=-default-linux'])
+ options, _ = self.tool.parse_args(self.args + ["--profiles=-default-linux"])
addon = addons.init_addon(self.addon_kls, options)
- self.assertProfiles(addon, 'x86', 'default-linux/dev', 'default-linux/exp')
+ self.assertProfiles(addon, "x86", "default-linux/dev", "default-linux/exp")
def test_auto_enable_exp_profiles(self):
profiles = [
- Profile('default-linux/dep', 'x86', deprecated=True),
- Profile('default-linux/dev', 'x86', 'dev'),
- Profile('default-linux/exp', 'x86', 'exp'),
- Profile('default-linux/amd64', 'amd64', 'exp'),
- Profile('default-linux', 'x86'),
+ Profile("default-linux/dep", "x86", deprecated=True),
+ Profile("default-linux/dev", "x86", "dev"),
+ Profile("default-linux/exp", "x86", "exp"),
+ Profile("default-linux/amd64", "amd64", "exp"),
+ Profile("default-linux", "x86"),
]
self.repo.create_profiles(profiles)
- self.repo.arches.update(['amd64', 'x86'])
+ self.repo.arches.update(["amd64", "x86"])
# experimental profiles aren't enabled by default
options, _ = self.tool.parse_args(self.args)
addon = addons.init_addon(self.addon_kls, options)
- self.assertProfiles(addon, 'x86', 'default-linux', 'default-linux/dev')
+ self.assertProfiles(addon, "x86", "default-linux", "default-linux/dev")
# but are auto-enabled when an arch with only exp profiles is selected
- options, _ = self.tool.parse_args(self.args + ['-a', 'amd64'])
+ options, _ = self.tool.parse_args(self.args + ["-a", "amd64"])
addon = addons.init_addon(self.addon_kls, options)
- self.assertProfiles(addon, 'amd64', 'default-linux/amd64')
+ self.assertProfiles(addon, "amd64", "default-linux/amd64")
# or a result keyword is selected that requires them
- options, _ = self.tool.parse_args(self.args + ['-k', 'NonsolvableDepsInExp'])
+ options, _ = self.tool.parse_args(self.args + ["-k", "NonsolvableDepsInExp"])
addon = addons.init_addon(self.addon_kls, options)
- self.assertProfiles(addon, 'amd64', 'default-linux/amd64')
- self.assertProfiles(addon, 'x86', 'default-linux', 'default-linux/dev', 'default-linux/exp')
+ self.assertProfiles(addon, "amd64", "default-linux/amd64")
+ self.assertProfiles(addon, "x86", "default-linux", "default-linux/dev", "default-linux/exp")
def test_addon_dict(self):
"""ProfileAddon has methods that allow it to act like a dict of profile filters."""
profiles = [
- Profile('linux/x86', 'x86'),
- Profile('linux/ppc', 'ppc'),
+ Profile("linux/x86", "x86"),
+ Profile("linux/ppc", "ppc"),
]
self.repo.create_profiles(profiles)
- self.repo.arches.update(['x86', 'ppc'])
+ self.repo.arches.update(["x86", "ppc"])
options, _ = self.tool.parse_args(self.args)
addon = addons.init_addon(self.addon_kls, options)
assert len(addon) == 4
- assert set(x.name for x in addon) == {'linux/x86', 'linux/ppc'}
- assert len(addon['x86']) == 1
- assert [x.name for x in addon['~x86']] == ['linux/x86']
- assert addon.get('foo', ['foo']) == ['foo']
- assert addon.get('foo') is None
+ assert set(x.name for x in addon) == {"linux/x86", "linux/ppc"}
+ assert len(addon["x86"]) == 1
+ assert [x.name for x in addon["~x86"]] == ["linux/x86"]
+ assert addon.get("foo", ["foo"]) == ["foo"]
+ assert addon.get("foo") is None
def test_profile_collapsing(self):
profiles = [
- Profile('default-linux', 'x86'),
- Profile('default-linux/x86', 'x86'),
- Profile('default-linux/ppc', 'ppc'),
+ Profile("default-linux", "x86"),
+ Profile("default-linux/x86", "x86"),
+ Profile("default-linux/ppc", "ppc"),
]
self.repo.create_profiles(profiles)
- self.repo.arches.update(['x86', 'ppc'])
+ self.repo.arches.update(["x86", "ppc"])
options, _ = self.tool.parse_args(self.args)
addon = addons.init_addon(self.addon_kls, options)
# assert they're collapsed properly.
- self.assertProfiles(addon, 'x86', 'default-linux', 'default-linux/x86')
- assert len(addon.profile_evaluate_dict['x86']) == 1
- assert len(addon.profile_evaluate_dict['x86'][0]) == 2
- self.assertProfiles(addon, 'ppc', 'default-linux/ppc')
+ self.assertProfiles(addon, "x86", "default-linux", "default-linux/x86")
+ assert len(addon.profile_evaluate_dict["x86"]) == 1
+ assert len(addon.profile_evaluate_dict["x86"][0]) == 2
+ self.assertProfiles(addon, "ppc", "default-linux/ppc")
- groups = addon.identify_profiles(FakePkg("d-b/ab-1", data={'KEYWORDS': 'x86'}))
+ groups = addon.identify_profiles(FakePkg("d-b/ab-1", data={"KEYWORDS": "x86"}))
assert len(groups) == 2, f"checking for profile collapsing: {groups!r}"
assert len(groups[0]) == 2, f"checking for proper # of profiles: {groups[0]!r}"
- assert sorted(x.name for x in groups[0]) == sorted(['default-linux', 'default-linux/x86'])
+ assert sorted(x.name for x in groups[0]) == sorted(["default-linux", "default-linux/x86"])
# check arch vs ~arch runs (i.e. arch KEYWORDS should also trigger ~arch runs)
- groups = addon.identify_profiles(FakePkg("d-b/ab-1", data={'KEYWORDS': '~x86'}))
+ groups = addon.identify_profiles(FakePkg("d-b/ab-1", data={"KEYWORDS": "~x86"}))
assert len(groups) == 1, f"checking for profile collapsing: {groups!r}"
assert len(groups[0]) == 2, f"checking for proper # of profiles: {groups[0]!r}"
- assert sorted(x.name for x in groups[0]) == sorted(['default-linux', 'default-linux/x86'])
+ assert sorted(x.name for x in groups[0]) == sorted(["default-linux", "default-linux/x86"])
# check keyword collapsing
- groups = addon.identify_profiles(FakePkg("d-b/ab-2", data={'KEYWORDS': 'ppc'}))
+ groups = addon.identify_profiles(FakePkg("d-b/ab-2", data={"KEYWORDS": "ppc"}))
assert len(groups) == 2, f"checking for profile collapsing: {groups!r}"
assert len(groups[0]) == 1, f"checking for proper # of profiles: {groups[0]!r}"
- assert groups[0][0].name == 'default-linux/ppc'
+ assert groups[0][0].name == "default-linux/ppc"
- groups = addon.identify_profiles(FakePkg("d-b/ab-2", data={'KEYWORDS': 'foon'}))
+ groups = addon.identify_profiles(FakePkg("d-b/ab-2", data={"KEYWORDS": "foon"}))
assert len(groups) == 0, f"checking for profile collapsing: {groups!r}"
try:
import requests
+
net_skip = False
except ImportError:
net_skip = True
@@ -356,33 +371,33 @@ except ImportError:
@pytest.mark.skipif(net_skip, reason="requests isn't installed")
class TestNetAddon:
-
def test_failed_import(self, tool):
- options, _ = tool.parse_args(['scan'])
+ options, _ = tool.parse_args(["scan"])
addon = addons.NetAddon(options)
- with patch('pkgcheck.addons.net.Session') as net:
- net.side_effect = ImportError('import failed', name='foo')
+ with patch("pkgcheck.addons.net.Session") as net:
+ net.side_effect = ImportError("import failed", name="foo")
with pytest.raises(ImportError):
addon.session
            # failing to import requests specifically raises a nicer user-facing exception
- net.side_effect = ImportError('import failed', name='requests')
- with pytest.raises(PkgcheckUserException, match='network checks require requests'):
+ net.side_effect = ImportError("import failed", name="requests")
+ with pytest.raises(PkgcheckUserException, match="network checks require requests"):
addon.session
def test_custom_timeout(self, tool):
- options, _ = tool.parse_args(['scan', '--timeout', '10'])
+ options, _ = tool.parse_args(["scan", "--timeout", "10"])
addon = addons.NetAddon(options)
assert isinstance(addon.session, requests.Session)
assert addon.session.timeout == 10
# a timeout of zero disables timeouts entirely
- options, _ = tool.parse_args(['scan', '--timeout', '0'])
+ options, _ = tool.parse_args(["scan", "--timeout", "0"])
addon = addons.NetAddon(options)
assert addon.session.timeout is None
def test_args(self, tool):
options, _ = tool.parse_args(
- ['scan', '--timeout', '10', '--tasks', '50', '--user-agent', 'firefox'])
+ ["scan", "--timeout", "10", "--tasks", "50", "--user-agent", "firefox"]
+ )
addon = addons.NetAddon(options)
- with patch('pkgcheck.addons.net.Session') as net:
+ with patch("pkgcheck.addons.net.Session") as net:
addon.session
- net.assert_called_once_with(concurrent=50, timeout=10, user_agent='firefox')
+ net.assert_called_once_with(concurrent=50, timeout=10, user_agent="firefox")
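
The hunks above show black's two most visible rewrites: string quote normalization and wrapping of over-long calls. A minimal sketch of reproducing one such rewrite through black's Python API, assuming line-length = 100 (which matches the wrapped asserts in this commit); the snippet itself is illustrative:

    import black

    src = "options, _ = self.tool.parse_args(self.args + ['--profiles=-dev'])\n"
    # black normalizes the single-quoted strings to double quotes
    print(black.format_str(src, mode=black.Mode(line_length=100)), end="")
    # options, _ = self.tool.parse_args(self.args + ["--profiles=-dev"])
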
diff --git a/tests/addons/test_eclass.py b/tests/addons/test_eclass.py
index 4e1b26db..c6c045b2 100644
--- a/tests/addons/test_eclass.py
+++ b/tests/addons/test_eclass.py
@@ -13,26 +13,29 @@ from snakeoil.osutils import pjoin
class TestEclass:
-
@pytest.fixture(autouse=True)
def _setup(self, tmp_path):
- path = str(tmp_path / 'foo.eclass')
- with open(path, 'w') as f:
- f.write(textwrap.dedent("""\
- # eclass header
- foo () { :; }
- """))
- self.eclass1 = Eclass('foo', path)
- path = str(tmp_path / 'bar.eclass')
- self.eclass2 = Eclass('bar', path)
+ path = str(tmp_path / "foo.eclass")
+ with open(path, "w") as f:
+ f.write(
+ textwrap.dedent(
+ """\
+ # eclass header
+ foo () { :; }
+ """
+ )
+ )
+ self.eclass1 = Eclass("foo", path)
+ path = str(tmp_path / "bar.eclass")
+ self.eclass2 = Eclass("bar", path)
def test_lines(self):
- assert self.eclass1.lines == ('# eclass header\n', 'foo () { :; }\n')
+ assert self.eclass1.lines == ("# eclass header\n", "foo () { :; }\n")
assert self.eclass2.lines == ()
def test_lt(self):
assert self.eclass2 < self.eclass1
- assert self.eclass1 < 'zoo.eclass'
+ assert self.eclass1 < "zoo.eclass"
def test_hash(self):
eclasses = {self.eclass1, self.eclass2}
@@ -46,23 +49,22 @@ class TestEclass:
class TestEclassAddon:
-
@pytest.fixture(autouse=True)
def _setup(self, tool, tmp_path, repo):
self.repo = repo
self.cache_dir = str(tmp_path)
- self.eclass_dir = pjoin(repo.location, 'eclass')
+ self.eclass_dir = pjoin(repo.location, "eclass")
- args = ['scan', '--cache-dir', self.cache_dir, '--repo', repo.location]
+ args = ["scan", "--cache-dir", self.cache_dir, "--repo", repo.location]
options, _ = tool.parse_args(args)
self.addon = EclassAddon(options)
self.cache_file = self.addon.cache_file(self.repo)
def test_cache_disabled(self, tool):
- args = ['scan', '--cache', 'no', '--repo', self.repo.location]
+ args = ["scan", "--cache", "no", "--repo", self.repo.location]
options, _ = tool.parse_args(args)
- with pytest.raises(CacheDisabled, match='eclass cache support required'):
+ with pytest.raises(CacheDisabled, match="eclass cache support required"):
init_addon(EclassAddon, options)
def test_no_eclasses(self):
@@ -73,18 +75,18 @@ class TestEclassAddon:
def test_eclasses(self):
# non-eclass files are ignored
- for f in ('foo.eclass', 'bar'):
+ for f in ("foo.eclass", "bar"):
touch(pjoin(self.eclass_dir, f))
self.addon.update_cache()
- assert list(self.addon.eclasses) == ['foo']
+ assert list(self.addon.eclasses) == ["foo"]
assert not self.addon.deprecated
def test_cache_load(self):
- touch(pjoin(self.eclass_dir, 'foo.eclass'))
+ touch(pjoin(self.eclass_dir, "foo.eclass"))
self.addon.update_cache()
- assert list(self.addon.eclasses) == ['foo']
+ assert list(self.addon.eclasses) == ["foo"]
- with patch('pkgcheck.addons.caches.CachedAddon.save_cache') as save_cache:
+ with patch("pkgcheck.addons.caches.CachedAddon.save_cache") as save_cache:
self.addon.update_cache()
# verify the cache was loaded and not regenerated
save_cache.assert_not_called()
@@ -93,9 +95,9 @@ class TestEclassAddon:
save_cache.assert_called_once()
def test_outdated_cache(self):
- touch(pjoin(self.eclass_dir, 'foo.eclass'))
+ touch(pjoin(self.eclass_dir, "foo.eclass"))
self.addon.update_cache()
- assert list(self.addon.eclasses) == ['foo']
+ assert list(self.addon.eclasses) == ["foo"]
# increment cache version and dump cache
cache = self.addon.load_cache(self.cache_file)
@@ -103,68 +105,72 @@ class TestEclassAddon:
self.addon.save_cache(cache, self.cache_file)
# verify cache load causes regen
- with patch('pkgcheck.addons.caches.CachedAddon.save_cache') as save_cache:
+ with patch("pkgcheck.addons.caches.CachedAddon.save_cache") as save_cache:
self.addon.update_cache()
save_cache.assert_called_once()
def test_eclass_changes(self):
"""The cache stores eclass mtimes and regenerates entries if they differ."""
- eclass_path = pjoin(self.eclass_dir, 'foo.eclass')
+ eclass_path = pjoin(self.eclass_dir, "foo.eclass")
touch(eclass_path)
self.addon.update_cache()
- assert list(self.addon.eclasses) == ['foo']
+ assert list(self.addon.eclasses) == ["foo"]
sleep(1)
- with open(eclass_path, 'w') as f:
- f.write('# changed eclass\n')
- with patch('pkgcheck.addons.caches.CachedAddon.save_cache') as save_cache:
+ with open(eclass_path, "w") as f:
+ f.write("# changed eclass\n")
+ with patch("pkgcheck.addons.caches.CachedAddon.save_cache") as save_cache:
self.addon.update_cache()
save_cache.assert_called_once()
def test_error_loading_cache(self):
- touch(pjoin(self.eclass_dir, 'foo.eclass'))
+ touch(pjoin(self.eclass_dir, "foo.eclass"))
self.addon.update_cache()
- assert list(self.addon.eclasses) == ['foo']
+ assert list(self.addon.eclasses) == ["foo"]
- with patch('pkgcheck.addons.caches.pickle.load') as pickle_load:
+ with patch("pkgcheck.addons.caches.pickle.load") as pickle_load:
# catastrophic errors are raised
- pickle_load.side_effect = MemoryError('unpickling failed')
- with pytest.raises(MemoryError, match='unpickling failed'):
+ pickle_load.side_effect = MemoryError("unpickling failed")
+ with pytest.raises(MemoryError, match="unpickling failed"):
self.addon.update_cache()
# but various load failure exceptions cause cache regen
- pickle_load.side_effect = Exception('unpickling failed')
- with patch('pkgcheck.addons.caches.CachedAddon.save_cache') as save_cache:
+ pickle_load.side_effect = Exception("unpickling failed")
+ with patch("pkgcheck.addons.caches.CachedAddon.save_cache") as save_cache:
self.addon.update_cache()
save_cache.assert_called_once()
def test_error_dumping_cache(self):
- touch(pjoin(self.eclass_dir, 'foo.eclass'))
+ touch(pjoin(self.eclass_dir, "foo.eclass"))
# verify IO related dump failures are raised
- with patch('pkgcheck.addons.caches.pickle.dump') as pickle_dump:
- pickle_dump.side_effect = IOError('unpickling failed')
- with pytest.raises(PkgcheckUserException, match='failed dumping eclass cache'):
+ with patch("pkgcheck.addons.caches.pickle.dump") as pickle_dump:
+ pickle_dump.side_effect = IOError("unpickling failed")
+ with pytest.raises(PkgcheckUserException, match="failed dumping eclass cache"):
self.addon.update_cache()
def test_eclass_removal(self):
- for name in ('foo', 'bar'):
- touch(pjoin(self.eclass_dir, f'{name}.eclass'))
+ for name in ("foo", "bar"):
+ touch(pjoin(self.eclass_dir, f"{name}.eclass"))
self.addon.update_cache()
- assert sorted(self.addon.eclasses) == ['bar', 'foo']
- os.unlink(pjoin(self.eclass_dir, 'bar.eclass'))
+ assert sorted(self.addon.eclasses) == ["bar", "foo"]
+ os.unlink(pjoin(self.eclass_dir, "bar.eclass"))
self.addon.update_cache()
- assert list(self.addon.eclasses) == ['foo']
+ assert list(self.addon.eclasses) == ["foo"]
def test_deprecated(self):
- with open(pjoin(self.eclass_dir, 'foo.eclass'), 'w') as f:
- f.write(textwrap.dedent("""
- # @ECLASS: foo.eclass
- # @MAINTAINER:
- # Random Person <random.person@random.email>
- # @AUTHOR:
- # Random Person <random.person@random.email>
- # @BLURB: Example deprecated eclass with replacement.
- # @DEPRECATED: foo2
- """))
+ with open(pjoin(self.eclass_dir, "foo.eclass"), "w") as f:
+ f.write(
+ textwrap.dedent(
+ """\
+ # @ECLASS: foo.eclass
+ # @MAINTAINER:
+ # Random Person <random.person@random.email>
+ # @AUTHOR:
+ # Random Person <random.person@random.email>
+ # @BLURB: Example deprecated eclass with replacement.
+ # @DEPRECATED: foo2
+ """
+ )
+ )
self.addon.update_cache()
- assert list(self.addon.eclasses) == ['foo']
- assert self.addon.deprecated == {'foo': 'foo2'}
+ assert list(self.addon.eclasses) == ["foo"]
+ assert self.addon.deprecated == {"foo": "foo2"}
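
The heredoc rewrites above hinge on the backslash after the opening triple quote. A standard-library sketch of why it matters, reusing the eclass content from the test above:

    import textwrap

    eclass = textwrap.dedent(
        """\
        # eclass header
        foo () { :; }
        """
    )
    # the trailing backslash suppresses the leading newline, so the written
    # file starts directly at the first comment line
    assert eclass.splitlines()[0] == "# eclass header"
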
diff --git a/tests/addons/test_git.py b/tests/addons/test_git.py
index cf39efc3..da88d501 100644
--- a/tests/addons/test_git.py
+++ b/tests/addons/test_git.py
@@ -18,146 +18,147 @@ from snakeoil.process import CommandNotFound, find_binary
# skip testing module if git isn't installed
try:
- find_binary('git')
+ find_binary("git")
except CommandNotFound:
- pytestmark = pytest.mark.skipif(True, reason='git not installed')
+ pytestmark = pytest.mark.skipif(True, reason="git not installed")
class TestPkgcheckScanCommitsParseArgs:
-
@pytest.fixture(autouse=True)
def _setup(self, tool):
self.tool = tool
- self.args = ['scan']
+ self.args = ["scan"]
def test_commits_with_targets(self, capsys):
with pytest.raises(SystemExit) as excinfo:
- options, _func = self.tool.parse_args(self.args + ['--commits', 'ref', 'dev-util/foo'])
+ options, _func = self.tool.parse_args(self.args + ["--commits", "ref", "dev-util/foo"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
- assert err.strip() == \
- "pkgcheck scan: error: --commits is mutually exclusive with target: dev-util/foo"
+ assert (
+ err.strip()
+ == "pkgcheck scan: error: --commits is mutually exclusive with target: dev-util/foo"
+ )
def test_commits_git_unavailable(self, capsys):
- with patch('subprocess.run') as git_diff:
+ with patch("subprocess.run") as git_diff:
git_diff.side_effect = FileNotFoundError("no such file 'git'")
with pytest.raises(SystemExit) as excinfo:
- options, _func = self.tool.parse_args(self.args + ['--commits'])
+ options, _func = self.tool.parse_args(self.args + ["--commits"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
assert err.strip() == "pkgcheck scan: error: no such file 'git'"
def test_git_error(self, capsys):
- with patch('subprocess.run') as git_diff:
- git_diff.side_effect = subprocess.CalledProcessError(1, 'git')
- git_diff.side_effect.stderr = 'git error: foobar'
+ with patch("subprocess.run") as git_diff:
+ git_diff.side_effect = subprocess.CalledProcessError(1, "git")
+ git_diff.side_effect.stderr = "git error: foobar"
with pytest.raises(SystemExit) as excinfo:
- options, _func = self.tool.parse_args(self.args + ['--commits'])
+ options, _func = self.tool.parse_args(self.args + ["--commits"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
- err = err.strip().split('\n')
- assert err[-1].startswith('pkgcheck scan: error: failed running git: ')
+ err = err.strip().split("\n")
+ assert err[-1].startswith("pkgcheck scan: error: failed running git: ")
def test_commits_nonexistent(self, make_repo, make_git_repo, tmp_path):
parent = make_repo()
origin = make_git_repo(parent.location, commit=True)
local = make_git_repo(str(tmp_path), commit=False)
- local.run(['git', 'remote', 'add', 'origin', origin.path])
- local.run(['git', 'pull', 'origin', 'main'])
- local.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ local.run(["git", "remote", "add", "origin", origin.path])
+ local.run(["git", "pull", "origin", "main"])
+ local.run(["git", "remote", "set-head", "origin", "main"])
with pytest.raises(SystemExit) as excinfo:
- options, _func = self.tool.parse_args(self.args + ['-r', local.path, '--commits'])
+ options, _func = self.tool.parse_args(self.args + ["-r", local.path, "--commits"])
assert excinfo.value.code == 0
def test_commits_existing(self, make_repo, make_git_repo, tmp_path):
# create parent repo
parent = make_repo()
origin = make_git_repo(parent.location, commit=True)
- parent.create_ebuild('cat/pkg-0')
- origin.add_all('cat/pkg-0')
+ parent.create_ebuild("cat/pkg-0")
+ origin.add_all("cat/pkg-0")
# create child repo and pull from parent
local = make_git_repo(str(tmp_path), commit=False)
- local.run(['git', 'remote', 'add', 'origin', origin.path])
- local.run(['git', 'pull', 'origin', 'main'])
- local.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ local.run(["git", "remote", "add", "origin", origin.path])
+ local.run(["git", "pull", "origin", "main"])
+ local.run(["git", "remote", "set-head", "origin", "main"])
child = make_repo(local.path)
# create local commits on child repo
- child.create_ebuild('cat/pkg-1')
- local.add_all('cat/pkg-1')
- child.create_ebuild('cat/pkg-2')
- local.add_all('cat/pkg-2')
-
- options, _func = self.tool.parse_args(self.args + ['-r', local.path, '--commits'])
- atom_restricts = [atom_cls('cat/pkg')]
- assert list(options.restrictions) == \
- [(base.package_scope, packages.OrRestriction(*atom_restricts))]
+ child.create_ebuild("cat/pkg-1")
+ local.add_all("cat/pkg-1")
+ child.create_ebuild("cat/pkg-2")
+ local.add_all("cat/pkg-2")
+
+ options, _func = self.tool.parse_args(self.args + ["-r", local.path, "--commits"])
+ atom_restricts = [atom_cls("cat/pkg")]
+ assert list(options.restrictions) == [
+ (base.package_scope, packages.OrRestriction(*atom_restricts))
+ ]
def test_commits_eclasses(self, make_repo, make_git_repo, tmp_path):
# create parent repo
parent = make_repo()
origin = make_git_repo(parent.location, commit=True)
- parent.create_ebuild('cat/pkg-0')
- origin.add_all('cat/pkg-0')
+ parent.create_ebuild("cat/pkg-0")
+ origin.add_all("cat/pkg-0")
# create child repo and pull from parent
local = make_git_repo(str(tmp_path), commit=False)
- local.run(['git', 'remote', 'add', 'origin', origin.path])
- local.run(['git', 'pull', 'origin', 'main'])
- local.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ local.run(["git", "remote", "add", "origin", origin.path])
+ local.run(["git", "pull", "origin", "main"])
+ local.run(["git", "remote", "set-head", "origin", "main"])
child = make_repo(local.path)
# create local commits on child repo
- with open(pjoin(local.path, 'cat', 'pkg', 'metadata.xml'), 'w') as f:
+ with open(pjoin(local.path, "cat", "pkg", "metadata.xml"), "w") as f:
f.write('<?xml version="1.0" encoding="UTF-8"?>\n')
- local.add_all('cat/pkg: metadata')
- child.create_ebuild('cat/pkg-1')
- local.add_all('cat/pkg-1')
- os.makedirs(pjoin(local.path, 'eclass'))
- with open(pjoin(local.path, 'eclass', 'foo.eclass'), 'w') as f:
- f.write('data\n')
- local.add_all('foo.eclass')
-
- options, _func = self.tool.parse_args(self.args + ['-r', local.path, '--commits'])
- atom_restricts = [atom_cls('cat/pkg')]
+ local.add_all("cat/pkg: metadata")
+ child.create_ebuild("cat/pkg-1")
+ local.add_all("cat/pkg-1")
+ os.makedirs(pjoin(local.path, "eclass"))
+ with open(pjoin(local.path, "eclass", "foo.eclass"), "w") as f:
+ f.write("data\n")
+ local.add_all("foo.eclass")
+
+ options, _func = self.tool.parse_args(self.args + ["-r", local.path, "--commits"])
+ atom_restricts = [atom_cls("cat/pkg")]
restrictions = list(options.restrictions)
assert len(restrictions) == 2
- assert restrictions[0] == \
- (base.package_scope, packages.OrRestriction(*atom_restricts))
+ assert restrictions[0] == (base.package_scope, packages.OrRestriction(*atom_restricts))
assert restrictions[1][0] == base.eclass_scope
- assert restrictions[1][1] == frozenset(['foo'])
+ assert restrictions[1][1] == frozenset(["foo"])
def test_commits_profiles(self, make_repo, make_git_repo, tmp_path):
# create parent repo
parent = make_repo()
origin = make_git_repo(parent.location, commit=True)
- parent.create_ebuild('cat/pkg-0')
- origin.add_all('cat/pkg-0')
+ parent.create_ebuild("cat/pkg-0")
+ origin.add_all("cat/pkg-0")
# create child repo and pull from parent
local = make_git_repo(str(tmp_path), commit=False)
- local.run(['git', 'remote', 'add', 'origin', origin.path])
- local.run(['git', 'pull', 'origin', 'main'])
- local.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ local.run(["git", "remote", "add", "origin", origin.path])
+ local.run(["git", "pull", "origin", "main"])
+ local.run(["git", "remote", "set-head", "origin", "main"])
child = make_repo(local.path)
# create local commits on child repo
- with open(pjoin(local.path, 'cat', 'pkg', 'metadata.xml'), 'w') as f:
+ with open(pjoin(local.path, "cat", "pkg", "metadata.xml"), "w") as f:
f.write('<?xml version="1.0" encoding="UTF-8"?>\n')
- local.add_all('cat/pkg: metadata')
- child.create_ebuild('cat/pkg-1')
- local.add_all('cat/pkg-1')
- with open(pjoin(local.path, 'profiles', 'package.mask'), 'w') as f:
- f.write('data\n')
- local.add_all('package.mask')
-
- options, _func = self.tool.parse_args(self.args + ['-r', local.path, '--commits'])
- atom_restricts = [atom_cls('cat/pkg')]
+ local.add_all("cat/pkg: metadata")
+ child.create_ebuild("cat/pkg-1")
+ local.add_all("cat/pkg-1")
+ with open(pjoin(local.path, "profiles", "package.mask"), "w") as f:
+ f.write("data\n")
+ local.add_all("package.mask")
+
+ options, _func = self.tool.parse_args(self.args + ["-r", local.path, "--commits"])
+ atom_restricts = [atom_cls("cat/pkg")]
restrictions = [
(base.package_scope, packages.OrRestriction(*atom_restricts)),
- (base.profile_node_scope, frozenset(['profiles/package.mask'])),
+ (base.profile_node_scope, frozenset(["profiles/package.mask"])),
]
assert restrictions == options.restrictions
@@ -165,33 +166,32 @@ class TestPkgcheckScanCommitsParseArgs:
# create parent repo
parent = make_repo()
origin = make_git_repo(parent.location, commit=True)
- parent.create_ebuild('cat/pkg-0')
- origin.add_all('cat/pkg-0')
+ parent.create_ebuild("cat/pkg-0")
+ origin.add_all("cat/pkg-0")
# create child repo and pull from parent
local = make_git_repo(str(tmp_path), commit=False)
- local.run(['git', 'remote', 'add', 'origin', origin.path])
- local.run(['git', 'pull', 'origin', 'main'])
- local.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ local.run(["git", "remote", "add", "origin", origin.path])
+ local.run(["git", "pull", "origin", "main"])
+ local.run(["git", "remote", "set-head", "origin", "main"])
# create local commits on child repo
- os.makedirs(pjoin(local.path, 'foo'))
- with open(pjoin(local.path, 'foo', 'bar.txt'), 'w') as f:
- f.write('data\n')
- os.makedirs(pjoin(local.path, 'eclass', 'tests'))
- with open(pjoin(local.path, 'eclass', 'tests', 'test.sh'), 'w') as f:
- f.write('data\n')
- local.add_all('add files')
+ os.makedirs(pjoin(local.path, "foo"))
+ with open(pjoin(local.path, "foo", "bar.txt"), "w") as f:
+ f.write("data\n")
+ os.makedirs(pjoin(local.path, "eclass", "tests"))
+ with open(pjoin(local.path, "eclass", "tests", "test.sh"), "w") as f:
+ f.write("data\n")
+ local.add_all("add files")
with pytest.raises(SystemExit) as excinfo:
- self.tool.parse_args(self.args + ['-r', local.path, '--commits'])
+ self.tool.parse_args(self.args + ["-r", local.path, "--commits"])
assert excinfo.value.code == 0
class TestGitStash:
-
def test_non_git_repo(self, tmp_path):
- with pytest.raises(ValueError, match='not a git repo'):
+ with pytest.raises(ValueError, match="not a git repo"):
with git.GitStash(str(tmp_path)):
pass
@@ -200,7 +200,7 @@ class TestGitStash:
pass
def test_untracked_file(self, git_repo):
- path = pjoin(git_repo.path, 'foo')
+ path = pjoin(git_repo.path, "foo")
touch(path)
assert os.path.exists(path)
with git.GitStash(git_repo.path):
@@ -208,37 +208,36 @@ class TestGitStash:
assert os.path.exists(path)
def test_failed_stashing(self, git_repo):
- path = pjoin(git_repo.path, 'foo')
+ path = pjoin(git_repo.path, "foo")
touch(path)
assert os.path.exists(path)
- with patch('subprocess.run') as run:
- err = subprocess.CalledProcessError(1, 'git stash')
- err.stderr = 'git stash failed'
- run.side_effect = [Mock(stdout='foo'), err]
- with pytest.raises(UserException, match='git failed stashing files'):
+ with patch("subprocess.run") as run:
+ err = subprocess.CalledProcessError(1, "git stash")
+ err.stderr = "git stash failed"
+ run.side_effect = [Mock(stdout="foo"), err]
+ with pytest.raises(UserException, match="git failed stashing files"):
with git.GitStash(git_repo.path):
pass
def test_failed_unstashing(self, git_repo):
- path = pjoin(git_repo.path, 'foo')
+ path = pjoin(git_repo.path, "foo")
touch(path)
assert os.path.exists(path)
- with pytest.raises(UserException, match='git failed applying stash'):
+ with pytest.raises(UserException, match="git failed applying stash"):
with git.GitStash(git_repo.path):
assert not os.path.exists(path)
touch(path)
class TestGitRepoCommits:
-
def test_non_git(self, tmp_path):
- with pytest.raises(git.GitError, match='failed running git log'):
- git.GitRepoCommits(str(tmp_path), 'HEAD')
+ with pytest.raises(git.GitError, match="failed running git log"):
+ git.GitRepoCommits(str(tmp_path), "HEAD")
def test_empty_repo(self, make_git_repo):
git_repo = make_git_repo()
- with pytest.raises(git.GitError, match='failed running git log'):
- git.GitRepoCommits(git_repo.path, 'HEAD')
+ with pytest.raises(git.GitError, match="failed running git log"):
+ git.GitRepoCommits(git_repo.path, "HEAD")
def test_parsing(self, make_repo, make_git_repo):
git_repo = make_git_repo()
@@ -246,135 +245,134 @@ class TestGitRepoCommits:
path = git_repo.path
# make an initial commit
- git_repo.add('foo', msg='foo', create=True)
- commits = list(git.GitRepoCommits(path, 'HEAD'))
+ git_repo.add("foo", msg="foo", create=True)
+ commits = list(git.GitRepoCommits(path, "HEAD"))
assert len(commits) == 1
- assert commits[0].message == ['foo']
+ assert commits[0].message == ["foo"]
assert commits[0].pkgs == {}
orig_commit = commits[0]
# make another commit
- git_repo.add('bar', msg='bar', create=True)
- commits = list(git.GitRepoCommits(path, 'HEAD'))
+ git_repo.add("bar", msg="bar", create=True)
+ commits = list(git.GitRepoCommits(path, "HEAD"))
assert len(commits) == 2
- assert commits[0].message == ['bar']
+ assert commits[0].message == ["bar"]
assert commits[0].pkgs == {}
assert commits[1] == orig_commit
assert len(set(commits)) == 2
# make a pkg commit
- repo.create_ebuild('cat/pkg-0')
- git_repo.add_all('cat/pkg-0')
- commits = list(git.GitRepoCommits(path, 'HEAD'))
+ repo.create_ebuild("cat/pkg-0")
+ git_repo.add_all("cat/pkg-0")
+ commits = list(git.GitRepoCommits(path, "HEAD"))
assert len(commits) == 3
- assert commits[0].message == ['cat/pkg-0']
- assert commits[0].pkgs == {'A': {atom_cls('=cat/pkg-0')}}
+ assert commits[0].message == ["cat/pkg-0"]
+ assert commits[0].pkgs == {"A": {atom_cls("=cat/pkg-0")}}
# make a multiple pkg commit
- repo.create_ebuild('newcat/newpkg-0')
- repo.create_ebuild('newcat/newpkg-1')
- git_repo.add_all('newcat: various updates')
- commits = list(git.GitRepoCommits(path, 'HEAD'))
+ repo.create_ebuild("newcat/newpkg-0")
+ repo.create_ebuild("newcat/newpkg-1")
+ git_repo.add_all("newcat: various updates")
+ commits = list(git.GitRepoCommits(path, "HEAD"))
assert len(commits) == 4
- assert commits[0].message == ['newcat: various updates']
+ assert commits[0].message == ["newcat: various updates"]
assert commits[0].pkgs == {
- 'A': {atom_cls('=newcat/newpkg-0'), atom_cls('=newcat/newpkg-1')}}
+ "A": {atom_cls("=newcat/newpkg-0"), atom_cls("=newcat/newpkg-1")}
+ }
# remove the old version
- git_repo.remove('newcat/newpkg/newpkg-0.ebuild')
- commits = list(git.GitRepoCommits(path, 'HEAD'))
+ git_repo.remove("newcat/newpkg/newpkg-0.ebuild")
+ commits = list(git.GitRepoCommits(path, "HEAD"))
assert len(commits) == 5
- assert commits[0].pkgs == {'D': {atom_cls('=newcat/newpkg-0')}}
+ assert commits[0].pkgs == {"D": {atom_cls("=newcat/newpkg-0")}}
# rename the pkg
- git_repo.move('newcat', 'newcat2')
- commits = list(git.GitRepoCommits(path, 'HEAD'))
+ git_repo.move("newcat", "newcat2")
+ commits = list(git.GitRepoCommits(path, "HEAD"))
assert len(commits) == 6
assert commits[0].pkgs == {
- 'A': {atom_cls('=newcat2/newpkg-1')},
- 'D': {atom_cls('=newcat/newpkg-1')},
+ "A": {atom_cls("=newcat2/newpkg-1")},
+ "D": {atom_cls("=newcat/newpkg-1")},
}
# malformed atoms don't show up as pkgs
- repo.create_ebuild('cat/pkg-3')
- git_repo.add_all('cat/pkg-3')
- with patch('pkgcheck.addons.git.atom_cls') as fake_atom:
- fake_atom.side_effect = MalformedAtom('bad atom')
- commits = list(git.GitRepoCommits(path, 'HEAD'))
+ repo.create_ebuild("cat/pkg-3")
+ git_repo.add_all("cat/pkg-3")
+ with patch("pkgcheck.addons.git.atom_cls") as fake_atom:
+ fake_atom.side_effect = MalformedAtom("bad atom")
+ commits = list(git.GitRepoCommits(path, "HEAD"))
assert len(commits) == 7
assert commits[0].pkgs == {}
class TestGitRepoPkgs:
-
def test_non_git(self, tmp_path):
- with pytest.raises(git.GitError, match='failed running git log'):
- git.GitRepoPkgs(str(tmp_path), 'HEAD')
+ with pytest.raises(git.GitError, match="failed running git log"):
+ git.GitRepoPkgs(str(tmp_path), "HEAD")
def test_empty_repo(self, make_git_repo):
git_repo = make_git_repo()
- with pytest.raises(git.GitError, match='failed running git log'):
- git.GitRepoPkgs(git_repo.path, 'HEAD')
+ with pytest.raises(git.GitError, match="failed running git log"):
+ git.GitRepoPkgs(git_repo.path, "HEAD")
def test_parsing(self, repo, make_git_repo):
git_repo = make_git_repo(repo.location, commit=True)
path = git_repo.path
# empty repo contains no packages
- pkgs = list(git.GitRepoPkgs(path, 'HEAD'))
+ pkgs = list(git.GitRepoPkgs(path, "HEAD"))
assert len(pkgs) == 0
# create a pkg and commit it
- repo.create_ebuild('cat/pkg-0')
- git_repo.add_all('cat/pkg-0')
- pkgs = list(git.GitRepoPkgs(path, 'HEAD'))
+ repo.create_ebuild("cat/pkg-0")
+ git_repo.add_all("cat/pkg-0")
+ pkgs = list(git.GitRepoPkgs(path, "HEAD"))
assert len(pkgs) == 1
pkg = pkgs[0]
- assert pkg.atom == atom_cls('=cat/pkg-0')
- assert pkg.status == 'A'
+ assert pkg.atom == atom_cls("=cat/pkg-0")
+ assert pkg.status == "A"
# add a new version and commit it
- repo.create_ebuild('cat/pkg-1')
- git_repo.add_all('cat/pkg-1')
- pkgs = list(git.GitRepoPkgs(path, 'HEAD'))
+ repo.create_ebuild("cat/pkg-1")
+ git_repo.add_all("cat/pkg-1")
+ pkgs = list(git.GitRepoPkgs(path, "HEAD"))
assert len(pkgs) == 2
pkg = pkgs[0]
- assert pkg.atom == atom_cls('=cat/pkg-1')
- assert pkg.status == 'A'
+ assert pkg.atom == atom_cls("=cat/pkg-1")
+ assert pkg.status == "A"
# remove the old version
- git_repo.remove('cat/pkg/pkg-0.ebuild')
- pkgs = list(git.GitRepoPkgs(path, 'HEAD'))
+ git_repo.remove("cat/pkg/pkg-0.ebuild")
+ pkgs = list(git.GitRepoPkgs(path, "HEAD"))
assert len(pkgs) == 3
pkg = pkgs[0]
- assert pkg.atom == atom_cls('=cat/pkg-0')
- assert pkg.status == 'D'
+ assert pkg.atom == atom_cls("=cat/pkg-0")
+ assert pkg.status == "D"
# rename the pkg
- git_repo.move('cat', 'cat2')
- pkgs = list(git.GitRepoPkgs(path, 'HEAD'))
+ git_repo.move("cat", "cat2")
+ pkgs = list(git.GitRepoPkgs(path, "HEAD"))
assert len(pkgs) == 5
new_pkg, old_pkg = pkgs[:2]
- assert old_pkg.atom == atom_cls('=cat/pkg-1')
- assert old_pkg.status == 'D'
- assert new_pkg.atom == atom_cls('=cat2/pkg-1')
- assert new_pkg.status == 'A'
+ assert old_pkg.atom == atom_cls("=cat/pkg-1")
+ assert old_pkg.status == "D"
+ assert new_pkg.atom == atom_cls("=cat2/pkg-1")
+ assert new_pkg.status == "A"
# malformed atoms don't show up as pkgs
- with patch('pkgcheck.addons.git.atom_cls') as fake_atom:
- fake_atom.side_effect = MalformedAtom('bad atom')
- pkgs = list(git.GitRepoPkgs(path, 'HEAD'))
+ with patch("pkgcheck.addons.git.atom_cls") as fake_atom:
+ fake_atom.side_effect = MalformedAtom("bad atom")
+ pkgs = list(git.GitRepoPkgs(path, "HEAD"))
assert len(pkgs) == 0
class TestGitChangedRepo:
-
def test_pkg_history(self, repo, make_git_repo):
git_repo = make_git_repo(repo.location, commit=True)
pkg_history = partial(git.GitAddon.pkg_history, repo)
# initialize the dict cache
- data = pkg_history('HEAD')
+ data = pkg_history("HEAD")
assert data == {}
# overlay repo objects on top of the dict cache
@@ -388,10 +386,10 @@ class TestGitChangedRepo:
assert len(removed_repo) == 0
# create a pkg and commit it
- repo.create_ebuild('cat/pkg-0')
- git_repo.add_all('cat/pkg-0')
+ repo.create_ebuild("cat/pkg-0")
+ git_repo.add_all("cat/pkg-0")
# update the dict cache
- data = pkg_history('HEAD', data=data)
+ data = pkg_history("HEAD", data=data)
commit = git_repo.HEAD
# overlay repo objects on top of the dict cache
@@ -405,10 +403,10 @@ class TestGitChangedRepo:
assert len(removed_repo) == 0
# add a new version and commit it
- repo.create_ebuild('cat/pkg-1')
- git_repo.add_all('cat/pkg-1')
+ repo.create_ebuild("cat/pkg-1")
+ git_repo.add_all("cat/pkg-1")
# update the dict cache
- data = pkg_history(f'{commit}..HEAD', data=data)
+ data = pkg_history(f"{commit}..HEAD", data=data)
commit = git_repo.HEAD
# overlay repo objects on top of the dict cache
@@ -422,9 +420,9 @@ class TestGitChangedRepo:
assert len(removed_repo) == 0
# remove the old version
- git_repo.remove('cat/pkg/pkg-0.ebuild')
+ git_repo.remove("cat/pkg/pkg-0.ebuild")
# update the dict cache
- data = pkg_history(f'{commit}..HEAD', data=data)
+ data = pkg_history(f"{commit}..HEAD", data=data)
commit = git_repo.HEAD
# overlay repo objects on top of the dict cache
@@ -438,9 +436,9 @@ class TestGitChangedRepo:
assert len(removed_repo) == 1
# rename the pkg
- git_repo.move('cat', 'cat2')
+ git_repo.move("cat", "cat2")
# update the dict cache
- data = pkg_history(f'{commit}..HEAD', data=data)
+ data = pkg_history(f"{commit}..HEAD", data=data)
commit = git_repo.HEAD
# overlay repo objects on top of the dict cache
@@ -455,51 +453,50 @@ class TestGitChangedRepo:
class TestGitAddon:
-
@pytest.fixture(autouse=True)
def _setup(self, tool, tmp_path, repo):
self.repo = repo
self.cache_dir = str(tmp_path)
- args = ['scan', '--cache-dir', self.cache_dir, '--repo', self.repo.location]
+ args = ["scan", "--cache-dir", self.cache_dir, "--repo", self.repo.location]
options, _ = tool.parse_args(args)
self.addon = git.GitAddon(options)
self.cache_file = self.addon.cache_file(self.repo)
def test_git_unavailable(self, tool):
- args = ['scan', '--cache-dir', self.cache_dir, '--repo', self.repo.location]
+ args = ["scan", "--cache-dir", self.cache_dir, "--repo", self.repo.location]
options, _ = tool.parse_args(args)
- with patch('pkgcheck.addons.git.find_binary') as find_binary:
- find_binary.side_effect = CommandNotFound('git not found')
- with pytest.raises(CacheDisabled, match='git cache support required'):
+ with patch("pkgcheck.addons.git.find_binary") as find_binary:
+ find_binary.side_effect = CommandNotFound("git not found")
+ with pytest.raises(CacheDisabled, match="git cache support required"):
git.GitAddon(options)
def test_no_gitignore(self):
assert self.addon._gitignore is None
- assert not self.addon.gitignored('')
+ assert not self.addon.gitignored("")
def test_failed_gitignore(self):
- with open(pjoin(self.repo.location, '.gitignore'), 'w') as f:
- f.write('.*.swp\n')
- with patch('pkgcheck.addons.git.open') as fake_open:
- fake_open.side_effect = IOError('file reading failure')
+ with open(pjoin(self.repo.location, ".gitignore"), "w") as f:
+ f.write(".*.swp\n")
+ with patch("pkgcheck.addons.git.open") as fake_open:
+ fake_open.side_effect = IOError("file reading failure")
assert self.addon._gitignore is None
def test_gitignore(self):
- for path in ('.gitignore', '.git/info/exclude'):
+ for path in (".gitignore", ".git/info/exclude"):
file_path = pjoin(self.repo.location, path)
os.makedirs(os.path.dirname(file_path), exist_ok=True)
- with open(file_path, 'w') as f:
- f.write('.*.swp\n')
- assert self.addon.gitignored('.foo.swp')
- assert self.addon.gitignored(pjoin(self.repo.location, '.foo.swp'))
- assert not self.addon.gitignored('foo.swp')
- assert not self.addon.gitignored(pjoin(self.repo.location, 'foo.swp'))
+ with open(file_path, "w") as f:
+ f.write(".*.swp\n")
+ assert self.addon.gitignored(".foo.swp")
+ assert self.addon.gitignored(pjoin(self.repo.location, ".foo.swp"))
+ assert not self.addon.gitignored("foo.swp")
+ assert not self.addon.gitignored(pjoin(self.repo.location, "foo.swp"))
def test_cache_disabled(self, tool):
- args = ['scan', '--cache', 'no', '--repo', self.repo.location]
+ args = ["scan", "--cache", "no", "--repo", self.repo.location]
options, _ = tool.parse_args(args)
- with pytest.raises(CacheDisabled, match='git cache support required'):
+ with pytest.raises(CacheDisabled, match="git cache support required"):
init_addon(git.GitAddon, options)
def test_non_git_repo(self):
@@ -516,26 +513,26 @@ class TestGitAddon:
"""Cache file isn't updated if no relevant commits exist."""
parent_repo = make_git_repo(commit=True)
child_repo = make_git_repo(self.repo.location, commit=False)
- child_repo.run(['git', 'remote', 'add', 'origin', parent_repo.path])
- child_repo.run(['git', 'pull', 'origin', 'main'])
- child_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ child_repo.run(["git", "remote", "add", "origin", parent_repo.path])
+ child_repo.run(["git", "pull", "origin", "main"])
+ child_repo.run(["git", "remote", "set-head", "origin", "main"])
self.addon.update_cache()
assert not os.path.exists(self.cache_file)
def test_cache_creation_and_load(self, repo, make_git_repo):
parent_repo = make_git_repo(repo.location, commit=True)
# create a pkg and commit it
- repo.create_ebuild('cat/pkg-0')
- parent_repo.add_all('cat/pkg-0')
+ repo.create_ebuild("cat/pkg-0")
+ parent_repo.add_all("cat/pkg-0")
child_repo = make_git_repo(self.repo.location, commit=False)
- child_repo.run(['git', 'remote', 'add', 'origin', parent_repo.path])
- child_repo.run(['git', 'pull', 'origin', 'main'])
- child_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ child_repo.run(["git", "remote", "add", "origin", parent_repo.path])
+ child_repo.run(["git", "pull", "origin", "main"])
+ child_repo.run(["git", "remote", "set-head", "origin", "main"])
self.addon.update_cache()
- assert atom_cls('=cat/pkg-0') in self.addon.cached_repo(git.GitAddedRepo)
+ assert atom_cls("=cat/pkg-0") in self.addon.cached_repo(git.GitAddedRepo)
- with patch('pkgcheck.addons.caches.CachedAddon.save_cache') as save_cache:
+ with patch("pkgcheck.addons.caches.CachedAddon.save_cache") as save_cache:
# verify the cache was loaded and not regenerated
self.addon.update_cache()
save_cache.assert_not_called()
@@ -544,28 +541,28 @@ class TestGitAddon:
save_cache.assert_called_once()
# create another pkg and commit it to the parent repo
- repo.create_ebuild('cat/pkg-1')
- parent_repo.add_all('cat/pkg-1')
+ repo.create_ebuild("cat/pkg-1")
+ parent_repo.add_all("cat/pkg-1")
self.addon.update_cache()
- assert atom_cls('=cat/pkg-1') not in self.addon.cached_repo(git.GitAddedRepo)
+ assert atom_cls("=cat/pkg-1") not in self.addon.cached_repo(git.GitAddedRepo)
# new package is seen after child repo pulls changes
- child_repo.run(['git', 'pull', 'origin', 'main'])
+ child_repo.run(["git", "pull", "origin", "main"])
self.addon.update_cache()
- assert atom_cls('=cat/pkg-1') in self.addon.cached_repo(git.GitAddedRepo)
+ assert atom_cls("=cat/pkg-1") in self.addon.cached_repo(git.GitAddedRepo)
def test_outdated_cache(self, repo, make_git_repo):
parent_repo = make_git_repo(repo.location, commit=True)
# create a pkg and commit it
- repo.create_ebuild('cat/pkg-0')
- parent_repo.add_all('cat/pkg-0')
+ repo.create_ebuild("cat/pkg-0")
+ parent_repo.add_all("cat/pkg-0")
child_repo = make_git_repo(self.repo.location, commit=False)
- child_repo.run(['git', 'remote', 'add', 'origin', parent_repo.path])
- child_repo.run(['git', 'pull', 'origin', 'main'])
- child_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ child_repo.run(["git", "remote", "add", "origin", parent_repo.path])
+ child_repo.run(["git", "pull", "origin", "main"])
+ child_repo.run(["git", "remote", "set-head", "origin", "main"])
self.addon.update_cache()
- assert atom_cls('=cat/pkg-0') in self.addon.cached_repo(git.GitAddedRepo)
+ assert atom_cls("=cat/pkg-0") in self.addon.cached_repo(git.GitAddedRepo)
# increment cache version and dump cache
cache = self.addon.load_cache(self.cache_file)
@@ -573,79 +570,79 @@ class TestGitAddon:
self.addon.save_cache(cache, self.cache_file)
# verify cache load causes regen
- with patch('pkgcheck.addons.caches.CachedAddon.save_cache') as save_cache:
+ with patch("pkgcheck.addons.caches.CachedAddon.save_cache") as save_cache:
self.addon.update_cache()
save_cache.assert_called_once()
def test_error_creating_cache(self, repo, make_git_repo):
parent_repo = make_git_repo(repo.location, commit=True)
# create a pkg and commit it
- repo.create_ebuild('cat/pkg-0')
- parent_repo.add_all('cat/pkg-0')
+ repo.create_ebuild("cat/pkg-0")
+ parent_repo.add_all("cat/pkg-0")
child_repo = make_git_repo(self.repo.location, commit=False)
- child_repo.run(['git', 'remote', 'add', 'origin', parent_repo.path])
- child_repo.run(['git', 'pull', 'origin', 'main'])
- child_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ child_repo.run(["git", "remote", "add", "origin", parent_repo.path])
+ child_repo.run(["git", "pull", "origin", "main"])
+ child_repo.run(["git", "remote", "set-head", "origin", "main"])
- with patch('pkgcheck.addons.git.GitLog') as git_log:
- git_log.side_effect = git.GitError('git parsing failed')
- with pytest.raises(PkgcheckUserException, match='git parsing failed'):
+ with patch("pkgcheck.addons.git.GitLog") as git_log:
+ git_log.side_effect = git.GitError("git parsing failed")
+ with pytest.raises(PkgcheckUserException, match="git parsing failed"):
self.addon.update_cache()
def test_error_loading_cache(self, repo, make_git_repo):
parent_repo = make_git_repo(repo.location, commit=True)
# create a pkg and commit it
- repo.create_ebuild('cat/pkg-0')
- parent_repo.add_all('cat/pkg-0')
+ repo.create_ebuild("cat/pkg-0")
+ parent_repo.add_all("cat/pkg-0")
child_repo = make_git_repo(self.repo.location, commit=False)
- child_repo.run(['git', 'remote', 'add', 'origin', parent_repo.path])
- child_repo.run(['git', 'pull', 'origin', 'main'])
- child_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ child_repo.run(["git", "remote", "add", "origin", parent_repo.path])
+ child_repo.run(["git", "pull", "origin", "main"])
+ child_repo.run(["git", "remote", "set-head", "origin", "main"])
self.addon.update_cache()
- assert atom_cls('=cat/pkg-0') in self.addon.cached_repo(git.GitAddedRepo)
+ assert atom_cls("=cat/pkg-0") in self.addon.cached_repo(git.GitAddedRepo)
- with patch('pkgcheck.addons.caches.pickle.load') as pickle_load:
+ with patch("pkgcheck.addons.caches.pickle.load") as pickle_load:
# catastrophic errors are raised
- pickle_load.side_effect = MemoryError('unpickling failed')
- with pytest.raises(MemoryError, match='unpickling failed'):
+ pickle_load.side_effect = MemoryError("unpickling failed")
+ with pytest.raises(MemoryError, match="unpickling failed"):
self.addon.update_cache()
# but various load failure exceptions cause cache regen
- pickle_load.side_effect = Exception('unpickling failed')
- with patch('pkgcheck.addons.caches.CachedAddon.save_cache') as save_cache:
+ pickle_load.side_effect = Exception("unpickling failed")
+ with patch("pkgcheck.addons.caches.CachedAddon.save_cache") as save_cache:
self.addon.update_cache()
save_cache.assert_called_once()
def test_error_dumping_cache(self, repo, make_git_repo):
parent_repo = make_git_repo(repo.location, commit=True)
# create a pkg and commit it
- repo.create_ebuild('cat/pkg-0')
- parent_repo.add_all('cat/pkg-0')
+ repo.create_ebuild("cat/pkg-0")
+ parent_repo.add_all("cat/pkg-0")
child_repo = make_git_repo(self.repo.location, commit=False)
- child_repo.run(['git', 'remote', 'add', 'origin', parent_repo.path])
- child_repo.run(['git', 'pull', 'origin', 'main'])
- child_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ child_repo.run(["git", "remote", "add", "origin", parent_repo.path])
+ child_repo.run(["git", "pull", "origin", "main"])
+ child_repo.run(["git", "remote", "set-head", "origin", "main"])
# verify IO related dump failures are raised
- with patch('pkgcheck.addons.caches.pickle.dump') as pickle_dump:
- pickle_dump.side_effect = IOError('unpickling failed')
- with pytest.raises(PkgcheckUserException, match='failed dumping git cache'):
+ with patch("pkgcheck.addons.caches.pickle.dump") as pickle_dump:
+ pickle_dump.side_effect = IOError("unpickling failed")
+ with pytest.raises(PkgcheckUserException, match="failed dumping git cache"):
self.addon.update_cache()
def test_commits_repo(self, repo, make_repo, make_git_repo):
parent_repo = repo
parent_git_repo = make_git_repo(repo.location, commit=True)
# create a pkg and commit it
- parent_repo.create_ebuild('cat/pkg-0')
- parent_git_repo.add_all('cat/pkg-0')
+ parent_repo.create_ebuild("cat/pkg-0")
+ parent_git_repo.add_all("cat/pkg-0")
child_git_repo = make_git_repo(self.repo.location, commit=False)
- child_git_repo.run(['git', 'remote', 'add', 'origin', parent_git_repo.path])
- child_git_repo.run(['git', 'pull', 'origin', 'main'])
- child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ child_git_repo.run(["git", "remote", "add", "origin", parent_git_repo.path])
+ child_git_repo.run(["git", "pull", "origin", "main"])
+ child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
self.addon.update_cache()
# no new pkg commits exist locally in the child repo
@@ -654,37 +651,37 @@ class TestGitAddon:
# create a pkg in the child repo and commit it
child_repo = make_repo(child_git_repo.path)
- child_repo.create_ebuild('cat/pkg-1')
- child_git_repo.add_all('cat/pkg-1')
+ child_repo.create_ebuild("cat/pkg-1")
+ child_git_repo.add_all("cat/pkg-1")
# pkg commits now exist locally in the child repo
commits_repo = self.addon.commits_repo(git.GitChangedRepo)
assert len(commits_repo) == 1
- assert atom_cls('=cat/pkg-1') in commits_repo
+ assert atom_cls("=cat/pkg-1") in commits_repo
        # failing to parse git log raises an error with the git cache enabled
- with patch('pkgcheck.addons.git.GitLog') as git_log:
- git_log.side_effect = git.GitError('git parsing failed')
- with pytest.raises(PkgcheckUserException, match='git parsing failed'):
+ with patch("pkgcheck.addons.git.GitLog") as git_log:
+ git_log.side_effect = git.GitError("git parsing failed")
+ with pytest.raises(PkgcheckUserException, match="git parsing failed"):
self.addon.commits_repo(git.GitChangedRepo)
        # failing to parse git log also raises with the git cache disabled
- with patch('pkgcheck.addons.git.GitLog') as git_log:
- git_log.side_effect = git.GitError('git parsing failed')
- with pytest.raises(PkgcheckUserException, match='git parsing failed'):
+ with patch("pkgcheck.addons.git.GitLog") as git_log:
+ git_log.side_effect = git.GitError("git parsing failed")
+ with pytest.raises(PkgcheckUserException, match="git parsing failed"):
self.addon.commits_repo(git.GitChangedRepo)
def test_commits(self, repo, make_repo, make_git_repo):
parent_repo = repo
parent_git_repo = make_git_repo(repo.location, commit=True)
# create a pkg and commit it
- parent_repo.create_ebuild('cat/pkg-0')
- parent_git_repo.add_all('cat/pkg-0')
+ parent_repo.create_ebuild("cat/pkg-0")
+ parent_git_repo.add_all("cat/pkg-0")
child_git_repo = make_git_repo(self.repo.location, commit=False)
- child_git_repo.run(['git', 'remote', 'add', 'origin', parent_git_repo.path])
- child_git_repo.run(['git', 'pull', 'origin', 'main'])
- child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ child_git_repo.run(["git", "remote", "add", "origin", parent_git_repo.path])
+ child_git_repo.run(["git", "pull", "origin", "main"])
+ child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
self.addon.update_cache()
# no new commits exist locally in the child repo
@@ -692,22 +689,22 @@ class TestGitAddon:
# create a pkg in the child repo and commit it
child_repo = make_repo(child_git_repo.path)
- child_repo.create_ebuild('cat/pkg-1')
- child_git_repo.add_all('cat/pkg-1')
+ child_repo.create_ebuild("cat/pkg-1")
+ child_git_repo.add_all("cat/pkg-1")
# commits now exist locally in the child repo
commits = list(self.addon.commits())
assert len(commits) == 1
- assert commits[0].message == ['cat/pkg-1']
+ assert commits[0].message == ["cat/pkg-1"]
        # failing to parse git log raises an error with the git cache enabled
- with patch('pkgcheck.addons.git.GitLog') as git_log:
- git_log.side_effect = git.GitError('git parsing failed')
- with pytest.raises(PkgcheckUserException, match='git parsing failed'):
+ with patch("pkgcheck.addons.git.GitLog") as git_log:
+ git_log.side_effect = git.GitError("git parsing failed")
+ with pytest.raises(PkgcheckUserException, match="git parsing failed"):
list(self.addon.commits())
        # failing to parse git log raises an exception
- with patch('pkgcheck.addons.git.GitLog') as git_log:
- git_log.side_effect = git.GitError('git parsing failed')
- with pytest.raises(PkgcheckUserException, match='git parsing failed'):
+ with patch("pkgcheck.addons.git.GitLog") as git_log:
+ git_log.side_effect = git.GitError("git parsing failed")
+ with pytest.raises(PkgcheckUserException, match="git parsing failed"):
self.addon.commits()
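
The test_git.py hunks reformat the same patch/side_effect idiom over and over. A self-contained sketch of that pattern, assuming only the standard unittest.mock API (the patched command and error message are illustrative):

    import subprocess
    from unittest.mock import patch

    with patch("subprocess.run") as run:
        # any call to subprocess.run inside this block raises instead
        run.side_effect = FileNotFoundError("no such file 'git'")
        try:
            subprocess.run(["git", "log"])
        except FileNotFoundError as exc:
            print(exc)  # no such file 'git'
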
diff --git a/tests/checks/test_acct.py b/tests/checks/test_acct.py
index 57273705..4c8202dc 100644
--- a/tests/checks/test_acct.py
+++ b/tests/checks/test_acct.py
@@ -12,81 +12,86 @@ class TestAcctUser(misc.ReportTestCase):
check_kls = acct.AcctCheck
- kind = 'user'
+ kind = "user"
@pytest.fixture(autouse=True)
def _setup(self, tmp_path):
- (metadata := tmp_path / 'metadata').mkdir()
- (metadata / 'qa-policy.conf').write_text(textwrap.dedent("""\
- [user-group-ids]
- uid-range = 0-749,65534
- gid-range = 0-749,65533,65534
- """))
+ (metadata := tmp_path / "metadata").mkdir()
+ (metadata / "qa-policy.conf").write_text(
+ textwrap.dedent(
+ """\
+ [user-group-ids]
+ uid-range = 0-749,65534
+ gid-range = 0-749,65533,65534
+ """
+ )
+ )
self.location = str(tmp_path)
def mk_check(self, pkgs):
- repo = FakeRepo(pkgs=pkgs, repo_id='test', location=self.location)
+ repo = FakeRepo(pkgs=pkgs, repo_id="test", location=self.location)
check = self.check_kls(arghparse.Namespace(target_repo=repo, gentoo_repo=True))
return check
def mk_pkg(self, name, identifier, version=1, ebuild=None):
if ebuild is None:
- ebuild = textwrap.dedent(f'''\
- inherit acct-{self.kind}
- ACCT_{self.kind.upper()}_ID="{identifier}"
- ''')
- return misc.FakePkg(f'acct-{self.kind}/{name}-{version}', ebuild=ebuild)
+ ebuild = textwrap.dedent(
+ f"""\
+ inherit acct-{self.kind}
+ ACCT_{self.kind.upper()}_ID="{identifier}"
+ """
+ )
+ return misc.FakePkg(f"acct-{self.kind}/{name}-{version}", ebuild=ebuild)
def test_unmatching_pkgs(self):
- pkgs = (misc.FakePkg('dev-util/foo-0'),
- misc.FakePkg('dev-util/bar-1'))
+ pkgs = (misc.FakePkg("dev-util/foo-0"), misc.FakePkg("dev-util/bar-1"))
check = self.mk_check(pkgs)
self.assertNoReport(check, pkgs)
def test_correct_ids(self):
- pkgs = (self.mk_pkg('foo', 100),
- self.mk_pkg('bar', 200),
- self.mk_pkg('test', 749),
- self.mk_pkg('nobody', 65534))
+ pkgs = (
+ self.mk_pkg("foo", 100),
+ self.mk_pkg("bar", 200),
+ self.mk_pkg("test", 749),
+ self.mk_pkg("nobody", 65534),
+ )
check = self.mk_check(pkgs)
self.assertNoReport(check, pkgs)
def test_missing_ids(self):
- pkg = self.mk_pkg('foo', None, ebuild='inherit acct-user\n')
+ pkg = self.mk_pkg("foo", None, ebuild="inherit acct-user\n")
check = self.mk_check((pkg,))
r = self.assertReport(check, pkg)
assert isinstance(r, acct.MissingAccountIdentifier)
- assert r.var == f'ACCT_{self.kind.upper()}_ID'
+ assert r.var == f"ACCT_{self.kind.upper()}_ID"
assert r.var in str(r)
def test_conflicting_ids(self):
- pkgs = (self.mk_pkg('foo', 100),
- self.mk_pkg('bar', 100))
+ pkgs = (self.mk_pkg("foo", 100), self.mk_pkg("bar", 100))
check = self.mk_check(pkgs)
r = self.assertReport(check, pkgs)
assert isinstance(r, acct.ConflictingAccountIdentifiers)
assert r.kind == self.kind
assert r.identifier == 100
- assert r.pkgs == (f'acct-{self.kind}/bar-1', f'acct-{self.kind}/foo-1')
- assert f'conflicting {self.kind} id 100 usage: ' in str(r)
+ assert r.pkgs == (f"acct-{self.kind}/bar-1", f"acct-{self.kind}/foo-1")
+ assert f"conflicting {self.kind} id 100 usage: " in str(r)
def test_self_nonconflicting_ids(self):
- pkgs = (self.mk_pkg('foo', 100),
- self.mk_pkg('foo', 100, version=2))
+ pkgs = (self.mk_pkg("foo", 100), self.mk_pkg("foo", 100, version=2))
check = self.mk_check(pkgs)
self.assertNoReport(check, pkgs)
def test_dynamic_assignment_range(self):
- pkg = self.mk_pkg('foo', 750)
+ pkg = self.mk_pkg("foo", 750)
check = self.mk_check((pkg,))
r = self.assertReport(check, pkg)
assert isinstance(r, acct.OutsideRangeAccountIdentifier)
assert r.kind == self.kind
assert r.identifier == 750
- assert f'{self.kind} id 750 outside permitted' in str(r)
+ assert f"{self.kind} id 750 outside permitted" in str(r)
def test_sysadmin_assignment_range(self):
- pkg = self.mk_pkg('foo', 1000)
+ pkg = self.mk_pkg("foo", 1000)
check = self.mk_check((pkg,))
r = self.assertReport(check, pkg)
assert isinstance(r, acct.OutsideRangeAccountIdentifier)
@@ -94,7 +99,7 @@ class TestAcctUser(misc.ReportTestCase):
assert r.identifier == 1000
def test_high_reserved(self):
- pkg = self.mk_pkg('foo', 65535)
+ pkg = self.mk_pkg("foo", 65535)
check = self.mk_check((pkg,))
r = self.assertReport(check, pkg)
assert isinstance(r, acct.OutsideRangeAccountIdentifier)
@@ -103,7 +108,7 @@ class TestAcctUser(misc.ReportTestCase):
def test_nogroup(self):
"""Test that 65533 is not accepted for UID."""
- pkg = self.mk_pkg('nogroup', 65533)
+ pkg = self.mk_pkg("nogroup", 65533)
check = self.mk_check((pkg,))
r = self.assertReport(check, pkg)
assert isinstance(r, acct.OutsideRangeAccountIdentifier)
@@ -111,28 +116,27 @@ class TestAcctUser(misc.ReportTestCase):
assert r.identifier == 65533
def test_nobody(self):
- pkg = self.mk_pkg('nobody', 65534)
+ pkg = self.mk_pkg("nobody", 65534)
check = self.mk_check((pkg,))
self.assertNoReport(check, pkg)
class TestAcctGroup(TestAcctUser):
- kind = 'group'
+ kind = "group"
def test_nogroup(self):
"""Test that 65533 is accepted for GID."""
- pkg = self.mk_pkg('nogroup', 65533)
+ pkg = self.mk_pkg("nogroup", 65533)
check = self.mk_check((pkg,))
self.assertNoReport(check, pkg)
class TestQaPolicyValidation(misc.ReportTestCase):
-
def mk_check(self, tmp_path, content):
if content:
- (metadata := tmp_path / 'metadata').mkdir()
- (metadata / 'qa-policy.conf').write_text(textwrap.dedent(content))
- repo = FakeRepo(repo_id='test', location=str(tmp_path))
+ (metadata := tmp_path / "metadata").mkdir()
+ (metadata / "qa-policy.conf").write_text(textwrap.dedent(content))
+ repo = FakeRepo(repo_id="test", location=str(tmp_path))
return acct.AcctCheck(arghparse.Namespace(target_repo=repo, gentoo_repo=True))
def test_missing_qa_policy(self, tmp_path):
@@ -141,27 +145,39 @@ class TestQaPolicyValidation(misc.ReportTestCase):
def test_missing_section(self, tmp_path):
with pytest.raises(SkipCheck, match="missing section user-group-ids"):
- self.mk_check(tmp_path, '''\
+ self.mk_check(
+ tmp_path,
+ """\
[random]
x = 5
- ''')
+ """,
+ )
def test_missing_config(self, tmp_path):
with pytest.raises(SkipCheck, match="missing value for gid-range"):
- self.mk_check(tmp_path, '''\
+ self.mk_check(
+ tmp_path,
+ """\
[user-group-ids]
uid-range = 0-749
- ''')
-
- @pytest.mark.parametrize('value', (
- 'start-end',
- '0-749-1500',
- ',150',
- ))
+ """,
+ )
+
+ @pytest.mark.parametrize(
+ "value",
+ (
+ "start-end",
+ "0-749-1500",
+ ",150",
+ ),
+ )
def test_invalid_value(self, tmp_path, value):
with pytest.raises(SkipCheck, match="invalid value for uid-range"):
- self.mk_check(tmp_path, f'''\
+ self.mk_check(
+ tmp_path,
+ f"""\
[user-group-ids]
uid-range = {value}
gid-range = 0-749
- ''')
+ """,
+ )
diff --git a/tests/checks/test_all.py b/tests/checks/test_all.py
index 2ca8a114..a153a802 100644
--- a/tests/checks/test_all.py
+++ b/tests/checks/test_all.py
@@ -22,96 +22,91 @@ class TestMetadataError:
def test_reregister_error(self):
with pytest.raises(ValueError, match="metadata attribute 'eapi' already registered"):
+
class InvalidEapi2(results.MetadataError, results.VersionResult):
- attr = 'eapi'
+ attr = "eapi"
def test_register_missing_attr(self):
with pytest.raises(ValueError, match="class missing metadata attributes"):
+
class InvalidAttr(results.MetadataError, results.VersionResult):
pass
class TestGentooRepoCheck:
-
def test_non_gentoo_repo(self, tool, make_repo):
self.repo = make_repo()
- args = ['scan', '--repo', self.repo.location]
+ args = ["scan", "--repo", self.repo.location]
options, _ = tool.parse_args(args)
- with pytest.raises(checks_mod.SkipCheck, match='not running against gentoo repo'):
+ with pytest.raises(checks_mod.SkipCheck, match="not running against gentoo repo"):
init_check(checks_mod.GentooRepoCheck, options)
def test_gentoo_repo(self, tool, make_repo):
- self.repo = make_repo(repo_id='gentoo')
- args = ['scan', '--repo', self.repo.location]
+ self.repo = make_repo(repo_id="gentoo")
+ args = ["scan", "--repo", self.repo.location]
options, _ = tool.parse_args(args)
assert init_check(checks_mod.GentooRepoCheck, options)
class TestOverlayCheck:
-
def test_non_overlay_repo(self, tool, testconfig):
tool.parser.set_defaults(config_path=testconfig)
- options, _ = tool.parse_args(['scan', '--repo', 'gentoo'])
- with pytest.raises(checks_mod.SkipCheck, match='not running against overlay'):
+ options, _ = tool.parse_args(["scan", "--repo", "gentoo"])
+ with pytest.raises(checks_mod.SkipCheck, match="not running against overlay"):
init_check(checks_mod.OverlayRepoCheck, options)
def test_overlay_repo(self, tool, testconfig):
tool.parser.set_defaults(config_path=testconfig)
- options, _ = tool.parse_args(['scan', '--repo', 'overlay'])
+ options, _ = tool.parse_args(["scan", "--repo", "overlay"])
assert init_check(checks_mod.OverlayRepoCheck, options)
class TestGitCommitsCheck:
-
@pytest.fixture(autouse=True)
def _setup(self, tool, make_repo, make_git_repo):
# initialize parent repo
self.parent_git_repo = make_git_repo()
- self.parent_repo = make_repo(
- self.parent_git_repo.path, repo_id='gentoo', arches=['amd64'])
- self.parent_git_repo.add_all('initial commit')
+ self.parent_repo = make_repo(self.parent_git_repo.path, repo_id="gentoo", arches=["amd64"])
+ self.parent_git_repo.add_all("initial commit")
# initialize child repo
self.child_git_repo = make_git_repo()
- self.child_git_repo.run(['git', 'remote', 'add', 'origin', self.parent_git_repo.path])
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
- self.child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
+ self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
self.child_repo = make_repo(self.child_git_repo.path)
def test_no_commits_option(self, tool, make_git_repo):
- options, _ = tool.parse_args(
- ['scan', '--repo', self.child_repo.location])
- with pytest.raises(checks_mod.SkipCheck, match='not scanning against git commits'):
+ options, _ = tool.parse_args(["scan", "--repo", self.child_repo.location])
+ with pytest.raises(checks_mod.SkipCheck, match="not scanning against git commits"):
init_check(checks_mod.GitCommitsCheck, options)
def test_commits_option(self, tool, make_repo):
- self.child_repo.create_ebuild('cat/pkg-1')
- self.child_git_repo.add_all('cat/pkg-1')
- options, _ = tool.parse_args(
- ['scan', '--repo', self.child_repo.location, '--commits'])
+ self.child_repo.create_ebuild("cat/pkg-1")
+ self.child_git_repo.add_all("cat/pkg-1")
+ options, _ = tool.parse_args(["scan", "--repo", self.child_repo.location, "--commits"])
assert init_check(checks_mod.GitCommitsCheck, options)
def test_no_local_commits(self, tool):
with pytest.raises(SystemExit) as excinfo:
- tool.parse_args(['scan', '--repo', self.child_repo.location, '--commits'])
+ tool.parse_args(["scan", "--repo", self.child_repo.location, "--commits"])
assert excinfo.value.code == 0
# parent repo has new commits
- self.parent_repo.create_ebuild('cat/pkg-1')
- self.parent_git_repo.add_all('cat/pkg-1')
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
+ self.parent_repo.create_ebuild("cat/pkg-1")
+ self.parent_git_repo.add_all("cat/pkg-1")
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
with pytest.raises(SystemExit) as excinfo:
- tool.parse_args(['scan', '--repo', self.child_repo.location, '--commits'])
+ tool.parse_args(["scan", "--repo", self.child_repo.location, "--commits"])
assert excinfo.value.code == 0
class TestNetworkCheck:
-
def test_network_disabled(self, tool):
- options, _ = tool.parse_args(['scan'])
- with pytest.raises(checks_mod.SkipCheck, match='network checks not enabled'):
+ options, _ = tool.parse_args(["scan"])
+ with pytest.raises(checks_mod.SkipCheck, match="network checks not enabled"):
init_check(checks_mod.NetworkCheck, options)
def test_network_enabled(self, tool):
- options, _ = tool.parse_args(['scan', '--net'])
+ options, _ = tool.parse_args(["scan", "--net"])
assert init_check(checks_mod.NetworkCheck, options)
diff --git a/tests/checks/test_cleanup.py b/tests/checks/test_cleanup.py
index 4e1aa2b3..7ca2f3b6 100644
--- a/tests/checks/test_cleanup.py
+++ b/tests/checks/test_cleanup.py
@@ -3,10 +3,12 @@ from snakeoil.cli import arghparse
from .. import misc
+
def mk_pkg(ver, keywords=("x86", "amd64"), slot="0", **kwds):
return misc.FakePkg(
- f"dev-util/diffball-{ver}",
- data={**kwds, "KEYWORDS": ' '.join(keywords), "SLOT": slot})
+ f"dev-util/diffball-{ver}", data={**kwds, "KEYWORDS": " ".join(keywords), "SLOT": slot}
+ )
+
class TestRedundantVersion(misc.ReportTestCase):
@@ -17,50 +19,43 @@ class TestRedundantVersion(misc.ReportTestCase):
self.assertNoReport(self.check, [mk_pkg("0.7.1")])
def test_live_version(self):
- self.assertNoReport(
- self.check, [mk_pkg('0.7'), mk_pkg('0.9', PROPERTIES='live')])
- self.assertNoReport(
- self.check, [mk_pkg('0.7'), mk_pkg('9999', PROPERTIES='live')])
+ self.assertNoReport(self.check, [mk_pkg("0.7"), mk_pkg("0.9", PROPERTIES="live")])
+ self.assertNoReport(self.check, [mk_pkg("0.7"), mk_pkg("9999", PROPERTIES="live")])
def test_no_keywords(self):
- self.assertNoReport(
- self.check, [mk_pkg('0.7'), mk_pkg('0.9', keywords=())])
+ self.assertNoReport(self.check, [mk_pkg("0.7"), mk_pkg("0.9", keywords=())])
def test_disabled_keywords(self):
- self.assertNoReport(
- self.check, [mk_pkg('0.7'), mk_pkg('0.9', keywords=('-x86', '-amd64'))])
+ self.assertNoReport(self.check, [mk_pkg("0.7"), mk_pkg("0.9", keywords=("-x86", "-amd64"))])
def test_single_redundant(self):
- r = self.assertReport(
- self.check, [mk_pkg(x) for x in ("0.7", "0.8")])
+ r = self.assertReport(self.check, [mk_pkg(x) for x in ("0.7", "0.8")])
assert isinstance(r, cleanup.RedundantVersion)
assert r.later_versions == ("0.8",)
- assert 'slot(0) keywords are overshadowed by version: 0.8' in str(r)
+ assert "slot(0) keywords are overshadowed by version: 0.8" in str(r)
def test_multiple_redundants(self):
- reports = self.assertReports(
- self.check, [mk_pkg(x) for x in ("0.7", "0.8", "0.9")])
- assert (
- [list(x.later_versions) for x in reports] ==
- [["0.8", "0.9"], ["0.9"]])
+ reports = self.assertReports(self.check, [mk_pkg(x) for x in ("0.7", "0.8", "0.9")])
+ assert [list(x.later_versions) for x in reports] == [["0.8", "0.9"], ["0.9"]]
for x in reports:
assert isinstance(x, cleanup.RedundantVersion)
def test_multiple_slots(self):
- l = [mk_pkg("0.7", slot="1"), mk_pkg("0.8"),
- mk_pkg("0.9", slot="1")]
+ l = [mk_pkg("0.7", slot="1"), mk_pkg("0.8"), mk_pkg("0.9", slot="1")]
r = self.assertReport(self.check, l)
assert r.later_versions == ("0.9",)
assert isinstance(r, cleanup.RedundantVersion)
- assert 'slot(1) keywords are overshadowed by version: 0.9' in str(r)
+ assert "slot(1) keywords are overshadowed by version: 0.9" in str(r)
l.append(mk_pkg("0.10", keywords=("x86", "amd64", "~sparc")))
reports = self.assertReports(self.check, l)
- assert ([list(x.later_versions) for x in reports] == [["0.9"], ["0.10"]])
+ assert [list(x.later_versions) for x in reports] == [["0.9"], ["0.10"]]
def test_multiple_keywords(self):
- l = [mk_pkg("0.1", keywords=("~x86", "~amd64")),
- mk_pkg("0.2", keywords=("x86", "~amd64", "~sparc"))]
+ l = [
+ mk_pkg("0.1", keywords=("~x86", "~amd64")),
+ mk_pkg("0.2", keywords=("x86", "~amd64", "~sparc")),
+ ]
r = self.assertReport(self.check, l)
assert r.later_versions == ("0.2",)
@@ -71,32 +66,33 @@ class TestRedundantVersionByStable(misc.ReportTestCase):
check = cleanup.RedundantVersionCheck(arghparse.Namespace(stable_only=True), profile_addon={})
def test_only_unstable(self):
- l = [mk_pkg("0.1", keywords=("~x86", "~amd64")),
- mk_pkg("0.2", keywords=("~x86", "~amd64"))]
+ l = [mk_pkg("0.1", keywords=("~x86", "~amd64")), mk_pkg("0.2", keywords=("~x86", "~amd64"))]
self.assertNoReport(self.check, l)
def test_only_stable(self):
- l = [mk_pkg("0.1", keywords=("x86", "amd64")),
- mk_pkg("0.2", keywords=("x86", "amd64"))]
+ l = [mk_pkg("0.1", keywords=("x86", "amd64")), mk_pkg("0.2", keywords=("x86", "amd64"))]
r = self.assertReport(self.check, l)
assert r.later_versions == ("0.2",)
def test_mixed_stable(self):
- l = [mk_pkg("0.1", keywords=("x86", "amd64", "~sparc")),
- mk_pkg("0.2", keywords=("x86", "amd64", "~sparc"))]
+ l = [
+ mk_pkg("0.1", keywords=("x86", "amd64", "~sparc")),
+ mk_pkg("0.2", keywords=("x86", "amd64", "~sparc")),
+ ]
r = self.assertReport(self.check, l)
assert r.later_versions == ("0.2",)
def test_mixed_history(self):
- l = [mk_pkg("0.1", keywords=("amd64")),
- mk_pkg("0.2", keywords=("~x86", "~amd64")),
- mk_pkg("0.3", keywords=("x86", "amd64")),
- mk_pkg("0.4", keywords=("~x86", "~amd64")),
- mk_pkg("0.5", keywords=("~x86", "~amd64"))]
+ l = [
+ mk_pkg("0.1", keywords=("amd64")),
+ mk_pkg("0.2", keywords=("~x86", "~amd64")),
+ mk_pkg("0.3", keywords=("x86", "amd64")),
+ mk_pkg("0.4", keywords=("~x86", "~amd64")),
+ mk_pkg("0.5", keywords=("~x86", "~amd64")),
+ ]
r = self.assertReport(self.check, l)
assert r.later_versions == ("0.3", "0.4", "0.5")
def test_no_redundant(self):
- l = [mk_pkg("0.1", keywords=("x86", "amd64")),
- mk_pkg("0.2", keywords=("x86", "~amd64"))]
+ l = [mk_pkg("0.1", keywords=("x86", "amd64")), mk_pkg("0.2", keywords=("x86", "~amd64"))]
self.assertNoReport(self.check, l)
diff --git a/tests/checks/test_codingstyle.py b/tests/checks/test_codingstyle.py
index 1c6a0075..528faa8b 100644
--- a/tests/checks/test_codingstyle.py
+++ b/tests/checks/test_codingstyle.py
@@ -30,8 +30,12 @@ class TestInsintoCheck(misc.ReportTestCase):
fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
bad = (
- "/etc/env.d", "/etc/conf.d", "/etc/init.d", "/etc/pam.d",
- "/usr/share/applications", "/usr/share/applications",
+ "/etc/env.d",
+ "/etc/conf.d",
+ "/etc/init.d",
+ "/etc/pam.d",
+ "/usr/share/applications",
+ "/usr/share/applications",
"//usr/share//applications",
)
check = self.check_kls(None)
@@ -42,11 +46,12 @@ class TestInsintoCheck(misc.ReportTestCase):
def test_docinto(self):
check = self.check_kls(None)
- for path in ('${PF}', '${P}', '${PF}/examples'):
+ for path in ("${PF}", "${P}", "${PF}/examples"):
for eapi_str, eapi in EAPI.known_eapis.items():
- fake_src = [f'\tinsinto /usr/share/doc/{path}\n']
+ fake_src = [f"\tinsinto /usr/share/doc/{path}\n"]
fake_pkg = misc.FakePkg(
- "dev-util/diff-0.5", data={'EAPI': eapi_str}, lines=fake_src)
+ "dev-util/diff-0.5", data={"EAPI": eapi_str}, lines=fake_src
+ )
if eapi.options.dodoc_allow_recursive:
r = self.assertReport(check, fake_pkg)
assert path in str(r)
@@ -68,10 +73,10 @@ class TestAbsoluteSymlink(misc.ReportTestCase):
absolute_prefixed = []
for path_var in codingstyle.PATH_VARIABLES:
- src, dest = ('/bin/blah', '/bin/bash')
+ src, dest = ("/bin/blah", "/bin/bash")
absolute_prefixed.append((f'"${{{path_var}}}"{src}', dest))
absolute_prefixed.append((f'"${{{path_var}%/}}"{src}', dest))
- src, dest = ('/bin/blah baz', '/bin/blahbaz')
+ src, dest = ("/bin/blah baz", "/bin/blahbaz")
absolute_prefixed.append((f'"${{{path_var}}}{src}"', dest))
absolute_prefixed.append((f'"${{{path_var}%/}}{src}"', dest))
@@ -99,7 +104,7 @@ class TestAbsoluteSymlink(misc.ReportTestCase):
assert len(reports) == len(absolute) + len(absolute_prefixed)
for r, (src, dest) in zip(reports, absolute + absolute_prefixed):
- assert f'dosym {src}' in str(r)
+ assert f"dosym {src}" in str(r)
class TestPathVariablesCheck(misc.ReportTestCase):
@@ -107,7 +112,7 @@ class TestPathVariablesCheck(misc.ReportTestCase):
check_kls = codingstyle.PathVariablesCheck
check = check_kls(None)
- def _found(self, cls, suffix=''):
+ def _found(self, cls, suffix=""):
# check single and multiple matches across all specified variables
for lines in (1, 2):
for path_var in codingstyle.PATH_VARIABLES:
@@ -117,17 +122,18 @@ class TestPathVariablesCheck(misc.ReportTestCase):
fake_src.extend(["}\n", "\n"])
for eapi_str, eapi in EAPI.known_eapis.items():
fake_pkg = misc.FakePkg(
- "dev-util/diff-0.5", data={'EAPI': eapi_str}, lines=fake_src)
+ "dev-util/diff-0.5", data={"EAPI": eapi_str}, lines=fake_src
+ )
if eapi.options.trailing_slash:
self.assertNoReport(self.check, fake_pkg)
else:
r = self.assertReport(self.check, fake_pkg)
assert isinstance(r, cls)
- assert r.match == f'${{{path_var}{suffix}}}'
+ assert r.match == f"${{{path_var}{suffix}}}"
assert r.lines == tuple(x + 2 for x in range(lines))
assert path_var in str(r)
- def _unfound(self, cls, suffix=''):
+ def _unfound(self, cls, suffix=""):
for path_var in codingstyle.PATH_VARIABLES:
fake_src = [
"src_install() {\n",
@@ -138,7 +144,8 @@ class TestPathVariablesCheck(misc.ReportTestCase):
]
for eapi_str, eapi in EAPI.known_eapis.items():
fake_pkg = misc.FakePkg(
- "dev-util/diffball-0.5", data={'EAPI': eapi_str}, lines=fake_src)
+ "dev-util/diffball-0.5", data={"EAPI": eapi_str}, lines=fake_src
+ )
self.assertNoReport(self.check, fake_pkg)
def test_missing_found(self):
@@ -148,14 +155,14 @@ class TestPathVariablesCheck(misc.ReportTestCase):
self._unfound(codingstyle.MissingSlash)
def test_unnecessary_found(self):
- self._found(codingstyle.UnnecessarySlashStrip, suffix='%/')
+ self._found(codingstyle.UnnecessarySlashStrip, suffix="%/")
def test_unnecessary_unfound(self):
- self._unfound(codingstyle.UnnecessarySlashStrip, suffix='%/')
+ self._unfound(codingstyle.UnnecessarySlashStrip, suffix="%/")
def test_double_prefix_found(self):
fake_src = [
- 'src_install() {\n',
+ "src_install() {\n",
' cp foo.py "${ED}$(python_get_sitedir)"\n',
# test non-match
' cp foo.py "${D%/}$(python_get_sitedir)"\n',
@@ -174,17 +181,17 @@ class TestPathVariablesCheck(misc.ReportTestCase):
' dodir /foo/bar "${EPREFIX}"/bar/baz\n',
# commented lines aren't flagged for double prefix usage
'# exeinto "${EPREFIX}/foo/bar"\n',
- '}\n'
+ "}\n",
]
fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
r = self.assertReports(self.check, fake_pkg)
cls = codingstyle.DoublePrefixInPath
expected_results = (
- ('${ED}$(python_get_sitedir)', 2),
- ('${ED%/}$(python_get_sitedir)', 4),
- ('${ED}/$(python_get_sitedir)', 5),
- ('${ED}${PYTHON_SITEDIR}', 6),
- ('${ED}${EPREFIX}', 7),
+ ("${ED}$(python_get_sitedir)", 2),
+ ("${ED%/}$(python_get_sitedir)", 4),
+ ("${ED}/$(python_get_sitedir)", 5),
+ ("${ED}${PYTHON_SITEDIR}", 6),
+ ("${ED}${EPREFIX}", 7),
('insinto "$(python_get_sitedir)', 8),
('exeinto "${EPREFIX}', 9),
('fowners foo:bar "$(python_get_sitedir)', 10),
@@ -199,16 +206,16 @@ class TestPathVariablesCheck(misc.ReportTestCase):
def test_double_prefix_unfound(self):
fake_src = [
- 'src_install() {\n',
+ "src_install() {\n",
' cp foo.py "${D}$(python_get_sitedir)"\n',
' cp foo "${D}${EPREFIX}/foo/bar"\n',
- ' insinto /foo/bar\n',
+ " insinto /foo/bar\n",
# potential false positives: stripping prefix
' insinto "${MYVAR#${EPREFIX}}"\n',
' insinto "${MYVAR#"${EPREFIX}"}"\n',
# combined commands
' dodir /etc/env.d && echo "FOO=${EPREFIX}"\n',
- '}\n'
+ "}\n",
]
fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
self.assertNoReport(self.check, fake_pkg)
@@ -219,99 +226,76 @@ class TestObsoleteUri(misc.ReportTestCase):
check_kls = codingstyle.ObsoleteUriCheck
def test_github_archive_uri(self):
- uri = 'https://github.com/foo/bar/archive/${PV}.tar.gz'
- fake_src = [
- f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n'
- ]
+ uri = "https://github.com/foo/bar/archive/${PV}.tar.gz"
+ fake_src = [f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n']
fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
self.assertNoReport(self.check_kls(None), fake_pkg)
def test_commented_github_tarball_uri(self):
- uri = 'https://github.com/foo/bar/tarball/${PV}'
- fake_src = [
- '# github tarball\n',
- '\n',
- f'# {uri}\n'
- ]
+ uri = "https://github.com/foo/bar/tarball/${PV}"
+ fake_src = ["# github tarball\n", "\n", f"# {uri}\n"]
fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
self.assertNoReport(self.check_kls(None), fake_pkg)
def test_github_tarball_uri(self):
- uri = 'https://github.com/foo/bar/tarball/${PV}'
- fake_src = [
- f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n'
- ]
+ uri = "https://github.com/foo/bar/tarball/${PV}"
+ fake_src = [f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n']
fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
r = self.assertReport(self.check_kls(None), fake_pkg)
assert r.line == 1
assert r.uri == uri
- assert (r.replacement ==
- 'https://github.com/foo/bar/archive/${PV}.tar.gz')
+ assert r.replacement == "https://github.com/foo/bar/archive/${PV}.tar.gz"
assert uri in str(r)
def test_github_zipball_uri(self):
- uri = 'https://github.com/foo/bar/zipball/${PV}'
- fake_src = [
- f'SRC_URI="{uri} -> ${{P}}.zip"\n'
- ]
+ uri = "https://github.com/foo/bar/zipball/${PV}"
+ fake_src = [f'SRC_URI="{uri} -> ${{P}}.zip"\n']
fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
r = self.assertReport(self.check_kls(None), fake_pkg)
assert r.line == 1
assert r.uri == uri
- assert (r.replacement ==
- 'https://github.com/foo/bar/archive/${PV}.tar.gz')
+ assert r.replacement == "https://github.com/foo/bar/archive/${PV}.tar.gz"
assert uri in str(r)
def test_gitlab_archive_uri(self):
- uri = 'https://gitlab.com/foo/bar/-/archive/${PV}/${P}.tar.gz'
- fake_src = [
- f'SRC_URI="{uri}"\n'
- ]
+ uri = "https://gitlab.com/foo/bar/-/archive/${PV}/${P}.tar.gz"
+ fake_src = [f'SRC_URI="{uri}"\n']
fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
self.assertNoReport(self.check_kls(None), fake_pkg)
def test_gitlab_tar_gz_uri(self):
- uri = 'https://gitlab.com/foo/bar/repository/archive.tar.gz?ref=${PV}'
- fake_src = [
- f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n'
- ]
+ uri = "https://gitlab.com/foo/bar/repository/archive.tar.gz?ref=${PV}"
+ fake_src = [f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n']
fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
r = self.assertReport(self.check_kls(None), fake_pkg)
assert r.line == 1
assert r.uri == uri
- assert (r.replacement ==
- 'https://gitlab.com/foo/bar/-/archive/${PV}/bar-${PV}.tar.gz')
+ assert r.replacement == "https://gitlab.com/foo/bar/-/archive/${PV}/bar-${PV}.tar.gz"
assert uri in str(r)
def test_gitlab_tar_bz2_uri(self):
- uri = 'https://gitlab.com/foo/bar/repository/archive.tar.bz2?ref=${PV}'
- fake_src = [
- f'SRC_URI="{uri} -> ${{P}}.tar.bz2"\n'
- ]
+ uri = "https://gitlab.com/foo/bar/repository/archive.tar.bz2?ref=${PV}"
+ fake_src = [f'SRC_URI="{uri} -> ${{P}}.tar.bz2"\n']
fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
r = self.assertReport(self.check_kls(None), fake_pkg)
assert r.line == 1
assert r.uri == uri
- assert (r.replacement ==
- 'https://gitlab.com/foo/bar/-/archive/${PV}/bar-${PV}.tar.bz2')
+ assert r.replacement == "https://gitlab.com/foo/bar/-/archive/${PV}/bar-${PV}.tar.bz2"
assert uri in str(r)
def test_gitlab_zip_uri(self):
- uri = 'https://gitlab.com/foo/bar/repository/archive.zip?ref=${PV}'
- fake_src = [
- f'SRC_URI="{uri} -> ${{P}}.zip"\n'
- ]
+ uri = "https://gitlab.com/foo/bar/repository/archive.zip?ref=${PV}"
+ fake_src = [f'SRC_URI="{uri} -> ${{P}}.zip"\n']
fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
r = self.assertReport(self.check_kls(None), fake_pkg)
assert r.line == 1
assert r.uri == uri
- assert (r.replacement ==
- 'https://gitlab.com/foo/bar/-/archive/${PV}/bar-${PV}.zip')
+ assert r.replacement == "https://gitlab.com/foo/bar/-/archive/${PV}/bar-${PV}.zip"
assert uri in str(r)
@@ -320,15 +304,13 @@ class TestBetterCompression(misc.ReportTestCase):
check_kls = codingstyle.BetterCompressionCheck
def test_github_archive_uri(self):
- uri = 'https://github.com/foo/bar/archive/${PV}.tar.gz'
- fake_src = [
- f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n'
- ]
+ uri = "https://github.com/foo/bar/archive/${PV}.tar.gz"
+ fake_src = [f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n']
fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
self.assertNoReport(self.check_kls(None), fake_pkg)
def test_comment_uri(self):
- uri = 'https://gitlab.com/GNOME/${PN}/-/archive/${PV}/${P}.tar'
+ uri = "https://gitlab.com/GNOME/${PN}/-/archive/${PV}/${P}.tar"
fake_src = [
f'#SRC_URI="{uri} -> ${{P}}.tar.gz"\n',
" ",
@@ -339,21 +321,22 @@ class TestBetterCompression(misc.ReportTestCase):
r = self.assertReport(self.check_kls(None), fake_pkg)
assert r.lineno == 4
- @pytest.mark.parametrize('uri', (
- 'https://gitlab.com/GNOME/${PN}/-/archive/${PV}/${P}.tar',
- 'https://gitlab.gnome.org/GNOME/${PN}/-/archive/${PV}/${P}.tar.gz',
- 'https://gitlab.gnome.org/GNOME/${PN}/-/archive/${PV}/${P}.zip',
- 'https://gitlab.freedesktop.org/glvnd/${PN}/-/archive/v${PV}/${PN}-v${PV}.tar.gz',
- ))
+ @pytest.mark.parametrize(
+ "uri",
+ (
+ "https://gitlab.com/GNOME/${PN}/-/archive/${PV}/${P}.tar",
+ "https://gitlab.gnome.org/GNOME/${PN}/-/archive/${PV}/${P}.tar.gz",
+ "https://gitlab.gnome.org/GNOME/${PN}/-/archive/${PV}/${P}.zip",
+ "https://gitlab.freedesktop.org/glvnd/${PN}/-/archive/v${PV}/${PN}-v${PV}.tar.gz",
+ ),
+ )
def test_gitlab_archive_uri(self, uri):
- fake_src = [
- f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n'
- ]
+ fake_src = [f'SRC_URI="{uri} -> ${{P}}.tar.gz"\n']
fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
r = self.assertReport(self.check_kls(None), fake_pkg)
assert r.lineno == 1
assert r.line == uri
- assert r.replacement == '.tar.bz2'
+ assert r.replacement == ".tar.bz2"
assert uri in str(r)
@@ -363,76 +346,85 @@ class TestStaticSrcUri(misc.ReportTestCase):
check = check_kls(None)
@staticmethod
- def _prepare_pkg(uri_value: str, rename: str = '', pkgver: str = 'diffball-0.1.2.3'):
+ def _prepare_pkg(uri_value: str, rename: str = "", pkgver: str = "diffball-0.1.2.3"):
if rename:
- rename = f' -> {rename}'
- uri = f'https://github.com/pkgcore/pkgcheck/archive/{uri_value}.tar.gz'
- fake_src = [
- f'SRC_URI="{uri}{rename}"\n'
- ]
+ rename = f" -> {rename}"
+ uri = f"https://github.com/pkgcore/pkgcheck/archive/{uri_value}.tar.gz"
+ fake_src = [f'SRC_URI="{uri}{rename}"\n']
- fake_pkg = misc.FakePkg(f"dev-util/{pkgver}", ebuild=''.join(fake_src), lines=fake_src)
- data = ''.join(fake_src).encode()
+ fake_pkg = misc.FakePkg(f"dev-util/{pkgver}", ebuild="".join(fake_src), lines=fake_src)
+ data = "".join(fake_src).encode()
return _ParsedPkg(data, pkg=fake_pkg)
-
- @pytest.mark.parametrize('value', (
- '${P}',
- '${PV}',
- 'v${PV}',
- 'random-0.1.2.3', # not a valid prefix
- '1.2.3', # currently we support only ver_cut with start=1
- '0', # for ver_cut only if more then 1 part
- ))
+ @pytest.mark.parametrize(
+ "value",
+ (
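+ # values that must stay unflagged: dynamic refs and unsupported ver_cut forms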
+ "${P}",
+ "${PV}",
+ "v${PV}",
+ "random-0.1.2.3", # not a valid prefix
+ "1.2.3", # currently we support only ver_cut with start=1
+ "0", # for ver_cut only if more then 1 part
+ ),
+ )
def test_no_report(self, value):
self.assertNoReport(self.check, self._prepare_pkg(value))
- @pytest.mark.parametrize(('value', 'static_str', 'replacement'), (
- ('diffball-0.1.2.3', 'diffball-0.1.2.3', '${P}'),
- ('Diffball-0.1.2.3', 'Diffball-0.1.2.3', '${P^}'),
- ('DIFFBALL-0.1.2.3', 'DIFFBALL-0.1.2.3', '${P^^}'),
- ('diffball-0123', 'diffball-0123', '${P//.}'),
- ('Diffball-0123', 'Diffball-0123', '${P^//.}'),
- ('0.1.2.3', '0.1.2.3', '${PV}'),
- ('v0.1.2.3', '0.1.2.3', '${PV}'),
- ('0.1.2', '0.1.2', '$(ver_cut 1-3)'),
- ('0.1', '0.1', '$(ver_cut 1-2)'),
- ('diffball-0.1.2', '0.1.2', '$(ver_cut 1-3)'),
- ('v0123', '0123', "${PV//.}"),
- ('012.3', '012.3', "$(ver_rs 1-2 '')"),
- ('012.3', '012.3', "$(ver_rs 1-2 '')"),
- ('0_1_2_3', '0_1_2_3', "${PV//./_}"),
- ('0_1_2.3', '0_1_2.3', "$(ver_rs 1-2 '_')"),
- ('0-1.2.3', '0-1.2.3', "$(ver_rs 1 '-')"),
- ))
+ @pytest.mark.parametrize(
+ ("value", "static_str", "replacement"),
+ (
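+ # each case: (SRC_URI value, flagged static string, suggested replacement)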
+ ("diffball-0.1.2.3", "diffball-0.1.2.3", "${P}"),
+ ("Diffball-0.1.2.3", "Diffball-0.1.2.3", "${P^}"),
+ ("DIFFBALL-0.1.2.3", "DIFFBALL-0.1.2.3", "${P^^}"),
+ ("diffball-0123", "diffball-0123", "${P//.}"),
+ ("Diffball-0123", "Diffball-0123", "${P^//.}"),
+ ("0.1.2.3", "0.1.2.3", "${PV}"),
+ ("v0.1.2.3", "0.1.2.3", "${PV}"),
+ ("0.1.2", "0.1.2", "$(ver_cut 1-3)"),
+ ("0.1", "0.1", "$(ver_cut 1-2)"),
+ ("diffball-0.1.2", "0.1.2", "$(ver_cut 1-3)"),
+ ("v0123", "0123", "${PV//.}"),
+ ("012.3", "012.3", "$(ver_rs 1-2 '')"),
+ ("012.3", "012.3", "$(ver_rs 1-2 '')"),
+ ("0_1_2_3", "0_1_2_3", "${PV//./_}"),
+ ("0_1_2.3", "0_1_2.3", "$(ver_rs 1-2 '_')"),
+ ("0-1.2.3", "0-1.2.3", "$(ver_rs 1 '-')"),
+ ),
+ )
def test_with_report(self, value, static_str, replacement):
r = self.assertReport(self.check, self._prepare_pkg(value))
assert r.static_str == static_str
assert r.replacement == replacement
def test_rename(self):
- self.assertNoReport(self.check, self._prepare_pkg('${P}', '${P}.tar.gz'))
+ self.assertNoReport(self.check, self._prepare_pkg("${P}", "${P}.tar.gz"))
- r = self.assertReport(self.check, self._prepare_pkg('${P}', 'diffball-0.1.2.3.tar.gz'))
- assert r.static_str == 'diffball-0.1.2.3'
- assert r.replacement == '${P}'
+ r = self.assertReport(self.check, self._prepare_pkg("${P}", "diffball-0.1.2.3.tar.gz"))
+ assert r.static_str == "diffball-0.1.2.3"
+ assert r.replacement == "${P}"
- r = self.assertReport(self.check, self._prepare_pkg('0.1.2.3', '${P}.tar.gz'))
- assert r.static_str == '0.1.2.3'
- assert r.replacement == '${PV}'
+ r = self.assertReport(self.check, self._prepare_pkg("0.1.2.3", "${P}.tar.gz"))
+ assert r.static_str == "0.1.2.3"
+ assert r.replacement == "${PV}"
- r = self.assertReport(self.check, self._prepare_pkg('diffball-0.1.2.3', 'diffball-0.1.2.3.tar.gz'))
- assert r.static_str == 'diffball-0.1.2.3'
- assert r.replacement == '${P}'
+ r = self.assertReport(
+ self.check, self._prepare_pkg("diffball-0.1.2.3", "diffball-0.1.2.3.tar.gz")
+ )
+ assert r.static_str == "diffball-0.1.2.3"
+ assert r.replacement == "${P}"
def test_capitalize(self):
- r = self.assertReport(self.check, self._prepare_pkg('DIFFBALL-0.1.2.3', pkgver='DIFFBALL-0.1.2.3'))
- assert r.static_str == 'DIFFBALL-0.1.2.3'
- assert r.replacement == '${P}'
+ r = self.assertReport(
+ self.check, self._prepare_pkg("DIFFBALL-0.1.2.3", pkgver="DIFFBALL-0.1.2.3")
+ )
+ assert r.static_str == "DIFFBALL-0.1.2.3"
+ assert r.replacement == "${P}"
- r = self.assertReport(self.check, self._prepare_pkg('Diffball-0.1.2.3', pkgver='Diffball-0.1.2.3'))
- assert r.static_str == 'Diffball-0.1.2.3'
- assert r.replacement == '${P}'
+ r = self.assertReport(
+ self.check, self._prepare_pkg("Diffball-0.1.2.3", pkgver="Diffball-0.1.2.3")
+ )
+ assert r.static_str == "Diffball-0.1.2.3"
+ assert r.replacement == "${P}"
class TestExcessiveLineLength(misc.ReportTestCase):
@@ -441,54 +433,68 @@ class TestExcessiveLineLength(misc.ReportTestCase):
check = check_kls(None)
word_length = codingstyle.ExcessiveLineLength.word_length
-
@staticmethod
def _prepare_pkg(*lines: str):
- fake_pkg = misc.FakePkg("dev-util/diffball-0", ebuild=''.join(lines), lines=lines)
- data = ''.join(lines).encode()
+ fake_pkg = misc.FakePkg("dev-util/diffball-0", ebuild="".join(lines), lines=lines)
+ data = "".join(lines).encode()
return _ParsedPkg(data, pkg=fake_pkg)
def test_normal_length(self):
self.assertNoReport(self.check, self._prepare_pkg('echo "short line"'))
def test_long_line(self):
- r = self.assertReport(self.check, self._prepare_pkg(f'echo {"a " * codingstyle.ExcessiveLineLength.line_length}'))
- assert r.lines == (1, )
+ r = self.assertReport(
+ self.check,
+ self._prepare_pkg(f'echo {"a " * codingstyle.ExcessiveLineLength.line_length}'),
+ )
+ assert r.lines == (1,)
def test_multiple_lines(self):
- r = self.assertReport(self.check, self._prepare_pkg(
- f'echo {"a " * codingstyle.ExcessiveLineLength.line_length}',
- 'echo "short line"',
- f'echo {"Hello " * codingstyle.ExcessiveLineLength.line_length}',
- ))
+ r = self.assertReport(
+ self.check,
+ self._prepare_pkg(
+ f'echo {"a " * codingstyle.ExcessiveLineLength.line_length}',
+ 'echo "short line"',
+ f'echo {"Hello " * codingstyle.ExcessiveLineLength.line_length}',
+ ),
+ )
assert r.lines == (1, 3)
- @pytest.mark.parametrize('variable', ('DESCRIPTION', 'KEYWORDS', 'IUSE'))
+ @pytest.mark.parametrize("variable", ("DESCRIPTION", "KEYWORDS", "IUSE"))
def test_special_variables(self, variable):
- self.assertNoReport(self.check, self._prepare_pkg(
- f'{variable}="{"a " * codingstyle.ExcessiveLineLength.line_length}"',
- f' {variable}="{"a " * codingstyle.ExcessiveLineLength.line_length}"',
- f'\t\t{variable}="{"a " * codingstyle.ExcessiveLineLength.line_length}"',
- ))
+ self.assertNoReport(
+ self.check,
+ self._prepare_pkg(
+ f'{variable}="{"a " * codingstyle.ExcessiveLineLength.line_length}"',
+ f' {variable}="{"a " * codingstyle.ExcessiveLineLength.line_length}"',
+ f'\t\t{variable}="{"a " * codingstyle.ExcessiveLineLength.line_length}"',
+ ),
+ )
def test_long_words(self):
- long_word = 'a' * self.word_length + 'b'
- medium_word = 'a' * (self.word_length // 2)
- r = self.assertReport(self.check, self._prepare_pkg(
- f'echo {"a" * codingstyle.ExcessiveLineLength.line_length}',
- f'echo {medium_word} {long_word}',
- f'echo {medium_word} {long_word[:-5]}',
- ))
- assert r.lines == (3, )
+ long_word = "a" * self.word_length + "b"
+ medium_word = "a" * (self.word_length // 2)
+ r = self.assertReport(
+ self.check,
+ self._prepare_pkg(
+ f'echo {"a" * codingstyle.ExcessiveLineLength.line_length}',
+ f"echo {medium_word} {long_word}",
+ f"echo {medium_word} {long_word[:-5]}",
+ ),
+ )
+ assert r.lines == (3,)
def test_long_quotes(self):
# The exception is for any quoted string with length >= word_length.
# Each quoted string is computed by itself.
- long_word = 'a ' * (self.word_length // 2) + 'b' # long quoted string, skipped
- medium_word = 'a ' * (self.word_length // 4) # not long enough string, not skipped
- r = self.assertReport(self.check, self._prepare_pkg(
- f'echo "{"a" * codingstyle.ExcessiveLineLength.line_length}"',
- f'echo "{medium_word}" "{long_word}"',
- 'echo' + f' "{medium_word}"' * 3,
- ))
- assert r.lines == (3, )
+ long_word = "a " * (self.word_length // 2) + "b" # long quoted string, skipped
+ medium_word = "a " * (self.word_length // 4) # not long enough string, not skipped
+ r = self.assertReport(
+ self.check,
+ self._prepare_pkg(
+ f'echo "{"a" * codingstyle.ExcessiveLineLength.line_length}"',
+ f'echo "{medium_word}" "{long_word}"',
+ "echo" + f' "{medium_word}"' * 3,
+ ),
+ )
+ assert r.lines == (3,)
diff --git a/tests/checks/test_dropped_keywords.py b/tests/checks/test_dropped_keywords.py
index 6b070919..fbfee5fc 100644
--- a/tests/checks/test_dropped_keywords.py
+++ b/tests/checks/test_dropped_keywords.py
@@ -8,65 +8,62 @@ class TestDroppedKeywords(misc.ReportTestCase):
check_kls = dropped_keywords.DroppedKeywordsCheck
- def mk_pkg(self, ver, keywords='', eclasses=(), **kwargs):
+ def mk_pkg(self, ver, keywords="", eclasses=(), **kwargs):
return misc.FakePkg(
f"dev-util/diffball-{ver}",
data={
**kwargs,
"KEYWORDS": keywords,
"_eclasses_": eclasses,
- })
+ },
+ )
- def mk_check(self, arches=('x86', 'amd64'), verbosity=0):
+ def mk_check(self, arches=("x86", "amd64"), verbosity=0):
options = arghparse.Namespace(arches=arches, verbosity=verbosity)
return self.check_kls(options, arches_addon=None)
def test_it(self):
# single version, shouldn't yield.
check = self.mk_check()
- self.assertNoReport(check, [self.mk_pkg('1')])
+ self.assertNoReport(check, [self.mk_pkg("1")])
# ebuilds without keywords are skipped
- self.assertNoReport(
- check, [self.mk_pkg("1", "x86 amd64"), self.mk_pkg("2")])
+ self.assertNoReport(check, [self.mk_pkg("1", "x86 amd64"), self.mk_pkg("2")])
# ensure it limits itself to just the arches we care about
# check unstable at the same time;
# finally, check '-' handling; if x86 -> -x86, that's valid.
self.assertNoReport(
check,
- [self.mk_pkg("1", "x86 ~amd64 ppc"),
- self.mk_pkg("2", "~amd64 x86"),
- self.mk_pkg("3", "-amd64 x86")])
+ [
+ self.mk_pkg("1", "x86 ~amd64 ppc"),
+ self.mk_pkg("2", "~amd64 x86"),
+ self.mk_pkg("3", "-amd64 x86"),
+ ],
+ )
# check added keyword handling
self.assertNoReport(
check,
- [self.mk_pkg("1", "amd64"),
- self.mk_pkg("2", "x86"),
- self.mk_pkg("3", "~x86 ~amd64")])
+ [self.mk_pkg("1", "amd64"), self.mk_pkg("2", "x86"), self.mk_pkg("3", "~x86 ~amd64")],
+ )
# check special keyword handling
- for key in ('-*', '*', '~*'):
- self.assertNoReport(
- check,
- [self.mk_pkg("1", "x86 ~amd64"),
- self.mk_pkg("2", key)])
+ for key in ("-*", "*", "~*"):
+ self.assertNoReport(check, [self.mk_pkg("1", "x86 ~amd64"), self.mk_pkg("2", key)])
# ensure it doesn't flag live ebuilds
self.assertNoReport(
- check,
- [self.mk_pkg("1", "x86 amd64"),
- self.mk_pkg("9999", "", PROPERTIES='live')])
+ check, [self.mk_pkg("1", "x86 amd64"), self.mk_pkg("9999", "", PROPERTIES="live")]
+ )
def test_verbose_mode(self):
# verbose mode outputs a report per version with dropped keywords
check = self.mk_check(verbosity=1)
reports = self.assertReports(
check,
- [self.mk_pkg("1", "amd64 x86"),
- self.mk_pkg("2", "amd64"),
- self.mk_pkg("3", "amd64")])
+ [self.mk_pkg("1", "amd64 x86"), self.mk_pkg("2", "amd64"), self.mk_pkg("3", "amd64")],
+ )
assert len(reports) == 2
assert {x.version for x in reports} == {"2", "3"}
assert set().union(*(x.arches for x in reports)) == {"x86"}
@@ -76,9 +73,8 @@ class TestDroppedKeywords(misc.ReportTestCase):
check = self.mk_check()
reports = self.assertReports(
check,
- [self.mk_pkg("1", "x86 amd64"),
- self.mk_pkg("2", "amd64"),
- self.mk_pkg("3", "amd64")])
+ [self.mk_pkg("1", "x86 amd64"), self.mk_pkg("2", "amd64"), self.mk_pkg("3", "amd64")],
+ )
assert len(reports) == 1
- assert reports[0].version == '3'
+ assert reports[0].version == "3"
assert set().union(*(x.arches for x in reports)) == {"x86"}
diff --git a/tests/checks/test_git.py b/tests/checks/test_git.py
index 150d8b8b..1cefd549 100644
--- a/tests/checks/test_git.py
+++ b/tests/checks/test_git.py
@@ -21,11 +21,11 @@ class FakeCommit(GitCommit):
def __init__(self, **kwargs):
commit_data = {
- 'hash': '7f9abd7ec2d079b1d0c36fc2f5d626ae0691757e',
- 'commit_time': 1613438722,
- 'author': 'author@domain.com',
- 'committer': 'author@domain.com',
- 'message': (),
+ "hash": "7f9abd7ec2d079b1d0c36fc2f5d626ae0691757e",
+ "commit_time": 1613438722,
+ "author": "author@domain.com",
+ "committer": "author@domain.com",
+ "message": (),
}
commit_data.update(kwargs)
super().__init__(**commit_data)
@@ -33,199 +33,217 @@ class FakeCommit(GitCommit):
class TestGitCommitMessageCheck(ReportTestCase):
check_kls = git_mod.GitCommitMessageCheck
- options = arghparse.Namespace(
- target_repo=FakeRepo(), commits='origin', gentoo_repo=True)
+ options = arghparse.Namespace(target_repo=FakeRepo(), commits="origin", gentoo_repo=True)
check = git_mod.GitCommitMessageCheck(options)
def test_sign_offs(self):
# assert that it checks for both author and committer
r = self.assertReport(
- self.check,
- FakeCommit(author='user1', committer='user2', message=['blah'])
+ self.check, FakeCommit(author="user1", committer="user2", message=["blah"])
)
assert isinstance(r, git_mod.MissingSignOff)
- assert r.missing_sign_offs == ('user1', 'user2')
+ assert r.missing_sign_offs == ("user1", "user2")
# assert that it handles author/committer being the same
self.assertNoReport(
self.check,
FakeCommit(
- author='user@user.com', committer='user@user.com',
- message=['summary', '', 'Signed-off-by: user@user.com']))
+ author="user@user.com",
+ committer="user@user.com",
+ message=["summary", "", "Signed-off-by: user@user.com"],
+ ),
+ )
# assert it can handle multiple sign offs.
self.assertNoReport(
self.check,
FakeCommit(
- author='user1', committer='user2',
- message=['summary', '', 'Signed-off-by: user2', 'Signed-off-by: user1']))
+ author="user1",
+ committer="user2",
+ message=["summary", "", "Signed-off-by: user2", "Signed-off-by: user1"],
+ ),
+ )
- def SO_commit(self, summary='summary', body='', tags=(), **kwargs):
+ def SO_commit(self, summary="summary", body="", tags=(), **kwargs):
"""Create a commit object from summary, body, and tags components."""
- author = kwargs.pop('author', 'author@domain.com')
- committer = kwargs.pop('committer', 'author@domain.com')
+ author = kwargs.pop("author", "author@domain.com")
+ committer = kwargs.pop("committer", "author@domain.com")
message = summary
if message:
if body:
- message += '\n\n' + body
- sign_offs = tuple(f'Signed-off-by: {user}' for user in {author, committer})
- message += '\n\n' + '\n'.join(tuple(tags) + sign_offs)
+ message += "\n\n" + body
+ sign_offs = tuple(f"Signed-off-by: {user}" for user in {author, committer})
+ message += "\n\n" + "\n".join(tuple(tags) + sign_offs)
return FakeCommit(author=author, committer=committer, message=message.splitlines())
def test_invalid_commit_tag(self):
# assert it doesn't puke if there are no tags
self.assertNoReport(self.check, self.SO_commit())
- self.assertNoReport(self.check, self.SO_commit(tags=['Bug: https://gentoo.org/blah']))
- self.assertNoReport(self.check, self.SO_commit(tags=['Close: https://gentoo.org/blah']))
+ self.assertNoReport(self.check, self.SO_commit(tags=["Bug: https://gentoo.org/blah"]))
+ self.assertNoReport(self.check, self.SO_commit(tags=["Close: https://gentoo.org/blah"]))
- r = self.assertReport(self.check, self.SO_commit(tags=['Bug: 123455']))
+ r = self.assertReport(self.check, self.SO_commit(tags=["Bug: 123455"]))
assert isinstance(r, git_mod.InvalidCommitTag)
- assert (r.tag, r.value, r.error) == ('Bug', '123455', "value isn't a URL")
+ assert (r.tag, r.value, r.error) == ("Bug", "123455", "value isn't a URL")
# Do a protocol check; this is more of an assertion against the parsing model
# used in the implementation.
- r = self.assertReport(self.check, self.SO_commit(tags=['Closes: ftp://blah.com/asdf']))
+ r = self.assertReport(self.check, self.SO_commit(tags=["Closes: ftp://blah.com/asdf"]))
assert isinstance(r, git_mod.InvalidCommitTag)
- assert r.tag == 'Closes'
- assert 'protocol' in r.error
+ assert r.tag == "Closes"
+ assert "protocol" in r.error
def test_gentoo_bug_tag(self):
- commit = self.SO_commit(tags=['Gentoo-Bug: https://bugs.gentoo.org/1'])
- assert 'Gentoo-Bug tag is no longer valid' in self.assertReport(self.check, commit).error
+ commit = self.SO_commit(tags=["Gentoo-Bug: https://bugs.gentoo.org/1"])
+ assert "Gentoo-Bug tag is no longer valid" in self.assertReport(self.check, commit).error
def test_commit_tags(self):
- ref = 'd8337304f09'
+ ref = "d8337304f09"
- for tag in ('Fixes', 'Reverts'):
+ for tag in ("Fixes", "Reverts"):
# no results on `git cat-file` failure
- with patch('pkgcheck.checks.git.subprocess.Popen') as git_cat:
+ with patch("pkgcheck.checks.git.subprocess.Popen") as git_cat:
# force using a new `git cat-file` process for each iteration
self.check._git_cat_file = None
git_cat.return_value.poll.return_value = -1
- commit = self.SO_commit(tags=[f'{tag}: {ref}'])
+ commit = self.SO_commit(tags=[f"{tag}: {ref}"])
self.assertNoReport(self.check, commit)
# missing and ambiguous object refs
- for status in ('missing', 'ambiguous'):
+ for status in ("missing", "ambiguous"):
self.check._git_cat_file = None
- with patch('pkgcheck.checks.git.subprocess.Popen') as git_cat:
+ with patch("pkgcheck.checks.git.subprocess.Popen") as git_cat:
git_cat.return_value.poll.return_value = None
- git_cat.return_value.stdout.readline.return_value = f'{ref} {status}'
- commit = self.SO_commit(tags=[f'{tag}: {ref}'])
+ git_cat.return_value.stdout.readline.return_value = f"{ref} {status}"
+ commit = self.SO_commit(tags=[f"{tag}: {ref}"])
r = self.assertReport(self.check, commit)
assert isinstance(r, git_mod.InvalidCommitTag)
- assert f'{status} commit' in r.error
+ assert f"{status} commit" in r.error
# valid tag reference
- with patch('pkgcheck.checks.git.subprocess.Popen') as git_cat:
+ with patch("pkgcheck.checks.git.subprocess.Popen") as git_cat:
self.check._git_cat_file = None
git_cat.return_value.poll.return_value = None
- git_cat.return_value.stdout.readline.return_value = f'{ref} commit 1234'
- commit = self.SO_commit(tags=[f'{tag}: {ref}'])
+ git_cat.return_value.stdout.readline.return_value = f"{ref} commit 1234"
+ commit = self.SO_commit(tags=[f"{tag}: {ref}"])
self.assertNoReport(self.check, commit)
def test_summary_length(self):
- self.assertNoReport(self.check, self.SO_commit('single summary headline'))
- self.assertNoReport(self.check, self.SO_commit('a' * 69))
- assert 'no commit message' in \
- self.assertReport(self.check, self.SO_commit('')).error
- assert 'summary is too long' in \
- self.assertReport(self.check, self.SO_commit('a' * 70)).error
+ self.assertNoReport(self.check, self.SO_commit("single summary headline"))
+ self.assertNoReport(self.check, self.SO_commit("a" * 69))
+ assert "no commit message" in self.assertReport(self.check, self.SO_commit("")).error
+ assert (
+ "summary is too long" in self.assertReport(self.check, self.SO_commit("a" * 70)).error
+ )
def test_message_body_length(self):
# message body lines longer than 80 chars are flagged
- long_line = 'a' + ' b' * 40
- assert 'line 2 greater than 80 chars' in \
- self.assertReport(
- self.check,
- self.SO_commit(body=long_line)).error
+ long_line = "a" + " b" * 40
+ assert (
+ "line 2 greater than 80 chars"
+ in self.assertReport(self.check, self.SO_commit(body=long_line)).error
+ )
# but not non-word lines
- long_line = 'a' * 81
+ long_line = "a" * 81
self.assertNoReport(self.check, self.SO_commit(body=long_line))
def test_message_empty_lines(self):
- message = textwrap.dedent("""\
- foo
+ message = textwrap.dedent(
+ """\
+ foo
- bar
+ bar
- Signed-off-by: author@domain.com
- """).splitlines()
+ Signed-off-by: author@domain.com
+ """
+ ).splitlines()
commit = FakeCommit(message=message)
self.assertNoReport(self.check, commit)
# missing empty line between summary and body
- message = textwrap.dedent("""\
- foo
- bar
+ message = textwrap.dedent(
+ """\
+ foo
+ bar
- Signed-off-by: author@domain.com
- """).splitlines()
+ Signed-off-by: author@domain.com
+ """
+ ).splitlines()
commit = FakeCommit(message=message)
r = self.assertReport(self.check, commit)
- assert 'missing empty line before body' in str(r)
+ assert "missing empty line before body" in str(r)
# missing empty line between summary and tags
- message = textwrap.dedent("""\
- foo
- Signed-off-by: author@domain.com
- """).splitlines()
+ message = textwrap.dedent(
+ """\
+ foo
+ Signed-off-by: author@domain.com
+ """
+ ).splitlines()
commit = FakeCommit(message=message)
r = self.assertReport(self.check, commit)
- assert 'missing empty line before tags' in str(r)
+ assert "missing empty line before tags" in str(r)
# missing empty lines between summary, body, and tags
- message = textwrap.dedent("""\
- foo
- bar
- Signed-off-by: author@domain.com
- """).splitlines()
+ message = textwrap.dedent(
+ """\
+ foo
+ bar
+ Signed-off-by: author@domain.com
+ """
+ ).splitlines()
commit = FakeCommit(message=message)
reports = self.assertReports(self.check, commit)
- assert 'missing empty line before body' in str(reports[0])
- assert 'missing empty line before tags' in str(reports[1])
+ assert "missing empty line before body" in str(reports[0])
+ assert "missing empty line before tags" in str(reports[1])
def test_footer_empty_lines(self):
- for whitespace in ('\t', ' ', ''):
+ for whitespace in ("\t", " ", ""):
# empty lines in footer are flagged
- message = textwrap.dedent(f"""\
- foon
-
- blah: dar
- {whitespace}
- footer: yep
- Signed-off-by: author@domain.com
- """).splitlines()
+ message = textwrap.dedent(
+ f"""\
+ foon
+
+ blah: dar
+ {whitespace}
+ footer: yep
+ Signed-off-by: author@domain.com
+ """
+ ).splitlines()
commit = FakeCommit(message=message)
r = self.assertReport(self.check, commit)
- assert 'empty line 4 in footer' in str(r)
+ assert "empty line 4 in footer" in str(r)
# empty lines at the end of a commit message are ignored
- message = textwrap.dedent(f"""\
+ message = textwrap.dedent(
+ f"""\
+ foon
+
+ blah: dar
+ footer: yep
+ Signed-off-by: author@domain.com
+ {whitespace}
+ """
+ ).splitlines()
+ commit = FakeCommit(message=message)
+ self.assertNoReport(self.check, commit)
+
+ def test_footer_non_tags(self):
+ message = textwrap.dedent(
+ """\
foon
blah: dar
footer: yep
+ some random line
Signed-off-by: author@domain.com
- {whitespace}
- """).splitlines()
- commit = FakeCommit(message=message)
- self.assertNoReport(self.check, commit)
-
- def test_footer_non_tags(self):
- message = textwrap.dedent("""\
- foon
-
- blah: dar
- footer: yep
- some random line
- Signed-off-by: author@domain.com
- """).splitlines()
+ """
+ ).splitlines()
commit = FakeCommit(message=message)
r = self.assertReport(self.check, commit)
- assert 'non-tag in footer, line 5' in str(r)
+ assert "non-tag in footer, line 5" in str(r)
class TestGitCommitMessageRepoCheck(ReportTestCase):
@@ -239,18 +257,17 @@ class TestGitCommitMessageRepoCheck(ReportTestCase):
# initialize parent repo
self.parent_git_repo = make_git_repo()
- self.parent_repo = make_repo(
- self.parent_git_repo.path, repo_id='gentoo', arches=['amd64'])
- self.parent_git_repo.add_all('initial commit')
+ self.parent_repo = make_repo(self.parent_git_repo.path, repo_id="gentoo", arches=["amd64"])
+ self.parent_git_repo.add_all("initial commit")
# create a stub pkg and commit it
- self.parent_repo.create_ebuild('cat/pkg-0')
- self.parent_git_repo.add_all('cat/pkg-0')
+ self.parent_repo.create_ebuild("cat/pkg-0")
+ self.parent_git_repo.add_all("cat/pkg-0")
# initialize child repo
self.child_git_repo = make_git_repo()
- self.child_git_repo.run(['git', 'remote', 'add', 'origin', self.parent_git_repo.path])
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
- self.child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
+ self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
self.child_repo = make_repo(self.child_git_repo.path)
def init_check(self, options=None, future=0):
@@ -263,96 +280,106 @@ class TestGitCommitMessageRepoCheck(ReportTestCase):
def _options(self, **kwargs):
args = [
- 'scan', '-q', '--cache-dir', self.cache_dir,
- '--repo', self.child_repo.location, '--commits',
+ "scan",
+ "-q",
+ "--cache-dir",
+ self.cache_dir,
+ "--repo",
+ self.child_repo.location,
+ "--commits",
]
options, _ = self._tool.parse_args(args)
return options
def test_bad_commit_summary_pkg(self):
# properly prefixed commit summary
- self.child_repo.create_ebuild('cat/pkg-1')
- self.child_git_repo.add_all('cat/pkg: version bump to 1', signoff=True)
+ self.child_repo.create_ebuild("cat/pkg-1")
+ self.child_git_repo.add_all("cat/pkg: version bump to 1", signoff=True)
self.init_check()
self.assertNoReport(self.check, self.source)
# properly prefixed multiple ebuild commit summary
- self.child_repo.create_ebuild('cat/pkg-2')
- self.child_repo.create_ebuild('cat/pkg-3')
- self.child_git_repo.add_all('cat/pkg: more version bumps', signoff=True)
+ self.child_repo.create_ebuild("cat/pkg-2")
+ self.child_repo.create_ebuild("cat/pkg-3")
+ self.child_git_repo.add_all("cat/pkg: more version bumps", signoff=True)
self.init_check()
self.assertNoReport(self.check, self.source)
# special categories that allow not having version in new package summary
- self.child_repo.create_ebuild('acct-user/pkgcheck-1')
- self.child_git_repo.add_all('acct-user/pkgcheck: add user for pkgcheck', signoff=True)
+ self.child_repo.create_ebuild("acct-user/pkgcheck-1")
+ self.child_git_repo.add_all("acct-user/pkgcheck: add user for pkgcheck", signoff=True)
self.init_check()
self.assertNoReport(self.check, self.source)
# special categories that allow not having version in bump version summary
- self.child_repo.create_ebuild('acct-user/pkgcheck-2')
- self.child_git_repo.add_all('acct-user/pkgcheck: bump user for pkgcheck', signoff=True)
+ self.child_repo.create_ebuild("acct-user/pkgcheck-2")
+ self.child_git_repo.add_all("acct-user/pkgcheck: bump user for pkgcheck", signoff=True)
self.init_check()
self.assertNoReport(self.check, self.source)
# poorly prefixed commit summary
- self.child_repo.create_ebuild('cat/pkg-4')
- self.child_git_repo.add_all('version bump to 4', signoff=True)
+ self.child_repo.create_ebuild("cat/pkg-4")
+ self.child_git_repo.add_all("version bump to 4", signoff=True)
commit1 = self.child_git_repo.HEAD
# commit summary missing package version
- self.child_repo.create_ebuild('cat/pkg-5')
- self.child_git_repo.add_all('cat/pkg: version bump', signoff=True)
+ self.child_repo.create_ebuild("cat/pkg-5")
+ self.child_git_repo.add_all("cat/pkg: version bump", signoff=True)
commit2 = self.child_git_repo.HEAD
# commit summary missing renamed package version
self.child_git_repo.move(
- 'cat/pkg/pkg-3.ebuild', 'cat/pkg/pkg-6.ebuild',
- msg='cat/pkg: version bump and remove old', signoff=True)
+ "cat/pkg/pkg-3.ebuild",
+ "cat/pkg/pkg-6.ebuild",
+ msg="cat/pkg: version bump and remove old",
+ signoff=True,
+ )
commit3 = self.child_git_repo.HEAD
# revision bumps aren't flagged
- self.child_repo.create_ebuild('cat/pkg-6-r1')
- self.child_git_repo.add_all('cat/pkg: revision bump', signoff=True)
+ self.child_repo.create_ebuild("cat/pkg-6-r1")
+ self.child_git_repo.add_all("cat/pkg: revision bump", signoff=True)
self.init_check()
# allow vVERSION
- self.child_repo.create_ebuild('cat/pkg-7')
- self.child_git_repo.add_all('cat/pkg: bump to v7', signoff=True)
+ self.child_repo.create_ebuild("cat/pkg-7")
+ self.child_git_repo.add_all("cat/pkg: bump to v7", signoff=True)
self.init_check()
results = self.assertReports(self.check, self.source)
r1 = git_mod.BadCommitSummary(
- "summary missing 'cat/pkg' package prefix",
- 'version bump to 4', commit=commit1)
+ "summary missing 'cat/pkg' package prefix", "version bump to 4", commit=commit1
+ )
r2 = git_mod.BadCommitSummary(
- "summary missing package version '5'",
- 'cat/pkg: version bump', commit=commit2)
+ "summary missing package version '5'", "cat/pkg: version bump", commit=commit2
+ )
r3 = git_mod.BadCommitSummary(
"summary missing package version '6'",
- 'cat/pkg: version bump and remove old', commit=commit3)
+ "cat/pkg: version bump and remove old",
+ commit=commit3,
+ )
assert set(results) == {r1, r2, r3}
def test_bad_commit_summary_category(self):
# properly prefixed commit summary
- self.child_repo.create_ebuild('cat/pkg1-1')
- self.child_repo.create_ebuild('cat/pkg2-1')
- self.child_git_repo.add_all('cat: various pkg updates', signoff=True)
+ self.child_repo.create_ebuild("cat/pkg1-1")
+ self.child_repo.create_ebuild("cat/pkg2-1")
+ self.child_git_repo.add_all("cat: various pkg updates", signoff=True)
self.init_check()
self.assertNoReport(self.check, self.source)
# multiple category commits are ignored
- self.child_repo.create_ebuild('newcat1/newcat1-1')
- self.child_repo.create_ebuild('newcat2/newpkg2-1')
- self.child_git_repo.add_all('various changes', signoff=True)
+ self.child_repo.create_ebuild("newcat1/newcat1-1")
+ self.child_repo.create_ebuild("newcat2/newpkg2-1")
+ self.child_git_repo.add_all("various changes", signoff=True)
self.init_check()
self.assertNoReport(self.check, self.source)
# poorly prefixed commit summary for single category changes
- self.child_repo.create_ebuild('cat/pkg3-1')
- self.child_repo.create_ebuild('cat/pkg4-1')
- self.child_git_repo.add_all('cat updates', signoff=True)
+ self.child_repo.create_ebuild("cat/pkg3-1")
+ self.child_repo.create_ebuild("cat/pkg4-1")
+ self.child_git_repo.add_all("cat updates", signoff=True)
commit = self.child_git_repo.HEAD
self.init_check()
r = self.assertReport(self.check, self.source)
expected = git_mod.BadCommitSummary(
- "summary missing 'cat' category prefix",
- 'cat updates', commit=commit)
+ "summary missing 'cat' category prefix", "cat updates", commit=commit
+ )
assert r == expected
@@ -367,18 +394,17 @@ class TestGitPkgCommitsCheck(ReportTestCase):
# initialize parent repo
self.parent_git_repo = make_git_repo()
- self.parent_repo = make_repo(
- self.parent_git_repo.path, repo_id='gentoo', arches=['amd64'])
- self.parent_git_repo.add_all('initial commit')
+ self.parent_repo = make_repo(self.parent_git_repo.path, repo_id="gentoo", arches=["amd64"])
+ self.parent_git_repo.add_all("initial commit")
# create a stub pkg and commit it
- self.parent_repo.create_ebuild('cat/pkg-0')
- self.parent_git_repo.add_all('cat/pkg-0')
+ self.parent_repo.create_ebuild("cat/pkg-0")
+ self.parent_git_repo.add_all("cat/pkg-0")
# initialize child repo
self.child_git_repo = make_git_repo()
- self.child_git_repo.run(['git', 'remote', 'add', 'origin', self.parent_git_repo.path])
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
- self.child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
+ self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
self.child_repo = make_repo(self.child_git_repo.path)
def init_check(self, options=None, future=0):
@@ -391,193 +417,206 @@ class TestGitPkgCommitsCheck(ReportTestCase):
def _options(self, **kwargs):
args = [
- 'scan', '-q', '--cache-dir', self.cache_dir,
- '--repo', self.child_repo.location, '--commits',
+ "scan",
+ "-q",
+ "--cache-dir",
+ self.cache_dir,
+ "--repo",
+ self.child_repo.location,
+ "--commits",
]
options, _ = self._tool.parse_args(args)
return options
def test_broken_ebuilds_ignored(self):
- self.child_repo.create_ebuild('newcat/pkg-1', eapi='-1')
- self.child_git_repo.add_all('newcat/pkg: initial import')
+ self.child_repo.create_ebuild("newcat/pkg-1", eapi="-1")
+ self.child_git_repo.add_all("newcat/pkg: initial import")
self.init_check()
self.assertNoReport(self.check, self.source)
def test_direct_stable(self):
- self.child_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
- self.child_git_repo.add_all('cat/pkg: version bump to 1')
+ self.child_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
+ self.child_git_repo.add_all("cat/pkg: version bump to 1")
self.init_check()
r = self.assertReport(self.check, self.source)
- expected = git_mod.DirectStableKeywords(['amd64'], pkg=CPV('cat/pkg-1'))
+ expected = git_mod.DirectStableKeywords(["amd64"], pkg=CPV("cat/pkg-1"))
assert r == expected
def test_direct_no_maintainer(self):
- self.child_repo.create_ebuild('newcat/pkg-1')
- self.child_git_repo.add_all('newcat/pkg: initial import')
+ self.child_repo.create_ebuild("newcat/pkg-1")
+ self.child_git_repo.add_all("newcat/pkg: initial import")
self.init_check()
r = self.assertReport(self.check, self.source)
- expected = git_mod.DirectNoMaintainer(pkg=CPV('newcat/pkg-1'))
+ expected = git_mod.DirectNoMaintainer(pkg=CPV("newcat/pkg-1"))
assert r == expected
def test_ebuild_incorrect_copyright(self):
- self.child_repo.create_ebuild('cat/pkg-1')
- line = '# Copyright 1999-2019 Gentoo Authors'
- with open(pjoin(self.child_git_repo.path, 'cat/pkg/pkg-1.ebuild'), 'r+') as f:
+ self.child_repo.create_ebuild("cat/pkg-1")
+ line = "# Copyright 1999-2019 Gentoo Authors"
+ with open(pjoin(self.child_git_repo.path, "cat/pkg/pkg-1.ebuild"), "r+") as f:
lines = f.read().splitlines()
lines[0] = line
f.seek(0)
f.truncate()
- f.write('\n'.join(lines))
- self.child_git_repo.add_all('cat/pkg: version bump to 1')
+ f.write("\n".join(lines))
+ self.child_git_repo.add_all("cat/pkg: version bump to 1")
self.init_check()
r = self.assertReport(self.check, self.source)
- expected = git_mod.EbuildIncorrectCopyright('2019', line=line, pkg=CPV('cat/pkg-1'))
+ expected = git_mod.EbuildIncorrectCopyright("2019", line=line, pkg=CPV("cat/pkg-1"))
assert r == expected
def test_missing_copyright(self):
"""Ebuilds missing copyrights entirely are handled by EbuildHeaderCheck."""
- self.child_repo.create_ebuild('cat/pkg-1')
- with open(pjoin(self.child_git_repo.path, 'cat/pkg/pkg-1.ebuild'), 'r+') as f:
+ self.child_repo.create_ebuild("cat/pkg-1")
+ with open(pjoin(self.child_git_repo.path, "cat/pkg/pkg-1.ebuild"), "r+") as f:
lines = f.read().splitlines()
f.seek(0)
f.truncate()
- f.write('\n'.join(lines[1:]))
- self.child_git_repo.add_all('cat/pkg: update ebuild')
+ f.write("\n".join(lines[1:]))
+ self.child_git_repo.add_all("cat/pkg: update ebuild")
self.init_check()
self.assertNoReport(self.check, self.source)
def test_dropped_stable_keywords(self):
# add stable ebuild to parent repo
- self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
- self.parent_git_repo.add_all('cat/pkg: version bump to 1')
+ self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
+ self.parent_git_repo.add_all("cat/pkg: version bump to 1")
# pull changes and remove it from the child repo
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
- self.child_git_repo.remove('cat/pkg/pkg-1.ebuild', msg='cat/pkg: remove 1')
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
+ self.child_git_repo.remove("cat/pkg/pkg-1.ebuild", msg="cat/pkg: remove 1")
commit = self.child_git_repo.HEAD
self.init_check()
r = self.assertReport(self.check, self.source)
- expected = git_mod.DroppedStableKeywords(['amd64'], commit, pkg=CPV('cat/pkg-1'))
+ expected = git_mod.DroppedStableKeywords(["amd64"], commit, pkg=CPV("cat/pkg-1"))
assert r == expected
# git archive failures error out
- with patch('pkgcheck.checks.git.subprocess.Popen') as git_archive:
+ with patch("pkgcheck.checks.git.subprocess.Popen") as git_archive:
git_archive.return_value.poll.return_value = -1
- with pytest.raises(PkgcheckUserException, match='failed populating archive repo'):
+ with pytest.raises(PkgcheckUserException, match="failed populating archive repo"):
self.assertNoReport(self.check, self.source)
def test_dropped_unstable_keywords(self):
# add unstable ebuild to parent repo
- self.parent_repo.create_ebuild('cat/pkg-1', keywords=['~amd64'])
- self.parent_git_repo.add_all('cat/pkg: version bump to 1')
+ self.parent_repo.create_ebuild("cat/pkg-1", keywords=["~amd64"])
+ self.parent_git_repo.add_all("cat/pkg: version bump to 1")
# pull changes and remove it from the child repo
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
- self.child_git_repo.remove('cat/pkg/pkg-1.ebuild', msg='cat/pkg: remove 1')
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
+ self.child_git_repo.remove("cat/pkg/pkg-1.ebuild", msg="cat/pkg: remove 1")
commit = self.child_git_repo.HEAD
self.init_check()
r = self.assertReport(self.check, self.source)
- expected = git_mod.DroppedUnstableKeywords(['~amd64'], commit, pkg=CPV('cat/pkg-1'))
+ expected = git_mod.DroppedUnstableKeywords(["~amd64"], commit, pkg=CPV("cat/pkg-1"))
assert r == expected
def test_dropped_keywords_inherit_eclass(self):
# add eclass and unstable ebuild inheriting it to parent repo
- with open(pjoin(self.parent_git_repo.path, 'eclass/make.eclass'), 'w') as f:
- f.write(':')
- self.parent_git_repo.add_all('make.eclass: initial commit')
- self.parent_repo.create_ebuild('cat/pkg-1', keywords=['~amd64'], data="inherit make")
- self.parent_git_repo.add_all('cat/pkg: version bump to 1')
+ with open(pjoin(self.parent_git_repo.path, "eclass/make.eclass"), "w") as f:
+ f.write(":")
+ self.parent_git_repo.add_all("make.eclass: initial commit")
+ self.parent_repo.create_ebuild("cat/pkg-1", keywords=["~amd64"], data="inherit make")
+ self.parent_git_repo.add_all("cat/pkg: version bump to 1")
# pull changes and remove it from the child repo
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
- self.child_git_repo.remove('cat/pkg/pkg-1.ebuild', msg='cat/pkg: remove 1')
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
+ self.child_git_repo.remove("cat/pkg/pkg-1.ebuild", msg="cat/pkg: remove 1")
commit = self.child_git_repo.HEAD
self.init_check()
r = self.assertReport(self.check, self.source)
- expected = git_mod.DroppedUnstableKeywords(['~amd64'], commit, pkg=CPV('cat/pkg-1'))
+ expected = git_mod.DroppedUnstableKeywords(["~amd64"], commit, pkg=CPV("cat/pkg-1"))
assert r == expected
def test_rdepend_change(self):
# add pkgs to parent repo
- self.parent_repo.create_ebuild('cat/dep1-0')
- self.parent_git_repo.add_all('cat/dep1: initial import')
- self.parent_repo.create_ebuild('cat/dep2-0')
- self.parent_git_repo.add_all('cat/dep2: initial import')
- self.parent_repo.create_ebuild('newcat/newpkg-1')
- self.parent_git_repo.add_all('newcat/newpkg: initial import')
- self.parent_repo.create_ebuild('newcat/newpkg-2', rdepend="cat/dep1 cat/dep2")
- self.parent_git_repo.add_all('newcat/newpkg: version bump')
+ self.parent_repo.create_ebuild("cat/dep1-0")
+ self.parent_git_repo.add_all("cat/dep1: initial import")
+ self.parent_repo.create_ebuild("cat/dep2-0")
+ self.parent_git_repo.add_all("cat/dep2: initial import")
+ self.parent_repo.create_ebuild("newcat/newpkg-1")
+ self.parent_git_repo.add_all("newcat/newpkg: initial import")
+ self.parent_repo.create_ebuild("newcat/newpkg-2", rdepend="cat/dep1 cat/dep2")
+ self.parent_git_repo.add_all("newcat/newpkg: version bump")
# pull changes to child repo
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
# change pkg RDEPEND and commit
- with open(pjoin(self.child_git_repo.path, 'cat/pkg/pkg-0.ebuild'), 'a') as f:
+ with open(pjoin(self.child_git_repo.path, "cat/pkg/pkg-0.ebuild"), "a") as f:
f.write('RDEPEND="cat/dep1"\n')
- self.child_git_repo.add_all('cat/pkg: update deps')
+ self.child_git_repo.add_all("cat/pkg: update deps")
# change live pkg RDEPEND and commit
- with open(pjoin(self.child_git_repo.path, 'newcat/newpkg/newpkg-1.ebuild'), 'a') as f:
+ with open(pjoin(self.child_git_repo.path, "newcat/newpkg/newpkg-1.ebuild"), "a") as f:
f.write('RDEPEND="cat/dep1"\n')
f.write('PROPERTIES="live"\n')
- self.child_git_repo.add_all('newcat/newpkg: update deps')
+ self.child_git_repo.add_all("newcat/newpkg: update deps")
# reorder pkg RDEPEND and commit
- with open(pjoin(self.child_git_repo.path, 'newcat/newpkg/newpkg-2.ebuild'), 'a') as f:
+ with open(pjoin(self.child_git_repo.path, "newcat/newpkg/newpkg-2.ebuild"), "a") as f:
f.write('RDEPEND="cat/dep2 cat/dep1"\n')
- self.child_git_repo.add_all('newcat/newpkg: reorder deps')
+ self.child_git_repo.add_all("newcat/newpkg: reorder deps")
self.init_check()
r = self.assertReport(self.check, self.source)
# only one result is expected since live ebuilds are ignored
- expected = git_mod.RdependChange(pkg=CPV('cat/pkg-0'))
+ expected = git_mod.RdependChange(pkg=CPV("cat/pkg-0"))
assert r == expected
def test_missing_slotmove(self):
# add new ebuild to parent repo
- self.parent_repo.create_ebuild('cat/pkg-1', keywords=['~amd64'])
- self.parent_git_repo.add_all('cat/pkg: version bump to 1')
+ self.parent_repo.create_ebuild("cat/pkg-1", keywords=["~amd64"])
+ self.parent_git_repo.add_all("cat/pkg: version bump to 1")
# pull changes and modify its slot in the child repo
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
- self.child_repo.create_ebuild('cat/pkg-1', keywords=['~amd64'], slot='1')
- self.child_git_repo.add_all('cat/pkg: update SLOT to 1')
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
+ self.child_repo.create_ebuild("cat/pkg-1", keywords=["~amd64"], slot="1")
+ self.child_git_repo.add_all("cat/pkg: update SLOT to 1")
self.init_check()
r = self.assertReport(self.check, self.source)
- expected = git_mod.MissingSlotmove('0', '1', pkg=CPV('cat/pkg-1'))
+ expected = git_mod.MissingSlotmove("0", "1", pkg=CPV("cat/pkg-1"))
assert r == expected
# create a slot move update and the result goes away
- updates_dir = pjoin(self.child_git_repo.path, 'profiles', 'updates')
+ updates_dir = pjoin(self.child_git_repo.path, "profiles", "updates")
os.makedirs(updates_dir, exist_ok=True)
- with open(pjoin(updates_dir, '4Q-2020'), 'w') as f:
- f.write(textwrap.dedent("""\
- slotmove ~cat/foo-0 0 1
- slotmove ~cat/pkg-1 0 1
- """))
+ with open(pjoin(updates_dir, "4Q-2020"), "w") as f:
+ f.write(
+ textwrap.dedent(
+ """\
+ slotmove ~cat/foo-0 0 1
+ slotmove ~cat/pkg-1 0 1
+ """
+ )
+ )
# force repo_config pkg updates jitted attr to be reset
self.init_check()
self.assertNoReport(self.check, self.source)
# git archive failures error out
- with patch('pkgcheck.checks.git.subprocess.Popen') as git_archive:
+ with patch("pkgcheck.checks.git.subprocess.Popen") as git_archive:
git_archive.return_value.poll.return_value = -1
- with pytest.raises(PkgcheckUserException, match='failed populating archive repo'):
+ with pytest.raises(PkgcheckUserException, match="failed populating archive repo"):
self.assertNoReport(self.check, self.source)
def test_missing_move(self):
# verify ebuild renames at the git level don't trigger reports
- self.child_repo.create_ebuild('cat/pkg-1')
- self.child_git_repo.run(['git', 'rm', 'cat/pkg/pkg-0.ebuild'])
- self.child_git_repo.add_all('cat/pkg: version bump and remove old')
+ self.child_repo.create_ebuild("cat/pkg-1")
+ self.child_git_repo.run(["git", "rm", "cat/pkg/pkg-0.ebuild"])
+ self.child_git_repo.add_all("cat/pkg: version bump and remove old")
self.init_check()
self.assertNoReport(self.check, self.source)
- self.child_git_repo.move('cat', 'newcat', msg='newcat/pkg: moved pkg')
+ self.child_git_repo.move("cat", "newcat", msg="newcat/pkg: moved pkg")
self.init_check()
r = self.assertReport(self.check, self.source)
- expected = git_mod.MissingMove('cat/pkg', 'newcat/pkg', pkg=CPV('newcat/pkg-0'))
+ expected = git_mod.MissingMove("cat/pkg", "newcat/pkg", pkg=CPV("newcat/pkg-0"))
assert r == expected
# create a package move update and the result goes away
- updates_dir = pjoin(self.child_git_repo.path, 'profiles', 'updates')
+ updates_dir = pjoin(self.child_git_repo.path, "profiles", "updates")
os.makedirs(updates_dir, exist_ok=True)
- with open(pjoin(updates_dir, '4Q-2020'), 'w') as f:
- f.write(textwrap.dedent("""\
- move cat/foo newcat/foo
- move cat/pkg newcat/pkg
- """))
+ with open(pjoin(updates_dir, "4Q-2020"), "w") as f:
+ f.write(
+ textwrap.dedent(
+ """\
+ move cat/foo newcat/foo
+ move cat/pkg newcat/pkg
+ """
+ )
+ )
# force repo_config pkg updates jitted attr to be reset
self.init_check()
self.assertNoReport(self.check, self.source)
@@ -594,18 +633,17 @@ class TestGitEclassCommitsCheck(ReportTestCase):
# initialize parent repo
self.parent_git_repo = make_git_repo()
- self.parent_repo = make_repo(
- self.parent_git_repo.path, repo_id='gentoo', arches=['amd64'])
- self.parent_git_repo.add_all('initial commit')
+ self.parent_repo = make_repo(self.parent_git_repo.path, repo_id="gentoo", arches=["amd64"])
+ self.parent_git_repo.add_all("initial commit")
# create a stub eclass and commit it
- touch(pjoin(self.parent_git_repo.path, 'eclass', 'foo.eclass'))
- self.parent_git_repo.add_all('eclass: add foo eclass')
+ touch(pjoin(self.parent_git_repo.path, "eclass", "foo.eclass"))
+ self.parent_git_repo.add_all("eclass: add foo eclass")
# initialize child repo
self.child_git_repo = make_git_repo()
- self.child_git_repo.run(['git', 'remote', 'add', 'origin', self.parent_git_repo.path])
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
- self.child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
+ self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
self.child_repo = make_repo(self.child_git_repo.path)
def init_check(self, options=None, future=0):
@@ -618,35 +656,40 @@ class TestGitEclassCommitsCheck(ReportTestCase):
def _options(self, **kwargs):
args = [
- 'scan', '-q', '--cache-dir', self.cache_dir,
- '--repo', self.child_repo.location, '--commits',
+ "scan",
+ "-q",
+ "--cache-dir",
+ self.cache_dir,
+ "--repo",
+ self.child_repo.location,
+ "--commits",
]
options, _ = self._tool.parse_args(args)
return options
def test_eclass_incorrect_copyright(self):
- line = '# Copyright 1999-2019 Gentoo Authors'
- with open(pjoin(self.child_git_repo.path, 'eclass/foo.eclass'), 'w') as f:
- f.write(f'{line}\n')
- self.child_git_repo.add_all('eclass: update foo')
+ line = "# Copyright 1999-2019 Gentoo Authors"
+ with open(pjoin(self.child_git_repo.path, "eclass/foo.eclass"), "w") as f:
+ f.write(f"{line}\n")
+ self.child_git_repo.add_all("eclass: update foo")
self.init_check()
r = self.assertReport(self.check, self.source)
- expected = git_mod.EclassIncorrectCopyright('2019', line, eclass='foo')
+ expected = git_mod.EclassIncorrectCopyright("2019", line, eclass="foo")
assert r == expected
# correcting the year results in no report
year = datetime.today().year
- line = f'# Copyright 1999-{year} Gentoo Authors'
- with open(pjoin(self.child_git_repo.path, 'eclass/foo.eclass'), 'w') as f:
- f.write(f'{line}\n')
- self.child_git_repo.add_all('eclass: fix copyright year')
+ line = f"# Copyright 1999-{year} Gentoo Authors"
+ with open(pjoin(self.child_git_repo.path, "eclass/foo.eclass"), "w") as f:
+ f.write(f"{line}\n")
+ self.child_git_repo.add_all("eclass: fix copyright year")
self.init_check()
self.assertNoReport(self.check, self.source)
def test_eclass_missing_copyright(self):
"""Eclasses missing copyrights entirely are handled by EclassHeaderCheck."""
- with open(pjoin(self.child_git_repo.path, 'eclass/foo.eclass'), 'w') as f:
- f.write('# comment\n')
- self.child_git_repo.add_all('eclass: update foo')
+ with open(pjoin(self.child_git_repo.path, "eclass/foo.eclass"), "w") as f:
+ f.write("# comment\n")
+ self.child_git_repo.add_all("eclass: update foo")
self.init_check()
self.assertNoReport(self.check, self.source)
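
The churn above is purely mechanical: black normalizes string quotes to double quotes and explodes call sites that exceed the configured line length, without changing behavior. A minimal sketch of verifying that, assuming black is installed and a 100-character line length (which fits the widths seen in these hunks); the sample line is illustrative:

    import ast

    import black

    old = "self.child_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])"
    new = black.format_str(old, mode=black.Mode(line_length=100))
    # quotes change, the parse tree does not
    assert new.strip() == 'self.child_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])'
    assert ast.dump(ast.parse(old)) == ast.dump(ast.parse(new))
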
diff --git a/tests/checks/test_glsa.py b/tests/checks/test_glsa.py
index c3182be1..bec35857 100644
--- a/tests/checks/test_glsa.py
+++ b/tests/checks/test_glsa.py
@@ -31,34 +31,34 @@ class TestVulnerabilitiesCheck(misc.ReportTestCase):
def test_no_glsa_dir(self, tmp_path):
# TODO: switch to using a repo fixture when available
repo_dir = str(tmp_path)
- os.makedirs(pjoin(repo_dir, 'profiles'))
- os.makedirs(pjoin(repo_dir, 'metadata'))
- with open(pjoin(repo_dir, 'profiles', 'repo_name'), 'w') as f:
- f.write('fake\n')
- with open(pjoin(repo_dir, 'metadata', 'layout.conf'), 'w') as f:
- f.write('masters =\n')
+ os.makedirs(pjoin(repo_dir, "profiles"))
+ os.makedirs(pjoin(repo_dir, "metadata"))
+ with open(pjoin(repo_dir, "profiles", "repo_name"), "w") as f:
+ f.write("fake\n")
+ with open(pjoin(repo_dir, "metadata", "layout.conf"), "w") as f:
+ f.write("masters =\n")
repo_config = repo_objs.RepoConfig(location=repo_dir)
repo = repository.UnconfiguredTree(repo_config.location, repo_config=repo_config)
options = arghparse.Namespace(glsa_dir=None, target_repo=repo, gentoo_repo=True)
- with pytest.raises(SkipCheck, match='no available glsa source'):
+ with pytest.raises(SkipCheck, match="no available glsa source"):
glsa.GlsaCheck(options)
def test_repo_glsa_dir(self, tmp_path):
# TODO: switch to using a repo fixture when available
repo_dir = str(tmp_path)
- os.makedirs(pjoin(repo_dir, 'profiles'))
- os.makedirs(pjoin(repo_dir, 'metadata', 'glsa'))
- with open(pjoin(repo_dir, 'profiles', 'repo_name'), 'w') as f:
- f.write('fake\n')
- with open(pjoin(repo_dir, 'metadata', 'layout.conf'), 'w') as f:
- f.write('masters =\n')
- with open(pjoin(repo_dir, 'metadata', 'glsa', 'glsa-202010-01.xml'), 'w') as f:
+ os.makedirs(pjoin(repo_dir, "profiles"))
+ os.makedirs(pjoin(repo_dir, "metadata", "glsa"))
+ with open(pjoin(repo_dir, "profiles", "repo_name"), "w") as f:
+ f.write("fake\n")
+ with open(pjoin(repo_dir, "metadata", "layout.conf"), "w") as f:
+ f.write("masters =\n")
+ with open(pjoin(repo_dir, "metadata", "glsa", "glsa-202010-01.xml"), "w") as f:
f.write(mk_glsa(("dev-util/diffball", ([], ["~>=0.5-r3"]))))
repo_config = repo_objs.RepoConfig(location=repo_dir)
repo = repository.UnconfiguredTree(repo_config.location, repo_config=repo_config)
options = arghparse.Namespace(glsa_dir=None, target_repo=repo, gentoo_repo=True)
check = glsa.GlsaCheck(options)
- assert 'dev-util/diffball' in check.vulns
+ assert "dev-util/diffball" in check.vulns
def test_non_matching(self, check):
self.assertNoReport(check, mk_pkg("0.5.1"))
@@ -67,10 +67,8 @@ class TestVulnerabilitiesCheck(misc.ReportTestCase):
def test_matching(self, check):
r = self.assertReport(check, mk_pkg("0.5-r5"))
assert isinstance(r, glsa.VulnerablePackage)
- assert (
- (r.category, r.package, r.version) ==
- ("dev-util", "diffball", "0.5-r5"))
- assert 'vulnerable via glsa(200611-02)' in str(r)
+ assert (r.category, r.package, r.version) == ("dev-util", "diffball", "0.5-r5")
+ assert "vulnerable via glsa(200611-02)" in str(r)
# multiple glsa matches
self.assertReports(check, mk_pkg("1.0"))
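
The stub GLSA above marks dev-util/diffball vulnerable via a "~>=0.5-r3" range, i.e. the same base version with revision 3 or later. A loose illustration of that rule (not pkgcore's real range matcher):

    def matches(version, base="0.5", min_rev=3):
        # same base version, revision >= min_rev; a missing -rN means revision 0
        pv, _, rev = version.partition("-r")
        return pv == base and int(rev or 0) >= min_rev

    assert matches("0.5-r5")      # reported in test_matching
    assert not matches("0.5.1")   # passes test_non_matching
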
diff --git a/tests/checks/test_header.py b/tests/checks/test_header.py
index 492c3d8c..e79fdeb1 100644
--- a/tests/checks/test_header.py
+++ b/tests/checks/test_header.py
@@ -21,9 +21,9 @@ class TestEbuildHeaderCheck(misc.ReportTestCase):
def test_good_copyright(self):
good_copyrights = [
- '# Copyright 1999-2019 Gentoo Authors\n',
- '# Copyright 2019 Gentoo Authors\n',
- '# Copyright 2010-2017 Gentoo Authors\n',
+ "# Copyright 1999-2019 Gentoo Authors\n",
+ "# Copyright 2019 Gentoo Authors\n",
+ "# Copyright 2010-2017 Gentoo Authors\n",
]
for line in good_copyrights:
fake_src = [line, self.check_kls.license_header]
@@ -32,11 +32,11 @@ class TestEbuildHeaderCheck(misc.ReportTestCase):
def test_invalid_copyright(self):
bad_copyrights = [
- '# Copyright (c) 1999-2019 Gentoo Authors\n',
- '# Copyright Gentoo Authors\n',
- '# Gentoo Authors\n',
- '# Here is entirely random text\n',
- '\n',
+ "# Copyright (c) 1999-2019 Gentoo Authors\n",
+ "# Copyright Gentoo Authors\n",
+ "# Gentoo Authors\n",
+ "# Here is entirely random text\n",
+ "\n",
]
for line in bad_copyrights:
fake_src = [line, self.check_kls.license_header]
@@ -48,10 +48,10 @@ class TestEbuildHeaderCheck(misc.ReportTestCase):
def test_new_foundation_copyright(self):
"""Foundation copyright on new ebuilds triggers the report."""
bad_copyrights = [
- '# Copyright 1999-2019 Gentoo Foundation\n',
- '# Copyright 2019 Gentoo Foundation\n',
- '# Copyright 3125 Gentoo Foundation\n',
- '# Copyright 2010-2021 Gentoo Foundation\n',
+ "# Copyright 1999-2019 Gentoo Foundation\n",
+ "# Copyright 2019 Gentoo Foundation\n",
+ "# Copyright 3125 Gentoo Foundation\n",
+ "# Copyright 2010-2021 Gentoo Foundation\n",
]
for line in bad_copyrights:
fake_src = [line, self.check_kls.license_header]
@@ -63,9 +63,9 @@ class TestEbuildHeaderCheck(misc.ReportTestCase):
def test_old_foundation_copyright(self):
"""Foundation copyright on old ebuilds does not trigger false positives."""
good_copyrights = [
- '# Copyright 1999-2018 Gentoo Foundation\n',
- '# Copyright 2016 Gentoo Foundation\n',
- '# Copyright 2010-2017 Gentoo Foundation\n',
+ "# Copyright 1999-2018 Gentoo Foundation\n",
+ "# Copyright 2016 Gentoo Foundation\n",
+ "# Copyright 2010-2017 Gentoo Foundation\n",
]
for line in good_copyrights:
fake_src = [line, self.check_kls.license_header]
@@ -75,8 +75,8 @@ class TestEbuildHeaderCheck(misc.ReportTestCase):
def test_non_gentoo_authors_copyright_in_gentoo(self):
"""Ebuilds in the gentoo repo must use 'Gentoo Authors'."""
bad_copyrights = [
- '# Copyright 1999-2019 D. E. Veloper\n',
- '# Copyright 2019 辣鸡汤\n',
+ "# Copyright 1999-2019 D. E. Veloper\n",
+ "# Copyright 2019 辣鸡汤\n",
]
for line in bad_copyrights:
fake_src = [line, self.check_kls.license_header]
@@ -86,23 +86,23 @@ class TestEbuildHeaderCheck(misc.ReportTestCase):
assert line.strip() in str(r)
def test_license_headers(self):
- copyright = '# Copyright 1999-2019 Gentoo Authors\n'
+ copyright = "# Copyright 1999-2019 Gentoo Authors\n"
fake_src = [copyright, self.check_kls.license_header]
fake_pkg = self.mk_pkg(lines=fake_src)
self.assertNoReport(self.mk_check(), fake_pkg)
bad_license_headers = [
[],
- [''],
- ['\n'],
- [f'{self.check_kls.license_header} '],
- [f' {self.check_kls.license_header}'],
- ['# Distributed under the terms of the GNU General Public License v3'],
+ [""],
+ ["\n"],
+ [f"{self.check_kls.license_header} "],
+ [f" {self.check_kls.license_header}"],
+ ["# Distributed under the terms of the GNU General Public License v3"],
]
for content in bad_license_headers:
fake_src = [copyright] + content
fake_pkg = self.mk_pkg(lines=fake_src)
r = self.assertReport(self.mk_check(), fake_pkg)
assert isinstance(r, header.EbuildInvalidLicenseHeader)
- expected = content[0].strip() if content else 'missing license header'
+ expected = content[0].strip() if content else "missing license header"
assert expected in str(r)
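
The header tests above accept only the exact "Copyright [start-]end Gentoo Authors" form (with "Gentoo Foundation" tolerated on old ebuilds). A hypothetical pattern capturing that shape; the real logic lives in pkgcheck.checks.header:

    import re

    COPYRIGHT = re.compile(r"^# Copyright (\d{4}-)?\d{4} Gentoo (Authors|Foundation)$")

    assert COPYRIGHT.match("# Copyright 1999-2019 Gentoo Authors")
    assert COPYRIGHT.match("# Copyright 2019 Gentoo Foundation")
    assert not COPYRIGHT.match("# Copyright (c) 1999-2019 Gentoo Authors")
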
diff --git a/tests/checks/test_imlate.py b/tests/checks/test_imlate.py
index 3c0f44f8..624dc90a 100644
--- a/tests/checks/test_imlate.py
+++ b/tests/checks/test_imlate.py
@@ -4,21 +4,25 @@ from snakeoil.cli import arghparse
from .. import misc
-def mk_check(selected_arches=("x86", "ppc", "amd64"), arches=None,
- stable_arches=None, source_arches=None):
+def mk_check(
+ selected_arches=("x86", "ppc", "amd64"), arches=None, stable_arches=None, source_arches=None
+):
if arches is None:
arches = selected_arches
if stable_arches is None:
stable_arches = selected_arches
return imlate.ImlateCheck(
arghparse.Namespace(
- selected_arches=selected_arches, arches=arches,
- stable_arches=stable_arches, source_arches=source_arches))
+ selected_arches=selected_arches,
+ arches=arches,
+ stable_arches=stable_arches,
+ source_arches=source_arches,
+ )
+ )
def mk_pkg(ver, keywords="", slot="0"):
- return misc.FakePkg(
- f"dev-util/diffball-{ver}", data={"SLOT": slot, "KEYWORDS": keywords})
+ return misc.FakePkg(f"dev-util/diffball-{ver}", data={"SLOT": slot, "KEYWORDS": keywords})
class TestImlateCheck(misc.ReportTestCase):
@@ -26,96 +30,82 @@ class TestImlateCheck(misc.ReportTestCase):
check_kls = imlate.ImlateCheck
def test_all_unstable(self):
- self.assertNoReport(
- mk_check(),
- [mk_pkg(str(x), "~x86 ~amd64") for x in range(10)])
+ self.assertNoReport(mk_check(), [mk_pkg(str(x), "~x86 ~amd64") for x in range(10)])
def test_all_stable(self):
- self.assertNoReport(
- mk_check(),
- [mk_pkg("0.9", "amd64 x86")])
+ self.assertNoReport(mk_check(), [mk_pkg("0.9", "amd64 x86")])
def test_unselected_arch(self):
- self.assertNoReport(
- mk_check(),
- [mk_pkg("0.9", "~mips amd64")])
+ self.assertNoReport(mk_check(), [mk_pkg("0.9", "~mips amd64")])
def test_specified_stable_arches(self):
# pkg doesn't have any unstable arches we care about
- self.assertNoReport(
- mk_check(source_arches=('arm', 'arm64')),
- [mk_pkg("0.9", "~x86 amd64")])
+ self.assertNoReport(mk_check(source_arches=("arm", "arm64")), [mk_pkg("0.9", "~x86 amd64")])
# pkg doesn't have any stable arches we care about
- self.assertNoReport(
- mk_check(source_arches=('arm64',)),
- [mk_pkg("0.9", "~x86 amd64")])
+ self.assertNoReport(mk_check(source_arches=("arm64",)), [mk_pkg("0.9", "~x86 amd64")])
# only flag arches we care about
r = self.assertReport(
- mk_check(source_arches=('amd64',), selected_arches=('arm64',)),
- [mk_pkg("0.9", "~arm64 ~x86 amd64")])
+ mk_check(source_arches=("amd64",), selected_arches=("arm64",)),
+ [mk_pkg("0.9", "~arm64 ~x86 amd64")],
+ )
assert isinstance(r, imlate.PotentialStable)
assert r.stable == ("amd64",)
assert r.keywords == ("~arm64",)
assert r.version == "0.9"
def test_lagging_keyword(self):
- r = self.assertReport(
- mk_check(),
- [mk_pkg("0.8", "x86 amd64"),
- mk_pkg("0.9", "x86 ~amd64")])
+ r = self.assertReport(mk_check(), [mk_pkg("0.8", "x86 amd64"), mk_pkg("0.9", "x86 ~amd64")])
assert isinstance(r, imlate.LaggingStable)
assert r.stable == ("x86",)
assert r.keywords == ("~amd64",)
assert r.version == "0.9"
- assert 'x86' in str(r) and '~amd64' in str(r)
+ assert "x86" in str(r) and "~amd64" in str(r)
def test_potential_keyword(self):
- r = self.assertReport(
- mk_check(),
- [mk_pkg("0.9", "~x86 amd64")])
+ r = self.assertReport(mk_check(), [mk_pkg("0.9", "~x86 amd64")])
assert isinstance(r, imlate.PotentialStable)
assert r.stable == ("amd64",)
assert r.keywords == ("~x86",)
assert r.version == "0.9"
- assert 'amd64' in str(r) and '~x86' in str(r)
+ assert "amd64" in str(r) and "~x86" in str(r)
def test_multiple_unstable_pkgs(self):
r = self.assertReport(
- mk_check(),
- [mk_pkg("0.7", "~x86"),
- mk_pkg("0.8", "~x86"),
- mk_pkg("0.9", "~x86 amd64")])
+ mk_check(), [mk_pkg("0.7", "~x86"), mk_pkg("0.8", "~x86"), mk_pkg("0.9", "~x86 amd64")]
+ )
assert r.stable == ("amd64",)
assert r.keywords == ("~x86",)
assert r.version == "0.9"
def test_multiple_stable_arches(self):
r = self.assertReport(
- mk_check(),
- [mk_pkg("0.7", "~x86 ~ppc"),
- mk_pkg("0.9", "~x86 ppc amd64")])
+ mk_check(), [mk_pkg("0.7", "~x86 ~ppc"), mk_pkg("0.9", "~x86 ppc amd64")]
+ )
assert r.stable == ("amd64", "ppc")
assert r.keywords == ("~x86",)
assert r.version == "0.9"
def test_multiple_potential_arches(self):
- r = self.assertReport(
- mk_check(),
- [mk_pkg("0.7", "~x86"),
- mk_pkg("0.9", "~x86 ~ppc amd64")])
+ r = self.assertReport(mk_check(), [mk_pkg("0.7", "~x86"), mk_pkg("0.9", "~x86 ~ppc amd64")])
assert r.stable == ("amd64",)
- assert r.keywords == ("~ppc", "~x86",)
+ assert r.keywords == (
+ "~ppc",
+ "~x86",
+ )
assert r.version == "0.9"
def test_multiple_lagging_slots(self):
r = self.assertReports(
mk_check(),
- [mk_pkg("0.7", slot="0", keywords="x86 ppc"),
- mk_pkg("0.9", slot="0", keywords="~x86 ppc"),
- mk_pkg("1.0", slot="1", keywords="x86 ppc"),
- mk_pkg("1.2", slot="1", keywords="x86 ~ppc")])
+ [
+ mk_pkg("0.7", slot="0", keywords="x86 ppc"),
+ mk_pkg("0.9", slot="0", keywords="~x86 ppc"),
+ mk_pkg("1.0", slot="1", keywords="x86 ppc"),
+ mk_pkg("1.2", slot="1", keywords="x86 ~ppc"),
+ ],
+ )
assert len(r) == 2
assert isinstance(r[0], imlate.LaggingStable)
assert r[0].slot == "0"
@@ -131,8 +121,11 @@ class TestImlateCheck(misc.ReportTestCase):
def test_multiple_potential_slots(self):
r = self.assertReports(
mk_check(),
- [mk_pkg("0.9", slot="0", keywords="x86 ~ppc"),
- mk_pkg("1.2", slot="1", keywords="x86 ~ppc")])
+ [
+ mk_pkg("0.9", slot="0", keywords="x86 ~ppc"),
+ mk_pkg("1.2", slot="1", keywords="x86 ~ppc"),
+ ],
+ )
assert len(r) == 2
assert isinstance(r[0], imlate.PotentialStable)
assert r[0].slot == "0"
@@ -146,15 +139,17 @@ class TestImlateCheck(misc.ReportTestCase):
assert r[1].version == "1.2"
def test_drop_newer_slot_stables(self):
- selected_arches=("x86", "amd64")
- all_arches=("x86", "amd64", "arm64")
+ selected_arches = ("x86", "amd64")
+ all_arches = ("x86", "amd64", "arm64")
r = self.assertReport(
mk_check(selected_arches=selected_arches, arches=all_arches),
- [mk_pkg("0.7", "amd64 x86 ~arm64"),
- mk_pkg("0.8", "amd64 ~x86 ~arm64"),
- mk_pkg("0.9", "~amd64 ~x86 arm64")]
+ [
+ mk_pkg("0.7", "amd64 x86 ~arm64"),
+ mk_pkg("0.8", "amd64 ~x86 ~arm64"),
+ mk_pkg("0.9", "~amd64 ~x86 arm64"),
+ ],
)
assert isinstance(r, imlate.LaggingStable)
- assert r.stable == ('amd64',)
- assert r.keywords == ('~x86',)
- assert r.version == '0.8'
+ assert r.stable == ("amd64",)
+ assert r.keywords == ("~x86",)
+ assert r.version == "0.8"
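
The imlate fixtures all exercise one rule: a "~arch" keyword becomes a stabilization candidate once some other arch of the same version is already stable. A rough sketch of that rule, as an assumption rather than ImlateCheck's actual implementation:

    def potential_stable(keywords, selected):
        # selected arches still unstable while at least one arch is stable
        stable = {k for k in keywords if not k.startswith(("~", "-"))}
        unstable = {k[1:] for k in keywords if k.startswith("~")}
        return sorted(unstable & set(selected)) if stable else []

    # mirrors test_potential_keyword: "~x86 amd64" flags ~x86
    assert potential_stable({"amd64", "~x86"}, {"x86", "ppc", "amd64"}) == ["x86"]
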
diff --git a/tests/checks/test_metadata.py b/tests/checks/test_metadata.py
index cc074d93..ee0ac08e 100644
--- a/tests/checks/test_metadata.py
+++ b/tests/checks/test_metadata.py
@@ -30,27 +30,24 @@ class TestDescriptionCheck(misc.ReportTestCase):
self.assertNoReport(self.check, self.mk_pkg("a perfectly written package description"))
def test_bad_descs(self):
- for desc in ('based on eclass',
- 'diffball',
- 'dev-util/diffball',
- 'foon'):
+ for desc in ("based on eclass", "diffball", "dev-util/diffball", "foon"):
r = self.assertReport(self.check, self.mk_pkg(desc))
assert isinstance(r, metadata.BadDescription)
def test_desc_length(self):
r = self.assertReport(self.check, self.mk_pkg())
assert isinstance(r, metadata.BadDescription)
- assert 'empty/unset' in str(r)
+ assert "empty/unset" in str(r)
- self.assertNoReport(self.check, self.mk_pkg('s' * 80))
- r = self.assertReport(self.check, self.mk_pkg('s' * 81))
+ self.assertNoReport(self.check, self.mk_pkg("s" * 80))
+ r = self.assertReport(self.check, self.mk_pkg("s" * 81))
assert isinstance(r, metadata.BadDescription)
- assert 'over 80 chars in length' in str(r)
+ assert "over 80 chars in length" in str(r)
- self.assertNoReport(self.check, self.mk_pkg('s' * 10))
- r = self.assertReport(self.check, self.mk_pkg('s' * 9))
+ self.assertNoReport(self.check, self.mk_pkg("s" * 10))
+ r = self.assertReport(self.check, self.mk_pkg("s" * 9))
assert isinstance(r, metadata.BadDescription)
- assert 'under 10 chars in length' in str(r)
+ assert "under 10 chars in length" in str(r)
class TestHomepageCheck(misc.ReportTestCase):
@@ -58,7 +55,7 @@ class TestHomepageCheck(misc.ReportTestCase):
check_kls = metadata.HomepageCheck
check = metadata.HomepageCheck(None)
- def mk_pkg(self, homepage='', cpvstr='dev-util/diffball-0.7.1'):
+ def mk_pkg(self, homepage="", cpvstr="dev-util/diffball-0.7.1"):
return misc.FakePkg(cpvstr, data={"HOMEPAGE": homepage})
def test_regular(self):
@@ -72,26 +69,26 @@ class TestHomepageCheck(misc.ReportTestCase):
def test_unset(self):
r = self.assertReport(self.check, self.mk_pkg())
assert isinstance(r, metadata.BadHomepage)
- assert 'empty/unset' in str(r)
+ assert "empty/unset" in str(r)
# categories of pkgs allowed to skip HOMEPAGE
for cat in self.check_kls.missing_categories:
- self.assertNoReport(self.check, self.mk_pkg(cpvstr=f'{cat}/foo-0'))
+ self.assertNoReport(self.check, self.mk_pkg(cpvstr=f"{cat}/foo-0"))
def test_no_protocol(self):
- r = self.assertReport(self.check, self.mk_pkg('foobar.com'))
+ r = self.assertReport(self.check, self.mk_pkg("foobar.com"))
assert isinstance(r, metadata.BadHomepage)
- assert 'lacks protocol' in str(r)
+ assert "lacks protocol" in str(r)
def test_unsupported_protocol(self):
- r = self.assertReport(self.check, self.mk_pkg('htp://foobar.com'))
+ r = self.assertReport(self.check, self.mk_pkg("htp://foobar.com"))
assert isinstance(r, metadata.BadHomepage)
assert "uses unsupported protocol 'htp'" in str(r)
def test_unspecific_site(self):
- for suffix in ('', '/'):
- for site in ('https://www.gentoo.org', 'https://gentoo.org'):
- r = self.assertReport(self.check, self.mk_pkg(f'{site}{suffix}'))
+ for suffix in ("", "/"):
+ for site in ("https://www.gentoo.org", "https://gentoo.org"):
+ r = self.assertReport(self.check, self.mk_pkg(f"{site}{suffix}"))
assert isinstance(r, metadata.BadHomepage)
assert "unspecific HOMEPAGE" in str(r)
@@ -104,27 +101,30 @@ class TestHomepageCheck(misc.ReportTestCase):
class IUSE_Options(misc.Tmpdir):
-
def get_options(self, properties=(), restrict=(), **kwargs):
repo_base = tempfile.mkdtemp(dir=self.dir)
- base = pjoin(repo_base, 'profiles')
+ base = pjoin(repo_base, "profiles")
os.mkdir(base)
- with open(pjoin(base, "arch.list"), 'w') as file:
+ with open(pjoin(base, "arch.list"), "w") as file:
file.write("\n".join(kwargs.pop("arches", ("x86", "ppc", "amd64", "amd64-fbsd"))))
with open(pjoin(base, "use.desc"), "w") as file:
file.write("\n".join(f"{x} - {x}" for x in kwargs.pop("use_desc", ("foo", "bar"))))
- with open(pjoin(base, 'repo_name'), 'w') as file:
- file.write(kwargs.pop('repo_name', 'monkeys'))
- os.mkdir(pjoin(repo_base, 'metadata'))
- with open(pjoin(repo_base, 'metadata', 'layout.conf'), 'w') as f:
- f.write(textwrap.dedent(f"""\
- masters =
- properties-allowed = {' '.join(properties)}
- restrict-allowed = {' '.join(restrict)}
- """))
- kwargs['target_repo'] = repository.UnconfiguredTree(repo_base)
- kwargs.setdefault('verbosity', 0)
- kwargs.setdefault('cache', {'git': False})
+ with open(pjoin(base, "repo_name"), "w") as file:
+ file.write(kwargs.pop("repo_name", "monkeys"))
+ os.mkdir(pjoin(repo_base, "metadata"))
+ with open(pjoin(repo_base, "metadata", "layout.conf"), "w") as f:
+ f.write(
+ textwrap.dedent(
+ f"""\
+ masters =
+ properties-allowed = {' '.join(properties)}
+ restrict-allowed = {' '.join(restrict)}
+ """
+ )
+ )
+ kwargs["target_repo"] = repository.UnconfiguredTree(repo_base)
+ kwargs.setdefault("verbosity", 0)
+ kwargs.setdefault("cache", {"git": False})
return arghparse.Namespace(**kwargs)
@@ -135,21 +135,21 @@ class TestKeywordsCheck(IUSE_Options, misc.ReportTestCase):
@pytest.fixture
def check(self):
pkgs = (
- FakePkg('dev-libs/foo-0', keywords=('amd64', '~x86')),
- FakePkg('dev-libs/foo-1', keywords=('-*', 'ppc')),
- FakePkg('dev-libs/bar-2', keywords=()),
+ FakePkg("dev-libs/foo-0", keywords=("amd64", "~x86")),
+ FakePkg("dev-libs/foo-1", keywords=("-*", "ppc")),
+ FakePkg("dev-libs/bar-2", keywords=()),
)
search_repo = FakeRepo(pkgs=pkgs)
options = self.get_options(search_repo=search_repo, gentoo_repo=False)
kwargs = {
- 'use_addon': addons.UseAddon(options),
- 'keywords_addon': addons.KeywordsAddon(options),
+ "use_addon": addons.UseAddon(options),
+ "keywords_addon": addons.KeywordsAddon(options),
}
return metadata.KeywordsCheck(options, **kwargs)
- def mk_pkg(self, keywords='', cpv='dev-util/diffball-0.7.1', rdepend=''):
- return misc.FakePkg(cpv, data={'KEYWORDS': keywords, 'RDEPEND': rdepend})
+ def mk_pkg(self, keywords="", cpv="dev-util/diffball-0.7.1", rdepend=""):
+ return misc.FakePkg(cpv, data={"KEYWORDS": keywords, "RDEPEND": rdepend})
def test_no_keywords(self, check):
self.assertNoReport(check, self.mk_pkg())
@@ -173,23 +173,23 @@ class TestKeywordsCheck(IUSE_Options, misc.ReportTestCase):
# unknown keyword
r = self.assertReport(check, self.mk_pkg("foo"))
assert isinstance(r, metadata.UnknownKeywords)
- assert r.keywords == ('foo',)
+ assert r.keywords == ("foo",)
assert "unknown KEYWORDS: 'foo'" in str(r)
# check that * and ~* are flagged in gentoo repo
- options = self.get_options(repo_name='gentoo', gentoo_repo=True)
+ options = self.get_options(repo_name="gentoo", gentoo_repo=True)
kwargs = {
- 'use_addon': addons.UseAddon(options),
- 'keywords_addon': addons.KeywordsAddon(options),
+ "use_addon": addons.UseAddon(options),
+ "keywords_addon": addons.KeywordsAddon(options),
}
check = metadata.KeywordsCheck(options, **kwargs)
r = self.assertReport(check, self.mk_pkg("*"))
assert isinstance(r, metadata.UnknownKeywords)
- assert r.keywords == ('*',)
+ assert r.keywords == ("*",)
assert "unknown KEYWORDS: '*'" in str(r)
r = self.assertReport(check, self.mk_pkg("~*"))
assert isinstance(r, metadata.UnknownKeywords)
- assert r.keywords == ('~*',)
+ assert r.keywords == ("~*",)
assert "unknown KEYWORDS: '~*'" in str(r)
def test_overlapping_keywords(self, check):
@@ -214,78 +214,78 @@ class TestKeywordsCheck(IUSE_Options, misc.ReportTestCase):
# single duplicate
r = self.assertReport(check, self.mk_pkg("amd64 amd64"))
assert isinstance(r, metadata.DuplicateKeywords)
- assert r.keywords == ('amd64',)
- assert 'duplicate KEYWORDS: amd64' in str(r)
+ assert r.keywords == ("amd64",)
+ assert "duplicate KEYWORDS: amd64" in str(r)
# multiple duplicates
r = self.assertReport(check, self.mk_pkg("-* -* amd64 amd64 ~x86 ~x86"))
assert isinstance(r, metadata.DuplicateKeywords)
- assert r.keywords == ('-*', 'amd64', '~x86')
+ assert r.keywords == ("-*", "amd64", "~x86")
def test_unsorted_keywords(self, check):
# regular keywords
- self.assertNoReport(check, self.mk_pkg('-* ~amd64'))
+ self.assertNoReport(check, self.mk_pkg("-* ~amd64"))
# prefix keywords come after regular keywords
- self.assertNoReport(check, self.mk_pkg('~amd64 ppc ~x86 ~amd64-fbsd'))
+ self.assertNoReport(check, self.mk_pkg("~amd64 ppc ~x86 ~amd64-fbsd"))
# non-verbose mode doesn't show sorted keywords
- r = self.assertReport(check, self.mk_pkg('~amd64 -*'))
+ r = self.assertReport(check, self.mk_pkg("~amd64 -*"))
assert isinstance(r, metadata.UnsortedKeywords)
- assert r.keywords == ('~amd64', '-*')
+ assert r.keywords == ("~amd64", "-*")
assert r.sorted_keywords == ()
- assert 'unsorted KEYWORDS: ~amd64, -*' in str(r)
+ assert "unsorted KEYWORDS: ~amd64, -*" in str(r)
# create a check instance with verbose mode enabled
options = self.get_options(gentoo_repo=False, verbosity=1)
kwargs = {
- 'use_addon': addons.UseAddon(options),
- 'keywords_addon': addons.KeywordsAddon(options),
+ "use_addon": addons.UseAddon(options),
+ "keywords_addon": addons.KeywordsAddon(options),
}
check = metadata.KeywordsCheck(options, **kwargs)
# masks should come before regular keywords
- r = self.assertReport(check, self.mk_pkg('~amd64 -*'))
+ r = self.assertReport(check, self.mk_pkg("~amd64 -*"))
assert isinstance(r, metadata.UnsortedKeywords)
- assert r.keywords == ('~amd64', '-*')
- assert r.sorted_keywords == ('-*', '~amd64')
- assert '\n\tunsorted KEYWORDS: ~amd64, -*\n\tsorted KEYWORDS: -*, ~amd64' in str(r)
+ assert r.keywords == ("~amd64", "-*")
+ assert r.sorted_keywords == ("-*", "~amd64")
+ assert "\n\tunsorted KEYWORDS: ~amd64, -*\n\tsorted KEYWORDS: -*, ~amd64" in str(r)
# keywords should be sorted alphabetically by arch
- r = self.assertReport(check, self.mk_pkg('ppc ~amd64'))
+ r = self.assertReport(check, self.mk_pkg("ppc ~amd64"))
assert isinstance(r, metadata.UnsortedKeywords)
- assert r.keywords == ('ppc', '~amd64')
- assert r.sorted_keywords == ('~amd64', 'ppc')
- assert '\n\tunsorted KEYWORDS: ppc, ~amd64\n\tsorted KEYWORDS: ~amd64, ppc' in str(r)
+ assert r.keywords == ("ppc", "~amd64")
+ assert r.sorted_keywords == ("~amd64", "ppc")
+ assert "\n\tunsorted KEYWORDS: ppc, ~amd64\n\tsorted KEYWORDS: ~amd64, ppc" in str(r)
# prefix keywords should come after regular keywords
- r = self.assertReport(check, self.mk_pkg('~amd64 ~amd64-fbsd ppc ~x86'))
+ r = self.assertReport(check, self.mk_pkg("~amd64 ~amd64-fbsd ppc ~x86"))
assert isinstance(r, metadata.UnsortedKeywords)
- assert r.keywords == ('~amd64', '~amd64-fbsd', 'ppc', '~x86')
- assert r.sorted_keywords == ('~amd64', 'ppc', '~x86', '~amd64-fbsd')
+ assert r.keywords == ("~amd64", "~amd64-fbsd", "ppc", "~x86")
+ assert r.sorted_keywords == ("~amd64", "ppc", "~x86", "~amd64-fbsd")
def test_missing_virtual_keywords(self, check):
# non-virtuals don't trigger
- pkg = self.mk_pkg(cpv='dev-util/foo-0', rdepend='=dev-libs/foo-0')
+ pkg = self.mk_pkg(cpv="dev-util/foo-0", rdepend="=dev-libs/foo-0")
self.assertNoReport(check, pkg)
# matching pkg with no keywords
- pkg = self.mk_pkg(cpv='virtual/foo-0', rdepend='dev-libs/bar')
+ pkg = self.mk_pkg(cpv="virtual/foo-0", rdepend="dev-libs/bar")
self.assertNoReport(check, pkg)
# single pkg match
- pkg = self.mk_pkg(cpv='virtual/foo-0', rdepend='=dev-libs/foo-0')
+ pkg = self.mk_pkg(cpv="virtual/foo-0", rdepend="=dev-libs/foo-0")
r = self.assertReport(check, pkg)
assert isinstance(r, metadata.VirtualKeywordsUpdate)
- assert r.keywords == ('amd64', '~x86')
- assert 'KEYWORDS updates available: amd64, ~x86' in str(r)
+ assert r.keywords == ("amd64", "~x86")
+ assert "KEYWORDS updates available: amd64, ~x86" in str(r)
# multiple pkg match
- pkg = self.mk_pkg(cpv='virtual/foo-0', rdepend='dev-libs/foo')
+ pkg = self.mk_pkg(cpv="virtual/foo-0", rdepend="dev-libs/foo")
r = self.assertReport(check, pkg)
assert isinstance(r, metadata.VirtualKeywordsUpdate)
- assert r.keywords == ('amd64', 'ppc', '~x86')
- assert 'KEYWORDS updates available: amd64, ppc, ~x86' in str(r)
+ assert r.keywords == ("amd64", "ppc", "~x86")
+ assert "KEYWORDS updates available: amd64, ppc, ~x86" in str(r)
class TestIuseCheck(IUSE_Options, misc.ReportTestCase):
@@ -298,28 +298,28 @@ class TestIuseCheck(IUSE_Options, misc.ReportTestCase):
use_addon = addons.UseAddon(options)
return self.check_kls(options, use_addon=use_addon)
- def mk_pkg(self, iuse=''):
- return misc.FakePkg('dev-util/diffball-0.7.1', data={'IUSE': iuse, 'EAPI': '1'})
+ def mk_pkg(self, iuse=""):
+ return misc.FakePkg("dev-util/diffball-0.7.1", data={"IUSE": iuse, "EAPI": "1"})
def test_known_iuse(self, check):
- self.assertNoReport(check, self.mk_pkg('foo bar'))
+ self.assertNoReport(check, self.mk_pkg("foo bar"))
def test_unknown_iuse(self, check):
- r = self.assertReport(check, self.mk_pkg('foo dar'))
+ r = self.assertReport(check, self.mk_pkg("foo dar"))
assert isinstance(r, metadata.UnknownUseFlags)
- assert r.flags == ('dar',)
- assert 'dar' in str(r)
+ assert r.flags == ("dar",)
+ assert "dar" in str(r)
def test_arch_iuse(self, check):
# arch flags must _not_ be in IUSE
- r = self.assertReport(check, self.mk_pkg('x86'))
+ r = self.assertReport(check, self.mk_pkg("x86"))
assert isinstance(r, metadata.UnknownUseFlags)
- assert r.flags == ('x86',)
- assert 'x86' in str(r)
+ assert r.flags == ("x86",)
+ assert "x86" in str(r)
def test_invalid_iuse(self, check):
- for flag in ('+', '-', '@', '_'):
- r = self.assertReport(check, self.mk_pkg(f'foo {flag}'))
+ for flag in ("+", "-", "@", "_"):
+ r = self.assertReport(check, self.mk_pkg(f"foo {flag}"))
assert isinstance(r, metadata.InvalidUseFlags)
assert r.flags == (flag,)
assert flag in str(r)
@@ -331,12 +331,12 @@ class TestEapiCheck(misc.ReportTestCase, misc.Tmpdir):
def mk_check(self, deprecated=(), banned=()):
# TODO: switch to using a repo fixture when available
- os.makedirs(pjoin(self.dir, 'profiles'))
- os.makedirs(pjoin(self.dir, 'metadata'))
- with open(pjoin(self.dir, 'profiles', 'repo_name'), 'w') as f:
- f.write('fake\n')
- with open(pjoin(self.dir, 'metadata', 'layout.conf'), 'w') as f:
- f.write('masters =\n')
+ os.makedirs(pjoin(self.dir, "profiles"))
+ os.makedirs(pjoin(self.dir, "metadata"))
+ with open(pjoin(self.dir, "profiles", "repo_name"), "w") as f:
+ f.write("fake\n")
+ with open(pjoin(self.dir, "metadata", "layout.conf"), "w") as f:
+ f.write("masters =\n")
f.write(f"eapis-deprecated = {' '.join(deprecated)}\n")
f.write(f"eapis-banned = {' '.join(banned)}\n")
repo_config = repo_objs.RepoConfig(location=self.dir)
@@ -345,7 +345,7 @@ class TestEapiCheck(misc.ReportTestCase, misc.Tmpdir):
return self.check_kls(options, eclass_addon=addons.eclass.EclassAddon(options))
def mk_pkg(self, eapi):
- return misc.FakePkg('dev-util/diffball-2.7.1', data={'EAPI': eapi})
+ return misc.FakePkg("dev-util/diffball-2.7.1", data={"EAPI": eapi})
def test_repo_with_no_settings(self):
check = self.mk_check()
@@ -353,29 +353,35 @@ class TestEapiCheck(misc.ReportTestCase, misc.Tmpdir):
self.assertNoReport(check, self.mk_pkg(eapi=eapi_str))
def test_latest_eapi(self):
- check = self.mk_check(deprecated=('0', '2', '4', '5'), banned=('1', '3',))
+ check = self.mk_check(
+ deprecated=("0", "2", "4", "5"),
+ banned=(
+ "1",
+ "3",
+ ),
+ )
latest_eapi = list(eapi.EAPI.known_eapis)[-1]
self.assertNoReport(check, self.mk_pkg(eapi=latest_eapi))
def test_deprecated_eapi(self):
- deprecated = ('0', '2', '4', '5')
- banned = ('1', '3')
+ deprecated = ("0", "2", "4", "5")
+ banned = ("1", "3")
check = self.mk_check(deprecated=deprecated, banned=banned)
for eapi_str in deprecated:
r = self.assertReport(check, self.mk_pkg(eapi=eapi_str))
assert isinstance(r, metadata.DeprecatedEapi)
assert r.eapi == eapi_str
- assert f'uses deprecated EAPI {eapi_str}' in str(r)
+ assert f"uses deprecated EAPI {eapi_str}" in str(r)
def test_banned_eapi(self):
- deprecated = ('0', '2', '4', '5')
- banned = ('1', '3')
+ deprecated = ("0", "2", "4", "5")
+ banned = ("1", "3")
check = self.mk_check(deprecated=deprecated, banned=banned)
for eapi_str in banned:
r = self.assertReport(check, self.mk_pkg(eapi=eapi_str))
assert isinstance(r, metadata.BannedEapi)
assert r.eapi == eapi_str
- assert f'uses banned EAPI {eapi_str}' in str(r)
+ assert f"uses banned EAPI {eapi_str}" in str(r)
class TestSourcingCheck(misc.ReportTestCase, misc.Tmpdir):
@@ -387,19 +393,19 @@ class TestSourcingCheck(misc.ReportTestCase, misc.Tmpdir):
# TODO: switch to using a repo fixture when available
repo_dir = pjoin(self.dir, str(self._repo_id))
self._repo_id += 1
- os.makedirs(pjoin(repo_dir, 'profiles'))
- os.makedirs(pjoin(repo_dir, 'metadata'))
- with open(pjoin(repo_dir, 'profiles', 'repo_name'), 'w') as f:
- f.write('fake\n')
- with open(pjoin(repo_dir, 'metadata', 'layout.conf'), 'w') as f:
- f.write('masters =\n')
+ os.makedirs(pjoin(repo_dir, "profiles"))
+ os.makedirs(pjoin(repo_dir, "metadata"))
+ with open(pjoin(repo_dir, "profiles", "repo_name"), "w") as f:
+ f.write("fake\n")
+ with open(pjoin(repo_dir, "metadata", "layout.conf"), "w") as f:
+ f.write("masters =\n")
repo_config = repo_objs.RepoConfig(location=repo_dir)
self.repo = repository.UnconfiguredTree(repo_config.location, repo_config=repo_config)
options = arghparse.Namespace(target_repo=self.repo, verbosity=False)
return self.check_kls(options)
def mk_pkg(self, eapi):
- return misc.FakePkg('dev-util/diffball-2.7.1', data={'EAPI': eapi})
+ return misc.FakePkg("dev-util/diffball-2.7.1", data={"EAPI": eapi})
def test_repo_with_no_settings(self):
check = self.mk_check()
@@ -407,51 +413,43 @@ class TestSourcingCheck(misc.ReportTestCase, misc.Tmpdir):
self.assertNoReport(check, self.mk_pkg(eapi=eapi_str))
def test_unknown_eapis(self):
- for eapi in ('blah', '9999'):
+ for eapi in ("blah", "9999"):
check = self.mk_check()
- pkg_path = pjoin(self.repo.location, 'dev-util', 'foo')
+ pkg_path = pjoin(self.repo.location, "dev-util", "foo")
os.makedirs(pkg_path)
- with open(pjoin(pkg_path, 'foo-0.ebuild'), 'w') as f:
- f.write(textwrap.dedent(f"""\
- EAPI={eapi}
- """))
+ with open(pjoin(pkg_path, "foo-0.ebuild"), "w") as f:
+ f.write(f"EAPI={eapi}\n")
r = self.assertReport(check, self.repo)
assert isinstance(r, metadata.InvalidEapi)
assert f"EAPI '{eapi}' is not supported" in str(r)
def test_invalid_eapis(self):
- for eapi in ('invalid!', '${EAPI}'):
+ for eapi in ("invalid!", "${EAPI}"):
check = self.mk_check()
- pkg_path = pjoin(self.repo.location, 'dev-util', 'foo')
+ pkg_path = pjoin(self.repo.location, "dev-util", "foo")
os.makedirs(pkg_path)
- with open(pjoin(pkg_path, 'foo-0.ebuild'), 'w') as f:
- f.write(textwrap.dedent(f"""\
- EAPI="{eapi}"
- """))
+ with open(pjoin(pkg_path, "foo-0.ebuild"), "w") as f:
+ f.write(f"EAPI={eapi}\n")
r = self.assertReport(check, self.repo)
assert isinstance(r, metadata.InvalidEapi)
assert f"invalid EAPI '{eapi}'" in str(r)
def test_sourcing_error(self):
check = self.mk_check()
- pkg_path = pjoin(self.repo.location, 'dev-util', 'foo')
+ pkg_path = pjoin(self.repo.location, "dev-util", "foo")
os.makedirs(pkg_path)
- with open(pjoin(pkg_path, 'foo-0.ebuild'), 'w') as f:
- f.write(textwrap.dedent("""\
- foo
- """))
+ with open(pjoin(pkg_path, "foo-0.ebuild"), "w") as f:
+ f.write("foo\n")
r = self.assertReport(check, self.repo)
assert isinstance(r, metadata.SourcingError)
def test_invalid_slots(self):
- for slot in ('?', '0/1'):
+ for slot in ("?", "0/1"):
check = self.mk_check()
- pkg_path = pjoin(self.repo.location, 'dev-util', 'foo')
+ pkg_path = pjoin(self.repo.location, "dev-util", "foo")
os.makedirs(pkg_path)
- with open(pjoin(pkg_path, 'foo-0.ebuild'), 'w') as f:
- f.write(textwrap.dedent(f"""\
- SLOT="{slot}"
- """))
+ with open(pjoin(pkg_path, "foo-0.ebuild"), "w") as f:
+ f.write(f"""SLOT="{slot}"\n""")
r = self.assertReport(check, self.repo)
assert isinstance(r, metadata.InvalidSlot)
assert f"invalid SLOT: '{slot}'" in str(r)
@@ -467,19 +465,26 @@ class TestRequiredUseCheck(IUSE_Options, misc.ReportTestCase):
def mk_check(self, masks=(), verbosity=1, profiles=None):
if profiles is None:
- profiles = {'x86': [misc.FakeProfile(name='default/linux/x86', masks=masks)]}
+ profiles = {"x86": [misc.FakeProfile(name="default/linux/x86", masks=masks)]}
options = self.get_options(verbosity=verbosity)
use_addon = addons.UseAddon(options)
check = self.check_kls(options, use_addon=use_addon, profile_addon=profiles)
return check
- def mk_pkg(self, cpvstr="dev-util/diffball-0.7.1", eapi="4", iuse="",
- required_use="", keywords="~amd64 x86"):
+ def mk_pkg(
+ self,
+ cpvstr="dev-util/diffball-0.7.1",
+ eapi="4",
+ iuse="",
+ required_use="",
+ keywords="~amd64 x86",
+ ):
return FakePkg(
cpvstr,
eapi=eapi,
iuse=iuse.split(),
- data={"REQUIRED_USE": required_use, "KEYWORDS": keywords})
+ data={"REQUIRED_USE": required_use, "KEYWORDS": keywords},
+ )
def test_unsupported_eapis(self, check):
for eapi_str, eapi_obj in eapi.EAPI.known_eapis.items():
@@ -489,9 +494,10 @@ class TestRequiredUseCheck(IUSE_Options, misc.ReportTestCase):
def test_multireport_verbosity(self):
profiles = {
- 'x86': [
- misc.FakeProfile(name='default/linux/x86', masks=()),
- misc.FakeProfile(name='default/linux/x86/foo', masks=())]
+ "x86": [
+ misc.FakeProfile(name="default/linux/x86", masks=()),
+ misc.FakeProfile(name="default/linux/x86/foo", masks=()),
+ ]
}
# non-verbose mode should only report one failure per node
check = self.mk_check(verbosity=0, profiles=profiles)
@@ -516,7 +522,9 @@ class TestRequiredUseCheck(IUSE_Options, misc.ReportTestCase):
# only supported in >= EAPI 5
self.assertReport(check, self.mk_pkg(iuse="foo bar", required_use="?? ( foo bar )"))
- self.assertNoReport(check, self.mk_pkg(eapi="5", iuse="foo bar", required_use="?? ( foo bar )"))
+ self.assertNoReport(
+ check, self.mk_pkg(eapi="5", iuse="foo bar", required_use="?? ( foo bar )")
+ )
def test_unstated_iuse(self, check):
r = self.assertReport(check, self.mk_pkg(required_use="foo? ( blah )"))
@@ -534,25 +542,34 @@ class TestRequiredUseCheck(IUSE_Options, misc.ReportTestCase):
# pkgs masked by the related profile aren't checked
self.assertNoReport(
- self.mk_check(masks=('>=dev-util/diffball-8.0',)),
- self.mk_pkg(cpvstr="dev-util/diffball-8.0", iuse="foo bar", required_use="bar"))
+ self.mk_check(masks=(">=dev-util/diffball-8.0",)),
+ self.mk_pkg(cpvstr="dev-util/diffball-8.0", iuse="foo bar", required_use="bar"),
+ )
# unsatisfied REQUIRED_USE
r = self.assertReport(check, self.mk_pkg(iuse="foo bar", required_use="bar"))
assert isinstance(r, metadata.RequiredUseDefaults)
- assert r.keyword == 'x86'
- assert r.profile == 'default/linux/x86'
+ assert r.keyword == "x86"
+ assert r.profile == "default/linux/x86"
assert r.use == ()
- assert str(r.required_use) == 'bar'
+ assert str(r.required_use) == "bar"
# at-most-one-of
- self.assertNoReport(check, self.mk_pkg(eapi="5", iuse="foo bar", required_use="?? ( foo bar )"))
- self.assertNoReport(check, self.mk_pkg(eapi="5", iuse="+foo bar", required_use="?? ( foo bar )"))
- self.assertNoReport(check, self.mk_pkg(eapi="5", iuse="foo +bar", required_use="?? ( foo bar )"))
- r = self.assertReport(check, self.mk_pkg(eapi="5", iuse="+foo +bar", required_use="?? ( foo bar )"))
+ self.assertNoReport(
+ check, self.mk_pkg(eapi="5", iuse="foo bar", required_use="?? ( foo bar )")
+ )
+ self.assertNoReport(
+ check, self.mk_pkg(eapi="5", iuse="+foo bar", required_use="?? ( foo bar )")
+ )
+ self.assertNoReport(
+ check, self.mk_pkg(eapi="5", iuse="foo +bar", required_use="?? ( foo bar )")
+ )
+ r = self.assertReport(
+ check, self.mk_pkg(eapi="5", iuse="+foo +bar", required_use="?? ( foo bar )")
+ )
assert isinstance(r, metadata.RequiredUseDefaults)
- assert r.use == ('bar', 'foo')
- assert str(r.required_use) == 'at-most-one-of ( foo bar )'
+ assert r.use == ("bar", "foo")
+ assert str(r.required_use) == "at-most-one-of ( foo bar )"
# exactly-one-of
self.assertNoReport(check, self.mk_pkg(iuse="+foo bar", required_use="^^ ( foo bar )"))
@@ -560,35 +577,48 @@ class TestRequiredUseCheck(IUSE_Options, misc.ReportTestCase):
self.assertReport(check, self.mk_pkg(iuse="foo bar", required_use="^^ ( foo bar )"))
r = self.assertReport(check, self.mk_pkg(iuse="+foo +bar", required_use="^^ ( foo bar )"))
assert isinstance(r, metadata.RequiredUseDefaults)
- assert r.use == ('bar', 'foo')
- assert str(r.required_use) == 'exactly-one-of ( foo bar )'
+ assert r.use == ("bar", "foo")
+ assert str(r.required_use) == "exactly-one-of ( foo bar )"
# all-of
self.assertNoReport(check, self.mk_pkg(iuse="foo bar baz", required_use="foo? ( bar baz )"))
- self.assertNoReport(check, self.mk_pkg(iuse="+foo +bar +baz", required_use="foo? ( bar baz )"))
+ self.assertNoReport(
+ check, self.mk_pkg(iuse="+foo +bar +baz", required_use="foo? ( bar baz )")
+ )
self.assertReports(check, self.mk_pkg(iuse="+foo bar baz", required_use="foo? ( bar baz )"))
self.assertReport(check, self.mk_pkg(iuse="+foo +bar baz", required_use="foo? ( bar baz )"))
- r = self.assertReport(check, self.mk_pkg(iuse="+foo bar +baz", required_use="foo? ( bar baz )"))
+ r = self.assertReport(
+ check, self.mk_pkg(iuse="+foo bar +baz", required_use="foo? ( bar baz )")
+ )
assert isinstance(r, metadata.RequiredUseDefaults)
- assert r.use == ('baz', 'foo')
+ assert r.use == ("baz", "foo")
# TODO: fix this output to show both required USE flags
- assert str(r.required_use) == 'bar'
+ assert str(r.required_use) == "bar"
# any-of
- self.assertNoReport(check, self.mk_pkg(iuse="foo bar baz", required_use="foo? ( || ( bar baz ) )"))
- self.assertNoReport(check, self.mk_pkg(iuse="+foo +bar baz", required_use="foo? ( || ( bar baz ) )"))
- self.assertNoReport(check, self.mk_pkg(iuse="+foo bar +baz", required_use="foo? ( || ( bar baz ) )"))
- self.assertNoReport(check, self.mk_pkg(iuse="+foo +bar +baz", required_use="foo? ( || ( bar baz ) )"))
- r = self.assertReport(check, self.mk_pkg(iuse="+foo bar baz", required_use="foo? ( || ( bar baz ) )"))
+ self.assertNoReport(
+ check, self.mk_pkg(iuse="foo bar baz", required_use="foo? ( || ( bar baz ) )")
+ )
+ self.assertNoReport(
+ check, self.mk_pkg(iuse="+foo +bar baz", required_use="foo? ( || ( bar baz ) )")
+ )
+ self.assertNoReport(
+ check, self.mk_pkg(iuse="+foo bar +baz", required_use="foo? ( || ( bar baz ) )")
+ )
+ self.assertNoReport(
+ check, self.mk_pkg(iuse="+foo +bar +baz", required_use="foo? ( || ( bar baz ) )")
+ )
+ r = self.assertReport(
+ check, self.mk_pkg(iuse="+foo bar baz", required_use="foo? ( || ( bar baz ) )")
+ )
assert isinstance(r, metadata.RequiredUseDefaults)
- assert r.use == ('foo',)
- assert str(r.required_use) == '( bar || baz )'
+ assert r.use == ("foo",)
+ assert str(r.required_use) == "( bar || baz )"
def use_based():
# hidden to keep the test runner from finding it
class UseBased(IUSE_Options):
-
def test_required_addons(self):
assert addons.UseAddon in self.check_kls.required_addons
@@ -604,30 +634,32 @@ def use_based():
class _TestRestrictPropertiesCheck(use_based(), misc.ReportTestCase):
-
- def mk_pkg(self, restrict='', properties='', iuse=''):
+ def mk_pkg(self, restrict="", properties="", iuse=""):
return misc.FakePkg(
- 'dev-util/diffball-2.7.1',
- data={'IUSE': iuse, 'RESTRICT': restrict, 'PROPERTIES': properties})
+ "dev-util/diffball-2.7.1",
+ data={"IUSE": iuse, "RESTRICT": restrict, "PROPERTIES": properties},
+ )
def test_no_allowed(self):
# repo or its masters don't define any allowed values so anything goes
check = self.mk_check()
- self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: 'foo'}))
- self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: 'foo? ( bar )', 'iuse': 'foo'}))
+ self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: "foo"}))
+ self.assertNoReport(
+ check, self.mk_pkg(**{self.check_kls._attr: "foo? ( bar )", "iuse": "foo"})
+ )
def test_allowed(self):
- check = self.mk_check(options={self.check_kls._attr: ('foo',)})
+ check = self.mk_check(options={self.check_kls._attr: ("foo",)})
# allowed
- self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: 'foo'}))
+ self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: "foo"}))
# unknown
- r = self.assertReport(check, self.mk_pkg(**{self.check_kls._attr: 'bar'}))
+ r = self.assertReport(check, self.mk_pkg(**{self.check_kls._attr: "bar"}))
assert isinstance(r, self.check_kls._unknown_result_cls)
assert f'unknown {self.check_kls._attr.upper()}="bar"' in str(r)
# unknown multiple, conditional
- pkg = self.mk_pkg(**{self.check_kls._attr: 'baz? ( foo bar boo )', 'iuse': 'baz'})
+ pkg = self.mk_pkg(**{self.check_kls._attr: "baz? ( foo bar boo )", "iuse": "baz"})
r = self.assertReport(check, pkg)
assert isinstance(r, self.check_kls._unknown_result_cls)
assert f'unknown {self.check_kls._attr.upper()}="bar boo"' in str(r)
@@ -635,17 +667,21 @@ class _TestRestrictPropertiesCheck(use_based(), misc.ReportTestCase):
def test_unstated_iuse(self):
check = self.mk_check()
# no IUSE
- self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: 'foo'}))
+ self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: "foo"}))
# conditional with IUSE defined
- self.assertNoReport(check, self.mk_pkg(**{self.check_kls._attr: 'foo? ( bar )', 'iuse': 'foo'}))
+ self.assertNoReport(
+ check, self.mk_pkg(**{self.check_kls._attr: "foo? ( bar )", "iuse": "foo"})
+ )
# conditional missing IUSE
- r = self.assertReport(check, self.mk_pkg(**{self.check_kls._attr: 'foo? ( bar )'}))
+ r = self.assertReport(check, self.mk_pkg(**{self.check_kls._attr: "foo? ( bar )"}))
assert isinstance(r, addons.UnstatedIuse)
- assert 'unstated flag: [ foo ]' in str(r)
+ assert "unstated flag: [ foo ]" in str(r)
# multiple missing IUSE
- r = self.assertReport(check, self.mk_pkg(**{self.check_kls._attr: 'foo? ( bar ) boo? ( blah )'}))
+ r = self.assertReport(
+ check, self.mk_pkg(**{self.check_kls._attr: "foo? ( bar ) boo? ( blah )"})
+ )
assert isinstance(r, addons.UnstatedIuse)
- assert 'unstated flags: [ boo, foo ]' in str(r)
+ assert "unstated flags: [ boo, foo ]" in str(r)
class TestRestrictCheck(_TestRestrictPropertiesCheck):
@@ -670,30 +706,33 @@ class TestRestrictTestCheck(misc.ReportTestCase):
check_kls = metadata.RestrictTestCheck
check = metadata.RestrictTestCheck(None)
- def mk_pkg(self, iuse='', restrict=''):
- return misc.FakePkg(
- 'dev-util/diffball-2.7.1', data={'IUSE': iuse, 'RESTRICT': restrict})
+ def mk_pkg(self, iuse="", restrict=""):
+ return misc.FakePkg("dev-util/diffball-2.7.1", data={"IUSE": iuse, "RESTRICT": restrict})
def test_empty_restrict(self):
self.assertNoReport(self.check, self.mk_pkg())
def test_specified_restrict(self):
- self.assertNoReport(self.check, self.mk_pkg(
- iuse='test', restrict='!test? ( test )'))
+ self.assertNoReport(self.check, self.mk_pkg(iuse="test", restrict="!test? ( test )"))
# unconditional restriction is fine too
- self.assertNoReport(self.check, self.mk_pkg(iuse='test', restrict='test'))
- self.assertNoReport(self.check, self.mk_pkg(restrict='test'))
+ self.assertNoReport(self.check, self.mk_pkg(iuse="test", restrict="test"))
+ self.assertNoReport(self.check, self.mk_pkg(restrict="test"))
# more RESTRICTs
- self.assertNoReport(self.check, self.mk_pkg(iuse='foo test',
- restrict='foo? ( strip ) !test? ( test ) bindist'))
+ self.assertNoReport(
+ self.check,
+ self.mk_pkg(iuse="foo test", restrict="foo? ( strip ) !test? ( test ) bindist"),
+ )
def test_missing_restrict(self):
data = (
- ('test', ''), # missing entirely
- ('foo test', '!foo? ( test )'), # 'test' present in other condition
- ('foo test', '!foo? ( !test? ( test ) )'), # correct restriction inside another condition
- ('test', 'test? ( test )'), # USE condition written the wrong way around
+ ("test", ""), # missing entirely
+ ("foo test", "!foo? ( test )"), # 'test' present in other condition
+ (
+ "foo test",
+ "!foo? ( !test? ( test ) )",
+ ), # correct restriction inside another condition
+ ("test", "test? ( test )"), # USE condition written the wrong way around
)
for iuse, restrict in data:
r = self.assertReport(self.check, self.mk_pkg(iuse=iuse, restrict=restrict))
@@ -706,67 +745,65 @@ class TestLicenseCheck(use_based(), misc.ReportTestCase):
check_kls = metadata.LicenseCheck
def mk_check(self, licenses=(), **kwargs):
- self.repo = FakeRepo(repo_id='test', licenses=licenses)
+ self.repo = FakeRepo(repo_id="test", licenses=licenses)
options = self.get_options(**kwargs)
use_addon = addons.UseAddon(options)
check = self.check_kls(options, use_addon=use_addon)
return check
- def mk_pkg(self, license='', iuse=''):
+ def mk_pkg(self, license="", iuse=""):
return FakePkg(
- 'dev-util/diffball-2.7.1',
- data={'LICENSE': license, 'IUSE': iuse},
- repo=self.repo)
+ "dev-util/diffball-2.7.1", data={"LICENSE": license, "IUSE": iuse}, repo=self.repo
+ )
def test_malformed(self):
r = self.assertReport(self.mk_check(), self.mk_pkg("|| ("))
assert isinstance(r, metadata.InvalidLicense)
- assert r.attr == 'license'
+ assert r.attr == "license"
def test_empty(self):
r = self.assertReport(self.mk_check(), self.mk_pkg())
assert isinstance(r, metadata.MissingLicense)
def test_unstated_iuse(self):
- chk = self.mk_check(licenses=('BSD',))
+ chk = self.mk_check(licenses=("BSD",))
# no IUSE
- self.assertNoReport(chk, self.mk_pkg('BSD'))
+ self.assertNoReport(chk, self.mk_pkg("BSD"))
# conditional license with related IUSE
- pkg = self.mk_pkg(license='foo? ( BSD )', iuse='foo')
+ pkg = self.mk_pkg(license="foo? ( BSD )", iuse="foo")
self.assertNoReport(chk, pkg)
# conditional license with missing IUSE
- pkg = self.mk_pkg(license='foo? ( BSD )')
+ pkg = self.mk_pkg(license="foo? ( BSD )")
r = self.assertReport(chk, pkg)
assert isinstance(r, addons.UnstatedIuse)
- assert 'unstated flag: [ foo ]' in str(r)
+ assert "unstated flag: [ foo ]" in str(r)
def test_single_missing(self):
r = self.assertReport(self.mk_check(), self.mk_pkg("foo"))
assert isinstance(r, metadata.UnknownLicense)
- assert r.licenses == ('foo',)
+ assert r.licenses == ("foo",)
def test_multiple_existing(self):
- chk = self.mk_check(['foo', 'foo2'])
- self.assertNoReport(chk, self.mk_pkg('foo'))
- self.assertNoReport(chk, self.mk_pkg('foo', 'foo2'))
+ chk = self.mk_check(["foo", "foo2"])
+ self.assertNoReport(chk, self.mk_pkg("foo"))
+ self.assertNoReport(chk, self.mk_pkg("foo", "foo2"))
def test_multiple_missing(self):
- chk = self.mk_check(['foo', 'foo2'])
- r = self.assertReport(chk, self.mk_pkg('|| ( foo foo3 foo4 )'))
+ chk = self.mk_check(["foo", "foo2"])
+ r = self.assertReport(chk, self.mk_pkg("|| ( foo foo3 foo4 )"))
assert isinstance(r, metadata.UnknownLicense)
- assert r.licenses == ('foo3', 'foo4')
+ assert r.licenses == ("foo3", "foo4")
def test_unlicensed_categories(self):
- check = self.mk_check(['foo'])
+ check = self.mk_check(["foo"])
for category in self.check_kls.unlicensed_categories:
- for license in ('foo', ''):
+ for license in ("foo", ""):
pkg = FakePkg(
- f'{category}/diffball-2.7.1',
- data={'LICENSE': license},
- repo=self.repo)
+ f"{category}/diffball-2.7.1", data={"LICENSE": license}, repo=self.repo
+ )
if license:
r = self.assertReport(check, pkg)
assert isinstance(r, metadata.UnnecessaryLicense)
@@ -782,87 +819,94 @@ class TestMissingSlotDepCheck(use_based(), misc.ReportTestCase):
def mk_check(self, pkgs=None, **kwargs):
if pkgs is None:
pkgs = (
- FakePkg('dev-libs/foo-0', slot='0'),
- FakePkg('dev-libs/foo-1', slot='1'),
- FakePkg('dev-libs/bar-2', slot='2'),
+ FakePkg("dev-libs/foo-0", slot="0"),
+ FakePkg("dev-libs/foo-1", slot="1"),
+ FakePkg("dev-libs/bar-2", slot="2"),
)
- self.repo = FakeRepo(pkgs=pkgs, repo_id='test')
+ self.repo = FakeRepo(pkgs=pkgs, repo_id="test")
options = self.get_options(**kwargs)
use_addon = addons.UseAddon(options)
check = self.check_kls(options, use_addon=use_addon)
return check
- def mk_pkg(self, eapi='5', rdepend='', depend=''):
+ def mk_pkg(self, eapi="5", rdepend="", depend=""):
return FakePkg(
- 'dev-util/diffball-2.7.1', eapi=eapi,
- data={'RDEPEND': rdepend, 'DEPEND': depend},
- repo=self.repo)
+ "dev-util/diffball-2.7.1",
+ eapi=eapi,
+ data={"RDEPEND": rdepend, "DEPEND": depend},
+ repo=self.repo,
+ )
def test_flagged_deps(self):
- for dep_str in ('dev-libs/foo', 'dev-libs/foo[bar]'):
+ for dep_str in ("dev-libs/foo", "dev-libs/foo[bar]"):
for eapi_str, eapi_obj in eapi.EAPI.known_eapis.items():
if eapi_obj.options.sub_slotting:
r = self.assertReport(
- self.mk_check(), self.mk_pkg(
- eapi=eapi_str, rdepend=dep_str, depend=dep_str))
+ self.mk_check(), self.mk_pkg(eapi=eapi_str, rdepend=dep_str, depend=dep_str)
+ )
assert isinstance(r, metadata.MissingSlotDep)
- assert 'matches more than one slot: [ 0, 1 ]' in str(r)
+ assert "matches more than one slot: [ 0, 1 ]" in str(r)
def test_skipped_deps(self):
for dep_str in (
- '!dev-libs/foo', '!!dev-libs/foo', # blockers
- '~dev-libs/foo-0', '~dev-libs/foo-1', # version limited to single slots
- 'dev-libs/foo:0', 'dev-libs/foo:1', # slotted
- 'dev-libs/foo:*', 'dev-libs/foo:=', # slot operators
- ):
+ "!dev-libs/foo",
+ "!!dev-libs/foo", # blockers
+ "~dev-libs/foo-0",
+ "~dev-libs/foo-1", # version limited to single slots
+ "dev-libs/foo:0",
+ "dev-libs/foo:1", # slotted
+ "dev-libs/foo:*",
+ "dev-libs/foo:=", # slot operators
+ ):
for eapi_str, eapi_obj in eapi.EAPI.known_eapis.items():
if eapi_obj.options.sub_slotting:
self.assertNoReport(
- self.mk_check(), self.mk_pkg(
- eapi=eapi_str, rdepend=dep_str, depend=dep_str))
+ self.mk_check(), self.mk_pkg(eapi=eapi_str, rdepend=dep_str, depend=dep_str)
+ )
def test_no_deps(self):
self.assertNoReport(self.mk_check(), self.mk_pkg())
def test_single_slot_dep(self):
self.assertNoReport(
- self.mk_check(), self.mk_pkg(rdepend='dev-libs/bar', depend='dev-libs/bar'))
+ self.mk_check(), self.mk_pkg(rdepend="dev-libs/bar", depend="dev-libs/bar")
+ )
class TestDependencyCheck(use_based(), misc.ReportTestCase):
check_kls = metadata.DependencyCheck
- def mk_pkg(self, attr, depset='', eapi='0', iuse=''):
- eapi_attr_map = {'BDEPEND': '7', 'IDEPEND': '8'}
+ def mk_pkg(self, attr, depset="", eapi="0", iuse=""):
+ eapi_attr_map = {"BDEPEND": "7", "IDEPEND": "8"}
eapi = eapi_attr_map.get(attr, eapi)
return misc.FakePkg(
- 'dev-util/diffball-2.7.1',
- data={'EAPI': eapi, 'IUSE': iuse, attr: depset})
+ "dev-util/diffball-2.7.1", data={"EAPI": eapi, "IUSE": iuse, attr: depset}
+ )
def mk_check(self, pkgs=None, **kwargs):
if pkgs is None:
pkgs = (
- FakePkg('dev-libs/foo-0', slot='0', iuse=('bar',)),
- FakePkg('dev-libs/foo-1', slot='1', iuse=('bar', 'baz')),
- FakePkg('dev-libs/bar-2', slot='2'),
+ FakePkg("dev-libs/foo-0", slot="0", iuse=("bar",)),
+ FakePkg("dev-libs/foo-1", slot="1", iuse=("bar", "baz")),
+ FakePkg("dev-libs/bar-2", slot="2"),
)
- kwargs['search_repo'] = FakeRepo(pkgs=pkgs, repo_id='test')
+ kwargs["search_repo"] = FakeRepo(pkgs=pkgs, repo_id="test")
return super().mk_check(options=kwargs)
# pull the set of dependency attrs from the most recent EAPI
dep_attrs = sorted(list(eapi.EAPI.known_eapis.values())[-1].dep_keys)
- @pytest.mark.parametrize('attr', dep_attrs)
+ @pytest.mark.parametrize("attr", dep_attrs)
def test_depset(self, attr):
chk = self.mk_check()
mk_pkg = partial(self.mk_pkg, attr)
# various regular depsets
self.assertNoReport(chk, mk_pkg())
- self.assertNoReport(chk, mk_pkg('dev-util/foo'))
+ self.assertNoReport(chk, mk_pkg("dev-util/foo"))
self.assertNoReport(chk, mk_pkg("|| ( dev-util/foo ) dev-foo/bugger "))
- if attr == 'RDEPEND':
+ if attr == "RDEPEND":
self.assertNoReport(chk, mk_pkg("!dev-util/blah"))
else:
r = self.assertReport(chk, mk_pkg("!dev-util/blah"))
@@ -870,7 +914,7 @@ class TestDependencyCheck(use_based(), misc.ReportTestCase):
# invalid depset syntax
r = self.assertReport(chk, mk_pkg("|| ("))
- assert isinstance(r, getattr(metadata, f'Invalid{attr.lower().capitalize()}'))
+ assert isinstance(r, getattr(metadata, f"Invalid{attr.lower().capitalize()}"))
# pkg blocking itself
r = self.assertReport(chk, mk_pkg("!dev-util/diffball"))
@@ -879,105 +923,113 @@ class TestDependencyCheck(use_based(), misc.ReportTestCase):
assert f'{attr.upper()}="!dev-util/diffball"' in str(r)
# check for := in || () blocks
- pkg = mk_pkg(eapi='5', depset="|| ( dev-libs/foo:= dev-libs/bar )")
+ pkg = mk_pkg(eapi="5", depset="|| ( dev-libs/foo:= dev-libs/bar )")
r = self.assertReport(chk, pkg)
assert isinstance(r, metadata.BadDependency)
assert "= slot operator used inside || block" in str(r)
assert f'{attr.upper()}="dev-libs/foo:="' in str(r)
# multiple := atoms in || () blocks
- pkg = mk_pkg(eapi='5', depset="|| ( dev-libs/foo:= dev-libs/bar:= )")
+ pkg = mk_pkg(eapi="5", depset="|| ( dev-libs/foo:= dev-libs/bar:= )")
reports = self.assertReports(chk, pkg)
for r in reports:
assert isinstance(r, metadata.BadDependency)
assert "= slot operator used inside || block" in str(r)
# check for := in blockers
- r = self.assertReport(chk, mk_pkg(eapi='5', depset="!dev-libs/foo:="))
+ r = self.assertReport(chk, mk_pkg(eapi="5", depset="!dev-libs/foo:="))
assert isinstance(r, metadata.BadDependency)
assert "= slot operator used in blocker" in str(r)
assert f'{attr.upper()}="!dev-libs/foo:="' in str(r)
# check for missing package revisions
self.assertNoReport(chk, mk_pkg("=dev-libs/foo-1-r0"))
- r = self.assertReport(chk, mk_pkg(eapi='6', depset="=dev-libs/foo-1"))
+ r = self.assertReport(chk, mk_pkg(eapi="6", depset="=dev-libs/foo-1"))
assert isinstance(r, metadata.MissingPackageRevision)
assert f'{attr.upper()}="=dev-libs/foo-1"' in str(r)
- @pytest.mark.parametrize('attr', dep_attrs)
+ @pytest.mark.parametrize("attr", dep_attrs)
def test_depset_unstated_iuse(self, attr):
chk = self.mk_check()
mk_pkg = partial(self.mk_pkg, attr)
# unstated IUSE
- r = self.assertReport(chk, mk_pkg(depset='foo? ( dev-libs/foo )'))
+ r = self.assertReport(chk, mk_pkg(depset="foo? ( dev-libs/foo )"))
assert isinstance(r, addons.UnstatedIuse)
- assert 'unstated flag: [ foo ]' in str(r)
+ assert "unstated flag: [ foo ]" in str(r)
# known IUSE
- self.assertNoReport(chk, mk_pkg(depset='foo? ( dev-libs/foo )', iuse='foo'))
+ self.assertNoReport(chk, mk_pkg(depset="foo? ( dev-libs/foo )", iuse="foo"))
# multiple unstated IUSE
- r = self.assertReport(chk, mk_pkg(depset='foo? ( !bar? ( dev-libs/foo ) )'))
+ r = self.assertReport(chk, mk_pkg(depset="foo? ( !bar? ( dev-libs/foo ) )"))
assert isinstance(r, addons.UnstatedIuse)
- assert 'unstated flags: [ bar, foo ]' in str(r)
+ assert "unstated flags: [ bar, foo ]" in str(r)
- @pytest.mark.parametrize('attr', dep_attrs)
+ @pytest.mark.parametrize("attr", dep_attrs)
def test_depset_missing_usedep_default(self, attr):
chk = self.mk_check()
mk_pkg = partial(self.mk_pkg, attr)
# USE flag exists on all matching pkgs
- self.assertNoReport(chk, mk_pkg(eapi='4', depset='dev-libs/foo[bar?]'))
+ self.assertNoReport(chk, mk_pkg(eapi="4", depset="dev-libs/foo[bar?]"))
use_deps = (
- 'foo(-)?', '!foo(-)?', 'foo(+)?', '!foo(+)?', 'foo(-)=', '!foo(-)=',
- 'foo(+)=', '!foo(+)=', '-foo(-)', '-foo(+)',
+ "foo(-)?",
+ "!foo(-)?",
+ "foo(+)?",
+ "!foo(+)?",
+ "foo(-)=",
+ "!foo(-)=",
+ "foo(+)=",
+ "!foo(+)=",
+ "-foo(-)",
+ "-foo(+)",
)
for use_dep in use_deps:
# USE flag doesn't exist but has proper default
- self.assertNoReport(chk, mk_pkg(eapi='4', depset=f'dev-libs/bar[{use_dep}]'))
- if attr == 'RDEPEND':
- self.assertNoReport(chk, mk_pkg(eapi='4', depset=f'!dev-libs/bar[{use_dep}]'))
+ self.assertNoReport(chk, mk_pkg(eapi="4", depset=f"dev-libs/bar[{use_dep}]"))
+ if attr == "RDEPEND":
+ self.assertNoReport(chk, mk_pkg(eapi="4", depset=f"!dev-libs/bar[{use_dep}]"))
else:
- r = self.assertReport(chk, mk_pkg(eapi='4', depset=f'!dev-libs/bar[{use_dep}]'))
+ r = self.assertReport(chk, mk_pkg(eapi="4", depset=f"!dev-libs/bar[{use_dep}]"))
assert isinstance(r, metadata.MisplacedWeakBlocker)
# result triggers when matching pkgs lack the requested USE flag
for dep in (
- 'dev-libs/bar[foo?]',
- 'dev-libs/bar[!foo?]',
- 'dev-libs/bar[foo=]',
- 'dev-libs/bar[!foo=]',
- 'dev-libs/bar[-foo]',
- '|| ( dev-libs/foo[bar] dev-libs/bar[foo] )',
- '|| ( dev-libs/foo[bar] dev-libs/bar[-foo] )',
- ):
- r = self.assertReport(chk, mk_pkg(eapi='4', depset=dep))
+ "dev-libs/bar[foo?]",
+ "dev-libs/bar[!foo?]",
+ "dev-libs/bar[foo=]",
+ "dev-libs/bar[!foo=]",
+ "dev-libs/bar[-foo]",
+ "|| ( dev-libs/foo[bar] dev-libs/bar[foo] )",
+ "|| ( dev-libs/foo[bar] dev-libs/bar[-foo] )",
+ ):
+ r = self.assertReport(chk, mk_pkg(eapi="4", depset=dep))
assert isinstance(r, metadata.MissingUseDepDefault)
- assert r.pkgs == ('dev-libs/bar-2',)
- assert r.flag == 'foo'
+ assert r.pkgs == ("dev-libs/bar-2",)
+ assert r.flag == "foo"
assert "USE flag 'foo' missing" in str(r)
- if attr == 'RDEPEND':
- r = self.assertReport(chk, mk_pkg(eapi='4', depset='!dev-libs/bar[foo?]'))
+ if attr == "RDEPEND":
+ r = self.assertReport(chk, mk_pkg(eapi="4", depset="!dev-libs/bar[foo?]"))
assert isinstance(r, metadata.MissingUseDepDefault)
- assert r.pkgs == ('dev-libs/bar-2',)
- assert r.flag == 'foo'
+ assert r.pkgs == ("dev-libs/bar-2",)
+ assert r.flag == "foo"
assert "USE flag 'foo' missing" in str(r)
# USE flag missing on one of multiple matches
- r = self.assertReport(chk, mk_pkg(eapi='4', depset='dev-libs/foo[baz?]'))
+ r = self.assertReport(chk, mk_pkg(eapi="4", depset="dev-libs/foo[baz?]"))
assert isinstance(r, metadata.MissingUseDepDefault)
- assert r.atom == 'dev-libs/foo[baz?]'
- assert r.pkgs == ('dev-libs/foo-0',)
- assert r.flag == 'baz'
+ assert r.atom == "dev-libs/foo[baz?]"
+ assert r.pkgs == ("dev-libs/foo-0",)
+ assert r.flag == "baz"
assert "USE flag 'baz' missing" in str(r)
# USE flag missing on all matches
- r = self.assertReport(chk, mk_pkg(eapi='4', depset='dev-libs/foo[blah?]'))
+ r = self.assertReport(chk, mk_pkg(eapi="4", depset="dev-libs/foo[blah?]"))
assert isinstance(r, metadata.MissingUseDepDefault)
- assert r.atom == 'dev-libs/foo[blah?]'
- assert r.pkgs == ('dev-libs/foo-0', 'dev-libs/foo-1')
- assert r.flag == 'blah'
+ assert r.atom == "dev-libs/foo[blah?]"
+ assert r.pkgs == ("dev-libs/foo-0", "dev-libs/foo-1")
+ assert r.flag == "blah"
assert "USE flag 'blah' missing" in str(r)
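The hunks above are representative of the whole commit: black normalizes
string literals to double quotes and splits calls that overflow the line
limit. A minimal way to reproduce one of these rewrites with black's
programmatic API, assuming black is installed; the line length of 100 is
inferred from the hunk widths, not read from this commit's pyproject.toml
change:

    import black

    # One of the pre-format lines from the hunk above, as a module-level
    # statement so that black can parse it in isolation.
    SRC = (
        "r = self.assertReport(check, self.mk_pkg(iuse='+foo bar baz', "
        "required_use='foo? ( || ( bar baz ) )'))\n"
    )

    mode = black.Mode(line_length=100)  # assumed project setting
    print(black.format_str(SRC, mode=mode))

Strings that already contain double quotes, such as the DESCRIPTION="..."
literals later in this diff, are left single-quoted so that no escapes are
introduced.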
@@ -993,16 +1045,16 @@ class TestOutdatedBlockersCheck(misc.ReportTestCase):
# initialize parent repo
self.parent_git_repo = make_git_repo()
self.parent_repo = make_repo(self.parent_git_repo.path)
- self.parent_git_repo.add_all('initial commit')
+ self.parent_git_repo.add_all("initial commit")
# create a stub pkg and commit it
- self.parent_repo.create_ebuild('cat/pkg-0')
- self.parent_git_repo.add_all('cat/pkg-0')
+ self.parent_repo.create_ebuild("cat/pkg-0")
+ self.parent_git_repo.add_all("cat/pkg-0")
# initialize child repo
self.child_git_repo = make_git_repo()
- self.child_git_repo.run(['git', 'remote', 'add', 'origin', self.parent_git_repo.path])
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
- self.child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
+ self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
self.child_repo = make_repo(self.child_git_repo.path)
def init_check(self, options=None, future=0):
@@ -1015,36 +1067,39 @@ class TestOutdatedBlockersCheck(misc.ReportTestCase):
def _options(self, **kwargs):
args = [
- 'scan', '-q', '--cache-dir', self.cache_dir,
- '--repo', self.child_repo.location,
+ "scan",
+ "-q",
+ "--cache-dir",
+ self.cache_dir,
+ "--repo",
+ self.child_repo.location,
]
options, _ = self.tool.parse_args(args)
return options
def test_existent_blockers(self):
- self.child_repo.create_ebuild('cat/pkg-1', depend='!~cat/pkg-0')
- self.child_git_repo.add_all('cat/pkg: version bump to 1')
- self.child_repo.create_ebuild('cat/pkg-2', depend='!!~cat/pkg-0')
- self.child_git_repo.add_all('cat/pkg: version bump to 2')
- self.child_repo.create_ebuild('cat/pkg-3', depend='!!=cat/pkg-0*')
- self.child_git_repo.add_all('cat/pkg: version bump to 3')
+ self.child_repo.create_ebuild("cat/pkg-1", depend="!~cat/pkg-0")
+ self.child_git_repo.add_all("cat/pkg: version bump to 1")
+ self.child_repo.create_ebuild("cat/pkg-2", depend="!!~cat/pkg-0")
+ self.child_git_repo.add_all("cat/pkg: version bump to 2")
+ self.child_repo.create_ebuild("cat/pkg-3", depend="!!=cat/pkg-0*")
+ self.child_git_repo.add_all("cat/pkg: version bump to 3")
self.init_check()
self.assertNoReport(self.check, self.source)
def test_nonexistent_blockers(self):
- self.child_repo.create_ebuild('cat/pkg-1', depend='!nonexistent/pkg')
- self.child_git_repo.add_all('cat/pkg: version bump to 1')
+ self.child_repo.create_ebuild("cat/pkg-1", depend="!nonexistent/pkg")
+ self.child_git_repo.add_all("cat/pkg: version bump to 1")
self.init_check()
r = self.assertReport(self.check, self.source)
- expected = metadata.NonexistentBlocker(
- 'DEPEND', '!nonexistent/pkg', pkg=CPV('cat/pkg-1'))
+ expected = metadata.NonexistentBlocker("DEPEND", "!nonexistent/pkg", pkg=CPV("cat/pkg-1"))
assert r == expected
def test_outdated_blockers(self):
- self.parent_git_repo.remove_all('cat/pkg')
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
- self.child_repo.create_ebuild('cat/pkg-1', depend='!!=cat/pkg-0*')
- self.child_git_repo.add_all('cat/pkg: version bump to 1')
+ self.parent_git_repo.remove_all("cat/pkg")
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
+ self.child_repo.create_ebuild("cat/pkg-1", depend="!!=cat/pkg-0*")
+ self.child_git_repo.add_all("cat/pkg: version bump to 1")
# packages are not old enough to trigger any results
for days in (0, 100, 365, 729):
@@ -1056,7 +1111,8 @@ class TestOutdatedBlockersCheck(misc.ReportTestCase):
self.init_check(future=days)
r = self.assertReport(self.check, self.source)
expected = metadata.OutdatedBlocker(
- 'DEPEND', '!!=cat/pkg-0*', years, pkg=CPV('cat/pkg-1'))
+ "DEPEND", "!!=cat/pkg-0*", years, pkg=CPV("cat/pkg-1")
+ )
assert r == expected
@@ -1064,16 +1120,17 @@ class TestSrcUriCheck(use_based(), misc.ReportTestCase):
check_kls = metadata.SrcUriCheck
- def mk_pkg(self, src_uri='', restrict='', default_chksums={"size": 100},
- iuse='', disable_chksums=False):
+ def mk_pkg(
+ self, src_uri="", restrict="", default_chksums={"size": 100}, iuse="", disable_chksums=False
+ ):
class fake_repo:
def __init__(self, default_chksums):
if disable_chksums:
self.chksums = {}
else:
self.chksums = {}.fromkeys(
- {os.path.basename(x) for x in src_uri.split()},
- default_chksums)
+ {os.path.basename(x) for x in src_uri.split()}, default_chksums
+ )
def _get_digests(self, pkg, allow_missing=False):
return False, self.chksums
@@ -1082,47 +1139,52 @@ class TestSrcUriCheck(use_based(), misc.ReportTestCase):
_parent_repo = fake_repo(default_chksums)
return misc.FakePkg(
- 'dev-util/diffball-2.7.1',
- data={'SRC_URI': src_uri, 'IUSE': iuse, 'RESTRICT': restrict},
- parent=fake_parent())
+ "dev-util/diffball-2.7.1",
+ data={"SRC_URI": src_uri, "IUSE": iuse, "RESTRICT": restrict},
+ parent=fake_parent(),
+ )
def test_malformed(self):
- r = self.assertReport(
- self.mk_check(), self.mk_pkg("foon", disable_chksums=True))
+ r = self.assertReport(self.mk_check(), self.mk_pkg("foon", disable_chksums=True))
assert isinstance(r, metadata.InvalidSrcUri)
- assert r.attr == 'fetchables'
+ assert r.attr == "fetchables"
def test_regular_src_uri(self):
chk = self.mk_check()
# single file
- self.assertNoReport(chk, self.mk_pkg(src_uri='https://foon.com/foon-2.7.1.tar.gz'))
+ self.assertNoReport(chk, self.mk_pkg(src_uri="https://foon.com/foon-2.7.1.tar.gz"))
# single file, multiple uris
- self.assertNoReport(chk, self.mk_pkg(
- src_uri='https://foo.com/a-0.tar.gz https://bar.com/a-0.tar.gz'))
+ self.assertNoReport(
+ chk, self.mk_pkg(src_uri="https://foo.com/a-0.tar.gz https://bar.com/a-0.tar.gz")
+ )
# multiple files, multiple uris
- self.assertNoReport(chk, self.mk_pkg(
- src_uri="""
+ self.assertNoReport(
+ chk,
+ self.mk_pkg(
+ src_uri="""
https://foo.com/a-0.tar.gz https://bar.com/a-0.tar.gz
https://blah.org/b-1.zip https://boo.net/boo-10.tar.xz
- """))
+ """
+ ),
+ )
def test_unknown_mirror(self):
chk = self.mk_check()
# single mirror
- r = self.assertReport(chk, self.mk_pkg('mirror://foo/a-0.gz https://foo.com/a-0.gz'))
+ r = self.assertReport(chk, self.mk_pkg("mirror://foo/a-0.gz https://foo.com/a-0.gz"))
assert isinstance(r, metadata.UnknownMirror)
- assert r.mirror == 'foo'
- assert r.uri == 'mirror://foo/a-0.gz'
+ assert r.mirror == "foo"
+ assert r.uri == "mirror://foo/a-0.gz"
assert "unknown mirror 'foo'" in str(r)
# multiple mirrors
- pkg = self.mk_pkg('mirror://foo/a-0.gz mirror://bar/a-0.gz https://foo.com/a-0.gz')
+ pkg = self.mk_pkg("mirror://foo/a-0.gz mirror://bar/a-0.gz https://foo.com/a-0.gz")
reports = self.assertReports(chk, pkg)
- for mirror, r in zip(('bar', 'foo'), sorted(reports, key=attrgetter('mirror'))):
+ for mirror, r in zip(("bar", "foo"), sorted(reports, key=attrgetter("mirror"))):
assert isinstance(r, metadata.UnknownMirror)
assert r.mirror == mirror
- assert r.uri == f'mirror://{mirror}/a-0.gz'
+ assert r.uri == f"mirror://{mirror}/a-0.gz"
assert f"unknown mirror '{mirror}'" in str(r)
def test_bad_filename(self):
@@ -1131,77 +1193,80 @@ class TestSrcUriCheck(use_based(), misc.ReportTestCase):
# PN filename
r = self.assertReport(chk, self.mk_pkg("https://foon.com/diffball.tar.gz"))
assert isinstance(r, metadata.BadFilename)
- assert r.filenames == ('diffball.tar.gz',)
- assert 'bad filename: [ diffball.tar.gz ]' in str(r)
+ assert r.filenames == ("diffball.tar.gz",)
+ assert "bad filename: [ diffball.tar.gz ]" in str(r)
# PV filename
r = self.assertReport(chk, self.mk_pkg("https://foon.com/2.7.1.tar.gz"))
assert isinstance(r, metadata.BadFilename)
- assert r.filenames == ('2.7.1.tar.gz',)
- assert 'bad filename: [ 2.7.1.tar.gz ]' in str(r)
+ assert r.filenames == ("2.7.1.tar.gz",)
+ assert "bad filename: [ 2.7.1.tar.gz ]" in str(r)
# github-style PV filename
r = self.assertReport(chk, self.mk_pkg("https://foon.com/v2.7.1.zip"))
assert isinstance(r, metadata.BadFilename)
- assert r.filenames == ('v2.7.1.zip',)
- assert 'bad filename: [ v2.7.1.zip ]' in str(r)
+ assert r.filenames == ("v2.7.1.zip",)
+ assert "bad filename: [ v2.7.1.zip ]" in str(r)
# github-style commit snapshot filename
- r = self.assertReport(chk, self.mk_pkg("https://foon.com/cb230f01fb288a0b9f0fc437545b97d06c846bd3.tar.gz"))
+ r = self.assertReport(
+ chk, self.mk_pkg("https://foon.com/cb230f01fb288a0b9f0fc437545b97d06c846bd3.tar.gz")
+ )
assert isinstance(r, metadata.BadFilename)
# multiple bad filenames
- r = self.assertReport(chk, self.mk_pkg("https://foon.com/2.7.1.tar.gz https://foon.com/diffball.zip"))
+ r = self.assertReport(
+ chk, self.mk_pkg("https://foon.com/2.7.1.tar.gz https://foon.com/diffball.zip")
+ )
assert isinstance(r, metadata.BadFilename)
- assert r.filenames == ('2.7.1.tar.gz', 'diffball.zip')
- assert 'bad filenames: [ 2.7.1.tar.gz, diffball.zip ]' in str(r)
+ assert r.filenames == ("2.7.1.tar.gz", "diffball.zip")
+ assert "bad filenames: [ 2.7.1.tar.gz, diffball.zip ]" in str(r)
def test_missing_uri(self):
chk = self.mk_check()
# mangled protocol
- r = self.assertReport(chk, self.mk_pkg('http:/foo/foo-0.tar.gz'))
+ r = self.assertReport(chk, self.mk_pkg("http:/foo/foo-0.tar.gz"))
assert isinstance(r, metadata.MissingUri)
- assert r.filenames == ('http:/foo/foo-0.tar.gz',)
+ assert r.filenames == ("http:/foo/foo-0.tar.gz",)
assert "unfetchable file: 'http:/foo/foo-0.tar.gz'" in str(r)
# no URI and RESTRICT doesn't contain 'fetch'
- r = self.assertReport(chk, self.mk_pkg('foon'))
+ r = self.assertReport(chk, self.mk_pkg("foon"))
assert isinstance(r, metadata.MissingUri)
- assert r.filenames == ('foon',)
+ assert r.filenames == ("foon",)
assert "unfetchable file: 'foon'" in str(r)
# no URI and RESTRICT contains 'fetch'
- self.assertNoReport(chk, self.mk_pkg('foon', restrict='fetch'))
+ self.assertNoReport(chk, self.mk_pkg("foon", restrict="fetch"))
# conditional URI and conditional RESTRICT containing 'fetch'
- pkg = self.mk_pkg(src_uri='foo? ( bar )', iuse='foo', restrict='foo? ( fetch )')
+ pkg = self.mk_pkg(src_uri="foo? ( bar )", iuse="foo", restrict="foo? ( fetch )")
self.assertNoReport(chk, pkg)
# negated
- pkg = self.mk_pkg(src_uri='!foo? ( bar )', iuse='foo', restrict='!foo? ( fetch )')
+ pkg = self.mk_pkg(src_uri="!foo? ( bar )", iuse="foo", restrict="!foo? ( fetch )")
self.assertNoReport(chk, pkg)
# multi-level conditional
pkg = self.mk_pkg(
- iuse='foo bar',
- src_uri='foo? ( bar? ( blah ) )',
- restrict='foo? ( bar? ( fetch ) )')
+ iuse="foo bar", src_uri="foo? ( bar? ( blah ) )", restrict="foo? ( bar? ( fetch ) )"
+ )
self.assertNoReport(chk, pkg)
def test_unstated_iuse(self):
chk = self.mk_check()
# no IUSE
- self.assertNoReport(chk, self.mk_pkg('https://foo.com/foo-0.tar.gz'))
+ self.assertNoReport(chk, self.mk_pkg("https://foo.com/foo-0.tar.gz"))
# conditional URI with related IUSE
- pkg = self.mk_pkg(src_uri='foo? ( https://foo.com/foo-0.tar.gz )', iuse='foo')
+ pkg = self.mk_pkg(src_uri="foo? ( https://foo.com/foo-0.tar.gz )", iuse="foo")
self.assertNoReport(chk, pkg)
# conditional URI with missing IUSE
- pkg = self.mk_pkg(src_uri='foo? ( https://foo.com/foo-0.tar.gz )')
+ pkg = self.mk_pkg(src_uri="foo? ( https://foo.com/foo-0.tar.gz )")
r = self.assertReport(chk, pkg)
assert isinstance(r, addons.UnstatedIuse)
- assert 'unstated flag: [ foo ]' in str(r)
+ assert "unstated flag: [ foo ]" in str(r)
def test_bad_proto(self):
chk = self.mk_check()
@@ -1211,22 +1276,23 @@ class TestSrcUriCheck(use_based(), misc.ReportTestCase):
for proto in self.check_kls.valid_protos:
self.assertNoReport(
- chk, self.mk_pkg(f"{proto}://dar.com/foon"),
- msg=f"testing valid proto {proto}")
+ chk, self.mk_pkg(f"{proto}://dar.com/foon"), msg=f"testing valid proto {proto}"
+ )
- bad_proto = f'{proto}x'
+ bad_proto = f"{proto}x"
r = self.assertReport(chk, self.mk_pkg(f"{bad_proto}://foon.com/foon"))
assert isinstance(r, metadata.BadProtocol)
assert bad_proto in str(r)
- assert f'{bad_proto}://foon.com/foon' in str(r)
+ assert f"{bad_proto}://foon.com/foon" in str(r)
# check collapsing
pkg = self.mk_pkg(f"{bad_proto}://foon.com/foon {bad_proto}://dar.com/foon")
r = self.assertReport(chk, pkg)
assert isinstance(r, metadata.BadProtocol)
assert list(r.uris) == sorted(
- f'{bad_proto}://{x}/foon' for x in ('foon.com', 'dar.com'))
+ f"{bad_proto}://{x}/foon" for x in ("foon.com", "dar.com")
+ )
assert bad_proto in str(r)
def test_tarball_available_github(self):
@@ -1235,7 +1301,7 @@ class TestSrcUriCheck(use_based(), misc.ReportTestCase):
r = self.assertReport(chk, self.mk_pkg(uri))
assert isinstance(r, metadata.TarballAvailable)
assert r.uris == (uri,)
- assert '[ https://github.com/foo/bar/archive/v1.2.3.zip ]' in str(r)
+ assert "[ https://github.com/foo/bar/archive/v1.2.3.zip ]" in str(r)
def test_tarball_available_gitlab(self):
chk = self.mk_check()
@@ -1243,36 +1309,34 @@ class TestSrcUriCheck(use_based(), misc.ReportTestCase):
r = self.assertReport(chk, self.mk_pkg(uri))
assert isinstance(r, metadata.TarballAvailable)
assert r.uris == (uri,)
- assert 'zip archive used when tarball available' in str(r)
+ assert "zip archive used when tarball available" in str(r)
class TestMissingUnpackerDepCheck(use_based(), misc.ReportTestCase):
check_kls = metadata.MissingUnpackerDepCheck
- def mk_pkg(self, exts, eapi='7', **data):
+ def mk_pkg(self, exts, eapi="7", **data):
if isinstance(exts, str):
exts = [exts]
class fake_repo:
def _get_digests(self, pkg, allow_missing=False):
- chksums = {f'diffball-2.7.1{ext}': {'size': 100} for ext in exts}
+ chksums = {f"diffball-2.7.1{ext}": {"size": 100} for ext in exts}
return False, chksums
- data['SRC_URI'] = ' '.join(
- f'https://foo.com/diffball-2.7.1{ext}' for ext in exts)
- return FakePkg(
- 'dev-util/diffball-2.7.1', data=data, eapi=eapi, repo=fake_repo())
+ data["SRC_URI"] = " ".join(f"https://foo.com/diffball-2.7.1{ext}" for ext in exts)
+ return FakePkg("dev-util/diffball-2.7.1", data=data, eapi=eapi, repo=fake_repo())
def test_with_system_dep(self):
- self.assertNoReport(self.mk_check(), self.mk_pkg('.tar.gz'))
+ self.assertNoReport(self.mk_check(), self.mk_pkg(".tar.gz"))
def test_keyword_output(self):
# unpacker deps go in BDEPEND in EAPI >= 7
- r = self.assertReport(self.mk_check(), self.mk_pkg('.zip', eapi='7'))
+ r = self.assertReport(self.mk_check(), self.mk_pkg(".zip", eapi="7"))
assert 'missing BDEPEND="app-arch/unzip"' in str(r)
# and in DEPEND for EAPI < 7
- r = self.assertReport(self.mk_check(), self.mk_pkg('.zip', eapi='6'))
+ r = self.assertReport(self.mk_check(), self.mk_pkg(".zip", eapi="6"))
assert 'missing DEPEND="app-arch/unzip"' in str(r)
def test_without_dep(self):
@@ -1280,23 +1344,22 @@ class TestMissingUnpackerDepCheck(use_based(), misc.ReportTestCase):
pkg = self.mk_pkg(ext)
r = self.assertReport(self.mk_check(), pkg)
assert isinstance(r, metadata.MissingUnpackerDep)
- assert r.filenames == (f'diffball-2.7.1{ext}',)
- assert r.unpackers == tuple(
- sorted(map(str, self.check_kls.non_system_unpackers[ext])))
+ assert r.filenames == (f"diffball-2.7.1{ext}",)
+ assert r.unpackers == tuple(sorted(map(str, self.check_kls.non_system_unpackers[ext])))
def test_with_dep(self):
for ext, unpackers in self.check_kls.non_system_unpackers.items():
- for dep_type in ('DEPEND', 'BDEPEND'):
+ for dep_type in ("DEPEND", "BDEPEND"):
for unpacker in unpackers:
- for dep in (unpacker, f'>={unpacker}-1'):
+ for dep in (unpacker, f">={unpacker}-1"):
kwargs = {dep_type: dep}
pkg = self.mk_pkg(ext, **kwargs)
self.assertNoReport(self.mk_check(), pkg)
def test_rar_with_or_dep(self):
self.assertNoReport(
- self.mk_check(),
- self.mk_pkg('.rar', DEPEND='|| ( app-arch/rar app-arch/unrar )'))
+ self.mk_check(), self.mk_pkg(".rar", DEPEND="|| ( app-arch/rar app-arch/unrar )")
+ )
def test_without_multiple_unpackers(self):
for combination in combinations(self.check_kls.non_system_unpackers.items(), 2):
@@ -1310,19 +1373,19 @@ class TestMissingUnpackerDepCheck(use_based(), misc.ReportTestCase):
assert len(set(unpackers)) == 1
r = reports[0]
assert isinstance(r, metadata.MissingUnpackerDep)
- assert r.filenames == tuple(sorted(f'diffball-2.7.1{ext}' for ext in exts))
+ assert r.filenames == tuple(sorted(f"diffball-2.7.1{ext}" for ext in exts))
assert r.unpackers == tuple(sorted(map(str, unpackers[0])))
else:
assert len(reports) == 2
for i, r in enumerate(reports):
assert isinstance(r, metadata.MissingUnpackerDep)
- assert r.filenames == (f'diffball-2.7.1{exts[i]}',)
+ assert r.filenames == (f"diffball-2.7.1{exts[i]}",)
assert r.unpackers == tuple(sorted(map(str, unpackers[i])))
def test_with_multiple_unpackers_one_missing(self):
r = self.assertReport(
- self.mk_check(),
- self.mk_pkg(['.zip', '.7z'], DEPEND='app-arch/unzip'))
+ self.mk_check(), self.mk_pkg([".zip", ".7z"], DEPEND="app-arch/unzip")
+ )
assert isinstance(r, metadata.MissingUnpackerDep)
- assert r.filenames == (f'diffball-2.7.1.7z',)
- assert r.unpackers == ('app-arch/p7zip',)
+ assert r.filenames == ("diffball-2.7.1.7z",)
+ assert r.unpackers == ("app-arch/p7zip",)
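Several hunks in this file explode a tuple one element per line even when a
shorter layout would fit. That is black's "magic trailing comma": any
collection whose source already ends with a trailing comma stays exploded.
A small sketch of the difference (the variable name is illustrative):

    import black

    mode = black.Mode(line_length=100)  # assumed project setting

    flat = "use_deps = ('foo(-)?', '!foo(-)?')\n"    # no trailing comma
    magic = "use_deps = ('foo(-)?', '!foo(-)?',)\n"  # trailing comma

    print(black.format_str(flat, mode=mode))   # single line, quotes normalized
    print(black.format_str(magic, mode=mode))  # one element per line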
diff --git a/tests/checks/test_network.py b/tests/checks/test_network.py
index bb3a7ef5..fb684954 100644
--- a/tests/checks/test_network.py
+++ b/tests/checks/test_network.py
@@ -10,34 +10,38 @@ from unittest.mock import patch
import pytest
from pkgcheck import objects, reporters, scan
from pkgcheck.checks import NetworkCheck
-from pkgcheck.checks.network import (DeadUrl, FetchablesUrlCheck,
- HomepageUrlCheck)
+from pkgcheck.checks.network import DeadUrl, FetchablesUrlCheck, HomepageUrlCheck
from pkgcheck.packages import RawCPV
from snakeoil.formatters import PlainTextFormatter
# skip module tests if requests isn't available
-requests = pytest.importorskip('requests')
+requests = pytest.importorskip("requests")
class TestNetworkChecks:
- repos_data = pytest.REPO_ROOT / 'testdata/data/repos'
- repos_dir = pytest.REPO_ROOT / 'testdata/repos'
+ repos_data = pytest.REPO_ROOT / "testdata/data/repos"
+ repos_dir = pytest.REPO_ROOT / "testdata/repos"
@pytest.fixture(autouse=True)
def _setup(self, testconfig, tmp_path):
- base_args = ['--config', testconfig]
+ base_args = ["--config", testconfig]
self.scan = partial(scan, base_args=base_args)
self.scan_args = [
- '--config', 'no', '--cache-dir', str(tmp_path), '--net',
- '-r', str(self.repos_dir / 'network'),
+ "--config",
+ "no",
+ "--cache-dir",
+ str(tmp_path),
+ "--net",
+ "-r",
+ str(self.repos_dir / "network"),
]
_net_results = [
(cls, result)
for _name, cls in sorted(objects.CHECKS.items())
if issubclass(cls, NetworkCheck)
- for result in sorted(cls.known_results, key=attrgetter('__name__'))
+ for result in sorted(cls.known_results, key=attrgetter("__name__"))
]
def _render_results(self, results, **kwargs):
@@ -50,34 +54,34 @@ class TestNetworkChecks:
output = f.read().decode()
return output
- @pytest.mark.parametrize('check, result', _net_results)
+ @pytest.mark.parametrize("check, result", _net_results)
def test_scan(self, check, result):
check_name = check.__name__
keyword = result.__name__
- result_dir = self.repos_dir / 'network' / check_name
- paths = tuple(result_dir.glob(keyword + '*'))
+ result_dir = self.repos_dir / "network" / check_name
+ paths = tuple(result_dir.glob(keyword + "*"))
if not paths:
- pytest.skip('data unavailable')
+ pytest.skip("data unavailable")
for path in paths:
ebuild_name = os.path.basename(path)
- data_dir = self.repos_data / 'network' / check_name / ebuild_name
+ data_dir = self.repos_data / "network" / check_name / ebuild_name
# load response data to fake
- module_path = path / 'responses.py'
- spec = importlib.util.spec_from_file_location('responses_mod', module_path)
+ module_path = path / "responses.py"
+ spec = importlib.util.spec_from_file_location("responses_mod", module_path)
responses_mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(responses_mod)
results = []
- args = ['-c', check_name, '-k', keyword, f'{check_name}/{ebuild_name}']
- with patch('pkgcheck.addons.net.requests.Session.send') as send:
+ args = ["-c", check_name, "-k", keyword, f"{check_name}/{ebuild_name}"]
+ with patch("pkgcheck.addons.net.requests.Session.send") as send:
send.side_effect = responses_mod.responses
# load expected results if they exist
try:
- with (data_dir / 'expected.json').open() as f:
+ with (data_dir / "expected.json").open() as f:
expected_results = set(reporters.JsonStream.from_iter(f))
except FileNotFoundError:
# check stopped before making request or completed successfully
@@ -85,37 +89,42 @@ class TestNetworkChecks:
results = list(self.scan(self.scan_args + args))
rendered_results = self._render_results(results)
- assert rendered_results, 'failed rendering results'
+ assert rendered_results, "failed rendering results"
if set(results) != expected_results:
- error = ['unmatched results:']
+ error = ["unmatched results:"]
expected = self._render_results(expected_results)
- error.append(f'expected:\n{expected}')
- error.append(f'got:\n{rendered_results}')
- pytest.fail('\n'.join(error))
-
- @pytest.mark.parametrize('check, result', (
- (HomepageUrlCheck, DeadUrl),
- (FetchablesUrlCheck, DeadUrl),
- ))
+ error.append(f"expected:\n{expected}")
+ error.append(f"got:\n{rendered_results}")
+ pytest.fail("\n".join(error))
+
+ @pytest.mark.parametrize(
+ "check, result",
+ (
+ (HomepageUrlCheck, DeadUrl),
+ (FetchablesUrlCheck, DeadUrl),
+ ),
+ )
def test_scan_ftp(self, check, result):
check_name = check.__name__
keyword = result.__name__
- pkg = RawCPV(check_name, f'ftp-{keyword}', '0')
- if check_name == 'HomepageUrlCheck':
- deadurl = DeadUrl('HOMEPAGE', 'ftp://pkgcheck.net/pkgcheck/', 'dead ftp', pkg=pkg)
+ pkg = RawCPV(check_name, f"ftp-{keyword}", "0")
+ if check_name == "HomepageUrlCheck":
+ deadurl = DeadUrl("HOMEPAGE", "ftp://pkgcheck.net/pkgcheck/", "dead ftp", pkg=pkg)
else:
- deadurl = DeadUrl('SRC_URI', 'ftp://pkgcheck.net/pkgcheck/foo.tar.gz', 'dead ftp', pkg=pkg)
+ deadurl = DeadUrl(
+ "SRC_URI", "ftp://pkgcheck.net/pkgcheck/foo.tar.gz", "dead ftp", pkg=pkg
+ )
data = (
- (urllib.error.URLError('dead ftp'), deadurl),
- (socket.timeout('dead ftp'), deadurl),
+ (urllib.error.URLError("dead ftp"), deadurl),
+ (socket.timeout("dead ftp"), deadurl),
(None, None), # faking a clean connection
)
- args = ['-c', check_name, '-k', keyword, f'{check_name}/ftp-{keyword}']
+ args = ["-c", check_name, "-k", keyword, f"{check_name}/ftp-{keyword}"]
for side_effect, expected_result in data:
- with patch('pkgcheck.checks.network.urllib.request.urlopen') as urlopen:
+ with patch("pkgcheck.checks.network.urllib.request.urlopen") as urlopen:
if side_effect is not None:
urlopen.side_effect = side_effect
results = list(self.scan(self.scan_args + args))
@@ -123,4 +132,4 @@ class TestNetworkChecks:
assert not results
else:
assert results == [expected_result]
- assert self._render_results(results), 'failed rendering results'
+ assert self._render_results(results), "failed rendering results"
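The import hunk at the top of this file shows the inverse case: with no
trailing comma and a result that fits under the line limit, black collapses
a parenthesized import onto a single line. A minimal reproduction; pkgcheck
itself need not be installed, since black only parses the text:

    import black

    SRC = (
        "from pkgcheck.checks.network import (DeadUrl, FetchablesUrlCheck,\n"
        "                                     HomepageUrlCheck)\n"
    )
    print(black.format_str(SRC, mode=black.Mode(line_length=100)))
    # from pkgcheck.checks.network import DeadUrl, FetchablesUrlCheck, HomepageUrlCheck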
diff --git a/tests/checks/test_perl.py b/tests/checks/test_perl.py
index b9c25578..1b26e412 100644
--- a/tests/checks/test_perl.py
+++ b/tests/checks/test_perl.py
@@ -6,7 +6,7 @@ from snakeoil.cli import arghparse
from .. import misc
-REASON = ''
+REASON = ""
def perl_deps_missing():
@@ -28,49 +28,49 @@ class TestPerlCheck(misc.ReportTestCase):
def mk_check(self, verbosity=0):
return self.check_kls(arghparse.Namespace(verbosity=verbosity))
- def mk_pkg(self, PVR, dist_version='', eclasses=('perl-module',), **kwargs):
- lines = ['inherit perl-module\n']
+ def mk_pkg(self, PVR, dist_version="", eclasses=("perl-module",), **kwargs):
+ lines = ["inherit perl-module\n"]
if dist_version:
- lines.append(f'DIST_VERSION={dist_version}\n')
- kwargs.setdefault('EAPI', '7')
- kwargs.setdefault('_eclasses_', list(eclasses))
- return misc.FakePkg(f'app-foo/bar-{PVR}', lines=lines, data=kwargs)
+ lines.append(f"DIST_VERSION={dist_version}\n")
+ kwargs.setdefault("EAPI", "7")
+ kwargs.setdefault("_eclasses_", list(eclasses))
+ return misc.FakePkg(f"app-foo/bar-{PVR}", lines=lines, data=kwargs)
def test_matching(self):
"""Ebuilds with matching DIST_VERSION and package version."""
- for PVR in ('1.7.0-r0', '1.7.0', '1.7.0-r100'):
- self.assertNoReport(self.mk_check(), self.mk_pkg(PVR, '1.007'))
+ for PVR in ("1.7.0-r0", "1.7.0", "1.7.0-r100"):
+ self.assertNoReport(self.mk_check(), self.mk_pkg(PVR, "1.007"))
def test_nonmatching(self):
"""Ebuilds without matching DIST_VERSION and package version."""
- for PVR in ('1.7.0-r0', '1.7.0', '1.7.0-r100'):
- r = self.assertReport(self.mk_check(), self.mk_pkg(PVR, '1.07'))
+ for PVR in ("1.7.0-r0", "1.7.0", "1.7.0-r100"):
+ r = self.assertReport(self.mk_check(), self.mk_pkg(PVR, "1.07"))
assert isinstance(r, perl.MismatchedPerlVersion)
- assert r.dist_version == '1.07'
- assert r.normalized == '1.70.0'
- assert 'DIST_VERSION=1.07 normalizes to 1.70.0' in str(r)
- r = self.assertReport(self.mk_check(), self.mk_pkg(PVR, '1.7'))
+ assert r.dist_version == "1.07"
+ assert r.normalized == "1.70.0"
+ assert "DIST_VERSION=1.07 normalizes to 1.70.0" in str(r)
+ r = self.assertReport(self.mk_check(), self.mk_pkg(PVR, "1.7"))
assert isinstance(r, perl.MismatchedPerlVersion)
- assert r.dist_version == '1.7'
- assert r.normalized == '1.700.0'
- assert 'DIST_VERSION=1.7 normalizes to 1.700.0' in str(r)
+ assert r.dist_version == "1.7"
+ assert r.normalized == "1.700.0"
+ assert "DIST_VERSION=1.7 normalizes to 1.700.0" in str(r)
def test_no_dist_version(self):
"""Ebuilds without DIST_VERSION defined are skipped."""
- self.assertNoReport(self.mk_check(), self.mk_pkg('1.7.0'))
+ self.assertNoReport(self.mk_check(), self.mk_pkg("1.7.0"))
def test_no_perl(self):
"""Check initialization fails if perl isn't installed."""
- with patch('subprocess.Popen') as popen:
- popen.side_effect = FileNotFoundError('perl not available')
- with pytest.raises(SkipCheck, match='perl not installed'):
+ with patch("subprocess.Popen") as popen:
+ popen.side_effect = FileNotFoundError("perl not available")
+ with pytest.raises(SkipCheck, match="perl not installed"):
self.mk_check()
def test_no_perl_deps(self):
"""Check initialization fails if perl deps aren't installed."""
- with patch('pkgcheck.checks.perl.subprocess.Popen') as popen:
- popen.return_value.stdout.readline.return_value = 'perl error'
+ with patch("pkgcheck.checks.perl.subprocess.Popen") as popen:
+ popen.return_value.stdout.readline.return_value = "perl error"
popen.return_value.poll.return_value = 2
for verbosity in (0, 1):
- with pytest.raises(SkipCheck, match='failed to run perl script'):
+ with pytest.raises(SkipCheck, match="failed to run perl script"):
self.mk_check(verbosity=verbosity)
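A formatting-only commit of this size can be applied mechanically because
black, in its default --safe mode, verifies that the reformatted source
parses to an equivalent AST before writing it out. The helper below is a
rough standalone approximation of that idea, not black's actual
implementation:

    import ast

    def equivalent(old_src: str, new_src: str) -> bool:
        # ast.dump() is blind to quoting style, spacing, and line breaks,
        # so formatting-only rewrites compare equal.
        return ast.dump(ast.parse(old_src)) == ast.dump(ast.parse(new_src))

    assert equivalent("REASON = ''", 'REASON = ""')        # quote change only
    assert not equivalent("REASON = ''", "REASON = ' '")   # value changed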
diff --git a/tests/checks/test_pkgdir.py b/tests/checks/test_pkgdir.py
index 1fc01b01..2a26e79c 100644
--- a/tests/checks/test_pkgdir.py
+++ b/tests/checks/test_pkgdir.py
@@ -21,33 +21,34 @@ class PkgDirCheckBase(misc.ReportTestCase):
@pytest.fixture(autouse=True)
def _create_repo(self, tmpdir):
- self.repo = FakeRepo(repo_id='repo', location=str(tmpdir))
+ self.repo = FakeRepo(repo_id="repo", location=str(tmpdir))
def mk_check(self, gentoo=False):
options = arghparse.Namespace(
- target_repo=self.repo, cache={'git': False}, gentoo_repo=gentoo)
+ target_repo=self.repo, cache={"git": False}, gentoo_repo=gentoo
+ )
kwargs = {}
if addons.git.GitAddon in self.check_kls.required_addons:
- kwargs['git_addon'] = addons.git.GitAddon(options)
+ kwargs["git_addon"] = addons.git.GitAddon(options)
return self.check_kls(options, **kwargs)
- def mk_pkg(self, files={}, category=None, package=None, version='0.7.1', revision=''):
+ def mk_pkg(self, files={}, category=None, package=None, version="0.7.1", revision=""):
# generate random cat/PN
category = misc.random_str() if category is None else category
package = misc.random_str() if package is None else package
pkg = f"{category}/{package}-{version}{revision}"
- self.filesdir = pjoin(self.repo.location, category, package, 'files')
+ self.filesdir = pjoin(self.repo.location, category, package, "files")
# create files dir with random empty subdir
os.makedirs(pjoin(self.filesdir, misc.random_str()), exist_ok=True)
# create dirs that should be ignored
- for d in getattr(self.check_kls, 'ignore_dirs', ()):
+ for d in getattr(self.check_kls, "ignore_dirs", ()):
os.makedirs(pjoin(self.filesdir, d), exist_ok=True)
# create specified files in FILESDIR
for fn, contents in files.items():
- with open(pjoin(self.filesdir, fn), 'w') as file:
+ with open(pjoin(self.filesdir, fn), "w") as file:
file.write(contents)
return misc.FakeFilesDirPkg(pkg, repo=self.repo)
@@ -64,24 +65,30 @@ class TestDuplicateFiles(PkgDirCheckBase):
"""Check DuplicateFiles results."""
def test_unique_files(self):
- self.assertNoReport(self.mk_check(), [self.mk_pkg({'test': 'abc', 'test2': 'bcd'})])
+ self.assertNoReport(self.mk_check(), [self.mk_pkg({"test": "abc", "test2": "bcd"})])
def test_single_duplicate(self):
- pkg = self.mk_pkg({'test': 'abc', 'test2': 'abc'})
+ pkg = self.mk_pkg({"test": "abc", "test2": "abc"})
r = self.assertReport(self.mk_check(), [pkg])
assert isinstance(r, pkgdir.DuplicateFiles)
- assert r.files == ('files/test', 'files/test2')
+ assert r.files == ("files/test", "files/test2")
assert "'files/test', 'files/test2'" in str(r)
def test_multiple_duplicates(self):
- r = self.assertReports(self.mk_check(), [self.mk_pkg(
- {'test': 'abc', 'test2': 'abc', 'test3': 'bcd', 'test4': 'bcd', 'test5': 'zzz'})])
+ r = self.assertReports(
+ self.mk_check(),
+ [
+ self.mk_pkg(
+ {"test": "abc", "test2": "abc", "test3": "bcd", "test4": "bcd", "test5": "zzz"}
+ )
+ ],
+ )
assert len(r) == 2
assert isinstance(r[0], pkgdir.DuplicateFiles)
assert isinstance(r[1], pkgdir.DuplicateFiles)
- assert (
- tuple(sorted(x.files for x in r)) ==
- (('files/test', 'files/test2'), ('files/test3', 'files/test4'))
+ assert tuple(sorted(x.files for x in r)) == (
+ ("files/test", "files/test2"),
+ ("files/test3", "files/test4"),
)
@@ -89,29 +96,29 @@ class TestEmptyFile(PkgDirCheckBase):
"""Check EmptyFile results."""
def test_nonempty_file(self):
- self.assertNoReport(self.mk_check(), [self.mk_pkg({'test': 'asdfgh'})])
+ self.assertNoReport(self.mk_check(), [self.mk_pkg({"test": "asdfgh"})])
def test_single_empty_file(self):
assert isinstance(
- self.assertReport(self.mk_check(), [self.mk_pkg({'test': ''})]),
- pkgdir.EmptyFile)
+ self.assertReport(self.mk_check(), [self.mk_pkg({"test": ""})]), pkgdir.EmptyFile
+ )
def test_multiple_empty_files(self):
- r = self.assertReports(self.mk_check(), [self.mk_pkg({'test': '', 'test2': ''})])
+ r = self.assertReports(self.mk_check(), [self.mk_pkg({"test": "", "test2": ""})])
assert len(r) == 2
assert isinstance(r[0], pkgdir.EmptyFile)
assert isinstance(r[1], pkgdir.EmptyFile)
- assert sorted(x.filename for x in r) == ['files/test', 'files/test2']
+ assert sorted(x.filename for x in r) == ["files/test", "files/test2"]
def test_mixture_of_files(self):
- r = self.assertReport(self.mk_check(), [self.mk_pkg({'test': 'asdfgh', 'test2': ''})])
+ r = self.assertReport(self.mk_check(), [self.mk_pkg({"test": "asdfgh", "test2": ""})])
assert isinstance(r, pkgdir.EmptyFile)
- assert r.filename == 'files/test2'
- assert 'files/test2' in str(r)
- r = self.assertReport(self.mk_check(), [self.mk_pkg({'test': '', 'test2': 'asdfgh'})])
+ assert r.filename == "files/test2"
+ assert "files/test2" in str(r)
+ r = self.assertReport(self.mk_check(), [self.mk_pkg({"test": "", "test2": "asdfgh"})])
assert isinstance(r, pkgdir.EmptyFile)
- assert r.filename == 'files/test'
- assert 'files/test' in str(r)
+ assert r.filename == "files/test"
+ assert "files/test" in str(r)
class TestMismatchedPN(PkgDirCheckBase):
@@ -119,29 +126,29 @@ class TestMismatchedPN(PkgDirCheckBase):
def test_multiple_regular_ebuilds(self):
pkg = self.mk_pkg()
- touch(pjoin(os.path.dirname(pkg.path), f'{pkg.package}-0.ebuild'))
- touch(pjoin(os.path.dirname(pkg.path), f'{pkg.package}-1.ebuild'))
- touch(pjoin(os.path.dirname(pkg.path), f'{pkg.package}-2.ebuild'))
+ touch(pjoin(os.path.dirname(pkg.path), f"{pkg.package}-0.ebuild"))
+ touch(pjoin(os.path.dirname(pkg.path), f"{pkg.package}-1.ebuild"))
+ touch(pjoin(os.path.dirname(pkg.path), f"{pkg.package}-2.ebuild"))
self.assertNoReport(self.mk_check(), [pkg])
def test_single_mismatched_ebuild(self):
pkg = self.mk_pkg()
- touch(pjoin(os.path.dirname(pkg.path), 'mismatched-0.ebuild'))
+ touch(pjoin(os.path.dirname(pkg.path), "mismatched-0.ebuild"))
r = self.assertReport(self.mk_check(), [pkg])
assert isinstance(r, pkgdir.MismatchedPN)
- assert r.ebuilds == ('mismatched-0',)
- assert 'mismatched-0' in str(r)
+ assert r.ebuilds == ("mismatched-0",)
+ assert "mismatched-0" in str(r)
def test_multiple_mismatched_ebuilds(self):
pkg = self.mk_pkg()
- touch(pjoin(os.path.dirname(pkg.path), f'{pkg.package}-0.ebuild'))
- touch(pjoin(os.path.dirname(pkg.path), f'{pkg.package}-1.ebuild'))
- touch(pjoin(os.path.dirname(pkg.path), 'mismatched-0.ebuild'))
- touch(pjoin(os.path.dirname(pkg.path), 'abc-1.ebuild'))
+ touch(pjoin(os.path.dirname(pkg.path), f"{pkg.package}-0.ebuild"))
+ touch(pjoin(os.path.dirname(pkg.path), f"{pkg.package}-1.ebuild"))
+ touch(pjoin(os.path.dirname(pkg.path), "mismatched-0.ebuild"))
+ touch(pjoin(os.path.dirname(pkg.path), "abc-1.ebuild"))
r = self.assertReport(self.mk_check(), [pkg])
assert isinstance(r, pkgdir.MismatchedPN)
- assert r.ebuilds == ('abc-1', 'mismatched-0')
- assert 'abc-1, mismatched-0' in str(r)
+ assert r.ebuilds == ("abc-1", "mismatched-0")
+ assert "abc-1, mismatched-0" in str(r)
class TestInvalidPN(PkgDirCheckBase):
@@ -149,27 +156,27 @@ class TestInvalidPN(PkgDirCheckBase):
def test_regular_ebuild(self):
pkg = self.mk_pkg()
- touch(pjoin(os.path.dirname(pkg.path), f'{pkg.package}-0.ebuild'))
+ touch(pjoin(os.path.dirname(pkg.path), f"{pkg.package}-0.ebuild"))
self.assertNoReport(self.mk_check(), [pkg])
def test_single_invalid_ebuild(self):
- pkg = self.mk_pkg(category='sys-apps', package='invalid')
- touch(pjoin(os.path.dirname(pkg.path), 'invalid-0-foo.ebuild'))
+ pkg = self.mk_pkg(category="sys-apps", package="invalid")
+ touch(pjoin(os.path.dirname(pkg.path), "invalid-0-foo.ebuild"))
r = self.assertReport(self.mk_check(), [pkg])
assert isinstance(r, pkgdir.InvalidPN)
- assert r.ebuilds == ('invalid-0-foo',)
- assert 'invalid-0-foo' in str(r)
+ assert r.ebuilds == ("invalid-0-foo",)
+ assert "invalid-0-foo" in str(r)
def test_multiple_invalid_ebuilds(self):
- pkg = self.mk_pkg(category='sys-apps', package='bar')
- touch(pjoin(os.path.dirname(pkg.path), 'bar-0.ebuild'))
- touch(pjoin(os.path.dirname(pkg.path), 'bar-1.ebuild'))
- touch(pjoin(os.path.dirname(pkg.path), 'bar-0-foo1.ebuild'))
- touch(pjoin(os.path.dirname(pkg.path), 'bar-1-foo2.ebuild'))
+ pkg = self.mk_pkg(category="sys-apps", package="bar")
+ touch(pjoin(os.path.dirname(pkg.path), "bar-0.ebuild"))
+ touch(pjoin(os.path.dirname(pkg.path), "bar-1.ebuild"))
+ touch(pjoin(os.path.dirname(pkg.path), "bar-0-foo1.ebuild"))
+ touch(pjoin(os.path.dirname(pkg.path), "bar-1-foo2.ebuild"))
r = self.assertReport(self.mk_check(), [pkg])
assert isinstance(r, pkgdir.InvalidPN)
- assert r.ebuilds == ('bar-0-foo1', 'bar-1-foo2')
- assert 'bar-0-foo1, bar-1-foo2' in str(r)
+ assert r.ebuilds == ("bar-0-foo1", "bar-1-foo2")
+ assert "bar-0-foo1, bar-1-foo2" in str(r)
class TestInvalidUTF8(PkgDirCheckBase):
@@ -177,26 +184,26 @@ class TestInvalidUTF8(PkgDirCheckBase):
def test_ascii_ebuild(self):
pkg = self.mk_pkg()
- ebuild_path = pjoin(os.path.dirname(pkg.path), f'{pkg.package}-0.ebuild')
- with open(ebuild_path, 'w', encoding='ascii') as f:
+ ebuild_path = pjoin(os.path.dirname(pkg.path), f"{pkg.package}-0.ebuild")
+ with open(ebuild_path, "w", encoding="ascii") as f:
f.write('EAPI=7\nDESCRIPTION="foobar"\n')
self.assertNoReport(self.mk_check(), [pkg])
def test_utf8_ebuild(self):
pkg = self.mk_pkg()
- ebuild_path = pjoin(os.path.dirname(pkg.path), f'{pkg.package}-0.ebuild')
- with open(ebuild_path, 'w') as f:
+ ebuild_path = pjoin(os.path.dirname(pkg.path), f"{pkg.package}-0.ebuild")
+ with open(ebuild_path, "w") as f:
f.write('EAPI=6\nDESCRIPTION="fóóbár"\n')
self.assertNoReport(self.mk_check(), [pkg])
def test_latin1_ebuild(self):
pkg = self.mk_pkg()
- ebuild_path = pjoin(os.path.dirname(pkg.path), f'{pkg.package}-0.ebuild')
- with open(ebuild_path, 'w', encoding='latin-1') as f:
+ ebuild_path = pjoin(os.path.dirname(pkg.path), f"{pkg.package}-0.ebuild")
+ with open(ebuild_path, "w", encoding="latin-1") as f:
f.write('EAPI=5\nDESCRIPTION="fôòbår"\n')
r = self.assertReport(self.mk_check(), [pkg])
assert isinstance(r, pkgdir.InvalidUTF8)
- assert r.filename == f'{pkg.package}-0.ebuild'
+ assert r.filename == f"{pkg.package}-0.ebuild"
assert r.filename in str(r)
@@ -207,44 +214,48 @@ class TestEqualVersions(PkgDirCheckBase):
def test_it(self):
# pkg with no revision
- pkg_a = self.mk_pkg(version='0')
+ pkg_a = self.mk_pkg(version="0")
self.assertNoReport(self.mk_check(), [pkg_a])
# single, matching revision
pkg_b = self.mk_pkg(
- category=pkg_a.category, package=pkg_a.package, version='0', revision='-r0')
+ category=pkg_a.category, package=pkg_a.package, version="0", revision="-r0"
+ )
r = self.assertReport(self.mk_check(), [pkg_a, pkg_b])
assert isinstance(r, pkgdir.EqualVersions)
- assert r.versions == ('0', '0-r0')
- assert '[ 0, 0-r0 ]' in str(r)
+ assert r.versions == ("0", "0-r0")
+ assert "[ 0, 0-r0 ]" in str(r)
# multiple, matching revisions
pkg_c = self.mk_pkg(
- category=pkg_a.category, package=pkg_a.package, version='0', revision='-r000')
+ category=pkg_a.category, package=pkg_a.package, version="0", revision="-r000"
+ )
r = self.assertReport(self.mk_check(), [pkg_a, pkg_b, pkg_c])
assert isinstance(r, pkgdir.EqualVersions)
- assert r.versions == ('0', '0-r0', '0-r000')
- assert '[ 0, 0-r0, 0-r000 ]' in str(r)
+ assert r.versions == ("0", "0-r0", "0-r000")
+ assert "[ 0, 0-r0, 0-r000 ]" in str(r)
# unsorted, matching revisions
- pkg_new_version = self.mk_pkg(
- category=pkg_a.category, package=pkg_a.package, version='1')
+ pkg_new_version = self.mk_pkg(category=pkg_a.category, package=pkg_a.package, version="1")
r = self.assertReport(self.mk_check(), [pkg_b, pkg_new_version, pkg_c, pkg_a])
assert isinstance(r, pkgdir.EqualVersions)
- assert r.versions == ('0', '0-r0', '0-r000')
- assert '[ 0, 0-r0, 0-r000 ]' in str(r)
+ assert r.versions == ("0", "0-r0", "0-r000")
+ assert "[ 0, 0-r0, 0-r000 ]" in str(r)
# multiple, matching revisions with 0 prefixes
pkg_d = self.mk_pkg(
- category=pkg_a.category, package=pkg_a.package, version='0', revision='-r1')
+ category=pkg_a.category, package=pkg_a.package, version="0", revision="-r1"
+ )
pkg_e = self.mk_pkg(
- category=pkg_a.category, package=pkg_a.package, version='0', revision='-r01')
+ category=pkg_a.category, package=pkg_a.package, version="0", revision="-r01"
+ )
pkg_f = self.mk_pkg(
- category=pkg_a.category, package=pkg_a.package, version='0', revision='-r001')
+ category=pkg_a.category, package=pkg_a.package, version="0", revision="-r001"
+ )
r = self.assertReport(self.mk_check(), [pkg_d, pkg_e, pkg_f])
assert isinstance(r, pkgdir.EqualVersions)
- assert r.versions == ('0-r001', '0-r01', '0-r1')
- assert '[ 0-r001, 0-r01, 0-r1 ]' in str(r)
+ assert r.versions == ("0-r001", "0-r01", "0-r1")
+ assert "[ 0-r001, 0-r01, 0-r1 ]" in str(r)
class TestSizeViolation(PkgDirCheckBase):
@@ -252,50 +263,51 @@ class TestSizeViolation(PkgDirCheckBase):
def test_files_under_size_limit(self):
pkg = self.mk_pkg()
- for name, size in (('small', 1024*10),
- ('limit', 1024*20-1)):
- with open(pjoin(self.filesdir, name), 'w') as f:
+ for name, size in (("small", 1024 * 10), ("limit", 1024 * 20 - 1)):
+ with open(pjoin(self.filesdir, name), "w") as f:
f.seek(size)
- f.write('\0')
+ f.write("\0")
self.assertNoReport(self.mk_check(), [pkg])
def test_single_file_over_limit(self):
pkg = self.mk_pkg()
- with open(pjoin(self.filesdir, 'over'), 'w') as f:
- f.seek(1024*20)
- f.write('\0')
+ with open(pjoin(self.filesdir, "over"), "w") as f:
+ f.seek(1024 * 20)
+ f.write("\0")
r = self.assertReport(self.mk_check(), [pkg])
assert isinstance(r, pkgdir.SizeViolation)
- assert r.filename == 'files/over'
- assert r.size == 1024*20+1
- assert 'files/over' in str(r)
+ assert r.filename == "files/over"
+ assert r.size == 1024 * 20 + 1
+ assert "files/over" in str(r)
def test_multiple_files_over_limit(self):
pkg = self.mk_pkg()
- for name, size in (('small', 1024*10),
- ('limit', 1024*20-1),
- ('over', 1024*20),
- ('massive', 1024*100)):
- with open(pjoin(self.filesdir, name), 'w') as f:
+ for name, size in (
+ ("small", 1024 * 10),
+ ("limit", 1024 * 20 - 1),
+ ("over", 1024 * 20),
+ ("massive", 1024 * 100),
+ ):
+ with open(pjoin(self.filesdir, name), "w") as f:
f.seek(size)
- f.write('\0')
+ f.write("\0")
r = self.assertReports(self.mk_check(), [pkg])
assert len(r) == 3
assert isinstance(r[0], pkgdir.SizeViolation)
assert isinstance(r[1], pkgdir.SizeViolation)
assert isinstance(r[2], pkgdir.TotalSizeViolation)
- assert (
- tuple(sorted((x.filename, x.size) for x in r[:2])) ==
- (('files/massive', 1024*100+1), ('files/over', 1024*20+1))
+ assert tuple(sorted((x.filename, x.size) for x in r[:2])) == (
+ ("files/massive", 1024 * 100 + 1),
+ ("files/over", 1024 * 20 + 1),
)
- assert r[2].size == 1024*(10+20+20+100)+4-1
+ assert r[2].size == 1024 * (10 + 20 + 20 + 100) + 4 - 1
class TestExecutableFile(PkgDirCheckBase):
"""Check ExecutableFile results."""
def test_non_empty_filesdir(self):
- self.assertNoReport(self.mk_check(), [self.mk_pkg({'test': 'asdfgh'})])
+ self.assertNoReport(self.mk_check(), [self.mk_pkg({"test": "asdfgh"})])
def test_executable_ebuild(self):
pkg = self.mk_pkg()
@@ -307,54 +319,53 @@ class TestExecutableFile(PkgDirCheckBase):
def test_executable_manifest_and_metadata(self):
pkg = self.mk_pkg()
- touch(pjoin(os.path.dirname(pkg.path), 'Manifest'), mode=0o755)
- touch(pjoin(os.path.dirname(pkg.path), 'metadata.xml'), mode=0o744)
+ touch(pjoin(os.path.dirname(pkg.path), "Manifest"), mode=0o755)
+ touch(pjoin(os.path.dirname(pkg.path), "metadata.xml"), mode=0o744)
r = self.assertReports(self.mk_check(), [pkg])
assert len(r) == 2
assert isinstance(r[0], pkgdir.ExecutableFile)
assert isinstance(r[1], pkgdir.ExecutableFile)
- assert (
- tuple(sorted(x.filename for x in r)) ==
- ('Manifest', 'metadata.xml')
- )
+ assert tuple(sorted(x.filename for x in r)) == ("Manifest", "metadata.xml")
def test_executable_filesdir_file(self):
- pkg = self.mk_pkg({'foo.init': 'blah'})
+ pkg = self.mk_pkg({"foo.init": "blah"})
touch(pkg.path)
- touch(pjoin(os.path.dirname(pkg.path), 'Manifest'))
- touch(pjoin(os.path.dirname(pkg.path), 'metadata.xml'))
- os.chmod(pjoin(os.path.dirname(pkg.path), 'files', 'foo.init'), 0o645)
+ touch(pjoin(os.path.dirname(pkg.path), "Manifest"))
+ touch(pjoin(os.path.dirname(pkg.path), "metadata.xml"))
+ os.chmod(pjoin(os.path.dirname(pkg.path), "files", "foo.init"), 0o645)
r = self.assertReport(self.mk_check(), [pkg])
assert isinstance(r, pkgdir.ExecutableFile)
- assert r.filename == 'files/foo.init'
- assert 'files/foo.init' in str(r)
+ assert r.filename == "files/foo.init"
+ assert "files/foo.init" in str(r)
class TestBannedCharacter(PkgDirCheckBase):
"""Check BannedCharacter results."""
def test_regular_files(self):
- pkg = self.mk_pkg({'foo.init': 'blah'})
- touch(pjoin(os.path.dirname(pkg.path), 'Manifest'))
- touch(pjoin(os.path.dirname(pkg.path), 'metadata.xml'))
+ pkg = self.mk_pkg({"foo.init": "blah"})
+ touch(pjoin(os.path.dirname(pkg.path), "Manifest"))
+ touch(pjoin(os.path.dirname(pkg.path), "metadata.xml"))
self.assertNoReport(self.mk_check(), [pkg])
def test_filenames_outside_allowed_charsets(self):
- pkg = self.mk_pkg({
- 'foo.init': 'bar',
- 'foo.init~': 'foo',
- })
+ pkg = self.mk_pkg(
+ {
+ "foo.init": "bar",
+ "foo.init~": "foo",
+ }
+ )
# vim backup files are flagged by default
r = self.assertReport(self.mk_check(), [pkg])
assert isinstance(r, pkgdir.BannedCharacter)
- assert 'files/foo.init~' in str(r)
+ assert "files/foo.init~" in str(r)
# but results are suppressed if a matching git ignore entry exists
- for ignore_file in ('.gitignore', '.git/info/exclude'):
+ for ignore_file in (".gitignore", ".git/info/exclude"):
path = pjoin(self.repo.location, ignore_file)
ensure_dirs(os.path.dirname(path))
- with open(path, 'w') as f:
- f.write('*~')
+ with open(path, "w") as f:
+ f.write("*~")
self.assertNoReport(self.mk_check(), [pkg])
os.unlink(path)
@@ -363,40 +374,40 @@ class TestUnknownPkgDirEntry(PkgDirCheckBase):
"""Check UnknownPkgDirEntry results."""
def test_regular_files(self):
- pkg = self.mk_pkg({'foo.init': 'blah'})
- touch(pjoin(os.path.dirname(pkg.path), 'Manifest'))
- touch(pjoin(os.path.dirname(pkg.path), 'metadata.xml'))
+ pkg = self.mk_pkg({"foo.init": "blah"})
+ touch(pjoin(os.path.dirname(pkg.path), "Manifest"))
+ touch(pjoin(os.path.dirname(pkg.path), "metadata.xml"))
self.assertNoReport(self.mk_check(), [pkg])
def test_unknown_non_gentoo_repo(self):
- pkg = self.mk_pkg({'foo.init': 'blah'})
- touch(pjoin(os.path.dirname(pkg.path), 'Manifest'))
- touch(pjoin(os.path.dirname(pkg.path), 'metadata.xml'))
- touch(pjoin(os.path.dirname(pkg.path), 'foo-2'))
+ pkg = self.mk_pkg({"foo.init": "blah"})
+ touch(pjoin(os.path.dirname(pkg.path), "Manifest"))
+ touch(pjoin(os.path.dirname(pkg.path), "metadata.xml"))
+ touch(pjoin(os.path.dirname(pkg.path), "foo-2"))
self.assertNoReport(self.mk_check(), [pkg])
def test_unknown_gentoo_repo(self):
- pkg = self.mk_pkg({'foo.init': 'blah'})
- touch(pjoin(os.path.dirname(pkg.path), 'Manifest'))
- touch(pjoin(os.path.dirname(pkg.path), 'metadata.xml'))
- touch(pjoin(os.path.dirname(pkg.path), 'foo-2'))
+ pkg = self.mk_pkg({"foo.init": "blah"})
+ touch(pjoin(os.path.dirname(pkg.path), "Manifest"))
+ touch(pjoin(os.path.dirname(pkg.path), "metadata.xml"))
+ touch(pjoin(os.path.dirname(pkg.path), "foo-2"))
r = self.assertReport(self.mk_check(gentoo=True), [pkg])
assert isinstance(r, pkgdir.UnknownPkgDirEntry)
- assert 'foo-2' in str(r)
+ assert "foo-2" in str(r)
def test_unknown_gitignore(self):
- pkg = self.mk_pkg(files={'foo.init': 'blah'}, category='dev-util', package='foo')
- touch(pjoin(os.path.dirname(pkg.path), 'Manifest'))
- touch(pjoin(os.path.dirname(pkg.path), 'metadata.xml'))
- touch(pjoin(os.path.dirname(pkg.path), 'foo-0.ebuild'))
- touch(pjoin(os.path.dirname(pkg.path), 'foo-0.ebuild.swp'))
+ pkg = self.mk_pkg(files={"foo.init": "blah"}, category="dev-util", package="foo")
+ touch(pjoin(os.path.dirname(pkg.path), "Manifest"))
+ touch(pjoin(os.path.dirname(pkg.path), "metadata.xml"))
+ touch(pjoin(os.path.dirname(pkg.path), "foo-0.ebuild"))
+ touch(pjoin(os.path.dirname(pkg.path), "foo-0.ebuild.swp"))
r = self.assertReport(self.mk_check(gentoo=True), [pkg])
assert isinstance(r, pkgdir.UnknownPkgDirEntry)
- assert 'foo-0.ebuild.swp' in str(r)
+ assert "foo-0.ebuild.swp" in str(r)
# results are suppressed if a matching .gitignore entry exists
- with open(pjoin(self.repo.location, '.gitignore'), 'w') as f:
- f.write('*.swp')
+ with open(pjoin(self.repo.location, ".gitignore"), "w") as f:
+ f.write("*.swp")
self.assertNoReport(self.mk_check(gentoo=True), [pkg])
@@ -411,17 +422,17 @@ class TestLiveOnlyCheck(misc.ReportTestCase):
# initialize parent repo
self.parent_git_repo = make_git_repo()
- self.parent_repo = make_repo(self.parent_git_repo.path, repo_id='gentoo')
- self.parent_git_repo.add_all('initial commit')
+ self.parent_repo = make_repo(self.parent_git_repo.path, repo_id="gentoo")
+ self.parent_git_repo.add_all("initial commit")
# create a stub pkg and commit it
- self.parent_repo.create_ebuild('cat/pkg-0', properties='live')
- self.parent_git_repo.add_all('cat/pkg-0')
+ self.parent_repo.create_ebuild("cat/pkg-0", properties="live")
+ self.parent_git_repo.add_all("cat/pkg-0")
# initialize child repo
self.child_git_repo = make_git_repo()
- self.child_git_repo.run(['git', 'remote', 'add', 'origin', self.parent_git_repo.path])
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
- self.child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
+ self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
self.child_repo = make_repo(self.child_git_repo.path)
def init_check(self, options=None, future=0):
@@ -434,45 +445,49 @@ class TestLiveOnlyCheck(misc.ReportTestCase):
def _options(self, **kwargs):
args = [
- 'scan', '-q', '--cache-dir', self.cache_dir,
- '--repo', self.child_repo.location,
+ "scan",
+ "-q",
+ "--cache-dir",
+ self.cache_dir,
+ "--repo",
+ self.child_repo.location,
]
options, _ = self._tool.parse_args(args)
return options
def test_no_git_support(self):
options = self._options()
- options.cache['git'] = False
- with pytest.raises(SkipCheck, match='git cache support required'):
+ options.cache["git"] = False
+ with pytest.raises(SkipCheck, match="git cache support required"):
self.init_check(options)
def test_keywords_exist(self):
- self.parent_repo.create_ebuild('cat/pkg-1', keywords=['~amd64'])
- self.parent_git_repo.add_all('cat/pkg-1')
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
+ self.parent_repo.create_ebuild("cat/pkg-1", keywords=["~amd64"])
+ self.parent_git_repo.add_all("cat/pkg-1")
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
self.init_check()
self.assertNoReport(self.check, self.source)
def test_all_live_pkgs(self):
- self.parent_repo.create_ebuild('cat/pkg-1', properties='live')
- self.parent_git_repo.add_all('cat/pkg-1')
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
+ self.parent_repo.create_ebuild("cat/pkg-1", properties="live")
+ self.parent_git_repo.add_all("cat/pkg-1")
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
self.init_check()
# result will trigger for any package age
- expected = pkgdir.LiveOnlyPackage(0, pkg=UnversionedCPV('cat/pkg'))
+ expected = pkgdir.LiveOnlyPackage(0, pkg=UnversionedCPV("cat/pkg"))
r = self.assertReport(self.check, self.source)
assert r == expected
# packages are now a year old
self.init_check(future=365)
- expected = pkgdir.LiveOnlyPackage(365, pkg=UnversionedCPV('cat/pkg'))
+ expected = pkgdir.LiveOnlyPackage(365, pkg=UnversionedCPV("cat/pkg"))
r = self.assertReport(self.check, self.source)
assert r == expected
def test_uncommitted_local_ebuild(self):
- self.parent_repo.create_ebuild('cat/pkg-1', properties='live')
- self.parent_git_repo.add_all('cat/pkg-1')
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
- self.child_repo.create_ebuild('cat/pkg-2', properties='live')
+ self.parent_repo.create_ebuild("cat/pkg-1", properties="live")
+ self.parent_git_repo.add_all("cat/pkg-1")
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
+ self.child_repo.create_ebuild("cat/pkg-2", properties="live")
self.init_check(future=180)
self.assertNoReport(self.check, self.source)
diff --git a/tests/checks/test_python.py b/tests/checks/test_python.py
index 843975b5..eb4c44fb 100644
--- a/tests/checks/test_python.py
+++ b/tests/checks/test_python.py
@@ -9,180 +9,201 @@ class TestPythonCheck(misc.ReportTestCase):
check_kls = python.PythonCheck
def mk_pkg(self, cpv="app-foo/bar-1", **kwargs):
- kwargs.setdefault('EAPI', '7')
+ kwargs.setdefault("EAPI", "7")
return misc.FakePkg(cpv, data=kwargs)
def test_multiple_eclasses(self):
r = self.assertReport(
self.check,
- self.mk_pkg(_eclasses_=['python-any-r1', 'python-single-r1'],
- DEPEND='dev-lang/python'))
+ self.mk_pkg(_eclasses_=["python-any-r1", "python-single-r1"], DEPEND="dev-lang/python"),
+ )
assert isinstance(r, python.PythonEclassError)
def test_missing_eclass_depend(self):
self.assertNoReport(
- self.check,
- self.mk_pkg(_eclasses_=['python-any-r1'], DEPEND='dev-lang/python'))
- self.assertNoReport(self.check, self.mk_pkg(DEPEND='dev-foo/frobnicate'))
+ self.check, self.mk_pkg(_eclasses_=["python-any-r1"], DEPEND="dev-lang/python")
+ )
+ self.assertNoReport(self.check, self.mk_pkg(DEPEND="dev-foo/frobnicate"))
- r = self.assertReport(self.check, self.mk_pkg(DEPEND='dev-lang/python'))
+ r = self.assertReport(self.check, self.mk_pkg(DEPEND="dev-lang/python"))
assert isinstance(r, python.MissingPythonEclass)
assert 'missing python-any-r1 eclass usage for DEPEND="dev-lang/python"' in str(r)
- self.assertNoReport(self.check, self.mk_pkg(DEPEND='dev-lang/python:2.7'))
+ self.assertNoReport(self.check, self.mk_pkg(DEPEND="dev-lang/python:2.7"))
assert isinstance(
- self.assertReport(self.check, self.mk_pkg(DEPEND='dev-lang/python:*')),
- python.MissingPythonEclass)
+ self.assertReport(self.check, self.mk_pkg(DEPEND="dev-lang/python:*")),
+ python.MissingPythonEclass,
+ )
assert isinstance(
- self.assertReport(self.check, self.mk_pkg(DEPEND='=dev-lang/python-2*')),
- python.MissingPythonEclass)
+ self.assertReport(self.check, self.mk_pkg(DEPEND="=dev-lang/python-2*")),
+ python.MissingPythonEclass,
+ )
assert isinstance(
self.assertReport(
- self.check,
- self.mk_pkg(DEPEND='|| ( dev-lang/python:2.7 dev-lang/python:3.6 )')),
- python.MissingPythonEclass)
+ self.check, self.mk_pkg(DEPEND="|| ( dev-lang/python:2.7 dev-lang/python:3.6 )")
+ ),
+ python.MissingPythonEclass,
+ )
def test_missing_eclass_bdepend(self):
self.assertNoReport(
- self.check,
- self.mk_pkg(_eclasses_=['python-any-r1'], BDEPEND='dev-lang/python'))
- self.assertNoReport(self.check, self.mk_pkg(BDEPEND='dev-foo/frobnicate'))
+ self.check, self.mk_pkg(_eclasses_=["python-any-r1"], BDEPEND="dev-lang/python")
+ )
+ self.assertNoReport(self.check, self.mk_pkg(BDEPEND="dev-foo/frobnicate"))
assert isinstance(
- self.assertReport(self.check, self.mk_pkg(BDEPEND='dev-lang/python')),
- python.MissingPythonEclass)
- self.assertNoReport(self.check, self.mk_pkg(BDEPEND='dev-lang/python:2.7'))
+ self.assertReport(self.check, self.mk_pkg(BDEPEND="dev-lang/python")),
+ python.MissingPythonEclass,
+ )
+ self.assertNoReport(self.check, self.mk_pkg(BDEPEND="dev-lang/python:2.7"))
assert isinstance(
- self.assertReport(self.check, self.mk_pkg(BDEPEND='dev-lang/python:*')),
- python.MissingPythonEclass)
+ self.assertReport(self.check, self.mk_pkg(BDEPEND="dev-lang/python:*")),
+ python.MissingPythonEclass,
+ )
assert isinstance(
- self.assertReport(self.check, self.mk_pkg(BDEPEND='=dev-lang/python-2*')),
- python.MissingPythonEclass)
+ self.assertReport(self.check, self.mk_pkg(BDEPEND="=dev-lang/python-2*")),
+ python.MissingPythonEclass,
+ )
assert isinstance(
self.assertReport(
- self.check,
- self.mk_pkg(BDEPEND='|| ( dev-lang/python:2.7 dev-lang/python:3.6 )')),
- python.MissingPythonEclass)
+ self.check, self.mk_pkg(BDEPEND="|| ( dev-lang/python:2.7 dev-lang/python:3.6 )")
+ ),
+ python.MissingPythonEclass,
+ )
def test_missing_eclass_rdepend(self):
self.assertNoReport(
- self.check,
- self.mk_pkg(_eclasses_=['python-r1'], RDEPEND='dev-lang/python:3.7'))
+ self.check, self.mk_pkg(_eclasses_=["python-r1"], RDEPEND="dev-lang/python:3.7")
+ )
self.assertNoReport(
- self.check,
- self.mk_pkg(_eclasses_=['python-single-r1'], RDEPEND='dev-lang/python:3.7'))
- self.assertNoReport(self.check, self.mk_pkg(RDEPEND='dev-foo/frobnicate'))
+ self.check, self.mk_pkg(_eclasses_=["python-single-r1"], RDEPEND="dev-lang/python:3.7")
+ )
+ self.assertNoReport(self.check, self.mk_pkg(RDEPEND="dev-foo/frobnicate"))
- r = self.assertReport(self.check, self.mk_pkg(RDEPEND='dev-lang/python'))
+ r = self.assertReport(self.check, self.mk_pkg(RDEPEND="dev-lang/python"))
assert isinstance(r, python.MissingPythonEclass)
- assert 'missing python-r1 or python-single-r1 eclass' in str(r)
+ assert "missing python-r1 or python-single-r1 eclass" in str(r)
- self.assertNoReport(self.check, self.mk_pkg(RDEPEND='dev-lang/python:2.7'))
+ self.assertNoReport(self.check, self.mk_pkg(RDEPEND="dev-lang/python:2.7"))
assert isinstance(
- self.assertReport(self.check, self.mk_pkg(RDEPEND='dev-lang/python:=')),
- python.MissingPythonEclass)
+ self.assertReport(self.check, self.mk_pkg(RDEPEND="dev-lang/python:=")),
+ python.MissingPythonEclass,
+ )
assert isinstance(
- self.assertReport(self.check, self.mk_pkg(RDEPEND='=dev-lang/python-2*')),
- python.MissingPythonEclass)
+ self.assertReport(self.check, self.mk_pkg(RDEPEND="=dev-lang/python-2*")),
+ python.MissingPythonEclass,
+ )
assert isinstance(
self.assertReport(
- self.check,
- self.mk_pkg(RDEPEND='|| ( dev-lang/python:2.7 dev-lang/python:3.6 )')),
- python.MissingPythonEclass)
+ self.check, self.mk_pkg(RDEPEND="|| ( dev-lang/python:2.7 dev-lang/python:3.6 )")
+ ),
+ python.MissingPythonEclass,
+ )
def test_missing_eclass_pdepend(self):
self.assertNoReport(
- self.check,
- self.mk_pkg(_eclasses_=['python-r1'], PDEPEND='dev-lang/python:3.7'))
+ self.check, self.mk_pkg(_eclasses_=["python-r1"], PDEPEND="dev-lang/python:3.7")
+ )
self.assertNoReport(
- self.check,
- self.mk_pkg(_eclasses_=['python-single-r1'], PDEPEND='dev-lang/python:3.7'))
- self.assertNoReport(self.check, self.mk_pkg(PDEPEND='dev-foo/frobnicate'))
+ self.check, self.mk_pkg(_eclasses_=["python-single-r1"], PDEPEND="dev-lang/python:3.7")
+ )
+ self.assertNoReport(self.check, self.mk_pkg(PDEPEND="dev-foo/frobnicate"))
assert isinstance(
- self.assertReport(self.check, self.mk_pkg(PDEPEND='dev-lang/python')),
- python.MissingPythonEclass)
- self.assertNoReport(self.check, self.mk_pkg(PDEPEND='dev-lang/python:2.7'))
+ self.assertReport(self.check, self.mk_pkg(PDEPEND="dev-lang/python")),
+ python.MissingPythonEclass,
+ )
+ self.assertNoReport(self.check, self.mk_pkg(PDEPEND="dev-lang/python:2.7"))
assert isinstance(
- self.assertReport(self.check, self.mk_pkg(PDEPEND='dev-lang/python:=')),
- python.MissingPythonEclass)
+ self.assertReport(self.check, self.mk_pkg(PDEPEND="dev-lang/python:=")),
+ python.MissingPythonEclass,
+ )
assert isinstance(
- self.assertReport(self.check, self.mk_pkg(PDEPEND='=dev-lang/python-2*')),
- python.MissingPythonEclass)
+ self.assertReport(self.check, self.mk_pkg(PDEPEND="=dev-lang/python-2*")),
+ python.MissingPythonEclass,
+ )
assert isinstance(
self.assertReport(
- self.check,
- self.mk_pkg(PDEPEND='|| ( dev-lang/python:2.7 dev-lang/python:3.6 )')),
- python.MissingPythonEclass)
+ self.check, self.mk_pkg(PDEPEND="|| ( dev-lang/python:2.7 dev-lang/python:3.6 )")
+ ),
+ python.MissingPythonEclass,
+ )
def test_valid_packages(self):
self.assertNoReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6',
- RDEPEND='python_targets_python3_5? ( '
- ' dev-lang/python:3.5 ) '
- 'python_targets_python3_6? ( '
- ' dev-lang/python:3.6 )',
- REQUIRED_USE='|| ( python_targets_python3_5 '
- ' python_targets_python3_6 )'))
+ _eclasses_=["python-r1"],
+ IUSE="python_targets_python3_5 " "python_targets_python3_6",
+ RDEPEND="python_targets_python3_5? ( "
+ " dev-lang/python:3.5 ) "
+ "python_targets_python3_6? ( "
+ " dev-lang/python:3.6 )",
+ REQUIRED_USE="|| ( python_targets_python3_5 " " python_targets_python3_6 )",
+ ),
+ )
# python-single-r1 with one implementation does not use PST
self.assertNoReport(
self.check,
- self.mk_pkg(_eclasses_=['python-single-r1'],
- IUSE='python_targets_python3_5',
- RDEPEND='python_targets_python3_5? ( '
- ' dev-lang/python:3.5 )',
- REQUIRED_USE='python_targets_python3_5'))
+ self.mk_pkg(
+ _eclasses_=["python-single-r1"],
+ IUSE="python_targets_python3_5",
+ RDEPEND="python_targets_python3_5? ( " " dev-lang/python:3.5 )",
+ REQUIRED_USE="python_targets_python3_5",
+ ),
+ )
self.assertNoReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-single-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6 '
- 'python_single_target_python3_5 '
- 'python_single_target_python3_6',
- RDEPEND='python_single_target_python3_5? ( '
- ' dev-lang/python:3.5 ) '
- 'python_single_target_python3_6? ( '
- ' dev-lang/python:3.6 )',
- REQUIRED_USE='^^ ( python_single_target_python3_5 '
- ' python_single_target_python3_6 ) '
- 'python_single_target_python3_5? ( '
- ' python_targets_python3_5 ) '
- 'python_single_target_python3_6? ( '
- ' python_targets_python3_6 )'))
+ _eclasses_=["python-single-r1"],
+ IUSE="python_targets_python3_5 "
+ "python_targets_python3_6 "
+ "python_single_target_python3_5 "
+ "python_single_target_python3_6",
+ RDEPEND="python_single_target_python3_5? ( "
+ " dev-lang/python:3.5 ) "
+ "python_single_target_python3_6? ( "
+ " dev-lang/python:3.6 )",
+ REQUIRED_USE="^^ ( python_single_target_python3_5 "
+ " python_single_target_python3_6 ) "
+ "python_single_target_python3_5? ( "
+ " python_targets_python3_5 ) "
+ "python_single_target_python3_6? ( "
+ " python_targets_python3_6 )",
+ ),
+ )
self.assertNoReport(
self.check,
- self.mk_pkg(_eclasses_=['python-any-r1'],
- DEPEND='|| ( '
- ' dev-lang/python:3.5 '
- ' dev-lang/python:3.6 )'))
+ self.mk_pkg(
+ _eclasses_=["python-any-r1"],
+ DEPEND="|| ( " " dev-lang/python:3.5 " " dev-lang/python:3.6 )",
+ ),
+ )
self.assertNoReport(
- self.check,
- self.mk_pkg(_eclasses_=['python-any-r1'], DEPEND='dev-lang/python:3.5'))
+ self.check, self.mk_pkg(_eclasses_=["python-any-r1"], DEPEND="dev-lang/python:3.5")
+ )
self.assertNoReport(
self.check,
- self.mk_pkg(_eclasses_=['python-any-r1'],
- BDEPEND='|| ( '
- ' dev-lang/python:3.5 '
- ' dev-lang/python:3.6 )'))
+ self.mk_pkg(
+ _eclasses_=["python-any-r1"],
+ BDEPEND="|| ( " " dev-lang/python:3.5 " " dev-lang/python:3.6 )",
+ ),
+ )
def test_missing_required_use(self):
r = self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6',
- RDEPEND='python_targets_python3_5? ( '
- ' dev-lang/python:3.5 ) '
- 'python_targets_python3_6? ( '
- ' dev-lang/python:3.6 )'))
+ _eclasses_=["python-r1"],
+ IUSE="python_targets_python3_5 " "python_targets_python3_6",
+ RDEPEND="python_targets_python3_5? ( "
+ " dev-lang/python:3.5 ) "
+ "python_targets_python3_6? ( "
+ " dev-lang/python:3.6 )",
+ ),
+ )
assert isinstance(r, python.PythonMissingRequiredUse)
assert 'missing REQUIRED_USE="${PYTHON_REQUIRED_USE}"' in str(r)
@@ -191,93 +212,105 @@ class TestPythonCheck(misc.ReportTestCase):
self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6',
- RDEPEND='python_targets_python3_5? ( '
- ' dev-lang/python:3.5 ) '
- 'python_targets_python3_6? ( '
- ' dev-lang/python:3.6 )',
- REQUIRED_USE='|| ( python_targets_python3_5 )')),
- python.PythonMissingRequiredUse)
+ _eclasses_=["python-r1"],
+ IUSE="python_targets_python3_5 " "python_targets_python3_6",
+ RDEPEND="python_targets_python3_5? ( "
+ " dev-lang/python:3.5 ) "
+ "python_targets_python3_6? ( "
+ " dev-lang/python:3.6 )",
+ REQUIRED_USE="|| ( python_targets_python3_5 )",
+ ),
+ ),
+ python.PythonMissingRequiredUse,
+ )
assert isinstance(
self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6 '
- 'python_targets_python3_7',
- RDEPEND='python_targets_python3_5? ( '
- ' dev-lang/python:3.5 ) '
- 'python_targets_python3_6? ( '
- ' dev-lang/python:3.6 ) '
- 'python_targets_python3_7? ( '
- ' dev-lang/python:3.7 )',
- REQUIRED_USE='|| ( python_targets_python3_6 '
- ' python_targets_python3_7 )')),
- python.PythonMissingRequiredUse)
+ _eclasses_=["python-r1"],
+ IUSE="python_targets_python3_5 "
+ "python_targets_python3_6 "
+ "python_targets_python3_7",
+ RDEPEND="python_targets_python3_5? ( "
+ " dev-lang/python:3.5 ) "
+ "python_targets_python3_6? ( "
+ " dev-lang/python:3.6 ) "
+ "python_targets_python3_7? ( "
+ " dev-lang/python:3.7 )",
+ REQUIRED_USE="|| ( python_targets_python3_6 " " python_targets_python3_7 )",
+ ),
+ ),
+ python.PythonMissingRequiredUse,
+ )
assert isinstance(
self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-single-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6 '
- 'python_single_target_python3_5 '
- 'python_single_target_python3_6',
- RDEPEND='python_single_target_python3_5? ( '
- ' dev-lang/python:3.5 ) '
- 'python_single_target_python3_6? ( '
- ' dev-lang/python:3.6 )')),
- python.PythonMissingRequiredUse)
+ _eclasses_=["python-single-r1"],
+ IUSE="python_targets_python3_5 "
+ "python_targets_python3_6 "
+ "python_single_target_python3_5 "
+ "python_single_target_python3_6",
+ RDEPEND="python_single_target_python3_5? ( "
+ " dev-lang/python:3.5 ) "
+ "python_single_target_python3_6? ( "
+ " dev-lang/python:3.6 )",
+ ),
+ ),
+ python.PythonMissingRequiredUse,
+ )
# incomplete REQUIRED_USE
assert isinstance(
self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-single-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6 '
- 'python_single_target_python3_5 '
- 'python_single_target_python3_6',
- RDEPEND='python_single_target_python3_5? ( '
- ' dev-lang/python:3.5 ) '
- 'python_single_target_python3_6? ( '
- ' dev-lang/python:3.6 )',
- REQUIRED_USE='^^ ( python_single_target_python3_5 )')),
- python.PythonMissingRequiredUse)
+ _eclasses_=["python-single-r1"],
+ IUSE="python_targets_python3_5 "
+ "python_targets_python3_6 "
+ "python_single_target_python3_5 "
+ "python_single_target_python3_6",
+ RDEPEND="python_single_target_python3_5? ( "
+ " dev-lang/python:3.5 ) "
+ "python_single_target_python3_6? ( "
+ " dev-lang/python:3.6 )",
+ REQUIRED_USE="^^ ( python_single_target_python3_5 )",
+ ),
+ ),
+ python.PythonMissingRequiredUse,
+ )
# || instead of ^^ in python-single-r1
assert isinstance(
self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-single-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6 '
- 'python_single_target_python3_5 '
- 'python_single_target_python3_6',
- RDEPEND='python_single_target_python3_5? ( '
- ' dev-lang/python:3.5 ) '
- 'python_single_target_python3_6? ( '
- ' dev-lang/python:3.6 )',
- REQUIRED_USE='|| ( python_targets_python3_5 '
- ' python_targets_python3_6 )')),
- python.PythonMissingRequiredUse)
+ _eclasses_=["python-single-r1"],
+ IUSE="python_targets_python3_5 "
+ "python_targets_python3_6 "
+ "python_single_target_python3_5 "
+ "python_single_target_python3_6",
+ RDEPEND="python_single_target_python3_5? ( "
+ " dev-lang/python:3.5 ) "
+ "python_single_target_python3_6? ( "
+ " dev-lang/python:3.6 )",
+ REQUIRED_USE="|| ( python_targets_python3_5 " " python_targets_python3_6 )",
+ ),
+ ),
+ python.PythonMissingRequiredUse,
+ )
def test_missing_deps(self):
r = self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6',
- REQUIRED_USE='|| ( python_targets_python3_5 '
- ' python_targets_python3_6 )'))
+ _eclasses_=["python-r1"],
+ IUSE="python_targets_python3_5 " "python_targets_python3_6",
+ REQUIRED_USE="|| ( python_targets_python3_5 " " python_targets_python3_6 )",
+ ),
+ )
assert isinstance(r, python.PythonMissingDeps)
assert 'missing RDEPEND="${PYTHON_DEPS}"' in str(r)
@@ -285,13 +318,12 @@ class TestPythonCheck(misc.ReportTestCase):
r = self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6',
- RDEPEND='python_targets_python3_5? ( '
- ' dev-lang/python:3.5 )',
- REQUIRED_USE='|| ( python_targets_python3_5 '
- ' python_targets_python3_6 )'))
+ _eclasses_=["python-r1"],
+ IUSE="python_targets_python3_5 " "python_targets_python3_6",
+ RDEPEND="python_targets_python3_5? ( " " dev-lang/python:3.5 )",
+ REQUIRED_USE="|| ( python_targets_python3_5 " " python_targets_python3_6 )",
+ ),
+ )
assert isinstance(r, python.PythonMissingDeps)
assert 'missing RDEPEND="${PYTHON_DEPS}"' in str(r)
@@ -301,69 +333,76 @@ class TestPythonCheck(misc.ReportTestCase):
self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6',
- RDEPEND='python_targets_python3_5? ( '
- ' dev-foo/bar ) '
- 'python_targets_python3_6? ( '
- ' dev-lang/python:3.6 )',
- REQUIRED_USE='|| ( python_targets_python3_5 '
- ' python_targets_python3_6 )')),
- python.PythonMissingDeps)
+ _eclasses_=["python-r1"],
+ IUSE="python_targets_python3_5 " "python_targets_python3_6",
+ RDEPEND="python_targets_python3_5? ( "
+ " dev-foo/bar ) "
+ "python_targets_python3_6? ( "
+ " dev-lang/python:3.6 )",
+ REQUIRED_USE="|| ( python_targets_python3_5 " " python_targets_python3_6 )",
+ ),
+ ),
+ python.PythonMissingDeps,
+ )
# DEPEND only, RDEPEND missing
assert isinstance(
self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6',
- DEPEND='python_targets_python3_5? ( '
- ' dev-lang/python:3.5 ) '
- 'python_targets_python3_6? ( '
- ' dev-lang/python:3.6 )',
- REQUIRED_USE='|| ( python_targets_python3_5 '
- ' python_targets_python3_6 )')),
- python.PythonMissingDeps)
+ _eclasses_=["python-r1"],
+ IUSE="python_targets_python3_5 " "python_targets_python3_6",
+ DEPEND="python_targets_python3_5? ( "
+ " dev-lang/python:3.5 ) "
+ "python_targets_python3_6? ( "
+ " dev-lang/python:3.6 )",
+ REQUIRED_USE="|| ( python_targets_python3_5 " " python_targets_python3_6 )",
+ ),
+ ),
+ python.PythonMissingDeps,
+ )
assert isinstance(
self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-single-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6 '
- 'python_single_target_python3_5 '
- 'python_single_target_python3_6',
- REQUIRED_USE='^^ ( python_single_target_python3_5 '
- ' python_single_target_python3_6 ) '
- 'python_single_target_python3_5? ( '
- ' python_targets_python3_5 ) '
- 'python_single_target_python3_6? ( '
- ' python_targets_python3_6 )')),
- python.PythonMissingDeps)
+ _eclasses_=["python-single-r1"],
+ IUSE="python_targets_python3_5 "
+ "python_targets_python3_6 "
+ "python_single_target_python3_5 "
+ "python_single_target_python3_6",
+ REQUIRED_USE="^^ ( python_single_target_python3_5 "
+ " python_single_target_python3_6 ) "
+ "python_single_target_python3_5? ( "
+ " python_targets_python3_5 ) "
+ "python_single_target_python3_6? ( "
+ " python_targets_python3_6 )",
+ ),
+ ),
+ python.PythonMissingDeps,
+ )
# incomplete deps
assert isinstance(
self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-single-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6 '
- 'python_single_target_python3_5 '
- 'python_single_target_python3_6',
- RDEPEND='python_single_target_python3_5? ( '
- ' dev-lang/python:3.5 )',
- REQUIRED_USE='^^ ( python_single_target_python3_5 '
- ' python_single_target_python3_6 ) '
- 'python_single_target_python3_5? ( '
- ' python_targets_python3_5 ) '
- 'python_single_target_python3_6? ( '
- ' python_targets_python3_6 )')),
- python.PythonMissingDeps)
+ _eclasses_=["python-single-r1"],
+ IUSE="python_targets_python3_5 "
+ "python_targets_python3_6 "
+ "python_single_target_python3_5 "
+ "python_single_target_python3_6",
+ RDEPEND="python_single_target_python3_5? ( " " dev-lang/python:3.5 )",
+ REQUIRED_USE="^^ ( python_single_target_python3_5 "
+ " python_single_target_python3_6 ) "
+ "python_single_target_python3_5? ( "
+ " python_targets_python3_5 ) "
+ "python_single_target_python3_6? ( "
+ " python_targets_python3_6 )",
+ ),
+ ),
+ python.PythonMissingDeps,
+ )
# check that irrelevant dep with same USE conditional does not wrongly
# satisfy the check
@@ -371,44 +410,50 @@ class TestPythonCheck(misc.ReportTestCase):
self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-single-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6 '
- 'python_single_target_python3_5 '
- 'python_single_target_python3_6',
- RDEPEND='python_single_target_python3_5? ( '
- ' dev-foo/bar ) '
- 'python_single_target_python3_6? ( '
- ' dev-lang/python:3.6 )',
- REQUIRED_USE='^^ ( python_single_target_python3_5 '
- ' python_single_target_python3_6 ) '
- 'python_single_target_python3_5? ( '
- ' python_targets_python3_5 ) '
- 'python_single_target_python3_6? ( '
- ' python_targets_python3_6 )')),
- python.PythonMissingDeps)
+ _eclasses_=["python-single-r1"],
+ IUSE="python_targets_python3_5 "
+ "python_targets_python3_6 "
+ "python_single_target_python3_5 "
+ "python_single_target_python3_6",
+ RDEPEND="python_single_target_python3_5? ( "
+ " dev-foo/bar ) "
+ "python_single_target_python3_6? ( "
+ " dev-lang/python:3.6 )",
+ REQUIRED_USE="^^ ( python_single_target_python3_5 "
+ " python_single_target_python3_6 ) "
+ "python_single_target_python3_5? ( "
+ " python_targets_python3_5 ) "
+ "python_single_target_python3_6? ( "
+ " python_targets_python3_6 )",
+ ),
+ ),
+ python.PythonMissingDeps,
+ )
# DEPEND only, RDEPEND missing
assert isinstance(
self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-single-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6 '
- 'python_single_target_python3_5 '
- 'python_single_target_python3_6',
- DEPEND='python_single_target_python3_5? ( '
- ' dev-lang/python:3.5 ) '
- 'python_single_target_python3_6? ( '
- ' dev-lang/python:3.6 )',
- REQUIRED_USE='^^ ( python_single_target_python3_5 '
- ' python_single_target_python3_6 ) '
- 'python_single_target_python3_5? ( '
- ' python_targets_python3_5 ) '
- 'python_single_target_python3_6? ( '
- ' python_targets_python3_6 )')),
- python.PythonMissingDeps)
+ _eclasses_=["python-single-r1"],
+ IUSE="python_targets_python3_5 "
+ "python_targets_python3_6 "
+ "python_single_target_python3_5 "
+ "python_single_target_python3_6",
+ DEPEND="python_single_target_python3_5? ( "
+ " dev-lang/python:3.5 ) "
+ "python_single_target_python3_6? ( "
+ " dev-lang/python:3.6 )",
+ REQUIRED_USE="^^ ( python_single_target_python3_5 "
+ " python_single_target_python3_6 ) "
+ "python_single_target_python3_5? ( "
+ " python_targets_python3_5 ) "
+ "python_single_target_python3_6? ( "
+ " python_targets_python3_6 )",
+ ),
+ ),
+ python.PythonMissingDeps,
+ )
# check that the check isn't wrongly satisfied by PYTHON_TARGETS
# in python-single-r1 (PYTHON_SINGLE_TARGET expected)
@@ -416,38 +461,40 @@ class TestPythonCheck(misc.ReportTestCase):
self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-single-r1'],
- IUSE='python_targets_python3_5 '
- 'python_targets_python3_6 '
- 'python_single_target_python3_5 '
- 'python_single_target_python3_6',
- RDEPEND='python_targets_python3_5? ( '
- ' dev-lang/python:3.5 ) '
- 'python_targets_python3_6? ( '
- ' dev-lang/python:3.6 )',
- REQUIRED_USE='^^ ( python_single_target_python3_5 '
- ' python_single_target_python3_6 ) '
- 'python_single_target_python3_5? ( '
- ' python_targets_python3_5 ) '
- 'python_single_target_python3_6? ( '
- ' python_targets_python3_6 )')),
- python.PythonMissingDeps)
+ _eclasses_=["python-single-r1"],
+ IUSE="python_targets_python3_5 "
+ "python_targets_python3_6 "
+ "python_single_target_python3_5 "
+ "python_single_target_python3_6",
+ RDEPEND="python_targets_python3_5? ( "
+ " dev-lang/python:3.5 ) "
+ "python_targets_python3_6? ( "
+ " dev-lang/python:3.6 )",
+ REQUIRED_USE="^^ ( python_single_target_python3_5 "
+ " python_single_target_python3_6 ) "
+ "python_single_target_python3_5? ( "
+ " python_targets_python3_5 ) "
+ "python_single_target_python3_6? ( "
+ " python_targets_python3_6 )",
+ ),
+ ),
+ python.PythonMissingDeps,
+ )
assert isinstance(
- self.assertReport(self.check, self.mk_pkg(_eclasses_=['python-any-r1'])),
- python.PythonMissingDeps)
+ self.assertReport(self.check, self.mk_pkg(_eclasses_=["python-any-r1"])),
+ python.PythonMissingDeps,
+ )
def test_runtime_dep_in_any_r1(self):
r = self.assertReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-any-r1'],
- DEPEND='|| ( '
- ' dev-lang/python:3.5 '
- ' dev-lang/python:3.6 )',
- RDEPEND='|| ( '
- ' dev-lang/python:3.5 '
- ' dev-lang/python:3.6 )'))
+ _eclasses_=["python-any-r1"],
+ DEPEND="|| ( " " dev-lang/python:3.5 " " dev-lang/python:3.6 )",
+ RDEPEND="|| ( " " dev-lang/python:3.5 " " dev-lang/python:3.6 )",
+ ),
+ )
assert isinstance(r, python.PythonRuntimeDepInAnyR1)
assert 'inherits python-any-r1 with RDEPEND="dev-lang/python:3.5"' in str(r)
@@ -455,6 +502,8 @@ class TestPythonCheck(misc.ReportTestCase):
self.assertNoReport(
self.check,
self.mk_pkg(
- _eclasses_=['python-any-r1'],
- DEPEND='dev-lang/python:3.5',
- RDEPEND='!dev-python/pypy3-bin:0'))
+ _eclasses_=["python-any-r1"],
+ DEPEND="dev-lang/python:3.5",
+ RDEPEND="!dev-python/pypy3-bin:0",
+ ),
+ )
diff --git a/tests/checks/test_repo.py b/tests/checks/test_repo.py
index cd685a52..4220e055 100644
--- a/tests/checks/test_repo.py
+++ b/tests/checks/test_repo.py
@@ -17,15 +17,14 @@ class TestRepoDirCheck(misc.Tmpdir, misc.ReportTestCase):
check_kls = repo.RepoDirCheck
def mk_check(self):
- self.repo = FakeRepo(repo_id='repo', location=self.dir)
- options = arghparse.Namespace(
- target_repo=self.repo, cache={'git': False}, gentoo_repo=True)
+ self.repo = FakeRepo(repo_id="repo", location=self.dir)
+ options = arghparse.Namespace(target_repo=self.repo, cache={"git": False}, gentoo_repo=True)
git_addon = addons.git.GitAddon(options)
return repo.RepoDirCheck(options, git_addon=git_addon)
def mk_pkg(self, cpvstr):
pkg = atom.atom(cpvstr)
- filesdir = pjoin(self.repo.location, pkg.category, pkg.package, 'files')
+ filesdir = pjoin(self.repo.location, pkg.category, pkg.package, "files")
os.makedirs(filesdir, exist_ok=True)
return filesdir
@@ -34,100 +33,100 @@ class TestRepoDirCheck(misc.Tmpdir, misc.ReportTestCase):
def test_empty_file(self):
check = self.mk_check()
- bin_path = pjoin(self.repo.location, 'foo')
+ bin_path = pjoin(self.repo.location, "foo")
touch(bin_path)
self.assertNoReport(check, [])
def test_regular_file(self):
check = self.mk_check()
- with open(pjoin(self.repo.location, 'foo'), 'w') as f:
- f.write('bar')
+ with open(pjoin(self.repo.location, "foo"), "w") as f:
+ f.write("bar")
self.assertNoReport(check, [])
def test_unreadable_file(self):
check = self.mk_check()
- with open(pjoin(self.repo.location, 'foo'), 'w') as f:
- f.write('bar')
- with mock.patch('pkgcheck.open') as mocked_open:
- mocked_open.side_effect = IOError('fake exception')
+ with open(pjoin(self.repo.location, "foo"), "w") as f:
+ f.write("bar")
+ with mock.patch("pkgcheck.open") as mocked_open:
+ mocked_open.side_effect = IOError("fake exception")
self.assertNoReport(check, [])
def test_ignored_root_dirs(self):
for d in self.check_kls.ignored_root_dirs:
check = self.mk_check()
- bin_path = pjoin(self.repo.location, d, 'foo')
+ bin_path = pjoin(self.repo.location, d, "foo")
os.makedirs(os.path.dirname(bin_path))
- with open(bin_path, 'wb') as f:
- f.write(b'\xd3\xad\xbe\xef')
+ with open(bin_path, "wb") as f:
+ f.write(b"\xd3\xad\xbe\xef")
self.assertNoReport(check, [])
def test_null_bytes(self):
check = self.mk_check()
- with open(pjoin(self.repo.location, 'foo'), 'wb') as f:
- f.write(b'foo\x00\xffbar')
+ with open(pjoin(self.repo.location, "foo"), "wb") as f:
+ f.write(b"foo\x00\xffbar")
r = self.assertReport(check, [])
assert isinstance(r, repo.BinaryFile)
- assert r.path == 'foo'
+ assert r.path == "foo"
assert "'foo'" in str(r)
def test_root_dir_binary(self):
check = self.mk_check()
- bin_path = pjoin(self.repo.location, 'foo')
- with open(bin_path, 'wb') as f:
- f.write(b'\xd3\xad\xbe\xef')
+ bin_path = pjoin(self.repo.location, "foo")
+ with open(bin_path, "wb") as f:
+ f.write(b"\xd3\xad\xbe\xef")
r = self.assertReport(check, [])
assert isinstance(r, repo.BinaryFile)
- assert r.path == 'foo'
+ assert r.path == "foo"
assert "'foo'" in str(r)
def test_ebuild_filesdir_binary(self):
check = self.mk_check()
- filesdir = self.mk_pkg('dev-util/foo')
- with open(pjoin(filesdir, 'foo'), 'wb') as f:
- f.write(b'\xd3\xad\xbe\xef')
+ filesdir = self.mk_pkg("dev-util/foo")
+ with open(pjoin(filesdir, "foo"), "wb") as f:
+ f.write(b"\xd3\xad\xbe\xef")
r = self.assertReport(check, [])
assert isinstance(r, repo.BinaryFile)
- assert r.path == 'dev-util/foo/files/foo'
+ assert r.path == "dev-util/foo/files/foo"
assert "'dev-util/foo/files/foo'" in str(r)
def test_gitignore(self):
# distfiles located in deprecated in-tree location are reported by default
check = self.mk_check()
- distfiles = pjoin(self.repo.location, 'distfiles')
+ distfiles = pjoin(self.repo.location, "distfiles")
os.mkdir(distfiles)
- with open(pjoin(distfiles, 'foo-0.tar.gz'), 'wb') as f:
- f.write(b'\xd3\xad\xbe\xef')
+ with open(pjoin(distfiles, "foo-0.tar.gz"), "wb") as f:
+ f.write(b"\xd3\xad\xbe\xef")
r = self.assertReport(check, [])
assert isinstance(r, repo.BinaryFile)
assert "distfiles/foo-0.tar.gz" in str(r)
# but results are suppressed if a matching git ignore entry exists
- for ignore_file in ('.gitignore', '.git/info/exclude'):
+ for ignore_file in (".gitignore", ".git/info/exclude"):
path = pjoin(self.repo.location, ignore_file)
ensure_dirs(os.path.dirname(path))
- with open(path, 'w') as f:
- f.write('/distfiles/')
+ with open(path, "w") as f:
+ f.write("/distfiles/")
self.assertNoReport(self.mk_check(), [])
os.unlink(path)
def test_non_utf8_encodings(self):
# non-english languages courtesy of google translate mangling
langs = (
- ("example text that shouldn't trigger", 'ascii'),
- ('نص المثال الذي لا ينبغي أن يؤدي', 'cp1256'), # arabic
- ('пример текста, который не должен срабатывать', 'koi8_r'), # russian
- ('उदाहरण पाठ जो ट्रिगर नहीं होना चाहिए', 'utf-16'), # hindi
- ('مثال کے متن جو ٹرگر نہ ہوں۔', 'utf-16'), # urdu
- ('ဖြစ်ပေါ်မပေးသင့်ကြောင်းဥပမာစာသား', 'utf-32'), # burmese
- ('उदाहरण पाठ जुन ट्रिगर हुँदैन', 'utf-32'), # nepali
- ('トリガーするべきではないテキストの例', 'shift_jis'), # japanese
- ('트리거해서는 안되는 예제 텍스트', 'cp949'), # korean
- ('不应触发的示例文本', 'gb2312'), # simplified chinese
- ('不應觸發的示例文本', 'gb18030'), # traditional chinese
+ ("example text that shouldn't trigger", "ascii"),
+ ("نص المثال الذي لا ينبغي أن يؤدي", "cp1256"), # arabic
+ ("пример текста, который не должен срабатывать", "koi8_r"), # russian
+ ("उदाहरण पाठ जो ट्रिगर नहीं होना चाहिए", "utf-16"), # hindi
+ ("مثال کے متن جو ٹرگر نہ ہوں۔", "utf-16"), # urdu
+ ("ဖြစ်ပေါ်မပေးသင့်ကြောင်းဥပမာစာသား", "utf-32"), # burmese
+ ("उदाहरण पाठ जुन ट्रिगर हुँदैन", "utf-32"), # nepali
+ ("トリガーするべきではないテキストの例", "shift_jis"), # japanese
+ ("트리거해서는 안되는 예제 텍스트", "cp949"), # korean
+ ("不应触发的示例文本", "gb2312"), # simplified chinese
+ ("不應觸發的示例文本", "gb18030"), # traditional chinese
)
for text, encoding in langs:
check = self.mk_check()
- with open(pjoin(self.repo.location, 'foo'), 'wb') as f:
+ with open(pjoin(self.repo.location, "foo"), "wb") as f:
data = text.encode(encoding)
f.write(data)
self.assertNoReport(check, [])
diff --git a/tests/checks/test_repo_metadata.py b/tests/checks/test_repo_metadata.py
index 2221e283..ff550d7d 100644
--- a/tests/checks/test_repo_metadata.py
+++ b/tests/checks/test_repo_metadata.py
@@ -16,24 +16,25 @@ class TestPackageUpdatesCheck(misc.Tmpdir, misc.ReportTestCase):
def mk_check(self, pkgs=(), **kwargs):
# TODO: switch to using a repo fixture when available
repo_dir = pjoin(self.dir, misc.random_str())
- os.makedirs(pjoin(repo_dir, 'metadata'))
- with open(pjoin(repo_dir, 'metadata', 'layout.conf'), 'w') as f:
- f.write('masters =\n')
+ os.makedirs(pjoin(repo_dir, "metadata"))
+ with open(pjoin(repo_dir, "metadata", "layout.conf"), "w") as f:
+ f.write("masters =\n")
- os.makedirs(pjoin(repo_dir, 'profiles', 'updates'))
- with open(pjoin(repo_dir, 'profiles', 'repo_name'), 'w') as f:
- f.write('fake\n')
+ os.makedirs(pjoin(repo_dir, "profiles", "updates"))
+ with open(pjoin(repo_dir, "profiles", "repo_name"), "w") as f:
+ f.write("fake\n")
for filename, updates in kwargs.items():
- with open(pjoin(repo_dir, 'profiles', 'updates', filename), 'w') as f:
- f.write('\n'.join(updates))
+ with open(pjoin(repo_dir, "profiles", "updates", filename), "w") as f:
+ f.write("\n".join(updates))
for pkg in pkgs:
pkg = FakePkg(pkg)
pkg_path = pjoin(
- repo_dir, pkg.category, pkg.package, f'{pkg.package}-{pkg.fullver}.ebuild')
+ repo_dir, pkg.category, pkg.package, f"{pkg.package}-{pkg.fullver}.ebuild"
+ )
os.makedirs(os.path.dirname(pkg_path), exist_ok=True)
- with open(pkg_path, 'w') as f:
- f.write('SLOT=0\n')
+ with open(pkg_path, "w") as f:
+ f.write("SLOT=0\n")
repo = UnconfiguredTree(repo_dir)
options = arghparse.Namespace(target_repo=repo, search_repo=repo)
@@ -44,87 +45,91 @@ class TestPackageUpdatesCheck(misc.Tmpdir, misc.ReportTestCase):
self.assertNoReport(self.mk_check(), [])
# empty file
- updates = {'1Q-2020': []}
+ updates = {"1Q-2020": []}
self.assertNoReport(self.mk_check(**updates), [])
def test_bad_update_filenames(self):
# only files named using the format [1-4]Q-[YYYY] are allowed
- updates = {'foobar': ['blah']}
+ updates = {"foobar": ["blah"]}
r = self.assertReport(self.mk_check(**updates), [])
assert isinstance(r, repo_metadata.BadPackageUpdate)
assert "incorrectly named update file: 'foobar'" in str(r)
- updates = {'5Q-2020': ['blah']}
+ updates = {"5Q-2020": ["blah"]}
r = self.assertReport(self.mk_check(**updates), [])
assert isinstance(r, repo_metadata.BadPackageUpdate)
assert "incorrectly named update file: '5Q-2020'" in str(r)
# hidden files will be flagged
- updates = {'.1Q-2020.swp': ['blah']}
+ updates = {".1Q-2020.swp": ["blah"]}
r = self.assertReport(self.mk_check(**updates), [])
assert isinstance(r, repo_metadata.BadPackageUpdate)
assert "incorrectly named update file: '.1Q-2020.swp'" in str(r)
def test_empty_line(self):
- updates = {'1Q-2020': [' ']}
+ updates = {"1Q-2020": [" "]}
r = self.assertReport(self.mk_check(**updates), [])
assert isinstance(r, repo_metadata.BadPackageUpdate)
assert "file '1Q-2020': empty line 1" in str(r)
def test_extra_whitespace(self):
- pkgs = ('dev-util/foo-0', 'dev-util/bar-1')
- for update in (' move dev-util/foo dev-util/bar', # prefix
- 'move dev-util/foo dev-util/bar '): # suffix
- updates = {'1Q-2020': [update]}
+ pkgs = ("dev-util/foo-0", "dev-util/bar-1")
+ for update in (
+ " move dev-util/foo dev-util/bar", # prefix
+ "move dev-util/foo dev-util/bar ",
+ ): # suffix
+ updates = {"1Q-2020": [update]}
r = self.assertReport(self.mk_check(pkgs=pkgs, **updates), [])
assert isinstance(r, repo_metadata.BadPackageUpdate)
- assert 'extra whitespace' in str(r)
- assert 'on line 1' in str(r)
+ assert "extra whitespace" in str(r)
+ assert "on line 1" in str(r)
def test_old_pkg_update(self):
- pkgs = ('dev-util/blah-0', 'dev-libs/foon-1')
- for update in ('move dev-util/foo dev-util/bar', # old pkg move
- 'slotmove dev-util/bar 0 1'): # old slot move
- updates = {'1Q-2020': [update]}
+ pkgs = ("dev-util/blah-0", "dev-libs/foon-1")
+ for update in (
+ "move dev-util/foo dev-util/bar", # old pkg move
+ "slotmove dev-util/bar 0 1",
+ ): # old slot move
+ updates = {"1Q-2020": [update]}
r = self.assertReport(self.mk_check(pkgs=pkgs, **updates), [])
assert isinstance(r, repo_metadata.OldPackageUpdate)
- assert r.pkg == 'dev-util/bar'
+ assert r.pkg == "dev-util/bar"
assert "'dev-util/bar' unavailable" in str(r)
def test_old_multimove_pkg_update(self):
- update = ['move dev-util/foo dev-util/bar', 'move dev-util/bar dev-util/blah']
- pkgs = ('dev-util/blaz-0', 'dev-libs/foon-1')
- updates = {'1Q-2020': update}
+ update = ["move dev-util/foo dev-util/bar", "move dev-util/bar dev-util/blah"]
+ pkgs = ("dev-util/blaz-0", "dev-libs/foon-1")
+ updates = {"1Q-2020": update}
r = self.assertReport(self.mk_check(pkgs=pkgs, **updates), [])
assert isinstance(r, repo_metadata.OldMultiMovePackageUpdate)
- assert r.pkg == 'dev-util/blah'
- assert r.moves == ('dev-util/foo', 'dev-util/bar', 'dev-util/blah')
+ assert r.pkg == "dev-util/blah"
+ assert r.moves == ("dev-util/foo", "dev-util/bar", "dev-util/blah")
assert "'dev-util/blah' unavailable" in str(r)
def test_multimove_pkg_update(self):
- update = ['move dev-util/foo dev-util/bar', 'move dev-util/bar dev-util/blah']
- pkgs = ('dev-util/blah-0', 'dev-libs/foon-1')
- updates = {'1Q-2020': update}
+ update = ["move dev-util/foo dev-util/bar", "move dev-util/bar dev-util/blah"]
+ pkgs = ("dev-util/blah-0", "dev-libs/foon-1")
+ updates = {"1Q-2020": update}
r = self.assertReport(self.mk_check(pkgs=pkgs, **updates), [])
assert isinstance(r, repo_metadata.MultiMovePackageUpdate)
- assert r.pkg == 'dev-util/foo'
- assert r.moves == ('dev-util/foo', 'dev-util/bar', 'dev-util/blah')
+ assert r.pkg == "dev-util/foo"
+ assert r.moves == ("dev-util/foo", "dev-util/bar", "dev-util/blah")
assert "'dev-util/foo': multi-move update" in str(r)
def test_move_to_self_pkg_update(self):
- update = ['move dev-util/foo dev-util/foo']
- pkgs = ('dev-util/foo-0',)
- updates = {'1Q-2020': update}
+ update = ["move dev-util/foo dev-util/foo"]
+ pkgs = ("dev-util/foo-0",)
+ updates = {"1Q-2020": update}
r = self.assertReport(self.mk_check(pkgs=pkgs, **updates), [])
assert isinstance(r, repo_metadata.RedundantPackageUpdate)
- assert r.updates == ('move', 'dev-util/foo', 'dev-util/foo')
+ assert r.updates == ("move", "dev-util/foo", "dev-util/foo")
assert "update line moves to the same package/slot" in str(r)
def test_slot_move_to_self_pkg_update(self):
- update = ['slotmove dev-util/foo 0 0']
- pkgs = ('dev-util/foo-0',)
- updates = {'1Q-2020': update}
+ update = ["slotmove dev-util/foo 0 0"]
+ pkgs = ("dev-util/foo-0",)
+ updates = {"1Q-2020": update}
r = self.assertReport(self.mk_check(pkgs=pkgs, **updates), [])
assert isinstance(r, repo_metadata.RedundantPackageUpdate)
- assert r.updates == ('slotmove', 'dev-util/foo', '0', '0')
+ assert r.updates == ("slotmove", "dev-util/foo", "0", "0")
assert "update line moves to the same package/slot" in str(r)
diff --git a/tests/checks/test_stablereq.py b/tests/checks/test_stablereq.py
index b51bf9bc..2e181e57 100644
--- a/tests/checks/test_stablereq.py
+++ b/tests/checks/test_stablereq.py
@@ -21,17 +21,17 @@ class TestStableRequestCheck(ReportTestCase):
# initialize parent repo
self.parent_git_repo = make_git_repo()
- self.parent_repo = make_repo(self.parent_git_repo.path, repo_id='gentoo')
- self.parent_git_repo.add_all('initial commit')
+ self.parent_repo = make_repo(self.parent_git_repo.path, repo_id="gentoo")
+ self.parent_git_repo.add_all("initial commit")
# create a stub pkg and commit it
- self.parent_repo.create_ebuild('cat/pkg-0')
- self.parent_git_repo.add_all('cat/pkg-0')
+ self.parent_repo.create_ebuild("cat/pkg-0")
+ self.parent_git_repo.add_all("cat/pkg-0")
# initialize child repo
self.child_git_repo = make_git_repo()
- self.child_git_repo.run(['git', 'remote', 'add', 'origin', self.parent_git_repo.path])
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
- self.child_git_repo.run(['git', 'remote', 'set-head', 'origin', 'main'])
+ self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
+ self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
self.child_repo = make_repo(self.child_git_repo.path)
def init_check(self, options=None, future=0, stable_time=None):
@@ -44,50 +44,57 @@ class TestStableRequestCheck(ReportTestCase):
def _options(self, stable_time=None, **kwargs):
args = [
- 'scan', '-q', '--cache-dir', self.cache_dir,
- '--repo', self.child_repo.location,
+ "scan",
+ "-q",
+ "--cache-dir",
+ self.cache_dir,
+ "--repo",
+ self.child_repo.location,
]
if stable_time is not None:
- args.extend(['--stabletime', str(stable_time)])
+ args.extend(["--stabletime", str(stable_time)])
options, _ = self._tool.parse_args(args)
return options
def test_no_git_support(self):
options = self._options()
- options.cache['git'] = False
- with pytest.raises(SkipCheck, match='git cache support required'):
+ options.cache["git"] = False
+ with pytest.raises(SkipCheck, match="git cache support required"):
self.init_check(options)
def test_no_stable_keywords(self):
- self.parent_repo.create_ebuild('cat/pkg-1', keywords=['~amd64'])
- self.parent_git_repo.add_all('cat/pkg-1')
- self.parent_repo.create_ebuild('cat/pkg-2', keywords=['~amd64'])
- self.parent_git_repo.add_all('cat/pkg-2')
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
+ self.parent_repo.create_ebuild("cat/pkg-1", keywords=["~amd64"])
+ self.parent_git_repo.add_all("cat/pkg-1")
+ self.parent_repo.create_ebuild("cat/pkg-2", keywords=["~amd64"])
+ self.parent_git_repo.add_all("cat/pkg-2")
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
self.init_check()
self.assertNoReport(self.check, self.source)
def test_uncommitted_local_ebuild(self):
- self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
- self.parent_git_repo.add_all('cat/pkg-1')
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
- self.child_repo.create_ebuild('cat/pkg-2', keywords=['~amd64'])
+ self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
+ self.parent_git_repo.add_all("cat/pkg-1")
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
+ self.child_repo.create_ebuild("cat/pkg-2", keywords=["~amd64"])
self.init_check(future=30)
self.assertNoReport(self.check, self.source)
- @pytest.mark.parametrize(("stable_time", "less_days", "more_days"), (
- pytest.param(None, (0, 1, 10, 20, 29), (30, 31), id="stable_time=unset"),
- pytest.param(1, (0,), (1, 10), id="stable_time=1"),
- pytest.param(14, (0, 1, 10, 13), (14, 15, 30), id="stable_time=14"),
- pytest.param(30, (0, 1, 10, 20, 29), (30, 31), id="stable_time=30"),
- pytest.param(100, (98, 99), (100, 101), id="stable_time=100"),
- ))
+ @pytest.mark.parametrize(
+ ("stable_time", "less_days", "more_days"),
+ (
+ pytest.param(None, (0, 1, 10, 20, 29), (30, 31), id="stable_time=unset"),
+ pytest.param(1, (0,), (1, 10), id="stable_time=1"),
+ pytest.param(14, (0, 1, 10, 13), (14, 15, 30), id="stable_time=14"),
+ pytest.param(30, (0, 1, 10, 20, 29), (30, 31), id="stable_time=30"),
+ pytest.param(100, (98, 99), (100, 101), id="stable_time=100"),
+ ),
+ )
def test_existing_stable_keywords(self, stable_time, less_days, more_days):
- self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
- self.parent_git_repo.add_all('cat/pkg-1')
- self.parent_repo.create_ebuild('cat/pkg-2', keywords=['~amd64'])
- self.parent_git_repo.add_all('cat/pkg-2')
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
+ self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
+ self.parent_git_repo.add_all("cat/pkg-1")
+ self.parent_repo.create_ebuild("cat/pkg-2", keywords=["~amd64"])
+ self.parent_git_repo.add_all("cat/pkg-2")
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
# packages are not old enough to trigger any results
for future in less_days:
@@ -98,74 +105,74 @@ class TestStableRequestCheck(ReportTestCase):
for future in more_days:
self.init_check(future=future, stable_time=stable_time)
r = self.assertReport(self.check, self.source)
- expected = StableRequest('0', ['~amd64'], future, pkg=VersionedCPV('cat/pkg-2'))
+ expected = StableRequest("0", ["~amd64"], future, pkg=VersionedCPV("cat/pkg-2"))
assert r == expected
def test_multislot_with_unstable_slot(self):
- self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
- self.parent_git_repo.add_all('cat/pkg-1')
- self.parent_repo.create_ebuild('cat/pkg-2', keywords=['~amd64'], slot='1')
- self.parent_git_repo.add_all('cat/pkg-2')
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
+ self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
+ self.parent_git_repo.add_all("cat/pkg-1")
+ self.parent_repo.create_ebuild("cat/pkg-2", keywords=["~amd64"], slot="1")
+ self.parent_git_repo.add_all("cat/pkg-2")
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
self.init_check(future=30)
r = self.assertReport(self.check, self.source)
- expected = StableRequest('1', ['~amd64'], 30, pkg=VersionedCPV('cat/pkg-2'))
+ expected = StableRequest("1", ["~amd64"], 30, pkg=VersionedCPV("cat/pkg-2"))
assert r == expected
def test_moved_category(self):
- self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
- self.parent_git_repo.add_all('cat/pkg-1')
- self.parent_repo.create_ebuild('cat/pkg-2', keywords=['~amd64'])
- self.parent_git_repo.add_all('cat/pkg-2')
- self.parent_git_repo.move('cat', 'newcat')
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
+ self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
+ self.parent_git_repo.add_all("cat/pkg-1")
+ self.parent_repo.create_ebuild("cat/pkg-2", keywords=["~amd64"])
+ self.parent_git_repo.add_all("cat/pkg-2")
+ self.parent_git_repo.move("cat", "newcat")
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
self.init_check(future=30)
r = self.assertReport(self.check, self.source)
- expected = StableRequest('0', ['~amd64'], 30, pkg=VersionedCPV('newcat/pkg-2'))
+ expected = StableRequest("0", ["~amd64"], 30, pkg=VersionedCPV("newcat/pkg-2"))
assert r == expected
def test_moved_package(self):
- self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
- self.parent_git_repo.add_all('cat/pkg-1')
- self.parent_repo.create_ebuild('cat/pkg-2', keywords=['~amd64'])
- self.parent_git_repo.add_all('cat/pkg-2')
+ self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
+ self.parent_git_repo.add_all("cat/pkg-1")
+ self.parent_repo.create_ebuild("cat/pkg-2", keywords=["~amd64"])
+ self.parent_git_repo.add_all("cat/pkg-2")
# rename pkg and commit results
path = self.parent_git_repo.path
- new_pkg_dir = pjoin(path, 'cat/newpkg')
- os.rename(pjoin(path, 'cat/pkg'), new_pkg_dir)
+ new_pkg_dir = pjoin(path, "cat/newpkg")
+ os.rename(pjoin(path, "cat/pkg"), new_pkg_dir)
for i, f in enumerate(sorted(os.listdir(new_pkg_dir))):
- os.rename(pjoin(new_pkg_dir, f), pjoin(new_pkg_dir, f'newpkg-{i}.ebuild'))
+ os.rename(pjoin(new_pkg_dir, f), pjoin(new_pkg_dir, f"newpkg-{i}.ebuild"))
self.parent_git_repo.add_all()
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
self.init_check(future=30)
r = self.assertReport(self.check, self.source)
- expected = StableRequest('0', ['~amd64'], 30, pkg=VersionedCPV('cat/newpkg-2'))
+ expected = StableRequest("0", ["~amd64"], 30, pkg=VersionedCPV("cat/newpkg-2"))
assert r == expected
def test_renamed_ebuild(self):
- self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
- self.parent_git_repo.add_all('cat/pkg-1')
- self.parent_repo.create_ebuild('cat/pkg-2_rc1', keywords=['~amd64'])
- self.parent_git_repo.add_all('cat/pkg-2_rc1')
- self.parent_git_repo.move('cat/pkg/pkg-2_rc1.ebuild', 'cat/pkg/pkg-2.ebuild')
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
+ self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
+ self.parent_git_repo.add_all("cat/pkg-1")
+ self.parent_repo.create_ebuild("cat/pkg-2_rc1", keywords=["~amd64"])
+ self.parent_git_repo.add_all("cat/pkg-2_rc1")
+ self.parent_git_repo.move("cat/pkg/pkg-2_rc1.ebuild", "cat/pkg/pkg-2.ebuild")
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
self.init_check(future=30)
r = self.assertReport(self.check, self.source)
- expected = StableRequest('0', ['~amd64'], 30, pkg=VersionedCPV('cat/pkg-2'))
+ expected = StableRequest("0", ["~amd64"], 30, pkg=VersionedCPV("cat/pkg-2"))
assert r == expected
def test_modified_ebuild(self):
- self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])
- self.parent_git_repo.add_all('cat/pkg-1')
- self.parent_repo.create_ebuild('cat/pkg-2', keywords=['~amd64'])
- self.parent_git_repo.add_all('cat/pkg-2')
- with open(pjoin(self.parent_git_repo.path, 'cat/pkg/pkg-2.ebuild'), 'a') as f:
- f.write('# comment\n')
- self.parent_git_repo.add_all('cat/pkg-2: add comment')
- self.child_git_repo.run(['git', 'pull', 'origin', 'main'])
+ self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])
+ self.parent_git_repo.add_all("cat/pkg-1")
+ self.parent_repo.create_ebuild("cat/pkg-2", keywords=["~amd64"])
+ self.parent_git_repo.add_all("cat/pkg-2")
+ with open(pjoin(self.parent_git_repo.path, "cat/pkg/pkg-2.ebuild"), "a") as f:
+ f.write("# comment\n")
+ self.parent_git_repo.add_all("cat/pkg-2: add comment")
+ self.child_git_repo.run(["git", "pull", "origin", "main"])
self.init_check(future=30)
r = self.assertReport(self.check, self.source)
- expected = StableRequest('0', ['~amd64'], 30, pkg=VersionedCPV('cat/pkg-2'))
+ expected = StableRequest("0", ["~amd64"], 30, pkg=VersionedCPV("cat/pkg-2"))
assert r == expected
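
The hunks above are dominated by black's string normalization: single-quoted literals become double-quoted wherever that introduces no extra escaping, and the parametrize block at the top stays one entry per line because its trailing comma triggers black's magic trailing comma rule. Below is a minimal sketch reproducing one such rewrite through black's Python API; black does not declare this API stable, so format_str and Mode are assumptions about the installed version:

    import black

    src = "self.parent_repo.create_ebuild('cat/pkg-1', keywords=['amd64'])\n"
    out = black.format_str(src, mode=black.Mode())
    # Quotes are normalized exactly as in the hunks above.
    assert out == 'self.parent_repo.create_ebuild("cat/pkg-1", keywords=["amd64"])\n'
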
diff --git a/tests/checks/test_whitespace.py b/tests/checks/test_whitespace.py
index bc61998c..f90495b6 100644
--- a/tests/checks/test_whitespace.py
+++ b/tests/checks/test_whitespace.py
@@ -15,7 +15,6 @@ class WhitespaceCheckTest(misc.ReportTestCase):
class TestWhitespaceFound(WhitespaceCheckTest):
-
def test_leading(self):
fake_src = [
"# This is our first fake ebuild\n",
@@ -27,7 +26,7 @@ class TestWhitespaceFound(WhitespaceCheckTest):
r = self.assertReport(self.check, fake_pkg)
assert isinstance(r, whitespace.WhitespaceFound)
assert r.lines == (2,)
- assert 'leading whitespace' in str(r)
+ assert "leading whitespace" in str(r)
def test_trailing(self):
fake_src = [
@@ -40,11 +39,10 @@ class TestWhitespaceFound(WhitespaceCheckTest):
r = self.assertReport(self.check, fake_pkg)
assert isinstance(r, whitespace.WhitespaceFound)
assert r.lines == (2,)
- assert 'trailing whitespace' in str(r)
+ assert "trailing whitespace" in str(r)
class TestWrongIndentFound(WhitespaceCheckTest):
-
def test_it(self):
fake_src = [
"# This is our first fake ebuild\n",
@@ -56,11 +54,10 @@ class TestWrongIndentFound(WhitespaceCheckTest):
r = self.assertReport(self.check, fake_pkg)
assert isinstance(r, whitespace.WrongIndentFound)
assert r.lines == (2,)
- assert 'whitespace in indentation' in str(r)
+ assert "whitespace in indentation" in str(r)
class TestDoubleEmptyLine(WhitespaceCheckTest):
-
def test_it(self):
fake_src = [
"# This is our first fake ebuild\n",
@@ -73,11 +70,10 @@ class TestDoubleEmptyLine(WhitespaceCheckTest):
r = self.assertReport(self.check, fake_pkg)
assert isinstance(r, whitespace.DoubleEmptyLine)
assert r.lines == (3,)
- assert 'unneeded empty line' in str(r)
+ assert "unneeded empty line" in str(r)
class TestNoNewLineOnEnd(WhitespaceCheckTest):
-
def test_it(self):
fake_src = [
"# This is our first fake ebuild\n",
@@ -87,11 +83,10 @@ class TestNoNewLineOnEnd(WhitespaceCheckTest):
r = self.assertReport(self.check, fake_pkg)
assert isinstance(r, whitespace.NoFinalNewline)
- assert 'lacks an ending newline' in str(r)
+ assert "lacks an ending newline" in str(r)
class TestTrailingNewLineOnEnd(WhitespaceCheckTest):
-
def test_it(self):
fake_src = [
"# This is our first fake ebuild\n",
@@ -102,43 +97,43 @@ class TestTrailingNewLineOnEnd(WhitespaceCheckTest):
r = self.assertReport(self.check, fake_pkg)
assert isinstance(r, whitespace.TrailingEmptyLine)
- assert 'trailing blank line(s)' in str(r)
+ assert "trailing blank line(s)" in str(r)
def generate_whitespace_data():
"""Generate bad whitespace list for the current python version."""
all_whitespace_chars = set(
- re.findall(r'\s', ''.join(chr(c) for c in range(sys.maxunicode + 1))))
- allowed_whitespace_chars = {'\t', '\n', ' '}
+ re.findall(r"\s", "".join(chr(c) for c in range(sys.maxunicode + 1)))
+ )
+ allowed_whitespace_chars = {"\t", "\n", " "}
bad_whitespace_chars = tuple(sorted(all_whitespace_chars - allowed_whitespace_chars))
return whitespace.WhitespaceData(unicodedata.unidata_version, bad_whitespace_chars)
class TestBadWhitespaceCharacter(WhitespaceCheckTest):
-
def test_outdated_bad_whitespace_chars(self):
"""Check if the hardcoded bad whitespace character list is outdated."""
updated_whitespace_data = generate_whitespace_data()
if updated_whitespace_data.unicode_version != whitespace.whitespace_data.unicode_version:
- assert updated_whitespace_data.chars == whitespace.whitespace_data.chars, \
- f'outdated character list for Unicode version {unicodedata.unidata_version}'
+ assert (
+ updated_whitespace_data.chars == whitespace.whitespace_data.chars
+ ), f"outdated character list for Unicode version {unicodedata.unidata_version}"
def test_bad_whitespace_chars(self):
for char in whitespace.whitespace_data.chars:
fake_src = [
- 'src_prepare() {\n',
+ "src_prepare() {\n",
f'\tcd "${{S}}"/cpp ||{char}die\n',
- '}\n',
+ "}\n",
]
fake_pkg = misc.FakePkg("dev-util/diffball-0.5", lines=fake_src)
r = self.assertReport(self.check, fake_pkg)
assert isinstance(r, whitespace.BadWhitespaceCharacter)
- assert f'bad whitespace character {repr(char)} on line 2' in str(r)
+ assert f"bad whitespace character {repr(char)} on line 2" in str(r)
class TestMultipleChecks(WhitespaceCheckTest):
-
def test_it(self):
fake_src = [
"# This is our first fake ebuild\n",
diff --git a/tests/conftest.py b/tests/conftest.py
index 675110a6..a836e3ba 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -17,7 +17,7 @@ from snakeoil.contexts import os_environ
from snakeoil.formatters import PlainTextFormatter
from snakeoil.osutils import pjoin
-pytest_plugins = ['pkgcore']
+pytest_plugins = ["pkgcore"]
REPO_ROOT = Path(__file__).parent.parent
@@ -43,7 +43,7 @@ def default_session_fixture(request):
"""Fixture run globally for the entire test session."""
stack = ExitStack()
# don't load the default system or user config files
- stack.enter_context(patch('pkgcheck.cli.ConfigFileParser.default_configs', ()))
+ stack.enter_context(patch("pkgcheck.cli.ConfigFileParser.default_configs", ()))
stack.enter_context(os_environ(**(git_config := GitConfig()).config_env))
def unpatch():
@@ -59,40 +59,44 @@ def testconfig(tmp_path_factory):
Also, repo entries for all the bundled test repos.
"""
- config = tmp_path_factory.mktemp('testconfig')
- repos_conf = config / 'repos.conf'
- stubrepo = pjoin(pkgcore_const.DATA_PATH, 'stubrepo')
- testdir = REPO_ROOT / 'testdata/repos'
- with open(repos_conf, 'w') as f:
- f.write(textwrap.dedent(f"""\
- [DEFAULT]
- main-repo = standalone
- [stubrepo]
- location = {stubrepo}
- """))
+ config = tmp_path_factory.mktemp("testconfig")
+ repos_conf = config / "repos.conf"
+ stubrepo = pjoin(pkgcore_const.DATA_PATH, "stubrepo")
+ testdir = REPO_ROOT / "testdata/repos"
+ with open(repos_conf, "w") as f:
+ f.write(
+ textwrap.dedent(
+ f"""\
+ [DEFAULT]
+ main-repo = standalone
+ [stubrepo]
+ location = {stubrepo}
+ """
+ )
+ )
for repo in testdir.iterdir():
- f.write(f'[{repo.name}]\n')
- f.write(f'location = {repo}\n')
- profile_path = pjoin(stubrepo, 'profiles', 'default')
- os.symlink(profile_path, str(config / 'make.profile'))
+ f.write(f"[{repo.name}]\n")
+ f.write(f"location = {repo}\n")
+ profile_path = pjoin(stubrepo, "profiles", "default")
+ os.symlink(profile_path, str(config / "make.profile"))
return str(config)
@pytest.fixture(scope="session")
def cache_dir(tmp_path_factory):
"""Generate a cache directory for pkgcheck."""
- cache_dir = tmp_path_factory.mktemp('cache')
+ cache_dir = tmp_path_factory.mktemp("cache")
return str(cache_dir)
@pytest.fixture
def fakerepo(tmp_path_factory):
"""Generate a stub repo."""
- fakerepo = tmp_path_factory.mktemp('fakerepo')
- (profiles := fakerepo / 'profiles').mkdir(parents=True)
- (profiles / 'repo_name').write_text('fakerepo\n')
- (metadata := fakerepo / 'metadata').mkdir(parents=True)
- (metadata / 'layout.conf').write_text('masters =\n')
+ fakerepo = tmp_path_factory.mktemp("fakerepo")
+ (profiles := fakerepo / "profiles").mkdir(parents=True)
+ (profiles / "repo_name").write_text("fakerepo\n")
+ (metadata := fakerepo / "metadata").mkdir(parents=True)
+ (metadata / "layout.conf").write_text("masters =\n")
return fakerepo
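
A cheap way to confirm files stay clean after sweeping rewrites like the ones above is to round-trip them through the formatter, which is roughly what black --check does per file. A sketch, again assuming black's unstable Python API:

    import black

    def is_black_clean(src: str) -> bool:
        """Return True if black would leave src unchanged."""
        return black.format_str(src, mode=black.Mode()) == src

    print(is_black_clean('x = "already formatted"\n'))  # True
    print(is_black_clean("x = 'single quotes'\n"))  # False
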
diff --git a/tests/misc.py b/tests/misc.py
index cf317e4e..92d25721 100644
--- a/tests/misc.py
+++ b/tests/misc.py
@@ -26,18 +26,18 @@ from snakeoil.sequences import split_negations
@dataclass
class Profile:
"""Profile record used to create profiles in a repository."""
+
path: str
arch: str
- status: str = 'stable'
+ status: str = "stable"
deprecated: bool = False
defaults: List[str] = None
- eapi: str = '5'
+ eapi: str = "5"
# TODO: merge this with the pkgcore-provided equivalent
class FakePkg(package):
-
- def __init__(self, cpvstr, data=None, parent=None, ebuild='', **kwargs):
+ def __init__(self, cpvstr, data=None, parent=None, ebuild="", **kwargs):
if data is None:
data = {}
@@ -46,7 +46,7 @@ class FakePkg(package):
cpv = VersionedCPV(cpvstr)
# TODO: make pkgcore generate empty shared pkg data when None is passed
- mxml = repo_objs.LocalMetadataXml('')
+ mxml = repo_objs.LocalMetadataXml("")
shared = repo_objs.SharedPkgData(metadata_xml=mxml, manifest=None)
super().__init__(shared, parent, cpv.category, cpv.package, cpv.fullver)
object.__setattr__(self, "data", data)
@@ -58,7 +58,7 @@ class FakePkg(package):
@property
def eapi(self):
- return get_eapi(self.data.get('EAPI', '0'))
+ return get_eapi(self.data.get("EAPI", "0"))
@property
def ebuild(self):
@@ -88,8 +88,11 @@ class FakeFilesDirPkg(package):
cpv = VersionedCPV(cpvstr)
super().__init__(shared, factory(repo), cpv.category, cpv.package, cpv.fullver)
object.__setattr__(self, "data", data)
- object.__setattr__(self, "path", pjoin(
- repo.location, cpv.category, cpv.package, f'{cpv.package}-{cpv.fullver}.ebuild'))
+ object.__setattr__(
+ self,
+ "path",
+ pjoin(repo.location, cpv.category, cpv.package, f"{cpv.package}-{cpv.fullver}.ebuild"),
+ )
class ReportTestCase:
@@ -133,7 +136,7 @@ class ReportTestCase:
def assertReport(self, check, data):
results = self.assertReports(check, data)
- results_str = '\n'.join(map(str, results))
+ results_str = "\n".join(map(str, results))
assert len(results) == 1, f"expected one report, got {len(results)}:\n{results_str}"
self._assertReportSanity(*results)
result = results[0]
@@ -141,40 +144,51 @@ class ReportTestCase:
class FakeProfile:
-
- def __init__(self, masked_use={}, stable_masked_use={}, forced_use={},
- stable_forced_use={}, pkg_use={}, provides={}, iuse_effective=[],
- use=[], masks=[], unmasks=[], arch='x86', name='none'):
+ def __init__(
+ self,
+ masked_use={},
+ stable_masked_use={},
+ forced_use={},
+ stable_forced_use={},
+ pkg_use={},
+ provides={},
+ iuse_effective=[],
+ use=[],
+ masks=[],
+ unmasks=[],
+ arch="x86",
+ name="none",
+ ):
self.provides_repo = SimpleTree(provides)
self.masked_use = ChunkedDataDict()
self.masked_use.update_from_stream(
- chunked_data(atom(k), *split_negations(v))
- for k, v in masked_use.items())
+ chunked_data(atom(k), *split_negations(v)) for k, v in masked_use.items()
+ )
self.masked_use.freeze()
self.stable_masked_use = ChunkedDataDict()
self.stable_masked_use.update_from_stream(
- chunked_data(atom(k), *split_negations(v))
- for k, v in stable_masked_use.items())
+ chunked_data(atom(k), *split_negations(v)) for k, v in stable_masked_use.items()
+ )
self.stable_masked_use.freeze()
self.forced_use = ChunkedDataDict()
self.forced_use.update_from_stream(
- chunked_data(atom(k), *split_negations(v))
- for k, v in forced_use.items())
+ chunked_data(atom(k), *split_negations(v)) for k, v in forced_use.items()
+ )
self.forced_use.freeze()
self.stable_forced_use = ChunkedDataDict()
self.stable_forced_use.update_from_stream(
- chunked_data(atom(k), *split_negations(v))
- for k, v in stable_forced_use.items())
+ chunked_data(atom(k), *split_negations(v)) for k, v in stable_forced_use.items()
+ )
self.stable_forced_use.freeze()
self.pkg_use = ChunkedDataDict()
self.pkg_use.update_from_stream(
- chunked_data(atom(k), *split_negations(v))
- for k, v in pkg_use.items())
+ chunked_data(atom(k), *split_negations(v)) for k, v in pkg_use.items()
+ )
self.pkg_use.freeze()
self.masks = tuple(map(atom, masks))
@@ -199,7 +213,7 @@ class Tmpdir:
def random_str(length=10):
"""Generate a random string of ASCII characters of a given length."""
- return ''.join(random.choice(string.ascii_letters) for _ in range(length))
+ return "".join(random.choice(string.ascii_letters) for _ in range(length))
# TODO: combine this with pkgcheck.checks.init_checks()
@@ -224,6 +238,5 @@ def init_check(check_cls, options):
except CacheDisabled as e:
raise SkipCheck(cls, e)
- required_addons = {
- base.param_name(x): addons_map[x] for x in addon.required_addons}
+ required_addons = {base.param_name(x): addons_map[x] for x in addon.required_addons}
return addon, required_addons, source
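
The FakeFilesDirPkg hunk shows black's line splitting: a call that cannot fit within the default 88 columns is exploded to one argument per line with a trailing comma. A sketch reproducing that split; the names inside src are copied from the hunk and only parsed, never executed:

    import black

    src = (
        "object.__setattr__(self, 'path', pjoin(repo.location, cpv.category, "
        "cpv.package, f'{cpv.package}-{cpv.fullver}.ebuild'))\n"
    )
    # Prints the exploded, trailing-comma form seen in the hunk above.
    print(black.format_str(src, mode=black.Mode(line_length=88)))
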
diff --git a/tests/scripts/test_argparse_actions.py b/tests/scripts/test_argparse_actions.py
index d46d4560..283eb716 100644
--- a/tests/scripts/test_argparse_actions.py
+++ b/tests/scripts/test_argparse_actions.py
@@ -11,61 +11,59 @@ from snakeoil.cli import arghparse
class TestConfigArg:
-
@pytest.fixture(autouse=True)
def _create_argparser(self):
self.parser = arghparse.ArgumentParser()
- self.parser.add_argument('--config', action=argparse_actions.ConfigArg)
+ self.parser.add_argument("--config", action=argparse_actions.ConfigArg)
def test_none(self):
options = self.parser.parse_args([])
assert options.config is None
def test_enabled(self):
- for arg in ('config_file', '/path/to/config/file'):
- options = self.parser.parse_args(['--config', arg])
+ for arg in ("config_file", "/path/to/config/file"):
+ options = self.parser.parse_args(["--config", arg])
assert options.config == arg
def test_disabled(self):
- for arg in ('False', 'false', 'No', 'no', 'N', 'n'):
- options = self.parser.parse_args(['--config', arg])
+ for arg in ("False", "false", "No", "no", "N", "n"):
+ options = self.parser.parse_args(["--config", arg])
assert options.config is False
class TestFilterArgs:
-
@pytest.fixture(autouse=True)
def _create_argparser(self):
self.parser = arghparse.ArgumentParser()
- self.parser.set_defaults(config_checksets={'cset': ['StableRequestCheck']})
- self.parser.add_argument('--filter', action=argparse_actions.FilterArgs)
+ self.parser.set_defaults(config_checksets={"cset": ["StableRequestCheck"]})
+ self.parser.add_argument("--filter", action=argparse_actions.FilterArgs)
def test_none(self):
options = self.parser.parse_args([])
assert options.filter is None
def test_unknown_filter(self, capsys):
- for arg in ('foo', 'foo:PkgDirCheck'):
+ for arg in ("foo", "foo:PkgDirCheck"):
with pytest.raises(SystemExit) as excinfo:
- self.parser.parse_args(['--filter', arg])
+ self.parser.parse_args(["--filter", arg])
out, err = capsys.readouterr()
assert not out
assert "unknown filter: 'foo'" in err
assert excinfo.value.code == 2
def test_disabled(self):
- for arg in ('False', 'false', 'No', 'no', 'N', 'n'):
- options = self.parser.parse_args(['--filter', arg])
+ for arg in ("False", "false", "No", "no", "N", "n"):
+ options = self.parser.parse_args(["--filter", arg])
assert options.filter == {}
def test_enabled(self):
- for arg in ('latest', 'latest:StableRequest', 'latest:StableRequestCheck', 'latest:cset'):
- options = self.parser.parse_args(['--filter', arg])
- assert objects.KEYWORDS['StableRequest'] in options.filter
+ for arg in ("latest", "latest:StableRequest", "latest:StableRequestCheck", "latest:cset"):
+ options = self.parser.parse_args(["--filter", arg])
+ assert objects.KEYWORDS["StableRequest"] in options.filter
def test_unknown_value(self, capsys):
with pytest.raises(SystemExit) as excinfo:
- self.parser.parse_args(['--filter', 'latest:foo'])
+ self.parser.parse_args(["--filter", "latest:foo"])
out, err = capsys.readouterr()
assert not out
assert "unknown checkset, check, or keyword: 'foo'" in err
@@ -73,11 +71,10 @@ class TestFilterArgs:
class TestCacheNegations:
-
@pytest.fixture(autouse=True)
def _create_argparser(self):
self.parser = arghparse.ArgumentParser()
- self.parser.add_argument('--cache', action=argparse_actions.CacheNegations)
+ self.parser.add_argument("--cache", action=argparse_actions.CacheNegations)
self.caches = [x.type for x in CachedAddon.caches.values()]
def test_defaults(self):
@@ -86,27 +83,27 @@ class TestCacheNegations:
def test_unknown(self, capsys):
with pytest.raises(SystemExit) as excinfo:
- self.parser.parse_args(['--cache', 'foo'])
+ self.parser.parse_args(["--cache", "foo"])
out, err = capsys.readouterr()
assert not out
assert "unknown cache type: 'foo'" in err
assert excinfo.value.code == 2
def test_all(self):
- for arg in ('True', 'true', 'Yes', 'yes', 'Y', 'y'):
- options = self.parser.parse_args(['--cache', arg])
+ for arg in ("True", "true", "Yes", "yes", "Y", "y"):
+ options = self.parser.parse_args(["--cache", arg])
for k, v in options.cache.items():
assert v is True
def test_none(self):
- for arg in ('False', 'false', 'No', 'no', 'N', 'n'):
- options = self.parser.parse_args(['--cache', arg])
+ for arg in ("False", "false", "No", "no", "N", "n"):
+ options = self.parser.parse_args(["--cache", arg])
for k, v in options.cache.items():
assert v is False
def test_enabled(self):
cache = self.caches[random.randrange(len(self.caches))]
- options = self.parser.parse_args(['--cache', cache])
+ options = self.parser.parse_args(["--cache", cache])
for k, v in options.cache.items():
if k == cache:
assert v is True
@@ -115,7 +112,7 @@ class TestCacheNegations:
def test_disabled(self):
cache = self.caches[random.randrange(len(self.caches))]
- options = self.parser.parse_args([f'--cache=-{cache}'])
+ options = self.parser.parse_args([f"--cache=-{cache}"])
for k, v in options.cache.items():
if k == cache:
assert v is False
@@ -124,66 +121,70 @@ class TestCacheNegations:
class TestChecksetArgs:
-
@pytest.fixture(autouse=True)
def _setup(self, tool, tmp_path):
self.tool = tool
- self.cache_dir = str(tmp_path / '.cache')
- self.config = str(tmp_path / 'config')
- self.args = ['scan', '--cache-dir', self.cache_dir]
+ self.cache_dir = str(tmp_path / ".cache")
+ self.config = str(tmp_path / "config")
+ self.args = ["scan", "--cache-dir", self.cache_dir]
def test_unknown(self, capsys):
- for opt in ('-C', '--checksets'):
+ for opt in ("-C", "--checksets"):
with pytest.raises(SystemExit) as excinfo:
- self.tool.parse_args(self.args + [opt, 'foo'])
+ self.tool.parse_args(self.args + [opt, "foo"])
out, err = capsys.readouterr()
assert not out
assert "unknown checkset: 'foo'" in err
assert excinfo.value.code == 2
def test_aliases(self):
- for opt in ('-C', '--checksets'):
+ for opt in ("-C", "--checksets"):
# net
- options, _ = self.tool.parse_args(self.args + [opt, 'net'])
+ options, _ = self.tool.parse_args(self.args + [opt, "net"])
network_checks = [
- c for c, v in objects.CHECKS.items() if issubclass(v, checks.NetworkCheck)]
+ c for c, v in objects.CHECKS.items() if issubclass(v, checks.NetworkCheck)
+ ]
assert options.selected_checks == set(network_checks)
# all
- options, _ = self.tool.parse_args(self.args + [opt, 'all'])
+ options, _ = self.tool.parse_args(self.args + [opt, "all"])
assert options.selected_checks == set(objects.CHECKS)
def test_sets(self, capsys):
- with open(self.config, 'w') as f:
- f.write(textwrap.dedent("""\
- [CHECKSETS]
- set1=StableRequest
- set2=-StableRequest
- set3=SourcingCheck,-InvalidEapi,-InvalidSlot
- bad=foo
- """))
+ with open(self.config, "w") as f:
+ f.write(
+ textwrap.dedent(
+ """\
+ [CHECKSETS]
+ set1=StableRequest
+ set2=-StableRequest
+ set3=SourcingCheck,-InvalidEapi,-InvalidSlot
+ bad=foo
+ """
+ )
+ )
configs = [self.config]
- with patch('pkgcheck.cli.ConfigFileParser.default_configs', configs):
- for opt in ('-C', '--checksets'):
+ with patch("pkgcheck.cli.ConfigFileParser.default_configs", configs):
+ for opt in ("-C", "--checksets"):
# enabled keyword
- for arg in ('set1', '-set2'):
- options, _ = self.tool.parse_args(self.args + [f'{opt}={arg}'])
- assert options.filtered_keywords == {objects.KEYWORDS['StableRequest']}
- assert options.enabled_checks == {objects.CHECKS['StableRequestCheck']}
+ for arg in ("set1", "-set2"):
+ options, _ = self.tool.parse_args(self.args + [f"{opt}={arg}"])
+ assert options.filtered_keywords == {objects.KEYWORDS["StableRequest"]}
+ assert options.enabled_checks == {objects.CHECKS["StableRequestCheck"]}
# disabled keyword
- for arg in ('-set1', 'set2'):
- options, _ = self.tool.parse_args(self.args + [f'{opt}={arg}'])
- assert objects.KEYWORDS['StableRequest'] not in options.filtered_keywords
+ for arg in ("-set1", "set2"):
+ options, _ = self.tool.parse_args(self.args + [f"{opt}={arg}"])
+ assert objects.KEYWORDS["StableRequest"] not in options.filtered_keywords
# check/keywords mixture
- options, _ = self.tool.parse_args(self.args + [f'{opt}=set3'])
- assert options.filtered_keywords == {objects.KEYWORDS['SourcingError']}
- assert options.enabled_checks == {objects.CHECKS['SourcingCheck']}
+ options, _ = self.tool.parse_args(self.args + [f"{opt}=set3"])
+ assert options.filtered_keywords == {objects.KEYWORDS["SourcingError"]}
+ assert options.enabled_checks == {objects.CHECKS["SourcingCheck"]}
# unknown value
with pytest.raises(SystemExit) as excinfo:
- self.tool.parse_args(self.args + [f'{opt}=bad'])
+ self.tool.parse_args(self.args + [f"{opt}=bad"])
out, err = capsys.readouterr()
assert not out
assert "'bad' checkset, unknown check or keyword: 'foo'" in err
@@ -191,173 +192,167 @@ class TestChecksetArgs:
class TestScopeArgs:
-
@pytest.fixture(autouse=True)
def _setup(self, tool, tmp_path):
self.tool = tool
self.cache_dir = str(tmp_path)
- self.args = ['scan', '--cache-dir', self.cache_dir]
+ self.args = ["scan", "--cache-dir", self.cache_dir]
def test_unknown_scope(self, capsys):
- for opt in ('-s', '--scopes'):
+ for opt in ("-s", "--scopes"):
with pytest.raises(SystemExit) as excinfo:
- options, _ = self.tool.parse_args(self.args + [opt, 'foo'])
+ options, _ = self.tool.parse_args(self.args + [opt, "foo"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
- err = err.strip().split('\n')
+ err = err.strip().split("\n")
assert "unknown scope: 'foo'" in err[-1]
def test_missing_scope(self, capsys):
- for opt in ('-s', '--scopes'):
+ for opt in ("-s", "--scopes"):
with pytest.raises(SystemExit) as excinfo:
options, _ = self.tool.parse_args(self.args + [opt])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
- err = err.strip().split('\n')
- assert err[0] == (
- 'pkgcheck scan: error: argument -s/--scopes: expected one argument')
+ err = err.strip().split("\n")
+ assert err[0] == ("pkgcheck scan: error: argument -s/--scopes: expected one argument")
def test_disabled(self):
- options, _ = self.tool.parse_args(self.args + ['--scopes=-eclass'])
+ options, _ = self.tool.parse_args(self.args + ["--scopes=-eclass"])
assert options.selected_scopes == frozenset()
def test_enabled(self):
- options, _ = self.tool.parse_args(self.args + ['--scopes', 'repo'])
- assert options.selected_scopes == frozenset([base.scopes['repo']])
+ options, _ = self.tool.parse_args(self.args + ["--scopes", "repo"])
+ assert options.selected_scopes == frozenset([base.scopes["repo"]])
class TestCheckArgs:
-
@pytest.fixture(autouse=True)
def _setup(self, tool, tmp_path):
self.tool = tool
self.cache_dir = str(tmp_path)
- self.args = ['scan', '--cache-dir', self.cache_dir]
+ self.args = ["scan", "--cache-dir", self.cache_dir]
def test_unknown_check(self, capsys):
- for opt in ('-c', '--checks'):
+ for opt in ("-c", "--checks"):
with pytest.raises(SystemExit) as excinfo:
- options, _ = self.tool.parse_args(self.args + [opt, 'foo'])
+ options, _ = self.tool.parse_args(self.args + [opt, "foo"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
- err = err.strip().split('\n')
+ err = err.strip().split("\n")
assert "unknown check: 'foo'" in err[-1]
def test_token_errors(self):
- for opt in ('-c', '--checks'):
- for operation in ('-', '+'):
+ for opt in ("-c", "--checks"):
+ for operation in ("-", "+"):
with pytest.raises(argparse.ArgumentTypeError) as excinfo:
- options, _ = self.tool.parse_args(self.args + [f'{opt}={operation}'])
- assert 'without a token' in str(excinfo.value)
+ options, _ = self.tool.parse_args(self.args + [f"{opt}={operation}"])
+ assert "without a token" in str(excinfo.value)
def test_missing_check(self, capsys):
- for opt in ('-c', '--checks'):
+ for opt in ("-c", "--checks"):
with pytest.raises(SystemExit) as excinfo:
options, _ = self.tool.parse_args(self.args + [opt])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
- err = err.strip().split('\n')
- assert err[0] == (
- 'pkgcheck scan: error: argument -c/--checks: expected one argument')
+ err = err.strip().split("\n")
+ assert err[0] == ("pkgcheck scan: error: argument -c/--checks: expected one argument")
def test_neutral(self):
- for opt in ('-c', '--checks'):
- options, _ = self.tool.parse_args(self.args + [opt, 'UnusedLicensesCheck'])
- assert options.selected_checks == frozenset(['UnusedLicensesCheck'])
+ for opt in ("-c", "--checks"):
+ options, _ = self.tool.parse_args(self.args + [opt, "UnusedLicensesCheck"])
+ assert options.selected_checks == frozenset(["UnusedLicensesCheck"])
def test_subtractive(self):
- for opt in ('-c', '--checks'):
+ for opt in ("-c", "--checks"):
check = list(objects.CHECKS)[random.randrange(len(objects.CHECKS))]
- options, _ = self.tool.parse_args(self.args + [f'{opt}=-{check}'])
+ options, _ = self.tool.parse_args(self.args + [f"{opt}=-{check}"])
assert options.selected_checks == frozenset()
def test_additive(self):
- for opt in ('-c', '--checks'):
+ for opt in ("-c", "--checks"):
options, _ = self.tool.parse_args(self.args)
assert issubclass(checks.perl.PerlCheck, checks.OptionalCheck)
assert checks.perl.PerlCheck not in set(options.enabled_checks)
- options, _ = self.tool.parse_args(self.args + [f'{opt}=+PerlCheck'])
+ options, _ = self.tool.parse_args(self.args + [f"{opt}=+PerlCheck"])
assert checks.perl.PerlCheck in set(options.enabled_checks)
- assert options.selected_checks == frozenset(['PerlCheck'])
+ assert options.selected_checks == frozenset(["PerlCheck"])
class TestKeywordArgs:
-
@pytest.fixture(autouse=True)
def _setup(self, tool, tmp_path):
self.tool = tool
self.cache_dir = str(tmp_path)
- self.args = ['scan', '--cache-dir', self.cache_dir]
+ self.args = ["scan", "--cache-dir", self.cache_dir]
def test_unknown_keyword(self, capsys):
- for opt in ('-k', '--keywords'):
+ for opt in ("-k", "--keywords"):
with pytest.raises(SystemExit) as excinfo:
- options, _ = self.tool.parse_args(self.args + [opt, 'foo'])
+ options, _ = self.tool.parse_args(self.args + [opt, "foo"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
- err = err.strip().split('\n')
+ err = err.strip().split("\n")
assert "unknown keyword: 'foo'" in err[-1]
def test_missing_keyword(self, capsys):
- for opt in ('-k', '--keywords'):
+ for opt in ("-k", "--keywords"):
with pytest.raises(SystemExit) as excinfo:
options, _ = self.tool.parse_args(self.args + [opt])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
- err = err.strip().split('\n')
- assert err[0] == (
- 'pkgcheck scan: error: argument -k/--keywords: expected one argument')
+ err = err.strip().split("\n")
+ assert err[0] == ("pkgcheck scan: error: argument -k/--keywords: expected one argument")
def test_enabled(self):
- for opt in ('-k', '--keywords'):
- options, _ = self.tool.parse_args(self.args + [opt, 'UnusedLicenses'])
- assert options.selected_keywords == frozenset(['UnusedLicenses'])
- assert options.filtered_keywords == frozenset([objects.KEYWORDS['UnusedLicenses']])
+ for opt in ("-k", "--keywords"):
+ options, _ = self.tool.parse_args(self.args + [opt, "UnusedLicenses"])
+ assert options.selected_keywords == frozenset(["UnusedLicenses"])
+ assert options.filtered_keywords == frozenset([objects.KEYWORDS["UnusedLicenses"]])
assert options.enabled_checks == {checks.repo_metadata.UnusedLicensesCheck}
def test_disabled_check(self):
"""Disabling all keywords for a given check also disables the check."""
- for opt in ('-k', '--keywords'):
+ for opt in ("-k", "--keywords"):
default_checks = set(objects.CHECKS.default.values())
default_keywords = set().union(*(v.known_results for v in default_checks))
keyword = checks.repo_metadata.UnusedLicenses
check = checks.repo_metadata.UnusedLicensesCheck
assert check in default_checks
assert check.known_results == frozenset([keyword])
- options, _ = self.tool.parse_args(self.args + [f'{opt}=-UnusedLicenses'])
+ options, _ = self.tool.parse_args(self.args + [f"{opt}=-UnusedLicenses"])
assert options.selected_keywords == frozenset()
assert options.filtered_keywords == frozenset(default_keywords - {keyword})
assert check not in set(options.enabled_checks)
def test_disabled(self):
- for opt in ('-k', '--keywords'):
+ for opt in ("-k", "--keywords"):
default_keywords = set().union(
- *(v.known_results for v in objects.CHECKS.default.values()))
+ *(v.known_results for v in objects.CHECKS.default.values())
+ )
keyword_cls = list(default_keywords)[random.randrange(len(default_keywords))]
keyword = keyword_cls.__name__
- options, _ = self.tool.parse_args(self.args + [f'{opt}=-{keyword}'])
+ options, _ = self.tool.parse_args(self.args + [f"{opt}=-{keyword}"])
assert options.selected_keywords == frozenset()
assert options.filtered_keywords == frozenset(default_keywords - {keyword_cls})
def test_aliases(self):
- for opt in ('-k', '--keywords'):
- for alias in ('error', 'warning', 'info'):
+ for opt in ("-k", "--keywords"):
+ for alias in ("error", "warning", "info"):
options, _ = self.tool.parse_args(self.args + [opt, alias])
alias_keywords = list(getattr(objects.KEYWORDS, alias))
assert options.selected_keywords == frozenset(alias_keywords)
class TestExitArgs:
-
@pytest.fixture(autouse=True)
def _setup(self, tool, tmp_path):
self.tool = tool
self.cache_dir = str(tmp_path)
- self.args = ['scan', '--cache-dir', self.cache_dir]
+ self.args = ["scan", "--cache-dir", self.cache_dir]
def test_unknown(self, capsys):
with pytest.raises(SystemExit) as excinfo:
- self.tool.parse_args(self.args + ['--exit', 'foo'])
+ self.tool.parse_args(self.args + ["--exit", "foo"])
out, err = capsys.readouterr()
assert not out
assert "unknown checkset, check, or keyword: 'foo'" in err
@@ -368,22 +363,22 @@ class TestExitArgs:
assert options.exit_keywords == ()
def test_default(self):
- options, _ = self.tool.parse_args(self.args + ['--exit'])
+ options, _ = self.tool.parse_args(self.args + ["--exit"])
assert options.exit_keywords == frozenset(objects.KEYWORDS.error.values())
def test_enabled(self):
keyword = list(objects.KEYWORDS)[random.randrange(len(objects.KEYWORDS))]
objs = (objects.KEYWORDS[x] for x in objects.KEYWORDS.aliases.get(keyword, [keyword]))
- options, _ = self.tool.parse_args(self.args + ['--exit', keyword])
+ options, _ = self.tool.parse_args(self.args + ["--exit", keyword])
assert options.exit_keywords == frozenset(objs)
def test_disabled(self):
keyword = list(objects.KEYWORDS)[random.randrange(len(objects.KEYWORDS))]
objs = (objects.KEYWORDS[x] for x in objects.KEYWORDS.aliases.get(keyword, [keyword]))
- options, _ = self.tool.parse_args(self.args + [f'--exit=-{keyword}'])
+ options, _ = self.tool.parse_args(self.args + [f"--exit=-{keyword}"])
assert options.exit_keywords == frozenset(objects.KEYWORDS.error.values()) - frozenset(objs)
def test_aliases(self):
- for alias in ('error', 'warning', 'info'):
- options, _ = self.tool.parse_args(self.args + [f'--exit={alias}'])
+ for alias in ("error", "warning", "info"):
+ options, _ = self.tool.parse_args(self.args + [f"--exit={alias}"])
assert options.exit_keywords == frozenset(getattr(objects.KEYWORDS, alias).values())
diff --git a/tests/scripts/test_pkgcheck.py b/tests/scripts/test_pkgcheck.py
index 8478a746..49e2f8b6 100644
--- a/tests/scripts/test_pkgcheck.py
+++ b/tests/scripts/test_pkgcheck.py
@@ -11,27 +11,27 @@ def test_script_run(capsys):
"""Test regular code path for running scripts."""
script = partial(run, project)
- with patch(f'{project}.scripts.import_module') as import_module:
+ with patch(f"{project}.scripts.import_module") as import_module:
import_module.side_effect = ImportError("baz module doesn't exist")
# default error path when script import fails
- with patch('sys.argv', [project]):
+ with patch("sys.argv", [project]):
with pytest.raises(SystemExit) as excinfo:
script()
assert excinfo.value.code == 1
out, err = capsys.readouterr()
- err = err.strip().split('\n')
+ err = err.strip().split("\n")
assert len(err) == 3
assert err[0] == "Failed importing: baz module doesn't exist!"
assert err[1].startswith(f"Verify that {project} and its deps")
assert err[2] == "Add --debug to the commandline for a traceback."
# running with --debug should raise an ImportError when there are issues
- with patch('sys.argv', [project, '--debug']):
+ with patch("sys.argv", [project, "--debug"]):
with pytest.raises(ImportError):
script()
out, err = capsys.readouterr()
- err = err.strip().split('\n')
+ err = err.strip().split("\n")
assert len(err) == 2
assert err[0] == "Failed importing: baz module doesn't exist!"
assert err[1].startswith(f"Verify that {project} and its deps")
@@ -44,7 +44,7 @@ class TestPkgcheck:
script = partial(run, project)
def test_version(self, capsys):
- with patch('sys.argv', [project, '--version']):
+ with patch("sys.argv", [project, "--version"]):
with pytest.raises(SystemExit) as excinfo:
self.script()
assert excinfo.value.code == 0
diff --git a/tests/scripts/test_pkgcheck_cache.py b/tests/scripts/test_pkgcheck_cache.py
index 0414b4ec..023a3161 100644
--- a/tests/scripts/test_pkgcheck_cache.py
+++ b/tests/scripts/test_pkgcheck_cache.py
@@ -14,95 +14,95 @@ class TestPkgcheckCache:
@pytest.fixture(autouse=True)
def _setup(self, testconfig, tmp_path):
self.cache_dir = str(tmp_path)
- self.args = [
- project, '--config', testconfig,
- 'cache', '--cache-dir', self.cache_dir]
+ self.args = [project, "--config", testconfig, "cache", "--cache-dir", self.cache_dir]
def test_cache_profiles(self, capsys):
# force standalone repo profiles cache regen
- for args in (['-u', '-f'], ['--update', '--force']):
- with patch('sys.argv', self.args + args + ['-t', 'profiles']):
+ for args in (["-u", "-f"], ["--update", "--force"]):
+ with patch("sys.argv", self.args + args + ["-t", "profiles"]):
with pytest.raises(SystemExit):
self.script()
# verify the profiles cache shows up
- with patch('sys.argv', self.args):
+ with patch("sys.argv", self.args):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not err
out = out.strip().splitlines()
- assert out[-1].startswith('standalone-')
+ assert out[-1].startswith("standalone-")
assert excinfo.value.code == 0
# pretend to remove it
- for arg in ('-n', '--dry-run'):
- with patch('sys.argv', self.args + [arg] + ['-Rt', 'profiles']):
+ for arg in ("-n", "--dry-run"):
+ with patch("sys.argv", self.args + [arg] + ["-Rt", "profiles"]):
with pytest.raises(SystemExit):
self.script()
out, err = capsys.readouterr()
- assert err == ''
- assert out.startswith(f'Would remove {self.cache_dir}')
+ assert err == ""
+ assert out.startswith(f"Would remove {self.cache_dir}")
# fail to remove it
- for arg in ('-R', '--remove'):
- with patch('pkgcheck.addons.caches.os.unlink') as unlink, \
- patch('sys.argv', self.args + [arg] + ['-t', 'profiles']):
- unlink.side_effect = IOError('bad perms')
+ for arg in ("-R", "--remove"):
+ with patch("pkgcheck.addons.caches.os.unlink") as unlink, patch(
+ "sys.argv", self.args + [arg] + ["-t", "profiles"]
+ ):
+ unlink.side_effect = IOError("bad perms")
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not out
assert os.listdir(self.cache_dir)
- assert err.startswith('pkgcheck cache: error: failed removing profiles cache')
+ assert err.startswith("pkgcheck cache: error: failed removing profiles cache")
assert excinfo.value.code == 2
# actually remove it
- for arg in ('-R', '--remove'):
- with patch('sys.argv', self.args + [arg] + ['-t', 'profiles']):
+ for arg in ("-R", "--remove"):
+ with patch("sys.argv", self.args + [arg] + ["-t", "profiles"]):
with pytest.raises(SystemExit):
self.script()
# verify it's gone
- with patch('sys.argv', self.args):
+ with patch("sys.argv", self.args):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
- assert (out, err) == ('', '')
+ assert (out, err) == ("", "")
assert excinfo.value.code == 0
def test_cache_forced_removal(self, capsys):
# force standalone repo profiles cache regen
- with patch('sys.argv', self.args + ['-uf']):
+ with patch("sys.argv", self.args + ["-uf"]):
with pytest.raises(SystemExit):
self.script()
# fail to forcibly remove all
- with patch('pkgcheck.addons.caches.shutil.rmtree') as rmtree, \
- patch('sys.argv', self.args + ['-Rf']):
- rmtree.side_effect = IOError('bad perms')
+ with patch("pkgcheck.addons.caches.shutil.rmtree") as rmtree, patch(
+ "sys.argv", self.args + ["-Rf"]
+ ):
+ rmtree.side_effect = IOError("bad perms")
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not out
- assert err.strip() == 'pkgcheck cache: error: failed removing cache dir: bad perms'
+ assert err.strip() == "pkgcheck cache: error: failed removing cache dir: bad perms"
assert excinfo.value.code == 2
# actually forcibly remove all
- with patch('sys.argv', self.args + ['-Rf']):
+ with patch("sys.argv", self.args + ["-Rf"]):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
- assert (out, err) == ('', '')
+ assert (out, err) == ("", "")
assert excinfo.value.code == 0
# cache dir has been entirely blown away
assert not os.path.exists(self.cache_dir)
# forcing removal again does nothing
- with patch('sys.argv', self.args + ['-Rf']):
+ with patch("sys.argv", self.args + ["-Rf"]):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
- assert (out, err) == ('', '')
+ assert (out, err) == ("", "")
assert excinfo.value.code == 0
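
Two more behaviors show up in this file: a wrapped list that fits within 88 columns and carries no trailing comma is joined back onto a single line, and a backslash-continued multi-manager with statement is reflowed by breaking inside a call's argument list, since black at this point did not parenthesize context managers. A sketch of both; the names inside the sources are stand-ins copied from the hunks and only parsed:

    import black

    wrapped_list = (
        "self.args = [\n"
        "    project, '--config', testconfig,\n"
        "    'cache', '--cache-dir', self.cache_dir]\n"
    )
    multi_with = (
        "with patch('pkgcheck.addons.caches.os.unlink') as unlink, \\\n"
        "        patch('sys.argv', self.args + [arg] + ['-t', 'profiles']):\n"
        "    pass\n"
    )
    # The list collapses to one line; the `with` loses its backslash.
    print(black.format_str(wrapped_list, mode=black.Mode()))
    print(black.format_str(multi_with, mode=black.Mode()))
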
diff --git a/tests/scripts/test_pkgcheck_ci.py b/tests/scripts/test_pkgcheck_ci.py
index 2ac21d7c..bc0d9cbf 100644
--- a/tests/scripts/test_pkgcheck_ci.py
+++ b/tests/scripts/test_pkgcheck_ci.py
@@ -10,61 +10,61 @@ from pkgcore.ebuild.cpv import VersionedCPV
class TestPkgcheckCi:
- script = partial(run, 'pkgcheck')
+ script = partial(run, "pkgcheck")
@pytest.fixture(autouse=True)
def _setup(self, testconfig, tmp_path):
self.cache_dir = str(tmp_path)
- base_args = ['--config', testconfig]
- self.scan_args = ['--config', 'no', '--cache-dir', self.cache_dir]
+ base_args = ["--config", testconfig]
+ self.scan_args = ["--config", "no", "--cache-dir", self.cache_dir]
# args for running pkgcheck like a script
- self.args = ['pkgcheck'] + base_args + ['ci'] + self.scan_args
+ self.args = ["pkgcheck"] + base_args + ["ci"] + self.scan_args
def test_empty_repo(self, capsys, repo):
- with patch('sys.argv', self.args + [repo.location]):
+ with patch("sys.argv", self.args + [repo.location]):
with pytest.raises(SystemExit) as excinfo:
self.script()
assert excinfo.value.code == 0
out, err = capsys.readouterr()
- assert out == err == ''
+ assert out == err == ""
def test_exit_status(self, repo):
# create good ebuild and another with an invalid EAPI
- repo.create_ebuild('cat/pkg-0')
- repo.create_ebuild('cat/pkg-1', eapi='-1')
+ repo.create_ebuild("cat/pkg-0")
+ repo.create_ebuild("cat/pkg-1", eapi="-1")
# exit status isn't enabled by default
- args = ['-r', repo.location]
- with patch('sys.argv', self.args + args):
+ args = ["-r", repo.location]
+ with patch("sys.argv", self.args + args):
with pytest.raises(SystemExit) as excinfo:
self.script()
assert excinfo.value.code == 0
# all error level results are flagged by default when enabled
- with patch('sys.argv', self.args + args + ['--exit']):
+ with patch("sys.argv", self.args + args + ["--exit"]):
with pytest.raises(SystemExit) as excinfo:
self.script()
assert excinfo.value.code == 1
# selective error results will only flag those specified
- with patch('sys.argv', self.args + args + ['--exit', 'InvalidSlot']):
+ with patch("sys.argv", self.args + args + ["--exit", "InvalidSlot"]):
with pytest.raises(SystemExit) as excinfo:
self.script()
assert excinfo.value.code == 0
- with patch('sys.argv', self.args + args + ['--exit', 'InvalidEapi']):
+ with patch("sys.argv", self.args + args + ["--exit", "InvalidEapi"]):
with pytest.raises(SystemExit) as excinfo:
self.script()
assert excinfo.value.code == 1
def test_failures(self, tmp_path, repo):
- repo.create_ebuild('cat/pkg-1', slot='')
- failures = str(tmp_path / 'failures.json')
- args = ['--failures', failures, '--exit', '-r', repo.location]
- with patch('sys.argv', self.args + args):
+ repo.create_ebuild("cat/pkg-1", slot="")
+ failures = str(tmp_path / "failures.json")
+ args = ["--failures", failures, "--exit", "-r", repo.location]
+ with patch("sys.argv", self.args + args):
with pytest.raises(SystemExit) as excinfo:
self.script()
assert excinfo.value.code == 1
with open(str(failures)) as f:
results = list(JsonStream.from_iter(f))
- pkg = VersionedCPV('cat/pkg-1')
- assert results == [InvalidSlot('slot', 'SLOT cannot be unset or empty', pkg=pkg)]
+ pkg = VersionedCPV("cat/pkg-1")
+ assert results == [InvalidSlot("slot", "SLOT cannot be unset or empty", pkg=pkg)]
diff --git a/tests/scripts/test_pkgcheck_replay.py b/tests/scripts/test_pkgcheck_replay.py
index 67ad3486..c2aeda66 100644
--- a/tests/scripts/test_pkgcheck_replay.py
+++ b/tests/scripts/test_pkgcheck_replay.py
@@ -18,70 +18,69 @@ class TestPkgcheckReplay:
@pytest.fixture(autouse=True)
def _setup(self, testconfig):
- self.args = [project, '--config', testconfig, 'replay']
+ self.args = [project, "--config", testconfig, "replay"]
def test_missing_file_arg(self, capsys):
- with patch('sys.argv', self.args):
+ with patch("sys.argv", self.args):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not out
- err = err.strip().split('\n')
+ err = err.strip().split("\n")
assert len(err) == 1
- assert err[0] == (
- 'pkgcheck replay: error: the following arguments are required: FILE')
+ assert err[0] == ("pkgcheck replay: error: the following arguments are required: FILE")
assert excinfo.value.code == 2
def test_replay(self, capsys):
- result = ProfileWarning('profile warning: foo')
+ result = ProfileWarning("profile warning: foo")
with tempfile.NamedTemporaryFile() as f:
out = PlainTextFormatter(f)
with JsonStream(out) as reporter:
reporter.report(result)
- with patch('sys.argv', self.args + ['-R', 'StrReporter', f.name]):
+ with patch("sys.argv", self.args + ["-R", "StrReporter", f.name]):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not err
- assert out == 'profile warning: foo\n'
+ assert out == "profile warning: foo\n"
assert excinfo.value.code == 0
    def test_corrupted_results(self, capsys):
- result = ProfileWarning('profile warning: foo')
+ result = ProfileWarning("profile warning: foo")
with tempfile.NamedTemporaryFile() as f:
out = PlainTextFormatter(f)
with JsonStream(out) as reporter:
reporter.report(result)
- f.write(b'corrupted')
+ f.write(b"corrupted")
f.seek(0)
- with patch('sys.argv', self.args + ['-R', 'StrReporter', f.name]):
+ with patch("sys.argv", self.args + ["-R", "StrReporter", f.name]):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
- assert 'corrupted results file' in err
+ assert "corrupted results file" in err
assert excinfo.value.code == 2
def test_invalid_file(self, capsys):
- with tempfile.NamedTemporaryFile(mode='wt') as f:
- f.write('invalid file')
+ with tempfile.NamedTemporaryFile(mode="wt") as f:
+ f.write("invalid file")
f.seek(0)
- with patch('sys.argv', self.args + ['-R', 'StrReporter', f.name]):
+ with patch("sys.argv", self.args + ["-R", "StrReporter", f.name]):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
- assert err.strip() == 'pkgcheck replay: error: invalid or unsupported replay file'
+ assert err.strip() == "pkgcheck replay: error: invalid or unsupported replay file"
assert excinfo.value.code == 2
def test_replay_pipe_stdin(self):
- script = pytest.REPO_ROOT / 'bin/pkgcheck'
- result = ProfileWarning('profile warning: foo')
+ script = pytest.REPO_ROOT / "bin/pkgcheck"
+ result = ProfileWarning("profile warning: foo")
with tempfile.NamedTemporaryFile() as f:
out = PlainTextFormatter(f)
with JsonStream(out) as reporter:
reporter.report(result)
f.seek(0)
p = subprocess.run(
- [script, 'replay', '-R', 'StrReporter', '-'],
- stdin=f, stdout=subprocess.PIPE)
- assert p.stdout.decode() == 'profile warning: foo\n'
+ [script, "replay", "-R", "StrReporter", "-"], stdin=f, stdout=subprocess.PIPE
+ )
+ assert p.stdout.decode() == "profile warning: foo\n"
assert p.returncode == 0
diff --git a/tests/scripts/test_pkgcheck_scan.py b/tests/scripts/test_pkgcheck_scan.py
index c224d83a..bba0547d 100644
--- a/tests/scripts/test_pkgcheck_scan.py
+++ b/tests/scripts/test_pkgcheck_scan.py
@@ -28,36 +28,35 @@ from ..misc import Profile
class TestPkgcheckScanParseArgs:
-
def test_skipped_checks(self, tool):
- options, _ = tool.parse_args(['scan'])
+ options, _ = tool.parse_args(["scan"])
assert options.enabled_checks
# some checks should always be skipped by default
assert set(options.enabled_checks) != set(objects.CHECKS.values())
def test_enabled_check(self, tool):
- options, _ = tool.parse_args(['scan', '-c', 'PkgDirCheck'])
+ options, _ = tool.parse_args(["scan", "-c", "PkgDirCheck"])
assert options.enabled_checks == {checks_mod.pkgdir.PkgDirCheck}
def test_disabled_check(self, tool):
- options, _ = tool.parse_args(['scan'])
+ options, _ = tool.parse_args(["scan"])
assert checks_mod.pkgdir.PkgDirCheck in options.enabled_checks
- options, _ = tool.parse_args(['scan', '-c=-PkgDirCheck'])
+ options, _ = tool.parse_args(["scan", "-c=-PkgDirCheck"])
assert options.enabled_checks
assert checks_mod.pkgdir.PkgDirCheck not in options.enabled_checks
def test_targets(self, tool):
- options, _ = tool.parse_args(['scan', 'dev-util/foo'])
- assert list(options.restrictions) == [(base.package_scope, atom.atom('dev-util/foo'))]
+ options, _ = tool.parse_args(["scan", "dev-util/foo"])
+ assert list(options.restrictions) == [(base.package_scope, atom.atom("dev-util/foo"))]
def test_stdin_targets(self, tool):
- with patch('sys.stdin', StringIO('dev-util/foo')):
- options, _ = tool.parse_args(['scan', '-'])
- assert list(options.restrictions) == [(base.package_scope, atom.atom('dev-util/foo'))]
+ with patch("sys.stdin", StringIO("dev-util/foo")):
+ options, _ = tool.parse_args(["scan", "-"])
+ assert list(options.restrictions) == [(base.package_scope, atom.atom("dev-util/foo"))]
def test_invalid_targets(self, tool, capsys):
with pytest.raises(SystemExit) as excinfo:
- options, _ = tool.parse_args(['scan', 'dev-util/f$o'])
+ options, _ = tool.parse_args(["scan", "dev-util/f$o"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
err = err.strip()
@@ -65,91 +64,97 @@ class TestPkgcheckScanParseArgs:
def test_unknown_path_target(self, tool, capsys):
with pytest.raises(SystemExit) as excinfo:
- tool.parse_args(['scan', '/foo/bar'])
+ tool.parse_args(["scan", "/foo/bar"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
- err = err.strip().split('\n')
+ err = err.strip().split("\n")
assert err[-1].startswith(
- "pkgcheck scan: error: 'standalone' repo doesn't contain: '/foo/bar'")
+ "pkgcheck scan: error: 'standalone' repo doesn't contain: '/foo/bar'"
+ )
def test_target_repo_id(self, tool):
- options, _ = tool.parse_args(['scan', 'standalone'])
- assert options.target_repo.repo_id == 'standalone'
+ options, _ = tool.parse_args(["scan", "standalone"])
+ assert options.target_repo.repo_id == "standalone"
assert list(options.restrictions) == [(base.repo_scope, packages.AlwaysTrue)]
def test_target_dir_path(self, repo, tool):
- options, _ = tool.parse_args(['scan', repo.location])
- assert options.target_repo.repo_id == 'fake'
+ options, _ = tool.parse_args(["scan", repo.location])
+ assert options.target_repo.repo_id == "fake"
assert list(options.restrictions) == [(base.repo_scope, packages.AlwaysTrue)]
def test_target_dir_path_in_repo(self, repo, tool):
- path = pjoin(repo.location, 'profiles')
- options, _ = tool.parse_args(['scan', path])
- assert options.target_repo.repo_id == 'fake'
+ path = pjoin(repo.location, "profiles")
+ options, _ = tool.parse_args(["scan", path])
+ assert options.target_repo.repo_id == "fake"
assert list(options.restrictions) == [(base.profiles_scope, packages.AlwaysTrue)]
def test_target_dir_path_in_configured_repo(self, tool):
- options, _ = tool.parse_args(['scan', 'standalone'])
- path = pjoin(options.target_repo.location, 'profiles')
- options, _ = tool.parse_args(['scan', path])
- assert options.target_repo.repo_id == 'standalone'
+ options, _ = tool.parse_args(["scan", "standalone"])
+ path = pjoin(options.target_repo.location, "profiles")
+ options, _ = tool.parse_args(["scan", path])
+ assert options.target_repo.repo_id == "standalone"
assert list(options.restrictions) == [(base.profiles_scope, packages.AlwaysTrue)]
def test_target_non_repo_path(self, tool, capsys, tmp_path):
with pytest.raises(SystemExit) as excinfo:
- tool.parse_args(['scan', str(tmp_path)])
+ tool.parse_args(["scan", str(tmp_path)])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
assert not out
assert err.startswith(
- f"pkgcheck scan: error: 'standalone' repo doesn't contain: '{str(tmp_path)}'")
+ f"pkgcheck scan: error: 'standalone' repo doesn't contain: '{str(tmp_path)}'"
+ )
def test_target_invalid_repo(self, tool, capsys, make_repo):
- repo = make_repo(masters=['unknown'])
+ repo = make_repo(masters=["unknown"])
with pytest.raises(SystemExit) as excinfo:
- tool.parse_args(['scan', repo.location])
+ tool.parse_args(["scan", repo.location])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
assert not out
err = err.strip()
- assert err.startswith('pkgcheck scan: error: repo init failed')
+ assert err.startswith("pkgcheck scan: error: repo init failed")
assert err.endswith("has missing masters: 'unknown'")
def test_target_file_path(self, repo, tool):
- os.makedirs(pjoin(repo.location, 'dev-util', 'foo'))
- ebuild_path = pjoin(repo.location, 'dev-util', 'foo', 'foo-0.ebuild')
+ os.makedirs(pjoin(repo.location, "dev-util", "foo"))
+ ebuild_path = pjoin(repo.location, "dev-util", "foo", "foo-0.ebuild")
touch(ebuild_path)
- options, _ = tool.parse_args(['scan', ebuild_path])
+ options, _ = tool.parse_args(["scan", ebuild_path])
restrictions = [
- restricts.CategoryDep('dev-util'),
- restricts.PackageDep('foo'),
- restricts.VersionMatch('=', '0'),
+ restricts.CategoryDep("dev-util"),
+ restricts.PackageDep("foo"),
+ restricts.VersionMatch("=", "0"),
+ ]
+ assert list(options.restrictions) == [
+ (base.version_scope, packages.AndRestriction(*restrictions))
]
- assert list(options.restrictions) == [(base.version_scope, packages.AndRestriction(*restrictions))]
- assert options.target_repo.repo_id == 'fake'
+ assert options.target_repo.repo_id == "fake"
def test_target_package_dir_cwd(self, repo, tool):
- os.makedirs(pjoin(repo.location, 'dev-util', 'foo'))
- with chdir(pjoin(repo.location, 'dev-util', 'foo')):
- options, _ = tool.parse_args(['scan'])
- assert options.target_repo.repo_id == 'fake'
+ os.makedirs(pjoin(repo.location, "dev-util", "foo"))
+ with chdir(pjoin(repo.location, "dev-util", "foo")):
+ options, _ = tool.parse_args(["scan"])
+ assert options.target_repo.repo_id == "fake"
restrictions = [
- restricts.CategoryDep('dev-util'),
- restricts.PackageDep('foo'),
+ restricts.CategoryDep("dev-util"),
+ restricts.PackageDep("foo"),
+ ]
+ assert list(options.restrictions) == [
+ (base.package_scope, packages.AndRestriction(*restrictions))
]
- assert list(options.restrictions) == [(base.package_scope, packages.AndRestriction(*restrictions))]
def test_target_repo_dir_cwd(self, repo, tool):
with chdir(repo.location):
- options, _ = tool.parse_args(['scan'])
- assert options.target_repo.repo_id == 'fake'
+ options, _ = tool.parse_args(["scan"])
+ assert options.target_repo.repo_id == "fake"
assert list(options.restrictions) == [(base.repo_scope, packages.AlwaysTrue)]
def test_unknown_repo(self, tmp_path, capsys, tool):
- for opt in ('-r', '--repo'):
+ for opt in ("-r", "--repo"):
with pytest.raises(SystemExit) as excinfo:
with chdir(str(tmp_path)):
- options, _ = tool.parse_args(['scan', opt, 'foo'])
+ options, _ = tool.parse_args(["scan", opt, "foo"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
assert not out
@@ -158,27 +163,26 @@ class TestPkgcheckScanParseArgs:
)
def test_invalid_repo(self, tmp_path, capsys, tool):
- (tmp_path / 'foo').touch()
- for opt in ('-r', '--repo'):
+ (tmp_path / "foo").touch()
+ for opt in ("-r", "--repo"):
with pytest.raises(SystemExit) as excinfo:
with chdir(str(tmp_path)):
- options, _ = tool.parse_args(['scan', opt, 'foo'])
+ options, _ = tool.parse_args(["scan", opt, "foo"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
assert not out
- assert err.startswith(
- "pkgcheck scan: error: argument -r/--repo: repo init failed:")
+ assert err.startswith("pkgcheck scan: error: argument -r/--repo: repo init failed:")
def test_valid_repo(self, tool):
- for opt in ('-r', '--repo'):
- options, _ = tool.parse_args(['scan', opt, 'standalone'])
- assert options.target_repo.repo_id == 'standalone'
+ for opt in ("-r", "--repo"):
+ options, _ = tool.parse_args(["scan", opt, "standalone"])
+ assert options.target_repo.repo_id == "standalone"
assert list(options.restrictions) == [(base.repo_scope, packages.AlwaysTrue)]
def test_unknown_reporter(self, capsys, tool):
- for opt in ('-R', '--reporter'):
+ for opt in ("-R", "--reporter"):
with pytest.raises(SystemExit) as excinfo:
- options, _ = tool.parse_args(['scan', opt, 'foo'])
+ options, _ = tool.parse_args(["scan", opt, "foo"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
assert not out
@@ -187,161 +191,185 @@ class TestPkgcheckScanParseArgs:
def test_format_reporter(self, capsys, tool):
# missing --format
with pytest.raises(SystemExit) as excinfo:
- tool.parse_args(['scan', '-R', 'FormatReporter'])
+ tool.parse_args(["scan", "-R", "FormatReporter"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
- err = err.strip().split('\n')
- assert err[-1].endswith(
- "missing or empty --format option required by FormatReporter")
+ err = err.strip().split("\n")
+ assert err[-1].endswith("missing or empty --format option required by FormatReporter")
# missing -R FormatReporter
with pytest.raises(SystemExit) as excinfo:
- tool.parse_args(['scan', '--format', 'foo'])
+ tool.parse_args(["scan", "--format", "foo"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
- err = err.strip().split('\n')
- assert err[-1].endswith(
- "--format option is only valid when using FormatReporter")
+ err = err.strip().split("\n")
+ assert err[-1].endswith("--format option is only valid when using FormatReporter")
# properly set
- options, _ = tool.parse_args(
- ['scan', '-R', 'FormatReporter', '--format', 'foo'])
+ options, _ = tool.parse_args(["scan", "-R", "FormatReporter", "--format", "foo"])
def test_cwd(self, capsys, tool):
# regularly working
- options, _ = tool.parse_args(['scan'])
+ options, _ = tool.parse_args(["scan"])
assert options.cwd == os.getcwd()
# pretend the CWD was removed out from under us
- with patch('os.getcwd') as getcwd:
- getcwd.side_effect = FileNotFoundError('CWD is gone')
- options, _ = tool.parse_args(['scan'])
+ with patch("os.getcwd") as getcwd:
+ getcwd.side_effect = FileNotFoundError("CWD is gone")
+ options, _ = tool.parse_args(["scan"])
assert options.cwd == const.DATA_PATH
def test_eclass_target(self, fakerepo, tool):
- (eclass_dir := fakerepo / 'eclass').mkdir()
- (eclass_path := eclass_dir / 'foo.eclass').touch()
- options, _ = tool.parse_args(['scan', str(eclass_path)])
- assert list(options.restrictions) == [(base.eclass_scope, 'foo')]
+ (eclass_dir := fakerepo / "eclass").mkdir()
+ (eclass_path := eclass_dir / "foo.eclass").touch()
+ options, _ = tool.parse_args(["scan", str(eclass_path)])
+ assert list(options.restrictions) == [(base.eclass_scope, "foo")]
def test_profiles_target(self, fakerepo, tool):
- profiles_path = str(fakerepo / 'profiles')
- options, _ = tool.parse_args(['scan', profiles_path])
+ profiles_path = str(fakerepo / "profiles")
+ options, _ = tool.parse_args(["scan", profiles_path])
assert list(options.restrictions) == [(base.profiles_scope, packages.AlwaysTrue)]
def test_profiles_path_target_file(self, fakerepo, tool):
- (pkg_mask_path := fakerepo / 'profiles/package.mask').touch()
- options, _ = tool.parse_args(['scan', str(pkg_mask_path)])
+ (pkg_mask_path := fakerepo / "profiles/package.mask").touch()
+ options, _ = tool.parse_args(["scan", str(pkg_mask_path)])
assert list(options.restrictions) == [(base.profile_node_scope, str(pkg_mask_path))]
def test_profiles_path_target_dir(self, fakerepo, tool):
- (profile_dir := fakerepo / 'profiles/default').mkdir(parents=True)
- (pkg_mask_path := profile_dir / 'package.mask').touch()
- (pkg_use_path := profile_dir / 'package.use').touch()
- options, _ = tool.parse_args(['scan', str(profile_dir)])
- assert list(options.restrictions) == [(base.profile_node_scope, {str(pkg_mask_path), str(pkg_use_path)})]
+ (profile_dir := fakerepo / "profiles/default").mkdir(parents=True)
+ (pkg_mask_path := profile_dir / "package.mask").touch()
+ (pkg_use_path := profile_dir / "package.use").touch()
+ options, _ = tool.parse_args(["scan", str(profile_dir)])
+ assert list(options.restrictions) == [
+ (base.profile_node_scope, {str(pkg_mask_path), str(pkg_use_path)})
+ ]
def test_no_default_repo(self, tool, capsys):
- stubconfig = pjoin(pkgcore_const.DATA_PATH, 'stubconfig')
+ stubconfig = pjoin(pkgcore_const.DATA_PATH, "stubconfig")
with pytest.raises(SystemExit) as excinfo:
- tool.parse_args(['--config', stubconfig, 'scan'])
+ tool.parse_args(["--config", stubconfig, "scan"])
assert excinfo.value.code == 2
out, err = capsys.readouterr()
assert not out
assert err.strip() == "pkgcheck scan: error: no default repo found"
- @pytest.mark.parametrize(('makeopts', 'expected_jobs'), (
- ('', 4),
- ('-j1', 1),
- ('--jobs=6 -l 1', 6),
- ('--load 1', 4),
- ))
+ @pytest.mark.parametrize(
+ ("makeopts", "expected_jobs"),
+ (
+ ("", 4),
+ ("-j1", 1),
+ ("--jobs=6 -l 1", 6),
+ ("--load 1", 4),
+ ),
+ )
def test_makeopts_parsing(self, parser, makeopts, expected_jobs):
-        with patch('os.cpu_count', return_value=4), \
-                os_environ(MAKEOPTS=makeopts):
-            options = parser.parse_args(['scan'])
+        with patch("os.cpu_count", return_value=4), os_environ(MAKEOPTS=makeopts):
+            options = parser.parse_args(["scan"])
assert options.jobs == expected_jobs
assert options.tasks == 5 * expected_jobs
def test_no_color(self, parser, tmp_path):
- (config_file := tmp_path / 'config').write_text(textwrap.dedent('''\
- [DEFAULT]
- color = true
- '''))
+ (config_file := tmp_path / "config").write_text(
+ textwrap.dedent(
+ """\
+ [DEFAULT]
+ color = true
+ """
+ )
+ )
- args = ('scan', '--config', str(config_file))
- with os_environ('NOCOLOR'):
+ args = ("scan", "--config", str(config_file))
+ with os_environ("NOCOLOR"):
assert parser.parse_args(args).color is True
- with os_environ(NOCOLOR='1'):
+ with os_environ(NOCOLOR="1"):
# NOCOLOR overrides config file
assert parser.parse_args(args).color is False
# cmd line option overrides NOCOLOR
- assert parser.parse_args([*args, '--color', 'n']).color is False
- assert parser.parse_args([*args, '--color', 'y']).color is True
+ assert parser.parse_args([*args, "--color", "n"]).color is False
+ assert parser.parse_args([*args, "--color", "y"]).color is True
class TestPkgcheckScanParseConfigArgs:
-
@pytest.fixture(autouse=True)
def _setup(self, parser, tmp_path, repo):
self.parser = parser
self.repo = repo
- self.args = ['scan', '-r', repo.location]
+ self.args = ["scan", "-r", repo.location]
self.system_config = str(tmp_path / "system-config")
self.user_config = str(tmp_path / "user-config")
self.config = str(tmp_path / "custom-config")
def test_config_precedence(self):
configs = [self.system_config, self.user_config]
- with patch('pkgcheck.cli.ConfigFileParser.default_configs', configs):
- with open(self.system_config, 'w') as f:
- f.write(textwrap.dedent("""\
- [DEFAULT]
- jobs=1000
- """))
+ with patch("pkgcheck.cli.ConfigFileParser.default_configs", configs):
+ with open(self.system_config, "w") as f:
+ f.write(
+ textwrap.dedent(
+ """\
+ [DEFAULT]
+ jobs=1000
+ """
+ )
+ )
options = self.parser.parse_args(self.args)
assert options.jobs == 1000
# user config overrides system config
- with open(self.user_config, 'w') as f:
- f.write(textwrap.dedent("""\
- [DEFAULT]
- jobs=1001
- """))
+ with open(self.user_config, "w") as f:
+ f.write(
+ textwrap.dedent(
+ """\
+ [DEFAULT]
+ jobs=1001
+ """
+ )
+ )
options = self.parser.parse_args(self.args)
assert options.jobs == 1001
# repo config overrides user config
- with open(pjoin(self.repo.location, 'metadata', 'pkgcheck.conf'), 'w') as f:
- f.write(textwrap.dedent("""\
- [DEFAULT]
- jobs=1002
- """))
+ with open(pjoin(self.repo.location, "metadata", "pkgcheck.conf"), "w") as f:
+ f.write(
+ textwrap.dedent(
+ """\
+ [DEFAULT]
+ jobs=1002
+ """
+ )
+ )
options = self.parser.parse_args(self.args)
assert options.jobs == 1002
# custom config overrides user config
- with open(self.config, 'w') as f:
- f.write(textwrap.dedent("""\
- [DEFAULT]
- jobs=1003
- """))
- config_args = self.args + ['--config', self.config]
+ with open(self.config, "w") as f:
+ f.write(
+ textwrap.dedent(
+ """\
+ [DEFAULT]
+ jobs=1003
+ """
+ )
+ )
+ config_args = self.args + ["--config", self.config]
options = self.parser.parse_args(config_args)
assert options.jobs == 1003
# repo defaults override general defaults
- with open(self.config, 'a') as f:
- f.write(textwrap.dedent(f"""\
- [{self.repo.repo_id}]
- jobs=1004
- """))
+ with open(self.config, "a") as f:
+ f.write(
+ textwrap.dedent(
+ f"""\
+ [{self.repo.repo_id}]
+ jobs=1004
+ """
+ )
+ )
options = self.parser.parse_args(config_args)
assert options.jobs == 1004
# command line options override all config settings
- options = self.parser.parse_args(config_args + ['--jobs', '9999'])
+ options = self.parser.parse_args(config_args + ["--jobs", "9999"])
assert options.jobs == 9999
@@ -349,143 +377,146 @@ class TestPkgcheckScan:
script = staticmethod(partial(run, project))
- repos_data = pytest.REPO_ROOT / 'testdata/data/repos'
- repos_dir = pytest.REPO_ROOT / 'testdata/repos'
- repos = tuple(sorted(x.name for x in repos_data.iterdir() if x.name != 'network'))
+ repos_data = pytest.REPO_ROOT / "testdata/data/repos"
+ repos_dir = pytest.REPO_ROOT / "testdata/repos"
+ repos = tuple(sorted(x.name for x in repos_data.iterdir() if x.name != "network"))
_all_results = [
(cls, result)
for name, cls in sorted(objects.CHECKS.items())
if not issubclass(cls, checks_mod.NetworkCheck)
- for result in sorted(cls.known_results, key=attrgetter('__name__'))
+ for result in sorted(cls.known_results, key=attrgetter("__name__"))
]
@pytest.fixture(autouse=True)
def _setup(self, testconfig, tmp_path):
self.cache_dir = str(tmp_path)
- base_args = ['--config', testconfig]
+ base_args = ["--config", testconfig]
self.scan = partial(scan, base_args=base_args)
# args for running `pkgcheck scan` via API call
- self.scan_args = ['--config', 'no', '--cache-dir', self.cache_dir]
+ self.scan_args = ["--config", "no", "--cache-dir", self.cache_dir]
# args for running pkgcheck like a script
- self.args = [project] + base_args + ['scan'] + self.scan_args
+ self.args = [project] + base_args + ["scan"] + self.scan_args
def test_empty_repo(self, capsys, repo):
- with patch('sys.argv', self.args + [repo.location]):
+ with patch("sys.argv", self.args + [repo.location]):
with pytest.raises(SystemExit) as excinfo:
self.script()
assert excinfo.value.code == 0
out, err = capsys.readouterr()
- assert out == err == ''
+ assert out == err == ""
def test_no_matching_checks_scope(self, tool):
- options, _ = tool.parse_args(['scan', 'standalone'])
- path = pjoin(options.target_repo.location, 'profiles')
- error = 'no matching checks available for profiles scope'
+ options, _ = tool.parse_args(["scan", "standalone"])
+ path = pjoin(options.target_repo.location, "profiles")
+ error = "no matching checks available for profiles scope"
with pytest.raises(base.PkgcheckUserException, match=error):
- self.scan(self.scan_args + ['-c', 'PkgDirCheck', path])
+ self.scan(self.scan_args + ["-c", "PkgDirCheck", path])
def test_stdin_targets_with_no_args(self):
- with patch('sys.stdin', StringIO()):
- with pytest.raises(base.PkgcheckUserException, match='no targets'):
- self.scan(self.scan_args + ['-'])
+ with patch("sys.stdin", StringIO()):
+ with pytest.raises(base.PkgcheckUserException, match="no targets"):
+ self.scan(self.scan_args + ["-"])
def test_exit_status(self, repo):
# create good ebuild and another with an invalid EAPI
- repo.create_ebuild('newcat/pkg-0')
- repo.create_ebuild('newcat/pkg-1', eapi='-1')
+ repo.create_ebuild("newcat/pkg-0")
+ repo.create_ebuild("newcat/pkg-1", eapi="-1")
# exit status isn't enabled by default
- args = ['-r', repo.location]
- with patch('sys.argv', self.args + args):
+ args = ["-r", repo.location]
+ with patch("sys.argv", self.args + args):
with pytest.raises(SystemExit) as excinfo:
self.script()
assert excinfo.value.code == 0
# all error level results are flagged by default when enabled
- with patch('sys.argv', self.args + args + ['--exit']):
+ with patch("sys.argv", self.args + args + ["--exit"]):
with pytest.raises(SystemExit) as excinfo:
self.script()
assert excinfo.value.code == 1
# selective error results will only flag those specified
- with patch('sys.argv', self.args + args + ['--exit', 'InvalidSlot']):
+ with patch("sys.argv", self.args + args + ["--exit", "InvalidSlot"]):
with pytest.raises(SystemExit) as excinfo:
self.script()
assert excinfo.value.code == 0
- with patch('sys.argv', self.args + args + ['--exit', 'InvalidEapi']):
+ with patch("sys.argv", self.args + args + ["--exit", "InvalidEapi"]):
with pytest.raises(SystemExit) as excinfo:
self.script()
assert excinfo.value.code == 1
def test_filter_latest(self, make_repo):
- repo = make_repo(arches=['amd64'])
+ repo = make_repo(arches=["amd64"])
# create stub profile to suppress ArchesWithoutProfiles result
- repo.create_profiles([Profile('stub', 'amd64')])
+ repo.create_profiles([Profile("stub", "amd64")])
# create ebuild with unknown keywords
- repo.create_ebuild('cat/pkg-0', keywords=['unknown'], homepage='https://example.com')
+ repo.create_ebuild("cat/pkg-0", keywords=["unknown"], homepage="https://example.com")
# and a good ebuild for the latest version
- repo.create_ebuild('cat/pkg-1', keywords=['amd64'], homepage='https://example.com')
+ repo.create_ebuild("cat/pkg-1", keywords=["amd64"], homepage="https://example.com")
# results for old pkgs will be shown by default
- args = ['-r', repo.location]
- with patch('sys.argv', self.args + args):
+ args = ["-r", repo.location]
+ with patch("sys.argv", self.args + args):
results = list(self.scan(self.scan_args + args))
assert len(results) == 1
# but are ignored when running using the 'latest' filter
- for opt in ('-f', '--filter'):
- for arg in ('latest', 'latest:KeywordsCheck', 'latest:UnknownKeywords'):
+ for opt in ("-f", "--filter"):
+ for arg in ("latest", "latest:KeywordsCheck", "latest:UnknownKeywords"):
assert not list(self.scan(self.scan_args + args + [opt, arg]))
def test_scan_restrictions(self, repo):
# create two ebuilds with bad EAPIs
- repo.create_ebuild('cat/pkg-0', eapi='-1')
- repo.create_ebuild('cat/pkg-1', eapi='-1')
+ repo.create_ebuild("cat/pkg-0", eapi="-1")
+ repo.create_ebuild("cat/pkg-1", eapi="-1")
# matching version restriction returns a single result
- results = list(self.scan(self.scan_args + ['-r', repo.location, '=cat/pkg-0']))
- assert [x.version for x in results] == ['0']
+ results = list(self.scan(self.scan_args + ["-r", repo.location, "=cat/pkg-0"]))
+ assert [x.version for x in results] == ["0"]
# unmatching version restriction returns no results
- results = list(self.scan(self.scan_args + ['-r', repo.location, '=cat/pkg-2']))
+ results = list(self.scan(self.scan_args + ["-r", repo.location, "=cat/pkg-2"]))
assert not results
# matching package restriction returns two sorted results
- results = list(self.scan(self.scan_args + ['-r', repo.location, 'cat/pkg']))
- assert [x.version for x in results] == ['0', '1']
+ results = list(self.scan(self.scan_args + ["-r", repo.location, "cat/pkg"]))
+ assert [x.version for x in results] == ["0", "1"]
# unmatching package restriction returns no results
- results = list(self.scan(self.scan_args + ['-r', repo.location, 'cat/unknown']))
+ results = list(self.scan(self.scan_args + ["-r", repo.location, "cat/unknown"]))
assert not results
def test_explict_skip_check(self):
"""SkipCheck exceptions are raised when triggered for explicitly enabled checks."""
- error = 'network checks not enabled'
+ error = "network checks not enabled"
with pytest.raises(base.PkgcheckException, match=error):
- self.scan(self.scan_args + ['-C', 'net'])
+ self.scan(self.scan_args + ["-C", "net"])
def test_cache_disabled_skip_check(self):
"""SkipCheck exceptions are raised when enabled checks require disabled cache types."""
- args = ['--cache=-git', '-c', 'StableRequestCheck']
- error = 'StableRequestCheck: git cache support required'
+ args = ["--cache=-git", "-c", "StableRequestCheck"]
+ error = "StableRequestCheck: git cache support required"
with pytest.raises(base.PkgcheckException, match=error):
self.scan(self.scan_args + args)
- @pytest.mark.parametrize('module', (
- pytest.param('pkgcheck.pipeline.UnversionedSource', id='producer'),
- pytest.param('pkgcheck.runners.SyncCheckRunner.run', id='consumer'),
- ))
+ @pytest.mark.parametrize(
+ "module",
+ (
+ pytest.param("pkgcheck.pipeline.UnversionedSource", id="producer"),
+ pytest.param("pkgcheck.runners.SyncCheckRunner.run", id="consumer"),
+ ),
+ )
def test_pipeline_exceptions(self, module):
"""Test checkrunner pipeline against unhandled exceptions."""
with patch(module) as faked:
- faked.side_effect = Exception('pipeline failed')
- with pytest.raises(base.PkgcheckException, match='Exception: pipeline failed'):
+ faked.side_effect = Exception("pipeline failed")
+ with pytest.raises(base.PkgcheckException, match="Exception: pipeline failed"):
list(self.scan(self.scan_args))
# nested mapping of repos to checks/keywords they cover
_checks = defaultdict(lambda: defaultdict(set))
- @pytest.mark.parametrize('repo', repos)
+ @pytest.mark.parametrize("repo", repos)
def test_scan_repo_data(self, repo):
"""Make sure the test data is up to date check/result naming wise."""
for check in (self.repos_data / repo).iterdir():
@@ -506,19 +537,19 @@ class TestPkgcheckScan:
_results = {}
_verbose_results = {}
- @pytest.mark.parametrize('repo', repos)
+ @pytest.mark.parametrize("repo", repos)
def test_scan_repo(self, repo, tmp_path, verbosity=0):
"""Scan a target repo, saving results for verification."""
repo_dir = self.repos_dir / repo
# run all existing triggers
triggers = [
- pjoin(root, 'trigger.sh')
+ pjoin(root, "trigger.sh")
for root, _dirs, files in os.walk(self.repos_data / repo)
- if 'trigger.sh' in files
+ if "trigger.sh" in files
]
if triggers:
- triggered_repo = tmp_path / f'triggered-{repo}'
+ triggered_repo = tmp_path / f"triggered-{repo}"
shutil.copytree(repo_dir, triggered_repo)
for trigger in triggers:
self._script(trigger, triggered_repo)
@@ -526,19 +557,19 @@ class TestPkgcheckScan:
if repo not in self._checks:
self.test_scan_repo_data(repo)
- args = (['-v'] * verbosity) + ['-r', str(repo_dir), '-c', ','.join(self._checks[repo])]
+ args = (["-v"] * verbosity) + ["-r", str(repo_dir), "-c", ",".join(self._checks[repo])]
# add any defined extra repo args
try:
- args.extend(shlex.split((repo_dir / 'metadata/pkgcheck-args').read_text()))
+ args.extend(shlex.split((repo_dir / "metadata/pkgcheck-args").read_text()))
except FileNotFoundError:
pass
results = []
for result in self.scan(self.scan_args + args):
# ignore results generated from stubs
- stubs = (getattr(result, x, '') for x in ('category', 'package'))
- if any(x.startswith('stub') for x in stubs):
+ stubs = (getattr(result, x, "") for x in ("category", "package"))
+ if any(x.startswith("stub") for x in stubs):
continue
results.append(result)
@@ -549,7 +580,7 @@ class TestPkgcheckScan:
self._results[repo] = set(results)
assert len(results) == len(self._results[repo])
- @pytest.mark.parametrize('repo', repos)
+ @pytest.mark.parametrize("repo", repos)
def test_scan_repo_verbose(self, repo, tmp_path):
"""Scan a target repo in verbose mode, saving results for verification."""
return self.test_scan_repo(repo, tmp_path, verbosity=1)
@@ -572,7 +603,7 @@ class TestPkgcheckScan:
output = f.read().decode()
return output
- @pytest.mark.parametrize('repo', repos)
+ @pytest.mark.parametrize("repo", repos)
def test_scan_verify(self, repo, tmp_path):
"""Run pkgcheck against test pkgs in bundled repo, verifying result output."""
results = set()
@@ -584,15 +615,19 @@ class TestPkgcheckScan:
for check, keywords in self._checks[repo].items():
for keyword in keywords:
# verify the expected results were seen during the repo scans
- expected_results = self._get_results(f'{repo}/{check}/{keyword}/expected.json')
- assert expected_results, 'regular results must always exist'
- assert self._render_results(expected_results), 'failed rendering results'
+ expected_results = self._get_results(f"{repo}/{check}/{keyword}/expected.json")
+ assert expected_results, "regular results must always exist"
+ assert self._render_results(expected_results), "failed rendering results"
results.update(expected_results)
# when expected verbose results exist use them, otherwise fallback to using the regular ones
- expected_verbose_results = self._get_results(f'{repo}/{check}/{keyword}/expected-verbose.json')
+ expected_verbose_results = self._get_results(
+ f"{repo}/{check}/{keyword}/expected-verbose.json"
+ )
if expected_verbose_results:
- assert self._render_results(expected_verbose_results), 'failed rendering verbose results'
+ assert self._render_results(
+ expected_verbose_results
+ ), "failed rendering verbose results"
verbose_results.update(expected_verbose_results)
else:
verbose_results.update(expected_results)
@@ -600,34 +635,39 @@ class TestPkgcheckScan:
if results != self._results[repo]:
missing = self._render_results(results - self._results[repo])
unknown = self._render_results(self._results[repo] - results)
- error = ['unmatched repo scan results:']
+ error = ["unmatched repo scan results:"]
if missing:
- error.append(f'{repo} repo missing expected results:\n{missing}')
+ error.append(f"{repo} repo missing expected results:\n{missing}")
if unknown:
- error.append(f'{repo} repo unknown results:\n{unknown}')
- pytest.fail('\n'.join(error))
+ error.append(f"{repo} repo unknown results:\n{unknown}")
+ pytest.fail("\n".join(error))
if verbose_results != self._verbose_results[repo]:
missing = self._render_results(verbose_results - self._verbose_results[repo])
unknown = self._render_results(self._verbose_results[repo] - verbose_results)
- error = ['unmatched verbose repo scan results:']
+ error = ["unmatched verbose repo scan results:"]
if missing:
- error.append(f'{repo} repo missing expected results:\n{missing}')
+ error.append(f"{repo} repo missing expected results:\n{missing}")
if unknown:
- error.append(f'{repo} repo unknown results:\n{unknown}')
- pytest.fail('\n'.join(error))
+ error.append(f"{repo} repo unknown results:\n{unknown}")
+ pytest.fail("\n".join(error))
@staticmethod
def _patch(fix, repo_path):
with fix.open() as fix_file:
try:
subprocess.run(
- ['patch', '-p1'], cwd=repo_path, stdin=fix_file,
- capture_output=True, check=True, text=True)
+ ["patch", "-p1"],
+ cwd=repo_path,
+ stdin=fix_file,
+ capture_output=True,
+ check=True,
+ text=True,
+ )
except subprocess.CalledProcessError as exc:
error = exc.stderr if exc.stderr else exc.stdout
pytest.fail(error)
- @pytest.mark.parametrize('check, result', _all_results)
+ @pytest.mark.parametrize("check, result", _all_results)
def test_fix(self, check, result, tmp_path):
"""Apply fixes to pkgs, verifying the related results are fixed."""
check_name = check.__name__
@@ -635,36 +675,36 @@ class TestPkgcheckScan:
tested = False
for repo in self.repos:
keyword_dir = self.repos_data / repo / check_name / keyword
- if (fix := keyword_dir / 'fix.patch').exists():
+ if (fix := keyword_dir / "fix.patch").exists():
func = self._patch
- elif (fix := keyword_dir / 'fix.sh').exists():
+ elif (fix := keyword_dir / "fix.sh").exists():
func = self._script
else:
continue
# apply a fix if one exists and make sure the related result doesn't appear
repo_dir = self.repos_dir / repo
- fixed_repo = tmp_path / f'fixed-{repo}'
+ fixed_repo = tmp_path / f"fixed-{repo}"
shutil.copytree(repo_dir, fixed_repo)
func(fix, fixed_repo)
- args = ['-r', str(fixed_repo), '-c', check_name, '-k', keyword]
+ args = ["-r", str(fixed_repo), "-c", check_name, "-k", keyword]
# add any defined extra repo args
try:
- with open(f'{repo_dir}/metadata/pkgcheck-args') as f:
+ with open(f"{repo_dir}/metadata/pkgcheck-args") as f:
args.extend(shlex.split(f.read()))
except FileNotFoundError:
pass
results = list(self.scan(self.scan_args + args))
if results:
- error = ['unexpected repo scan results:']
+ error = ["unexpected repo scan results:"]
error.append(self._render_results(results))
- pytest.fail('\n'.join(error))
+ pytest.fail("\n".join(error))
shutil.rmtree(fixed_repo)
tested = True
if not tested:
- pytest.skip('fix not available')
+ pytest.skip("fix not available")
diff --git a/tests/scripts/test_pkgcheck_show.py b/tests/scripts/test_pkgcheck_show.py
index 4557ecfd..ee40f7cf 100644
--- a/tests/scripts/test_pkgcheck_show.py
+++ b/tests/scripts/test_pkgcheck_show.py
@@ -15,39 +15,39 @@ class TestPkgcheckShow:
@pytest.fixture(autouse=True)
def _setup(self, testconfig):
- self.args = [project, '--config', testconfig, 'show']
+ self.args = [project, "--config", testconfig, "show"]
def test_show_no_args(self, capsys):
# defaults to outputting keywords list if no option is passed
- with patch('sys.argv', self.args):
+ with patch("sys.argv", self.args):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not err
- out = out.strip().split('\n')
+ out = out.strip().split("\n")
assert out == sorted(objects.KEYWORDS.keys())
assert excinfo.value.code == 0
def test_show_keywords(self, capsys):
- for arg in ('-k', '--keywords'):
+ for arg in ("-k", "--keywords"):
# regular mode
- with patch('sys.argv', self.args + [arg]):
+ with patch("sys.argv", self.args + [arg]):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not err
- out = out.strip().split('\n')
+ out = out.strip().split("\n")
regular_output = out
assert out == sorted(objects.KEYWORDS.keys())
assert excinfo.value.code == 0
# verbose mode
- with patch('sys.argv', self.args + [arg, '-v']):
+ with patch("sys.argv", self.args + [arg, "-v"]):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not err
- out = out.strip().split('\n')
+ out = out.strip().split("\n")
verbose_output = out
assert excinfo.value.code == 0
@@ -55,25 +55,25 @@ class TestPkgcheckShow:
assert len(regular_output) < len(verbose_output)
def test_show_checks(self, capsys):
- for arg in ('-c', '--checks'):
+ for arg in ("-c", "--checks"):
# regular mode
- with patch('sys.argv', self.args + [arg]):
+ with patch("sys.argv", self.args + [arg]):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not err
- out = out.strip().split('\n')
+ out = out.strip().split("\n")
regular_output = out
assert out == sorted(objects.CHECKS.keys())
assert excinfo.value.code == 0
# verbose mode
- with patch('sys.argv', self.args + [arg, '-v']):
+ with patch("sys.argv", self.args + [arg, "-v"]):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not err
- out = out.strip().split('\n')
+ out = out.strip().split("\n")
verbose_output = out
assert excinfo.value.code == 0
@@ -81,50 +81,50 @@ class TestPkgcheckShow:
assert len(regular_output) < len(verbose_output)
def test_show_scopes(self, capsys):
- for arg in ('-s', '--scopes'):
- with patch('sys.argv', self.args + [arg]):
+ for arg in ("-s", "--scopes"):
+ with patch("sys.argv", self.args + [arg]):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not err
- out = out.strip().split('\n')
+ out = out.strip().split("\n")
assert out == list(base.scopes)
assert excinfo.value.code == 0
- regular_output = '\n'.join(itertools.chain(out))
+ regular_output = "\n".join(itertools.chain(out))
# verbose mode
- with patch('sys.argv', self.args + [arg, '-v']):
+ with patch("sys.argv", self.args + [arg, "-v"]):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not err
- out = out.strip().split('\n')
+ out = out.strip().split("\n")
assert excinfo.value.code == 0
- verbose_output = '\n'.join(itertools.chain(out))
+ verbose_output = "\n".join(itertools.chain(out))
# verbose output shows more info
assert len(regular_output) < len(verbose_output)
def test_show_reporters(self, capsys):
- for arg in ('-r', '--reporters'):
+ for arg in ("-r", "--reporters"):
# regular mode
- with patch('sys.argv', self.args + [arg]):
+ with patch("sys.argv", self.args + [arg]):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not err
- out = out.strip().split('\n')
+ out = out.strip().split("\n")
regular_output = out
assert out == sorted(objects.REPORTERS.keys())
assert excinfo.value.code == 0
# verbose mode
- with patch('sys.argv', self.args + [arg, '-v']):
+ with patch("sys.argv", self.args + [arg, "-v"]):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not err
- out = out.strip().split('\n')
+ out = out.strip().split("\n")
verbose_output = out
assert excinfo.value.code == 0
@@ -132,27 +132,27 @@ class TestPkgcheckShow:
assert len(regular_output) < len(verbose_output)
def test_show_caches(self, capsys):
- for arg in ('-C', '--caches'):
- with patch('sys.argv', self.args + [arg]):
+ for arg in ("-C", "--caches"):
+ with patch("sys.argv", self.args + [arg]):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not err
- out = out.strip().split('\n')
+ out = out.strip().split("\n")
cache_objs = caches.CachedAddon.caches.values()
assert out == sorted(x.type for x in cache_objs)
assert excinfo.value.code == 0
- regular_output = '\n'.join(itertools.chain(out))
+ regular_output = "\n".join(itertools.chain(out))
# verbose mode
- with patch('sys.argv', self.args + [arg, '-v']):
+ with patch("sys.argv", self.args + [arg, "-v"]):
with pytest.raises(SystemExit) as excinfo:
self.script()
out, err = capsys.readouterr()
assert not err
- out = out.strip().split('\n')
+ out = out.strip().split("\n")
assert excinfo.value.code == 0
- verbose_output = '\n'.join(itertools.chain(out))
+ verbose_output = "\n".join(itertools.chain(out))
# verbose output shows more info
assert len(regular_output) < len(verbose_output)
diff --git a/tests/test_api.py b/tests/test_api.py
index f3db534b..cf546ee5 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -8,31 +8,32 @@ from pkgcheck import objects
class TestScanApi:
-
@pytest.fixture(autouse=True)
def _setup(self, testconfig):
- self.base_args = ['--config', testconfig]
- self.scan_args = ['--config', 'no', '--cache', 'no']
+ self.base_args = ["--config", testconfig]
+ self.scan_args = ["--config", "no", "--cache", "no"]
def test_argparse_error(self, repo):
- with pytest.raises(PkgcheckException, match='unrecognized arguments'):
- scan(['-r', repo.location, '--foo'])
+ with pytest.raises(PkgcheckException, match="unrecognized arguments"):
+ scan(["-r", repo.location, "--foo"])
def test_no_scan_args(self):
pipe = scan(base_args=self.base_args)
- assert pipe.options.target_repo.repo_id == 'standalone'
+ assert pipe.options.target_repo.repo_id == "standalone"
def test_no_base_args(self, repo):
- assert [] == list(scan(self.scan_args + ['-r', repo.location]))
+ assert [] == list(scan(self.scan_args + ["-r", repo.location]))
def test_keyword_import(self):
"""Keyword classes are importable from the top-level module."""
from pkgcheck import NonsolvableDeps, Result
+
assert issubclass(NonsolvableDeps, Result)
def test_module_attributes(self):
"""All keyword class names are shown for the top-level module."""
import pkgcheck
+
assert set(objects.KEYWORDS) < set(dir(pkgcheck))
def test_sigint_handling(self, repo):
@@ -49,10 +50,10 @@ class TestScanApi:
def sleep():
"""Notify testing process then sleep."""
- queue.put('ready')
+ queue.put("ready")
time.sleep(100)
- with patch('pkgcheck.pipeline.Pipeline.__iter__') as fake_iter:
+ with patch("pkgcheck.pipeline.Pipeline.__iter__") as fake_iter:
fake_iter.side_effect = partial(sleep)
try:
iter(scan([repo.location]))
@@ -62,7 +63,7 @@ class TestScanApi:
queue.put(None)
sys.exit(1)
- mp_ctx = multiprocessing.get_context('fork')
+ mp_ctx = multiprocessing.get_context("fork")
queue = mp_ctx.SimpleQueue()
p = mp_ctx.Process(target=run, args=(queue,))
p.start()
diff --git a/tests/test_base.py b/tests/test_base.py
index 08acaf8d..7c6aa905 100644
--- a/tests/test_base.py
+++ b/tests/test_base.py
@@ -6,7 +6,6 @@ from pkgcheck.base import ProgressManager
class TestScope:
-
def test_rich_comparisons(self):
assert base.commit_scope < base.repo_scope
assert base.commit_scope < 0
@@ -32,15 +31,14 @@ class TestScope:
class TestProgressManager:
-
def test_no_output(self, capsys):
# output disabled due to lower verbosity setting
- with patch('sys.stdout.isatty', return_value=True):
+ with patch("sys.stdout.isatty", return_value=True):
with ProgressManager(verbosity=-1) as progress:
for x in range(10):
progress(x)
# output disabled due to non-tty output
- with patch('sys.stdout.isatty', return_value=False):
+ with patch("sys.stdout.isatty", return_value=False):
with ProgressManager(verbosity=1) as progress:
for x in range(10):
progress(x)
@@ -49,20 +47,20 @@ class TestProgressManager:
assert not err
def test_output(self, capsys):
- with patch('sys.stdout.isatty', return_value=True):
+ with patch("sys.stdout.isatty", return_value=True):
with ProgressManager(verbosity=0) as progress:
for x in range(10):
progress(x)
out, err = capsys.readouterr()
assert not out
- assert not err.strip().split('\r') == list(range(10))
+ assert not err.strip().split("\r") == list(range(10))
def test_cached_output(self, capsys):
- with patch('sys.stdout.isatty', return_value=True):
+ with patch("sys.stdout.isatty", return_value=True):
with ProgressManager(verbosity=0) as progress:
data = list(range(10))
for x in chain.from_iterable(zip(data, data)):
progress(x)
out, err = capsys.readouterr()
assert not out
- assert not err.strip().split('\r') == list(range(10))
+ assert not err.strip().split("\r") == list(range(10))
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 4ad8011d..b2935b28 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -6,10 +6,9 @@ from snakeoil.cli import arghparse
class TestConfigFileParser:
-
@pytest.fixture(autouse=True)
def _create_argparser(self, tmp_path):
- self.config_file = str(tmp_path / 'config')
+ self.config_file = str(tmp_path / "config")
self.parser = arghparse.ArgumentParser()
self.namespace = arghparse.Namespace()
self.config_parser = cli.ConfigFileParser(self.parser)
@@ -22,65 +21,81 @@ class TestConfigFileParser:
def test_ignored_configs(self):
# nonexistent config files are ignored
- config = self.config_parser.parse_config(('foo', 'bar'))
+ config = self.config_parser.parse_config(("foo", "bar"))
assert config.sections() == []
def test_bad_config_format(self, capsys):
- with open(self.config_file, 'w') as f:
- f.write('foobar\n')
+ with open(self.config_file, "w") as f:
+ f.write("foobar\n")
with pytest.raises(SystemExit) as excinfo:
self.config_parser.parse_config((self.config_file,))
out, err = capsys.readouterr()
assert not out
- assert 'parsing config file failed:' in err
+ assert "parsing config file failed:" in err
assert excinfo.value.code == 2
def test_nonexistent_config_options(self, capsys):
"""Nonexistent parser arguments cause errors."""
- with open(self.config_file, 'w') as f:
- f.write(textwrap.dedent("""
- [DEFAULT]
- foo=bar
- """))
+ with open(self.config_file, "w") as f:
+ f.write(
+ textwrap.dedent(
+ """
+ [DEFAULT]
+ foo=bar
+ """
+ )
+ )
with pytest.raises(SystemExit) as excinfo:
self.config_parser.parse_config_options(self.namespace, configs=[self.config_file])
out, err = capsys.readouterr()
assert not out
- assert 'failed loading config: unknown arguments: --foo=bar' in err
+ assert "failed loading config: unknown arguments: --foo=bar" in err
assert excinfo.value.code == 2
def test_config_options(self):
- self.parser.add_argument('--foo')
- with open(self.config_file, 'w') as f:
- f.write(textwrap.dedent("""
- [DEFAULT]
- foo=bar
- """))
- namespace = self.parser.parse_args(['--foo', 'foo'])
- assert namespace.foo == 'foo'
+ self.parser.add_argument("--foo")
+ with open(self.config_file, "w") as f:
+ f.write(
+ textwrap.dedent(
+ """
+ [DEFAULT]
+ foo=bar
+ """
+ )
+ )
+ namespace = self.parser.parse_args(["--foo", "foo"])
+ assert namespace.foo == "foo"
# config args override matching namespace attrs
namespace = self.config_parser.parse_config_options(namespace, configs=[self.config_file])
- assert namespace.foo == 'bar'
+ assert namespace.foo == "bar"
def test_config_checksets(self):
namespace = self.parser.parse_args([])
namespace.config_checksets = {}
# checksets section exists with no entries
- with open(self.config_file, 'w') as f:
- f.write(textwrap.dedent("""
- [CHECKSETS]
- """))
+ with open(self.config_file, "w") as f:
+ f.write(
+ textwrap.dedent(
+ """
+ [CHECKSETS]
+ """
+ )
+ )
namespace = self.config_parser.parse_config_options(namespace, configs=[self.config_file])
assert namespace.config_checksets == {}
# checksets section with entries including empty set
- with open(self.config_file, 'w') as f:
- f.write(textwrap.dedent("""
- [CHECKSETS]
- set1=keyword
- set2=check,-keyword
- set3=
- """))
+ with open(self.config_file, "w") as f:
+ f.write(
+ textwrap.dedent(
+ """
+ [CHECKSETS]
+ set1=keyword
+ set2=check,-keyword
+ set3=
+ """
+ )
+ )
namespace = self.config_parser.parse_config_options(namespace, configs=[self.config_file])
- assert namespace.config_checksets == {'set1': ['keyword'], 'set2': ['check', '-keyword']}
+ assert namespace.config_checksets == {"set1": ["keyword"], "set2": ["check", "-keyword"]}
diff --git a/tests/test_feeds.py b/tests/test_feeds.py
index a1540048..c098f060 100644
--- a/tests/test_feeds.py
+++ b/tests/test_feeds.py
@@ -6,54 +6,52 @@ from .misc import FakePkg, Profile
class TestQueryCacheAddon:
-
@pytest.fixture(autouse=True)
def _setup(self, tool):
self.tool = tool
- self.args = ['scan']
+ self.args = ["scan"]
def test_opts(self):
- for val in ('version', 'package', 'category'):
- options, _ = self.tool.parse_args(self.args + ['--reset-caching-per', val])
+ for val in ("version", "package", "category"):
+ options, _ = self.tool.parse_args(self.args + ["--reset-caching-per", val])
assert options.query_caching_freq == val
def test_default(self):
options, _ = self.tool.parse_args(self.args)
- assert options.query_caching_freq == 'package'
+ assert options.query_caching_freq == "package"
def test_feed(self):
options, _ = self.tool.parse_args(self.args)
addon = feeds.QueryCache(options)
- assert addon.options.query_caching_freq == 'package'
- addon.query_cache['foo'] = 'bar'
- pkg = FakePkg('dev-util/diffball-0.5')
+ assert addon.options.query_caching_freq == "package"
+ addon.query_cache["foo"] = "bar"
+ pkg = FakePkg("dev-util/diffball-0.5")
addon.feed(pkg)
assert not addon.query_cache
class TestEvaluateDepSet:
-
@pytest.fixture(autouse=True)
def _setup(self, tool, repo, tmp_path):
self.tool = tool
self.repo = repo
- self.args = ['scan', '--cache-dir', str(tmp_path), '--repo', repo.location]
+ self.args = ["scan", "--cache-dir", str(tmp_path), "--repo", repo.location]
profiles = [
- Profile('1', 'x86'),
- Profile('2', 'x86'),
- Profile('3', 'ppc'),
+ Profile("1", "x86"),
+ Profile("2", "x86"),
+ Profile("3", "ppc"),
]
self.repo.create_profiles(profiles)
- self.repo.arches.update(['amd64', 'ppc', 'x86'])
+ self.repo.arches.update(["amd64", "ppc", "x86"])
- with open(pjoin(self.repo.path, 'profiles', '1', 'package.use.stable.mask'), 'w') as f:
- f.write('dev-util/diffball foo')
- with open(pjoin(self.repo.path, 'profiles', '2', 'package.use.stable.force'), 'w') as f:
- f.write('=dev-util/diffball-0.1 bar foo')
- with open(pjoin(self.repo.path, 'profiles', '3', 'package.use.stable.force'), 'w') as f:
- f.write('dev-util/diffball bar foo')
+ with open(pjoin(self.repo.path, "profiles", "1", "package.use.stable.mask"), "w") as f:
+ f.write("dev-util/diffball foo")
+ with open(pjoin(self.repo.path, "profiles", "2", "package.use.stable.force"), "w") as f:
+ f.write("=dev-util/diffball-0.1 bar foo")
+ with open(pjoin(self.repo.path, "profiles", "3", "package.use.stable.force"), "w") as f:
+ f.write("dev-util/diffball bar foo")
- options, _ = self.tool.parse_args(self.args + ['--profiles=1,2,3'])
+ options, _ = self.tool.parse_args(self.args + ["--profiles=1,2,3"])
profile_addon = addons.init_addon(addons.profiles.ProfileAddon, options)
self.addon = feeds.EvaluateDepSet(options, profile_addon=profile_addon)
@@ -72,35 +70,45 @@ class TestEvaluateDepSet:
l = get_rets("0.0.2", "depend")
assert len(l) == 1, f"must collapse all profiles down to one run: got {l!r}"
assert len(l[0][1]) == 4, "must have four runs, (arch and ~arch for each profile)"
- assert sorted(set(x.name for x in l[0][1])) == ['1', '2'], f"must have two profiles: got {l!r}"
- assert l[0][1][0].key == 'x86'
- assert l[0][1][1].key == 'x86'
+ assert sorted(set(x.name for x in l[0][1])) == [
+ "1",
+ "2",
+ ], f"must have two profiles: got {l!r}"
+ assert l[0][1][0].key == "x86"
+ assert l[0][1][1].key == "x86"
l = get_rets(
- "0.1", "rdepend",
+ "0.1",
+ "rdepend",
RDEPEND="x? ( dev-util/confcache ) foo? ( dev-util/foo ) "
- "bar? ( dev-util/bar ) !bar? ( dev-util/nobar ) x11-libs/xserver"
+ "bar? ( dev-util/bar ) !bar? ( dev-util/nobar ) x11-libs/xserver",
)
assert len(l) == 3, f"must collapse all profiles down to 3 runs: got {l!r}"
# ordering is potentially random; thus pull out which depset result is
# which based upon profile
- l1 = [x for x in l if x[1][0].name == '1'][0]
- l2 = [x for x in l if x[1][0].name == '2'][0]
-
- assert (
- set(str(l1[0]).split()) ==
- {'dev-util/confcache', 'dev-util/bar', 'dev-util/nobar', 'x11-libs/xserver'})
-
- assert (
- set(str(l2[0]).split()) ==
- {'dev-util/confcache', 'dev-util/foo', 'dev-util/bar', 'x11-libs/xserver'})
+ l1 = [x for x in l if x[1][0].name == "1"][0]
+ l2 = [x for x in l if x[1][0].name == "2"][0]
+
+ assert set(str(l1[0]).split()) == {
+ "dev-util/confcache",
+ "dev-util/bar",
+ "dev-util/nobar",
+ "x11-libs/xserver",
+ }
+
+ assert set(str(l2[0]).split()) == {
+ "dev-util/confcache",
+ "dev-util/foo",
+ "dev-util/bar",
+ "x11-libs/xserver",
+ }
# test feed wiping, using an empty depset; if it didn't clear, then
# results from a pkg/attr tuple from above would come through rather
# then an empty.
- pkg = FakePkg('dev-util/diffball-0.5')
+ pkg = FakePkg("dev-util/diffball-0.5")
self.addon.feed(pkg)
l = get_rets("0.1", "rdepend")
assert len(l) == 1, f"feed didn't clear the cache- should be len 1: {l!r}"
@@ -110,20 +118,25 @@ class TestEvaluateDepSet:
# ensure it handles arch right.
l = get_rets("0", "depend", KEYWORDS="ppc x86")
assert len(l) == 1, f"should be len 1, got {l!r}"
- assert sorted(set(x.name for x in l[0][1])) == ["1", "2", "3"], (
- f"should have three profiles of 1-3, got {l[0][1]!r}")
+ assert sorted(set(x.name for x in l[0][1])) == [
+ "1",
+ "2",
+ "3",
+ ], f"should have three profiles of 1-3, got {l[0][1]!r}"
# ensure it's caching profile collapsing, iow, keywords for same ver
# that's partially cached (single attr at least) should *not* change
# things.
l = get_rets("0", "depend", KEYWORDS="ppc")
- assert sorted(set(x.name for x in l[0][1])) == ['1', '2', '3'], (
+ assert sorted(set(x.name for x in l[0][1])) == ["1", "2", "3"], (
f"should have 3 profiles, got {l[0][1]!r}\nthis indicates it's "
- "re-identifying profiles every invocation, which is unwarranted ")
+ "re-identifying profiles every invocation, which is unwarranted "
+ )
- l = get_rets("1", "depend", KEYWORDS="ppc x86",
- DEPEND="ppc? ( dev-util/ppc ) !ppc? ( dev-util/x86 )")
+ l = get_rets(
+ "1", "depend", KEYWORDS="ppc x86", DEPEND="ppc? ( dev-util/ppc ) !ppc? ( dev-util/x86 )"
+ )
assert len(l) == 2, f"should be len 2, got {l!r}"
# same issue, figure out what is what
diff --git a/tests/test_reporters.py b/tests/test_reporters.py
index 462cc44e..4a0cda39 100644
--- a/tests/test_reporters.py
+++ b/tests/test_reporters.py
@@ -16,15 +16,15 @@ class BaseReporter:
@pytest.fixture(autouse=True)
def _setup(self):
- self.log_warning = profiles.ProfileWarning(Exception('profile warning'))
- self.log_error = profiles.ProfileError(Exception('profile error'))
- pkg = FakePkg('dev-libs/foo-0')
- self.commit_result = git.InvalidCommitMessage('no commit message', commit='8d86269bb4c7')
- self.category_result = metadata_xml.CatMissingMetadataXml('metadata.xml', pkg=pkg)
- self.package_result = pkgdir.InvalidPN(('bar', 'baz'), pkg=pkg)
- self.versioned_result = metadata.BadFilename(('0.tar.gz', 'foo.tar.gz'), pkg=pkg)
- self.line_result = codingstyle.ReadonlyVariable('P', line='P=6', lineno=7, pkg=pkg)
- self.lines_result = codingstyle.EbuildUnquotedVariable('D', lines=(5, 7), pkg=pkg)
+ self.log_warning = profiles.ProfileWarning(Exception("profile warning"))
+ self.log_error = profiles.ProfileError(Exception("profile error"))
+ pkg = FakePkg("dev-libs/foo-0")
+ self.commit_result = git.InvalidCommitMessage("no commit message", commit="8d86269bb4c7")
+ self.category_result = metadata_xml.CatMissingMetadataXml("metadata.xml", pkg=pkg)
+ self.package_result = pkgdir.InvalidPN(("bar", "baz"), pkg=pkg)
+ self.versioned_result = metadata.BadFilename(("0.tar.gz", "foo.tar.gz"), pkg=pkg)
+ self.line_result = codingstyle.ReadonlyVariable("P", line="P=6", lineno=7, pkg=pkg)
+ self.lines_result = codingstyle.EbuildUnquotedVariable("D", lines=(5, 7), pkg=pkg)
def mk_reporter(self, **kwargs):
out = PlainTextFormatter(sys.stdout)
@@ -49,106 +49,121 @@ class BaseReporter:
class TestStrReporter(BaseReporter):
reporter_cls = reporters.StrReporter
- add_report_output = dedent("""\
- commit 8d86269bb4c7: no commit message
- profile warning
- dev-libs: category is missing metadata.xml
- dev-libs/foo: invalid package names: [ bar, baz ]
- dev-libs/foo-0: bad filenames: [ 0.tar.gz, foo.tar.gz ]
- dev-libs/foo-0: read-only variable 'P' assigned, line 7: P=6
- dev-libs/foo-0: unquoted variable D on lines: 5, 7
- """)
+ add_report_output = dedent(
+ """\
+ commit 8d86269bb4c7: no commit message
+ profile warning
+ dev-libs: category is missing metadata.xml
+ dev-libs/foo: invalid package names: [ bar, baz ]
+ dev-libs/foo-0: bad filenames: [ 0.tar.gz, foo.tar.gz ]
+ dev-libs/foo-0: read-only variable 'P' assigned, line 7: P=6
+ dev-libs/foo-0: unquoted variable D on lines: 5, 7
+ """
+ )
class TestFancyReporter(BaseReporter):
reporter_cls = reporters.FancyReporter
- add_report_output = dedent("""\
- commit
- InvalidCommitMessage: commit 8d86269bb4c7: no commit message
+ add_report_output = dedent(
+ """\
+ commit
+ InvalidCommitMessage: commit 8d86269bb4c7: no commit message
- profiles
- ProfileWarning: profile warning
+ profiles
+ ProfileWarning: profile warning
- dev-libs
- CatMissingMetadataXml: category is missing metadata.xml
+ dev-libs
+ CatMissingMetadataXml: category is missing metadata.xml
- dev-libs/foo
- InvalidPN: invalid package names: [ bar, baz ]
- BadFilename: version 0: bad filenames: [ 0.tar.gz, foo.tar.gz ]
- ReadonlyVariable: version 0: read-only variable 'P' assigned, line 7: P=6
- UnquotedVariable: version 0: unquoted variable D on lines: 5, 7
- """)
+ dev-libs/foo
+ InvalidPN: invalid package names: [ bar, baz ]
+ BadFilename: version 0: bad filenames: [ 0.tar.gz, foo.tar.gz ]
+ ReadonlyVariable: version 0: read-only variable 'P' assigned, line 7: P=6
+ UnquotedVariable: version 0: unquoted variable D on lines: 5, 7
+ """
+ )
class TestJsonReporter(BaseReporter):
reporter_cls = reporters.JsonReporter
- add_report_output = dedent("""\
- {"_style": {"InvalidCommitMessage": "commit 8d86269bb4c7: no commit message"}}
- {"_warning": {"ProfileWarning": "profile warning"}}
- {"dev-libs": {"_error": {"CatMissingMetadataXml": "category is missing metadata.xml"}}}
- {"dev-libs": {"foo": {"_error": {"InvalidPN": "invalid package names: [ bar, baz ]"}}}}
- {"dev-libs": {"foo": {"0": {"_warning": {"BadFilename": "bad filenames: [ 0.tar.gz, foo.tar.gz ]"}}}}}
- {"dev-libs": {"foo": {"0": {"_warning": {"ReadonlyVariable": "read-only variable 'P' assigned, line 7: P=6"}}}}}
- {"dev-libs": {"foo": {"0": {"_warning": {"UnquotedVariable": "unquoted variable D on lines: 5, 7"}}}}}
- """)
+ add_report_output = dedent(
+ """\
+ {"_style": {"InvalidCommitMessage": "commit 8d86269bb4c7: no commit message"}}
+ {"_warning": {"ProfileWarning": "profile warning"}}
+ {"dev-libs": {"_error": {"CatMissingMetadataXml": "category is missing metadata.xml"}}}
+ {"dev-libs": {"foo": {"_error": {"InvalidPN": "invalid package names: [ bar, baz ]"}}}}
+ {"dev-libs": {"foo": {"0": {"_warning": {"BadFilename": "bad filenames: [ 0.tar.gz, foo.tar.gz ]"}}}}}
+ {"dev-libs": {"foo": {"0": {"_warning": {"ReadonlyVariable": "read-only variable 'P' assigned, line 7: P=6"}}}}}
+ {"dev-libs": {"foo": {"0": {"_warning": {"UnquotedVariable": "unquoted variable D on lines: 5, 7"}}}}}
+ """
+ )
class TestXmlReporter(BaseReporter):
reporter_cls = reporters.XmlReporter
- add_report_output = dedent("""\
- <checks>
- <result><class>InvalidCommitMessage</class><msg>commit 8d86269bb4c7: no commit message</msg></result>
- <result><class>ProfileWarning</class><msg>profile warning</msg></result>
- <result><category>dev-libs</category><class>CatMissingMetadataXml</class><msg>category is missing metadata.xml</msg></result>
- <result><category>dev-libs</category><package>foo</package><class>InvalidPN</class><msg>invalid package names: [ bar, baz ]</msg></result>
- <result><category>dev-libs</category><package>foo</package><version>0</version><class>BadFilename</class><msg>bad filenames: [ 0.tar.gz, foo.tar.gz ]</msg></result>
- <result><category>dev-libs</category><package>foo</package><version>0</version><class>ReadonlyVariable</class><msg>read-only variable 'P' assigned, line 7: P=6</msg></result>
- <result><category>dev-libs</category><package>foo</package><version>0</version><class>UnquotedVariable</class><msg>unquoted variable D on lines: 5, 7</msg></result>
- </checks>
- """)
+ add_report_output = dedent(
+ """\
+ <checks>
+ <result><class>InvalidCommitMessage</class><msg>commit 8d86269bb4c7: no commit message</msg></result>
+ <result><class>ProfileWarning</class><msg>profile warning</msg></result>
+ <result><category>dev-libs</category><class>CatMissingMetadataXml</class><msg>category is missing metadata.xml</msg></result>
+ <result><category>dev-libs</category><package>foo</package><class>InvalidPN</class><msg>invalid package names: [ bar, baz ]</msg></result>
+ <result><category>dev-libs</category><package>foo</package><version>0</version><class>BadFilename</class><msg>bad filenames: [ 0.tar.gz, foo.tar.gz ]</msg></result>
+ <result><category>dev-libs</category><package>foo</package><version>0</version><class>ReadonlyVariable</class><msg>read-only variable 'P' assigned, line 7: P=6</msg></result>
+ <result><category>dev-libs</category><package>foo</package><version>0</version><class>UnquotedVariable</class><msg>unquoted variable D on lines: 5, 7</msg></result>
+ </checks>
+ """
+ )
class TestCsvReporter(BaseReporter):
reporter_cls = reporters.CsvReporter
- add_report_output = dedent("""\
- ,,,commit 8d86269bb4c7: no commit message
- ,,,profile warning
- dev-libs,,,category is missing metadata.xml
- dev-libs,foo,,"invalid package names: [ bar, baz ]"
- dev-libs,foo,0,"bad filenames: [ 0.tar.gz, foo.tar.gz ]"
- dev-libs,foo,0,"read-only variable 'P' assigned, line 7: P=6"
- dev-libs,foo,0,"unquoted variable D on lines: 5, 7"
- """)
+ add_report_output = dedent(
+ """\
+ ,,,commit 8d86269bb4c7: no commit message
+ ,,,profile warning
+ dev-libs,,,category is missing metadata.xml
+ dev-libs,foo,,"invalid package names: [ bar, baz ]"
+ dev-libs,foo,0,"bad filenames: [ 0.tar.gz, foo.tar.gz ]"
+ dev-libs,foo,0,"read-only variable 'P' assigned, line 7: P=6"
+ dev-libs,foo,0,"unquoted variable D on lines: 5, 7"
+ """
+ )
class TestFormatReporter(BaseReporter):
- reporter_cls = partial(reporters.FormatReporter, '')
+ reporter_cls = partial(reporters.FormatReporter, "")
def test_add_report(self, capsys):
for format_str, expected in (
- ('r', 'r\n' * 7),
- ('{category}', 'dev-libs\n' * 5),
- ('{category}/{package}', '/\n/\ndev-libs/\n' + 'dev-libs/foo\n' * 4),
- ('{category}/{package}-{version}', '/-\n/-\ndev-libs/-\ndev-libs/foo-\n' + 'dev-libs/foo-0\n' * 3),
- ('{name}',
- 'InvalidCommitMessage\nProfileWarning\nCatMissingMetadataXml\nInvalidPN\nBadFilename\nReadonlyVariable\nUnquotedVariable\n'),
- ('{foo}', ''),
- ):
+ ("r", "r\n" * 7),
+ ("{category}", "dev-libs\n" * 5),
+ ("{category}/{package}", "/\n/\ndev-libs/\n" + "dev-libs/foo\n" * 4),
+ (
+ "{category}/{package}-{version}",
+ "/-\n/-\ndev-libs/-\ndev-libs/foo-\n" + "dev-libs/foo-0\n" * 3,
+ ),
+ (
+ "{name}",
+ "InvalidCommitMessage\nProfileWarning\nCatMissingMetadataXml\nInvalidPN\nBadFilename\nReadonlyVariable\nUnquotedVariable\n",
+ ),
+ ("{foo}", ""),
+ ):
self.reporter_cls = partial(reporters.FormatReporter, format_str)
self.add_report_output = expected
super().test_add_report(capsys)
def test_unsupported_index(self, capsys):
- self.reporter_cls = partial(reporters.FormatReporter, '{0}')
+ self.reporter_cls = partial(reporters.FormatReporter, "{0}")
with self.mk_reporter() as reporter:
with pytest.raises(base.PkgcheckUserException) as excinfo:
reporter.report(self.versioned_result)
- assert 'integer indexes are not supported' in str(excinfo.value)
+ assert "integer indexes are not supported" in str(excinfo.value)
class TestJsonStream(BaseReporter):
@@ -158,8 +173,13 @@ class TestJsonStream(BaseReporter):
def test_add_report(self, capsys):
with self.mk_reporter() as reporter:
for result in (
- self.log_warning, self.log_error, self.commit_result,
- self.category_result, self.package_result, self.versioned_result):
+ self.log_warning,
+ self.log_error,
+ self.commit_result,
+ self.category_result,
+ self.package_result,
+ self.versioned_result,
+ ):
reporter.report(result)
out, err = capsys.readouterr()
assert not err
@@ -169,28 +189,30 @@ class TestJsonStream(BaseReporter):
def test_deserialize_error(self):
with self.mk_reporter() as reporter:
# deserializing non-result objects raises exception
- obj = reporter.to_json(['result'])
- with pytest.raises(reporters.DeserializationError, match='failed loading'):
+ obj = reporter.to_json(["result"])
+ with pytest.raises(reporters.DeserializationError, match="failed loading"):
next(reporter.from_iter([obj]))
# deserializing mangled JSON result objects raises exception
obj = reporter.to_json(self.versioned_result)
- del obj['__class__']
+ del obj["__class__"]
json_obj = json.dumps(obj)
- with pytest.raises(reporters.DeserializationError, match='unknown result'):
+ with pytest.raises(reporters.DeserializationError, match="unknown result"):
next(reporter.from_iter([json_obj]))
class TestFlycheckReporter(BaseReporter):
reporter_cls = reporters.FlycheckReporter
- add_report_output = dedent("""\
- -.ebuild:0:style:InvalidCommitMessage: commit 8d86269bb4c7: no commit message
- -.ebuild:0:warning:ProfileWarning: profile warning
- -.ebuild:0:error:CatMissingMetadataXml: category is missing metadata.xml
- foo-.ebuild:0:error:InvalidPN: invalid package names: [ bar, baz ]
- foo-0.ebuild:0:warning:BadFilename: bad filenames: [ 0.tar.gz, foo.tar.gz ]
- foo-0.ebuild:7:warning:ReadonlyVariable: read-only variable 'P' assigned, line 7: P=6
- foo-0.ebuild:5:warning:UnquotedVariable: unquoted variable D
- foo-0.ebuild:7:warning:UnquotedVariable: unquoted variable D
- """)
+ add_report_output = dedent(
+ """\
+ -.ebuild:0:style:InvalidCommitMessage: commit 8d86269bb4c7: no commit message
+ -.ebuild:0:warning:ProfileWarning: profile warning
+ -.ebuild:0:error:CatMissingMetadataXml: category is missing metadata.xml
+ foo-.ebuild:0:error:InvalidPN: invalid package names: [ bar, baz ]
+ foo-0.ebuild:0:warning:BadFilename: bad filenames: [ 0.tar.gz, foo.tar.gz ]
+ foo-0.ebuild:7:warning:ReadonlyVariable: read-only variable 'P' assigned, line 7: P=6
+ foo-0.ebuild:5:warning:UnquotedVariable: unquoted variable D
+ foo-0.ebuild:7:warning:UnquotedVariable: unquoted variable D
+ """
+ )