From bb869cb895c61e91793153b62e830c7514a1a508 Mon Sep 17 00:00:00 2001 From: Kiran Pawar Date: Fri, 10 Oct 2025 10:51:27 +0000 Subject: [PATCH 01/19] Add tempest plugins to the denylist As per discussions: - https://meetings.opendev.org/irclogs/%23openstack-qa/%23openstack-qa.2025-10-14.log.html - https://review.opendev.org/c/openstack/requirements/+/963685/comments/cfdad27f_92ae83a9 Needed-By: https://review.opendev.org/c/openstack/manila-tempest-plugin/+/952368 Co-authored-by: : Kiran Pawar Signed-off-by: Tony Breeds Change-Id: Ica744e13a8fea271c3838bd5cb43c90cdb7d7c55 --- denylist.txt | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/denylist.txt b/denylist.txt index 5f86ca7c2..e21435a6e 100644 --- a/denylist.txt +++ b/denylist.txt @@ -50,6 +50,31 @@ molecule-plugins # Once any stable branch move to 'Extended Maintenance' and we pin the # older Tempest to test them then we can move it from here to u-c file. tempest +barbican_tempest_plugin +blazar_tempest_plugin +cinder_tempest-plugin +cloudkitty_tempest_plugin +cyborg_tempest-plugin +designate_tempest-plugin +freezer_tempest_plugin +glance_tempest-plugin +heat_tempest-plugin +ironic_tempest-plugin +keystone_tempest_plugin +magnum_tempest_plugin +manila_tempest-plugin +mistral_tempest_tests +monasca_tempest-plugin +neutron_tempest-plugin +octavia_tempest-plugin +telemetry_tempest_plugin +trove_tempest_plugin +venus_tempest-plugin +vitrage_tempest-plugin +watcher_tempest-plugin +whitebox_tempest-plugin +zaqar_tempest_plugin +zun_tempest-plugin # annoying from setuptools pkg_resources From 7f008f902d5470093c67d7b03cd3a896712702f0 Mon Sep 17 00:00:00 2001 From: Stephen Finucane Date: Wed, 20 Dec 2023 16:00:06 +0000 Subject: [PATCH 02/19] Replace pkg_resources This will be removed after 2025-11-30. Get ahead of that breakage. Change-Id: Ib97eea1037fac541c8ebadc526ae9c3bb160f685 Signed-off-by: Stephen Finucane --- detail.py | 14 +++++++------- openstack_requirements/constraints.py | 4 +++- openstack_requirements/requirement.py | 11 ++++++----- openstack_requirements/tests/test_requirement.py | 12 ++++++------ tools/cap.py | 9 ++++----- tools/what-broke.py | 4 ++-- 6 files changed, 28 insertions(+), 26 deletions(-) diff --git a/detail.py b/detail.py index b48d4c44f..a98065a20 100644 --- a/detail.py +++ b/detail.py @@ -20,7 +20,7 @@ import urllib.parse as urlparse import urllib.request as urlreq -import pkg_resources +import packaging.requirement try: PYPI_LOCATION = os.environ['PYPI_LOCATION'] @@ -40,11 +40,11 @@ def iter_names(req): - for k in (req.key, req.project_name): - yield k - yield k.title() - yield k.replace("-", "_") - yield k.replace("-", "_").title() + yield req.name + yield req.name.lower() + yield req.name.title() + yield req.name.replace("-", "_") + yield req.name.replace("-", "_").title() def release_data(req): @@ -76,7 +76,7 @@ def main(): line = line.strip() if line.startswith("#") or not line: continue - req = pkg_resources.Requirement.parse(line) + req = packaging.requirement.Requirement(line) print(" - processing: %s" % (req)) try: raw_req_data = release_data(req) diff --git a/openstack_requirements/constraints.py b/openstack_requirements/constraints.py index f374f242c..6680d3a20 100644 --- a/openstack_requirements/constraints.py +++ b/openstack_requirements/constraints.py @@ -12,6 +12,8 @@ from packaging import specifiers +from openstack_requirements import requirement + # FIXME(dhellmann): These items were not in the constraints list but # should not be denylisted. 
We don't know yet what versions they @@ -102,7 +104,7 @@ def satisfied(reqs, name, version, failures): failures = [] for pkg_constraints in constraints.values(): for constraint, _ in pkg_constraints: - name = constraint.package + name = requirement.canonical_name(constraint.package) version = constraint.specifiers[3:] satisfied(global_reqs, name, version, failures) return failures diff --git a/openstack_requirements/requirement.py b/openstack_requirements/requirement.py index da94db524..89b3e4c56 100644 --- a/openstack_requirements/requirement.py +++ b/openstack_requirements/requirement.py @@ -15,9 +15,10 @@ # This module has no IO at all, and none should be added. import collections +import packaging.requirements import packaging.specifiers +import packaging.utils import packaging.version -import pkg_resources import re @@ -70,7 +71,7 @@ def to_line(self, marker_sep=';', line_prefix='', comment_prefix=' ', def canonical_name(req_name): """Return the canonical form of req_name.""" - return pkg_resources.safe_name(req_name).lower() + return packaging.utils.canonicalize_name(req_name) def parse(content, permit_urls=False): @@ -116,7 +117,7 @@ def parse_line(req_line, permit_urls=False): hash_pos = hash_pos + parse_start else: # Trigger an early failure before we look for ':' - pkg_resources.Requirement.parse(req_line) + packaging.requirements.Requirement(req_line) else: parse_start = 0 location = '' @@ -138,8 +139,8 @@ def parse_line(req_line, permit_urls=False): specifier = '' elif req_line: # Pulled out a requirement - parsed = pkg_resources.Requirement.parse(req_line) - name = parsed.project_name + parsed = packaging.requirements.Requirement(req_line) + name = parsed.name extras = parsed.extras specifier = str(parsed.specifier) else: diff --git a/openstack_requirements/tests/test_requirement.py b/openstack_requirements/tests/test_requirement.py index f0a4ed5da..8942a3394 100644 --- a/openstack_requirements/tests/test_requirement.py +++ b/openstack_requirements/tests/test_requirement.py @@ -154,7 +154,7 @@ def test_multiline(self): """) reqs = requirement.parse(content) self.assertEqual( - set(['oslo.config', 'oslo.concurrency', 'oslo.context']), + {'oslo-config', 'oslo-concurrency', 'oslo-context'}, set(reqs.keys()), ) @@ -166,16 +166,16 @@ def test_extras(self): """) reqs = requirement.parse(content) self.assertEqual( - set(['oslo.config', 'oslo.concurrency', 'oslo.db']), + {'oslo-config', 'oslo-concurrency', 'oslo-db'}, set(reqs.keys()), ) - self.assertEqual(reqs['oslo.config'][0][0].extras, frozenset(())) - self.assertEqual(reqs['oslo.concurrency'][0][0].extras, + self.assertEqual(reqs['oslo-config'][0][0].extras, frozenset(())) + self.assertEqual(reqs['oslo-concurrency'][0][0].extras, frozenset(('fixtures',))) - self.assertEqual(reqs['oslo.db'][0][0].extras, + self.assertEqual(reqs['oslo-db'][0][0].extras, frozenset(('fixtures', 'mysql'))) self.assertCountEqual(reqs, - ['oslo.config', 'oslo.concurrency', 'oslo.db']) + ['oslo-config', 'oslo-concurrency', 'oslo-db']) class TestCanonicalName(testtools.TestCase): diff --git a/tools/cap.py b/tools/cap.py index cd6ba5675..82ed8e617 100755 --- a/tools/cap.py +++ b/tools/cap.py @@ -12,11 +12,10 @@ # License for the specific language governing permissions and limitations # under the License. - import argparse import re -import pkg_resources +import packaging.requirements overrides = dict() # List of overrides needed. 
Ignore version in pip-freeze and use the one here @@ -38,7 +37,7 @@ def cap(requirements, frozen): output = [] for line in requirements: try: - req = pkg_resources.Requirement.parse(line) + req = packaging.requirements.Requirement(line) specifier = str(req.specifier) if any(op in specifier for op in ['==', '~=', '<']): # if already capped, continue @@ -67,7 +66,7 @@ def cap(requirements, frozen): def pin(line, new_cap): """Add new cap into existing line - Don't use pkg_resources so we can preserve the comments. + Don't use packaging.requirements so we can preserve the comments. """ end = None use_comma = False @@ -109,7 +108,7 @@ def freeze(lines): for line in lines: try: - req = pkg_resources.Requirement.parse(line) + req = packaging.requirements.Requirement(line) freeze[req.project_name] = req.specifier except ValueError: # not a valid requirement, can be a comment, blank line etc diff --git a/tools/what-broke.py b/tools/what-broke.py index 01194341f..bb2d74e1e 100755 --- a/tools/what-broke.py +++ b/tools/what-broke.py @@ -38,7 +38,7 @@ import sys import urllib.request as urlreq -import pkg_resources +import packaging.requirements class Release(object): @@ -62,7 +62,7 @@ def _parse_pypi_released(datestr): def _package_name(line): - return pkg_resources.Requirement.parse(line).project_name + return packaging.requirements.Requirement(line).name def get_requirements(): From 139c872453d073b3da80c99abb4e6f011ebe99b2 Mon Sep 17 00:00:00 2001 From: Stephen Finucane Date: Thu, 18 Sep 2025 12:36:05 +0100 Subject: [PATCH 03/19] docs: Remove cruft from conf.py file Change-Id: I9e1d4b123ea2825755e0764ad8e5221f8fa364fb Signed-off-by: Stephen Finucane --- doc/source/conf.py | 220 +++------------------------------------------ 1 file changed, 14 insertions(+), 206 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 1aa7b2fc8..ee761f553 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -1,26 +1,5 @@ -# -*- coding: utf-8 -*- - -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) - # -- General configuration ----------------------------------------------------- -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['openstackdocstheme'] @@ -30,35 +9,12 @@ # The suffix of source filenames. source_suffix = '.rst' -# The encoding of source files. -#source_encoding = 'utf-8-sig' - # The master toctree document. master_doc = 'index' # General information about the project. -project = u'requirements' -copyright = u'2017, OpenStack Foundation' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. 
-#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +project = 'requirements' +copyright = '2017-present, OpenStack Foundation' # If true, the current module name will be prepended to all description # unit titles (such as .. function::). @@ -74,183 +30,35 @@ # A list of ignored prefixes for module index sorting. modindex_common_prefix = ['requirements-doc.'] -# openstackdocstheme options -openstackdocs_repo_name = 'openstack/requirements' -openstackdocs_auto_name = False -openstackdocs_bug_project = 'openstack-requirements' -openstackdocs_bug_tag = '' - -# -- Options for man page output ---------------------------------------------- -man_pages = [] - # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'openstackdocs' -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - # If false, no module index is generated. html_domain_indices = False # If false, no index is generated. html_use_index = False -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. 
-htmlhelp_basename = 'requirements-doc' - - # -- Options for LaTeX output -------------------------------------------------- -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', -} - # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'RequirementsGuide.tex', 'Global Requirements Guide', - u'OpenStack contributors', 'manual'), + ( + 'index', + 'RequirementsGuide.tex', + 'Global Requirements Guide', + 'OpenStack contributors', + 'manual', + ), ] -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - -# -- Options for Texinfo output ------------------------------------------------ - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# -- Options for openstackdocstheme extension ---------------------------------- - -# -- Options for Epub output --------------------------------------------------- - -# Bibliographic Dublin Core info. -epub_title = u'requirements Documentation' -epub_author = u'OpenStack' -epub_publisher = u'OpenStack' -epub_copyright = u'2017, OpenStack' - -# The language of the text. It defaults to the language option -# or en if the language is not set. -#epub_language = '' - -# The scheme of the identifier. Typical schemes are ISBN or URL. -#epub_scheme = '' - -# The unique identifier of the text. This can be a ISBN number -# or the project homepage. -#epub_identifier = '' - -# A unique identification for the text. -#epub_uid = '' - -# A tuple containing the cover image and cover page html template filenames. -#epub_cover = () - -# HTML files that should be inserted before the pages created by sphinx. -# The format is a list of tuples containing the path and title. -#epub_pre_files = [] - -# HTML files shat should be inserted after the pages created by sphinx. -# The format is a list of tuples containing the path and title. -#epub_post_files = [] - -# A list of files that should not be packed into the epub file. -#epub_exclude_files = [] - -# The depth of the table of contents in toc.ncx. -#epub_tocdepth = 3 - -# Allow duplicate toc entries. 
-#epub_tocdup = True +openstackdocs_repo_name = 'openstack/requirements' +openstackdocs_auto_name = False +openstackdocs_bug_project = 'openstack-requirements' +openstackdocs_bug_tag = '' From 73ce4538d34498e56cd7e470493129a7add44bd6 Mon Sep 17 00:00:00 2001 From: Stephen Finucane Date: Thu, 18 Sep 2025 12:20:52 +0100 Subject: [PATCH 04/19] Migrate setup configuration to pyproject.toml Change-Id: Ie9b82e3ecbc3418e68ed1ff644b2df0de8871561 Signed-off-by: Stephen Finucane --- pyproject.toml | 45 +++++++++++++++++++++++++++++++++++++++++++++ setup.cfg | 33 --------------------------------- 2 files changed, 45 insertions(+), 33 deletions(-) create mode 100644 pyproject.toml diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..7f5d852af --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,45 @@ +[build-system] +requires = ["pbr>=6.1.1"] +build-backend = "pbr.build" + +[project] +name = "openstack_requirements" +description = "OpenStack python dependency management tools" +authors = [ + {name = "OpenStack", email = "openstack-discuss@lists.openstack.org"}, +] +readme = {file = "README.rst", content-type = "text/x-rst"} +license = {text = "Apache-2.0"} +dynamic = ["version", "dependencies"] +requires-python = ">=3.9" +classifiers = [ + "Environment :: OpenStack", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] + +[project.urls] +Homepage = "https://docs.openstack.org/requirements" +Repository = "https://opendev.org/openstack/requirements" + +[project.scripts] +edit-constraints = "openstack_requirements.cmds.edit_constraint:main" +generate-constraints = "openstack_requirements.cmds.generate:main" +check-conflicts = "openstack_requirements.cmds.check_conflicts:main" +validate-constraints = "openstack_requirements.cmds.validate:main" +validate-projects = "openstack_requirements.cmds.validate_projects:main" +normalize-requirements = "openstack_requirements.cmds.normalize_requirements:main" +check-constraints = "openstack_requirements.cmds.check_exists:main" + +[tool.setuptools] +packages = [ + "openstack_requirements" +] diff --git a/setup.cfg b/setup.cfg index 0235e9647..c92a0ab72 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,35 +1,2 @@ [metadata] name = openstack_requirements -summary = OpenStack python dependency management tools -description_file = - README.rst -author = OpenStack -author_email = openstack-discuss@lists.openstack.org -home_page = https://docs.openstack.org/requirements/latest/ -classifier = - Environment :: OpenStack - Intended Audience :: Information Technology - Intended Audience :: System Administrators - License :: OSI Approved :: Apache Software License - Operating System :: POSIX :: Linux - Operating System :: Microsoft :: Windows - Operating System :: MacOS :: MacOS X - Programming Language :: Python - Programming Language :: Python :: 3 - Programming Language :: Python :: 3.9 - Programming Language :: Python :: 3.10 - Programming Language :: Python :: 3.11 - Programming Language :: Python :: 3.12 - -[files] -packages = - openstack_requirements - -[entry_points] -console_scripts = - edit-constraints = openstack_requirements.cmds.edit_constraint:main - generate-constraints = 
openstack_requirements.cmds.generate:main - validate-constraints = openstack_requirements.cmds.validate:main - validate-projects = openstack_requirements.cmds.validate_projects:main - normalize-requirements = openstack_requirements.cmds.normalize_requirements:main - check-constraints = openstack_requirements.cmds.check_exists:main From 3e7039df7dc1b0056c2940fa7a890db24e6b0b92 Mon Sep 17 00:00:00 2001 From: Stephen Finucane Date: Thu, 18 Sep 2025 12:33:13 +0100 Subject: [PATCH 05/19] Add ruff Change-Id: I82f8f1ec3f350c8ac7d1578292001fe61d06249d Signed-off-by: Stephen Finucane --- babel-test/babel-input.py | 1 - detail.py | 50 +-- openstack_requirements/check.py | 133 ++++---- openstack_requirements/cmds/check_exists.py | 58 ++-- .../cmds/edit_constraint.py | 14 +- openstack_requirements/cmds/generate.py | 122 ++++--- .../cmds/normalize_requirements.py | 15 +- openstack_requirements/cmds/validate.py | 25 +- .../cmds/validate_projects.py | 31 +- openstack_requirements/constraints.py | 57 ++-- openstack_requirements/project.py | 15 +- openstack_requirements/project_config.py | 14 +- openstack_requirements/requirement.py | 93 ++++-- openstack_requirements/tests/common.py | 38 ++- .../tests/files/old-setup.py | 2 +- openstack_requirements/tests/files/setup.py | 4 +- openstack_requirements/tests/test_check.py | 163 +++++----- .../tests/test_check_constraints.py | 137 +++++--- .../tests/test_constraints.py | 45 +-- .../tests/test_edit_constraint.py | 61 ++-- openstack_requirements/tests/test_generate.py | 77 +++-- openstack_requirements/tests/test_project.py | 20 +- .../tests/test_requirement.py | 298 ++++++++++++------ openstack_requirements/utils.py | 2 +- .../files/project-requirements-change.py | 54 ++-- pyproject.toml | 14 + setup.py | 4 +- tools/cap.py | 17 +- tools/check-install.py | 2 +- tools/lint.py | 4 +- tools/what-broke.py | 36 +-- tox.ini | 5 + 32 files changed, 952 insertions(+), 659 deletions(-) diff --git a/babel-test/babel-input.py b/babel-test/babel-input.py index ece057a3e..05207f4b9 100644 --- a/babel-test/babel-input.py +++ b/babel-test/babel-input.py @@ -13,7 +13,6 @@ """Test input for Babel""" - from oslo.i18n import _ from oslo.i18n import _LE from oslo_log import log as logging diff --git a/detail.py b/detail.py index a98065a20..2d48f81e0 100644 --- a/detail.py +++ b/detail.py @@ -28,15 +28,17 @@ PYPI_LOCATION = 'http://pypi.org/project' -KEEP_KEYS = frozenset([ - 'author', - 'author_email', - 'maintainer', - 'maintainer_email', - 'license', - 'summary', - 'home_page', -]) +KEEP_KEYS = frozenset( + [ + 'author', + 'author_email', + 'maintainer', + 'maintainer_email', + 'license', + 'summary', + 'home_page', + ] +) def iter_names(req): @@ -51,7 +53,7 @@ def release_data(req): # Try to find it with various names... attempted = [] for name in iter_names(req): - url = PYPI_LOCATION + "/%s/json" % (urlparse.quote(name)) + url = PYPI_LOCATION + f"/{urlparse.quote(name)}/json" if url in attempted: continue with contextlib.closing(urlreq.urlopen(url)) as uh: @@ -59,17 +61,20 @@ def release_data(req): attempted.append(url) continue return json.loads(uh.read()) - attempted = [" * %s" % u for u in attempted] - raise IOError("Could not find '%s' on pypi\nAttempted urls:\n%s" - % (req.key, "\n".join(attempted))) + attempted = [f" * {u}" for u in attempted] + raise OSError( + "Could not find '{}' on pypi\nAttempted urls:\n{}".format( + req.key, "\n".join(attempted) + ) + ) def main(): if len(sys.argv) == 1: - print("%s requirement-file ..." 
% (sys.argv[0]), file=sys.stderr) + print(f"{sys.argv[0]} requirement-file ...", file=sys.stderr) sys.exit(1) for filename in sys.argv[1:]: - print("Analyzing file: %s" % (filename)) + print(f"Analyzing file: {filename}") details = {} with open(filename, "rb") as fh: for line in fh.read().splitlines(): @@ -77,15 +82,15 @@ def main(): if line.startswith("#") or not line: continue req = packaging.requirement.Requirement(line) - print(" - processing: %s" % (req)) + print(f" - processing: {req}") try: raw_req_data = release_data(req) - except IOError: + except OSError: traceback.print_exc() details[req.key] = None else: req_info = {} - for (k, v) in raw_req_data.get('info', {}).items(): + for k, v in raw_req_data.get('info', {}).items(): if k not in KEEP_KEYS: continue req_info[k] = v @@ -94,9 +99,12 @@ def main(): 'info': req_info, } filename, _ext = os.path.splitext(filename) - with open("%s.json" % (filename), "wb") as fh: - fh.write(json.dumps(details, sort_keys=True, indent=4, - separators=(",", ": "))) + with open(f"{filename}.json", "wb") as fh: + fh.write( + json.dumps( + details, sort_keys=True, indent=4, separators=(",", ": ") + ) + ) if __name__ == '__main__': diff --git a/openstack_requirements/check.py b/openstack_requirements/check.py index e14547485..9ca0433e5 100644 --- a/openstack_requirements/check.py +++ b/openstack_requirements/check.py @@ -24,12 +24,14 @@ MIN_PY_VERSION = '3.5' PY3_GLOBAL_SPECIFIER_RE = re.compile( - r'python_version(==|>=|>)[\'"]3\.\d+[\'"]') + r'python_version(==|>=|>)[\'"]3\.\d+[\'"]' +) PY3_LOCAL_SPECIFIER_RE = re.compile( - r'python_version(==|>=|>|<=|<)[\'"]3\.\d+[\'"]') + r'python_version(==|>=|>|<=|<)[\'"]3\.\d+[\'"]' +) -class RequirementsList(object): +class RequirementsList: def __init__(self, name, project): self.name = name self.reqs_by_file = {} @@ -38,8 +40,7 @@ def __init__(self, name, project): @property def reqs(self): - return {k: v for d in self.reqs_by_file.values() - for k, v in d.items()} + return {k: v for d in self.reqs_by_file.values() for k, v in d.items()} def extract_reqs(self, content, strict): reqs = collections.defaultdict(set) @@ -51,10 +52,13 @@ def extract_reqs(self, content, strict): list_reqs = [r for (r, line) in entries] # Strip the comments out before checking if there are duplicates list_reqs_stripped = [r._replace(comment='') for r in list_reqs] - if strict and len(list_reqs_stripped) != len(set( - list_reqs_stripped)): - print("ERROR: Requirements file has duplicate entries " - "for package %s : %r." % (name, list_reqs)) + if strict and len(list_reqs_stripped) != len( + set(list_reqs_stripped) + ): + print( + "ERROR: Requirements file has duplicate entries " + f"for package {name} : {list_reqs!r}." + ) self.failed = True reqs[name].update(list_reqs) return reqs @@ -67,17 +71,18 @@ def process(self, strict=True): - each has a list of Requirements objects - duplicates are not permitted within that list """ - print("Checking %(name)s" % {'name': self.name}) + print(f"Checking {self.name}") # First, parse. for fname, content in self.project.get('requirements', {}).items(): - print("Processing %(fname)s" % {'fname': fname}) + print(f"Processing {fname}") if strict and not content.endswith('\n'): - print("Requirements file %s does not " - "end with a newline." % fname) + print( + f"Requirements file {fname} does not end with a newline." 
+ ) self.reqs_by_file[fname] = self.extract_reqs(content, strict) for name, content in project.extras(self.project).items(): - print("Processing .[%(extra)s]" % {'extra': name}) + print(f"Processing .[{name}]") self.reqs_by_file[name] = self.extract_reqs(content, strict) @@ -97,7 +102,6 @@ def _is_requirement_in_global_reqs( ): req_exclusions = _get_exclusions(local_req) for global_req in global_reqs: - matching = True for aname in ['package', 'location', 'markers']: local_req_val = getattr(local_req, aname) @@ -106,20 +110,15 @@ def _is_requirement_in_global_reqs( # if a python 3 version is not spefied in only one of # global requirements or local requirements, allow it since # python 3-only is okay - if ( - allow_3_only and - matching and - aname == 'markers' - ): - if ( - not local_req_val and - PY3_GLOBAL_SPECIFIER_RE.match(global_req_val) + if allow_3_only and matching and aname == 'markers': + if not local_req_val and PY3_GLOBAL_SPECIFIER_RE.match( + global_req_val ): continue if ( - not global_req_val and - local_req_val and - PY3_LOCAL_SPECIFIER_RE.match(local_req_val) + not global_req_val + and local_req_val + and PY3_LOCAL_SPECIFIER_RE.match(local_req_val) ): continue @@ -128,9 +127,9 @@ def _is_requirement_in_global_reqs( # requires a feature that is only available in a newer Python # library, while other packages are happy without this feature if ( - matching and - aname == 'markers' and - local_req.package in backports + matching + and aname == 'markers' + and local_req.package in backports ): if re.match( r'python_version(==|<=|<)[\'"]3\.\d+[\'"]', @@ -142,9 +141,13 @@ def _is_requirement_in_global_reqs( ) continue - print(f'WARNING: possible mismatch found for package "{local_req.package}"') # noqa: E501 + print( + f'WARNING: possible mismatch found for package "{local_req.package}"' + ) # noqa: E501 print(f' Attribute "{aname}" does not match') - print(f' "{local_req_val}" does not match "{global_req_val}"') # noqa: E501 + print( + f' "{local_req_val}" does not match "{global_req_val}"' + ) # noqa: E501 print(f' {local_req}') print(f' {global_req}') matching = False @@ -160,23 +163,21 @@ def _is_requirement_in_global_reqs( else: difference = req_exclusions - global_exclusions print( - "ERROR: Requirement for package {} " + f"ERROR: Requirement for package {local_req.package} " "excludes a version not excluded in the " "global list.\n" - " Local settings : {}\n" - " Global settings: {}\n" - " Unexpected : {}".format( - local_req.package, req_exclusions, global_exclusions, - difference) + f" Local settings : {req_exclusions}\n" + f" Global settings: {global_exclusions}\n" + f" Unexpected : {difference}" ) return False print( "ERROR: " - "Could not find a global requirements entry to match package {}. " + f"Could not find a global requirements entry to match package {local_req.package}. " "If the package is already included in the global list, " "the name or platform markers there may not match the local " - "settings.".format(local_req.package) + "settings." 
) return False @@ -204,9 +205,11 @@ def _get_python3_reqs(reqs): results.append(req) else: req_markers = markers.Marker(req.markers) - if req_markers.evaluate({ - 'python_version': MIN_PY_VERSION, - }): + if req_markers.evaluate( + { + 'python_version': MIN_PY_VERSION, + } + ): results.append(req) return results @@ -228,7 +231,7 @@ def _validate_one( return False if name not in global_reqs: - print("ERROR: Requirement '%s' not in openstack/requirements" % reqs) + print(f"ERROR: Requirement '{reqs}' not in openstack/requirements") return True counts = {} @@ -240,15 +243,19 @@ def _validate_one( counts[''] = counts.get('', 0) + 1 if not _is_requirement_in_global_reqs( - req, global_reqs[name], backports, allow_3_only, + req, + global_reqs[name], + backports, + allow_3_only, ): return True # check for minimum being defined min = [s for s in req.specifiers.split(',') if '>' in s] if not min: - print("ERROR: Requirement for package '%s' has no lower bound" % - name) + print( + f"ERROR: Requirement for package '{name}' has no lower bound" + ) return True for extra, count in counts.items(): @@ -256,21 +263,27 @@ def _validate_one( # just need to make sure we have at least the number of entries for # supported Python 3 versions. if count != len(global_reqs[name]): - if (allow_3_only and - count >= len(_get_python3_reqs(global_reqs[name]))): - print("WARNING (probably OK for Ussuri and later): " - "Package '%s%s' is only tracking python 3 " - "requirements" % ( - name, - ('[%s]' % extra) if extra else '')) + if allow_3_only and count >= len( + _get_python3_reqs(global_reqs[name]) + ): + print( + "WARNING (probably OK for Ussuri and later): " + "Package '{}{}' is only tracking python 3 " + "requirements".format( + name, (f'[{extra}]') if extra else '' + ) + ) continue - print("ERROR: Package '%s%s' requirement does not match " - "number of lines (%d) in " - "openstack/requirements" % ( - name, - ('[%s]' % extra) if extra else '', - len(global_reqs[name]))) + print( + "ERROR: Package '{}{}' requirement does not match " + "number of lines ({}) in " + "openstack/requirements".format( + name, + (f'[{extra}]') if extra else '', + len(global_reqs[name]), + ) + ) return True return False @@ -287,7 +300,7 @@ def validate( # iterate through the changing entries and see if they match the global # equivalents we want enforced for fname, freqs in head_reqs.reqs_by_file.items(): - print("Validating %(fname)s" % {'fname': fname}) + print(f"Validating {fname}") for name, reqs in freqs.items(): failed = ( _validate_one( diff --git a/openstack_requirements/cmds/check_exists.py b/openstack_requirements/cmds/check_exists.py index 62f8bb58d..685b3a259 100644 --- a/openstack_requirements/cmds/check_exists.py +++ b/openstack_requirements/cmds/check_exists.py @@ -28,24 +28,33 @@ def main(args=None): parser = argparse.ArgumentParser() parser.add_argument( - 'project', - default='', - help='path to the project source root folder.') + 'project', default='', help='path to the project source root folder.' 
+ ) parser.add_argument( - '-u', '--upper-constraints', + '-u', + '--upper-constraints', default='upper-constraints.txt', - help='path to the upper-constraints.txt file') + help='path to the upper-constraints.txt file', + ) parser.add_argument( - '-g', '--global-requirements', + '-g', + '--global-requirements', default='global-requirements.txt', - help='Path to the global-requirements.txt file') + help='Path to the global-requirements.txt file', + ) parser.add_argument( - '-b', '-d', '--denylist', + '-b', + '-d', + '--denylist', default='denylist.txt', - help='Path to the denylist.txt file') + help='Path to the denylist.txt file', + ) parser.add_argument( - '-G', '--gr-check', action='store_true', - help='Do a specifier check of global-requirements') + '-G', + '--gr-check', + action='store_true', + help='Do a specifier check of global-requirements', + ) args = parser.parse_args(args) upper_constraints = read_requirements_file(args.upper_constraints) @@ -55,20 +64,23 @@ def main(args=None): error_count = 0 for require_file, data in project_data.get('requirements', {}).items(): - print(u'\nComparing %s with global-requirements and upper-constraints' - % require_file) + print( + f'\nComparing {require_file} with global-requirements and upper-constraints' + ) requirements = requirement.parse(data) for name, spec_list in requirements.items(): if not name or name in denylist: continue if name not in global_requirements: - print(u'%s from %s not found in global-requirements' % ( - name, require_file)) + print( + f'{name} from {require_file} not found in global-requirements' + ) error_count += 1 continue if name not in upper_constraints: - print(u'%s from %s not found in upper-constraints' % ( - name, require_file)) + print( + f'{name} from {require_file} not found in upper-constraints' + ) error_count += 1 continue elif spec_list: @@ -83,9 +95,9 @@ def main(args=None): # then something is wrong. if Version(uc_spec.version) not in specs: print( - u'%s must be <= %s from upper-constraints and ' - 'include the upper-constraints version' % - (name, uc_spec.version)) + f'{name} must be <= {uc_spec.version} from upper-constraints and ' + 'include the upper-constraints version' + ) error_count += 1 continue if args.gr_check: @@ -97,9 +109,9 @@ def main(args=None): continue if spec.version not in spec_gr: print( - u'Specifier %s from %s is failing check ' - 'from global-requirements specifiers %s' % - (spec.version, name, str(spec_gr))) + f'Specifier {spec.version} from {name} is failing check ' + f'from global-requirements specifiers {str(spec_gr)}' + ) error_count += 1 continue diff --git a/openstack_requirements/cmds/edit_constraint.py b/openstack_requirements/cmds/edit_constraint.py index 1db62f807..a92f495cc 100644 --- a/openstack_requirements/cmds/edit_constraint.py +++ b/openstack_requirements/cmds/edit_constraint.py @@ -26,7 +26,8 @@ def edit(reqs, name, replacement): reqs.pop(key, None) else: reqs[key] = [ - (requirement.Requirement('', '', '', '', replacement), '')] + (requirement.Requirement('', '', '', '', replacement), '') + ] result = [] for entries in reqs.values(): for entry, _ in entries: @@ -47,8 +48,8 @@ def _validate_options(options, args): raise Exception("Not enough arguments given") if not os.path.exists(args[0]): raise Exception( - "Constraints file %(con)s not found." - % dict(con=args[0])) + "Constraints file {con} not found.".format(**dict(con=args[0])) + ) def main(argv=None, stdout=None): @@ -59,17 +60,18 @@ def main(argv=None, stdout=None): "replacement". 
If "name" is not present, it is added to the end of the file. If "replacement" is missing or empty, remove "name" from the file. - """)) + """), + ) options, args = parser.parse_args(argv) if stdout is None: stdout = sys.stdout _validate_options(options, args) args = args + [""] - content = open(args[0], 'rt').read() + content = open(args[0]).read() reqs = requirement.parse(content, permit_urls=True) out_reqs = edit(reqs, args[1], args[2]) out = requirement.to_content(out_reqs) - with open(args[0] + '.tmp', 'wt') as f: + with open(args[0] + '.tmp', 'w') as f: f.write(out) if os.path.exists(args[0]): os.remove(args[0]) diff --git a/openstack_requirements/cmds/generate.py b/openstack_requirements/cmds/generate.py index 0ad2e6280..edee037a7 100644 --- a/openstack_requirements/cmds/generate.py +++ b/openstack_requirements/cmds/generate.py @@ -34,7 +34,7 @@ "# testing, and can contain known vulnerabilities. Consumers are\n", "# *STRONGLY* encouraged to rely on curated distributions of OpenStack\n", "# or manage security patching of dependencies themselves.\n", - ] +] def _parse_freeze(text): @@ -47,7 +47,7 @@ def _parse_freeze(text): for line in text.splitlines(): line = line.strip() if line.startswith('-'): - raise Exception("Irregular line: %s" % line) + raise Exception(f"Irregular line: {line}") if line.startswith('#'): continue if not line: @@ -82,27 +82,34 @@ def _freeze(requirements, python): output = [] try: version_out = subprocess.check_output( - [python, "--version"], stderr=subprocess.STDOUT) + [python, "--version"], stderr=subprocess.STDOUT + ) output.append(version_out) version_all = version_out.decode('utf-8').split()[1] version = '.'.join(version_all.split('.')[:2]) with fixtures.TempDir() as temp: - output.append(subprocess.check_output( - [python, '-m', 'venv', temp.path])) + output.append( + subprocess.check_output([python, '-m', 'venv', temp.path]) + ) pip_bin = os.path.join(temp.path, 'bin', 'pip') - output.append(subprocess.check_output( - [pip_bin, 'install', '-U', 'pip', 'setuptools', 'wheel'])) - output.append(subprocess.check_output( - [pip_bin, 'install', '-r', requirements])) - freeze = subprocess.check_output( - [pip_bin, 'freeze']) + output.append( + subprocess.check_output( + [pip_bin, 'install', '-U', 'pip', 'setuptools', 'wheel'] + ) + ) + output.append( + subprocess.check_output( + [pip_bin, 'install', '-r', requirements] + ) + ) + freeze = subprocess.check_output([pip_bin, 'freeze']) output.append(freeze) return (version, _parse_freeze(freeze.decode('utf-8'))) except Exception as exc: if isinstance(exc, subprocess.CalledProcessError): output.append(exc.output) raise Exception( - "Failed to generate freeze: %s %s" % ( + "Failed to generate freeze: {} {}".format( b'\n'.join(output).decode('utf-8'), exc, ) @@ -132,17 +139,19 @@ def _combine_freezes(freezes, denylist=None): :return: A list of '\n' terminated lines for a requirements file. 
""" packages = {} # {package : {version : [py_version]}} - excludes = frozenset((requirement.canonical_name(s) - for s in denylist) if denylist else ()) + excludes = frozenset( + (requirement.canonical_name(s) for s in denylist) if denylist else () + ) reference_versions = [] for py_version, freeze in freezes: if py_version in reference_versions: - raise Exception("Duplicate python %s" % py_version) + raise Exception(f"Duplicate python {py_version}") reference_versions.append(py_version) for package, version in freeze: - packages.setdefault( - package, {}).setdefault(version, []).append(py_version) + packages.setdefault(package, {}).setdefault(version, []).append( + py_version + ) for package, versions in sorted(packages.items()): if package.lower() in excludes: @@ -151,7 +160,9 @@ def _combine_freezes(freezes, denylist=None): if len(versions) > 1: # markers for packages with multiple versions - we use python # version ranges for these - for idx, (version, py_versions) in enumerate(sorted(versions.items())): # noqa: E501 + for idx, (version, py_versions) in enumerate( + sorted(versions.items()) + ): # noqa: E501 if idx == 0: # lower-bound marker = f"python_version<='{py_versions[-1]}'" elif idx + 1 != len(versions): # intermediate version(s) @@ -163,7 +174,9 @@ def _combine_freezes(freezes, denylist=None): elif list(versions.values())[0] != reference_versions: # markers for packages with a single version - these are usually # version specific so we use strict python versions for these - for idx, (version, py_versions) in enumerate(sorted(versions.items())): # noqa: E501 + for idx, (version, py_versions) in enumerate( + sorted(versions.items()) + ): # noqa: E501 for py_version in sorted(py_versions): marker = f"python_version=='{py_version}'" yield f'{package}==={version};{marker}\n' @@ -179,13 +192,16 @@ def _clone_versions(freezes, options): if version in options.version_map: for dst_version in sorted(options.version_map[version]): if dst_version not in versions: - print("Duplicating %s freeze to %s" % - (version, dst_version), file=sys.stderr) + print( + f"Duplicating {version} freeze to {dst_version}", + file=sys.stderr, + ) freezes.append((dst_version, copy.copy(freeze))) # -- untested UI glue from here down. + def _validate_options(options): """Check that options are valid. @@ -196,23 +212,30 @@ def _validate_options(options): for python in options.pythons: if not shutil.which(python): raise Exception( - "Python %(python)s not found." % dict(python=python)) + "Python {python} not found.".format(**dict(python=python)) + ) if not options.requirements: raise Exception("No requirements file specified - see -r.") if not os.path.exists(options.requirements): raise Exception( - "Requirements file %(req)s not found." - % dict(req=options.requirements)) + "Requirements file {req} not found.".format( + **dict(req=options.requirements) + ) + ) if options.denylist and not os.path.exists(options.denylist): raise Exception( - "Denylist file %(path)s not found." 
- % dict(path=options.denylist)) + "Denylist file {path} not found.".format( + **dict(path=options.denylist) + ) + ) version_map = {} for map_entry in options.version_map: if ':' not in map_entry: raise Exception( - "Invalid version-map entry %(map_entry)s" - % dict(map_entry=map_entry)) + "Invalid version-map entry {map_entry}".format( + **dict(map_entry=map_entry) + ) + ) src, dst = map_entry.split(':') version_map.setdefault(src, set()) version_map[src].add(dst) @@ -223,7 +246,7 @@ def _parse_denylist(path): """Return the strings from path if it is not None.""" if path is None: return [] - with open(path, 'rt') as f: + with open(path) as f: return [line.strip() for line in f] @@ -243,30 +266,43 @@ def _make_sort_key(line): def main(argv=None, stdout=None): parser = optparse.OptionParser() parser.add_option( - "-p", dest="pythons", action="append", + "-p", + dest="pythons", + action="append", help="Specify Python versions to use when generating constraints." - "e.g. -p /usr/bin/python3") + "e.g. -p /usr/bin/python3", + ) parser.add_option( - "-r", dest="requirements", help="Requirements file to process.") + "-r", dest="requirements", help="Requirements file to process." + ) parser.add_option( - "-b", "-d", dest="denylist", - help="Filename of a list of package names to exclude.") + "-b", + "-d", + dest="denylist", + help="Filename of a list of package names to exclude.", + ) parser.add_option( - "--version-map", dest='version_map', default=[], action='append', - help=('Add a : separated list of versions to clone. To \'clone\' ' - 'a freeze generated by python3.4 to python3.5 specify 3.4:3.5. ' - 'This is intended as as a way to transition between python ' - 'versions when it\'s not possible to have all versions ' - 'installed')) + "--version-map", + dest='version_map', + default=[], + action='append', + help=( + 'Add a : separated list of versions to clone. To \'clone\' ' + 'a freeze generated by python3.4 to python3.5 specify 3.4:3.5. 
' + 'This is intended as as a way to transition between python ' + 'versions when it\'s not possible to have all versions ' + 'installed' + ), + ) options, args = parser.parse_args(argv) if stdout is None: stdout = sys.stdout _validate_options(options) freezes = [ - _freeze(options.requirements, python) for python in options.pythons] + _freeze(options.requirements, python) for python in options.pythons + ] _clone_versions(freezes, options) denylist = _parse_denylist(options.denylist) - frozen = [ - *sorted(_combine_freezes(freezes, denylist), key=_make_sort_key)] + frozen = [*sorted(_combine_freezes(freezes, denylist), key=_make_sort_key)] stdout.writelines(SECURITY_WARNING + frozen) stdout.flush() diff --git a/openstack_requirements/cmds/normalize_requirements.py b/openstack_requirements/cmds/normalize_requirements.py index a5d728fb5..bcf8b518f 100644 --- a/openstack_requirements/cmds/normalize_requirements.py +++ b/openstack_requirements/cmds/normalize_requirements.py @@ -19,7 +19,7 @@ def write_requirements_file(filename, reqs): - with open(filename + 'tmp', 'wt') as f: + with open(filename + 'tmp', 'w') as f: f.write(reqs) if os.path.exists(filename): os.remove(filename) @@ -28,11 +28,16 @@ def write_requirements_file(filename, reqs): def main(): parser = argparse.ArgumentParser( - description="Normalize requirements files") + description="Normalize requirements files" + ) parser.add_argument('requirements', help='requirements file input') - parser.add_argument('-s', '--save', action='store_true', default=False, - help=('save normalized requirements ' - 'file instead of displaying it')) + parser.add_argument( + '-s', + '--save', + action='store_true', + default=False, + help=('save normalized requirements file instead of displaying it'), + ) args = parser.parse_args() with open(args.requirements) as f: requirements = [line.strip() for line in f.readlines()] diff --git a/openstack_requirements/cmds/validate.py b/openstack_requirements/cmds/validate.py index 8106238e6..dd0013509 100644 --- a/openstack_requirements/cmds/validate.py +++ b/openstack_requirements/cmds/validate.py @@ -10,9 +10,7 @@ # License for the specific language governing permissions and limitations # under the License. -"""Apply validation rules to the various requirements lists. - -""" +"""Apply validation rules to the various requirements lists.""" import argparse import os @@ -44,44 +42,47 @@ def main(): error_count = 0 # Check the format of the constraints file. - print('\nChecking %s' % args.upper_constraints) + print(f'\nChecking {args.upper_constraints}') constraints_txt = read_requirements_file(args.upper_constraints) for msg in constraints.check_format(constraints_txt): print(msg) error_count += 1 # Check that the constraints and requirements are compatible. - print('\nChecking %s' % args.global_requirements) + print(f'\nChecking {args.global_requirements}') global_reqs = read_requirements_file(args.global_requirements) for msg in constraints.check_compatible(global_reqs, constraints_txt): print(msg) error_count += 1 # Check requirements to satisfy policy. 
- print('\nChecking requirements on %s' % args.global_requirements) + print(f'\nChecking requirements on {args.global_requirements}') for msg in requirement.check_reqs_bounds_policy(global_reqs): print(msg) error_count += 1 # Check that global requirements are uniformly formatted - print('\nValidating uniform formatting on %s' % args.global_requirements) - with open(args.global_requirements, 'rt') as f: + print(f'\nValidating uniform formatting on {args.global_requirements}') + with open(args.global_requirements) as f: for line in f: if line == '\n': continue req = requirement.parse_line(line) normed_req = req.to_line(comment_prefix=' ', sort_specifiers=True) if line.rstrip() != normed_req.rstrip(): - print("-%s\n+%s" % (line.rstrip(), normed_req.rstrip())) + print(f"-{line.rstrip()}\n+{normed_req.rstrip()}") error_count += 1 # Check that all of the items in the global-requirements list # appear in exactly one of the constraints file or the denylist. - print('\nChecking %s' % args.denylist) + print(f'\nChecking {args.denylist}') denylist = read_requirements_file(args.denylist) for msg in constraints.check_denylist_coverage( - global_reqs, constraints_txt, denylist, - os.path.basename(args.upper_constraints)): + global_reqs, + constraints_txt, + denylist, + os.path.basename(args.upper_constraints), + ): print(msg) error_count += 1 diff --git a/openstack_requirements/cmds/validate_projects.py b/openstack_requirements/cmds/validate_projects.py index db7de5598..b50d8e9bc 100644 --- a/openstack_requirements/cmds/validate_projects.py +++ b/openstack_requirements/cmds/validate_projects.py @@ -10,24 +10,24 @@ # License for the specific language governing permissions and limitations # under the License. -"""Apply validation rules to the projects.txt file - -""" +"""Apply validation rules to the projects.txt file""" import argparse from openstack_requirements import project_config -_BLACKLIST = set([ - # NOTE(dhellmann): It's not clear why these don't get updates, - # except that trying to do so may break the test jobs using them - # because of the nature of the projects. - 'openstack/hacking', - 'openstack/pbr', - # We can't enforce the check rules against this repo. - 'openstack/requirements', -]) +_BLACKLIST = set( + [ + # NOTE(dhellmann): It's not clear why these don't get updates, + # except that trying to do so may break the test jobs using them + # because of the nature of the projects. + 'openstack/hacking', + 'openstack/pbr', + # We can't enforce the check rules against this repo. + 'openstack/requirements', + ] +) def main(): @@ -43,8 +43,8 @@ def main(): error_count = 0 - print('\nChecking %s' % args.projects_list) - with open(args.projects_list, 'r') as f: + print(f'\nChecking {args.projects_list}') + with open(args.projects_list) as f: for repo in f: repo = repo.strip() if repo.startswith('#'): @@ -52,7 +52,8 @@ def main(): if repo in _BLACKLIST: continue pe = project_config.require_check_requirements_for_repo( - zuul_projects, repo) + zuul_projects, repo + ) for e in pe: print(e) error_count += 1 diff --git a/openstack_requirements/constraints.py b/openstack_requirements/constraints.py index 6680d3a20..f06d0e0fe 100644 --- a/openstack_requirements/constraints.py +++ b/openstack_requirements/constraints.py @@ -19,41 +19,46 @@ # should not be denylisted. We don't know yet what versions they # should have, so just ignore them for a little while until we have # time to figure that out. 
-UNCONSTRAINABLE = set([ - 'argparse', - 'pip', - 'setuptools', - 'wmi', - 'pywin32', - 'pymi', - 'wheel', - '', # blank lines -]) - - -def check_denylist_coverage(global_reqs, constraints, denylist, - constraints_list_name): +UNCONSTRAINABLE = set( + [ + 'argparse', + 'pip', + 'setuptools', + 'wmi', + 'pywin32', + 'pymi', + 'wheel', + '', # blank lines + ] +) + + +def check_denylist_coverage( + global_reqs, constraints, denylist, constraints_list_name +): """Report any items that are not properly constrained. Check that all of the items in the global-requirements list appear either in the constraints file or the denylist. """ to_be_constrained = ( - set(global_reqs.keys()) - set(denylist.keys()) - - UNCONSTRAINABLE + set(global_reqs.keys()) - set(denylist.keys()) - UNCONSTRAINABLE ) constrained = set(constraints.keys()) - set(['']) unconstrained = to_be_constrained - constrained for u in sorted(unconstrained): - yield ('%r appears in global-requirements.txt ' - 'but not %s or denylist.txt' % (u, constraints_list_name)) + yield ( + f'{u!r} appears in global-requirements.txt ' + f'but not {constraints_list_name} or denylist.txt' + ) # Verify that the denylist packages are not also listed in # the constraints file. dupes = constrained.intersection(set(denylist.keys())) for d in dupes: - yield ('%r appears in both denylist.txt and %s' - % (d, constraints_list_name)) + yield ( + f'{d!r} appears in both denylist.txt and {constraints_list_name}' + ) def check_format(parsed_constraints): @@ -61,8 +66,9 @@ def check_format(parsed_constraints): for name, spec_list in parsed_constraints.items(): for req, original_line in spec_list: if not req.specifiers.startswith('==='): - yield ('Invalid constraint for %s does not have 3 "=": %s' % - (name, original_line)) + yield ( + f'Invalid constraint for {name} does not have 3 "=": {original_line}' + ) def check_compatible(global_reqs, constraints): @@ -88,6 +94,7 @@ def check_compatible(global_reqs, constraints): :param constraints: The same from given constraints.txt. :return: A list of the error messages for constraints that failed. """ + def satisfied(reqs, name, version, failures): if name not in reqs: return True @@ -98,9 +105,11 @@ def satisfied(reqs, name, version, failures): if spec.contains(version, prereleases=True): return True tested.append(constraint.specifiers) - failures.append('Constraint %s for %s does not match requirement %s' % - (version, name, tested)) + failures.append( + f'Constraint {version} for {name} does not match requirement {tested}' + ) return False + failures = [] for pkg_constraints in constraints.values(): for constraint, _ in pkg_constraints: diff --git a/openstack_requirements/project.py b/openstack_requirements/project.py index 02ec716b9..056a0120e 100644 --- a/openstack_requirements/project.py +++ b/openstack_requirements/project.py @@ -34,14 +34,15 @@ def extras(project): # IO from here to the end of the file. 
+ def _safe_read(project, filename, output=None): if output is None: output = project try: path = os.path.join(project['root'], filename) - with io.open(path, 'rt', encoding="utf-8") as f: + with open(path, encoding="utf-8") as f: output[filename] = f.read() - except IOError as e: + except OSError as e: if e.errno != errno.ENOENT: raise @@ -62,13 +63,15 @@ def read(root): requirements = {} result['requirements'] = requirements target_files = [ - 'requirements.txt', 'tools/pip-requires', - 'test-requirements.txt', 'tools/test-requires', + 'requirements.txt', + 'tools/pip-requires', + 'test-requirements.txt', + 'tools/test-requires', 'doc/requirements.txt', ] for py_version in (2, 3): - target_files.append('requirements-py%s.txt' % py_version) - target_files.append('test-requirements-py%s.txt' % py_version) + target_files.append(f'requirements-py{py_version}.txt') + target_files.append(f'test-requirements-py{py_version}.txt') for target_file in target_files: _safe_read(result, target_file, output=requirements) return result diff --git a/openstack_requirements/project_config.py b/openstack_requirements/project_config.py index 2d6e179e7..23c91704d 100644 --- a/openstack_requirements/project_config.py +++ b/openstack_requirements/project_config.py @@ -10,8 +10,7 @@ # License for the specific language governing permissions and limitations # under the License. -"""Work with the project-config repository. -""" +"""Work with the project-config repository.""" import requests import yaml @@ -32,10 +31,7 @@ def get_zuul_projects_data(url=ZUUL_PROJECTS_URL): raw = yaml.safe_load(r.text) # Add a mapping from repo name to repo settings, since that is how # we access this most often. - projects = { - p['project']['name']: p['project'] - for p in raw - } + projects = {p['project']['name']: p['project'] for p in raw} return projects @@ -49,8 +45,7 @@ def require_check_requirements_for_repo(zuul_projects, repo): if repo not in zuul_projects: errors.append( - ('did not find %s in %s' % (repo, ZUUL_PROJECTS_FILENAME), - True) + (f'did not find {repo} in {ZUUL_PROJECTS_FILENAME}', True) ) else: p = zuul_projects[repo] @@ -59,7 +54,6 @@ def require_check_requirements_for_repo(zuul_projects, repo): # jobs, because we want projects to use the templates. 
if 'check-requirements' not in templates: errors.append( - '%s no check-requirements job specified for %s' - % (ZUUL_PROJECTS_FILENAME, repo) + f'{ZUUL_PROJECTS_FILENAME} no check-requirements job specified for {repo}' ) return errors diff --git a/openstack_requirements/requirement.py b/openstack_requirements/requirement.py index 89b3e4c56..8276498d5 100644 --- a/openstack_requirements/requirement.py +++ b/openstack_requirements/requirement.py @@ -23,42 +23,62 @@ def key_specifier(a): - weight = {'>=': 0, '>': 0, - '===': 1, '==': 1, '~=': 1, '!=': 1, - '<': 2, '<=': 2} + weight = { + '>=': 0, + '>': 0, + '===': 1, + '==': 1, + '~=': 1, + '!=': 1, + '<': 2, + '<=': 2, + } a = a._spec return (weight[a[0]], packaging.version.parse(a[1])) -class Requirement(collections.namedtuple('Requirement', - ['package', 'location', 'specifiers', - 'markers', 'comment', 'extras'])): - def __new__(cls, package, location, specifiers, markers, comment, - extras=None): - return super(Requirement, cls).__new__( - cls, package, location, specifiers, markers, comment, - frozenset(extras or ())) - - def to_line(self, marker_sep=';', line_prefix='', comment_prefix=' ', - sort_specifiers=False): +class Requirement( + collections.namedtuple( + 'Requirement', + ['package', 'location', 'specifiers', 'markers', 'comment', 'extras'], + ) +): + def __new__( + cls, package, location, specifiers, markers, comment, extras=None + ): + return super().__new__( + cls, + package, + location, + specifiers, + markers, + comment, + frozenset(extras or ()), + ) + + def to_line( + self, + marker_sep=';', + line_prefix='', + comment_prefix=' ', + sort_specifiers=False, + ): comment_p = comment_prefix if self.package else '' - comment = (comment_p + self.comment if self.comment else '') + comment = comment_p + self.comment if self.comment else '' marker = marker_sep + self.markers if self.markers else '' package = line_prefix + self.package if self.package else '' location = self.location + '#egg=' if self.location else '' - extras = '[%s]' % ",".join(sorted(self.extras)) if self.extras else '' + extras = ( + '[{}]'.format(",".join(sorted(self.extras))) if self.extras else '' + ) specifiers = self.specifiers if sort_specifiers: _specifiers = packaging.specifiers.SpecifierSet(specifiers) - _specifiers = ['%s' % s for s in sorted(_specifiers, - key=key_specifier)] + _specifiers = [ + f'{s}' for s in sorted(_specifiers, key=key_specifier) + ] specifiers = ','.join(_specifiers) - return '%s%s%s%s%s%s\n' % (location, - package, - extras, - specifiers, - marker, - comment) + return f'{location}{package}{extras}{specifiers}{marker}{comment}\n' Requirements = collections.namedtuple('Requirements', ['reqs']) @@ -66,7 +86,8 @@ def to_line(self, marker_sep=';', line_prefix='', comment_prefix=' ', url_re = re.compile( r'^(?P\s*(?:-e\s)?\s*(?:(?:[a-z]+\+)?(?:[a-z]+))://[^#]*)' - r'#egg=(?P[-\.\w]+)') + r'#egg=(?P[-\.\w]+)' +) def canonical_name(req_name): @@ -126,7 +147,7 @@ def parse_line(req_line, permit_urls=False): marker_pos = max(semi_pos, colon_pos) if marker_pos < 0: marker_pos = hash_pos - markers = req_line[marker_pos + 1:hash_pos].strip() + markers = req_line[marker_pos + 1 : hash_pos].strip() if hash_pos != end: comment = req_line[hash_pos:] else: @@ -154,7 +175,7 @@ def to_content(reqs, marker_sep=';', line_prefix=''): lines = [] for req in reqs.reqs: lines.append(req.to_line(marker_sep, line_prefix)) - return u''.join(lines) + return ''.join(lines) def to_dict(req_sequence): @@ -169,12 +190,15 @@ def to_dict(req_sequence): def 
_pass_through(req_line, permit_urls=False): """Identify unparsable lines.""" if permit_urls: - return (req_line.startswith('http://tarballs.openstack.org/') or - req_line.startswith('-f')) + return req_line.startswith( + 'http://tarballs.openstack.org/' + ) or req_line.startswith('-f') else: - return (req_line.startswith('http://tarballs.openstack.org/') or - req_line.startswith('-e') or - req_line.startswith('-f')) + return ( + req_line.startswith('http://tarballs.openstack.org/') + or req_line.startswith('-e') + or req_line.startswith('-f') + ) def to_reqs(content, permit_urls=False): @@ -204,5 +228,6 @@ def check_reqs_bounds_policy(global_reqs): if spec.operator == '>=': lower_bound.add(spec) if len(lower_bound): - yield ('Requirement %s should not include a >= specifier' % - req.package) + yield ( + f'Requirement {req.package} should not include a >= specifier' + ) diff --git a/openstack_requirements/tests/common.py b/openstack_requirements/tests/common.py index 97975aa4d..3465f9662 100644 --- a/openstack_requirements/tests/common.py +++ b/openstack_requirements/tests/common.py @@ -29,15 +29,16 @@ class Project(fixtures.Fixture): """A single project we can update.""" def __init__( - self, req_path, setup_path, setup_cfg_path, test_req_path=None): - super(Project, self).__init__() + self, req_path, setup_path, setup_cfg_path, test_req_path=None + ): + super().__init__() self._req_path = req_path self._setup_path = setup_path self._setup_cfg_path = setup_cfg_path self._test_req_path = test_req_path def setUp(self): - super(Project, self).setUp() + super().setUp() self.root = self.useFixture(fixtures.TempDir()).path self.req_file = os.path.join(self.root, 'requirements.txt') self.setup_file = os.path.join(self.root, 'setup.py') @@ -54,34 +55,39 @@ def setUp(self): "openstack_requirements/tests/files/project.txt", "openstack_requirements/tests/files/setup.py", "openstack_requirements/tests/files/setup.cfg", - "openstack_requirements/tests/files/test-project.txt") + "openstack_requirements/tests/files/test-project.txt", +) bad_project_fixture = Project( "openstack_requirements/tests/files/project-with-bad-requirement.txt", "openstack_requirements/tests/files/setup.py", - "openstack_requirements/tests/files/setup.cfg") + "openstack_requirements/tests/files/setup.cfg", +) oslo_fixture = Project( "openstack_requirements/tests/files/project-with-oslo-tar.txt", "openstack_requirements/tests/files/old-setup.py", - "openstack_requirements/tests/files/setup.cfg") + "openstack_requirements/tests/files/setup.cfg", +) pbr_fixture = Project( "openstack_requirements/tests/files/project.txt", "openstack_requirements/tests/files/setup.py", "openstack_requirements/tests/files/pbr_setup.cfg", - "openstack_requirements/tests/files/test-project.txt") + "openstack_requirements/tests/files/test-project.txt", +) class GlobalRequirements(fixtures.Fixture): - def setUp(self): - super(GlobalRequirements, self).setUp() + super().setUp() self.root = self.useFixture(fixtures.TempDir()).path self.req_file = os.path.join(self.root, "global-requirements.txt") shutil.copy( - "openstack_requirements/tests/files/gr-base.txt", self.req_file) + "openstack_requirements/tests/files/gr-base.txt", self.req_file + ) self.denylist_file = os.path.join(self.root, "denylist.txt") shutil.copy( "openstack_requirements/tests/files/denylist.txt", - self.denylist_file) + self.denylist_file, + ) # Static data for unit testing. 
@@ -91,12 +97,14 @@ def make_project(fixture): global_reqs = requirement.parse( - open("openstack_requirements/tests/files/gr-base.txt", "rt").read()) + open("openstack_requirements/tests/files/gr-base.txt").read() +) upper_constraints = requirement.parse( - open("openstack_requirements/tests/files/upper-constraints.txt", - "rt").read()) + open("openstack_requirements/tests/files/upper-constraints.txt").read() +) denylist = requirement.parse( - open("openstack_requirements/tests/files/denylist.txt", "rt").read()) + open("openstack_requirements/tests/files/denylist.txt").read() +) pbr_project = make_project(pbr_fixture) project_project = make_project(project_fixture) bad_project = make_project(bad_project_fixture) diff --git a/openstack_requirements/tests/files/old-setup.py b/openstack_requirements/tests/files/old-setup.py index ff5795f7d..6ecd2209e 100644 --- a/openstack_requirements/tests/files/old-setup.py +++ b/openstack_requirements/tests/files/old-setup.py @@ -21,7 +21,7 @@ name = 'swift' -with open('requirements.txt', 'r') as f: +with open('requirements.txt') as f: requires = [x.strip() for x in f if x.strip()] diff --git a/openstack_requirements/tests/files/setup.py b/openstack_requirements/tests/files/setup.py index 3144d17a0..74e4320cf 100644 --- a/openstack_requirements/tests/files/setup.py +++ b/openstack_requirements/tests/files/setup.py @@ -16,6 +16,4 @@ import setuptools -setuptools.setup( - setup_requires=['d2to1', 'pbr>=0.5,<0.6'], - d2to1=True) +setuptools.setup(setup_requires=['d2to1', 'pbr>=0.5,<0.6'], d2to1=True) diff --git a/openstack_requirements/tests/test_check.py b/openstack_requirements/tests/test_check.py index 8983e5f62..e8a25e05d 100644 --- a/openstack_requirements/tests/test_check.py +++ b/openstack_requirements/tests/test_check.py @@ -20,7 +20,6 @@ class TestIsReqInGlobalReqs(testtools.TestCase): - def setUp(self): super().setUp() @@ -29,11 +28,13 @@ def setUp(self): self.backports = list() self.useFixture(fixtures.MonkeyPatch('sys.stdout', self.stdout)) - self.global_reqs = check.get_global_reqs(textwrap.dedent(""" + self.global_reqs = check.get_global_reqs( + textwrap.dedent(""" name>=1.2,!=1.4 withmarker>=1.5;python_version=='3.5' withmarker>=1.2,!=1.4;python_version=='2.7' - """)) + """) + ) def test_match(self): """Test a basic package.""" @@ -48,9 +49,11 @@ def test_match(self): def test_match_with_markers(self): """Test a package specified with python 3 markers.""" - req = requirement.parse(textwrap.dedent(""" + req = requirement.parse( + textwrap.dedent(""" withmarker>=1.5;python_version=='3.5' - """))['withmarker'][0][0] + """) + )['withmarker'][0][0] self.assertTrue( check._is_requirement_in_global_reqs( req, @@ -61,15 +64,17 @@ def test_match_with_markers(self): def test_match_with_local_markers(self): """Test a package specified with python 3 markers.""" - req = requirement.parse(textwrap.dedent(""" + req = requirement.parse( + textwrap.dedent(""" name;python_version=='3.5' - """))['name'][0][0] + """) + )['name'][0][0] self.assertTrue( check._is_requirement_in_global_reqs( req, self.global_reqs['name'], self.backports, - allow_3_only=True + allow_3_only=True, ) ) @@ -79,15 +84,17 @@ def test_match_without_python3_markers(self): Python 3 packages are a thing. On those, it's totally unnecessary to specify e.g. a "python_version>'3" marker for packages. 
""" - req = requirement.parse(textwrap.dedent(""" + req = requirement.parse( + textwrap.dedent(""" withmarker>=1.5 - """))['withmarker'][0][0] + """) + )['withmarker'][0][0] self.assertTrue( check._is_requirement_in_global_reqs( req, self.global_reqs['withmarker'], self.backports, - allow_3_only=True + allow_3_only=True, ) ) @@ -182,7 +189,6 @@ def test_missing_exclusion(self): class TestGetExclusions(testtools.TestCase): - def test_none(self): req = list(check.get_global_reqs('name>=1.2')['name'])[0] self.assertEqual( @@ -206,9 +212,8 @@ def test_cap(self): class TestValidateOne(testtools.TestCase): - def setUp(self): - super(TestValidateOne, self).setUp() + super().setUp() self._stdout_fixture = fixtures.StringStream('stdout') self.stdout = self.useFixture(self._stdout_fixture).stream self.useFixture(fixtures.MonkeyPatch('sys.stdout', self.stdout)) @@ -217,10 +222,7 @@ def setUp(self): def test_unchanged(self): # If the line matches the value in the branch list everything # is OK. - reqs = [ - r - for r, line in requirement.parse('name>=1.2,!=1.4')['name'] - ] + reqs = [r for r, line in requirement.parse('name>=1.2,!=1.4')['name']] global_reqs = check.get_global_reqs('name>=1.2,!=1.4') self.assertFalse( check._validate_one( @@ -234,10 +236,7 @@ def test_unchanged(self): def test_denylisted(self): # If the package is denylisted, everything is OK. - reqs = [ - r - for r, line in requirement.parse('name>=1.2,!=1.4')['name'] - ] + reqs = [r for r, line in requirement.parse('name>=1.2,!=1.4')['name']] global_reqs = check.get_global_reqs('name>=1.2,!=1.4') self.assertFalse( check._validate_one( @@ -252,10 +251,7 @@ def test_denylisted(self): def test_denylisted_mismatch(self): # If the package is denylisted, it doesn't matter if the # version matches. - reqs = [ - r - for r, line in requirement.parse('name>=1.5')['name'] - ] + reqs = [r for r, line in requirement.parse('name>=1.5')['name']] global_reqs = check.get_global_reqs('name>=1.2,!=1.4') self.assertFalse( check._validate_one( @@ -269,10 +265,7 @@ def test_denylisted_mismatch(self): def test_not_in_global_list(self): # If the package is not in the global list, that is an error. - reqs = [ - r - for r, line in requirement.parse('name>=1.2,!=1.4')['name'] - ] + reqs = [r for r, line in requirement.parse('name>=1.2,!=1.4')['name']] global_reqs = check.get_global_reqs('') self.assertTrue( check._validate_one( @@ -286,10 +279,7 @@ def test_not_in_global_list(self): def test_new_item_matches_global_list(self): # If the new item matches the global list exactly that is OK. - reqs = [ - r - for r, line in requirement.parse('name>=1.2,!=1.4')['name'] - ] + reqs = [r for r, line in requirement.parse('name>=1.2,!=1.4')['name']] global_reqs = check.get_global_reqs('name>=1.2,!=1.4') self.assertFalse( check._validate_one( @@ -304,10 +294,7 @@ def test_new_item_matches_global_list(self): def test_new_item_lower_min(self): # If the new item has a lower minimum value than the global # list, that is OK. - reqs = [ - r - for r, line in requirement.parse('name>=1.1,!=1.4')['name'] - ] + reqs = [r for r, line in requirement.parse('name>=1.1,!=1.4')['name']] global_reqs = check.get_global_reqs('name>=1.2,!=1.4') self.assertFalse( check._validate_one( @@ -323,8 +310,7 @@ def test_new_item_extra_exclusion(self): # If the new item includes an exclusion that is not present in # the global list that is not OK. 
reqs = [ - r - for r, line in requirement.parse('name>=1.2,!=1.4,!=1.5')['name'] + r for r, line in requirement.parse('name>=1.2,!=1.4,!=1.5')['name'] ] global_reqs = check.get_global_reqs('name>=1.2,!=1.4') self.assertTrue( @@ -340,10 +326,7 @@ def test_new_item_extra_exclusion(self): def test_new_item_missing_exclusion(self): # If the new item does not include an exclusion that is # present in the global list that is OK. - reqs = [ - r - for r, line in requirement.parse('name>=1.2')['name'] - ] + reqs = [r for r, line in requirement.parse('name>=1.2')['name']] global_reqs = check.get_global_reqs('name>=1.2,!=1.4') self.assertFalse( check._validate_one( @@ -363,14 +346,13 @@ def test_new_item_matches_global_list_with_extra(self): name>=1.5;python_version=='3.5' name>=1.2,!=1.4;python_version=='2.6' """) - reqs = [ - r - for r, line in requirement.parse(r_content)['name'] - ] - global_reqs = check.get_global_reqs(textwrap.dedent(""" + reqs = [r for r, line in requirement.parse(r_content)['name']] + global_reqs = check.get_global_reqs( + textwrap.dedent(""" name>=1.5;python_version=='3.5' name>=1.2,!=1.4;python_version=='2.6' - """)) + """) + ) self.assertFalse( check._validate_one( 'name', @@ -388,14 +370,13 @@ def test_new_item_missing_extra_line(self): r_content = textwrap.dedent(""" name>=1.2,!=1.4;python_version=='2.6' """) - reqs = [ - r - for r, line in requirement.parse(r_content)['name'] - ] - global_reqs = check.get_global_reqs(textwrap.dedent(""" + reqs = [r for r, line in requirement.parse(r_content)['name']] + global_reqs = check.get_global_reqs( + textwrap.dedent(""" name>=1.5;python_version=='3.5' name>=1.2,!=1.4;python_version=='2.6' - """)) + """) + ) self.assertTrue( check._validate_one( 'name', @@ -414,14 +395,13 @@ def test_new_item_mismatches_global_list_with_extra(self): name>=1.5;python_version=='3.6' name>=1.2,!=1.4;python_version=='2.6' """) - reqs = [ - r - for r, line in requirement.parse(r_content)['name'] - ] - global_reqs = check.get_global_reqs(textwrap.dedent(""" + reqs = [r for r, line in requirement.parse(r_content)['name']] + global_reqs = check.get_global_reqs( + textwrap.dedent(""" name>=1.5;python_version=='3.5' name>=1.2,!=1.4;python_version=='2.6' - """)) + """) + ) self.assertTrue( check._validate_one( 'name', @@ -440,15 +420,14 @@ def test_new_item_matches_py3_allowed_no_version(self): name>=1.5;python_version=='3.5' other-name """) - reqs = [ - r - for r, line in requirement.parse(r_content)['name'] - ] - global_reqs = check.get_global_reqs(textwrap.dedent(""" + reqs = [r for r, line in requirement.parse(r_content)['name']] + global_reqs = check.get_global_reqs( + textwrap.dedent(""" name>=1.5;python_version=='3.5' name>=1.2,!=1.4;python_version=='2.6' other-name - """)) + """) + ) self.assertFalse( check._validate_one( 'name', @@ -468,15 +447,14 @@ def test_new_item_matches_py3_allowed(self): name>=1.5 other-name """) - reqs = [ - r - for r, line in requirement.parse(r_content)['name'] - ] - global_reqs = check.get_global_reqs(textwrap.dedent(""" + reqs = [r for r, line in requirement.parse(r_content)['name']] + global_reqs = check.get_global_reqs( + textwrap.dedent(""" name>=1.5;python_version>='3.5' name>=1.2,!=1.4;python_version=='2.6' other-name - """)) + """) + ) self.assertFalse( check._validate_one( 'name', @@ -496,14 +474,13 @@ def test_new_item_matches_py3_allowed_with_py2(self): name>=1.5;python_version=='3.5' name>=1.2,!=1.4;python_version=='2.6' """) - reqs = [ - r - for r, line in requirement.parse(r_content)['name'] - ] - global_reqs 
= check.get_global_reqs(textwrap.dedent(""" + reqs = [r for r, line in requirement.parse(r_content)['name']] + global_reqs = check.get_global_reqs( + textwrap.dedent(""" name>=1.5;python_version=='3.5' name>=1.2,!=1.4;python_version=='2.6' - """)) + """) + ) self.assertFalse( check._validate_one( 'name', @@ -521,14 +498,13 @@ def test_new_item_matches_py3_allowed_no_py2(self): r_content = textwrap.dedent(""" name>=1.5;python_version=='3.5' """) - reqs = [ - r - for r, line in requirement.parse(r_content)['name'] - ] - global_reqs = check.get_global_reqs(textwrap.dedent(""" + reqs = [r for r, line in requirement.parse(r_content)['name']] + global_reqs = check.get_global_reqs( + textwrap.dedent(""" name>=1.5;python_version=='3.5' name>=1.2,!=1.4;python_version=='2.6' - """)) + """) + ) self.assertFalse( check._validate_one( 'name', @@ -542,19 +518,22 @@ def test_new_item_matches_py3_allowed_no_py2(self): class TestBackportPythonMarkers(testtools.TestCase): - def setUp(self): - super(TestBackportPythonMarkers, self).setUp() + super().setUp() self._stdout_fixture = fixtures.StringStream('stdout') self.stdout = self.useFixture(self._stdout_fixture).stream self.useFixture(fixtures.MonkeyPatch('sys.stdout', self.stdout)) - self.req = requirement.parse(textwrap.dedent(""" + self.req = requirement.parse( + textwrap.dedent(""" name>=1.5;python_version=='3.11' - """))['name'][0][0] - self.global_reqs = check.get_global_reqs(textwrap.dedent(""" + """) + )['name'][0][0] + self.global_reqs = check.get_global_reqs( + textwrap.dedent(""" name>=1.5;python_version=='3.10' - """)) + """) + ) def test_notmatching_no_backport(self): backports = requirement.parse("") diff --git a/openstack_requirements/tests/test_check_constraints.py b/openstack_requirements/tests/test_check_constraints.py index 8395648c5..354f33c7e 100644 --- a/openstack_requirements/tests/test_check_constraints.py +++ b/openstack_requirements/tests/test_check_constraints.py @@ -32,31 +32,35 @@ def mock_read_requirements_file(filename): elif os.path.basename(filename) == 'denylist.txt': return common.denylist else: - raise IOError('No such file or directory: %s' % filename) + raise OSError(f'No such file or directory: {filename}') class CheckExistsTest(testtools.TestCase): - def setUp(self): - super(CheckExistsTest, self).setUp() + super().setUp() @mock.patch( 'openstack_requirements.cmds.check_exists.read_requirements_file', - mock_read_requirements_file) - @mock.patch('openstack_requirements.project.read', - return_value=common.project_project) + mock_read_requirements_file, + ) + @mock.patch( + 'openstack_requirements.project.read', + return_value=common.project_project, + ) def test_good_project(self, mock_project_read): ret = check_exists.main([common.project_fixture.root]) self.assertEqual(ret, 0) @mock.patch( 'openstack_requirements.cmds.check_exists.read_requirements_file', - mock_read_requirements_file) + mock_read_requirements_file, + ) def test_project_missing_from_uc(self): self.useFixture(common.project_fixture) orig_mocked_read_req = check_exists.read_requirements_file - read_req_path = ('openstack_requirements.cmds.check_exists.' 
- 'read_requirements_file') + read_req_path = ( + 'openstack_requirements.cmds.check_exists.read_requirements_file' + ) def remove_req_read_reqs_file(filename): if filename == 'upper-constraints.txt': @@ -66,51 +70,64 @@ def remove_req_read_reqs_file(filename): return orig_mocked_read_req(filename) - expected_out = ('six from requirements.txt not found in' - ' upper-constraints') + expected_out = ( + 'six from requirements.txt not found in upper-constraints' + ) # Start capturing some output mock_stdout = io.StringIO() - with mock.patch('openstack_requirements.project.read', - return_value=common.project_project), \ - mock.patch('sys.stdout', mock_stdout), \ - mock.patch(read_req_path, remove_req_read_reqs_file): + with ( + mock.patch( + 'openstack_requirements.project.read', + return_value=common.project_project, + ), + mock.patch('sys.stdout', mock_stdout), + mock.patch(read_req_path, remove_req_read_reqs_file), + ): ret = check_exists.main([common.project_fixture.root]) self.assertEqual(ret, 1) self.assertIn(expected_out, mock_stdout.getvalue()) @mock.patch( 'openstack_requirements.cmds.check_exists.read_requirements_file', - mock_read_requirements_file) + mock_read_requirements_file, + ) def test_project_missing_from_gr(self): self.useFixture(common.project_fixture) # Add some random package that wont exist in G-R with open(common.project_fixture.req_file, 'a') as req_file: - req_file.write(u'SomeRandomModule #Some random module\n') + req_file.write('SomeRandomModule #Some random module\n') req_file.flush() - expected_out = ('somerandommodule from requirements.txt not found in' - ' global-requirements') + expected_out = ( + 'somerandommodule from requirements.txt not found in' + ' global-requirements' + ) # Start capturing some output mock_stdout = io.StringIO() proj_read = project.read(common.project_fixture.root) - with mock.patch('openstack_requirements.project.read', - return_value=proj_read), \ - mock.patch('sys.stdout', mock_stdout): + with ( + mock.patch( + 'openstack_requirements.project.read', return_value=proj_read + ), + mock.patch('sys.stdout', mock_stdout), + ): ret = check_exists.main([common.project_fixture.root]) self.assertEqual(ret, 1) self.assertIn(expected_out, mock_stdout.getvalue()) @mock.patch( 'openstack_requirements.cmds.check_exists.read_requirements_file', - mock_read_requirements_file) + mock_read_requirements_file, + ) def test_project_multiple_missing_from_uc_and_gr(self): self.useFixture(common.project_fixture) orig_mocked_read_req = check_exists.read_requirements_file - read_req_path = ('openstack_requirements.cmds.check_exists.' 
- 'read_requirements_file') + read_req_path = ( + 'openstack_requirements.cmds.check_exists.read_requirements_file' + ) def remove_req_read_reqs_file(filename): if filename == 'upper-constraints.txt': @@ -124,11 +141,13 @@ def remove_req_read_reqs_file(filename): # lets change the six requirement not include the u-c version proj_read = project.read(common.project_fixture.root) - proj_read['requirements']['requirements.txt'] = \ + proj_read['requirements']['requirements.txt'] = ( proj_read['requirements']['requirements.txt'][:-1] + new_reqs - proj_read['requirements']['test-requirements.txt'] = \ - proj_read['requirements']['test-requirements.txt'] + \ - 'anotherrandommodule\n' + ) + proj_read['requirements']['test-requirements.txt'] = ( + proj_read['requirements']['test-requirements.txt'] + + 'anotherrandommodule\n' + ) expected_outs = [ 'lxml from requirements.txt not found in upper-constraints', @@ -137,14 +156,18 @@ def remove_req_read_reqs_file(filename): 'anotherrandommodule from test-requirements.txt not found in ' 'global-requirements', 'six must be <= 1.10.0 from upper-constraints and include the ' - 'upper-constraints version'] + 'upper-constraints version', + ] # Start capturing some output mock_stdout = io.StringIO() - with mock.patch('openstack_requirements.project.read', - return_value=proj_read), \ - mock.patch('sys.stdout', mock_stdout), \ - mock.patch(read_req_path, remove_req_read_reqs_file): + with ( + mock.patch( + 'openstack_requirements.project.read', return_value=proj_read + ), + mock.patch('sys.stdout', mock_stdout), + mock.patch(read_req_path, remove_req_read_reqs_file), + ): ret = check_exists.main([common.project_fixture.root]) self.assertEqual(ret, 1) for expected in expected_outs: @@ -152,45 +175,59 @@ def remove_req_read_reqs_file(filename): @mock.patch( 'openstack_requirements.cmds.check_exists.read_requirements_file', - mock_read_requirements_file) + mock_read_requirements_file, + ) def test_project_req_bigger_then_uc(self): self.useFixture(common.project_fixture) # lets change the six requirement not include the u-c version proj_read = project.read(common.project_fixture.root) - proj_read['requirements']['requirements.txt'] = \ + proj_read['requirements']['requirements.txt'] = ( proj_read['requirements']['requirements.txt'][:-1] + '>1.10.0\n' - expected_out = ('six must be <= 1.10.0 from upper-constraints and ' - 'include the upper-constraints version') + ) + expected_out = ( + 'six must be <= 1.10.0 from upper-constraints and ' + 'include the upper-constraints version' + ) # Start capturing some output mock_stdout = io.StringIO() - with mock.patch('openstack_requirements.project.read', - return_value=proj_read), \ - mock.patch('sys.stdout', mock_stdout): + with ( + mock.patch( + 'openstack_requirements.project.read', return_value=proj_read + ), + mock.patch('sys.stdout', mock_stdout), + ): ret = check_exists.main([common.project_fixture.root]) self.assertEqual(ret, 1) self.assertIn(expected_out, mock_stdout.getvalue()) @mock.patch( 'openstack_requirements.cmds.check_exists.read_requirements_file', - mock_read_requirements_file) + mock_read_requirements_file, + ) def test_project_req_not_include_uc_version(self): self.useFixture(common.project_fixture) # lets change the six requirement not include the u-c version proj_read = project.read(common.project_fixture.root) - proj_read['requirements']['requirements.txt'] = \ - proj_read['requirements']['requirements.txt'][:-1] + \ - '<1.10.0,>1.10.0\n' - expected_out = ('six must be <= 1.10.0 from upper-constraints 
and ' - 'include the upper-constraints version') + proj_read['requirements']['requirements.txt'] = ( + proj_read['requirements']['requirements.txt'][:-1] + + '<1.10.0,>1.10.0\n' + ) + expected_out = ( + 'six must be <= 1.10.0 from upper-constraints and ' + 'include the upper-constraints version' + ) # Start capturing some output mock_stdout = io.StringIO() - with mock.patch('openstack_requirements.project.read', - return_value=proj_read), \ - mock.patch('sys.stdout', mock_stdout): + with ( + mock.patch( + 'openstack_requirements.project.read', return_value=proj_read + ), + mock.patch('sys.stdout', mock_stdout), + ): ret = check_exists.main([common.project_fixture.root]) self.assertEqual(ret, 1) self.assertIn(expected_out, mock_stdout.getvalue()) diff --git a/openstack_requirements/tests/test_constraints.py b/openstack_requirements/tests/test_constraints.py index f1a7f1fa8..81815107a 100644 --- a/openstack_requirements/tests/test_constraints.py +++ b/openstack_requirements/tests/test_constraints.py @@ -17,21 +17,18 @@ class TestCheckCompatible(testtools.TestCase): - def test_non_requirement(self): global_reqs = {} good_constraints = requirement.parse("foo===1.2.5\n") self.assertEqual( - [], - constraints.check_compatible(global_reqs, good_constraints) + [], constraints.check_compatible(global_reqs, good_constraints) ) def test_compatible(self): global_reqs = requirement.parse("foo>=1.2\nbar>2.0\n") good_constraints = requirement.parse("foo===1.2.5\n") self.assertEqual( - [], - constraints.check_compatible(global_reqs, good_constraints) + [], constraints.check_compatible(global_reqs, good_constraints) ) def test_constraint_below_range(self): @@ -48,32 +45,29 @@ def test_constraint_above_range(self): class TestCheckFormat(testtools.TestCase): - def test_ok(self): good_constraints = requirement.parse("foo===1.2.5\n") - self.assertEqual( - [], - list(constraints.check_format(good_constraints)) - ) + self.assertEqual([], list(constraints.check_format(good_constraints))) def test_two_equals(self): bad_constraints = requirement.parse("foo==1.2.5\n") self.assertEqual( - 1, - len(list(constraints.check_format(bad_constraints))) + 1, len(list(constraints.check_format(bad_constraints))) ) class TestDenylistCoverage(testtools.TestCase): - def test_constrained(self): global_reqs = requirement.parse("foo>=1.2\nbar>2.0\n") good_constraints = requirement.parse("foo===1.2.5\nbar==2.1") denylist = requirement.parse('flake8\nhacking') self.assertEqual( [], - list(constraints.check_denylist_coverage( - global_reqs, good_constraints, denylist, 'test')) + list( + constraints.check_denylist_coverage( + global_reqs, good_constraints, denylist, 'test' + ) + ), ) def test_denylisted(self): @@ -82,16 +76,22 @@ def test_denylisted(self): denylist = requirement.parse('flake8\nhacking\nbar') self.assertEqual( [], - list(constraints.check_denylist_coverage( - global_reqs, good_constraints, denylist, 'test')) + list( + constraints.check_denylist_coverage( + global_reqs, good_constraints, denylist, 'test' + ) + ), ) def test_both(self): global_reqs = requirement.parse("foo>=1.2\nbar>2.0\n") good_constraints = requirement.parse("foo===1.2.5\nbar>2.0") denylist = requirement.parse('flake8\nhacking\nbar') - results = list(constraints.check_denylist_coverage( - global_reqs, good_constraints, denylist, 'test')) + results = list( + constraints.check_denylist_coverage( + global_reqs, good_constraints, denylist, 'test' + ) + ) self.assertEqual(1, len(results)) self.assertIn("'bar' appears in both", results[0]) @@ -99,7 +99,10 @@ 
def test_neither(self): global_reqs = requirement.parse("foo>=1.2\nbar>2.0\n") good_constraints = requirement.parse("foo===1.2.5\n") denylist = requirement.parse('flake8\nhacking') - results = list(constraints.check_denylist_coverage( - global_reqs, good_constraints, denylist, 'test')) + results = list( + constraints.check_denylist_coverage( + global_reqs, good_constraints, denylist, 'test' + ) + ) self.assertEqual(1, len(results)) self.assertIn("'bar' appears in global-requirements.txt", results[0]) diff --git a/openstack_requirements/tests/test_edit_constraint.py b/openstack_requirements/tests/test_edit_constraint.py index 0c86ddb76..b65acd22a 100644 --- a/openstack_requirements/tests/test_edit_constraint.py +++ b/openstack_requirements/tests/test_edit_constraint.py @@ -26,46 +26,55 @@ class SmokeTest(testtools.TestCase): - def test_make_url(self): stdout = io.StringIO() tmpdir = self.useFixture(fixtures.TempDir()).path constraints_path = os.path.join(tmpdir, 'name.txt') - with open(constraints_path, 'wt') as f: + with open(constraints_path, 'w') as f: f.write('bar===1\nfoo===1.0.2\nquux==3\n') rv = edit.main( - [constraints_path, 'foo', '--', '-e /path/to/foo'], stdout) + [constraints_path, 'foo', '--', '-e /path/to/foo'], stdout + ) self.assertEqual(0, rv) - content = open(constraints_path, 'rt').read() + content = open(constraints_path).read() self.assertEqual('-e /path/to/foo\nbar===1\nquux==3\n', content) def test_edit_paths(self): stdout = io.StringIO() tmpdir = self.useFixture(fixtures.TempDir()).path constraints_path = os.path.join(tmpdir, 'name.txt') - with open(constraints_path, 'wt') as f: - f.write(textwrap.dedent("""\ + with open(constraints_path, 'w') as f: + f.write( + textwrap.dedent("""\ file:///path/to/foo#egg=foo -e file:///path/to/bar#egg=bar - """)) + """) + ) rv = edit.main( [constraints_path, 'foo', '--', '-e file:///path/to/foo#egg=foo'], - stdout) + stdout, + ) self.assertEqual(0, rv) - content = open(constraints_path, 'rt').read() - self.assertEqual(textwrap.dedent("""\ + content = open(constraints_path).read() + self.assertEqual( + textwrap.dedent("""\ -e file:///path/to/foo#egg=foo -e file:///path/to/bar#egg=bar - """), content) + """), + content, + ) class TestEdit(testtools.TestCase): - def test_add(self): reqs = {} res = edit.edit(reqs, 'foo', 'foo==1.2') - self.assertEqual(requirement.Requirements( - [requirement.Requirement('', '', '', '', 'foo==1.2')]), res) + self.assertEqual( + requirement.Requirements( + [requirement.Requirement('', '', '', '', 'foo==1.2')] + ), + res, + ) def test_delete(self): reqs = requirement.parse('foo==1.2\n') @@ -75,18 +84,30 @@ def test_delete(self): def test_replace(self): reqs = requirement.parse('foo==1.2\n') res = edit.edit(reqs, 'foo', 'foo==1.3') - self.assertEqual(requirement.Requirements( - [requirement.Requirement('', '', '', '', 'foo==1.3')]), res) + self.assertEqual( + requirement.Requirements( + [requirement.Requirement('', '', '', '', 'foo==1.3')] + ), + res, + ) def test_replace_many(self): reqs = requirement.parse('foo==1.2;p\nfoo==1.3;q') res = edit.edit(reqs, 'foo', 'foo==1.3') - self.assertEqual(requirement.Requirements( - [requirement.Requirement('', '', '', '', 'foo==1.3')]), res) + self.assertEqual( + requirement.Requirements( + [requirement.Requirement('', '', '', '', 'foo==1.3')] + ), + res, + ) def test_replace_non_canonical(self): new_req = '-e file:///path#egg=foo_baz' reqs = requirement.parse("foo-baz===1.0.2\n") res = edit.edit(reqs, 'foo_baz', new_req) - self.assertEqual(res, 
requirement.Requirements( - [requirement.Requirement('', '', '', '', new_req)])) + self.assertEqual( + res, + requirement.Requirements( + [requirement.Requirement('', '', '', '', new_req)] + ), + ) diff --git a/openstack_requirements/tests/test_generate.py b/openstack_requirements/tests/test_generate.py index 38898b733..b8568e993 100644 --- a/openstack_requirements/tests/test_generate.py +++ b/openstack_requirements/tests/test_generate.py @@ -21,18 +21,23 @@ class TestFreeze(testtools.TestCase): - def test_freeze_smoke(self): # Use an arbitrary python, but make sure it has the venv standard lib. - versions = ['/usr/bin/python3.%(v)s' % dict(v=v) for v in range(5, 10)] + versions = [ + '/usr/bin/python3.{v}'.format(**dict(v=v)) for v in range(5, 10) + ] found = [v for v in versions if os.path.exists(v)] found_with_venv = [] for py in found: - output = str(subprocess.check_output( - [py, - '-c', - 'import pkgutil; [print(x) for x in pkgutil.iter_modules()]'] - )) + output = str( + subprocess.check_output( + [ + py, + '-c', + 'import pkgutil; [print(x) for x in pkgutil.iter_modules()]', + ] + ) + ) # Needs both venv and ensurepip if 'venv' in output and 'ensurepip' in output: found_with_venv.append(py) @@ -44,7 +49,7 @@ def test_freeze_smoke(self): # break. pyversion = found_with_venv[-1] req = self.useFixture(fixtures.TempDir()).path + '/r.txt' - with open(req, 'wt') as output: + with open(req, 'w') as output: output.write('fixtures==2.0.0') frozen = generate._freeze(req, pyversion) expected_version = pyversion[-3:] @@ -56,12 +61,12 @@ def test_freeze_smoke(self): class TestParse(testtools.TestCase): - def test_parse(self): text = "linecache2==1.0.0\nargparse==1.2\n\n# fred\n" parsed = generate._parse_freeze(text) self.assertEqual( - [('linecache2', '1.0.0'), ('argparse', '1.2')], parsed) + [('linecache2', '1.0.0'), ('argparse', '1.2')], parsed + ) def test_editable_banned(self): text = "-e git:..." 
@@ -69,29 +74,33 @@ def test_editable_banned(self): class TestCombine(testtools.TestCase): - def test_same_items(self): fixtures = [('fixtures', '1.2.0')] freeze_27 = ('2.7', fixtures) freeze_34 = ('3.4', fixtures) self.assertEqual( ['fixtures===1.2.0\n'], - list(generate._combine_freezes([freeze_27, freeze_34]))) + list(generate._combine_freezes([freeze_27, freeze_34])), + ) def test_distinct_items(self): freeze_27 = ('2.7', [('fixtures', '1.2.0')]) freeze_34 = ('3.4', [('fixtures', '1.2.0'), ('enum', '1.5.0')]) self.assertEqual( ["enum===1.5.0;python_version=='3.4'\n", 'fixtures===1.2.0\n'], - list(generate._combine_freezes([freeze_27, freeze_34]))) + list(generate._combine_freezes([freeze_27, freeze_34])), + ) def test_different_versions(self): freeze_27 = ('2.7', [('fixtures', '1.2.0')]) freeze_34 = ('3.4', [('fixtures', '1.5.0')]) self.assertEqual( - ["fixtures===1.2.0;python_version<='2.7'\n", - "fixtures===1.5.0;python_version>='3.4'\n"], - list(generate._combine_freezes([freeze_27, freeze_34]))) + [ + "fixtures===1.2.0;python_version<='2.7'\n", + "fixtures===1.5.0;python_version>='3.4'\n", + ], + list(generate._combine_freezes([freeze_27, freeze_34])), + ) def test_duplicate_pythons(self): with testtools.ExpectedException(Exception): @@ -103,31 +112,37 @@ def test_denylist(self): freeze_34 = ('3.4', [('fixtures', '1.2.0'), ('enum', '1.5.0')]) self.assertEqual( ["enum===1.5.0;python_version=='3.4'\n"], - list(generate._combine_freezes( - [freeze_27, freeze_34], denylist=denylist))) + list( + generate._combine_freezes( + [freeze_27, freeze_34], denylist=denylist + ) + ), + ) def test_denylist_with_safe_name(self): denylist = ['flake8_docstrings'] - freeze_27 = ('2.7', [('flake8-docstrings', '0.2.1.post1'), - ('enum', '1.5.0')]) + freeze_27 = ( + '2.7', + [('flake8-docstrings', '0.2.1.post1'), ('enum', '1.5.0')], + ) self.assertEqual( ['enum===1.5.0\n'], - list(generate._combine_freezes( - [freeze_27], denylist=denylist))) + list(generate._combine_freezes([freeze_27], denylist=denylist)), + ) -class Namespace(object): +class Namespace: def __init__(self, **kwargs): self.__dict__.update(kwargs) class TestClone(testtools.TestCase): - def test_py34_clone_py35(self): # Simulate an environment where we have python 3.4 data and need to # clone that to python 3.5 - options = Namespace(version_map={'3.4': set(['3.5']), - '3.5': set(['3.4'])}) + options = Namespace( + version_map={'3.4': set(['3.5']), '3.5': set(['3.4'])} + ) freeze_27 = ('2.7', [('dnspython', '1.15.0')]) freeze_34 = ('3.4', [('dnspython3', '1.12.0')]) freeze_35 = ('3.5', [('dnspython3', '1.12.0')]) @@ -142,8 +157,9 @@ def test_py34_clone_py35(self): def test_py34_noclone_py35(self): # Simulate an environment where we have python 3.4 and python 3.5 data # so there is no need to clone. 
- options = Namespace(version_map={'3.4': set(['3.5']), - '3.5': set(['3.4'])}) + options = Namespace( + version_map={'3.4': set(['3.5']), '3.5': set(['3.4'])} + ) freeze_27 = ('2.7', [('dnspython', '1.15.0')]) freeze_34 = ('3.4', [('dnspython3', '1.12.0')]) freeze_35 = ('3.5', [('other-pkg', '1.0.0')]) @@ -158,8 +174,9 @@ def test_py34_noclone_py35(self): def test_py35_clone_py34(self): # Simulate an environment where we have python 3.5 data and need to # clone that to python 3.4 - options = Namespace(version_map={'3.4': set(['3.5']), - '3.5': set(['3.4'])}) + options = Namespace( + version_map={'3.4': set(['3.5']), '3.5': set(['3.4'])} + ) freeze_27 = ('2.7', [('dnspython', '1.15.0')]) freeze_34 = ('3.4', [('dnspython3', '1.12.0')]) freeze_35 = ('3.5', [('dnspython3', '1.12.0')]) diff --git a/openstack_requirements/tests/test_project.py b/openstack_requirements/tests/test_project.py index 6394a1699..3f4e7979c 100644 --- a/openstack_requirements/tests/test_project.py +++ b/openstack_requirements/tests/test_project.py @@ -25,18 +25,18 @@ class TestReadProject(testtools.TestCase): - def test_pbr(self): root = self.useFixture(common.pbr_fixture).root proj = project.read(root) self.expectThat(proj['root'], matchers.Equals(root)) - setup_py = open(root + '/setup.py', 'rt').read() + setup_py = open(root + '/setup.py').read() self.expectThat(proj['setup.py'], matchers.Equals(setup_py)) - setup_cfg = open(root + '/setup.cfg', 'rt').read() + setup_cfg = open(root + '/setup.cfg').read() self.expectThat(proj['setup.cfg'], matchers.Equals(setup_cfg)) self.expectThat( proj['requirements'], - matchers.KeysEqual('requirements.txt', 'test-requirements.txt')) + matchers.KeysEqual('requirements.txt', 'test-requirements.txt'), + ) def test_no_setup_py(self): root = self.useFixture(fixtures.TempDir()).path @@ -47,24 +47,22 @@ def test_no_setup_py(self): class TestProjectExtras(testtools.TestCase): - def test_smoke(self): - proj = {'setup.cfg': textwrap.dedent(u""" + proj = { + 'setup.cfg': textwrap.dedent(""" [extras] 1 = foo 2 = foo # fred bar - """)} - expected = { - '1': '\nfoo', - '2': '\nfoo # fred\nbar' + """) } + expected = {'1': '\nfoo', '2': '\nfoo # fred\nbar'} self.assertEqual(expected, project.extras(proj)) def test_none(self): - proj = {'setup.cfg': u"[metadata]\n"} + proj = {'setup.cfg': "[metadata]\n"} self.assertEqual({}, project.extras(proj)) def test_no_setup_cfg(self): diff --git a/openstack_requirements/tests/test_requirement.py b/openstack_requirements/tests/test_requirement.py index 8942a3394..1de817623 100644 --- a/openstack_requirements/tests/test_requirement.py +++ b/openstack_requirements/tests/test_requirement.py @@ -22,114 +22,213 @@ class TestParseRequirement(testtools.TestCase): - dist_scenarios = [ - ('package', dict( - line='swift', - req=requirement.Requirement('swift', '', '', '', ''))), - ('specifier', dict( - line='alembic>=0.4.1', - req=requirement.Requirement('alembic', '', '>=0.4.1', '', ''))), - ('specifiers', dict( - line='alembic>=0.4.1,!=1.1.8', - req=requirement.Requirement('alembic', '', '!=1.1.8,>=0.4.1', '', - ''))), - ('comment-only', dict( - line='# foo', - req=requirement.Requirement('', '', '', '', '# foo'))), - ('comment', dict( - line='Pint>=0.5 # BSD', - req=requirement.Requirement('Pint', '', '>=0.5', '', '# BSD'))), - ('comment-with-semicolon', dict( - line='Pint>=0.5 # BSD;fred', - req=requirement.Requirement('Pint', '', '>=0.5', '', '# BSD;fred'))), - ('case', dict( - line='Babel>=1.3', - req=requirement.Requirement('Babel', '', '>=1.3', '', ''))), - 
('markers', dict( - line="pywin32;sys_platform=='win32'", - req=requirement.Requirement('pywin32', '', '', - "sys_platform=='win32'", ''))), - ('markers-with-comment', dict( - line="Sphinx<=1.2; python_version=='2.7'# Sadface", - req=requirement.Requirement('Sphinx', '', '<=1.2', - "python_version=='2.7'", '# Sadface')))] + ( + 'package', + dict( + line='swift', + req=requirement.Requirement('swift', '', '', '', ''), + ), + ), + ( + 'specifier', + dict( + line='alembic>=0.4.1', + req=requirement.Requirement('alembic', '', '>=0.4.1', '', ''), + ), + ), + ( + 'specifiers', + dict( + line='alembic>=0.4.1,!=1.1.8', + req=requirement.Requirement( + 'alembic', '', '!=1.1.8,>=0.4.1', '', '' + ), + ), + ), + ( + 'comment-only', + dict( + line='# foo', + req=requirement.Requirement('', '', '', '', '# foo'), + ), + ), + ( + 'comment', + dict( + line='Pint>=0.5 # BSD', + req=requirement.Requirement('Pint', '', '>=0.5', '', '# BSD'), + ), + ), + ( + 'comment-with-semicolon', + dict( + line='Pint>=0.5 # BSD;fred', + req=requirement.Requirement( + 'Pint', '', '>=0.5', '', '# BSD;fred' + ), + ), + ), + ( + 'case', + dict( + line='Babel>=1.3', + req=requirement.Requirement('Babel', '', '>=1.3', '', ''), + ), + ), + ( + 'markers', + dict( + line="pywin32;sys_platform=='win32'", + req=requirement.Requirement( + 'pywin32', '', '', "sys_platform=='win32'", '' + ), + ), + ), + ( + 'markers-with-comment', + dict( + line="Sphinx<=1.2; python_version=='2.7'# Sadface", + req=requirement.Requirement( + 'Sphinx', '', '<=1.2', "python_version=='2.7'", '# Sadface' + ), + ), + ), + ] url_scenarios = [ - ('url', dict( - line='file:///path/to/thing#egg=thing', - req=requirement.Requirement('thing', 'file:///path/to/thing', '', '', - ''), - permit_urls=True)), - ('oslo-url', dict( - line='file:///path/to/oslo.thing#egg=oslo.thing', - req=requirement.Requirement('oslo.thing', - 'file:///path/to/oslo.thing', '', '', ''), - permit_urls=True)), - ('url-comment', dict( - line='file:///path/to/thing#egg=thing # http://altpath#egg=boo', - req=requirement.Requirement('thing', 'file:///path/to/thing', '', '', - '# http://altpath#egg=boo'), - permit_urls=True)), - ('editable', dict( - line='-e file:///path/to/bar#egg=bar', - req=requirement.Requirement('bar', '-e file:///path/to/bar', '', '', - ''), - permit_urls=True)), - ('editable_vcs_git', dict( - line='-e git+http://github.com/path/to/oslo.bar#egg=oslo.bar', - req=requirement.Requirement('oslo.bar', - '-e git+http://github.com' - '/path/to/oslo.bar', '', '', ''), - permit_urls=True)), - ('editable_vcs_git_ssh', dict( - line='-e git+ssh://github.com/path/to/oslo.bar#egg=oslo.bar', - req=requirement.Requirement('oslo.bar', - '-e git+ssh://github.com' - '/path/to/oslo.bar', '', '', ''), - permit_urls=True)), + ( + 'url', + dict( + line='file:///path/to/thing#egg=thing', + req=requirement.Requirement( + 'thing', 'file:///path/to/thing', '', '', '' + ), + permit_urls=True, + ), + ), + ( + 'oslo-url', + dict( + line='file:///path/to/oslo.thing#egg=oslo.thing', + req=requirement.Requirement( + 'oslo.thing', 'file:///path/to/oslo.thing', '', '', '' + ), + permit_urls=True, + ), + ), + ( + 'url-comment', + dict( + line='file:///path/to/thing#egg=thing # http://altpath#egg=boo', + req=requirement.Requirement( + 'thing', + 'file:///path/to/thing', + '', + '', + '# http://altpath#egg=boo', + ), + permit_urls=True, + ), + ), + ( + 'editable', + dict( + line='-e file:///path/to/bar#egg=bar', + req=requirement.Requirement( + 'bar', '-e file:///path/to/bar', '', '', '' + ), + 
permit_urls=True, + ), + ), + ( + 'editable_vcs_git', + dict( + line='-e git+http://github.com/path/to/oslo.bar#egg=oslo.bar', + req=requirement.Requirement( + 'oslo.bar', + '-e git+http://github.com/path/to/oslo.bar', + '', + '', + '', + ), + permit_urls=True, + ), + ), + ( + 'editable_vcs_git_ssh', + dict( + line='-e git+ssh://github.com/path/to/oslo.bar#egg=oslo.bar', + req=requirement.Requirement( + 'oslo.bar', + '-e git+ssh://github.com/path/to/oslo.bar', + '', + '', + '', + ), + permit_urls=True, + ), + ), ] scenarios = dist_scenarios + url_scenarios def test_parse(self): parsed = requirement.parse_line( - self.line, permit_urls=getattr(self, 'permit_urls', False)) + self.line, permit_urls=getattr(self, 'permit_urls', False) + ) self.assertEqual(self.req, parsed) class TestParseRequirementFailures(testtools.TestCase): - scenarios = [ - ('url', dict(line='http://tarballs.openstack.org/oslo.config/' - 'oslo.config-1.2.0a3.tar.gz#egg=oslo.config')), + ( + 'url', + dict( + line='http://tarballs.openstack.org/oslo.config/' + 'oslo.config-1.2.0a3.tar.gz#egg=oslo.config' + ), + ), ('-e', dict(line='-e git+https://foo.com#egg=foo')), - ('-f', dict(line='-f http://tarballs.openstack.org/'))] + ('-f', dict(line='-f http://tarballs.openstack.org/')), + ] def test_does_not_parse(self): self.assertRaises(ValueError, requirement.parse_line, self.line) class TestToContent(testtools.TestCase): - def test_smoke(self): - reqs = requirement.to_content(requirement.Requirements( - [requirement.Requirement( - 'foo', '', '<=1', "python_version=='2.7'", '# BSD')]), - marker_sep='!') - self.assertEqual( - "foo<=1!python_version=='2.7' # BSD\n", - reqs) + reqs = requirement.to_content( + requirement.Requirements( + [ + requirement.Requirement( + 'foo', '', '<=1', "python_version=='2.7'", '# BSD' + ) + ] + ), + marker_sep='!', + ) + self.assertEqual("foo<=1!python_version=='2.7' # BSD\n", reqs) def test_location(self): - reqs = requirement.to_content(requirement.Requirements( - [requirement.Requirement( - 'foo', 'file://foo', '', "python_version=='2.7'", '# BSD')])) + reqs = requirement.to_content( + requirement.Requirements( + [ + requirement.Requirement( + 'foo', + 'file://foo', + '', + "python_version=='2.7'", + '# BSD', + ) + ] + ) + ) self.assertEqual( - "file://foo#egg=foo;python_version=='2.7' # BSD\n", - reqs) + "file://foo#egg=foo;python_version=='2.7' # BSD\n", reqs + ) class TestToReqs(testtools.TestCase): - def test_editable(self): line = '-e file:///foo#egg=foo' reqs = list(requirement.to_reqs(line, permit_urls=True)) @@ -144,7 +243,8 @@ def test_urls(self): def test_not_urls(self): self.assertRaises( - ValueError, list, requirement.to_reqs('file:///foo#egg=foo')) + ValueError, list, requirement.to_reqs('file:///foo#egg=foo') + ) def test_multiline(self): content = textwrap.dedent("""\ @@ -170,30 +270,31 @@ def test_extras(self): set(reqs.keys()), ) self.assertEqual(reqs['oslo-config'][0][0].extras, frozenset(())) - self.assertEqual(reqs['oslo-concurrency'][0][0].extras, - frozenset(('fixtures',))) - self.assertEqual(reqs['oslo-db'][0][0].extras, - frozenset(('fixtures', 'mysql'))) - self.assertCountEqual(reqs, - ['oslo-config', 'oslo-concurrency', 'oslo-db']) + self.assertEqual( + reqs['oslo-concurrency'][0][0].extras, frozenset(('fixtures',)) + ) + self.assertEqual( + reqs['oslo-db'][0][0].extras, frozenset(('fixtures', 'mysql')) + ) + self.assertCountEqual( + reqs, ['oslo-config', 'oslo-concurrency', 'oslo-db'] + ) class TestCanonicalName(testtools.TestCase): - def test_underscores(self): 
self.assertEqual('foo-bar', requirement.canonical_name('Foo_bar')) class TestToDict(testtools.TestCase): - def test_canonicalises(self): req = requirement.Requirement('Foo_bar', '', '', '', '') self.assertEqual( - {'foo-bar': [(req, '')]}, requirement.to_dict([(req, '')])) + {'foo-bar': [(req, '')]}, requirement.to_dict([(req, '')]) + ) class TestReqPolicy(testtools.TestCase): - def test_requirements_policy_pass(self): content = textwrap.dedent("""\ cffi!=1.1.2 @@ -209,7 +310,10 @@ def test_requirements_policy_fail(self): other>=1,>=2,!=1.1.0 """) reqs = requirement.parse(content) - self.assertEqual([ - 'Requirement cffi should not include a >= specifier', - 'Requirement other should not include a >= specifier'], - sorted([x for x in requirement.check_reqs_bounds_policy(reqs)])) + self.assertEqual( + [ + 'Requirement cffi should not include a >= specifier', + 'Requirement other should not include a >= specifier', + ], + sorted([x for x in requirement.check_reqs_bounds_policy(reqs)]), + ) diff --git a/openstack_requirements/utils.py b/openstack_requirements/utils.py index 58b13d017..6a48b7db2 100644 --- a/openstack_requirements/utils.py +++ b/openstack_requirements/utils.py @@ -2,6 +2,6 @@ def read_requirements_file(filename): - with open(filename, 'rt') as f: + with open(filename) as f: body = f.read() return requirement.parse(body) diff --git a/playbooks/files/project-requirements-change.py b/playbooks/files/project-requirements-change.py index 551053a85..09da5c6ca 100755 --- a/playbooks/files/project-requirements-change.py +++ b/playbooks/files/project-requirements-change.py @@ -37,13 +37,13 @@ def run_command(cmd): print(cmd) cmd_list = shlex.split(str(cmd)) kwargs = {} - if sys.version_info >= (3, ): - kwargs = { - 'encoding': 'utf-8', - 'errors': 'surrogateescape', - } - p = subprocess.Popen(cmd_list, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, **kwargs) + kwargs = { + 'encoding': 'utf-8', + 'errors': 'surrogateescape', + } + p = subprocess.Popen( + cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs + ) (out, err) = p.communicate() if p.returncode != 0: raise SystemError(err) @@ -51,7 +51,8 @@ def run_command(cmd): _DEFAULT_REQS_DIR = os.path.expanduser( - '~/src/opendev.org/openstack/requirements') + '~/src/opendev.org/openstack/requirements' +) def grab_args(): @@ -59,14 +60,19 @@ def grab_args(): parser = argparse.ArgumentParser( description="Check if project requirements have changed" ) - parser.add_argument('--local', action='store_true', - help='check local changes (not yet in git)') + parser.add_argument( + '--local', + action='store_true', + help='check local changes (not yet in git)', + ) parser.add_argument('src_dir', help='directory to process') - parser.add_argument('branch', nargs='?', default='master', - help='target branch for diffs') + parser.add_argument( + 'branch', nargs='?', default='master', help='target branch for diffs' + ) parser.add_argument('--zc', help='what zuul cloner to call') - parser.add_argument('--reqs', help='use a specified requirements tree', - default=None) + parser.add_argument( + '--reqs', help='use a specified requirements tree', default=None + ) return parser.parse_args() @@ -91,31 +97,29 @@ def main(): if args.local: print('selecting default requirements directory for local mode') reqdir = os.path.dirname( - os.path.dirname( - os.path.dirname( - os.path.abspath(sys.argv[0])))) + os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))) + ) else: print('selecting default requirements directory for normal mode') 
reqdir = _DEFAULT_REQS_DIR - print('Branch: {}'.format(branch)) - print('Source: {}'.format(args.src_dir)) - print('Requirements: {}'.format(reqdir)) + print(f'Branch: {branch}') + print(f'Source: {args.src_dir}') + print(f'Requirements: {reqdir}') os.chdir(args.src_dir) sha, _ = run_command('git log -n 1 --format=%H') - print('Patch under test: {}'.format(sha)) + print(f'Patch under test: {sha}') # build a list of requirements from the global list in the # openstack/requirements project so we can match them to the changes with tempdir(): - with open(reqdir + '/global-requirements.txt', 'rt') as f: + with open(reqdir + '/global-requirements.txt') as f: global_reqs = check.get_global_reqs(f.read()) - denylist = requirement.parse( - open(reqdir + '/denylist.txt', 'rt').read()) + denylist = requirement.parse(open(reqdir + '/denylist.txt').read()) backports_file = reqdir + '/backports.txt' if os.path.exists(backports_file): - backports = requirement.parse(open(backports_file, 'rt').read()) + backports = requirement.parse(open(backports_file).read()) else: backports = {} cwd = os.getcwd() diff --git a/pyproject.toml b/pyproject.toml index 7f5d852af..0d31dcc54 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,3 +43,17 @@ check-constraints = "openstack_requirements.cmds.check_exists:main" packages = [ "openstack_requirements" ] + +[tool.ruff] +line-length = 79 + +[tool.ruff.format] +quote-style = "preserve" +docstring-code-format = true + +[tool.ruff.lint] +select = ["E4", "E7", "E9", "F", "U"] + +# [tool.ruff.lint.per-file-ignores] +# "openstack/tests/*" = ["S"] +# "examples/*" = ["S"] diff --git a/setup.py b/setup.py index f63cc23c5..83c92e22c 100644 --- a/setup.py +++ b/setup.py @@ -16,6 +16,4 @@ # THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT import setuptools -setuptools.setup( - setup_requires=['pbr>=2.0.0'], - pbr=True) +setuptools.setup(setup_requires=['pbr>=2.0.0'], pbr=True) diff --git a/tools/cap.py b/tools/cap.py index 82ed8e617..20a9cfcf2 100755 --- a/tools/cap.py +++ b/tools/cap.py @@ -78,13 +78,13 @@ def pin(line, new_cap): end = parts[1] # cap to new max version if end: - new_end = "<=%s #%s" % (new_cap, end) + new_end = f"<={new_cap} #{end}" else: - new_end = "<=%s" % new_cap + new_end = f"<={new_cap}" if use_comma is True: - return "%s,%s" % (parts[0].strip(), new_end) + return f"{parts[0].strip()},{new_end}" else: - return "%s%s" % (parts[0].strip(), new_end) + return f"{parts[0].strip()}{new_end}" def split(line): @@ -119,12 +119,13 @@ def freeze(lines): def main(): parser = argparse.ArgumentParser( description="Take the output of " - "'pip freeze' and use the installed versions to " - "caps requirements.") + "'pip freeze' and use the installed versions to " + "caps requirements." 
+ ) parser.add_argument('requirements', help='requirements file input') parser.add_argument( - 'freeze', - help='output of pip freeze, taken from a full tempest job') + 'freeze', help='output of pip freeze, taken from a full tempest job' + ) args = parser.parse_args() with open(args.requirements) as f: requirements = [line.strip() for line in f.readlines()] diff --git a/tools/check-install.py b/tools/check-install.py index 1ecf4e19b..d01a89793 100644 --- a/tools/check-install.py +++ b/tools/check-install.py @@ -19,7 +19,7 @@ def main(): try: importlib.import_module(module) except ImportError as err: - print('Imports for %s failed:\n\t%s' % (script, err)) + print(f'Imports for {script} failed:\n\t{err}') errors += 1 return 1 if errors else 0 diff --git a/tools/lint.py b/tools/lint.py index 7ac319bf5..b59b43622 100755 --- a/tools/lint.py +++ b/tools/lint.py @@ -56,9 +56,7 @@ def sort() -> None: deps.append((line, comment or None)) comment = '' - section_deps[section] = sorted( - deps, key=lambda x: x[0].lower() - ) + section_deps[section] = sorted(deps, key=lambda x: x[0].lower()) with open(GLOBAL_REQS, 'w') as fh: for i, section in enumerate(section_deps): diff --git a/tools/what-broke.py b/tools/what-broke.py index bb2d74e1e..b356ba74d 100755 --- a/tools/what-broke.py +++ b/tools/what-broke.py @@ -41,7 +41,7 @@ import packaging.requirements -class Release(object): +class Release: name = "" version = "" filename = "" @@ -54,7 +54,7 @@ def __init__(self, name, version, filename, released): self.released = released def __repr__(self): - return "" % (self.name, self.version, self.released) + return f"" def _parse_pypi_released(datestr): @@ -79,7 +79,6 @@ def get_requirements(): def get_releases_for_package(name, since): - """Get the release history from pypi Use the json API to get the release history from pypi. The @@ -94,7 +93,7 @@ def get_releases_for_package(name, since): our purposes. 
""" - f = urlreq.urlopen("http://pypi.org/project/%s/json" % name) + f = urlreq.urlopen(f"http://pypi.org/project/{name}/json") jsondata = f.read() data = json.loads(jsondata) releases = [] @@ -106,12 +105,7 @@ def get_releases_for_package(name, since): if when < since: continue - releases.append( - Release( - name, - relname, - rel['filename'], - when)) + releases.append(Release(name, relname, rel['filename'], when)) break return releases @@ -121,9 +115,9 @@ def get_releases_since(reqs, since): for req in reqs: all_releases.extend(get_releases_for_package(req, since)) # return these in a sorted order from newest to oldest - sorted_releases = sorted(all_releases, - key=lambda x: x.released, - reverse=True) + sorted_releases = sorted( + all_releases, key=lambda x: x.released, reverse=True + ) return sorted_releases @@ -131,17 +125,23 @@ def parse_args(): parser = argparse.ArgumentParser( description=( 'List recent releases of items in global requirements ' - 'to look for possible breakage')) - parser.add_argument('-s', '--since', type=int, - default=14, - help='look back ``since`` days (default 14)') + 'to look for possible breakage' + ) + ) + parser.add_argument( + '-s', + '--since', + type=int, + default=14, + help='look back ``since`` days (default 14)', + ) return parser.parse_args() def main(): opts = parse_args() since = datetime.datetime.today() - datetime.timedelta(days=opts.since) - print("Looking for requirements releases since %s" % since) + print(f"Looking for requirements releases since {since}") reqs = get_requirements() # additional sensitive requirements reqs.append('tox') diff --git a/tox.ini b/tox.ini index 546eeaa9c..d3e036098 100644 --- a/tox.ini +++ b/tox.ini @@ -63,11 +63,14 @@ commands = validate-projects {toxinidir}/projects.txt description = Perform linting skip_install = true deps = + ruff~=0.13.0 # MIT hacking~=7.0 # Apache-2.0 bashate~=2.1 # Apache-2.0 allowlist_externals = bash commands = + ruff check --fix --unsafe-fixes + ruff format flake8 bash -c "find {toxinidir}/tools \ -type f \ @@ -112,4 +115,6 @@ deps = Babel commands = {toxinidir}/tools/babel-test.sh [flake8] +# We only enable the hacking (H) checks +select = H exclude = .venv,.git,.tox,dist,doc,*egg,build From 1691dcb547658a5182e713a0896f2efe2e898fab Mon Sep 17 00:00:00 2001 From: Stephen Finucane Date: Thu, 18 Sep 2025 11:43:25 +0100 Subject: [PATCH 06/19] Deprecate support for legacy requirement files Deprecate support for the following requirement files: - tools/pip-requires - tools/test-requires - requirements-py2.txt - requirements-py3.txt - test-requirements-py2.txt - test-requirements-py3.txt As noted inline, none of these work with pbr anymore. If people are using them with a different installer, then they will need to switch to a 'pyproject.toml' file instead. Change-Id: I98c9dc3260b93863d166e34d08b589cd129c512c Signed-off-by: Stephen Finucane --- openstack_requirements/check.py | 34 +++++++++++++++++++++++++++---- openstack_requirements/project.py | 18 ++++++++-------- 2 files changed, 40 insertions(+), 12 deletions(-) diff --git a/openstack_requirements/check.py b/openstack_requirements/check.py index 9ca0433e5..f3113d8a9 100644 --- a/openstack_requirements/check.py +++ b/openstack_requirements/check.py @@ -16,6 +16,7 @@ import collections import re +import sys from packaging import markers @@ -56,8 +57,9 @@ def extract_reqs(self, content, strict): set(list_reqs_stripped) ): print( - "ERROR: Requirements file has duplicate entries " - f"for package {name} : {list_reqs!r}." 
+                    f"ERROR: Requirements file has duplicate entries "
+                    f"for package {name} : {list_reqs!r}.",
+                    file=sys.stderr,
                 )
                 self.failed = True
             reqs[name].update(list_reqs)
@@ -72,12 +74,36 @@ def process(self, strict=True):
         - duplicates are not permitted within that list
         """
         print(f"Checking {self.name}")
-        # First, parse.
         for fname, content in self.project.get('requirements', {}).items():
+            if (
+                fname
+                in {
+                    'tools/pip-requires',
+                    'tools/test-requires',
+                    'requirements-py2.txt',
+                    'requirements-py3.txt',
+                    'test-requirements-py2.txt',
+                    'test-requirements-py3.txt',
+                }
+                and content
+            ):
+                # TODO(stephenfin): Make this an error in the H cycle (mid
+                # 2026). These files are all obsolete: pbr no longer supports
+                # the pyN-suffixed files (since pbr 5.0) and never supported
+                # the *-requires files
+                print(
+                    f"WARNING: Requirements file {fname} is non-standard "
+                    "and will cause an error in the future. "
+                    "Use a pyproject.toml or requirements.txt / "
+                    "test-requirements.txt file instead.",
+                    file=sys.stderr,
+                )
+
             print(f"Processing {fname}")
             if strict and not content.endswith('\n'):
                 print(
-                    f"Requirements file {fname} does not end with a newline."
+                    f"Requirements file {fname} does not end with a newline.",
+                    file=sys.stderr,
                 )
             self.reqs_by_file[fname] = self.extract_reqs(content, strict)

diff --git a/openstack_requirements/project.py b/openstack_requirements/project.py
index 056a0120e..e48ef4866 100644
--- a/openstack_requirements/project.py
+++ b/openstack_requirements/project.py
@@ -60,18 +60,20 @@ def read(root):
     result = {'root': root}
     _safe_read(result, 'setup.py')
     _safe_read(result, 'setup.cfg')
+
     requirements = {}
     result['requirements'] = requirements
-    target_files = [
+    for target_file in [
         'requirements.txt',
-        'tools/pip-requires',
         'test-requirements.txt',
-        'tools/test-requires',
         'doc/requirements.txt',
-    ]
-    for py_version in (2, 3):
-        target_files.append(f'requirements-py{py_version}.txt')
-        target_files.append(f'test-requirements-py{py_version}.txt')
-    for target_file in target_files:
+        # deprecated aliases (warnings are handled elsewhere)
+        'tools/pip-requires',
+        'tools/test-requires',
+        'requirements-py2.txt',
+        'requirements-py3.txt',
+        'test-requirements-py2.txt',
+        'test-requirements-py3.txt',
+    ]:
         _safe_read(result, target_file, output=requirements)
     return result

From f7ff019cfa5d68feaa8946a24af97c5b5dbc3ba8 Mon Sep 17 00:00:00 2001
From: Stephen Finucane
Date: Thu, 18 Sep 2025 13:18:04 +0100
Subject: [PATCH 07/19] Rework extras tracking

Store this in the same dict that we currently store requirements in.
This will make it easier to tack on pyproject.toml handling in an
upcoming change.
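
As a rough sketch (not part of the change itself, and using made-up file
and package names), the dict returned by project.read() ends up looking
something like this once requirements and extras live side by side:

    project = {
        'root': '/path/to/project',
        # requirement file name -> raw file contents
        'requirements': {
            'requirements.txt': 'pbr>=6.0\nrequests>=2.25\n',
            'test-requirements.txt': 'stestr\n',
        },
        # source file name -> {extra name: newline-separated requirements}
        'extras': {
            'setup.cfg': {'test': '\npytest', 'ldap': '\npython-ldap'},
        },
    }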
Change-Id: Icf2ae0a15aa1f35ed3954f3ca2fb0b3f3ceee414 Signed-off-by: Stephen Finucane --- openstack_requirements/check.py | 13 ++--- openstack_requirements/project.py | 54 ++++++++++++-------- openstack_requirements/tests/test_project.py | 49 ++++++++++++------ 3 files changed, 72 insertions(+), 44 deletions(-) diff --git a/openstack_requirements/check.py b/openstack_requirements/check.py index f3113d8a9..8cab75eab 100644 --- a/openstack_requirements/check.py +++ b/openstack_requirements/check.py @@ -20,7 +20,6 @@ from packaging import markers -from openstack_requirements import project from openstack_requirements import requirement MIN_PY_VERSION = '3.5' @@ -74,7 +73,7 @@ def process(self, strict=True): - duplicates are not permitted within that list """ print(f"Checking {self.name}") - for fname, content in self.project.get('requirements', {}).items(): + for fname, content in self.project['requirements'].items(): if ( fname in { @@ -99,7 +98,7 @@ def process(self, strict=True): file=sys.stderr, ) - print(f"Processing {fname}") + print(f"Processing {fname} (requirements)") if strict and not content.endswith('\n'): print( f"Requirements file {fname} does not end with a newline.", @@ -107,9 +106,11 @@ def process(self, strict=True): ) self.reqs_by_file[fname] = self.extract_reqs(content, strict) - for name, content in project.extras(self.project).items(): - print(f"Processing .[{name}]") - self.reqs_by_file[name] = self.extract_reqs(content, strict) + for fname, extras in self.project['extras'].items(): + print(f"Processing {fname} (extras)") + for name, content in extras: + print(f"Processing .[{name}]") + self.reqs_by_file[name] = self.extract_reqs(content, strict) def _get_exclusions(req): diff --git a/openstack_requirements/project.py b/openstack_requirements/project.py index e48ef4866..ca6c999fe 100644 --- a/openstack_requirements/project.py +++ b/openstack_requirements/project.py @@ -21,27 +21,24 @@ import os -def extras(project): - """Return a dict of extra-name:content for the extras in setup.cfg.""" - if 'setup.cfg' not in project: - return {} - c = configparser.ConfigParser() - c.read_file(io.StringIO(project['setup.cfg'])) - if not c.has_section('extras'): - return {} - return dict(c.items('extras')) +def _read_setup_cfg_extras(root): + data = _read_raw(root, 'setup.cfg') + if data is None: + return None + c = configparser.ConfigParser() + c.read_file(io.StringIO(data)) + if c.has_section('extras'): + return dict(c.items('extras')) -# IO from here to the end of the file. + return None -def _safe_read(project, filename, output=None): - if output is None: - output = project +def _read_raw(root, filename): try: - path = os.path.join(project['root'], filename) + path = os.path.join(root, filename) with open(path, encoding="utf-8") as f: - output[filename] = f.read() + return f.read() except OSError as e: if e.errno != errno.ENOENT: raise @@ -55,15 +52,21 @@ def read(root): - root: The root dir. - setup.py: Contents of setup.py. - setup.cfg: Contents of setup.cfg. - - requirements: Dict of requirement file name: contents. + - requirements: Dict of requirement file name + - extras: Dict of extras file name to a dict of extra names and + requirements """ + # Store root directory and installer-related files for later processing result = {'root': root} - _safe_read(result, 'setup.py') - _safe_read(result, 'setup.cfg') + # TODO(stephenfin): Can we delete this now? 
+ for filename in {'setup.cfg', 'setup.py'}: + if (data := _read_raw(root, filename)) is not None: + result[filename] = data + + # Store requirements + result['requirements'] = {} - requirements = {} - result['requirements'] = requirements - for target_file in [ + for filename in [ 'requirements.txt', 'test-requirements.txt', 'doc/requirements.txt', @@ -75,5 +78,12 @@ def read(root): 'test-requirements-py2.txt', 'test-requirements-py3.txt', ]: - _safe_read(result, target_file, output=requirements) + if (data := _read_raw(root, filename)) is not None: + result['requirements'][filename] = data + + # Store extras + result['extras'] = {} + if (data := _read_setup_cfg_extras(root)) is not None: + result['extras']['setup.cfg'] = data + return result diff --git a/openstack_requirements/tests/test_project.py b/openstack_requirements/tests/test_project.py index 3f4e7979c..7128bfdb0 100644 --- a/openstack_requirements/tests/test_project.py +++ b/openstack_requirements/tests/test_project.py @@ -10,6 +10,7 @@ # License for the specific language governing permissions and limitations # under the License. +import os import textwrap import fixtures @@ -42,29 +43,45 @@ def test_no_setup_py(self): root = self.useFixture(fixtures.TempDir()).path proj = project.read(root) self.expectThat( - proj, matchers.Equals({'root': root, 'requirements': {}}) + proj, + matchers.Equals( + { + 'root': root, + 'requirements': {}, + 'extras': {}, + } + ), ) class TestProjectExtras(testtools.TestCase): def test_smoke(self): - proj = { - 'setup.cfg': textwrap.dedent(""" - [extras] - 1 = - foo - 2 = - foo # fred - bar - """) - } + root = self.useFixture(fixtures.TempDir()).path + with open(os.path.join(root, 'setup.cfg'), 'w') as fh: + fh.write( + textwrap.dedent(""" + [extras] + 1 = + foo + 2 = + foo # fred + bar + """) + ) expected = {'1': '\nfoo', '2': '\nfoo # fred\nbar'} - self.assertEqual(expected, project.extras(proj)) + self.assertEqual(expected, project._read_setup_cfg_extras(root)) def test_none(self): - proj = {'setup.cfg': "[metadata]\n"} - self.assertEqual({}, project.extras(proj)) + root = self.useFixture(fixtures.TempDir()).path + with open(os.path.join(root, 'setup.cfg'), 'w') as fh: + fh.write( + textwrap.dedent(""" + [metadata] + name = foo + """) + ) + self.assertIsNone(project._read_setup_cfg_extras(root)) def test_no_setup_cfg(self): - proj = {} - self.assertEqual({}, project.extras(proj)) + root = self.useFixture(fixtures.TempDir()).path + self.assertIsNone(project._read_setup_cfg_extras(root)) From 32096476187f47cf05d0f42a832e63858ed1351a Mon Sep 17 00:00:00 2001 From: Stephen Finucane Date: Thu, 18 Sep 2025 15:41:56 +0100 Subject: [PATCH 08/19] Stop storing setup.cfg, setup.py We no longer need to parse these anywhere. We also remove the unnecessary use of matchers from tests in favour of (more common) assertions. Change-Id: I2c98b0792228d96f0ce027dc5476b146365cfc58 Signed-off-by: Stephen Finucane --- openstack_requirements/project.py | 6 ----- openstack_requirements/tests/test_project.py | 27 ++++++++------------ 2 files changed, 10 insertions(+), 23 deletions(-) diff --git a/openstack_requirements/project.py b/openstack_requirements/project.py index ca6c999fe..805bd158f 100644 --- a/openstack_requirements/project.py +++ b/openstack_requirements/project.py @@ -50,18 +50,12 @@ def read(root): :param root: A directory path. :return: A dict representing the project with the following keys: - root: The root dir. - - setup.py: Contents of setup.py. - - setup.cfg: Contents of setup.cfg. 
- requirements: Dict of requirement file name - extras: Dict of extras file name to a dict of extra names and requirements """ # Store root directory and installer-related files for later processing result = {'root': root} - # TODO(stephenfin): Can we delete this now? - for filename in {'setup.cfg', 'setup.py'}: - if (data := _read_raw(root, filename)) is not None: - result[filename] = data # Store requirements result['requirements'] = {} diff --git a/openstack_requirements/tests/test_project.py b/openstack_requirements/tests/test_project.py index 7128bfdb0..62a511c9f 100644 --- a/openstack_requirements/tests/test_project.py +++ b/openstack_requirements/tests/test_project.py @@ -16,7 +16,6 @@ import fixtures import testscenarios import testtools -from testtools import matchers from openstack_requirements import project from openstack_requirements.tests import common @@ -29,28 +28,22 @@ class TestReadProject(testtools.TestCase): def test_pbr(self): root = self.useFixture(common.pbr_fixture).root proj = project.read(root) - self.expectThat(proj['root'], matchers.Equals(root)) - setup_py = open(root + '/setup.py').read() - self.expectThat(proj['setup.py'], matchers.Equals(setup_py)) - setup_cfg = open(root + '/setup.cfg').read() - self.expectThat(proj['setup.cfg'], matchers.Equals(setup_cfg)) - self.expectThat( - proj['requirements'], - matchers.KeysEqual('requirements.txt', 'test-requirements.txt'), + self.assertEqual(proj['root'], root) + self.assertEqual( + list(sorted(proj['requirements'])), + ['requirements.txt', 'test-requirements.txt'], ) def test_no_setup_py(self): root = self.useFixture(fixtures.TempDir()).path proj = project.read(root) - self.expectThat( + self.assertEqual( proj, - matchers.Equals( - { - 'root': root, - 'requirements': {}, - 'extras': {}, - } - ), + { + 'root': root, + 'requirements': {}, + 'extras': {}, + }, ) From 9b79bc474cf7884d04fa9d239abca1c5f6b0899e Mon Sep 17 00:00:00 2001 From: Stephen Finucane Date: Thu, 18 Sep 2025 13:43:48 +0100 Subject: [PATCH 09/19] Add pyproject.toml support Add the ability to read requirements and extras from pyproject.toml files, eventually allowing us to move away from requirements.txt files if we so choose. Tests are reworked to test this new functionality, with some minor cleanup to remove unused fixtures. 
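
A minimal sketch of the parsing this enables, assuming a PEP 621
[project] table (tomli stands in for tomllib on Python < 3.11, as in
the change below):

    try:
        import tomllib  # Python 3.11+
    except ImportError:
        import tomli as tomllib  # Python 3.10 and lower

    with open('pyproject.toml', 'rb') as fh:
        data = tomllib.load(fh)

    project_table = data.get('project', {})
    dependencies = project_table.get('dependencies', [])     # list of str
    extras = project_table.get('optional-dependencies', {})  # dict of str -> list of str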
Change-Id: I3335b5faac72e2e6962d0930eef0e3b704820bbe Signed-off-by: Stephen Finucane --- .zuul.d/project-template.yaml | 1 + openstack_requirements/project.py | 48 +++++++++++++++++-- openstack_requirements/tests/common.py | 31 +++++++++--- .../tests/files/pyproject.toml | 28 +++++++++++ openstack_requirements/tests/files/setup.cfg | 7 +-- openstack_requirements/tests/test_project.py | 33 +++++++++++-- requirements.txt | 1 + 7 files changed, 131 insertions(+), 18 deletions(-) create mode 100644 openstack_requirements/tests/files/pyproject.toml diff --git a/.zuul.d/project-template.yaml b/.zuul.d/project-template.yaml index d3060cdc2..d6abf4b2f 100644 --- a/.zuul.d/project-template.yaml +++ b/.zuul.d/project-template.yaml @@ -42,6 +42,7 @@ - ^.*requirements-py[2,3].txt$ - ^doc/requirements.txt$ - ^lower-constraints.txt$ + - ^pyproject.toml$ - job: name: requirements-check-self diff --git a/openstack_requirements/project.py b/openstack_requirements/project.py index 805bd158f..dd5e03c49 100644 --- a/openstack_requirements/project.py +++ b/openstack_requirements/project.py @@ -20,6 +20,41 @@ import io import os +try: + # Python 3.11+ + import tomllib +except ImportError: + # Python 3.10 and lower + import tomli as tomllib # type: ignore + + +def _read_pyproject_toml(root): + data = _read_raw(root, 'pyproject.toml') + if data is None: + return None + + return tomllib.loads(data) + + +def _read_pyproject_toml_requirements(root): + data = _read_pyproject_toml(root) or {} + + # projects may not have PEP-621 project metadata + if 'project' not in data: + return None + + return data['project'].get('dependencies') + + +def _read_pyproject_toml_extras(root): + data = _read_pyproject_toml(root) or {} + + # projects may not have PEP-621 project metadata + if 'project' not in data: + return None + + return data['project'].get('optional-dependencies') + def _read_setup_cfg_extras(root): data = _read_raw(root, 'setup.cfg') @@ -28,10 +63,10 @@ def _read_setup_cfg_extras(root): c = configparser.ConfigParser() c.read_file(io.StringIO(data)) - if c.has_section('extras'): - return dict(c.items('extras')) + if not c.has_section('extras'): + return None - return None + return dict(c.items('extras')) def _read_raw(root, filename): @@ -60,6 +95,9 @@ def read(root): # Store requirements result['requirements'] = {} + if (data := _read_pyproject_toml_requirements(root)) is not None: + result['requirements']['pyproject.toml'] = data + for filename in [ 'requirements.txt', 'test-requirements.txt', @@ -77,7 +115,11 @@ def read(root): # Store extras result['extras'] = {} + if (data := _read_setup_cfg_extras(root)) is not None: result['extras']['setup.cfg'] = data + if (data := _read_pyproject_toml_extras(root)) is not None: + result['extras']['setup.cfg'] = data + return result diff --git a/openstack_requirements/tests/common.py b/openstack_requirements/tests/common.py index 3465f9662..5bbc38e35 100644 --- a/openstack_requirements/tests/common.py +++ b/openstack_requirements/tests/common.py @@ -29,27 +29,44 @@ class Project(fixtures.Fixture): """A single project we can update.""" def __init__( - self, req_path, setup_path, setup_cfg_path, test_req_path=None + self, + req_path=None, + setup_path=None, + setup_cfg_path=None, + test_req_path=None, + pyproject_toml_path=None, ): super().__init__() self._req_path = req_path self._setup_path = setup_path self._setup_cfg_path = setup_cfg_path self._test_req_path = test_req_path + self._pyproject_toml_path = pyproject_toml_path def setUp(self): super().setUp() self.root = 
self.useFixture(fixtures.TempDir()).path + self.req_file = os.path.join(self.root, 'requirements.txt') + if self._req_path: + shutil.copy(self._req_path, self.req_file) + self.setup_file = os.path.join(self.root, 'setup.py') + if self._setup_path: + shutil.copy(self._setup_path, self.setup_file) + self.setup_cfg_file = os.path.join(self.root, 'setup.cfg') + if self._setup_cfg_path: + shutil.copy(self._setup_cfg_path, self.setup_cfg_file) + self.test_req_file = os.path.join(self.root, 'test-requirements.txt') - shutil.copy(self._req_path, self.req_file) - shutil.copy(self._setup_path, self.setup_file) - shutil.copy(self._setup_cfg_path, self.setup_cfg_file) if self._test_req_path: shutil.copy(self._test_req_path, self.test_req_file) + self.pyproject_toml_file = os.path.join(self.root, 'pyproject.toml') + if self._pyproject_toml_path: + shutil.copy(self._pyproject_toml_path, self.pyproject_toml_file) + project_fixture = Project( "openstack_requirements/tests/files/project.txt", @@ -73,6 +90,9 @@ def setUp(self): "openstack_requirements/tests/files/pbr_setup.cfg", "openstack_requirements/tests/files/test-project.txt", ) +pep_518_fixture = Project( + pyproject_toml_path="openstack_requirements/tests/files/pyproject.toml", +) class GlobalRequirements(fixtures.Fixture): @@ -105,7 +125,4 @@ def make_project(fixture): denylist = requirement.parse( open("openstack_requirements/tests/files/denylist.txt").read() ) -pbr_project = make_project(pbr_fixture) project_project = make_project(project_fixture) -bad_project = make_project(bad_project_fixture) -oslo_project = make_project(oslo_fixture) diff --git a/openstack_requirements/tests/files/pyproject.toml b/openstack_requirements/tests/files/pyproject.toml new file mode 100644 index 000000000..1f88390c4 --- /dev/null +++ b/openstack_requirements/tests/files/pyproject.toml @@ -0,0 +1,28 @@ +[build-system] +requires = ["pbr>=6.1.1"] +build-backend = "pbr.build" + +[project] +name = "testproject" +description = "OpenStack Test Project" +authors = [ + {name = "OpenStack", email = "openstack-discuss@lists.openstack.org"}, +] +readme = {file = "README.rst", content-type = "text/x-rst"} +license = {text = "Apache-2.0"} +dynamic = ["version"] +dependencies = [ + "requests", + "debtcollector>=3.0", # Apache-2.0 +] +classifiers = [ + "Environment :: OpenStack", + "Intended Audience :: Information Technology", + "Intended Audience :: System Administrators", + "License :: OSI Approved :: Apache Software License", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python :: 3", +] + +[project.urls] +Homepage = "https://docs.openstack.org/requirements" diff --git a/openstack_requirements/tests/files/setup.cfg b/openstack_requirements/tests/files/setup.cfg index 7fb53cd35..668292666 100644 --- a/openstack_requirements/tests/files/setup.cfg +++ b/openstack_requirements/tests/files/setup.cfg @@ -5,14 +5,11 @@ description-file = README.rst author = OpenStack author-email = openstack-discuss@lists.openstack.org -home-page = https://docs.openstack.org/requirements/latest/ +home-page = https://docs.openstack.org/requirements classifier = Environment :: OpenStack Intended Audience :: Information Technology Intended Audience :: System Administrators License :: OSI Approved :: Apache Software License Operating System :: POSIX :: Linux - Programming Language :: Python - Programming Language :: Python :: 2 - Programming Language :: Python :: 2.7 - Programming Language :: Python :: 2.6 + Programming Language :: Python :: 3 diff --git 
a/openstack_requirements/tests/test_project.py b/openstack_requirements/tests/test_project.py index 62a511c9f..a2608933d 100644 --- a/openstack_requirements/tests/test_project.py +++ b/openstack_requirements/tests/test_project.py @@ -25,7 +25,16 @@ class TestReadProject(testtools.TestCase): - def test_pbr(self): + def test_pyproject_toml(self): + root = self.useFixture(common.pep_518_fixture).root + proj = project.read(root) + self.assertEqual(proj['root'], root) + self.assertEqual( + list(sorted(proj['requirements'])), + ['pyproject.toml'], + ) + + def test_setup_cfg(self): root = self.useFixture(common.pbr_fixture).root proj = project.read(root) self.assertEqual(proj['root'], root) @@ -34,7 +43,7 @@ def test_pbr(self): ['requirements.txt', 'test-requirements.txt'], ) - def test_no_setup_py(self): + def test_empty(self): root = self.useFixture(fixtures.TempDir()).path proj = project.read(root) self.assertEqual( @@ -48,7 +57,25 @@ def test_no_setup_py(self): class TestProjectExtras(testtools.TestCase): - def test_smoke(self): + def test_pyproject_toml(self): + root = self.useFixture(fixtures.TempDir()).path + with open(os.path.join(root, 'pyproject.toml'), 'w') as fh: + fh.write( + textwrap.dedent(""" + [project.optional-dependencies] + 1 = [ + "foo", + ] + 2 = [ + "foo", # fred + "bar", + ] + """) + ) + expected = {'1': ['foo'], '2': ['foo', 'bar']} + self.assertEqual(expected, project._read_pyproject_toml_extras(root)) + + def test_setup_cfg(self): root = self.useFixture(fixtures.TempDir()).path with open(os.path.join(root, 'setup.cfg'), 'w') as fh: fh.write( diff --git a/requirements.txt b/requirements.txt index 8984054ca..1ed6c384f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,3 +4,4 @@ requests>=2.14.2 # Apache-2.0 PyYAML>=3.12 # MIT beagle>=0.2.1 # Apache-2.0 setuptools!=24.0.0,!=34.0.0,!=34.0.1,!=34.0.2,!=34.0.3,!=34.1.0,!=34.1.1,!=34.2.0,!=34.3.0,!=34.3.1,!=34.3.2,!=36.2.0,>=21.0.0 # PSF/ZPL +tomli;python_version<'3.11' # MIT From 6759ad0f43ed1cdd3e30e01c98ba756ac4589559 Mon Sep 17 00:00:00 2001 From: OpenStack Proposal Bot Date: Sat, 25 Oct 2025 08:15:31 +0000 Subject: [PATCH 10/19] Updated from generate-constraints Keep version specific pins where applicable. Pin versions for packages that are not ready for updating: - pyasn1,pyasn1_modules - gabbi: placement: "AttributeError: 'NoneType' object has no attribute 'api'" - infoblox-client: designate: |- [designate.plugin] Loaded plugin backend:infoblox [infoblox_client.connector] Cloud WAPI version detected: 2.10 [infoblox_client.connector] Configuring session [urllib3.util.retry] Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None, status=None) [infoblox_client.connector] Authenticating with username and password. 
[infoblox_client.connector] SSL verification is False [infoblox_client.connector] Connector initialized with options: {'host': '192.0.2.1', 'username': '****', 'password': '****', 'http_pool_connections': None, 'http_pool_maxsize': None, 'wapi_version': '2.10', 'ssl_verify': None, 'cert': '****', 'key': '****'} - pysnmp-lextudio: ironic: "AttributeError: module 'pysnmp.hlapi' has no attribute 'usmHMACMD5AuthProtocol'" - boto3, botocore, s3transfer: glance: - https://review.opendev.org/c/openstack/glance-specs/+/962248 Change-Id: I35412c12870fc6292636a0308dce55a5951e3737 Signed-off-by: OpenStack Proposal Bot Signed-off-by: Tony Breeds Generated-By: openstack/project-config:playbooks/proposal/propose_update.sh --- upper-constraints.txt | 368 +++++++++++++++++++----------------------- 1 file changed, 166 insertions(+), 202 deletions(-) diff --git a/upper-constraints.txt b/upper-constraints.txt index 64e1082bd..194120030 100644 --- a/upper-constraints.txt +++ b/upper-constraints.txt @@ -25,52 +25,50 @@ WebOb===1.8.9 sphinxcontrib-actdiag===3.0.0 pecan===1.7.0 os-api-ref===3.1.0 -python-ldap===3.4.4 +python-ldap===3.4.5 oslo.concurrency===7.2.0 -websocket-client===1.8.0 +websocket-client===1.9.0 osprofiler===4.3.0 os-resource-classes===1.1.0 mypy_extensions===1.1.0 tabulate===0.9.0 python-ironic-inspector-client===5.4.0 -lxml===6.0.0 +lxml===6.0.2 vintage===0.4.1 rst2txt===1.1.0 -setproctitle===1.3.6 -pytest===8.4.1 +setproctitle===1.3.7 +pytest===8.4.2 python-slugify===8.0.4 cursive===0.2.3 oslo.service===4.3.0 django-appconf===1.1.0 -ntc_templates===7.9.0 +ntc_templates===8.1.0 sphinxcontrib-nwdiag===2.0.0 rbd-iscsi-client===0.1.8 requests-aws===0.1.8 -alabaster===0.7.16;python_version=='3.9' -alabaster===1.0.0;python_version>='3.10' +alabaster===1.0.0 pbr===7.0.1 munch===4.0.0 waiting===1.5.0 -attrs===25.3.0 +attrs===25.4.0 microversion-parse===2.0.0 jwcrypto===1.5.6 -Pint===0.23;python_version=='3.9' -Pint===0.24.4;python_version>='3.10' +Pint===0.24.4 oslo.i18n===6.6.0 jsonpath-rw-ext===1.2.2 python-mistralclient===6.0.0 oslo.context===6.1.0 rcssmin===1.1.2 pycadf===4.0.1 -grpcio===1.74.0 +grpcio===1.76.0 sniffio===1.3.1 -fixtures===4.2.5 +fixtures===4.2.6 neutron-lib===3.22.0 XStatic-FileSaver===1.3.2.0 -jaraco.functools===4.2.1 +jaraco.functools===4.3.0 oslo.metrics===0.13.0 storage-interfaces===1.0.5 -pydantic===2.11.7 +pydantic===2.12.3 persist-queue===1.0.0 pystache===0.6.8 XStatic-Font-Awesome===4.7.0.0 @@ -84,98 +82,91 @@ XStatic-jQuery===3.5.1.1 ddt===1.7.2 XStatic-Graphlib===2.1.7.0 pyserial===3.5 -moto===5.1.9 +moto===5.1.15 infi.dtypes.wwn===0.1.1 python-freezerclient===6.1.0 python-vitrageclient===5.3.0 -py-pure-client===1.73.0 -krest===1.3.7 -psycopg2===2.9.10 -networkx===3.2.1;python_version=='3.9' -networkx===3.4.2;python_version>='3.10' -cheroot===10.0.1 +py-pure-client===1.77.0 +krest===1.3.8 +psycopg2===2.9.11 +networkx===3.4.2 +cheroot===11.0.0 XStatic-Angular===1.8.2.2 types-requests===2.31.0.6 zuul-sphinx===0.7.0 ply===3.11 -google-api-core===2.25.1 +google-api-core===2.27.0 requests-toolbelt===1.0.0 -simplejson===3.20.1 -types-paramiko===3.5.0.20250801 +simplejson===3.20.2 +types-paramiko===4.0.0.20250822 python-swiftclient===4.8.0 pyOpenSSL===24.2.1 -typing-inspection===0.4.1 +typing-inspection===0.4.2 monasca-common===3.8.0 hyperframe===6.1.0 -zeroconf===0.147.0 -scipy===1.13.1;python_version=='3.9' -scipy===1.15.3;python_version>='3.10' -opentelemetry-exporter-otlp===1.36.0 -python-gnupg===0.5.4 +zeroconf===0.148.0 +scipy===1.15.3 
+opentelemetry-exporter-otlp===1.38.0 +python-gnupg===0.5.5 rsd-lib===1.2.0 XStatic-Jasmine===2.4.1.2 -googleapis-common-protos===1.70.0 -python-glanceclient===4.8.0;python_version=='3.9' -python-glanceclient===4.10.0;python_version>='3.10' -prometheus_client===0.22.1 +googleapis-common-protos===1.71.0 +python-glanceclient===4.10.0 +prometheus_client===0.23.1 jaraco.classes===3.4.0 debtcollector===3.0.0 -responses===0.25.7 +responses===0.25.8 croniter===6.0.0 horizon===25.5.1 octavia-lib===3.10.0 python-watcherclient===4.9.0 -MarkupSafe===3.0.2 -types-python-dateutil===2.9.0.20250708 -ruamel.yaml.clib===0.2.12 -doc8===1.1.2;python_version=='3.9' -doc8===2.0.0;python_version>='3.10' -pymongo===4.13.2 +MarkupSafe===3.0.3 +types-python-dateutil===2.9.0.20251008 +ruamel.yaml.clib===0.2.14 +doc8===2.0.0 +pymongo===4.15.3 python-cloudkittyclient===5.4.0 -soupsieve===2.7 +soupsieve===2.8 sqlparse===0.5.3 oslotest===5.0.1 jsonpointer===3.0.0 defusedxml===0.7.1 -opentelemetry-sdk===1.36.0 +opentelemetry-sdk===1.38.0 netaddr===1.3.0 -pyghmi===1.6.2 +pyghmi===1.6.6 sphinxcontrib-blockdiag===3.0.0 aiosqlite===0.21.0 -thrift===0.22.0 gnocchiclient===7.2.0 -wcwidth===0.2.13 +wcwidth===0.2.14 sphinxcontrib.datatemplates===0.11.0 jsonpath-rw===1.4.0 prettytable===3.16.0 vine===5.1.0 pathspec===0.12.1 taskflow===6.0.2 -arrow===1.3.0 +arrow===1.4.0 semantic-version===2.10.0 -async-timeout===5.0.1;python_version=='3.10' -async-timeout===5.0.1;python_version=='3.9' +async-timeout===5.0.1 virtualbmc===3.2.0 -SQLAlchemy===2.0.42 +SQLAlchemy===2.0.44 pyroute2===0.8.1 -google-auth===2.40.3 +google-auth===2.41.1 kazoo===2.10.0 -pyspnego===0.11.2 +pyspnego===0.12.0 XStatic-roboto-fontface===0.5.0.0 -pyudev===0.24.3 -eventlet===0.40.2 +pyudev===0.24.4 +eventlet===0.40.3 openstack-doc-tools===4.0.1 oslo.messaging===17.1.0 -jira===3.8.0;python_version=='3.9' -jira===3.10.5;python_version>='3.10' +jira===3.10.5 PyJWT===2.10.1 -typing_extensions===4.14.1 +typing_extensions===4.15.0 XStatic-lodash===4.16.4.2 zVMCloudConnector===1.6.3 -paramiko===3.5.1 +paramiko===4.0.0 ifaddr===0.2.0 reno===4.1.0 -ncclient===0.6.19 +ncclient===0.7.0 imagesize===1.4.1 pydot===4.0.1 urllib3===1.26.20 @@ -185,37 +176,33 @@ python-observabilityclient===1.2.0 whereto===0.4.0 pywbem===1.7.3 python-subunit===1.4.4 -tornado===6.5.1 -pycparser===2.22 +pycparser===2.23 mock===5.2.0 -PyYAML===6.0.2 -beautifulsoup4===4.13.4 -ovs===3.5.1 +PyYAML===6.0.3 +beautifulsoup4===4.14.2 +ovs===3.6.0 cryptography===43.0.3 httpcore===1.0.9 URLObject===3.0.0 -psycopg2-binary===2.9.10 -glance_store===4.10.0;python_version=='3.9' -glance_store===5.2.0;python_version>='3.10' +psycopg2-binary===2.9.11 +glance_store===5.2.0 openstack-release-test===8.1.0 requests-mock===1.12.1 os-apply-config===14.0.1 gunicorn===23.0.0 storpool===7.3.0 -textfsm===1.1.3 +textfsm===2.1.0 python-3parclient===4.2.14 django-compressor===4.5.1 -libvirt-python===11.6.0 +libvirt-python===11.8.0 python-zunclient===5.3.0 tzlocal===5.3.1 sphinxcontrib-jsmath===1.0.1 python-novaclient===18.11.0 pact===1.12.0 bcrypt===4.0.1 -exceptiongroup===1.3.0;python_version=='3.10' -exceptiongroup===1.3.0;python_version=='3.9' -os-client-config===2.1.0;python_version=='3.9' -os-client-config===2.3.0;python_version>='3.10' +exceptiongroup===1.3.0 +os-client-config===2.3.0 XStatic-Angular-Gettext===2.4.1.0 h11===0.16.0 Pygments===2.19.2 @@ -223,17 +210,16 @@ XStatic-Hogan===2.0.0.3 XStatic-objectpath===1.2.1.0 python-manilaclient===5.6.0 sphinxcontrib-serializinghtml===2.0.0 -requests===2.32.4 
+requests===2.32.5 snowballstemmer===3.0.1 Jinja2===3.1.6 XStatic-Bootstrap-SCSS===3.4.1.0 pyzabbix===1.3.1 ptyprocess===0.7.0 -threadloop===1.0.2 amqp===5.3.1 -ruamel.yaml===0.18.14 +ruamel.yaml===0.18.16 websockify===0.13.0 -gssapi===1.9.0 +gssapi===1.10.1 XStatic-JQuery.quicksearch===2.0.3.2 pyasn1_modules===0.4.1 mpmath===1.3.0 @@ -245,41 +231,38 @@ XStatic-JQuery-Migrate===3.3.2.1 pytest-html===4.1.1 appdirs===1.4.4 google-auth-httplib2===0.2.0 -daiquiri===3.2.5.1;python_version=='3.9' -daiquiri===3.3.0;python_version>='3.10' +daiquiri===3.4.0 influxdb===5.3.2 funcparserlib===2.0.0a0 passlib===1.7.4 -cliff===4.9.1;python_version=='3.9' -cliff===4.11.0;python_version>='3.10' +cliff===4.11.0 os-brick===6.13.0 +valkey===6.1.1 scp===0.15.0 lark===1.3.0 -python-zaqarclient===3.0.1;python_version=='3.9' -python-zaqarclient===4.1.0;python_version>='3.10' +python-zaqarclient===4.1.0 ldappool===3.0.0 hpack===4.1.0 -joblib===1.5.1 -google-api-python-client===2.177.0 +joblib===1.5.2 +google-api-python-client===2.185.0 castellan===5.4.1 oslo.versionedobjects===3.8.0 enmerkar===0.7.1 webcolors===24.11.1 -aodhclient===3.8.0;python_version=='3.9' -aodhclient===3.9.1;python_version>='3.10' +aodhclient===3.9.1 autobahn===24.4.2 -SQLAlchemy-Utils===0.41.2 +SQLAlchemy-Utils===0.42.0 retryz===0.1.9 pluggy===1.6.0 -coverage===7.10.1 -freezegun===1.5.4 +coverage===7.11.0 +freezegun===1.5.5 mdurl===0.1.2 toml===0.10.2 pycdlib===1.14.0 -pyperclip===1.9.0 -cassandra-driver===3.29.2 +pyperclip===1.11.0 +cassandra-driver===3.29.3 XStatic-Angular-Schema-Form===0.8.13.0 -opentelemetry-exporter-otlp-proto-http===1.36.0 +opentelemetry-exporter-otlp-proto-http===1.38.0 gabbi===3.1.0 nwdiag===3.0.0 XStatic-bootswatch===3.3.7.0 @@ -289,43 +272,42 @@ XStatic-JS-Yaml===3.8.1.0 XStatic-term.js===0.0.7.0 oslo.log===7.2.1 nodeenv===1.9.1 -gossip===2.4.0 +gossip===2.5.0 suds-community===1.2.0 os_vif===4.2.1 qrcode===8.2 oslo.middleware===6.6.0 XStatic-mdi===1.6.50.2 -pydantic_core===2.33.2 +pydantic_core===2.41.4 uritemplate===4.2.0 docutils===0.21.2 threadpoolctl===3.6.0 os-ken===3.1.1 -ujson===5.10.0 +ujson===5.11.0 selenium===3.141.0 -pytest-subtests===0.14.2 -mistral-lib===3.3.1;python_version=='3.9' -mistral-lib===3.4.0;python_version>='3.10' +pytest-subtests===0.15.0 +mistral-lib===3.4.0 dogtag-pki===11.2.1 XStatic-Angular-UUID===0.0.4.0 sphinxcontrib-seqdiag===3.0.0 os-win===5.9.0 capacity===1.3.14 -markdown-it-py===3.0.0 -retrying===1.4.1 +markdown-it-py===4.0.0 +retrying===1.4.2 XStatic-Dagre===0.6.4.1 -platformdirs===4.3.8 +platformdirs===4.5.0 pydotplus===2.0.2 boto3===1.35.99 jeepney===0.9.0 stestr===4.2.0 -pillow===11.3.0 +pillow===12.0.0 infoblox-client===0.6.1 pysmi-lextudio===1.4.3 oslo.serialization===5.8.0 warlock===2.1.0 exabgp===4.2.25 -aiomysql===0.2.0 -types-simplejson===3.20.0.20250326 +aiomysql===0.3.2 +types-simplejson===3.20.0.20250822 sphinxcontrib-httpdomain===1.8.1 metalsmith===2.5.0 s3transfer===0.10.0 @@ -336,29 +318,27 @@ XStatic-moment===2.8.4.3 autopage===0.5.2 gitdb===4.0.12 python-monascaclient===2.8.0 -opentelemetry-api===1.36.0 +opentelemetry-api===1.38.0 automaton===3.2.0 types-urllib3===1.26.25.14 -os-service-types===1.7.0;python_version=='3.9' -os-service-types===1.8.0;python_version>='3.10' +os-service-types===1.8.0 keyring===25.6.0 elementpath===4.8.0 wsgi_intercept===1.13.1 -jsonschema-specifications===2025.4.1 +jsonschema-specifications===2025.9.1 testscenarios===0.5.0 sphinxcontrib-pecanwsme===0.11.0 sadisplay===0.4.9 infinisdk===258.0.2 rich-argparse===1.7.1 packaging===25.0 
-opentelemetry-exporter-otlp-proto-grpc===1.36.0 +opentelemetry-exporter-otlp-proto-grpc===1.38.0 XStatic-Dagre-D3===0.4.17.0 -psutil===7.0.0 -txaio===23.6.1;python_version=='3.9' -txaio===25.6.1;python_version>='3.10' +psutil===7.1.1 +txaio===25.9.2 elasticsearch===2.4.1 django-nose===1.4.7 -asgiref===3.9.1 +asgiref===3.10.0 XStatic-JQuery.TableSorter===2.14.5.2 pifpaf===3.4.0 blockdiag===3.0.0 @@ -367,152 +347,141 @@ infi.dtypes.iqn===0.4.0 XStatic-tv4===1.2.7.0 XStatic-JSEncrypt===2.3.1.1 python-cinderclient===9.8.0 -keystonemiddleware===10.10.0;python_version=='3.9' -keystonemiddleware===10.12.0;python_version>='3.10' +keystonemiddleware===10.12.0 django-formtools===2.5.1 XStatic-Spin===1.2.5.3 -rich===14.1.0 +rich===14.2.0 os-traits===3.5.0 typepy===1.3.4 -SecretStorage===3.3.3 +SecretStorage===3.4.0 XStatic-Rickshaw===1.5.1.0 iso8601===2.1.0 tooz===7.0.0 -idna===3.10 -yamlloader===1.5.1 -protobuf===6.31.1 +idna===3.11 +yamlloader===1.5.2 +protobuf===6.33.0 sushy===5.7.1 python-neutronclient===11.6.0 -types-setuptools===80.9.0.20250801 +types-setuptools===80.9.0.20250822 pika===1.3.2 oslo.cache===3.12.0 -WebTest===3.0.6 +WebTest===3.0.7 os-collect-config===14.0.1 edgegrid-python===2.0.2 python-octaviaclient===3.12.0 -pysaml2===7.5.2 +pysaml2===7.5.4 requests-oauthlib===2.0.0 oslo.reports===3.6.0 pysnmp-lextudio===6.1.2 bitmath===1.3.3.1 -ceilometermiddleware===3.7.0;python_version=='3.9' -ceilometermiddleware===3.8.0;python_version>='3.10' +ceilometermiddleware===3.8.0 testrepository===0.0.21 sympy===1.14.0 Logbook===1.8.2 -PyNaCl===1.5.0 -osc-lib===4.0.2;python_version=='3.9' -osc-lib===4.2.0;python_version>='3.10' +PyNaCl===1.6.0 +osc-lib===4.2.0 python-consul===1.1.0 -more-itertools===10.7.0 +more-itertools===10.8.0 seqdiag===3.0.0 -numpy===2.0.0;python_version=='3.9' -numpy===2.2.6;python_version>='3.10' -msgpack===1.1.1 -Sphinx===7.4.7;python_version=='3.9' -Sphinx===8.1.3;python_version>='3.10' +numpy===2.2.6 +msgpack===1.1.2 +Sphinx===8.1.3 oslo.config===10.0.0 openstackdocstheme===3.5.0 osc-placement===4.7.0 -rpds-py===0.26.0 +rpds-py===0.28.0 zake===0.2.2 -flux===1.3.5 +flux===1.4.0 pysnmpcrypto===0.0.4 flexparser===0.4 -krb5===0.7.1 -PyMySQL===1.1.1 +krb5===0.8.0 +PyMySQL===1.1.2 uhashring===2.4 -kubernetes===33.1.0 -httplib2===0.22.0 +kubernetes===34.1.0 +httplib2===0.31.0 betamax===0.9.0 construct===2.10.70 pytest-metadata===3.1.1 -pyparsing===3.2.3 -geomet===0.2.1.post1 -opentelemetry-exporter-otlp-proto-common===1.36.0 +pyparsing===3.2.5 +geomet===1.1.0 +opentelemetry-exporter-otlp-proto-common===1.38.0 distlib===0.4.0 XStatic-Moment-Timezone===0.5.22.0 -dogpile.cache===1.4.0 +dogpile.cache===1.5.0 python-barbicanclient===7.2.0 -salt===3007.6 -opentelemetry-semantic-conventions===0.57b0 +salt===3007.8 +opentelemetry-semantic-conventions===0.59b0 api-object-schema===2.0.0 blinker===1.9.0 aenum===3.1.16 WSME===0.12.1 -tomli===2.2.1;python_version=='3.10' -tomli===2.2.1;python_version=='3.9' +tomli===2.3.0 oslo.upgradecheck===2.6.0 sherlock===0.4.1 stevedore===5.5.0 botocore===1.35.99 -xmltodict===0.14.2 +xmltodict===1.0.2 pyasn1===0.6.0 oslo.rootwrap===7.7.0 -Django===4.2.23 +Django===4.2.25 pexpect===4.9.0 cmd2===2.7.0 -python-json-logger===3.3.0 -redis===6.2.0 -valkey===6.1.1 +python-json-logger===4.0.0 +redis===7.0.0 jmespath===1.0.1 -click===8.1.8;python_version=='3.9' -click===8.2.2;python_version>='3.10' +click===8.3.0 XStatic-smart-table===1.4.13.2 -kuryr-lib===3.3.0 -scrypt===0.8.27 +kuryr-lib===3.3.1 +scrypt===0.9.4 jsonpatch===1.33 libsass===0.23.0 
os-testr===3.0.0 -cotyledon===2.0.0 -xattr===1.2.0 +cotyledon===2.1.0 +xattr===1.3.0 systemd-python===235 python-memcached===1.62 -openstacksdk===4.5.0;python_version=='3.9' -openstacksdk===4.7.1;python_version>='3.10' +openstacksdk===4.7.1 infi.dtypes.nqn===0.1.0 six===1.17.0 -h2===4.2.0 +h2===4.3.0 dulwich===0.24.1 dfs-sdk===1.2.27 -sentinels===1.0.0 +sentinels===1.1.1 kombu===5.5.4 distro===1.9.0 zstd===1.5.7.2 -yaql===3.1.0 +yaql===3.2.0 durationpy===0.10 requestsexceptions===1.4.0 testresources===2.0.2 -falcon===4.0.2 +falcon===4.1.0 tomlkit===0.13.3 etcd3gw===2.4.2 Flask-RESTful===0.3.10 GitPython===3.1.45 -requests_ntlm===1.3.0 python-ironicclient===5.13.0 babel===2.17.0 XStatic===1.0.3 XStatic-Angular-FileUpload===12.2.13.0 -python-openstackclient===8.0.0;python_version=='3.9' -python-openstackclient===8.2.0;python_version>='3.10' -pyzmq===27.0.0 +python-openstackclient===8.2.0 +pyzmq===27.1.0 oslo.db===17.4.0 simplegeneric===0.8.1 python-pcre===0.7 -yappi===1.6.10 +yappi===1.7.3 mbstrdecoder===1.1.4 pymemcache===4.0.0 -wrapt===1.17.2 +wrapt===2.0.0 oslo.privsep===3.8.0 sphinxcontrib-apidoc===0.6.0 oslo.policy===4.6.0 hvac===2.3.0 -pyeclib===1.6.4 +pyeclib===1.7.0 repoze.lru===0.7 rfc3986===2.0.0 tenacity===9.1.2 +invoke===2.2.1 python-designateclient===6.3.0 -future===1.0.0 pytest-cov===4.1.0 reactivex===4.0.4 Paste===3.10.1 @@ -529,20 +498,19 @@ python-troveclient===8.9.0 cachez===0.1.2 XStatic-Bootstrap-Datepicker===1.4.0.0 netifaces===0.11.0 -cachetools===5.5.2 +cachetools===6.2.1 flexcache===0.3 sphinxcontrib-qthelp===2.0.0 keystoneauth1===5.12.0 statsd===4.0.1 proto-plus===1.26.1 -python-keystoneclient===5.6.0;python_version=='3.9' -python-keystoneclient===5.7.0;python_version>='3.10' +python-keystoneclient===5.7.0 diskimage-builder===3.39.0 heat-translator===3.3.0 python-magnumclient===4.9.0 docker===7.1.0 storops===1.2.11 -anyio===4.9.0 +anyio===4.11.0 XStatic-Angular-lrdragndrop===1.0.2.6 ovsdbapp===2.13.0 aniso8601===10.0.1 @@ -550,60 +518,56 @@ rjsmin===1.2.2 icalendar===6.3.1 decorator===5.2.1 DateTimeRange===2.3.1 -cffi===1.17.1 +cffi===2.0.0 python-cyborgclient===2.6.0 futurist===3.2.1 -jsonschema===4.25.0 +jsonschema===4.25.1 sphinxcontrib-devhelp===2.0.0 -python-blazarclient===4.3.0;python_version=='3.9' -python-blazarclient===4.4.0;python_version>='3.10' -alembic===1.16.4 +python-blazarclient===4.4.0 +alembic===1.17.0 execnet===2.1.1 sphinxcontrib-programoutput===0.18 storpool.spopenstack===3.2.0 -dnspython===2.7.0 +dnspython===2.8.0 oauthlib===3.3.1 zipp===3.23.0 -greenlet===3.2.3 +greenlet===3.2.4 XStatic-Angular-Vis===4.16.0.0 -iniconfig===2.1.0 -referencing===0.36.2 -confluent-kafka===2.11.0 +iniconfig===2.3.0 +referencing===0.37.0 +confluent-kafka===2.12.1 backports.tarfile===1.2.0 -xvfbwrapper===0.2.13 +xvfbwrapper===0.2.15 influxdb-client===1.49.0 tosca-parser===2.13.0 python-consul2===0.1.5 -charset-normalizer===3.4.2 -Flask===3.1.1 +charset-normalizer===3.4.4 +Flask===3.1.2 httpx===0.28.1 sqlalchemy-filters===0.13.0 sphinxcontrib-runcmd===0.2.0 confspirator===0.3.0 -fasteners===0.19 -importlib-metadata===6.2.1;python_version=='3.9' -importlib-metadata===8.7.0;python_version>='3.10' +fasteners===0.20 +importlib_metadata===8.7.0 sortedcontainers===2.4.0 python-linstor===1.25.3 -filelock===3.18.0 +filelock===3.20.0 python-tackerclient===2.4.0 -python-heatclient===4.2.0;python_version=='3.9' -python-heatclient===4.3.0;python_version>='3.10' +python-heatclient===4.3.0 oslo.utils===9.1.0 requests-kerberos===0.15.0 itsdangerous===2.2.0 
XStatic-jquery-ui===1.13.0.1 monasca-statsd===2.7.0 python-dateutil===2.9.0.post0 -virtualenv===20.32.0 +virtualenv===20.35.3 colorama===0.4.6 confetti===2.5.3 ironic-lib===7.0.0 pytz===2025.2 -opentelemetry-proto===1.36.0 +opentelemetry-proto===1.38.0 XStatic-D3===3.5.17.0 actdiag===3.0.0 sysv-ipc===1.1.0 sphinxcontrib-applehelp===2.0.0 -scikit-learn===1.6.1;python_version=='3.9' -scikit-learn===1.7.1;python_version>='3.10' +scikit-learn===1.7.2 From ced14b82274bf0faa87e4f5a2362c389a8ce2cea Mon Sep 17 00:00:00 2001 From: Artem Goncharov Date: Fri, 9 May 2025 15:18:17 +0200 Subject: [PATCH 11/19] Release cap for bcrypt It was necessary to cap version of bcrypt while Keystone was getting rid of this unmaintained dependency. It was completed in last cycle and it is time to remove the limitation of the bcrypt version. Change-Id: Iefcdbc6e050dfe13f54097e665f9a91dec02a63a Signed-off-by: Takashi Kajinami Signed-off-by: Tony Breeds --- global-requirements.txt | 4 +--- upper-constraints.txt | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/global-requirements.txt b/global-requirements.txt index 0619639df..957a253e3 100644 --- a/global-requirements.txt +++ b/global-requirements.txt @@ -18,9 +18,7 @@ autobahn # MIT License automaton # Apache-2.0 autopage # Apache-2.0 Babel!=2.4.0 # BSD -# NOTE bcrypt is currently pinned at 4.0.1 in upper-constraints due to an issue -# with passlib: https://foss.heptapod.net/python-libs/passlib/-/issues/190 -bcrypt==4.0.1 # Apache-2.0 +bcrypt # Apache-2.0 beautifulsoup4 # MIT betamax # Apache-2.0 boto # MIT diff --git a/upper-constraints.txt b/upper-constraints.txt index 194120030..ecff5f61b 100644 --- a/upper-constraints.txt +++ b/upper-constraints.txt @@ -200,7 +200,7 @@ tzlocal===5.3.1 sphinxcontrib-jsmath===1.0.1 python-novaclient===18.11.0 pact===1.12.0 -bcrypt===4.0.1 +bcrypt===4.3.0 exceptiongroup===1.3.0 os-client-config===2.3.0 XStatic-Angular-Gettext===2.4.1.0 From ebb2c96aace18b35c2ec566c905c8e6cab2e75bd Mon Sep 17 00:00:00 2001 From: Allain Legacy Date: Tue, 14 Oct 2025 14:33:37 -0400 Subject: [PATCH 12/19] Add networking generic switch to requirements The networking-generic-switch is being added as a driver requirement needed in unit tests for Ironic so must be added here. 
Change-Id: I6633770c3533c6510f8331831ac3b900ed3421b4 Signed-off-by: Allain Legacy --- global-requirements.txt | 1 + upper-constraints.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/global-requirements.txt b/global-requirements.txt index 0619639df..38b79f7f6 100644 --- a/global-requirements.txt +++ b/global-requirements.txt @@ -346,6 +346,7 @@ monasca-common # Apache-2.0 monasca-statsd # Apache-2.0 networking-bagpipe # Apache-2.0 networking-bgpvpn # Apache-2.0 +networking-generic-switch # Apache-2.0 networking-l2gw # Apache-2.0 networking-sfc # Apache-2.0 neutron # Apache-2.0 diff --git a/upper-constraints.txt b/upper-constraints.txt index 194120030..3a4f41f76 100644 --- a/upper-constraints.txt +++ b/upper-constraints.txt @@ -571,3 +571,4 @@ actdiag===3.0.0 sysv-ipc===1.1.0 sphinxcontrib-applehelp===2.0.0 scikit-learn===1.7.2 +networking-generic-switch===8.0.0 From 99ad43d08b36a6182e9dea3457d34b8eafb928c5 Mon Sep 17 00:00:00 2001 From: Takashi Kajinami Date: Tue, 28 Oct 2025 22:13:00 +0900 Subject: [PATCH 13/19] Fix broken extras check Change-Id: If8878a133e5de423124e57546115b063a7c8f088 Signed-off-by: Takashi Kajinami --- openstack_requirements/check.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openstack_requirements/check.py b/openstack_requirements/check.py index 8cab75eab..34ded6df5 100644 --- a/openstack_requirements/check.py +++ b/openstack_requirements/check.py @@ -108,7 +108,7 @@ def process(self, strict=True): for fname, extras in self.project['extras'].items(): print(f"Processing {fname} (extras)") - for name, content in extras: + for name, content in extras.items(): print(f"Processing .[{name}]") self.reqs_by_file[name] = self.extract_reqs(content, strict) From b4c9e30d26dd6301258e5d218cbf44e02d1f0e14 Mon Sep 17 00:00:00 2001 From: Stephen Finucane Date: Tue, 28 Oct 2025 16:28:41 +0000 Subject: [PATCH 14/19] Fix keyname used to store extras Change-Id: Ic9885cf58cca1ef560c2a25134fe5402fd32bc82 Signed-off-by: Stephen Finucane --- openstack_requirements/project.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openstack_requirements/project.py b/openstack_requirements/project.py index dd5e03c49..594b9336f 100644 --- a/openstack_requirements/project.py +++ b/openstack_requirements/project.py @@ -120,6 +120,6 @@ def read(root): result['extras']['setup.cfg'] = data if (data := _read_pyproject_toml_extras(root)) is not None: - result['extras']['setup.cfg'] = data + result['extras']['pyproject.toml'] = data return result From 1505061d9cbd9efb184fe60968af80d46d33d1e6 Mon Sep 17 00:00:00 2001 From: Stephen Finucane Date: Tue, 28 Oct 2025 16:32:07 +0000 Subject: [PATCH 15/19] Store pyproject.toml data as strings pyproject.toml files are structured, while setup.cfg and requirements.txt files need some normalization. Temporarily unnormalize the former pending a fix, so that we can treat them the same. 
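
Put differently, the structured PEP 621 lists are flattened back into
the newline-separated strings that the requirements.txt and setup.cfg
code paths already expect. A small illustration (values are made up):

    dependencies = ['requests', 'debtcollector>=3.0']
    requirements_blob = '\n'.join(dependencies)

    optional = {'test': ['pytest', 'flake8']}
    extras_blobs = {name: '\n'.join(reqs) for name, reqs in optional.items()}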
Change-Id: Ief701ff7e9627487fefab4e367da887742c25eab Signed-off-by: Stephen Finucane --- openstack_requirements/project.py | 10 ++++++++-- openstack_requirements/tests/test_project.py | 2 +- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/openstack_requirements/project.py b/openstack_requirements/project.py index 594b9336f..213d42834 100644 --- a/openstack_requirements/project.py +++ b/openstack_requirements/project.py @@ -43,7 +43,9 @@ def _read_pyproject_toml_requirements(root): if 'project' not in data: return None - return data['project'].get('dependencies') + # FIXME(stephenfin): We should not be doing this, but the fix requires a + # larger change to do normalization here. + return '\n'.join(data['project'].get('dependencies', [])) def _read_pyproject_toml_extras(root): @@ -53,7 +55,11 @@ def _read_pyproject_toml_extras(root): if 'project' not in data: return None - return data['project'].get('optional-dependencies') + # FIXME(stephenfin): As above, we should not be doing this. + return { + k: '\n'.join(v) for k, v in + data['project'].get('optional-dependencies', {}).items() + } def _read_setup_cfg_extras(root): diff --git a/openstack_requirements/tests/test_project.py b/openstack_requirements/tests/test_project.py index a2608933d..da672fe8e 100644 --- a/openstack_requirements/tests/test_project.py +++ b/openstack_requirements/tests/test_project.py @@ -72,7 +72,7 @@ def test_pyproject_toml(self): ] """) ) - expected = {'1': ['foo'], '2': ['foo', 'bar']} + expected = {'1': 'foo', '2': 'foo\nbar'} self.assertEqual(expected, project._read_pyproject_toml_extras(root)) def test_setup_cfg(self): From 41dd43d9a7ecb57274b816861a0eb8abc0f8e657 Mon Sep 17 00:00:00 2001 From: Stephen Finucane Date: Tue, 28 Oct 2025 16:24:54 +0000 Subject: [PATCH 16/19] Add tests for parsing requirements Change-Id: I915269401436820af463c84240111794bf116611 Signed-off-by: Stephen Finucane --- openstack_requirements/tests/test_check.py | 39 ++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/openstack_requirements/tests/test_check.py b/openstack_requirements/tests/test_check.py index e8a25e05d..414d2c67d 100644 --- a/openstack_requirements/tests/test_check.py +++ b/openstack_requirements/tests/test_check.py @@ -19,6 +19,45 @@ import testtools +class TestRequirementsList(testtools.TestCase): + def setUp(self): + super().setUp() + self._stdout_fixture = fixtures.StringStream('stdout') + self.stdout = self.useFixture(self._stdout_fixture).stream + self.useFixture(fixtures.MonkeyPatch('sys.stdout', self.stdout)) + + def test_extras__setup_cfg(self): + project_data = { + 'root': '/fake/root', + 'requirements': { + 'requirements.txt': 'requests>=2.0.0\n' + }, + 'extras': { + 'setup.cfg': { + 'test': 'pytest>=6.0.0\nflake8>=3.8.0\n', + 'dev': 'black>=24.0.0\nmypy>=0.900\n' + } + } + } + + req_list = check.RequirementsList('test-project', project_data) + req_list.process(strict=False) + + self.assertIn('test', req_list.reqs_by_file) + self.assertIn('dev', req_list.reqs_by_file) + + test_reqs = req_list.reqs_by_file['test'] + dev_reqs = req_list.reqs_by_file['dev'] + + self.assertEqual(len(test_reqs), 2) + self.assertIn('pytest', test_reqs) + self.assertIn('flake8', test_reqs) + + self.assertEqual(len(dev_reqs), 2) + self.assertIn('black', dev_reqs) + self.assertIn('mypy', dev_reqs) + + class TestIsReqInGlobalReqs(testtools.TestCase): def setUp(self): super().setUp() From cbafcca3c8b6975a76e6158f9631aa7fabb9d7dd Mon Sep 17 00:00:00 2001 From: Thomas Goirand Date: Mon, 27 Oct 2025 10:55:16 
+0100 Subject: [PATCH 17/19] Add awscurl Ceilometer recently added requests-aws to requirements. Though this package is unmaintained for 10+ years upstream. Let's add awscurl to replace it. Related PR: https://review.opendev.org/c/openstack/ceilometer/+/964757 Checklist: - Is the library actively maintained?: Yes, and it's even recommended by AWS itself. - Is the library good code? Looks good to me. - Is the library license compatible? It's under the MIT license (ie: we call that variant "Expat" license in Debian, since there's multiple version of MIT). That's compatible. - Is the library already packaged in the distros we target (Ubuntu latest LTS / Debian latest)? Well, I'm the actual maintainer, and refused to package requests-aws because it's unmaintained upstream. I've just uploaded awscurl in Debian, so it's in the NEW queue. It will eventually reach Ubuntu when approved by FTP masters. - Is the library required for OpenStack project or related dev or infrastructure setup? Yes, see https://review.opendev.org/c/openstack/ceilometer/+/964757 That's a replacement for requests-aws that Ceilometer has been using, and which I don't think is maintained upstream. - If the library release is managed by the Openstack release process does it use the cycle-with-intermediary release type? N/A Signed-off-by: Thomas Goirand Change-Id: I3a95d3e4cc83e1bf3fec2557becc0b7d88fef2ba Signed-off-by: Tony Breeds --- global-requirements.txt | 1 + upper-constraints.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/global-requirements.txt b/global-requirements.txt index b39f5eabe..9dfde3bd5 100644 --- a/global-requirements.txt +++ b/global-requirements.txt @@ -17,6 +17,7 @@ apscheduler # MIT License autobahn # MIT License automaton # Apache-2.0 autopage # Apache-2.0 +awscurl # MIT Babel!=2.4.0 # BSD bcrypt # Apache-2.0 beautifulsoup4 # MIT diff --git a/upper-constraints.txt b/upper-constraints.txt index d56ff4283..908f32d0e 100644 --- a/upper-constraints.txt +++ b/upper-constraints.txt @@ -572,3 +572,4 @@ sysv-ipc===1.1.0 sphinxcontrib-applehelp===2.0.0 scikit-learn===1.7.2 networking-generic-switch===8.0.0 +awscurl===0.36 From e2d30f1ad7b3e7c1b3d2d62ce826b8d0c953c141 Mon Sep 17 00:00:00 2001 From: OpenStack Proposal Bot Date: Thu, 30 Oct 2025 10:03:34 +0000 Subject: [PATCH 18/19] update constraint for os-service-types to new release 1.8.1 meta: version: 1.8.1 meta: team: OpenStackSDK meta: diff-start: - meta: series: independent meta: branch: master meta: release-type: release meta: pypi: yes meta: first: no meta: release:Author: Stephen Finucane meta: release:Commit: Stephen Finucane meta: release:Change-Id: I0b29e476360d068c2814a24ad132b92da4861a4a meta: release:Workflow+1: Thierry Carrez meta: release:Code-Review+2: Elod Illes meta: release:Code-Review+2: Thierry Carrez meta: release:Code-Review+1: Artem Goncharov Change-Id: If0ef5c4b3310a0e6d9223b4f67f7996a36257442 Signed-off-by: OpenStack Proposal Bot Generated-By: openstack/project-config:roles/copy-release-tools-scripts/files/release-tools/update_constraints.sh --- upper-constraints.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/upper-constraints.txt b/upper-constraints.txt index 908f32d0e..18c4cb44e 100644 --- a/upper-constraints.txt +++ b/upper-constraints.txt @@ -321,7 +321,7 @@ python-monascaclient===2.8.0 opentelemetry-api===1.38.0 automaton===3.2.0 types-urllib3===1.26.25.14 -os-service-types===1.8.0 +os-service-types===1.8.1 keyring===25.6.0 elementpath===4.8.0 wsgi_intercept===1.13.1 From 
0b542873b8fe527b6280790bd59e5dc146350f14 Mon Sep 17 00:00:00 2001 From: OpenStack Proposal Bot Date: Thu, 30 Oct 2025 10:08:31 +0000 Subject: [PATCH 19/19] update constraint for pbr to new release 7.0.2 meta: version: 7.0.2 meta: team: oslo meta: diff-start: - meta: series: independent meta: branch: master meta: release-type: release meta: pypi: yes meta: first: no meta: release:Author: Jeremy Stanley meta: release:Commit: Jeremy Stanley meta: release:Change-Id: I29cac66277deb12ff8bb67ce7c91cbf09e100a7f meta: release:Code-Review+2: Thierry Carrez meta: release:Code-Review+1: Takashi Kajinami meta: release:Code-Review+1: Daniel Bengtsson meta: release:Code-Review+2: Elod Illes meta: release:Code-Review+1: Stephen Finucane meta: release:Workflow+1: Thierry Carrez Change-Id: I5b6f8f561181696432fab11df5aae6e9e1794161 Signed-off-by: OpenStack Proposal Bot Generated-By: openstack/project-config:roles/copy-release-tools-scripts/files/release-tools/update_constraints.sh --- upper-constraints.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/upper-constraints.txt b/upper-constraints.txt index 908f32d0e..959cf2d63 100644 --- a/upper-constraints.txt +++ b/upper-constraints.txt @@ -47,7 +47,7 @@ sphinxcontrib-nwdiag===2.0.0 rbd-iscsi-client===0.1.8 requests-aws===0.1.8 alabaster===1.0.0 -pbr===7.0.1 +pbr===7.0.2 munch===4.0.0 waiting===1.5.0 attrs===25.4.0