From ea5044dda26f3ef18d46f139c4d9eb97baec5ba8 Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Mon, 17 Jul 2023 13:28:38 -0700 Subject: [PATCH 01/76] Create script to update repo skeleton #80 Signed-off-by: Jono Yang --- etc/scripts/update_skeleton.py | 104 +++++++++++++++++++++++++++++++++ 1 file changed, 104 insertions(+) create mode 100644 etc/scripts/update_skeleton.py diff --git a/etc/scripts/update_skeleton.py b/etc/scripts/update_skeleton.py new file mode 100644 index 00000000000..635898ba5af --- /dev/null +++ b/etc/scripts/update_skeleton.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# ScanCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/skeleton for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + +from pathlib import Path +import os +import subprocess + +import click + + +NEXB_PUBLIC_REPO_NAMES=[ + "aboutcode-toolkit", + "ahocode", + "bitcode", + "clearcode-toolkit", + "commoncode", + "container-inspector", + "debian-inspector", + "deltacode", + "elf-inspector", + "extractcode", + "fetchcode", + "gemfileparser2", + "gh-issue-sandbox", + "go-inspector", + "heritedcode", + "license-expression", + "license_copyright_pipeline", + "nuget-inspector", + "pip-requirements-parser", + "plugincode", + "purldb", + "pygmars", + "python-inspector", + "sanexml", + "saneyaml", + "scancode-analyzer", + "scancode-toolkit-contrib", + "scancode-toolkit-reference-scans", + "thirdparty-toolkit", + "tracecode-toolkit", + "tracecode-toolkit-strace", + "turbo-spdx", + "typecode", + "univers", +] + + +@click.command() +@click.help_option("-h", "--help") +def update_skeleton_files(repo_names=NEXB_PUBLIC_REPO_NAMES): + """ + Update project files of nexB projects that use the skeleton + + This script will: + - Clone the repo + - Add the skeleton repo as a new origin + - Create a new branch named "update-skeleton-files" + - Merge in the new skeleton files into the "update-skeleton-files" branch + + The user will need to save merge commit messages that pop up when running + this script in addition to resolving the merge conflicts on repos that have + them. 
+ """ + + # Create working directory + work_dir_path = Path("/tmp/update_skeleton/") + if not os.path.exists(work_dir_path): + os.makedirs(work_dir_path, exist_ok=True) + + for repo_name in repo_names: + # Move to work directory + os.chdir(work_dir_path) + + # Clone repo + repo_git = f"git@github.com:nexB/{repo_name}.git" + subprocess.run(["git", "clone", repo_git]) + + # Go into cloned repo + os.chdir(work_dir_path / repo_name) + + # Add skeleton as an origin + subprocess.run(["git", "remote", "add", "skeleton", "git@github.com:nexB/skeleton.git"]) + + # Fetch skeleton files + subprocess.run(["git", "fetch", "skeleton"]) + + # Create and checkout new branch + subprocess.run(["git", "checkout", "-b", "update-skeleton-files"]) + + # Merge skeleton files into the repo + subprocess.run(["git", "merge", "skeleton/main", "--allow-unrelated-histories"]) + + +if __name__ == "__main__": + update_skeleton_files() From 320ec21daa249ceae0c07787f9e52134b3ad06ab Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Thu, 27 Mar 2025 14:54:31 -0700 Subject: [PATCH 02/76] Replace black and isort with ruff * Use ruff config and Make commands from scancode.io Signed-off-by: Jono Yang --- Makefile | 27 ++++++++++++--------------- pyproject.toml | 37 +++++++++++++++++++++++++++++++++++++ setup.cfg | 3 +-- 3 files changed, 50 insertions(+), 17 deletions(-) diff --git a/Makefile b/Makefile index 94451b33248..1738b20a89d 100644 --- a/Makefile +++ b/Makefile @@ -17,27 +17,24 @@ dev: @echo "-> Configure the development envt." ./configure --dev -isort: - @echo "-> Apply isort changes to ensure proper imports ordering" - ${VENV}/bin/isort --sl -l 100 src tests setup.py - -black: - @echo "-> Apply black code formatter" - ${VENV}/bin/black -l 100 src tests setup.py - doc8: @echo "-> Run doc8 validation" @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ -valid: isort black +valid: + @echo "-> Run Ruff format" + @${ACTIVATE} ruff format + @echo "-> Run Ruff linter" + @${ACTIVATE} ruff check --fix check: - @echo "-> Run pycodestyle (PEP8) validation" - @${ACTIVATE} pycodestyle --max-line-length=100 --exclude=.eggs,venv,lib,thirdparty,docs,migrations,settings.py,.cache . - @echo "-> Run isort imports ordering validation" - @${ACTIVATE} isort --sl --check-only -l 100 setup.py src tests . 
- @echo "-> Run black validation" - @${ACTIVATE} black --check --check -l 100 src tests setup.py + @echo "-> Run Ruff linter validation (pycodestyle, bandit, isort, and more)" + @${ACTIVATE} ruff check + @echo "-> Run Ruff format validation" + @${ACTIVATE} ruff format --check + @$(MAKE) doc8 + @echo "-> Run ABOUT files validation" + @${ACTIVATE} about check etc/ clean: @echo "-> Clean the Python env" diff --git a/pyproject.toml b/pyproject.toml index cde79074d70..01e60fc6583 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,3 +50,40 @@ addopts = [ "--strict-markers", "--doctest-modules" ] + +[tool.ruff] +line-length = 88 +extend-exclude = [] +target-version = "py310" + +[tool.ruff.lint] +# Rules: https://docs.astral.sh/ruff/rules/ +select = [ + "E", # pycodestyle + "W", # pycodestyle warnings + "D", # pydocstyle + "F", # Pyflakes + "UP", # pyupgrade + "S", # flake8-bandit + "I", # isort + "C9", # McCabe complexity +] +ignore = ["D1", "D203", "D205", "D212", "D400", "D415"] + +[tool.ruff.lint.isort] +force-single-line = true +sections = { django = ["django"] } +section-order = [ + "future", + "standard-library", + "django", + "third-party", + "first-party", + "local-folder", +] + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.ruff.lint.per-file-ignores] +# Place paths of files to be ignored by ruff here diff --git a/setup.cfg b/setup.cfg index ef7d369b42a..aaec643fc5c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -54,8 +54,7 @@ testing = aboutcode-toolkit >= 7.0.2 pycodestyle >= 2.8.0 twine - black - isort + ruff docs = Sphinx>=5.0.2 From d4e29c36c21ab81797604911cdeaea83d80e8088 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 00:46:06 +0100 Subject: [PATCH 03/76] Use org standard 100 line length Signed-off-by: Philippe Ombredanne --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 01e60fc6583..cea91bd1763 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,7 +52,7 @@ addopts = [ ] [tool.ruff] -line-length = 88 +line-length = 100 extend-exclude = [] target-version = "py310" From 6c028f7219ae876ea62074ae435e574525e205d6 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 08:40:28 +0100 Subject: [PATCH 04/76] Lint all common code directories Signed-off-by: Philippe Ombredanne --- pyproject.toml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index cea91bd1763..9e627366170 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,6 +55,14 @@ addopts = [ line-length = 100 extend-exclude = [] target-version = "py310" +include = [ + "pyproject.toml", + "src/**/*.py", + "etc/**/*.py", + "test/**/*.py", + "doc/**/*", + "*.py" +] [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ From 233f3edabfbab390029fb9f1842bf43766b04583 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 09:07:47 +0100 Subject: [PATCH 05/76] Remove unused targets Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 1738b20a89d..930e80101c6 100644 --- a/Makefile +++ b/Makefile @@ -48,4 +48,4 @@ docs: rm -rf docs/_build/ @${ACTIVATE} sphinx-build docs/ docs/_build/ -.PHONY: conf dev check valid black isort clean test docs +.PHONY: conf dev check valid clean test docs From 55545bf7a1a8f119a560c7f548ce5a460f39f37d Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 11:03:05 +0100 Subject: [PATCH 06/76] Improve import sorting 
Signed-off-by: Philippe Ombredanne --- etc/scripts/check_thirdparty.py | 1 - etc/scripts/fetch_thirdparty.py | 2 +- etc/scripts/test_utils_pip_compatibility_tags.py | 3 +-- etc/scripts/utils_dejacode.py | 1 - etc/scripts/utils_pip_compatibility_tags.py | 14 ++++++-------- etc/scripts/utils_thirdparty.py | 3 +-- pyproject.toml | 7 ++++++- 7 files changed, 15 insertions(+), 16 deletions(-) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 2daded948fa..62dbb14f053 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -12,7 +12,6 @@ import utils_thirdparty - @click.command() @click.option( "-d", diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index 3f9ff527a1e..30d376c475c 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -16,8 +16,8 @@ import click -import utils_thirdparty import utils_requirements +import utils_thirdparty TRACE = False TRACE_DEEP = False diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py index 98187c56437..a33b8b387ae 100644 --- a/etc/scripts/test_utils_pip_compatibility_tags.py +++ b/etc/scripts/test_utils_pip_compatibility_tags.py @@ -25,14 +25,13 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ -from unittest.mock import patch import sysconfig +from unittest.mock import patch import pytest import utils_pip_compatibility_tags - @pytest.mark.parametrize( "version_info, expected", [ diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index 652252d48ed..c71543f6273 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -14,7 +14,6 @@ import requests import saneyaml - from packvers import version as packaging_version """ diff --git a/etc/scripts/utils_pip_compatibility_tags.py b/etc/scripts/utils_pip_compatibility_tags.py index af42a0cdd25..de0ac95d1df 100644 --- a/etc/scripts/utils_pip_compatibility_tags.py +++ b/etc/scripts/utils_pip_compatibility_tags.py @@ -27,14 +27,12 @@ import re -from packvers.tags import ( - compatible_tags, - cpython_tags, - generic_tags, - interpreter_name, - interpreter_version, - mac_platforms, -) +from packvers.tags import compatible_tags +from packvers.tags import cpython_tags +from packvers.tags import generic_tags +from packvers.tags import interpreter_name +from packvers.tags import interpreter_version +from packvers.tags import mac_platforms _osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)") diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 46dc7289351..b0295eca933 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -25,14 +25,13 @@ import packageurl import requests import saneyaml +import utils_pip_compatibility_tags from commoncode import fileutils from commoncode.hash import multi_checksums from commoncode.text import python_safe_name from packvers import tags as packaging_tags from packvers import version as packaging_version -import utils_pip_compatibility_tags - """ Utilities to manage Python thirparty libraries source, binaries and metadata in local directories and remote repositories. 
diff --git a/pyproject.toml b/pyproject.toml index 9e627366170..ba55770f20a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,10 +76,15 @@ select = [ "I", # isort "C9", # McCabe complexity ] -ignore = ["D1", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D203", "D205", "D212", "D400", "D415"] [tool.ruff.lint.isort] force-single-line = true +lines-after-imports = 1 +default-section = "first-party" +known-first-party = ["src", "tests", "etc/scripts/**/*.py"] +known-third-party = ["click", "pytest"] + sections = { django = ["django"] } section-order = [ "future", From 0b63e5073b6b1cdc0960abe35060ad0fdb67b665 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 21:35:16 +0100 Subject: [PATCH 07/76] Apply small code updates Signed-off-by: Philippe Ombredanne --- etc/scripts/utils_requirements.py | 20 ++++++++----- etc/scripts/utils_thirdparty.py | 48 +++++++++++++++---------------- 2 files changed, 37 insertions(+), 31 deletions(-) diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index 1c502390f2c..a9ac2235bbc 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -57,21 +57,25 @@ def get_required_name_version(requirement, with_unpinned=False): >>> assert get_required_name_version("fooA==1.2.3.DEV1") == ("fooa", "1.2.3.dev1") >>> assert get_required_name_version("foo==1.2.3", with_unpinned=False) == ("foo", "1.2.3") >>> assert get_required_name_version("foo", with_unpinned=True) == ("foo", "") - >>> assert get_required_name_version("foo>=1.2", with_unpinned=True) == ("foo", ""), get_required_name_version("foo>=1.2") + >>> expected = ("foo", ""), get_required_name_version("foo>=1.2") + >>> assert get_required_name_version("foo>=1.2", with_unpinned=True) == expected >>> try: ... assert not get_required_name_version("foo", with_unpinned=False) ... except Exception as e: ... 
assert "Requirement version must be pinned" in str(e) """ requirement = requirement and "".join(requirement.lower().split()) - assert requirement, f"specifier is required is empty:{requirement!r}" + if not requirement: + raise ValueError(f"specifier is required is empty:{requirement!r}") name, operator, version = split_req(requirement) - assert name, f"Name is required: {requirement}" + if not name: + raise ValueError(f"Name is required: {requirement}") is_pinned = operator == "==" if with_unpinned: version = "" else: - assert is_pinned and version, f"Requirement version must be pinned: {requirement}" + if not is_pinned and version: + raise ValueError(f"Requirement version must be pinned: {requirement}") return name, version @@ -120,7 +124,7 @@ def get_installed_reqs(site_packages_dir): # setuptools, pip args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir] - return subprocess.check_output(args, encoding="utf-8") + return subprocess.check_output(args, encoding="utf-8") # noqa: S603 comparators = ( @@ -150,9 +154,11 @@ def split_req(req): >>> assert split_req("foo >= 1.2.3 ") == ("foo", ">=", "1.2.3"), split_req("foo >= 1.2.3 ") >>> assert split_req("foo>=1.2") == ("foo", ">=", "1.2"), split_req("foo>=1.2") """ - assert req + if not req: + raise ValueError("req is required") # do not allow multiple constraints and tags - assert not any(c in req for c in ",;") + if not any(c in req for c in ",;"): + raise Exception(f"complex requirements with : or ; not supported: {req}") req = "".join(req.split()) if not any(c in req for c in comparators): return req, "", "" diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index b0295eca933..6d5ffdce529 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -559,7 +558,8 @@ def download(self, dest_dir=THIRDPARTY_DIR): Download this distribution into `dest_dir` directory. Return the fetched filename. 
""" - assert self.filename + if not self.filename: + raise ValueError(f"self.filename has no value but is required: {self.filename!r}") if TRACE_DEEP: print( f"Fetching distribution of {self.name}=={self.version}:", @@ -829,10 +829,9 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): urls = LinksRepository.from_url( use_cached_index=use_cached_index).links errors = [] - extra_lic_names = [l.get("file") - for l in self.extra_data.get("licenses", {})] + extra_lic_names = [lic.get("file") for lic in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] - extra_lic_names = [ln for ln in extra_lic_names if ln] + extra_lic_names = [eln for eln in extra_lic_names if eln] lic_names = [f"{key}.LICENSE" for key in self.get_license_keys()] for filename in lic_names + extra_lic_names: floc = os.path.join(dest_dir, filename) @@ -853,7 +852,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from remote: {lic_url}") - except: + except Exception: try: # try licensedb second lic_url = f"{LICENSEDB_API_URL}/{filename}" @@ -866,8 +865,9 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from licensedb: {lic_url}") - except: - msg = f'No text for license {filename} in expression "{self.license_expression}" from {self}' + except Exception: + msg = f"No text for license {filename} in expression " + f"{self.license_expression!r} from {self}" print(msg) errors.append(msg) @@ -1009,7 +1009,7 @@ def get_license_link_for_filename(filename, urls): exception if no link is found or if there are more than one link for that file name. """ - path_or_url = [l for l in urls if l.endswith(f"/{filename}")] + path_or_url = [url for url in urls if url.endswith(f"/{filename}")] if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: @@ -1140,7 +1140,6 @@ def to_filename(self): @attr.attributes class Wheel(Distribution): - """ Represents a wheel file. @@ -1301,7 +1300,7 @@ def is_pure(self): def is_pure_wheel(filename): try: return Wheel.from_filename(filename).is_pure() - except: + except Exception: return False @@ -1489,8 +1488,7 @@ def dists_from_paths_or_urls(cls, paths_or_urls): ) except InvalidDistributionFilename: if TRACE_DEEP: - print( - f" Skipping invalid distribution from: {path_or_url}") + print(f" Skipping invalid distribution from: {path_or_url}") continue return dists @@ -1500,8 +1498,7 @@ def get_distributions(self): """ if self.sdist: yield self.sdist - for wheel in self.wheels: - yield wheel + yield from self.wheels def get_url_for_filename(self, filename): """ @@ -1632,7 +1629,8 @@ class PypiSimpleRepository: type=dict, default=attr.Factory(lambda: defaultdict(dict)), metadata=dict( - help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} available in this repo" + help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} " + "available in this repo" ), ) @@ -1647,7 +1645,8 @@ class PypiSimpleRepository: type=bool, default=False, metadata=dict( - help="If True, use any existing on-disk cached PyPI index files. Otherwise, fetch and cache." + help="If True, use any existing on-disk cached PyPI index files. " + "Otherwise, fetch and cache." ), ) @@ -1656,7 +1655,8 @@ def _get_package_versions_map(self, name): Return a mapping of all available PypiPackage version for this package name. The mapping may be empty. 
It is ordered by version from oldest to newest """ - assert name + if not name: + raise ValueError(f"name is required: {name!r}") normalized_name = NameVer.normalize_name(name) versions = self.packages[normalized_name] if not versions and normalized_name not in self.fetched_package_normalized_names: @@ -1713,7 +1713,7 @@ def fetch_links(self, normalized_name): ) links = collect_urls(text) # TODO: keep sha256 - links = [l.partition("#sha256=") for l in links] + links = [link.partition("#sha256=") for link in links] links = [url for url, _, _sha256 in links] return links @@ -1936,7 +1936,7 @@ def get_remote_file_content( # several redirects and that we can ignore content there. A HEAD request may # not get us this last header print(f" DOWNLOADING: {url}") - with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: + with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 status = response.status_code if status != requests.codes.ok: # NOQA if status == 429 and _delay < 20: @@ -2161,7 +2161,7 @@ def call(args, verbose=TRACE): """ if TRACE_DEEP: print("Calling:", " ".join(args)) - with subprocess.Popen( + with subprocess.Popen( # noqa: S603 args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: @@ -2227,7 +2227,7 @@ def download_wheels_with_pip( cli_args.extend(["--requirement", req_file]) if TRACE: - print(f"Downloading wheels using command:", " ".join(cli_args)) + print("Downloading wheels using command:", " ".join(cli_args)) existing = set(os.listdir(dest_dir)) error = False @@ -2260,7 +2260,7 @@ def download_wheels_with_pip( def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2312,5 +2312,5 @@ def get_license_expression(declared_licenses): return get_only_expression_from_extracted_license(declared_licenses) except ImportError: # Scancode is not installed, clean and join all the licenses - lics = [python_safe_name(l).lower() for l in declared_licenses] + lics = [python_safe_name(lic).lower() for lic in declared_licenses] return " AND ".join(lics).lower() From 092f545f5b87442ae22884cb4d5381883343a1c2 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 21:42:03 +0100 Subject: [PATCH 08/76] Format code Signed-off-by: Philippe Ombredanne --- etc/scripts/check_thirdparty.py | 3 +- etc/scripts/fetch_thirdparty.py | 26 ++++----- etc/scripts/gen_pypi_simple.py | 4 +- etc/scripts/utils_dejacode.py | 15 +++--- etc/scripts/utils_requirements.py | 9 ++-- etc/scripts/utils_thirdparty.py | 90 +++++++++++-------------------- 6 files changed, 50 insertions(+), 97 deletions(-) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 62dbb14f053..1aa4e28aca1 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -16,8 +16,7 @@ @click.option( "-d", "--dest", - type=click.Path(exists=True, readable=True, - path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), required=True, help="Path to the thirdparty directory to check.", ) diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index 30d376c475c..c2246837ee3 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -55,8 +55,7 @@ "-d", 
"--dest", "dest_dir", - type=click.Path(exists=True, readable=True, - path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), metavar="DIR", default=utils_thirdparty.THIRDPARTY_DIR, show_default=True, @@ -121,7 +120,7 @@ show_default=False, multiple=True, help="Package name(s) that come only in sdist format (no wheels). " - "The command will not fail and exit if no wheel exists for these names", + "The command will not fail and exit if no wheel exists for these names", ) @click.option( "--wheel-only", @@ -132,7 +131,7 @@ show_default=False, multiple=True, help="Package name(s) that come only in wheel format (no sdist). " - "The command will not fail and exit if no sdist exists for these names", + "The command will not fail and exit if no sdist exists for these names", ) @click.option( "--no-dist", @@ -143,7 +142,7 @@ show_default=False, multiple=True, help="Package name(s) that do not come either in wheel or sdist format. " - "The command will not fail and exit if no distribution exists for these names", + "The command will not fail and exit if no distribution exists for these names", ) @click.help_option("-h", "--help") def fetch_thirdparty( @@ -225,8 +224,7 @@ def fetch_thirdparty( environments = None if wheels: evts = itertools.product(python_versions, operating_systems) - environments = [utils_thirdparty.Environment.from_pyver_and_os( - pyv, os) for pyv, os in evts] + environments = [utils_thirdparty.Environment.from_pyver_and_os(pyv, os) for pyv, os in evts] # Collect PyPI repos repos = [] @@ -250,7 +248,6 @@ def fetch_thirdparty( print(f"Processing: {name} @ {version}") if wheels: for environment in environments: - if TRACE: print(f" ==> Fetching wheel for envt: {environment}") @@ -262,14 +259,11 @@ def fetch_thirdparty( repos=repos, ) if not fetched: - wheels_or_sdist_not_found[f"{name}=={version}"].append( - environment) + wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) if TRACE: print(f" NOT FOUND") - if (sdists or - (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only) - ): + if sdists or (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only): if TRACE: print(f" ==> Fetching sdist: {name}=={version}") @@ -292,8 +286,7 @@ def fetch_thirdparty( sdist_missing = sdists and "sdist" in dists and not name in wheel_only if sdist_missing: mia.append(f"SDist missing: {nv} {dists}") - wheels_missing = wheels and any( - d for d in dists if d != "sdist") and not name in sdist_only + wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") @@ -303,8 +296,7 @@ def fetch_thirdparty( raise Exception(mia) print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") - utils_thirdparty.fetch_abouts_and_licenses( - dest_dir=dest_dir, use_cached_index=use_cached_index) + utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems diff --git a/etc/scripts/gen_pypi_simple.py b/etc/scripts/gen_pypi_simple.py index 214d90dc519..cfe68e67e59 100644 --- a/etc/scripts/gen_pypi_simple.py +++ b/etc/scripts/gen_pypi_simple.py @@ -69,7 +69,6 @@ def get_package_name_from_filename(filename): raise InvalidDistributionFilename(filename) elif filename.endswith(wheel_ext): - wheel_info = get_wheel_from_filename(filename) if not wheel_info: @@ -200,11 +199,10 @@ def build_pypi_index(directory, 
base_url="https://thirdparty.aboutcode.org/pypi" simple_html_index = [ "", "PyPI Simple Index", - '' '', + '', ] for pkg_file in directory.iterdir(): - pkg_filename = pkg_file.name if ( diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index c71543f6273..cd39cda3986 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -32,8 +32,7 @@ def can_do_api_calls(): if not DEJACODE_API_KEY and DEJACODE_API_URL: - print( - "DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") + print("DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") return False else: return True @@ -68,8 +67,7 @@ def get_package_data(distribution): return results[0] elif len_results > 1: - print( - f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") + print(f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") else: print("Could not find package:", distribution.download_url) @@ -150,12 +148,11 @@ def find_latest_dejacode_package(distribution): # there was no exact match, find the latest version # TODO: consider the closest version rather than the latest # or the version that has the best data - with_versions = [(packaging_version.parse(p["version"]), p) - for p in packages] + with_versions = [(packaging_version.parse(p["version"]), p) for p in packages] with_versions = sorted(with_versions) latest_version, latest_package_version = sorted(with_versions)[-1] print( - f"Found DejaCode latest version: {latest_version} " f"for dist: {distribution.package_url}", + f"Found DejaCode latest version: {latest_version} for dist: {distribution.package_url}", ) return latest_package_version @@ -181,7 +178,7 @@ def create_dejacode_package(distribution): } fields_to_carry_over = [ - "download_url" "type", + "download_urltype", "namespace", "name", "version", @@ -209,5 +206,5 @@ def create_dejacode_package(distribution): if response.status_code != 201: raise Exception(f"Error, cannot create package for: {distribution}") - print(f'New Package created at: {new_package_data["absolute_url"]}') + print(f"New Package created at: {new_package_data['absolute_url']}") return new_package_data diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index a9ac2235bbc..167bc9f54b0 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -106,8 +106,7 @@ def lock_dev_requirements( all_req_nvs = get_required_name_versions(all_req_lines) dev_only_req_nvs = {n: v for n, v in all_req_nvs if n not in main_names} - new_reqs = "\n".join( - f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) + new_reqs = "\n".join(f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) with open(dev_requirements_file, "w") as fo: fo.write(new_reqs) @@ -118,12 +117,10 @@ def get_installed_reqs(site_packages_dir): as a text. 
""" if not os.path.exists(site_packages_dir): - raise Exception( - f"site_packages directory: {site_packages_dir!r} does not exists") + raise Exception(f"site_packages directory: {site_packages_dir!r} does not exists") # Also include these packages in the output with --all: wheel, distribute, # setuptools, pip - args = ["pip", "freeze", "--exclude-editable", - "--all", "--path", site_packages_dir] + args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir] return subprocess.check_output(args, encoding="utf-8") # noqa: S603 diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 6d5ffdce529..4ea1babbd4f 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -243,11 +243,9 @@ def download_wheel(name, version, environment, dest_dir=THIRDPARTY_DIR, repos=tu package = repo.get_package_version(name=name, version=version) if not package: if TRACE_DEEP: - print( - f" download_wheel: No package in {repo.index_url} for {name}=={version}") + print(f" download_wheel: No package in {repo.index_url} for {name}=={version}") continue - supported_wheels = list( - package.get_supported_wheels(environment=environment)) + supported_wheels = list(package.get_supported_wheels(environment=environment)) if not supported_wheels: if TRACE_DEEP: print( @@ -291,8 +289,7 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): if not package: if TRACE_DEEP: - print( - f" download_sdist: No package in {repo.index_url} for {name}=={version}") + print(f" download_sdist: No package in {repo.index_url} for {name}=={version}") continue sdist = package.sdist if not sdist: @@ -301,8 +298,7 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): continue if TRACE_DEEP: - print( - f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") + print(f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") fetched_sdist_filename = package.sdist.download(dest_dir=dest_dir) if fetched_sdist_filename: @@ -357,7 +353,6 @@ def sorted(cls, namevers): @attr.attributes class Distribution(NameVer): - # field names that can be updated from another Distribution or mapping updatable_fields = [ "license_expression", @@ -535,8 +530,7 @@ def get_best_download_url(self, repos=tuple()): repos = DEFAULT_PYPI_REPOS for repo in repos: - package = repo.get_package_version( - name=self.name, version=self.version) + package = repo.get_package_version(name=self.name, version=self.version) if not package: if TRACE: print( @@ -776,8 +770,7 @@ def load_remote_about_data(self): if notice_text: about_data["notice_text"] = notice_text except RemoteNotFetchedException: - print( - f"Failed to fetch NOTICE file: {self.notice_download_url}") + print(f"Failed to fetch NOTICE file: {self.notice_download_url}") return self.load_about_data(about_data) def get_checksums(self, dest_dir=THIRDPARTY_DIR): @@ -826,8 +819,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): Fetch license files if missing in `dest_dir`. Return True if license files were fetched. 
""" - urls = LinksRepository.from_url( - use_cached_index=use_cached_index).links + urls = LinksRepository.from_url(use_cached_index=use_cached_index).links errors = [] extra_lic_names = [lic.get("file") for lic in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] @@ -840,8 +832,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): try: # try remotely first - lic_url = get_license_link_for_filename( - filename=filename, urls=urls) + lic_url = get_license_link_for_filename(filename=filename, urls=urls) fetch_and_save( path_or_url=lic_url, @@ -919,8 +910,7 @@ def load_pkginfo_data(self, dest_dir=THIRDPARTY_DIR): c for c in classifiers if c.startswith("License") ] license_expression = get_license_expression(declared_license) - other_classifiers = [ - c for c in classifiers if not c.startswith("License")] + other_classifiers = [c for c in classifiers if not c.startswith("License")] holder = raw_data["Author"] holder_contact = raw_data["Author-email"] @@ -962,8 +952,7 @@ def update(self, data, overwrite=False, keep_extra=True): package_url = data.get("package_url") if package_url: purl_from_data = packageurl.PackageURL.from_string(package_url) - purl_from_self = packageurl.PackageURL.from_string( - self.package_url) + purl_from_self = packageurl.PackageURL.from_string(self.package_url) if purl_from_data != purl_from_self: print( f"Invalid dist update attempt, no same same purl with dist: " @@ -1013,8 +1002,7 @@ def get_license_link_for_filename(filename, urls): if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: - raise Exception( - f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) + raise Exception(f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) return path_or_url[0] @@ -1102,7 +1090,6 @@ def get_sdist_name_ver_ext(filename): @attr.attributes class Sdist(Distribution): - extension = attr.ib( repr=False, type=str, @@ -1407,8 +1394,7 @@ def packages_from_dir(cls, directory): """ base = os.path.abspath(directory) - paths = [os.path.join(base, f) - for f in os.listdir(base) if f.endswith(EXTENSIONS)] + paths = [os.path.join(base, f) for f in os.listdir(base) if f.endswith(EXTENSIONS)] if TRACE_ULTRA_DEEP: print("packages_from_dir: paths:", paths) @@ -1469,8 +1455,7 @@ def dists_from_paths_or_urls(cls, paths_or_urls): dists = [] if TRACE_ULTRA_DEEP: print(" ###paths_or_urls:", paths_or_urls) - installable = [f for f in paths_or_urls if f.endswith( - EXTENSIONS_INSTALLABLE)] + installable = [f for f in paths_or_urls if f.endswith(EXTENSIONS_INSTALLABLE)] for path_or_url in installable: try: dist = Distribution.from_path_or_url(path_or_url) @@ -1536,8 +1521,7 @@ class Environment: implementation = attr.ib( type=str, default="cp", - metadata=dict( - help="Python implementation supported by this environment."), + metadata=dict(help="Python implementation supported by this environment."), repr=False, ) @@ -1551,8 +1535,7 @@ class Environment: platforms = attr.ib( type=list, default=attr.Factory(list), - metadata=dict( - help="List of platform tags supported by this environment."), + metadata=dict(help="List of platform tags supported by this environment."), repr=False, ) @@ -1637,8 +1620,7 @@ class PypiSimpleRepository: fetched_package_normalized_names = attr.ib( type=set, default=attr.Factory(set), - metadata=dict( - help="A set of already fetched package normalized names."), + metadata=dict(help="A set of already fetched package normalized 
names."), ) use_cached_index = attr.ib( @@ -1671,12 +1653,10 @@ def _get_package_versions_map(self, name): self.packages[normalized_name] = versions except RemoteNotFetchedException as e: if TRACE: - print( - f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") + print(f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") if not versions and TRACE: - print( - f"WARNING: package {name} not found in repo: {self.index_url}") + print(f"WARNING: package {name} not found in repo: {self.index_url}") return versions @@ -1861,8 +1841,7 @@ def get(self, path_or_url, as_text=True, force=False): if force or not os.path.exists(cached): if TRACE_DEEP: print(f" FILE CACHE MISS: {path_or_url}") - content = get_file_content( - path_or_url=path_or_url, as_text=as_text) + content = get_file_content(path_or_url=path_or_url, as_text=as_text) wmode = "w" if as_text else "wb" with open(cached, wmode) as fo: fo.write(content) @@ -1884,8 +1863,7 @@ def get_file_content(path_or_url, as_text=True): if path_or_url.startswith("https://"): if TRACE_DEEP: print(f"Fetching: {path_or_url}") - _headers, content = get_remote_file_content( - url=path_or_url, as_text=as_text) + _headers, content = get_remote_file_content(url=path_or_url, as_text=as_text) return content elif path_or_url.startswith("file://") or ( @@ -1936,7 +1914,7 @@ def get_remote_file_content( # several redirects and that we can ignore content there. A HEAD request may # not get us this last header print(f" DOWNLOADING: {url}") - with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 + with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 status = response.status_code if status != requests.codes.ok: # NOQA if status == 429 and _delay < 20: @@ -1951,8 +1929,7 @@ def get_remote_file_content( ) else: - raise RemoteNotFetchedException( - f"Failed HTTP request from {url} with {status}") + raise RemoteNotFetchedException(f"Failed HTTP request from {url} with {status}") if headers_only: return response.headers, None @@ -2043,8 +2020,7 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # lets try to get from another dist of the same local package @@ -2056,8 +2032,7 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get another version of the same package that is not our version @@ -2068,8 +2043,7 @@ def get_other_dists(_package, _dist): ] other_local_version = other_local_packages and other_local_packages[-1] if other_local_version: - latest_local_dists = list( - other_local_version.get_distributions()) + latest_local_dists = list(other_local_version.get_distributions()) for latest_local_dist in latest_local_dists: latest_local_dist.load_about_data(dest_dir=dest_dir) if not latest_local_dist.has_key_metadata(): @@ -2095,8 +2069,7 @@ def get_other_dists(_package, _dist): # if 
has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get a latest version of the same package that is not our version @@ -2137,8 +2110,7 @@ def get_other_dists(_package, _dist): # if local_dist.has_key_metadata() or not local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir) - lic_errs = local_dist.fetch_license_files( - dest_dir, use_cached_index=use_cached_index) + lic_errs = local_dist.fetch_license_files(dest_dir, use_cached_index=use_cached_index) if not local_dist.has_key_metadata(): print(f"Unable to add essential ABOUT data for: {local_dist}") @@ -2161,10 +2133,9 @@ def call(args, verbose=TRACE): """ if TRACE_DEEP: print("Calling:", " ".join(args)) - with subprocess.Popen( # noqa: S603 + with subprocess.Popen( # noqa: S603 args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: - stdouts = [] while True: line = process.stdout.readline() @@ -2260,7 +2231,7 @@ def download_wheels_with_pip( def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2286,8 +2257,7 @@ def find_problems( for dist in package.get_distributions(): dist.load_about_data(dest_dir=dest_dir) - abpth = os.path.abspath(os.path.join( - dest_dir, dist.about_filename)) + abpth = os.path.abspath(os.path.join(dest_dir, dist.about_filename)) if not dist.has_key_metadata(): print(f" Missing key ABOUT data in file://{abpth}") if "classifiers" in dist.extra_data: From d05665ad44a50b71f66b974ad24c81f7443e8180 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:02:19 +0100 Subject: [PATCH 09/76] Apply cosmetic refactorings Signed-off-by: Philippe Ombredanne --- docs/source/conf.py | 3 ++- etc/scripts/check_thirdparty.py | 4 +--- etc/scripts/fetch_thirdparty.py | 17 ++++++++--------- etc/scripts/gen_pypi_simple.py | 15 +++++++-------- etc/scripts/gen_requirements.py | 4 ++-- etc/scripts/gen_requirements_dev.py | 4 ++-- .../test_utils_pip_compatibility_tags.py | 9 +++++---- etc/scripts/utils_dejacode.py | 9 +++++---- etc/scripts/utils_pip_compatibility_tags.py | 8 +++++--- etc/scripts/utils_requirements.py | 3 +-- etc/scripts/utils_thirdparty.py | 3 ++- 11 files changed, 40 insertions(+), 39 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 8c88fa2c999..8aad82949c4 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -94,7 +94,8 @@ html_show_sphinx = True # Define CSS and HTML abbreviations used in .rst files. These are examples. -# .. role:: is used to refer to styles defined in _static/theme_overrides.css and is used like this: :red:`text` +# .. role:: is used to refer to styles defined in _static/theme_overrides.css +# and is used like this: :red:`text` rst_prolog = """ .. 
|psf| replace:: Python Software Foundation diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 1aa4e28aca1..bb8347a5a0e 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -41,8 +40,7 @@ def check_thirdparty_dir( """ Check a thirdparty directory for problems and print these on screen. """ - # check for problems - print(f"==> CHECK FOR PROBLEMS") + print("==> CHECK FOR PROBLEMS") utils_thirdparty.find_problems( dest_dir=dest, report_missing_sources=sdists, diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index c2246837ee3..76a19a60503 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -10,7 +9,6 @@ # import itertools -import os import sys from collections import defaultdict @@ -109,7 +107,8 @@ @click.option( "--use-cached-index", is_flag=True, - help="Use on disk cached PyPI indexes list of packages and versions and do not refetch if present.", + help="Use on disk cached PyPI indexes list of packages and versions and " + "do not refetch if present.", ) @click.option( "--sdist-only", @@ -261,7 +260,7 @@ def fetch_thirdparty( if not fetched: wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) if TRACE: - print(f" NOT FOUND") + print(" NOT FOUND") if sdists or (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only): if TRACE: @@ -276,17 +275,17 @@ def fetch_thirdparty( if not fetched: wheels_or_sdist_not_found[f"{name}=={version}"].append("sdist") if TRACE: - print(f" NOT FOUND") + print(" NOT FOUND") mia = [] for nv, dists in wheels_or_sdist_not_found.items(): name, _, version = nv.partition("==") if name in no_dist: continue - sdist_missing = sdists and "sdist" in dists and not name in wheel_only + sdist_missing = sdists and "sdist" in dists and name not in wheel_only if sdist_missing: mia.append(f"SDist missing: {nv} {dists}") - wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only + wheels_missing = wheels and any(d for d in dists if d != "sdist") and name not in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") @@ -295,12 +294,12 @@ def fetch_thirdparty( print(m) raise Exception(mia) - print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") + print("==> FETCHING OR CREATING ABOUT AND LICENSE FILES") utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems - print(f"==> CHECK FOR PROBLEMS") + print("==> CHECK FOR PROBLEMS") utils_thirdparty.find_problems( dest_dir=dest_dir, report_missing_sources=sdists, diff --git a/etc/scripts/gen_pypi_simple.py b/etc/scripts/gen_pypi_simple.py index cfe68e67e59..89d0626527c 100644 --- a/etc/scripts/gen_pypi_simple.py +++ b/etc/scripts/gen_pypi_simple.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: BSD-2-Clause-Views AND MIT # Copyright (c) 2010 David Wolever . All rights reserved. 
@@ -132,7 +131,7 @@ def build_links_package_index(packages_by_package_name, base_url): Return an HTML document as string which is a links index of all packages """ document = [] - header = f""" + header = """ Links for all packages @@ -177,13 +176,13 @@ def simple_index_entry(self, base_url): def build_pypi_index(directory, base_url="https://thirdparty.aboutcode.org/pypi"): """ - Using a ``directory`` directory of wheels and sdists, create the a PyPI - simple directory index at ``directory``/simple/ populated with the proper - PyPI simple index directory structure crafted using symlinks. + Create the a PyPI simple directory index using a ``directory`` directory of wheels and sdists in + the direvctory at ``directory``/simple/ populated with the proper PyPI simple index directory + structure crafted using symlinks. - WARNING: The ``directory``/simple/ directory is removed if it exists. - NOTE: in addition to the a PyPI simple index.html there is also a links.html - index file generated which is suitable to use with pip's --find-links + WARNING: The ``directory``/simple/ directory is removed if it exists. NOTE: in addition to the a + PyPI simple index.html there is also a links.html index file generated which is suitable to use + with pip's --find-links """ directory = Path(directory) diff --git a/etc/scripts/gen_requirements.py b/etc/scripts/gen_requirements.py index 2b65ae807ef..1b87944239e 100644 --- a/etc/scripts/gen_requirements.py +++ b/etc/scripts/gen_requirements.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -34,7 +33,8 @@ def gen_requirements(): type=pathlib.Path, required=True, metavar="DIR", - help="Path to the 'site-packages' directory where wheels are installed such as lib/python3.6/site-packages", + help="Path to the 'site-packages' directory where wheels are installed " + "such as lib/python3.12/site-packages", ) parser.add_argument( "-r", diff --git a/etc/scripts/gen_requirements_dev.py b/etc/scripts/gen_requirements_dev.py index 5db1c48ed73..85482056598 100644 --- a/etc/scripts/gen_requirements_dev.py +++ b/etc/scripts/gen_requirements_dev.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -36,7 +35,8 @@ def gen_dev_requirements(): type=pathlib.Path, required=True, metavar="DIR", - help='Path to the "site-packages" directory where wheels are installed such as lib/python3.6/site-packages', + help="Path to the 'site-packages' directory where wheels are installed " + "such as lib/python3.12/site-packages", ) parser.add_argument( "-d", diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py index a33b8b387ae..de4b7066a28 100644 --- a/etc/scripts/test_utils_pip_compatibility_tags.py +++ b/etc/scripts/test_utils_pip_compatibility_tags.py @@ -1,4 +1,5 @@ -"""Generate and work with PEP 425 Compatibility Tags. +""" +Generate and work with PEP 425 Compatibility Tags. 
copied from pip-20.3.1 pip/tests/unit/test_utils_compatibility_tags.py download_url: https://raw.githubusercontent.com/pypa/pip/20.3.1/tests/unit/test_utils_compatibility_tags.py @@ -50,7 +51,7 @@ def test_version_info_to_nodot(version_info, expected): assert actual == expected -class Testcompatibility_tags(object): +class Testcompatibility_tags: def mock_get_config_var(self, **kwd): """ Patch sysconfig.get_config_var for arbitrary keys. @@ -81,7 +82,7 @@ def test_no_hyphen_tag(self): assert "-" not in tag.platform -class TestManylinux2010Tags(object): +class TestManylinux2010Tags: @pytest.mark.parametrize( "manylinux2010,manylinux1", [ @@ -104,7 +105,7 @@ def test_manylinux2010_implies_manylinux1(self, manylinux2010, manylinux1): assert arches[:2] == [manylinux2010, manylinux1] -class TestManylinux2014Tags(object): +class TestManylinux2014Tags: @pytest.mark.parametrize( "manylinuxA,manylinuxB", [ diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index cd39cda3986..b6bff5186c5 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -25,7 +24,7 @@ DEJACODE_API_URL_PACKAGES = f"{DEJACODE_API_URL}packages/" DEJACODE_API_HEADERS = { - "Authorization": "Token {}".format(DEJACODE_API_KEY), + "Authorization": f"Token {DEJACODE_API_KEY}", "Accept": "application/json; indent=4", } @@ -50,6 +49,7 @@ def fetch_dejacode_packages(params): DEJACODE_API_URL_PACKAGES, params=params, headers=DEJACODE_API_HEADERS, + timeout=10, ) return response.json()["results"] @@ -93,7 +93,7 @@ def update_with_dejacode_about_data(distribution): if package_data: package_api_url = package_data["api_url"] about_url = f"{package_api_url}about" - response = requests.get(about_url, headers=DEJACODE_API_HEADERS) + response = requests.get(about_url, headers=DEJACODE_API_HEADERS, timeout=10) # note that this is YAML-formatted about_text = response.json()["about_data"] about_data = saneyaml.load(about_text) @@ -113,7 +113,7 @@ def fetch_and_save_about_files(distribution, dest_dir="thirdparty"): if package_data: package_api_url = package_data["api_url"] about_url = f"{package_api_url}about_files" - response = requests.get(about_url, headers=DEJACODE_API_HEADERS) + response = requests.get(about_url, headers=DEJACODE_API_HEADERS, timeout=10) about_zip = response.content with io.BytesIO(about_zip) as zf: with zipfile.ZipFile(zf) as zi: @@ -201,6 +201,7 @@ def create_dejacode_package(distribution): DEJACODE_API_URL_PACKAGES, data=new_package_payload, headers=DEJACODE_API_HEADERS, + timeout=10, ) new_package_data = response.json() if response.status_code != 201: diff --git a/etc/scripts/utils_pip_compatibility_tags.py b/etc/scripts/utils_pip_compatibility_tags.py index de0ac95d1df..dd954bca74b 100644 --- a/etc/scripts/utils_pip_compatibility_tags.py +++ b/etc/scripts/utils_pip_compatibility_tags.py @@ -1,4 +1,5 @@ -"""Generate and work with PEP 425 Compatibility Tags. +""" +Generate and work with PEP 425 Compatibility Tags. 
copied from pip-20.3.1 pip/_internal/utils/compatibility_tags.py download_url: https://github.com/pypa/pip/blob/20.3.1/src/pip/_internal/utils/compatibility_tags.py @@ -130,7 +131,7 @@ def _get_custom_interpreter(implementation=None, version=None): implementation = interpreter_name() if version is None: version = interpreter_version() - return "{}{}".format(implementation, version) + return f"{implementation}{version}" def get_supported( @@ -140,7 +141,8 @@ def get_supported( abis=None, # type: Optional[List[str]] ): # type: (...) -> List[Tag] - """Return a list of supported tags for each version specified in + """ + Return a list of supported tags for each version specified in `versions`. :param version: a string version, of the form "33" or "32", diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index 167bc9f54b0..b9b2c0e7701 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -40,7 +39,7 @@ def get_required_name_versions(requirement_lines, with_unpinned=False): req_line = req_line.strip() if not req_line or req_line.startswith("#"): continue - if req_line.startswith("-") or (not with_unpinned and not "==" in req_line): + if req_line.startswith("-") or (not with_unpinned and "==" not in req_line): print(f"Requirement line is not supported: ignored: {req_line}") continue yield get_required_name_version(requirement=req_line, with_unpinned=with_unpinned) diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 4ea1babbd4f..aafc1d69f06 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -91,7 +91,8 @@ - parse requirement file - create a TODO queue of requirements to process -- done: create an empty map of processed binary requirements as {package name: (list of versions/tags} +- done: create an empty map of processed binary requirements as + {package name: (list of versions/tags} - while we have package reqs in TODO queue, process one requirement: From 63bcbf507e8a25f22853d56605c107e47c3673cc Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:05:23 +0100 Subject: [PATCH 10/76] Reformat test code Signed-off-by: Philippe Ombredanne --- .gitignore | 1 + pyproject.toml | 19 +++++++++++-------- tests/test_skeleton_codestyle.py | 25 ++++++++++++++++--------- 3 files changed, 28 insertions(+), 17 deletions(-) diff --git a/.gitignore b/.gitignore index 2d48196f10f..8a93c94d733 100644 --- a/.gitignore +++ b/.gitignore @@ -72,3 +72,4 @@ tcl # Ignore Jupyter Notebook related temp files .ipynb_checkpoints/ +/.ruff_cache/ diff --git a/pyproject.toml b/pyproject.toml index ba55770f20a..a872ab3a50c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,16 +67,17 @@ include = [ [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ select = [ - "E", # pycodestyle - "W", # pycodestyle warnings - "D", # pydocstyle - "F", # Pyflakes - "UP", # pyupgrade - "S", # flake8-bandit +# "E", # pycodestyle +# "W", # pycodestyle warnings +# "D", # pydocstyle +# "F", # Pyflakes +# "UP", # pyupgrade +# "S", # flake8-bandit "I", # isort - "C9", # McCabe complexity +# "C9", # McCabe complexity ] -ignore = ["D1", "D200", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415"] + [tool.ruff.lint.isort] force-single-line = true @@ -100,3 +101,5 @@ 
max-complexity = 10 [tool.ruff.lint.per-file-ignores] # Place paths of files to be ignored by ruff here +"tests/*" = ["S101"] +"test_*.py" = ["S101"] diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py index b4ce8c167d8..8cd85c94616 100644 --- a/tests/test_skeleton_codestyle.py +++ b/tests/test_skeleton_codestyle.py @@ -7,30 +7,37 @@ # See https://aboutcode.org for more information about nexB OSS projects. # +import configparser import subprocess import unittest -import configparser - class BaseTests(unittest.TestCase): def test_skeleton_codestyle(self): - """ - This test shouldn't run in proliferated repositories. - """ + # This test shouldn't run in proliferated repositories. + + # TODO: update with switch to pyproject.toml setup_cfg = configparser.ConfigParser() setup_cfg.read("setup.cfg") if setup_cfg["metadata"]["name"] != "skeleton": return - args = "venv/bin/black --check -l 100 setup.py etc tests" + commands = [ + ["venv/bin/ruff", "--check"], + ["venv/bin/ruff", "format", "--check"], + ] + command = None try: - subprocess.check_output(args.split()) + for command in commands: + subprocess.check_output(command) # noqa: S603 except subprocess.CalledProcessError as e: print("===========================================================") print(e.output) print("===========================================================") raise Exception( - "Black style check failed; please format the code using:\n" - " python -m black -l 100 setup.py etc tests", + f"Code style and linting command check failed: {' '.join(command)!r}.\n" + "You can check and format the code using:\n" + " make valid\n", + "OR:\n ruff format\n", + " ruff check --fix\n", e.output, ) from e From 9d1393a85303bf8cf92c9a25aa5cc50bdfd080d1 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:08:25 +0100 Subject: [PATCH 11/76] Format code Signed-off-by: Philippe Ombredanne --- etc/scripts/check_thirdparty.py | 1 + etc/scripts/test_utils_pip_compatibility_tags.py | 1 + tests/test_skeleton_codestyle.py | 1 + 3 files changed, 3 insertions(+) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index bb8347a5a0e..65ae595edd0 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -11,6 +11,7 @@ import utils_thirdparty + @click.command() @click.option( "-d", diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py index de4b7066a28..0e9c360ae7a 100644 --- a/etc/scripts/test_utils_pip_compatibility_tags.py +++ b/etc/scripts/test_utils_pip_compatibility_tags.py @@ -33,6 +33,7 @@ import utils_pip_compatibility_tags + @pytest.mark.parametrize( "version_info, expected", [ diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py index 8cd85c94616..7135ac0d3c9 100644 --- a/tests/test_skeleton_codestyle.py +++ b/tests/test_skeleton_codestyle.py @@ -11,6 +11,7 @@ import subprocess import unittest + class BaseTests(unittest.TestCase): def test_skeleton_codestyle(self): # This test shouldn't run in proliferated repositories. 
From f10b783b6b6fe33032a7862352ed532294efdf14 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:10:45 +0100 Subject: [PATCH 12/76] Refine ruff configuration Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a872ab3a50c..0f8bd587122 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,11 +72,11 @@ select = [ # "D", # pydocstyle # "F", # Pyflakes # "UP", # pyupgrade -# "S", # flake8-bandit + "S", # flake8-bandit "I", # isort # "C9", # McCabe complexity ] -ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415", "I001"] [tool.ruff.lint.isort] From 1d6c8f3bb8755aa7c9d2804240c01b0161417328 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:54:01 +0100 Subject: [PATCH 13/76] Format doc Signed-off-by: Philippe Ombredanne --- AUTHORS.rst | 2 +- README.rst | 16 ++++++++++------ 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/AUTHORS.rst b/AUTHORS.rst index 51a19cc8af5..16e204644a7 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -1,3 +1,3 @@ The following organizations or individuals have contributed to this repo: -- +- diff --git a/README.rst b/README.rst index 6cbd8395fcf..3d6cb4e1ae2 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,6 @@ A Simple Python Project Skeleton ================================ + This repo attempts to standardize the structure of the Python-based project's repositories using modern Python packaging and configuration techniques. Using this `blog post`_ as inspiration, this repository serves as the base for @@ -47,16 +48,19 @@ Release Notes - 2022-03-04: - Synchronize configure and configure.bat scripts for sanity - Update CI operating system support with latest Azure OS images - - Streamline utility scripts in etc/scripts/ to create, fetch and manage third-party dependencies - There are now fewer scripts. See etc/scripts/README.rst for details + - Streamline utility scripts in etc/scripts/ to create, fetch and manage third-party + dependencies. There are now fewer scripts. See etc/scripts/README.rst for details - 2021-09-03: - - ``configure`` now requires pinned dependencies via the use of ``requirements.txt`` and ``requirements-dev.txt`` + - ``configure`` now requires pinned dependencies via the use of ``requirements.txt`` + and ``requirements-dev.txt`` - ``configure`` can now accept multiple options at once - Add utility scripts from scancode-toolkit/etc/release/ for use in generating project files - Rename virtual environment directory from ``tmp`` to ``venv`` - - Update README.rst with instructions for generating ``requirements.txt`` and ``requirements-dev.txt``, - as well as collecting dependencies as wheels and generating ABOUT files for them. + - Update README.rst with instructions for generating ``requirements.txt`` + and ``requirements-dev.txt``, as well as collecting dependencies as wheels and generating + ABOUT files for them. - 2021-05-11: - - Adopt new configure scripts from ScanCode TK that allows correct configuration of which Python version is used. + - Adopt new configure scripts from ScanCode TK that allows correct configuration of which + Python version is used. 
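With the isort ("I") rules selected above and ``force-single-line = true`` set in the ``[tool.ruff.lint.isort]`` table, ``ruff check --fix`` splits grouped imports into one import per line and orders them by section. The short illustration below shows the resulting layout; the module names are those of the etc/scripts tools, so the imports only resolve from that directory after ``./configure --dev``.

    # Illustration of the import layout the isort settings enforce: a grouped
    # import such as "from packvers.tags import (interpreter_name, mac_platforms)"
    # is rewritten as one import per line, sorted into sections.
    import os  # standard-library section
    import subprocess

    import click  # known-third-party in the isort settings

    import utils_thirdparty  # local scripts module, falling into the default first-party section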
From 0213c1ea9a15ab94a854b8d7af27a1a036e393f4 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:54:35 +0100 Subject: [PATCH 14/76] Run doc8 on all rst files Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 930e80101c6..debc404ef8e 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ + @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ *.rst valid: @echo "-> Run Ruff format" From c112f2a9c20d58e986424f5f32bd259814fc8e3f Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:55:20 +0100 Subject: [PATCH 15/76] Enable doc style checks Signed-off-by: Philippe Ombredanne --- pyproject.toml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0f8bd587122..51761ff8ad6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,8 @@ include = [ "etc/**/*.py", "test/**/*.py", "doc/**/*", - "*.py" + "*.py", + "." ] [tool.ruff.lint] @@ -69,10 +70,10 @@ include = [ select = [ # "E", # pycodestyle # "W", # pycodestyle warnings -# "D", # pydocstyle + "D", # pydocstyle # "F", # Pyflakes # "UP", # pyupgrade - "S", # flake8-bandit +# "S", # flake8-bandit "I", # isort # "C9", # McCabe complexity ] From 944b6c5371bea2ce0763fd26888de6436116d185 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 00:34:26 +0100 Subject: [PATCH 16/76] Add support for new OS versions Signed-off-by: Philippe Ombredanne --- README.rst | 50 +++++++++++++++++++++++++++++++++++++++++---- azure-pipelines.yml | 36 ++++++++++++++++++++++++++++++-- 2 files changed, 80 insertions(+), 6 deletions(-) diff --git a/README.rst b/README.rst index 6cbd8395fcf..f848b4b3fc2 100644 --- a/README.rst +++ b/README.rst @@ -1,9 +1,11 @@ A Simple Python Project Skeleton ================================ -This repo attempts to standardize the structure of the Python-based project's -repositories using modern Python packaging and configuration techniques. -Using this `blog post`_ as inspiration, this repository serves as the base for -all new Python projects and is mergeable in existing repositories as well. + +This repo attempts to standardize the structure of the Python-based project's repositories using +modern Python packaging and configuration techniques that can then be applied to many repos. + +Using this `blog post`_ as inspiration, this repository serves as the base for all new Python +projects and is mergeable in existing repositories as well. .. _blog post: https://blog.jaraco.com/a-project-skeleton-for-python-projects/ @@ -13,6 +15,7 @@ Usage A brand new project ------------------- + .. code-block:: bash git init my-new-repo @@ -26,6 +29,7 @@ From here, you can make the appropriate changes to the files for your specific p Update an existing project --------------------------- + .. code-block:: bash cd my-existing-project @@ -41,17 +45,54 @@ More usage instructions can be found in ``docs/skeleton-usage.rst``. Release Notes ============= +- 2025-03-29: + + - Add support for beta macOS-15 + - Add support for beta windows-2025 + +- 2025-02-14: + + - Drop support for Python 3.8, add support in CI for Python 3.13, use Python 3.12 as default + version. 
+ +- 2025-01-17: + + - Drop support for macOS-12, add support for macOS-14 + - Add support in CI for ubuntu-24.04 + - Add support in CI for Python 3.12 + +- 2024-08-20: + + - Update references of ownership from nexB to aboutcode-org + +- 2024-07-01: + + - Drop support for Python 3.8 + - Drop support for macOS-11, add support for macOS-14 + +- 2024-02-19: + + - Replace support in CI of default ubuntu-20.04 by ubuntu-22.04 + +- 2023-10-18: + + - Add dark mode support in documentation + - 2023-07-18: + - Add macOS-13 job in azure-pipelines.yml - 2022-03-04: + - Synchronize configure and configure.bat scripts for sanity - Update CI operating system support with latest Azure OS images - Streamline utility scripts in etc/scripts/ to create, fetch and manage third-party dependencies There are now fewer scripts. See etc/scripts/README.rst for details - 2021-09-03: + - ``configure`` now requires pinned dependencies via the use of ``requirements.txt`` and ``requirements-dev.txt`` + - ``configure`` can now accept multiple options at once - Add utility scripts from scancode-toolkit/etc/release/ for use in generating project files - Rename virtual environment directory from ``tmp`` to ``venv`` @@ -59,4 +100,5 @@ Release Notes as well as collecting dependencies as wheels and generating ABOUT files for them. - 2021-05-11: + - Adopt new configure scripts from ScanCode TK that allows correct configuration of which Python version is used. diff --git a/azure-pipelines.yml b/azure-pipelines.yml index a220f2bec0f..80ae45b1827 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -26,11 +26,27 @@ jobs: - template: etc/ci/azure-posix.yml parameters: job_name: macos13_cpython + image_name: macOS-13-xlarge + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + test_suites: + all: venv/bin/pytest -n 2 -vvs + + - template: etc/ci/azure-posix.yml + parameters: + job_name: macos13_cpython_arm64 image_name: macOS-13 python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs + - template: etc/ci/azure-posix.yml + parameters: + job_name: macos14_cpython + image_name: macOS-14-large + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + test_suites: + all: venv/bin/pytest -n 2 -vvs + - template: etc/ci/azure-posix.yml parameters: job_name: macos14_cpython_arm64 @@ -41,8 +57,16 @@ jobs: - template: etc/ci/azure-posix.yml parameters: - job_name: macos14_cpython - image_name: macOS-14-large + job_name: macos15_cpython + image_name: macOS-15 + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + test_suites: + all: venv/bin/pytest -n 2 -vvs + + - template: etc/ci/azure-posix.yml + parameters: + job_name: macos15_cpython_arm64 + image_name: macOS-15-large python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -62,3 +86,11 @@ jobs: python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pytest -n 2 -vvs + + - template: etc/ci/azure-win.yml + parameters: + job_name: win2025_cpython + image_name: windows-2025 + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + test_suites: + all: venv\Scripts\pytest -n 2 -vvs From 136af3912336616fbd2431a96230961517a2c356 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 12:45:32 +0200 Subject: [PATCH 17/76] Update scripts aboutcode references Signed-off-by: Philippe Ombredanne --- etc/scripts/update_skeleton.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git 
a/etc/scripts/update_skeleton.py b/etc/scripts/update_skeleton.py index 635898ba5af..5705fc43afc 100644 --- a/etc/scripts/update_skeleton.py +++ b/etc/scripts/update_skeleton.py @@ -1,11 +1,10 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) nexB Inc. and others. All rights reserved. +# Copyright (c) nexB Inc. AboutCode, and others. All rights reserved. # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # @@ -16,7 +15,7 @@ import click -NEXB_PUBLIC_REPO_NAMES=[ +ABOUTCODE_PUBLIC_REPO_NAMES=[ "aboutcode-toolkit", "ahocode", "bitcode", @@ -56,9 +55,9 @@ @click.command() @click.help_option("-h", "--help") -def update_skeleton_files(repo_names=NEXB_PUBLIC_REPO_NAMES): +def update_skeleton_files(repo_names=ABOUTCODE_PUBLIC_REPO_NAMES): """ - Update project files of nexB projects that use the skeleton + Update project files of AboutCode projects that use the skeleton This script will: - Clone the repo @@ -81,14 +80,14 @@ def update_skeleton_files(repo_names=NEXB_PUBLIC_REPO_NAMES): os.chdir(work_dir_path) # Clone repo - repo_git = f"git@github.com:nexB/{repo_name}.git" + repo_git = f"git@github.com:aboutcode-org/{repo_name}.git" subprocess.run(["git", "clone", repo_git]) # Go into cloned repo os.chdir(work_dir_path / repo_name) # Add skeleton as an origin - subprocess.run(["git", "remote", "add", "skeleton", "git@github.com:nexB/skeleton.git"]) + subprocess.run(["git", "remote", "add", "skeleton", "git@github.com:aboutcode-org/skeleton.git"]) # Fetch skeleton files subprocess.run(["git", "fetch", "skeleton"]) From da8eff0383611df60311b8bac599657450eaeb52 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 14:40:36 +0200 Subject: [PATCH 18/76] Do not format more test data Signed-off-by: Philippe Ombredanne --- pyproject.toml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 51761ff8ad6..7d807ebf054 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,10 +60,25 @@ include = [ "src/**/*.py", "etc/**/*.py", "test/**/*.py", + "tests/**/*.py", "doc/**/*", + "docs/**/*", "*.py", "." ] +# ignore test data and testfiles: they should never be linted nor formatted +exclude = [ +# main style + "**/tests/data/**/*", +# scancode-toolkit + "**/tests/*/data/**/*", +# dejacode, purldb + "**/tests/testfiles/**/*", +# vulnerablecode, fetchcode + "**/tests/*/test_data/**/*", + "**/tests/test_data/**/*", +] + [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ From 4f9e936d452acc3822df8d3f932cbd7071b31d72 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 14:58:36 +0200 Subject: [PATCH 19/76] Do not treat rst as Python Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 7d807ebf054..5e16b564dc3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,8 +61,8 @@ include = [ "etc/**/*.py", "test/**/*.py", "tests/**/*.py", - "doc/**/*", - "docs/**/*", + "doc/**/*.py", + "docs/**/*.py", "*.py", "." 
] From a2809fb28c60b54aec0c367285acacdea1cb03a8 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 16:41:57 +0200 Subject: [PATCH 20/76] Combine testing and docs extra for simplicity Signed-off-by: Philippe Ombredanne --- configure | 2 -- configure.bat | 4 ---- setup.cfg | 3 --- 3 files changed, 9 deletions(-) diff --git a/configure b/configure index 22d92885864..83fd2035c1c 100755 --- a/configure +++ b/configure @@ -30,7 +30,6 @@ CLI_ARGS=$1 # Requirement arguments passed to pip and used by default or with --dev. REQUIREMENTS="--editable . --constraint requirements.txt" DEV_REQUIREMENTS="--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" -DOCS_REQUIREMENTS="--editable .[docs] --constraint requirements.txt" # where we create a virtualenv VIRTUALENV_DIR=venv @@ -185,7 +184,6 @@ while getopts :-: optchar; do help ) cli_help;; clean ) find_python && clean;; dev ) CFG_REQUIREMENTS="$DEV_REQUIREMENTS";; - docs ) CFG_REQUIREMENTS="$DOCS_REQUIREMENTS";; esac;; esac done diff --git a/configure.bat b/configure.bat index 5b9a9d68528..18b37038145 100644 --- a/configure.bat +++ b/configure.bat @@ -28,7 +28,6 @@ @rem # Requirement arguments passed to pip and used by default or with --dev. set "REQUIREMENTS=--editable . --constraint requirements.txt" set "DEV_REQUIREMENTS=--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" -set "DOCS_REQUIREMENTS=--editable .[docs] --constraint requirements.txt" @rem # where we create a virtualenv set "VIRTUALENV_DIR=venv" @@ -76,9 +75,6 @@ if not "%1" == "" ( if "%1" EQU "--dev" ( set "CFG_REQUIREMENTS=%DEV_REQUIREMENTS%" ) - if "%1" EQU "--docs" ( - set "CFG_REQUIREMENTS=%DOCS_REQUIREMENTS%" - ) shift goto again ) diff --git a/setup.cfg b/setup.cfg index aaec643fc5c..ad8e0d8f903 100644 --- a/setup.cfg +++ b/setup.cfg @@ -55,8 +55,6 @@ testing = pycodestyle >= 2.8.0 twine ruff - -docs = Sphinx>=5.0.2 sphinx-rtd-theme>=1.0.0 sphinx-reredirects >= 0.1.2 @@ -64,4 +62,3 @@ docs = sphinx-autobuild sphinx-rtd-dark-mode>=1.3.0 sphinx-copybutton - From 43b96c28baaa1621d24b6f5791c6d915d2edc5f3 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 17:18:19 +0200 Subject: [PATCH 21/76] Refine checking of docs with doc8 Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- pyproject.toml | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index debc404ef8e..d21a2f95077 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ *.rst + @${ACTIVATE} doc8 docs/ *.rst valid: @echo "-> Run Ruff format" diff --git a/pyproject.toml b/pyproject.toml index 5e16b564dc3..bfb1d353bfa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -119,3 +119,10 @@ max-complexity = 10 # Place paths of files to be ignored by ruff here "tests/*" = ["S101"] "test_*.py" = ["S101"] + + +[tool.doc8] + +ignore-path = ["docs/build", "doc/build", "docs/_build", "doc/_build"] +max-line-length=100 +verbose=0 From b7194c80c9425087f1d05e430bd9d6a14fb9c3a0 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 18:41:00 +0200 Subject: [PATCH 22/76] Refine doc handling * remove CI scripts and use Makefile targets instead * ensure doc8 runs quiet * add new docs-check make target to run documentation and links checks * update oudated doc for docs contribution Signed-off-by: Philippe Ombredanne --- .github/workflows/docs-ci.yml | 12 
+++++------- Makefile | 10 +++++++--- docs/scripts/doc8_style_check.sh | 5 ----- docs/scripts/sphinx_build_link_check.sh | 5 ----- docs/source/conf.py | 2 +- docs/source/contribute/contrib_doc.rst | 8 ++++---- pyproject.toml | 2 -- 7 files changed, 17 insertions(+), 27 deletions(-) delete mode 100755 docs/scripts/doc8_style_check.sh delete mode 100644 docs/scripts/sphinx_build_link_check.sh diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml index 621de4b2555..10ba5faa6a6 100644 --- a/.github/workflows/docs-ci.yml +++ b/.github/workflows/docs-ci.yml @@ -21,14 +21,12 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install Dependencies - run: pip install -e .[docs] + run: ./configure --dev - - name: Check Sphinx Documentation build minimally - working-directory: ./docs - run: sphinx-build -E -W source build + - name: Check documentation and HTML for errors and dead links + run: make docs-check - - name: Check for documentation style errors - working-directory: ./docs - run: ./scripts/doc8_style_check.sh + - name: Check documentation for style errors + run: make doc8 diff --git a/Makefile b/Makefile index d21a2f95077..413399e5530 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 docs/ *.rst + @${ACTIVATE} doc8 --quiet docs/ *.rst valid: @echo "-> Run Ruff format" @@ -46,6 +46,10 @@ test: docs: rm -rf docs/_build/ - @${ACTIVATE} sphinx-build docs/ docs/_build/ + @${ACTIVATE} sphinx-build docs/source docs/_build/ -.PHONY: conf dev check valid clean test docs +docs-check: + @${ACTIVATE} sphinx-build -E -W -b html docs/source docs/_build/ + @${ACTIVATE} sphinx-build -E -W -b linkcheck docs/source docs/_build/ + +.PHONY: conf dev check valid clean test docs docs-check diff --git a/docs/scripts/doc8_style_check.sh b/docs/scripts/doc8_style_check.sh deleted file mode 100755 index 94163239416..00000000000 --- a/docs/scripts/doc8_style_check.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -# halt script on error -set -e -# Check for Style Code Violations -doc8 --max-line-length 100 source --ignore D000 --quiet \ No newline at end of file diff --git a/docs/scripts/sphinx_build_link_check.sh b/docs/scripts/sphinx_build_link_check.sh deleted file mode 100644 index c5426863197..00000000000 --- a/docs/scripts/sphinx_build_link_check.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -# halt script on error -set -e -# Build locally, and then check links -sphinx-build -E -W -b linkcheck source build \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py index 8aad82949c4..056ca6eafeb 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -18,7 +18,7 @@ # -- Project information ----------------------------------------------------- project = "nexb-skeleton" -copyright = "nexB Inc. and others." +copyright = "nexB Inc., AboutCode and others." author = "AboutCode.org authors and contributors" diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index 5640db264be..041b3583238 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -147,7 +147,7 @@ What is Checked? ^^^^^^^^^^^^^^^^ PyCQA is an Organization for code quality tools (and plugins) for the Python programming language. -Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. +Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. 
What is checked: @@ -169,11 +169,11 @@ What is checked: Interspinx ---------- -ScanCode toolkit documentation uses `Intersphinx `_ +ScanCode toolkit documentation uses `Intersphinx `_ to link to other Sphinx Documentations, to maintain links to other Aboutcode Projects. To link sections in the same documentation, standart reST labels are used. Refer -`Cross-Referencing `_ for more information. +`Cross-Referencing `_ for more information. For example:: @@ -230,7 +230,7 @@ Style Conventions for the Documentaion 1. Headings - (`Refer `_) + (`Refer `_) Normally, there are no heading levels assigned to certain characters as the structure is determined from the succession of headings. However, this convention is used in Python’s Style Guide for documenting which you may follow: diff --git a/pyproject.toml b/pyproject.toml index bfb1d353bfa..c9e67720432 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -122,7 +122,5 @@ max-complexity = 10 [tool.doc8] - ignore-path = ["docs/build", "doc/build", "docs/_build", "doc/_build"] max-line-length=100 -verbose=0 From a5bcdbdd71d1542a0e9ec9b190a2e3d573c53744 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 18:49:01 +0200 Subject: [PATCH 23/76] Add twine check to release publication Signed-off-by: Philippe Ombredanne --- .github/workflows/pypi-release.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index a66c9c80763..cf0579a7e5e 100644 --- a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -30,12 +30,15 @@ jobs: with: python-version: 3.12 - - name: Install pypa/build - run: python -m pip install build --user + - name: Install pypa/build and twine + run: python -m pip install --user build twine - name: Build a binary wheel and a source tarball run: python -m build --sdist --wheel --outdir dist/ + - name: Validate wheel and sdis for Pypi + run: python -m twine check dist/* + - name: Upload built archives uses: actions/upload-artifact@v4 with: From a6c25fb2a2fa35311d26621b9db400ca52bd376e Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 19:16:31 +0200 Subject: [PATCH 24/76] Refine doc contribution docs Signed-off-by: Philippe Ombredanne --- docs/source/contribute/contrib_doc.rst | 119 ++++++++----------------- 1 file changed, 38 insertions(+), 81 deletions(-) diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index 041b3583238..dee9296db80 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -8,109 +8,59 @@ Contributing to the Documentation Setup Local Build ----------------- -To get started, create or identify a working directory on your local machine. +To get started, check out and configure the repository for development:: -Open that directory and execute the following command in a terminal session:: + git clone https://github.com/aboutcode-org/.git - git clone https://github.com/aboutcode-org/skeleton.git + cd your-repo + ./configure --dev -That will create an ``/skeleton`` directory in your working directory. -Now you can install the dependencies in a virtualenv:: - - cd skeleton - ./configure --docs +(Or use "make dev") .. note:: - In case of windows, run ``configure --docs`` instead of this. - -Now, this will install the following prerequisites: - -- Sphinx -- sphinx_rtd_theme (the format theme used by ReadTheDocs) -- docs8 (style linter) + In case of windows, run ``configure --dev``. 
-These requirements are already present in setup.cfg and `./configure --docs` installs them. +This will install and configure all requirements foer development including for docs development. -Now you can build the HTML documents locally:: +Now you can build the HTML documentation locally:: source venv/bin/activate - cd docs - make html - -Assuming that your Sphinx installation was successful, Sphinx should build a local instance of the -documentation .html files:: - - open build/html/index.html - -.. note:: - - In case this command did not work, for example on Ubuntu 18.04 you may get a message like “Couldn’t - get a file descriptor referring to the console”, try: - - :: - - see build/html/index.html + make docs -You now have a local build of the AboutCode documents. +This will build a local instance of the ``docs/_build`` directory:: -.. _contrib_doc_share_improvements: + open docs/_build/index.html -Share Document Improvements ---------------------------- - -Ensure that you have the latest files:: - - git pull - git status -Before commiting changes run Continious Integration Scripts locally to run tests. Refer -:ref:`doc_ci` for instructions on the same. +To validate the documentation style and content, use:: -Follow standard git procedures to upload your new and modified files. The following commands are -examples:: - - git status - git add source/index.rst - git add source/how-to-scan.rst - git status - git commit -m "New how-to document that explains how to scan" - git status - git push - git status - -The Scancode-Toolkit webhook with ReadTheDocs should rebuild the documentation after your -Pull Request is Merged. + source venv/bin/activate + make doc8 + make docs-check -Refer the `Pro Git Book `_ available online for Git tutorials -covering more complex topics on Branching, Merging, Rebasing etc. .. _doc_ci: Continuous Integration ---------------------- -The documentations are checked on every new commit through Travis-CI, so that common errors are -avoided and documentation standards are enforced. Travis-CI presently checks for these 3 aspects -of the documentation : +The documentations are checked on every new commit, so that common errors are avoided and +documentation standards are enforced. We checks for these aspects of the documentation: 1. Successful Builds (By using ``sphinx-build``) -2. No Broken Links (By Using ``link-check``) -3. Linting Errors (By Using ``Doc8``) +2. No Broken Links (By Using ``linkcheck``) +3. Linting Errors (By Using ``doc8``) -So run these scripts at your local system before creating a Pull Request:: +You myst run these scripts locally before creating a pull request:: - cd docs - ./scripts/sphinx_build_link_check.sh - ./scripts/doc8_style_check.sh + make doc8 + make check-docs -If you don't have permission to run the scripts, run:: - - chmod u+x ./scripts/doc8_style_check.sh .. _doc_style_docs8: -Style Checks Using ``Doc8`` +Style Checks Using ``doc8`` --------------------------- How To Run Style Tests @@ -118,8 +68,7 @@ How To Run Style Tests In the project root, run the following commands:: - $ cd docs - $ ./scripts/doc8_style_check.sh + make doc8 A sample output is:: @@ -143,11 +92,13 @@ A sample output is:: Now fix the errors and run again till there isn't any style error in the documentation. + What is Checked? ^^^^^^^^^^^^^^^^ PyCQA is an Organization for code quality tools (and plugins) for the Python programming language. -Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. 
+Doc8 is a sub-project of the same Organization. Refer this +`README `_ for more details. What is checked: @@ -164,16 +115,19 @@ What is checked: - no carriage returns (use UNIX newlines) - D004 - no newline at end of file - D005 + .. _doc_interspinx: Interspinx ---------- -ScanCode toolkit documentation uses `Intersphinx `_ +AboutCode documentation uses +`Intersphinx `_ to link to other Sphinx Documentations, to maintain links to other Aboutcode Projects. To link sections in the same documentation, standart reST labels are used. Refer -`Cross-Referencing `_ for more information. +`Cross-Referencing `_ +for more information. For example:: @@ -223,6 +177,7 @@ Intersphinx, and you link to that label, it will create a link to the local labe For more information, refer this tutorial named `Using Intersphinx `_. + .. _doc_style_conv: Style Conventions for the Documentaion @@ -303,12 +258,14 @@ Style Conventions for the Documentaion ``rst_snippets/warning_snippets/`` and then included to eliminate redundancy, as these are frequently used in multiple files. + Converting from Markdown ------------------------ -If you want to convert a ``.md`` file to a ``.rst`` file, this `tool `_ -does it pretty well. You'd still have to clean up and check for errors as this contains a lot of -bugs. But this is definitely better than converting everything by yourself. +If you want to convert a ``.md`` file to a ``.rst`` file, this +`tool `_ does it pretty well. +You will still have to clean up and check for errors as this contains a lot of bugs. But this is +definitely better than converting everything by yourself. This will be helpful in converting GitHub wiki's (Markdown Files) to reStructuredtext files for Sphinx/ReadTheDocs hosting. From 68daae1e7e475a89568e353f64f29af13754ce9e Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Thu, 27 Mar 2025 14:54:31 -0700 Subject: [PATCH 25/76] Replace black and isort with ruff * Use ruff config and Make commands from scancode.io Signed-off-by: Jono Yang --- Makefile | 27 ++++++++++++--------------- pyproject.toml | 37 +++++++++++++++++++++++++++++++++++++ setup.cfg | 3 +-- 3 files changed, 50 insertions(+), 17 deletions(-) diff --git a/Makefile b/Makefile index 94451b33248..1738b20a89d 100644 --- a/Makefile +++ b/Makefile @@ -17,27 +17,24 @@ dev: @echo "-> Configure the development envt." ./configure --dev -isort: - @echo "-> Apply isort changes to ensure proper imports ordering" - ${VENV}/bin/isort --sl -l 100 src tests setup.py - -black: - @echo "-> Apply black code formatter" - ${VENV}/bin/black -l 100 src tests setup.py - doc8: @echo "-> Run doc8 validation" @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ -valid: isort black +valid: + @echo "-> Run Ruff format" + @${ACTIVATE} ruff format + @echo "-> Run Ruff linter" + @${ACTIVATE} ruff check --fix check: - @echo "-> Run pycodestyle (PEP8) validation" - @${ACTIVATE} pycodestyle --max-line-length=100 --exclude=.eggs,venv,lib,thirdparty,docs,migrations,settings.py,.cache . - @echo "-> Run isort imports ordering validation" - @${ACTIVATE} isort --sl --check-only -l 100 setup.py src tests . 
- @echo "-> Run black validation" - @${ACTIVATE} black --check --check -l 100 src tests setup.py + @echo "-> Run Ruff linter validation (pycodestyle, bandit, isort, and more)" + @${ACTIVATE} ruff check + @echo "-> Run Ruff format validation" + @${ACTIVATE} ruff format --check + @$(MAKE) doc8 + @echo "-> Run ABOUT files validation" + @${ACTIVATE} about check etc/ clean: @echo "-> Clean the Python env" diff --git a/pyproject.toml b/pyproject.toml index cde79074d70..01e60fc6583 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,3 +50,40 @@ addopts = [ "--strict-markers", "--doctest-modules" ] + +[tool.ruff] +line-length = 88 +extend-exclude = [] +target-version = "py310" + +[tool.ruff.lint] +# Rules: https://docs.astral.sh/ruff/rules/ +select = [ + "E", # pycodestyle + "W", # pycodestyle warnings + "D", # pydocstyle + "F", # Pyflakes + "UP", # pyupgrade + "S", # flake8-bandit + "I", # isort + "C9", # McCabe complexity +] +ignore = ["D1", "D203", "D205", "D212", "D400", "D415"] + +[tool.ruff.lint.isort] +force-single-line = true +sections = { django = ["django"] } +section-order = [ + "future", + "standard-library", + "django", + "third-party", + "first-party", + "local-folder", +] + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.ruff.lint.per-file-ignores] +# Place paths of files to be ignored by ruff here diff --git a/setup.cfg b/setup.cfg index ef7d369b42a..aaec643fc5c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -54,8 +54,7 @@ testing = aboutcode-toolkit >= 7.0.2 pycodestyle >= 2.8.0 twine - black - isort + ruff docs = Sphinx>=5.0.2 From 6a8c9ae144a1985b59fb69a0b2c55e32831714b8 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 00:46:06 +0100 Subject: [PATCH 26/76] Use org standard 100 line length Signed-off-by: Philippe Ombredanne --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 01e60fc6583..cea91bd1763 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,7 +52,7 @@ addopts = [ ] [tool.ruff] -line-length = 88 +line-length = 100 extend-exclude = [] target-version = "py310" From 2fd31d54afa47418c764de0f1a30d67c7059ed7b Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 08:40:28 +0100 Subject: [PATCH 27/76] Lint all common code directories Signed-off-by: Philippe Ombredanne --- pyproject.toml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index cea91bd1763..9e627366170 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,6 +55,14 @@ addopts = [ line-length = 100 extend-exclude = [] target-version = "py310" +include = [ + "pyproject.toml", + "src/**/*.py", + "etc/**/*.py", + "test/**/*.py", + "doc/**/*", + "*.py" +] [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ From eb5fc82ab1cab8a4742a2b9028d1436956960e81 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 09:07:47 +0100 Subject: [PATCH 28/76] Remove unused targets Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 1738b20a89d..930e80101c6 100644 --- a/Makefile +++ b/Makefile @@ -48,4 +48,4 @@ docs: rm -rf docs/_build/ @${ACTIVATE} sphinx-build docs/ docs/_build/ -.PHONY: conf dev check valid black isort clean test docs +.PHONY: conf dev check valid clean test docs From 529d51621c9e2af8e1ec2503044b8752c71c3ba7 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 11:03:05 +0100 Subject: [PATCH 29/76] Improve import sorting 
Signed-off-by: Philippe Ombredanne --- etc/scripts/check_thirdparty.py | 1 - etc/scripts/fetch_thirdparty.py | 2 +- etc/scripts/test_utils_pip_compatibility_tags.py | 3 +-- etc/scripts/utils_dejacode.py | 1 - etc/scripts/utils_pip_compatibility_tags.py | 14 ++++++-------- etc/scripts/utils_thirdparty.py | 3 +-- pyproject.toml | 7 ++++++- 7 files changed, 15 insertions(+), 16 deletions(-) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 2daded948fa..62dbb14f053 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -12,7 +12,6 @@ import utils_thirdparty - @click.command() @click.option( "-d", diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index 3f9ff527a1e..30d376c475c 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -16,8 +16,8 @@ import click -import utils_thirdparty import utils_requirements +import utils_thirdparty TRACE = False TRACE_DEEP = False diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py index 98187c56437..a33b8b387ae 100644 --- a/etc/scripts/test_utils_pip_compatibility_tags.py +++ b/etc/scripts/test_utils_pip_compatibility_tags.py @@ -25,14 +25,13 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ -from unittest.mock import patch import sysconfig +from unittest.mock import patch import pytest import utils_pip_compatibility_tags - @pytest.mark.parametrize( "version_info, expected", [ diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index 652252d48ed..c71543f6273 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -14,7 +14,6 @@ import requests import saneyaml - from packvers import version as packaging_version """ diff --git a/etc/scripts/utils_pip_compatibility_tags.py b/etc/scripts/utils_pip_compatibility_tags.py index af42a0cdd25..de0ac95d1df 100644 --- a/etc/scripts/utils_pip_compatibility_tags.py +++ b/etc/scripts/utils_pip_compatibility_tags.py @@ -27,14 +27,12 @@ import re -from packvers.tags import ( - compatible_tags, - cpython_tags, - generic_tags, - interpreter_name, - interpreter_version, - mac_platforms, -) +from packvers.tags import compatible_tags +from packvers.tags import cpython_tags +from packvers.tags import generic_tags +from packvers.tags import interpreter_name +from packvers.tags import interpreter_version +from packvers.tags import mac_platforms _osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)") diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 46dc7289351..b0295eca933 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -25,14 +25,13 @@ import packageurl import requests import saneyaml +import utils_pip_compatibility_tags from commoncode import fileutils from commoncode.hash import multi_checksums from commoncode.text import python_safe_name from packvers import tags as packaging_tags from packvers import version as packaging_version -import utils_pip_compatibility_tags - """ Utilities to manage Python thirparty libraries source, binaries and metadata in local directories and remote repositories. 
diff --git a/pyproject.toml b/pyproject.toml index 9e627366170..ba55770f20a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,10 +76,15 @@ select = [ "I", # isort "C9", # McCabe complexity ] -ignore = ["D1", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D203", "D205", "D212", "D400", "D415"] [tool.ruff.lint.isort] force-single-line = true +lines-after-imports = 1 +default-section = "first-party" +known-first-party = ["src", "tests", "etc/scripts/**/*.py"] +known-third-party = ["click", "pytest"] + sections = { django = ["django"] } section-order = [ "future", From aae1a2847c0e493b0e8bea542da30dbdfb2be68e Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 21:35:16 +0100 Subject: [PATCH 30/76] Apply small code updates Signed-off-by: Philippe Ombredanne --- etc/scripts/utils_requirements.py | 20 ++++++++----- etc/scripts/utils_thirdparty.py | 48 +++++++++++++++---------------- 2 files changed, 37 insertions(+), 31 deletions(-) diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index 1c502390f2c..a9ac2235bbc 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -57,21 +57,25 @@ def get_required_name_version(requirement, with_unpinned=False): >>> assert get_required_name_version("fooA==1.2.3.DEV1") == ("fooa", "1.2.3.dev1") >>> assert get_required_name_version("foo==1.2.3", with_unpinned=False) == ("foo", "1.2.3") >>> assert get_required_name_version("foo", with_unpinned=True) == ("foo", "") - >>> assert get_required_name_version("foo>=1.2", with_unpinned=True) == ("foo", ""), get_required_name_version("foo>=1.2") + >>> expected = ("foo", ""), get_required_name_version("foo>=1.2") + >>> assert get_required_name_version("foo>=1.2", with_unpinned=True) == expected >>> try: ... assert not get_required_name_version("foo", with_unpinned=False) ... except Exception as e: ... 
assert "Requirement version must be pinned" in str(e) """ requirement = requirement and "".join(requirement.lower().split()) - assert requirement, f"specifier is required is empty:{requirement!r}" + if not requirement: + raise ValueError(f"specifier is required is empty:{requirement!r}") name, operator, version = split_req(requirement) - assert name, f"Name is required: {requirement}" + if not name: + raise ValueError(f"Name is required: {requirement}") is_pinned = operator == "==" if with_unpinned: version = "" else: - assert is_pinned and version, f"Requirement version must be pinned: {requirement}" + if not is_pinned and version: + raise ValueError(f"Requirement version must be pinned: {requirement}") return name, version @@ -120,7 +124,7 @@ def get_installed_reqs(site_packages_dir): # setuptools, pip args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir] - return subprocess.check_output(args, encoding="utf-8") + return subprocess.check_output(args, encoding="utf-8") # noqa: S603 comparators = ( @@ -150,9 +154,11 @@ def split_req(req): >>> assert split_req("foo >= 1.2.3 ") == ("foo", ">=", "1.2.3"), split_req("foo >= 1.2.3 ") >>> assert split_req("foo>=1.2") == ("foo", ">=", "1.2"), split_req("foo>=1.2") """ - assert req + if not req: + raise ValueError("req is required") # do not allow multiple constraints and tags - assert not any(c in req for c in ",;") + if not any(c in req for c in ",;"): + raise Exception(f"complex requirements with : or ; not supported: {req}") req = "".join(req.split()) if not any(c in req for c in comparators): return req, "", "" diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index b0295eca933..6d5ffdce529 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -559,7 +558,8 @@ def download(self, dest_dir=THIRDPARTY_DIR): Download this distribution into `dest_dir` directory. Return the fetched filename. 
""" - assert self.filename + if not self.filename: + raise ValueError(f"self.filename has no value but is required: {self.filename!r}") if TRACE_DEEP: print( f"Fetching distribution of {self.name}=={self.version}:", @@ -829,10 +829,9 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): urls = LinksRepository.from_url( use_cached_index=use_cached_index).links errors = [] - extra_lic_names = [l.get("file") - for l in self.extra_data.get("licenses", {})] + extra_lic_names = [lic.get("file") for lic in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] - extra_lic_names = [ln for ln in extra_lic_names if ln] + extra_lic_names = [eln for eln in extra_lic_names if eln] lic_names = [f"{key}.LICENSE" for key in self.get_license_keys()] for filename in lic_names + extra_lic_names: floc = os.path.join(dest_dir, filename) @@ -853,7 +852,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from remote: {lic_url}") - except: + except Exception: try: # try licensedb second lic_url = f"{LICENSEDB_API_URL}/{filename}" @@ -866,8 +865,9 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from licensedb: {lic_url}") - except: - msg = f'No text for license {filename} in expression "{self.license_expression}" from {self}' + except Exception: + msg = f"No text for license {filename} in expression " + f"{self.license_expression!r} from {self}" print(msg) errors.append(msg) @@ -1009,7 +1009,7 @@ def get_license_link_for_filename(filename, urls): exception if no link is found or if there are more than one link for that file name. """ - path_or_url = [l for l in urls if l.endswith(f"/{filename}")] + path_or_url = [url for url in urls if url.endswith(f"/{filename}")] if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: @@ -1140,7 +1140,6 @@ def to_filename(self): @attr.attributes class Wheel(Distribution): - """ Represents a wheel file. @@ -1301,7 +1300,7 @@ def is_pure(self): def is_pure_wheel(filename): try: return Wheel.from_filename(filename).is_pure() - except: + except Exception: return False @@ -1489,8 +1488,7 @@ def dists_from_paths_or_urls(cls, paths_or_urls): ) except InvalidDistributionFilename: if TRACE_DEEP: - print( - f" Skipping invalid distribution from: {path_or_url}") + print(f" Skipping invalid distribution from: {path_or_url}") continue return dists @@ -1500,8 +1498,7 @@ def get_distributions(self): """ if self.sdist: yield self.sdist - for wheel in self.wheels: - yield wheel + yield from self.wheels def get_url_for_filename(self, filename): """ @@ -1632,7 +1629,8 @@ class PypiSimpleRepository: type=dict, default=attr.Factory(lambda: defaultdict(dict)), metadata=dict( - help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} available in this repo" + help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} " + "available in this repo" ), ) @@ -1647,7 +1645,8 @@ class PypiSimpleRepository: type=bool, default=False, metadata=dict( - help="If True, use any existing on-disk cached PyPI index files. Otherwise, fetch and cache." + help="If True, use any existing on-disk cached PyPI index files. " + "Otherwise, fetch and cache." ), ) @@ -1656,7 +1655,8 @@ def _get_package_versions_map(self, name): Return a mapping of all available PypiPackage version for this package name. The mapping may be empty. 
It is ordered by version from oldest to newest """ - assert name + if not name: + raise ValueError(f"name is required: {name!r}") normalized_name = NameVer.normalize_name(name) versions = self.packages[normalized_name] if not versions and normalized_name not in self.fetched_package_normalized_names: @@ -1713,7 +1713,7 @@ def fetch_links(self, normalized_name): ) links = collect_urls(text) # TODO: keep sha256 - links = [l.partition("#sha256=") for l in links] + links = [link.partition("#sha256=") for link in links] links = [url for url, _, _sha256 in links] return links @@ -1936,7 +1936,7 @@ def get_remote_file_content( # several redirects and that we can ignore content there. A HEAD request may # not get us this last header print(f" DOWNLOADING: {url}") - with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: + with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 status = response.status_code if status != requests.codes.ok: # NOQA if status == 429 and _delay < 20: @@ -2161,7 +2161,7 @@ def call(args, verbose=TRACE): """ if TRACE_DEEP: print("Calling:", " ".join(args)) - with subprocess.Popen( + with subprocess.Popen( # noqa: S603 args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: @@ -2227,7 +2227,7 @@ def download_wheels_with_pip( cli_args.extend(["--requirement", req_file]) if TRACE: - print(f"Downloading wheels using command:", " ".join(cli_args)) + print("Downloading wheels using command:", " ".join(cli_args)) existing = set(os.listdir(dest_dir)) error = False @@ -2260,7 +2260,7 @@ def download_wheels_with_pip( def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2312,5 +2312,5 @@ def get_license_expression(declared_licenses): return get_only_expression_from_extracted_license(declared_licenses) except ImportError: # Scancode is not installed, clean and join all the licenses - lics = [python_safe_name(l).lower() for l in declared_licenses] + lics = [python_safe_name(lic).lower() for lic in declared_licenses] return " AND ".join(lics).lower() From 037f9fc1b03736eeac9e0eefac3e35acc916d193 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 21:42:03 +0100 Subject: [PATCH 31/76] Format code Signed-off-by: Philippe Ombredanne --- etc/scripts/check_thirdparty.py | 3 +- etc/scripts/fetch_thirdparty.py | 26 ++++----- etc/scripts/gen_pypi_simple.py | 4 +- etc/scripts/utils_dejacode.py | 15 +++--- etc/scripts/utils_requirements.py | 9 ++-- etc/scripts/utils_thirdparty.py | 90 +++++++++++-------------------- 6 files changed, 50 insertions(+), 97 deletions(-) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 62dbb14f053..1aa4e28aca1 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -16,8 +16,7 @@ @click.option( "-d", "--dest", - type=click.Path(exists=True, readable=True, - path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), required=True, help="Path to the thirdparty directory to check.", ) diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index 30d376c475c..c2246837ee3 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -55,8 +55,7 @@ "-d", 
"--dest", "dest_dir", - type=click.Path(exists=True, readable=True, - path_type=str, file_okay=False), + type=click.Path(exists=True, readable=True, path_type=str, file_okay=False), metavar="DIR", default=utils_thirdparty.THIRDPARTY_DIR, show_default=True, @@ -121,7 +120,7 @@ show_default=False, multiple=True, help="Package name(s) that come only in sdist format (no wheels). " - "The command will not fail and exit if no wheel exists for these names", + "The command will not fail and exit if no wheel exists for these names", ) @click.option( "--wheel-only", @@ -132,7 +131,7 @@ show_default=False, multiple=True, help="Package name(s) that come only in wheel format (no sdist). " - "The command will not fail and exit if no sdist exists for these names", + "The command will not fail and exit if no sdist exists for these names", ) @click.option( "--no-dist", @@ -143,7 +142,7 @@ show_default=False, multiple=True, help="Package name(s) that do not come either in wheel or sdist format. " - "The command will not fail and exit if no distribution exists for these names", + "The command will not fail and exit if no distribution exists for these names", ) @click.help_option("-h", "--help") def fetch_thirdparty( @@ -225,8 +224,7 @@ def fetch_thirdparty( environments = None if wheels: evts = itertools.product(python_versions, operating_systems) - environments = [utils_thirdparty.Environment.from_pyver_and_os( - pyv, os) for pyv, os in evts] + environments = [utils_thirdparty.Environment.from_pyver_and_os(pyv, os) for pyv, os in evts] # Collect PyPI repos repos = [] @@ -250,7 +248,6 @@ def fetch_thirdparty( print(f"Processing: {name} @ {version}") if wheels: for environment in environments: - if TRACE: print(f" ==> Fetching wheel for envt: {environment}") @@ -262,14 +259,11 @@ def fetch_thirdparty( repos=repos, ) if not fetched: - wheels_or_sdist_not_found[f"{name}=={version}"].append( - environment) + wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) if TRACE: print(f" NOT FOUND") - if (sdists or - (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only) - ): + if sdists or (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only): if TRACE: print(f" ==> Fetching sdist: {name}=={version}") @@ -292,8 +286,7 @@ def fetch_thirdparty( sdist_missing = sdists and "sdist" in dists and not name in wheel_only if sdist_missing: mia.append(f"SDist missing: {nv} {dists}") - wheels_missing = wheels and any( - d for d in dists if d != "sdist") and not name in sdist_only + wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") @@ -303,8 +296,7 @@ def fetch_thirdparty( raise Exception(mia) print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") - utils_thirdparty.fetch_abouts_and_licenses( - dest_dir=dest_dir, use_cached_index=use_cached_index) + utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems diff --git a/etc/scripts/gen_pypi_simple.py b/etc/scripts/gen_pypi_simple.py index 214d90dc519..cfe68e67e59 100644 --- a/etc/scripts/gen_pypi_simple.py +++ b/etc/scripts/gen_pypi_simple.py @@ -69,7 +69,6 @@ def get_package_name_from_filename(filename): raise InvalidDistributionFilename(filename) elif filename.endswith(wheel_ext): - wheel_info = get_wheel_from_filename(filename) if not wheel_info: @@ -200,11 +199,10 @@ def build_pypi_index(directory, 
base_url="https://thirdparty.aboutcode.org/pypi" simple_html_index = [ "", "PyPI Simple Index", - '' '', + '', ] for pkg_file in directory.iterdir(): - pkg_filename = pkg_file.name if ( diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index c71543f6273..cd39cda3986 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -32,8 +32,7 @@ def can_do_api_calls(): if not DEJACODE_API_KEY and DEJACODE_API_URL: - print( - "DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") + print("DejaCode DEJACODE_API_KEY and DEJACODE_API_URL not configured. Doing nothing") return False else: return True @@ -68,8 +67,7 @@ def get_package_data(distribution): return results[0] elif len_results > 1: - print( - f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") + print(f"More than 1 entry exists, review at: {DEJACODE_API_URL_PACKAGES}") else: print("Could not find package:", distribution.download_url) @@ -150,12 +148,11 @@ def find_latest_dejacode_package(distribution): # there was no exact match, find the latest version # TODO: consider the closest version rather than the latest # or the version that has the best data - with_versions = [(packaging_version.parse(p["version"]), p) - for p in packages] + with_versions = [(packaging_version.parse(p["version"]), p) for p in packages] with_versions = sorted(with_versions) latest_version, latest_package_version = sorted(with_versions)[-1] print( - f"Found DejaCode latest version: {latest_version} " f"for dist: {distribution.package_url}", + f"Found DejaCode latest version: {latest_version} for dist: {distribution.package_url}", ) return latest_package_version @@ -181,7 +178,7 @@ def create_dejacode_package(distribution): } fields_to_carry_over = [ - "download_url" "type", + "download_urltype", "namespace", "name", "version", @@ -209,5 +206,5 @@ def create_dejacode_package(distribution): if response.status_code != 201: raise Exception(f"Error, cannot create package for: {distribution}") - print(f'New Package created at: {new_package_data["absolute_url"]}') + print(f"New Package created at: {new_package_data['absolute_url']}") return new_package_data diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index a9ac2235bbc..167bc9f54b0 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -106,8 +106,7 @@ def lock_dev_requirements( all_req_nvs = get_required_name_versions(all_req_lines) dev_only_req_nvs = {n: v for n, v in all_req_nvs if n not in main_names} - new_reqs = "\n".join( - f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) + new_reqs = "\n".join(f"{n}=={v}" for n, v in sorted(dev_only_req_nvs.items())) with open(dev_requirements_file, "w") as fo: fo.write(new_reqs) @@ -118,12 +117,10 @@ def get_installed_reqs(site_packages_dir): as a text. 
""" if not os.path.exists(site_packages_dir): - raise Exception( - f"site_packages directory: {site_packages_dir!r} does not exists") + raise Exception(f"site_packages directory: {site_packages_dir!r} does not exists") # Also include these packages in the output with --all: wheel, distribute, # setuptools, pip - args = ["pip", "freeze", "--exclude-editable", - "--all", "--path", site_packages_dir] + args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir] return subprocess.check_output(args, encoding="utf-8") # noqa: S603 diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 6d5ffdce529..4ea1babbd4f 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -243,11 +243,9 @@ def download_wheel(name, version, environment, dest_dir=THIRDPARTY_DIR, repos=tu package = repo.get_package_version(name=name, version=version) if not package: if TRACE_DEEP: - print( - f" download_wheel: No package in {repo.index_url} for {name}=={version}") + print(f" download_wheel: No package in {repo.index_url} for {name}=={version}") continue - supported_wheels = list( - package.get_supported_wheels(environment=environment)) + supported_wheels = list(package.get_supported_wheels(environment=environment)) if not supported_wheels: if TRACE_DEEP: print( @@ -291,8 +289,7 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): if not package: if TRACE_DEEP: - print( - f" download_sdist: No package in {repo.index_url} for {name}=={version}") + print(f" download_sdist: No package in {repo.index_url} for {name}=={version}") continue sdist = package.sdist if not sdist: @@ -301,8 +298,7 @@ def download_sdist(name, version, dest_dir=THIRDPARTY_DIR, repos=tuple()): continue if TRACE_DEEP: - print( - f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") + print(f" download_sdist: Getting sdist from index (or cache): {sdist.download_url}") fetched_sdist_filename = package.sdist.download(dest_dir=dest_dir) if fetched_sdist_filename: @@ -357,7 +353,6 @@ def sorted(cls, namevers): @attr.attributes class Distribution(NameVer): - # field names that can be updated from another Distribution or mapping updatable_fields = [ "license_expression", @@ -535,8 +530,7 @@ def get_best_download_url(self, repos=tuple()): repos = DEFAULT_PYPI_REPOS for repo in repos: - package = repo.get_package_version( - name=self.name, version=self.version) + package = repo.get_package_version(name=self.name, version=self.version) if not package: if TRACE: print( @@ -776,8 +770,7 @@ def load_remote_about_data(self): if notice_text: about_data["notice_text"] = notice_text except RemoteNotFetchedException: - print( - f"Failed to fetch NOTICE file: {self.notice_download_url}") + print(f"Failed to fetch NOTICE file: {self.notice_download_url}") return self.load_about_data(about_data) def get_checksums(self, dest_dir=THIRDPARTY_DIR): @@ -826,8 +819,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): Fetch license files if missing in `dest_dir`. Return True if license files were fetched. 
""" - urls = LinksRepository.from_url( - use_cached_index=use_cached_index).links + urls = LinksRepository.from_url(use_cached_index=use_cached_index).links errors = [] extra_lic_names = [lic.get("file") for lic in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] @@ -840,8 +832,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): try: # try remotely first - lic_url = get_license_link_for_filename( - filename=filename, urls=urls) + lic_url = get_license_link_for_filename(filename=filename, urls=urls) fetch_and_save( path_or_url=lic_url, @@ -919,8 +910,7 @@ def load_pkginfo_data(self, dest_dir=THIRDPARTY_DIR): c for c in classifiers if c.startswith("License") ] license_expression = get_license_expression(declared_license) - other_classifiers = [ - c for c in classifiers if not c.startswith("License")] + other_classifiers = [c for c in classifiers if not c.startswith("License")] holder = raw_data["Author"] holder_contact = raw_data["Author-email"] @@ -962,8 +952,7 @@ def update(self, data, overwrite=False, keep_extra=True): package_url = data.get("package_url") if package_url: purl_from_data = packageurl.PackageURL.from_string(package_url) - purl_from_self = packageurl.PackageURL.from_string( - self.package_url) + purl_from_self = packageurl.PackageURL.from_string(self.package_url) if purl_from_data != purl_from_self: print( f"Invalid dist update attempt, no same same purl with dist: " @@ -1013,8 +1002,7 @@ def get_license_link_for_filename(filename, urls): if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: - raise Exception( - f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) + raise Exception(f"Multiple links to file: {filename}: \n" + "\n".join(path_or_url)) return path_or_url[0] @@ -1102,7 +1090,6 @@ def get_sdist_name_ver_ext(filename): @attr.attributes class Sdist(Distribution): - extension = attr.ib( repr=False, type=str, @@ -1407,8 +1394,7 @@ def packages_from_dir(cls, directory): """ base = os.path.abspath(directory) - paths = [os.path.join(base, f) - for f in os.listdir(base) if f.endswith(EXTENSIONS)] + paths = [os.path.join(base, f) for f in os.listdir(base) if f.endswith(EXTENSIONS)] if TRACE_ULTRA_DEEP: print("packages_from_dir: paths:", paths) @@ -1469,8 +1455,7 @@ def dists_from_paths_or_urls(cls, paths_or_urls): dists = [] if TRACE_ULTRA_DEEP: print(" ###paths_or_urls:", paths_or_urls) - installable = [f for f in paths_or_urls if f.endswith( - EXTENSIONS_INSTALLABLE)] + installable = [f for f in paths_or_urls if f.endswith(EXTENSIONS_INSTALLABLE)] for path_or_url in installable: try: dist = Distribution.from_path_or_url(path_or_url) @@ -1536,8 +1521,7 @@ class Environment: implementation = attr.ib( type=str, default="cp", - metadata=dict( - help="Python implementation supported by this environment."), + metadata=dict(help="Python implementation supported by this environment."), repr=False, ) @@ -1551,8 +1535,7 @@ class Environment: platforms = attr.ib( type=list, default=attr.Factory(list), - metadata=dict( - help="List of platform tags supported by this environment."), + metadata=dict(help="List of platform tags supported by this environment."), repr=False, ) @@ -1637,8 +1620,7 @@ class PypiSimpleRepository: fetched_package_normalized_names = attr.ib( type=set, default=attr.Factory(set), - metadata=dict( - help="A set of already fetched package normalized names."), + metadata=dict(help="A set of already fetched package normalized 
names."), ) use_cached_index = attr.ib( @@ -1671,12 +1653,10 @@ def _get_package_versions_map(self, name): self.packages[normalized_name] = versions except RemoteNotFetchedException as e: if TRACE: - print( - f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") + print(f"failed to fetch package name: {name} from: {self.index_url}:\n{e}") if not versions and TRACE: - print( - f"WARNING: package {name} not found in repo: {self.index_url}") + print(f"WARNING: package {name} not found in repo: {self.index_url}") return versions @@ -1861,8 +1841,7 @@ def get(self, path_or_url, as_text=True, force=False): if force or not os.path.exists(cached): if TRACE_DEEP: print(f" FILE CACHE MISS: {path_or_url}") - content = get_file_content( - path_or_url=path_or_url, as_text=as_text) + content = get_file_content(path_or_url=path_or_url, as_text=as_text) wmode = "w" if as_text else "wb" with open(cached, wmode) as fo: fo.write(content) @@ -1884,8 +1863,7 @@ def get_file_content(path_or_url, as_text=True): if path_or_url.startswith("https://"): if TRACE_DEEP: print(f"Fetching: {path_or_url}") - _headers, content = get_remote_file_content( - url=path_or_url, as_text=as_text) + _headers, content = get_remote_file_content(url=path_or_url, as_text=as_text) return content elif path_or_url.startswith("file://") or ( @@ -1936,7 +1914,7 @@ def get_remote_file_content( # several redirects and that we can ignore content there. A HEAD request may # not get us this last header print(f" DOWNLOADING: {url}") - with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 + with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 status = response.status_code if status != requests.codes.ok: # NOQA if status == 429 and _delay < 20: @@ -1951,8 +1929,7 @@ def get_remote_file_content( ) else: - raise RemoteNotFetchedException( - f"Failed HTTP request from {url} with {status}") + raise RemoteNotFetchedException(f"Failed HTTP request from {url} with {status}") if headers_only: return response.headers, None @@ -2043,8 +2020,7 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # lets try to get from another dist of the same local package @@ -2056,8 +2032,7 @@ def get_other_dists(_package, _dist): # if has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get another version of the same package that is not our version @@ -2068,8 +2043,7 @@ def get_other_dists(_package, _dist): ] other_local_version = other_local_packages and other_local_packages[-1] if other_local_version: - latest_local_dists = list( - other_local_version.get_distributions()) + latest_local_dists = list(other_local_version.get_distributions()) for latest_local_dist in latest_local_dists: latest_local_dist.load_about_data(dest_dir=dest_dir) if not latest_local_dist.has_key_metadata(): @@ -2095,8 +2069,7 @@ def get_other_dists(_package, _dist): # if 
has key data we may look to improve later, but we can move on if local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir=dest_dir) - local_dist.fetch_license_files( - dest_dir=dest_dir, use_cached_index=use_cached_index) + local_dist.fetch_license_files(dest_dir=dest_dir, use_cached_index=use_cached_index) continue # try to get a latest version of the same package that is not our version @@ -2137,8 +2110,7 @@ def get_other_dists(_package, _dist): # if local_dist.has_key_metadata() or not local_dist.has_key_metadata(): local_dist.save_about_and_notice_files(dest_dir) - lic_errs = local_dist.fetch_license_files( - dest_dir, use_cached_index=use_cached_index) + lic_errs = local_dist.fetch_license_files(dest_dir, use_cached_index=use_cached_index) if not local_dist.has_key_metadata(): print(f"Unable to add essential ABOUT data for: {local_dist}") @@ -2161,10 +2133,9 @@ def call(args, verbose=TRACE): """ if TRACE_DEEP: print("Calling:", " ".join(args)) - with subprocess.Popen( # noqa: S603 + with subprocess.Popen( # noqa: S603 args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: - stdouts = [] while True: line = process.stdout.readline() @@ -2260,7 +2231,7 @@ def download_wheels_with_pip( def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2286,8 +2257,7 @@ def find_problems( for dist in package.get_distributions(): dist.load_about_data(dest_dir=dest_dir) - abpth = os.path.abspath(os.path.join( - dest_dir, dist.about_filename)) + abpth = os.path.abspath(os.path.join(dest_dir, dist.about_filename)) if not dist.has_key_metadata(): print(f" Missing key ABOUT data in file://{abpth}") if "classifiers" in dist.extra_data: From 1189dda52570ed018f52eacd38c4990e8be8ff1a Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:02:19 +0100 Subject: [PATCH 32/76] Apply cosmetic refactorings Signed-off-by: Philippe Ombredanne --- docs/source/conf.py | 3 ++- etc/scripts/check_thirdparty.py | 4 +--- etc/scripts/fetch_thirdparty.py | 17 ++++++++--------- etc/scripts/gen_pypi_simple.py | 15 +++++++-------- etc/scripts/gen_requirements.py | 4 ++-- etc/scripts/gen_requirements_dev.py | 4 ++-- .../test_utils_pip_compatibility_tags.py | 9 +++++---- etc/scripts/utils_dejacode.py | 9 +++++---- etc/scripts/utils_pip_compatibility_tags.py | 8 +++++--- etc/scripts/utils_requirements.py | 3 +-- etc/scripts/utils_thirdparty.py | 3 ++- 11 files changed, 40 insertions(+), 39 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 8c88fa2c999..8aad82949c4 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -94,7 +94,8 @@ html_show_sphinx = True # Define CSS and HTML abbreviations used in .rst files. These are examples. -# .. role:: is used to refer to styles defined in _static/theme_overrides.css and is used like this: :red:`text` +# .. role:: is used to refer to styles defined in _static/theme_overrides.css +# and is used like this: :red:`text` rst_prolog = """ .. 
|psf| replace:: Python Software Foundation diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index 1aa4e28aca1..bb8347a5a0e 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -41,8 +40,7 @@ def check_thirdparty_dir( """ Check a thirdparty directory for problems and print these on screen. """ - # check for problems - print(f"==> CHECK FOR PROBLEMS") + print("==> CHECK FOR PROBLEMS") utils_thirdparty.find_problems( dest_dir=dest, report_missing_sources=sdists, diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index c2246837ee3..76a19a60503 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -10,7 +9,6 @@ # import itertools -import os import sys from collections import defaultdict @@ -109,7 +107,8 @@ @click.option( "--use-cached-index", is_flag=True, - help="Use on disk cached PyPI indexes list of packages and versions and do not refetch if present.", + help="Use on disk cached PyPI indexes list of packages and versions and " + "do not refetch if present.", ) @click.option( "--sdist-only", @@ -261,7 +260,7 @@ def fetch_thirdparty( if not fetched: wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) if TRACE: - print(f" NOT FOUND") + print(" NOT FOUND") if sdists or (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only): if TRACE: @@ -276,17 +275,17 @@ def fetch_thirdparty( if not fetched: wheels_or_sdist_not_found[f"{name}=={version}"].append("sdist") if TRACE: - print(f" NOT FOUND") + print(" NOT FOUND") mia = [] for nv, dists in wheels_or_sdist_not_found.items(): name, _, version = nv.partition("==") if name in no_dist: continue - sdist_missing = sdists and "sdist" in dists and not name in wheel_only + sdist_missing = sdists and "sdist" in dists and name not in wheel_only if sdist_missing: mia.append(f"SDist missing: {nv} {dists}") - wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only + wheels_missing = wheels and any(d for d in dists if d != "sdist") and name not in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") @@ -295,12 +294,12 @@ def fetch_thirdparty( print(m) raise Exception(mia) - print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") + print("==> FETCHING OR CREATING ABOUT AND LICENSE FILES") utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems - print(f"==> CHECK FOR PROBLEMS") + print("==> CHECK FOR PROBLEMS") utils_thirdparty.find_problems( dest_dir=dest_dir, report_missing_sources=sdists, diff --git a/etc/scripts/gen_pypi_simple.py b/etc/scripts/gen_pypi_simple.py index cfe68e67e59..89d0626527c 100644 --- a/etc/scripts/gen_pypi_simple.py +++ b/etc/scripts/gen_pypi_simple.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: BSD-2-Clause-Views AND MIT # Copyright (c) 2010 David Wolever . All rights reserved. 
@@ -132,7 +131,7 @@ def build_links_package_index(packages_by_package_name, base_url):
     Return an HTML document as string which is a links index of all packages
     """
     document = []
-    header = f"""
+    header = """
 Links for all packages
@@ -177,13 +176,13 @@ def simple_index_entry(self, base_url):
 
 def build_pypi_index(directory, base_url="https://thirdparty.aboutcode.org/pypi"):
     """
-    Using a ``directory`` directory of wheels and sdists, create the a PyPI
-    simple directory index at ``directory``/simple/ populated with the proper
-    PyPI simple index directory structure crafted using symlinks.
+    Create a PyPI simple directory index using a ``directory`` directory of wheels and sdists in
+    the directory at ``directory``/simple/ populated with the proper PyPI simple index directory
+    structure crafted using symlinks.
 
-    WARNING: The ``directory``/simple/ directory is removed if it exists.
-    NOTE: in addition to the a PyPI simple index.html there is also a links.html
-    index file generated which is suitable to use with pip's --find-links
+    WARNING: The ``directory``/simple/ directory is removed if it exists. NOTE: in addition to the
+    PyPI simple index.html there is also a links.html index file generated which is suitable to use
+    with pip's --find-links
     """
 
     directory = Path(directory)
diff --git a/etc/scripts/gen_requirements.py b/etc/scripts/gen_requirements.py
index 2b65ae807ef..1b87944239e 100644
--- a/etc/scripts/gen_requirements.py
+++ b/etc/scripts/gen_requirements.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 #
 # Copyright (c) nexB Inc. and others. All rights reserved.
 # ScanCode is a trademark of nexB Inc.
@@ -34,7 +33,8 @@ def gen_requirements():
         type=pathlib.Path,
         required=True,
         metavar="DIR",
-        help="Path to the 'site-packages' directory where wheels are installed such as lib/python3.6/site-packages",
+        help="Path to the 'site-packages' directory where wheels are installed "
+        "such as lib/python3.12/site-packages",
     )
     parser.add_argument(
         "-r",
diff --git a/etc/scripts/gen_requirements_dev.py b/etc/scripts/gen_requirements_dev.py
index 5db1c48ed73..85482056598 100644
--- a/etc/scripts/gen_requirements_dev.py
+++ b/etc/scripts/gen_requirements_dev.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 #
 # Copyright (c) nexB Inc. and others. All rights reserved.
 # ScanCode is a trademark of nexB Inc.
@@ -36,7 +35,8 @@ def gen_dev_requirements():
         type=pathlib.Path,
         required=True,
         metavar="DIR",
-        help='Path to the "site-packages" directory where wheels are installed such as lib/python3.6/site-packages',
+        help="Path to the 'site-packages' directory where wheels are installed "
+        "such as lib/python3.12/site-packages",
     )
     parser.add_argument(
         "-d",
diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py
index a33b8b387ae..de4b7066a28 100644
--- a/etc/scripts/test_utils_pip_compatibility_tags.py
+++ b/etc/scripts/test_utils_pip_compatibility_tags.py
@@ -1,4 +1,5 @@
-"""Generate and work with PEP 425 Compatibility Tags.
+"""
+Generate and work with PEP 425 Compatibility Tags.
copied from pip-20.3.1 pip/tests/unit/test_utils_compatibility_tags.py download_url: https://raw.githubusercontent.com/pypa/pip/20.3.1/tests/unit/test_utils_compatibility_tags.py @@ -50,7 +51,7 @@ def test_version_info_to_nodot(version_info, expected): assert actual == expected -class Testcompatibility_tags(object): +class Testcompatibility_tags: def mock_get_config_var(self, **kwd): """ Patch sysconfig.get_config_var for arbitrary keys. @@ -81,7 +82,7 @@ def test_no_hyphen_tag(self): assert "-" not in tag.platform -class TestManylinux2010Tags(object): +class TestManylinux2010Tags: @pytest.mark.parametrize( "manylinux2010,manylinux1", [ @@ -104,7 +105,7 @@ def test_manylinux2010_implies_manylinux1(self, manylinux2010, manylinux1): assert arches[:2] == [manylinux2010, manylinux1] -class TestManylinux2014Tags(object): +class TestManylinux2014Tags: @pytest.mark.parametrize( "manylinuxA,manylinuxB", [ diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index cd39cda3986..b6bff5186c5 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -25,7 +24,7 @@ DEJACODE_API_URL_PACKAGES = f"{DEJACODE_API_URL}packages/" DEJACODE_API_HEADERS = { - "Authorization": "Token {}".format(DEJACODE_API_KEY), + "Authorization": f"Token {DEJACODE_API_KEY}", "Accept": "application/json; indent=4", } @@ -50,6 +49,7 @@ def fetch_dejacode_packages(params): DEJACODE_API_URL_PACKAGES, params=params, headers=DEJACODE_API_HEADERS, + timeout=10, ) return response.json()["results"] @@ -93,7 +93,7 @@ def update_with_dejacode_about_data(distribution): if package_data: package_api_url = package_data["api_url"] about_url = f"{package_api_url}about" - response = requests.get(about_url, headers=DEJACODE_API_HEADERS) + response = requests.get(about_url, headers=DEJACODE_API_HEADERS, timeout=10) # note that this is YAML-formatted about_text = response.json()["about_data"] about_data = saneyaml.load(about_text) @@ -113,7 +113,7 @@ def fetch_and_save_about_files(distribution, dest_dir="thirdparty"): if package_data: package_api_url = package_data["api_url"] about_url = f"{package_api_url}about_files" - response = requests.get(about_url, headers=DEJACODE_API_HEADERS) + response = requests.get(about_url, headers=DEJACODE_API_HEADERS, timeout=10) about_zip = response.content with io.BytesIO(about_zip) as zf: with zipfile.ZipFile(zf) as zi: @@ -201,6 +201,7 @@ def create_dejacode_package(distribution): DEJACODE_API_URL_PACKAGES, data=new_package_payload, headers=DEJACODE_API_HEADERS, + timeout=10, ) new_package_data = response.json() if response.status_code != 201: diff --git a/etc/scripts/utils_pip_compatibility_tags.py b/etc/scripts/utils_pip_compatibility_tags.py index de0ac95d1df..dd954bca74b 100644 --- a/etc/scripts/utils_pip_compatibility_tags.py +++ b/etc/scripts/utils_pip_compatibility_tags.py @@ -1,4 +1,5 @@ -"""Generate and work with PEP 425 Compatibility Tags. +""" +Generate and work with PEP 425 Compatibility Tags. 
copied from pip-20.3.1 pip/_internal/utils/compatibility_tags.py download_url: https://github.com/pypa/pip/blob/20.3.1/src/pip/_internal/utils/compatibility_tags.py @@ -130,7 +131,7 @@ def _get_custom_interpreter(implementation=None, version=None): implementation = interpreter_name() if version is None: version = interpreter_version() - return "{}{}".format(implementation, version) + return f"{implementation}{version}" def get_supported( @@ -140,7 +141,8 @@ def get_supported( abis=None, # type: Optional[List[str]] ): # type: (...) -> List[Tag] - """Return a list of supported tags for each version specified in + """ + Return a list of supported tags for each version specified in `versions`. :param version: a string version, of the form "33" or "32", diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index 167bc9f54b0..b9b2c0e7701 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -40,7 +39,7 @@ def get_required_name_versions(requirement_lines, with_unpinned=False): req_line = req_line.strip() if not req_line or req_line.startswith("#"): continue - if req_line.startswith("-") or (not with_unpinned and not "==" in req_line): + if req_line.startswith("-") or (not with_unpinned and "==" not in req_line): print(f"Requirement line is not supported: ignored: {req_line}") continue yield get_required_name_version(requirement=req_line, with_unpinned=with_unpinned) diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 4ea1babbd4f..aafc1d69f06 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -91,7 +91,8 @@ - parse requirement file - create a TODO queue of requirements to process -- done: create an empty map of processed binary requirements as {package name: (list of versions/tags} +- done: create an empty map of processed binary requirements as + {package name: (list of versions/tags} - while we have package reqs in TODO queue, process one requirement: From 84257fbe200e5780cb13536a5c8eb56a88539e6a Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:05:23 +0100 Subject: [PATCH 33/76] Reformat test code Signed-off-by: Philippe Ombredanne --- .gitignore | 1 + pyproject.toml | 19 +++++++++++-------- tests/test_skeleton_codestyle.py | 25 ++++++++++++++++--------- 3 files changed, 28 insertions(+), 17 deletions(-) diff --git a/.gitignore b/.gitignore index 2d48196f10f..8a93c94d733 100644 --- a/.gitignore +++ b/.gitignore @@ -72,3 +72,4 @@ tcl # Ignore Jupyter Notebook related temp files .ipynb_checkpoints/ +/.ruff_cache/ diff --git a/pyproject.toml b/pyproject.toml index ba55770f20a..a872ab3a50c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,16 +67,17 @@ include = [ [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ select = [ - "E", # pycodestyle - "W", # pycodestyle warnings - "D", # pydocstyle - "F", # Pyflakes - "UP", # pyupgrade - "S", # flake8-bandit +# "E", # pycodestyle +# "W", # pycodestyle warnings +# "D", # pydocstyle +# "F", # Pyflakes +# "UP", # pyupgrade +# "S", # flake8-bandit "I", # isort - "C9", # McCabe complexity +# "C9", # McCabe complexity ] -ignore = ["D1", "D200", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415"] + [tool.ruff.lint.isort] force-single-line = true @@ -100,3 +101,5 @@ 
max-complexity = 10 [tool.ruff.lint.per-file-ignores] # Place paths of files to be ignored by ruff here +"tests/*" = ["S101"] +"test_*.py" = ["S101"] diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py index b4ce8c167d8..8cd85c94616 100644 --- a/tests/test_skeleton_codestyle.py +++ b/tests/test_skeleton_codestyle.py @@ -7,30 +7,37 @@ # See https://aboutcode.org for more information about nexB OSS projects. # +import configparser import subprocess import unittest -import configparser - class BaseTests(unittest.TestCase): def test_skeleton_codestyle(self): - """ - This test shouldn't run in proliferated repositories. - """ + # This test shouldn't run in proliferated repositories. + + # TODO: update with switch to pyproject.toml setup_cfg = configparser.ConfigParser() setup_cfg.read("setup.cfg") if setup_cfg["metadata"]["name"] != "skeleton": return - args = "venv/bin/black --check -l 100 setup.py etc tests" + commands = [ + ["venv/bin/ruff", "--check"], + ["venv/bin/ruff", "format", "--check"], + ] + command = None try: - subprocess.check_output(args.split()) + for command in commands: + subprocess.check_output(command) # noqa: S603 except subprocess.CalledProcessError as e: print("===========================================================") print(e.output) print("===========================================================") raise Exception( - "Black style check failed; please format the code using:\n" - " python -m black -l 100 setup.py etc tests", + f"Code style and linting command check failed: {' '.join(command)!r}.\n" + "You can check and format the code using:\n" + " make valid\n", + "OR:\n ruff format\n", + " ruff check --fix\n", e.output, ) from e From 00684f733a0873bd837af85471814907ba93f456 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:08:25 +0100 Subject: [PATCH 34/76] Format code Signed-off-by: Philippe Ombredanne --- etc/scripts/check_thirdparty.py | 1 + etc/scripts/test_utils_pip_compatibility_tags.py | 1 + tests/test_skeleton_codestyle.py | 1 + 3 files changed, 3 insertions(+) diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index bb8347a5a0e..65ae595edd0 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -11,6 +11,7 @@ import utils_thirdparty + @click.command() @click.option( "-d", diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py index de4b7066a28..0e9c360ae7a 100644 --- a/etc/scripts/test_utils_pip_compatibility_tags.py +++ b/etc/scripts/test_utils_pip_compatibility_tags.py @@ -33,6 +33,7 @@ import utils_pip_compatibility_tags + @pytest.mark.parametrize( "version_info, expected", [ diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py index 8cd85c94616..7135ac0d3c9 100644 --- a/tests/test_skeleton_codestyle.py +++ b/tests/test_skeleton_codestyle.py @@ -11,6 +11,7 @@ import subprocess import unittest + class BaseTests(unittest.TestCase): def test_skeleton_codestyle(self): # This test shouldn't run in proliferated repositories. 
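
The ``per-file-ignores`` entries added to ``pyproject.toml`` above exempt test code from Ruff's
flake8-bandit rule S101, which flags every ``assert`` statement; pytest-style tests rely on
``assert`` to express expectations, so the rule only makes sense outside of tests. A minimal
sketch of the situation (the file and functions below are hypothetical illustrations, not part
of the skeleton)::

    # Hypothetical tests/test_example.py, shown only to illustrate the S101 per-file ignore.
    # Without the "tests/*" = ["S101"] and "test_*.py" = ["S101"] entries,
    # `ruff check` would report S101 on each assert below, because flake8-bandit
    # treats assert as unsafe for runtime checks (asserts are stripped under
    # `python -O`); in tests, asserts are the normal way to state expectations.


    def add(a, b):
        return a + b


    def test_add():
        assert add(2, 2) == 4
        assert add(-1, 1) == 0
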
From 7c4278df4e8acf04888a188d115b4c687060f1e5 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:10:45 +0100 Subject: [PATCH 35/76] Refine ruff configuration Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a872ab3a50c..0f8bd587122 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,11 +72,11 @@ select = [ # "D", # pydocstyle # "F", # Pyflakes # "UP", # pyupgrade -# "S", # flake8-bandit + "S", # flake8-bandit "I", # isort # "C9", # McCabe complexity ] -ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415"] +ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415", "I001"] [tool.ruff.lint.isort] From 47cb840db13d3e7328dba8d8e62197cda82e48ec Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:54:01 +0100 Subject: [PATCH 36/76] Format doc Signed-off-by: Philippe Ombredanne --- AUTHORS.rst | 2 +- README.rst | 29 ++++++++++++++--------------- 2 files changed, 15 insertions(+), 16 deletions(-) diff --git a/AUTHORS.rst b/AUTHORS.rst index 51a19cc8af5..16e204644a7 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -1,3 +1,3 @@ The following organizations or individuals have contributed to this repo: -- +- diff --git a/README.rst b/README.rst index f848b4b3fc2..01d02108416 100644 --- a/README.rst +++ b/README.rst @@ -1,11 +1,10 @@ A Simple Python Project Skeleton ================================ -This repo attempts to standardize the structure of the Python-based project's repositories using -modern Python packaging and configuration techniques that can then be applied to many repos. - -Using this `blog post`_ as inspiration, this repository serves as the base for all new Python -projects and is mergeable in existing repositories as well. +This repo attempts to standardize the structure of the Python-based project's +repositories using modern Python packaging and configuration techniques. +Using this `blog post`_ as inspiration, this repository serves as the base for +all new Python projects and is mergeable in existing repositories as well. .. _blog post: https://blog.jaraco.com/a-project-skeleton-for-python-projects/ @@ -69,7 +68,7 @@ Release Notes - Drop support for Python 3.8 - Drop support for macOS-11, add support for macOS-14 - + - 2024-02-19: - Replace support in CI of default ubuntu-20.04 by ubuntu-22.04 @@ -86,19 +85,19 @@ Release Notes - Synchronize configure and configure.bat scripts for sanity - Update CI operating system support with latest Azure OS images - - Streamline utility scripts in etc/scripts/ to create, fetch and manage third-party dependencies - There are now fewer scripts. See etc/scripts/README.rst for details + - Streamline utility scripts in etc/scripts/ to create, fetch and manage third-party + dependencies. There are now fewer scripts. 
See etc/scripts/README.rst for details - 2021-09-03: - - - ``configure`` now requires pinned dependencies via the use of ``requirements.txt`` and ``requirements-dev.txt`` - + - ``configure`` now requires pinned dependencies via the use of ``requirements.txt`` + and ``requirements-dev.txt`` - ``configure`` can now accept multiple options at once - Add utility scripts from scancode-toolkit/etc/release/ for use in generating project files - Rename virtual environment directory from ``tmp`` to ``venv`` - - Update README.rst with instructions for generating ``requirements.txt`` and ``requirements-dev.txt``, - as well as collecting dependencies as wheels and generating ABOUT files for them. + - Update README.rst with instructions for generating ``requirements.txt`` + and ``requirements-dev.txt``, as well as collecting dependencies as wheels and generating + ABOUT files for them. - 2021-05-11: - - - Adopt new configure scripts from ScanCode TK that allows correct configuration of which Python version is used. + - Adopt new configure scripts from ScanCode TK that allows correct configuration of which + Python version is used. From 7b29b5914a443069a7ff967eb4ef096034333248 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:54:35 +0100 Subject: [PATCH 37/76] Run doc8 on all rst files Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 930e80101c6..debc404ef8e 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ + @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ *.rst valid: @echo "-> Run Ruff format" From 86c7ca45d3132e5f6873658ab1743b6e27cfeb58 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 Mar 2025 22:55:20 +0100 Subject: [PATCH 38/76] Enable doc style checks Signed-off-by: Philippe Ombredanne --- pyproject.toml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0f8bd587122..51761ff8ad6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,8 @@ include = [ "etc/**/*.py", "test/**/*.py", "doc/**/*", - "*.py" + "*.py", + "." ] [tool.ruff.lint] @@ -69,10 +70,10 @@ include = [ select = [ # "E", # pycodestyle # "W", # pycodestyle warnings -# "D", # pydocstyle + "D", # pydocstyle # "F", # Pyflakes # "UP", # pyupgrade - "S", # flake8-bandit +# "S", # flake8-bandit "I", # isort # "C9", # McCabe complexity ] From 71583c5ecdfe5dd352b7f2bb9d26deaad971e151 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 14:40:36 +0200 Subject: [PATCH 39/76] Do not format more test data Signed-off-by: Philippe Ombredanne --- pyproject.toml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 51761ff8ad6..7d807ebf054 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,10 +60,25 @@ include = [ "src/**/*.py", "etc/**/*.py", "test/**/*.py", + "tests/**/*.py", "doc/**/*", + "docs/**/*", "*.py", "." 
] +# ignore test data and testfiles: they should never be linted nor formatted +exclude = [ +# main style + "**/tests/data/**/*", +# scancode-toolkit + "**/tests/*/data/**/*", +# dejacode, purldb + "**/tests/testfiles/**/*", +# vulnerablecode, fetchcode + "**/tests/*/test_data/**/*", + "**/tests/test_data/**/*", +] + [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ From 0f1a40382bdcadf82512395787faab50927256f6 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 14:58:36 +0200 Subject: [PATCH 40/76] Do not treat rst as Python Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 7d807ebf054..5e16b564dc3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,8 +61,8 @@ include = [ "etc/**/*.py", "test/**/*.py", "tests/**/*.py", - "doc/**/*", - "docs/**/*", + "doc/**/*.py", + "docs/**/*.py", "*.py", "." ] From 6bea6577864bf432438dabaed9e46a721aae2961 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 16:41:57 +0200 Subject: [PATCH 41/76] Combine testing and docs extra for simplicity Signed-off-by: Philippe Ombredanne --- configure | 2 -- configure.bat | 4 ---- setup.cfg | 3 --- 3 files changed, 9 deletions(-) diff --git a/configure b/configure index 22d92885864..83fd2035c1c 100755 --- a/configure +++ b/configure @@ -30,7 +30,6 @@ CLI_ARGS=$1 # Requirement arguments passed to pip and used by default or with --dev. REQUIREMENTS="--editable . --constraint requirements.txt" DEV_REQUIREMENTS="--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" -DOCS_REQUIREMENTS="--editable .[docs] --constraint requirements.txt" # where we create a virtualenv VIRTUALENV_DIR=venv @@ -185,7 +184,6 @@ while getopts :-: optchar; do help ) cli_help;; clean ) find_python && clean;; dev ) CFG_REQUIREMENTS="$DEV_REQUIREMENTS";; - docs ) CFG_REQUIREMENTS="$DOCS_REQUIREMENTS";; esac;; esac done diff --git a/configure.bat b/configure.bat index 5b9a9d68528..18b37038145 100644 --- a/configure.bat +++ b/configure.bat @@ -28,7 +28,6 @@ @rem # Requirement arguments passed to pip and used by default or with --dev. set "REQUIREMENTS=--editable . 
--constraint requirements.txt" set "DEV_REQUIREMENTS=--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" -set "DOCS_REQUIREMENTS=--editable .[docs] --constraint requirements.txt" @rem # where we create a virtualenv set "VIRTUALENV_DIR=venv" @@ -76,9 +75,6 @@ if not "%1" == "" ( if "%1" EQU "--dev" ( set "CFG_REQUIREMENTS=%DEV_REQUIREMENTS%" ) - if "%1" EQU "--docs" ( - set "CFG_REQUIREMENTS=%DOCS_REQUIREMENTS%" - ) shift goto again ) diff --git a/setup.cfg b/setup.cfg index aaec643fc5c..ad8e0d8f903 100644 --- a/setup.cfg +++ b/setup.cfg @@ -55,8 +55,6 @@ testing = pycodestyle >= 2.8.0 twine ruff - -docs = Sphinx>=5.0.2 sphinx-rtd-theme>=1.0.0 sphinx-reredirects >= 0.1.2 @@ -64,4 +62,3 @@ docs = sphinx-autobuild sphinx-rtd-dark-mode>=1.3.0 sphinx-copybutton - From c615589b54bfac74b6f17b2233b2afe60bf1d0f6 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 17:18:19 +0200 Subject: [PATCH 42/76] Refine checking of docs with doc8 Signed-off-by: Philippe Ombredanne --- Makefile | 2 +- pyproject.toml | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index debc404ef8e..d21a2f95077 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ *.rst + @${ACTIVATE} doc8 docs/ *.rst valid: @echo "-> Run Ruff format" diff --git a/pyproject.toml b/pyproject.toml index 5e16b564dc3..bfb1d353bfa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -119,3 +119,10 @@ max-complexity = 10 # Place paths of files to be ignored by ruff here "tests/*" = ["S101"] "test_*.py" = ["S101"] + + +[tool.doc8] + +ignore-path = ["docs/build", "doc/build", "docs/_build", "doc/_build"] +max-line-length=100 +verbose=0 From 04e0a89a3bbf5359f27b6e3ef9a5026638e63de8 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 Mar 2025 18:41:00 +0200 Subject: [PATCH 43/76] Refine doc handling * remove CI scripts and use Makefile targets instead * ensure doc8 runs quiet * add new docs-check make target to run documentation and links checks * update oudated doc for docs contribution Signed-off-by: Philippe Ombredanne --- .github/workflows/docs-ci.yml | 12 +++++------- Makefile | 10 +++++++--- docs/scripts/doc8_style_check.sh | 5 ----- docs/scripts/sphinx_build_link_check.sh | 5 ----- docs/source/conf.py | 2 +- docs/source/contribute/contrib_doc.rst | 8 ++++---- pyproject.toml | 2 -- 7 files changed, 17 insertions(+), 27 deletions(-) delete mode 100755 docs/scripts/doc8_style_check.sh delete mode 100644 docs/scripts/sphinx_build_link_check.sh diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml index 621de4b2555..10ba5faa6a6 100644 --- a/.github/workflows/docs-ci.yml +++ b/.github/workflows/docs-ci.yml @@ -21,14 +21,12 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install Dependencies - run: pip install -e .[docs] + run: ./configure --dev - - name: Check Sphinx Documentation build minimally - working-directory: ./docs - run: sphinx-build -E -W source build + - name: Check documentation and HTML for errors and dead links + run: make docs-check - - name: Check for documentation style errors - working-directory: ./docs - run: ./scripts/doc8_style_check.sh + - name: Check documentation for style errors + run: make doc8 diff --git a/Makefile b/Makefile index d21a2f95077..413399e5530 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ dev: doc8: @echo "-> Run doc8 validation" - 
@${ACTIVATE} doc8 docs/ *.rst + @${ACTIVATE} doc8 --quiet docs/ *.rst valid: @echo "-> Run Ruff format" @@ -46,6 +46,10 @@ test: docs: rm -rf docs/_build/ - @${ACTIVATE} sphinx-build docs/ docs/_build/ + @${ACTIVATE} sphinx-build docs/source docs/_build/ -.PHONY: conf dev check valid clean test docs +docs-check: + @${ACTIVATE} sphinx-build -E -W -b html docs/source docs/_build/ + @${ACTIVATE} sphinx-build -E -W -b linkcheck docs/source docs/_build/ + +.PHONY: conf dev check valid clean test docs docs-check diff --git a/docs/scripts/doc8_style_check.sh b/docs/scripts/doc8_style_check.sh deleted file mode 100755 index 94163239416..00000000000 --- a/docs/scripts/doc8_style_check.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -# halt script on error -set -e -# Check for Style Code Violations -doc8 --max-line-length 100 source --ignore D000 --quiet \ No newline at end of file diff --git a/docs/scripts/sphinx_build_link_check.sh b/docs/scripts/sphinx_build_link_check.sh deleted file mode 100644 index c5426863197..00000000000 --- a/docs/scripts/sphinx_build_link_check.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -# halt script on error -set -e -# Build locally, and then check links -sphinx-build -E -W -b linkcheck source build \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py index 8aad82949c4..056ca6eafeb 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -18,7 +18,7 @@ # -- Project information ----------------------------------------------------- project = "nexb-skeleton" -copyright = "nexB Inc. and others." +copyright = "nexB Inc., AboutCode and others." author = "AboutCode.org authors and contributors" diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index 5640db264be..041b3583238 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -147,7 +147,7 @@ What is Checked? ^^^^^^^^^^^^^^^^ PyCQA is an Organization for code quality tools (and plugins) for the Python programming language. -Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. +Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. What is checked: @@ -169,11 +169,11 @@ What is checked: Interspinx ---------- -ScanCode toolkit documentation uses `Intersphinx `_ +ScanCode toolkit documentation uses `Intersphinx `_ to link to other Sphinx Documentations, to maintain links to other Aboutcode Projects. To link sections in the same documentation, standart reST labels are used. Refer -`Cross-Referencing `_ for more information. +`Cross-Referencing `_ for more information. For example:: @@ -230,7 +230,7 @@ Style Conventions for the Documentaion 1. Headings - (`Refer `_) + (`Refer `_) Normally, there are no heading levels assigned to certain characters as the structure is determined from the succession of headings. 
However, this convention is used in Python’s Style Guide for documenting which you may follow:
diff --git a/pyproject.toml b/pyproject.toml
index bfb1d353bfa..c9e67720432 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -122,7 +122,5 @@ max-complexity = 10
 
 
 [tool.doc8]
-
 ignore-path = ["docs/build", "doc/build", "docs/_build", "doc/_build"]
 max-line-length=100
-verbose=0

From 8897cc63eb9ef9b06a1fdc77ebfe21289c69961b Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne
Date: Sun, 30 Mar 2025 18:49:01 +0200
Subject: [PATCH 44/76] Add twine check to release publication

Signed-off-by: Philippe Ombredanne
---
 .github/workflows/pypi-release.yml | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml
index a66c9c80763..cf0579a7e5e 100644
--- a/.github/workflows/pypi-release.yml
+++ b/.github/workflows/pypi-release.yml
@@ -30,12 +30,15 @@ jobs:
         with:
           python-version: 3.12
 
-      - name: Install pypa/build
-        run: python -m pip install build --user
+      - name: Install pypa/build and twine
+        run: python -m pip install --user build twine
 
       - name: Build a binary wheel and a source tarball
         run: python -m build --sdist --wheel --outdir dist/
 
+      - name: Validate wheel and sdist for PyPI
+        run: python -m twine check dist/*
+
       - name: Upload built archives
         uses: actions/upload-artifact@v4
         with:

From 3d42985990860188c5fbf9f64f3fd4d14c590a65 Mon Sep 17 00:00:00 2001
From: Philippe Ombredanne
Date: Sun, 30 Mar 2025 19:16:31 +0200
Subject: [PATCH 45/76] Refine doc contribution docs

Signed-off-by: Philippe Ombredanne
---
 docs/source/contribute/contrib_doc.rst | 119 ++++++++-----------------
 1 file changed, 38 insertions(+), 81 deletions(-)

diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst
index 041b3583238..dee9296db80 100644
--- a/docs/source/contribute/contrib_doc.rst
+++ b/docs/source/contribute/contrib_doc.rst
@@ -8,109 +8,59 @@ Contributing to the Documentation
 Setup Local Build
 -----------------
 
-To get started, create or identify a working directory on your local machine.
+To get started, check out and configure the repository for development::
 
-Open that directory and execute the following command in a terminal session::
+    git clone https://github.com/aboutcode-org/.git
 
-    git clone https://github.com/aboutcode-org/skeleton.git
+    cd your-repo
+    ./configure --dev
 
-That will create an ``/skeleton`` directory in your working directory.
-Now you can install the dependencies in a virtualenv::
-
-    cd skeleton
-    ./configure --docs
+(Or use "make dev")
 
 .. note::
 
-    In case of windows, run ``configure --docs`` instead of this.
-
-Now, this will install the following prerequisites:
-
-- Sphinx
-- sphinx_rtd_theme (the format theme used by ReadTheDocs)
-- docs8 (style linter)
+    In case of windows, run ``configure --dev``.
 
-These requirements are already present in setup.cfg and `./configure --docs` installs them.
+This will install and configure all requirements for development, including for docs development.
 
-Now you can build the HTML documents locally::
+Now you can build the HTML documentation locally::
 
     source venv/bin/activate
-    cd docs
-    make html
-
-Assuming that your Sphinx installation was successful, Sphinx should build a local instance of the
-documentation .html files::
-
-    open build/html/index.html
-
-.. note::
-
-    In case this command did not work, for example on Ubuntu 18.04 you may get a message like “Couldn’t
-    get a file descriptor referring to the console”, try:
-
-    ::
-
-        see build/html/index.html
+    make docs
 
-You now have a local build of the AboutCode documents.
+This will build a local instance of the documentation in the ``docs/_build`` directory::
 
-.. _contrib_doc_share_improvements:
+    open docs/_build/index.html
 
-Share Document Improvements
----------------------------
-
-Ensure that you have the latest files::
-
-    git pull
-    git status
-Before commiting changes run Continious Integration Scripts locally to run tests. Refer
-:ref:`doc_ci` for instructions on the same.
+To validate the documentation style and content, use::
 
-Follow standard git procedures to upload your new and modified files. The following commands are
-examples::
-
-    git status
-    git add source/index.rst
-    git add source/how-to-scan.rst
-    git status
-    git commit -m "New how-to document that explains how to scan"
-    git status
-    git push
-    git status
-
-The Scancode-Toolkit webhook with ReadTheDocs should rebuild the documentation after your
-Pull Request is Merged.
+    source venv/bin/activate
+    make doc8
+    make docs-check
 
-Refer the `Pro Git Book `_ available online for Git tutorials
-covering more complex topics on Branching, Merging, Rebasing etc.
 
 .. _doc_ci:
 
 Continuous Integration
 ----------------------
 
-The documentations are checked on every new commit through Travis-CI, so that common errors are
-avoided and documentation standards are enforced. Travis-CI presently checks for these 3 aspects
-of the documentation :
+The documentation is checked on every new commit, so that common errors are avoided and
+documentation standards are enforced. We check for these aspects of the documentation:
 
 1. Successful Builds (By using ``sphinx-build``)
-2. No Broken Links (By Using ``link-check``)
-3. Linting Errors (By Using ``Doc8``)
+2. No Broken Links (By Using ``linkcheck``)
+3. Linting Errors (By Using ``doc8``)
 
-So run these scripts at your local system before creating a Pull Request::
+You must run these scripts locally before creating a pull request::
 
-    cd docs
-    ./scripts/sphinx_build_link_check.sh
-    ./scripts/doc8_style_check.sh
+    make doc8
+    make docs-check
 
-If you don't have permission to run the scripts, run::
-
-    chmod u+x ./scripts/doc8_style_check.sh
 
 .. _doc_style_docs8:
 
 Style Checks Using ``doc8``
 ---------------------------
 
 How To Run Style Tests
 ^^^^^^^^^^^^^^^^^^^^^^
 
 In the project root, run the following commands::
 
-    $ cd docs
-    $ ./scripts/doc8_style_check.sh
+    make doc8
 
 A sample output is::
@@ -143,11 +92,13 @@ A sample output is::
 
 Now fix the errors and run again till there isn't any style error in the documentation.
 
+
 What is Checked?
 ^^^^^^^^^^^^^^^^
 
 PyCQA is an Organization for code quality tools (and plugins) for the Python programming language.
-Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details.
+Doc8 is a sub-project of the same Organization. Refer this
+`README `_ for more details.
 
 What is checked:
@@ -164,16 +115,19 @@ What is checked:
     - no carriage returns (use UNIX newlines) - D004
     - no newline at end of file - D005
 
+
 .. _doc_interspinx:
 
 Interspinx
 ----------
 
-ScanCode toolkit documentation uses `Intersphinx `_
+AboutCode documentation uses
+`Intersphinx `_
 to link to other Sphinx Documentations, to maintain links to other Aboutcode Projects.
 
 To link sections in the same documentation, standart reST labels are used.
Refer -`Cross-Referencing `_ for more information. +`Cross-Referencing `_ +for more information. For example:: @@ -223,6 +177,7 @@ Intersphinx, and you link to that label, it will create a link to the local labe For more information, refer this tutorial named `Using Intersphinx `_. + .. _doc_style_conv: Style Conventions for the Documentaion @@ -303,12 +258,14 @@ Style Conventions for the Documentaion ``rst_snippets/warning_snippets/`` and then included to eliminate redundancy, as these are frequently used in multiple files. + Converting from Markdown ------------------------ -If you want to convert a ``.md`` file to a ``.rst`` file, this `tool `_ -does it pretty well. You'd still have to clean up and check for errors as this contains a lot of -bugs. But this is definitely better than converting everything by yourself. +If you want to convert a ``.md`` file to a ``.rst`` file, this +`tool `_ does it pretty well. +You will still have to clean up and check for errors as this contains a lot of bugs. But this is +definitely better than converting everything by yourself. This will be helpful in converting GitHub wiki's (Markdown Files) to reStructuredtext files for Sphinx/ReadTheDocs hosting. From f428366859a143add93160bd0b1ae685be89fcbb Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Mon, 31 Mar 2025 13:35:36 -0700 Subject: [PATCH 46/76] Update codestyle command * Remove trailing whitespace Signed-off-by: Jono Yang --- docs/source/contribute/contrib_doc.rst | 4 ++-- tests/test_skeleton_codestyle.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index dee9296db80..2a719a52ad6 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -97,7 +97,7 @@ What is Checked? ^^^^^^^^^^^^^^^^ PyCQA is an Organization for code quality tools (and plugins) for the Python programming language. -Doc8 is a sub-project of the same Organization. Refer this +Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. What is checked: @@ -263,7 +263,7 @@ Converting from Markdown ------------------------ If you want to convert a ``.md`` file to a ``.rst`` file, this -`tool `_ does it pretty well. +`tool `_ does it pretty well. You will still have to clean up and check for errors as this contains a lot of bugs. But this is definitely better than converting everything by yourself. diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py index 7135ac0d3c9..6060c0857c9 100644 --- a/tests/test_skeleton_codestyle.py +++ b/tests/test_skeleton_codestyle.py @@ -23,7 +23,7 @@ def test_skeleton_codestyle(self): return commands = [ - ["venv/bin/ruff", "--check"], + ["venv/bin/ruff", "check"], ["venv/bin/ruff", "format", "--check"], ] command = None From f0d0e21d5e6f98645b02ff9a8fee6ee3def1be75 Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Mon, 31 Mar 2025 13:44:30 -0700 Subject: [PATCH 47/76] Update README.rst Signed-off-by: Jono Yang --- README.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.rst b/README.rst index 01d02108416..11a4dfb02fd 100644 --- a/README.rst +++ b/README.rst @@ -44,6 +44,10 @@ More usage instructions can be found in ``docs/skeleton-usage.rst``. 
Release Notes ============= +- 2025-03-31: + + - Use ruff as the main code formatting tool, add ruff rules to pyproject.toml + - 2025-03-29: - Add support for beta macOS-15 From f3a8aa6cee5f645a668750ab7e6bf0cdc774e041 Mon Sep 17 00:00:00 2001 From: Jono Yang Date: Mon, 31 Mar 2025 14:31:37 -0700 Subject: [PATCH 48/76] Update BUILDDIR envvar in docs/Makefile Signed-off-by: Jono Yang --- docs/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/Makefile b/docs/Makefile index 788b0396195..94f686b2085 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -7,7 +7,7 @@ SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SPHINXAUTOBUILD = sphinx-autobuild SOURCEDIR = source -BUILDDIR = build +BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: From 5b0f4d6b4079719caa9ed97efb2ba776bc2bbac1 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 1 Apr 2025 14:42:52 +0200 Subject: [PATCH 49/76] Fix doc line length Signed-off-by: Philippe Ombredanne --- docs/source/contribute/contrib_doc.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index dee9296db80..2a719a52ad6 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -97,7 +97,7 @@ What is Checked? ^^^^^^^^^^^^^^^^ PyCQA is an Organization for code quality tools (and plugins) for the Python programming language. -Doc8 is a sub-project of the same Organization. Refer this +Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. What is checked: @@ -263,7 +263,7 @@ Converting from Markdown ------------------------ If you want to convert a ``.md`` file to a ``.rst`` file, this -`tool `_ does it pretty well. +`tool `_ does it pretty well. You will still have to clean up and check for errors as this contains a lot of bugs. But this is definitely better than converting everything by yourself. 
From e776fef5ad595378752d39425109a4cbd2cb5175 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 1 Apr 2025 14:49:40 +0200 Subject: [PATCH 50/76] Format code Signed-off-by: Philippe Ombredanne --- etc/scripts/update_skeleton.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/etc/scripts/update_skeleton.py b/etc/scripts/update_skeleton.py index 5705fc43afc..374c06f24fe 100644 --- a/etc/scripts/update_skeleton.py +++ b/etc/scripts/update_skeleton.py @@ -15,7 +15,7 @@ import click -ABOUTCODE_PUBLIC_REPO_NAMES=[ +ABOUTCODE_PUBLIC_REPO_NAMES = [ "aboutcode-toolkit", "ahocode", "bitcode", @@ -87,7 +87,9 @@ def update_skeleton_files(repo_names=ABOUTCODE_PUBLIC_REPO_NAMES): os.chdir(work_dir_path / repo_name) # Add skeleton as an origin - subprocess.run(["git", "remote", "add", "skeleton", "git@github.com:aboutcode-org/skeleton.git"]) + subprocess.run( + ["git", "remote", "add", "skeleton", "git@github.com:aboutcode-org/skeleton.git"] + ) # Fetch skeleton files subprocess.run(["git", "fetch", "skeleton"]) From 2a43f4cdc8105473b279eea873db00addaa14551 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 1 Apr 2025 14:59:05 +0200 Subject: [PATCH 51/76] Correct supported runner on Azure See for details: https://learn.microsoft.com/en-us/azure/devops/pipelines/agents/hosted?view=azure-devops&tabs=yaml macOS ARM images do not seem to be supported there Signed-off-by: Philippe Ombredanne --- azure-pipelines.yml | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 80ae45b1827..fb03c090e5c 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -26,14 +26,6 @@ jobs: - template: etc/ci/azure-posix.yml parameters: job_name: macos13_cpython - image_name: macOS-13-xlarge - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] - test_suites: - all: venv/bin/pytest -n 2 -vvs - - - template: etc/ci/azure-posix.yml - parameters: - job_name: macos13_cpython_arm64 image_name: macOS-13 python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: @@ -42,14 +34,6 @@ jobs: - template: etc/ci/azure-posix.yml parameters: job_name: macos14_cpython - image_name: macOS-14-large - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] - test_suites: - all: venv/bin/pytest -n 2 -vvs - - - template: etc/ci/azure-posix.yml - parameters: - job_name: macos14_cpython_arm64 image_name: macOS-14 python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: @@ -63,14 +47,6 @@ jobs: test_suites: all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-posix.yml - parameters: - job_name: macos15_cpython_arm64 - image_name: macOS-15-large - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] - test_suites: - all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-win.yml parameters: job_name: win2019_cpython From 4a15550b7bcea5ec949a5049fe1a501d3bb888ff Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 1 Apr 2025 19:34:14 +0200 Subject: [PATCH 52/76] Add code checks to CI Remove running "make check" as a test Signed-off-by: Philippe Ombredanne --- azure-pipelines.yml | 8 ++++++ tests/test_skeleton_codestyle.py | 44 -------------------------------- 2 files changed, 8 insertions(+), 44 deletions(-) delete mode 100644 tests/test_skeleton_codestyle.py diff --git a/azure-pipelines.yml b/azure-pipelines.yml index fb03c090e5c..ad18b28a8fa 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -7,6 +7,14 @@ jobs: + - template: etc/ci/azure-posix.yml + parameters: + job_name: 
run_code_checks + image_name: ubuntu-24.04 + python_versions: ['3.12'] + test_suites: + all: make check + - template: etc/ci/azure-posix.yml parameters: job_name: ubuntu22_cpython diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py deleted file mode 100644 index 6060c0857c9..00000000000 --- a/tests/test_skeleton_codestyle.py +++ /dev/null @@ -1,44 +0,0 @@ -# -# Copyright (c) nexB Inc. and others. All rights reserved. -# ScanCode is a trademark of nexB Inc. -# SPDX-License-Identifier: Apache-2.0 -# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/aboutcode-org/skeleton for support or download. -# See https://aboutcode.org for more information about nexB OSS projects. -# - -import configparser -import subprocess -import unittest - - -class BaseTests(unittest.TestCase): - def test_skeleton_codestyle(self): - # This test shouldn't run in proliferated repositories. - - # TODO: update with switch to pyproject.toml - setup_cfg = configparser.ConfigParser() - setup_cfg.read("setup.cfg") - if setup_cfg["metadata"]["name"] != "skeleton": - return - - commands = [ - ["venv/bin/ruff", "check"], - ["venv/bin/ruff", "format", "--check"], - ] - command = None - try: - for command in commands: - subprocess.check_output(command) # noqa: S603 - except subprocess.CalledProcessError as e: - print("===========================================================") - print(e.output) - print("===========================================================") - raise Exception( - f"Code style and linting command check failed: {' '.join(command)!r}.\n" - "You can check and format the code using:\n" - " make valid\n", - "OR:\n ruff format\n", - " ruff check --fix\n", - e.output, - ) from e From b2d7512735ca257088d2cac4b55590fc5d7b20b4 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 1 Apr 2025 20:15:18 +0200 Subject: [PATCH 53/76] Revert support for Python 3.13 This is not yet supported everywhere Signed-off-by: Philippe Ombredanne --- azure-pipelines.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index ad18b28a8fa..7a2d4d9b8ce 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -19,7 +19,7 @@ jobs: parameters: job_name: ubuntu22_cpython image_name: ubuntu-22.04 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -27,7 +27,7 @@ jobs: parameters: job_name: ubuntu24_cpython image_name: ubuntu-24.04 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -35,7 +35,7 @@ jobs: parameters: job_name: macos13_cpython image_name: macOS-13 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -43,7 +43,7 @@ jobs: parameters: job_name: macos14_cpython image_name: macOS-14 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -51,7 +51,7 @@ jobs: parameters: job_name: macos15_cpython image_name: macOS-15 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -59,7 +59,7 @@ jobs: parameters: job_name: win2019_cpython image_name: windows-2019 - 
python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv\Scripts\pytest -n 2 -vvs @@ -67,7 +67,7 @@ jobs: parameters: job_name: win2022_cpython image_name: windows-2022 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv\Scripts\pytest -n 2 -vvs @@ -75,6 +75,6 @@ jobs: parameters: job_name: win2025_cpython image_name: windows-2025 - python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] + python_versions: ['3.9', '3.10', '3.11', '3.12'] test_suites: all: venv\Scripts\pytest -n 2 -vvs From 2e3464b79811bcf505d93692da2418e2444150ed Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 16:52:32 +0200 Subject: [PATCH 54/76] Ignore local .env file Signed-off-by: Philippe Ombredanne --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 8a93c94d733..4818bb3ae73 100644 --- a/.gitignore +++ b/.gitignore @@ -73,3 +73,4 @@ tcl # Ignore Jupyter Notebook related temp files .ipynb_checkpoints/ /.ruff_cache/ +.env \ No newline at end of file From d4af79f0da82ab0d16dcf60b363a6a5290cd9403 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 16:54:29 +0200 Subject: [PATCH 55/76] Add correct extras for documentation Signed-off-by: Philippe Ombredanne --- .readthedocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 8ab23688c75..7e399c8a644 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -26,4 +26,4 @@ python: - method: pip path: . extra_requirements: - - docs + - testing From 49bfd37c7273f2118d35585c67e53f0cf7642f43 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 16:58:37 +0200 Subject: [PATCH 56/76] Improve MANIFEST Signed-off-by: Philippe Ombredanne --- MANIFEST.in | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index ef3721e8c74..0f197075dff 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,6 @@ graft src +graft docs +graft etc include *.LICENSE include NOTICE @@ -6,10 +8,18 @@ include *.ABOUT include *.toml include *.yml include *.rst +include *.png include setup.* include configure* include requirements* -include .git* +include .dockerignore +include .gitignore +include .readthedocs.yml +include manage.py +include Dockerfile* +include Makefile +include MANIFEST.in -global-exclude *.py[co] __pycache__ *.*~ +include .VERSION +global-exclude *.py[co] __pycache__ *.*~ From 5bc987a16cb3ae0f6de101a9c9e277df431f4317 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 17:12:58 +0200 Subject: [PATCH 57/76] Improve cleaning on POSIX Signed-off-by: Philippe Ombredanne --- configure | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/configure b/configure index 83fd2035c1c..3dd9a0abf19 100755 --- a/configure +++ b/configure @@ -35,7 +35,7 @@ DEV_REQUIREMENTS="--editable .[testing] --constraint requirements.txt --constrai VIRTUALENV_DIR=venv # Cleanable files and directories to delete with the --clean option -CLEANABLE="build dist venv .cache .eggs" +CLEANABLE="build dist venv .cache .eggs *.egg-info docs/_build/ pip-selfcheck.json" # extra arguments passed to pip PIP_EXTRA_ARGS=" " @@ -167,6 +167,7 @@ clean() { for cln in $CLEANABLE; do rm -rf "${CFG_ROOT_DIR:?}/${cln:?}"; done + find . 
-type f -name '*.py[co]' -delete -o -type d -name __pycache__ -delete set +e exit } From 887779a9bd36650ffc6751f0069b492e80dd2f08 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 17:19:44 +0200 Subject: [PATCH 58/76] Rename dev extra to "dev" Instead of testing ... and update references accordingly Signed-off-by: Philippe Ombredanne --- .readthedocs.yml | 2 +- Makefile | 7 ++++++- configure | 2 +- configure.bat | 2 +- setup.cfg | 2 +- 5 files changed, 10 insertions(+), 5 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 7e399c8a644..683f3a82a3c 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -26,4 +26,4 @@ python: - method: pip path: . extra_requirements: - - testing + - dev diff --git a/Makefile b/Makefile index 413399e5530..3041547b234 100644 --- a/Makefile +++ b/Makefile @@ -13,8 +13,13 @@ PYTHON_EXE?=python3 VENV=venv ACTIVATE?=. ${VENV}/bin/activate; + +conf: + @echo "-> Install dependencies" + ./configure + dev: - @echo "-> Configure the development envt." + @echo "-> Configure and install development dependencies" ./configure --dev doc8: diff --git a/configure b/configure index 3dd9a0abf19..5ef0e063b31 100755 --- a/configure +++ b/configure @@ -29,7 +29,7 @@ CLI_ARGS=$1 # Requirement arguments passed to pip and used by default or with --dev. REQUIREMENTS="--editable . --constraint requirements.txt" -DEV_REQUIREMENTS="--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" +DEV_REQUIREMENTS="--editable .[dev] --constraint requirements.txt --constraint requirements-dev.txt" # where we create a virtualenv VIRTUALENV_DIR=venv diff --git a/configure.bat b/configure.bat index 18b37038145..3e9881fb4f1 100644 --- a/configure.bat +++ b/configure.bat @@ -27,7 +27,7 @@ @rem # Requirement arguments passed to pip and used by default or with --dev. set "REQUIREMENTS=--editable . 
--constraint requirements.txt" -set "DEV_REQUIREMENTS=--editable .[testing] --constraint requirements.txt --constraint requirements-dev.txt" +set "DEV_REQUIREMENTS=--editable .[dev] --constraint requirements.txt --constraint requirements-dev.txt" @rem # where we create a virtualenv set "VIRTUALENV_DIR=venv" diff --git a/setup.cfg b/setup.cfg index ad8e0d8f903..99ba2607620 100644 --- a/setup.cfg +++ b/setup.cfg @@ -48,7 +48,7 @@ where = src [options.extras_require] -testing = +dev = pytest >= 6, != 7.0.0 pytest-xdist >= 2 aboutcode-toolkit >= 7.0.2 From 209231f0d27de0b0cfcc51eeb0fbaf9393d3df1c Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 17:50:44 +0200 Subject: [PATCH 59/76] Add more excludes from tests Signed-off-by: Philippe Ombredanne --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index c9e67720432..bcca1a8ac7e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,6 @@ norecursedirs = [ "dist", "build", "_build", - "dist", "etc", "local", "ci", @@ -34,7 +33,9 @@ norecursedirs = [ "thirdparty", "tmp", "venv", + ".venv", "tests/data", + "*/tests/test_data", ".eggs", "src/*/data", "tests/*/data" From 47bce2da33db6b3ce3bb16831ddca89a65494e23 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 17:54:23 +0200 Subject: [PATCH 60/76] Do not lint django migrations Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index bcca1a8ac7e..d79574ef953 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,6 +66,7 @@ include = [ "docs/**/*.py", "*.py", "." + ] # ignore test data and testfiles: they should never be linted nor formatted exclude = [ @@ -78,9 +79,10 @@ exclude = [ # vulnerablecode, fetchcode "**/tests/*/test_data/**/*", "**/tests/test_data/**/*", +# django migrations + "**/migrations/**/*" ] - [tool.ruff.lint] # Rules: https://docs.astral.sh/ruff/rules/ select = [ From 5025cfb59f0555bf4b40cd75e75ce41188e19e11 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 17:58:07 +0200 Subject: [PATCH 61/76] Add README.rst to list of "license files" Signed-off-by: Philippe Ombredanne --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index 99ba2607620..e5b56dad878 100644 --- a/setup.cfg +++ b/setup.cfg @@ -28,6 +28,7 @@ license_files = AUTHORS.rst CHANGELOG.rst CODE_OF_CONDUCT.rst + README.rst [options] package_dir = From 548a72eac69e4400e4b01f22941d38fe1cb4648d Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 18:59:08 +0200 Subject: [PATCH 62/76] Use Python 3.9 as lowest suupported version Signed-off-by: Philippe Ombredanne --- setup.cfg | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index e5b56dad878..a9c5dbc6049 100644 --- a/setup.cfg +++ b/setup.cfg @@ -31,6 +31,8 @@ license_files = README.rst [options] +python_requires = >=3.9 + package_dir = =src packages = find: @@ -39,7 +41,6 @@ zip_safe = false setup_requires = setuptools_scm[toml] >= 4 -python_requires = >=3.8 install_requires = From 3d256b4ac7976b46c23424e86bb62a38f0e4a095 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 19:01:22 +0200 Subject: [PATCH 63/76] Drop pycodestyle Not used anymore Signed-off-by: Philippe Ombredanne --- setup.cfg | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index a9c5dbc6049..6d0b6488b53 100644 --- a/setup.cfg +++ 
b/setup.cfg @@ -54,7 +54,6 @@ dev = pytest >= 6, != 7.0.0 pytest-xdist >= 2 aboutcode-toolkit >= 7.0.2 - pycodestyle >= 2.8.0 twine ruff Sphinx>=5.0.2 From 645052974bf7e6f45c1e55a24a2acaa0cee24523 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 2 Apr 2025 19:26:17 +0200 Subject: [PATCH 64/76] Bump pytest minimal version Signed-off-by: Philippe Ombredanne --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 6d0b6488b53..69f850ca6a3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -51,7 +51,7 @@ where = src [options.extras_require] dev = - pytest >= 6, != 7.0.0 + pytest >= 7.0.1 pytest-xdist >= 2 aboutcode-toolkit >= 7.0.2 twine From fec416a8f7a1f7758bb1d38a7580a49f85ff361c Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Wed, 18 Jun 2025 00:04:10 +0530 Subject: [PATCH 65/76] Test pyahocorasick inter-long fix and py3.13 Signed-off-by: Ayan Sinha Mahapatra --- azure-pipelines.yml | 28 ++++++++++++++-------------- setup.cfg | 2 +- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index eef4c1fbb39..78ea34c3682 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -15,7 +15,7 @@ jobs: parameters: job_name: core_tests image_name: ubuntu-24.04 - python_versions: ['3.10'] + python_versions: ['3.13'] test_suites: misc_and_scancode: | # cli tests are launched below on all OSes @@ -123,7 +123,7 @@ jobs: job_name: ubuntu24_cpython image_name: ubuntu-24.04 python_architecture: x64 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs tests/scancode/test_cli.py --reruns 2 @@ -132,7 +132,7 @@ jobs: job_name: ubuntu22_cpython image_name: ubuntu-22.04 python_architecture: x64 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs tests/scancode/test_cli.py --reruns 2 @@ -140,7 +140,7 @@ jobs: parameters: job_name: macos14_cpython image_name: macOS-14 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] python_architecture: x64 test_suites: all: venv/bin/pytest -n 2 -vvs tests/scancode/test_cli.py --reruns 2 @@ -149,7 +149,7 @@ jobs: parameters: job_name: macos13_cpython image_name: macOS-13 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs tests/scancode/test_cli.py --reruns 2 @@ -167,7 +167,7 @@ jobs: job_name: win2022_cpython image_name: windows-2022 python_architecture: x64 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pytest -n 2 -vvs tests\scancode\test_cli.py --reruns 2 @@ -184,7 +184,7 @@ jobs: parameters: job_name: win2022_cpython_2 image_name: windows-2022 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] python_architecture: x64 test_suites: all: venv\Scripts\pytest -n 2 -vvs tests\scancode\test_cli.py --reruns 2 @@ -199,7 +199,7 @@ jobs: parameters: job_name: ubuntu22_test_all_supported_click_versions image_name: ubuntu-22.04 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] python_architecture: x64 test_suites: click_versions: | @@ -219,7 +219,7 @@ jobs: parameters: job_name: 
ubuntu22_cpython_latest_from_pip image_name: ubuntu-22.04 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pip install --upgrade-strategy eager --force-reinstall --upgrade -e .[testing] && venv/bin/pytest -n 2 -vvs tests/scancode/test_cli.py @@ -227,7 +227,7 @@ jobs: parameters: job_name: ubuntu24_cpython_latest_from_pip image_name: ubuntu-24.04 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pip install --upgrade-strategy eager --force-reinstall --upgrade -e .[testing] && venv/bin/pytest -n 2 -vvs tests/scancode/test_cli.py @@ -235,7 +235,7 @@ jobs: parameters: job_name: macos14_cpython_latest_from_pip image_name: macos-14 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pip install --upgrade-strategy eager --force-reinstall --upgrade -e .[testing] && venv/bin/pytest -n 2 -vvs tests/scancode/test_cli.py @@ -243,7 +243,7 @@ jobs: parameters: job_name: macos13_cpython_latest_from_pip image_name: macos-13 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pip install --upgrade-strategy eager --force-reinstall --upgrade -e .[testing] && venv/bin/pytest -n 2 -vvs tests/scancode/test_cli.py @@ -251,7 +251,7 @@ jobs: parameters: job_name: win2019_cpython_latest_from_pip image_name: windows-2019 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pip install --upgrade-strategy eager --force-reinstall --upgrade -e .[testing] && venv\Scripts\pytest -n 2 -vvs tests\scancode\test_cli.py @@ -259,7 +259,7 @@ jobs: parameters: job_name: win2022_cpython_latest_from_pip image_name: windows-2022 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pip install --upgrade-strategy eager --force-reinstall --upgrade -e .[testing] && venv\Scripts\pytest -n 2 -vvs tests\scancode\test_cli.py diff --git a/setup.cfg b/setup.cfg index 960a1613d79..8fe3e246f7d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -99,7 +99,7 @@ install_requires = pluggy >= 1.0.0 plugincode >= 32.0.0 publicsuffix2 - pyahocorasick >= 2.0.0 + pyahocorasick @ git+https://github.com/WojciechMula/pyahocorasick.git@bfa33e901444475792b86b8d01dc8039d9083ae4 pygmars >= 0.9.0 pygments pymaven_patch >= 0.2.8 From d69ae0210498987ecdc5a33f68506e71fa2e447b Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Wed, 18 Jun 2025 00:09:13 +0530 Subject: [PATCH 66/76] Use updated dependencies with py3.13 support * cffi==1.17.1 * intbitset==4.0.0 * lxml==5.4.0 * MarkupSafe==3.0.2 * pyahocorasick==2.2.0 * PyYAML==6.0.2 Signed-off-by: Ayan Sinha Mahapatra --- requirements-native.txt | 12 ++++++------ requirements.txt | 12 ++++++------ setup.cfg | 2 +- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/requirements-native.txt b/requirements-native.txt index f1f2b09b80e..aee3901fbb8 100644 --- a/requirements-native.txt +++ b/requirements-native.txt @@ -1,6 +1,6 @@ -cffi==1.16.0 -intbitset==3.1.0 -lxml==5.1.0 -MarkupSafe==2.1.5 -pyahocorasick==2.1.0 -PyYAML==6.0.1 +cffi==1.17.1 +intbitset==4.0.0 +lxml==5.4.0 +MarkupSafe==3.0.2 +pyahocorasick==2.2.0 +PyYAML==6.0.2 diff --git a/requirements.txt b/requirements.txt index 
358fa3ff257..7bfa44f8850 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ binaryornot==0.4.4 beartype==0.17.2 boolean.py==4.0 certifi==2024.2.2 -cffi==1.16.0 +cffi==1.17.1 chardet==5.0.0 charset-normalizer==2.1.0 click==8.2.1;python_version>='3.10' @@ -30,15 +30,15 @@ html5lib==1.1 idna==3.3 importlib-metadata==4.12.0 inflection==0.5.1 -intbitset==3.1.0 +intbitset==4.0.0 isodate==0.6.1 jaraco.functools==4.1.0 javaproperties==0.8.1 Jinja2==3.1.3 jsonstreams==0.6.0 license-expression==30.4.1 -lxml==5.1.0 -MarkupSafe==2.1.5 +lxml==5.4.0 +MarkupSafe==3.0.2 more-itertools==8.13.0 normality==2.3.3 packageurl-python==0.15.0 @@ -53,14 +53,14 @@ pluggy==1.0.0 plugincode==32.0.0 ply==3.11 publicsuffix2==2.20191221 -pyahocorasick==2.1.0 +pyahocorasick==2.2.0 pycparser==2.21 pygmars==0.9.0 Pygments==2.13.0 pymaven-patch==0.3.2 pyparsing==3.0.9 pytz==2022.1 -PyYAML==6.0.1 +PyYAML==6.0.2 rdflib==6.2.0 requests==2.31.0 saneyaml==0.6.0 diff --git a/setup.cfg b/setup.cfg index 8fe3e246f7d..d2a5cd2ed78 100644 --- a/setup.cfg +++ b/setup.cfg @@ -86,7 +86,7 @@ install_requires = jinja2 >= 2.7.0 jsonstreams >= 0.5.0 license_expression >= 30.4.1 - lxml >= 4.9.2 + lxml >= 5.4.0 MarkupSafe >= 2.1.2 packageurl_python >= 0.9.0 packvers >= 21.0.0 From bfdafad6d19c51f172a42b18de1e15c62170f9f5 Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Thu, 19 Jun 2025 20:23:49 +0530 Subject: [PATCH 67/76] Bump pyahocorasick to released v2.2.0 Signed-off-by: Ayan Sinha Mahapatra --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index d2a5cd2ed78..e40a4b7f174 100644 --- a/setup.cfg +++ b/setup.cfg @@ -99,7 +99,7 @@ install_requires = pluggy >= 1.0.0 plugincode >= 32.0.0 publicsuffix2 - pyahocorasick @ git+https://github.com/WojciechMula/pyahocorasick.git@bfa33e901444475792b86b8d01dc8039d9083ae4 + pyahocorasick >= 2.0.0 pygmars >= 0.9.0 pygments pymaven_patch >= 0.2.8 From 0d00fe606913b14301324d0906fdf95830242533 Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Thu, 19 Jun 2025 23:45:23 +0530 Subject: [PATCH 68/76] Regen tests for maven text with `ø` Signed-off-by: Ayan Sinha Mahapatra --- .../m2/org/codehaus/plexus/plexus/1.0.4/plexus-1.0.4.pom.json | 2 +- .../codehaus/plexus/plexus/1.0.4/plexus-1.0.4.pom.package.json | 2 +- .../m2/org/codehaus/plexus/plexus/1.0.5/plexus-1.0.5.pom.json | 2 +- .../codehaus/plexus/plexus/1.0.5/plexus-1.0.5.pom.package.json | 2 +- .../data/m2/plexus/plexus-root/1.0.3/plexus-root-1.0.3.pom.json | 2 +- .../plexus/plexus-root/1.0.3/plexus-root-1.0.3.pom.package.json | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.4/plexus-1.0.4.pom.json b/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.4/plexus-1.0.4.pom.json index 785d40ea381..cfbf574d3bf 100644 --- a/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.4/plexus-1.0.4.pom.json +++ b/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.4/plexus-1.0.4.pom.json @@ -116,7 +116,7 @@ }, { "id": "trygvis", - "name": "Trygve Laugstl", + "name": "Trygve Laugst", "email": "trygvis@codehaus.org", "url": null, "organization": null, diff --git a/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.4/plexus-1.0.4.pom.package.json b/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.4/plexus-1.0.4.pom.package.json index c687d0c4262..48a9910b926 100644 --- a/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.4/plexus-1.0.4.pom.package.json +++ 
b/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.4/plexus-1.0.4.pom.package.json @@ -75,7 +75,7 @@ { "type": "person", "role": "developer", - "name": "Trygve Laugstl", + "name": "Trygve Laugst", "email": "trygvis@codehaus.org", "url": null }, diff --git a/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.5/plexus-1.0.5.pom.json b/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.5/plexus-1.0.5.pom.json index abb2e8cd171..5dc0bd3d2a5 100644 --- a/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.5/plexus-1.0.5.pom.json +++ b/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.5/plexus-1.0.5.pom.json @@ -116,7 +116,7 @@ }, { "id": "trygvis", - "name": "Trygve Laugstl", + "name": "Trygve Laugst", "email": "trygvis@codehaus.org", "url": null, "organization": null, diff --git a/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.5/plexus-1.0.5.pom.package.json b/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.5/plexus-1.0.5.pom.package.json index 3bef77f7890..2db8c834a73 100644 --- a/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.5/plexus-1.0.5.pom.package.json +++ b/tests/packagedcode/data/m2/org/codehaus/plexus/plexus/1.0.5/plexus-1.0.5.pom.package.json @@ -75,7 +75,7 @@ { "type": "person", "role": "developer", - "name": "Trygve Laugstl", + "name": "Trygve Laugst", "email": "trygvis@codehaus.org", "url": null }, diff --git a/tests/packagedcode/data/m2/plexus/plexus-root/1.0.3/plexus-root-1.0.3.pom.json b/tests/packagedcode/data/m2/plexus/plexus-root/1.0.3/plexus-root-1.0.3.pom.json index 598e4b3955c..c43b7c2dadc 100644 --- a/tests/packagedcode/data/m2/plexus/plexus-root/1.0.3/plexus-root-1.0.3.pom.json +++ b/tests/packagedcode/data/m2/plexus/plexus-root/1.0.3/plexus-root-1.0.3.pom.json @@ -116,7 +116,7 @@ }, { "id": "trygvis", - "name": "Trygve Laugstl", + "name": "Trygve Laugst", "email": "trygvis@codehaus.org", "url": null, "organization": null, diff --git a/tests/packagedcode/data/m2/plexus/plexus-root/1.0.3/plexus-root-1.0.3.pom.package.json b/tests/packagedcode/data/m2/plexus/plexus-root/1.0.3/plexus-root-1.0.3.pom.package.json index 20b1518aabc..9e0f18b2929 100644 --- a/tests/packagedcode/data/m2/plexus/plexus-root/1.0.3/plexus-root-1.0.3.pom.package.json +++ b/tests/packagedcode/data/m2/plexus/plexus-root/1.0.3/plexus-root-1.0.3.pom.package.json @@ -75,7 +75,7 @@ { "type": "person", "role": "developer", - "name": "Trygve Laugstl", + "name": "Trygve Laugst", "email": "trygvis@codehaus.org", "url": null }, From 404908357a04850a9378dd254fbd5cff38e96f6c Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Fri, 20 Jun 2025 13:43:49 +0530 Subject: [PATCH 69/76] Update dependency requirements Signed-off-by: Ayan Sinha Mahapatra --- etc/scripts/utils_thirdparty.py | 6 +-- requirements-dev.txt | 2 +- requirements.txt | 86 ++++++++++++++++----------------- 3 files changed, 47 insertions(+), 47 deletions(-) diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index 25b8d5eda72..100106b3688 100755 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -115,14 +115,14 @@ TRACE_ULTRA_DEEP = False # Supported environments -PYTHON_VERSIONS = "38", "39", "310", "311", "312" +PYTHON_VERSIONS = "39", "310", "311", "312", "313" PYTHON_DOT_VERSIONS_BY_VER = { - "38": "3.8", "39": "3.9", "310": "3.10", "311": "3.11", "312": "3.12", + "313": "3.13", } @@ -134,11 +134,11 @@ def get_python_dot_version(version): ABIS_BY_PYTHON_VERSION = { - "38": ["cp38", "cp38m", "abi3"], "39": ["cp39", 
"cp39m", "abi3"], "310": ["cp310", "cp310m", "abi3"], "311": ["cp311", "cp311m", "abi3"], "312": ["cp312", "cp312m", "abi3"], + "313": ["cp313", "cp313m", "abi3"], } PLATFORMS_BY_OS = { diff --git a/requirements-dev.txt b/requirements-dev.txt index 6b74f9fc1cb..15193db62b8 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -28,6 +28,6 @@ secretstorage==3.3.2 tomli==2.0.1 tqdm==4.64.0 twine==4.0.1 -typing_extensions==4.3.0 +typing_extensions==4.14.0 vendorize==0.3.0 diff --git a/requirements.txt b/requirements.txt index 7bfa44f8850..66194586faf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,82 +1,82 @@ -attrs==23.2.0 +attrs==25.3.0 banal==1.0.6 -beautifulsoup4==4.13.3 +beautifulsoup4==4.13.4 binaryornot==0.4.4 -beartype==0.17.2 -boolean.py==4.0 -certifi==2024.2.2 +beartype==0.21.0 +boolean.py==5.0 +certifi==2025.6.15 cffi==1.17.1 -chardet==5.0.0 -charset-normalizer==2.1.0 +chardet==5.2.0 +charset-normalizer==3.4.2 click==8.2.1;python_version>='3.10' click==8.1.7;python_version<'3.10' -colorama==0.4.5 +colorama==0.4.6 commoncode==32.3.0 construct==2.10.68 -container-inspector==31.1.0 -cryptography==42.0.5 +container-inspector==33.0.0 +cryptography==45.0.4 debian-inspector==31.1.0 -dockerfile-parse==1.2.0 +dockerfile-parse==2.0.1 dparse2==0.7.0 extractcode==31.0.0 extractcode-7z==16.5.210531 extractcode-libarchive==3.5.1.210531 -fasteners==0.17.3 -fingerprints==1.0.3 -ftfy==6.1.1 +fasteners==0.19 +fingerprints==1.2.3 +ftfy==6.3.1 future==0.18.2 -gemfileparser2==0.9.0 +gemfileparser2==0.9.4 html5lib==1.1 -idna==3.3 -importlib-metadata==4.12.0 +idna==3.10 +importlib-metadata==8.7.0 inflection==0.5.1 intbitset==4.0.0 -isodate==0.6.1 +isodate==0.7.2 jaraco.functools==4.1.0 -javaproperties==0.8.1 -Jinja2==3.1.3 +javaproperties==0.8.2 +Jinja2==3.1.6 jsonstreams==0.6.0 license-expression==30.4.1 lxml==5.4.0 MarkupSafe==3.0.2 -more-itertools==8.13.0 -normality==2.3.3 -packageurl-python==0.15.0 -packaging==24.1 +more-itertools==10.7.0 +normality==2.6.1 +packageurl-python==0.17.1 +packaging==25.0 packvers==21.5 parameter-expansion-patched==0.3.1 -pdfminer.six==20220524 -pefile==2022.5.30 +pdfminer.six==20250506 +pefile==2024.8.26 pip-requirements-parser==32.0.1 pkginfo2==30.0.0 -pluggy==1.0.0 +pluggy==1.6.0 plugincode==32.0.0 ply==3.11 publicsuffix2==2.20191221 pyahocorasick==2.2.0 -pycparser==2.21 +pycparser==2.22 pygmars==0.9.0 -Pygments==2.13.0 +Pygments==2.19.1 pymaven-patch==0.3.2 -pyparsing==3.0.9 +pyparsing==3.2.3 pytz==2022.1 PyYAML==6.0.2 -rdflib==6.2.0 -requests==2.31.0 -saneyaml==0.6.0 -semantic-version==2.8.5 -six==1.16.0 -soupsieve==2.3.2.post1 +rdflib==7.1.4 +requests==2.32.4 +saneyaml==0.6.1 +semantic-version==2.10.0 +six==1.17.0 +soupsieve==2.7 spdx-tools==0.8.2 text-unidecode==1.3 toml==0.10.2 -typecode==30.0.1 +typecode==30.0.2 typecode-libmagic==5.39.210531 -typing-extensions==4.3.0 -uritools==4.0.2 -urllib3==2.2.1 +typing-extensions==4.14.0 +uritools==5.0.0 +urllib3==2.5.0 urlpy==0.5 -wcwidth==0.2.5 +wcwidth==0.2.13 webencodings==0.5.1 -xmltodict==0.13.0 -zipp==3.8.1 +xmltodict==0.14.2 +zipp==3.23.0 From 97bc9b5a6e752d06124280c66d5965b6be723293 Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Tue, 24 Jun 2025 20:58:48 +0530 Subject: [PATCH 70/76] Rollback dependencies based on detection issues Reference: https://github.com/aboutcode-org/scancode-toolkit/issues/4447 Signed-off-by: Ayan Sinha Mahapatra --- requirements.txt | 4 ++-- tests/packagedcode/test_pypi.py | 6 ------ 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/requirements.txt 
b/requirements.txt index 66194586faf..f041ab5f331 100644 --- a/requirements.txt +++ b/requirements.txt @@ -28,7 +28,7 @@ future==0.18.2 gemfileparser2==0.9.4 html5lib==1.1 idna==3.10 -importlib-metadata==8.7.0 +importlib-metadata==6.2.1 inflection==0.5.1 intbitset==4.0.0 isodate==0.7.2 @@ -56,7 +56,7 @@ publicsuffix2==2.20191221 pyahocorasick==2.2.0 pycparser==2.22 pygmars==0.9.0 -Pygments==2.19.1 +Pygments==2.13.0 pymaven-patch==0.3.2 pyparsing==3.2.3 pytz==2022.1 diff --git a/tests/packagedcode/test_pypi.py b/tests/packagedcode/test_pypi.py index acbd47e255b..3dcfa7d4268 100644 --- a/tests/packagedcode/test_pypi.py +++ b/tests/packagedcode/test_pypi.py @@ -100,12 +100,6 @@ def test_develop_with_parse_metadata(self): expected_loc = self.get_test_loc('pypi/develop/scancode_toolkit.egg-info-expected.json') self.check_packages_data(package, expected_loc, regen=REGEN_TEST_FIXTURES) - def test_develop_with_parse(self): - test_file = self.get_test_loc('pypi/develop/scancode_toolkit.egg-info/PKG-INFO') - package = pypi.PythonEditableInstallationPkgInfoFile.parse(test_file) - expected_loc = self.get_test_loc('pypi/develop/scancode_toolkit.egg-info-expected.json') - self.check_packages_data(package, expected_loc, regen=REGEN_TEST_FIXTURES) - class TestPyPiPkgInfoAndMetadata(PackageTester): test_data_dir = os.path.join(os.path.dirname(__file__), 'data') From a4e6e1170ec4d5cfc7ff45b2ea1a3c74dea2ac4f Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Tue, 24 Jun 2025 21:08:26 +0530 Subject: [PATCH 71/76] Update deprecated windows action runner 2019->2025 Signed-off-by: Ayan Sinha Mahapatra --- .github/workflows/scancode-release.yml | 4 ++-- azure-pipelines.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/scancode-release.yml b/.github/workflows/scancode-release.yml index 5b3e5307023..a408162a8b5 100644 --- a/.github/workflows/scancode-release.yml +++ b/.github/workflows/scancode-release.yml @@ -285,7 +285,7 @@ jobs: strategy: fail-fast: true matrix: - os: [windows-2019, windows-2022] + os: [windows-2025, windows-2022] pyver: ["3.9", "3.10", "3.11", "3.12"] steps: @@ -412,7 +412,7 @@ jobs: strategy: fail-fast: true matrix: - os: [windows-2019, windows-2022] + os: [windows-2025, windows-2022] pyver: ["3.9", "3.10", "3.11", "3.12"] steps: diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 78ea34c3682..ca9d469ac14 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -156,7 +156,7 @@ jobs: - template: etc/ci/azure-win.yml parameters: job_name: win2019_cpython_1 - image_name: windows-2019 + image_name: windows-2025 python_versions: ['3.9'] python_architecture: x64 test_suites: @@ -250,7 +250,7 @@ jobs: - template: etc/ci/azure-win.yml parameters: job_name: win2019_cpython_latest_from_pip - image_name: windows-2019 + image_name: windows-2025 python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pip install --upgrade-strategy eager --force-reinstall --upgrade -e .[testing] && venv\Scripts\pytest -n 2 -vvs tests\scancode\test_cli.py From af87cfab2d06fb034a90412a87e0d4e660e214ee Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Wed, 25 Jun 2025 00:40:47 +0530 Subject: [PATCH 72/76] Update CI runners and scripts Signed-off-by: Ayan Sinha Mahapatra --- .github/workflows/docs-ci.yml | 2 +- .github/workflows/pypi-release.yml | 4 +- azure-pipelines.yml | 24 ++++-------- configure | 2 +- configure.bat | 4 +- etc/ci/azure-container-deb.yml | 2 +- etc/ci/azure-container-rpm.yml | 2 +- 
etc/scripts/utils_thirdparty.py | 61 +++++++++++++++--------------- 8 files changed, 46 insertions(+), 55 deletions(-) diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml index 10ba5faa6a6..8d8aa55145a 100644 --- a/.github/workflows/docs-ci.yml +++ b/.github/workflows/docs-ci.yml @@ -9,7 +9,7 @@ jobs: strategy: max-parallel: 4 matrix: - python-version: [3.12] + python-version: [3.13] steps: - name: Checkout code diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index cf0579a7e5e..7f813614467 100644 --- a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -31,10 +31,10 @@ jobs: python-version: 3.12 - name: Install pypa/build and twine - run: python -m pip install --user build twine + run: python -m pip install --user --upgrade build twine pkginfo - name: Build a binary wheel and a source tarball - run: python -m build --sdist --wheel --outdir dist/ + run: python -m build --sdist --outdir dist/ - name: Validate wheel and sdis for Pypi run: python -m twine check dist/* diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 7a2d4d9b8ce..4d347b70371 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -11,7 +11,7 @@ jobs: parameters: job_name: run_code_checks image_name: ubuntu-24.04 - python_versions: ['3.12'] + python_versions: ['3.13'] test_suites: all: make check @@ -19,7 +19,7 @@ jobs: parameters: job_name: ubuntu22_cpython image_name: ubuntu-22.04 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -27,7 +27,7 @@ jobs: parameters: job_name: ubuntu24_cpython image_name: ubuntu-24.04 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -35,7 +35,7 @@ jobs: parameters: job_name: macos13_cpython image_name: macOS-13 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -43,7 +43,7 @@ jobs: parameters: job_name: macos14_cpython image_name: macOS-14 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs @@ -51,23 +51,15 @@ jobs: parameters: job_name: macos15_cpython image_name: macOS-15 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-win.yml - parameters: - job_name: win2019_cpython - image_name: windows-2019 - python_versions: ['3.9', '3.10', '3.11', '3.12'] - test_suites: - all: venv\Scripts\pytest -n 2 -vvs - - template: etc/ci/azure-win.yml parameters: job_name: win2022_cpython image_name: windows-2022 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pytest -n 2 -vvs @@ -75,6 +67,6 @@ jobs: parameters: job_name: win2025_cpython image_name: windows-2025 - python_versions: ['3.9', '3.10', '3.11', '3.12'] + python_versions: ['3.9', '3.10', '3.11', '3.12', '3.13'] test_suites: all: venv\Scripts\pytest -n 2 -vvs diff --git a/configure b/configure index 5ef0e063b31..6d317d4c794 100755 --- a/configure +++ b/configure @@ -110,7 +110,7 @@ create_virtualenv() { fi $PYTHON_EXECUTABLE "$VIRTUALENV_PYZ" \ - --wheel embed --pip embed --setuptools embed \ + --pip embed --setuptools 
embed \ --seeder pip \ --never-download \ --no-periodic-update \ diff --git a/configure.bat b/configure.bat index 3e9881fb4f1..15ab7015486 100644 --- a/configure.bat +++ b/configure.bat @@ -110,7 +110,7 @@ if not exist "%CFG_BIN_DIR%\python.exe" ( if exist "%CFG_ROOT_DIR%\etc\thirdparty\virtualenv.pyz" ( %PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%\etc\thirdparty\virtualenv.pyz" ^ - --wheel embed --pip embed --setuptools embed ^ + --pip embed --setuptools embed ^ --seeder pip ^ --never-download ^ --no-periodic-update ^ @@ -126,7 +126,7 @@ if not exist "%CFG_BIN_DIR%\python.exe" ( ) ) %PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%\%VIRTUALENV_DIR%\virtualenv.pyz" ^ - --wheel embed --pip embed --setuptools embed ^ + --pip embed --setuptools embed ^ --seeder pip ^ --never-download ^ --no-periodic-update ^ diff --git a/etc/ci/azure-container-deb.yml b/etc/ci/azure-container-deb.yml index 85b611d3557..d80e8dfb819 100644 --- a/etc/ci/azure-container-deb.yml +++ b/etc/ci/azure-container-deb.yml @@ -21,7 +21,7 @@ jobs: - job: ${{ parameters.job_name }} pool: - vmImage: 'ubuntu-16.04' + vmImage: 'ubuntu-22.04' container: image: ${{ parameters.container }} diff --git a/etc/ci/azure-container-rpm.yml b/etc/ci/azure-container-rpm.yml index 1e6657d0f2b..a64138c9b85 100644 --- a/etc/ci/azure-container-rpm.yml +++ b/etc/ci/azure-container-rpm.yml @@ -1,6 +1,6 @@ parameters: job_name: '' - image_name: 'ubuntu-16.04' + image_name: 'ubuntu-22.04' container: '' python_path: '' python_version: '' diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index aafc1d69f06..6f812f090e4 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. @@ -24,13 +25,14 @@ import packageurl import requests import saneyaml -import utils_pip_compatibility_tags from commoncode import fileutils from commoncode.hash import multi_checksums from commoncode.text import python_safe_name from packvers import tags as packaging_tags from packvers import version as packaging_version +import utils_pip_compatibility_tags + """ Utilities to manage Python thirparty libraries source, binaries and metadata in local directories and remote repositories. @@ -91,8 +93,7 @@ - parse requirement file - create a TODO queue of requirements to process -- done: create an empty map of processed binary requirements as - {package name: (list of versions/tags} +- done: create an empty map of processed binary requirements as {package name: (list of versions/tags} - while we have package reqs in TODO queue, process one requirement: @@ -114,13 +115,14 @@ TRACE_ULTRA_DEEP = False # Supported environments -PYTHON_VERSIONS = "37", "38", "39", "310" +PYTHON_VERSIONS = "39", "310", "311", "312", "313" PYTHON_DOT_VERSIONS_BY_VER = { - "37": "3.7", - "38": "3.8", "39": "3.9", "310": "3.10", + "311": "3.11", + "312": "3.12", + "313": "3.13", } @@ -132,10 +134,11 @@ def get_python_dot_version(version): ABIS_BY_PYTHON_VERSION = { - "37": ["cp37", "cp37m", "abi3"], - "38": ["cp38", "cp38m", "abi3"], "39": ["cp39", "cp39m", "abi3"], "310": ["cp310", "cp310m", "abi3"], + "311": ["cp311", "cp311m", "abi3"], + "312": ["cp312", "cp312m", "abi3"], + "313": ["cp313", "cp313m", "abi3"], } PLATFORMS_BY_OS = { @@ -553,8 +556,7 @@ def download(self, dest_dir=THIRDPARTY_DIR): Download this distribution into `dest_dir` directory. Return the fetched filename. 
""" - if not self.filename: - raise ValueError(f"self.filename has no value but is required: {self.filename!r}") + assert self.filename if TRACE_DEEP: print( f"Fetching distribution of {self.name}=={self.version}:", @@ -822,9 +824,9 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): """ urls = LinksRepository.from_url(use_cached_index=use_cached_index).links errors = [] - extra_lic_names = [lic.get("file") for lic in self.extra_data.get("licenses", {})] + extra_lic_names = [l.get("file") for l in self.extra_data.get("licenses", {})] extra_lic_names += [self.extra_data.get("license_file")] - extra_lic_names = [eln for eln in extra_lic_names if eln] + extra_lic_names = [ln for ln in extra_lic_names if ln] lic_names = [f"{key}.LICENSE" for key in self.get_license_keys()] for filename in lic_names + extra_lic_names: floc = os.path.join(dest_dir, filename) @@ -844,7 +846,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from remote: {lic_url}") - except Exception: + except: try: # try licensedb second lic_url = f"{LICENSEDB_API_URL}/{filename}" @@ -857,9 +859,8 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR, use_cached_index=False): if TRACE: print(f"Fetched license from licensedb: {lic_url}") - except Exception: - msg = f"No text for license {filename} in expression " - f"{self.license_expression!r} from {self}" + except: + msg = f'No text for license {filename} in expression "{self.license_expression}" from {self}' print(msg) errors.append(msg) @@ -999,7 +1000,7 @@ def get_license_link_for_filename(filename, urls): exception if no link is found or if there are more than one link for that file name. """ - path_or_url = [url for url in urls if url.endswith(f"/{filename}")] + path_or_url = [l for l in urls if l.endswith(f"/{filename}")] if not path_or_url: raise Exception(f"Missing link to file: {filename}") if not len(path_or_url) == 1: @@ -1288,7 +1289,7 @@ def is_pure(self): def is_pure_wheel(filename): try: return Wheel.from_filename(filename).is_pure() - except Exception: + except: return False @@ -1484,7 +1485,8 @@ def get_distributions(self): """ if self.sdist: yield self.sdist - yield from self.wheels + for wheel in self.wheels: + yield wheel def get_url_for_filename(self, filename): """ @@ -1613,8 +1615,7 @@ class PypiSimpleRepository: type=dict, default=attr.Factory(lambda: defaultdict(dict)), metadata=dict( - help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} " - "available in this repo" + help="Mapping of {name: {version: PypiPackage, version: PypiPackage, etc} available in this repo" ), ) @@ -1628,8 +1629,7 @@ class PypiSimpleRepository: type=bool, default=False, metadata=dict( - help="If True, use any existing on-disk cached PyPI index files. " - "Otherwise, fetch and cache." + help="If True, use any existing on-disk cached PyPI index files. Otherwise, fetch and cache." ), ) @@ -1638,8 +1638,7 @@ def _get_package_versions_map(self, name): Return a mapping of all available PypiPackage version for this package name. The mapping may be empty. 
It is ordered by version from oldest to newest """ - if not name: - raise ValueError(f"name is required: {name!r}") + assert name normalized_name = NameVer.normalize_name(name) versions = self.packages[normalized_name] if not versions and normalized_name not in self.fetched_package_normalized_names: @@ -1694,7 +1693,7 @@ def fetch_links(self, normalized_name): ) links = collect_urls(text) # TODO: keep sha256 - links = [link.partition("#sha256=") for link in links] + links = [l.partition("#sha256=") for l in links] links = [url for url, _, _sha256 in links] return links @@ -1915,7 +1914,7 @@ def get_remote_file_content( # several redirects and that we can ignore content there. A HEAD request may # not get us this last header print(f" DOWNLOADING: {url}") - with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: # noqa: S113 + with requests.get(url, allow_redirects=True, stream=True, headers=headers) as response: status = response.status_code if status != requests.codes.ok: # NOQA if status == 429 and _delay < 20: @@ -2134,7 +2133,7 @@ def call(args, verbose=TRACE): """ if TRACE_DEEP: print("Calling:", " ".join(args)) - with subprocess.Popen( # noqa: S603 + with subprocess.Popen( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: stdouts = [] @@ -2199,7 +2198,7 @@ def download_wheels_with_pip( cli_args.extend(["--requirement", req_file]) if TRACE: - print("Downloading wheels using command:", " ".join(cli_args)) + print(f"Downloading wheels using command:", " ".join(cli_args)) existing = set(os.listdir(dest_dir)) error = False @@ -2232,7 +2231,7 @@ def download_wheels_with_pip( def check_about(dest_dir=THIRDPARTY_DIR): try: - subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) # noqa: S603 + subprocess.check_output(f"venv/bin/about check {dest_dir}".split()) except subprocess.CalledProcessError as cpe: print() print("Invalid ABOUT files:") @@ -2283,5 +2282,5 @@ def get_license_expression(declared_licenses): return get_only_expression_from_extracted_license(declared_licenses) except ImportError: # Scancode is not installed, clean and join all the licenses - lics = [python_safe_name(lic).lower() for lic in declared_licenses] + lics = [python_safe_name(l).lower() for l in declared_licenses] return " AND ".join(lics).lower() From 600759df8342b7b57e4e1a4f0d8d606f89a6cf86 Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Wed, 25 Jun 2025 01:52:29 +0530 Subject: [PATCH 73/76] Update scancode release script for py3.13 Signed-off-by: Ayan Sinha Mahapatra --- .github/workflows/scancode-release.yml | 46 ++-- AUTHORS.rst | 6 +- CHANGELOG.rst | 44 ++-- CONTRIBUTING.rst | 30 +-- INSTALL.rst | 7 +- Makefile | 3 +- README.rst | 203 +++++++++++++++++- ROADMAP-ABOUTCODE.rst | 12 +- ROADMAP.rst | 15 +- configure | 4 +- configure.bat | 4 +- docs/source/getting-started/install.rst | 2 +- .../scancode-create-release-app-linux.sh | 2 +- .../scancode-create-release-app-macos.sh | 2 +- .../scancode-create-release-app-sources.sh | 2 +- .../scancode-create-release-app-windows.sh | 2 +- etc/scripts/fetch_thirdparty.py | 2 - etc/scripts/utils_requirements.py | 4 +- requirements-dev.txt | 8 +- requirements.txt | 8 +- setup-mini.cfg | 1 + setup.cfg | 1 + 22 files changed, 316 insertions(+), 92 deletions(-) diff --git a/.github/workflows/scancode-release.yml b/.github/workflows/scancode-release.yml index a408162a8b5..9a03090463e 100644 --- a/.github/workflows/scancode-release.yml +++ b/.github/workflows/scancode-release.yml @@ -5,7 
+5,7 @@ name: Create ScanCode release archives, then test and publish to GH and PyPI # Summary of the steps: # - Build wheel and sdist for the "main" scancode, then build these for the "mini" flavor # - test each wheel and sdist on every possible OS x Python version combinations - # - Build release app archives, one for each of linux, windows, macos on Python 3.9 to 3.12 + # - Build release app archives, one for each of linux, windows, macos on Python 3.9 to 3.13 # - test each on its target OS and Python version # - Create gh-release and upload app archives to release # - Upload all wheels and sdist to PyPI @@ -34,7 +34,7 @@ jobs: strategy: fail-fast: true matrix: - pyver: ["3.9", "3.10", "3.11", "3.12"] + pyver: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 @@ -74,7 +74,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" - name: Install requirements then build main and mini sdist run: etc/release/scancode-create-pypi-sdist.sh @@ -100,7 +100,7 @@ jobs: strategy: fail-fast: true matrix: - pyver: ["3.9", "3.10", "3.11", "3.12"] + pyver: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 @@ -135,7 +135,7 @@ jobs: strategy: fail-fast: true matrix: - pyver: ["3.9", "3.10", "3.11", "3.12"] + pyver: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 @@ -170,7 +170,7 @@ jobs: strategy: fail-fast: true matrix: - pyver: ["3.9", "3.10", "3.11", "3.12"] + pyver: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 @@ -211,7 +211,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" - name: Build source archive with deps run: etc/release/scancode-create-release-app-sources.sh @@ -240,7 +240,7 @@ jobs: fail-fast: true matrix: os: [ubuntu-24.04, ubuntu-24.04, macos-13, macos-14] - pyver: ["3.9", "3.10", "3.11", "3.12"] + pyver: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 @@ -286,7 +286,7 @@ jobs: fail-fast: true matrix: os: [windows-2025, windows-2022] - pyver: ["3.9", "3.10", "3.11", "3.12"] + pyver: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 @@ -331,7 +331,7 @@ jobs: fail-fast: true matrix: os: [ubuntu-24.04, ubuntu-24.04] - pyver: ["3.9", "3.10", "3.11", "3.12"] + pyver: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 @@ -372,7 +372,7 @@ jobs: fail-fast: true matrix: os: [macos-13, macos-14] - pyver: ["3.9", "3.10", "3.11", "3.12"] + pyver: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 @@ -413,7 +413,7 @@ jobs: fail-fast: true matrix: os: [windows-2025, windows-2022] - pyver: ["3.9", "3.10", "3.11", "3.12"] + pyver: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 @@ -485,6 +485,12 @@ jobs: name: macos_app_py_3.12 path: dist + - name: Download a single artifact macos_app for python 3.13 + uses: actions/download-artifact@v4 + with: + name: macos_app_py_3.13 + path: dist + - name: Download a single artifact linux_app for python 3.9 uses: actions/download-artifact@v4 with: @@ -509,6 +515,12 @@ jobs: name: linux_app_py_3.12 path: dist + - name: Download a single artifact linux_app for python 3.13 + uses: actions/download-artifact@v4 + with: + name: linux_app_py_3.13 + path: dist + - name: Download a single artifact windows_app for python 3.9 uses: actions/download-artifact@v4 with: @@ -533,6 +545,12 @@ jobs: name: windows_app_py_3.12 
path: dist + - name: Download a single artifact windows_app for python 3.13 + uses: actions/download-artifact@v4 + with: + name: windows_app_py_3.13 + path: dist + - name: Mock GH release run: | ls -al dist @@ -559,13 +577,13 @@ jobs: strategy: fail-fast: true matrix: - dist_names: ["wheels-3.9", "wheels-3.10", "wheels-3.11", "wheels-3.12", sdists] + dist_names: ["wheels-3.9", "wheels-3.10", "wheels-3.11", "wheels-3.12", "wheels-3.13", sdists] steps: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: 3.13 - name: Download a single artifact uses: actions/download-artifact@v4 diff --git a/AUTHORS.rst b/AUTHORS.rst index 43e7ca97b0e..3ef1709b8af 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -48,7 +48,7 @@ The following organizations or individuals have contributed to ScanCode: - Mankaran Singh @MankaranSingh - Marc-Etienne Vargenau @vargenau - Martin Petkov @MartinPetkov -- Maximilian Huber @maxhbr +- Maximilian Huber @maxhbr - Michael Herzog @mjherzog - Michael Rupprecht @michaelrup - Mike Rombout @mrombout @@ -89,7 +89,7 @@ The following organizations or individuals have contributed to ScanCode: - Thomas Druez @tdruez - Thomas Steenbergen @tsteenbe - Thorsten Harter @ThorstenHarter -- Till Jaeger @LeChasseur +- Till Jaeger @LeChasseur - Tobias Furuholm @furuholm - Tushar Goel @TG1999 - Tushar Mittal @techytushar @@ -97,7 +97,7 @@ The following organizations or individuals have contributed to ScanCode: - Van Lindberg @VanL - Vibhu Agarwal @Vibhu-Agarwal - Viktor Tiulpin @tiulpin -- Vinay Kumar Singh @Vinay0001 +- Vinay Kumar Singh @Vinay0001 - Virag Umathe @viragumathe5 - Yash D. Saraf @yashdsaraf - Yash Nisar @yash-nisar diff --git a/CHANGELOG.rst b/CHANGELOG.rst index c40849180b3..bb2b78950f8 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1791,14 +1791,16 @@ v3.2.0rc1 (2020-09-08) - Add new license rules for "bad" licenses #1899 @viragumathe5 - Improve copyright detection @WizardOhio24 - Improve tests @hanif-ali - - Add and improve support for package manifest for #2080 Go, Ruby gem gemspec, Cocoapod podspec, opam, Python PKG-INFO - Rohit Potter @rpotter12 - - Add and improve support for package lockfiles for Pipfile.lock, requirements.tx, Cargo.lock - Rohit Potter @rpotter12 + - Add and improve support for package manifest for #2080 Go, Ruby gem gemspec, + Cocoapod podspec, opam, Python PKG-INFO - Rohit Potter @rpotter12 + - Add and improve support for package lockfiles for Pipfile.lock, + requirements.tx, Cargo.lock - Rohit Potter @rpotter12 - Add new --max-depth option to limit sca depth - Hanif Ali @hanif-ali - Add initial Debian packaging - @aj4ayushjain - Add new documentation web site and documentation generation system - The "headers" attribute in JSON outputs now contains a 'duration' field. #1942 - Rework packaging and third-party support handling: Create new scripts and - process to provision, install and manage third-party dependencies - Abhishek Kumar @Abhishek-Dev09 + process to provision, install and manage third-party dependencies @Abhishek-Dev09 - Improve CSV output and fix manifest path bug #1718 Aditya Viki8 - Add new documentation, as well as tools and process. 
Ayan Sinha Mahapatra - Add new license detection rules - Ayan Sinha Mahapatra @@ -1826,9 +1828,12 @@ v3.2.0rc1 (2020-09-08) - Improve Documentation - Michael Herzog - Add new checksum type for sha256 - Nitish @nitish81299 - Improve documentation - Philippe Ombredanne - - Add new license detection rules and improve detection #1777 #1720 #1734 #1486 #1757 #1749 #1283 #1795 #2214 #1978 - - Add new license detection rules and improve detection #2187 #2188 #2189 #1904 #2207 #1905 #419 #2190 #1910 #1911 - - Add new license detection rules and improve detection #1841 #1913 #1795 #2124 #2145 #1800 #2200 #2206 #2186 + - Add new license detection rules and improve detection + #1777 #1720 #1734 #1486 #1757 #1749 #1283 #1795 #2214 #1978 + - Add new license detection rules and improve detection + #2187 #2188 #2189 #1904 #2207 #1905 #419 #2190 #1910 #1911 + - Add new license detection rules and improve detection + #1841 #1913 #1795 #2124 #2145 #1800 #2200 #2206 #2186 - Allow to call "run_scan" as a function #1780 - Update license data to SPDX 3.7 #1789 - Collect matched license text correctly including with Turkish diacritics #1872 @@ -1869,7 +1874,8 @@ Major new feature: New features: - - Improve package manifest support for #1643 RPMs, #1628 Cran, Python #1600, Maven #1649 Chef #1600 @licodeli @JonoYang + - Improve package manifest support for #1643 RPMs, #1628 Cran, Python #1600, + Maven #1649 Chef #1600 @licodeli @JonoYang - Add plugin to collect ELF and LKM clues #1685 @licodeli - Add runtime support for FreeBSD #1695 @knobix - Add support to extract lzip archives #245 #989 @@ -1946,15 +1952,18 @@ Other: v2.9.9 (2018-12-12) ------------------- -This is the penultimate pre-release of what will come up for 3.0 with some API change for packages. +This is the penultimate pre-release of what will come up for 3.0 +with some API change for packages. API changes: - - Streamline Package models #1226 #1324 and #1327. In particular the way checksums are managed has changed + - Streamline Package models #1226 #1324 and #1327. + In particular the way checksums are managed has changed Other changes: - Copyright detection improvements #1305 by @JonoYang - Correct CC-BY V3.0 and V4.0 license texts by correct one by @sschuberth #1320 - - Add new and improved licenses and license detection rules including the latest SPDX list 3.4 and #1322 #1324 + - Add new and improved licenses and license detection rules including + the latest SPDX list 3.4 and #1322 #1324 - Rename proprietary license key to proprietary-license - Rename commercial license key to commercial-license - Improve npm package.json handling #1308 and #1314 by @majurg @@ -1967,14 +1976,16 @@ This is a close-to-final pre-release of what will come up for 3.0 with some API API changes: - In Package models, rename normalized_license to license_expression and - add license detection on the declared_license to populate the license_expression #1092 #1268 #1278 + add license detection on the declared_license to populate + the license_expression #1092 #1268 #1278 Outputs: - Do not open output files until the command lines are validated as correct #1266 - The html-app output is marked as DEPRECATED. Use the AboutCode manager app instead # - Ensure HTML outputs can deal with non-ASCII file paths without crashsing #1292 - JSON outputs now use a "headers" attributes for top-level scan headers # - - SPDX output is now possible even without "--info" SHA1 checksums. 
This creates a partially valid document + - SPDX output is now possible even without "--info" SHA1 checksums. + This creates a partially valid document - LicenseRef for non-SPDX ScanCode licenses are named as "LicenseRef-scancode-" # - license_expression are correctly included in the CSV output #1238 - do not crash with multiple outputs #1199 @@ -1986,7 +1997,8 @@ License detection: - An optional "relevance" attribute has been added to the license YAML attributes. This is to store the relevance to e matched .LICENSE text when used as a rule. - - Licenses have been synchronized with the latest v3.3 SPDX license list and the latest DejaCode licenses #1242 + - Licenses have been synchronized with the latest v3.3 SPDX license list + and the latest DejaCode licenses #1242 - Duplicated SPDX keys have been fixed #1264 - Add new and improved license detection rules #1313 #1306 #1302 #1298 #1293 #1291 #1289 #1270 #1269 #1192 #1186 #1170 #1164 #1128 #1124 #1112 #1110 #1108 @@ -1996,7 +2008,8 @@ Packages: - Add support for haxe "haxelib" package manifests #1227 - Remove code_type attribute from Package models - In Package models, rename normalized_license to license_expression and - add license detection on the declared_license to populate the license_expression #1092 #1268 #1278 + add license detection on the declared_license to populate the + license_expression #1092 #1268 #1278 - Improve data returned for PHP Composer packages - Add PackageURL to top level output for packages - Report nuget as proper packages #1088 @@ -2122,7 +2135,8 @@ API change: - The returned copyright data structure has changed and is now simpler and less nested Licenses: - - Add new license and rules and improve licene rules #1186 #1108 #1124 #1171 #1173 #1039 #1098 #1111 + - Add new license and rules and improve licene rules + #1186 #1108 #1124 #1171 #1173 #1039 #1098 #1111 - Add new license clarity scoring #1180 This is also for use in the ClearlyDefined project - Add is_exception to license scan results #1159 diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 1ab35f0fb78..c16ecdeed9e 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -77,7 +77,7 @@ Documentation improvements ========================== Documentation can come in the form of new documentation pages/sections, tutorials/how-to documents, -any other general upgrades, etc. Even a minor typo fix is welcomed. +any other general upgrades, etc. Even a minor typo fix is welcomed. If something is missing in the documentation or if you found some part confusing, please file an issue with your suggestions for improvement. Use the “Documentation Improvement” @@ -104,25 +104,25 @@ To set up ScanCode for local development: git clone https://github.com/your_name_here/scancode-toolkit.git - See also GitHub docs for `SSH `_ + See also GitHub docs for `SSH `_ or `HTTPS `_ - + If you want to change the connection type, do following - + SSH to HTTPS :: - + git remote set-url https://github.com/your_name_here/scancode-toolkit.git - + HTTPS to SSH :: - + git remote set-url git@github.com:your_name_here/scancode-toolkit.git - + Generally is named origin, but in the case of multiple fetch/pull source of repository you can choose whatever name you want - + 3. Create a branch for local development:: git checkout -b name-of-your-bugfix-or-feature - + 4. Check out the Contributing to Code Development `documentation `_, as it contains more in-depth guide for contributing code and documentation. 5. 
To configure your local environment for development, locate to the main @@ -130,7 +130,7 @@ To set up ScanCode for local development: The configure script creates an isolated Python `virtual environment` in your checkout directory, the Python `pip` tool, and installs the third-party libraries (from the `thirdparty/ directory`), setup the paths, etc. - See https://virtualenv.pypa.io/en/latest/ for more details. + See https://virtualenv.pypa.io/en/latest/ for more details. Run this command to configure ScanCode:: @@ -160,15 +160,17 @@ To set up ScanCode for local development: 6. Now you can make your code changes in your local clone. Please create new unit tests for your code. We love tests! -7. An update to the ``CHANGELOG`` is required if any important changes are made that needs to be communicated such as: +7. An update to the ``CHANGELOG`` is required if any important changes are made + that needs to be communicated such as: * Changes in the API. * Addition or deletion of CLI options. * Addition of any new feature or any other miscellaneous changes to the program. - -8. If there is a code change, a significant document, or any other changes, you must update the ``AUTHORS`` to include your own name. + +8. If there is a code change, a significant document, or any other changes, + you must update the ``AUTHORS`` to include your own name. 9. When you are done with your changes, run all the tests. Use this command:: diff --git a/INSTALL.rst b/INSTALL.rst index 9e34a869e20..1aa9eb9ba8c 100644 --- a/INSTALL.rst +++ b/INSTALL.rst @@ -16,9 +16,10 @@ Prerequisites ------------- Before installing ScanCode make sure you have installed these prerequisites. -The main one is to have Python installed version 3.9, 3.10, 3.11 or 3.12 +The main one is to have Python installed version 3.9, 3.10, 3.11, 3.12 or 3.13. -- For Linux(Ubuntu): ``sudo apt install python3.9-dev bzip2 xz-utils zlib1g libxml2-dev libxslt1-dev`` +- For Linux(Ubuntu): + ``sudo apt install python3.9-dev bzip2 xz-utils zlib1g libxml2-dev libxslt1-dev`` - For MacOS: Install Python 3.x from https://www.python.org/ - For Windows: Install Python 3.x from https://www.python.org/ using the 64 bits amd64 variant - For FreeBSD: (this needs to be documented) @@ -34,7 +35,7 @@ Use a release download and install as an application https://github.com/nexB/scancode-toolkit/releases/ - Open a terminal window (or command prompt on Windows) and then `cd` to the - extracted ScanCode directory. + extracted ScanCode directory. 
- Run this command to self-configure and display the initial command line help: diff --git a/Makefile b/Makefile index 3041547b234..9203d79201f 100644 --- a/Makefile +++ b/Makefile @@ -24,7 +24,7 @@ dev: doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 --quiet docs/ *.rst + @${ACTIVATE} doc8 --max-line-length 100 --ignore D000 --quiet docs/ *.rst valid: @echo "-> Run Ruff format" @@ -55,6 +55,5 @@ docs: docs-check: @${ACTIVATE} sphinx-build -E -W -b html docs/source docs/_build/ - @${ACTIVATE} sphinx-build -E -W -b linkcheck docs/source docs/_build/ .PHONY: conf dev check valid clean test docs docs-check diff --git a/README.rst b/README.rst index ff7c95a5d3a..9bae910bdcd 100644 --- a/README.rst +++ b/README.rst @@ -14,7 +14,7 @@ Discover also: - The ScanCode.io server project here: https://scancodeio.readthedocs.io - The ScanCode Workbench project for visualization of scancode results data: - https://github.com/nexB/scancode-workbench + https://github.com/nexB/scancode-workbench - Other companion SCA projects for code origin, license and security analysis here: https://aboutcode.org @@ -44,7 +44,7 @@ Why use ScanCode? `OpenEmbedded.org `_, the `FSFE `_, the `FSF `_, - `OSS Review Toolkit `_, + `OSS Review Toolkit `_, `ClearlyDefined.io `_, `RedHat Fabric8 analytics `_, and many more. @@ -89,7 +89,7 @@ Why use ScanCode? InstallShield installers, iOS apps, ISO images, Apache IVY, JBoss Sar, R CRAN, Apache Maven, Meteor, Mozilla extensions, MSI installers, JavaScript npm packages, package-lock.json, yarn.lock, NSIS Installers, - NuGet, OPam, Cocoapods, Python PyPI setup.py, setup.cfg, and + NuGet, OPam, Cocoapods, Python PyPI setup.py, setup.cfg, and several related lockfile formats, semi structured README files such as README.android, README.chromium, README.facebook, README.google, README.thirdparty, RPMs, Shell Archives, Squashfs images, Java WAR, Windows @@ -110,13 +110,13 @@ The ScanCode documentation is hosted at If you are new to visualization of scancode results data, start with our `newcomer `_ page. -If you want to compare output changes between different versions of ScanCode, +If you want to compare output changes between different versions of ScanCode, or want to look at scans generated by ScanCode, review our `reference scans `_. Other Important Documentation Pages: -- A `synopsis `_ +- A `synopsis `_ of ScanCode command line options. - Tutorials on: @@ -139,15 +139,15 @@ Installation ============ Before installing ScanCode make sure that you have installed the prerequisites -properly. This means installing Python 3.9 for x86/64 architectures. -We support Python 3.9, 3.10, 3.11 and 3.12. +properly. This means installing Python 3.10 for x86/64 architectures. +We support Python 3.9, 3.10, 3.11, 3.12 and 3.13. See `prerequisites `_ for detailed information on the support platforms and Python versions. There are a few common ways to `install ScanCode `_. -- `**Installation as an application: Install Python 3.9, download a release archive, extract and run**. +- `**Installation as an application: Install Python 3.9, download a release archive, extract and run**. `_ This is the recommended installation method. @@ -207,3 +207,190 @@ and `how to extract archives `_. + Gitter is now accessible through `Element `_ + or an `IRC bridge `_. + There are other AboutCode project-specific channels available there too. + +* The discussion channel for `scancode `_ + specifically aimed at users and developers using scancode-toolkit. 
+ +Source code and downloads +========================= + +* https://github.com/nexB/scancode-toolkit/releases +* https://github.com/nexB/scancode-toolkit.git +* https://pypi.org/project/scancode-toolkit/ +* https://github.com/nexB/scancode-thirdparty-src.git +* https://github.com/nexB/scancode-plugins.git +* https://github.com/nexB/thirdparty-packages.git + +License +======= + +* Apache-2.0 as the overall license +* CC-BY-4.0 for reference datasets (initially was in the Public Domain). +* Multiple other secondary permissive or copyleft licenses (LGPL, MIT, + BSD, GPL 2/3, etc.) for third-party components and test suite code and data. + + +See the NOTICE file and the .ABOUT files that document the origin and license of +the third-party code used in ScanCode for more details. + + + +.. |azure| image:: https://dev.azure.com/nexB/scancode-toolkit/_apis/build/status/nexB.scancode-toolkit?branchName=develop + :target: https://dev.azure.com/nexB/scancode-toolkit/_build/latest?definitionId=1&branchName=develop + :alt: Azure tests status (Linux, macOS, Windows) + +.. |docs-rtd| image:: https://readthedocs.org/projects/scancode-toolkit/badge/?version=latest + :target: https://scancode-toolkit.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +.. |docs-github-actions| image:: https://github.com/nexB/scancode-toolkit/actions/workflows/docs-ci.yml/badge.svg?branch=develop + :target: https://github.com/nexB/scancode-toolkit/actions/workflows/docs-ci.yml + :alt: Documentation Tests + +.. |release-github-actions| image:: https://github.com/nexB/scancode-toolkit/actions/workflows/scancode-release.yml/badge.svg?event=push + :target: https://github.com/nexB/scancode-toolkit/actions/workflows/scancode-release.yml + :alt: Release tests + + +Acknowledgements, Funding, Support and Sponsoring +-------------------------------------------------------- + +This project is funded, supported and sponsored by: + +- Generous support and contributions from users like you! +- the European Commission NGI programme +- the NLnet Foundation +- the Swiss State Secretariat for Education, Research and Innovation (SERI) +- Google, including the Google Summer of Code and the Google Seasons of Doc programmes +- Mercedes-Benz Group +- Microsoft and Microsoft Azure +- AboutCode ASBL +- nexB Inc. + + +|europa| |dgconnect| + +|ngi| |nlnet| + +|aboutcode| |nexb| + + +This project was funded through the NGI0 Discovery Fund, a fund established by NLnet with financial +support from the European Commission's Next Generation Internet programme, under the aegis of DG +Communications Networks, Content and Technology under grant agreement No 825322. + +|ngidiscovery| https://nlnet.nl/project/vulnerabilitydatabase/ + + +This project was funded through the NGI0 Entrust Fund, a fund established by NLnet with financial +support from the European Commission's Next Generation Internet programme, under the aegis of DG +Communications Networks, Content and Technology under grant agreement No 101069594. + +|ngizeroentrust| https://nlnet.nl/project/Back2source/ + + +This project was funded through the NGI0 Core Fund, a fund established by NLnet with financial +support from the European Commission's Next Generation Internet programme, under the aegis of DG +Communications Networks, Content and Technology under grant agreement No 101092990. 
+ +|ngizerocore| https://nlnet.nl/project/Back2source-next/ + + +This project was funded through the NGI0 Core Fund, a fund established by NLnet with financial +support from the European Commission's Next Generation Internet programme, under the aegis of DG +Communications Networks, Content and Technology under grant agreement No 101092990. + +|ngizerocore| https://nlnet.nl/project/FastScan/ + + +This project was funded through the NGI0 Commons Fund, a fund established by NLnet with financial +support from the European Commission's Next Generation Internet programme, under the aegis of DG +Communications Networks, Content and Technology under grant agreement No 101135429. Additional +funding is made available by the Swiss State Secretariat for Education, Research and Innovation +(SERI). + +|ngizerocommons| |swiss| https://nlnet.nl/project/MassiveFOSSscan/ + +This project was funded through the NGI0 Entrust Fund, a fund established by NLnet with financial +support from the European Commission's Next Generation Internet programme, under the aegis of DG +Communications Networks, Content and Technology under grant agreement No 101069594. + +|ngizeroentrust| https://nlnet.nl/project/purl2sym/ + + +.. |nlnet| image:: https://nlnet.nl/logo/banner.png + :target: https://nlnet.nl + :height: 50 + :alt: NLnet foundation logo + +.. |ngi| image:: https://ngi.eu/wp-content/uploads/thegem-logos/logo_8269bc6efcf731d34b6385775d76511d_1x.png + :target: https://ngi.eu35 + :height: 50 + :alt: NGI logo + +.. |nexb| image:: https://nexb.com/wp-content/uploads/2022/04/nexB.svg + :target: https://nexb.com + :height: 30 + :alt: nexB logo + +.. |europa| image:: https://ngi.eu/wp-content/uploads/sites/77/2017/10/bandiera_stelle.png + :target: http://ec.europa.eu/index_en.htm + :height: 40 + :alt: Europa logo + +.. |aboutcode| image:: https://aboutcode.org/wp-content/uploads/2023/10/AboutCode.svg + :target: https://aboutcode.org/ + :height: 30 + :alt: AboutCode logo + +.. |swiss| image:: https://www.sbfi.admin.ch/sbfi/en/_jcr_content/logo/image.imagespooler.png/1493119032540/logo.png + :target: https://www.sbfi.admin.ch/sbfi/en/home/seri/seri.html + :height: 40 + :alt: Swiss logo + +.. |dgconnect| image:: https://commission.europa.eu/themes/contrib/oe_theme/dist/ec/images/logo/positive/logo-ec--en.svg + :target: https://commission.europa.eu/about-european-commission/departments-and-executive-agencies/communications-networks-content-and-technology_en + :height: 40 + :alt: EC DG Connect logo + +.. |ngizerocore| image:: https://nlnet.nl/image/logos/NGI0_tag.svg + :target: https://nlnet.nl/core + :height: 40 + :alt: NGI Zero Core Logo + +.. |ngizerocommons| image:: https://nlnet.nl/image/logos/NGI0_tag.svg + :target: https://nlnet.nl/commonsfund/ + :height: 40 + :alt: NGI Zero Commons Logo + +.. |ngizeropet| image:: https://nlnet.nl/image/logos/NGI0PET_tag.svg + :target: https://nlnet.nl/PET + :height: 40 + :alt: NGI Zero PET logo + +.. |ngizeroentrust| image:: https://nlnet.nl/image/logos/NGI0Entrust_tag.svg + :target: https://nlnet.nl/entrust + :height: 38 + :alt: NGI Zero Entrust logo + +.. |ngiassure| image:: https://nlnet.nl/image/logos/NGIAssure_tag.svg + :target: https://nlnet.nl/image/logos/NGIAssure_tag.svg + :height: 32 + :alt: NGI Assure logo + +.. 
|ngidiscovery| image:: https://nlnet.nl/image/logos/NGI0Discovery_tag.svg + :target: https://nlnet.nl/discovery/ + :height: 40 + :alt: NGI Discovery logo diff --git a/ROADMAP-ABOUTCODE.rst b/ROADMAP-ABOUTCODE.rst index 7e7fc15a0e2..be4800281ef 100644 --- a/ROADMAP-ABOUTCODE.rst +++ b/ROADMAP-ABOUTCODE.rst @@ -94,7 +94,7 @@ License detection quality improvements Improve package detection ~~~~~~~~~~~~~~~~~~~~~~~~~~ -- Create synthethic, private packages from non-packaged files based on license and copyright +- Create synthethic, private packages from non-packaged files based on license and copyright - Create simplified purl-only lightweight package detection - Evolve model for dependencies towards requirements and true dependencies - Track private non-published packages @@ -102,7 +102,7 @@ Improve package detection Primary copyright detection for packages - This is closely tied to the primary license detection and should focus - on package manifests and key files. + on package manifests and key files. - Support copyright parsing from all package ecosystems. @@ -130,7 +130,7 @@ ABCTK: AboutCode Toolkit - add support for patterns for docoumented resources - add support for exclude for docoumented resources - document deployed resource for a development resource - + PURLDB: PurlDB ---------------- @@ -154,7 +154,7 @@ VCIO: VulnerableCode.io - Adopt VulnTotal model throughout - Log advisory history - Add vulnerable code reachability -- Add vulnerable code required context/config +- Add vulnerable code required context/config - Add more upstream resources - Deploy purlsync public pilot @@ -178,14 +178,14 @@ INSPECTORS: misc package and technology inspectors - Decompile and collect binary symbols. - Collect source symbols - - Resolve dependencies for Gradle, SBT and Maven. + - Resolve dependencies for Gradle, SBT and Maven. - Inspector for JavaScript, CSS - Decompile/deminify and collect bundled and minified symbols. - Analyze map files - Collect source symbols - - Resolve dependencies for npm, yarn and pnpm. + - Resolve dependencies for npm, yarn and pnpm. - Inspector for C/C++ - Collect source symbols diff --git a/ROADMAP.rst b/ROADMAP.rst index ebb58ce241c..4afd31d6cdf 100644 --- a/ROADMAP.rst +++ b/ROADMAP.rst @@ -1,7 +1,7 @@ ScanCode IO/TK Roadmap ======================== -SCIO: ScanCode.io +SCIO: ScanCode.io SCTK: ScanCode Toolkit Top Issues @@ -22,13 +22,13 @@ The goal of this improvement is to: - In a license detection, expose a primary license expression in addition to the complete, full license expression. - + - Make the logic of selection of the primary license visible, at the minimum with a log of combination and primary license selection operations. This is for SCTK first. -Status: +Status: This has been completed in SCTK and also included in SCIO. We use an updated --summary option and a new license clarity score for this. @@ -93,18 +93,18 @@ Roadmap - SCTK: add primary license field in package output and populate this based on package-type/ecosystem conventions. -- SCTK: also populate secondary license fields +- SCTK: also populate secondary license fields - SCIO: add primary license field in DiscoveredPackage models and feed it with the data from packages - SCIO: Do we track secondary? or is this just data aggregated on the fly. -- SCIO: Refine primary license based on license in "key files" +- SCIO: Refine primary license based on license in "key files" 2. 
Primary copyright detection for packages ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - This is closely tied to the primary license detection and should focus - on package manifests and key files. + on package manifests and key files. - Support copyright parsing from all package ecosystems. 3. Package files @@ -135,7 +135,8 @@ Roadmap 6. License detection quality improvements ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -- Finish and merge unknown license detection (this depends on completion of 4. Go to two-level reporting of detections for license) +- Finish and merge unknown license detection (this depends on completion of 4. + Go to two-level reporting of detections for license) - Update scancode-analyze to the new two-level reporting of license detections - Revamp how common list of suprrious licenses are detected (this is a bug) - Use important key phrases for license detection https://github.com/nexB/scancode-toolkit/issues/2637 diff --git a/configure b/configure index bd9b365f5be..5c5c64adb71 100755 --- a/configure +++ b/configure @@ -150,7 +150,7 @@ fi # Requirement arguments passed to pip and used by default or with --dev. REQUIREMENTS="--editable . --constraint requirements.txt" -DEV_REQUIREMENTS="--editable .[dev] --constraint requirements.txt --constraint requirements-dev.txt" +DEV_REQUIREMENTS="--editable .[dev,packages] --constraint requirements.txt --constraint requirements-dev.txt" # where we create a virtualenv VIRTUALENV_DIR=venv @@ -234,7 +234,7 @@ create_virtualenv() { fi $PYTHON_EXECUTABLE "$VIRTUALENV_PYZ" \ - --pip embed --setuptools embed \ + --wheel embed --pip embed --setuptools embed \ --seeder pip \ --never-download \ --no-periodic-update \ diff --git a/configure.bat b/configure.bat index 034777c7530..3314a31516e 100644 --- a/configure.bat +++ b/configure.bat @@ -115,7 +115,7 @@ if not exist "%CFG_BIN_DIR%\python.exe" ( if exist "%CFG_ROOT_DIR%\etc\thirdparty\virtualenv.pyz" ( %PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%\etc\thirdparty\virtualenv.pyz" ^ - --pip embed --setuptools embed ^ + --wheel embed --pip embed --setuptools embed ^ --seeder pip ^ --never-download ^ --no-periodic-update ^ @@ -131,7 +131,7 @@ if not exist "%CFG_BIN_DIR%\python.exe" ( ) ) %PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%\%VIRTUALENV_DIR%\virtualenv.pyz" ^ - --pip embed --setuptools embed ^ + --wheel embed --pip embed --setuptools embed ^ --seeder pip ^ --never-download ^ --no-periodic-update ^ diff --git a/docs/source/getting-started/install.rst b/docs/source/getting-started/install.rst index 14620c77525..efb601d12be 100644 --- a/docs/source/getting-started/install.rst +++ b/docs/source/getting-started/install.rst @@ -44,7 +44,7 @@ For advanced usage and experienced users, you can also use any of these mode: Before Installing ----------------- -- ScanCode requires a Python version between 3.9 to 3.12 and is +- ScanCode requires a Python version between 3.9 to 3.13 and is tested on Linux, macOS, and Windows. It should work fine on FreeBSD. .. 
_system_requirements: diff --git a/etc/release/scancode-create-release-app-linux.sh b/etc/release/scancode-create-release-app-linux.sh index 94aa847ae47..fbe5951a937 100755 --- a/etc/release/scancode-create-release-app-linux.sh +++ b/etc/release/scancode-create-release-app-linux.sh @@ -33,7 +33,7 @@ thirdparty_src_dir=$release_dir/thirdparty-src mkdir -p $thirdparty_dir mkdir -p $thirdparty_src_dir -./configure --rel +./configure --dev venv/bin/python etc/scripts/fetch_thirdparty.py \ --requirements requirements-native.txt \ diff --git a/etc/release/scancode-create-release-app-macos.sh b/etc/release/scancode-create-release-app-macos.sh index e689f1f1b04..5f34bf88f28 100755 --- a/etc/release/scancode-create-release-app-macos.sh +++ b/etc/release/scancode-create-release-app-macos.sh @@ -33,7 +33,7 @@ thirdparty_src_dir=$release_dir/thirdparty-src mkdir -p $thirdparty_dir mkdir -p $thirdparty_src_dir -./configure --rel +./configure --dev venv/bin/python etc/scripts/fetch_thirdparty.py \ --requirements requirements-native.txt \ diff --git a/etc/release/scancode-create-release-app-sources.sh b/etc/release/scancode-create-release-app-sources.sh index 1a73aa7485b..c7fa0fb61e1 100755 --- a/etc/release/scancode-create-release-app-sources.sh +++ b/etc/release/scancode-create-release-app-sources.sh @@ -29,7 +29,7 @@ mkdir -p $thirdparty_dir venv/bin/python setup.py --quiet sdist mv dist/*.tar.gz $release_dir -./configure --rel +./configure --dev venv/bin/python etc/scripts/fetch_thirdparty.py \ --requirements requirements.txt \ diff --git a/etc/release/scancode-create-release-app-windows.sh b/etc/release/scancode-create-release-app-windows.sh index a7957ae15bb..03a22d7117a 100755 --- a/etc/release/scancode-create-release-app-windows.sh +++ b/etc/release/scancode-create-release-app-windows.sh @@ -32,7 +32,7 @@ thirdparty_src_dir=$release_dir/thirdparty-src mkdir -p $thirdparty_dir mkdir -p $thirdparty_src_dir -./configure --rel +./configure --dev venv/bin/python etc/scripts/fetch_thirdparty.py \ --requirements requirements-native.txt \ diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index 76a19a60503..454247e3667 100755 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -283,8 +283,6 @@ def fetch_thirdparty( if name in no_dist: continue sdist_missing = sdists and "sdist" in dists and name not in wheel_only - if sdist_missing: - mia.append(f"SDist missing: {nv} {dists}") wheels_missing = wheels and any(d for d in dists if d != "sdist") and name not in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index b9b2c0e7701..4bdc96c6978 100755 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -153,7 +153,9 @@ def split_req(req): if not req: raise ValueError("req is required") # do not allow multiple constraints and tags - if not any(c in req for c in ",;"): + if ";" in req: + req = req.split(";")[0] + if any(c in req for c in ",:"): raise Exception(f"complex requirements with : or ; not supported: {req}") req = "".join(req.split()) if not any(c in req for c in comparators): diff --git a/requirements-dev.txt b/requirements-dev.txt index 15193db62b8..b995b2305b4 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,7 +1,7 @@ -aboutcode-toolkit==7.0.2 +aboutcode-toolkit==11.1.1 black==22.6.0 bleach==5.0.1 -build==0.7.0 +build==1.2.2.post1 commonmark==0.9.1 docutils==0.19 et-xmlfile==1.1.0 @@ -14,7 +14,7 
@@ mypy-extensions==0.4.3 openpyxl==3.0.10 pathspec==0.9.0 pep517==0.12.0 -pkginfo==1.8.3 +pkginfo==1.12.1.2 platformdirs==2.5.2 py==1.11.0 pytest==7.1.2 @@ -27,7 +27,7 @@ rich==12.5.1 secretstorage==3.3.2 tomli==2.0.1 tqdm==4.64.0 -twine==4.0.1 +twine==6.1.0 typing_extensions==4.14.0 vendorize==0.3.0 diff --git a/requirements.txt b/requirements.txt index f041ab5f331..c64daff856e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,7 +12,7 @@ click==8.2.1;python_version>='3.10' click==8.1.7;python_version<'3.10' colorama==0.4.6 commoncode==32.3.0 -construct==2.10.68 +construct==2.10.70 container-inspector==33.0.0 cryptography==45.0.4 debian-inspector==31.1.0 @@ -24,7 +24,7 @@ extractcode-libarchive==3.5.1.210531 fasteners==0.19 fingerprints==1.2.3 ftfy==6.3.1 -future==0.18.2 +future==1.0.0 gemfileparser2==0.9.4 html5lib==1.1 idna==3.10 @@ -32,11 +32,11 @@ importlib-metadata==6.2.1 inflection==0.5.1 intbitset==4.0.0 isodate==0.7.2 -jaraco.functools==4.1.0 +jaraco.functools==4.2.1 javaproperties==0.8.2 Jinja2==3.1.6 jsonstreams==0.6.0 -license-expression==30.4.1 +license-expression==30.4.3 lxml==5.4.0 MarkupSafe==3.0.2 more-itertools==10.7.0 diff --git a/setup-mini.cfg b/setup-mini.cfg index 59fbf90b014..7f43d6e766a 100644 --- a/setup-mini.cfg +++ b/setup-mini.cfg @@ -21,6 +21,7 @@ classifiers = Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Python :: 3.12 + Programming Language :: Python :: 3.13 Topic :: Software Development Topic :: Utilities diff --git a/setup.cfg b/setup.cfg index f3f3d033787..27d45aca84a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -21,6 +21,7 @@ classifiers = Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Python :: 3.12 + Programming Language :: Python :: 3.13 Topic :: Software Development Topic :: Utilities From be51a43cb4dd604cee2c0bb61cdf55032b04e128 Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Wed, 25 Jun 2025 20:17:41 +0530 Subject: [PATCH 74/76] Fix release smoke tests on posix Signed-off-by: Ayan Sinha Mahapatra --- .github/workflows/scancode-release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/scancode-release.yml b/.github/workflows/scancode-release.yml index 9a03090463e..d8b932463a2 100644 --- a/.github/workflows/scancode-release.yml +++ b/.github/workflows/scancode-release.yml @@ -349,7 +349,7 @@ jobs: - name: test install app archive run: | - for f in `find dist -type f -name "*.tar.gz"`; \ + for f in `find dist -type f -name "*.zip"`; \ do \ python etc/release/scancode_release_tests.py $f; \ done @@ -390,7 +390,7 @@ jobs: - name: test install app archive run: | - for f in `find dist -type f -name "*.tar.gz"`; \ + for f in `find dist -type f -name "*.zip"`; \ do \ python etc/release/scancode_release_tests.py $f; \ done From 2a7a0b9bcaeb03c2d2854593e0d73ae3b7d14b7a Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Thu, 26 Jun 2025 18:09:32 +0530 Subject: [PATCH 75/76] Bump scancode version to v32.4.0 Signed-off-by: Ayan Sinha Mahapatra --- CHANGELOG.rst | 71 +++++++++++++++++++++++++++++++++++++----- src/scancode_config.py | 14 +++++---- 2 files changed, 71 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index bb2b78950f8..5ccc3746845 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -27,14 +27,6 @@ v33.0.0 (next next, roadmap) - `--unknown-licenses` is removed and this is always enabled and only used in case of improper detections automatically. 
-- All license rules have been tagged with required phrases to improve detection accuracy
-  and reduce false positives. See https://github.com/nexB/scancode-toolkit/issues/3300
-
-- Equivalent words like license and licence, as well as plurals are now treated as the same in
-  license detection. With this, many redundant rules have been deprecated.
-
-- The license detection accuracy of Maven POMS has been improved fixing corner cases.
-
 - File categorization support added, a post scan plugin tagging files with priority levels
   for review, and also take advantage of these in other summary plugins.
@@ -46,9 +38,72 @@ v33.0.0 (next next, roadmap)
 - Update ABOUT files to adapt the ABOUT File Specification.
   See https://github.com/aboutcode-org/scancode-toolkit/issues/4181
 
+v32.4.0 - 2025-06-26
+--------------------
+
+This is a feature release with::
+
+  - python 3.13 support
+  - support added for adding required phrases to rules automatically
+  - misc license and package detection improvements
+  - new and updated license detection rules and new licenses
+  - misc bugfixes, dependency and documentation updates
+
+There are new data attributes, and we have an output format version bump
+from ``4.0.0`` to ``4.1.0``. The changes in Output Data Structure are:
+
+  - A new resource level attribute ``sha1_git`` is added, which has
+    the corresponding checksum value for files, and is empty for
+    directories. This is returned optionally with the ``--info`` plugin.
+
+  - A new resource level attribute ``is_community`` is added, which is
+    True for files commonly used for community/project maintenance.
+    This is returned optionally with the ``--classify`` plugin.
+
+These are the details for the most important changes introduced:
+
+- Add support for adding required phrases in rules automatically using
+  console scripts and CLI options, based on already marked required
+  phrases for the same license-expression and license field attributes.
+  The new console scripts are:
+  - `add-required-phrases` to add required phrases from other rules or
+    license attributes
+  - `gen-new-required-phrases-rules` to add required phrase rules for
+    required phrases marked in rules
+  This improves detection accuracy and reduces false positives.
+  https://github.com/aboutcode-org/scancode-toolkit/pull/3924
+  https://github.com/aboutcode-org/scancode-toolkit/pull/4237
+  https://github.com/aboutcode-org/scancode-toolkit/pull/4241
+
+- The default number of processes used for scancode scans is changed from
+  1 to `N-1`, where N is the number of CPUs available on the
+  system. https://github.com/aboutcode-org/scancode-toolkit/pull/4104
+
 - Also return sha1_git checksums for each files with ``--info`` plugin.
   https://github.com/aboutcode-org/scancode-toolkit/issues/624
 
+- Equivalent words like license and licence, as well as plurals are
+  now treated as the same in license detection. With this,
+  many redundant rules have been deprecated.
+  https://github.com/aboutcode-org/scancode-toolkit/pull/4215
+
+- Support running scancode with python3.13.
+  Update and use latest native dependencies with py3.13 support,
+  update and test py3.13 usage in CI and other scripts, and
+  update other third-party dependencies, use latest skeleton.
+  https://github.com/aboutcode-org/scancode-toolkit/pull/4430
+
+- Misc license detection improvements, new licenses and license
+  detection rules.
+ https://github.com/aboutcode-org/scancode-toolkit/pull/4261 + https://github.com/aboutcode-org/scancode-toolkit/pull/4412 + https://github.com/aboutcode-org/scancode-toolkit/pull/4405 + https://github.com/aboutcode-org/scancode-toolkit/pull/4278 + https://github.com/aboutcode-org/scancode-toolkit/pull/4093 + +- Fix an issues where `pip install scancode-toolkit` was failing + because of a compatibility issue with Click + https://github.com/aboutcode-org/scancode-toolkit/pull/4427 v32.3.3 - 2025-03-06 -------------------- diff --git a/src/scancode_config.py b/src/scancode_config.py index cfe24d5def7..d1da047b2cc 100644 --- a/src/scancode_config.py +++ b/src/scancode_config.py @@ -131,20 +131,22 @@ def _create_dir(location): # 4. hardcoded This is the default, fallback version in case package is not installed or we # do not have a proper version otherwise. +# See https://scancode-toolkit.readthedocs.io/en/latest/misc/versioning.html for +# more information on versioning if not __version__: - __version__ = '32.3.3' + __version__ = '32.4.0' ####################### # used to warn user when the version is out of date # this is (year, month, day) -__release_date__ = datetime.datetime(2025, 1, 20) +__release_date__ = datetime.datetime(2025, 6, 26) -# See https://github.com/nexB/scancode-toolkit/issues/2653 for more information -# on the data format version -__output_format_version__ = '4.0.0' +# See https://scancode-toolkit.readthedocs.io/en/latest/misc/versioning.html +# for more information on the data format version +__output_format_version__ = '4.1.0' # see https://github.com/spdx/tools-python/issues/820 -# this is actually `3.25.0` +# this is actually `3.26.0` spdx_license_list_version = '3.26' ################################################################################ From 0fe7fc8705e1d83b305f94ebb100fc92c316d415 Mon Sep 17 00:00:00 2001 From: Ayan Sinha Mahapatra Date: Thu, 26 Jun 2025 18:22:57 +0530 Subject: [PATCH 76/76] Bump scancode version to v32.4.0 in setup.cfg Signed-off-by: Ayan Sinha Mahapatra --- setup-mini.cfg | 18 +++++++++--------- setup.cfg | 2 +- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/setup-mini.cfg b/setup-mini.cfg index 7f43d6e766a..8fe61f4e2b3 100644 --- a/setup-mini.cfg +++ b/setup-mini.cfg @@ -1,6 +1,6 @@ [metadata] name = scancode-toolkit-mini -version = 32.3.3 +version = 32.4.0 license = Apache-2.0 AND CC-BY-4.0 AND LicenseRef-scancode-other-permissive AND LicenseRef-scancode-other-copyleft # description must be on ONE line https://github.com/pypa/setuptools/issues/1390 @@ -49,8 +49,10 @@ license_files = CHANGELOG.rst CODE_OF_CONDUCT.rst cc-by-4.0.LICENSE + README.rst [options] +python_requires = >=3.9 package_dir = =src packages = find: @@ -60,7 +62,6 @@ zip_safe = false py_modules = scancode_config -python_requires = >=3.9 install_requires = attrs >= 18.1,!=20.1.0;python_version<'3.11' @@ -87,7 +88,7 @@ install_requires = jinja2 >= 2.7.0 jsonstreams >= 0.5.0 license_expression >= 30.4.1 - lxml >= 4.9.2 + lxml >= 5.4.0 MarkupSafe >= 2.1.2 packageurl_python >= 0.9.0 packvers >= 21.0.0 @@ -126,20 +127,18 @@ full = typecode[full] >= 30.0.0 extractcode[full] >= 31.0.0 -testing = +dev = pytest >= 6, != 7.0.0 pytest-xdist >= 2 aboutcode-toolkit >= 7.0.2 - pycodestyle >= 2.8.0 twine black isort vendorize >= 0.3.0 pytest-rerunfailures - -docs = - Sphinx == 5.1.0 - sphinx_rtd_theme >= 0.5.1 + ruff + Sphinx>=5.0.2 + sphinx-rtd-theme>=1.0.0 sphinx-reredirects >= 0.1.2 doc8 >= 0.8.1 sphinx-autobuild @@ -161,6 +160,7 @@ console_scripts = 
scancode-license-data = licensedcode.license_db:dump_scancode_license_data regen-package-docs = packagedcode.regen_package_docs:regen_package_docs add-required-phrases = licensedcode.required_phrases:add_required_phrases + gen-new-required-phrases-rules = licensedcode.required_phrases:gen_required_phrases_rules # These are configurations for ScanCode plugins as setuptools entry points. # Each plugin entry hast this form: diff --git a/setup.cfg b/setup.cfg index 27d45aca84a..426ae21fef8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = scancode-toolkit -version = 32.3.3 +version = 32.4.0 license = Apache-2.0 AND CC-BY-4.0 AND LicenseRef-scancode-other-permissive AND LicenseRef-scancode-other-copyleft # description must be on ONE line https://github.com/pypa/setuptools/issues/1390