diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index c05bf6cf..1e82ec1e 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -18,7 +18,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-python@v5
         with:
-          python-version: "3.8"
+          python-version: "3.9"
      - uses: pre-commit/action@v3.0.1
 
   test-dist:
@@ -44,7 +44,6 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - python: "3.8"
           - python: "3.9"
           - python: "3.10"
           - python: "3.11"
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 7331af1f..f127b10e 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,13 +2,13 @@
 # See https://pre-commit.com/hooks.html for more hooks
 
 default_language_version:
-  python: python3.8
+  python: python3.9
 
 exclude: ^src/auditwheel/_vendor/
 
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.6.0
+    rev: v5.0.0
     hooks:
       - id: check-builtin-literals
       - id: check-added-large-files
@@ -23,13 +23,13 @@ repos:
       - id: trailing-whitespace
 
   - repo: https://github.com/asottile/pyupgrade
-    rev: v3.16.0
+    rev: v3.19.0
     hooks:
       - id: pyupgrade
-        args: ["--py38-plus"]
+        args: ["--py39-plus"]
 
   - repo: https://github.com/psf/black
-    rev: 24.8.0
+    rev: 24.10.0
     hooks:
       - id: black
@@ -46,7 +46,7 @@ repos:
       - id: flake8
 
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.11.1
+    rev: v1.13.0
     hooks:
       - id: mypy
         exclude: ^tests/integration/.*/.*$
diff --git a/.travis.yml b/.travis.yml
index 1b4cd195..1f78016f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,8 +1,7 @@
 os: linux
-dist: focal
+dist: jammy
 language: python
-python: "3.9"
-arch: arm64-graviton2
+python: "3.12"
 
 branches:
   except:
@@ -10,9 +9,7 @@ branches:
 
 jobs:
   include:
-    - arch: arm64-graviton2
-      virt: vm
-      group: edge
+    - arch: arm64
     - arch: ppc64le
     - arch: s390x
   allow_failures:
@@ -41,7 +38,7 @@ install:
   - pip install nox
 
 script:
-  - nox -s tests-3.9
+  - nox -s tests-3.12
 
 after_success:
   - if [ "$(uname -m)" != "aarch64" ]; then qemu-x86_64-static ${HOME}/codecov; fi
diff --git a/README.rst b/README.rst
index 6d157317..0aace12d 100644
--- a/README.rst
+++ b/README.rst
@@ -40,7 +40,7 @@ advised that bundling, like static linking, may implicate copyright concerns.
 Requirements
 ------------
 - OS: Linux
-- Python: 3.8+
+- Python: 3.9+
 - `patchelf <https://github.com/NixOS/patchelf>`_: 0.14+
 
 Only systems that use `ELF
@@ -135,7 +135,7 @@ daemon. These tests will pull a number of docker images if they are not already
 available on your system, but it won't update existing images. To update these
 images manually, run::
 
-    docker pull python:3.8-slim
+    docker pull python:3.9-slim-bookworm
     docker pull quay.io/pypa/manylinux1_x86_64
     docker pull quay.io/pypa/manylinux2010_x86_64
     docker pull quay.io/pypa/manylinux2014_x86_64
diff --git a/noxfile.py b/noxfile.py
index 9de14c45..3a860f98 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -8,11 +8,11 @@
 
 nox.options.sessions = ["lint", "test-dist"]
 
-PYTHON_ALL_VERSIONS = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
+PYTHON_ALL_VERSIONS = ["3.9", "3.10", "3.11", "3.12", "3.13"]
 RUNNING_CI = "TRAVIS" in os.environ or "GITHUB_ACTIONS" in os.environ
 
 
-@nox.session(python=["3.8"], reuse_venv=True)
+@nox.session(python=["3.9"], reuse_venv=True)
 def lint(session: nox.Session) -> None:
     """
     Run linters on the codebase.
diff --git a/pyproject.toml b/pyproject.toml
index f9a2f2e6..f4e00767 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,16 +1,16 @@
 [build-system]
-requires = ["setuptools>=45", "wheel", "setuptools_scm>=6.2"]
+requires = ["setuptools>=61", "setuptools_scm>=8"]
 build-backend = "setuptools.build_meta"
 
 [tool.setuptools_scm]
 # enable version inference
 
 [tool.black]
-target-version = ["py38", "py39", "py310", "py311", "py312", "py313"]
+target-version = ["py39", "py310", "py311", "py312", "py313"]
 extend-exclude = "src/auditwheel/_vendor"
 
 [tool.isort]
-py_version = 38
+py_version = 39
 profile = "black"
 extend_skip_glob = "src/auditwheel/_vendor/**/*.py"
diff --git a/setup.cfg b/setup.cfg
index 7d65c660..3674ff72 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -14,7 +14,6 @@ classifier =
     License :: OSI Approved :: MIT License
     Operating System :: POSIX :: Linux
     Programming Language :: Python :: 3
-    Programming Language :: Python :: 3.8
     Programming Language :: Python :: 3.9
     Programming Language :: Python :: 3.10
     Programming Language :: Python :: 3.11
@@ -33,7 +32,7 @@ install_requires =
 packages = find:
 package_dir =
     =src
-python_requires = >=3.8
+python_requires = >=3.9
 zip_safe = False
 
 [options.package_data]
diff --git a/src/auditwheel/elfutils.py b/src/auditwheel/elfutils.py
index b09470f2..8927bf00 100644
--- a/src/auditwheel/elfutils.py
+++ b/src/auditwheel/elfutils.py
@@ -1,8 +1,8 @@
 from __future__ import annotations
 
+from collections.abc import Iterator
 from os.path import basename
 from pathlib import Path
-from typing import Iterator
 
 from elftools.common.exceptions import ELFError
 from elftools.elf.elffile import ELFFile
diff --git a/src/auditwheel/policy/__init__.py b/src/auditwheel/policy/__init__.py
index 7c9b1585..79ec72e7 100644
--- a/src/auditwheel/policy/__init__.py
+++ b/src/auditwheel/policy/__init__.py
@@ -7,9 +7,10 @@
 import struct
 import sys
 from collections import defaultdict
+from collections.abc import Generator
 from os.path import abspath, dirname, join
 from pathlib import Path
-from typing import Any, Generator
+from typing import Any
 
 from auditwheel.elfutils import filter_undefined_symbols, is_subdir
 
@@ -156,9 +157,7 @@ def policy_is_satisfied(
     def lddtree_external_references(self, lddtree: dict, wheel_path: str) -> dict:
         # XXX: Document the lddtree structure, or put it in something
         # more stable than a big nested dict
-        def filter_libs(
-            libs: set[str], whitelist: set[str]
-        ) -> Generator[str, None, None]:
+        def filter_libs(libs: set[str], whitelist: set[str]) -> Generator[str]:
             for lib in libs:
                 if "ld-linux" in lib or lib in ["ld64.so.2", "ld64.so.1"]:
                     # always exclude ELF dynamic linker/loader
diff --git a/src/auditwheel/repair.py b/src/auditwheel/repair.py
index 85e3ca39..220f5404 100644
--- a/src/auditwheel/repair.py
+++ b/src/auditwheel/repair.py
@@ -7,11 +7,11 @@
 import re
 import shutil
 import stat
+from collections.abc import Iterable
 from os.path import abspath, basename, dirname, exists, isabs
 from os.path import join as pjoin
 from pathlib import Path
 from subprocess import check_call
-from typing import Iterable
 
 from auditwheel.patcher import ElfPatcher
 
diff --git a/src/auditwheel/tools.py b/src/auditwheel/tools.py
index 63b32802..04e5d896 100644
--- a/src/auditwheel/tools.py
+++ b/src/auditwheel/tools.py
@@ -4,8 +4,9 @@
 import os
 import subprocess
 import zipfile
+from collections.abc import Iterable
 from datetime import datetime, timezone
-from typing import Any, Iterable
+from typing import Any
 
 
 def unique_by_index(sequence: Iterable[Any]) -> list[Any]:
diff --git a/src/auditwheel/wheeltools.py b/src/auditwheel/wheeltools.py
index b9f85b31..063d3493 100644
--- a/src/auditwheel/wheeltools.py
+++ b/src/auditwheel/wheeltools.py
@@ -11,6 +11,7 @@
 import logging
 import os
 from base64 import urlsafe_b64encode
+from collections.abc import Generator, Iterable
 from datetime import datetime, timezone
 from itertools import product
 from os.path import abspath, basename, dirname, exists
@@ -19,7 +20,6 @@
 from os.path import sep as psep
 from os.path import splitext
 from types import TracebackType
-from typing import Generator, Iterable
 
 from packaging.utils import parse_wheel_filename
 
@@ -69,7 +69,7 @@ def rewrite_record(bdist_dir: str) -> None:
     if exists(sig_path):
         os.unlink(sig_path)
 
-    def walk() -> Generator[str, None, None]:
+    def walk() -> Generator[str]:
         for dir, dirs, files in os.walk(bdist_dir):
             for f in files:
                 yield pjoin(dir, f)
@@ -173,7 +173,7 @@ def __enter__(self):
         self.path = super().__enter__()
         return self
 
-    def iter_files(self) -> Generator[str, None, None]:
+    def iter_files(self) -> Generator[str]:
         if self.path is None:
             raise ValueError("This function should be called from context manager")
         record_names = glob.glob(os.path.join(self.path, "*.dist-info/RECORD"))
diff --git a/tests/integration/test_manylinux.py b/tests/integration/test_manylinux.py
index acd7b1ab..c770a08c 100644
--- a/tests/integration/test_manylinux.py
+++ b/tests/integration/test_manylinux.py
@@ -49,22 +49,19 @@
 DOCKER_CONTAINER_NAME = "auditwheel-test-anylinux"
 PYTHON_MAJ_MIN = [str(i) for i in sys.version_info[:2]]
 PYTHON_ABI_MAJ_MIN = "".join(PYTHON_MAJ_MIN)
-PYTHON_ABI_FLAGS = "m" if sys.version_info.minor < 8 else ""
-PYTHON_ABI = f"cp{PYTHON_ABI_MAJ_MIN}-cp{PYTHON_ABI_MAJ_MIN}{PYTHON_ABI_FLAGS}"
-PYTHON_IMAGE_TAG = ".".join(PYTHON_MAJ_MIN) + (
-    "-rc" if PYTHON_MAJ_MIN == ["3", "13"] else ""
-)
-MANYLINUX_PYTHON_IMAGE_ID = f"python:{PYTHON_IMAGE_TAG}-slim-bullseye"
+PYTHON_ABI = f"cp{PYTHON_ABI_MAJ_MIN}-cp{PYTHON_ABI_MAJ_MIN}"
+PYTHON_IMAGE_TAG = ".".join(PYTHON_MAJ_MIN)
+MANYLINUX_PYTHON_IMAGE_ID = f"python:{PYTHON_IMAGE_TAG}-slim-bookworm"
 MUSLLINUX_IMAGES = {
-    "musllinux_1_1": f"quay.io/pypa/musllinux_1_1_{PLATFORM}:latest",
+    "musllinux_1_2": f"quay.io/pypa/musllinux_1_2_{PLATFORM}:latest",
 }
 MUSLLINUX_PYTHON_IMAGE_ID = f"python:{PYTHON_IMAGE_TAG}-alpine"
 DEVTOOLSET = {
     "manylinux_2_5": "devtoolset-2",
     "manylinux_2_12": "devtoolset-8",
     "manylinux_2_17": "devtoolset-10",
-    "manylinux_2_28": "gcc-toolset-12",
-    "musllinux_1_1": "devtoolset-not-present",
+    "manylinux_2_28": "gcc-toolset-13",
+    "musllinux_1_2": "devtoolset-not-present",
 }
 PATH_DIRS = [
     f"/opt/python/{PYTHON_ABI}/bin",
@@ -79,7 +76,6 @@
 PATH = {k: ":".join(PATH_DIRS).format(devtoolset=v) for k, v in DEVTOOLSET.items()}
 WHEEL_CACHE_FOLDER = op.expanduser("~/.cache/auditwheel_tests")
 NUMPY_VERSION_MAP = {
-    "38": "1.21.4",
     "39": "1.21.4",
     "310": "1.21.4",
     "311": "1.23.4",
@@ -205,6 +201,10 @@ def build_numpy(container, policy, output_dir):
 
     if policy.startswith("musllinux_"):
         docker_exec(container, "apk add openblas-dev")
+        if policy.endswith("_s390x"):
+            # https://github.com/numpy/numpy/issues/27932
+            fix_hwcap = "echo '#define HWCAP_S390_VX 2048' >> /usr/include/bits/hwcap.h"
+            docker_exec(container, f'sh -c "{fix_hwcap}"')
     elif policy.startswith("manylinux_2_28_"):
         docker_exec(container, "dnf install -y openblas-devel")
     else:
@@ -410,10 +410,14 @@ def test_build_wheel_with_binary_executable(
         )
 
         # testprogram should be a Python shim since we had to rewrite its RPATH.
-        assert (
-            docker_exec(docker_python, ["head", "-n1", "/usr/local/bin/testprogram"])
-            == "#!/usr/local/bin/python\n"
+        shebang = docker_exec(
+            docker_python, ["head", "-n1", "/usr/local/bin/testprogram"]
         )
+        assert shebang in {
+            "#!/usr/local/bin/python\n",
+            "#!/usr/local/bin/python3\n",
+            f"#!/usr/local/bin/python{'.'.join(PYTHON_MAJ_MIN)}\n",
+        }
 
         # testprogram_nodeps should be the unmodified ELF binary.
         assert (
diff --git a/tests/integration/testpackage/testpackage/__init__.py b/tests/integration/testpackage/testpackage/__init__.py
index 9e728d99..499a036f 100644
--- a/tests/integration/testpackage/testpackage/__init__.py
+++ b/tests/integration/testpackage/testpackage/__init__.py
@@ -1,11 +1,22 @@
 from __future__ import annotations
 
+import os
 import subprocess
-
-import pkg_resources
+import sysconfig
+from importlib.metadata import distribution
 
 
 def runit(x):
-    filename = pkg_resources.resource_filename(__name__, "testprogram")
+    dist = distribution("testpackage")
+    scripts_paths = [
+        os.path.abspath(sysconfig.get_path("scripts", scheme))
+        for scheme in sysconfig.get_scheme_names()
+    ]
+    scripts = []
+    for file in dist.files:
+        if os.path.abspath(str(file.locate().parent)) in scripts_paths:
+            scripts.append(file.locate().resolve(strict=True))
+    assert len(scripts) == 2, scripts
+    filename = next(script for script in scripts if script.stem == "testprogram")
     output = subprocess.check_output([filename, str(x)])
     return float(output)
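
Note on the typing changes that recur throughout this patch: with Python 3.9 as the new minimum, the Iterable/Iterator/Generator imports can come straight from collections.abc (PEP 585), and the stubs understood by the mypy 1.13.0 pinned above give Generator's send and return type parameters a default of None, which is what allows Generator[str, None, None] to be shortened to Generator[str]. A minimal sketch, not part of the patch (the function and variable names are invented for illustration):

    from __future__ import annotations

    from collections.abc import Generator


    def iter_stripped(names: list[str]) -> Generator[str]:
        # Generator[str] is shorthand for Generator[str, None, None]:
        # the send and return type parameters default to None.
        for name in names:
            yield name.strip()


    print(list(iter_stripped([" a ", "b "])))  # ['a', 'b']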