From f1fac1c7fc5e4c2776a4062bf3ad28870648840b Mon Sep 17 00:00:00 2001
From: mayeut
Date: Sun, 13 Aug 2023 13:00:45 +0200
Subject: [PATCH] chore: rework PyPy installation

PyPy installation is just a tar extract. As such, it might be
interesting to install PyPy at runtime rather than at build time.

This is especially true for EOL versions of PyPy: installing them on
demand would allow reducing the image size and, thus, the overall build
time for users who do not use EOL PyPy versions.

This commit does not modify the default set of installed PyPy versions yet.

This commit will also be useful for GraalPy installation.
---
 README.rst                                    |  45 ++++-
 docker/Dockerfile                             |  52 ++---
 .../download-and-install-interpreter.sh       |  30 +++
 docker/build_scripts/finalize-one.sh          |  37 ++++
 docker/build_scripts/finalize.sh              |  36 ++--
 docker/build_scripts/install-pypy.sh          |  72 -------
 .../build_scripts/manylinux-interpreters.py   | 183 ++++++++++++++++++
 docker/build_scripts/pypy.sha256              |  33 ----
 docker/build_scripts/python_versions.json     |  70 +++++++
 noxfile.py                                    |  10 +-
 tests/run_tests.sh                            |  54 +++++
 tools/update_interpreters_download.py         |  88 +++++++++
 .../update_native_dependencies.py             |  29 ++-
 13 files changed, 555 insertions(+), 184 deletions(-)
 create mode 100755 docker/build_scripts/download-and-install-interpreter.sh
 create mode 100755 docker/build_scripts/finalize-one.sh
 delete mode 100755 docker/build_scripts/install-pypy.sh
 create mode 100644 docker/build_scripts/manylinux-interpreters.py
 delete mode 100644 docker/build_scripts/pypy.sha256
 create mode 100644 docker/build_scripts/python_versions.json
 create mode 100644 tools/update_interpreters_download.py
 rename update_native_dependencies.py => tools/update_native_dependencies.py (89%)

diff --git a/README.rst b/README.rst
index 61a30bf2..0543a699 100644
--- a/README.rst
+++ b/README.rst
@@ -173,7 +173,7 @@ Image content
 
 All images currently contain:
 
-- CPython 3.6, 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and PyPy 3.7, 3.8, 3.9 installed in
+- CPython 3.6, 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and PyPy 3.7, 3.8, 3.9, 3.10 installed in
   ``/opt/python/<python tag>-<abi tag>``. The directories are named
   after the PEP 425 tags for each environment --
   e.g. ``/opt/python/cp37-cp37m`` contains a CPython 3.7 build, and
@@ -184,6 +184,49 @@ All images currently contain:
 
 - The `auditwheel <https://github.com/pypa/auditwheel>`_ tool
 
+- The manylinux-interpreters tool, which allows listing all available interpreters & installing the ones missing from the image
+
+  3 commands are available:
+
+  - ``manylinux-interpreters list``
+
+    .. code-block:: bash
+
+        usage: manylinux-interpreters list [-h] [-v] [-i] [--format {text,json}]
+
+        list available or installed interpreters
+
+        options:
+          -h, --help            show this help message and exit
+          -v, --verbose         display additional information (--format=text only, ignored for --format=json)
+          -i, --installed       only list installed interpreters
+          --format {text,json}  text is not meant to be machine readable (i.e. the format is not stable)
+
+  - ``manylinux-interpreters ensure-all``
+
+    .. code-block:: bash
+
+        usage: manylinux-interpreters ensure-all [-h]
+
+        make sure all interpreters are installed
+
+        options:
+          -h, --help  show this help message and exit
+
+  - ``manylinux-interpreters ensure``
+
+    .. code-block:: bash
+
+        usage: manylinux-interpreters ensure [-h] TAG [TAG ...]
+
+        make sure a list of interpreters are installed
+
+        positional arguments:
+          TAG         tag with format '<python_tag>-<abi_tag>' e.g. 'pp310-pypy310_pp73'
+
+        options:
+          -h, --help  show this help message and exit
+
 Note that less common or virtually unheard of flag combinations
 (such as ``--with-pydebug`` (``d``) and ``--without-pymalloc`` (absence of ``m``))
 are not provided.
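For context, a minimal sketch of how the commands documented above might be combined inside a manylinux container; the project path and wheel output directory are illustrative, and the tag must be available for the image's architecture:

.. code-block:: bash

    # make sure the requested interpreter is present (a no-op if already installed)
    manylinux-interpreters ensure pp310-pypy310_pp73

    # build a wheel with the freshly ensured interpreter
    /opt/python/pp310-pypy310_pp73/bin/python -m pip wheel --no-deps -w /tmp/wheelhouse .

    # machine-readable inventory of what is currently installed
    manylinux-interpreters list --installed --format=json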
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 7e0636bf..425282c3 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -138,43 +138,31 @@ FROM build_cpython AS build_cpython312
 COPY build_scripts/cpython-pubkey-312-313.txt /build_scripts/cpython-pubkeys.txt
 RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.12.0rc2
 
-FROM build_cpython AS all_python
-COPY build_scripts/install-pypy.sh \
-     build_scripts/pypy.sha256 \
-     build_scripts/finalize-python.sh \
-     /build_scripts/
-RUN manylinux-entrypoint /build_scripts/install-pypy.sh 3.7 7.3.9
-RUN manylinux-entrypoint /build_scripts/install-pypy.sh 3.8 7.3.11
-RUN manylinux-entrypoint /build_scripts/install-pypy.sh 3.9 7.3.12
-RUN manylinux-entrypoint /build_scripts/install-pypy.sh 3.10 7.3.12
-COPY --from=build_cpython36 /opt/_internal /opt/_internal/
-COPY --from=build_cpython37 /opt/_internal /opt/_internal/
-COPY --from=build_cpython38 /opt/_internal /opt/_internal/
-COPY --from=build_cpython39 /opt/_internal /opt/_internal/
-COPY --from=build_cpython310 /opt/_internal /opt/_internal/
-COPY --from=build_cpython311 /opt/_internal /opt/_internal/
-COPY --from=build_cpython312 /opt/_internal /opt/_internal/
-RUN manylinux-entrypoint /build_scripts/finalize-python.sh
+FROM build_cpython AS all_cpython
+COPY build_scripts/finalize-python.sh /build_scripts/
+RUN --mount=type=bind,target=/build_cpython36,from=build_cpython36 \
+    --mount=type=bind,target=/build_cpython37,from=build_cpython37 \
+    --mount=type=bind,target=/build_cpython38,from=build_cpython38 \
+    --mount=type=bind,target=/build_cpython39,from=build_cpython39 \
+    --mount=type=bind,target=/build_cpython310,from=build_cpython310 \
+    --mount=type=bind,target=/build_cpython311,from=build_cpython311 \
+    --mount=type=bind,target=/build_cpython312,from=build_cpython312 \
+    mkdir -p /opt/_internal && \
+    cp -rf /build_cpython*/opt/_internal/* /opt/_internal/ && \
+    manylinux-entrypoint /build_scripts/finalize-python.sh
 
 
 FROM runtime_base
 COPY --from=build_git /manylinux-rootfs /
 COPY --from=build_cpython /manylinux-rootfs /
-COPY --from=all_python /opt/_internal /opt/_internal/
-COPY build_scripts/finalize.sh \
-     build_scripts/update-system-packages.sh \
-     build_scripts/python-tag-abi-tag.py \
-     build_scripts/requirements3.6.txt \
-     build_scripts/requirements3.7.txt \
-     build_scripts/requirements3.8.txt \
-     build_scripts/requirements3.9.txt \
-     build_scripts/requirements3.10.txt \
-     build_scripts/requirements3.11.txt \
-     build_scripts/requirements3.12.txt \
-     build_scripts/requirements-base-tools.txt \
-     /build_scripts/
-COPY build_scripts/requirements-tools/* /build_scripts/requirements-tools/
-RUN manylinux-entrypoint /build_scripts/finalize.sh && rm -rf /build_scripts
+COPY build_scripts /opt/_internal/build_scripts/
+RUN --mount=type=bind,target=/all_cpython,from=all_cpython \
+    cp -rf /all_cpython/opt/_internal/* /opt/_internal/ && \
+    manylinux-entrypoint /opt/_internal/build_scripts/finalize.sh \
+    pp37-pypy37_pp73 \
+    pp38-pypy38_pp73 \
+    pp39-pypy39_pp73 \
+    pp310-pypy310_pp73
 
 ENV SSL_CERT_FILE=/opt/_internal/certs.pem
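The tags passed to finalize.sh above are the default set of PyPy interpreters baked into the image; everything else listed in python_versions.json stays available for on-demand installation. A quick way to compare the two sets on a built image (the image tag is illustrative, any image built from this Dockerfile works):

.. code-block:: bash

    # interpreters shipped by default in the image
    docker run --rm quay.io/pypa/manylinux_2_28_x86_64 manylinux-interpreters list --installed

    # everything that could be installed on this architecture
    docker run --rm quay.io/pypa/manylinux_2_28_x86_64 manylinux-interpreters list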
diff --git a/docker/build_scripts/download-and-install-interpreter.sh b/docker/build_scripts/download-and-install-interpreter.sh
new file mode 100755
index 00000000..a2f212fd
--- /dev/null
+++ b/docker/build_scripts/download-and-install-interpreter.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+# Stop at any error, show all commands
+set -exuo pipefail
+
+# Get script directory
+MY_DIR=$(dirname "${BASH_SOURCE[0]}")
+
+ABI_TAG=$1
+DOWNLOAD_URL=$2
+SHA256=$3
+
+PREFIX="/opt/_internal/${ABI_TAG}"
+
+case ${DOWNLOAD_URL} in
+    *.tar) COMP=;;
+    *.tar.gz) COMP=z;;
+    *.tar.bz2) COMP=j;;
+    *.tar.xz) COMP=J;;
+    *) echo "unsupported archive"; exit 1;;
+esac
+
+mkdir ${PREFIX}
+
+curl -fsSL ${DOWNLOAD_URL} | tee >(tar -C ${PREFIX} --strip-components 1 -x${COMP}f -) | sha256sum -c <(echo "${SHA256}  -")
+
+# remove debug symbols if any
+find ${PREFIX}/bin -name '*.debug' -delete
+
+${MY_DIR}/finalize-one.sh ${PREFIX}
diff --git a/docker/build_scripts/finalize-one.sh b/docker/build_scripts/finalize-one.sh
new file mode 100755
index 00000000..68e7f1c5
--- /dev/null
+++ b/docker/build_scripts/finalize-one.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+# Stop at any error, show all commands
+set -exuo pipefail
+
+PREFIX=$1
+
+# Get script directory
+MY_DIR=$(dirname "${BASH_SOURCE[0]}")
+
+# Some pythons install as bin/python3. Make them available as
+# bin/python.
+if [ -e ${PREFIX}/bin/python3 ] && [ ! -e ${PREFIX}/bin/python ]; then
+    ln -s python3 ${PREFIX}/bin/python
+fi
+PY_VER=$(${PREFIX}/bin/python -c "import sys; print('.'.join(str(v) for v in sys.version_info[:2]))")
+PY_IMPL=$(${PREFIX}/bin/python -c "import sys; print(sys.implementation.name)")
+
+# Install pinned packages for this python version.
+# Use the already installed cpython pip to bootstrap pip if available
+if [ -f /usr/local/bin/python${PY_VER} ]; then
+    /usr/local/bin/python${PY_VER} -m pip --python ${PREFIX}/bin/python install -U --require-hashes -r ${MY_DIR}/requirements${PY_VER}.txt
+else
+    ${PREFIX}/bin/python -m ensurepip
+    ${PREFIX}/bin/python -m pip install -U --require-hashes -r ${MY_DIR}/requirements${PY_VER}.txt
+fi
+if [ -e ${PREFIX}/bin/pip3 ] && [ ! -e ${PREFIX}/bin/pip ]; then
+    ln -s pip3 ${PREFIX}/bin/pip
+fi
+# Create a symlink to PREFIX using the ABI_TAG in /opt/python/
+ABI_TAG=$(${PREFIX}/bin/python ${MY_DIR}/python-tag-abi-tag.py)
+ln -s ${PREFIX} /opt/python/${ABI_TAG}
+# Make versioned python commands available directly in environment.
+if [[ "${PY_IMPL}" == "cpython" ]]; then
+    ln -s ${PREFIX}/bin/python /usr/local/bin/python${PY_VER}
+fi
+ln -s ${PREFIX}/bin/python /usr/local/bin/${PY_IMPL}${PY_VER}
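The bootstrap branch in finalize-one.sh relies on pip's cross-environment ``--python`` option (available since pip 22.3), so an already-installed CPython can seed pinned packages into a freshly extracted interpreter without running ``ensurepip`` there. A standalone sketch of the same idea, with an illustrative target path and package set:

.. code-block:: bash

    # illustrative target interpreter; in the image this is /opt/_internal/<abi_tag>/bin/python
    TARGET=/opt/_internal/pp310-pypy310_pp73/bin/python
    if [ -f /usr/local/bin/python3.10 ]; then
        # reuse an existing CPython's pip to install into the new interpreter
        /usr/local/bin/python3.10 -m pip --python "${TARGET}" install -U pip setuptools
    else
        # fall back to the interpreter's bundled ensurepip
        "${TARGET}" -m ensurepip
    fi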
diff --git a/docker/build_scripts/finalize.sh b/docker/build_scripts/finalize.sh
index 621eab92..881f7ac0 100755
--- a/docker/build_scripts/finalize.sh
+++ b/docker/build_scripts/finalize.sh
@@ -11,30 +11,21 @@ source $MY_DIR/build_utils.sh
 
 mkdir /opt/python
 for PREFIX in $(find /opt/_internal/ -mindepth 1 -maxdepth 1 \( -name 'cpython*' -o -name 'pypy*' \)); do
-    # Some python's install as bin/python3. Make them available as
-    # bin/python.
-    if [ -e ${PREFIX}/bin/python3 ] && [ ! -e ${PREFIX}/bin/python ]; then
-        ln -s python3 ${PREFIX}/bin/python
-    fi
-    ${PREFIX}/bin/python -m ensurepip
-    if [ -e ${PREFIX}/bin/pip3 ] && [ ! -e ${PREFIX}/bin/pip ]; then
-        ln -s pip3 ${PREFIX}/bin/pip
-    fi
-    PY_VER=$(${PREFIX}/bin/python -c "import sys; print('.'.join(str(v) for v in sys.version_info[:2]))")
-    # Since we fall back on a canned copy of pip, we might not have
-    # the latest pip and friends. Upgrade them to make sure.
-    ${PREFIX}/bin/pip install -U --require-hashes -r ${MY_DIR}/requirements${PY_VER}.txt
-    # Create a symlink to PREFIX using the ABI_TAG in /opt/python/
-    ABI_TAG=$(${PREFIX}/bin/python ${MY_DIR}/python-tag-abi-tag.py)
-    ln -s ${PREFIX} /opt/python/${ABI_TAG}
-    # Make versioned python commands available directly in environment.
-    if [[ "${PREFIX}" == *"/pypy"* ]]; then
-        ln -s ${PREFIX}/bin/python /usr/local/bin/pypy${PY_VER}
-    else
-        ln -s ${PREFIX}/bin/python /usr/local/bin/python${PY_VER}
-    fi
+    ${MY_DIR}/finalize-one.sh ${PREFIX}
 done
 
+# create manylinux-interpreters script
+cat <<EOF > /usr/local/bin/manylinux-interpreters
+#!/bin/bash
+
+set -euo pipefail
+
+/opt/python/cp310-cp310/bin/python $MY_DIR/manylinux-interpreters.py "\$@"
+EOF
+chmod 755 /usr/local/bin/manylinux-interpreters
+
+MANYLINUX_INTERPRETERS_NO_CHECK=1 /usr/local/bin/manylinux-interpreters ensure "$@"
+
 # Create venv for auditwheel & certifi
 TOOLS_PATH=/opt/_internal/tools
 /opt/python/cp310-cp310/bin/python -m venv $TOOLS_PATH
@@ -84,6 +75,7 @@ clean_pyc /opt/_internal
 
 # remove cache
 rm -rf /root/.cache
+rm -rf /tmp/* || true
 
 hardlink -cv /opt/_internal
diff --git a/docker/build_scripts/install-pypy.sh b/docker/build_scripts/install-pypy.sh
deleted file mode 100755
index c3b54106..00000000
--- a/docker/build_scripts/install-pypy.sh
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/bin/bash
-
-# Stop at any error, show all commands
-set -exuo pipefail
-
-# Get script directory
-MY_DIR=$(dirname "${BASH_SOURCE[0]}")
-
-# Get build utilities
-source $MY_DIR/build_utils.sh
-
-if [ "${BASE_POLICY}" == "musllinux" ]; then
-    echo "Skip PyPy build on musllinux"
-    exit 0
-fi
-
-PYTHON_VERSION=$1
-PYPY_VERSION=$2
-PYPY_DOWNLOAD_URL=https://downloads.python.org/pypy
-
-
-function get_shortdir {
-    local exe=$1
-    $exe -c 'import sys; print("pypy%d.%d-%d.%d.%d" % (sys.version_info[:2]+sys.pypy_version_info[:3]))'
-}
-
-
-mkdir -p /tmp
-cd /tmp
-
-case ${AUDITWHEEL_ARCH} in
-    x86_64) PYPY_ARCH=linux64;;
-    i686) PYPY_ARCH=linux32;;
-    aarch64) PYPY_ARCH=aarch64;;
-    *) echo "No PyPy for ${AUDITWHEEL_ARCH}"; exit 0;;
-esac
-
-EXPAND_NAME=pypy${PYTHON_VERSION}-v${PYPY_VERSION}-${PYPY_ARCH}
-TMPDIR=/tmp/${EXPAND_NAME}
-TARBALL=${EXPAND_NAME}.tar.bz2
-PREFIX="/opt/_internal"
-
-mkdir -p ${PREFIX}
-
-fetch_source ${TARBALL} ${PYPY_DOWNLOAD_URL}
-
-# We only want to check the current tarball sha256sum
-grep " ${TARBALL}\$" ${MY_DIR}/pypy.sha256 > ${TARBALL}.sha256
-# then check sha256 sum
-sha256sum -c ${TARBALL}.sha256
-
-tar -xf ${TARBALL}
-
-# the new PyPy 3 distributions don't have pypy symlinks to pypy3
-if [ ! -f "${TMPDIR}/bin/pypy" ]; then
-    ln -s pypy3 ${TMPDIR}/bin/pypy
-fi
-
-# rename the directory to something shorter like pypy3.7-7.3.4
-PREFIX=${PREFIX}/$(get_shortdir ${TMPDIR}/bin/pypy)
-mv ${TMPDIR} ${PREFIX}
-
-# add a generic "python" symlink
-if [ ! -f "${PREFIX}/bin/python" ]; then
-    ln -s pypy ${PREFIX}/bin/python
-fi
-
-# remove debug symbols
-rm ${PREFIX}/bin/*.debug
-
-# We do not need precompiled .pyc and .pyo files.
-clean_pyc ${PREFIX}
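Compared with the deleted script above, which downloaded the tarball to disk, checked it against pypy.sha256 and only then extracted it, download-and-install-interpreter.sh streams the archive once: ``tee`` feeds the bytes to ``tar`` through a process substitution while ``sha256sum -c`` verifies the same stream, and ``set -o pipefail`` fails the install on a checksum mismatch. A self-contained sketch of that pattern, with placeholder URL and checksum:

.. code-block:: bash

    set -euo pipefail
    URL="https://example.invalid/interpreter.tar.bz2"   # placeholder
    SHA256="<expected sha256>"                          # placeholder
    DEST=/tmp/example-prefix                            # illustrative destination
    mkdir -p "${DEST}"
    # download, extract and verify in a single pass over the bytes
    curl -fsSL "${URL}" \
        | tee >(tar -C "${DEST}" --strip-components 1 -xjf -) \
        | sha256sum -c <(echo "${SHA256}  -")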
diff --git a/docker/build_scripts/manylinux-interpreters.py b/docker/build_scripts/manylinux-interpreters.py
new file mode 100644
index 00000000..00236a42
--- /dev/null
+++ b/docker/build_scripts/manylinux-interpreters.py
@@ -0,0 +1,183 @@
+from __future__ import annotations
+
+import argparse
+import os
+import json
+import subprocess
+import sys
+from functools import cache
+from pathlib import Path
+
+
+HERE = Path(__file__).parent.resolve(strict=True)
+PYTHON_TAGS = json.loads(HERE.joinpath("python_versions.json").read_text())
+INSTALL_DIR = Path("/opt/python")
+ARCH = os.environ["AUDITWHEEL_ARCH"]
+POLICY = os.environ["AUDITWHEEL_POLICY"]
+NO_CHECK = os.environ.get("MANYLINUX_INTERPRETERS_NO_CHECK", "0") == "1"
+
+
+def sort_key(tag):
+    python_tag, _ = tag.split("-")
+    if python_tag.startswith(("cp", "pp")):
+        return python_tag[:2], int(python_tag[2]), int(python_tag[3:])
+    raise LookupError(tag)
+
+
+@cache
+def get_all_tags(no_check: bool = False):
+    all_tags_ = set(p.name for p in INSTALL_DIR.iterdir() if p.is_dir())
+    if POLICY.startswith("manylinux"):
+        all_tags_ |= set(tag for tag in PYTHON_TAGS if ARCH in PYTHON_TAGS[tag])
+    if no_check:
+        all_tags_ |= set(PYTHON_TAGS.keys())
+    all_tags = list(all_tags_)
+    all_tags.sort(key=lambda tag: sort_key(tag))
+    return all_tags
+
+
+def add_parser_list(subparsers):
+    description = "list available or installed interpreters"
+    parser = subparsers.add_parser("list", description=description, help=description)
+    parser.set_defaults(func=_list)
+    parser.add_argument("-v", "--verbose", default=False, action="store_true", help="display additional information (--format=text only, ignored for --format=json)")
+    parser.add_argument("-i", "--installed", default=False, action="store_true", help="only list installed interpreters")
+    parser.add_argument("--format", choices=["text", "json"], default="text", help="text is not meant to be machine readable (i.e. the format is not stable)")
+
+
+def get_info_from_path(path: Path):
+    python = path / "bin" / "python"
+    script = """
+import json
+import sys
+pre_map = {"alpha": "a", "beta": "b", "candidate": "rc"}
+pv = sys.version_info
+pv_pre = pre_map.get(pv[3], "")
+if pv_pre:
+    pv_pre += str(pv[4])
+iv = sys.implementation.version
+iv_pre = pre_map.get(iv[3], "")
+if iv_pre:
+    iv_pre += str(iv[4])
+info = {
+    "pv": ".".join(str(p) for p in pv[:3]) + pv_pre,
+    "i": sys.implementation.name,
+    "iv": ".".join(str(p) for p in iv[:3]) + iv_pre,
+}
+print(json.dumps(info))
+"""
+    output = subprocess.run(
+        [str(python), "-c", script],
+        check=True,
+        text=True,
+        stdout=subprocess.PIPE,
+    ).stdout
+    return json.loads(output)
+
+
+def get_info_from_tag(tag):
+    python_tag, _ = tag.split("-")
+    if python_tag.startswith("pp"):
+        return {
+            "pv": f"{python_tag[2]}.{python_tag[3:]}",
+            "i": "pypy",
+            "iv": PYTHON_TAGS[tag][ARCH]["version"]
+        }
+    raise LookupError(tag)
+
+
+def _list(args):
+    tags = get_all_tags()
+    if args.installed:
+        tags = [tag for tag in tags if INSTALL_DIR.joinpath(tag).exists()]
+
+    tag_infos = []
+    for tag in tags:
+        install_path = INSTALL_DIR.joinpath(tag)
+        installed = install_path.exists()
+        if installed:
+            info = get_info_from_path(install_path)
+        else:
+            info = get_info_from_tag(tag)
+        tag_info = {
+            "identifier": tag,
+            "installed": installed,
+            "python_version": info["pv"],
+            "implementation": info["i"],
+            "implementation_version": info["iv"],
+            "install_path": str(install_path),
+        }
+        tag_infos.append(tag_info)
+
+    if args.format == "json":
+        json.dump(tag_infos, sys.stdout, indent=2)
+        return
+
+    assert args.format == 'text'
+    for tag in tag_infos:
+        print(f"{tag['identifier']}{':' if args.verbose else ''}")
+        if args.verbose:
+            print(f"  installed: {'yes' if tag['installed'] else 'no'}")
+            print(f"  python version: {tag['python_version']}")
+            print(f"  implementation: {tag['implementation']}")
+            print(f"  implementation version: {tag['implementation_version']}")
+            print(f"  install_path: {tag['install_path']}")
+
+
+def add_parser_ensure(subparsers):
+    description = "make sure a list of interpreters are installed"
+    parser = subparsers.add_parser("ensure", description=description, help=description)
+    parser.set_defaults(func=ensure)
+    parser.add_argument("tags", choices=get_all_tags(no_check=NO_CHECK), metavar="TAG", nargs='+', help="tag with format '<python_tag>-<abi_tag>' e.g. 'pp310-pypy310_pp73'")
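+
+
+def ensure_one(tag):
+    install_path = INSTALL_DIR.joinpath(tag)
+    if install_path.exists():
+        print(f"'{tag}' already installed at '{install_path}'")
+        return
+    if tag not in get_all_tags() or ARCH not in PYTHON_TAGS[tag]:
+        print(f"skipping '{tag}' for '{ARCH}' architecture")
+        return
+    print(f"installing '{tag}' at '{install_path}'")
+    install_script = HERE / "download-and-install-interpreter.sh"
+    tag_info = PYTHON_TAGS[tag][ARCH]
+    download_url = tag_info["download_url"]
+    sha256 = tag_info["sha256"]
+    subprocess.run([str(install_script), tag, download_url, sha256], check=True)
+    if not install_path.exists():
+        print("installation failed", file=sys.stderr)
+        exit(1)
+
+
+def ensure(args):
+    for tag in args.tags:
+        ensure_one(tag)
+
+
+def add_parser_ensure_all(subparsers):
+    description = "make sure all interpreters are installed"
+    parser = subparsers.add_parser("ensure-all", description=description, help=description)
+    parser.set_defaults(func=ensure_all)
+
+
+def ensure_all(args):
+    for tag in get_all_tags():
+        ensure_one(tag)
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    subparsers = parser.add_subparsers(required=True)
+    add_parser_ensure(subparsers)
+    add_parser_ensure_all(subparsers)
+    add_parser_list(subparsers)
+    args = parser.parse_args()
+    if not hasattr(args, "func"):
+        parser.print_help()
+        return
+    args.func(args)
+
+
+if __name__ == "__main__":
+    main()

The ``ensure_one`` helper above is a thin wrapper: it looks the tag up in python_versions.json and hands the download URL and checksum to download-and-install-interpreter.sh. Roughly the equivalent manual invocation, using the x86_64 entry for pp310 that this patch adds to python_versions.json:

.. code-block:: bash

    # roughly what `manylinux-interpreters ensure pp310-pypy310_pp73` runs on x86_64
    /opt/_internal/build_scripts/download-and-install-interpreter.sh \
        pp310-pypy310_pp73 \
        https://downloads.python.org/pypy/pypy3.10-v7.3.12-linux64.tar.bz2 \
        6c577993160b6f5ee8cab73cd1a807affcefafe2f7441c87bd926c10505e8731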
'pp310-pypy310_pp73'") + + +def ensure_one(tag): + install_path = INSTALL_DIR.joinpath(tag) + if install_path.exists(): + print(f"'{tag}' already installed at '{install_path}'") + return + if tag not in get_all_tags() or ARCH not in PYTHON_TAGS[tag]: + print(f"skipping '{tag}' for '{ARCH}' architecture") + return + print(f"installing '{tag}' at '{install_path}'") + install_script = HERE / "download-and-install-interpreter.sh" + tag_info = PYTHON_TAGS[tag][ARCH] + download_url = tag_info["download_url"] + sha256 = tag_info["sha256"] + subprocess.run([str(install_script), tag, download_url, sha256], check=True) + if not install_path.exists(): + print("installation failed", file=sys.stderr) + exit(1) + + +def ensure(args): + for tag in args.tags: + ensure_one(tag) + + +def add_parser_ensure_all(subparsers): + description = "make sure all interpreters are installed" + parser = subparsers.add_parser("ensure-all", description=description, help=description) + parser.set_defaults(func=ensure_all) + + +def ensure_all(args): + for tag in get_all_tags(): + ensure_one(tag) + + +def main(): + parser = argparse.ArgumentParser() + subparsers = parser.add_subparsers(required=True) + add_parser_ensure(subparsers) + add_parser_ensure_all(subparsers) + add_parser_list(subparsers) + args = parser.parse_args() + if not hasattr(args, "func"): + parser.print_help() + return + args.func(args) + + +if __name__ == "__main__": + main() diff --git a/docker/build_scripts/pypy.sha256 b/docker/build_scripts/pypy.sha256 deleted file mode 100644 index b9a9b06a..00000000 --- a/docker/build_scripts/pypy.sha256 +++ /dev/null @@ -1,33 +0,0 @@ -a1a84882525dd574c4b051b66e9b7ef0e132392acc2f729420d7825f96835216 pypy3.7-v7.3.7-aarch64.tar.bz2 -0ab9e2e8ae1ac463bb811b9d3ba24d138f41f7378c17ca9e2d8dee51bf151d19 pypy3.7-v7.3.7-linux32.tar.bz2 -8332f923755441fedfe4767a84601c94f4d6f8475384406cb5f259ad8d0b2002 pypy3.7-v7.3.7-linux64.tar.bz2 -cbd44e0a9146b3c03a9d14b265774a848f387ed846316c3e984847e278d0efd3 pypy3.8-v7.3.7-aarch64.tar.bz2 -dfb9d005f0fc917edc60fd618143e4934c412f9168b55166f5519ba0a3b1a835 pypy3.8-v7.3.7-linux32.tar.bz2 -5dee37c7c3cb8b160028fbde3a5901c68043dfa545a16794502b897d4bc40d7e pypy3.8-v7.3.7-linux64.tar.bz2 -dfc62f2c453fb851d10a1879c6e75c31ffebbf2a44d181bb06fcac4750d023fc pypy3.7-v7.3.9-aarch64.tar.bz2 -3398cece0167b81baa219c9cd54a549443d8c0a6b553ec8ec13236281e0d86cd pypy3.7-v7.3.9-linux32.tar.bz2 -c58195124d807ecc527499ee19bc511ed753f4f2e418203ca51bc7e3b124d5d1 pypy3.7-v7.3.9-linux64.tar.bz2 -5e124455e207425e80731dff317f0432fa0aba1f025845ffca813770e2447e32 pypy3.8-v7.3.9-aarch64.tar.bz2 -4b261516c6c59078ab0c8bd7207327a1b97057b4ec1714ed5e79a026f9efd492 pypy3.8-v7.3.9-linux32.tar.bz2 -08be25ec82fc5d23b78563eda144923517daba481a90af0ace7a047c9c9a3c34 pypy3.8-v7.3.9-linux64.tar.bz2 -2e1ae193d98bc51439642a7618d521ea019f45b8fb226940f7e334c548d2b4b9 pypy3.9-v7.3.9-aarch64.tar.bz2 -0de4b9501cf28524cdedcff5052deee9ea4630176a512bdc408edfa30914bae7 pypy3.9-v7.3.9-linux32.tar.bz2 -46818cb3d74b96b34787548343d266e2562b531ddbaf330383ba930ff1930ed5 pypy3.9-v7.3.9-linux64.tar.bz2 -e4caa1a545f22cfee87d5b9aa6f8852347f223643ad7d2562e0b2a2f4663ad98 pypy3.8-v7.3.10-aarch64.tar.bz2 -b70ed7fdc73a74ebdc04f07439f7bad1a849aaca95e26b4a74049d0e483f071c pypy3.8-v7.3.10-linux32.tar.bz2 -ceef6496fd4ab1c99e3ec22ce657b8f10f8bb77a32427fadfb5e1dd943806011 pypy3.8-v7.3.10-linux64.tar.bz2 -657a04fd9a5a992a2f116a9e7e9132ea0c578721f59139c9fb2083775f71e514 pypy3.9-v7.3.10-aarch64.tar.bz2 -b6db59613b9a1c0c1ab87bc103f52ee95193423882dc8a848b68850b8ba59cc5 
-95cf99406179460d63ddbfe1ec870f889d05f7767ce81cef14b88a3a9e127266  pypy3.9-v7.3.10-linux64.tar.bz2
-9a2fa0b8d92b7830aa31774a9a76129b0ff81afbd22cd5c41fbdd9119e859f55  pypy3.8-v7.3.11-aarch64.tar.bz2
-a79b31fce8f5bc1f9940b6777134189a1d3d18bda4b1c830384cda90077c9176  pypy3.8-v7.3.11-linux32.tar.bz2
-470330e58ac105c094041aa07bb05676b06292bc61409e26f5c5593ebb2292d9  pypy3.8-v7.3.11-linux64.tar.bz2
-09175dc652ed895d98e9ad63d216812bf3ee7e398d900a9bf9eb2906ba8302b9  pypy3.9-v7.3.11-aarch64.tar.bz2
-0099d72c2897b229057bff7e2c343624aeabdc60d6fb43ca882bff082f1ffa48  pypy3.9-v7.3.11-linux32.tar.bz2
-d506172ca11071274175d74e9c581c3166432d0179b036470e3b9e8d20eae581  pypy3.9-v7.3.11-linux64.tar.bz2
-e9327fb9edaf2ad91935d5b8563ec5ff24193bddb175c1acaaf772c025af1824  pypy3.9-v7.3.12-aarch64.tar.bz2
-aa04370d38f451683ccc817d76c2b3e0f471dbb879e0bd618d9affbdc9cd37a4  pypy3.9-v7.3.12-linux32.tar.bz2
-84c89b966fab2b58f451a482ee30ca7fec3350435bd0b9614615c61dc6da2390  pypy3.9-v7.3.12-linux64.tar.bz2
-26208b5a134d9860a08f74cce60960005758e82dc5f0e3566a48ed863a1f16a1  pypy3.10-v7.3.12-aarch64.tar.bz2
-811667825ae58ada4b7c3d8bc1b5055b9f9d6a377e51aedfbe0727966603f60e  pypy3.10-v7.3.12-linux32.tar.bz2
-6c577993160b6f5ee8cab73cd1a807affcefafe2f7441c87bd926c10505e8731  pypy3.10-v7.3.12-linux64.tar.bz2
diff --git a/docker/build_scripts/python_versions.json b/docker/build_scripts/python_versions.json
new file mode 100644
index 00000000..dee55804
--- /dev/null
+++ b/docker/build_scripts/python_versions.json
@@ -0,0 +1,70 @@
+{
+  "pp37-pypy37_pp73": {
+    "x86_64": {
+      "version": "7.3.9",
+      "download_url": "https://downloads.python.org/pypy/pypy3.7-v7.3.9-linux64.tar.bz2",
+      "sha256": "c58195124d807ecc527499ee19bc511ed753f4f2e418203ca51bc7e3b124d5d1"
+    },
+    "aarch64": {
+      "version": "7.3.9",
+      "download_url": "https://downloads.python.org/pypy/pypy3.7-v7.3.9-aarch64.tar.bz2",
+      "sha256": "dfc62f2c453fb851d10a1879c6e75c31ffebbf2a44d181bb06fcac4750d023fc"
+    },
+    "i686": {
+      "version": "7.3.9",
+      "download_url": "https://downloads.python.org/pypy/pypy3.7-v7.3.9-linux32.tar.bz2",
+      "sha256": "3398cece0167b81baa219c9cd54a549443d8c0a6b553ec8ec13236281e0d86cd"
+    }
+  },
+  "pp38-pypy38_pp73": {
+    "x86_64": {
+      "version": "7.3.11",
+      "download_url": "https://downloads.python.org/pypy/pypy3.8-v7.3.11-linux64.tar.bz2",
+      "sha256": "470330e58ac105c094041aa07bb05676b06292bc61409e26f5c5593ebb2292d9"
+    },
+    "aarch64": {
+      "version": "7.3.11",
+      "download_url": "https://downloads.python.org/pypy/pypy3.8-v7.3.11-aarch64.tar.bz2",
+      "sha256": "9a2fa0b8d92b7830aa31774a9a76129b0ff81afbd22cd5c41fbdd9119e859f55"
+    },
+    "i686": {
+      "version": "7.3.11",
+      "download_url": "https://downloads.python.org/pypy/pypy3.8-v7.3.11-linux32.tar.bz2",
+      "sha256": "a79b31fce8f5bc1f9940b6777134189a1d3d18bda4b1c830384cda90077c9176"
+    }
+  },
+  "pp39-pypy39_pp73": {
+    "x86_64": {
+      "version": "7.3.12",
+      "download_url": "https://downloads.python.org/pypy/pypy3.9-v7.3.12-linux64.tar.bz2",
+      "sha256": "84c89b966fab2b58f451a482ee30ca7fec3350435bd0b9614615c61dc6da2390"
+    },
+    "aarch64": {
+      "version": "7.3.12",
+      "download_url": "https://downloads.python.org/pypy/pypy3.9-v7.3.12-aarch64.tar.bz2",
+      "sha256": "e9327fb9edaf2ad91935d5b8563ec5ff24193bddb175c1acaaf772c025af1824"
+    },
+    "i686": {
+      "version": "7.3.12",
+      "download_url": "https://downloads.python.org/pypy/pypy3.9-v7.3.12-linux32.tar.bz2",
+      "sha256": "aa04370d38f451683ccc817d76c2b3e0f471dbb879e0bd618d9affbdc9cd37a4"
+    }
+  },
+  "pp310-pypy310_pp73": {
+    "x86_64": {
+      "version": "7.3.12",
+      "download_url": "https://downloads.python.org/pypy/pypy3.10-v7.3.12-linux64.tar.bz2",
+      "sha256": "6c577993160b6f5ee8cab73cd1a807affcefafe2f7441c87bd926c10505e8731"
+    },
+    "aarch64": {
+      "version": "7.3.12",
+      "download_url": "https://downloads.python.org/pypy/pypy3.10-v7.3.12-aarch64.tar.bz2",
+      "sha256": "26208b5a134d9860a08f74cce60960005758e82dc5f0e3566a48ed863a1f16a1"
+    },
+    "i686": {
+      "version": "7.3.12",
+      "download_url": "https://downloads.python.org/pypy/pypy3.10-v7.3.12-linux32.tar.bz2",
+      "sha256": "811667825ae58ada4b7c3d8bc1b5055b9f9d6a377e51aedfbe0727966603f60e"
+    }
+  }
+}
\ No newline at end of file
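The file is keyed by interpreter tag, then by architecture, so both manylinux-interpreters.py and the updater below can address an entry as ``PYTHON_TAGS[tag][arch]``. For example, extracting one download URL from a checkout (assuming ``jq`` is available; the path is relative to the repository root):

.. code-block:: bash

    # print the aarch64 download URL recorded for pp39-pypy39_pp73
    jq -r '."pp39-pypy39_pp73".aarch64.download_url' docker/build_scripts/python_versions.json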
"download_url": "https://downloads.python.org/pypy/pypy3.10-v7.3.12-linux64.tar.bz2", + "sha256": "6c577993160b6f5ee8cab73cd1a807affcefafe2f7441c87bd926c10505e8731" + }, + "aarch64": { + "version": "7.3.12", + "download_url": "https://downloads.python.org/pypy/pypy3.10-v7.3.12-aarch64.tar.bz2", + "sha256": "26208b5a134d9860a08f74cce60960005758e82dc5f0e3566a48ed863a1f16a1" + }, + "i686": { + "version": "7.3.12", + "download_url": "https://downloads.python.org/pypy/pypy3.10-v7.3.12-linux32.tar.bz2", + "sha256": "811667825ae58ada4b7c3d8bc1b5055b9f9d6a377e51aedfbe0727966603f60e" + } + } +} \ No newline at end of file diff --git a/noxfile.py b/noxfile.py index 4c69856f..39a6ffee 100644 --- a/noxfile.py +++ b/noxfile.py @@ -57,7 +57,13 @@ def update_python_tools(session): ) -@nox.session(python="3.10", reuse_venv=True) +@nox.session(python="3.11", reuse_venv=True) def update_native_dependencies(session): session.install("lastversion!=1.6.0,!=2.0.0", "packaging", "requests") - session.run("python", "update_native_dependencies.py", *session.posargs) + session.run("python", "tools/update_native_dependencies.py", *session.posargs) + + +@nox.session(python="3.11", reuse_venv=True) +def update_interpreters_download(session): + session.install("packaging", "requests") + session.run("python", "tools/update_interpreters_download.py", *session.posargs) diff --git a/tests/run_tests.sh b/tests/run_tests.sh index 0de0e86b..32cac0cb 100755 --- a/tests/run_tests.sh +++ b/tests/run_tests.sh @@ -17,7 +17,39 @@ else exit 1 fi +if [ "${AUDITWHEEL_POLICY:0:10}" == "musllinux_" ]; then + EXPECTED_PYTHON_COUNT=7 + EXPECTED_PYTHON_COUNT_ALL=7 +else + if [ "${AUDITWHEEL_ARCH}" == "x86_64" ] || [ "${AUDITWHEEL_ARCH}" == "i686" ] || [ "${AUDITWHEEL_ARCH}" == "aarch64" ]; then + EXPECTED_PYTHON_COUNT=11 + EXPECTED_PYTHON_COUNT_ALL=11 + else + EXPECTED_PYTHON_COUNT=7 + EXPECTED_PYTHON_COUNT_ALL=7 + fi +fi +PYTHON_COUNT=$(manylinux-interpreters list --installed | wc -l) +if [ ${EXPECTED_PYTHON_COUNT} -ne ${PYTHON_COUNT} ]; then + echo "unexpected number of default python installations: ${PYTHON_COUNT}, expecting ${EXPECTED_PYTHON_COUNT}" + manylinux-interpreters list --installed + exit 1 +fi +PYTHON_COUNT_ALL=$(manylinux-interpreters list | wc -l) +if [ ${EXPECTED_PYTHON_COUNT_ALL} -ne ${PYTHON_COUNT_ALL} ]; then + echo "unexpected number of overall python installations: ${PYTHON_COUNT_ALL}, expecting ${EXPECTED_PYTHON_COUNT_ALL}" + manylinux-interpreters list + exit 1 +fi +manylinux-interpreters ensure-all +PYTHON_COUNT=$(manylinux-interpreters list --installed | wc -l) +if [ ${EXPECTED_PYTHON_COUNT_ALL} -ne ${PYTHON_COUNT} ]; then + echo "unexpected number of python installations after 'manylinux-python ensure-all': ${PYTHON_COUNT}, expecting ${EXPECTED_PYTHON_COUNT_ALL}" + manylinux-interpreters list --installed + exit 1 +fi +PYTHON_COUNT=0 for PYTHON in /opt/python/*/bin/python; do # Smoke test to make sure that our Pythons work, and do indeed detect as # being manylinux compatible: @@ -26,19 +58,20 @@ for PYTHON in /opt/python/*/bin/python; do $PYTHON $MY_DIR/ssl-check.py IMPLEMENTATION=$(${PYTHON} -c "import sys; print(sys.implementation.name)") PYVERS=$(${PYTHON} -c "import sys; print('.'.join(map(str, sys.version_info[:2])))") - if [ "${IMPLEMENTATION}" == "pypy" ]; then - LINK_PREFIX=pypy - else - LINK_PREFIX=python + if [ "${IMPLEMENTATION}" == "cpython" ]; then # Make sure sqlite3 module can be loaded properly and is the manylinux version one # c.f. 
diff --git a/tests/run_tests.sh b/tests/run_tests.sh
index 0de0e86b..32cac0cb 100755
--- a/tests/run_tests.sh
+++ b/tests/run_tests.sh
@@ -17,7 +17,39 @@
 else
     exit 1
 fi
 
+if [ "${AUDITWHEEL_POLICY:0:10}" == "musllinux_" ]; then
+    EXPECTED_PYTHON_COUNT=7
+    EXPECTED_PYTHON_COUNT_ALL=7
+else
+    if [ "${AUDITWHEEL_ARCH}" == "x86_64" ] || [ "${AUDITWHEEL_ARCH}" == "i686" ] || [ "${AUDITWHEEL_ARCH}" == "aarch64" ]; then
+        EXPECTED_PYTHON_COUNT=11
+        EXPECTED_PYTHON_COUNT_ALL=11
+    else
+        EXPECTED_PYTHON_COUNT=7
+        EXPECTED_PYTHON_COUNT_ALL=7
+    fi
+fi
+PYTHON_COUNT=$(manylinux-interpreters list --installed | wc -l)
+if [ ${EXPECTED_PYTHON_COUNT} -ne ${PYTHON_COUNT} ]; then
+    echo "unexpected number of default python installations: ${PYTHON_COUNT}, expecting ${EXPECTED_PYTHON_COUNT}"
+    manylinux-interpreters list --installed
+    exit 1
+fi
+PYTHON_COUNT_ALL=$(manylinux-interpreters list | wc -l)
+if [ ${EXPECTED_PYTHON_COUNT_ALL} -ne ${PYTHON_COUNT_ALL} ]; then
+    echo "unexpected number of overall python installations: ${PYTHON_COUNT_ALL}, expecting ${EXPECTED_PYTHON_COUNT_ALL}"
+    manylinux-interpreters list
+    exit 1
+fi
+manylinux-interpreters ensure-all
+PYTHON_COUNT=$(manylinux-interpreters list --installed | wc -l)
+if [ ${EXPECTED_PYTHON_COUNT_ALL} -ne ${PYTHON_COUNT} ]; then
+    echo "unexpected number of python installations after 'manylinux-interpreters ensure-all': ${PYTHON_COUNT}, expecting ${EXPECTED_PYTHON_COUNT_ALL}"
+    manylinux-interpreters list --installed
+    exit 1
+fi
+PYTHON_COUNT=0
 for PYTHON in /opt/python/*/bin/python; do
     # Smoke test to make sure that our Pythons work, and do indeed detect as
     # being manylinux compatible:
@@ -26,19 +58,20 @@
     $PYTHON $MY_DIR/ssl-check.py
     IMPLEMENTATION=$(${PYTHON} -c "import sys; print(sys.implementation.name)")
     PYVERS=$(${PYTHON} -c "import sys; print('.'.join(map(str, sys.version_info[:2])))")
-    if [ "${IMPLEMENTATION}" == "pypy" ]; then
-        LINK_PREFIX=pypy
-    else
-        LINK_PREFIX=python
+    if [ "${IMPLEMENTATION}" == "cpython" ]; then
         # Make sure sqlite3 module can be loaded properly and is the manylinux version one
         # c.f. https://github.com/pypa/manylinux/issues/1030
         $PYTHON -c 'import sqlite3; print(sqlite3.sqlite_version); assert sqlite3.sqlite_version_info[0:2] >= (3, 34)'
         # Make sure tkinter module can be loaded properly
        $PYTHON -c 'import tkinter; print(tkinter.TkVersion); assert tkinter.TkVersion >= 8.6'
+        # cpython shall be available as python
+        LINK_VERSION=$(python${PYVERS} -VV)
+        REAL_VERSION=$(${PYTHON} -VV)
+        test "${LINK_VERSION}" = "${REAL_VERSION}"
     fi
-    # pythonX.Y / pypyX.Y shall be available directly in PATH
-    LINK_VERSION=$(${LINK_PREFIX}${PYVERS} -V)
-    REAL_VERSION=$(${PYTHON} -V)
+    # cpythonX.Y / pypyX.Y shall be available directly in PATH
+    LINK_VERSION=$(${IMPLEMENTATION}${PYVERS} -VV)
+    REAL_VERSION=$(${PYTHON} -VV)
     test "${LINK_VERSION}" = "${REAL_VERSION}"
 
     # check a simple project can be built
@@ -63,7 +96,14 @@
         echo "invalid answer, expecting 42"
         exit 1
     fi
+
+    PYTHON_COUNT=$(( $PYTHON_COUNT + 1 ))
 done
+if [ ${EXPECTED_PYTHON_COUNT_ALL} -ne ${PYTHON_COUNT} ]; then
+    echo "not all python installations were tested: ${PYTHON_COUNT}, expecting ${EXPECTED_PYTHON_COUNT_ALL}"
+    ls /opt/python
+    exit 1
+fi
 
 # minimal tests for tools that should be present
 auditwheel --version
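The counting logic above doubles as a quick manual check: after running ``ensure-all`` in a container, the installed list must match the full list. A hedged sketch of the same check outside the test harness (both commands are provided by this patch):

.. code-block:: bash

    manylinux-interpreters ensure-all
    # the two counts should now be identical
    diff <(manylinux-interpreters list | sort) <(manylinux-interpreters list --installed | sort)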
diff --git a/tools/update_interpreters_download.py b/tools/update_interpreters_download.py
new file mode 100644
index 00000000..b50b1da8
--- /dev/null
+++ b/tools/update_interpreters_download.py
@@ -0,0 +1,88 @@
+from __future__ import annotations
+
+import json
+from hashlib import sha256
+from pathlib import Path
+
+import requests
+from packaging.specifiers import Specifier
+from packaging.version import Version
+
+
+PROJECT_ROOT = Path(__file__).parent.parent.resolve(strict=True)
+PYTHON_VERSIONS = PROJECT_ROOT / "docker" / "build_scripts" / "python_versions.json"
+
+
+def update_pypy_version(releases, py_spec, pp_spec, tag, arch, version_dict):
+    pypy_arch = {"x86_64": "x64"}.get(arch, arch)
+    current_version = None
+    if "version" in version_dict:
+        current_version = Version(version_dict["version"])
+    for r in releases:
+        if current_version is not None and current_version >= r["pypy_version"]:
+            continue
+        if not pp_spec.contains(r["pypy_version"]):
+            continue
+        if not py_spec.contains(r["python_version"]):
+            continue
+        try:
+            file = next(
+                f for f in r["files"]
+                if f["arch"] == pypy_arch and f["platform"] == "linux"
+            )
+        except StopIteration:
+            continue
+        print(f"updating {tag} {arch} to {r['pypy_version']}")
+        response = requests.get(file["download_url"], stream=True)
+        response.raise_for_status()
+        sha256sum = sha256()
+        for chunk in response.iter_content(chunk_size=1024 * 4):
+            sha256sum.update(chunk)
+        version_dict["version"] = str(r["pypy_version"])
+        version_dict["download_url"] = file["download_url"]
+        version_dict["sha256"] = sha256sum.hexdigest()
+        break
+
+
+def update_pypy_versions(versions):
+    response = requests.get("https://downloads.python.org/pypy/versions.json")
+    response.raise_for_status()
+    releases = [r for r in response.json() if r["pypy_version"] != "nightly"]
+    for release in releases:
+        release["pypy_version"] = Version(release["pypy_version"])
+        py_version = Version(release["python_version"])
+        release["python_version"] = Version(f"{py_version.major}.{py_version.minor}")
+    # filter-out pre-release
+    releases = [
+        r for r in releases
+        if not r["pypy_version"].is_prerelease and not r["pypy_version"].is_devrelease
+    ]
+    releases.sort(key=lambda r: r["pypy_version"], reverse=True)
+
+    for tag in versions:
+        if not tag.startswith("pp"):
+            continue
+        python_tag, abi_tag = tag.split("-")
+        py_major = int(python_tag[2])
+        py_minor = int(python_tag[3:])
+        _, pp_ver = abi_tag.split("_")
+        assert pp_ver.startswith("pp")
+        pp_major = int(pp_ver[2])
+        assert pp_major >= 7
+        pp_minor = int(pp_ver[3:])
+        py_spec = Specifier(f"=={py_major}.{py_minor}.*")
+        pp_spec = Specifier(f"=={pp_major}.{pp_minor}.*")
+        for arch in versions[tag]:
+            update_pypy_version(
+                releases, py_spec, pp_spec, tag, arch, versions[tag][arch]
+            )
+
+
+def main():
+    versions = json.loads(PYTHON_VERSIONS.read_text())
+    update_pypy_versions(versions)
+    PYTHON_VERSIONS.write_text(json.dumps(versions, indent=2))
+
+
+if __name__ == "__main__":
+    main()
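With the nox session registered in noxfile.py above, refreshing python_versions.json is a one-liner: the updater fetches https://downloads.python.org/pypy/versions.json, picks the newest matching non-prerelease build per tag and architecture, and hashes each tarball before rewriting the file.

.. code-block:: bash

    # regenerate docker/build_scripts/python_versions.json, then review the result
    nox -s update_interpreters_download
    git diff docker/build_scripts/python_versions.json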
diff --git a/update_native_dependencies.py b/tools/update_native_dependencies.py
similarity index 89%
rename from update_native_dependencies.py
rename to tools/update_native_dependencies.py
index 5c7d404c..3ec1e4c2 100644
--- a/update_native_dependencies.py
+++ b/tools/update_native_dependencies.py
@@ -11,6 +11,10 @@
 from lastversion.Version import Version
 
 
+PROJECT_ROOT = Path(__file__).parent.parent.resolve(strict=True)
+DOCKERFILE = PROJECT_ROOT / "docker" / "Dockerfile"
+
+
 def _sha256(url):
     response = requests.get(
         url,
@@ -25,8 +29,7 @@ def _sha256(url):
 
 
 def _update_cpython(dry_run):
-    dockerfile = Path(__file__).parent / "docker" / "Dockerfile"
-    lines = dockerfile.read_text().splitlines()
+    lines = DOCKERFILE.read_text().splitlines()
     re_ = re.compile(r"^RUN.*/build-cpython.sh (?P<version>.*)$")
     for i in range(len(lines)):
         match = re_.match(lines[i])
@@ -42,7 +45,7 @@ def _update_cpython(dry_run):
             message = f"Bump CPython {current_version} → {latest_version}"
             print(message)
             if not dry_run:
-                dockerfile.write_text("\n".join(lines) + "\n")
+                DOCKERFILE.write_text("\n".join(lines) + "\n")
                 subprocess.check_call(["git", "commit", "-am", message])
 
 
@@ -57,8 +60,7 @@ def _update_with_root(tool, dry_run):
     major = {
         "openssl": "1.1",
     }
-    dockerfile = Path(__file__).parent / "docker" / "Dockerfile"
-    lines = dockerfile.read_text().splitlines()
+    lines = DOCKERFILE.read_text().splitlines()
     re_ = re.compile(f"^RUN export {tool.upper()}_ROOT={tool}-(?P<version>\\S+) && \\\\$")
     for i in range(len(lines)):
         match = re_.match(lines[i])
@@ -80,14 +82,13 @@ def _update_with_root(tool, dry_run):
             message = f"Bump {tool} {current_version} → {latest_version}"
             print(message)
             if not dry_run:
-                dockerfile.write_text("\n".join(lines) + "\n")
+                DOCKERFILE.write_text("\n".join(lines) + "\n")
                 subprocess.check_call(["git", "commit", "-am", message])
             break
 
 
 def _update_sqlite(dry_run):
-    dockerfile = Path(__file__).parent / "docker" / "Dockerfile"
-    lines = dockerfile.read_text().splitlines()
+    lines = DOCKERFILE.read_text().splitlines()
     re_ = re.compile(f"^RUN export SQLITE_AUTOCONF_ROOT=sqlite-autoconf-(?P<version>\\S+) && \\\\$")
     for i in range(len(lines)):
         match = re_.match(lines[i])
@@ -113,7 +114,7 @@ def _update_sqlite(dry_run):
             message = f"Bump sqlite {current_version} → {latest_version}"
             print(message)
             if not dry_run:
-                dockerfile.write_text("\n".join(lines) + "\n")
+                DOCKERFILE.write_text("\n".join(lines) + "\n")
                 subprocess.check_call(["git", "commit", "-am", message])
             break
 
@@ -122,8 +123,7 @@ def _update_with_gh(tool, dry_run):
     repo = {
         "libxcrypt": "besser82/libxcrypt",
    }
-    dockerfile = Path(__file__).parent / "docker" / "Dockerfile"
-    lines = dockerfile.read_text().splitlines()
+    lines = DOCKERFILE.read_text().splitlines()
     re_ = re.compile(f"^RUN export {tool.upper()}_VERSION=(?P<version>\\S+) && \\\\$")
     for i in range(len(lines)):
         match = re_.match(lines[i])
@@ -140,14 +140,13 @@ def _update_with_gh(tool, dry_run):
             message = f"Bump {tool} {current_version} → {latest_version}"
             print(message)
             if not dry_run:
-                dockerfile.write_text("\n".join(lines) + "\n")
+                DOCKERFILE.write_text("\n".join(lines) + "\n")
                 subprocess.check_call(["git", "commit", "-am", message])
             break
 
 
 def _update_tcltk(dry_run):
-    dockerfile = Path(__file__).parent / "docker" / "Dockerfile"
-    lines = dockerfile.read_text().splitlines()
+    lines = DOCKERFILE.read_text().splitlines()
     re_ = re.compile("^RUN export TCL_ROOT=tcl(?P<version>\\S+) && \\\\$")
     for i in range(len(lines)):
         match = re_.match(lines[i])
@@ -168,7 +167,7 @@ def _update_tcltk(dry_run):
             message = f"Bump Tcl/Tk {current_version} → {latest_version}"
             print(message)
             if not dry_run:
-                dockerfile.write_text("\n".join(lines) + "\n")
+                DOCKERFILE.write_text("\n".join(lines) + "\n")
                 subprocess.check_call(["git", "commit", "-am", message])
             break