diff --git a/cpython-unix/build.py b/cpython-unix/build.py
index ebfff8be..88372fc7 100755
--- a/cpython-unix/build.py
+++ b/cpython-unix/build.py
@@ -214,7 +214,7 @@ def add_target_env(env, build_platform, target_triple, build_env):
     env["EXTRA_TARGET_LDFLAGS"] = " ".join(extra_target_ldflags)


-def toolchain_archive_path(package_name, host_platform, downloads):
+def toolchain_archive_path(package_name, host_platform, *, downloads: Downloads):
     entry = downloads[package_name]

     basename = "%s-%s-%s.tar" % (package_name, entry["version"], host_platform)
@@ -222,8 +222,8 @@ def toolchain_archive_path(package_name, host_platform, downloads):
     return BUILD / basename


-def install_binutils(platform):
-    return platform != "macos"
+def install_binutils(host_platform):
+    return host_platform != "macos"


 def simple_build(
@@ -234,14 +234,16 @@ def simple_build(
     host_platform,
     target_triple,
     optimizations,
-    downloads: Downloads,
     dest_archive,
     extra_archives=None,
     tools_path="deps",
+    *,
+    downloads: Downloads,
 ):
-    archive = download_entry(entry, DOWNLOADS_PATH, downloads)
+    archive = download_entry(entry, DOWNLOADS_PATH, downloads=downloads)

     with build_environment(client, image) as build_env:
+
         if settings.get("needs_toolchain"):
             build_env.install_toolchain(
                 BUILD,
@@ -250,10 +252,11 @@ def simple_build(
                 binutils=install_binutils(host_platform),
                 clang=True,
                 musl="musl" in target_triple,
+                downloads=downloads,
             )

         for a in extra_archives or []:
-            build_env.install_artifact_archive(BUILD, a, target_triple, optimizations)
+            build_env.install_artifact_archive(BUILD, a, target_triple, optimizations, downloads=downloads)

         build_env.copy_file(archive)
         build_env.copy_file(SUPPORT / ("build-%s.sh" % entry))
@@ -275,9 +278,9 @@ def simple_build(
         build_env.get_tools_archive(dest_archive, tools_path)


-def build_binutils(client, image, host_platform, downloads):
+def build_binutils(client, image, host_platform, *, downloads: Downloads):
     """Build binutils in the Docker image."""
-    archive = download_entry("binutils", DOWNLOADS_PATH, downloads)
+    archive = download_entry("binutils", DOWNLOADS_PATH, downloads=downloads)

     with build_environment(client, image) as build_env:
         install_sccache(build_env)
@@ -295,13 +298,18 @@ def build_binutils(client, image, host_platform, downloads):
         )

         build_env.get_tools_archive(
-            toolchain_archive_path("binutils", host_platform, downloads), "host"
+            toolchain_archive_path(
+                "binutils",
+                host_platform,
+                downloads=downloads,
+            ),
+            "host",
         )


-def materialize_clang(host_platform: str, target_triple: str, downloads):
+def materialize_clang(host_platform: str, target_triple: str, *, downloads: Downloads):
     entry = clang_toolchain(host_platform, target_triple)
-    tar_zst = download_entry(entry, DOWNLOADS_PATH, downloads)
+    tar_zst = download_entry(entry, DOWNLOADS_PATH, downloads=downloads)
     local_filename = "%s-%s-%s.tar" % (
         entry,
         downloads[entry]["version"],
@@ -315,12 +323,24 @@ def materialize_clang(host_platform: str, target_triple: str, downloads):
             dctx.copy_stream(ifh, ofh)


-def build_musl(client, image, host_platform: str, target_triple: str, downloads):
-    musl_archive = download_entry("musl", DOWNLOADS_PATH, downloads)
+def build_musl(
+    client,
+    image,
+    host_platform: str,
+    target_triple: str,
+    *,
+    downloads: Downloads,
+):
+    musl_archive = download_entry("musl", DOWNLOADS_PATH, downloads=downloads)

     with build_environment(client, image) as build_env:
         build_env.install_toolchain(
-            BUILD, host_platform, target_triple, binutils=True, clang=True
+            BUILD,
+            host_platform,
+            target_triple,
+            binutils=True,
+            clang=True,
+            downloads=downloads,
         )
         build_env.copy_file(musl_archive)
         build_env.copy_file(SUPPORT / "build-musl.sh")
@@ -333,7 +353,12 @@ def build_musl(client, image, host_platform: str, target_triple: str, downloads):
         build_env.run("build-musl.sh", environment=env)

         build_env.get_tools_archive(
-            toolchain_archive_path("musl", host_platform), "host"
+            toolchain_archive_path(
+                "musl",
+                host_platform,
+                downloads=downloads,
+            ),
+            "host",
         )


@@ -345,9 +370,10 @@ def build_libedit(
     target_triple,
     optimizations,
     dest_archive,
+    *,
     downloads: Downloads,
 ):
-    libedit_archive = download_entry("libedit", DOWNLOADS_PATH, downloads)
+    libedit_archive = download_entry("libedit", DOWNLOADS_PATH, downloads=downloads)

     with build_environment(client, image) as build_env:
         if settings.get("needs_toolchain"):
@@ -358,10 +384,11 @@ def build_libedit(
                 binutils=install_binutils(host_platform),
                 clang=True,
                 musl="musl" in target_triple,
+                downloads=downloads,
             )

         build_env.install_artifact_archive(
-            BUILD, "ncurses", target_triple, optimizations
+            BUILD, "ncurses", target_triple, optimizations, downloads=downloads
         )
         build_env.copy_file(libedit_archive)
         build_env.copy_file(SUPPORT / "build-libedit.sh")
@@ -384,11 +411,12 @@ def build_tix(
     target_triple,
     optimizations,
     dest_archive,
+    *,
     downloads: Downloads,
 ):
-    tcl_archive = download_entry("tcl", DOWNLOADS_PATH, downloads)
-    tk_archive = download_entry("tk", DOWNLOADS_PATH, downloads)
-    tix_archive = download_entry("tix", DOWNLOADS_PATH, downloads)
+    tcl_archive = download_entry("tcl", DOWNLOADS_PATH, downloads=downloads)
+    tk_archive = download_entry("tk", DOWNLOADS_PATH, downloads=downloads)
+    tix_archive = download_entry("tix", DOWNLOADS_PATH, downloads=downloads)

     with build_environment(client, image) as build_env:
         if settings.get("needs_toolchain"):
@@ -399,6 +427,7 @@ def build_tix(
                 binutils=install_binutils(host_platform),
                 clang=True,
                 musl="musl" in target_triple,
+                downloads=downloads,
             )

         depends = {"tcl", "tk"}
@@ -406,7 +435,7 @@ def build_tix(
             depends |= {"libX11", "xorgproto"}

         for p in sorted(depends):
-            build_env.install_artifact_archive(BUILD, p, target_triple, optimizations)
+            build_env.install_artifact_archive(BUILD, p, target_triple, optimizations, downloads=downloads)

         for p in (tcl_archive, tk_archive, tix_archive, SUPPORT / "build-tix.sh"):
             build_env.copy_file(p)
@@ -432,10 +461,11 @@ def build_cpython_host(
     target_triple: str,
     optimizations: str,
     dest_archive,
+    *,
     downloads: Downloads,
 ):
     """Build binutils in the Docker image."""
-    archive = download_entry(entry, DOWNLOADS_PATH, downloads)
+    archive = download_entry(entry, DOWNLOADS_PATH, downloads=downloads)

     with build_environment(client, image) as build_env:
         python_version = downloads[entry]["version"]
@@ -446,6 +476,7 @@ def build_cpython_host(
             target_triple,
             binutils=install_binutils(host_platform),
             clang=True,
+            downloads=downloads,
         )

         build_env.copy_file(archive)
@@ -463,7 +494,7 @@ def build_cpython_host(
             "m4",
         }
         for p in sorted(packages):
-            build_env.install_artifact_archive(BUILD, p, target_triple, optimizations)
+            build_env.install_artifact_archive(BUILD, p, target_triple, optimizations, downloads=downloads)

         env = {
             "PYTHON_VERSION": python_version,
@@ -492,12 +523,13 @@ def build_cpython_host(
 def python_build_info(
     build_env,
     version,
-    platform,
+    host_platform,
     target_triple,
     musl,
     optimizations,
     extensions,
     extra_metadata,
+    *,
     downloads: Downloads,
 ):
     """Obtain build metadata for the Python distribution."""
@@ -508,7 +540,7 @@ def python_build_info(
     binary_suffix = ""

-    if platform == "linux64":
+    if host_platform == "linux64":
         bi["core"]["static_lib"] = (
             "install/lib/python{version}/config-{version}{binary_suffix}-x86_64-linux-gnu/libpython{version}{binary_suffix}.a".format(
                 version=version, binary_suffix=binary_suffix
             )
@@ -522,7 +554,7 @@ def python_build_info(
         )

         if optimizations in ("lto", "pgo+lto"):
-            llvm_version = downloads[clang_toolchain(platform, target_triple)][
+            llvm_version = downloads[clang_toolchain(host_platform, target_triple)][
                 "version"
             ]
             if "+" in llvm_version:
@@ -531,7 +563,7 @@ def python_build_info(
             object_file_format = f"llvm-bitcode:%{llvm_version}"
         else:
             object_file_format = "elf"
-    elif platform == "macos":
+    elif host_platform == "macos":
         bi["core"]["static_lib"] = (
             "install/lib/python{version}/config-{version}{binary_suffix}-darwin/libpython{version}{binary_suffix}.a".format(
                 version=version, binary_suffix=binary_suffix
@@ -549,7 +581,7 @@ def python_build_info(
         else:
             object_file_format = "mach-o"
     else:
-        raise Exception("unsupported platform: %s" % platform)
+        raise Exception("unsupported platform: %s" % host_platform)

     bi["object_file_format"] = object_file_format

@@ -564,9 +596,9 @@ def python_build_info(
         if lib.startswith("-l"):
             lib = lib[2:]

-        if platform == "linux64" and lib not in LINUX_ALLOW_SYSTEM_LIBRARIES:
+        if host_platform == "linux64" and lib not in LINUX_ALLOW_SYSTEM_LIBRARIES:
             raise Exception("unexpected library in LIBS (%s): %s" % (libs, lib))
-        elif platform == "macos" and lib not in MACOS_ALLOW_SYSTEM_LIBRARIES:
+        elif host_platform == "macos" and lib not in MACOS_ALLOW_SYSTEM_LIBRARIES:
             raise Exception("unexpected library in LIBS (%s): %s" % (libs, lib))

         log("adding core system link library: %s" % lib)
@@ -681,7 +713,7 @@ def python_build_info(
         extension_suffix = extra_metadata["python_config_vars"]["EXT_SUFFIX"]
         entry["shared_lib"] = "%s/%s%s" % (shared_dir, extension, extension_suffix)

-        add_licenses_to_extension_entry(entry)
+        add_licenses_to_extension_entry(entry, downloads=downloads)

         bi["extensions"].setdefault(extension, []).append(entry)

@@ -701,17 +733,18 @@ def build_cpython(
     host_platform,
     target_triple,
     optimizations,
-    downloads: Downloads,
     dest_archive,
     version=None,
     python_source=None,
+    *,
+    downloads: Downloads,
 ):
     """Build CPython in a Docker image'"""
     entry_name = "cpython-%s" % version
     entry = downloads[entry_name]
     if not python_source:
         python_version = entry["version"]
-        python_archive = download_entry(entry_name, DOWNLOADS_PATH, downloads)
+        python_archive = download_entry(entry_name, DOWNLOADS_PATH, downloads=downloads)
     else:
         python_version = os.environ["PYBUILD_PYTHON_VERSION"]
         python_archive = DOWNLOADS_PATH / ("Python-%s.tar.xz" % python_version)
@@ -721,8 +754,10 @@ def build_cpython(
             fh, python_source, path_prefix="Python-%s" % python_version
         )

-    setuptools_archive = download_entry("setuptools", DOWNLOADS_PATH, downloads)
-    pip_archive = download_entry("pip", DOWNLOADS_PATH, downloads)
+    setuptools_archive = download_entry(
+        "setuptools", DOWNLOADS_PATH, downloads=downloads
+    )
+    pip_archive = download_entry("pip", DOWNLOADS_PATH, downloads=downloads)

     ems = extension_modules_config(EXTENSION_MODULES)

@@ -746,6 +781,7 @@ def build_cpython(
                 binutils=install_binutils(host_platform),
                 clang=True,
                 musl="musl" in target_triple,
+                downloads=downloads,
             )

         packages = target_needs(TARGETS_CONFIG, target_triple, python_version)
@@ -754,10 +790,10 @@ def build_cpython(
         packages.discard("musl")

         for p in sorted(packages):
-            build_env.install_artifact_archive(BUILD, p, target_triple, optimizations)
+            build_env.install_artifact_archive(BUILD, p, target_triple, optimizations, downloads=downloads)

         build_env.install_toolchain_archive(
-            BUILD, entry_name, host_platform, version=python_version
+            BUILD, entry_name, host_platform, version=python_version, downloads=downloads
         )

         for p in (
@@ -1008,9 +1044,9 @@ def main():
         write_dockerfiles(SUPPORT, BUILD)
     elif action == "makefiles":
         targets = get_targets(TARGETS_CONFIG)
-        write_triples_makefiles(targets, BUILD, SUPPORT)
+        write_triples_makefiles(targets, BUILD, SUPPORT, downloads=downloads)
         write_target_settings(targets, BUILD / "targets")
-        write_package_versions(BUILD / "versions")
+        write_package_versions(BUILD / "versions", downloads=downloads)

         # Override the DOWNLOADS package entry for CPython for the local build
         if python_source:
@@ -1056,9 +1092,9 @@ def main():
             target_triple=target_triple,
             optimizations=optimizations,
             dest_archive=dest_archive,
-            downloads=downloads,
             tools_path="host",
             extra_archives=["m4"],
+            downloads=downloads,
         )

     elif action == "libedit":
@@ -1110,8 +1146,8 @@ def main():
             target_triple=target_triple,
             optimizations=optimizations,
             dest_archive=dest_archive,
-            downloads=downloads,
             tools_path=tools_path,
+            downloads=downloads,
         )

     elif action == "libX11":
@@ -1243,9 +1279,9 @@ def main():
             target_triple=target_triple,
             optimizations=optimizations,
             dest_archive=dest_archive,
-            downloads=downloads,
             version=action.split("-")[1],
             python_source=python_source,
+            downloads=downloads,
         )

     else:
diff --git a/cpython-windows/build.py b/cpython-windows/build.py
index 85a2eaf0..9a44e205 100644
--- a/cpython-windows/build.py
+++ b/cpython-windows/build.py
@@ -23,6 +23,7 @@
     parse_config_c,
 )
 from pythonbuild.utils import (
+    Downloads,
     compress_python_archive,
     create_tar_from_directory,
     download_entry,
@@ -471,17 +472,14 @@ def hack_project_files(
     cpython_source_path: pathlib.Path,
     build_directory: str,
     python_version: str,
-    downloads,
+    *,
+    downloads: Downloads,
 ):
     """Hacks Visual Studio project files to work with our build."""

     pcbuild_path = cpython_source_path / "PCbuild"

-    hack_props(
-        td,
-        pcbuild_path,
-        build_directory,
-    )
+    hack_props(td, pcbuild_path, build_directory, downloads=downloads)

     # Our SQLite directory is named weirdly. This throws off version detection
     # in the project file. Replace the parsing logic with a static string.
@@ -964,6 +962,7 @@ def build_openssl(
     perl_path: pathlib.Path,
     arch: str,
     dest_archive: pathlib.Path,
+    *,
     downloads,
 ):
     """Build OpenSSL from sources using the Perl executable specified."""
@@ -971,9 +970,9 @@ def build_openssl(
     openssl_version = downloads[entry]["version"]

     # First ensure the dependencies are in place.
-    openssl_archive = download_entry(entry, downloads, BUILD)
-    nasm_archive = download_entry("nasm-windows-bin", downloads, BUILD)
-    jom_archive = download_entry("jom-windows-bin", downloads, BUILD)
+    openssl_archive = download_entry(entry, BUILD, downloads=downloads)
+    nasm_archive = download_entry("nasm-windows-bin", BUILD, downloads=downloads)
+    jom_archive = download_entry("jom-windows-bin", BUILD, downloads=downloads)

     with tempfile.TemporaryDirectory(prefix="openssl-build-") as td:
         td = pathlib.Path(td)
@@ -1025,7 +1024,8 @@ def build_libffi(
     sh_exe: pathlib.Path,
     msvc_version: str,
     dest_archive: pathlib.Path,
-    downloads,
+    *,
+    downloads: Downloads,
 ):
     with tempfile.TemporaryDirectory(prefix="libffi-build-") as td:
         td = pathlib.Path(td)
@@ -1063,7 +1063,7 @@ def build_libffi(
         )

         # We build libffi by running the build script that CPython ships.
-        python_archive = download_entry(python, downloads, BUILD)
+        python_archive = download_entry(python, BUILD, downloads=downloads)
         extract_tar_to_directory(python_archive, td)

         python_entry = downloads[python]
@@ -1117,6 +1117,7 @@ def collect_python_build_artifacts(
     arch: str,
     config: str,
     openssl_entry: str,
+    *,
     downloads,
 ):
     """Collect build artifacts from Python.
@@ -1389,7 +1390,8 @@ def build_cpython(
     openssl_archive,
     libffi_archive,
     openssl_entry: str,
-    downloads,
+    *,
+    downloads: Downloads,
 ) -> pathlib.Path:
     pgo = profile == "pgo"
@@ -1399,21 +1401,21 @@ def build_cpython(
     # The python.props file keys off MSBUILD, so it needs to be set.
     os.environ["MSBUILD"] = str(msbuild)

-    bzip2_archive = download_entry("bzip2", downloads, BUILD)
-    sqlite_archive = download_entry("sqlite", downloads, BUILD)
+    bzip2_archive = download_entry("bzip2", BUILD, downloads=downloads)
+    sqlite_archive = download_entry("sqlite", BUILD, downloads=downloads)
     tk_bin_archive = download_entry(
-        "tk-windows-bin", BUILD, local_name="tk-windows-bin.tar.gz"
+        "tk-windows-bin", BUILD, local_name="tk-windows-bin.tar.gz", downloads=downloads
     )
-    xz_archive = download_entry("xz", downloads, BUILD)
-    zlib_archive = download_entry("zlib", downloads, BUILD)
+    xz_archive = download_entry("xz", BUILD, downloads=downloads)
+    zlib_archive = download_entry("zlib", BUILD, downloads=downloads)

-    python_archive = download_entry(python_entry_name, downloads, BUILD)
+    python_archive = download_entry(python_entry_name, BUILD, downloads=downloads)

     entry = downloads[python_entry_name]
     python_version = entry["version"]

-    setuptools_wheel = download_entry("setuptools", downloads, BUILD)
-    pip_wheel = download_entry("pip", downloads, BUILD)
+    setuptools_wheel = download_entry("setuptools", BUILD, downloads=downloads)
+    pip_wheel = download_entry("pip", BUILD, downloads=downloads)

     if arch == "amd64":
         build_platform = "x64"
@@ -1483,6 +1485,7 @@ def build_cpython(
             cpython_source_path,
             build_directory,
             python_version=python_version,
+            downloads=downloads,
         )

         if pgo:
@@ -1810,8 +1813,8 @@ def build_cpython(
     return dest_path


-def fetch_strawberry_perl() -> pathlib.Path:
-    strawberryperl_zip = download_entry("strawberryperl", downloads, BUILD)
+def fetch_strawberry_perl(*, downloads: Downloads) -> pathlib.Path:
+    strawberryperl_zip = download_entry("strawberryperl", BUILD, downloads=downloads)
     strawberryperl = BUILD / "strawberry-perl"
     strawberryperl.mkdir(exist_ok=True)
     with zipfile.ZipFile(strawberryperl_zip) as zf:
@@ -1887,7 +1890,9 @@ def main() -> None:
             "%s-%s-%s.tar" % (openssl_entry, target_triple, args.profile)
         )
         if not openssl_archive.exists():
-            perl_path = fetch_strawberry_perl() / "perl" / "bin" / "perl.exe"
+            perl_path = (
+                fetch_strawberry_perl(downloads=downloads) / "perl" / "bin" / "perl.exe"
+            )
             LOG_PREFIX[0] = "openssl"
             build_openssl(
                 openssl_entry,
diff --git a/pythonbuild/buildenv.py b/pythonbuild/buildenv.py
index cc739c4e..9e95c9ab 100644
--- a/pythonbuild/buildenv.py
+++ b/pythonbuild/buildenv.py
@@ -10,10 +10,12 @@
 import shutil
 import tarfile
 import tempfile
+import typing

 from .docker import container_exec, container_get_archive, copy_file_to_container
 from .logging import log
 from .utils import (
+    Downloads,
     clang_toolchain,
     create_tar_from_directory,
     exec_and_log,
@@ -38,7 +40,7 @@ def copy_file(self, source: pathlib.Path, dest_path=None, dest_name=None):
         copy_file_to_container(source, self.container, dest_path, dest_name)

     def install_toolchain_archive(
-        self, build_dir, package_name, host_platform, downloads, version=None
+        self, build_dir, package_name, host_platform, version=None, *, downloads: Downloads
     ):
         entry = downloads[package_name]
         basename = "%s-%s-%s.tar" % (
@@ -52,7 +54,7 @@ def install_toolchain_archive(
         self.run(["/bin/tar", "-C", "/tools", "-xf", "/build/%s" % p.name])

     def install_artifact_archive(
-        self, build_dir, package_name, target_triple, optimizations, downloads
+        self, build_dir, package_name, target_triple, optimizations, *, downloads: Downloads
     ):
         entry = downloads[package_name]
         basename = "%s-%s-%s-%s.tar" % (
@@ -75,19 +77,26 @@ def install_toolchain(
         binutils=False,
         musl=False,
         clang=False,
-        *,
-        downloads,
+        *,
+        downloads: Downloads
     ):
         if binutils:
-            self.install_toolchain_archive(build_dir, "binutils", host_platform, downloads=downloads)
+            self.install_toolchain_archive(
+                build_dir, "binutils", host_platform, downloads=downloads
+            )

         if clang:
             self.install_toolchain_archive(
-                build_dir, clang_toolchain(host_platform, target_triple), host_platform, downloads=downloads
+                build_dir,
+                clang_toolchain(host_platform, target_triple),
+                host_platform,
+                downloads=downloads,
             )

         if musl:
-            self.install_toolchain_archive(build_dir, "musl", host_platform, downloads=downloads)
+            self.install_toolchain_archive(
+                build_dir, "musl", host_platform, downloads=downloads
+            )

     def run(self, program, user="build", environment=None):
         if isinstance(program, str) and not program.startswith("/"):
@@ -198,19 +207,26 @@ def install_toolchain(
         binutils=False,
         musl=False,
         clang=False,
-        *,
-        downloads,
+        *,
+        downloads,
     ):
         if binutils:
-            self.install_toolchain_archive(build_dir, "binutils", platform, downloads=downloads)
+            self.install_toolchain_archive(
+                build_dir, "binutils", platform, downloads=downloads
+            )

         if clang:
             self.install_toolchain_archive(
-                build_dir, clang_toolchain(platform, target_triple), platform, downloads=downloads
+                build_dir,
+                clang_toolchain(platform, target_triple),
+                platform,
+                downloads=downloads,
             )

         if musl:
-            self.install_toolchain_archive(build_dir, "musl", platform, downloads=downloads)
+            self.install_toolchain_archive(
+                build_dir, "musl", platform, downloads=downloads
+            )

     def run(self, program, user="build", environment=None):
         if user != "build":
@@ -261,7 +277,7 @@ def find_output_files(self, base_path, pattern):


 @contextlib.contextmanager
-def build_environment(client, image):
+def build_environment(client, image) -> typing.Generator[ContainerContext, None, None]:
     if client is not None:
         container = client.containers.run(
             image, command=["/bin/sleep", "86400"], detach=True
diff --git a/pythonbuild/utils.py b/pythonbuild/utils.py
index e805d6bc..387a6b07 100644
--- a/pythonbuild/utils.py
+++ b/pythonbuild/utils.py
@@ -33,7 +33,7 @@
 def get_downloads(downloads_path: pathlib.Path) -> Downloads:
     """Obtain the parsed downloads YAML file."""
     with downloads_path.open("rb") as fh:
-        return yaml.load(fh, Loader=yaml.SafeLoader)
+        return typing.cast(Downloads, yaml.load(fh, Loader=yaml.SafeLoader))


 def get_targets(yaml_path: pathlib.Path):
@@ -142,7 +142,11 @@ def write_if_different(p: pathlib.Path, data: bytes):


 def write_triples_makefiles(
-    targets, dest_dir: pathlib.Path, support_search_dir: pathlib.Path, downloads,
+    targets,
+    dest_dir: pathlib.Path,
+    support_search_dir: pathlib.Path,
+    *,
+    downloads: Downloads,
 ):
     """Write out makefiles containing make variable settings derived from config."""
     dest_dir.mkdir(parents=True, exist_ok=True)
@@ -180,7 +184,7 @@ def write_triples_makefiles(
         write_if_different(makefile_path, "".join(lines).encode("ascii"))


-def write_package_versions(dest_path: pathlib.Path, downloads):
+def write_package_versions(dest_path: pathlib.Path, *, downloads: Downloads):
     """Write out versions of packages to files in a directory."""
     dest_path.mkdir(parents=True, exist_ok=True)
@@ -303,7 +307,9 @@ def download_to_path(url: str, path: pathlib.Path, size: int, sha256: str):
     print("successfully downloaded %s" % url)


-def download_entry(key: str, dest_path: pathlib.Path, downloads, local_name=None) -> pathlib.Path:
+def download_entry(
+    key: str, dest_path: pathlib.Path, local_name=None, *, downloads: Downloads
+) -> pathlib.Path:
     entry = downloads[key]
     url = entry["url"]
     size = entry["size"]
@@ -457,7 +463,7 @@ def compress_python_archive(
     return dest_path


-def add_licenses_to_extension_entry(entry):
+def add_licenses_to_extension_entry(entry, *, downloads: Downloads):
     """Add licenses keys to a ``extensions`` entry for JSON distribution info."""

     have_licenses = False
@@ -472,7 +478,7 @@ def add_licenses_to_extension_entry(entry):
             if "path_static" in link or "path_dynamic" in link:
                 have_local_link = True

-    for value in DOWNLOADS.values():
+    for value in downloads.values():
         if name not in value.get("library_names", []):
             continue
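Not part of the patch: a minimal, hypothetical sketch of the keyword-only `downloads` calling convention this series adopts. The `Downloads` alias, URL, and helper body below are stand-ins for illustration only, not the real `pythonbuild.utils` definitions.

```python
import pathlib
import typing

# Stand-in for the Downloads mapping parsed from the downloads YAML file.
Downloads = typing.Dict[str, typing.Dict[str, typing.Any]]


def download_entry(
    key: str, dest_path: pathlib.Path, local_name=None, *, downloads: Downloads
) -> pathlib.Path:
    # Entry metadata comes from the explicit "downloads" mapping rather than a
    # module-level DOWNLOADS global; the "*" makes callers spell it out.
    entry = downloads[key]
    return dest_path / (local_name or entry["url"].rsplit("/", 1)[-1])


downloads: Downloads = {
    "pip": {"url": "https://example.invalid/pip-24.0-py3-none-any.whl"}
}

# Keyword-only: download_entry("pip", BUILD, downloads) would raise TypeError.
print(download_entry("pip", pathlib.Path("build"), downloads=downloads))
```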