diff --git a/pex/build_system/__init__.py b/pex/build_system/__init__.py index 46db28c49..3cfc24085 100644 --- a/pex/build_system/__init__.py +++ b/pex/build_system/__init__.py @@ -3,6 +3,16 @@ from __future__ import absolute_import +from pex.typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Tuple + + import attr # vendor:skip +else: + from pex.third_party import attr + + # The split of PEP-517 / PEP-518 is quite awkward. PEP-518 doesn't really work without also # specifying a build backend or knowing a default value for one, but the concept is not defined # until PEP-517. As such, we break this historical? strange division and define the default outside @@ -11,3 +21,15 @@ # See: https://peps.python.org/pep-0517/#source-trees DEFAULT_BUILD_BACKEND = "setuptools.build_meta:__legacy__" DEFAULT_BUILD_REQUIRES = ("setuptools",) + + +@attr.s(frozen=True) +class BuildSystemTable(object): + requires = attr.ib() # type: Tuple[str, ...] + build_backend = attr.ib(default=DEFAULT_BUILD_BACKEND) # type: str + backend_path = attr.ib(default=()) # type: Tuple[str, ...] 
+ + +DEFAULT_BUILD_SYSTEM_TABLE = BuildSystemTable( + requires=DEFAULT_BUILD_REQUIRES, build_backend=DEFAULT_BUILD_BACKEND +) diff --git a/pex/build_system/pep_517.py b/pex/build_system/pep_517.py index 50015a434..e96303d47 100644 --- a/pex/build_system/pep_517.py +++ b/pex/build_system/pep_517.py @@ -10,11 +10,10 @@ from pex import third_party from pex.build_system import DEFAULT_BUILD_BACKEND -from pex.build_system.pep_518 import BuildSystem, load_build_system, load_build_system_table +from pex.build_system.pep_518 import BuildSystem, load_build_system from pex.common import safe_mkdtemp from pex.dist_metadata import DistMetadata, Distribution, MetadataType from pex.jobs import Job, SpawnedJob -from pex.orderedset import OrderedSet from pex.pip.version import PipVersion, PipVersionValue from pex.resolve.resolvers import Resolver from pex.result import Error, try_ @@ -257,8 +256,6 @@ def get_requires_for_build_wheel( ): # type: (...) -> Tuple[str, ...] - build_system_table = try_(load_build_system_table(project_directory)) - requires = OrderedSet(build_system_table.requires) spawned_job = try_( _invoke_build_hook( project_directory, @@ -269,11 +266,11 @@ def get_requires_for_build_wheel( ) ) try: - requires.update(spawned_job.await_result()) + return tuple(spawned_job.await_result()) except Job.Error as e: if e.exitcode != _HOOK_UNAVAILABLE_EXIT_CODE: raise e - return tuple(requires) + return () def spawn_prepare_metadata( diff --git a/pex/build_system/pep_518.py b/pex/build_system/pep_518.py index 2cedc6a16..ac5db2424 100644 --- a/pex/build_system/pep_518.py +++ b/pex/build_system/pep_518.py @@ -7,7 +7,7 @@ import subprocess from pex import toml -from pex.build_system import DEFAULT_BUILD_BACKEND, DEFAULT_BUILD_REQUIRES +from pex.build_system import DEFAULT_BUILD_BACKEND, DEFAULT_BUILD_SYSTEM_TABLE, BuildSystemTable from pex.common import REPRODUCIBLE_BUILDS_ENV, CopyMode from pex.dist_metadata import Distribution from pex.interpreter import PythonInterpreter @@ 
-31,13 +31,6 @@ from pex.third_party import attr -@attr.s(frozen=True) -class BuildSystemTable(object): - requires = attr.ib() # type: Tuple[str, ...] - build_backend = attr.ib(default=DEFAULT_BUILD_BACKEND) # type: str - backend_path = attr.ib(default=()) # type: Tuple[str, ...] - - def _read_build_system_table( pyproject_toml, # type: str ): @@ -175,7 +168,7 @@ def load_build_system_table(project_directory): maybe_build_system_table_or_error = _maybe_load_build_system_table(project_directory) if maybe_build_system_table_or_error is not None: return maybe_build_system_table_or_error - return BuildSystemTable(requires=DEFAULT_BUILD_REQUIRES, build_backend=DEFAULT_BUILD_BACKEND) + return DEFAULT_BUILD_SYSTEM_TABLE def load_build_system( diff --git a/pex/cli/commands/lock.py b/pex/cli/commands/lock.py index 318da3a6e..61f92295d 100644 --- a/pex/cli/commands/lock.py +++ b/pex/cli/commands/lock.py @@ -526,6 +526,20 @@ def add_create_lock_options(cls, create_parser): ) ), ) + create_parser.add_argument( + "--lock-build-systems", + "--no-lock-build-systems", + dest="lock_build_systems", + default=False, + action=HandleBoolAction, + type=bool, + help=( + "When creating a lock that includes sdists, VCS requirements or local project " + "directories that will later need to be built into wheels when using the lock, " + "also lock the build system for each of these source tree artifacts to ensure " + "consistent build environments at future times." 
+ ), + ) cls._add_lock_options(create_parser) cls._add_resolve_options(create_parser) cls.add_json_options(create_parser, entity="lock", include_switch=False) @@ -802,6 +816,30 @@ def add_extra_arguments( ) as sync_parser: cls._add_sync_arguments(sync_parser) + def _get_lock_configuration(self, target_configuration): + # type: (TargetConfiguration) -> Union[LockConfiguration, Error] + if self.options.style is LockStyle.UNIVERSAL: + return LockConfiguration( + style=LockStyle.UNIVERSAL, + requires_python=tuple( + str(interpreter_constraint.requires_python) + for interpreter_constraint in target_configuration.interpreter_constraints + ), + target_systems=tuple(self.options.target_systems), + lock_build_systems=self.options.lock_build_systems, + ) + + if self.options.target_systems: + return Error( + "The --target-system option only applies to --style {universal} locks.".format( + universal=LockStyle.UNIVERSAL.value + ) + ) + + return LockConfiguration( + style=self.options.style, lock_build_systems=self.options.lock_build_systems + ) + def _resolve_targets( self, action, # type: str @@ -891,24 +929,7 @@ def _create(self): target_configuration = target_options.configure( self.options, pip_configuration=pip_configuration ) - if self.options.style == LockStyle.UNIVERSAL: - lock_configuration = LockConfiguration( - style=LockStyle.UNIVERSAL, - requires_python=tuple( - str(interpreter_constraint.requires_python) - for interpreter_constraint in target_configuration.interpreter_constraints - ), - target_systems=tuple(self.options.target_systems), - ) - elif self.options.target_systems: - return Error( - "The --target-system option only applies to --style {universal} locks.".format( - universal=LockStyle.UNIVERSAL.value - ) - ) - else: - lock_configuration = LockConfiguration(style=self.options.style) - + lock_configuration = try_(self._get_lock_configuration(target_configuration)) targets = try_( self._resolve_targets( action="creating", @@ -1454,8 +1475,8 @@ def 
process_req_edits( lock_file=attr.evolve( lock_file, pex_version=__version__, - requirements=SortedTuple(requirements_by_project_name.values(), key=str), - constraints=SortedTuple(constraints_by_project_name.values(), key=str), + requirements=SortedTuple(requirements_by_project_name.values()), + constraints=SortedTuple(constraints_by_project_name.values()), locked_resolves=SortedTuple( resolve_update.updated_resolve for resolve_update in lock_update.resolves ), @@ -1539,24 +1560,7 @@ def _sync(self): target_configuration = target_options.configure( self.options, pip_configuration=pip_configuration ) - if self.options.style == LockStyle.UNIVERSAL: - lock_configuration = LockConfiguration( - style=LockStyle.UNIVERSAL, - requires_python=tuple( - str(interpreter_constraint.requires_python) - for interpreter_constraint in target_configuration.interpreter_constraints - ), - target_systems=tuple(self.options.target_systems), - ) - elif self.options.target_systems: - return Error( - "The --target-system option only applies to --style {universal} locks.".format( - universal=LockStyle.UNIVERSAL.value - ) - ) - else: - lock_configuration = LockConfiguration(style=self.options.style) - + lock_configuration = try_(self._get_lock_configuration(target_configuration)) lock_file_path = self.options.lock if os.path.exists(lock_file_path): build_configuration = pip_configuration.build_configuration diff --git a/pex/dist_metadata.py b/pex/dist_metadata.py index e1dc2ea76..c445015d2 100644 --- a/pex/dist_metadata.py +++ b/pex/dist_metadata.py @@ -730,7 +730,8 @@ def __str__(self): ) -@attr.s(frozen=True) +@functools.total_ordering +@attr.s(frozen=True, order=False) class Constraint(object): @classmethod def parse( @@ -849,8 +850,14 @@ def as_requirement(self): # type: () -> Requirement return Requirement(name=self.name, specifier=self.specifier, marker=self.marker) + def __lt__(self, other): + # type: (Any) -> bool + if not isinstance(other, Constraint): + return NotImplemented + 
return self._str < other._str -@attr.s(frozen=True) + +@attr.s(frozen=True, order=False) class Requirement(Constraint): @classmethod def parse( @@ -899,6 +906,12 @@ def as_constraint(self): # type: () -> Constraint return Constraint(name=self.name, specifier=self.specifier, marker=self.marker) + def __lt__(self, other): + # type: (Any) -> bool + if not isinstance(other, Requirement): + return NotImplemented + return self._str < other._str + # N.B.: DistributionMetadata can have an expensive hash when a distribution has many requirements; # so we cache the hash. See: https://github.com/pex-tool/pex/issues/1928 diff --git a/pex/pip/vcs.py b/pex/pip/vcs.py index e954590f3..89ac1cab8 100644 --- a/pex/pip/vcs.py +++ b/pex/pip/vcs.py @@ -7,6 +7,8 @@ import re from pex import hashing +from pex.build_system import BuildSystemTable +from pex.build_system.pep_518 import load_build_system_table from pex.common import is_pyc_dir, is_pyc_file, open_zip, temporary_dir from pex.hashing import Sha256 from pex.pep_440 import Version @@ -61,7 +63,7 @@ def fingerprint_downloaded_vcs_archive( version, # type: str vcs, # type: VCS.Value ): - # type: (...) -> Tuple[Fingerprint, str] + # type: (...) -> Tuple[Fingerprint, BuildSystemTable, str] archive_path = try_( _find_built_source_dist( @@ -69,8 +71,8 @@ def fingerprint_downloaded_vcs_archive( ) ) digest = Sha256() - digest_vcs_archive(archive_path=archive_path, vcs=vcs, digest=digest) - return Fingerprint.from_digest(digest), archive_path + build_system_table = digest_vcs_archive(archive_path=archive_path, vcs=vcs, digest=digest) + return Fingerprint.from_digest(digest), build_system_table, archive_path def digest_vcs_archive( @@ -78,7 +80,7 @@ def digest_vcs_archive( vcs, # type: VCS.Value digest, # type: HintedDigest ): - # type: (...) -> None + # type: (...) -> BuildSystemTable # All VCS requirements are prepared as zip archives as encoded in: # `pip._internal.req.req_install.InstallRequirement.archive`. 
@@ -109,3 +111,5 @@ def digest_vcs_archive( ), file_filter=lambda f: not is_pyc_file(f), ) + + return try_(load_build_system_table(chroot)) diff --git a/pex/resolve/build_systems.py b/pex/resolve/build_systems.py new file mode 100644 index 000000000..78f96be8d --- /dev/null +++ b/pex/resolve/build_systems.py @@ -0,0 +1,117 @@ +# Copyright 2024 Pex project contributors. +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +from __future__ import absolute_import + +import os.path +import tarfile +from collections import OrderedDict + +from pex.build_system import DEFAULT_BUILD_SYSTEM_TABLE, BuildSystemTable +from pex.build_system.pep_518 import load_build_system_table +from pex.common import open_zip, safe_mkdtemp +from pex.dist_metadata import is_sdist, is_tar_sdist, is_zip_sdist +from pex.exceptions import production_assert, reportable_unexpected_error_msg +from pex.jobs import iter_map_parallel +from pex.resolve.resolved_requirement import PartialArtifact +from pex.resolve.resolvers import Resolver +from pex.result import try_ +from pex.typing import TYPE_CHECKING, cast + +if TYPE_CHECKING: + from typing import Iterable, Iterator, Optional, Tuple + + import attr # vendor:skip +else: + from pex.third_party import attr + + +def extract_build_system_table(source_archive_path): + # type: (str) -> BuildSystemTable + + if is_tar_sdist(source_archive_path): + extract_chroot = safe_mkdtemp() + with tarfile.open(source_archive_path) as fp: + fp.extractall(extract_chroot) + elif is_zip_sdist(source_archive_path): + extract_chroot = safe_mkdtemp() + with open_zip(source_archive_path) as fp: + fp.extractall(extract_chroot) + else: + raise AssertionError( + reportable_unexpected_error_msg( + "Asked to extract a build system table from {path} which does not appear to be a " + "source archive.".format(path=source_archive_path) + ) + ) + + # We might get a Python-standard sdist, in which case the project root is at + # `-/` at the top of the archive, but we also 
might get some other sort of + archive, like a GitHub source archive which does not use Python conventions. As such we just + perform a top-down search for a project file and exit early for the highest-level such file + found. + # TODO(John Sirois): XXX: Check if this works with VCS requirements that use Pip-proprietary + # subdirectory=YYY. + for root, dirs, files in os.walk(extract_chroot): + if any(f in ("pyproject.toml", "setup.py", "setup.cfg") for f in files): + return try_(load_build_system_table(root)) + return DEFAULT_BUILD_SYSTEM_TABLE + + +@attr.s(frozen=True) +class BuildSystems(object): + resolver = attr.ib() # type: Resolver + + def determine_build_systems(self, artifacts): + # type: (Iterable[PartialArtifact]) -> Iterator[Tuple[PartialArtifact, Optional[BuildSystemTable]]] + + undetermined_artifacts = OrderedDict() # type: OrderedDict[PartialArtifact, float] + for artifact in artifacts: + if artifact.build_system_table: + yield artifact, artifact.build_system_table + elif artifact.url.is_wheel: + yield artifact, None + else: + if "file" == artifact.url.scheme: + if os.path.isdir(artifact.url.path): + cost = 0.0 + else: + # For almost all source archives this value should be <= 1 + cost = os.path.getsize(artifact.url.path) / (5.0 * 1024 * 1024) + else: + # We have no clue how big the archive is, but assume an internet fetch is 10 + # times more costly per byte than extraction from an archive alone is.
+ cost = 10.0 + undetermined_artifacts[artifact] = cost + + for artifact, build_system_table in iter_map_parallel( + inputs=undetermined_artifacts, + function=self._determine_build_system, + costing_function=lambda a: undetermined_artifacts[a], + result_render_function=lambda result: ( + cast("Tuple[PartialArtifact, Optional[BuildSystemTable]]", result)[0].url + ), + noun="artifact", + verb="extract build system", + verb_past="extracted build system", + ): + yield artifact, build_system_table + + def _determine_build_system(self, artifact): + # type: (PartialArtifact) -> Tuple[PartialArtifact, BuildSystemTable] + + if "file" == artifact.url.scheme and os.path.isdir(artifact.url.path): + return artifact, try_(load_build_system_table(artifact.url.path)) + + production_assert(is_sdist(artifact.url.path)) + if artifact.url.scheme == "file": + archive = artifact.url.path + else: + archive = ( + self.resolver.download_requirements( + requirements=[artifact.url.download_url], transitive=False + ) + .local_distributions[0] + .path + ) + return artifact, extract_build_system_table(archive) diff --git a/pex/resolve/configured_resolver.py b/pex/resolve/configured_resolver.py index 6e76e9e66..540a92356 100644 --- a/pex/resolve/configured_resolver.py +++ b/pex/resolve/configured_resolver.py @@ -10,7 +10,7 @@ from pex.resolve import lock_resolver from pex.resolve.lockfile.model import Lockfile from pex.resolve.resolver_configuration import PipConfiguration, ReposConfiguration, ResolverVersion -from pex.resolve.resolvers import Resolver, ResolveResult +from pex.resolve.resolvers import Downloaded, Resolver, ResolveResult from pex.result import try_ from pex.targets import Targets from pex.typing import TYPE_CHECKING @@ -115,3 +115,33 @@ def resolve_requirements( ), result_type=result_type, ) + + def download_requirements( + self, + requirements, # type: Iterable[str] + targets=Targets(), # type: Targets + pip_version=None, # type: Optional[PipVersionValue] + transitive=None, # 
type: Optional[bool] + extra_resolver_requirements=None, # type: Optional[Tuple[Requirement, ...]] + ): + # type: (...) -> Downloaded + return resolver.download( + targets=targets, + requirements=requirements, + allow_prereleases=False, + transitive=transitive if transitive is not None else self.pip_configuration.transitive, + indexes=self.pip_configuration.repos_configuration.indexes, + find_links=self.pip_configuration.repos_configuration.find_links, + resolver_version=self.pip_configuration.resolver_version, + network_configuration=self.pip_configuration.network_configuration, + build_configuration=self.pip_configuration.build_configuration, + max_parallel_jobs=self.pip_configuration.max_jobs, + pip_version=pip_version or self.pip_configuration.version, + resolver=self, + use_pip_config=self.pip_configuration.use_pip_config, + extra_pip_requirements=( + extra_resolver_requirements + if extra_resolver_requirements is not None + else self.pip_configuration.extra_requirements + ), + ) diff --git a/pex/resolve/lock_resolver.py b/pex/resolve/lock_resolver.py index 1b7f58e47..4ce1b8497 100644 --- a/pex/resolve/lock_resolver.py +++ b/pex/resolve/lock_resolver.py @@ -300,6 +300,7 @@ def resolve_from_lock( style=lock.style, requires_python=lock.requires_python, target_systems=lock.target_systems, + lock_build_systems=lock.lock_build_systems, ), target=resolved_subset.target, package_index_configuration=PackageIndexConfiguration.create( diff --git a/pex/resolve/locked_resolve.py b/pex/resolve/locked_resolve.py index 684a73f0c..c81bbb266 100644 --- a/pex/resolve/locked_resolve.py +++ b/pex/resolve/locked_resolve.py @@ -8,6 +8,7 @@ from collections import OrderedDict, defaultdict, deque from functools import total_ordering +from pex.build_system import BuildSystemTable from pex.common import pluralize from pex.dependency_configuration import DependencyConfiguration from pex.dist_metadata import DistMetadata, Requirement, is_sdist, is_wheel @@ -87,6 +88,7 @@ class 
LockConfiguration(object): style = attr.ib() # type: LockStyle.Value requires_python = attr.ib(default=()) # type: Tuple[str, ...] target_systems = attr.ib(default=()) # type: Tuple[TargetSystem.Value, ...] + lock_build_systems = attr.ib(default=False) # type: bool @requires_python.validator @target_systems.validator @@ -116,13 +118,16 @@ def from_artifact_url( artifact_url, # type: ArtifactURL fingerprint, # type: Fingerprint verified=False, # type: bool + build_system_table=None, # type: Optional[BuildSystemTable] ): # type: (...) -> Union[FileArtifact, LocalProjectArtifact, VCSArtifact] if isinstance(artifact_url.scheme, VCSScheme): - return VCSArtifact.from_artifact_url( - artifact_url=artifact_url, + return VCSArtifact( + url=artifact_url, fingerprint=fingerprint, verified=verified, + vcs=artifact_url.scheme.vcs, + build_system_table=build_system_table, ) if "file" == artifact_url.scheme and os.path.isdir(artifact_url.path): @@ -132,6 +137,7 @@ def from_artifact_url( fingerprint=fingerprint, verified=verified, directory=directory, + build_system_table=build_system_table, ) filename = os.path.basename(artifact_url.path) @@ -140,6 +146,7 @@ def from_artifact_url( fingerprint=fingerprint, verified=verified, filename=filename, + build_system_table=build_system_table, ) @classmethod @@ -148,10 +155,14 @@ def from_url( url, # type: str fingerprint, # type: Fingerprint verified=False, # type: bool + build_system_table=None, # type: Optional[BuildSystemTable] ): # type: (...) 
-> Union[FileArtifact, LocalProjectArtifact, VCSArtifact] return cls.from_artifact_url( - artifact_url=ArtifactURL.parse(url), fingerprint=fingerprint, verified=verified + artifact_url=ArtifactURL.parse(url), + fingerprint=fingerprint, + verified=verified, + build_system_table=build_system_table, ) url = attr.ib() # type: ArtifactURL @@ -168,6 +179,19 @@ def __lt__(self, other): @attr.s(frozen=True, order=False) class FileArtifact(Artifact): filename = attr.ib() # type: str + build_system_table = attr.ib(default=None) # type: Optional[BuildSystemTable] + + @build_system_table.validator + def _validate_only_set_for_sdist( + self, + attribute, # type: Any + value, # type: Optional[BuildSystemTable] + ): + if value and not self.is_source: + raise ValueError( + "A build system table was provided but this is a whl artifact that does not need " + "to be built: {url}".format(url=self.url.raw_url) + ) @property def is_source(self): @@ -184,6 +208,7 @@ def parse_tags(self): @attr.s(frozen=True, order=False) class LocalProjectArtifact(Artifact): directory = attr.ib() # type: str + build_system_table = attr.ib(default=None) # type: Optional[BuildSystemTable] @property def is_source(self): @@ -193,28 +218,8 @@ def is_source(self): @attr.s(frozen=True, order=False) class VCSArtifact(Artifact): - @classmethod - def from_artifact_url( - cls, - artifact_url, # type: ArtifactURL - fingerprint, # type: Fingerprint - verified=False, # type: bool - ): - # type: (...) 
-> VCSArtifact - if not isinstance(artifact_url.scheme, VCSScheme): - raise ValueError( - "The given artifact URL is not that of a VCS artifact: {url}".format( - url=artifact_url.raw_url - ) - ) - return cls( - url=artifact_url, - fingerprint=fingerprint, - verified=verified, - vcs=artifact_url.scheme.vcs, - ) - vcs = attr.ib() # type: VCS.Value + build_system_table = attr.ib(default=None) # type: Optional[BuildSystemTable] @property def is_source(self): @@ -252,7 +257,7 @@ def create( return cls( pin=pin, artifact=artifact, - requires_dists=SortedTuple(requires_dists, key=str), + requires_dists=SortedTuple(requires_dists), requires_python=requires_python, additional_artifacts=SortedTuple(additional_artifacts), ) @@ -442,7 +447,7 @@ def create( return cls( pin=pin, artifact=artifact, - satisfied_direct_requirements=SortedTuple(satisfied_direct_requirements, key=str), + satisfied_direct_requirements=SortedTuple(satisfied_direct_requirements), ) pin = attr.ib() # type: Pin @@ -518,6 +523,11 @@ def most_specific(cls, resolves): if TYPE_CHECKING: + class BuildSystemOracle(Protocol): + def determine_build_systems(self, artifacts): + # type: (Iterable[PartialArtifact]) -> Iterator[Tuple[PartialArtifact, Optional[BuildSystemTable]]] + pass + class Fingerprinter(Protocol): def fingerprint(self, artifacts): # type: (Iterable[PartialArtifact]) -> Iterator[FileArtifact] @@ -531,11 +541,24 @@ def create( cls, resolved_requirements, # type: Iterable[ResolvedRequirement] dist_metadatas, # type: Iterable[DistMetadata] + build_system_oracle, # type: Optional[BuildSystemOracle] fingerprinter, # type: Fingerprinter platform_tag=None, # type: Optional[tags.Tag] ): # type: (...) 
-> LockedResolve + artifacts_to_lock = OrderedSet( + itertools.chain.from_iterable( + resolved_requirement.iter_artifacts_to_lock() + for resolved_requirement in resolved_requirements + ) + ) + build_system_table_by_partial_artifact = ( + dict(build_system_oracle.determine_build_systems(artifacts_to_lock)) + if build_system_oracle + else {} + ) + artifacts_to_fingerprint = OrderedSet( itertools.chain.from_iterable( resolved_requirement.iter_artifacts_to_fingerprint() @@ -568,6 +591,7 @@ def resolve_fingerprint(partial_artifact): artifact_url=partial_artifact.url, fingerprint=partial_artifact.fingerprint, verified=partial_artifact.verified, + build_system_table=build_system_table_by_partial_artifact.get(partial_artifact), ) dist_metadata_by_pin = { diff --git a/pex/resolve/locker.py b/pex/resolve/locker.py index 4991bb7dd..1bbe74adb 100644 --- a/pex/resolve/locker.py +++ b/pex/resolve/locker.py @@ -10,6 +10,8 @@ from collections import OrderedDict, defaultdict from pex import hashing +from pex.build_system import BuildSystemTable +from pex.build_system.pep_518 import load_build_system_table from pex.common import safe_mkdtemp from pex.compatibility import urlparse from pex.dist_metadata import ProjectNameAndVersion, Requirement @@ -23,6 +25,7 @@ from pex.pip.vcs import fingerprint_downloaded_vcs_archive from pex.pip.version import PipVersionValue from pex.requirements import ArchiveScheme, VCSRequirement, VCSScheme +from pex.resolve import build_systems from pex.resolve.locked_resolve import LockConfiguration, LockStyle, TargetSystem from pex.resolve.pep_691.fingerprint_service import FingerprintService from pex.resolve.pep_691.model import Endpoint @@ -34,6 +37,7 @@ ResolvedRequirement, ) from pex.resolve.resolvers import Resolver +from pex.result import try_ from pex.targets import Target from pex.typing import TYPE_CHECKING @@ -369,8 +373,13 @@ def analyze(self, line): artifact_url = build_result.url source_fingerprint = None # type: Optional[Fingerprint] 
verified = False + build_system_table = None # type: Optional[BuildSystemTable] if isinstance(artifact_url.scheme, VCSScheme): - source_fingerprint, archive_path = fingerprint_downloaded_vcs_archive( + ( + source_fingerprint, + build_system_table, + archive_path, + ) = fingerprint_downloaded_vcs_archive( download_dir=self._download_dir, project_name=str(build_result.pin.project_name), version=str(build_result.pin.version), @@ -390,6 +399,9 @@ def analyze(self, line): # machinery that finalizes a locks missing fingerprints will download the # artifact and hash it. if os.path.isfile(source_archive_path): + build_system_table = build_systems.extract_build_system_table( + source_archive_path + ) digest = Sha256() hashing.file_hash(source_archive_path, digest) source_fingerprint = Fingerprint.from_digest(digest) @@ -398,11 +410,15 @@ def analyze(self, line): elif "file" == artifact_url.scheme: digest = Sha256() if os.path.isfile(artifact_url.path): + build_system_table = build_systems.extract_build_system_table( + artifact_url.path + ) hashing.file_hash(artifact_url.path, digest) self._selected_path_to_pin[ os.path.basename(artifact_url.path) ] = build_result.pin else: + build_system_table = try_(load_build_system_table(artifact_url.path)) digest_local_project( directory=artifact_url.path, digest=digest, @@ -427,7 +443,10 @@ def analyze(self, line): self._resolved_requirements[build_result.pin] = ResolvedRequirement( pin=build_result.pin, artifact=PartialArtifact( - url=artifact_url, fingerprint=source_fingerprint, verified=verified + url=artifact_url, + fingerprint=source_fingerprint, + verified=verified, + build_system_table=build_system_table, ), additional_artifacts=tuple(additional_artifacts.values()), ) diff --git a/pex/resolve/lockfile/create.py b/pex/resolve/lockfile/create.py index 5d153121c..599dcb3c7 100644 --- a/pex/resolve/lockfile/create.py +++ b/pex/resolve/lockfile/create.py @@ -3,6 +3,7 @@ from __future__ import absolute_import +import itertools import 
os import shutil import tarfile @@ -11,10 +12,16 @@ from pex import hashing, resolver from pex.auth import PasswordDatabase -from pex.build_system import pep_517 +from pex.build_system import BuildSystemTable, pep_517 from pex.common import open_zip, pluralize, safe_mkdtemp from pex.dependency_configuration import DependencyConfiguration -from pex.dist_metadata import DistMetadata, ProjectNameAndVersion, is_tar_sdist, is_zip_sdist +from pex.dist_metadata import ( + Constraint, + DistMetadata, + ProjectNameAndVersion, + is_tar_sdist, + is_zip_sdist, +) from pex.fetcher import URLFetcher from pex.jobs import Job, Retain, SpawnedJob, execute_parallel from pex.orderedset import OrderedSet @@ -22,6 +29,7 @@ from pex.pip.download_observer import DownloadObserver from pex.pip.tool import PackageIndexConfiguration from pex.resolve import lock_resolver, locker, resolvers +from pex.resolve.build_systems import BuildSystems from pex.resolve.configured_resolver import ConfiguredResolver from pex.resolve.downloads import ArtifactDownloader from pex.resolve.locked_resolve import ( @@ -40,8 +48,8 @@ from pex.resolve.requirement_configuration import RequirementConfiguration from pex.resolve.resolved_requirement import Pin, ResolvedRequirement from pex.resolve.resolver_configuration import PipConfiguration -from pex.resolve.resolvers import Resolver -from pex.resolver import BuildRequest, Downloaded, ResolveObserver, WheelBuilder +from pex.resolve.resolvers import Downloaded, Resolver +from pex.resolver import BuildRequest, ResolveObserver, WheelBuilder from pex.result import Error, try_ from pex.targets import Target, Targets from pex.tracer import TRACER @@ -50,7 +58,7 @@ from pex.version import __version__ if TYPE_CHECKING: - from typing import DefaultDict, Dict, Iterable, List, Mapping, Optional, Tuple, Union + from typing import DefaultDict, Dict, Iterable, Iterator, List, Mapping, Optional, Tuple, Union import attr # vendor:skip @@ -334,6 +342,11 @@ def lock(self, 
downloaded): LockedResolve.create( resolved_requirements=resolved_requirements, dist_metadatas=dist_metadatas_by_target[target], + build_system_oracle=( + BuildSystems(resolver=self.resolver) + if self.lock_configuration.lock_build_systems + else None + ), fingerprinter=ArtifactDownloader( resolver=self.resolver, lock_configuration=self.lock_configuration, @@ -341,23 +354,31 @@ def lock(self, downloaded): package_index_configuration=self.package_index_configuration, max_parallel_jobs=self.max_parallel_jobs, ), - platform_tag=None - if self.lock_configuration.style == LockStyle.UNIVERSAL - else target.platform.tag, + platform_tag=( + None + if self.lock_configuration.style is LockStyle.UNIVERSAL + else target.platform.tag + ), ) for target, resolved_requirements in resolved_requirements_by_target.items() ) -def create( +@attr.s(frozen=True) +class _LockResult(object): + requirements = attr.ib() # type: Tuple[ParsedRequirement, ...] + constraints = attr.ib() # type: Tuple[Constraint, ...] + locked_resolves = attr.ib() # type: Tuple[LockedResolve, ...] + + +def _lock( lock_configuration, # type: LockConfiguration requirement_configuration, # type: RequirementConfiguration targets, # type: Targets pip_configuration, # type: PipConfiguration dependency_configuration=DependencyConfiguration(), # type: DependencyConfiguration ): - # type: (...) -> Union[Lockfile, Error] - """Create a lock file for the given resolve configurations.""" + # type: (...) 
-> Union[_LockResult, Error] network_configuration = pip_configuration.network_configuration parsed_requirements = tuple(requirement_configuration.parse_requirements(network_configuration)) @@ -441,21 +462,165 @@ def create( ) create_lock_download_manager.store_all() + return _LockResult(parsed_requirements, constraints, locked_resolves) + + +def _lock_build_system( + build_system_table, # type: BuildSystemTable + lock_configuration, # type: LockConfiguration + targets, # type: Targets + pip_configuration, # type: PipConfiguration + dependency_configuration=DependencyConfiguration(), # type: DependencyConfiguration +): + # type: (...) -> Union[Tuple[BuildSystemTable, Tuple[LockedResolve, ...]], Error] + + requirement_configuration = RequirementConfiguration(requirements=build_system_table.requires) + result = _lock( + lock_configuration, + requirement_configuration, + targets, + pip_configuration, + dependency_configuration=dependency_configuration, + ) + if isinstance(result, Error): + return result + + source_artifacts = OrderedSet( + artifact.url.download_url + for artifact in itertools.chain.from_iterable( + locked_requirement.iter_artifacts() + for locked_resolve in result.locked_resolves + for locked_requirement in locked_resolve.locked_requirements + ) + if not artifact.url.is_wheel + ) + if source_artifacts: + return Error( + "Failed to lock build backend {build_backend} which requires {requires}.\n" + "The following {packages} had source artifacts locked and recursive build system " + "locking is not supported:\n" + "{source_artifacts}".format( + build_backend=build_system_table.build_backend, + requires=", ".join(build_system_table.requires), + packages=pluralize(source_artifacts, "package"), + source_artifacts="\n".join(source_artifacts), + ) + ) + return build_system_table, result.locked_resolves + + +def _lock_build_systems( + locked_resolves, # type: Tuple[LockedResolve, ...] 
+ lock_configuration, # type: LockConfiguration + targets, # type: Targets + pip_configuration, # type: PipConfiguration + dependency_configuration=DependencyConfiguration(), # type: DependencyConfiguration +): + # type: (...) -> Iterator[Union[Tuple[BuildSystemTable, Tuple[LockedResolve, ...]], Error]] + + if not lock_configuration.lock_build_systems: + return + + build_systems = OrderedSet( + artifact.build_system_table + for artifact in itertools.chain.from_iterable( + locked_requirement.iter_artifacts() + for locked_resolve in locked_resolves + for locked_requirement in locked_resolve.locked_requirements + ) + if artifact.build_system_table + ) + if not build_systems: + return + + build_system_pip_config = attr.evolve( + pip_configuration, + build_configuration=attr.evolve( + pip_configuration.build_configuration, allow_builds=False, allow_wheels=True + ), + ) + # TODO(John Sirois): Re-introduce iter_map_parallel after sorting out nested + # multiprocessing.Pool illegal usage. Currently this nets: + # File "/home/jsirois/dev/pex-tool/pex/pex/resolve/lockfile/create.py", line 588, in create + # for result in _lock_build_systems( + # File "/home/jsirois/dev/pex-tool/pex/pex/jobs.py", line 787, in iter_map_parallel + # for pid, result, elapsed_secs in pool.imap_unordered(apply_function, input_items): + # File "/home/jsirois/.pyenv/versions/3.11.10/lib/python3.11/multiprocessing/pool.py", line 873, in next + # raise value + # AssertionError: daemonic processes are not allowed to have children + for build_system_table in build_systems: + yield _lock_build_system( + build_system_table=build_system_table, + lock_configuration=lock_configuration, + targets=targets, + pip_configuration=build_system_pip_config, + dependency_configuration=dependency_configuration, + ) + + +def create( + lock_configuration, # type: LockConfiguration + requirement_configuration, # type: RequirementConfiguration + targets, # type: Targets + pip_configuration, # type: PipConfiguration + 
dependency_configuration=DependencyConfiguration(), # type: DependencyConfiguration +): + # type: (...) -> Union[Lockfile, Error] + """Create a lock file for the given resolve configurations.""" + + lock_result = try_( + _lock( + lock_configuration, + requirement_configuration, + targets, + pip_configuration, + dependency_configuration=dependency_configuration, + ) + ) + + build_system_lock_errors = [] # type: List[str] + build_systems = {} # type: Dict[BuildSystemTable, Tuple[LockedResolve, ...]] + for result in _lock_build_systems( + locked_resolves=lock_result.locked_resolves, + lock_configuration=lock_configuration, + targets=targets, + pip_configuration=pip_configuration, + dependency_configuration=dependency_configuration, + ): + if isinstance(result, Error): + build_system_lock_errors.append(str(result)) + else: + build_system_table, locked_resolves = result + build_systems[build_system_table] = locked_resolves + if build_system_lock_errors: + return Error( + "Failed to lock {count} build {systems}:\n{errors}".format( + count=len(build_system_lock_errors), + systems=pluralize(build_system_lock_errors, "system"), + errors="\n".join( + "{index}. 
{error}".format(index=index, error=error) + for index, error in enumerate(build_system_lock_errors, start=1) + ), + ) + ) + lock = Lockfile.create( pex_version=__version__, style=lock_configuration.style, requires_python=lock_configuration.requires_python, target_systems=lock_configuration.target_systems, + lock_build_systems=lock_configuration.lock_build_systems, pip_version=pip_configuration.version, resolver_version=pip_configuration.resolver_version, - requirements=parsed_requirements, - constraints=constraints, + requirements=lock_result.requirements, + constraints=lock_result.constraints, allow_prereleases=pip_configuration.allow_prereleases, build_configuration=pip_configuration.build_configuration, transitive=pip_configuration.transitive, excluded=dependency_configuration.excluded, overridden=dependency_configuration.all_overrides(), - locked_resolves=locked_resolves, + locked_resolves=lock_result.locked_resolves, + build_systems=build_systems, ) if lock_configuration.style is LockStyle.UNIVERSAL and ( @@ -471,11 +636,11 @@ def create( lock_resolver.resolve_from_lock( targets=check_targets, lock=lock, - resolver=configured_resolver, + resolver=ConfiguredResolver(pip_configuration=pip_configuration), indexes=pip_configuration.repos_configuration.indexes, find_links=pip_configuration.repos_configuration.find_links, resolver_version=pip_configuration.resolver_version, - network_configuration=network_configuration, + network_configuration=pip_configuration.network_configuration, password_entries=pip_configuration.repos_configuration.password_entries, build_configuration=pip_configuration.build_configuration, transitive=pip_configuration.transitive, diff --git a/pex/resolve/lockfile/json_codec.py b/pex/resolve/lockfile/json_codec.py index b8283ce79..63a456d88 100644 --- a/pex/resolve/lockfile/json_codec.py +++ b/pex/resolve/lockfile/json_codec.py @@ -4,8 +4,10 @@ from __future__ import absolute_import import json +from collections import defaultdict from pex 
import compatibility +from pex.build_system import BuildSystemTable from pex.dist_metadata import Requirement, RequirementParseError from pex.enum import Enum from pex.pep_440 import Version @@ -13,10 +15,13 @@ from pex.pip.version import PipVersion from pex.resolve.locked_resolve import ( Artifact, + FileArtifact, + LocalProjectArtifact, LockedRequirement, LockedResolve, LockStyle, TargetSystem, + VCSArtifact, ) from pex.resolve.lockfile.model import Lockfile from pex.resolve.path_mappings import PathMappings @@ -31,6 +36,7 @@ from typing import ( Any, Container, + DefaultDict, Dict, List, Mapping, @@ -337,6 +343,7 @@ def assemble_tag( style=get_enum_value(LockStyle, "style"), requires_python=get("requires_python", list), target_systems=target_systems, + lock_build_systems=get("lock_build_systems", bool, optional=True) or False, pip_version=get_enum_value( PipVersion, "pip_version", @@ -386,7 +393,59 @@ def as_json_data( path_mappings=PathMappings(), # type: PathMappings ): # type: (...) 
-> Dict[str, Any] - return { + + build_systems_by_backend = defaultdict( + dict + ) # type: DefaultDict[str, Dict[BuildSystemTable, str]] + + def serialize_artifact(artifact): + # type: (Union[FileArtifact, LocalProjectArtifact, VCSArtifact]) -> Dict[str, Any] + + artifact_data = { + "url": path_mappings.maybe_canonicalize(artifact.url.download_url), + "algorithm": artifact.fingerprint.algorithm, + "hash": artifact.fingerprint.hash, + } + if artifact.build_system_table: + backend = artifact.build_system_table.build_backend + tables = build_systems_by_backend[backend] + artifact_data["build_system"] = tables.setdefault( + artifact.build_system_table, + "{backend}-{index}".format(backend=backend, index=len(tables)), + ) + return artifact_data + + def serialize_locked_resolve(locked_resolve): + # type: (LockedResolve) -> Dict[str, Any] + return { + "platform_tag": [ + locked_resolve.platform_tag.interpreter, + locked_resolve.platform_tag.abi, + locked_resolve.platform_tag.platform, + ] + if locked_resolve.platform_tag + else None, + "locked_requirements": [ + { + "project_name": str(req.pin.project_name), + # N.B.: We store the raw version so that `===` can work as intended against + # the un-normalized form of versions that are non-legacy and thus + # normalizable. 
+ "version": req.pin.version.raw, + "requires_dists": [ + path_mappings.maybe_canonicalize(str(dependency)) + for dependency in req.requires_dists + ], + "requires_python": str(req.requires_python) if req.requires_python else None, + "artifacts": [ + serialize_artifact(artifact) for artifact in req.iter_artifacts() + ], + } + for req in locked_resolve.locked_requirements + ], + } + + lock_data = { "pex_version": lockfile.pex_version, "style": str(lockfile.style), "requires_python": list(lockfile.requires_python), @@ -410,43 +469,24 @@ def as_json_data( "excluded": [str(exclude) for exclude in lockfile.excluded], "overridden": [str(override) for override in lockfile.overridden], "locked_resolves": [ - { - "platform_tag": [ - locked_resolve.platform_tag.interpreter, - locked_resolve.platform_tag.abi, - locked_resolve.platform_tag.platform, - ] - if locked_resolve.platform_tag - else None, - "locked_requirements": [ - { - "project_name": str(req.pin.project_name), - # N.B.: We store the raw version so that `===` can work as intended against - # the un-normalized form of versions that are non-legacy and thus - # normalizable. 
- "version": req.pin.version.raw, - "requires_dists": [ - path_mappings.maybe_canonicalize(str(dependency)) - for dependency in req.requires_dists - ], - "requires_python": str(req.requires_python) - if req.requires_python - else None, - "artifacts": [ - { - "url": path_mappings.maybe_canonicalize(artifact.url.download_url), - "algorithm": artifact.fingerprint.algorithm, - "hash": artifact.fingerprint.hash, - } - for artifact in req.iter_artifacts() - ], - } - for req in locked_resolve.locked_requirements - ], - } - for locked_resolve in lockfile.locked_resolves + serialize_locked_resolve(locked_resolve) for locked_resolve in lockfile.locked_resolves ], "path_mappings": { path_mapping.name: path_mapping.description for path_mapping in path_mappings.mappings }, } + if build_systems_by_backend: + lock_data["build_systems"] = { + build_system_id: { + "build_backend": build_system_table.build_backend, + "requires": build_system_table.requires, + "backend_path": build_system_table.backend_path, + "locked_resolves": [ + serialize_locked_resolve(locked_resolve) + for locked_resolve in lockfile.build_systems[build_system_table] + ], + } + for build_system in build_systems_by_backend.values() + for build_system_table, build_system_id in build_system.items() + } + return lock_data diff --git a/pex/resolve/lockfile/model.py b/pex/resolve/lockfile/model.py index bb309580e..40fae0eee 100644 --- a/pex/resolve/lockfile/model.py +++ b/pex/resolve/lockfile/model.py @@ -5,6 +5,7 @@ import os +from pex.build_system import BuildSystemTable from pex.dependency_configuration import DependencyConfiguration from pex.dist_metadata import Constraint, Requirement from pex.orderedset import OrderedSet @@ -36,6 +37,7 @@ def create( style, # type: LockStyle.Value requires_python, # type: Iterable[str] target_systems, # type: Iterable[TargetSystem.Value] + lock_build_systems, # type: bool requirements, # type: Iterable[Union[Requirement, ParsedRequirement]] constraints, # type: 
Iterable[Constraint] allow_prereleases, # type: bool @@ -44,6 +46,7 @@ def create( excluded, # type: Iterable[Requirement] overridden, # type: Iterable[Requirement] locked_resolves, # type: Iterable[LockedResolve] + build_systems=None, # type: Optional[Mapping[BuildSystemTable, Iterable[LockedResolve]]] source=None, # type: Optional[str] pip_version=None, # type: Optional[PipVersionValue] resolver_version=None, # type: Optional[ResolverVersion.Value] @@ -94,10 +97,11 @@ def extract_requirement(req): style=style, requires_python=SortedTuple(requires_python), target_systems=SortedTuple(target_systems), + lock_build_systems=lock_build_systems, pip_version=pip_ver, resolver_version=resolver_version or ResolverVersion.default(pip_ver), - requirements=SortedTuple(resolve_requirements, key=str), - constraints=SortedTuple(constraints, key=str), + requirements=SortedTuple(resolve_requirements), + constraints=SortedTuple(constraints), allow_prereleases=allow_prereleases, allow_wheels=build_configuration.allow_wheels, only_wheels=SortedTuple(build_configuration.only_wheels), @@ -111,6 +115,10 @@ def extract_requirement(req): excluded=SortedTuple(excluded), overridden=SortedTuple(overridden), locked_resolves=SortedTuple(locked_resolves), + build_systems={ + build_system_table: SortedTuple(locked_resolves) + for build_system_table, locked_resolves in (build_systems or {}).items() + }, local_project_requirement_mapping=requirement_by_local_project_directory, source=source, ) @@ -119,6 +127,7 @@ def extract_requirement(req): style = attr.ib() # type: LockStyle.Value requires_python = attr.ib() # type: SortedTuple[str] target_systems = attr.ib() # type: SortedTuple[TargetSystem.Value] + lock_build_systems = attr.ib() # type: bool pip_version = attr.ib() # type: PipVersionValue resolver_version = attr.ib() # type: ResolverVersion.Value requirements = attr.ib() # type: SortedTuple[Requirement] @@ -136,6 +145,7 @@ def extract_requirement(req): excluded = attr.ib() # type: 
SortedTuple[Requirement] overridden = attr.ib() # type: SortedTuple[Requirement] locked_resolves = attr.ib() # type: SortedTuple[LockedResolve] + build_systems = attr.ib() # type: Mapping[BuildSystemTable, SortedTuple[LockedResolve]] local_project_requirement_mapping = attr.ib(eq=False) # type: Mapping[str, Requirement] source = attr.ib(default=None, eq=False) # type: Optional[str] diff --git a/pex/resolve/lockfile/updater.py b/pex/resolve/lockfile/updater.py index d66224352..ec1c76631 100644 --- a/pex/resolve/lockfile/updater.py +++ b/pex/resolve/lockfile/updater.py @@ -665,6 +665,7 @@ def create( style=lock_file.style, requires_python=lock_file.requires_python, target_systems=lock_file.target_systems, + lock_build_systems=lock_file.lock_build_systems, ) pip_configuration = PipConfiguration( version=lock_file.pip_version, diff --git a/pex/resolve/resolved_requirement.py b/pex/resolve/resolved_requirement.py index eb63daa6c..0571043fd 100644 --- a/pex/resolve/resolved_requirement.py +++ b/pex/resolve/resolved_requirement.py @@ -6,6 +6,7 @@ import hashlib from pex import hashing +from pex.build_system import BuildSystemTable from pex.compatibility import url_unquote, urlparse from pex.dist_metadata import ProjectNameAndVersion, Requirement, is_wheel from pex.hashing import HashlibHasher @@ -167,6 +168,7 @@ class PartialArtifact(object): url = attr.ib(converter=_convert_url) # type: ArtifactURL fingerprint = attr.ib(default=None) # type: Optional[Fingerprint] verified = attr.ib(default=False) # type: bool + build_system_table = attr.ib(default=None) # type: Optional[BuildSystemTable] @attr.s(frozen=True) @@ -186,3 +188,8 @@ def iter_artifacts_to_fingerprint(self): for artifact in self.iter_artifacts(): if not artifact.fingerprint: yield artifact + + def iter_artifacts_to_lock(self): + for artifact in self.iter_artifacts(): + if not artifact.url.is_wheel: + yield artifact diff --git a/pex/resolve/resolvers.py b/pex/resolve/resolvers.py index ef98e9651..1e6f78b40 
100644 --- a/pex/resolve/resolvers.py +++ b/pex/resolve/resolvers.py @@ -1,17 +1,19 @@ # Copyright 2022 Pex project contributors. # Licensed under the Apache License, Version 2.0 (see LICENSE). -from __future__ import absolute_import +from __future__ import absolute_import, print_function +import hashlib import itertools import os +import zipfile from abc import abstractmethod from collections import OrderedDict, defaultdict -from pex import pex_warnings +from pex import pex_warnings, targets from pex.common import pluralize from pex.dependency_configuration import DependencyConfiguration -from pex.dist_metadata import Distribution, Requirement +from pex.dist_metadata import Distribution, Requirement, is_wheel from pex.fingerprinted_distribution import FingerprintedDistribution from pex.pep_427 import InstallableType from pex.pep_503 import ProjectName @@ -20,6 +22,7 @@ from pex.sorted_tuple import SortedTuple from pex.targets import AbbreviatedPlatform, Target, Targets from pex.typing import TYPE_CHECKING +from pex.util import CacheHelper if TYPE_CHECKING: from typing import DefaultDict, Iterable, List, Optional, Tuple @@ -216,6 +219,48 @@ class ResolveResult(object): type = attr.ib() # type: InstallableType.Value +def fingerprint_path(path): + # type: (str) -> str + + # We switched from sha1 to sha256 at the transition from using `pip install --target` to + # `pip install --prefix` to serve two purposes: + # 1. Insulate the new installation scheme from the old. + # 2. Move past sha1 which was shown to have practical collision attacks in 2019. + # + # The installation scheme switch was the primary purpose and switching hashes proved a pragmatic + # insulation. If the `pip install --prefix` re-arrangement scheme evolves, then some other + # option than switching hashing algorithms will be needed, like post-fixing a running version + # integer or just mixing one into the hashed content. 
+ # + # See: https://github.com/pex-tool/pex/issues/1655 for a general overview of these cache + # structure concerns. + hasher = hashlib.sha256 + + if os.path.isdir(path): + return CacheHelper.dir_hash(path, hasher=hasher) + return CacheHelper.hash(path, hasher=hasher) + + +@attr.s(frozen=True) +class LocalDistribution(object): + path = attr.ib() # type: str + fingerprint = attr.ib() # type: str + target = attr.ib(factory=targets.current) # type: Target + + @fingerprint.default + def _calculate_fingerprint(self): + return fingerprint_path(self.path) + + @property + def is_wheel(self): + return is_wheel(self.path) and zipfile.is_zipfile(self.path) + + +@attr.s(frozen=True) +class Downloaded(object): + local_distributions = attr.ib() # type: Tuple[LocalDistribution, ...] + + class Resolver(object): @abstractmethod def is_default_repos(self): @@ -249,3 +294,15 @@ def resolve_requirements( ): # type: (...) -> ResolveResult raise NotImplementedError() + + @abstractmethod + def download_requirements( + self, + requirements, # type: Iterable[str] + targets=Targets(), # type: Targets + pip_version=None, # type: Optional[PipVersionValue] + transitive=None, # type: Optional[bool] + extra_resolver_requirements=None, # type: Optional[Tuple[Requirement, ...]] + ): + # type: (...) 
-> Downloaded + raise NotImplementedError() diff --git a/pex/resolver.py b/pex/resolver.py index 9dacac413..8500b6f97 100644 --- a/pex/resolver.py +++ b/pex/resolver.py @@ -6,14 +6,12 @@ import functools import glob -import hashlib import itertools import os import zipfile from abc import abstractmethod from collections import OrderedDict, defaultdict -from pex import targets from pex.atomic_directory import AtomicDirectory, atomic_directory from pex.auth import PasswordEntry from pex.cache.dirs import BuiltWheelDir, CacheDir @@ -36,17 +34,19 @@ from pex.resolve.requirement_configuration import RequirementConfiguration from pex.resolve.resolver_configuration import BuildConfiguration, PipLog, ResolverVersion from pex.resolve.resolvers import ( + Downloaded, + LocalDistribution, ResolvedDistribution, Resolver, ResolveResult, Unsatisfiable, Untranslatable, check_resolve, + fingerprint_path, ) from pex.targets import AbbreviatedPlatform, CompletePlatform, LocalInterpreter, Target, Targets from pex.tracer import TRACER from pex.typing import TYPE_CHECKING -from pex.util import CacheHelper from pex.variables import ENV if TYPE_CHECKING: @@ -277,28 +277,6 @@ class IntegrityError(Exception): pass -def fingerprint_path(path): - # type: (str) -> str - - # We switched from sha1 to sha256 at the transition from using `pip install --target` to - # `pip install --prefix` to serve two purposes: - # 1. Insulate the new installation scheme from the old. - # 2. Move past sha1 which was shown to have practical collision attacks in 2019. - # - # The installation scheme switch was the primary purpose and switching hashes proved a pragmatic - # insulation. If the `pip install --prefix` re-arrangement scheme evolves, then some other - # option than switching hashing algorithms will be needed, like post-fixing a running version - # integer or just mixing one into the hashed content. 
- # - # See: https://github.com/pex-tool/pex/issues/1655 for a general overview of these cache - # structure concerns. - hasher = hashlib.sha256 - - if os.path.isdir(path): - return CacheHelper.dir_hash(path, hasher=hasher) - return CacheHelper.hash(path, hasher=hasher) - - @attr.s(frozen=True) class BuildRequest(object): @classmethod @@ -1224,26 +1202,6 @@ def _download_internal( return local_projects, download_results -@attr.s(frozen=True) -class LocalDistribution(object): - path = attr.ib() # type: str - fingerprint = attr.ib() # type: str - target = attr.ib(factory=targets.current) # type: Target - - @fingerprint.default - def _calculate_fingerprint(self): - return fingerprint_path(self.path) - - @property - def is_wheel(self): - return is_wheel(self.path) and zipfile.is_zipfile(self.path) - - -@attr.s(frozen=True) -class Downloaded(object): - local_distributions = attr.ib() # type: Tuple[LocalDistribution, ...] - - class ResolveObserver(object): @abstractmethod def observe_download( diff --git a/tests/integration/cli/commands/test_export.py b/tests/integration/cli/commands/test_export.py index 3d616f639..5314b268d 100644 --- a/tests/integration/cli/commands/test_export.py +++ b/tests/integration/cli/commands/test_export.py @@ -45,6 +45,7 @@ style=LockStyle.UNIVERSAL, requires_python=SortedTuple(), target_systems=SortedTuple(), + lock_build_systems=False, pip_version=PipVersion.DEFAULT, resolver_version=ResolverVersion.PIP_2020, requirements=SortedTuple([Requirement.parse("ansicolors")]), @@ -86,6 +87,7 @@ ) ] ), + build_systems={}, local_project_requirement_mapping={}, ) diff --git a/tests/integration/cli/commands/test_export_subset.py b/tests/integration/cli/commands/test_export_subset.py index 082fd18ae..02b0ae53c 100644 --- a/tests/integration/cli/commands/test_export_subset.py +++ b/tests/integration/cli/commands/test_export_subset.py @@ -59,7 +59,7 @@ def test_full( if sys.version_info[0] == 2: 
expected_requirements.append(Requirement.parse(to_unicode("enum34==1.1.10"))) - assert sorted(expected_requirements, key=str) == sorted(actual_requirements, key=str) + assert sorted(expected_requirements) == sorted(actual_requirements) def test_subset( diff --git a/tests/integration/cli/commands/test_lock_dependency_groups.py b/tests/integration/cli/commands/test_lock_dependency_groups.py index 3c414d883..b08c8d031 100644 --- a/tests/integration/cli/commands/test_lock_dependency_groups.py +++ b/tests/integration/cli/commands/test_lock_dependency_groups.py @@ -46,10 +46,7 @@ def test_lock_dependency_groups(tmpdir): ).assert_success() lockfile = json_codec.load(lock) - assert ( - SortedTuple((req("cowsay==5.0"), req("ansicolors==1.1.8")), key=str) - == lockfile.requirements - ) + assert SortedTuple((req("cowsay==5.0"), req("ansicolors==1.1.8"))) == lockfile.requirements assert 1 == len(lockfile.locked_resolves) locked_requirements = lockfile.locked_resolves[0].locked_requirements assert sorted( diff --git a/tests/integration/test_locked_resolve.py b/tests/integration/test_locked_resolve.py index 4f33872df..30111b30d 100644 --- a/tests/integration/test_locked_resolve.py +++ b/tests/integration/test_locked_resolve.py @@ -13,7 +13,8 @@ from pex.resolve.lockfile.create import LockObserver from pex.resolve.resolved_requirement import Pin from pex.resolve.resolver_configuration import PipConfiguration -from pex.resolver import Downloaded, LocalDistribution, WheelBuilder +from pex.resolve.resolvers import Downloaded, LocalDistribution +from pex.resolver import WheelBuilder from pex.typing import TYPE_CHECKING from pex.util import CacheHelper from testing.resolve import normalize_locked_resolve diff --git a/tests/resolve/lockfile/test_json_codec.py b/tests/resolve/lockfile/test_json_codec.py index 66eeb03f7..beb830aed 100644 --- a/tests/resolve/lockfile/test_json_codec.py +++ b/tests/resolve/lockfile/test_json_codec.py @@ -41,6 +41,7 @@ def test_roundtrip(tmpdir): 
style=LockStyle.STRICT, requires_python=(), target_systems=(), + lock_build_systems=False, pip_version=PipVersion.VENDORED, resolver_version=ResolverVersion.PIP_2020, requirements=( diff --git a/tests/resolve/test_locked_resolve.py b/tests/resolve/test_locked_resolve.py index b60a612fa..ccf075805 100644 --- a/tests/resolve/test_locked_resolve.py +++ b/tests/resolve/test_locked_resolve.py @@ -833,6 +833,7 @@ def fingerprint(self, _artifacts): locked_resolve = LockedResolve.create( resolved_requirements=(), dist_metadatas=(), + build_system_oracle=None, fingerprinter=DevNullFingerprinter(), ) assert Resolved(