Respect hashes in constraint files
charliermarsh committed Sep 5, 2024
1 parent 2da795a commit c1ae6f6
Showing 19 changed files with 381 additions and 101 deletions.
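In short: hashes attached to entries in a constraints file (`-c constraints.txt`) are now parsed, carried alongside each constraint as a `NameRequirementSpecification`, and fed into `HashStrategy` rather than being discarded. For orientation, a minimal sketch of the `--hash=` syntax involved; the package name and digest are hypothetical placeholders, and this is not uv's actual requirements parser.

```rust
// Illustrative only: split a constraints-file line into its requirement and
// `--hash=` options. The package and digest below are made-up placeholders.
fn main() {
    let line = "idna==3.7 --hash=sha256:0000000000000000000000000000000000000000000000000000000000000000";

    let mut tokens = line.split_whitespace();
    let requirement = tokens.next().unwrap_or_default();
    let hashes: Vec<&str> = tokens
        .filter_map(|token| token.strip_prefix("--hash="))
        .collect();

    println!("requirement: {requirement}");
    println!("hashes: {hashes:?}");
}
```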
7 changes: 4 additions & 3 deletions crates/bench/benches/uv.rs
@@ -92,7 +92,7 @@ mod resolver {
use uv_cache::Cache;
use uv_client::RegistryClient;
use uv_configuration::{
BuildOptions, Concurrency, ConfigSettings, IndexStrategy, SourceStrategy,
BuildOptions, Concurrency, ConfigSettings, Constraints, IndexStrategy, SourceStrategy,
};
use uv_dispatch::BuildDispatch;
use uv_distribution::DistributionDatabase;
@@ -159,7 +159,7 @@ mod resolver {
let installed_packages = EmptyInstalledPackages;
let sources = SourceStrategy::default();
let options = OptionsBuilder::new().exclude_newer(exclude_newer).build();
let build_constraints = [];
let build_constraints = Constraints::default();

let python_requirement = if universal {
PythonRequirement::from_requires_python(
@@ -173,7 +173,7 @@
let build_context = BuildDispatch::new(
client,
&cache,
&build_constraints,
build_constraints,
interpreter,
&index_locations,
&flat_index,
@@ -185,6 +185,7 @@ mod resolver {
build_isolation,
LinkMode::default(),
&build_options,
&hashes,
exclude_newer,
sources,
concurrency,
38 changes: 31 additions & 7 deletions crates/distribution-types/src/specified_requirement.rs
@@ -7,6 +7,16 @@ use uv_normalize::ExtraName;

use crate::VerbatimParsedUrl;

/// A [`Requirement`] with additional metadata from `requirements.txt`, currently only
/// hashes but in the future also editable and similar information.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct NameRequirementSpecification {
/// The actual requirement.
pub requirement: Requirement,
/// Hashes of the downloadable packages.
pub hashes: Vec<String>,
}

/// An [`UnresolvedRequirement`] with additional metadata from `requirements.txt`, currently only
/// hashes but in the future also editable and similar information.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
@@ -86,13 +96,7 @@ impl UnresolvedRequirement {
/// Return the hashes of the requirement, as specified in the URL fragment.
pub fn hashes(&self) -> Option<Hashes> {
match self {
Self::Named(requirement) => {
let RequirementSource::Url { ref url, .. } = requirement.source else {
return None;
};
let fragment = url.fragment()?;
Hashes::parse_fragment(fragment).ok()
}
Self::Named(requirement) => requirement.hashes(),
Self::Unnamed(requirement) => {
let ParsedUrl::Archive(ref url) = requirement.url.parsed_url else {
return None;
@@ -104,6 +108,17 @@
}
}

impl NameRequirementSpecification {
/// Return the hashes of the requirement, as specified in the URL fragment.
pub fn hashes(&self) -> Option<Hashes> {
let RequirementSource::Url { ref url, .. } = self.requirement.source else {
return None;
};
let fragment = url.fragment()?;
Hashes::parse_fragment(fragment).ok()
}
}

impl From<Requirement> for UnresolvedRequirementSpecification {
fn from(requirement: Requirement) -> Self {
Self {
@@ -112,3 +127,12 @@ impl From<Requirement> for UnresolvedRequirementSpecification {
}
}
}

impl From<Requirement> for NameRequirementSpecification {
fn from(requirement: Requirement) -> Self {
Self {
requirement,
hashes: Vec::new(),
}
}
}
13 changes: 11 additions & 2 deletions crates/pypi-types/src/requirement.rs
@@ -16,8 +16,8 @@ use uv_git::{GitReference, GitSha, GitUrl};
use uv_normalize::{ExtraName, PackageName};

use crate::{
ParsedArchiveUrl, ParsedDirectoryUrl, ParsedGitUrl, ParsedPathUrl, ParsedUrl, ParsedUrlError,
VerbatimParsedUrl,
Hashes, ParsedArchiveUrl, ParsedDirectoryUrl, ParsedGitUrl, ParsedPathUrl, ParsedUrl,
ParsedUrlError, VerbatimParsedUrl,
};

#[derive(Debug, Error)]
@@ -114,6 +114,15 @@ impl Requirement {
..self
})
}

/// Return the hashes of the requirement, as specified in the URL fragment.
pub fn hashes(&self) -> Option<Hashes> {
let RequirementSource::Url { ref url, .. } = self.source else {
return None;
};
let fragment = url.fragment()?;
Hashes::parse_fragment(fragment).ok()
}
}

impl From<Requirement> for pep508_rs::Requirement<VerbatimUrl> {
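The new `Requirement::hashes()` above pulls a digest out of a direct-URL requirement's fragment (e.g. `#sha256=…`). Below is a simplified stand-in for that logic using only the `url` crate; `Hashes::parse_fragment` is the crate's own parser and is not reproduced here, and the URL and digest are hypothetical.

```rust
// Sketch of fragment-based hash extraction, in the spirit of
// `Requirement::hashes()`. Requires the `url` crate; values are placeholders.
use url::Url;

fn fragment_digest(raw: &str) -> Option<(String, String)> {
    let url = Url::parse(raw).ok()?;
    // A fragment like `sha256=abc123` carries an algorithm and a digest.
    let (algorithm, digest) = url.fragment()?.split_once('=')?;
    Some((algorithm.to_string(), digest.to_string()))
}

fn main() {
    let raw = "https://example.com/foo-1.0.tar.gz#sha256=0000000000000000";
    println!("{:?}", fragment_digest(raw));
}
```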
9 changes: 6 additions & 3 deletions crates/uv-dispatch/src/lib.rs
@@ -47,6 +47,7 @@ pub struct BuildDispatch<'a> {
link_mode: install_wheel_rs::linker::LinkMode,
build_options: &'a BuildOptions,
config_settings: &'a ConfigSettings,
hasher: &'a HashStrategy,
exclude_newer: Option<ExcludeNewer>,
source_build_context: SourceBuildContext,
build_extra_env_vars: FxHashMap<OsString, OsString>,
@@ -58,7 +59,7 @@ impl<'a> BuildDispatch<'a> {
pub fn new(
client: &'a RegistryClient,
cache: &'a Cache,
constraints: &'a [Requirement],
constraints: Constraints,
interpreter: &'a Interpreter,
index_locations: &'a IndexLocations,
flat_index: &'a FlatIndex,
@@ -70,14 +71,15 @@
build_isolation: BuildIsolation<'a>,
link_mode: install_wheel_rs::linker::LinkMode,
build_options: &'a BuildOptions,
hasher: &'a HashStrategy,
exclude_newer: Option<ExcludeNewer>,
sources: SourceStrategy,
concurrency: Concurrency,
) -> Self {
Self {
client,
cache,
constraints: Constraints::from_requirements(constraints.iter().cloned()),
constraints,
interpreter,
index_locations,
flat_index,
@@ -89,6 +91,7 @@
build_isolation,
link_mode,
build_options,
hasher,
exclude_newer,
source_build_context: SourceBuildContext::default(),
build_extra_env_vars: FxHashMap::default(),
@@ -152,7 +155,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
Some(tags),
self.flat_index,
self.index,
&HashStrategy::None,
self.hasher,
self,
EmptyInstalledPackages,
DistributionDatabase::new(self.client, self, self.concurrency.downloads),
11 changes: 6 additions & 5 deletions crates/uv-installer/src/site_packages.rs
@@ -8,7 +8,8 @@ use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
use url::Url;

use distribution_types::{
Diagnostic, InstalledDist, Name, UnresolvedRequirement, UnresolvedRequirementSpecification,
Diagnostic, InstalledDist, Name, NameRequirementSpecification, UnresolvedRequirement,
UnresolvedRequirementSpecification,
};
use pep440_rs::{Version, VersionSpecifiers};
use pypi_types::{Requirement, ResolverMarkerEnvironment, VerbatimParsedUrl};
@@ -283,18 +284,18 @@ impl SitePackages {
pub fn satisfies(
&self,
requirements: &[UnresolvedRequirementSpecification],
constraints: &[Requirement],
constraints: &[NameRequirementSpecification],
markers: &ResolverMarkerEnvironment,
) -> Result<SatisfiesResult> {
// Collect the constraints.
let constraints: FxHashMap<&PackageName, Vec<&Requirement>> =
constraints
.iter()
.fold(FxHashMap::default(), |mut constraints, requirement| {
.fold(FxHashMap::default(), |mut constraints, constraint| {
constraints
.entry(&requirement.name)
.entry(&constraint.requirement.name)
.or_default()
.push(requirement);
.push(&constraint.requirement);
constraints
});

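`SitePackages::satisfies` now receives constraints as `NameRequirementSpecification`s but still groups them by package name before checking installed distributions. A minimal sketch of that grouping pattern with plain `std` types; the package names and specifiers are made up.

```rust
use std::collections::HashMap;

// Fold a flat list of (name, specifier) constraints into a name -> specifiers
// map, mirroring the grouping in `SitePackages::satisfies`.
fn main() {
    let constraints = [("idna", "==3.7"), ("anyio", ">=4"), ("idna", "!=3.6")];

    let by_name: HashMap<&str, Vec<&str>> =
        constraints
            .iter()
            .fold(HashMap::new(), |mut map, (name, spec)| {
                map.entry(*name).or_default().push(*spec);
                map
            });

    // Every constraint that names `idna` is now available in one lookup.
    assert_eq!(by_name["idna"], vec!["==3.7", "!=3.6"]);
    println!("{by_name:?}");
}
```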
13 changes: 9 additions & 4 deletions crates/uv-requirements/src/specification.rs
@@ -35,7 +35,8 @@ use tracing::instrument;

use cache_key::CanonicalUrl;
use distribution_types::{
FlatIndexLocation, IndexUrl, UnresolvedRequirement, UnresolvedRequirementSpecification,
FlatIndexLocation, IndexUrl, NameRequirementSpecification, UnresolvedRequirement,
UnresolvedRequirementSpecification,
};
use pep508_rs::{MarkerTree, UnnamedRequirement, UnnamedRequirementUrl};
use pypi_types::Requirement;
@@ -56,7 +57,7 @@ pub struct RequirementsSpecification {
/// The requirements for the project.
pub requirements: Vec<UnresolvedRequirementSpecification>,
/// The constraints for the project.
pub constraints: Vec<Requirement>,
pub constraints: Vec<NameRequirementSpecification>,
/// The overrides for the project.
pub overrides: Vec<UnresolvedRequirementSpecification>,
/// The source trees from which to extract requirements.
@@ -129,6 +130,7 @@ impl RequirementsSpecification {
.constraints
.into_iter()
.map(Requirement::from)
.map(NameRequirementSpecification::from)
.collect(),
index_url: requirements_txt.index_url.map(IndexUrl::from),
extra_index_urls: requirements_txt
@@ -247,13 +249,16 @@
}

// Read all constraints, treating both requirements _and_ constraints as constraints.
// Overrides are ignored, as are the hashes, as they are not relevant for constraints.
// Overrides are ignored.
for source in constraints {
let source = Self::from_source(source, client_builder).await?;
for entry in source.requirements {
match entry.requirement {
UnresolvedRequirement::Named(requirement) => {
spec.constraints.push(requirement);
spec.constraints.push(NameRequirementSpecification {
requirement,
hashes: entry.hashes,
});
}
UnresolvedRequirement::Unnamed(requirement) => {
return Err(anyhow::anyhow!(
82 changes: 70 additions & 12 deletions crates/uv-types/src/hash.rs
@@ -126,13 +126,56 @@ impl HashStrategy {
/// to "only evaluate marker expressions that reference an extra name.")
pub fn from_requirements<'a>(
requirements: impl Iterator<Item = (&'a UnresolvedRequirement, &'a [String])>,
constraints: impl Iterator<Item = (&'a Requirement, &'a [String])>,
marker_env: Option<&ResolverMarkerEnvironment>,
mode: HashCheckingMode,
) -> Result<Self, HashStrategyError> {
let mut hashes = FxHashMap::<VersionId, Vec<HashDigest>>::default();
let mut constraint_hashes = FxHashMap::<VersionId, Vec<HashDigest>>::default();

// First, index the constraints by name.
for (requirement, digests) in constraints {
if !requirement
.evaluate_markers(marker_env.map(ResolverMarkerEnvironment::markers), &[])
{
continue;
}

// Every constraint must be a pinned version.
let Some(id) = Self::pin(requirement) else {
if mode.is_require() {
return Err(HashStrategyError::UnpinnedRequirement(
requirement.to_string(),
mode,
));
}
continue;
};

let digests = if digests.is_empty() {
// If there are no hashes, and the distribution is URL-based, attempt to extract
// it from the fragment.
requirement
.hashes()
.map(Hashes::into_digests)
.unwrap_or_default()
} else {
// Parse the hashes.
digests
.iter()
.map(|digest| HashDigest::from_str(digest))
.collect::<Result<Vec<_>, _>>()?
};

if digests.is_empty() {
continue;
}

constraint_hashes.insert(id, digests);
}

// For each requirement, map from name to allowed hashes. We use the last entry for each
// package.
let mut requirement_hashes = FxHashMap::<VersionId, Vec<HashDigest>>::default();
for (requirement, digests) in requirements {
if !requirement
.evaluate_markers(marker_env.map(ResolverMarkerEnvironment::markers), &[])
@@ -143,9 +186,17 @@
// Every requirement must be either a pinned version or a direct URL.
let id = match &requirement {
UnresolvedRequirement::Named(requirement) => {
Self::pin(requirement).ok_or_else(|| {
HashStrategyError::UnpinnedRequirement(requirement.to_string(), mode)
})?
if let Some(id) = Self::pin(requirement) {
id
} else {
if mode.is_require() {
return Err(HashStrategyError::UnpinnedRequirement(
requirement.to_string(),
mode,
));
}
continue;
}
}
UnresolvedRequirement::Unnamed(requirement) => {
// Direct URLs are always allowed.
@@ -168,20 +219,27 @@
.collect::<Result<Vec<_>, _>>()?
};

// Under `--require-hashes`, every requirement must include a hash.
if digests.is_empty() {
// Under `--require-hashes`, every requirement must include a hash.
if mode.is_require() {
return Err(HashStrategyError::MissingHashes(
requirement.to_string(),
mode,
));
if constraint_hashes.get(&id).map_or(true, Vec::is_empty) {
return Err(HashStrategyError::MissingHashes(
requirement.to_string(),
mode,
));
}
}
continue;
}

hashes.insert(id, digests);
requirement_hashes.insert(id, digests);
}

// Merge the hashes, preferring requirements over constraints, to match pip.
let hashes: FxHashMap<VersionId, Vec<HashDigest>> = constraint_hashes
.into_iter()
.chain(requirement_hashes)
.collect();
match mode {
HashCheckingMode::Verify => Ok(Self::Verify(Arc::new(hashes))),
HashCheckingMode::Require => Ok(Self::Require(Arc::new(hashes))),
@@ -248,9 +306,9 @@ pub enum HashStrategyError {
#[error(transparent)]
Hash(#[from] HashError),
#[error(
"In `{1}` mode, all requirement must have their versions pinned with `==`, but found: {0}"
"In `{1}` mode, all requirements must have their versions pinned with `==`, but found: {0}"
)]
UnpinnedRequirement(String, HashCheckingMode),
#[error("In `{1}` mode, all requirement must have a hash, but none were provided for: {0}")]
#[error("In `{1}` mode, all requirements must have a hash, but none were provided for: {0}")]
MissingHashes(String, HashCheckingMode),
}
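The tail of `HashStrategy::from_requirements` merges the two maps so that a requirement's own hashes win over hashes coming from a constraint, matching pip. A minimal sketch of that merge with plain `std` types; package names and digests are placeholders.

```rust
use std::collections::HashMap;

// Constraint hashes are chained first, requirement hashes second, so `collect`
// lets a requirement's entry overwrite the constraint entry for the same key.
fn main() {
    let constraint_hashes = HashMap::from([
        ("idna", vec!["sha256:aaaa"]),
        ("anyio", vec!["sha256:bbbb"]),
    ]);
    let requirement_hashes = HashMap::from([("idna", vec!["sha256:cccc"])]);

    let merged: HashMap<&str, Vec<&str>> = constraint_hashes
        .into_iter()
        .chain(requirement_hashes)
        .collect();

    assert_eq!(merged["idna"], vec!["sha256:cccc"]); // requirement wins
    assert_eq!(merged["anyio"], vec!["sha256:bbbb"]); // constraint fills the gap
    println!("{merged:?}");
}
```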