mirror of
https://github.com/astral-sh/uv.git
synced 2025-10-23 00:32:13 +00:00

Some checks are pending
CI / check system | python on macos aarch64 (push) Blocked by required conditions
CI / build binary | macos aarch64 (push) Blocked by required conditions
CI / Determine changes (push) Waiting to run
CI / lint (push) Waiting to run
CI / cargo clippy | ubuntu (push) Blocked by required conditions
CI / cargo clippy | windows (push) Blocked by required conditions
CI / cargo dev generate-all (push) Blocked by required conditions
CI / cargo shear (push) Waiting to run
CI / check windows trampoline | i686 (push) Blocked by required conditions
CI / cargo test | ubuntu (push) Blocked by required conditions
CI / cargo test | macos (push) Blocked by required conditions
CI / cargo test | windows (push) Blocked by required conditions
CI / integration test | uv publish (push) Blocked by required conditions
CI / check cache | ubuntu (push) Blocked by required conditions
CI / check cache | macos aarch64 (push) Blocked by required conditions
CI / check system | python on debian (push) Blocked by required conditions
CI / check system | python on fedora (push) Blocked by required conditions
CI / check system | python on ubuntu (push) Blocked by required conditions
CI / check system | python on opensuse (push) Blocked by required conditions
CI / check system | python on rocky linux 8 (push) Blocked by required conditions
CI / check system | python on rocky linux 9 (push) Blocked by required conditions
CI / check system | pypy on ubuntu (push) Blocked by required conditions
CI / check system | pyston (push) Blocked by required conditions
CI / check system | homebrew python on macos aarch64 (push) Blocked by required conditions
CI / check system | python on macos x86_64 (push) Blocked by required conditions
CI / check system | python3.10 on windows (push) Blocked by required conditions
CI / check system | python3.10 on windows x86 (push) Blocked by required conditions
CI / check system | python3.13 on windows (push) Blocked by required conditions
CI / check system | python3.12 via chocolatey (push) Blocked by required conditions
CI / check system | python3.9 via pyenv (push) Blocked by required conditions
CI / check system | python3.13 (push) Blocked by required conditions
CI / check system | conda3.11 on linux (push) Blocked by required conditions
CI / check system | conda3.8 on linux (push) Blocked by required conditions
CI / check system | conda3.11 on macos (push) Blocked by required conditions
CI / check system | conda3.8 on macos (push) Blocked by required conditions
CI / check system | conda3.11 on windows (push) Blocked by required conditions
CI / check windows trampoline | aarch64 (push) Blocked by required conditions
CI / check windows trampoline | x86_64 (push) Blocked by required conditions
CI / test windows trampoline | i686 (push) Blocked by required conditions
CI / test windows trampoline | x86_64 (push) Blocked by required conditions
CI / typos (push) Waiting to run
CI / mkdocs (push) Waiting to run
CI / build binary | linux (push) Blocked by required conditions
CI / build binary | macos x86_64 (push) Blocked by required conditions
CI / build binary | windows (push) Blocked by required conditions
CI / cargo build (msrv) (push) Blocked by required conditions
CI / build binary | freebsd (push) Blocked by required conditions
CI / ecosystem test | prefecthq/prefect (push) Blocked by required conditions
CI / ecosystem test | pallets/flask (push) Blocked by required conditions
CI / integration test | conda on ubuntu (push) Blocked by required conditions
CI / integration test | free-threaded on linux (push) Blocked by required conditions
CI / integration test | free-threaded on windows (push) Blocked by required conditions
CI / integration test | pypy on ubuntu (push) Blocked by required conditions
CI / integration test | pypy on windows (push) Blocked by required conditions
CI / integration test | graalpy on ubuntu (push) Blocked by required conditions
CI / integration test | graalpy on windows (push) Blocked by required conditions
CI / integration test | github actions (push) Blocked by required conditions
CI / integration test | determine publish changes (push) Blocked by required conditions
CI / check system | alpine (push) Blocked by required conditions
CI / check system | conda3.8 on windows (push) Blocked by required conditions
CI / check system | amazonlinux (push) Blocked by required conditions
CI / check system | embedded python3.10 on windows (push) Blocked by required conditions
CI / benchmarks (push) Blocked by required conditions
## Summary

It's just a `usize`, so it seems simpler — and perhaps even marginally more performant — to make it `Copy`.
255 lines
9.4 KiB
Rust
255 lines
9.4 KiB
Rust
use std::borrow::Cow;
|
|
use std::collections::VecDeque;
|
|
use std::path::Path;
|
|
use std::slice;
|
|
use std::sync::Arc;
|
|
|
|
use anyhow::{Context, Result};
|
|
use futures::stream::FuturesOrdered;
|
|
use futures::TryStreamExt;
|
|
use rustc_hash::FxHashSet;
|
|
use url::Url;
|
|
|
|
use uv_configuration::ExtrasSpecification;
|
|
use uv_distribution::{DistributionDatabase, Reporter, RequiresDist};
|
|
use uv_distribution_types::{
|
|
BuildableSource, DirectorySourceUrl, HashPolicy, SourceUrl, VersionId,
|
|
};
|
|
use uv_fs::Simplified;
|
|
use uv_normalize::{ExtraName, PackageName};
|
|
use uv_pep508::{MarkerTree, RequirementOrigin};
|
|
use uv_pypi_types::Requirement;
|
|
use uv_resolver::{InMemoryIndex, MetadataResponse};
|
|
use uv_types::{BuildContext, HashStrategy};
|
|
|
|
/// The outcome of resolving a single source tree: the flattened requirements,
/// the project they came from, and the extras that were applied.
#[derive(Debug, Clone)]
pub struct SourceTreeResolution {
    /// The requirements sourced from the source trees.
    pub requirements: Vec<Requirement>,
    /// The name of the project that was resolved.
    pub project: PackageName,
    /// The extras used when resolving the requirements.
    pub extras: Vec<ExtraName>,
}
|
|
|
|
/// A resolver for requirements specified via source trees.
///
/// Used, e.g., to determine the input requirements when a user specifies a `pyproject.toml`
/// file, which may require running PEP 517 build hooks to extract metadata.
pub struct SourceTreeResolver<'a, Context: BuildContext> {
    /// The extras to include when resolving requirements.
    extras: &'a ExtrasSpecification,
    /// The hash policy to enforce.
    hasher: &'a HashStrategy,
    /// The in-memory index for resolving dependencies.
    index: &'a InMemoryIndex,
    /// The database for fetching and building distributions.
    database: DistributionDatabase<'a, Context>,
}
|
|
|
|
impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> {
    /// Instantiate a new [`SourceTreeResolver`] for a given set of `source_trees`.
    pub fn new(
        extras: &'a ExtrasSpecification,
        hasher: &'a HashStrategy,
        index: &'a InMemoryIndex,
        database: DistributionDatabase<'a, Context>,
    ) -> Self {
        Self {
            extras,
            hasher,
            index,
            database,
        }
    }

    /// Set the [`Reporter`] to use for this resolver.
    ///
    /// Consumes `self` and returns a copy whose distribution database reports
    /// progress through the given reporter.
    #[must_use]
    pub fn with_reporter(self, reporter: impl Reporter + 'static) -> Self {
        Self {
            database: self.database.with_reporter(reporter),
            ..self
        }
    }

    /// Resolve the requirements from the provided source trees.
    ///
    /// Source trees are resolved concurrently, but the results are yielded in the
    /// order of the input iterator (via [`FuturesOrdered`]). The first failure
    /// aborts the whole resolution.
    pub async fn resolve(
        self,
        source_trees: impl Iterator<Item = &Path>,
    ) -> Result<Vec<SourceTreeResolution>> {
        let resolutions: Vec<_> = source_trees
            .map(|source_tree| async { self.resolve_source_tree(source_tree).await })
            .collect::<FuturesOrdered<_>>()
            .try_collect()
            .await?;
        Ok(resolutions)
    }

    /// Infer the dependencies for a directory dependency.
    ///
    /// Fetches the `Requires-Dist` metadata for the tree at `path`, stamps each
    /// requirement with a [`RequirementOrigin`] pointing back at the project file,
    /// then flattens any self-referential extras (e.g. `project[docs]` depending on
    /// `project[tests]`) into a single requirement list.
    async fn resolve_source_tree(&self, path: &Path) -> Result<SourceTreeResolution> {
        let metadata = self.resolve_requires_dist(path).await?;

        let origin = RequirementOrigin::Project(path.to_path_buf(), metadata.name.clone());

        // Determine the extras to include when resolving the requirements.
        let extras = self
            .extras
            .extra_names(metadata.provides_extras.iter())
            .cloned()
            .collect::<Vec<_>>();

        // Attach the origin to every requirement and simplify away the activated
        // extras from each marker expression.
        let dependencies = metadata
            .requires_dist
            .into_iter()
            .map(|requirement| Requirement {
                origin: Some(origin.clone()),
                marker: requirement.marker.simplify_extras(&extras),
                ..requirement
            })
            .collect::<Vec<_>>();

        // Transitively process all extras that are recursively included, starting with the current
        // extra. Self-requirements (`project[extra]` on `project` itself) act as edges in the
        // extras graph; the queue performs a breadth-first walk over them, with `seen`
        // guarding against cycles (an extra may be revisited under a different marker).
        let mut requirements = dependencies.clone();
        let mut seen = FxHashSet::<(ExtraName, MarkerTree)>::default();
        let mut queue: VecDeque<_> = requirements
            .iter()
            .filter(|req| req.name == metadata.name)
            .flat_map(|req| {
                req.extras
                    .iter()
                    .cloned()
                    .map(|extra| (extra, req.marker.simplify_extras(&extras)))
            })
            .collect();
        while let Some((extra, marker)) = queue.pop_front() {
            if !seen.insert((extra.clone(), marker)) {
                continue;
            }

            // Find the requirements for the extra.
            for requirement in &dependencies {
                if requirement.marker.top_level_extra_name().as_ref() == Some(&extra) {
                    // Conjoin the inherited marker with the requirement's own marker,
                    // then drop the now-satisfied `extra == "..."` clause.
                    let requirement = {
                        let mut marker = marker;
                        marker.and(requirement.marker);
                        Requirement {
                            name: requirement.name.clone(),
                            extras: requirement.extras.clone(),
                            source: requirement.source.clone(),
                            origin: requirement.origin.clone(),
                            marker: marker.simplify_extras(slice::from_ref(&extra)),
                        }
                    };
                    if requirement.name == metadata.name {
                        // Add each transitively included extra.
                        queue.extend(
                            requirement
                                .extras
                                .iter()
                                .cloned()
                                .map(|extra| (extra, requirement.marker)),
                        );
                    } else {
                        // Add the requirements for that extra.
                        requirements.push(requirement);
                    }
                }
            }
        }

        // Drop all the self-requirements now that we flattened them out.
        requirements.retain(|req| req.name != metadata.name);

        let project = metadata.name;
        let extras = metadata.provides_extras;

        Ok(SourceTreeResolution {
            requirements,
            project,
            extras,
        })
    }

    /// Resolve the [`RequiresDist`] metadata for a given source tree. Attempts to resolve the
    /// requirements without building the distribution, even if the project contains (e.g.) a
    /// dynamic version since, critically, we don't need to install the package itself; only its
    /// dependencies.
    ///
    /// `path` is expected to be a project file (e.g. `pyproject.toml`), not the project
    /// directory itself; its parent directory is used as the buildable source.
    async fn resolve_requires_dist(&self, path: &Path) -> Result<RequiresDist> {
        // Convert to a buildable source. Canonicalize first so the URL and install
        // path below are absolute and symlink-free.
        let source_tree = fs_err::canonicalize(path).with_context(|| {
            format!(
                "Failed to canonicalize path to source tree: {}",
                path.user_display()
            )
        })?;
        let source_tree = source_tree.parent().ok_or_else(|| {
            anyhow::anyhow!(
                "The file `{}` appears to be a `pyproject.toml`, `setup.py`, or `setup.cfg` file, which must be in a directory",
                path.user_display()
            )
        })?;

        // If the path is a `pyproject.toml`, attempt to extract the requirements statically.
        if let Ok(metadata) = self.database.requires_dist(source_tree).await {
            return Ok(metadata);
        }

        let Ok(url) = Url::from_directory_path(source_tree) else {
            return Err(anyhow::anyhow!("Failed to convert path to URL"));
        };
        let source = SourceUrl::Directory(DirectorySourceUrl {
            url: &url,
            install_path: Cow::Borrowed(source_tree),
            editable: false,
        });

        // Determine the hash policy. Since we don't have a package name, we perform a
        // manual match. Required hashes cannot be honored for a local directory (there is
        // no stable artifact to hash), so that case is an error.
        let hashes = match self.hasher {
            HashStrategy::None => HashPolicy::None,
            HashStrategy::Generate => HashPolicy::Generate,
            HashStrategy::Verify(_) => HashPolicy::Generate,
            HashStrategy::Require(_) => {
                return Err(anyhow::anyhow!(
                    "Hash-checking is not supported for local directories: {}",
                    path.user_display()
                ));
            }
        };

        // Fetch the metadata for the distribution, preferring the in-memory index over a
        // (potentially expensive) PEP 517 build.
        let metadata = {
            let id = VersionId::from_url(source.url());
            if let Some(archive) =
                self.index
                    .distributions()
                    .get(&id)
                    .as_deref()
                    .and_then(|response| {
                        if let MetadataResponse::Found(archive) = response {
                            Some(archive)
                        } else {
                            None
                        }
                    })
            {
                // If the metadata is already in the index, return it.
                archive.metadata.clone()
            } else {
                // Run the PEP 517 build process to extract metadata from the source distribution.
                let source = BuildableSource::Url(source);
                let archive = self.database.build_wheel_metadata(&source, hashes).await?;

                // Insert the metadata into the index so subsequent lookups hit the cache.
                self.index
                    .distributions()
                    .done(id, Arc::new(MetadataResponse::Found(archive.clone())));

                archive.metadata
            }
        };

        Ok(RequiresDist::from(metadata))
    }
}
|