Mirror of https://github.com/astral-sh/uv.git (synced 2025-07-07 21:35:00 +00:00)
Strip sources out of pubgrub
parent 90966c7269
commit 042432b200
15 changed files with 92 additions and 202 deletions
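In outline: the parent commit threaded a `Vec<Source>` through the resolver's `PubGrubPackage::Package` variant and a typed `source` field through `Requirement`; this commit strips both, leaving `Requirement` with only an `Option<String>` path and moving source bookkeeping to the pip-compile layer (last file below). A minimal, self-contained sketch of the shape change, with `String` aliases standing in for the real uv types (only the arities visible in the hunks are modeled; the enum names are hypothetical):

    // Stand-in aliases; the real types come from uv_normalize / pep508_rs.
    type PackageName = String;
    type ExtraName = String;
    type VerbatimUrl = String;
    type Source = String;

    // Shape before this commit: the resolver's package carried its file sources.
    enum PubGrubPackageBefore {
        Package(PackageName, Option<ExtraName>, Option<VerbatimUrl>, Vec<Source>),
    }

    // Shape after this commit: the fourth field is gone; sources are tracked
    // outside the resolver (see the pip-compile hunks at the end of the diff).
    enum PubGrubPackageAfter {
        Package(PackageName, Option<ExtraName>, Option<VerbatimUrl>),
    }

    fn main() {
        let before = PubGrubPackageBefore::Package(
            "flask".into(),
            None,
            None,
            vec!["requirements.in".into()],
        );
        let after = PubGrubPackageAfter::Package("flask".into(), None, None);
        // The values exist only to show the arity difference between the two shapes.
        drop((before, after));
    }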
@@ -83,7 +83,7 @@ impl From<Dist> for Requirement {
                     ),
                 )),
                 marker: None,
-                source: None,
+                path: None,
             },

             Dist::Built(BuiltDist::DirectUrl(wheel)) => Self {
@@ -91,14 +91,14 @@ impl From<Dist> for Requirement {
                 extras: vec![],
                 version_or_url: Some(pep508_rs::VersionOrUrl::Url(wheel.url)),
                 marker: None,
-                source: None,
+                path: None,
             },
             Dist::Built(BuiltDist::Path(wheel)) => Self {
                 name: wheel.filename.name,
                 extras: vec![],
                 version_or_url: Some(pep508_rs::VersionOrUrl::Url(wheel.url)),
                 marker: None,
-                source: None,
+                path: None,
             },
             Dist::Source(SourceDist::Registry(sdist)) => Self {
                 name: sdist.filename.name,
@@ -109,28 +109,28 @@ impl From<Dist> for Requirement {
                     ),
                 )),
                 marker: None,
-                source: None,
+                path: None,
             },
             Dist::Source(SourceDist::DirectUrl(sdist)) => Self {
                 name: sdist.name,
                 extras: vec![],
                 version_or_url: Some(pep508_rs::VersionOrUrl::Url(sdist.url)),
                 marker: None,
-                source: None,
+                path: None,
             },
             Dist::Source(SourceDist::Git(sdist)) => Self {
                 name: sdist.name,
                 extras: vec![],
                 version_or_url: Some(pep508_rs::VersionOrUrl::Url(sdist.url)),
                 marker: None,
-                source: None,
+                path: None,
             },
             Dist::Source(SourceDist::Path(sdist)) => Self {
                 name: sdist.name,
                 extras: vec![],
                 version_or_url: Some(pep508_rs::VersionOrUrl::Url(sdist.url)),
                 marker: None,
-                source: None,
+                path: None,
             },
         }
     }
@@ -147,7 +147,7 @@ impl From<InstalledDist> for Requirement {
                )),
            )),
            marker: None,
-           source: None,
+           path: None,
        }
    }
}

@@ -43,7 +43,6 @@ pub use marker::{
 };
 use pep440_rs::{TrackedFromStr, Version, VersionSpecifier, VersionSpecifiers};
 use uv_fs::normalize_url_path;
-use uv_normalize::Source;
 // Parity with the crates.io version of pep508_rs
 use crate::verbatim_url::VerbatimUrlError;
 #[cfg(feature = "non-pep508-extensions")]
@@ -146,7 +145,7 @@ pub struct Requirement {
     /// Those are a nested and/or tree.
     pub marker: Option<MarkerTree>,
     /// Source of the original file (where existing)
-    pub source: Option<Source>,
+    pub path: Option<String>,
 }

 impl Display for Requirement {
@@ -1317,7 +1316,7 @@ fn parse_pep508_requirement(
         extras,
         version_or_url: requirement_kind,
         marker,
-        source: source.map(|p| Source::Requirement(p.to_string_lossy().to_string())),
+        path: source.map(|p| p.to_string_lossy().to_string()),
     })
 }

@@ -1518,7 +1517,7 @@ mod tests {
                 operator: MarkerOperator::LessThan,
                 r_value: MarkerValue::QuotedString("2.7".to_string()),
             })),
-            source: None,
+            path: None,
         };
         assert_eq!(requests, expected);
     }
@@ -1742,7 +1741,7 @@ mod tests {
            extras: vec![],
            marker: None,
            version_or_url: Some(VersionOrUrl::Url(VerbatimUrl::from_str(url).unwrap())),
-           source: None,
+           path: None,
        };
        assert_eq!(pip_url, expected);
    }

@@ -55,7 +55,7 @@ use uv_client::BaseClient;
 use uv_client::BaseClientBuilder;
 use uv_configuration::{NoBinary, NoBuild, PackageNameSpecifier};
 use uv_fs::{normalize_url_path, Simplified};
-use uv_normalize::{ExtraName, Source};
+use uv_normalize::ExtraName;
 use uv_warnings::warn_user;

 mod requirement;
@@ -495,10 +495,7 @@ impl RequirementsTxt {
                 // _requirements_, but we don't want to support that.
                 for entry in sub_constraints.requirements {
                     match entry.requirement {
-                        RequirementsTxtRequirement::Pep508(mut requirement) => {
-                            if let Some(Source::Requirement(ref name)) = requirement.source {
-                                requirement.source.replace(Source::Constraint(name.clone()));
-                            }
+                        RequirementsTxtRequirement::Pep508(requirement) => {
                             data.constraints.push(requirement);
                         }
                         RequirementsTxtRequirement::Unnamed(_) => {
@@ -509,10 +506,7 @@ impl RequirementsTxt {
                         }
                     }
                 }
-                for mut constraint in sub_constraints.constraints {
-                    if let Some(Source::Requirement(ref name)) = constraint.source {
-                        constraint.source.replace(Source::Constraint(name.clone()));
-                    }
+                for constraint in sub_constraints.constraints {
                     data.constraints.push(constraint);
                 }
             }

@@ -3,7 +3,7 @@ use std::hash::BuildHasherDefault;
 use rustc_hash::FxHashMap;

 use pep508_rs::Requirement;
-use uv_normalize::{PackageName, Source};
+use uv_normalize::PackageName;

 /// A set of constraints for a set of requirements.
 #[derive(Debug, Default, Clone)]
@@ -14,10 +14,7 @@ impl Constraints {
     pub fn from_requirements(requirements: Vec<Requirement>) -> Self {
         let mut constraints: FxHashMap<PackageName, Vec<Requirement>> =
             FxHashMap::with_capacity_and_hasher(requirements.len(), BuildHasherDefault::default());
-        for mut requirement in requirements {
-            if let Some(Source::Requirement(ref name)) = requirement.source {
-                requirement.source.replace(Source::Constraint(name.clone()));
-            }
+        for requirement in requirements {
             constraints
                 .entry(requirement.name.clone())
                 .or_default()

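The two deletions above remove the same rewrite in two places: an entry read from a constraints file had its `Source::Requirement(..)` origin replaced with `Source::Constraint(..)`. Per the pip-compile hunks at the end of this diff, constraints are instead tagged as `Source::Constraint` when the sources map is built. A small stand-in sketch of the rewrite being deleted here, with a simplified `Source` rather than uv's actual `uv_normalize::Source`:

    // Stand-in for uv_normalize::Source as used by the removed loops above.
    #[derive(Debug, Clone, PartialEq)]
    enum Source {
        Requirement(String),
        Constraint(String),
    }

    // The deleted code retagged a requirement-origin as a constraint-origin when a
    // requirement arrived via a constraints file; this free function captures that.
    fn as_constraint(source: Source) -> Source {
        match source {
            Source::Requirement(name) => Source::Constraint(name),
            other => other,
        }
    }

    fn main() {
        let tagged = as_constraint(Source::Requirement("constraints.txt".to_string()));
        assert_eq!(tagged, Source::Constraint("constraints.txt".to_string()));
    }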
@@ -132,7 +132,7 @@ pub(crate) async fn resolve_many(args: ResolveManyArgs) -> Result<()> {
                 extras: requirement.extras,
                 version_or_url: Some(equals_version),
                 marker: None,
-                source: requirement.source,
+                path: requirement.path,
             }
         } else {
             requirement

@@ -128,7 +128,7 @@ impl<'a> SitePackages<'a> {
                 }
             }),
             marker: None,
-            source: None,
+            path: None,
         })
     }


@@ -102,7 +102,7 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
                 extras: requirement.extras,
                 version_or_url: Some(VersionOrUrl::Url(requirement.url)),
                 marker: requirement.marker,
-                source: None,
+                path: None,
             });
         }

@@ -121,7 +121,7 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
                 extras: requirement.extras,
                 version_or_url: Some(VersionOrUrl::Url(requirement.url)),
                 marker: requirement.marker,
-                source: None,
+                path: None,
             });
         }

@@ -149,7 +149,7 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
                 extras: requirement.extras,
                 version_or_url: Some(VersionOrUrl::Url(requirement.url)),
                 marker: requirement.marker,
-                source: None,
+                path: None,
             });
         }

@@ -170,7 +170,7 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
                 extras: requirement.extras,
                 version_or_url: Some(VersionOrUrl::Url(requirement.url)),
                 marker: requirement.marker,
-                source: None,
+                path: None,
             });
         }

@@ -188,7 +188,7 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
                 extras: requirement.extras,
                 version_or_url: Some(VersionOrUrl::Url(requirement.url)),
                 marker: requirement.marker,
-                source: None,
+                path: None,
             });
         }
     }
@@ -217,7 +217,7 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
                 extras: requirement.extras,
                 version_or_url: Some(VersionOrUrl::Url(requirement.url)),
                 marker: requirement.marker,
-                source: None,
+                path: None,
             });
         }
     }
@@ -276,7 +276,7 @@ impl<'a, Context: BuildContext + Send + Sync> NamedRequirementsResolver<'a, Cont
                 extras: requirement.extras,
                 version_or_url: Some(VersionOrUrl::Url(requirement.url)),
                 marker: requirement.marker,
-                source: None,
+                path: None,
             })
         }
     }

@@ -251,7 +251,7 @@ impl NoSolutionError {
                 );
             }
             PubGrubPackage::Extra(_, _, _) => {}
-            PubGrubPackage::Package(name, _, _, _) => {
+            PubGrubPackage::Package(name, _, _) => {
                 // Avoid including available versions for packages that exist in the derivation
                 // tree, but were never visited during resolution. We _may_ have metadata for
                 // these packages, but it's non-deterministic, and omitting them ensures that
@@ -299,7 +299,7 @@ impl NoSolutionError {
     ) -> Self {
         let mut new = FxHashMap::default();
         for package in self.derivation_tree.packages() {
-            if let PubGrubPackage::Package(name, _, _, _) = package {
+            if let PubGrubPackage::Package(name, _, _) = package {
                 if let Some(entry) = unavailable_packages.get(name) {
                     let reason = entry.value();
                     new.insert(name.clone(), reason.clone());
@@ -318,7 +318,7 @@ impl NoSolutionError {
     ) -> Self {
         let mut new = FxHashMap::default();
         for package in self.derivation_tree.packages() {
-            if let PubGrubPackage::Package(name, _, _, _) = package {
+            if let PubGrubPackage::Package(name, _, _) = package {
                 if let Some(entry) = incomplete_packages.get(name) {
                     let versions = entry.value();
                     for entry in versions {

@@ -188,16 +188,7 @@ fn to_pubgrub(
     match requirement.version_or_url.as_ref() {
         // The requirement has no specifier (e.g., `flask`).
         None => Ok((
-            PubGrubPackage::from_package(
-                requirement.name.clone(),
-                extra,
-                requirement
-                    .source
-                    .as_ref()
-                    .map(|s| vec![s.clone()])
-                    .unwrap_or_default(),
-                urls,
-            ),
+            PubGrubPackage::from_package(requirement.name.clone(), extra, urls),
             Range::full(),
         )),

@@ -226,16 +217,7 @@ fn to_pubgrub(
         };

         Ok((
-            PubGrubPackage::from_package(
-                requirement.name.clone(),
-                extra,
-                requirement
-                    .source
-                    .as_ref()
-                    .map(|s| vec![s.clone()])
-                    .unwrap_or_default(),
-                urls,
-            ),
+            PubGrubPackage::from_package(requirement.name.clone(), extra, urls),
             version,
         ))
     }
@@ -258,16 +240,7 @@ fn to_pubgrub(
     }

     Ok((
-        PubGrubPackage::Package(
-            requirement.name.clone(),
-            extra,
-            Some(expected.clone()),
-            requirement
-                .source
-                .as_ref()
-                .map(|s| vec![s.clone()])
-                .unwrap_or_default(),
-        ),
+        PubGrubPackage::Package(requirement.name.clone(), extra, Some(expected.clone())),
         Range::full(),
     ))
 }

@@ -1,7 +1,7 @@
 use derivative::Derivative;
 use pep508_rs::VerbatimUrl;
 use std::hash::Hash;
-use uv_normalize::{ExtraName, PackageName, Source};
+use uv_normalize::{ExtraName, PackageName};

 use crate::resolver::Urls;

@@ -13,6 +13,7 @@ use crate::resolver::Urls;
 /// package (e.g., `black[colorama]`), and mark it as a dependency of the real package (e.g.,
 /// `black`). We then discard the virtual packages at the end of the resolution process.
 #[derive(Debug, Clone, Eq, Derivative)]
+#[derivative(PartialEq, Hash)]
 pub enum PubGrubPackage {
     /// The root package, which is used to start the resolution process.
     Root(Option<PackageName>),
@@ -59,9 +60,6 @@ pub enum PubGrubPackage {
         /// version before the registry version. So we could just error if we visit a URL variant
         /// _after_ a registry variant.
         Option<VerbatimUrl>,
-        /// Names of the file sources of this package (e.g. requirements.in), or empty for dependencies
-        /// only from other libraries
-        Vec<Source>,
     ),
     /// A proxy package to represent a dependency with an extra (e.g., `black[colorama]`).
     ///
@@ -81,79 +79,12 @@ pub enum PubGrubPackage {

 impl PubGrubPackage {
     /// Create a [`PubGrubPackage`] from a package name and optional extra name.
-    pub(crate) fn from_package(
-        name: PackageName,
-        extra: Option<ExtraName>,
-        source: Vec<Source>,
-        urls: &Urls,
-    ) -> Self {
+    pub(crate) fn from_package(name: PackageName, extra: Option<ExtraName>, urls: &Urls) -> Self {
         let url = urls.get(&name).cloned();
         if let Some(extra) = extra {
             Self::Extra(name, extra, url)
         } else {
-            Self::Package(name, extra, url, source)
-        }
-    }
-}
-
-// Not using the default implementation, because otherwise two deps with different sources
-// are treated as two things
-impl Hash for PubGrubPackage {
-    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
-        match self {
-            PubGrubPackage::Root(name) => {
-                "Root".hash(state);
-                name.hash(state);
-            }
-            PubGrubPackage::Python(version) => {
-                "Python".hash(state);
-                version.hash(state);
-            }
-            PubGrubPackage::Package(name, extra, url, _source) => {
-                "Package".hash(state);
-                name.hash(state);
-                extra.hash(state);
-                url.hash(state);
-            }
-            PubGrubPackage::Extra(name, extra, url) => {
-                "Extra".hash(state);
-                name.hash(state);
-                extra.hash(state);
-                url.hash(state);
-            }
-        }
-    }
-}
-
-impl PartialEq for PubGrubPackage {
-    fn eq(&self, other: &Self) -> bool {
-        match self {
-            PubGrubPackage::Root(name) => {
-                if let PubGrubPackage::Root(other_name) = other {
-                    return other_name == name;
-                }
-                false
-            }
-            PubGrubPackage::Python(version) => {
-                if let PubGrubPackage::Python(other_version) = other {
-                    return other_version == version;
-                }
-                false
-            }
-            PubGrubPackage::Package(name, extra, url, _source) => {
-                if let PubGrubPackage::Package(other_name, other_extra, other_url, _other_source) =
-                    other
-                {
-                    return other_name == name && other_extra == extra && other_url == url;
-                }
-                false
-            }
-            PubGrubPackage::Extra(name, extra, url) => {
-                if let PubGrubPackage::Extra(other_name, other_extra, other_url) = other {
-                    return other_name == name && other_extra == extra && other_url == url;
-                }
-                false
-            }
+            Self::Package(name, extra, url)
         }
     }
 }

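With the `Vec<Source>` payload gone from `Package`, the hand-written `Hash` and `PartialEq` impls above, which existed only to ignore that field, can be dropped and the traits derived again (the hunk appears to restore a `#[derivative(PartialEq, Hash)]` attribute for this). A stand-in demonstration of the design point using a plain `#[derive]` and simplified field types:

    use std::collections::HashSet;

    // Stand-in three-field package variant; once no field has to be ignored,
    // derived PartialEq/Eq/Hash give the same identity semantics that the
    // removed manual impls implemented by hand.
    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
    enum PubGrubPackage {
        Package(String, Option<String>, Option<String>),
    }

    fn main() {
        let a = PubGrubPackage::Package("flask".into(), None, None);
        let b = PubGrubPackage::Package("flask".into(), None, None);
        assert_eq!(a, b);

        // Derived Hash agrees with derived PartialEq, so the two equal values
        // collapse to a single entry when used as a set or map key.
        let set: HashSet<PubGrubPackage> = [a, b].into_iter().collect();
        assert_eq!(set.len(), 1);
    }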
@@ -28,7 +28,7 @@ impl PubGrubPriorities {
             PubGrubPackage::Root(_) => {}
             PubGrubPackage::Python(_) => {}

-            PubGrubPackage::Extra(name, _, None) | PubGrubPackage::Package(name, _, None, _) => {
+            PubGrubPackage::Extra(name, _, None) | PubGrubPackage::Package(name, _, None) => {
                 match self.0.entry(name.clone()) {
                     std::collections::hash_map::Entry::Occupied(mut entry) => {
                         // Preserve the original index.
@@ -61,8 +61,7 @@ impl PubGrubPriorities {
                     }
                 }
             }
-            PubGrubPackage::Extra(name, _, Some(_))
-            | PubGrubPackage::Package(name, _, Some(_), _) => {
+            PubGrubPackage::Extra(name, _, Some(_)) | PubGrubPackage::Package(name, _, Some(_)) => {
                 match self.0.entry(name.clone()) {
                     std::collections::hash_map::Entry::Occupied(mut entry) => {
                         // Preserve the original index.
@@ -96,7 +95,7 @@ impl PubGrubPriorities {
             PubGrubPackage::Root(_) => Some(PubGrubPriority::Root),
             PubGrubPackage::Python(_) => Some(PubGrubPriority::Root),
             PubGrubPackage::Extra(name, _, _) => self.0.get(name).copied(),
-            PubGrubPackage::Package(name, _, _, _) => self.0.get(name).copied(),
+            PubGrubPackage::Package(name, _, _) => self.0.get(name).copied(),
         }
     }
 }

@@ -64,8 +64,6 @@ pub struct ResolutionGraph {
     editables: Editables,
     /// Any diagnostics that were encountered while building the graph.
     diagnostics: Vec<Diagnostic>,
-    /// Source files for dependencies
-    sources: FxHashMap<PackageName, Vec<Source>>,
 }

 impl ResolutionGraph {
@@ -87,24 +85,13 @@ impl ResolutionGraph {
             FxHashMap::with_capacity_and_hasher(selection.len(), BuildHasherDefault::default());
         let mut extras = FxHashMap::default();
         let mut diagnostics = Vec::new();
-        let mut sources: FxHashMap<PackageName, Vec<Source>> = FxHashMap::default();
-
-        let mut insert_source = |package_name: &PackageName, source_names: &Vec<Source>| {
-            for source_name in source_names {
-                if let Some(source_packages) = sources.get_mut(package_name) {
-                    source_packages.push(source_name.clone());
-                } else {
-                    sources.insert(package_name.clone(), vec![source_name.clone()]);
-                }
-            }
-        };

         // Add every package to the graph.
         let mut inverse =
             FxHashMap::with_capacity_and_hasher(selection.len(), BuildHasherDefault::default());
         for (package, version) in selection {
             match package {
-                PubGrubPackage::Package(package_name, None, None, source) => {
+                PubGrubPackage::Package(package_name, None, None) => {
                     // Create the distribution.
                     let pinned_package = if let Some((editable, _)) = editables.get(package_name) {
                         Dist::from_editable(package_name.clone(), editable.clone())?.into()
@@ -136,10 +123,8 @@ impl ResolutionGraph {
                     // Add the distribution to the graph.
                     let index = petgraph.add_node(pinned_package);
                     inverse.insert(package_name, index);
-
-                    insert_source(package_name, source);
                 }
-                PubGrubPackage::Package(package_name, None, Some(url), source) => {
+                PubGrubPackage::Package(package_name, None, Some(url)) => {
                     // Create the distribution.
                     let pinned_package = if let Some((editable, _)) = editables.get(package_name) {
                         Dist::from_editable(package_name.clone(), editable.clone())?
@@ -169,9 +154,8 @@ impl ResolutionGraph {
                     // Add the distribution to the graph.
                     let index = petgraph.add_node(pinned_package.into());
                     inverse.insert(package_name, index);
-                    insert_source(package_name, source);
                 }
-                PubGrubPackage::Package(package_name, Some(extra), None, _) => {
+                PubGrubPackage::Package(package_name, Some(extra), None) => {
                     // Validate that the `extra` exists.
                     let dist = PubGrubDistribution::from_registry(package_name, version);

@@ -225,7 +209,7 @@ impl ResolutionGraph {
                         }
                     }
                 }
-                PubGrubPackage::Package(package_name, Some(extra), Some(url), _) => {
+                PubGrubPackage::Package(package_name, Some(extra), Some(url)) => {
                     // Validate that the `extra` exists.
                     let dist = PubGrubDistribution::from_url(package_name, url);

@@ -300,10 +284,10 @@ impl ResolutionGraph {
                 continue;
             }

-            let PubGrubPackage::Package(self_package, _, _, _) = self_package else {
+            let PubGrubPackage::Package(self_package, _, _) = self_package else {
                 continue;
             };
-            let PubGrubPackage::Package(dependency_package, _, _, _) = dependency_package
+            let PubGrubPackage::Package(dependency_package, _, _) = dependency_package
             else {
                 continue;
             };
@@ -332,7 +316,6 @@ impl ResolutionGraph {
             extras,
             editables,
             diagnostics,
-            sources,
         })
     }

@@ -539,12 +522,15 @@ pub struct DisplayResolutionGraph<'a> {
     /// The style of annotation comments, used to indicate the dependencies that requested each
     /// package.
     annotation_style: AnnotationStyle,
+
+    sources: FxHashMap<PackageName, Vec<Source>>,
 }

 impl<'a> From<&'a ResolutionGraph> for DisplayResolutionGraph<'a> {
     fn from(resolution: &'a ResolutionGraph) -> Self {
         Self::new(
             resolution,
+            FxHashMap::default(),
             &[],
             false,
             false,
@@ -560,6 +546,7 @@ impl<'a> DisplayResolutionGraph<'a> {
     #[allow(clippy::fn_params_excessive_bools)]
     pub fn new(
         underlying: &'a ResolutionGraph,
+        sources: FxHashMap<PackageName, Vec<Source>>,
         no_emit_packages: &'a [PackageName],
         show_hashes: bool,
         include_extras: bool,
@@ -575,6 +562,7 @@ impl<'a> DisplayResolutionGraph<'a> {
             include_annotations,
             include_index_annotation,
             annotation_style,
+            sources,
         }
     }
 }
@@ -714,12 +702,7 @@ impl std::fmt::Display for DisplayResolutionGraph<'_> {
                 .map(|edge| &self.resolution.petgraph[edge.source()])
                 .collect::<Vec<_>>();
             edges.sort_unstable_by_key(|package| package.name());
-            let source = self
-                .resolution
-                .sources
-                .get(node.name())
-                .cloned()
-                .unwrap_or(vec![]);
+            let source = self.sources.get(node.name()).cloned().unwrap_or(vec![]);

             match self.annotation_style {
                 AnnotationStyle::Line => {

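After these hunks, `ResolutionGraph` no longer stores a sources map; the caller passes one to `DisplayResolutionGraph::new`, and the `Display` impl reads `self.sources.get(node.name())` instead of `self.resolution.sources`. A rough stand-in of that lookup-and-annotate shape, with plain `HashMap`/`String` types and an illustrative annotation format (not uv's actual output):

    use std::collections::HashMap;

    // Stand-in for a display adapter that owns an externally supplied sources map
    // instead of reading it off the resolution graph.
    struct DisplayResolution {
        sources: HashMap<String, Vec<String>>,
    }

    impl DisplayResolution {
        fn annotate(&self, package: &str) -> String {
            // Per-package lookup, mirroring `self.sources.get(node.name())` above.
            let sources = self.sources.get(package).cloned().unwrap_or_default();
            if sources.is_empty() {
                package.to_string()
            } else {
                format!("{package}    # from {}", sources.join(", "))
            }
        }
    }

    fn main() {
        let display = DisplayResolution {
            sources: HashMap::from([("flask".to_string(), vec!["requirements.in".to_string()])]),
        };
        println!("{}", display.annotate("flask"));
        println!("{}", display.annotate("itsdangerous"));
    }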
@@ -53,7 +53,7 @@ impl BatchPrefetcher {
         index: &InMemoryIndex,
         selector: &CandidateSelector,
     ) -> anyhow::Result<(), ResolveError> {
-        let PubGrubPackage::Package(package_name, None, None, _source) = &next else {
+        let PubGrubPackage::Package(package_name, None, None) = &next else {
             return Ok(());
         };

@@ -160,7 +160,7 @@ impl BatchPrefetcher {
     /// Each time we tried a version for a package, we register that here.
     pub(crate) fn version_tried(&mut self, package: PubGrubPackage) {
         // Only track base packages, no virtual packages from extras.
-        if matches!(package, PubGrubPackage::Package(_, Some(_), _, _)) {
+        if matches!(package, PubGrubPackage::Package(_, Some(_), _)) {
             return;
         }
         *self.tried_versions.entry(package).or_default() += 1;

@@ -364,7 +364,7 @@ impl<
             .expect("a package was chosen but we don't have a term.");

         let reason = {
-            if let PubGrubPackage::Package(ref package_name, _, _, _) = next {
+            if let PubGrubPackage::Package(ref package_name, _, _) = next {
                 // Check if the decision was due to the package being unavailable
                 self.unavailable_packages
                     .get(package_name)
@@ -526,7 +526,7 @@ impl<
             PubGrubPackage::Root(_) => {}
             PubGrubPackage::Python(_) => {}
             PubGrubPackage::Extra(_, _, _) => {}
-            PubGrubPackage::Package(name, _extra, None, _) => {
+            PubGrubPackage::Package(name, _extra, None) => {
                 // Verify that the package is allowed under the hash-checking policy.
                 if !self.hasher.allows_package(name) {
                     return Err(ResolveError::UnhashedPackage(name.clone()));
@@ -537,7 +537,7 @@ impl<
                     request_sink.send(Request::Package(name.clone())).await?;
                 }
             }
-            PubGrubPackage::Package(name, _extra, Some(url), _) => {
+            PubGrubPackage::Package(name, _extra, Some(url)) => {
                 // Verify that the package is allowed under the hash-checking policy.
                 if !self.hasher.allows_url(url) {
                     return Err(ResolveError::UnhashedPackage(name.clone()));
@@ -562,7 +562,7 @@ impl<
         // Iterate over the potential packages, and fetch file metadata for any of them. These
         // represent our current best guesses for the versions that we _might_ select.
         for (package, range) in packages {
            let PubGrubPackage::Package(package_name, None, None, _) = package else {
                continue;
            };
            request_sink
@@ -606,7 +606,7 @@ impl<
            }

            PubGrubPackage::Extra(package_name, _, Some(url))
-           | PubGrubPackage::Package(package_name, _, Some(url), _) => {
+           | PubGrubPackage::Package(package_name, _, Some(url)) => {
                debug!("Searching for a compatible version of {package} @ {url} ({range})");

                // If the dist is an editable, return the version from the editable metadata.
@@ -697,7 +697,7 @@ impl<
            }

            PubGrubPackage::Extra(package_name, _, None)
-           | PubGrubPackage::Package(package_name, _, None, _) => {
+           | PubGrubPackage::Package(package_name, _, None) => {
                // Wait for the metadata to be available.
                let versions_response = self
                    .index
@@ -774,7 +774,7 @@ impl<
        let version = candidate.version().clone();

        // Emit a request to fetch the metadata for this version.
-       if matches!(package, PubGrubPackage::Package(_, _, _, _)) {
+       if matches!(package, PubGrubPackage::Package(_, _, _)) {
            if self.index.distributions.register(candidate.version_id()) {
                let request = match dist.for_resolution() {
                    ResolvedDistRef::Installable(dist) => Request::Dist(dist.clone()),
@@ -831,12 +831,8 @@ impl<

        // Add a dependency on each editable.
        for (editable, metadata) in self.editables.iter() {
-           let package = PubGrubPackage::from_package(
-               metadata.name.clone(),
-               None,
-               vec![],
-               &self.urls,
-           );
+           let package =
+               PubGrubPackage::from_package(metadata.name.clone(), None, &self.urls);
            let version = Range::singleton(metadata.version.clone());

            // Update the package priorities.
@@ -851,7 +847,6 @@ impl<
                    PubGrubPackage::from_package(
                        metadata.name.clone(),
                        Some(extra.clone()),
-                       vec![],
                        &self.urls,
                    ),
                    Range::singleton(metadata.version.clone()),
@@ -864,7 +859,7 @@ impl<

            PubGrubPackage::Python(_) => Ok(Dependencies::Available(Vec::default())),

-           PubGrubPackage::Package(package_name, extra, url, _source) => {
+           PubGrubPackage::Package(package_name, extra, url) => {
                // If we're excluding transitive dependencies, short-circuit.
                if self.dependency_mode.is_direct() {
                    // If an extra is provided, wait for the metadata to be available, since it's
@@ -1027,16 +1022,11 @@ impl<
            // Add a dependency on both the extra and base package.
            PubGrubPackage::Extra(package_name, extra, url) => Ok(Dependencies::Available(vec![
                (
-                   PubGrubPackage::Package(package_name.clone(), None, url.clone(), vec![]),
+                   PubGrubPackage::Package(package_name.clone(), None, url.clone()),
                    Range::singleton(version.clone()),
                ),
                (
-                   PubGrubPackage::Package(
-                       package_name.clone(),
-                       Some(extra.clone()),
-                       url.clone(),
-                       vec![],
-                   ),
+                   PubGrubPackage::Package(package_name.clone(), Some(extra.clone()), url.clone()),
                    Range::singleton(version.clone()),
                ),
            ])),
@@ -1254,10 +1244,10 @@ impl<
            PubGrubPackage::Root(_) => {}
            PubGrubPackage::Python(_) => {}
            PubGrubPackage::Extra(_, _, _) => {}
-           PubGrubPackage::Package(package_name, _extra, Some(url), _) => {
+           PubGrubPackage::Package(package_name, _extra, Some(url)) => {
                reporter.on_progress(package_name, &VersionOrUrl::Url(url));
            }
-           PubGrubPackage::Package(package_name, _extra, None, _source) => {
+           PubGrubPackage::Package(package_name, _extra, None) => {
                reporter.on_progress(package_name, &VersionOrUrl::Version(version));
            }
        }

@@ -10,6 +10,7 @@ use anstream::{eprint, AutoStream, StripStream};
 use anyhow::{anyhow, Context, Result};
 use itertools::Itertools;
 use owo_colors::OwoColorize;
+use rustc_hash::FxHashMap;
 use tempfile::tempdir_in;
 use tracing::debug;

@@ -29,7 +30,7 @@ use uv_dispatch::BuildDispatch;
 use uv_fs::Simplified;
 use uv_installer::Downloader;
 use uv_interpreter::{find_best_python, find_requested_python, PythonEnvironment};
-use uv_normalize::{ExtraName, PackageName};
+use uv_normalize::{ExtraName, PackageName, Source};
 use uv_requirements::{
     upgrade::read_lockfile, ExtrasSpecification, LookaheadResolver, NamedRequirementsResolver,
     RequirementsSource, RequirementsSpecification, SourceTreeResolver,
@@ -344,6 +345,28 @@ pub(crate) async fn pip_compile(
         .resolve()
         .await?;

+    let mut sources: FxHashMap<PackageName, Vec<Source>> = FxHashMap::default();
+
+    let mut insert_source = |package_name: &PackageName, source: Source| {
+        if let Some(source_packages) = sources.get_mut(package_name) {
+            source_packages.push(source);
+        } else {
+            sources.insert(package_name.clone(), vec![source]);
+        }
+    };
+
+    for requirement in &requirements {
+        if let Some(path) = &requirement.path {
+            insert_source(&requirement.name, Source::Requirement(path.clone()));
+        }
+    }
+
+    for constraint in &constraints {
+        if let Some(path) = &constraint.path {
+            insert_source(&constraint.name, Source::Constraint(path.clone()));
+        }
+    }
+
     // Collect constraints and overrides.
     let constraints = Constraints::from_requirements(constraints);
     let overrides = Overrides::from_requirements(overrides);
@@ -557,6 +580,7 @@ pub(crate) async fn pip_compile(
         "{}",
         DisplayResolutionGraph::new(
             &resolution,
+            sources,
             &no_emit_packages,
             generate_hashes,
             include_extras,
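The `insert_source` closure added above is the same get-mut-or-insert pattern the graph builder used before; with the standard entry API the collection can be written a little more compactly. A stand-in sketch under simplified types (plain `HashMap` and a local `Source` enum rather than uv's `FxHashMap` and `uv_normalize::Source`):

    use std::collections::HashMap;

    #[derive(Debug, Clone)]
    enum Source {
        Requirement(String),
        Constraint(String),
    }

    fn main() {
        let mut sources: HashMap<String, Vec<Source>> = HashMap::new();

        // Equivalent of the `insert_source` closure in the hunk above: push into the
        // package's bucket, creating the bucket on first use.
        let mut insert_source = |name: &str, source: Source| {
            sources.entry(name.to_string()).or_default().push(source);
        };

        insert_source("flask", Source::Requirement("requirements.in".to_string()));
        insert_source("flask", Source::Constraint("constraints.txt".to_string()));
        println!("{sources:?}");
    }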