Merge branch 'main' into bojan/perf-metrics

Bojan Serafimov 2024-01-11 15:27:24 -05:00
commit 26edd03b05
60 changed files with 2509 additions and 569 deletions

View file

@@ -2,7 +2,7 @@ name: CI
on:
push:
branches: [ main ]
branches: [main]
pull_request:
workflow_dispatch:
@@ -37,13 +37,15 @@ jobs:
run: |
rustup component add clippy
- uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: "Clippy"
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
cargo-test:
strategy:
matrix:
os: [ ubuntu-latest ]
os: [ubuntu-latest]
runs-on: ${{ matrix.os }}
name: "cargo test | ${{ matrix.os }}"
steps:
@@ -51,7 +53,7 @@ jobs:
- name: "Install Python"
uses: actions/setup-python@v4
with:
python-version: |
python-version: |
3.7
3.8
3.9
@@ -60,11 +62,14 @@ jobs:
3.12
- name: "Install Rust toolchain"
run: rustup show
- uses: rui314/setup-mold@v1
- name: "Install cargo insta"
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: "Tests (Ubuntu)"
if: ${{ matrix.os == 'ubuntu-latest' }}
run: cargo insta test --all --all-features --unreferenced reject

View file

@@ -96,6 +96,8 @@ jobs:
with:
submodules: recursive
- uses: swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: Install cargo-dist
run: ${{ matrix.install_dist }}
- name: Build artifacts

Cargo.lock (generated)
View file

@@ -282,9 +282,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "base64"
version = "0.21.6"
version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c79fed4cdb43e993fcdadc7e58a09fd0e3e649c4436fa11da71c9f1f3ee7feb9"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
name = "bench"
@@ -519,9 +519,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.4.14"
version = "4.4.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33e92c5c1a78c62968ec57dbc2440366a2d6e5a23faf829970ff1585dc6b18e2"
checksum = "c12ed66a79a555082f595f7eb980d08669de95009dd4b3d61168c573ebe38fc9"
dependencies = [
"clap_builder",
"clap_derive",
@@ -529,9 +529,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.4.14"
version = "4.4.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4323769dc8a61e2c39ad7dc26f6f2800524691a44d74fe3d1071a5c24db6370"
checksum = "0f4645eab3431e5a8403a96bea02506a8b35d28cd0f0330977dd5d22f9c84f43"
dependencies = [
"anstream",
"anstyle",
@@ -1167,6 +1167,7 @@ dependencies = [
"anstream",
"camino",
"clap",
"directories",
"fs-err",
"platform-host",
"puffin-cache",
@@ -1182,9 +1183,9 @@ dependencies = [
[[package]]
name = "h2"
version = "0.3.22"
version = "0.3.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d6250322ef6e60f93f9a2162799302cd6f68f79f6e5d85c8c16f14d1d958178"
checksum = "b553656127a00601c8ae5590fcfdc118e4083a7924b6cf4ffc1ea4b99dc429d7"
dependencies = [
"bytes",
"fnv",
@@ -2028,9 +2029,7 @@ name = "pep440_rs"
version = "0.3.12"
dependencies = [
"indoc",
"once_cell",
"pyo3",
"regex",
"serde",
"tracing",
"unicode-width",
@@ -2172,7 +2171,7 @@ version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5699cc8a63d1aa2b1ee8e12b9ad70ac790d65788cd36101fa37f87ea46c4cef"
dependencies = [
"base64 0.21.6",
"base64 0.21.7",
"indexmap 2.1.0",
"line-wrap",
"quick-xml",
@@ -2260,7 +2259,7 @@ dependencies = [
[[package]]
name = "pubgrub"
version = "0.2.1"
source = "git+https://github.com/zanieb/pubgrub?rev=866c0f2a87fee1e8abe804d40a2ee934de0973d7#866c0f2a87fee1e8abe804d40a2ee934de0973d7"
source = "git+https://github.com/zanieb/pubgrub?rev=0e02ea9fc8d021fb6a6b9e77b09ade4332068f42#0e02ea9fc8d021fb6a6b9e77b09ade4332068f42"
dependencies = [
"indexmap 2.1.0",
"log",
@@ -2813,7 +2812,6 @@ dependencies = [
"pep440_rs 0.3.12",
"pep508_rs",
"puffin-normalize",
"puffin-warnings",
"regex",
"rfc2047-decoder",
"serde",
@@ -3032,7 +3030,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37b1ae8d9ac08420c66222fb9096fc5de435c3c48542bc5336c51892cffafb41"
dependencies = [
"async-compression",
"base64 0.21.6",
"base64 0.21.7",
"bytes",
"encoding_rs",
"futures-core",
@@ -3124,7 +3122,7 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e372613f15fc5171f9052b0c1fbafca5b1e5b0ba86aa13c9c39fd91ca1f7955"
dependencies = [
"base64 0.21.6",
"base64 0.21.7",
"charset",
"chumsky",
"memchr",
@@ -3211,7 +3209,7 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c"
dependencies = [
"base64 0.21.6",
"base64 0.21.7",
]
[[package]]
@@ -3881,9 +3879,9 @@ dependencies = [
[[package]]
name = "tracing-durations-export"
version = "0.1.0"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d6bb8898f56f636911130c78cc528338a2bb0426bdfb5a8fb523f98fc8da46d"
checksum = "b96372957860418808d5044039d88e6402e489b1d1f2a511a0dc201454268f73"
dependencies = [
"anyhow",
"fs-err",
@@ -4448,9 +4446,9 @@ checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
[[package]]
name = "winnow"
version = "0.5.33"
version = "0.5.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7520bbdec7211caa7c4e682eb1fbe07abe20cee6756b6e00f537c82c11816aa"
checksum = "b7cf47b659b318dccbd69cc4797a39ae128f533dce7902a1096044d1967b9c16"
dependencies = [
"memchr",
]

View file

@@ -55,7 +55,7 @@ owo-colors = { version = "3.5.0" }
petgraph = { version = "0.6.4" }
platform-info = { version = "2.0.2" }
plist = { version = "1.6.0" }
pubgrub = { git = "https://github.com/zanieb/pubgrub", rev = "866c0f2a87fee1e8abe804d40a2ee934de0973d7" }
pubgrub = { git = "https://github.com/zanieb/pubgrub", rev = "0e02ea9fc8d021fb6a6b9e77b09ade4332068f42" }
pyo3 = { version = "0.20.2" }
pyo3-log = { version = "0.9.0"}
pyproject-toml = { version = "0.8.1" }

View file

@@ -16,13 +16,13 @@ pub enum SourceDistExtension {
}
impl FromStr for SourceDistExtension {
type Err = SourceDistFilenameError;
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(match s {
"zip" => Self::Zip,
"tar.gz" => Self::TarGz,
other => return Err(SourceDistFilenameError::InvalidExtension(other.to_string())),
other => return Err(other.to_string()),
})
}
}
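For illustration (not part of the commit), a self-contained sketch of how the simplified `type Err = String` reads at a call site; the enum is re-declared here with only the pieces needed to compile:

use std::str::FromStr;

#[derive(Debug, PartialEq)]
enum SourceDistExtension {
    Zip,
    TarGz,
}

impl FromStr for SourceDistExtension {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(match s {
            "zip" => Self::Zip,
            "tar.gz" => Self::TarGz,
            // The offending extension itself is the error payload.
            other => return Err(other.to_string()),
        })
    }
}

fn main() {
    assert_eq!("tar.gz".parse::<SourceDistExtension>(), Ok(SourceDistExtension::TarGz));
    assert_eq!("exe".parse::<SourceDistExtension>(), Err("exe".to_string()));
}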
@@ -66,31 +66,38 @@ impl SourceDistFilename {
package_name: &PackageName,
) -> Result<Self, SourceDistFilenameError> {
let Some((stem, extension)) = SourceDistExtension::from_filename(filename) else {
return Err(SourceDistFilenameError::InvalidExtension(
filename.to_string(),
));
return Err(SourceDistFilenameError {
filename: filename.to_string(),
kind: SourceDistFilenameErrorKind::Extension,
});
};
if stem.len() <= package_name.as_ref().len() + "-".len() {
return Err(SourceDistFilenameError::InvalidFilename {
return Err(SourceDistFilenameError {
filename: filename.to_string(),
package_name: package_name.to_string(),
kind: SourceDistFilenameErrorKind::Filename(package_name.clone()),
});
}
let actual_package_name = PackageName::from_str(&stem[..package_name.as_ref().len()])
.map_err(|err| {
SourceDistFilenameError::InvalidPackageName(filename.to_string(), err)
.map_err(|err| SourceDistFilenameError {
filename: filename.to_string(),
kind: SourceDistFilenameErrorKind::PackageName(err),
})?;
if &actual_package_name != package_name {
return Err(SourceDistFilenameError::InvalidFilename {
return Err(SourceDistFilenameError {
filename: filename.to_string(),
package_name: package_name.to_string(),
kind: SourceDistFilenameErrorKind::Filename(package_name.clone()),
});
}
// We checked the length above
let version = Version::from_str(&stem[package_name.as_ref().len() + "-".len()..])
.map_err(SourceDistFilenameError::InvalidVersion)?;
let version =
Version::from_str(&stem[package_name.as_ref().len() + "-".len()..]).map_err(|err| {
SourceDistFilenameError {
filename: filename.to_string(),
kind: SourceDistFilenameErrorKind::Version(err),
}
})?;
Ok(Self {
name: package_name.clone(),
@@ -107,18 +114,31 @@ impl Display for SourceDistFilename {
}
#[derive(Error, Debug, Clone)]
pub enum SourceDistFilenameError {
#[error("Source distribution name {filename} doesn't start with package name {package_name}")]
InvalidFilename {
filename: String,
package_name: String,
},
#[error("Source distributions filenames must end with .zip or .tar.gz, not {0}")]
InvalidExtension(String),
#[error("Source distribution filename version section is invalid: {0}")]
InvalidVersion(VersionParseError),
#[error("Source distribution filename has an invalid package name: {0}")]
InvalidPackageName(String, #[source] InvalidNameError),
pub struct SourceDistFilenameError {
filename: String,
kind: SourceDistFilenameErrorKind,
}
impl Display for SourceDistFilenameError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(
f,
"Failed to parse source distribution filename {}: {}",
self.filename, self.kind
)
}
}
#[derive(Error, Debug, Clone)]
enum SourceDistFilenameErrorKind {
#[error("Name doesn't start with package name {0}")]
Filename(PackageName),
#[error("Source distributions filenames must end with .zip or .tar.gz")]
Extension,
#[error("Version section is invalid")]
Version(#[from] VersionParseError),
#[error(transparent)]
PackageName(#[from] InvalidNameError),
}
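For illustration (not part of the commit), the refactored error shape in miniature: one public struct that always carries the filename, a private kind enum for the detail, and a Display impl that gives every variant the same "Failed to parse ..." prefix. The names `ParseError` and `ParseErrorKind` are hypothetical stand-ins:

use std::error::Error;
use std::fmt::{self, Display, Formatter};

#[derive(Debug)]
struct ParseError {
    filename: String,
    kind: ParseErrorKind,
}

#[derive(Debug)]
enum ParseErrorKind {
    Extension,
    Version(String),
}

impl Display for ParseError {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let detail = match &self.kind {
            ParseErrorKind::Extension => "must end with .zip or .tar.gz".to_string(),
            ParseErrorKind::Version(err) => format!("version section is invalid: {err}"),
        };
        // Every kind shares the uniform prefix, so call sites never repeat it.
        write!(f, "Failed to parse source distribution filename {}: {}", self.filename, detail)
    }
}

impl Error for ParseError {}

fn main() {
    let err = ParseError { filename: "flask.rar".to_string(), kind: ParseErrorKind::Extension };
    println!("{err}");
    // Failed to parse source distribution filename flask.rar: must end with .zip or .tar.gz
}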
#[cfg(test)]

View file

@@ -120,7 +120,7 @@ impl CachedDist {
}
impl CachedDirectUrlDist {
/// Initialize a [`CachedDirectUrlDist`] from a [`WheelFilename`], [`Url`], and [`Path`].
/// Initialize a [`CachedDirectUrlDist`] from a [`WheelFilename`], [`url::Url`], and [`Path`].
pub fn from_url(filename: WheelFilename, url: VerbatimUrl, path: PathBuf) -> Self {
Self {
filename,

View file

@@ -13,6 +13,10 @@ repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }
[[bin]]
name = "gourgeist"
required-features = ["cli"]
[lints]
workspace = true
@@ -23,12 +27,16 @@ puffin-interpreter = { path = "../puffin-interpreter" }
anstream = { workspace = true }
camino = { workspace = true }
clap = { workspace = true, features = ["derive"] }
clap = { workspace = true, features = ["derive"], optional = true }
directories = { workspace = true }
fs-err = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tempfile = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
tracing-subscriber = { workspace = true, optional = true }
which = { workspace = true }
[features]
cli = ["clap", "tracing-subscriber"]
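For illustration (not part of the commit), what the new `cli` feature gate means for code inside the crate: with `clap` and `tracing-subscriber` now optional, CLI-only items must be compiled conditionally. A hypothetical sketch of the pattern:

#[cfg(feature = "cli")]
fn run_cli() {
    // `clap` and `tracing-subscriber` are only linked when built with
    // `--features cli`, e.g. `cargo build --bin gourgeist --features cli`.
    println!("built with the cli feature");
}

fn main() {
    #[cfg(feature = "cli")]
    run_cli();
}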

View file

@@ -2,15 +2,13 @@
set -e
cd "$(git rev-parse --show-toplevel)"
virtualenv --version
#cargo build --profile profiling
cargo build --release #--features parallel
# Benchmarking trick! strip your binaries ٩( ∂‿∂ )۶
strip target/release/gourgeist
cargo build --profile profiling --bin gourgeist --features cli
echo "## Bare"
hyperfine --warmup 1 --prepare "rm -rf target/a" "virtualenv -p 3.11 --no-seed target/a" "target/release/gourgeist -p 3.11 --bare target/a"
echo "## Default"
hyperfine --warmup 1 --prepare "rm -rf target/a" "virtualenv -p 3.11 target/a" "target/release/gourgeist -p 3.11 target/a"
hyperfine --warmup 1 --shell none --prepare "rm -rf target/venv-benchmark" \
"target/profiling/gourgeist -p 3.11 target/venv-benchmark" \
"virtualenv -p 3.11 --no-seed target/venv-benchmark"

View file

@@ -5,6 +5,7 @@ use std::time::Instant;
use anstream::eprintln;
use camino::Utf8PathBuf;
use clap::Parser;
use directories::ProjectDirs;
use tracing::info;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
@@ -27,7 +28,11 @@ fn run() -> Result<(), gourgeist::Error> {
let location = cli.path.unwrap_or(Utf8PathBuf::from(".venv"));
let python = parse_python_cli(cli.python)?;
let platform = Platform::current()?;
let cache = Cache::temp()?;
let cache = if let Some(project_dirs) = ProjectDirs::from("", "", "gourgeist") {
Cache::from_path(project_dirs.cache_dir())?
} else {
Cache::from_path(".gourgeist_cache")?
};
let info = Interpreter::query(python.as_std_path(), platform, &cache).unwrap();
create_bare_venv(&location, &info)?;
Ok(())
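For illustration (not part of the commit), the cache-directory fallback above extracted into a helper; `directories::ProjectDirs::from` returns `None` when no home directory can be determined, hence the relative-path fallback:

use std::path::PathBuf;

use directories::ProjectDirs;

fn cache_dir() -> PathBuf {
    // Prefer the platform cache directory (e.g. ~/.cache/gourgeist on Linux),
    // falling back to a directory relative to the working directory.
    ProjectDirs::from("", "", "gourgeist")
        .map(|dirs| dirs.cache_dir().to_path_buf())
        .unwrap_or_else(|| PathBuf::from(".gourgeist_cache"))
}

fn main() {
    println!("cache at {}", cache_dir().display());
}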

View file

@@ -17,9 +17,7 @@ name = "pep440_rs"
crate-type = ["rlib", "cdylib"]
[dependencies]
once_cell = { workspace = true }
pyo3 = { workspace = true, optional = true, features = ["extension-module", "abi3-py37"] }
regex = { workspace = true }
serde = { workspace = true, features = ["derive"], optional = true }
tracing = { workspace = true, optional = true }
unicode-width = { workspace = true }

View file

@@ -12,10 +12,6 @@
//! assert!(version_specifiers.iter().all(|specifier| specifier.contains(&version)));
//! ```
//!
//! The error handling and diagnostics is a bit overdone because this my parser-and-diagnostics
//! learning project (which kinda failed because the byte based regex crate and char-based
//! diagnostics don't mix well)
//!
//! PEP 440 has a lot of unintuitive features, including:
//!
//! * An epoch that you can prefix the version with, e.g. `1!1.2.3`. Lower epoch always means lower
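For illustration (not part of the commit), the epoch rule in practice, using the `Version` parser this crate exposes and assuming its ordering implements PEP 440 precedence:

use std::str::FromStr;

use pep440_rs::Version;

fn main() {
    let calver = Version::from_str("2023.12").unwrap();
    let epoch_one = Version::from_str("1!1.2.3").unwrap();
    // `1!1.2.3` sorts above `2023.12`: the epoch is compared first.
    assert!(epoch_one > calver);
}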

View file

@@ -101,7 +101,6 @@ impl FromStr for Operator {
"<=" => Self::LessThanEqual,
">" => Self::GreaterThan,
">=" => Self::GreaterThanEqual,
// Should be forbidden by the regex if called from normal parsing
other => {
return Err(OperatorParseError {
got: other.to_string(),
@@ -666,8 +665,7 @@ impl FromStr for Version {
/// Parses a version such as `1.19`, `1.0a1`,`1.0+abc.5` or `1!2012.2`
///
/// Note that this variant doesn't allow the version to end with a star, see
/// [`Self::from_str_star`] if you want to parse versions for specifiers
/// Note that this doesn't allow wildcard versions.
fn from_str(version: &str) -> Result<Self, Self::Err> {
Parser::new(version.as_bytes()).parse()
}
@@ -2766,7 +2764,7 @@ mod tests {
}
#[test]
fn test_regex_mismatch() {
fn test_invalid_word() {
let result = Version::from_str("blergh");
assert_eq!(result.unwrap_err(), ErrorKind::NoLeadingNumber.into());
}

View file

@@ -1279,7 +1279,7 @@ mod tests {
}
#[test]
fn test_regex_mismatch() {
fn test_invalid_word() {
let result = VersionSpecifiers::from_str("blergh");
assert_eq!(
result.unwrap_err().inner.err,

View file

@@ -11,9 +11,7 @@ use std::process::Output;
use std::str::FromStr;
use std::sync::Arc;
use distribution_types::Resolution;
use fs_err as fs;
use fs_err::DirEntry;
use indoc::formatdoc;
use itertools::Itertools;
use once_cell::sync::Lazy;
@@ -26,10 +24,11 @@ use tokio::process::Command;
use tokio::sync::Mutex;
use tracing::{debug, info_span, instrument, Instrument};
use distribution_types::Resolution;
use pep508_rs::Requirement;
use puffin_extract::extract_source;
use puffin_interpreter::{Interpreter, Virtualenv};
use puffin_traits::{BuildContext, BuildKind, SourceBuildTrait};
use puffin_traits::{BuildContext, BuildKind, SetupPyStrategy, SourceBuildTrait};
/// e.g. `pygraphviz/graphviz_wrap.c:3020:10: fatal error: graphviz/cgraph.h: No such file or directory`
static MISSING_HEADER_RE: Lazy<Regex> = Lazy::new(|| {
@@ -38,11 +37,22 @@ static MISSING_HEADER_RE: Lazy<Regex> = Lazy::new(|| {
)
.unwrap()
});
/// e.g. `/usr/bin/ld: cannot find -lncurses: No such file or directory`
static LD_NOT_FOUND_RE: Lazy<Regex> = Lazy::new(|| {
Regex::new(r"/usr/bin/ld: cannot find -l([a-zA-Z10-9]+): No such file or directory").unwrap()
});
/// The default backend to use when PEP 517 is used without a `build-system` section.
static DEFAULT_BACKEND: Lazy<Pep517Backend> = Lazy::new(|| Pep517Backend {
backend: "setuptools.build_meta:__legacy__".to_string(),
backend_path: None,
requirements: vec![
Requirement::from_str("wheel").unwrap(),
Requirement::from_str("setuptools >= 40.8.0").unwrap(),
],
});
#[derive(Error, Debug)]
pub enum Error {
#[error(transparent)]
@@ -89,8 +99,6 @@ pub enum MissingLibrary {
#[derive(Debug, Error)]
pub struct MissingHeaderCause {
missing_library: MissingLibrary,
// I've picked this over the better readable package name to make clear that you need to
// look for the build dependencies of that version or git commit respectively
package_id: String,
}
@@ -109,7 +117,7 @@ impl Display for MissingHeaderCause {
f,
"This error likely indicates that you need to install the library that provides a shared library \
for {library} for {package_id} (e.g. lib{library}-dev)",
library=library, package_id=self.package_id
library = library, package_id = self.package_id
)
}
}
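For illustration (not part of the commit), how a linker line is classified by a regex like `LD_NOT_FOUND_RE` above (character class tidied to `[a-zA-Z0-9]` here); assumes the `regex` and `once_cell` crates:

use once_cell::sync::Lazy;
use regex::Regex;

static LD_NOT_FOUND_RE: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"/usr/bin/ld: cannot find -l([a-zA-Z0-9]+): No such file or directory").unwrap()
});

/// Extract the missing library name, if the line matches.
fn missing_library(line: &str) -> Option<String> {
    LD_NOT_FOUND_RE.captures(line).map(|caps| caps[1].to_string())
}

fn main() {
    let line = "/usr/bin/ld: cannot find -lncurses: No such file or directory";
    assert_eq!(missing_library(line), Some("ncurses".to_string()));
    // A hint like "install libncurses-dev" can then be attached to the build error.
}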
@@ -171,7 +179,7 @@ pub struct PyProjectToml {
}
/// `[build-backend]` from pyproject.toml
#[derive(Debug)]
#[derive(Debug, Clone, PartialEq, Eq)]
struct Pep517Backend {
/// The build backend string such as `setuptools.build_meta:__legacy__` or `maturin` from
/// `build-backend.backend` in pyproject.toml
@@ -222,7 +230,7 @@ impl Pep517Backend {
#[derive(Debug, Default, Clone)]
pub struct SourceBuildContext {
/// Cache the first resolution of `pip`, `setuptools` and `wheel` we made for setup.py (and
/// some PEP 517) builds so we can reuse it
/// some PEP 517) builds so we can reuse it.
setup_py_resolution: Arc<Mutex<Option<Resolution>>>,
}
@@ -234,8 +242,9 @@ pub struct SourceBuildContext {
pub struct SourceBuild {
temp_dir: TempDir,
source_tree: PathBuf,
/// `Some` if this is a PEP 517 build
/// If performing a PEP 517 build, the backend to use.
pep517_backend: Option<Pep517Backend>,
/// The virtual environment in which to build the source distribution.
venv: Virtualenv,
/// Populated if `prepare_metadata_for_build_wheel` was called.
///
@@ -258,6 +267,7 @@ impl SourceBuild {
/// contents from an archive if necessary.
///
/// `source_dist` is for error reporting only.
#[allow(clippy::too_many_arguments)]
pub async fn setup(
source: &Path,
subdirectory: Option<&Path>,
@@ -265,6 +275,7 @@
build_context: &impl BuildContext,
source_build_context: SourceBuildContext,
package_id: String,
setup_py: SetupPyStrategy,
build_kind: BuildKind,
) -> Result<SourceBuild, Error> {
let temp_dir = tempdir()?;
@@ -283,6 +294,8 @@
source_root
};
let default_backend: Pep517Backend = DEFAULT_BACKEND.clone();
// Check if we have a PEP 517 build backend.
let pep517_backend = match fs::read_to_string(source_tree.join("pyproject.toml")) {
Ok(toml) => {
@@ -306,74 +319,91 @@
} else {
// If a `pyproject.toml` is present, but `[build-system]` is missing, proceed with
// a PEP 517 build using the default backend, to match `pip` and `build`.
Some(Pep517Backend {
backend: "setuptools.build_meta:__legacy__".to_string(),
backend_path: None,
requirements: vec![
Requirement::from_str("wheel").unwrap(),
Requirement::from_str("setuptools >= 40.8.0").unwrap(),
],
})
Some(default_backend.clone())
}
}
Err(err) if err.kind() == io::ErrorKind::NotFound => {
// We require either a `pyproject.toml` or a `setup.py` file at the top level.
if !source_tree.join("setup.py").is_file() {
return Err(Error::InvalidSourceDist(
"The archive contains neither a `pyproject.toml` nor a `setup.py` file at the top level"
.to_string(),
));
}
// If no `pyproject.toml` is present, by default, proceed with a PEP 517 build using
// the default backend, to match `build`. `pip` uses `setup.py` directly in this
// case (which we allow via `SetupPyStrategy::Setuptools`), but plans to make PEP
// 517 builds the default in the future.
// See: https://github.com/pypa/pip/issues/9175.
match setup_py {
SetupPyStrategy::Pep517 => Some(default_backend.clone()),
SetupPyStrategy::Setuptools => None,
}
}
Err(err) if err.kind() == io::ErrorKind::NotFound => None,
Err(err) => return Err(err.into()),
};
let venv = gourgeist::create_venv(&temp_dir.path().join(".venv"), interpreter.clone())?;
// Setup the build environment using PEP 517 or the legacy setuptools backend.
if let Some(pep517_backend) = pep517_backend.as_ref() {
let resolved_requirements = build_context
.resolve(&pep517_backend.requirements)
.await
.map_err(|err| {
Error::RequirementsInstall("build-system.requires (resolve)", err)
})?;
build_context
.install(&resolved_requirements, &venv)
.await
.map_err(|err| {
Error::RequirementsInstall("build-system.requires (install)", err)
})?;
// Setup the build environment.
let resolved_requirements = if let Some(pep517_backend) = pep517_backend.as_ref() {
if pep517_backend.requirements == default_backend.requirements {
let mut resolution = source_build_context.setup_py_resolution.lock().await;
if let Some(resolved_requirements) = &*resolution {
resolved_requirements.clone()
} else {
let resolved_requirements = build_context
.resolve(&default_backend.requirements)
.await
.map_err(|err| {
Error::RequirementsInstall("setup.py build (resolve)", err)
})?;
*resolution = Some(resolved_requirements.clone());
resolved_requirements
}
} else {
build_context
.resolve(&pep517_backend.requirements)
.await
.map_err(|err| {
Error::RequirementsInstall("build-system.requires (resolve)", err)
})?
}
} else {
let requirements = vec![
Requirement::from_str("wheel").unwrap(),
Requirement::from_str("setuptools").unwrap(),
Requirement::from_str("pip").unwrap(),
];
// Install default requirements for `setup.py`-based builds.
let mut resolution = source_build_context.setup_py_resolution.lock().await;
let resolved_requirements = if let Some(resolved_requirements) = &*resolution {
if let Some(resolved_requirements) = &*resolution {
resolved_requirements.clone()
} else {
let resolved_requirements = build_context
.resolve(&requirements)
.resolve(&default_backend.requirements)
.await
.map_err(|err| Error::RequirementsInstall("setup.py build (resolve)", err))?;
*resolution = Some(resolved_requirements.clone());
resolved_requirements
};
build_context
.install(&resolved_requirements, &venv)
.await
.map_err(|err| Error::RequirementsInstall("setup.py build (install)", err))?;
}
};
build_context
.install(&resolved_requirements, &venv)
.await
.map_err(|err| Error::RequirementsInstall("build-system.requires (install)", err))?;
// If we're using the default backend configuration, skip `get_requires_for_build_*`, since
// we already installed the requirements above.
if let Some(pep517_backend) = &pep517_backend {
create_pep517_build_environment(
&source_tree,
&venv,
pep517_backend,
build_context,
&package_id,
build_kind,
)
.await?;
} else if !source_tree.join("setup.py").is_file() {
return Err(Error::InvalidSourceDist(
"The archive contains neither a `pyproject.toml` nor a `setup.py` file at the top level"
.to_string(),
));
if pep517_backend != &default_backend {
create_pep517_build_environment(
&source_tree,
&venv,
pep517_backend,
build_context,
&package_id,
build_kind,
)
.await?;
}
}
Ok(Self {
@@ -389,15 +419,12 @@ impl SourceBuild {
/// Try calling `prepare_metadata_for_build_wheel` to get the metadata without executing the
/// actual build.
///
/// TODO(konstin): Return the actual metadata instead of the dist-info dir.
pub async fn get_metadata_without_build(&mut self) -> Result<Option<PathBuf>, Error> {
// setup.py builds don't support this.
let Some(pep517_backend) = &self.pep517_backend else {
return Ok(None);
};
// We've already called this method, but return the existing result is easier than erroring
// We've already called this method; return the existing result.
if let Some(metadata_dir) = &self.metadata_directory {
return Ok(Some(metadata_dir.clone()));
}
@@ -505,7 +532,7 @@ impl SourceBuild {
));
}
let dist = fs::read_dir(self.source_tree.join("dist"))?;
let dist_dir = dist.collect::<io::Result<Vec<DirEntry>>>()?;
let dist_dir = dist.collect::<io::Result<Vec<fs_err::DirEntry>>>()?;
let [dist_wheel] = dist_dir.as_slice() else {
return Err(Error::from_command_output(
format!(
@@ -624,8 +651,8 @@ async fn create_pep517_build_environment(
"#, pep517_backend.backend_import(), build_kind
};
let span = info_span!(
"get_requires_for_build_wheel",
script="build_wheel",
"run_python_script",
script=format!("get_requires_for_build_{}", build_kind),
python_version = %venv.interpreter().version()
);
let output = run_python_script(venv, &script, source_tree)
@@ -646,6 +673,7 @@
.map_err(|err| err.to_string())
.and_then(|last_line| last_line.ok_or("Missing message".to_string()))
.and_then(|message| serde_json::from_str(&message).map_err(|err| err.to_string()));
let extra_requires: Vec<Requirement> = extra_requires.map_err(|err| {
Error::from_command_output(
format!(
@@ -655,14 +683,14 @@
package_id,
)
})?;
// Some packages (such as tqdm 4.66.1) list only extra requires that have already been part of
// the pyproject.toml requires (in this case, `wheel`). We can skip doing the whole resolution
// and installation again.
// TODO(konstin): Do we still need this when we have a fast resolver?
if !extra_requires.is_empty()
&& !extra_requires
.iter()
.all(|req| pep517_backend.requirements.contains(req))
if extra_requires
.iter()
.any(|req| !pep517_backend.requirements.contains(req))
{
debug!("Installing extra requirements for build backend");
let requirements: Vec<Requirement> = pep517_backend
@@ -681,6 +709,7 @@
.await
.map_err(|err| Error::RequirementsInstall("build-system.requires (install)", err))?;
}
Ok(())
}
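For illustration (not part of the commit), the memoization used for the default-backend resolution above, reduced to a standalone sketch; `Resolution` is stood in by `Vec<String>`, and `tokio` is assumed for the async mutex:

use std::sync::Arc;

use tokio::sync::Mutex;

type Resolution = Vec<String>; // stand-in for the real `Resolution` type

async fn resolve_default_requirements(cache: &Arc<Mutex<Option<Resolution>>>) -> Resolution {
    let mut slot = cache.lock().await;
    if let Some(cached) = &*slot {
        // A previous source build already resolved the default requirements.
        return cached.clone();
    }
    // Stand-in for `build_context.resolve(&default_backend.requirements)`.
    let fresh = vec!["wheel".to_string(), "setuptools >= 40.8.0".to_string()];
    *slot = Some(fresh.clone());
    fresh
}

#[tokio::main]
async fn main() {
    let cache = Arc::new(Mutex::new(None));
    let first = resolve_default_requirements(&cache).await;
    let second = resolve_default_requirements(&cache).await; // served from the cache
    assert_eq!(first, second);
}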

View file

@@ -392,7 +392,7 @@ pub enum CacheBucket {
/// * `simple-v0/pypi/<package_name>.msgpack`
/// * `simple-v0/<digest(index_url)>/<package_name>.msgpack`
///
/// The response is parsed into [`puffin_client::SimpleMetadata`] before storage.
/// The response is parsed into `puffin_client::SimpleMetadata` before storage.
Simple,
}

View file

@@ -23,6 +23,7 @@ use puffin_installer::Downloader;
use puffin_interpreter::{Interpreter, PythonVersion};
use puffin_normalize::ExtraName;
use puffin_resolver::{Manifest, PreReleaseMode, ResolutionMode, ResolutionOptions, Resolver};
use puffin_traits::SetupPyStrategy;
use requirements_txt::EditableRequirement;
use crate::commands::reporters::{DownloadReporter, ResolverReporter};
@@ -44,6 +45,7 @@ pub(crate) async fn pip_compile(
prerelease_mode: PreReleaseMode,
upgrade_mode: UpgradeMode,
index_urls: IndexUrls,
setup_py: SetupPyStrategy,
no_build: bool,
python_version: Option<PythonVersion>,
exclude_newer: Option<DateTime<Utc>>,
@@ -141,6 +143,7 @@ pub(crate) async fn pip_compile(
&interpreter,
&index_urls,
interpreter.sys_executable().to_path_buf(),
setup_py,
no_build,
)
.with_options(options);

View file

@@ -28,7 +28,7 @@ use puffin_normalize::PackageName;
use puffin_resolver::{
Manifest, PreReleaseMode, ResolutionGraph, ResolutionMode, ResolutionOptions, Resolver,
};
use puffin_traits::OnceMap;
use puffin_traits::{OnceMap, SetupPyStrategy};
use requirements_txt::EditableRequirement;
use crate::commands::reporters::{DownloadReporter, InstallReporter, ResolverReporter};
@@ -48,6 +48,7 @@ pub(crate) async fn pip_install(
index_urls: IndexUrls,
reinstall: &Reinstall,
link_mode: LinkMode,
setup_py: SetupPyStrategy,
no_build: bool,
strict: bool,
exclude_newer: Option<DateTime<Utc>>,
@@ -144,6 +145,7 @@ pub(crate) async fn pip_install(
&interpreter,
&index_urls,
venv.python_executable(),
setup_py,
no_build,
)
.with_options(options);

View file

@@ -14,7 +14,7 @@ use puffin_client::{RegistryClient, RegistryClientBuilder};
use puffin_dispatch::BuildDispatch;
use puffin_installer::{Downloader, InstallPlan, Reinstall, ResolvedEditable, SitePackages};
use puffin_interpreter::Virtualenv;
use puffin_traits::OnceMap;
use puffin_traits::{OnceMap, SetupPyStrategy};
use pypi_types::Yanked;
use requirements_txt::EditableRequirement;
@@ -30,6 +30,7 @@ pub(crate) async fn pip_sync(
reinstall: &Reinstall,
link_mode: LinkMode,
index_urls: IndexUrls,
setup_py: SetupPyStrategy,
no_build: bool,
strict: bool,
cache: Cache,
@@ -69,6 +70,7 @@
venv.interpreter(),
&index_urls,
venv.python_executable(),
setup_py,
no_build,
);

View file

@@ -1,5 +1,6 @@
use std::fmt::Write;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use anyhow::Result;
use fs_err as fs;
@@ -7,22 +8,29 @@ use miette::{Diagnostic, IntoDiagnostic};
use owo_colors::OwoColorize;
use thiserror::Error;
use distribution_types::{DistributionMetadata, IndexUrls, Name};
use pep508_rs::Requirement;
use platform_host::Platform;
use puffin_cache::Cache;
use puffin_client::RegistryClientBuilder;
use puffin_dispatch::BuildDispatch;
use puffin_interpreter::Interpreter;
use puffin_traits::{BuildContext, SetupPyStrategy};
use crate::commands::ExitStatus;
use crate::printer::Printer;
/// Create a virtual environment.
#[allow(clippy::unnecessary_wraps)]
pub(crate) fn venv(
pub(crate) async fn venv(
path: &Path,
base_python: Option<&Path>,
index_urls: &IndexUrls,
seed: bool,
cache: &Cache,
printer: Printer,
) -> Result<ExitStatus> {
match venv_impl(path, base_python, cache, printer) {
match venv_impl(path, base_python, index_urls, seed, cache, printer).await {
Ok(status) => Ok(status),
Err(err) => {
#[allow(clippy::print_stderr)]
@@ -51,12 +59,18 @@ enum VenvError {
#[error("Failed to create virtual environment")]
#[diagnostic(code(puffin::venv::creation))]
CreationError(#[source] gourgeist::Error),
#[error("Failed to install seed packages")]
#[diagnostic(code(puffin::venv::seed))]
SeedError(#[source] anyhow::Error),
}
/// Create a virtual environment.
fn venv_impl(
async fn venv_impl(
path: &Path,
base_python: Option<&Path>,
index_urls: &IndexUrls,
seed: bool,
cache: &Cache,
mut printer: Printer,
) -> miette::Result<ExitStatus> {
@@ -96,7 +110,51 @@ fn venv_impl(
.into_diagnostic()?;
// Create the virtual environment.
gourgeist::create_venv(path, interpreter).map_err(VenvError::CreationError)?;
let venv = gourgeist::create_venv(path, interpreter).map_err(VenvError::CreationError)?;
// Install seed packages.
if seed {
// Instantiate a client.
let client = RegistryClientBuilder::new(cache.clone()).build();
// Prep the build context.
let build_dispatch = BuildDispatch::new(
&client,
cache,
venv.interpreter(),
index_urls,
venv.python_executable(),
SetupPyStrategy::default(),
true,
);
// Resolve the seed packages.
let resolution = build_dispatch
.resolve(&[
Requirement::from_str("wheel").unwrap(),
Requirement::from_str("pip").unwrap(),
Requirement::from_str("setuptools").unwrap(),
])
.await
.map_err(VenvError::SeedError)?;
// Install into the environment.
build_dispatch
.install(&resolution, &venv)
.await
.map_err(VenvError::SeedError)?;
for distribution in resolution.distributions() {
writeln!(
printer,
" {} {}{}",
"+".green(),
distribution.name().as_ref().white().bold(),
distribution.version_or_url().dimmed()
)
.into_diagnostic()?;
}
}
Ok(ExitStatus::Success)
}

View file

@@ -14,6 +14,7 @@ use puffin_installer::Reinstall;
use puffin_interpreter::PythonVersion;
use puffin_normalize::{ExtraName, PackageName};
use puffin_resolver::{PreReleaseMode, ResolutionMode};
use puffin_traits::SetupPyStrategy;
use requirements::ExtrasSpecification;
use crate::commands::{extra_name_with_clap_error, ExitStatus};
@@ -166,6 +167,11 @@ struct PipCompileArgs {
#[clap(long)]
upgrade: bool,
/// Use legacy `setuptools` behavior when building source distributions without a
/// `pyproject.toml`.
#[clap(long)]
legacy_setup_py: bool,
/// Don't build source distributions.
///
/// When enabled, resolving will not run arbitrary code. The cached wheels of already-built
@@ -228,6 +234,11 @@ struct PipSyncArgs {
#[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
no_index: bool,
/// Use legacy `setuptools` behavior when building source distributions without a
/// `pyproject.toml`.
#[clap(long)]
legacy_setup_py: bool,
/// Don't build source distributions.
///
/// When enabled, resolving will not run arbitrary code. The cached wheels of already-built
@@ -324,6 +335,11 @@ struct PipInstallArgs {
#[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
no_index: bool,
/// Use legacy `setuptools` behavior when building source distributions without a
/// `pyproject.toml`.
#[clap(long)]
legacy_setup_py: bool,
/// Don't build source distributions.
///
/// When enabled, resolving will not run arbitrary code. The cached wheels of already-built
@@ -392,9 +408,25 @@ struct VenvArgs {
#[clap(short, long)]
python: Option<PathBuf>,
/// Install seed packages (`pip`, `setuptools`, and `wheel`) into the virtual environment.
#[clap(long)]
seed: bool,
/// The path to the virtual environment to create.
#[clap(default_value = ".venv")]
name: PathBuf,
/// The URL of the Python Package Index.
#[clap(long, short, default_value = IndexUrl::Pypi.as_str(), env = "PUFFIN_INDEX_URL")]
index_url: IndexUrl,
/// Extra URLs of package indexes to use, in addition to `--index-url`.
#[clap(long)]
extra_index_url: Vec<IndexUrl>,
/// Ignore the package index, instead relying on local archives and caches.
#[clap(long, conflicts_with = "index_url", conflicts_with = "extra_index_url")]
no_index: bool,
}
#[derive(Args)]
@@ -480,6 +512,11 @@ async fn inner() -> Result<ExitStatus> {
args.prerelease,
args.upgrade.into(),
index_urls,
if args.legacy_setup_py {
SetupPyStrategy::Setuptools
} else {
SetupPyStrategy::Pep517
},
args.no_build,
args.python_version,
args.exclude_newer,
@@ -502,6 +539,11 @@
&reinstall,
args.link_mode,
index_urls,
if args.legacy_setup_py {
SetupPyStrategy::Setuptools
} else {
SetupPyStrategy::Pep517
},
args.no_build,
args.strict,
cache,
@@ -547,6 +589,11 @@
index_urls,
&reinstall,
args.link_mode,
if args.legacy_setup_py {
SetupPyStrategy::Setuptools
} else {
SetupPyStrategy::Pep517
},
args.no_build,
args.strict,
args.exclude_newer,
@@ -567,7 +614,19 @@
}
Commands::Clean(args) => commands::clean(&cache, &args.package, printer),
Commands::PipFreeze(args) => commands::freeze(&cache, args.strict, printer),
Commands::Venv(args) => commands::venv(&args.name, args.python.as_deref(), &cache, printer),
Commands::Venv(args) => {
let index_urls =
IndexUrls::from_args(args.index_url, args.extra_index_url, args.no_index);
commands::venv(
&args.name,
args.python.as_deref(),
&index_urls,
args.seed,
&cache,
printer,
)
.await
}
Commands::Add(args) => commands::add(&args.name, printer),
Commands::Remove(args) => commands::remove(&args.name, printer),
}
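For illustration (not part of the commit): `SetupPyStrategy` lives in `puffin-traits` and its definition is not shown in this diff. A minimal definition consistent with its uses here — `--legacy-setup-py` selects `Setuptools`, and the venv seeding path relies on `SetupPyStrategy::default()` being the PEP 517 variant:

#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
pub enum SetupPyStrategy {
    /// Build `setup.py`-only projects through the default PEP 517 backend.
    #[default]
    Pep517,
    /// Invoke `setup.py` directly (selected by `--legacy-setup-py`).
    Setuptools,
}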

View file

@@ -670,9 +670,11 @@ fn compile_python_37() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of Python>=3.8 and black==23.10.1 depends
on Python>=3.8, black==23.10.1 is forbidden.
And because root depends on black==23.10.1, version solving failed.
Because there are no versions of Python that satisfy Python>=3.8
and black==23.10.1 depends on Python>=3.8, we can conclude that
black==23.10.1 cannot be used.
And because root depends on black==23.10.1 we can conclude that the
requirements are unsatisfiable.
"###);
});
@@ -845,6 +847,8 @@ fn compile_wheel_url_dependency() -> Result<()> {
}
/// Resolve a specific Flask source distribution via a URL dependency.
///
/// Exercises the `prepare_metadata_for_build_wheel` hooks.
#[test]
fn compile_sdist_url_dependency() -> Result<()> {
let temp_dir = TempDir::new()?;
@@ -1405,8 +1409,9 @@ fn conflicting_direct_url_dependency() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there is no version of werkzeug==3.0.0 and root depends on
werkzeug==3.0.0, version solving failed.
Because there is no version of werkzeug==3.0.0 and root depends
on werkzeug==3.0.0, we can conclude that the requirements are
unsatisfiable.
"###);
});
@@ -1556,8 +1561,10 @@ fn conflicting_transitive_url_dependency() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because flask==3.0.0 depends on werkzeug>=3.0.0 and there are no
versions of werkzeug>=3.0.0, flask==3.0.0 is forbidden.
And because root depends on flask==3.0.0, version solving failed.
versions of werkzeug that satisfy werkzeug>=3.0.0, we can conclude that
flask==3.0.0 cannot be used.
And because root depends on flask==3.0.0 we can conclude that the
requirements are unsatisfiable.
"###);
});
@@ -1899,8 +1906,9 @@ dependencies = ["django==300.1.4"]
----- stderr -----
× No solution found when resolving dependencies:
Because there is no version of django==300.1.4 and my-project depends on
django==300.1.4, version solving failed.
Because there is no version of django==300.1.4 and my-project
depends on django==300.1.4, we can conclude that the requirements are
unsatisfiable.
"###);
});
@@ -2225,8 +2233,9 @@ fn compile_yanked_version_indirect() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of attrs>20.3.0, <21.2.0 and root depends
on attrs>20.3.0, <21.2.0, version solving failed.
Because there are no versions of attrs that satisfy attrs>20.3.0,<21.2.0
and root depends on attrs>20.3.0,<21.2.0, we can conclude that the
requirements are unsatisfiable.
"###);
});
@@ -2573,11 +2582,10 @@ fn compile_editable() -> Result<()> {
})?;
let filter_path = requirements_in.display().to_string();
let filters = INSTA_FILTERS
.iter()
.chain(&[(filter_path.as_str(), "requirements.in")])
.copied()
.collect::<Vec<_>>();
let filters: Vec<_> = iter::once((filter_path.as_str(), "requirements.in"))
.chain(INSTA_FILTERS.to_vec())
.collect();
insta::with_settings!({
filters => filters
}, {
@@ -2768,6 +2776,8 @@ fn trailing_slash() -> Result<()> {
.arg(cache_dir.path())
.arg("--index-url")
.arg("https://test.pypi.org/simple")
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
@@ -2794,6 +2804,8 @@ fn trailing_slash() -> Result<()> {
.arg(cache_dir.path())
.arg("--index-url")
.arg("https://test.pypi.org/simple/")
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
@@ -2812,3 +2824,90 @@
Ok(())
}
/// Resolve a project without a `pyproject.toml`, using the PEP 517 build backend (default).
#[test]
fn compile_legacy_sdist_pep_517() -> Result<()> {
let temp_dir = TempDir::new()?;
let cache_dir = TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
let requirements_in = temp_dir.child("requirements.in");
requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip-compile")
.arg("requirements.in")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by Puffin v0.0.1 via the following command:
# puffin pip-compile requirements.in --cache-dir [CACHE_DIR]
flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz
mccabe==0.7.0
# via flake8
pycodestyle==2.10.0
# via flake8
pyflakes==3.0.1
# via flake8
----- stderr -----
Resolved 4 packages in [TIME]
"###);
});
Ok(())
}
/// Resolve a project without a `pyproject.toml`, using `setuptools` directly.
#[test]
fn compile_legacy_sdist_setuptools() -> Result<()> {
let temp_dir = TempDir::new()?;
let cache_dir = TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
let requirements_in = temp_dir.child("requirements.in");
requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip-compile")
.arg("requirements.in")
.arg("--legacy-setup-py")
.arg("--cache-dir")
.arg(cache_dir.path())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by Puffin v0.0.1 via the following command:
# puffin pip-compile requirements.in --legacy-setup-py --cache-dir [CACHE_DIR]
flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz
mccabe==0.7.0
# via flake8
pycodestyle==2.10.0
# via flake8
pyflakes==3.0.1
# via flake8
----- stderr -----
Resolved 4 packages in [TIME]
"###);
});
Ok(())
}

View file

@@ -77,10 +77,11 @@ fn no_solution() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of flask>3.0.0 and flask==3.0.0 depends on
werkzeug>=3.0.0, flask>=3.0.0 depends on werkzeug>=3.0.0.
Because there are no versions of flask that satisfy flask>3.0.0
and flask==3.0.0 depends on werkzeug>=3.0.0, we can conclude that
flask>=3.0.0 depends on werkzeug>=3.0.0.
And because root depends on flask>=3.0.0 and root depends on
werkzeug<1.0.0, version solving failed.
werkzeug<1.0.0, we can conclude that the requirements are unsatisfiable.
"###);
Ok(())

View file

@@ -82,7 +82,13 @@ fn excluded_only_version() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of a<1.0.0 | >1.0.0 and root depends on a<1.0.0 | >1.0.0, version solving failed.
Because there are no versions of a that satisfy any of:
a<1.0.0
a>1.0.0
and root depends on one of:
a<1.0.0
a>1.0.0
we can conclude that the requirements are unsatisfiable.
"###);
});
@@ -150,9 +156,23 @@ fn excluded_only_compatible_version() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of a<1.0.0 | >1.0.0, <2.0.0 | >2.0.0, <3.0.0 | >3.0.0 and a==1.0.0 depends on b==1.0.0, a<2.0.0 depends on b==1.0.0.
And because a==3.0.0 depends on b==3.0.0, a<2.0.0 | >2.0.0 depends on b<=1.0.0 | >=3.0.0.
And because root depends on b>=2.0.0, <3.0.0 and root depends on a<2.0.0 | >2.0.0, version solving failed.
Because there are no versions of a that satisfy any of:
a<1.0.0
a>1.0.0,<2.0.0
a>2.0.0,<3.0.0
a>3.0.0
and a==1.0.0 depends on b==1.0.0, we can conclude that a<2.0.0 depends on b==1.0.0.
And because a==3.0.0 depends on b==3.0.0 we can conclude that any of:
a<2.0.0
a>2.0.0
depends on one of:
b<=1.0.0
b>=3.0.0
And because root depends on b>=2.0.0,<3.0.0 and root depends on one of:
a<2.0.0
a>2.0.0
we can conclude that the requirements are unsatisfiable.
"###);
});
@@ -258,13 +278,27 @@ fn dependency_excludes_range_of_compatible_versions() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of a<1.0.0 | >1.0.0, <2.0.0 | >3.0.0 and a==1.0.0 depends on b==1.0.0, a<2.0.0 depends on b==1.0.0. (1)
Because there are no versions of a that satisfy any of:
a<1.0.0
a>1.0.0,<2.0.0
a>3.0.0
and a==1.0.0 depends on b==1.0.0, we can conclude that a<2.0.0 depends on b==1.0.0. (1)
Because there are no versions of c<1.0.0 | >1.0.0, <2.0.0 | >2.0.0 and c==1.0.0 depends on a<2.0.0, c<2.0.0 depends on a<2.0.0.
And because c==2.0.0 depends on a>=3.0.0, c depends on a<2.0.0 | >=3.0.0.
And because a<2.0.0 depends on b==1.0.0 (1), a!=3.0.0, c*, b!=1.0.0 are incompatible.
And because a==3.0.0 depends on b==3.0.0, c depends on b<=1.0.0 | >=3.0.0.
And because root depends on c and root depends on b>=2.0.0, <3.0.0, version solving failed.
Because there are no versions of c that satisfy any of:
c<1.0.0
c>1.0.0,<2.0.0
c>2.0.0
and c==1.0.0 depends on a<2.0.0, we can conclude that c<2.0.0 depends on a<2.0.0.
And because c==2.0.0 depends on a>=3.0.0 we can conclude that all versions of c depends on one of:
a<2.0.0
a>=3.0.0
And because we know from (1) that a<2.0.0 depends on b==1.0.0, we can conclude that a!=3.0.0, all versions of c, b!=1.0.0 are incompatible.
And because a==3.0.0 depends on b==3.0.0 we can conclude that all versions of c depends on one of:
b<=1.0.0
b>=3.0.0
And because root depends on c and root depends on b>=2.0.0,<3.0.0, we can conclude that the requirements are unsatisfiable.
"###);
});
@@ -386,13 +420,39 @@ fn dependency_excludes_non_contiguous_range_of_compatible_versions() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because a==1.0.0 depends on b==1.0.0 and there are no versions of a<1.0.0 | >1.0.0, <2.0.0 | >3.0.0, a<2.0.0 depends on b==1.0.0.
And because a==3.0.0 depends on b==3.0.0, a<2.0.0 | >=3.0.0 depends on b<=1.0.0 | >=3.0.0. (1)
Because a==1.0.0 depends on b==1.0.0 and there are no versions of a that satisfy any of:
a<1.0.0
a>1.0.0,<2.0.0
a>3.0.0
we can conclude that a<2.0.0 depends on b==1.0.0.
And because a==3.0.0 depends on b==3.0.0 we can conclude that any of:
a<2.0.0
a>=3.0.0
depends on one of:
b<=1.0.0
b>=3.0.0
(1)
Because there are no versions of c<1.0.0 | >1.0.0, <2.0.0 | >2.0.0 and c==1.0.0 depends on a<2.0.0, c<2.0.0 depends on a<2.0.0.
And because c==2.0.0 depends on a>=3.0.0, c depends on a<2.0.0 | >=3.0.0.
And because a<2.0.0 | >=3.0.0 depends on b<=1.0.0 | >=3.0.0 (1), c depends on b<=1.0.0 | >=3.0.0.
And because root depends on b>=2.0.0, <3.0.0 and root depends on c, version solving failed.
Because there are no versions of c that satisfy any of:
c<1.0.0
c>1.0.0,<2.0.0
c>2.0.0
and c==1.0.0 depends on a<2.0.0, we can conclude that c<2.0.0 depends on a<2.0.0.
And because c==2.0.0 depends on a>=3.0.0 we can conclude that all versions of c depends on one of:
a<2.0.0
a>=3.0.0
And because we know from (1) that any of:
a<2.0.0
a>=3.0.0
depends on one of:
b<=1.0.0
b>=3.0.0
we can conclude that all versions of c depends on one of:
b<=1.0.0
b>=3.0.0
And because root depends on b>=2.0.0,<3.0.0 and root depends on c, we can conclude that the requirements are unsatisfiable.
"###);
});
@@ -521,7 +581,7 @@ fn requires_package_only_prereleases_in_range() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of a>0.1.0 and root depends on a>0.1.0, version solving failed.
Because there are no versions of a that satisfy a>0.1.0 and root depends on a>0.1.0, we can conclude that the requirements are unsatisfiable.
hint: Pre-releases are available for a in the requested range (e.g., 1.0.0a1), but pre-releases weren't enabled (try: `--prerelease=allow`)
"###);
@@ -1116,8 +1176,11 @@ fn requires_transitive_package_only_prereleases_in_range() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of b>0.1 and a==0.1.0 depends on b>0.1, a==0.1.0 is forbidden.
And because there are no versions of a<0.1.0 | >0.1.0 and root depends on a, version solving failed.
Because there are no versions of b that satisfy b>0.1 and a==0.1.0 depends on b>0.1, we can conclude that a==0.1.0 cannot be used.
And because there are no versions of a that satisfy any of:
a<0.1.0
a>0.1.0
and root depends on a, we can conclude that the requirements are unsatisfiable.
hint: Pre-releases are available for b in the requested range (e.g., 1.0.0a1), but pre-releases weren't enabled (try: `--prerelease=allow`)
"###);
@@ -1271,10 +1334,13 @@ fn requires_transitive_prerelease_and_stable_dependency() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there is no version of c==2.0.0b1 and a==1.0.0 depends on c==2.0.0b1, a==1.0.0 is forbidden.
And because there are no versions of a<1.0.0 | >1.0.0 and root depends on a, version solving failed.
Because there is no version of c==2.0.0b1 and a==1.0.0 depends on c==2.0.0b1, we can conclude that a==1.0.0 cannot be used.
And because there are no versions of a that satisfy any of:
a<1.0.0
a>1.0.0
and root depends on a, we can conclude that the requirements are unsatisfiable.
hint: c was requested with a pre-release marker (e.g., ==2.0.0b1), but pre-releases weren't enabled (try: `--prerelease=allow`)
hint: c was requested with a pre-release marker (e.g., c==2.0.0b1), but pre-releases weren't enabled (try: `--prerelease=allow`)
"###);
});
@@ -1468,12 +1534,18 @@ fn requires_transitive_prerelease_and_stable_dependency_many_versions() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of b<1.0.0 | >1.0.0 and b==1.0.0 depends on c, b depends on c.
And because there are no versions of c>=2.0.0b1, b depends on c<2.0.0b1.
And because a==1.0.0 depends on c>=2.0.0b1 and there are no versions of a<1.0.0 | >1.0.0, b*, a* are incompatible.
And because root depends on b and root depends on a, version solving failed.
Because there are no versions of b that satisfy any of:
b<1.0.0
b>1.0.0
and b==1.0.0 depends on c, we can conclude that all versions of b depends on c.
And because there are no versions of c that satisfy c>=2.0.0b1 we can conclude that all versions of b depends on c<2.0.0b1.
And because a==1.0.0 depends on c>=2.0.0b1 and there are no versions of a that satisfy any of:
a<1.0.0
a>1.0.0
we can conclude that all versions of b and all versions of a are incompatible.
And because root depends on b and root depends on a, we can conclude that the requirements are unsatisfiable.
hint: c was requested with a pre-release marker (e.g., >=2.0.0b1), but pre-releases weren't enabled (try: `--prerelease=allow`)
hint: c was requested with a pre-release marker (e.g., c>=2.0.0b1), but pre-releases weren't enabled (try: `--prerelease=allow`)
"###);
});
@@ -1567,10 +1639,25 @@ fn requires_transitive_prerelease_and_stable_dependency_many_versions_holes() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of c>1.0.0, <2.0.0a5 | >2.0.0a7, <2.0.0b1 | >2.0.0b1, <2.0.0b5 and a==1.0.0 depends on c>1.0.0, <2.0.0a5 | >2.0.0a7, <2.0.0b1 | >2.0.0b1, <2.0.0b5, a==1.0.0 is forbidden.
And because there are no versions of a<1.0.0 | >1.0.0 and root depends on a, version solving failed.
Because there are no versions of c that satisfy any of:
c>1.0.0,<2.0.0a5
c>2.0.0a7,<2.0.0b1
c>2.0.0b1,<2.0.0b5
and a==1.0.0 depends on one of:
c>1.0.0,<2.0.0a5
c>2.0.0a7,<2.0.0b1
c>2.0.0b1,<2.0.0b5
we can conclude that a==1.0.0 cannot be used.
And because there are no versions of a that satisfy any of:
a<1.0.0
a>1.0.0
and root depends on a, we can conclude that the requirements are unsatisfiable.
hint: c was requested with a pre-release marker (e.g., >1.0.0, <2.0.0a5 | >2.0.0a7, <2.0.0b1 | >2.0.0b1, <2.0.0b5), but pre-releases weren't enabled (try: `--prerelease=allow`)
hint: c was requested with a pre-release marker (e.g., any of:
c>1.0.0,<2.0.0a5
c>2.0.0a7,<2.0.0b1
c>2.0.0b1,<2.0.0b5
), but pre-releases weren't enabled (try: `--prerelease=allow`)
"###);
});
@@ -1681,7 +1768,7 @@ fn requires_exact_version_does_not_exist() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there is no version of a==2.0.0 and root depends on a==2.0.0, version solving failed.
Because there is no version of a==2.0.0 and root depends on a==2.0.0, we can conclude that the requirements are unsatisfiable.
"###);
});
@@ -1737,7 +1824,7 @@ fn requires_greater_version_does_not_exist() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of a>1.0.0 and root depends on a>1.0.0, version solving failed.
Because there are no versions of a that satisfy a>1.0.0 and root depends on a>1.0.0, we can conclude that the requirements are unsatisfiable.
"###);
});
@@ -1794,7 +1881,7 @@ fn requires_less_version_does_not_exist() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of a<2.0.0 and root depends on a<2.0.0, version solving failed.
Because there are no versions of a that satisfy a<2.0.0 and root depends on a<2.0.0, we can conclude that the requirements are unsatisfiable.
"###);
});
@@ -1978,8 +2065,11 @@ fn requires_transitive_incompatible_with_root_version() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of a<1.0.0 | >1.0.0 and a==1.0.0 depends on b==2.0.0, a depends on b==2.0.0.
And because root depends on a and root depends on b==1.0.0, version solving failed.
Because there are no versions of a that satisfy any of:
a<1.0.0
a>1.0.0
and a==1.0.0 depends on b==2.0.0, we can conclude that all versions of a depends on b==2.0.0.
And because root depends on a and root depends on b==1.0.0, we can conclude that the requirements are unsatisfiable.
"###);
});
@@ -2054,9 +2144,15 @@ fn requires_transitive_incompatible_with_transitive() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of b<1.0.0 | >1.0.0 and b==1.0.0 depends on c==2.0.0, b depends on c==2.0.0.
And because a==1.0.0 depends on c==1.0.0 and there are no versions of a<1.0.0 | >1.0.0, a*, b* are incompatible.
And because root depends on b and root depends on a, version solving failed.
Because there are no versions of b that satisfy any of:
b<1.0.0
b>1.0.0
and b==1.0.0 depends on c==2.0.0, we can conclude that all versions of b depends on c==2.0.0.
And because a==1.0.0 depends on c==1.0.0 and there are no versions of a that satisfy any of:
a<1.0.0
a>1.0.0
we can conclude that all versions of a and all versions of b are incompatible.
And because root depends on b and root depends on a, we can conclude that the requirements are unsatisfiable.
"###);
});
@@ -2116,8 +2212,8 @@ fn requires_python_version_does_not_exist() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of Python>=4.0 and a==1.0.0 depends on Python>=4.0, a==1.0.0 is forbidden.
And because root depends on a==1.0.0, version solving failed.
Because there are no versions of Python that satisfy Python>=4.0 and a==1.0.0 depends on Python>=4.0, we can conclude that a==1.0.0 cannot be used.
And because root depends on a==1.0.0 we can conclude that the requirements are unsatisfiable.
"###);
});
@@ -2173,8 +2269,8 @@ fn requires_python_version_less_than_current() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of Python<=3.8 and a==1.0.0 depends on Python<=3.8, a==1.0.0 is forbidden.
And because root depends on a==1.0.0, version solving failed.
Because there are no versions of Python that satisfy Python<=3.8 and a==1.0.0 depends on Python<=3.8, we can conclude that a==1.0.0 cannot be used.
And because root depends on a==1.0.0 we can conclude that the requirements are unsatisfiable.
"###);
});
@@ -2233,8 +2329,8 @@ fn requires_python_version_greater_than_current() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of Python>=3.10 and a==1.0.0 depends on Python>=3.10, a==1.0.0 is forbidden.
And because root depends on a==1.0.0, version solving failed.
Because there are no versions of Python that satisfy Python>=3.10 and a==1.0.0 depends on Python>=3.10, we can conclude that a==1.0.0 cannot be used.
And because root depends on a==1.0.0 we can conclude that the requirements are unsatisfiable.
"###);
});
@@ -2315,7 +2411,7 @@ fn requires_python_version_greater_than_current_many() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there is no version of a==1.0.0 and root depends on a==1.0.0, version solving failed.
Because there is no version of a==1.0.0 and root depends on a==1.0.0, we can conclude that the requirements are unsatisfiable.
"###);
});
@@ -2451,17 +2547,24 @@ fn requires_python_version_greater_than_current_excluded() -> Result<()> {
----- stderr -----
× No solution found when resolving dependencies:
Because there are no versions of Python>=3.10, <3.11 and there are no versions of Python>=3.12, Python>=3.10, <3.11 | >=3.12 are incompatible.
And because there are no versions of Python>=3.11, <3.12, Python>=3.10 are incompatible.
And because a==2.0.0 depends on Python>=3.10 and there are no versions of a>2.0.0, <3.0.0 | >3.0.0, <4.0.0 | >4.0.0, a>=2.0.0, <3.0.0 is forbidden. (1)
Because there are no versions of Python that satisfy Python>=3.10,<3.11 and there are no versions of Python that satisfy Python>=3.12, we can conclude that any of:
Python>=3.10,<3.11
Python>=3.12
are incompatible.
And because there are no versions of Python that satisfy Python>=3.11,<3.12 we can conclude that Python>=3.10 are incompatible.
And because a==2.0.0 depends on Python>=3.10 and there are no versions of a that satisfy any of:
a>2.0.0,<3.0.0
a>3.0.0,<4.0.0
a>4.0.0
we can conclude that a>=2.0.0,<3.0.0 cannot be used. (1)
Because there are no versions of Python>=3.11, <3.12 and there are no versions of Python>=3.12, Python>=3.11 are incompatible.
And because a==3.0.0 depends on Python>=3.11, a==3.0.0 is forbidden.
And because a>=2.0.0, <3.0.0 is forbidden (1), a>=2.0.0, <4.0.0 is forbidden. (2)
Because there are no versions of Python that satisfy Python>=3.11,<3.12 and there are no versions of Python that satisfy Python>=3.12, we can conclude that Python>=3.11 are incompatible.
And because a==3.0.0 depends on Python>=3.11 we can conclude that a==3.0.0 cannot be used.
And because we know from (1) that a>=2.0.0,<3.0.0 cannot be used, we can conclude that a>=2.0.0,<4.0.0 cannot be used. (2)
Because there are no versions of Python>=3.12 and a==4.0.0 depends on Python>=3.12, a==4.0.0 is forbidden.
And because a>=2.0.0, <4.0.0 is forbidden (2), a>=2.0.0 is forbidden.
And because root depends on a>=2.0.0, version solving failed.
Because there are no versions of Python that satisfy Python>=3.12 and a==4.0.0 depends on Python>=3.12, we can conclude that a==4.0.0 cannot be used.
And because we know from (2) that a>=2.0.0,<4.0.0 cannot be used, we can conclude that a>=2.0.0 cannot be used.
And because root depends on a>=2.0.0 we can conclude that the requirements are unsatisfiable.
"###);
});

View file

@ -2591,3 +2591,74 @@ fn incompatible_wheel() -> Result<()> {
Ok(())
}
/// Install a project without a `pyproject.toml`, using the PEP 517 build backend (default).
#[test]
fn sync_legacy_sdist_pep_517() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
let requirements_in = temp_dir.child("requirements.in");
requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip-sync")
.arg("requirements.in")
.arg("--cache-dir")
.arg(cache_dir.path())
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Downloaded 1 package in [TIME]
Installed 1 package in [TIME]
+ flake8==6.0.0 (from https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz)
"###);
});
Ok(())
}
/// Install a project without a `pyproject.toml`, using `setuptools` directly.
#[test]
fn sync_legacy_sdist_setuptools() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let cache_dir = assert_fs::TempDir::new()?;
let venv = create_venv_py312(&temp_dir, &cache_dir);
let requirements_in = temp_dir.child("requirements.in");
requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?;
insta::with_settings!({
filters => INSTA_FILTERS.to_vec()
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("pip-sync")
.arg("requirements.in")
.arg("--legacy-setup-py")
.arg("--cache-dir")
.arg(cache_dir.path())
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Downloaded 1 package in [TIME]
Installed 1 package in [TIME]
+ flake8==6.0.0 (from https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz)
"###);
});
Ok(())
}

View file

@ -73,3 +73,39 @@ fn create_venv_defaults_to_cwd() -> Result<()> {
Ok(())
}
#[test]
fn seed() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let venv = temp_dir.child(".venv");
insta::with_settings!({
filters => vec![
(r"Using Python 3\.\d+\.\d+ at .+", "Using Python [VERSION] at [PATH]"),
(temp_dir.to_str().unwrap(), "/home/ferris/project"),
]
}, {
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
.arg("venv")
.arg(venv.as_os_str())
.arg("--seed")
.arg("--python")
.arg("python3.12")
.current_dir(&temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using Python [VERSION] at [PATH]
Creating virtual environment at: /home/ferris/project/.venv
+ setuptools==69.0.3
+ pip==23.3.2
+ wheel==0.42.0
"###);
});
venv.assert(predicates::path::is_dir());
Ok(())
}

View file

@ -12,7 +12,7 @@ use puffin_cache::{Cache, CacheArgs};
use puffin_client::RegistryClientBuilder;
use puffin_dispatch::BuildDispatch;
use puffin_interpreter::Virtualenv;
use puffin_traits::{BuildContext, BuildKind};
use puffin_traits::{BuildContext, BuildKind, SetupPyStrategy};
#[derive(Parser)]
pub(crate) struct BuildArgs {
@ -55,6 +55,7 @@ pub(crate) async fn build(args: BuildArgs) -> Result<PathBuf> {
let venv = Virtualenv::from_env(platform, &cache)?;
let client = RegistryClientBuilder::new(cache.clone()).build();
let index_urls = IndexUrls::default();
let setup_py = SetupPyStrategy::default();
let build_dispatch = BuildDispatch::new(
&client,
@ -62,6 +63,7 @@ pub(crate) async fn build(args: BuildArgs) -> Result<PathBuf> {
venv.interpreter(),
&index_urls,
venv.python_executable(),
setup_py,
false,
);
@ -72,6 +74,7 @@ pub(crate) async fn build(args: BuildArgs) -> Result<PathBuf> {
&build_dispatch,
SourceBuildContext::default(),
args.sdist.display().to_string(),
setup_py,
build_kind,
)
.await?;
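For context, `SetupPyStrategy` is threaded through every `BuildDispatch::new` and `SourceBuild::setup` call in this change. A plausible shape for it, inferred from the `--legacy-setup-py` flag and the "PEP 517 build backend (default)" wording in the tests above — the variant names are assumptions, not confirmed by this diff:

// Sketch only: variant names are assumptions based on the call sites in this diff.
#[derive(Debug, Default, Clone, Copy)]
pub enum SetupPyStrategy {
    /// Build `setup.py`-based projects through the PEP 517 hooks (default).
    #[default]
    Pep517,
    /// Invoke `setup.py` directly via `setuptools`.
    Setuptools,
}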

View file

@ -25,7 +25,7 @@ use puffin_installer::Downloader;
use puffin_interpreter::Virtualenv;
use puffin_normalize::PackageName;
use puffin_resolver::DistFinder;
use puffin_traits::{BuildContext, OnceMap};
use puffin_traits::{BuildContext, OnceMap, SetupPyStrategy};
#[derive(Parser)]
pub(crate) struct InstallManyArgs {
@ -60,13 +60,16 @@ pub(crate) async fn install_many(args: InstallManyArgs) -> Result<()> {
let venv = Virtualenv::from_env(platform, &cache)?;
let client = RegistryClientBuilder::new(cache.clone()).build();
let index_urls = IndexUrls::default();
let setup_py = SetupPyStrategy::default();
let tags = venv.interpreter().tags()?;
let build_dispatch = BuildDispatch::new(
&client,
&cache,
venv.interpreter(),
&index_urls,
venv.python_executable(),
setup_py,
args.no_build,
);

View file

@ -17,6 +17,7 @@ use puffin_client::RegistryClientBuilder;
use puffin_dispatch::BuildDispatch;
use puffin_interpreter::Virtualenv;
use puffin_resolver::{Manifest, ResolutionOptions, Resolver};
use puffin_traits::SetupPyStrategy;
#[derive(ValueEnum, Default, Clone)]
pub(crate) enum ResolveCliFormat {
@ -50,6 +51,7 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {
let venv = Virtualenv::from_env(platform, &cache)?;
let client = RegistryClientBuilder::new(cache.clone()).build();
let index_urls = IndexUrls::default();
let setup_py = SetupPyStrategy::default();
let build_dispatch = BuildDispatch::new(
&client,
@ -57,6 +59,7 @@ pub(crate) async fn resolve_cli(args: ResolveCliArgs) -> Result<()> {
venv.interpreter(),
&index_urls,
venv.python_executable(),
setup_py,
args.no_build,
);

View file

@ -20,7 +20,7 @@ use puffin_client::{RegistryClient, RegistryClientBuilder};
use puffin_dispatch::BuildDispatch;
use puffin_interpreter::Virtualenv;
use puffin_normalize::PackageName;
use puffin_traits::BuildContext;
use puffin_traits::{BuildContext, SetupPyStrategy};
#[derive(Parser)]
pub(crate) struct ResolveManyArgs {
@ -74,6 +74,7 @@ pub(crate) async fn resolve_many(args: ResolveManyArgs) -> Result<()> {
let venv = Virtualenv::from_env(platform, &cache)?;
let client = RegistryClientBuilder::new(cache.clone()).build();
let index_urls = IndexUrls::default();
let setup_py = SetupPyStrategy::default();
let build_dispatch = BuildDispatch::new(
&client,
@ -81,6 +82,7 @@ pub(crate) async fn resolve_many(args: ResolveManyArgs) -> Result<()> {
venv.interpreter(),
&index_urls,
venv.python_executable(),
setup_py,
args.no_build,
);
let build_dispatch = Arc::new(build_dispatch);
@ -149,7 +151,20 @@ pub(crate) async fn resolve_many(args: ResolveManyArgs) -> Result<()> {
{
"Building source distributions is disabled".to_string()
} else {
format!("{err:?}")
err.chain()
.map(|err| {
let formatted = err.to_string();
// Cut overly long C/C++ compile output, keeping the first and last 20
// lines; only truncate when the two windows cannot overlap.
if formatted.lines().count() > 40 {
let formatted: Vec<_> = formatted.lines().collect();
formatted[..20].join("\n")
+ "\n[...]\n"
+ &formatted[formatted.len() - 20..].join("\n")
} else {
formatted
}
})
.join("\n Caused by: ")
};
info!(
"Error for {} ({}/{}, {} ms): {}",

View file

@ -17,7 +17,7 @@ use puffin_client::RegistryClient;
use puffin_installer::{Downloader, InstallPlan, Installer, Reinstall, SitePackages};
use puffin_interpreter::{Interpreter, Virtualenv};
use puffin_resolver::{Manifest, ResolutionOptions, Resolver};
use puffin_traits::{BuildContext, BuildKind, OnceMap};
use puffin_traits::{BuildContext, BuildKind, OnceMap, SetupPyStrategy};
/// The main implementation of [`BuildContext`], used by the CLI, see [`BuildContext`]
/// documentation.
@ -27,6 +27,7 @@ pub struct BuildDispatch<'a> {
interpreter: &'a Interpreter,
index_urls: &'a IndexUrls,
base_python: PathBuf,
setup_py: SetupPyStrategy,
no_build: bool,
source_build_context: SourceBuildContext,
options: ResolutionOptions,
@ -40,6 +41,7 @@ impl<'a> BuildDispatch<'a> {
interpreter: &'a Interpreter,
index_urls: &'a IndexUrls,
base_python: PathBuf,
setup_py: SetupPyStrategy,
no_build: bool,
) -> Self {
Self {
@ -48,6 +50,7 @@ impl<'a> BuildDispatch<'a> {
interpreter,
index_urls,
base_python,
setup_py,
no_build,
source_build_context: SourceBuildContext::default(),
options: ResolutionOptions::default(),
@ -81,6 +84,10 @@ impl<'a> BuildContext for BuildDispatch<'a> {
self.no_build
}
fn setup_py_strategy(&self) -> SetupPyStrategy {
self.setup_py
}
async fn resolve<'data>(&'data self, requirements: &'data [Requirement]) -> Result<Resolution> {
let markers = self.interpreter.markers();
let tags = self.interpreter.tags()?;
@ -224,6 +231,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
if self.no_build {
bail!("Building source distributions is disabled");
}
let builder = SourceBuild::setup(
source,
subdirectory,
@ -231,6 +239,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
self,
self.source_build_context.clone(),
package_id.to_string(),
self.setup_py,
build_kind,
)
.await?;

View file

@ -151,31 +151,25 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
debug!("Fetching disk-based wheel from URL: {}", wheel.url);
let reader = self.client.stream_external(&wheel.url).await?;
let filename = wheel.filename.to_string();
// Download the wheel to a temporary file.
// Download and unzip the wheel to a temporary dir.
let temp_dir = tempfile::tempdir_in(self.cache.root())?;
let temp_file = temp_dir.path().join(&filename);
let mut writer =
tokio::io::BufWriter::new(tokio::fs::File::create(&temp_file).await?);
tokio::io::copy(&mut reader.compat(), &mut writer).await?;
let temp_target = temp_dir.path().join(wheel.filename.to_string());
unzip_no_seek(reader.compat(), &temp_target).await?;
// Move the unzipped wheel from the temporary directory into the cache.
let cache_entry = self.cache.entry(
CacheBucket::Wheels,
WheelCache::Url(&wheel.url).remote_wheel_dir(wheel.name().as_ref()),
filename,
wheel.filename.stem(),
);
fs::create_dir_all(&cache_entry.dir()).await?;
tokio::fs::rename(temp_file, &cache_entry.path()).await?;
let target = cache_entry.into_path_buf();
tokio::fs::rename(temp_target, &target).await?;
let local_wheel = LocalWheel::Disk(DiskWheel {
let local_wheel = LocalWheel::Unzipped(UnzippedWheel {
dist: dist.clone(),
target: cache_entry
.with_file(wheel.filename.stem())
.path()
.to_path_buf(),
path: cache_entry.into_path_buf(),
target,
filename: wheel.filename.clone(),
});
@ -226,7 +220,7 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
match dist {
Dist::Built(built_dist) => Ok((self.client.wheel_metadata(built_dist).await?, None)),
Dist::Source(source_dist) => {
// Optimization: Skip source dist download when we must not build them anyway
// Optimization: Skip source dist download when we must not build them anyway.
if self.build_context.no_build() {
return Err(DistributionDatabaseError::NoBuild);
}
@ -242,8 +236,11 @@ impl<'a, Context: BuildContext + Send + Sync> DistributionDatabase<'a, Context>
None => Cow::Borrowed(source_dist),
};
let built_wheel = self.builder.download_and_build(&source_dist).await?;
Ok((built_wheel.metadata, precise))
let metadata = self
.builder
.download_and_build_metadata(&source_dist)
.await?;
Ok((metadata, precise))
}
}
}
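The rename at the end of the new download path is what keeps the cache consistent under concurrency. A minimal sketch of the unzip-then-rename pattern, with the error type simplified to `std::io::Error`:

use std::path::{Path, PathBuf};

// Sketch: move a fully unzipped wheel into its final cache location. Because
// the temporary directory lives inside the cache root, the rename stays on a
// single filesystem and is atomic, so readers never observe a partially
// unzipped wheel.
async fn place_in_cache(
    temp_target: &Path,
    cache_dir: &Path,
    stem: &str,
) -> std::io::Result<PathBuf> {
    tokio::fs::create_dir_all(cache_dir).await?;
    let target = cache_dir.join(stem);
    tokio::fs::rename(temp_target, &target).await?;
    Ok(target)
}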

View file

@ -5,7 +5,6 @@ use tracing::warn;
use distribution_filename::WheelFilename;
use platform_tags::Tags;
use puffin_cache::CacheEntry;
use pypi_types::Metadata21;
use crate::source::manifest::{DiskFilenameAndMetadata, Manifest};
@ -18,8 +17,6 @@ pub struct BuiltWheelMetadata {
pub(crate) target: PathBuf,
/// The parsed filename.
pub(crate) filename: WheelFilename,
/// The metadata of the built wheel.
pub(crate) metadata: Metadata21,
}
impl BuiltWheelMetadata {
@ -30,8 +27,8 @@ impl BuiltWheelMetadata {
cache_entry: &CacheEntry,
) -> Option<Self> {
// Find a compatible cache entry in the manifest.
let (filename, cached_dist) = manifest.find_compatible(tags)?;
let metadata = Self::from_cached(filename.clone(), cached_dist.clone(), cache_entry);
let (filename, wheel) = manifest.find_wheel(tags)?;
let metadata = Self::from_cached(filename.clone(), wheel.clone(), cache_entry);
// Validate that the wheel exists on disk.
if !metadata.path.is_file() {
@ -52,10 +49,9 @@ impl BuiltWheelMetadata {
cache_entry: &CacheEntry,
) -> Self {
Self {
path: cache_entry.dir().join(&cached_dist.disk_filename),
path: cache_entry.dir().join(cached_dist.disk_filename),
target: cache_entry.dir().join(filename.stem()),
filename,
metadata: cached_dist.metadata,
}
}
}

View file

@ -22,7 +22,7 @@ pub enum SourceDistError {
Client(#[from] puffin_client::Error),
// Cache writing error
#[error("Failed to write to source dist cache")]
#[error("Failed to write to source distribution cache")]
Io(#[from] std::io::Error),
#[error("Cache deserialization failed")]
Decode(#[from] rmp_serde::decode::Error),

View file

@ -6,31 +6,49 @@ use platform_tags::Tags;
use pypi_types::Metadata21;
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub(crate) struct Manifest(FxHashMap<WheelFilename, DiskFilenameAndMetadata>);
pub(crate) struct Manifest {
/// The metadata for the distribution, as returned by `prepare_metadata_for_build_wheel`.
metadata: Option<Metadata21>,
/// The built wheels for the distribution, each of which was returned from `build_wheel`.
built_wheels: FxHashMap<WheelFilename, DiskFilenameAndMetadata>,
}
impl Manifest {
/// Find a compatible wheel in the cache.
pub(crate) fn find_compatible(
/// Set the prepared metadata.
pub(crate) fn set_metadata(&mut self, metadata: Metadata21) {
self.metadata = Some(metadata);
}
/// Insert a built wheel into the manifest.
pub(crate) fn insert_wheel(
&mut self,
filename: WheelFilename,
disk_filename_and_metadata: DiskFilenameAndMetadata,
) {
self.built_wheels
.insert(filename, disk_filename_and_metadata);
}
/// Find a compatible wheel in the manifest.
pub(crate) fn find_wheel(
&self,
tags: &Tags,
) -> Option<(&WheelFilename, &DiskFilenameAndMetadata)> {
self.0
self.built_wheels
.iter()
.find(|(filename, _metadata)| filename.is_compatible(tags))
.find(|(filename, _)| filename.is_compatible(tags))
}
}
impl std::ops::Deref for Manifest {
type Target = FxHashMap<WheelFilename, DiskFilenameAndMetadata>;
/// Find the metadata in the manifest, if available.
pub(crate) fn find_metadata(&self) -> Option<&Metadata21> {
// If we already have a prepared metadata, return it.
if let Some(metadata) = &self.metadata {
return Some(metadata);
}
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl std::ops::DerefMut for Manifest {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
// Otherwise, return the metadata from any of the built wheels.
let wheel = self.built_wheels.values().next()?;
Some(&wheel.metadata)
}
}
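A small sketch of how the cache code interacts with the reshaped `Manifest`; the inputs are passed in so the snippet stands alone, and the `DiskFilenameAndMetadata` construction mirrors the call sites later in this diff:

// Sketch: record a build, preferring prepared metadata on later lookups.
fn record_build(
    manifest: &mut Manifest,
    filename: WheelFilename,
    disk_filename: String,
    metadata: Metadata21,
) {
    // Metadata from `prepare_metadata_for_build_wheel`, when the backend supports it...
    manifest.set_metadata(metadata.clone());
    // ...plus the concrete wheel produced by `build_wheel`.
    manifest.insert_wheel(
        filename,
        DiskFilenameAndMetadata {
            disk_filename,
            metadata,
        },
    );
    // `find_metadata` now returns the prepared copy; without it, it would fall
    // back to the metadata of any built wheel.
    debug_assert!(manifest.find_metadata().is_some());
}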

View file

@ -69,6 +69,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
}
}
/// Download and build a [`SourceDist`].
pub async fn download_and_build(
&self,
source_dist: &SourceDist,
@ -129,6 +130,73 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
Ok(built_wheel_metadata)
}
/// Download a [`SourceDist`] and determine its metadata. This typically involves building the
/// source distribution into a wheel; however, some build backends support determining the
/// metadata without building the source distribution.
pub async fn download_and_build_metadata(
&self,
source_dist: &SourceDist,
) -> Result<Metadata21, SourceDistError> {
let metadata = match &source_dist {
SourceDist::DirectUrl(direct_url_source_dist) => {
let filename = direct_url_source_dist
.filename()
.expect("Distribution must have a filename");
let DirectArchiveUrl { url, subdirectory } =
DirectArchiveUrl::from(direct_url_source_dist.url.raw());
// For direct URLs, cache directly under the hash of the URL itself.
let cache_shard = self.build_context.cache().shard(
CacheBucket::BuiltWheels,
WheelCache::Url(&url).remote_wheel_dir(direct_url_source_dist.name().as_ref()),
);
self.url_metadata(
source_dist,
filename,
&url,
&cache_shard,
subdirectory.as_deref(),
)
.await?
}
SourceDist::Registry(registry_source_dist) => {
let url = registry_source_dist
.base
.join_relative(&registry_source_dist.file.url)
.map_err(|err| {
SourceDistError::UrlParse(registry_source_dist.file.url.clone(), err)
})?;
// For registry source distributions, shard by package, then by SHA.
// Ex) `pypi/requests/a673187abc19fe6c`
let cache_shard = self.build_context.cache().shard(
CacheBucket::BuiltWheels,
WheelCache::Index(&registry_source_dist.index)
.remote_wheel_dir(registry_source_dist.name.as_ref())
.join(&registry_source_dist.file.hashes.sha256[..16]),
);
self.url_metadata(
source_dist,
&registry_source_dist.file.filename,
&url,
&cache_shard,
None,
)
.await?
}
SourceDist::Git(git_source_dist) => {
self.git_metadata(source_dist, git_source_dist).await?
}
SourceDist::Path(path_source_dist) => {
self.path_metadata(source_dist, path_source_dist).await?
}
};
Ok(metadata)
}
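The registry arm shards the built-wheel cache by package and then by a SHA-256 prefix, as the `pypi/requests/a673187abc19fe6c` comment illustrates. A tiny sketch of that key derivation; the directory layout here is illustrative only:

use std::path::{Path, PathBuf};

// Sketch: cache-shard key for a registry source distribution. Sixteen hex
// characters of the SHA-256 are plenty to keep shards distinct in practice.
fn registry_shard(index_dir: &Path, package: &str, sha256: &str) -> PathBuf {
    index_dir.join(package).join(&sha256[..16])
}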
/// Build a source distribution from a remote URL.
#[allow(clippy::too_many_arguments)]
async fn url<'data>(
@ -197,12 +265,6 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
)
.await?;
if let Some(task) = task {
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_build_complete(source_dist, task);
}
}
let cached_data = DiskFilenameAndMetadata {
disk_filename: disk_filename.clone(),
metadata: metadata.clone(),
@ -213,16 +275,22 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
// Just return if the response wasn't cacheable or there were other errors that
// `CachedClient` already complained about.
if let Ok(cached) = fs::read(cache_entry.path()).await {
// If the file exists and it was just read or written by `CachedClient`, we assume it must
// be correct.
// If the file exists and it was just read or written by `CachedClient`, we assume it
// must be correct.
let mut cached = rmp_serde::from_slice::<DataWithCachePolicy<Manifest>>(&cached)?;
cached
.data
.insert(wheel_filename.clone(), cached_data.clone());
.insert_wheel(wheel_filename.clone(), cached_data.clone());
write_atomic(cache_entry.path(), rmp_serde::to_vec(&cached)?).await?;
};
if let Some(task) = task {
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_build_complete(source_dist, task);
}
}
Ok(BuiltWheelMetadata::from_cached(
wheel_filename,
cached_data,
@ -230,6 +298,120 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
))
}
/// Build the source distribution's metadata from a remote URL.
///
/// If the build backend supports `prepare_metadata_for_build_wheel`, this method will avoid
/// building the wheel.
#[allow(clippy::too_many_arguments)]
async fn url_metadata<'data>(
&self,
source_dist: &'data SourceDist,
filename: &'data str,
url: &'data Url,
cache_shard: &CacheShard,
subdirectory: Option<&'data Path>,
) -> Result<Metadata21, SourceDistError> {
let cache_entry = cache_shard.entry(METADATA);
let download = |response| {
async {
// At this point, we're seeing a new or updated source distribution; delete all
// cached wheels and re-download.
match fs::remove_dir_all(&cache_entry.dir()).await {
Ok(()) => debug!("Cleared built wheels and metadata for {source_dist}"),
Err(err) if err.kind() == std::io::ErrorKind::NotFound => (),
Err(err) => return Err(err.into()),
}
debug!("Downloading source distribution: {source_dist}");
// Download the source distribution.
let source_dist_entry = cache_shard.entry(filename);
self.persist_source_dist_url(response, source_dist, filename, &source_dist_entry)
.await?;
Ok(Manifest::default())
}
.instrument(info_span!("download", source_dist = %source_dist))
};
let req = self.cached_client.uncached().get(url.clone()).build()?;
let manifest = self
.cached_client
.get_cached_with_callback(req, &cache_entry, download)
.await
.map_err(|err| match err {
CachedClientError::Callback(err) => err,
CachedClientError::Client(err) => SourceDistError::Client(err),
})?;
// If the cache contains compatible metadata, return it.
if let Some(metadata) = manifest.find_metadata() {
return Ok(metadata.clone());
}
// Otherwise, we either need to build the metadata or the wheel.
let source_dist_entry = cache_shard.entry(filename);
// If the backend supports `prepare_metadata_for_build_wheel`, use it.
if let Some(metadata) = self
.build_source_dist_metadata(source_dist, source_dist_entry.path(), subdirectory)
.await?
{
if let Ok(cached) = fs::read(cache_entry.path()).await {
let mut cached = rmp_serde::from_slice::<DataWithCachePolicy<Manifest>>(&cached)?;
cached.data.set_metadata(metadata.clone());
write_atomic(cache_entry.path(), rmp_serde::to_vec(&cached)?).await?;
};
return Ok(metadata);
}
// At this point, we're seeing cached metadata (as in, we have an up-to-date source
// distribution), but the wheel(s) we built previously are incompatible.
let task = self
.reporter
.as_ref()
.map(|reporter| reporter.on_build_start(source_dist));
// Build the source distribution.
let (disk_filename, wheel_filename, metadata) = self
.build_source_dist(
source_dist,
source_dist_entry.path(),
subdirectory,
&cache_entry,
)
.await?;
// It's not elegant that we have to read again here, but it's also negligible
// given that we have to build a source distribution next.
// Just return if the response wasn't cacheable or there were other errors that
// `CachedClient` already complained about.
if let Ok(cached) = fs::read(cache_entry.path()).await {
// If the file exists and it was just read or written by `CachedClient`, we assume it
// must be correct.
let mut cached = rmp_serde::from_slice::<DataWithCachePolicy<Manifest>>(&cached)?;
cached.data.insert_wheel(
wheel_filename.clone(),
DiskFilenameAndMetadata {
disk_filename: disk_filename.clone(),
metadata: metadata.clone(),
},
);
write_atomic(cache_entry.path(), rmp_serde::to_vec(&cached)?).await?;
};
if let Some(task) = task {
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_build_complete(source_dist, task);
}
}
Ok(metadata)
}
/// Build a source distribution from a local path.
async fn path(
&self,
@ -271,7 +453,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
};
// Read the existing metadata from the cache.
let mut manifest = Self::read_fresh_metadata(&cache_entry, modified)
let mut manifest = Self::read_cached_metadata(&cache_entry, modified)
.await?
.unwrap_or_default();
@ -292,15 +474,8 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
.build_source_dist(source_dist, &path_source_dist.path, None, &cache_entry)
.await?;
if metadata.name != path_source_dist.name {
return Err(SourceDistError::NameMismatch {
metadata: metadata.name,
given: path_source_dist.name.clone(),
});
}
// Store the metadata for this build along with all the other builds.
manifest.insert(
manifest.insert_wheel(
filename.clone(),
DiskFilenameAndMetadata {
disk_filename: disk_filename.clone(),
@ -327,10 +502,115 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
path,
target,
filename,
metadata,
})
}
/// Build the source distribution's metadata from a local path.
///
/// If the build backend supports `prepare_metadata_for_build_wheel`, this method will avoid
/// building the wheel.
async fn path_metadata(
&self,
source_dist: &SourceDist,
path_source_dist: &PathSourceDist,
) -> Result<Metadata21, SourceDistError> {
let cache_entry = self.build_context.cache().entry(
CacheBucket::BuiltWheels,
WheelCache::Path(&path_source_dist.url)
.remote_wheel_dir(path_source_dist.name().as_ref()),
METADATA,
);
// Determine the last-modified time of the source distribution.
let file_metadata = fs_err::metadata(&path_source_dist.path)?;
let modified = if file_metadata.is_file() {
// `modified()` is infallible on Windows and Unix (i.e., all platforms we support).
file_metadata.modified()?
} else {
if let Some(metadata) = path_source_dist
.path
.join("pyproject.toml")
.metadata()
.ok()
.filter(std::fs::Metadata::is_file)
{
metadata.modified()?
} else if let Some(metadata) = path_source_dist
.path
.join("setup.py")
.metadata()
.ok()
.filter(std::fs::Metadata::is_file)
{
metadata.modified()?
} else {
return Err(SourceDistError::DirWithoutEntrypoint);
}
};
// Read the existing metadata from the cache.
let mut manifest = Self::read_cached_metadata(&cache_entry, modified)
.await?
.unwrap_or_default();
// If the cache contains compatible metadata, return it.
if let Some(metadata) = manifest.find_metadata() {
return Ok(metadata.clone());
}
// If the backend supports `prepare_metadata_for_build_wheel`, use it.
if let Some(metadata) = self
.build_source_dist_metadata(source_dist, &path_source_dist.path, None)
.await?
{
// Store the metadata for this build along with all the other builds.
manifest.set_metadata(metadata.clone());
let cached = CachedByTimestamp {
timestamp: modified,
data: manifest,
};
let data = rmp_serde::to_vec(&cached)?;
fs::create_dir_all(&cache_entry.dir()).await?;
write_atomic(cache_entry.path(), data).await?;
return Ok(metadata);
}
// Otherwise, we need to build a wheel.
let task = self
.reporter
.as_ref()
.map(|reporter| reporter.on_build_start(source_dist));
let (disk_filename, filename, metadata) = self
.build_source_dist(source_dist, &path_source_dist.path, None, &cache_entry)
.await?;
// Store the metadata for this build along with all the other builds.
manifest.insert_wheel(
filename.clone(),
DiskFilenameAndMetadata {
disk_filename: disk_filename.clone(),
metadata: metadata.clone(),
},
);
let cached = CachedByTimestamp {
timestamp: modified,
data: manifest,
};
let data = rmp_serde::to_vec(&cached)?;
write_atomic(cache_entry.path(), data).await?;
if let Some(task) = task {
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_build_complete(source_dist, task);
}
}
Ok(metadata)
}
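The directory branch above keys cache freshness off `pyproject.toml`, then `setup.py`. A condensed sketch of that probe, returning `None` where the caller would report `DirWithoutEntrypoint`:

use std::path::Path;
use std::time::SystemTime;

// Sketch: the timestamp used to invalidate cached builds of a local source tree.
fn entrypoint_modified(dir: &Path) -> std::io::Result<Option<SystemTime>> {
    for entrypoint in ["pyproject.toml", "setup.py"] {
        let path = dir.join(entrypoint);
        if let Ok(metadata) = path.metadata() {
            if metadata.is_file() {
                return Ok(Some(metadata.modified()?));
            }
        }
    }
    Ok(None)
}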
/// Build a source distribution from a Git repository.
async fn git(
&self,
@ -372,15 +652,8 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
)
.await?;
if metadata.name != git_source_dist.name {
return Err(SourceDistError::NameMismatch {
metadata: metadata.name,
given: git_source_dist.name.clone(),
});
}
// Store the metadata for this build along with all the other builds.
manifest.insert(
manifest.insert_wheel(
filename.clone(),
DiskFilenameAndMetadata {
disk_filename: disk_filename.clone(),
@ -403,10 +676,86 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
path,
target,
filename,
metadata,
})
}
/// Build the source distribution's metadata from a Git repository.
///
/// If the build backend supports `prepare_metadata_for_build_wheel`, this method will avoid
/// building the wheel.
async fn git_metadata(
&self,
source_dist: &SourceDist,
git_source_dist: &GitSourceDist,
) -> Result<Metadata21, SourceDistError> {
let (fetch, subdirectory) = self.download_source_dist_git(&git_source_dist.url).await?;
let git_sha = fetch.git().precise().expect("Exact commit after checkout");
let cache_entry = self.build_context.cache().entry(
CacheBucket::BuiltWheels,
WheelCache::Git(&git_source_dist.url, &git_sha.to_short_string())
.remote_wheel_dir(git_source_dist.name().as_ref()),
METADATA,
);
// Read the existing metadata from the cache.
let mut manifest = Self::read_metadata(&cache_entry).await?.unwrap_or_default();
// If the cache contains compatible metadata, return it.
if let Some(metadata) = manifest.find_metadata() {
return Ok(metadata.clone());
}
// If the backend supports `prepare_metadata_for_build_wheel`, use it.
if let Some(metadata) = self
.build_source_dist_metadata(source_dist, fetch.path(), subdirectory.as_deref())
.await?
{
// Store the metadata for this build along with all the other builds.
manifest.set_metadata(metadata.clone());
let data = rmp_serde::to_vec(&manifest)?;
fs::create_dir_all(&cache_entry.dir()).await?;
write_atomic(cache_entry.path(), data).await?;
return Ok(metadata);
}
// Otherwise, we need to build a wheel.
let task = self
.reporter
.as_ref()
.map(|reporter| reporter.on_build_start(source_dist));
let (disk_filename, filename, metadata) = self
.build_source_dist(
source_dist,
fetch.path(),
subdirectory.as_deref(),
&cache_entry,
)
.await?;
// Store the metadata for this build along with all the other builds.
manifest.insert_wheel(
filename.clone(),
DiskFilenameAndMetadata {
disk_filename: disk_filename.clone(),
metadata: metadata.clone(),
},
);
let data = rmp_serde::to_vec(&manifest)?;
write_atomic(cache_entry.path(), data).await?;
if let Some(task) = task {
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_build_complete(source_dist, task);
}
}
Ok(metadata)
}
/// Download and unzip a source distribution into the cache from an HTTP response.
async fn persist_source_dist_url<'data>(
&self,
@ -518,7 +867,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
/// Build a source distribution, storing the built wheel in the cache.
///
/// Returns the un-normalized disk filename, the parsed and normalized filename, and the metadata.
#[instrument(skip_all, fields(dist = %dist))]
#[instrument(skip_all, fields(dist))]
async fn build_source_dist(
&self,
dist: &SourceDist,
@ -552,10 +901,64 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
let filename = WheelFilename::from_str(&disk_filename)?;
let metadata = read_metadata(&filename, cache_entry.dir().join(&disk_filename))?;
// Validate the metadata.
if &metadata.name != dist.name() {
return Err(SourceDistError::NameMismatch {
metadata: metadata.name,
given: dist.name().clone(),
});
}
debug!("Finished building: {dist}");
Ok((disk_filename, filename, metadata))
}
/// Build the metadata for a source distribution.
#[instrument(skip_all, fields(dist))]
async fn build_source_dist_metadata(
&self,
dist: &SourceDist,
source_dist: &Path,
subdirectory: Option<&Path>,
) -> Result<Option<Metadata21>, SourceDistError> {
debug!("Preparing metadata for: {dist}");
// Setup the builder.
let mut builder = self
.build_context
.setup_build(
source_dist,
subdirectory,
&dist.to_string(),
BuildKind::Wheel,
)
.await
.map_err(|err| SourceDistError::Build(dist.to_string(), err))?;
// Build the metadata.
let dist_info = builder
.metadata()
.await
.map_err(|err| SourceDistError::Build(dist.to_string(), err))?;
let Some(dist_info) = dist_info else {
return Ok(None);
};
// Read the metadata from disk.
debug!("Prepared metadata for: {dist}");
let metadata = Metadata21::parse(&fs::read(dist_info.join("METADATA")).await?)?;
// Validate the metadata.
if &metadata.name != dist.name() {
return Err(SourceDistError::NameMismatch {
metadata: metadata.name,
given: dist.name().clone(),
});
}
Ok(Some(metadata))
}
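Together with `build_source_dist`, this gives each of the `*_metadata` paths above the same three-step ladder. A compressed sketch of that control flow, with signatures abbreviated and the helper name hypothetical:

// Sketch: the shared fallback ladder of url_metadata/path_metadata/git_metadata.
async fn metadata_via_fastest_path(
    builder: &SourceDistCachedBuilder<'_, impl BuildContext>,
    manifest: &Manifest,
    dist: &SourceDist,
    source: &Path,
    cache_entry: &CacheEntry,
) -> Result<Metadata21, SourceDistError> {
    // 1. Fresh cached metadata wins outright.
    if let Some(metadata) = manifest.find_metadata() {
        return Ok(metadata.clone());
    }
    // 2. A backend that supports `prepare_metadata_for_build_wheel` avoids the build.
    if let Some(metadata) = builder
        .build_source_dist_metadata(dist, source, None)
        .await?
    {
        return Ok(metadata);
    }
    // 3. Otherwise, build a wheel and read the METADATA it ships.
    let (_disk_filename, _filename, metadata) = builder
        .build_source_dist(dist, source, None, cache_entry)
        .await?;
    Ok(metadata)
}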
/// Build a single directory into an editable wheel.
pub async fn build_editable(
&self,
@ -591,7 +994,7 @@ impl<'a, T: BuildContext> SourceDistCachedBuilder<'a, T> {
}
/// Read an existing cache entry, if it exists and is up-to-date.
async fn read_fresh_metadata(
async fn read_cached_metadata(
cache_entry: &CacheEntry,
modified: std::time::SystemTime,
) -> Result<Option<Manifest>, SourceDistError> {

View file

@ -49,9 +49,10 @@ pub async fn unzip_no_seek<R: tokio::io::AsyncRead + Unpin>(
if let Some(parent) = path.parent() {
tokio::fs::create_dir_all(parent).await?;
}
let mut file = tokio::fs::File::create(path).await?;
let file = tokio::fs::File::create(path).await?;
let mut writer = tokio::io::BufWriter::new(file);
let mut reader = entry.reader_mut().compat();
tokio::io::copy(&mut reader, &mut file).await?;
tokio::io::copy(&mut reader, &mut writer).await?;
}
// Close current file to get access to the next one. See docs:

View file

@ -4,6 +4,7 @@ use std::time::SystemTimeError;
use thiserror::Error;
pub use crate::cfg::Configuration;
pub use crate::interpreter::Interpreter;
pub use crate::python_version::PythonVersion;
pub use crate::virtual_env::Virtualenv;

View file

@ -1,18 +1,16 @@
use pubgrub::range::Range;
use rustc_hash::FxHashMap;
use distribution_types::{Dist, DistributionMetadata, IndexUrl, Name};
use distribution_types::{Dist, DistributionMetadata, Name};
use pep440_rs::VersionSpecifiers;
use pep508_rs::{Requirement, VersionOrUrl};
use puffin_normalize::PackageName;
use pypi_types::BaseUrl;
use crate::file::DistFile;
use crate::prerelease_mode::PreReleaseStrategy;
use crate::pubgrub::PubGrubVersion;
use crate::python_requirement::PythonRequirement;
use crate::resolution_mode::ResolutionStrategy;
use crate::version_map::{ResolvableFile, VersionMap};
use crate::version_map::{DistRequiresPython, ResolvableFile, VersionMap};
use crate::{Manifest, ResolutionOptions};
#[derive(Debug, Clone)]
@ -247,12 +245,12 @@ impl<'a> Candidate<'a> {
}
/// Return the [`DistRequiresPython`] to use when resolving the package.
pub(crate) fn resolve(&self) -> &DistFile {
pub(crate) fn resolve(&self) -> &DistRequiresPython {
self.file.resolve()
}
/// Return the [`DistRequiresPython`] to use when installing the package.
pub(crate) fn install(&self) -> &DistFile {
pub(crate) fn install(&self) -> &DistRequiresPython {
self.file.install()
}
@ -271,7 +269,7 @@ impl<'a> Candidate<'a> {
// If the candidate is a source distribution, and doesn't support the installed Python
// version, return the failing version specifiers, since we won't be able to build it.
if self.install().is_sdist() {
if matches!(self.install().dist, Dist::Source(_)) {
if !requires_python.contains(requirement.installed()) {
return Some(requires_python);
}
@ -279,17 +277,6 @@ impl<'a> Candidate<'a> {
None
}
/// Return the [`Dist`] to use when resolving the candidate.
pub(crate) fn into_distribution(self, index: IndexUrl, base: BaseUrl) -> Dist {
Dist::from_registry(
self.name().clone(),
self.version().clone().into(),
self.resolve().clone().into(),
index,
base,
)
}
}
impl Name for Candidate<'_> {

View file

@ -7,12 +7,11 @@ use rustc_hash::FxHashMap;
use thiserror::Error;
use url::Url;
use distribution_types::{BuiltDist, IndexUrl, PathBuiltDist, PathSourceDist, SourceDist};
use distribution_types::{BuiltDist, PathBuiltDist, PathSourceDist, SourceDist};
use pep508_rs::Requirement;
use puffin_distribution::DistributionDatabaseError;
use puffin_normalize::PackageName;
use puffin_traits::OnceMap;
use pypi_types::BaseUrl;
use crate::candidate_selector::CandidateSelector;
use crate::pubgrub::{PubGrubPackage, PubGrubPython, PubGrubReportFormatter, PubGrubVersion};
@ -161,7 +160,7 @@ impl NoSolutionError {
pub(crate) fn with_available_versions(
mut self,
python_requirement: &PythonRequirement,
package_versions: &OnceMap<PackageName, (IndexUrl, BaseUrl, VersionMap)>,
package_versions: &OnceMap<PackageName, VersionMap>,
) -> Self {
let mut available_versions = FxHashMap::default();
for package in self.derivation_tree.packages() {
@ -181,7 +180,7 @@ impl NoSolutionError {
}
PubGrubPackage::Package(name, ..) => {
if let Some(entry) = package_versions.get(name) {
let (_, _, version_map) = entry.value();
let version_map = entry.value();
available_versions.insert(
package.clone(),
version_map

View file

@ -1,92 +0,0 @@
use std::ops::Deref;
use distribution_types::File;
/// A distribution can either be a wheel or a source distribution.
#[derive(Debug, Clone)]
pub(crate) struct WheelFile(pub(crate) File);
#[derive(Debug, Clone)]
pub(crate) struct SdistFile(pub(crate) File);
#[derive(Debug, Clone)]
pub(crate) enum DistFile {
Wheel(WheelFile),
Sdist(SdistFile),
}
impl Deref for WheelFile {
type Target = File;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Deref for SdistFile {
type Target = File;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<WheelFile> for File {
fn from(wheel: WheelFile) -> Self {
wheel.0
}
}
impl From<SdistFile> for File {
fn from(sdist: SdistFile) -> Self {
sdist.0
}
}
impl From<WheelFile> for DistFile {
fn from(wheel: WheelFile) -> Self {
Self::Wheel(wheel)
}
}
impl From<SdistFile> for DistFile {
fn from(sdist: SdistFile) -> Self {
Self::Sdist(sdist)
}
}
impl DistFile {
pub(crate) fn filename(&self) -> &str {
match self {
Self::Wheel(wheel) => wheel.filename.as_str(),
Self::Sdist(sdist) => sdist.filename.as_str(),
}
}
pub(crate) fn is_sdist(&self) -> bool {
match self {
Self::Wheel(_) => false,
Self::Sdist(_) => true,
}
}
}
impl From<DistFile> for File {
fn from(file: DistFile) -> Self {
match file {
DistFile::Wheel(wheel) => wheel.into(),
DistFile::Sdist(sdist) => sdist.into(),
}
}
}
impl Deref for DistFile {
type Target = File;
fn deref(&self) -> &Self::Target {
match self {
DistFile::Wheel(file) => &file.0,
DistFile::Sdist(file) => &file.0,
}
}
}

View file

@ -2,14 +2,13 @@ pub use error::ResolveError;
pub use finder::{DistFinder, Reporter as FinderReporter};
pub use manifest::Manifest;
pub use prerelease_mode::PreReleaseMode;
pub use resolution::ResolutionGraph;
pub use resolution::{Diagnostic, ResolutionGraph};
pub use resolution_mode::ResolutionMode;
pub use resolution_options::ResolutionOptions;
pub use resolver::{BuildId, Reporter as ResolverReporter, Resolver, ResolverProvider};
mod candidate_selector;
mod error;
mod file;
mod finder;
mod manifest;
mod overrides;

View file

@ -1,8 +1,7 @@
use rustc_hash::FxHashMap;
use distribution_types::{File, IndexUrl};
use distribution_types::Dist;
use puffin_normalize::PackageName;
use pypi_types::BaseUrl;
use crate::candidate_selector::Candidate;
@ -11,29 +10,19 @@ use crate::candidate_selector::Candidate;
/// For example, given `Flask==3.0.0`, the [`FilePins`] would contain a mapping from `Flask` to
/// `3.0.0` to the specific wheel or source distribution archive that was pinned for that version.
#[derive(Debug, Default)]
pub(crate) struct FilePins(
FxHashMap<PackageName, FxHashMap<pep440_rs::Version, (IndexUrl, BaseUrl, File)>>,
);
pub(crate) struct FilePins(FxHashMap<PackageName, FxHashMap<pep440_rs::Version, Dist>>);
impl FilePins {
/// Pin a candidate package.
pub(crate) fn insert(&mut self, candidate: &Candidate, index: &IndexUrl, base: &BaseUrl) {
pub(crate) fn insert(&mut self, candidate: &Candidate) {
self.0.entry(candidate.name().clone()).or_default().insert(
candidate.version().clone().into(),
(
index.clone(),
base.clone(),
candidate.install().clone().into(),
),
candidate.install().dist.clone(),
);
}
/// Return the pinned file for the given package name and version, if it exists.
pub(crate) fn get(
&self,
name: &PackageName,
version: &pep440_rs::Version,
) -> Option<&(IndexUrl, BaseUrl, File)> {
pub(crate) fn get(&self, name: &PackageName, version: &pep440_rs::Version) -> Option<&Dist> {
self.0.get(name)?.get(version)
}
}

View file

@ -1,9 +1,11 @@
use std::borrow::Cow;
use std::cmp::Ordering;
use std::ops::Bound;
use derivative::Derivative;
use owo_colors::OwoColorize;
use pubgrub::range::Range;
use pubgrub::report::{DerivationTree, External, ReportFormatter};
use pubgrub::report::{DerivationTree, Derived, External, ReportFormatter};
use pubgrub::term::Term;
use pubgrub::type_aliases::Map;
use rustc_hash::{FxHashMap, FxHashSet};
@ -37,16 +39,19 @@ impl ReportFormatter<PubGrubPackage, Range<PubGrubVersion>> for PubGrubReportFor
} else if set.as_singleton().is_some() {
format!("there is no version of {package}{set}")
} else {
format!("there are no versions of {package}{set}")
format!(
"there are no versions of {} that satisfy {}",
package,
PackageRange::compatibility(package, &set)
)
}
}
External::UnavailableDependencies(package, set) => {
let set = self.simplify_set(set, package);
if set.as_ref() == &Range::full() {
format!("dependencies of {package} are unavailable")
} else {
format!("dependencies of {package}{set} are unavailable")
}
format!(
"dependencies of {}are unavailable",
Padded::new("", &PackageRange::compatibility(package, &set), " ")
)
}
External::UnusableDependencies(package, set, reason) => {
if let Some(reason) = reason {
@ -54,44 +59,34 @@ impl ReportFormatter<PubGrubPackage, Range<PubGrubVersion>> for PubGrubReportFor
format!("{package} dependencies are unusable: {reason}")
} else {
let set = self.simplify_set(set, package);
if set.as_ref() == &Range::full() {
format!("dependencies of {package} are unusable: {reason}")
} else {
format!("dependencies of {package}{set} are unusable: {reason}",)
}
format!(
"dependencies of {}are unusable: {reason}",
Padded::new("", &PackageRange::compatibility(package, &set), " ")
)
}
} else {
let set = self.simplify_set(set, package);
if set.as_ref() == &Range::full() {
format!("dependencies of {package} are unusable")
} else {
format!("dependencies of {package}{set} are unusable")
}
format!(
"dependencies of {}are unusable",
Padded::new("", &PackageRange::compatibility(package, &set), " ")
)
}
}
External::FromDependencyOf(package, package_set, dependency, dependency_set) => {
let package_set = self.simplify_set(package_set, package);
let dependency_set = self.simplify_set(dependency_set, dependency);
if package_set.as_ref() == &Range::full()
&& dependency_set.as_ref() == &Range::full()
{
format!("{package} depends on {dependency}")
} else if package_set.as_ref() == &Range::full() {
format!("{package} depends on {dependency}{dependency_set}")
} else if dependency_set.as_ref() == &Range::full() {
if matches!(package, PubGrubPackage::Root(_)) {
// Exclude the dummy version for root packages
format!("{package} depends on {dependency}")
} else {
format!("{package}{package_set} depends on {dependency}")
}
if matches!(package, PubGrubPackage::Root(_)) {
// Exclude the dummy version for root packages
format!(
"{package} depends on {}",
PackageRange::dependency(dependency, &dependency_set)
)
} else {
if matches!(package, PubGrubPackage::Root(_)) {
// Exclude the dummy version for root packages
format!("{package} depends on {dependency}{dependency_set}")
} else {
format!("{package}{package_set} depends on {dependency}{dependency_set}")
}
format!(
"{}depends on {}",
Padded::new("", &PackageRange::compatibility(package, &package_set), " "),
PackageRange::dependency(dependency, &dependency_set)
)
}
}
}
@ -101,7 +96,7 @@ impl ReportFormatter<PubGrubPackage, Range<PubGrubVersion>> for PubGrubReportFor
fn format_terms(&self, terms: &Map<PubGrubPackage, Term<Range<PubGrubVersion>>>) -> String {
let terms_vec: Vec<_> = terms.iter().collect();
match terms_vec.as_slice() {
[] | [(PubGrubPackage::Root(_), _)] => "version solving failed".into(),
[] | [(PubGrubPackage::Root(_), _)] => "the requirements are unsatisfiable".into(),
[(package @ PubGrubPackage::Package(..), Term::Positive(range))] => {
let range = range.simplify(
self.available_versions
@ -109,7 +104,10 @@ impl ReportFormatter<PubGrubPackage, Range<PubGrubVersion>> for PubGrubReportFor
.unwrap_or(&vec![])
.iter(),
);
format!("{package}{range} is forbidden")
format!(
"{} cannot be used",
PackageRange::compatibility(package, &range)
)
}
[(package @ PubGrubPackage::Package(..), Term::Negative(range))] => {
let range = range.simplify(
@ -118,7 +116,10 @@ impl ReportFormatter<PubGrubPackage, Range<PubGrubVersion>> for PubGrubReportFor
.unwrap_or(&vec![])
.iter(),
);
format!("{package}{range} is mandatory")
format!(
"{} must be used",
PackageRange::compatibility(package, &range)
)
}
[(p1, Term::Positive(r1)), (p2, Term::Negative(r2))] => self.format_external(
&External::FromDependencyOf((*p1).clone(), r1.clone(), (*p2).clone(), r2.clone()),
@ -127,14 +128,150 @@ impl ReportFormatter<PubGrubPackage, Range<PubGrubVersion>> for PubGrubReportFor
&External::FromDependencyOf((*p2).clone(), r2.clone(), (*p1).clone(), r1.clone()),
),
slice => {
let mut result = String::new();
let str_terms: Vec<_> = slice
.iter()
.map(|(p, t)| format!("{p}{}", PubGrubTerm::from_term((*t).clone())))
.map(|(p, t)| format!("{}", PackageTerm::new(p, t)))
.collect();
str_terms.join(", ") + " are incompatible"
for (index, term) in str_terms.iter().enumerate() {
result.push_str(term);
match str_terms.len().cmp(&2) {
Ordering::Equal if index == 0 => {
result.push_str(" and ");
}
Ordering::Greater if index + 1 < str_terms.len() => {
result.push_str(", ");
}
_ => (),
}
}
result.push_str(" are incompatible");
result
}
}
}
/// The simplest case: we just combine two external incompatibilities.
fn explain_both_external(
&self,
external1: &External<PubGrubPackage, Range<PubGrubVersion>>,
external2: &External<PubGrubPackage, Range<PubGrubVersion>>,
current_terms: &Map<PubGrubPackage, Term<Range<PubGrubVersion>>>,
) -> String {
let external1 = self.format_external(external1);
let external2 = self.format_external(external2);
let terms = self.format_terms(current_terms);
format!(
"Because {}and {}we can conclude that {}",
Padded::from_string("", &external1, " "),
Padded::from_string("", &external2, ", "),
Padded::from_string("", &terms, ".")
)
}
/// Both causes have already been explained, so we use their refs.
fn explain_both_ref(
&self,
ref_id1: usize,
derived1: &Derived<PubGrubPackage, Range<PubGrubVersion>>,
ref_id2: usize,
derived2: &Derived<PubGrubPackage, Range<PubGrubVersion>>,
current_terms: &Map<PubGrubPackage, Term<Range<PubGrubVersion>>>,
) -> String {
// TODO: order should be chosen to make it more logical.
let derived1_terms = self.format_terms(&derived1.terms);
let derived2_terms = self.format_terms(&derived2.terms);
let current_terms = self.format_terms(current_terms);
format!(
"Because we know from ({}) that {}and we know from ({}) that {}{}",
ref_id1,
Padded::new("", &derived1_terms, " "),
ref_id2,
Padded::new("", &derived2_terms, ", "),
Padded::new("", &current_terms, "."),
)
}
/// One cause is derived (already explained so one-line),
/// the other is a one-line external cause,
/// and finally we conclude with the current incompatibility.
fn explain_ref_and_external(
&self,
ref_id: usize,
derived: &Derived<PubGrubPackage, Range<PubGrubVersion>>,
external: &External<PubGrubPackage, Range<PubGrubVersion>>,
current_terms: &Map<PubGrubPackage, Term<Range<PubGrubVersion>>>,
) -> String {
// TODO: order should be chosen to make it more logical.
let derived_terms = self.format_terms(&derived.terms);
let external = self.format_external(external);
let current_terms = self.format_terms(current_terms);
format!(
"Because we know from ({}) that {}and {}we can conclude that {}",
ref_id,
Padded::new("", &derived_terms, " "),
Padded::new("", &external, ", "),
Padded::new("", &current_terms, "."),
)
}
/// Add an external cause to the chain of explanations.
fn and_explain_external(
&self,
external: &External<PubGrubPackage, Range<PubGrubVersion>>,
current_terms: &Map<PubGrubPackage, Term<Range<PubGrubVersion>>>,
) -> String {
let external = self.format_external(external);
let terms = self.format_terms(current_terms);
format!(
"And because {}we can conclude that {}",
Padded::from_string("", &external, " "),
Padded::from_string("", &terms, "."),
)
}
/// Add an already-explained incompatibility to the chain of explanations.
fn and_explain_ref(
&self,
ref_id: usize,
derived: &Derived<PubGrubPackage, Range<PubGrubVersion>>,
current_terms: &Map<PubGrubPackage, Term<Range<PubGrubVersion>>>,
) -> String {
let derived = self.format_terms(&derived.terms);
let current = self.format_terms(current_terms);
format!(
"And because we know from ({}) that {}we can conclude that {}",
ref_id,
Padded::from_string("", &derived, ", "),
Padded::from_string("", &current, "."),
)
}
/// Add a prior external cause and a new external cause to the chain of explanations.
fn and_explain_prior_and_external(
&self,
prior_external: &External<PubGrubPackage, Range<PubGrubVersion>>,
external: &External<PubGrubPackage, Range<PubGrubVersion>>,
current_terms: &Map<PubGrubPackage, Term<Range<PubGrubVersion>>>,
) -> String {
let prior_external = self.format_external(prior_external);
let external = self.format_external(external);
let terms = self.format_terms(current_terms);
format!(
"And because {}and {}we can conclude that {}",
Padded::from_string("", &prior_external, " "),
Padded::from_string("", &external, ", "),
Padded::from_string("", &terms, "."),
)
}
}
impl PubGrubReportFormatter<'_> {
@ -267,35 +404,177 @@ impl std::fmt::Display for PubGrubHint {
"hint".bold().cyan(),
":".bold(),
package.bold(),
range.bold()
PackageRange::compatibility(package, range).bold()
)
}
}
}
}
/// A derivative of the [Term] type with custom formatting.
struct PubGrubTerm {
inner: Term<Range<PubGrubVersion>>,
/// A [`Term`] and [`PubGrubPackage`] combination for display.
struct PackageTerm<'a> {
package: &'a PubGrubPackage,
term: &'a Term<Range<PubGrubVersion>>,
}
impl std::fmt::Display for PubGrubTerm {
impl std::fmt::Display for PackageTerm<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.inner {
Term::Positive(set) => write!(f, "{set}"),
match &self.term {
Term::Positive(set) => write!(f, "{}", PackageRange::compatibility(self.package, set)),
Term::Negative(set) => {
if let Some(version) = set.as_singleton() {
write!(f, "!={version}")
let package = self.package;
write!(f, "{package}!={version}")
} else {
write!(f, "!( {set} )")
write!(f, "!( {} )", PackageRange::compatibility(self.package, set))
}
}
}
}
}
impl PubGrubTerm {
fn from_term(term: Term<Range<PubGrubVersion>>) -> PubGrubTerm {
PubGrubTerm { inner: term }
impl PackageTerm<'_> {
fn new<'a>(
package: &'a PubGrubPackage,
term: &'a Term<Range<PubGrubVersion>>,
) -> PackageTerm<'a> {
PackageTerm { package, term }
}
}
/// The kind of version range being displayed in [`PackageRange`].
#[derive(Debug)]
enum PackageRangeKind {
Dependency,
Compatibility,
}
/// A [`Range`] and [`PubGrubPackage`] combination for display.
#[derive(Debug)]
struct PackageRange<'a> {
package: &'a PubGrubPackage,
range: &'a Range<PubGrubVersion>,
kind: PackageRangeKind,
}
impl std::fmt::Display for PackageRange<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if self.range.is_empty() {
write!(f, "")?;
} else {
let segments: Vec<_> = self.range.iter().collect();
if segments.len() > 1 {
match self.kind {
PackageRangeKind::Dependency => write!(f, "one of:")?,
PackageRangeKind::Compatibility => write!(f, "any of:")?,
}
}
for segment in &segments {
if segments.len() > 1 {
write!(f, "\n ")?;
}
let package = self.package;
match segment {
(Bound::Unbounded, Bound::Unbounded) => match self.kind {
PackageRangeKind::Dependency => write!(f, "{package}")?,
PackageRangeKind::Compatibility => write!(f, "all versions of {package}")?,
},
(Bound::Unbounded, Bound::Included(v)) => write!(f, "{package}<={v}")?,
(Bound::Unbounded, Bound::Excluded(v)) => write!(f, "{package}<{v}")?,
(Bound::Included(v), Bound::Unbounded) => write!(f, "{package}>={v}")?,
(Bound::Included(v), Bound::Included(b)) => {
if v == b {
write!(f, "{package}=={v}")?;
} else {
write!(f, "{package}>={v},<={b}")?;
}
}
(Bound::Included(v), Bound::Excluded(b)) => write!(f, "{package}>={v},<{b}")?,
(Bound::Excluded(v), Bound::Unbounded) => write!(f, "{package}>{v}")?,
(Bound::Excluded(v), Bound::Included(b)) => write!(f, "{package}>{v},<={b}")?,
(Bound::Excluded(v), Bound::Excluded(b)) => write!(f, "{package}>{v},<{b}")?,
};
}
if segments.len() > 1 {
writeln!(f)?;
}
}
Ok(())
}
}
impl PackageRange<'_> {
fn compatibility<'a>(
package: &'a PubGrubPackage,
range: &'a Range<PubGrubVersion>,
) -> PackageRange<'a> {
PackageRange {
package,
range,
kind: PackageRangeKind::Compatibility,
}
}
fn dependency<'a>(
package: &'a PubGrubPackage,
range: &'a Range<PubGrubVersion>,
) -> PackageRange<'a> {
PackageRange {
package,
range,
kind: PackageRangeKind::Dependency,
}
}
}
/// Inserts the given padding on the left and right sides of the content if
/// the content does not already start and end with whitespace, respectively.
#[derive(Debug)]
struct Padded<'a, T: std::fmt::Display> {
left: &'a str,
content: &'a T,
right: &'a str,
}
impl<'a, T: std::fmt::Display> Padded<'a, T> {
fn new(left: &'a str, content: &'a T, right: &'a str) -> Self {
Padded {
left,
content,
right,
}
}
}
impl<'a> Padded<'a, String> {
fn from_string(left: &'a str, content: &'a String, right: &'a str) -> Self {
Padded {
left,
content,
right,
}
}
}
impl<T: std::fmt::Display> std::fmt::Display for Padded<'_, T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut result = String::new();
let content = self.content.to_string();
if let Some(char) = content.chars().next() {
if !char.is_whitespace() {
result.push_str(self.left);
}
}
result.push_str(&content);
if let Some(char) = content.chars().last() {
if !char.is_whitespace() {
result.push_str(self.right);
}
}
write!(f, "{result}")
}
}
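A minimal illustration of `Padded`'s conditional spacing, which is why the format strings above can safely read `"dependencies of {}are unavailable"` without double-spacing (values hypothetical):

#[test]
fn padded_spacing() {
    // The right-hand pad is only added when the content does not already end
    // in whitespace, so multi-line "any of:" lists keep their trailing newline.
    let plain = "a>=2.0.0".to_string();
    assert_eq!(Padded::from_string("", &plain, " ").to_string(), "a>=2.0.0 ");

    let multiline = "any of:\n    a>=2.0.0,<3.0.0\n    a>=4.0.0\n".to_string();
    assert_eq!(Padded::from_string("", &multiline, " ").to_string(), multiline);
}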

View file

@ -55,12 +55,10 @@ impl ResolutionGraph {
match package {
PubGrubPackage::Package(package_name, None, None) => {
let version = Version::from(version.clone());
let (index, base, file) = pins
let pinned_package = pins
.get(package_name, &version)
.expect("Every package should be pinned")
.clone();
let pinned_package =
Dist::from_registry(package_name.clone(), version, file, index, base);
let index = petgraph.add_node(pinned_package);
inverse.insert(package_name, index);
@ -89,12 +87,10 @@ impl ResolutionGraph {
if !metadata.provides_extras.contains(extra) {
let version = Version::from(version.clone());
let (index, base, file) = pins
let pinned_package = pins
.get(package_name, &version)
.expect("Every package should be pinned")
.clone();
let pinned_package =
Dist::from_registry(package_name.clone(), version, file, index, base);
diagnostics.push(Diagnostic::MissingExtra {
dist: pinned_package,

View file

@ -1,10 +1,10 @@
use url::Url;
use distribution_types::{IndexUrl, PackageId};
use distribution_types::PackageId;
use pep440_rs::VersionSpecifiers;
use puffin_normalize::PackageName;
use puffin_traits::OnceMap;
use pypi_types::{BaseUrl, Metadata21};
use pypi_types::Metadata21;
use crate::version_map::VersionMap;
@ -13,7 +13,7 @@ use crate::version_map::VersionMap;
pub(crate) struct Index {
/// A map from package name to the [`VersionMap`] for that package.
pub(crate) packages: OnceMap<PackageName, (IndexUrl, BaseUrl, VersionMap)>,
pub(crate) packages: OnceMap<PackageName, VersionMap>,
/// A map from package ID to metadata for that distribution.
pub(crate) distributions: OnceMap<PackageId, Metadata21>,

View file

@ -17,7 +17,8 @@ use url::Url;
use distribution_filename::WheelFilename;
use distribution_types::{
BuiltDist, Dist, DistributionMetadata, IndexUrl, LocalEditable, Name, SourceDist, VersionOrUrl,
BuiltDist, Dist, DistributionMetadata, LocalEditable, Name, RemoteSource, SourceDist,
VersionOrUrl,
};
use pep508_rs::{MarkerEnvironment, Requirement};
use platform_tags::Tags;
@ -26,7 +27,7 @@ use puffin_distribution::DistributionDatabase;
use puffin_interpreter::Interpreter;
use puffin_normalize::PackageName;
use puffin_traits::BuildContext;
use pypi_types::{BaseUrl, Metadata21};
use pypi_types::Metadata21;
use crate::candidate_selector::CandidateSelector;
use crate::error::ResolveError;
@ -472,7 +473,7 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
PubGrubPackage::Package(package_name, extra, None) => {
// Wait for the metadata to be available.
let entry = self.index.packages.wait(package_name).await;
let (index, base, version_map) = entry.value();
let version_map = entry.value();
if let Some(extra) = extra {
debug!(
@ -502,20 +503,28 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
candidate.name(),
extra,
candidate.version(),
candidate.resolve().filename()
candidate
.resolve()
.dist
.filename()
.unwrap_or("unknown filename")
);
} else {
debug!(
"Selecting: {}=={} ({})",
candidate.name(),
candidate.version(),
candidate.resolve().filename()
candidate
.resolve()
.dist
.filename()
.unwrap_or("unknown filename")
);
}
// We want to return a package pinned to a specific version, but we _also_ want
// to store the exact file that we selected to satisfy that version.
pins.insert(&candidate, index, base);
pins.insert(&candidate);
let version = candidate.version().clone();
@ -525,7 +534,7 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
.distributions
.register_owned(candidate.package_id())
{
let distribution = candidate.into_distribution(index.clone(), base.clone());
let distribution = candidate.resolve().dist.clone();
request_sink.unbounded_send(Request::Dist(distribution))?;
}
@ -670,11 +679,9 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
while let Some(response) = response_stream.next().await {
match response? {
Some(Response::Package(package_name, index, base, version_map)) => {
Some(Response::Package(package_name, version_map)) => {
trace!("Received package metadata for: {package_name}");
self.index
.packages
.done(package_name, (index, base, version_map));
self.index.packages.done(package_name, version_map);
}
Some(Response::Dist(Dist::Built(distribution), metadata, ..)) => {
trace!("Received built distribution metadata for: {distribution}");
@ -713,12 +720,12 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
match request {
// Fetch package metadata from the registry.
Request::Package(package_name) => {
let (index, base, metadata) = self
let version_map = self
.provider
.get_version_map(&package_name)
.await
.map_err(ResolveError::Client)?;
Ok(Some(Response::Package(package_name, index, base, metadata)))
Ok(Some(Response::Package(package_name, version_map)))
}
// Fetch distribution metadata from the distribution database.
@ -746,7 +753,7 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
Request::Prefetch(package_name, range) => {
// Wait for the package metadata to become available.
let entry = self.index.packages.wait(&package_name).await;
let (index, base, version_map) = entry.value();
let version_map = entry.value();
// Try to find a compatible version. If there aren't any compatible versions,
// short-circuit and return `None`.
@ -769,7 +776,7 @@ impl<'a, Provider: ResolverProvider> Resolver<'a, Provider> {
.distributions
.register_owned(candidate.package_id())
{
let dist = candidate.into_distribution(index.clone(), base.clone());
let dist = candidate.resolve().dist.clone();
drop(entry);
let (metadata, precise) = self
@ -837,7 +844,7 @@ enum Request {
#[allow(clippy::large_enum_variant)]
enum Response {
/// The returned metadata for a package hosted on a registry.
Package(PackageName, IndexUrl, BaseUrl, VersionMap),
Package(PackageName, VersionMap),
/// The returned metadata for a distribution.
Dist(Dist, Metadata21, Option<Url>),
}
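
The resolver is split into two halves joined by channels: the solver loop emits `Request`s, and a fetch loop streams `Response`s back, which is why `Response::Package` now carries just the `VersionMap`. A minimal sketch of that shape, using stand-in types and only the `futures` crate:

use futures::channel::mpsc;
use futures::executor::block_on;
use futures::StreamExt;

// Stand-ins for the real `Request`/`Response` enums.
enum Request {
    Package(String),
}
enum Response {
    Package(String, usize),
}

fn main() {
    block_on(async {
        let (request_sink, mut request_stream) = mpsc::unbounded::<Request>();
        let (response_sink, mut response_stream) = mpsc::unbounded::<Response>();

        // Solver side: enqueue a request, then drop the sink so the fetch
        // loop below knows no more requests are coming.
        request_sink
            .unbounded_send(Request::Package("flask".to_string()))
            .unwrap();
        drop(request_sink);

        // Fetcher side: turn each request into a response.
        while let Some(Request::Package(name)) = request_stream.next().await {
            // `42` stands in for a fetched version map.
            response_sink
                .unbounded_send(Response::Package(name, 42))
                .unwrap();
        }
        drop(response_sink);

        // Solver side: consume responses as they arrive.
        while let Some(Response::Package(name, versions)) = response_stream.next().await {
            println!("received metadata for {name}: {versions} candidate versions");
        }
    });
}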

View file

@ -5,19 +5,19 @@ use chrono::{DateTime, Utc};
use futures::TryFutureExt;
use url::Url;
use distribution_types::{Dist, IndexUrl};
use distribution_types::Dist;
use platform_tags::Tags;
use puffin_client::RegistryClient;
use puffin_distribution::{DistributionDatabase, DistributionDatabaseError};
use puffin_normalize::PackageName;
use puffin_traits::BuildContext;
use pypi_types::{BaseUrl, Metadata21};
use pypi_types::Metadata21;
use crate::python_requirement::PythonRequirement;
use crate::version_map::VersionMap;
use crate::yanks::AllowedYanks;
type VersionMapResponse = Result<(IndexUrl, BaseUrl, VersionMap), puffin_client::Error>;
type VersionMapResponse = Result<VersionMap, puffin_client::Error>;
type WheelMetadataResponse = Result<(Metadata21, Option<Url>), DistributionDatabaseError>;
pub trait ResolverProvider: Send + Sync {
@ -37,7 +37,7 @@ pub trait ResolverProvider: Send + Sync {
dist: &'io Dist,
) -> impl Future<Output = WheelMetadataResponse> + Send + 'io;
/// Set the [`Reporter`] to use for this installer.
/// Set the [`puffin_distribution::Reporter`] to use for this provider.
#[must_use]
fn with_reporter(self, reporter: impl puffin_distribution::Reporter + 'static) -> Self;
}
@ -83,17 +83,15 @@ impl<'a, Context: BuildContext + Send + Sync> ResolverProvider
self.client
.simple(package_name)
.map_ok(move |(index, base, metadata)| {
(
index,
base,
VersionMap::from_metadata(
metadata,
package_name,
self.tags,
&self.python_requirement,
&self.allowed_yanks,
self.exclude_newer.as_ref(),
),
VersionMap::from_metadata(
metadata,
package_name,
&index,
&base,
self.tags,
&self.python_requirement,
&self.allowed_yanks,
self.exclude_newer.as_ref(),
)
})
}

View file

@ -5,13 +5,14 @@ use chrono::{DateTime, Utc};
use tracing::{instrument, warn};
use distribution_filename::DistFilename;
use distribution_types::{Dist, IndexUrl};
use pep440_rs::VersionSpecifiers;
use platform_tags::{TagPriority, Tags};
use puffin_client::SimpleMetadata;
use puffin_normalize::PackageName;
use puffin_warnings::warn_user_once;
use pypi_types::Yanked;
use pypi_types::{BaseUrl, Yanked};
use crate::file::{DistFile, SdistFile, WheelFile};
use crate::pubgrub::PubGrubVersion;
use crate::python_requirement::PythonRequirement;
use crate::yanks::AllowedYanks;
@ -23,9 +24,12 @@ pub struct VersionMap(BTreeMap<PubGrubVersion, PrioritizedDistribution>);
impl VersionMap {
/// Initialize a [`VersionMap`] from the given metadata.
#[instrument(skip_all, fields(package_name = % package_name))]
#[allow(clippy::too_many_arguments)]
pub(crate) fn from_metadata(
metadata: SimpleMetadata,
package_name: &PackageName,
index: &IndexUrl,
base: &BaseUrl,
tags: &Tags,
python_requirement: &PythonRequirement,
allowed_yanks: &AllowedYanks,
@ -65,6 +69,7 @@ impl VersionMap {
}
}
let requires_python = file.requires_python.clone();
match filename {
DistFilename::WheelFilename(filename) => {
// To be compatible, the wheel must both have compatible tags _and_ have a
@ -78,25 +83,45 @@ impl VersionMap {
.all(|version| requires_python.contains(version))
})
});
let dist = Dist::from_registry(
filename.name.clone(),
filename.version.clone(),
file,
index.clone(),
base.clone(),
);
match version_map.entry(version.clone().into()) {
Entry::Occupied(mut entry) => {
entry.get_mut().insert_built(WheelFile(file), priority);
entry
.get_mut()
.insert_built(dist, requires_python, priority);
}
Entry::Vacant(entry) => {
entry.insert(PrioritizedDistribution::from_built(
WheelFile(file),
dist,
requires_python,
priority,
));
}
}
}
DistFilename::SourceDistFilename(_) => {
DistFilename::SourceDistFilename(filename) => {
let dist = Dist::from_registry(
filename.name.clone(),
filename.version.clone(),
file,
index.clone(),
base.clone(),
);
match version_map.entry(version.clone().into()) {
Entry::Occupied(mut entry) => {
entry.get_mut().insert_source(SdistFile(file));
entry.get_mut().insert_source(dist, requires_python);
}
Entry::Vacant(entry) => {
entry.insert(PrioritizedDistribution::from_source(SdistFile(file)));
entry.insert(PrioritizedDistribution::from_source(
dist,
requires_python,
));
}
}
}
@ -122,63 +147,111 @@ impl VersionMap {
}
}
/// A [`Dist`] paired with its `Requires-Python` specifiers, since downstream code needs this
/// information to filter a [`PrioritizedDistribution`].
#[derive(Debug)]
pub(crate) struct DistRequiresPython {
pub(crate) dist: Dist,
pub(crate) requires_python: Option<VersionSpecifiers>,
}
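
As a hedged illustration of why the specifiers travel with the `Dist` (a hypothetical helper, not puffin's code): downstream filtering reduces to testing the target interpreter version against `Requires-Python`, with an absent specifier meaning "unconstrained".

use pep440_rs::{Version, VersionSpecifiers};

/// Whether a candidate whose metadata declares `requires_python` can run on
/// the given interpreter version (hypothetical helper).
fn compatible_with(requires_python: Option<&VersionSpecifiers>, interpreter: &Version) -> bool {
    requires_python.map_or(true, |specifiers| specifiers.contains(interpreter))
}
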
#[derive(Debug)]
struct PrioritizedDistribution {
/// An arbitrary source distribution for the package version.
source: Option<DistFile>,
source: Option<DistRequiresPython>,
/// The highest-priority, platform-compatible wheel for the package version.
compatible_wheel: Option<(DistFile, TagPriority)>,
compatible_wheel: Option<(DistRequiresPython, TagPriority)>,
/// An arbitrary, platform-incompatible wheel for the package version.
incompatible_wheel: Option<DistFile>,
incompatible_wheel: Option<DistRequiresPython>,
}
impl PrioritizedDistribution {
/// Create a new [`PrioritizedDistribution`] from the given wheel distribution.
fn from_built(dist: WheelFile, priority: Option<TagPriority>) -> Self {
fn from_built(
dist: Dist,
requires_python: Option<VersionSpecifiers>,
priority: Option<TagPriority>,
) -> Self {
if let Some(priority) = priority {
Self {
source: None,
compatible_wheel: Some((dist.into(), priority)),
compatible_wheel: Some((
DistRequiresPython {
dist,
requires_python,
},
priority,
)),
incompatible_wheel: None,
}
} else {
Self {
source: None,
compatible_wheel: None,
incompatible_wheel: Some(dist.into()),
incompatible_wheel: Some(DistRequiresPython {
dist,
requires_python,
}),
}
}
}
/// Create a new [`PrioritizedDistribution`] from the given source distribution.
fn from_source(dist: SdistFile) -> Self {
fn from_source(dist: Dist, requires_python: Option<VersionSpecifiers>) -> Self {
Self {
source: Some(dist.into()),
source: Some(DistRequiresPython {
dist,
requires_python,
}),
compatible_wheel: None,
incompatible_wheel: None,
}
}
/// Insert the given built distribution into the [`PrioritizedDistribution`].
fn insert_built(&mut self, file: WheelFile, priority: Option<TagPriority>) {
fn insert_built(
&mut self,
dist: Dist,
requires_python: Option<VersionSpecifiers>,
priority: Option<TagPriority>,
) {
// Prefer the highest-priority, platform-compatible wheel.
if let Some(priority) = priority {
if let Some((.., existing_priority)) = &self.compatible_wheel {
if priority > *existing_priority {
self.compatible_wheel = Some((file.into(), priority));
self.compatible_wheel = Some((
DistRequiresPython {
dist,
requires_python,
},
priority,
));
}
} else {
self.compatible_wheel = Some((file.into(), priority));
self.compatible_wheel = Some((
DistRequiresPython {
dist,
requires_python,
},
priority,
));
}
} else if self.incompatible_wheel.is_none() {
self.incompatible_wheel = Some(file.into());
self.incompatible_wheel = Some(DistRequiresPython {
dist,
requires_python,
});
}
}
/// Insert the given source distribution into the [`PrioritizedDistribution`].
fn insert_source(&mut self, file: SdistFile) {
fn insert_source(&mut self, dist: Dist, requires_python: Option<VersionSpecifiers>) {
if self.source.is_none() {
self.source = Some(file.into());
self.source = Some(DistRequiresPython {
dist,
requires_python,
});
}
}
@ -195,9 +268,11 @@ impl PrioritizedDistribution {
// wheel. We assume that all distributions have the same metadata for a given package
// version. If a compatible source distribution exists, we assume we can build it, but
// using the wheel is faster.
(_, Some(sdist), Some(wheel)) => Some(ResolvableFile::IncompatibleWheel(sdist, wheel)),
(_, Some(source_dist), Some(wheel)) => {
Some(ResolvableFile::IncompatibleWheel(source_dist, wheel))
}
// Otherwise, if we have a source distribution, return it.
(_, Some(sdist), _) => Some(ResolvableFile::SourceDist(sdist)),
(_, Some(source_dist), _) => Some(ResolvableFile::SourceDist(source_dist)),
_ => None,
}
}
@ -206,18 +281,18 @@ impl PrioritizedDistribution {
#[derive(Debug, Clone)]
pub(crate) enum ResolvableFile<'a> {
/// The distribution should be resolved and installed using a source distribution.
SourceDist(&'a DistFile),
SourceDist(&'a DistRequiresPython),
/// The distribution should be resolved and installed using a wheel distribution.
CompatibleWheel(&'a DistFile),
CompatibleWheel(&'a DistRequiresPython),
/// The distribution should be resolved using an incompatible wheel distribution, but
/// installed using a source distribution.
IncompatibleWheel(&'a DistFile, &'a DistFile),
IncompatibleWheel(&'a DistRequiresPython, &'a DistRequiresPython),
}
impl<'a> ResolvableFile<'a> {
/// Return the [`DistRequiresPython`] to use during resolution.
pub(crate) fn resolve(&self) -> &DistFile {
match self {
pub(crate) fn resolve(&self) -> &DistRequiresPython {
match *self {
ResolvableFile::SourceDist(sdist) => sdist,
ResolvableFile::CompatibleWheel(wheel) => wheel,
ResolvableFile::IncompatibleWheel(_, wheel) => wheel,
@ -225,8 +300,8 @@ impl<'a> ResolvableFile<'a> {
}
/// Return the [`DistRequiresPython`] to use during installation.
pub(crate) fn install(&self) -> &DistFile {
match self {
pub(crate) fn install(&self) -> &DistRequiresPython {
match *self {
ResolvableFile::SourceDist(sdist) => sdist,
ResolvableFile::CompatibleWheel(wheel) => wheel,
ResolvableFile::IncompatibleWheel(sdist, _) => sdist,
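
Restating the selection rules above as a self-contained sketch (toy types, not the real `PrioritizedDistribution`/`ResolvableFile`): prefer a platform-compatible wheel; failing that, resolve against an incompatible wheel's metadata but install from the source distribution; failing that, fall back to the source distribution for both.

/// Toy stand-in for `ResolvableFile`, keyed by artifact name only.
enum Pick<'a> {
    CompatibleWheel(&'a str),
    IncompatibleWheel { sdist: &'a str, wheel: &'a str },
    SourceDist(&'a str),
}

impl<'a> Pick<'a> {
    /// Select with the same precedence as `PrioritizedDistribution`.
    fn select(
        compatible_wheel: Option<&'a str>,
        sdist: Option<&'a str>,
        incompatible_wheel: Option<&'a str>,
    ) -> Option<Self> {
        match (compatible_wheel, sdist, incompatible_wheel) {
            (Some(wheel), _, _) => Some(Pick::CompatibleWheel(wheel)),
            (_, Some(sdist), Some(wheel)) => Some(Pick::IncompatibleWheel { sdist, wheel }),
            (_, Some(sdist), _) => Some(Pick::SourceDist(sdist)),
            _ => None,
        }
    }

    /// The artifact whose metadata drives resolution.
    fn resolve(&self) -> &'a str {
        match *self {
            Pick::CompatibleWheel(wheel) => wheel,
            Pick::IncompatibleWheel { wheel, .. } => wheel,
            Pick::SourceDist(sdist) => sdist,
        }
    }

    /// The artifact that is actually built and installed.
    fn install(&self) -> &'a str {
        match *self {
            Pick::CompatibleWheel(wheel) => wheel,
            Pick::IncompatibleWheel { sdist, .. } => sdist,
            Pick::SourceDist(sdist) => sdist,
        }
    }
}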

View file

@ -20,7 +20,7 @@ use puffin_interpreter::{Interpreter, Virtualenv};
use puffin_resolver::{
Manifest, PreReleaseMode, ResolutionGraph, ResolutionMode, ResolutionOptions, Resolver,
};
use puffin_traits::{BuildContext, BuildKind, SourceBuildTrait};
use puffin_traits::{BuildContext, BuildKind, SetupPyStrategy, SourceBuildTrait};
// Exclude any packages uploaded after this date.
static EXCLUDE_NEWER: Lazy<DateTime<Utc>> = Lazy::new(|| {
@ -49,6 +49,14 @@ impl BuildContext for DummyContext {
panic!("The test should not need to build source distributions")
}
fn no_build(&self) -> bool {
false
}
fn setup_py_strategy(&self) -> SetupPyStrategy {
SetupPyStrategy::default()
}
async fn resolve<'a>(&'a self, _requirements: &'a [Requirement]) -> Result<Resolution> {
panic!("The test should not need to build source distributions")
}
@ -494,7 +502,7 @@ async fn black_disallow_prerelease() -> Result<()> {
.unwrap_err();
assert_snapshot!(err, @r###"
Because there are no versions of black<=20.0 and root depends on black<=20.0, version solving failed.
Because there are no versions of black that satisfy black<=20.0 and root depends on black<=20.0, we can conclude that the requirements are unsatisfiable.
hint: Pre-releases are available for black in the requested range (e.g., 19.10b0), but pre-releases weren't enabled (try: `--prerelease=allow`)
"###);
@ -516,7 +524,7 @@ async fn black_allow_prerelease_if_necessary() -> Result<()> {
.unwrap_err();
assert_snapshot!(err, @r###"
Because there are no versions of black<=20.0 and root depends on black<=20.0, version solving failed.
Because there are no versions of black that satisfy black<=20.0 and root depends on black<=20.0, we can conclude that the requirements are unsatisfiable.
hint: Pre-releases are available for black in the requested range (e.g., 19.10b0), but pre-releases weren't enabled (try: `--prerelease=allow`)
"###);
@ -642,10 +650,10 @@ async fn msgraph_sdk() -> Result<()> {
.unwrap_err();
assert_snapshot!(err, @r###"
Because there are no versions of msgraph-core>=1.0.0a2 and msgraph-sdk==1.0.0 depends on msgraph-core>=1.0.0a2, msgraph-sdk==1.0.0 is forbidden.
And because root depends on msgraph-sdk==1.0.0, version solving failed.
Because there are no versions of msgraph-core that satisfy msgraph-core>=1.0.0a2 and msgraph-sdk==1.0.0 depends on msgraph-core>=1.0.0a2, we can conclude that msgraph-sdk==1.0.0 cannot be used.
And because root depends on msgraph-sdk==1.0.0 we can conclude that the requirements are unsatisfiable.
hint: msgraph-core was requested with a pre-release marker (e.g., >=1.0.0a2), but pre-releases weren't enabled (try: `--prerelease=allow`)
hint: msgraph-core was requested with a pre-release marker (e.g., msgraph-core>=1.0.0a2), but pre-releases weren't enabled (try: `--prerelease=allow`)
"###);
Ok(())

View file

@ -68,9 +68,10 @@ pub trait BuildContext {
/// Whether source distribution building is disabled. If so, [`BuildContext::setup_build`] calls
/// will fail. This method exists to avoid fetching source distributions when we know we can't
/// build them.
fn no_build(&self) -> bool {
false
}
fn no_build(&self) -> bool;
/// The strategy to use when building source distributions that lack a `pyproject.toml`.
fn setup_py_strategy(&self) -> SetupPyStrategy;
/// Resolve the given requirements into a ready-to-install set of package versions.
fn resolve<'a>(
@ -123,6 +124,16 @@ pub trait SourceBuildTrait {
-> impl Future<Output = Result<String>> + Send + 'a;
}
/// The strategy to use when building source distributions that lack a `pyproject.toml`.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum SetupPyStrategy {
/// Perform a PEP 517 build.
#[default]
Pep517,
/// Perform a build by invoking `setuptools` directly.
Setuptools,
}
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum BuildKind {
/// A regular PEP 517 wheel build

View file

@ -16,7 +16,6 @@ workspace = true
pep440_rs = { path = "../pep440-rs", features = ["serde"] }
pep508_rs = { path = "../pep508-rs", features = ["serde"] }
puffin-normalize = { path = "../puffin-normalize" }
puffin-warnings = { path = "../puffin-warnings" }
chrono = { workspace = true, features = ["serde"] }
mailparse = { workspace = true }

View file

@ -5,7 +5,7 @@
#
# Example usage:
#
# ./scripts/benchmarks/venv.sh ./scripts/benchmarks/requirements.txt
# ./scripts/benchmarks/venv.sh
###
set -euxo pipefail
@ -15,7 +15,7 @@ set -euxo pipefail
###
hyperfine --runs 20 --warmup 3 \
--prepare "rm -rf .venv" \
"./target/release/puffin venv --no-cache" \
"./target/release/puffin venv" \
--prepare "rm -rf .venv" \
"virtualenv --without-pip .venv" \
--prepare "rm -rf .venv" \
@ -23,10 +23,10 @@ hyperfine --runs 20 --warmup 3 \
###
# Create a virtual environment with seed packages.
#
# TODO(charlie): Support seed packages in `puffin venv`.
###
hyperfine --runs 20 --warmup 3 \
--prepare "rm -rf .venv" \
"./target/release/puffin venv --seed" \
--prepare "rm -rf .venv" \
"virtualenv .venv" \
--prepare "rm -rf .venv" \

View file

@ -0,0 +1,74 @@
# This file was autogenerated by Puffin v0.0.1 via the following command:
# puffin pip-compile scripts/requirements/all-kinds.in
annotated-types==0.6.0
# via pydantic
asgiref==3.7.2
# via django
blinker==1.7.0
# via flask
certifi==2023.11.17
# via requests
cffi==1.16.0
# via cryptography
charset-normalizer==3.3.2
# via requests
click==8.1.7
# via flask
cryptography==41.0.7
defusedxml==0.7.1
# via python3-openid
django==5.0.1
# via django-allauth
django-allauth==0.51.0
flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
idna==3.6
# via requests
itsdangerous==2.1.2
# via flask
jinja2==3.1.2
# via flask
markupsafe==2.1.3
# via
# jinja2
# werkzeug
numpy==1.26.3
# via pandas
oauthlib==3.2.2
# via requests-oauthlib
pandas==2.1.4
pycparser==2.21
# via cffi
pydantic==2.5.3
# via pydantic-extra-types
pydantic-core==2.14.6
# via pydantic
pydantic-extra-types @ git+https://github.com/pydantic/pydantic-extra-types.git@5ebc5bba58605c656a821eed773973725e35cf83
pyjwt==2.8.0
# via django-allauth
python-dateutil==2.8.2
# via pandas
python3-openid==3.2.0
# via django-allauth
pytz==2023.3.post1
# via pandas
requests==2.31.0
# via
# django-allauth
# requests-oauthlib
requests-oauthlib==1.3.1
# via django-allauth
six==1.16.0
# via python-dateutil
sqlparse==0.4.4
# via django
typing-extensions==4.9.0
# via
# asgiref
# pydantic
# pydantic-core
tzdata==2023.4
# via pandas
urllib3==2.1.0
# via requests
werkzeug @ https://files.pythonhosted.org/packages/0d/cc/ff1904eb5eb4b455e442834dabf9427331ac0fa02853bf83db817a7dd53d/werkzeug-3.0.1.tar.gz
# via flask

View file

@ -0,0 +1,17 @@
# This file was autogenerated by Puffin v0.0.1 via the following command:
# puffin pip-compile scripts/requirements/black.in
black==23.12.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.12.1
# via black
platformdirs==4.1.0
# via black
tomli==2.0.1
# via black
typing-extensions==4.9.0
# via black

View file

@ -0,0 +1,65 @@
# This file was autogenerated by Puffin v0.0.1 via the following command:
# puffin pip-compile scripts/requirements/boto3.in
boto3==1.7.84
botocore==1.10.84
# via
# boto3
# s3transfer
cachetools==5.3.2
# via google-auth
certifi==2023.11.17
# via requests
cffi==1.16.0
# via cryptography
charset-normalizer==3.3.2
# via requests
cryptography==41.0.7
docutils==0.20.1
# via botocore
google-api-core==2.15.0
google-auth==2.26.1
# via google-api-core
googleapis-common-protos==1.62.0
# via
# google-api-core
# grpcio-status
grpcio==1.60.0
# via grpcio-status
grpcio-status==1.60.0
idna==3.6
# via requests
jmespath==0.10.0
# via
# boto3
# botocore
numpy==1.26.3
packaging==23.2
pip==23.3.2
protobuf==4.25.1
# via
# google-api-core
# googleapis-common-protos
# grpcio-status
pyasn1==0.5.1
# via
# pyasn1-modules
# rsa
pyasn1-modules==0.3.0
# via google-auth
pycparser==2.21
# via cffi
python-dateutil==2.8.2
# via botocore
pyyaml==6.0.1
requests==2.31.0
# via google-api-core
rsa==4.9
# via google-auth
s3transfer==0.1.13
# via boto3
setuptools==69.0.3
six==1.16.0
# via python-dateutil
urllib3==2.1.0
# via requests
wheel==0.42.0

View file

@ -0,0 +1,5 @@
# This file was autogenerated by Puffin v0.0.1 via the following command:
# puffin pip-compile scripts/requirements/dtlssocket.in
cython==0.29.37
# via dtlssocket
dtlssocket==0.1.16

View file

@ -0,0 +1,256 @@
# This file was autogenerated by Puffin v0.0.1 via the following command:
# puffin pip-compile scripts/requirements/flyte.in
absl-py==2.0.0
# via
# tensorboard
# tensorflow
asttokens==2.4.1
# via stack-data
astunparse==1.6.3
# via tensorflow
attrs==23.2.0
# via hypothesis
autoflake==2.2.1
cachetools==5.3.2
# via google-auth
certifi==2023.11.17
# via requests
cfgv==3.4.0
# via pre-commit
charset-normalizer==3.3.2
# via requests
codespell==2.2.6
coverage==7.4.0
# via pytest-cov
decorator==5.1.1
# via ipython
distlib==0.3.8
# via virtualenv
exceptiongroup==1.2.0
# via
# hypothesis
# ipython
# pytest
executing==2.0.1
# via stack-data
filelock==3.13.1
# via virtualenv
flatbuffers==23.5.26
# via tensorflow
gast==0.5.4
# via tensorflow
google-api-core==2.15.0
# via
# google-cloud-bigquery
# google-cloud-bigquery-storage
# google-cloud-core
google-auth==2.26.1
# via
# google-api-core
# google-auth-oauthlib
# google-cloud-core
# tensorboard
google-auth-oauthlib==0.4.6
# via tensorboard
google-cloud-bigquery==3.14.1
google-cloud-bigquery-storage==2.24.0
google-cloud-core==2.4.1
# via google-cloud-bigquery
google-crc32c==1.5.0
# via google-resumable-media
google-pasta==0.2.0
# via tensorflow
google-resumable-media==2.7.0
# via google-cloud-bigquery
googleapis-common-protos==1.62.0
# via
# google-api-core
# grpcio-status
grpcio==1.60.0
# via
# grpcio-status
# tensorboard
# tensorflow
grpcio-status==1.60.0
h5py==3.10.0
# via tensorflow
hypothesis==6.92.5
identify==2.5.33
# via pre-commit
idna==3.6
# via requests
iniconfig==2.0.0
# via pytest
ipython==8.20.0
jaraco-classes==3.3.0
# via keyrings-alt
jedi==0.19.1
# via ipython
joblib==1.3.2
# via scikit-learn
keras==2.8.0
# via tensorflow
keras-preprocessing==1.1.2
# via tensorflow
keyrings-alt==5.0.0
libclang==16.0.6
# via tensorflow
markdown==3.5.1
# via tensorboard
markupsafe==2.1.3
# via werkzeug
matplotlib-inline==0.1.6
# via ipython
mock==5.1.0
more-itertools==10.2.0
# via jaraco-classes
mypy==1.8.0
mypy-extensions==1.0.0
# via mypy
nodeenv==1.8.0
# via pre-commit
numpy==1.26.3
# via
# h5py
# keras-preprocessing
# opt-einsum
# scikit-learn
# scipy
# tensorboard
# tensorflow
oauthlib==3.2.2
# via requests-oauthlib
opt-einsum==3.3.0
# via tensorflow
packaging==23.2
# via
# google-cloud-bigquery
# pytest
parso==0.8.3
# via jedi
pexpect==4.9.0
# via ipython
pillow==10.2.0
platformdirs==4.1.0
# via virtualenv
pluggy==1.3.0
# via pytest
pre-commit==3.6.0
prometheus-client==0.19.0
prompt-toolkit==3.0.43
# via ipython
proto-plus==1.23.0
# via google-cloud-bigquery-storage
protobuf==4.25.1
# via
# google-api-core
# google-cloud-bigquery-storage
# googleapis-common-protos
# grpcio-status
# proto-plus
# tensorboard
# tensorflow
ptyprocess==0.7.0
# via pexpect
pure-eval==0.2.2
# via stack-data
pyasn1==0.5.1
# via
# pyasn1-modules
# rsa
pyasn1-modules==0.3.0
# via google-auth
pyflakes==3.2.0
# via autoflake
pygments==2.17.2
# via ipython
pytest==7.4.4
# via
# pytest-asyncio
# pytest-cov
pytest-asyncio==0.23.3
pytest-cov==4.1.0
python-dateutil==2.8.2
# via google-cloud-bigquery
pyyaml==6.0.1
# via pre-commit
requests==2.31.0
# via
# google-api-core
# google-cloud-bigquery
# requests-oauthlib
# tensorboard
requests-oauthlib==1.3.1
# via google-auth-oauthlib
rsa==4.9
# via google-auth
scikit-learn==1.3.2
scipy==1.11.4
# via scikit-learn
setuptools==69.0.3
# via
# nodeenv
# tensorboard
# tensorflow
six==1.16.0
# via
# asttokens
# astunparse
# google-pasta
# keras-preprocessing
# python-dateutil
# tensorflow
sortedcontainers==2.4.0
# via hypothesis
stack-data==0.6.3
# via ipython
tensorboard==2.8.0
# via tensorflow
tensorboard-data-server==0.6.1
# via tensorboard
tensorboard-plugin-wit==1.8.1
# via tensorboard
tensorflow==2.8.1
tensorflow-estimator==2.8.0
# via tensorflow
tensorflow-io-gcs-filesystem==0.35.0
# via tensorflow
termcolor==2.4.0
# via tensorflow
threadpoolctl==3.2.0
# via scikit-learn
tomli==2.0.1
# via
# autoflake
# mypy
# pytest
torch==1.12.1
traitlets==5.14.1
# via
# ipython
# matplotlib-inline
types-croniter==2.0.0.20240106
types-mock==5.1.0.20240106
types-protobuf==4.24.0.20240106
types-requests==2.31.0.20240106
typing-extensions==4.9.0
# via
# mypy
# tensorflow
# torch
urllib3==2.1.0
# via
# requests
# types-requests
virtualenv==20.25.0
# via pre-commit
wcwidth==0.2.13
# via prompt-toolkit
werkzeug==3.0.1
# via tensorboard
wheel==0.42.0
# via
# astunparse
# tensorboard
wrapt==1.16.0
# via tensorflow

View file

@ -0,0 +1,31 @@
# This file was autogenerated by Puffin v0.0.1 via the following command:
# puffin pip-compile scripts/requirements/pdm_2193.in
certifi==2023.11.17
# via requests
charset-normalizer==3.3.2
# via requests
idna==3.6
# via requests
numpy==1.26.3
# via pandas
pandas==1.3.5
pystac==1.9.0
# via pystac-client
pystac-client==0.6.1
python-dateutil==2.7.5
# via
# pandas
# pystac
# pystac-client
# sat-stac
pytz==2023.3.post1
# via pandas
requests==2.31.0
# via
# pystac-client
# sat-stac
sat-stac==0.4.1
six==1.16.0
# via python-dateutil
urllib3==2.1.0
# via requests

View file

@ -0,0 +1,216 @@
# This file was autogenerated by Puffin v0.0.1 via the following command:
# puffin pip-compile scripts/requirements/scispacy.in
annotated-types==0.6.0
# via pydantic
black==23.12.1
blis==0.7.11
# via thinc
catalogue==2.0.10
# via
# spacy
# srsly
# thinc
certifi==2023.11.17
# via requests
cffi==1.16.0
# via cryptography
charset-normalizer==3.3.2
# via requests
click==8.1.7
# via
# black
# typer
confection==0.1.4
# via thinc
conllu==4.5.3
coverage==7.4.0
# via pytest-cov
cryptography==41.0.7
# via secretstorage
cymem==2.0.8
# via
# preshed
# spacy
# thinc
docutils==0.20.1
# via readme-renderer
exceptiongroup==1.2.0
# via pytest
flake8==7.0.0
idna==3.6
# via requests
importlib-metadata==7.0.1
# via
# keyring
# twine
iniconfig==2.0.0
# via pytest
jaraco-classes==3.3.0
# via keyring
jeepney==0.8.0
# via
# keyring
# secretstorage
jinja2==3.1.2
# via spacy
joblib==1.3.2
# via scikit-learn
keyring==24.3.0
# via twine
langcodes==3.3.0
# via spacy
markdown-it-py==3.0.0
# via rich
markupsafe==2.1.3
# via jinja2
mccabe==0.7.0
# via flake8
mdurl==0.1.2
# via markdown-it-py
more-itertools==10.2.0
# via jaraco-classes
murmurhash==1.0.10
# via
# preshed
# spacy
# thinc
mypy==1.8.0
mypy-extensions==1.0.0
# via
# black
# mypy
nh3==0.2.15
# via readme-renderer
numpy==1.26.3
# via
# blis
# pandas
# scikit-learn
# scipy
# spacy
# thinc
packaging==23.2
# via
# black
# pytest
# spacy
# thinc
pandas==2.1.4
pathspec==0.12.1
# via black
pathy==0.10.3
# via spacy
pkginfo==1.9.6
# via twine
platformdirs==4.1.0
# via black
pluggy==1.3.0
# via pytest
preshed==3.0.9
# via
# spacy
# thinc
pycodestyle==2.11.1
# via flake8
pycparser==2.21
# via cffi
pydantic==2.5.3
# via
# confection
# spacy
# thinc
pydantic-core==2.14.6
# via pydantic
pyflakes==3.2.0
# via flake8
pygments==2.17.2
# via
# readme-renderer
# rich
pysbd==0.3.4
pytest==7.4.4
# via pytest-cov
pytest-cov==4.1.0
python-dateutil==2.8.2
# via pandas
pytz==2023.3.post1
# via pandas
readme-renderer==42.0
# via twine
requests==2.31.0
# via
# requests-toolbelt
# spacy
# twine
requests-toolbelt==1.0.0
# via twine
rfc3986==2.0.0
# via twine
rich==13.7.0
# via twine
scikit-learn==1.3.2
scipy==1.10.1
# via scikit-learn
secretstorage==3.3.3
# via keyring
setuptools==69.0.3
# via
# spacy
# spacy-lookups-data
# thinc
six==1.16.0
# via python-dateutil
smart-open==6.4.0
# via
# pathy
# spacy
spacy==3.6.1
spacy-legacy==3.0.12
# via spacy
spacy-loggers==1.0.5
# via spacy
spacy-lookups-data==1.0.5
srsly==2.4.8
# via
# confection
# spacy
# thinc
thinc==8.1.12
# via spacy
threadpoolctl==3.2.0
# via scikit-learn
tomli==2.0.1
# via
# black
# mypy
# pytest
tqdm==4.66.1
# via spacy
twine==4.0.2
typer==0.9.0
# via
# pathy
# spacy
types-requests==2.31.0.20240106
types-setuptools==69.0.0.20240106
types-tabulate==0.9.0.20240106
typing-extensions==4.9.0
# via
# black
# mypy
# pydantic
# pydantic-core
# typer
tzdata==2023.4
# via pandas
urllib3==2.1.0
# via
# requests
# twine
# types-requests
wasabi==1.1.2
# via
# spacy
# thinc
zipp==3.17.0
# via importlib-metadata

View file

@ -0,0 +1,81 @@
# This file was autogenerated by Puffin v0.0.1 via the following command:
# puffin pip-compile scripts/requirements/trio.in
alabaster==0.7.15
# via sphinx
attrs==23.2.0
# via outcome
babel==2.14.0
# via sphinx
certifi==2023.11.17
# via requests
cffi==1.16.0
# via cryptography
charset-normalizer==3.3.2
# via requests
click==8.1.7
# via towncrier
cryptography==41.0.7
# via pyopenssl
docutils==0.19
# via
# sphinx
# sphinx-rtd-theme
exceptiongroup==1.2.0
idna==3.6
# via requests
imagesize==1.4.1
# via sphinx
immutables==0.20
incremental==22.10.0
# via towncrier
jinja2==3.1.2
# via
# sphinx
# towncrier
markupsafe==2.1.3
# via jinja2
outcome==1.3.0.post0
packaging==23.2
# via sphinx
pycparser==2.21
# via cffi
pygments==2.17.2
# via sphinx
pyopenssl==23.3.0
requests==2.31.0
# via sphinx
sniffio==1.3.0
snowballstemmer==2.2.0
# via sphinx
sortedcontainers==2.4.0
sphinx==6.1.3
# via
# sphinx-rtd-theme
# sphinxcontrib-applehelp
# sphinxcontrib-devhelp
# sphinxcontrib-htmlhelp
# sphinxcontrib-jquery
# sphinxcontrib-qthelp
# sphinxcontrib-serializinghtml
# sphinxcontrib-trio
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.7
# via sphinx
sphinxcontrib-devhelp==1.0.5
# via sphinx
sphinxcontrib-htmlhelp==2.0.4
# via sphinx
sphinxcontrib-jquery==4.1
# via sphinx-rtd-theme
sphinxcontrib-jsmath==1.0.1
# via sphinx
sphinxcontrib-qthelp==1.0.6
# via sphinx
sphinxcontrib-serializinghtml==1.1.9
# via sphinx
sphinxcontrib-trio==1.1.2
tomli==2.0.1
# via towncrier
towncrier==23.11.0
urllib3==2.1.0
# via requests