Cache tool environments in uv tool run (#4784)

## Summary

The basic strategy:

- When the user does `uv tool run`, we resolve the `from` and `with`
requirements (always).
- After resolving, we generate a hash of the requirements. For now, I'm
just converting to a lockfile and hashing _that_, but that's an
implementation detail.
- Once we have a hash, we _also_ hash the interpreter.
- We then store environments in
`${CACHE_DIR}/${INTERPRETER_HASH}/${RESOLUTION_HASH}`.

Some consequences:

- We cache based on the interpreter, so if you request a different
Python, we'll create a new environment (even if they're compatible).
This has the nice side-effect of ensuring that we don't use environments
for interpreters that were later deleted.
- We cache the `from` and `with` together. In practice, we may want to
cache them separately, then layer them? But this is also an
implementation detail that we could change later.
- Because we use the lockfile as the cache key, we will invalidate the
cache when the format changes. That seems ok, but we could improve it in
the future by generating a stable hash from a lockfile that's
independent of the schema.

Closes https://github.com/astral-sh/uv/issues/4752.
This commit is contained in:
Charlie Marsh 2024-07-03 19:25:39 -04:00 committed by GitHub
parent 1e8f5926e6
commit de40f798b9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
12 changed files with 561 additions and 101 deletions

2
Cargo.lock generated
View file

@ -4434,6 +4434,7 @@ dependencies = [
"axoupdater", "axoupdater",
"base64 0.22.1", "base64 0.22.1",
"byteorder", "byteorder",
"cache-key",
"chrono", "chrono",
"clap", "clap",
"distribution-types", "distribution-types",
@ -4461,7 +4462,6 @@ dependencies = [
"rustc-hash 2.0.0", "rustc-hash 2.0.0",
"serde", "serde",
"serde_json", "serde_json",
"tempfile",
"textwrap", "textwrap",
"thiserror", "thiserror",
"tikv-jemallocator", "tikv-jemallocator",

View file

@ -1,3 +1,4 @@
pub use cache_key::{CacheKey, CacheKeyHasher};
pub use canonical_url::{CanonicalUrl, RepositoryUrl}; pub use canonical_url::{CanonicalUrl, RepositoryUrl};
pub use digest::digest; pub use digest::digest;

View file

@ -181,8 +181,8 @@ impl Cache {
/// Create an ephemeral Python environment in the cache. /// Create an ephemeral Python environment in the cache.
pub fn environment(&self) -> io::Result<tempfile::TempDir> { pub fn environment(&self) -> io::Result<tempfile::TempDir> {
fs::create_dir_all(self.bucket(CacheBucket::Environments))?; fs::create_dir_all(self.bucket(CacheBucket::Builds))?;
tempfile::tempdir_in(self.bucket(CacheBucket::Environments)) tempfile::tempdir_in(self.bucket(CacheBucket::Builds))
} }
/// Returns `true` if a cache entry must be revalidated given the [`Refresh`] policy. /// Returns `true` if a cache entry must be revalidated given the [`Refresh`] policy.
@ -634,6 +634,8 @@ pub enum CacheBucket {
/// other buckets directly would make atomic operations impossible. /// other buckets directly would make atomic operations impossible.
Archive, Archive,
/// Ephemeral virtual environments used to execute PEP 517 builds and other operations. /// Ephemeral virtual environments used to execute PEP 517 builds and other operations.
Builds,
/// Reusable virtual environments used to invoke Python tools.
Environments, Environments,
} }
@ -647,7 +649,8 @@ impl CacheBucket {
Self::Simple => "simple-v9", Self::Simple => "simple-v9",
Self::Wheels => "wheels-v1", Self::Wheels => "wheels-v1",
Self::Archive => "archive-v0", Self::Archive => "archive-v0",
Self::Environments => "environments-v0", Self::Builds => "builds-v0",
Self::Environments => "environments-v1",
} }
} }
@ -758,6 +761,9 @@ impl CacheBucket {
Self::Archive => { Self::Archive => {
// Nothing to do. // Nothing to do.
} }
Self::Builds => {
// Nothing to do.
}
Self::Environments => { Self::Environments => {
// Nothing to do. // Nothing to do.
} }
@ -775,6 +781,7 @@ impl CacheBucket {
Self::Interpreter, Self::Interpreter,
Self::Simple, Self::Simple,
Self::Archive, Self::Archive,
Self::Builds,
Self::Environments, Self::Environments,
] ]
.iter() .iter()

View file

@ -697,7 +697,7 @@ impl InterpreterInfo {
// If `executable` is a pyenv shim, a bash script that redirects to the activated // If `executable` is a pyenv shim, a bash script that redirects to the activated
// python executable at another path, we're not allowed to cache the interpreter info. // python executable at another path, we're not allowed to cache the interpreter info.
if same_file::is_same_file(executable, &info.sys_executable).unwrap_or(false) { if is_same_file(executable, &info.sys_executable).unwrap_or(false) {
fs::create_dir_all(cache_entry.dir())?; fs::create_dir_all(cache_entry.dir())?;
write_atomic_sync( write_atomic_sync(
cache_entry.path(), cache_entry.path(),

View file

@ -95,9 +95,9 @@ impl StateStore {
/// are subdirectories of the state store root. /// are subdirectories of the state store root.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub enum StateBucket { pub enum StateBucket {
// Managed Python installations /// Managed Python installations
ManagedPython, ManagedPython,
// Installed tools /// Installed tools.
Tools, Tools,
} }

View file

@ -14,6 +14,7 @@ default-run = "uv"
workspace = true workspace = true
[dependencies] [dependencies]
cache-key = { workspace = true }
distribution-types = { workspace = true } distribution-types = { workspace = true }
install-wheel-rs = { workspace = true, default-features = false } install-wheel-rs = { workspace = true, default-features = false }
pep440_rs = { workspace = true } pep440_rs = { workspace = true }
@ -58,7 +59,6 @@ regex = { workspace = true }
rustc-hash = { workspace = true } rustc-hash = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
serde_json = { workspace = true } serde_json = { workspace = true }
tempfile = { workspace = true }
textwrap = { workspace = true } textwrap = { workspace = true }
thiserror = { workspace = true } thiserror = { workspace = true }
tokio = { workspace = true } tokio = { workspace = true }

View file

@ -0,0 +1,129 @@
use tracing::debug;
use cache_key::digest;
use pypi_types::Requirement;
use uv_cache::{Cache, CacheBucket};
use uv_client::Connectivity;
use uv_configuration::{Concurrency, PreviewMode};
use uv_fs::{LockedFile, Simplified};
use uv_python::{Interpreter, PythonEnvironment};
use uv_requirements::RequirementsSpecification;
use uv_resolver::Lock;
use crate::commands::project::{resolve_environment, sync_environment};
use crate::commands::SharedState;
use crate::printer::Printer;
use crate::settings::ResolverInstallerSettings;
/// An ephemeral [`PythonEnvironment`] stored in (and reused from) the cache.
#[derive(Debug)]
pub(crate) struct EphemeralEnvironment(PythonEnvironment);

impl From<EphemeralEnvironment> for PythonEnvironment {
    fn from(value: EphemeralEnvironment) -> Self {
        // Unwrap the newtype to hand back the underlying environment.
        let EphemeralEnvironment(environment) = value;
        environment
    }
}
impl EphemeralEnvironment {
    /// Get or create an [`EphemeralEnvironment`] based on a given set of requirements and a base
    /// interpreter.
    ///
    /// Environments are content-addressed in the cache: the entry is keyed by a hash of the
    /// interpreter and a hash of the locked resolution, so re-invoking with the same interpreter
    /// and requirements reuses the existing environment.
    ///
    /// # Errors
    ///
    /// Returns an error if resolution fails, if the lockfile can't be generated or serialized, if
    /// the cache directory or lock file can't be created, or if virtual environment creation or
    /// installation fails.
    pub(crate) async fn get_or_create(
        requirements: Vec<Requirement>,
        interpreter: Interpreter,
        settings: &ResolverInstallerSettings,
        state: &SharedState,
        preview: PreviewMode,
        connectivity: Connectivity,
        concurrency: Concurrency,
        native_tls: bool,
        cache: &Cache,
        printer: Printer,
    ) -> anyhow::Result<Self> {
        let spec = RequirementsSpecification::from_requirements(requirements);

        // Resolve the requirements with the interpreter.
        let resolution = resolve_environment(
            &interpreter,
            spec,
            settings.as_ref().into(),
            state,
            preview,
            connectivity,
            concurrency,
            native_tls,
            cache,
            printer,
        )
        .await?;

        // Hash the resolution by hashing the generated lockfile. Note this ties cache validity to
        // the lockfile schema: a format change invalidates existing entries.
        // TODO(charlie): If the resolution contains any mutable metadata (like a path or URL
        // dependency), skip this step.
        let lock = Lock::from_resolution_graph(&resolution)?;
        let toml = lock.to_toml()?;
        let resolution_hash = digest(&toml);

        // Hash the interpreter based on its `sys.executable` path. This also means entries for
        // since-deleted interpreters are never reused.
        // TODO(charlie): Come up with a robust hash for the interpreter.
        let interpreter_hash = digest(&interpreter.sys_executable());

        // Search in the content-addressed cache.
        let cache_entry = cache.entry(CacheBucket::Environments, interpreter_hash, resolution_hash);

        // Lock the interpreter-level cache directory, to avoid concurrent modification across
        // processes. The lock must be held before checking for the receipt below.
        fs_err::tokio::create_dir_all(cache_entry.dir()).await?;
        let _lock = LockedFile::acquire(
            cache_entry.dir().join(".lock"),
            cache_entry.dir().user_display(),
        )?;

        // If the receipt exists, the environment was fully built by a previous run; return it.
        let ok = cache_entry.path().join(".ok");
        if ok.is_file() {
            debug!(
                "Found existing ephemeral environment at: `{}`",
                cache_entry.path().display()
            );
            return Ok(Self(PythonEnvironment::from_root(
                cache_entry.path(),
                cache,
            )?));
        }

        // Otherwise, create a fresh virtual environment at the cache entry's path.
        debug!(
            "Creating ephemeral environment at: `{}`",
            cache_entry.path().display()
        );
        let venv = uv_virtualenv::create_venv(
            cache_entry.path(),
            interpreter,
            uv_virtualenv::Prompt::None,
            false,
            false,
        )?;

        // Install the ephemeral requirements.
        // TODO(charlie): Rather than passing all the arguments to `sync_environment`, return a
        // struct that lets us "continue" from `resolve_environment`.
        let venv = sync_environment(
            venv,
            &resolution.into(),
            settings.as_ref().into(),
            state,
            preview,
            connectivity,
            concurrency,
            native_tls,
            cache,
            printer,
        )
        .await?;

        // Create the receipt, to indicate to future readers that the environment is complete.
        fs_err::tokio::File::create(ok).await?;

        Ok(Self(venv))
    }
}

View file

@ -9,7 +9,9 @@ use pep440_rs::Version;
use pypi_types::Requirement; use pypi_types::Requirement;
use uv_cache::Cache; use uv_cache::Cache;
use uv_client::{BaseClientBuilder, Connectivity, FlatIndexClient, RegistryClientBuilder}; use uv_client::{BaseClientBuilder, Connectivity, FlatIndexClient, RegistryClientBuilder};
use uv_configuration::{Concurrency, ExtrasSpecification, PreviewMode, SetupPyStrategy}; use uv_configuration::{
Concurrency, ExtrasSpecification, PreviewMode, Reinstall, SetupPyStrategy, Upgrade,
};
use uv_dispatch::BuildDispatch; use uv_dispatch::BuildDispatch;
use uv_distribution::{DistributionDatabase, Workspace}; use uv_distribution::{DistributionDatabase, Workspace};
use uv_fs::Simplified; use uv_fs::Simplified;
@ -19,16 +21,17 @@ use uv_python::{
PythonInstallation, PythonPreference, PythonRequest, VersionRequest, PythonInstallation, PythonPreference, PythonRequest, VersionRequest,
}; };
use uv_requirements::{NamedRequirementsResolver, RequirementsSpecification}; use uv_requirements::{NamedRequirementsResolver, RequirementsSpecification};
use uv_resolver::{FlatIndex, OptionsBuilder, PythonRequirement, RequiresPython}; use uv_resolver::{FlatIndex, OptionsBuilder, PythonRequirement, RequiresPython, ResolutionGraph};
use uv_types::{BuildIsolation, HashStrategy}; use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy};
use crate::commands::pip::operations::Modifications; use crate::commands::pip::operations::Modifications;
use crate::commands::reporters::ResolverReporter; use crate::commands::reporters::ResolverReporter;
use crate::commands::{pip, SharedState}; use crate::commands::{pip, SharedState};
use crate::printer::Printer; use crate::printer::Printer;
use crate::settings::ResolverInstallerSettings; use crate::settings::{InstallerSettingsRef, ResolverInstallerSettings, ResolverSettingsRef};
pub(crate) mod add; pub(crate) mod add;
pub(crate) mod ephemeral;
pub(crate) mod lock; pub(crate) mod lock;
pub(crate) mod remove; pub(crate) mod remove;
pub(crate) mod run; pub(crate) mod run;
@ -357,6 +360,234 @@ pub(crate) async fn resolve_names(
Ok(resolver.resolve().await?) Ok(resolver.resolve().await?)
} }
/// Run dependency resolution for an interpreter, returning the [`ResolutionGraph`].
///
/// Resolution is performed as if installing into an empty environment: reinstalls and upgrades
/// are irrelevant, and defaults are used for extras, hashes, preferences, and build isolation.
///
/// # Errors
///
/// Returns an error if the interpreter's tags can't be determined, if fetching the flat indexes
/// fails, or if the resolution itself fails.
pub(crate) async fn resolve_environment(
    interpreter: &Interpreter,
    spec: RequirementsSpecification,
    settings: ResolverSettingsRef<'_>,
    state: &SharedState,
    preview: PreviewMode,
    connectivity: Connectivity,
    concurrency: Concurrency,
    native_tls: bool,
    cache: &Cache,
    printer: Printer,
) -> anyhow::Result<ResolutionGraph> {
    let ResolverSettingsRef {
        index_locations,
        index_strategy,
        keyring_provider,
        resolution,
        prerelease,
        config_setting,
        exclude_newer,
        link_mode,
        upgrade: _,
        build_options,
    } = settings;

    // Determine the tags, markers, and interpreter to use for resolution.
    let tags = interpreter.tags()?;
    let markers = interpreter.markers();
    let python_requirement = PythonRequirement::from_interpreter(interpreter);

    // Initialize the registry client.
    let client = RegistryClientBuilder::new(cache.clone())
        .native_tls(native_tls)
        .connectivity(connectivity)
        .index_urls(index_locations.index_urls())
        .index_strategy(index_strategy)
        .keyring(keyring_provider)
        .markers(markers)
        .platform(interpreter.platform())
        .build();

    let options = OptionsBuilder::new()
        .resolution_mode(resolution)
        .prerelease_mode(prerelease)
        .exclude_newer(exclude_newer)
        .index_strategy(index_strategy)
        .build();

    // TODO(charlie): These are all default values. We should consider whether we want to make them
    // optional on the downstream APIs.
    let build_isolation = BuildIsolation::default();
    let dev = Vec::default();
    let extras = ExtrasSpecification::default();
    let hasher = HashStrategy::default();
    let preferences = Vec::default();
    let setup_py = SetupPyStrategy::default();

    // When resolving from an interpreter, we assume an empty environment, so reinstalls and
    // upgrades aren't relevant.
    let reinstall = Reinstall::default();
    let upgrade = Upgrade::default();

    // Resolve the flat indexes from `--find-links`.
    let flat_index = {
        let client = FlatIndexClient::new(&client, cache);
        let entries = client.fetch(index_locations.flat_index()).await?;
        FlatIndex::from_entries(entries, Some(tags), &hasher, build_options)
    };

    // Create a build dispatch.
    let resolve_dispatch = BuildDispatch::new(
        &client,
        cache,
        interpreter,
        index_locations,
        &flat_index,
        &state.index,
        &state.git,
        &state.in_flight,
        index_strategy,
        setup_py,
        config_setting,
        build_isolation,
        link_mode,
        build_options,
        exclude_newer,
        concurrency,
        preview,
    );

    // Resolve the requirements.
    Ok(pip::operations::resolve(
        spec.requirements,
        spec.constraints,
        spec.overrides,
        dev,
        spec.source_trees,
        spec.project,
        &extras,
        preferences,
        EmptyInstalledPackages,
        &hasher,
        &reinstall,
        &upgrade,
        Some(tags),
        Some(markers),
        python_requirement,
        &client,
        &flat_index,
        &state.index,
        &resolve_dispatch,
        concurrency,
        options,
        printer,
        preview,
    )
    .await?)
}
/// Sync a [`PythonEnvironment`] with a set of resolved requirements.
///
/// Installs the given resolution into the environment exactly (via [`Modifications::Exact`]),
/// then surfaces any resolution diagnostics to the user and returns the environment.
///
/// # Errors
///
/// Returns an error if the environment's site-packages can't be read, if the interpreter's tags
/// can't be determined, if fetching the flat indexes fails, or if installation fails.
pub(crate) async fn sync_environment(
    venv: PythonEnvironment,
    resolution: &Resolution,
    settings: InstallerSettingsRef<'_>,
    state: &SharedState,
    preview: PreviewMode,
    connectivity: Connectivity,
    concurrency: Concurrency,
    native_tls: bool,
    cache: &Cache,
    printer: Printer,
) -> anyhow::Result<PythonEnvironment> {
    let InstallerSettingsRef {
        index_locations,
        index_strategy,
        keyring_provider,
        config_setting,
        exclude_newer,
        link_mode,
        compile_bytecode,
        reinstall,
        build_options,
    } = settings;

    // Read the currently-installed packages, so the install can compute the delta.
    let site_packages = SitePackages::from_environment(&venv)?;

    // Determine the tags, markers, and interpreter to use for resolution.
    let interpreter = venv.interpreter();
    let tags = venv.interpreter().tags()?;
    let markers = venv.interpreter().markers();

    // Initialize the registry client.
    let client = RegistryClientBuilder::new(cache.clone())
        .native_tls(native_tls)
        .connectivity(connectivity)
        .index_urls(index_locations.index_urls())
        .index_strategy(index_strategy)
        .keyring(keyring_provider)
        .markers(markers)
        .platform(interpreter.platform())
        .build();

    // TODO(charlie): These are all default values. We should consider whether we want to make them
    // optional on the downstream APIs.
    let build_isolation = BuildIsolation::default();
    let dry_run = false;
    let hasher = HashStrategy::default();
    let setup_py = SetupPyStrategy::default();

    // Resolve the flat indexes from `--find-links`.
    let flat_index = {
        let client = FlatIndexClient::new(&client, cache);
        let entries = client.fetch(index_locations.flat_index()).await?;
        FlatIndex::from_entries(entries, Some(tags), &hasher, build_options)
    };

    // Create a build dispatch.
    let build_dispatch = BuildDispatch::new(
        &client,
        cache,
        interpreter,
        index_locations,
        &flat_index,
        &state.index,
        &state.git,
        &state.in_flight,
        index_strategy,
        setup_py,
        config_setting,
        build_isolation,
        link_mode,
        build_options,
        exclude_newer,
        concurrency,
        preview,
    );

    // Sync the environment.
    pip::operations::install(
        resolution,
        site_packages,
        Modifications::Exact,
        reinstall,
        build_options,
        link_mode,
        compile_bytecode,
        index_locations,
        &hasher,
        tags,
        &client,
        &state.in_flight,
        concurrency,
        &build_dispatch,
        cache,
        &venv,
        dry_run,
        printer,
        preview,
    )
    .await?;

    // Notify the user of any resolution diagnostics.
    pip::operations::diagnose_resolution(resolution.diagnostics(), printer)?;

    Ok(venv)
}
/// Update a [`PythonEnvironment`] to satisfy a set of [`RequirementsSource`]s. /// Update a [`PythonEnvironment`] to satisfy a set of [`RequirementsSource`]s.
pub(crate) async fn update_environment( pub(crate) async fn update_environment(
venv: PythonEnvironment, venv: PythonEnvironment,
@ -370,7 +601,6 @@ pub(crate) async fn update_environment(
cache: &Cache, cache: &Cache,
printer: Printer, printer: Printer,
) -> anyhow::Result<PythonEnvironment> { ) -> anyhow::Result<PythonEnvironment> {
// Extract the project settings.
let ResolverInstallerSettings { let ResolverInstallerSettings {
index_locations, index_locations,
index_strategy, index_strategy,

View file

@ -202,6 +202,8 @@ pub(crate) async fn install(
let environment = if let Some(environment) = existing_environment { let environment = if let Some(environment) = existing_environment {
environment environment
} else { } else {
// TODO(charlie): Resolve, then create the environment, then install. This ensures that
// we don't nuke the environment if the resolution fails.
installed_tools.create_environment(&from.name, interpreter)? installed_tools.create_environment(&from.name, interpreter)?
}; };

View file

@ -1,6 +1,5 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::ffi::OsString; use std::ffi::OsString;
use std::ops::Deref;
use std::path::PathBuf; use std::path::PathBuf;
use std::str::FromStr; use std::str::FromStr;
@ -21,11 +20,10 @@ use uv_python::{
EnvironmentPreference, PythonEnvironment, PythonFetch, PythonInstallation, PythonPreference, EnvironmentPreference, PythonEnvironment, PythonFetch, PythonInstallation, PythonPreference,
PythonRequest, PythonRequest,
}; };
use uv_requirements::RequirementsSpecification;
use uv_tool::InstalledTools; use uv_tool::InstalledTools;
use uv_warnings::warn_user_once; use uv_warnings::warn_user_once;
use crate::commands::project::update_environment; use crate::commands::project::ephemeral::EphemeralEnvironment;
use crate::commands::tool::common::resolve_requirements; use crate::commands::tool::common::resolve_requirements;
use crate::commands::{ExitStatus, SharedState}; use crate::commands::{ExitStatus, SharedState};
use crate::printer::Printer; use crate::printer::Printer;
@ -133,23 +131,6 @@ pub(crate) async fn run(
} }
} }
#[derive(Debug)]
enum ToolEnvironment {
Existing(PythonEnvironment),
Ephemeral(PythonEnvironment, #[allow(dead_code)] tempfile::TempDir),
}
impl Deref for ToolEnvironment {
type Target = PythonEnvironment;
fn deref(&self) -> &Self::Target {
match self {
ToolEnvironment::Existing(environment) => environment,
ToolEnvironment::Ephemeral(environment, _) => environment,
}
}
}
/// Get or create a [`PythonEnvironment`] in which to run the specified tools. /// Get or create a [`PythonEnvironment`] in which to run the specified tools.
/// ///
/// If the target tool is already installed in a compatible environment, returns that /// If the target tool is already installed in a compatible environment, returns that
@ -168,7 +149,7 @@ async fn get_or_create_environment(
native_tls: bool, native_tls: bool,
cache: &Cache, cache: &Cache,
printer: Printer, printer: Printer,
) -> Result<ToolEnvironment> { ) -> Result<PythonEnvironment> {
let client_builder = BaseClientBuilder::new() let client_builder = BaseClientBuilder::new()
.connectivity(connectivity) .connectivity(connectivity)
.native_tls(native_tls); .native_tls(native_tls);
@ -231,10 +212,10 @@ async fn get_or_create_environment(
requirements requirements
}; };
// Check if the tool is already installed in a compatible environment.
if !isolated { if !isolated {
let installed_tools = InstalledTools::from_settings()?; let installed_tools = InstalledTools::from_settings()?;
// Check if the tool is already installed in a compatible environment.
let existing_environment = let existing_environment =
installed_tools installed_tools
.get_environment(&from.name, cache)? .get_environment(&from.name, cache)?
@ -259,7 +240,7 @@ async fn get_or_create_environment(
Ok(SatisfiesResult::Fresh { .. }) Ok(SatisfiesResult::Fresh { .. })
) { ) {
debug!("Using existing tool `{}`", from.name); debug!("Using existing tool `{}`", from.name);
return Ok(ToolEnvironment::Existing(environment)); return Ok(environment);
} }
} }
} }
@ -267,24 +248,9 @@ async fn get_or_create_environment(
// TODO(zanieb): When implementing project-level tools, discover the project and check if it has the tool. // TODO(zanieb): When implementing project-level tools, discover the project and check if it has the tool.
// TODO(zanieb): Determine if we should layer on top of the project environment if it is present. // TODO(zanieb): Determine if we should layer on top of the project environment if it is present.
// If necessary, create an environment for the ephemeral requirements. let environment = EphemeralEnvironment::get_or_create(
debug!("Syncing ephemeral environment."); requirements,
// Create a virtual environment.
let temp_dir = cache.environment()?;
let venv = uv_virtualenv::create_venv(
temp_dir.path(),
interpreter, interpreter,
uv_virtualenv::Prompt::None,
false,
false,
)?;
// Install the ephemeral requirements.
let spec = RequirementsSpecification::from_requirements(requirements.clone());
let ephemeral_env = update_environment(
venv,
spec,
settings, settings,
&state, &state,
preview, preview,
@ -296,7 +262,7 @@ async fn get_or_create_environment(
) )
.await?; .await?;
Ok(ToolEnvironment::Ephemeral(ephemeral_env, temp_dir)) Ok(environment.into())
} }
/// Parse a target into a command name and a requirement. /// Parse a target into a command name and a requirement.

View file

@ -223,32 +223,32 @@ dependencies = ["flask==1.0.x"]
--- stderr: --- stderr:
Traceback (most recent call last): Traceback (most recent call last):
File "<string>", line 14, in <module> File "<string>", line 14, in <module>
File "[CACHE_DIR]/environments-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel
return self._get_build_requires(config_settings, requirements=['wheel']) return self._get_build_requires(config_settings, requirements=['wheel'])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "[CACHE_DIR]/environments-v0/[TMP]/build_meta.py", line 295, in _get_build_requires File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 295, in _get_build_requires
self.run_setup() self.run_setup()
File "[CACHE_DIR]/environments-v0/[TMP]/build_meta.py", line 487, in run_setup File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 487, in run_setup
super().run_setup(setup_script=setup_script) super().run_setup(setup_script=setup_script)
File "[CACHE_DIR]/environments-v0/[TMP]/build_meta.py", line 311, in run_setup File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 311, in run_setup
exec(code, locals()) exec(code, locals())
File "<string>", line 1, in <module> File "<string>", line 1, in <module>
File "[CACHE_DIR]/environments-v0/[TMP]/__init__.py", line 104, in setup File "[CACHE_DIR]/builds-v0/[TMP]/__init__.py", line 104, in setup
return distutils.core.setup(**attrs) return distutils.core.setup(**attrs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "[CACHE_DIR]/environments-v0/[TMP]/core.py", line 159, in setup File "[CACHE_DIR]/builds-v0/[TMP]/core.py", line 159, in setup
dist.parse_config_files() dist.parse_config_files()
File "[CACHE_DIR]/environments-v0/[TMP]/_virtualenv.py", line 22, in parse_config_files File "[CACHE_DIR]/builds-v0/[TMP]/_virtualenv.py", line 22, in parse_config_files
result = old_parse_config_files(self, *args, **kwargs) result = old_parse_config_files(self, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "[CACHE_DIR]/environments-v0/[TMP]/dist.py", line 631, in parse_config_files File "[CACHE_DIR]/builds-v0/[TMP]/dist.py", line 631, in parse_config_files
pyprojecttoml.apply_configuration(self, filename, ignore_option_errors) pyprojecttoml.apply_configuration(self, filename, ignore_option_errors)
File "[CACHE_DIR]/environments-v0/[TMP]/pyprojecttoml.py", line 68, in apply_configuration File "[CACHE_DIR]/builds-v0/[TMP]/pyprojecttoml.py", line 68, in apply_configuration
config = read_configuration(filepath, True, ignore_option_errors, dist) config = read_configuration(filepath, True, ignore_option_errors, dist)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "[CACHE_DIR]/environments-v0/[TMP]/pyprojecttoml.py", line 129, in read_configuration File "[CACHE_DIR]/builds-v0/[TMP]/pyprojecttoml.py", line 129, in read_configuration
validate(subset, filepath) validate(subset, filepath)
File "[CACHE_DIR]/environments-v0/[TMP]/pyprojecttoml.py", line 57, in validate File "[CACHE_DIR]/builds-v0/[TMP]/pyprojecttoml.py", line 57, in validate
raise ValueError(f"{error}/n{summary}") from None raise ValueError(f"{error}/n{summary}") from None
ValueError: invalid pyproject.toml config: `project.dependencies[0]`. ValueError: invalid pyproject.toml config: `project.dependencies[0]`.
configuration error: `project.dependencies[0]` must be pep508 configuration error: `project.dependencies[0]` must be pep508

View file

@ -9,7 +9,7 @@ mod common;
#[test] #[test]
fn tool_run_args() { fn tool_run_args() {
let context = TestContext::new("3.12"); let context = TestContext::new("3.12").with_filtered_counts();
let tool_dir = context.temp_dir.child("tools"); let tool_dir = context.temp_dir.child("tools");
let bin_dir = context.temp_dir.child("bin"); let bin_dir = context.temp_dir.child("bin");
@ -40,9 +40,9 @@ fn tool_run_args() {
----- stderr ----- ----- stderr -----
warning: `uv tool run` is experimental and may change without warning. warning: `uv tool run` is experimental and may change without warning.
Resolved 4 packages in [TIME] Resolved [N] packages in [TIME]
Prepared 4 packages in [TIME] Prepared [N] packages in [TIME]
Installed 4 packages in [TIME] Installed [N] packages in [TIME]
+ iniconfig==2.0.0 + iniconfig==2.0.0
+ packaging==24.0 + packaging==24.0
+ pluggy==1.4.0 + pluggy==1.4.0
@ -63,12 +63,7 @@ fn tool_run_args() {
----- stderr ----- ----- stderr -----
warning: `uv tool run` is experimental and may change without warning. warning: `uv tool run` is experimental and may change without warning.
Resolved 4 packages in [TIME] Resolved [N] packages in [TIME]
Installed 4 packages in [TIME]
+ iniconfig==2.0.0
+ packaging==24.0
+ pluggy==1.4.0
+ pytest==8.1.1
"###); "###);
} }
@ -280,32 +275,6 @@ fn tool_run_from_install() {
+ platformdirs==4.2.0 + platformdirs==4.2.0
"###); "###);
// Verify that `tool run black` at a different version (via `--from`) installs the new version.
uv_snapshot!(context.filters(), context.tool_run()
.arg("--from")
.arg("black==24.1.1")
.arg("black")
.arg("--version")
.env("UV_TOOL_DIR", tool_dir.as_os_str())
.env("XDG_BIN_HOME", bin_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
black, 24.1.1 (compiled: yes)
Python (CPython) 3.12.[X]
----- stderr -----
warning: `uv tool run` is experimental and may change without warning.
Resolved 6 packages in [TIME]
Installed 6 packages in [TIME]
+ black==24.1.1
+ click==8.1.7
+ mypy-extensions==1.0.0
+ packaging==24.0
+ pathspec==0.12.1
+ platformdirs==4.2.0
"###);
// Verify that `--with` installs a new version. // Verify that `--with` installs a new version.
// TODO(charlie): This could (in theory) layer the `--with` requirements on top of the existing // TODO(charlie): This could (in theory) layer the `--with` requirements on top of the existing
// environment. // environment.
@ -335,4 +304,160 @@ fn tool_run_from_install() {
+ pathspec==0.12.1 + pathspec==0.12.1
+ platformdirs==4.2.0 + platformdirs==4.2.0
"###); "###);
// Verify that `tool run black` at a different version (via `--from`) installs the new version.
uv_snapshot!(context.filters(), context.tool_run()
.arg("--from")
.arg("black==24.2.0")
.arg("black")
.arg("--version")
.env("UV_TOOL_DIR", tool_dir.as_os_str())
.env("XDG_BIN_HOME", bin_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
black, 24.2.0 (compiled: yes)
Python (CPython) 3.12.[X]
----- stderr -----
warning: `uv tool run` is experimental and may change without warning.
Resolved 6 packages in [TIME]
Prepared 1 package in [TIME]
Installed 6 packages in [TIME]
+ black==24.2.0
+ click==8.1.7
+ mypy-extensions==1.0.0
+ packaging==24.0
+ pathspec==0.12.1
+ platformdirs==4.2.0
"###);
}
#[test]
fn tool_run_cache() {
let context = TestContext::new_with_versions(&["3.11", "3.12"]).with_filtered_counts();
let tool_dir = context.temp_dir.child("tools");
let bin_dir = context.temp_dir.child("bin");
// Verify that `tool run black` installs the latest version.
uv_snapshot!(context.filters(), context.tool_run()
.arg("-p")
.arg("3.12")
.arg("black")
.arg("--version")
.env("UV_TOOL_DIR", tool_dir.as_os_str())
.env("XDG_BIN_HOME", bin_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
black, 24.3.0 (compiled: yes)
Python (CPython) 3.12.[X]
----- stderr -----
warning: `uv tool run` is experimental and may change without warning.
Resolved [N] packages in [TIME]
Prepared [N] packages in [TIME]
Installed [N] packages in [TIME]
+ black==24.3.0
+ click==8.1.7
+ mypy-extensions==1.0.0
+ packaging==24.0
+ pathspec==0.12.1
+ platformdirs==4.2.0
"###);
// Verify that `tool run black` uses the cached version.
uv_snapshot!(context.filters(), context.tool_run()
.arg("-p")
.arg("3.12")
.arg("black")
.arg("--version")
.env("UV_TOOL_DIR", tool_dir.as_os_str())
.env("XDG_BIN_HOME", bin_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
black, 24.3.0 (compiled: yes)
Python (CPython) 3.12.[X]
----- stderr -----
warning: `uv tool run` is experimental and may change without warning.
Resolved [N] packages in [TIME]
"###);
// Verify that varying the interpreter leads to a fresh environment.
uv_snapshot!(context.filters(), context.tool_run()
.arg("-p")
.arg("3.11")
.arg("black")
.arg("--version")
.env("UV_TOOL_DIR", tool_dir.as_os_str())
.env("XDG_BIN_HOME", bin_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
black, 24.3.0 (compiled: yes)
Python (CPython) 3.11.[X]
----- stderr -----
warning: `uv tool run` is experimental and may change without warning.
Resolved [N] packages in [TIME]
Prepared [N] packages in [TIME]
Installed [N] packages in [TIME]
+ black==24.3.0
+ click==8.1.7
+ mypy-extensions==1.0.0
+ packaging==24.0
+ pathspec==0.12.1
+ platformdirs==4.2.0
"###);
// But that re-invoking with the previous interpreter retains the cached version.
uv_snapshot!(context.filters(), context.tool_run()
.arg("-p")
.arg("3.12")
.arg("black")
.arg("--version")
.env("UV_TOOL_DIR", tool_dir.as_os_str())
.env("XDG_BIN_HOME", bin_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
black, 24.3.0 (compiled: yes)
Python (CPython) 3.12.[X]
----- stderr -----
warning: `uv tool run` is experimental and may change without warning.
Resolved [N] packages in [TIME]
"###);
// Verify that `--with` leads to a fresh environment.
uv_snapshot!(context.filters(), context.tool_run()
.arg("-p")
.arg("3.12")
.arg("--with")
.arg("iniconfig")
.arg("black")
.arg("--version")
.env("UV_TOOL_DIR", tool_dir.as_os_str())
.env("XDG_BIN_HOME", bin_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
black, 24.3.0 (compiled: yes)
Python (CPython) 3.12.[X]
----- stderr -----
warning: `uv tool run` is experimental and may change without warning.
Resolved [N] packages in [TIME]
Prepared [N] packages in [TIME]
Installed [N] packages in [TIME]
+ black==24.3.0
+ click==8.1.7
+ iniconfig==2.0.0
+ mypy-extensions==1.0.0
+ packaging==24.0
+ pathspec==0.12.1
+ platformdirs==4.2.0
"###);
} }