diff --git a/Cargo.lock b/Cargo.lock
index a5e99e3c3..42318d269 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4434,6 +4434,7 @@ dependencies = [
  "axoupdater",
  "base64 0.22.1",
  "byteorder",
+ "cache-key",
  "chrono",
  "clap",
  "distribution-types",
@@ -4461,7 +4462,6 @@ dependencies = [
  "rustc-hash 2.0.0",
  "serde",
  "serde_json",
- "tempfile",
  "textwrap",
  "thiserror",
  "tikv-jemallocator",
diff --git a/crates/cache-key/src/lib.rs b/crates/cache-key/src/lib.rs
index f55f8bb0d..d7aa69929 100644
--- a/crates/cache-key/src/lib.rs
+++ b/crates/cache-key/src/lib.rs
@@ -1,3 +1,4 @@
+pub use cache_key::{CacheKey, CacheKeyHasher};
 pub use canonical_url::{CanonicalUrl, RepositoryUrl};
 pub use digest::digest;
 
diff --git a/crates/uv-cache/src/lib.rs b/crates/uv-cache/src/lib.rs
index 555585aab..a94dcbf60 100644
--- a/crates/uv-cache/src/lib.rs
+++ b/crates/uv-cache/src/lib.rs
@@ -181,8 +181,8 @@ impl Cache {
 
     /// Create an ephemeral Python environment in the cache.
     pub fn environment(&self) -> io::Result<tempfile::TempDir> {
-        fs::create_dir_all(self.bucket(CacheBucket::Environments))?;
-        tempfile::tempdir_in(self.bucket(CacheBucket::Environments))
+        fs::create_dir_all(self.bucket(CacheBucket::Builds))?;
+        tempfile::tempdir_in(self.bucket(CacheBucket::Builds))
     }
 
     /// Returns `true` if a cache entry must be revalidated given the [`Refresh`] policy.
@@ -634,6 +634,8 @@ pub enum CacheBucket {
     /// other buckets directly would make atomic operations impossible.
     Archive,
     /// Ephemeral virtual environments used to execute PEP 517 builds and other operations.
+    Builds,
+    /// Reusable virtual environments used to invoke Python tools.
     Environments,
 }
 
@@ -647,7 +649,8 @@ impl CacheBucket {
             Self::Simple => "simple-v9",
             Self::Wheels => "wheels-v1",
             Self::Archive => "archive-v0",
-            Self::Environments => "environments-v0",
+            Self::Builds => "builds-v0",
+            Self::Environments => "environments-v1",
         }
     }
 
@@ -758,6 +761,9 @@ impl CacheBucket {
             Self::Archive => {
                 // Nothing to do.
             }
+            Self::Builds => {
+                // Nothing to do.
+            }
             Self::Environments => {
                 // Nothing to do.
             }
@@ -775,6 +781,7 @@ impl CacheBucket {
             Self::Interpreter,
             Self::Simple,
             Self::Archive,
+            Self::Builds,
             Self::Environments,
         ]
         .iter()
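
The bucket split above is what makes tool environments reusable at all: `Cache::environment` hands back a `tempfile::TempDir`, so any virtualenv created through it is deleted as soon as the handle is dropped — exactly right for PEP 517 builds, but it means the old `ToolEnvironment::Ephemeral(env, TempDir)` variant (removed further down) could never survive past a single `uv tool run` invocation. A minimal sketch of that drop behavior, assuming only the `tempfile` crate and none of uv's own types:

```rust
// Sketch: why a `TempDir`-backed build environment cannot double as a reusable
// tool-environment cache. Assumes only the `tempfile` crate.
use std::fs;

fn main() -> std::io::Result<()> {
    // Stand-in for the `builds-v0` bucket directory.
    let builds_bucket = std::env::temp_dir().join("builds-v0");
    fs::create_dir_all(&builds_bucket)?;

    let path = {
        // Mirrors `Cache::environment`: an ephemeral directory inside the bucket.
        let temp_dir = tempfile::tempdir_in(&builds_bucket)?;
        temp_dir.path().to_path_buf()
        // `temp_dir` is dropped here, and the directory is removed with it...
    };

    // ...so there is nothing left for a second invocation to reuse.
    assert!(!path.exists());
    Ok(())
}
```

Reusable tool environments therefore move to the separate, content-addressed `environments-v1` bucket, while `builds-v0` keeps the throwaway semantics; bumping the `Environments` version string also sidesteps any entries written under the old layout.
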
diff --git a/crates/uv-python/src/interpreter.rs b/crates/uv-python/src/interpreter.rs
index 1cb700c2c..50b52d9d5 100644
--- a/crates/uv-python/src/interpreter.rs
+++ b/crates/uv-python/src/interpreter.rs
@@ -697,7 +697,7 @@ impl InterpreterInfo {
 
         // If `executable` is a pyenv shim, a bash script that redirects to the activated
         // python executable at another path, we're not allowed to cache the interpreter info.
-        if same_file::is_same_file(executable, &info.sys_executable).unwrap_or(false) {
+        if is_same_file(executable, &info.sys_executable).unwrap_or(false) {
             fs::create_dir_all(cache_entry.dir())?;
             write_atomic_sync(
                 cache_entry.path(),
diff --git a/crates/uv-state/src/lib.rs b/crates/uv-state/src/lib.rs
index d7a4f3dfe..91dfb47c2 100644
--- a/crates/uv-state/src/lib.rs
+++ b/crates/uv-state/src/lib.rs
@@ -95,9 +95,9 @@ impl StateStore {
 /// are subdirectories of the state store root.
 #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
 pub enum StateBucket {
-    // Managed Python installations
+    /// Managed Python installations
     ManagedPython,
-    // Installed tools
+    /// Installed tools.
     Tools,
 }
 
diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml
index 373618c17..44a68a083 100644
--- a/crates/uv/Cargo.toml
+++ b/crates/uv/Cargo.toml
@@ -14,6 +14,7 @@ default-run = "uv"
 workspace = true
 
 [dependencies]
+cache-key = { workspace = true }
 distribution-types = { workspace = true }
 install-wheel-rs = { workspace = true, default-features = false }
 pep440_rs = { workspace = true }
@@ -58,7 +59,6 @@ regex = { workspace = true }
 rustc-hash = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }
-tempfile = { workspace = true }
 textwrap = { workspace = true }
 thiserror = { workspace = true }
 tokio = { workspace = true }
diff --git a/crates/uv/src/commands/project/ephemeral.rs b/crates/uv/src/commands/project/ephemeral.rs
new file mode 100644
index 000000000..3ececa04f
--- /dev/null
+++ b/crates/uv/src/commands/project/ephemeral.rs
@@ -0,0 +1,129 @@
+use tracing::debug;
+
+use cache_key::digest;
+use pypi_types::Requirement;
+use uv_cache::{Cache, CacheBucket};
+use uv_client::Connectivity;
+use uv_configuration::{Concurrency, PreviewMode};
+use uv_fs::{LockedFile, Simplified};
+use uv_python::{Interpreter, PythonEnvironment};
+use uv_requirements::RequirementsSpecification;
+use uv_resolver::Lock;
+
+use crate::commands::project::{resolve_environment, sync_environment};
+use crate::commands::SharedState;
+use crate::printer::Printer;
+use crate::settings::ResolverInstallerSettings;
+
+/// An ephemeral [`PythonEnvironment`] stored in the cache.
+#[derive(Debug)]
+pub(crate) struct EphemeralEnvironment(PythonEnvironment);
+
+impl From<EphemeralEnvironment> for PythonEnvironment {
+    fn from(ephemeral: EphemeralEnvironment) -> Self {
+        ephemeral.0
+    }
+}
+
+impl EphemeralEnvironment {
+    /// Get or create an [`EphemeralEnvironment`] based on a given set of requirements and a base
+    /// interpreter.
+    pub(crate) async fn get_or_create(
+        requirements: Vec<Requirement>,
+        interpreter: Interpreter,
+        settings: &ResolverInstallerSettings,
+        state: &SharedState,
+        preview: PreviewMode,
+        connectivity: Connectivity,
+        concurrency: Concurrency,
+        native_tls: bool,
+        cache: &Cache,
+        printer: Printer,
+    ) -> anyhow::Result<Self> {
+        let spec = RequirementsSpecification::from_requirements(requirements);
+
+        // Resolve the requirements with the interpreter.
+        let resolution = resolve_environment(
+            &interpreter,
+            spec,
+            settings.as_ref().into(),
+            state,
+            preview,
+            connectivity,
+            concurrency,
+            native_tls,
+            cache,
+            printer,
+        )
+        .await?;
+
+        // Hash the resolution by hashing the generated lockfile.
+        // TODO(charlie): If the resolution contains any mutable metadata (like a path or URL
+        // dependency), skip this step.
+        let lock = Lock::from_resolution_graph(&resolution)?;
+        let toml = lock.to_toml()?;
+        let resolution_hash = digest(&toml);
+
+        // Hash the interpreter by hashing the sysconfig data.
+        // TODO(charlie): Come up with a robust hash for the interpreter.
+        let interpreter_hash = digest(&interpreter.sys_executable());
+
+        // Search in the content-addressed cache.
+        let cache_entry = cache.entry(CacheBucket::Environments, interpreter_hash, resolution_hash);
+
+        // Lock the interpreter, to avoid concurrent modification across processes.
+        fs_err::tokio::create_dir_all(cache_entry.dir()).await?;
+        let _lock = LockedFile::acquire(
+            cache_entry.dir().join(".lock"),
+            cache_entry.dir().user_display(),
+        )?;
+
+        // If the receipt exists, return the environment.
+        let ok = cache_entry.path().join(".ok");
+        if ok.is_file() {
+            debug!(
+                "Found existing ephemeral environment at: `{}`",
+                cache_entry.path().display()
+            );
+            return Ok(Self(PythonEnvironment::from_root(
+                cache_entry.path(),
+                cache,
+            )?));
+        }
+
+        debug!(
+            "Creating ephemeral environment at: `{}`",
+            cache_entry.path().display()
+        );
+
+        let venv = uv_virtualenv::create_venv(
+            cache_entry.path(),
+            interpreter,
+            uv_virtualenv::Prompt::None,
+            false,
+            false,
+        )?;
+
+        // Install the ephemeral requirements.
+        // TODO(charlie): Rather than passing all the arguments to `sync_environment`, return a
+        // struct that lets us "continue" from `resolve_environment`.
+        let venv = sync_environment(
+            venv,
+            &resolution.into(),
+            settings.as_ref().into(),
+            state,
+            preview,
+            connectivity,
+            concurrency,
+            native_tls,
+            cache,
+            printer,
+        )
+        .await?;
+
+        // Create the receipt, to indicate to future readers that the environment is complete.
+        fs_err::tokio::File::create(ok).await?;
+
+        Ok(Self(venv))
+    }
+}
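
The new `ephemeral.rs` module above is the core of the change: an environment is addressed by two hashes — one over the interpreter's `sys_executable`, one over the TOML serialization of the resolved lockfile — and is only trusted once a `.ok` receipt exists, which is written after `sync_environment` completes, so a crash mid-install never leaves a half-built environment that later invocations would reuse. The sketch below walks through that lookup-and-receipt protocol with stand-ins for uv's internals: the hypothetical `digest`, `find_cached`, and `write_receipt` helpers use `DefaultHasher` in place of `cache_key::digest`, and the per-entry `.lock` file taken via `uv_fs::LockedFile` is omitted.

```rust
// Sketch of the content-addressed lookup and `.ok` receipt used by
// `EphemeralEnvironment::get_or_create`; hashing and locking are simplified stand-ins.
use std::collections::hash_map::DefaultHasher;
use std::fs;
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};

// Stand-in for `cache_key::digest`: any stable hash of the input works here.
fn digest(value: &impl Hash) -> String {
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    format!("{:x}", hasher.finish())
}

/// Locate the cached environment for an (interpreter, resolution) pair, returning
/// `None` unless the `.ok` receipt marks it as fully populated.
fn find_cached(cache_root: &Path, sys_executable: &str, lockfile_toml: &str) -> Option<PathBuf> {
    let entry = cache_root
        .join("environments-v1")
        .join(digest(&sys_executable)) // one shard per interpreter
        .join(digest(&lockfile_toml)); // one directory per resolution
    entry.join(".ok").is_file().then_some(entry)
}

/// Write the receipt that tells future readers the environment is complete.
fn write_receipt(entry: &Path) -> std::io::Result<()> {
    fs::create_dir_all(entry)?;
    fs::File::create(entry.join(".ok"))?;
    Ok(())
}

fn main() -> std::io::Result<()> {
    let cache_root = std::env::temp_dir().join("environments-sketch");
    let (python, lock) = ("/usr/bin/python3.12", "version = 1\n");

    let entry = cache_root
        .join("environments-v1")
        .join(digest(&python))
        .join(digest(&lock));
    // ...the real code creates the virtualenv and installs the resolution here...
    write_receipt(&entry)?;

    assert_eq!(find_cached(&cache_root, python, lock), Some(entry));
    Ok(())
}
```

Because the receipt is only written after the install succeeds, concurrent `uv tool run` invocations that race on the same key either find a complete environment or build their own under the `.lock` file.
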
diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs
index c5e0ee904..5d25b3d9a 100644
--- a/crates/uv/src/commands/project/mod.rs
+++ b/crates/uv/src/commands/project/mod.rs
@@ -9,7 +9,9 @@ use pep440_rs::Version;
 use pypi_types::Requirement;
 use uv_cache::Cache;
 use uv_client::{BaseClientBuilder, Connectivity, FlatIndexClient, RegistryClientBuilder};
-use uv_configuration::{Concurrency, ExtrasSpecification, PreviewMode, SetupPyStrategy};
+use uv_configuration::{
+    Concurrency, ExtrasSpecification, PreviewMode, Reinstall, SetupPyStrategy, Upgrade,
+};
 use uv_dispatch::BuildDispatch;
 use uv_distribution::{DistributionDatabase, Workspace};
 use uv_fs::Simplified;
@@ -19,16 +21,17 @@ use uv_python::{
     PythonInstallation, PythonPreference, PythonRequest, VersionRequest,
 };
 use uv_requirements::{NamedRequirementsResolver, RequirementsSpecification};
-use uv_resolver::{FlatIndex, OptionsBuilder, PythonRequirement, RequiresPython};
-use uv_types::{BuildIsolation, HashStrategy};
+use uv_resolver::{FlatIndex, OptionsBuilder, PythonRequirement, RequiresPython, ResolutionGraph};
+use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy};
 
 use crate::commands::pip::operations::Modifications;
 use crate::commands::reporters::ResolverReporter;
 use crate::commands::{pip, SharedState};
 use crate::printer::Printer;
-use crate::settings::ResolverInstallerSettings;
+use crate::settings::{InstallerSettingsRef, ResolverInstallerSettings, ResolverSettingsRef};
 
 pub(crate) mod add;
+pub(crate) mod ephemeral;
 pub(crate) mod lock;
 pub(crate) mod remove;
 pub(crate) mod run;
@@ -357,6 +360,234 @@ pub(crate) async fn resolve_names(
     Ok(resolver.resolve().await?)
 }
 
+/// Run dependency resolution for an interpreter, returning the [`ResolutionGraph`].
+pub(crate) async fn resolve_environment<'a>(
+    interpreter: &Interpreter,
+    spec: RequirementsSpecification,
+    settings: ResolverSettingsRef<'_>,
+    state: &SharedState,
+    preview: PreviewMode,
+    connectivity: Connectivity,
+    concurrency: Concurrency,
+    native_tls: bool,
+    cache: &Cache,
+    printer: Printer,
+) -> anyhow::Result<ResolutionGraph> {
+    let ResolverSettingsRef {
+        index_locations,
+        index_strategy,
+        keyring_provider,
+        resolution,
+        prerelease,
+        config_setting,
+        exclude_newer,
+        link_mode,
+        upgrade: _,
+        build_options,
+    } = settings;
+
+    // Determine the tags, markers, and interpreter to use for resolution.
+    let tags = interpreter.tags()?;
+    let markers = interpreter.markers();
+    let python_requirement = PythonRequirement::from_interpreter(interpreter);
+
+    // Initialize the registry client.
+    let client = RegistryClientBuilder::new(cache.clone())
+        .native_tls(native_tls)
+        .connectivity(connectivity)
+        .index_urls(index_locations.index_urls())
+        .index_strategy(index_strategy)
+        .keyring(keyring_provider)
+        .markers(markers)
+        .platform(interpreter.platform())
+        .build();
+
+    let options = OptionsBuilder::new()
+        .resolution_mode(resolution)
+        .prerelease_mode(prerelease)
+        .exclude_newer(exclude_newer)
+        .index_strategy(index_strategy)
+        .build();
+
+    // TODO(charlie): These are all default values. We should consider whether we want to make them
+    // optional on the downstream APIs.
+    let build_isolation = BuildIsolation::default();
+    let dev = Vec::default();
+    let extras = ExtrasSpecification::default();
+    let hasher = HashStrategy::default();
+    let preferences = Vec::default();
+    let setup_py = SetupPyStrategy::default();
+
+    // When resolving from an interpreter, we assume an empty environment, so reinstalls and
+    // upgrades aren't relevant.
+    let reinstall = Reinstall::default();
+    let upgrade = Upgrade::default();
+
+    // Resolve the flat indexes from `--find-links`.
+    let flat_index = {
+        let client = FlatIndexClient::new(&client, cache);
+        let entries = client.fetch(index_locations.flat_index()).await?;
+        FlatIndex::from_entries(entries, Some(tags), &hasher, build_options)
+    };
+
+    // Create a build dispatch.
+    let resolve_dispatch = BuildDispatch::new(
+        &client,
+        cache,
+        interpreter,
+        index_locations,
+        &flat_index,
+        &state.index,
+        &state.git,
+        &state.in_flight,
+        index_strategy,
+        setup_py,
+        config_setting,
+        build_isolation,
+        link_mode,
+        build_options,
+        exclude_newer,
+        concurrency,
+        preview,
+    );
+
+    // Resolve the requirements.
+    Ok(pip::operations::resolve(
+        spec.requirements,
+        spec.constraints,
+        spec.overrides,
+        dev,
+        spec.source_trees,
+        spec.project,
+        &extras,
+        preferences,
+        EmptyInstalledPackages,
+        &hasher,
+        &reinstall,
+        &upgrade,
+        Some(tags),
+        Some(markers),
+        python_requirement,
+        &client,
+        &flat_index,
+        &state.index,
+        &resolve_dispatch,
+        concurrency,
+        options,
+        printer,
+        preview,
+    )
+    .await?)
+}
+
+/// Sync a [`PythonEnvironment`] with a set of resolved requirements.
+pub(crate) async fn sync_environment(
+    venv: PythonEnvironment,
+    resolution: &Resolution,
+    settings: InstallerSettingsRef<'_>,
+    state: &SharedState,
+    preview: PreviewMode,
+    connectivity: Connectivity,
+    concurrency: Concurrency,
+    native_tls: bool,
+    cache: &Cache,
+    printer: Printer,
+) -> anyhow::Result<PythonEnvironment> {
+    let InstallerSettingsRef {
+        index_locations,
+        index_strategy,
+        keyring_provider,
+        config_setting,
+        exclude_newer,
+        link_mode,
+        compile_bytecode,
+        reinstall,
+        build_options,
+    } = settings;
+
+    let site_packages = SitePackages::from_environment(&venv)?;
+
+    // Determine the tags, markers, and interpreter to use for resolution.
+    let interpreter = venv.interpreter();
+    let tags = venv.interpreter().tags()?;
+    let markers = venv.interpreter().markers();
+
+    // Initialize the registry client.
+    let client = RegistryClientBuilder::new(cache.clone())
+        .native_tls(native_tls)
+        .connectivity(connectivity)
+        .index_urls(index_locations.index_urls())
+        .index_strategy(index_strategy)
+        .keyring(keyring_provider)
+        .markers(markers)
+        .platform(interpreter.platform())
+        .build();
+
+    // TODO(charlie): These are all default values. We should consider whether we want to make them
+    // optional on the downstream APIs.
+    let build_isolation = BuildIsolation::default();
+    let dry_run = false;
+    let hasher = HashStrategy::default();
+    let setup_py = SetupPyStrategy::default();
+
+    // Resolve the flat indexes from `--find-links`.
+    let flat_index = {
+        let client = FlatIndexClient::new(&client, cache);
+        let entries = client.fetch(index_locations.flat_index()).await?;
+        FlatIndex::from_entries(entries, Some(tags), &hasher, build_options)
+    };
+
+    // Create a build dispatch.
+    let build_dispatch = BuildDispatch::new(
+        &client,
+        cache,
+        interpreter,
+        index_locations,
+        &flat_index,
+        &state.index,
+        &state.git,
+        &state.in_flight,
+        index_strategy,
+        setup_py,
+        config_setting,
+        build_isolation,
+        link_mode,
+        build_options,
+        exclude_newer,
+        concurrency,
+        preview,
+    );
+
+    // Sync the environment.
+    pip::operations::install(
+        resolution,
+        site_packages,
+        Modifications::Exact,
+        reinstall,
+        build_options,
+        link_mode,
+        compile_bytecode,
+        index_locations,
+        &hasher,
+        tags,
+        &client,
+        &state.in_flight,
+        concurrency,
+        &build_dispatch,
+        cache,
+        &venv,
+        dry_run,
+        printer,
+        preview,
+    )
+    .await?;
+
+    // Notify the user of any resolution diagnostics.
+    pip::operations::diagnose_resolution(resolution.diagnostics(), printer)?;
+
+    Ok(venv)
+}
+
 /// Update a [`PythonEnvironment`] to satisfy a set of [`RequirementsSource`]s.
 pub(crate) async fn update_environment(
     venv: PythonEnvironment,
@@ -370,7 +601,6 @@ pub(crate) async fn update_environment(
     cache: &Cache,
     printer: Printer,
 ) -> anyhow::Result<PythonEnvironment> {
-    // Extract the project settings.
     let ResolverInstallerSettings {
         index_locations,
         index_strategy,
diff --git a/crates/uv/src/commands/tool/install.rs b/crates/uv/src/commands/tool/install.rs
index 4e3bd8878..d63d8d63d 100644
--- a/crates/uv/src/commands/tool/install.rs
+++ b/crates/uv/src/commands/tool/install.rs
@@ -202,6 +202,8 @@ pub(crate) async fn install(
     let environment = if let Some(environment) = existing_environment {
         environment
     } else {
+        // TODO(charlie): Resolve, then create the environment, then install. This ensures that
+        // we don't nuke the environment if the resolution fails.
         installed_tools.create_environment(&from.name, interpreter)?
     };
 
diff --git a/crates/uv/src/commands/tool/run.rs b/crates/uv/src/commands/tool/run.rs
index 72f610740..62dfd6c35 100644
--- a/crates/uv/src/commands/tool/run.rs
+++ b/crates/uv/src/commands/tool/run.rs
@@ -1,6 +1,5 @@
 use std::borrow::Cow;
 use std::ffi::OsString;
-use std::ops::Deref;
 use std::path::PathBuf;
 use std::str::FromStr;
 
@@ -21,11 +20,10 @@ use uv_python::{
     EnvironmentPreference, PythonEnvironment, PythonFetch, PythonInstallation, PythonPreference,
     PythonRequest,
 };
-use uv_requirements::RequirementsSpecification;
 use uv_tool::InstalledTools;
 use uv_warnings::warn_user_once;
 
-use crate::commands::project::update_environment;
+use crate::commands::project::ephemeral::EphemeralEnvironment;
 use crate::commands::tool::common::resolve_requirements;
 use crate::commands::{ExitStatus, SharedState};
 use crate::printer::Printer;
@@ -133,23 +131,6 @@ pub(crate) async fn run(
     }
 }
 
-#[derive(Debug)]
-enum ToolEnvironment {
-    Existing(PythonEnvironment),
-    Ephemeral(PythonEnvironment, #[allow(dead_code)] tempfile::TempDir),
-}
-
-impl Deref for ToolEnvironment {
-    type Target = PythonEnvironment;
-
-    fn deref(&self) -> &Self::Target {
-        match self {
-            ToolEnvironment::Existing(environment) => environment,
-            ToolEnvironment::Ephemeral(environment, _) => environment,
-        }
-    }
-}
-
 /// Get or create a [`PythonEnvironment`] in which to run the specified tools.
 ///
 /// If the target tool is already installed in a compatible environment, returns that
@@ -168,7 +149,7 @@ async fn get_or_create_environment(
     native_tls: bool,
     cache: &Cache,
     printer: Printer,
-) -> Result<ToolEnvironment> {
+) -> Result<PythonEnvironment> {
     let client_builder = BaseClientBuilder::new()
         .connectivity(connectivity)
         .native_tls(native_tls);
@@ -231,10 +212,10 @@ async fn get_or_create_environment(
         requirements
     };
 
+    // Check if the tool is already installed in a compatible environment.
     if !isolated {
         let installed_tools = InstalledTools::from_settings()?;
 
-        // Check if the tool is already installed in a compatible environment.
         let existing_environment = installed_tools
             .get_environment(&from.name, cache)?
@@ -259,7 +240,7 @@ async fn get_or_create_environment(
                 Ok(SatisfiesResult::Fresh { .. })
             ) {
                 debug!("Using existing tool `{}`", from.name);
-                return Ok(ToolEnvironment::Existing(environment));
+                return Ok(environment);
             }
         }
     }
@@ -267,24 +248,9 @@ async fn get_or_create_environment(
     // TODO(zanieb): When implementing project-level tools, discover the project and check if it has the tool.
     // TODO(zanieb): Determine if we should layer on top of the project environment if it is present.
 
-    // If necessary, create an environment for the ephemeral requirements.
-    debug!("Syncing ephemeral environment.");
-
-    // Create a virtual environment.
-    let temp_dir = cache.environment()?;
-    let venv = uv_virtualenv::create_venv(
-        temp_dir.path(),
+    let environment = EphemeralEnvironment::get_or_create(
+        requirements,
         interpreter,
-        uv_virtualenv::Prompt::None,
-        false,
-        false,
-    )?;
-
-    // Install the ephemeral requirements.
-    let spec = RequirementsSpecification::from_requirements(requirements.clone());
-    let ephemeral_env = update_environment(
-        venv,
-        spec,
         settings,
         &state,
         preview,
@@ -296,7 +262,7 @@ async fn get_or_create_environment(
     )
     .await?;
 
-    Ok(ToolEnvironment::Ephemeral(ephemeral_env, temp_dir))
+    Ok(environment.into())
 }
 
 /// Parse a target into a command name and a requirement.
diff --git a/crates/uv/tests/pip_install.rs b/crates/uv/tests/pip_install.rs
index 10a54736a..14b670a17 100644
--- a/crates/uv/tests/pip_install.rs
+++ b/crates/uv/tests/pip_install.rs
@@ -223,32 +223,32 @@ dependencies = ["flask==1.0.x"]
       --- stderr:
       Traceback (most recent call last):
         File "<string>", line 14, in <module>
-        File "[CACHE_DIR]/environments-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel
+        File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel
           return self._get_build_requires(config_settings, requirements=['wheel'])
                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-        File "[CACHE_DIR]/environments-v0/[TMP]/build_meta.py", line 295, in _get_build_requires
+        File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 295, in _get_build_requires
           self.run_setup()
-        File "[CACHE_DIR]/environments-v0/[TMP]/build_meta.py", line 487, in run_setup
+        File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 487, in run_setup
           super().run_setup(setup_script=setup_script)
-        File "[CACHE_DIR]/environments-v0/[TMP]/build_meta.py", line 311, in run_setup
+        File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 311, in run_setup
           exec(code, locals())
         File "<string>", line 1, in <module>
-        File "[CACHE_DIR]/environments-v0/[TMP]/__init__.py", line 104, in setup
+        File "[CACHE_DIR]/builds-v0/[TMP]/__init__.py", line 104, in setup
          return distutils.core.setup(**attrs)
                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-        File "[CACHE_DIR]/environments-v0/[TMP]/core.py", line 159, in setup
+        File "[CACHE_DIR]/builds-v0/[TMP]/core.py", line 159, in setup
          dist.parse_config_files()
-        File "[CACHE_DIR]/environments-v0/[TMP]/_virtualenv.py", line 22, in parse_config_files
+        File "[CACHE_DIR]/builds-v0/[TMP]/_virtualenv.py", line 22, in parse_config_files
          result = old_parse_config_files(self, *args, **kwargs)
                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-        File "[CACHE_DIR]/environments-v0/[TMP]/dist.py", line 631, in parse_config_files
+        File "[CACHE_DIR]/builds-v0/[TMP]/dist.py", line 631, in parse_config_files
          pyprojecttoml.apply_configuration(self, filename, ignore_option_errors)
-        File "[CACHE_DIR]/environments-v0/[TMP]/pyprojecttoml.py", line 68, in apply_configuration
+        File "[CACHE_DIR]/builds-v0/[TMP]/pyprojecttoml.py", line 68, in apply_configuration
          config = read_configuration(filepath, True, ignore_option_errors, dist)
                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-        File "[CACHE_DIR]/environments-v0/[TMP]/pyprojecttoml.py", line 129, in read_configuration
+        File "[CACHE_DIR]/builds-v0/[TMP]/pyprojecttoml.py", line 129, in read_configuration
          validate(subset, filepath)
-        File "[CACHE_DIR]/environments-v0/[TMP]/pyprojecttoml.py", line 57, in validate
+        File "[CACHE_DIR]/builds-v0/[TMP]/pyprojecttoml.py", line 57, in validate
          raise ValueError(f"{error}/n{summary}") from None
       ValueError: invalid pyproject.toml config: `project.dependencies[0]`.
       configuration error: `project.dependencies[0]` must be pep508
diff --git a/crates/uv/tests/tool_run.rs b/crates/uv/tests/tool_run.rs
index 2efbf88a7..ccce00dcf 100644
--- a/crates/uv/tests/tool_run.rs
+++ b/crates/uv/tests/tool_run.rs
@@ -9,7 +9,7 @@ mod common;
 
 #[test]
 fn tool_run_args() {
-    let context = TestContext::new("3.12");
+    let context = TestContext::new("3.12").with_filtered_counts();
     let tool_dir = context.temp_dir.child("tools");
     let bin_dir = context.temp_dir.child("bin");
 
@@ -40,9 +40,9 @@ fn tool_run_args() {
 
     ----- stderr -----
     warning: `uv tool run` is experimental and may change without warning.
-    Resolved 4 packages in [TIME]
-    Prepared 4 packages in [TIME]
-    Installed 4 packages in [TIME]
+    Resolved [N] packages in [TIME]
+    Prepared [N] packages in [TIME]
+    Installed [N] packages in [TIME]
      + iniconfig==2.0.0
      + packaging==24.0
      + pluggy==1.4.0
@@ -63,12 +63,7 @@ fn tool_run_args() {
 
     ----- stderr -----
     warning: `uv tool run` is experimental and may change without warning.
-    Resolved 4 packages in [TIME]
-    Installed 4 packages in [TIME]
-     + iniconfig==2.0.0
-     + packaging==24.0
-     + pluggy==1.4.0
-     + pytest==8.1.1
+    Resolved [N] packages in [TIME]
     "###);
 }
 
@@ -280,32 +275,6 @@ fn tool_run_from_install() {
      + platformdirs==4.2.0
     "###);
 
-    // Verify that `tool run black` at a different version (via `--from`) installs the new version.
-    uv_snapshot!(context.filters(), context.tool_run()
-        .arg("--from")
-        .arg("black==24.1.1")
-        .arg("black")
-        .arg("--version")
-        .env("UV_TOOL_DIR", tool_dir.as_os_str())
-        .env("XDG_BIN_HOME", bin_dir.as_os_str()), @r###"
-    success: true
-    exit_code: 0
-    ----- stdout -----
-    black, 24.1.1 (compiled: yes)
-    Python (CPython) 3.12.[X]
-
-    ----- stderr -----
-    warning: `uv tool run` is experimental and may change without warning.
-    Resolved 6 packages in [TIME]
-    Installed 6 packages in [TIME]
-     + black==24.1.1
-     + click==8.1.7
-     + mypy-extensions==1.0.0
-     + packaging==24.0
-     + pathspec==0.12.1
-     + platformdirs==4.2.0
-    "###);
-
     // Verify that `--with` installs a new version.
     // TODO(charlie): This could (in theory) layer the `--with` requirements on top of the existing
     // environment.
@@ -335,4 +304,160 @@ fn tool_run_from_install() {
      + pathspec==0.12.1
      + platformdirs==4.2.0
     "###);
+
+    // Verify that `tool run black` at a different version (via `--from`) installs the new version.
+    uv_snapshot!(context.filters(), context.tool_run()
+        .arg("--from")
+        .arg("black==24.2.0")
+        .arg("black")
+        .arg("--version")
+        .env("UV_TOOL_DIR", tool_dir.as_os_str())
+        .env("XDG_BIN_HOME", bin_dir.as_os_str()), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+    black, 24.2.0 (compiled: yes)
+    Python (CPython) 3.12.[X]
+
+    ----- stderr -----
+    warning: `uv tool run` is experimental and may change without warning.
+    Resolved 6 packages in [TIME]
+    Prepared 1 package in [TIME]
+    Installed 6 packages in [TIME]
+     + black==24.2.0
+     + click==8.1.7
+     + mypy-extensions==1.0.0
+     + packaging==24.0
+     + pathspec==0.12.1
+     + platformdirs==4.2.0
+    "###);
+}
+
+#[test]
+fn tool_run_cache() {
+    let context = TestContext::new_with_versions(&["3.11", "3.12"]).with_filtered_counts();
+    let tool_dir = context.temp_dir.child("tools");
+    let bin_dir = context.temp_dir.child("bin");
+
+    // Verify that `tool run black` installs the latest version.
+    uv_snapshot!(context.filters(), context.tool_run()
+        .arg("-p")
+        .arg("3.12")
+        .arg("black")
+        .arg("--version")
+        .env("UV_TOOL_DIR", tool_dir.as_os_str())
+        .env("XDG_BIN_HOME", bin_dir.as_os_str()), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+    black, 24.3.0 (compiled: yes)
+    Python (CPython) 3.12.[X]
+
+    ----- stderr -----
+    warning: `uv tool run` is experimental and may change without warning.
+    Resolved [N] packages in [TIME]
+    Prepared [N] packages in [TIME]
+    Installed [N] packages in [TIME]
+     + black==24.3.0
+     + click==8.1.7
+     + mypy-extensions==1.0.0
+     + packaging==24.0
+     + pathspec==0.12.1
+     + platformdirs==4.2.0
+    "###);
+
+    // Verify that `tool run black` uses the cached version.
+    uv_snapshot!(context.filters(), context.tool_run()
+        .arg("-p")
+        .arg("3.12")
+        .arg("black")
+        .arg("--version")
+        .env("UV_TOOL_DIR", tool_dir.as_os_str())
+        .env("XDG_BIN_HOME", bin_dir.as_os_str()), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+    black, 24.3.0 (compiled: yes)
+    Python (CPython) 3.12.[X]
+
+    ----- stderr -----
+    warning: `uv tool run` is experimental and may change without warning.
+    Resolved [N] packages in [TIME]
+    "###);
+
+    // Verify that varying the interpreter leads to a fresh environment.
+    uv_snapshot!(context.filters(), context.tool_run()
+        .arg("-p")
+        .arg("3.11")
+        .arg("black")
+        .arg("--version")
+        .env("UV_TOOL_DIR", tool_dir.as_os_str())
+        .env("XDG_BIN_HOME", bin_dir.as_os_str()), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+    black, 24.3.0 (compiled: yes)
+    Python (CPython) 3.11.[X]
+
+    ----- stderr -----
+    warning: `uv tool run` is experimental and may change without warning.
+    Resolved [N] packages in [TIME]
+    Prepared [N] packages in [TIME]
+    Installed [N] packages in [TIME]
+     + black==24.3.0
+     + click==8.1.7
+     + mypy-extensions==1.0.0
+     + packaging==24.0
+     + pathspec==0.12.1
+     + platformdirs==4.2.0
+    "###);
+
+    // But that re-invoking with the previous interpreter retains the cached version.
+    uv_snapshot!(context.filters(), context.tool_run()
+        .arg("-p")
+        .arg("3.12")
+        .arg("black")
+        .arg("--version")
+        .env("UV_TOOL_DIR", tool_dir.as_os_str())
+        .env("XDG_BIN_HOME", bin_dir.as_os_str()), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+    black, 24.3.0 (compiled: yes)
+    Python (CPython) 3.12.[X]
+
+    ----- stderr -----
+    warning: `uv tool run` is experimental and may change without warning.
+    Resolved [N] packages in [TIME]
+    "###);
+
+    // Verify that `--with` leads to a fresh environment.
+    uv_snapshot!(context.filters(), context.tool_run()
+        .arg("-p")
+        .arg("3.12")
+        .arg("--with")
+        .arg("iniconfig")
+        .arg("black")
+        .arg("--version")
+        .env("UV_TOOL_DIR", tool_dir.as_os_str())
+        .env("XDG_BIN_HOME", bin_dir.as_os_str()), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+    black, 24.3.0 (compiled: yes)
+    Python (CPython) 3.12.[X]
+
+    ----- stderr -----
+    warning: `uv tool run` is experimental and may change without warning.
+    Resolved [N] packages in [TIME]
+    Prepared [N] packages in [TIME]
+    Installed [N] packages in [TIME]
+     + black==24.3.0
+     + click==8.1.7
+     + iniconfig==2.0.0
+     + mypy-extensions==1.0.0
+     + packaging==24.0
+     + pathspec==0.12.1
+     + platformdirs==4.2.0
+    "###);
 }