Add support for dynamic cache keys (#7136)

## Summary

This PR adds a more flexible cache invalidation abstraction for uv, and
uses that new abstraction to improve support for dynamic metadata.

Specifically, instead of relying solely on a timestamp, we now pass
around a `CacheInfo` struct which (as of now) contains an
`Option<Timestamp>` and an `Option<CacheCommit>`. The `CacheInfo` is
saved in the `.dist-info` directory as `uv_cache.json`, so we can test
both already-installed and _cached_ distributions for cache validity.
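For illustration, a `uv_cache.json` in a `.dist-info` directory might look
roughly like this (values are made up; the `timestamp` layout follows serde's
default `SystemTime` encoding):

```json
{
  "timestamp": { "secs_since_epoch": 1725900000, "nanos_since_epoch": 0 },
  "commit": "0123456789abcdef0123456789abcdef01234567"
}
```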

Beyond the defaults (changes to `pyproject.toml`, `setup.py`, and
`setup.cfg`), users can also specify additional cache keys, and it's easy
for us to extend support in the future. Right now, cache keys can either
be instructions to include the current commit (for `setuptools_scm` and
similar) or file paths (for `hatch-requirements-txt` and similar):

```toml
[tool.uv]
cache-keys = [{ file = "requirements.txt" }, { git = true }]
```
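Judging by the untagged `CacheKey` enum in the new `uv-cache-info` crate, a
bare string also appears to be accepted as shorthand for a file path:

```toml
[tool.uv]
cache-keys = ["requirements.txt", { git = true }]
```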

This change should be fully backwards compatible.
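In particular, pre-existing cache entries that stored a bare timestamp remain
readable via the untagged `CacheInfoWire` enum. A sketch of that compatibility
path (JSON shown for readability; the on-disk pointers are MsgPack-encoded,
and the timestamp layout assumes serde's default `SystemTime` encoding):

```rust
use uv_cache_info::CacheInfo;

fn main() -> Result<(), serde_json::Error> {
    // An old-style payload: a bare timestamp, with no commit information.
    let old = r#"{ "secs_since_epoch": 1725900000, "nanos_since_epoch": 0 }"#;

    // Deserializes via `CacheInfoWire::Timestamp` into the richer struct.
    let info: CacheInfo = serde_json::from_str(old)?;
    assert!(!info.is_empty());
    Ok(())
}
```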

Closes https://github.com/astral-sh/uv/issues/6964.

Closes https://github.com/astral-sh/uv/issues/6255.

Closes https://github.com/astral-sh/uv/issues/6860.
Charlie Marsh · 2024-09-09 16:19:15 -04:00 · committed by GitHub
commit 4f2349119c (parent 9a7262c360)
47 changed files with 1036 additions and 206 deletions

Cargo.lock

@ -1048,6 +1048,7 @@ dependencies = [
"tracing",
"url",
"urlencoding",
"uv-cache-info",
"uv-fs",
"uv-git",
"uv-normalize",
@ -1780,6 +1781,7 @@ dependencies = [
"tempfile",
"thiserror",
"tracing",
"uv-cache-info",
"uv-fs",
"uv-normalize",
"uv-warnings",
@ -4498,6 +4500,7 @@ dependencies = [
"url",
"uv-auth",
"uv-cache",
"uv-cache-info",
"uv-cli",
"uv-client",
"uv-configuration",
@ -4595,11 +4598,24 @@ dependencies = [
"tempfile",
"tracing",
"url",
"uv-cache-info",
"uv-fs",
"uv-normalize",
"walkdir",
]
[[package]]
name = "uv-cache-info"
version = "0.0.1"
dependencies = [
"fs-err",
"schemars",
"serde",
"thiserror",
"toml",
"tracing",
]
[[package]]
name = "uv-cli"
version = "0.0.1"
@ -4681,6 +4697,7 @@ name = "uv-configuration"
version = "0.0.1"
dependencies = [
"anyhow",
"cache-key",
"clap",
"distribution-types",
"either",
@ -4696,6 +4713,7 @@ dependencies = [
"url",
"uv-auth",
"uv-cache",
"uv-cache-info",
"uv-normalize",
]
@ -4793,6 +4811,7 @@ dependencies = [
"tracing",
"url",
"uv-cache",
"uv-cache-info",
"uv-client",
"uv-configuration",
"uv-extract",
@ -4896,6 +4915,7 @@ dependencies = [
"tracing",
"url",
"uv-cache",
"uv-cache-info",
"uv-configuration",
"uv-distribution",
"uv-extract",
@ -4984,6 +5004,7 @@ dependencies = [
"tracing",
"url",
"uv-cache",
"uv-cache-info",
"uv-client",
"uv-extract",
"uv-fs",
@ -5116,6 +5137,7 @@ dependencies = [
"thiserror",
"toml",
"tracing",
"uv-cache-info",
"uv-configuration",
"uv-fs",
"uv-macros",


@ -30,6 +30,7 @@ requirements-txt = { path = "crates/requirements-txt" }
uv-auth = { path = "crates/uv-auth" }
uv-build = { path = "crates/uv-build" }
uv-cache = { path = "crates/uv-cache" }
uv-cache-info = { path = "crates/uv-cache-info" }
uv-cli = { path = "crates/uv-cli" }
uv-client = { path = "crates/uv-client" }
uv-configuration = { path = "crates/uv-configuration" }


@ -19,6 +19,7 @@ pep440_rs = { workspace = true }
pep508_rs = { workspace = true, features = ["serde"] }
platform-tags = { workspace = true }
pypi-types = { workspace = true }
uv-cache-info = { workspace = true }
uv-fs = { workspace = true }
uv-git = { workspace = true }
uv-normalize = { workspace = true }


@ -5,6 +5,7 @@ use anyhow::{anyhow, Result};
use distribution_filename::WheelFilename;
use pep508_rs::VerbatimUrl;
use pypi_types::{HashDigest, ParsedDirectoryUrl};
use uv_cache_info::CacheInfo;
use uv_normalize::PackageName;
use crate::{
@ -26,6 +27,7 @@ pub struct CachedRegistryDist {
pub filename: WheelFilename,
pub path: PathBuf,
pub hashes: Vec<HashDigest>,
pub cache_info: CacheInfo,
}
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
@ -36,6 +38,7 @@ pub struct CachedDirectUrlDist {
pub editable: bool,
pub r#virtual: bool,
pub hashes: Vec<HashDigest>,
pub cache_info: CacheInfo,
}
impl CachedDist {
@ -44,6 +47,7 @@ impl CachedDist {
remote: Dist,
filename: WheelFilename,
hashes: Vec<HashDigest>,
cache_info: CacheInfo,
path: PathBuf,
) -> Self {
match remote {
@ -51,11 +55,13 @@ impl CachedDist {
filename,
path,
hashes,
cache_info,
}),
Dist::Built(BuiltDist::DirectUrl(dist)) => Self::Url(CachedDirectUrlDist {
filename,
url: dist.url,
hashes,
cache_info,
path,
editable: false,
r#virtual: false,
@ -64,6 +70,7 @@ impl CachedDist {
filename,
url: dist.url,
hashes,
cache_info,
path,
editable: false,
r#virtual: false,
@ -72,11 +79,13 @@ impl CachedDist {
filename,
path,
hashes,
cache_info,
}),
Dist::Source(SourceDist::DirectUrl(dist)) => Self::Url(CachedDirectUrlDist {
filename,
url: dist.url,
hashes,
cache_info,
path,
editable: false,
r#virtual: false,
@ -85,6 +94,7 @@ impl CachedDist {
filename,
url: dist.url,
hashes,
cache_info,
path,
editable: false,
r#virtual: false,
@ -93,6 +103,7 @@ impl CachedDist {
filename,
url: dist.url,
hashes,
cache_info,
path,
editable: false,
r#virtual: false,
@ -101,6 +112,7 @@ impl CachedDist {
filename,
url: dist.url,
hashes,
cache_info,
path,
editable: dist.editable,
r#virtual: dist.r#virtual,
@ -116,6 +128,14 @@ impl CachedDist {
}
}
/// Return the [`CacheInfo`] of the distribution.
pub fn cache_info(&self) -> &CacheInfo {
match self {
Self::Registry(dist) => &dist.cache_info,
Self::Url(dist) => &dist.cache_info,
}
}
/// Return the [`ParsedUrl`] of the distribution, if it exists.
pub fn parsed_url(&self) -> Result<Option<ParsedUrl>> {
match self {
@ -161,12 +181,14 @@ impl CachedDirectUrlDist {
filename: WheelFilename,
url: VerbatimUrl,
hashes: Vec<HashDigest>,
cache_info: CacheInfo,
path: PathBuf,
) -> Self {
Self {
filename,
url,
hashes,
cache_info,
path,
editable: false,
r#virtual: false,


@ -10,6 +10,7 @@ use url::Url;
use distribution_filename::EggInfoFilename;
use pep440_rs::Version;
use pypi_types::DirectUrl;
use uv_cache_info::CacheInfo;
use uv_fs::Simplified;
use uv_normalize::PackageName;
@ -35,6 +36,7 @@ pub struct InstalledRegistryDist {
pub name: PackageName,
pub version: Version,
pub path: PathBuf,
pub cache_info: Option<CacheInfo>,
}
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
@ -45,6 +47,7 @@ pub struct InstalledDirectUrlDist {
pub url: Url,
pub editable: bool,
pub path: PathBuf,
pub cache_info: Option<CacheInfo>,
}
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
@ -90,6 +93,7 @@ impl InstalledDist {
let name = PackageName::from_str(name)?;
let version = Version::from_str(version).map_err(|err| anyhow!(err))?;
let cache_info = Self::cache_info(path)?;
return if let Some(direct_url) = Self::direct_url(path)? {
match Url::try_from(&direct_url) {
@ -100,6 +104,7 @@ impl InstalledDist {
direct_url: Box::new(direct_url),
url,
path: path.to_path_buf(),
cache_info,
}))),
Err(err) => {
warn!("Failed to parse direct URL: {err}");
@ -107,6 +112,7 @@ impl InstalledDist {
name,
version,
path: path.to_path_buf(),
cache_info,
})))
}
}
@ -115,6 +121,7 @@ impl InstalledDist {
name,
version,
path: path.to_path_buf(),
cache_info,
})))
};
}
@ -256,13 +263,27 @@ impl InstalledDist {
/// Read the `direct_url.json` file from a `.dist-info` directory.
pub fn direct_url(path: &Path) -> Result<Option<DirectUrl>> {
let path = path.join("direct_url.json");
let Ok(file) = fs_err::File::open(path) else {
return Ok(None);
let file = match fs_err::File::open(&path) {
Ok(file) => file,
Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(None),
Err(err) => return Err(err.into()),
};
let direct_url = serde_json::from_reader::<fs_err::File, DirectUrl>(file)?;
Ok(Some(direct_url))
}
/// Read the `uv_cache.json` file from a `.dist-info` directory.
pub fn cache_info(path: &Path) -> Result<Option<CacheInfo>> {
let path = path.join("uv_cache.json");
let file = match fs_err::File::open(&path) {
Ok(file) => file,
Err(err) if err.kind() == std::io::ErrorKind::NotFound => return Ok(None),
Err(err) => return Err(err.into()),
};
let cache_info = serde_json::from_reader::<fs_err::File, CacheInfo>(file)?;
Ok(Some(cache_info))
}
/// Read the `METADATA` file from a `.dist-info` directory.
pub fn metadata(&self) -> Result<pypi_types::Metadata23> {
match self {


@ -2,7 +2,7 @@
name = "install-wheel-rs"
version = "0.0.1"
publish = false
description = "Takes a wheel and installs it, either in a venv or for monotrail"
description = "Takes a wheel and installs it."
keywords = ["wheel", "python"]
edition = { workspace = true }
@ -24,6 +24,7 @@ distribution-filename = { workspace = true }
pep440_rs = { workspace = true }
platform-tags = { workspace = true }
pypi-types = { workspace = true }
uv-cache-info = { workspace = true }
uv-fs = { workspace = true }
uv-normalize = { workspace = true }
uv-warnings = { workspace = true }


@ -1,15 +0,0 @@
Reimplementation of wheel installing in rust. Supports both classical venvs and monotrail.
There are simple python bindings:
```python
from install_wheel_rs import LockedVenv
locked_venv = LockedVenv("path/to/.venv")
locked_venv.install_wheel("path/to/some_tagged_wheel.whl")
```
and there's only one function: `install_wheels_venv(wheels: List[str], venv: str)`, where `wheels`
is a list of paths to wheel files and `venv` is the location of the venv to install the packages in.
See monotrail for benchmarks.


@ -5,6 +5,12 @@ use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use std::time::SystemTime;
use crate::script::{scripts_from_ini, Script};
use crate::wheel::{
extra_dist_info, install_data, parse_wheel_file, read_record_file, write_script_entrypoints,
LibKind,
};
use crate::{Error, Layout};
use distribution_filename::WheelFilename;
use fs_err as fs;
use fs_err::{DirEntry, File};
@ -14,16 +20,10 @@ use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use tempfile::tempdir_in;
use tracing::{debug, instrument};
use uv_cache_info::CacheInfo;
use uv_warnings::warn_user_once;
use walkdir::WalkDir;
use crate::script::{scripts_from_ini, Script};
use crate::wheel::{
extra_dist_info, install_data, parse_wheel_file, read_record_file, write_script_entrypoints,
LibKind,
};
use crate::{Error, Layout};
#[derive(Debug, Default)]
pub struct Locks(Mutex<FxHashMap<PathBuf, Arc<Mutex<()>>>>);
@ -41,6 +41,7 @@ pub fn install_wheel(
wheel: impl AsRef<Path>,
filename: &WheelFilename,
direct_url: Option<&DirectUrl>,
cache_info: Option<&CacheInfo>,
installer: Option<&str>,
link_mode: LinkMode,
locks: &Locks,
@ -145,6 +146,7 @@ pub fn install_wheel(
&dist_info_prefix,
true,
direct_url,
cache_info,
installer,
&mut record,
)?;


@ -3,25 +3,24 @@ use std::io::{BufReader, Cursor, Read, Seek, Write};
use std::path::{Path, PathBuf};
use std::{env, io};
use crate::record::RecordEntry;
use crate::script::Script;
use crate::{Error, Layout};
use data_encoding::BASE64URL_NOPAD;
use fs_err as fs;
use fs_err::{DirEntry, File};
use mailparse::parse_headers;
use pypi_types::DirectUrl;
use rustc_hash::FxHashMap;
use sha2::{Digest, Sha256};
use tracing::{instrument, warn};
use uv_cache_info::CacheInfo;
use uv_fs::{relative_to, Simplified};
use uv_normalize::PackageName;
use walkdir::WalkDir;
use zip::write::FileOptions;
use zip::ZipWriter;
use pypi_types::DirectUrl;
use uv_fs::{relative_to, Simplified};
use uv_normalize::PackageName;
use crate::record::RecordEntry;
use crate::script::Script;
use crate::{Error, Layout};
const LAUNCHER_MAGIC_NUMBER: [u8; 4] = [b'U', b'V', b'U', b'V'];
#[cfg(all(windows, target_arch = "x86"))]
@ -728,6 +727,7 @@ pub(crate) fn extra_dist_info(
dist_info_prefix: &str,
requested: bool,
direct_url: Option<&DirectUrl>,
cache_info: Option<&CacheInfo>,
installer: Option<&str>,
record: &mut Vec<RecordEntry>,
) -> Result<(), Error> {
@ -743,6 +743,14 @@ pub(crate) fn extra_dist_info(
record,
)?;
}
if let Some(cache_info) = cache_info {
write_file_recorded(
site_packages,
&dist_info_dir.join("uv_cache.json"),
serde_json::to_string(cache_info)?.as_bytes(),
record,
)?;
}
if let Some(installer) = installer {
write_file_recorded(
site_packages,


@ -0,0 +1,21 @@
[package]
name = "uv-cache-info"
version = "0.0.1"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }
[lints]
workspace = true
[dependencies]
fs-err = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, features = ["derive"] }
thiserror = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }


@ -0,0 +1,174 @@
use crate::commit_info::CacheCommit;
use crate::timestamp::Timestamp;
use serde::Deserialize;
use std::cmp::max;
use std::io;
use std::path::{Path, PathBuf};
use tracing::debug;
/// The information used to determine whether a built distribution is up-to-date, based on the
/// timestamps of relevant files, the current commit of a repository, etc.
#[derive(Default, Debug, Clone, Hash, PartialEq, Eq, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "kebab-case")]
#[serde(try_from = "CacheInfoWire")]
pub struct CacheInfo {
/// The timestamp of the most recent `ctime` of any relevant files, at the time of the build.
/// The timestamp will typically be the maximum of the `ctime` values of the `pyproject.toml`,
/// `setup.py`, and `setup.cfg` files, if they exist; however, users can provide additional
/// files to timestamp via the `cache-keys` field.
timestamp: Option<Timestamp>,
/// The commit at which the distribution was built.
commit: Option<CacheCommit>,
}
impl CacheInfo {
/// Return the [`CacheInfo`] for a given timestamp.
pub fn from_timestamp(timestamp: Timestamp) -> Self {
Self {
timestamp: Some(timestamp),
..Self::default()
}
}
/// Compute the cache info for a given path, which may be a file or a directory.
pub fn from_path(path: &Path) -> io::Result<Self> {
let metadata = fs_err::metadata(path)?;
if metadata.is_file() {
Self::from_file(path)
} else {
Self::from_directory(path)
}
}
/// Compute the cache info for a given directory.
pub fn from_directory(directory: &Path) -> io::Result<Self> {
let mut commit = None;
let mut timestamp = None;
// Read the cache keys.
let cache_keys =
if let Ok(contents) = fs_err::read_to_string(directory.join("pyproject.toml")) {
if let Ok(pyproject_toml) = toml::from_str::<PyProjectToml>(&contents) {
pyproject_toml
.tool
.and_then(|tool| tool.uv)
.and_then(|tool_uv| tool_uv.cache_keys)
} else {
None
}
} else {
None
};
// If no cache keys were defined, use the defaults.
let cache_keys = cache_keys.unwrap_or_else(|| {
vec![
CacheKey::Path(directory.join("pyproject.toml")),
CacheKey::Path(directory.join("setup.py")),
CacheKey::Path(directory.join("setup.cfg")),
]
});
// Incorporate any additional timestamps or VCS information.
for cache_key in &cache_keys {
match cache_key {
CacheKey::Path(file) | CacheKey::File { file } => {
timestamp = max(
timestamp,
file.metadata()
.ok()
.filter(std::fs::Metadata::is_file)
.as_ref()
.map(Timestamp::from_metadata),
);
}
CacheKey::Git { git: true } => match CacheCommit::from_repository(directory) {
Ok(commit_info) => commit = Some(commit_info),
Err(err) => {
debug!("Failed to read the current commit: {err}");
}
},
CacheKey::Git { git: false } => {}
}
}
Ok(Self { timestamp, commit })
}
/// Compute the cache info for a given file, assumed to be a binary or source distribution
/// represented as (e.g.) a `.whl` or `.tar.gz` archive.
pub fn from_file(path: impl AsRef<Path>) -> Result<Self, io::Error> {
let metadata = fs_err::metadata(path.as_ref())?;
let timestamp = Timestamp::from_metadata(&metadata);
Ok(Self {
timestamp: Some(timestamp),
..Self::default()
})
}
pub fn is_empty(&self) -> bool {
self.timestamp.is_none() && self.commit.is_none()
}
}
#[derive(Debug, serde::Deserialize)]
struct TimestampCommit {
timestamp: Option<Timestamp>,
commit: Option<CacheCommit>,
}
#[derive(Debug, serde::Deserialize)]
#[serde(untagged)]
enum CacheInfoWire {
/// For backwards-compatibility, enable deserializing [`CacheInfo`] structs that are solely
/// represented by a timestamp.
Timestamp(Timestamp),
/// A [`CacheInfo`] struct that includes both a timestamp and a commit.
TimestampCommit(TimestampCommit),
}
impl From<CacheInfoWire> for CacheInfo {
fn from(wire: CacheInfoWire) -> Self {
match wire {
CacheInfoWire::Timestamp(timestamp) => Self {
timestamp: Some(timestamp),
..Self::default()
},
CacheInfoWire::TimestampCommit(TimestampCommit { timestamp, commit }) => {
Self { timestamp, commit }
}
}
}
}
/// A `pyproject.toml` with an (optional) `[tool.uv]` section.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "kebab-case")]
struct PyProjectToml {
tool: Option<Tool>,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "kebab-case")]
struct Tool {
uv: Option<ToolUv>,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "kebab-case")]
struct ToolUv {
cache_keys: Option<Vec<CacheKey>>,
}
#[derive(Debug, Clone, serde::Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(untagged, rename_all = "kebab-case", deny_unknown_fields)]
pub enum CacheKey {
/// Ex) `"Cargo.lock"`
Path(PathBuf),
/// Ex) `{ file = "Cargo.lock" }`
File { file: PathBuf },
/// Ex) `{ git = true }`
Git { git: bool },
}
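Putting the new crate together, a minimal usage sketch (the project path and
the dependency on `uv-cache-info` are illustrative):

```rust
use std::path::Path;

use uv_cache_info::CacheInfo;

fn main() -> std::io::Result<()> {
    // For a directory, this honors any `cache-keys` declared in its
    // `pyproject.toml`, falling back to the `pyproject.toml`, `setup.py`,
    // and `setup.cfg` defaults.
    let info = CacheInfo::from_path(Path::new("path/to/project"))?;

    // A cached or installed distribution is considered stale when its
    // recorded `CacheInfo` no longer matches a freshly computed one.
    println!("has cache inputs: {}", !info.is_empty());
    Ok(())
}
```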


@ -0,0 +1,91 @@
use std::path::{Path, PathBuf};
#[derive(Debug, thiserror::Error)]
pub(crate) enum CacheCommitError {
#[error("The repository at {0} is missing a `.git` directory")]
MissingGitDir(PathBuf),
#[error("The repository at {0} is missing a `HEAD` file")]
MissingHead(PathBuf),
#[error("The repository at {0} has an invalid reference: `{1}`")]
InvalidRef(PathBuf, String),
#[error("The discovered commit has an invalid length (expected 40 characters): `{0}`")]
WrongLength(String),
#[error("The discovered commit has an invalid character (expected hexadecimal): `{0}`")]
WrongDigit(String),
#[error(transparent)]
Io(#[from] std::io::Error),
}
/// The current commit for a repository (i.e., a 40-character hexadecimal string).
#[derive(Default, Debug, Clone, Hash, PartialEq, Eq, serde::Deserialize, serde::Serialize)]
pub(crate) struct CacheCommit(String);
impl CacheCommit {
/// Return the [`CacheCommit`] for the repository at the given path.
pub(crate) fn from_repository(path: &Path) -> Result<Self, CacheCommitError> {
// Find the `.git` directory, searching through parent directories if necessary.
let git_dir = path
.ancestors()
.map(|ancestor| ancestor.join(".git"))
.find(|git_dir| git_dir.exists())
.ok_or_else(|| CacheCommitError::MissingGitDir(path.to_path_buf()))?;
let git_head_path =
git_head(&git_dir).ok_or_else(|| CacheCommitError::MissingHead(git_dir.clone()))?;
let git_head_contents = fs_err::read_to_string(git_head_path)?;
// The contents are either a commit or a reference in the following formats
// - "<commit>" when the head is detached
// - "ref <ref>" when working on a branch
// If a commit, checking if the HEAD file has changed is sufficient
// If a ref, we need to add the head file for that ref to rebuild on commit
let mut git_ref_parts = git_head_contents.split_whitespace();
let commit_or_ref = git_ref_parts.next().ok_or_else(|| {
CacheCommitError::InvalidRef(git_dir.clone(), git_head_contents.clone())
})?;
let commit = if let Some(git_ref) = git_ref_parts.next() {
let git_ref_path = git_dir.join(git_ref);
fs_err::read_to_string(git_ref_path)?
} else {
commit_or_ref.to_string()
};
// The commit should be 40 hexadecimal characters.
if commit.len() != 40 {
return Err(CacheCommitError::WrongLength(commit));
}
if commit.chars().any(|c| !c.is_ascii_hexdigit()) {
return Err(CacheCommitError::WrongDigit(commit));
}
Ok(Self(commit))
}
}
/// Return the path to the `HEAD` file of a Git repository, taking worktrees into account.
fn git_head(git_dir: &Path) -> Option<PathBuf> {
// The typical case is a standard git repository.
let git_head_path = git_dir.join("HEAD");
if git_head_path.exists() {
return Some(git_head_path);
}
if !git_dir.is_file() {
return None;
}
// If `.git/HEAD` doesn't exist and `.git` is actually a file,
// then let's try to read it as a worktree. If it's
// a worktree, then its contents will look like this, e.g.:
//
// gitdir: /home/andrew/astral/uv/main/.git/worktrees/pr2
//
// And the HEAD file we want to watch will be at:
//
// /home/andrew/astral/uv/main/.git/worktrees/pr2/HEAD
let contents = fs_err::read_to_string(git_dir).ok()?;
let (label, worktree_path) = contents.split_once(':')?;
if label != "gitdir" {
return None;
}
let worktree_path = worktree_path.trim();
Some(PathBuf::from(worktree_path))
}


@ -0,0 +1,6 @@
pub use crate::cache_info::*;
pub use crate::timestamp::*;
mod cache_info;
mod commit_info;
mod timestamp;


@ -9,7 +9,7 @@ use std::path::Path;
///
/// See: <https://github.com/restic/restic/issues/2179>
/// See: <https://apenwarr.ca/log/20181113>
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)]
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)]
pub struct Timestamp(std::time::SystemTime);
impl Timestamp {


@ -17,6 +17,7 @@ workspace = true
cache-key = { workspace = true }
distribution-types = { workspace = true }
pypi-types = { workspace = true }
uv-cache-info = { workspace = true }
uv-fs = { workspace = true, features = ["tokio"] }
uv-normalize = { workspace = true }


@ -1,6 +1,5 @@
use serde::{Deserialize, Serialize};
use crate::timestamp::Timestamp;
use uv_cache_info::Timestamp;
#[derive(Deserialize, Serialize)]
pub struct CachedByTimestamp<Data> {


@ -12,6 +12,7 @@ use tracing::debug;
pub use archive::ArchiveId;
use distribution_types::InstalledDist;
use pypi_types::Metadata23;
use uv_cache_info::Timestamp;
use uv_fs::{cachedir, directories};
use uv_normalize::PackageName;
@ -19,7 +20,6 @@ pub use crate::by_timestamp::CachedByTimestamp;
#[cfg(feature = "clap")]
pub use crate::cli::CacheArgs;
pub use crate::removal::{rm_rf, Removal};
pub use crate::timestamp::Timestamp;
pub use crate::wheel::WheelCache;
use crate::wheel::WheelCacheKind;
@ -28,7 +28,6 @@ mod by_timestamp;
#[cfg(feature = "clap")]
mod cli;
mod removal;
mod timestamp;
mod wheel;
/// A [`CacheEntry`] which may or may not exist yet.


@ -13,12 +13,14 @@ license = { workspace = true }
workspace = true
[dependencies]
cache-key = { workspace = true }
distribution-types = { workspace = true }
pep508_rs = { workspace = true, features = ["schemars"] }
platform-tags = { workspace = true }
pypi-types = { workspace = true }
uv-auth = { workspace = true }
uv-cache = { workspace = true }
uv-cache-info = { workspace = true }
uv-normalize = { workspace = true }
clap = { workspace = true, features = ["derive"], optional = true }


@ -1,3 +1,4 @@
use cache_key::CacheKeyHasher;
use std::{
collections::{btree_map::Entry, BTreeMap},
str::FromStr,
@ -108,6 +109,16 @@ impl FromIterator<ConfigSettingEntry> for ConfigSettings {
}
impl ConfigSettings {
/// Returns the number of settings in the configuration.
pub fn len(&self) -> usize {
self.0.len()
}
/// Returns `true` if the configuration contains no settings.
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
/// Convert the settings to a string that can be passed directly to a PEP 517 build backend.
pub fn escape_for_python(&self) -> String {
serde_json::to_string(self).expect("Failed to serialize config settings")
@ -150,6 +161,18 @@ impl ConfigSettings {
}
}
impl cache_key::CacheKey for ConfigSettings {
fn cache_key(&self, state: &mut CacheKeyHasher) {
for (key, value) in &self.0 {
key.cache_key(state);
match value {
ConfigSettingValue::String(value) => value.cache_key(state),
ConfigSettingValue::List(values) => values.cache_key(state),
}
}
}
}
impl serde::Serialize for ConfigSettings {
fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
use serde::ser::SerializeMap;


@ -3,7 +3,8 @@ use pep508_rs::PackageName;
use pypi_types::Requirement;
use rustc_hash::FxHashMap;
use uv_cache::{Refresh, Timestamp};
use uv_cache::Refresh;
use uv_cache_info::Timestamp;
/// Whether to reinstall packages.
#[derive(Debug, Default, Clone, serde::Serialize, serde::Deserialize)]


@ -140,6 +140,10 @@ impl<'a> BuildContext for BuildDispatch<'a> {
self.build_options
}
fn config_settings(&self) -> &ConfigSettings {
self.config_settings
}
fn sources(&self) -> SourceStrategy {
self.sources
}


@ -21,6 +21,7 @@ pep508_rs = { workspace = true }
platform-tags = { workspace = true }
pypi-types = { workspace = true }
uv-cache = { workspace = true }
uv-cache-info = { workspace = true }
uv-client = { workspace = true }
uv-configuration = { workspace = true }
uv-extract = { workspace = true }


@ -21,7 +21,8 @@ use distribution_types::{
};
use platform_tags::Tags;
use pypi_types::HashDigest;
use uv_cache::{ArchiveId, ArchiveTimestamp, CacheBucket, CacheEntry, Timestamp, WheelCache};
use uv_cache::{ArchiveId, CacheBucket, CacheEntry, WheelCache};
use uv_cache_info::{CacheInfo, Timestamp};
use uv_client::{
CacheControl, CachedClientError, Connectivity, DataWithCachePolicy, RegistryClient,
};
@ -187,6 +188,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
archive: self.build_context.cache().archive(&archive.id),
hashes: archive.hashes,
filename: wheel.filename.clone(),
cache: CacheInfo::default(),
}),
Err(Error::Extract(err)) => {
if err.is_http_streaming_unsupported() {
@ -217,6 +219,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
archive: self.build_context.cache().archive(&archive.id),
hashes: archive.hashes,
filename: wheel.filename.clone(),
cache: CacheInfo::default(),
})
}
Err(err) => Err(err),
@ -248,6 +251,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
archive: self.build_context.cache().archive(&archive.id),
hashes: archive.hashes,
filename: wheel.filename.clone(),
cache: CacheInfo::default(),
}),
Err(Error::Client(err)) if err.is_http_streaming_unsupported() => {
warn!(
@ -271,6 +275,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
archive: self.build_context.cache().archive(&archive.id),
hashes: archive.hashes,
filename: wheel.filename.clone(),
cache: CacheInfo::default(),
})
}
Err(err) => Err(err),
@ -325,6 +330,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
archive,
filename: built_wheel.filename,
hashes: built_wheel.hashes,
cache: built_wheel.cache_info,
});
}
Err(err) if err.kind() == io::ErrorKind::NotFound => {}
@ -341,6 +347,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
archive: self.build_context.cache().archive(&id),
hashes: built_wheel.hashes,
filename: built_wheel.filename,
cache: built_wheel.cache_info,
})
}
@ -724,7 +731,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
hashes: HashPolicy<'_>,
) -> Result<LocalWheel, Error> {
// Determine the last-modified time of the wheel.
let modified = ArchiveTimestamp::from_file(path).map_err(Error::CacheRead)?;
let modified = Timestamp::from_path(path).map_err(Error::CacheRead)?;
// Attempt to read the archive pointer from the cache.
let pointer_entry = wheel_entry.with_file(format!("{}.rev", filename.stem()));
@ -743,6 +750,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
archive: self.build_context.cache().archive(&archive.id),
hashes: archive.hashes,
filename: filename.clone(),
cache: CacheInfo::from_timestamp(modified),
})
} else if hashes.is_none() {
// Otherwise, unzip the wheel.
@ -750,7 +758,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
// Write the archive pointer to the cache.
let pointer = LocalArchivePointer {
timestamp: modified.timestamp(),
timestamp: modified,
archive: archive.clone(),
};
pointer.write_to(&pointer_entry).await?;
@ -760,6 +768,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
archive: self.build_context.cache().archive(&archive.id),
hashes: archive.hashes,
filename: filename.clone(),
cache: CacheInfo::from_timestamp(modified),
})
} else {
// If necessary, compute the hashes of the wheel.
@ -795,7 +804,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
// Write the archive pointer to the cache.
let pointer = LocalArchivePointer {
timestamp: modified.timestamp(),
timestamp: modified,
archive: archive.clone(),
};
pointer.write_to(&pointer_entry).await?;
@ -805,6 +814,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
archive: self.build_context.cache().archive(&archive.id),
hashes: archive.hashes,
filename: filename.clone(),
cache: CacheInfo::from_timestamp(modified),
})
}
}
@ -960,6 +970,11 @@ impl HttpArchivePointer {
pub fn into_archive(self) -> Archive {
self.archive
}
/// Return the [`CacheInfo`] from the pointer.
pub fn to_cache_info(&self) -> CacheInfo {
CacheInfo::default()
}
}
/// A pointer to an archive in the cache, fetched from a local path.
@ -989,12 +1004,17 @@ impl LocalArchivePointer {
}
/// Returns `true` if the archive is up-to-date with the given modified timestamp.
pub fn is_up_to_date(&self, modified: ArchiveTimestamp) -> bool {
self.timestamp == modified.timestamp()
pub fn is_up_to_date(&self, modified: Timestamp) -> bool {
self.timestamp == modified
}
/// Return the [`Archive`] from the pointer.
pub fn into_archive(self) -> Archive {
self.archive
}
/// Return the [`CacheInfo`] from the pointer.
pub fn to_cache_info(&self) -> CacheInfo {
CacheInfo::from_timestamp(self.timestamp)
}
}


@ -1,10 +1,10 @@
use std::path::{Path, PathBuf};
use crate::Error;
use distribution_filename::WheelFilename;
use distribution_types::{CachedDist, Dist, Hashed};
use pypi_types::{HashDigest, Metadata23};
use crate::Error;
use uv_cache_info::CacheInfo;
/// A locally available wheel.
#[derive(Debug, Clone)]
@ -16,6 +16,8 @@ pub struct LocalWheel {
/// The canonicalized path in the cache directory to which the wheel was downloaded.
/// Typically, a directory within the archive bucket.
pub(crate) archive: PathBuf,
/// The cache index of the wheel.
pub(crate) cache: CacheInfo,
/// The computed hashes of the wheel.
pub(crate) hashes: Vec<HashDigest>,
}
@ -51,7 +53,13 @@ impl Hashed for LocalWheel {
/// Convert a [`LocalWheel`] into a [`CachedDist`].
impl From<LocalWheel> for CachedDist {
fn from(wheel: LocalWheel) -> CachedDist {
CachedDist::from_remote(wheel.dist, wheel.filename, wheel.hashes, wheel.archive)
CachedDist::from_remote(
wheel.dist,
wheel.filename,
wheel.hashes,
wheel.cache,
wheel.archive,
)
}
}


@ -1,15 +1,15 @@
use crate::index::cached_wheel::CachedWheel;
use crate::source::{HttpRevisionPointer, LocalRevisionPointer, HTTP_REVISION, LOCAL_REVISION};
use crate::Error;
use distribution_types::{
DirectUrlSourceDist, DirectorySourceDist, GitSourceDist, Hashed, PathSourceDist,
};
use platform_tags::Tags;
use uv_cache::{ArchiveTimestamp, Cache, CacheBucket, CacheShard, WheelCache};
use uv_cache::{Cache, CacheBucket, CacheShard, WheelCache};
use uv_cache_info::CacheInfo;
use uv_fs::symlinks;
use uv_types::HashStrategy;
use crate::index::cached_wheel::CachedWheel;
use crate::source::{HttpRevisionPointer, LocalRevisionPointer, HTTP_REVISION, LOCAL_REVISION};
use crate::Error;
/// A local index of built distributions for a specific source distribution.
#[derive(Debug)]
pub struct BuiltWheelIndex<'a> {
@ -51,9 +51,10 @@ impl<'a> BuiltWheelIndex<'a> {
return Ok(None);
}
Ok(self.find(&cache_shard.shard(revision.id())))
}
let cache_shard = cache_shard.shard(revision.id());
Ok(self.find(&cache_shard))
}
/// Return the most compatible [`CachedWheel`] for a given source distribution at a local path.
pub fn path(&self, source_dist: &PathSourceDist) -> Result<Option<CachedWheel>, Error> {
let cache_shard = self.cache.shard(
@ -67,12 +68,10 @@ impl<'a> BuiltWheelIndex<'a> {
return Ok(None);
};
// Determine the last-modified time of the source distribution.
let modified =
ArchiveTimestamp::from_file(&source_dist.install_path).map_err(Error::CacheRead)?;
// If the distribution is stale, omit it from the index.
if !pointer.is_up_to_date(modified) {
let cache_info =
CacheInfo::from_file(&source_dist.install_path).map_err(Error::CacheRead)?;
if cache_info != *pointer.cache_info() {
return Ok(None);
}
@ -82,7 +81,11 @@ impl<'a> BuiltWheelIndex<'a> {
return Ok(None);
}
Ok(self.find(&cache_shard.shard(revision.id())))
let cache_shard = cache_shard.shard(revision.id());
Ok(self
.find(&cache_shard)
.map(|wheel| wheel.with_cache_info(cache_info)))
}
/// Return the most compatible [`CachedWheel`] for a given source distribution built from a
@ -106,17 +109,11 @@ impl<'a> BuiltWheelIndex<'a> {
return Ok(None);
};
// Determine the last-modified time of the source distribution.
let Some(modified) = ArchiveTimestamp::from_source_tree(&source_dist.install_path)
.map_err(Error::CacheRead)?
else {
return Err(Error::DirWithoutEntrypoint(
source_dist.install_path.clone(),
));
};
// If the distribution is stale, omit it from the index.
if !pointer.is_up_to_date(modified) {
let cache_info =
CacheInfo::from_directory(&source_dist.install_path).map_err(Error::CacheRead)?;
if cache_info != *pointer.cache_info() {
return Ok(None);
}
@ -126,7 +123,11 @@ impl<'a> BuiltWheelIndex<'a> {
return Ok(None);
}
Ok(self.find(&cache_shard.shard(revision.id())))
let cache_shard = cache_shard.shard(revision.id());
Ok(self
.find(&cache_shard)
.map(|wheel| wheel.with_cache_info(cache_info)))
}
/// Return the most compatible [`CachedWheel`] for a given source distribution at a git URL.


@ -1,13 +1,13 @@
use std::path::Path;
use crate::archive::Archive;
use crate::{HttpArchivePointer, LocalArchivePointer};
use distribution_filename::WheelFilename;
use distribution_types::{CachedDirectUrlDist, CachedRegistryDist, Hashed};
use pep508_rs::VerbatimUrl;
use pypi_types::HashDigest;
use uv_cache::{Cache, CacheBucket, CacheEntry};
use crate::archive::Archive;
use crate::{HttpArchivePointer, LocalArchivePointer};
use uv_cache_info::CacheInfo;
#[derive(Debug, Clone)]
pub struct CachedWheel {
@ -17,6 +17,8 @@ pub struct CachedWheel {
pub entry: CacheEntry,
/// The [`HashDigest`]s for the wheel.
pub hashes: Vec<HashDigest>,
/// The [`CacheInfo`] for the wheel.
pub cache_info: CacheInfo,
}
impl CachedWheel {
@ -32,10 +34,12 @@ impl CachedWheel {
let archive = path.canonicalize().ok()?;
let entry = CacheEntry::from_path(archive);
let hashes = Vec::new();
let cache_info = CacheInfo::default();
Some(Self {
filename,
entry,
hashes,
cache_info,
})
}
@ -45,6 +49,7 @@ impl CachedWheel {
filename: self.filename,
path: self.entry.into_path_buf(),
hashes: self.hashes,
cache_info: self.cache_info,
}
}
@ -57,6 +62,7 @@ impl CachedWheel {
editable: false,
r#virtual: false,
hashes: self.hashes,
cache_info: self.cache_info,
}
}
@ -69,6 +75,7 @@ impl CachedWheel {
editable: true,
r#virtual: false,
hashes: self.hashes,
cache_info: self.cache_info,
}
}
@ -81,6 +88,7 @@ impl CachedWheel {
editable: false,
r#virtual: true,
hashes: self.hashes,
cache_info: self.cache_info,
}
}
@ -94,14 +102,17 @@ impl CachedWheel {
// Read the pointer.
let pointer = HttpArchivePointer::read_from(path).ok()??;
let cache_info = pointer.to_cache_info();
let Archive { id, hashes } = pointer.into_archive();
// Convert to a cached wheel.
let entry = cache.entry(CacheBucket::Archive, "", id);
// Convert to a cached wheel.
Some(Self {
filename,
entry,
hashes,
cache_info,
})
}
@ -115,6 +126,7 @@ impl CachedWheel {
// Read the pointer.
let pointer = LocalArchivePointer::read_from(path).ok()??;
let cache_info = pointer.to_cache_info();
let Archive { id, hashes } = pointer.into_archive();
// Convert to a cached wheel.
@ -123,8 +135,15 @@ impl CachedWheel {
filename,
entry,
hashes,
cache_info,
})
}
#[must_use]
pub fn with_cache_info(mut self, cache_info: CacheInfo) -> Self {
self.cache_info = cache_info;
self
}
}
impl Hashed for CachedWheel {


@ -6,6 +6,7 @@ use distribution_types::Hashed;
use platform_tags::Tags;
use pypi_types::HashDigest;
use uv_cache::CacheShard;
use uv_cache_info::CacheInfo;
use uv_fs::files;
/// The information about the wheel we either just built or got from the cache.
@ -19,6 +20,8 @@ pub(crate) struct BuiltWheelMetadata {
pub(crate) filename: WheelFilename,
/// The computed hashes of the source distribution from which the wheel was built.
pub(crate) hashes: Vec<HashDigest>,
/// The cache information for the underlying source distribution.
pub(crate) cache_info: CacheInfo,
}
impl BuiltWheelMetadata {
@ -43,11 +46,11 @@ impl BuiltWheelMetadata {
target: cache_shard.join(filename.stem()),
path,
filename,
cache_info: CacheInfo::default(),
hashes: vec![],
})
}
/// Set the computed hashes of the wheel.
#[must_use]
pub(crate) fn with_hashes(mut self, hashes: Vec<HashDigest>) -> Self {
self.hashes = hashes;


@ -5,34 +5,6 @@ use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;
use fs_err::tokio as fs;
use futures::{FutureExt, TryStreamExt};
use reqwest::Response;
use tokio_util::compat::FuturesAsyncReadCompatExt;
use tracing::{debug, info_span, instrument, Instrument};
use url::Url;
use zip::ZipArchive;
use distribution_filename::{SourceDistExtension, WheelFilename};
use distribution_types::{
BuildableSource, DirectorySourceUrl, FileLocation, GitSourceUrl, HashPolicy, Hashed,
PathSourceUrl, RemoteSource, SourceDist, SourceUrl,
};
use install_wheel_rs::metadata::read_archive_metadata;
use platform_tags::Tags;
use pypi_types::{HashDigest, Metadata12, Metadata23, RequiresTxt};
use uv_cache::{
ArchiveTimestamp, Cache, CacheBucket, CacheEntry, CacheShard, CachedByTimestamp, Removal,
Timestamp, WheelCache,
};
use uv_client::{
CacheControl, CachedClientError, Connectivity, DataWithCachePolicy, RegistryClient,
};
use uv_configuration::{BuildKind, BuildOutput};
use uv_extract::hash::Hasher;
use uv_fs::{rename_with_retry, write_atomic, LockedFile};
use uv_types::{BuildContext, SourceBuildTrait};
use crate::distribution_database::ManagedClient;
use crate::error::Error;
use crate::metadata::{ArchiveMetadata, Metadata};
@ -40,6 +12,30 @@ use crate::reporter::Facade;
use crate::source::built_wheel_metadata::BuiltWheelMetadata;
use crate::source::revision::Revision;
use crate::{Reporter, RequiresDist};
use distribution_filename::{SourceDistExtension, WheelFilename};
use distribution_types::{
BuildableSource, DirectorySourceUrl, FileLocation, GitSourceUrl, HashPolicy, Hashed,
PathSourceUrl, RemoteSource, SourceDist, SourceUrl,
};
use fs_err::tokio as fs;
use futures::{FutureExt, TryStreamExt};
use install_wheel_rs::metadata::read_archive_metadata;
use platform_tags::Tags;
use pypi_types::{HashDigest, Metadata12, Metadata23, RequiresTxt};
use reqwest::Response;
use tokio_util::compat::FuturesAsyncReadCompatExt;
use tracing::{debug, info_span, instrument, Instrument};
use url::Url;
use uv_cache::{Cache, CacheBucket, CacheEntry, CacheShard, Removal, WheelCache};
use uv_cache_info::CacheInfo;
use uv_client::{
CacheControl, CachedClientError, Connectivity, DataWithCachePolicy, RegistryClient,
};
use uv_configuration::{BuildKind, BuildOutput};
use uv_extract::hash::Hasher;
use uv_fs::{rename_with_retry, write_atomic, LockedFile};
use uv_types::{BuildContext, SourceBuildTrait};
use zip::ZipArchive;
mod built_wheel_metadata;
mod revision;
@ -463,6 +459,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
target: cache_shard.join(wheel_filename.stem()),
filename: wheel_filename,
hashes: revision.into_hashes(),
cache_info: CacheInfo::default(),
})
}
@ -522,7 +519,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
});
}
// Otherwise, we either need to build the metadata or the wheel.
// Otherwise, we need to build the metadata.
// If the backend supports `prepare_metadata_for_build_wheel`, use it.
if let Some(metadata) = self
.build_metadata(source, source_dist_entry.path(), subdirectory)
@ -654,7 +651,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
let _lock = lock_shard(cache_shard).await?;
// Fetch the revision for the source distribution.
let revision = self
let LocalRevisionPointer {
cache_info,
revision,
} = self
.archive_revision(source, resource, cache_shard, hashes)
.await?;
@ -705,6 +705,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
target: cache_shard.join(filename.stem()),
filename,
hashes: revision.into_hashes(),
cache_info,
})
}
@ -722,7 +723,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
let _lock = lock_shard(cache_shard).await?;
// Fetch the revision for the source distribution.
let revision = self
let LocalRevisionPointer { revision, .. } = self
.archive_revision(source, resource, cache_shard, hashes)
.await?;
@ -814,14 +815,14 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
resource: &PathSourceUrl<'_>,
cache_shard: &CacheShard,
hashes: HashPolicy<'_>,
) -> Result<Revision, Error> {
) -> Result<LocalRevisionPointer, Error> {
// Verify that the archive exists.
if !resource.path.is_file() {
return Err(Error::NotFound(resource.url.clone()));
}
// Determine the last-modified time of the source distribution.
let modified = ArchiveTimestamp::from_file(&resource.path).map_err(Error::CacheRead)?;
let cache_info = CacheInfo::from_file(&resource.path).map_err(Error::CacheRead)?;
// Read the existing metadata from the cache.
let revision_entry = cache_shard.entry(LOCAL_REVISION);
@ -829,10 +830,9 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// If the revision already exists, return it. There's no need to check for freshness, since
// we use an exact timestamp.
if let Some(pointer) = LocalRevisionPointer::read_from(&revision_entry)? {
if pointer.is_up_to_date(modified) {
let revision = pointer.into_revision();
if revision.has_digests(hashes) {
return Ok(revision);
if *pointer.cache_info() == cache_info {
if pointer.revision().has_digests(hashes) {
return Ok(pointer);
}
}
}
@ -846,20 +846,18 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
let hashes = self
.persist_archive(&resource.path, resource.ext, entry.path(), hashes)
.await?;
// Include the hashes and cache info in the revision.
let revision = revision.with_hashes(hashes);
// Persist the revision.
write_atomic(
revision_entry.path(),
rmp_serde::to_vec(&CachedByTimestamp {
timestamp: modified.timestamp(),
data: revision.clone(),
})?,
)
.await
.map_err(Error::CacheWrite)?;
let pointer = LocalRevisionPointer {
cache_info,
revision,
};
pointer.write_to(&revision_entry).await?;
Ok(revision)
Ok(pointer)
}
/// Build a source distribution from a local source tree (i.e., directory), either editable or
@ -888,7 +886,10 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
let _lock = lock_shard(&cache_shard).await?;
// Fetch the revision for the source distribution.
let revision = self
let LocalRevisionPointer {
cache_info,
revision,
} = self
.source_tree_revision(source, resource, &cache_shard)
.await?;
@ -927,7 +928,8 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
path: cache_shard.join(&disk_filename),
target: cache_shard.join(filename.stem()),
filename,
hashes: vec![],
hashes: revision.into_hashes(),
cache_info,
})
}
@ -947,26 +949,6 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
return Err(Error::HashesNotSupportedSourceTree(source.to_string()));
}
let cache_shard = self.build_context.cache().shard(
CacheBucket::SourceDistributions,
if resource.editable {
WheelCache::Editable(resource.url).root()
} else {
WheelCache::Path(resource.url).root()
},
);
let _lock = lock_shard(&cache_shard).await?;
// Fetch the revision for the source distribution.
let revision = self
.source_tree_revision(source, resource, &cache_shard)
.await?;
// Scope all operations to the revision. Within the revision, there's no need to check for
// freshness, since entries have to be fresher than the revision itself.
let cache_shard = cache_shard.shard(revision.id());
if let Some(metadata) =
Self::read_static_metadata(source, &resource.install_path, None).await?
{
@ -980,6 +962,26 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
));
}
let cache_shard = self.build_context.cache().shard(
CacheBucket::SourceDistributions,
if resource.editable {
WheelCache::Editable(resource.url).root()
} else {
WheelCache::Path(resource.url).root()
},
);
let _lock = lock_shard(&cache_shard).await?;
// Fetch the revision for the source distribution.
let LocalRevisionPointer { revision, .. } = self
.source_tree_revision(source, resource, &cache_shard)
.await?;
// Scope all operations to the revision. Within the revision, there's no need to check for
// freshness, since entries have to be fresher than the revision itself.
let cache_shard = cache_shard.shard(revision.id());
// If the cache contains compatible metadata, return it.
let metadata_entry = cache_shard.entry(METADATA);
if let Some(metadata) = read_cached_metadata(&metadata_entry).await? {
@ -1055,20 +1057,15 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
source: &BuildableSource<'_>,
resource: &DirectorySourceUrl<'_>,
cache_shard: &CacheShard,
) -> Result<Revision, Error> {
) -> Result<LocalRevisionPointer, Error> {
// Verify that the source tree exists.
if !resource.install_path.is_dir() {
return Err(Error::NotFound(resource.url.clone()));
}
// Determine the last-modified time of the source distribution.
let Some(modified) =
ArchiveTimestamp::from_source_tree(&resource.install_path).map_err(Error::CacheRead)?
else {
return Err(Error::DirWithoutEntrypoint(
resource.install_path.to_path_buf(),
));
};
let cache_info =
CacheInfo::from_directory(&resource.install_path).map_err(Error::CacheRead)?;
// Read the existing metadata from the cache.
let entry = cache_shard.entry(LOCAL_REVISION);
@ -1082,8 +1079,8 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.is_fresh()
{
if let Some(pointer) = LocalRevisionPointer::read_from(&entry)? {
if pointer.timestamp == modified.timestamp() {
return Ok(pointer.into_revision());
if *pointer.cache_info() == cache_info {
return Ok(pointer);
}
}
}
@ -1091,12 +1088,12 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// Otherwise, we need to create a new revision.
let revision = Revision::new();
let pointer = LocalRevisionPointer {
timestamp: modified.timestamp(),
revision: revision.clone(),
cache_info,
revision,
};
pointer.write_to(&entry).await?;
Ok(revision)
Ok(pointer)
}
/// Build a source distribution from a Git repository.
@ -1130,6 +1127,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
CacheBucket::SourceDistributions,
WheelCache::Git(resource.url, &git_sha.to_short_string()).root(),
);
let metadata_entry = cache_shard.entry(METADATA);
let _lock = lock_shard(&cache_shard).await?;
@ -1154,7 +1152,6 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
}
// Store the metadata.
let metadata_entry = cache_shard.entry(METADATA);
write_atomic(metadata_entry.path(), rmp_serde::to_vec(&metadata)?)
.await
.map_err(Error::CacheWrite)?;
@ -1164,6 +1161,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
target: cache_shard.join(filename.stem()),
filename,
hashes: vec![],
cache_info: CacheInfo::default(),
})
}
@ -1200,6 +1198,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
CacheBucket::SourceDistributions,
WheelCache::Git(resource.url, &git_sha.to_short_string()).root(),
);
let metadata_entry = cache_shard.entry(METADATA);
let _lock = lock_shard(&cache_shard).await?;
@ -1218,8 +1217,6 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
}
// If the cache contains compatible metadata, return it.
let metadata_entry = cache_shard.entry(METADATA);
if self
.build_context
.cache()
@ -1737,7 +1734,7 @@ impl HttpRevisionPointer {
/// Encoded with `MsgPack`, and represented on disk by a `.rev` file.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub(crate) struct LocalRevisionPointer {
timestamp: Timestamp,
cache_info: CacheInfo,
revision: Revision,
}
@ -1763,12 +1760,17 @@ impl LocalRevisionPointer {
.map_err(Error::CacheWrite)
}
/// Returns `true` if the revision is up-to-date with the given modified timestamp.
pub(crate) fn is_up_to_date(&self, modified: ArchiveTimestamp) -> bool {
self.timestamp == modified.timestamp()
/// Return the [`CacheInfo`] for the pointer.
pub(crate) fn cache_info(&self) -> &CacheInfo {
&self.cache_info
}
/// Return the [`Revision`] from the pointer.
/// Return the [`Revision`] for the pointer.
pub(crate) fn revision(&self) -> &Revision {
&self.revision
}
/// Return the [`Revision`] for the pointer.
pub(crate) fn into_revision(self) -> Revision {
self.revision
}


@ -27,4 +27,4 @@ serde = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
url = { workspace = true }
url = { workspace = true }


@ -22,6 +22,7 @@ pep508_rs = { workspace = true }
platform-tags = { workspace = true }
pypi-types = { workspace = true }
uv-cache = { workspace = true }
uv-cache-info = { workspace = true }
uv-configuration = { workspace = true }
uv-distribution = { workspace = true }
uv-extract = { workspace = true }


@ -149,6 +149,11 @@ fn install(
.map(pypi_types::DirectUrl::try_from)
.transpose()?
.as_ref(),
if wheel.cache_info().is_empty() {
None
} else {
Some(wheel.cache_info())
},
installer_name.as_deref(),
link_mode,
&locks,


@ -13,7 +13,8 @@ use distribution_types::{
};
use platform_tags::Tags;
use pypi_types::{Requirement, RequirementSource, ResolverMarkerEnvironment};
use uv_cache::{ArchiveTimestamp, Cache, CacheBucket, WheelCache};
use uv_cache::{Cache, CacheBucket, WheelCache};
use uv_cache_info::{CacheInfo, Timestamp};
use uv_configuration::{BuildOptions, Reinstall};
use uv_distribution::{
BuiltWheelIndex, HttpArchivePointer, LocalArchivePointer, RegistryWheelIndex,
@ -93,8 +94,7 @@ impl<'a> Planner<'a> {
}
}
// Check if the package should be reinstalled. A reinstall involves (1) purging any
// cached distributions, and (2) marking any installed distributions as extraneous.
// Check if the package should be reinstalled.
let reinstall = match reinstall {
Reinstall::None => false,
Reinstall::All => true,
@ -207,6 +207,7 @@ impl<'a> Planner<'a> {
wheel.filename,
wheel.url,
archive.hashes,
CacheInfo::default(),
cache.archive(&archive.id),
);
@ -361,14 +362,16 @@ impl<'a> Planner<'a> {
.entry(format!("{}.rev", wheel.filename.stem()));
if let Some(pointer) = LocalArchivePointer::read_from(&cache_entry)? {
let timestamp = ArchiveTimestamp::from_file(&wheel.install_path)?;
let timestamp = Timestamp::from_path(&wheel.install_path)?;
if pointer.is_up_to_date(timestamp) {
let cache_info = pointer.to_cache_info();
let archive = pointer.into_archive();
if archive.satisfies(hasher.get(&wheel)) {
let cached_dist = CachedDirectUrlDist::from_url(
wheel.filename,
wheel.url,
archive.hashes,
cache_info,
cache.archive(&archive.id),
);


@ -7,7 +7,7 @@ use url::Url;
use cache_key::{CanonicalUrl, RepositoryUrl};
use distribution_types::{InstalledDirectUrlDist, InstalledDist};
use pypi_types::{DirInfo, DirectUrl, RequirementSource, VcsInfo, VcsKind};
use uv_cache::{ArchiveTarget, ArchiveTimestamp};
use uv_cache_info::CacheInfo;
#[derive(Debug, Copy, Clone)]
pub(crate) enum RequirementSatisfaction {
@ -50,6 +50,7 @@ impl RequirementSatisfaction {
let InstalledDist::Url(InstalledDirectUrlDist {
direct_url,
editable,
cache_info,
..
}) = &distribution
else {
@ -81,10 +82,10 @@ impl RequirementSatisfaction {
// If the requirement came from a local path, check freshness.
if requested_url.scheme() == "file" {
if let Ok(archive) = requested_url.to_file_path() {
if !ArchiveTimestamp::up_to_date_with(
&archive,
ArchiveTarget::Install(distribution),
)? {
let Some(cache_info) = cache_info.as_ref() else {
return Ok(Self::OutOfDate);
};
if *cache_info != CacheInfo::from_path(&archive)? {
return Ok(Self::OutOfDate);
}
}
@ -153,7 +154,11 @@ impl RequirementSatisfaction {
ext: _,
url: _,
} => {
let InstalledDist::Url(InstalledDirectUrlDist { direct_url, .. }) = &distribution
let InstalledDist::Url(InstalledDirectUrlDist {
direct_url,
cache_info,
..
}) = &distribution
else {
return Ok(Self::Mismatch);
};
@ -184,11 +189,10 @@ impl RequirementSatisfaction {
return Ok(Self::Mismatch);
}
if !ArchiveTimestamp::up_to_date_with(
requested_path,
ArchiveTarget::Install(distribution),
)? {
trace!("Installed package is out of date");
let Some(cache_info) = cache_info.as_ref() else {
return Ok(Self::OutOfDate);
};
if *cache_info != CacheInfo::from_path(requested_path)? {
return Ok(Self::OutOfDate);
}
@ -200,7 +204,11 @@ impl RequirementSatisfaction {
r#virtual: _,
url: _,
} => {
let InstalledDist::Url(InstalledDirectUrlDist { direct_url, .. }) = &distribution
let InstalledDist::Url(InstalledDirectUrlDist {
direct_url,
cache_info,
..
}) = &distribution
else {
return Ok(Self::Mismatch);
};
@ -242,11 +250,10 @@ impl RequirementSatisfaction {
return Ok(Self::Mismatch);
}
if !ArchiveTimestamp::up_to_date_with(
requested_path,
ArchiveTarget::Install(distribution),
)? {
trace!("Installed package is out of date");
let Some(cache_info) = cache_info.as_ref() else {
return Ok(Self::OutOfDate);
};
if *cache_info != CacheInfo::from_path(requested_path)? {
return Ok(Self::OutOfDate);
}


@ -21,6 +21,7 @@ pep508_rs = { workspace = true }
platform-tags = { workspace = true }
pypi-types = { workspace = true }
uv-cache = { workspace = true }
uv-cache-info = { workspace = true }
uv-client = { workspace = true }
uv-extract = { workspace = true }
uv-fs = { workspace = true }


@ -18,7 +18,8 @@ use pep508_rs::{MarkerEnvironment, StringVersion};
use platform_tags::Platform;
use platform_tags::{Tags, TagsError};
use pypi_types::{ResolverMarkerEnvironment, Scheme};
use uv_cache::{Cache, CacheBucket, CachedByTimestamp, Freshness, Timestamp};
use uv_cache::{Cache, CacheBucket, CachedByTimestamp, Freshness};
use uv_cache_info::Timestamp;
use uv_fs::{write_atomic_sync, PythonExt, Simplified};
use crate::implementation::LenientImplementationName;


@ -17,6 +17,7 @@ distribution-types = { workspace = true, features = ["schemars"] }
install-wheel-rs = { workspace = true, features = ["schemars", "clap"] }
pep508_rs = { workspace = true }
pypi-types = { workspace = true }
uv-cache-info = { workspace = true, features = ["schemars"] }
uv-configuration = { workspace = true, features = ["schemars", "clap"] }
uv-fs = { workspace = true }
uv-macros = { workspace = true }


@ -6,6 +6,7 @@ use distribution_types::{FlatIndexLocation, IndexUrl};
use install_wheel_rs::linker::LinkMode;
use pep508_rs::Requirement;
use pypi_types::{SupportedEnvironments, VerbatimParsedUrl};
use uv_cache_info::CacheKey;
use uv_configuration::{
ConfigSettings, IndexStrategy, KeyringProviderType, PackageNameSpecifier, TargetTriple,
TrustedHost,
@ -42,6 +43,38 @@ pub struct Options {
#[option_group]
pub pip: Option<PipOptions>,
/// The keys to consider when caching builds for the project.
///
/// Cache keys enable you to specify the files or directories that should trigger a rebuild when
/// modified. By default, uv will rebuild a project whenever the `pyproject.toml`, `setup.py`,
/// or `setup.cfg` files in the project directory are modified, i.e.:
///
/// ```toml
/// cache-keys = [{ file = "pyproject.toml" }, { file = "setup.py" }, { file = "setup.cfg" }]
/// ```
///
/// As an example: if a project uses dynamic metadata to read its dependencies from a
/// `requirements.txt` file, you can specify `cache-keys = [{ file = "requirements.txt" }, { file = "pyproject.toml" }]`
/// to ensure that the project is rebuilt whenever the `requirements.txt` file is modified (in
/// addition to watching the `pyproject.toml`).
///
/// Cache keys can also include version control information. For example, if a project uses
/// `setuptools_scm` to read its version from a Git tag, you can specify `cache-keys = [{ git = true }, { file = "pyproject.toml" }]`
/// to include the current Git commit hash in the cache key (in addition to the
/// `pyproject.toml`).
///
/// Cache keys only affect the project defined by the `pyproject.toml` in which they're
/// specified (as opposed to, e.g., affecting all members in a workspace).
#[option(
default = r#"[{ file = "pyproject.toml" }, { file = "setup.py" }, { file = "setup.cfg" }]"#,
value_type = "list[dict]",
example = r#"
cache-keys = [{ file = "pyproject.toml" }, { file = "requirements.txt" }, { git = true }]
"#
)]
#[serde(default, skip_serializing)]
cache_keys: Option<Vec<CacheKey>>,
// NOTE(charlie): These fields are shared with `ToolUv` in
// `crates/uv-workspace/src/pyproject.rs`, and the documentation lives on that struct.
#[cfg_attr(feature = "schemars", schemars(skip))]
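For the `CacheKey` entries themselves, the JSON schema later in this diff accepts a bare string, a `{ file = ... }` table, or a `{ git = ... }` table. One plausible Rust modeling is a serde untagged enum; this sketch mirrors the schema rather than the actual type in `uv-cache-info`:

```rust
use serde::Deserialize;

/// Illustrative mirror of the three accepted forms.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum CacheKeySketch {
    /// Ex) `"Cargo.lock"` (shorthand for a file path).
    Path(String),
    /// Ex) `{ file = "Cargo.lock" }`
    File { file: String },
    /// Ex) `{ git = true }`
    Git { git: bool },
}

#[derive(Debug, Deserialize)]
struct ToolUvSketch {
    #[serde(rename = "cache-keys")]
    cache_keys: Vec<CacheKeySketch>,
}

fn main() {
    // The bare-string shorthand and the table form both parse as file keys.
    let doc: ToolUvSketch = toml::from_str(
        r#"cache-keys = ["constraints.txt", { file = "requirements.txt" }, { git = true }]"#,
    )
    .unwrap();
    assert_eq!(doc.cache_keys.len(), 3);
}
```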

View file

@ -9,7 +9,7 @@ use distribution_types::{
use pep508_rs::PackageName;
use pypi_types::Requirement;
use uv_cache::Cache;
use uv_configuration::{BuildKind, BuildOptions, BuildOutput, SourceStrategy};
use uv_configuration::{BuildKind, BuildOptions, BuildOutput, ConfigSettings, SourceStrategy};
use uv_git::GitResolver;
use uv_python::PythonEnvironment;
@ -68,6 +68,9 @@ pub trait BuildContext {
/// This method exists to avoid fetching source distributions if we know we can't build them.
fn build_options(&self) -> &BuildOptions;
/// The [`ConfigSettings`] used to build distributions.
fn config_settings(&self) -> &ConfigSettings;
/// Whether to incorporate `tool.uv.sources` when resolving requirements.
fn sources(&self) -> SourceStrategy;
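A minimal sketch of how an implementor might satisfy the new `config_settings` accessor, with a placeholder map standing in for uv's `ConfigSettings` type:

```rust
use std::collections::BTreeMap;

// Placeholder standing in for `uv_configuration::ConfigSettings`.
type ConfigSettings = BTreeMap<String, String>;

trait BuildContextSketch {
    /// The settings applied when building distributions, mirroring the
    /// trait method added in this change.
    fn config_settings(&self) -> &ConfigSettings;
}

struct Dispatch {
    config_settings: ConfigSettings,
}

impl BuildContextSketch for Dispatch {
    fn config_settings(&self) -> &ConfigSettings {
        &self.config_settings
    }
}
```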

View file

@ -24,6 +24,7 @@ platform-tags = { workspace = true }
pypi-types = { workspace = true }
uv-auth = { workspace = true }
uv-cache = { workspace = true }
uv-cache-info = { workspace = true }
uv-cli = { workspace = true }
uv-client = { workspace = true }
uv-configuration = { workspace = true }

View file

@ -8,7 +8,8 @@ use pep440_rs::{VersionSpecifier, VersionSpecifiers};
use pep508_rs::MarkerTree;
use pypi_types::{Requirement, RequirementSource};
use tracing::debug;
use uv_cache::{Cache, Refresh, Timestamp};
use uv_cache::{Cache, Refresh};
use uv_cache_info::Timestamp;
use uv_client::{BaseClientBuilder, Connectivity};
use uv_configuration::{Concurrency, Upgrade};
use uv_normalize::PackageName;

View file

@ -14,7 +14,8 @@ use distribution_types::{Name, UnresolvedRequirementSpecification};
use pep440_rs::{VersionSpecifier, VersionSpecifiers};
use pep508_rs::MarkerTree;
use pypi_types::{Requirement, RequirementSource};
use uv_cache::{Cache, Refresh, Timestamp};
use uv_cache::{Cache, Refresh};
use uv_cache_info::Timestamp;
use uv_cli::ExternalCommand;
use uv_client::{BaseClientBuilder, Connectivity};
use uv_configuration::Concurrency;

View file

@ -8,10 +8,10 @@ use anyhow::Result;
use clap::error::{ContextKind, ContextValue};
use clap::{CommandFactory, Parser};
use owo_colors::OwoColorize;
use tracing::{debug, instrument};
use settings::PipTreeSettings;
use uv_cache::{Cache, Refresh, Timestamp};
use tracing::{debug, instrument};
use uv_cache::{Cache, Refresh};
use uv_cache_info::Timestamp;
use uv_cli::{
compat::CompatArgs, CacheCommand, CacheNamespace, Cli, Commands, PipCommand, PipNamespace,
ProjectCommand,

View file

@ -2926,7 +2926,7 @@ requires-python = ">=3.8"
"###
);
// Re-installing should be a no-op.
// Installing again should be a no-op.
uv_snapshot!(context.filters(), context.pip_install()
.arg("--editable")
.arg(editable_dir.path()), @r###"
@ -2951,7 +2951,7 @@ requires-python = ">=3.8"
"#,
)?;
// Re-installing should update the package.
// Installing again should update the package.
uv_snapshot!(context.filters(), context.pip_install()
.arg("--editable")
.arg(editable_dir.path()), @r###"
@ -3015,7 +3015,7 @@ dependencies = {file = ["requirements.txt"]}
"###
);
// Re-installing should not re-install, as we don't special-case dynamic metadata.
// Installing again should not re-install, as we don't special-case dynamic metadata.
uv_snapshot!(context.filters(), context.pip_install()
.arg("--editable")
.arg(editable_dir.path()), @r###"
@ -3068,7 +3068,7 @@ requires-python = ">=3.8"
"###
);
// Re-installing should be a no-op.
// Installing again should be a no-op.
uv_snapshot!(context.filters(), context.pip_install()
.arg("example @ .")
.current_dir(editable_dir.path()), @r###"
@ -3093,7 +3093,7 @@ requires-python = ">=3.8"
"#,
)?;
// Re-installing should update the package.
// Installing again should update the package.
uv_snapshot!(context.filters(), context.pip_install()
.arg("example @ .")
.current_dir(editable_dir.path()), @r###"
@ -3115,6 +3115,228 @@ requires-python = ">=3.8"
Ok(())
}
#[test]
fn invalidate_path_on_cache_key() -> Result<()> {
let context = TestContext::new("3.12");
// Create a local package.
let editable_dir = context.temp_dir.child("editable");
editable_dir.create_dir_all()?;
let pyproject_toml = editable_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = ["anyio==4.0.0"]
requires-python = ">=3.8"
[tool.uv]
cache-keys = ["constraints.txt", { file = "requirements.txt" }]
"#,
)?;
let requirements_txt = editable_dir.child("requirements.txt");
requirements_txt.write_str("idna")?;
let constraints_txt = editable_dir.child("constraints.txt");
constraints_txt.write_str("idna<3.4")?;
uv_snapshot!(context.filters(), context.pip_install()
.arg("example @ .")
.current_dir(editable_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
Prepared 4 packages in [TIME]
Installed 4 packages in [TIME]
+ anyio==4.0.0
+ example==0.0.0 (from file://[TEMP_DIR]/editable)
+ idna==3.6
+ sniffio==1.3.1
"###
);
// Installing again should be a no-op.
uv_snapshot!(context.filters(), context.pip_install()
.arg("example @ .")
.current_dir(editable_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 1 package in [TIME]
"###
);
// Modify the constraints file.
constraints_txt.write_str("idna<3.5")?;
// Installing again should update the package.
uv_snapshot!(context.filters(), context.pip_install()
.arg("example @ .")
.current_dir(editable_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
Prepared 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME]
~ example==0.0.0 (from file://[TEMP_DIR]/editable)
"###
);
// Modify the requirements file.
requirements_txt.write_str("flask")?;
// Installing again should update the package.
uv_snapshot!(context.filters(), context.pip_install()
.arg("example @ .")
.current_dir(editable_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
Prepared 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME]
~ example==0.0.0 (from file://[TEMP_DIR]/editable)
"###
);
// Modify the `pyproject.toml` file (but not in a meaningful way).
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = ["anyio==4.0.0"]
requires-python = ">=3.8"
[tool.uv]
cache-keys = [{ file = "requirements.txt" }, "constraints.txt"]
"#,
)?;
// Installing again should be a no-op, since `pyproject.toml` was not included as a cache key.
uv_snapshot!(context.filters(), context.pip_install()
.arg("example @ .")
.current_dir(editable_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 1 package in [TIME]
"###
);
Ok(())
}
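The test relies on edits to the listed files invalidating the build. One plausible aggregation, assumed here rather than taken from uv's implementation, is to stamp the build with the newest modification time across all file-based cache keys:

```rust
use std::path::Path;
use std::time::SystemTime;

/// Fold a set of cache-key files into a single freshness stamp by taking
/// the newest modification time among them. Files that don't exist are
/// skipped rather than treated as errors.
fn newest_mtime(root: &Path, files: &[&str]) -> std::io::Result<Option<SystemTime>> {
    let mut newest: Option<SystemTime> = None;
    for file in files {
        let Ok(metadata) = std::fs::metadata(root.join(file)) else {
            continue;
        };
        let mtime = metadata.modified()?;
        newest = Some(newest.map_or(mtime, |prev| prev.max(mtime)));
    }
    Ok(newest)
}
```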
#[test]
fn invalidate_path_on_commit() -> Result<()> {
let context = TestContext::new("3.12");
// Create a local package.
let editable_dir = context.temp_dir.child("editable");
editable_dir.create_dir_all()?;
let pyproject_toml = editable_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "example"
version = "0.0.0"
dependencies = ["anyio==4.0.0"]
requires-python = ">=3.8"
[tool.uv]
cache-keys = [{ git = true }]
"#,
)?;
// Create a Git repository.
context
.temp_dir
.child(".git")
.child("HEAD")
.write_str("ref: refs/heads/main")?;
context
.temp_dir
.child(".git")
.child("refs")
.child("heads")
.child("main")
.write_str("1b6638fdb424e993d8354e75c55a3e524050c857")?;
uv_snapshot!(context.filters(), context.pip_install()
.arg("example @ .")
.current_dir(editable_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
Prepared 4 packages in [TIME]
Installed 4 packages in [TIME]
+ anyio==4.0.0
+ example==0.0.0 (from file://[TEMP_DIR]/editable)
+ idna==3.6
+ sniffio==1.3.1
"###
);
// Installing again should be a no-op.
uv_snapshot!(context.filters(), context.pip_install()
.arg("example @ .")
.current_dir(editable_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 1 package in [TIME]
"###
);
// Change the current commit.
context
.temp_dir
.child(".git")
.child("refs")
.child("heads")
.child("main")
.write_str("a1a42cbd10d83bafd8600ba81f72bbef6c579385")?;
// Installing again should update the package.
uv_snapshot!(context.filters(), context.pip_install()
.arg("example @ .")
.current_dir(editable_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
Prepared 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME]
~ example==0.0.0 (from file://[TEMP_DIR]/editable)
"###
);
Ok(())
}
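The fixture above fakes a repository by writing `.git/HEAD` and a loose ref by hand. Resolving a commit from that layout can be sketched as follows (simplified: real repositories may also store the hash in packed refs, which this ignores):

```rust
use std::path::Path;

/// Resolve the current commit from a `.git` directory: read `HEAD` and,
/// if it is a symbolic ref, follow it to the loose ref file.
fn current_commit(git_dir: &Path) -> std::io::Result<String> {
    let head = std::fs::read_to_string(git_dir.join("HEAD"))?;
    let head = head.trim();
    if let Some(reference) = head.strip_prefix("ref: ") {
        // Symbolic ref, e.g. `ref: refs/heads/main`.
        Ok(std::fs::read_to_string(git_dir.join(reference))?
            .trim()
            .to_string())
    } else {
        // Detached HEAD: the file contains the commit hash directly.
        Ok(head.to_string())
    }
}
```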
/// Install from a direct path (wheel) with changed versions in the file name.
#[test]
fn path_name_version_change() {

View file

@ -3215,7 +3215,7 @@ requires-python = ">=3.8"
"###
);
// Re-installing should be a no-op.
// Installing again should be a no-op.
uv_snapshot!(context.filters(), context.pip_sync()
.arg("requirements.in"), @r###"
success: true

View file

@ -28,12 +28,33 @@ If you're running into caching issues, uv includes a few escape hatches:
## Dynamic metadata
Note that for local directory dependencies in particular (e.g., editables), uv will _only_ reinstall
the package if its `pyproject.toml`, `setup.py`, or `setup.cfg` file has changed. This is a
By default, uv will _only_ rebuild and reinstall local directory dependencies (e.g., editables) if
the `pyproject.toml`, `setup.py`, or `setup.cfg` file in the directory root has changed. This is a
heuristic and, in some cases, may lead to fewer re-installs than desired.
For example, if a local dependency uses `dynamic` metadata, you can instruct uv to _always_
reinstall the package by adding `reinstall-package` to the `uv` section of your `pyproject.toml`:
To incorporate other information into the cache key for a given package, you can add cache key
entries under `tool.uv.cache-keys`, which can include both file paths and the Git commit hash.
For example, if a project uses [`setuptools-scm`](https://pypi.org/project/setuptools-scm/), and
should be rebuilt whenever the commit hash changes, you can add the following to the project's
`pyproject.toml`:
```toml title="pyproject.toml"
[tool.uv]
cache-keys = [{ git = true }]
```
Similarly, if a project reads from a `requirements.txt` file to populate its dependencies, you can add
the following to the project's `pyproject.toml`:
```toml title="pyproject.toml"
[tool.uv]
cache-keys = [{ file = "requirements.txt" }]
```
As an escape hatch, if a project uses `dynamic` metadata that isn't covered by `tool.uv.cache-keys`,
you can instruct uv to _always_ rebuild and reinstall it by adding the project to the
`tool.uv.reinstall-package` list:
```toml title="pyproject.toml"
[tool.uv]

View file

@ -59,6 +59,52 @@ Linux, and `%LOCALAPPDATA%\uv\cache` on Windows.
---
#### [`cache-keys`](#cache-keys) {: #cache-keys }
The keys to consider when caching builds for the project.
Cache keys enable you to specify the files or directories that should trigger a rebuild when
modified. By default, uv will rebuild a project whenever the `pyproject.toml`, `setup.py`,
or `setup.cfg` files in the project directory are modified, i.e.:
```toml
cache-keys = [{ file = "pyproject.toml" }, { file = "setup.py" }, { file = "setup.cfg" }]
```
As an example: if a project uses dynamic metadata to read its dependencies from a
`requirements.txt` file, you can specify `cache-keys = [{ file = "requirements.txt" }, { file = "pyproject.toml" }]`
to ensure that the project is rebuilt whenever the `requirements.txt` file is modified (in
addition to watching the `pyproject.toml`).
Cache keys can also include version control information. For example, if a project uses
`setuptools_scm` to read its version from a Git tag, you can specify `cache-keys = [{ git = true }, { file = "pyproject.toml" }]`
to include the current Git commit hash in the cache key (in addition to the
`pyproject.toml`).
Cache keys only affect the project defined by the `pyproject.toml` in which they're
specified (as opposed to, e.g., affecting all members in a workspace).
**Default value**: `[{ file = "pyproject.toml" }, { file = "setup.py" }, { file = "setup.cfg" }]`
**Type**: `list[dict]`
**Example usage**:
=== "pyproject.toml"
```toml
[tool.uv]
cache-keys = [{ file = "pyproject.toml" }, { file = "requirements.txt" }, { git = true }]
```
=== "uv.toml"
```toml
cache-keys = [{ file = "pyproject.toml" }, { file = "requirements.txt" }, { git = true }]
```
---
#### [`compile-bytecode`](#compile-bytecode) {: #compile-bytecode }
Compile Python files to bytecode after installation.

45
uv.schema.json generated
View file

@ -21,6 +21,17 @@
"null"
]
},
"cache-keys": {
"description": "The keys to consider when caching builds for the project.\n\nCache keys enable you to specify the files or directories that should trigger a rebuild when modified. By default, uv will rebuild a project whenever the `pyproject.toml`, `setup.py`, or `setup.cfg` files in the project directory are modified, i.e.:\n\n```toml cache-keys = [{ file = \"pyproject.toml\" }, { file = \"setup.py\" }, { file = \"setup.cfg\" }] ```\n\nAs an example: if a project uses dynamic metadata to read its dependencies from a `requirements.txt` file, you can specify `cache-keys = [{ file = \"requirements.txt\" }, { file = \"pyproject.toml\" }]` to ensure that the project is rebuilt whenever the `requirements.txt` file is modified (in addition to watching the `pyproject.toml`).\n\nCache keys can also include version control information. For example, if a project uses `setuptools_scm` to read its version from a Git tag, you can specify `cache-keys = [{ git = true }, { file = \"pyproject.toml\" }]` to include the current Git commit hash in the cache key (in addition to the `pyproject.toml`).\n\nCache keys only affect the project defined by the `pyproject.toml` in which they're specified (as opposed to, e.g., affecting all members in a workspace).",
"writeOnly": true,
"type": [
"array",
"null"
],
"items": {
"$ref": "#/definitions/CacheKey"
}
},
"compile-bytecode": {
"description": "Compile Python files to bytecode after installation.\n\nBy default, uv does not compile Python (`.py`) files to bytecode (`__pycache__/*.pyc`); instead, compilation is performed lazily the first time a module is imported. For use-cases in which start time is critical, such as CLI applications and Docker containers, this option can be enabled to trade longer installation times for faster start times.\n\nWhen enabled, uv will process the entire site-packages directory (including packages that are not being modified by the current operation) for consistency. Like pip, it will also ignore errors.",
"type": [
@ -418,6 +429,40 @@
}
]
},
"CacheKey": {
"anyOf": [
{
"description": "Ex) `\"Cargo.lock\"`",
"type": "string"
},
{
"description": "Ex) `{ file = \"Cargo.lock\" }`",
"type": "object",
"required": [
"file"
],
"properties": {
"file": {
"type": "string"
}
},
"additionalProperties": false
},
{
"description": "Ex) `{ git = true }`",
"type": "object",
"required": [
"git"
],
"properties": {
"git": {
"type": "boolean"
}
},
"additionalProperties": false
}
]
},
"ConfigSettingValue": {
"oneOf": [
{