Use full requirement when serializing receipt (#5494)

## Summary

The current receipt doesn't capture quite enough information. For
example, it doesn't differentiate between editable and non-editable
requirements. This PR instead uses the full `Requirement` type. I think
we should use a custom representation like we do in the lockfile, but
I'm just using the default representation to demonstrate the idea.
This commit is contained in:
Charlie Marsh 2024-07-31 12:16:39 -04:00 committed by GitHub
parent bf8934e3e4
commit f266fb711c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
12 changed files with 341 additions and 56 deletions

2
Cargo.lock generated
View file

@ -2807,6 +2807,7 @@ dependencies = [
"toml",
"tracing",
"url",
"uv-fs",
"uv-git",
"uv-normalize",
]
@ -4844,6 +4845,7 @@ dependencies = [
"fs-err",
"reqwest",
"reqwest-middleware",
"serde",
"thiserror",
"tokio",
"tracing",

View file

@ -61,6 +61,11 @@ impl VersionSpecifiers {
pub fn contains(&self, version: &Version) -> bool {
self.iter().all(|specifier| specifier.contains(version))
}
/// Returns `true` if the set contains no version specifiers.
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
impl FromIterator<VersionSpecifier> for VersionSpecifiers {
@ -98,6 +103,12 @@ impl std::fmt::Display for VersionSpecifiers {
}
}
impl Default for VersionSpecifiers {
fn default() -> Self {
Self::empty()
}
}
/// <https://pyo3.rs/v0.18.2/class/protocols.html#iterable-objects>
#[cfg(feature = "pyo3")]
#[pyclass]

View file

@ -3,7 +3,10 @@ use std::path::{Path, PathBuf};
use uv_normalize::PackageName;
/// The origin of a dependency, e.g., a `-r requirements.txt` file.
#[derive(Hash, Debug, Clone, Eq, PartialEq, PartialOrd, Ord)]
#[derive(
Hash, Debug, Clone, Eq, PartialEq, PartialOrd, Ord, serde::Serialize, serde::Deserialize,
)]
#[serde(rename_all = "kebab-case")]
pub enum RequirementOrigin {
/// The requirement was provided via a standalone file (e.g., a `requirements.txt` file).
File(PathBuf),

View file

@ -15,8 +15,9 @@ workspace = true
[dependencies]
pep440_rs = { workspace = true }
pep508_rs = { workspace = true }
uv-normalize = { workspace = true }
uv-fs = { workspace = true, features = ["serde"] }
uv-git = { workspace = true }
uv-normalize = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
indexmap = { workspace = true, features = ["serde"] }

View file

@ -1,10 +1,12 @@
use std::fmt::{Display, Formatter};
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use thiserror::Error;
use url::Url;
use pep440_rs::VersionSpecifiers;
use pep508_rs::{MarkerEnvironment, MarkerTree, RequirementOrigin, VerbatimUrl, VersionOrUrl};
use uv_fs::PortablePathBuf;
use uv_git::{GitReference, GitSha, GitUrl};
use uv_normalize::{ExtraName, PackageName};
@ -12,16 +14,30 @@ use crate::{
ParsedArchiveUrl, ParsedDirectoryUrl, ParsedGitUrl, ParsedPathUrl, ParsedUrl, VerbatimParsedUrl,
};
/// Errors that can occur when converting between a [`RequirementSource`] and its
/// serialized wire representation (e.g., when deserializing a tool receipt).
#[derive(Debug, Error)]
pub enum RequirementError {
/// A URL could not be parsed as a PEP 508 verbatim URL.
#[error(transparent)]
VerbatimUrlError(#[from] pep508_rs::VerbatimUrlError),
/// A URL could not be parsed at all.
#[error(transparent)]
UrlParseError(#[from] url::ParseError),
/// A Git object ID (e.g., a precise commit SHA in a URL fragment) could not be parsed.
#[error(transparent)]
OidParseError(#[from] uv_git::OidParseError),
}
/// A representation of dependency on a package, an extension over a PEP 508's requirement.
///
/// The main change is using [`RequirementSource`] to represent all supported package sources over
/// [`VersionOrUrl`], which collapses all URL sources into a single stringly type.
#[derive(Hash, Debug, Clone, Eq, PartialEq)]
#[derive(Hash, Debug, Clone, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Requirement {
pub name: PackageName,
#[serde(skip_serializing_if = "Vec::is_empty", default)]
pub extras: Vec<ExtraName>,
pub marker: Option<MarkerTree>,
#[serde(flatten)]
pub source: RequirementSource,
#[serde(skip)]
pub origin: Option<RequirementOrigin>,
}
@ -228,7 +244,8 @@ impl Display for Requirement {
/// We store both the parsed fields (such as the plain url and the subdirectory) and the joined
/// PEP 508 style url (e.g. `file:///<path>#subdirectory=<subdirectory>`) since we need both in
/// different locations.
#[derive(Hash, Debug, Clone, Eq, PartialEq)]
#[derive(Hash, Debug, Clone, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
#[serde(try_from = "RequirementSourceWire", into = "RequirementSourceWire")]
pub enum RequirementSource {
/// The requirement has a version specifier, such as `foo >1,<2`.
Registry {
@ -398,7 +415,7 @@ impl RequirementSource {
pub fn version_or_url(&self) -> Option<VersionOrUrl<VerbatimParsedUrl>> {
match self {
Self::Registry { specifier, .. } => {
if specifier.len() == 0 {
if specifier.is_empty() {
None
} else {
Some(VersionOrUrl::VersionSpecifier(specifier.clone()))
@ -415,3 +432,229 @@ impl RequirementSource {
matches!(self, Self::Directory { editable: true, .. })
}
}
impl Display for RequirementSource {
/// Display the [`RequirementSource`], with the intention of being shown directly to a user,
/// rather than for inclusion in a `requirements.txt` file.
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
Self::Registry { specifier, index } => {
write!(f, "{specifier}")?;
if let Some(index) = index {
write!(f, " (index: {index})")?;
}
}
Self::Url { url, .. } => {
write!(f, " {url}")?;
}
Self::Git {
url: _,
repository,
reference,
precise: _,
subdirectory,
} => {
write!(f, " git+{repository}")?;
if let Some(reference) = reference.as_str() {
write!(f, "@{reference}")?;
}
if let Some(subdirectory) = subdirectory {
// Use `write!` rather than `writeln!`: a `Display` impl should not emit
// a trailing newline, and no other arm of this match does.
write!(f, "#subdirectory={}", subdirectory.display())?;
}
}
Self::Path { url, .. } => {
write!(f, "{url}")?;
}
Self::Directory { url, .. } => {
write!(f, "{url}")?;
}
}
Ok(())
}
}
/// A simplified, serializable "wire" representation of a [`RequirementSource`], as written to
/// (e.g.) a tool receipt.
///
/// Deserialization is `untagged`: serde matches the incoming fields against each variant in
/// declaration order, so variant order here is semantically significant.
#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]
#[serde(untagged)]
enum RequirementSourceWire {
/// Ex) `source = { specifier = "foo >1,<2" }`
Registry {
// Omit the specifier entirely when empty, so an unconstrained requirement
// serializes as just `{ name = "..." }`.
#[serde(skip_serializing_if = "VersionSpecifiers::is_empty", default)]
specifier: VersionSpecifiers,
index: Option<String>,
},
/// Ex) `source = { git = "<https://github.com/astral-test/uv-public-pypackage?rev=0.0.1#0dacfd662c64cb4ceb16e6cf65a157a8b715b979>" }`
Git { git: String },
/// Ex) `source = { url = "<https://example.org/foo-1.0.zip>" }`
Direct {
url: Url,
subdirectory: Option<String>,
},
/// Ex) `source = { path = "/home/ferris/iniconfig-2.0.0-py3-none-any.whl" }`
Path { path: PortablePathBuf },
/// Ex) `source = { directory = "/home/ferris/iniconfig" }`
Directory { directory: PortablePathBuf },
/// Ex) `source = { editable = "/home/ferris/iniconfig" }`
Editable { editable: PortablePathBuf },
}
impl From<RequirementSource> for RequirementSourceWire {
/// Convert a [`RequirementSource`] to its wire representation.
///
/// This conversion is lossy: the verbatim `url` is discarded for URL-based sources (it is
/// reconstructed on deserialization), and `lock_path` is dropped for path-based sources.
fn from(value: RequirementSource) -> Self {
match value {
RequirementSource::Registry { specifier, index } => Self::Registry { specifier, index },
RequirementSource::Url {
subdirectory,
location,
url: _,
} => Self::Direct {
url: location,
// Subdirectories that are not valid UTF-8 are silently dropped here.
// NOTE(review): presumably such paths cannot occur in practice — confirm.
subdirectory: subdirectory
.as_deref()
.and_then(Path::to_str)
.map(str::to_string),
},
RequirementSource::Git {
repository,
reference,
precise,
subdirectory,
url: _,
} => {
// Encode the Git metadata into a single URL: subdirectory and reference go in
// the query string, and the precise commit goes in the fragment.
let mut url = repository;
// Clear out any existing state.
url.set_fragment(None);
url.set_query(None);
// Put the subdirectory in the query.
if let Some(subdirectory) = subdirectory.as_deref().and_then(Path::to_str) {
url.query_pairs_mut()
.append_pair("subdirectory", subdirectory);
}
// Put the requested reference in the query.
match reference {
GitReference::Branch(branch) => {
url.query_pairs_mut()
.append_pair("branch", branch.to_string().as_str());
}
GitReference::Tag(tag) => {
url.query_pairs_mut()
.append_pair("tag", tag.to_string().as_str());
}
// All other named-or-commit reference forms collapse to a generic `rev`
// key; the original distinction is recovered via `GitReference::from_rev`.
GitReference::ShortCommit(rev)
| GitReference::BranchOrTag(rev)
| GitReference::BranchOrTagOrCommit(rev)
| GitReference::NamedRef(rev)
| GitReference::FullCommit(rev) => {
url.query_pairs_mut()
.append_pair("rev", rev.to_string().as_str());
}
// The default branch is implied by the absence of any reference key.
GitReference::DefaultBranch => {}
}
// Put the precise commit in the fragment.
if let Some(precise) = precise {
url.set_fragment(Some(&precise.to_string()));
}
Self::Git {
git: url.to_string(),
}
}
RequirementSource::Path {
install_path,
lock_path: _,
url: _,
} => Self::Path {
path: PortablePathBuf::from(install_path),
},
RequirementSource::Directory {
install_path,
lock_path: _,
editable,
url: _,
} => {
// Editable and non-editable directories serialize under distinct keys
// (`editable` vs. `directory`) so the flag round-trips through the receipt.
if editable {
Self::Editable {
editable: PortablePathBuf::from(install_path),
}
} else {
Self::Directory {
directory: PortablePathBuf::from(install_path),
}
}
}
}
}
}
impl TryFrom<RequirementSourceWire> for RequirementSource {
type Error = RequirementError;
/// Reconstruct a [`RequirementSource`] from its wire representation.
///
/// Inverse of the `From<RequirementSource>` conversion: Git metadata is decoded from the
/// URL's query string and fragment, and verbatim URLs are rebuilt from the stored values.
fn try_from(wire: RequirementSourceWire) -> Result<RequirementSource, RequirementError> {
match wire {
RequirementSourceWire::Registry { specifier, index } => {
Ok(RequirementSource::Registry { specifier, index })
}
RequirementSourceWire::Git { git } => {
let mut url = Url::parse(&git)?;
let mut reference = GitReference::DefaultBranch;
let mut subdirectory = None;
// Decode the reference and subdirectory from the query string. If a key is
// repeated, the last occurrence wins.
for (key, val) in url.query_pairs() {
match &*key {
"tag" => reference = GitReference::Tag(val.into_owned()),
"branch" => reference = GitReference::Branch(val.into_owned()),
"rev" => reference = GitReference::from_rev(val.into_owned()),
"subdirectory" => subdirectory = Some(PathBuf::from(val.into_owned())),
_ => continue,
};
}
// Read the precise commit from the fragment _before_ clearing it below.
let precise = url.fragment().map(GitSha::from_str).transpose()?;
url.set_query(None);
url.set_fragment(None);
Ok(RequirementSource::Git {
repository: url.clone(),
reference,
precise,
subdirectory,
url: VerbatimUrl::from_url(url),
})
}
RequirementSourceWire::Direct { url, subdirectory } => Ok(RequirementSource::Url {
// The original verbatim URL was not serialized, so rebuild it from the
// stored location.
url: VerbatimUrl::from_url(url.clone()),
subdirectory: subdirectory.map(PathBuf::from),
location: url.clone(),
}),
RequirementSourceWire::Path { path } => {
let path = PathBuf::from(path);
Ok(RequirementSource::Path {
url: VerbatimUrl::from_path(path.as_path())?,
// Only one path was serialized, so install and lock paths coincide.
install_path: path.clone(),
lock_path: path,
})
}
RequirementSourceWire::Directory { directory } => {
let directory = PathBuf::from(directory);
Ok(RequirementSource::Directory {
url: VerbatimUrl::from_path(directory.as_path())?,
install_path: directory.clone(),
lock_path: directory,
editable: false,
})
}
// The `editable` key implies a directory source with the editable flag set.
RequirementSourceWire::Editable { editable } => {
let editable = PathBuf::from(editable);
Ok(RequirementSource::Directory {
url: VerbatimUrl::from_path(editable.as_path())?,
install_path: editable.clone(),
lock_path: editable,
editable: true,
})
}
}
}
}

View file

@ -22,6 +22,7 @@ dashmap = { workspace = true }
fs-err = { workspace = true, features = ["tokio"] }
reqwest = { workspace = true, features = ["blocking"] }
reqwest-middleware = { workspace = true }
serde = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }

View file

@ -20,7 +20,9 @@ use crate::sha::GitOid;
const CHECKOUT_READY_LOCK: &str = ".ok";
/// A reference to commit or commit-ish.
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[derive(
Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, serde::Serialize, serde::Deserialize,
)]
pub enum GitReference {
/// A specific branch.
Branch(String),

View file

@ -26,7 +26,7 @@ impl From<GitOid> for GitSha {
}
}
impl std::fmt::Display for GitSha {
impl Display for GitSha {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
}
@ -40,6 +40,25 @@ impl FromStr for GitSha {
}
}
impl serde::Serialize for GitSha {
/// Serialize the SHA as its string form (e.g., a hex-encoded commit hash).
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(self.0.as_str())
}
}
impl<'de> serde::Deserialize<'de> for GitSha {
fn deserialize<D>(deserializer: D) -> Result<GitSha, D::Error>
where
D: serde::Deserializer<'de>,
{
let value = String::deserialize(deserializer)?;
GitSha::from_str(&value).map_err(serde::de::Error::custom)
}
}
/// Unique identity of any Git object (commit, tree, blob, tag).
///
/// Note this type does not validate whether the input is a valid hash.

View file

@ -30,5 +30,5 @@ pathdiff = { workspace = true }
serde = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
toml_edit = { workspace = true }
toml_edit = { workspace = true, features = ["serde"] }
tracing = { workspace = true }

View file

@ -6,7 +6,7 @@ use toml_edit::Array;
use toml_edit::Table;
use toml_edit::Value;
use pypi_types::VerbatimParsedUrl;
use pypi_types::Requirement;
use uv_fs::PortablePath;
/// A tool entry.
@ -15,7 +15,7 @@ use uv_fs::PortablePath;
#[serde(rename_all = "kebab-case")]
pub struct Tool {
/// The requirements requested by the user during installation.
requirements: Vec<pep508_rs::Requirement<VerbatimParsedUrl>>,
requirements: Vec<Requirement>,
/// The Python requested by the user during installation.
python: Option<String>,
/// A mapping of entry point names to their metadata.
@ -59,7 +59,7 @@ fn each_element_on_its_line_array(elements: impl Iterator<Item = impl Into<Value
impl Tool {
/// Create a new `Tool`.
pub fn new(
requirements: Vec<pep508_rs::Requirement<VerbatimParsedUrl>>,
requirements: Vec<Requirement>,
python: Option<String>,
entrypoints: impl Iterator<Item = ToolEntrypoint>,
) -> Self {
@ -79,12 +79,20 @@ impl Tool {
table.insert("requirements", {
let requirements = match self.requirements.as_slice() {
[] => Array::new(),
[requirement] => Array::from_iter([Value::from(requirement.to_string())]),
requirements => each_element_on_its_line_array(
requirements
.iter()
.map(|requirement| Value::from(requirement.to_string())),
),
[requirement] => Array::from_iter([serde::Serialize::serialize(
&requirement,
toml_edit::ser::ValueSerializer::new(),
)
.unwrap()]),
requirements => {
each_element_on_its_line_array(requirements.iter().map(|requirement| {
serde::Serialize::serialize(
&requirement,
toml_edit::ser::ValueSerializer::new(),
)
.unwrap()
}))
}
};
value(requirements)
});
@ -98,7 +106,7 @@ impl Tool {
self.entrypoints
.iter()
.map(ToolEntrypoint::to_toml)
.map(toml_edit::Table::into_inline_table),
.map(Table::into_inline_table),
);
value(entrypoints)
});
@ -110,7 +118,7 @@ impl Tool {
&self.entrypoints
}
pub fn requirements(&self) -> &[pep508_rs::Requirement<VerbatimParsedUrl>] {
pub fn requirements(&self) -> &[Requirement] {
&self.requirements
}
}

View file

@ -435,10 +435,7 @@ pub(crate) async fn install(
debug!("Adding receipt for tool `{}`", from.name);
let tool = Tool::new(
requirements
.into_iter()
.map(pep508_rs::Requirement::from)
.collect(),
requirements.into_iter().collect(),
python,
target_entry_points
.into_iter()

View file

@ -82,7 +82,7 @@ fn tool_install() {
// We should have a tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["black"]
requirements = [{ name = "black" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
{ name = "blackd", install-path = "[TEMP_DIR]/bin/blackd" },
@ -164,7 +164,7 @@ fn tool_install() {
// We should have a new tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("flask").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["flask"]
requirements = [{ name = "flask" }]
entrypoints = [
{ name = "flask", install-path = "[TEMP_DIR]/bin/flask" },
]
@ -300,7 +300,7 @@ fn tool_install_version() {
// We should have a tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["black==24.2.0"]
requirements = [{ name = "black", specifier = "==24.2.0" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
{ name = "blackd", install-path = "[TEMP_DIR]/bin/blackd" },
@ -380,7 +380,7 @@ fn tool_install_editable() {
// We should have a tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["black @ file://[WORKSPACE]/scripts/packages/black_editable"]
requirements = [{ name = "black", editable = "[WORKSPACE]/scripts/packages/black_editable" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
]
@ -396,9 +396,7 @@ fn tool_install_editable() {
----- stderr -----
"###);
// Request `black`. It should retain the current installation.
// TODO(charlie): This is arguably incorrect, especially because the tool receipt removes the
// file path.
// Request `black`. It should reinstall from the registry.
uv_snapshot!(context.filters(), context.tool_install()
.arg("black")
.env("UV_TOOL_DIR", tool_dir.as_os_str())
@ -410,7 +408,7 @@ fn tool_install_editable() {
----- stderr -----
warning: `uv tool install` is experimental and may change without warning
Installed 1 executable: black
`black` is already installed
"###);
insta::with_settings!({
@ -419,7 +417,7 @@ fn tool_install_editable() {
// We should have a tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["black"]
requirements = [{ name = "black", editable = "[WORKSPACE]/scripts/packages/black_editable" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
]
@ -460,7 +458,7 @@ fn tool_install_editable() {
// We should have a tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["black==24.2.0"]
requirements = [{ name = "black", specifier = "==24.2.0" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
{ name = "blackd", install-path = "[TEMP_DIR]/bin/blackd" },
@ -532,7 +530,7 @@ fn tool_install_editable_from() {
// We should have a tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["black @ file://[WORKSPACE]/scripts/packages/black_editable"]
requirements = [{ name = "black", editable = "[WORKSPACE]/scripts/packages/black_editable" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
]
@ -683,7 +681,7 @@ fn tool_install_already_installed() {
// We should have a tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["black"]
requirements = [{ name = "black" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
{ name = "blackd", install-path = "[TEMP_DIR]/bin/blackd" },
@ -717,7 +715,7 @@ fn tool_install_already_installed() {
// We should not have an additional tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["black"]
requirements = [{ name = "black" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
{ name = "blackd", install-path = "[TEMP_DIR]/bin/blackd" },
@ -1025,7 +1023,7 @@ fn tool_install_entry_point_exists() {
// We write a tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["black"]
requirements = [{ name = "black" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
{ name = "blackd", install-path = "[TEMP_DIR]/bin/blackd" },
@ -1058,7 +1056,7 @@ fn tool_install_entry_point_exists() {
// We should have a tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["black"]
requirements = [{ name = "black" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
{ name = "blackd", install-path = "[TEMP_DIR]/bin/blackd" },
@ -1288,7 +1286,7 @@ fn tool_install_unnamed_package() {
// We should have a tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["black @ https://files.pythonhosted.org/packages/0f/89/294c9a6b6c75a08da55e9d05321d0707e9418735e3062b12ef0f54c33474/black-24.4.2-py3-none-any.whl"]
requirements = [{ name = "black", url = "https://files.pythonhosted.org/packages/0f/89/294c9a6b6c75a08da55e9d05321d0707e9418735e3062b12ef0f54c33474/black-24.4.2-py3-none-any.whl" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
{ name = "blackd", install-path = "[TEMP_DIR]/bin/blackd" },
@ -1400,7 +1398,7 @@ fn tool_install_unnamed_from() {
// We should have a tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["black @ https://files.pythonhosted.org/packages/0f/89/294c9a6b6c75a08da55e9d05321d0707e9418735e3062b12ef0f54c33474/black-24.4.2-py3-none-any.whl"]
requirements = [{ name = "black", url = "https://files.pythonhosted.org/packages/0f/89/294c9a6b6c75a08da55e9d05321d0707e9418735e3062b12ef0f54c33474/black-24.4.2-py3-none-any.whl" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
{ name = "blackd", install-path = "[TEMP_DIR]/bin/blackd" },
@ -1488,8 +1486,8 @@ fn tool_install_unnamed_with() {
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = [
"black",
"iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl",
{ name = "black" },
{ name = "iniconfig", url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl" },
]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
@ -1555,8 +1553,8 @@ fn tool_install_requirements_txt() {
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = [
"black",
"iniconfig",
{ name = "black" },
{ name = "iniconfig" },
]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
@ -1598,8 +1596,8 @@ fn tool_install_requirements_txt() {
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = [
"black",
"idna",
{ name = "black" },
{ name = "idna" },
]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
@ -1660,8 +1658,8 @@ fn tool_install_requirements_txt_arguments() {
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = [
"black",
"idna",
{ name = "black" },
{ name = "idna" },
]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
@ -1776,7 +1774,7 @@ fn tool_install_upgrade() {
// We should have a tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["black==24.1.1"]
requirements = [{ name = "black", specifier = "==24.1.1" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
{ name = "blackd", install-path = "[TEMP_DIR]/bin/blackd" },
@ -1806,7 +1804,7 @@ fn tool_install_upgrade() {
// We should have a tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["black"]
requirements = [{ name = "black" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
{ name = "blackd", install-path = "[TEMP_DIR]/bin/blackd" },
@ -1842,8 +1840,8 @@ fn tool_install_upgrade() {
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = [
"black",
"iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl",
{ name = "black" },
{ name = "iniconfig", url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl" },
]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
@ -1882,7 +1880,7 @@ fn tool_install_upgrade() {
// We should have a tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = ["black"]
requirements = [{ name = "black" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black" },
{ name = "blackd", install-path = "[TEMP_DIR]/bin/blackd" },