Support remote https:// requirements files (#1332) (#2081)

## Summary

Allow using http(s) urls for constraints and requirements files handed
to the CLI, by handling paths starting with `http://` or `https://`
differently. This allows commands such as: `uv pip install -c
https://raw.githubusercontent.com/apache/airflow/constraints-2.8.1/constraints-3.8.txt
requests`.

closes #1332

## Test Plan

Testing install using a `constraints.txt` file hosted on github in the
airflow repository:

fbdc2eba8e/crates/uv/tests/pip_install.rs (L1440-L1484)

## Advice Needed

- filesystem/http dispatch is implemented at a relatively low level (at
`crates/uv-fs/src/lib.rs#read_to_string`). Should I change some naming
here so it is obvious that the function is able to dispatch?
- I kept the CLI argument for -c and -r as a PathBuf, even though now it
is technically either a path or a url. We could either keep this as is
for now, or implement a new enum for this case? The enum could then
handle dispatch to files/http.
- Using another abstraction layer like
https://docs.rs/object_store/latest/object_store/ for the
files/urls/[s3] could work as well, though I ran into a bug during
testing which I couldn't debug
This commit is contained in:
jannisko 2024-03-06 06:18:11 +02:00 committed by GitHub
parent 2ebcef9ad8
commit 71626e8dec
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
16 changed files with 430 additions and 145 deletions

17
Cargo.lock generated
View file

@ -203,6 +203,17 @@ dependencies = [
"tokio",
]
[[package]]
name = "async-recursion"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.48",
]
[[package]]
name = "async-trait"
version = "0.1.77"
@ -2785,6 +2796,7 @@ version = "0.0.1"
dependencies = [
"anyhow",
"assert_fs",
"async-recursion",
"fs-err",
"indoc",
"insta",
@ -2793,14 +2805,19 @@ dependencies = [
"pep440_rs",
"pep508_rs",
"regex",
"reqwest",
"reqwest-middleware",
"serde",
"serde_json",
"tempfile",
"test-case",
"thiserror",
"tokio",
"tracing",
"unscanny",
"url",
"uv-cache",
"uv-client",
"uv-fs",
"uv-normalize",
"uv-warnings",

View file

@ -3,7 +3,7 @@ members = ["crates/*"]
exclude = [
"scripts",
# Needs nightly
"crates/uv-trampoline"
"crates/uv-trampoline",
]
resolver = "2"
@ -22,6 +22,7 @@ anyhow = { version = "1.0.80" }
async-compression = { version = "0.4.6" }
async-channel = { version = "2.2.0" }
async-trait = { version = "0.1.77" }
async-recursion = { version = "1.0.5" }
async_http_range_reader = { version = "0.7.0" }
async_zip = { git = "https://github.com/charliermarsh/rs-async-zip", rev = "d76801da0943de985254fc6255c0e476b57c5836", features = ["deflate"] }
base64 = { version = "0.21.7" }

View file

@ -15,15 +15,21 @@ workspace = true
[dependencies]
pep440_rs = { path = "../pep440-rs", features = ["rkyv", "serde"] }
pep508_rs = { path = "../pep508-rs", features = ["rkyv", "serde", "non-pep508-extensions"] }
uv-cache = { path = "../uv-cache" }
uv-client = { path = "../uv-client" }
uv-fs = { path = "../uv-fs" }
uv-normalize = { path = "../uv-normalize" }
uv-warnings = { path = "../uv-warnings" }
async-recursion = { workspace = true }
fs-err = { workspace = true }
once_cell = { workspace = true }
regex = { workspace = true }
reqwest = { workspace = true }
reqwest-middleware = { workspace = true }
serde = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
unscanny = { workspace = true }
url = { workspace = true }

View file

@ -40,11 +40,13 @@ use std::io;
use std::path::{Path, PathBuf};
use serde::{Deserialize, Serialize};
use tracing::{instrument, warn};
use tracing::instrument;
use unscanny::{Pattern, Scanner};
use url::Url;
use uv_client::RegistryClient;
use uv_warnings::warn_user;
use async_recursion::async_recursion;
use pep508_rs::{
expand_path_vars, split_scheme, Extras, Pep508Error, Pep508ErrorSource, Requirement, Scheme,
VerbatimUrl,
@ -321,28 +323,38 @@ pub struct RequirementsTxt {
impl RequirementsTxt {
/// See module level documentation
#[instrument(skip_all, fields(requirements_txt = requirements_txt.as_ref().as_os_str().to_str()))]
pub fn parse(
pub async fn parse(
requirements_txt: impl AsRef<Path>,
working_dir: impl AsRef<Path>,
client: &RegistryClient,
) -> Result<Self, RequirementsTxtFileError> {
let requirements_txt = requirements_txt.as_ref();
let working_dir = working_dir.as_ref();
let content =
uv_fs::read_to_string(&requirements_txt).map_err(|err| RequirementsTxtFileError {
file: requirements_txt.as_ref().to_path_buf(),
error: RequirementsTxtParserError::IO(err),
if requirements_txt.starts_with("http://") | requirements_txt.starts_with("https://") {
read_url_to_string(&requirements_txt, client).await
} else {
uv_fs::read_to_string(&requirements_txt)
.await
.map_err(RequirementsTxtParserError::IO)
}
.map_err(|err| RequirementsTxtFileError {
file: requirements_txt.to_path_buf(),
error: err,
})?;
let working_dir = working_dir.as_ref();
let requirements_dir = requirements_txt.as_ref().parent().unwrap_or(working_dir);
let data = Self::parse_inner(&content, working_dir, requirements_dir).map_err(|err| {
RequirementsTxtFileError {
file: requirements_txt.as_ref().to_path_buf(),
let requirements_dir = requirements_txt.parent().unwrap_or(working_dir);
let data = Self::parse_inner(&content, working_dir, requirements_dir, client)
.await
.map_err(|err| RequirementsTxtFileError {
file: requirements_txt.to_path_buf(),
error: err,
}
})?;
})?;
if data == Self::default() {
warn_user!(
"Requirements file {} does not contain any dependencies",
requirements_txt.as_ref().display()
requirements_txt.simplified_display()
);
}
@ -355,10 +367,12 @@ impl RequirementsTxt {
/// the current working directory. However, relative paths to sub-files (e.g., `-r ../requirements.txt`)
/// are resolved against the directory of the containing `requirements.txt` file, to match
/// `pip`'s behavior.
pub fn parse_inner(
#[async_recursion]
pub async fn parse_inner(
content: &str,
working_dir: &Path,
requirements_dir: &Path,
client: &RegistryClient,
) -> Result<Self, RequirementsTxtParserError> {
let mut s = Scanner::new(content);
@ -370,14 +384,20 @@ impl RequirementsTxt {
start,
end,
} => {
let sub_file = requirements_dir.join(expand_path_vars(&filename).as_ref());
let sub_requirements = Self::parse(&sub_file, working_dir).map_err(|err| {
RequirementsTxtParserError::Subfile {
let filename = expand_path_vars(&filename);
let sub_file =
if filename.starts_with("http://") || filename.starts_with("https://") {
PathBuf::from(filename.as_ref())
} else {
requirements_dir.join(filename.as_ref())
};
let sub_requirements = Self::parse(&sub_file, working_dir, client)
.await
.map_err(|err| RequirementsTxtParserError::Subfile {
source: Box::new(err),
start,
end,
}
})?;
})?;
// Disallow conflicting `--index-url` in nested `requirements` files.
if sub_requirements.index_url.is_some()
@ -402,14 +422,20 @@ impl RequirementsTxt {
start,
end,
} => {
let sub_file = requirements_dir.join(expand_path_vars(&filename).as_ref());
let sub_constraints = Self::parse(&sub_file, working_dir).map_err(|err| {
RequirementsTxtParserError::Subfile {
let filename = expand_path_vars(&filename);
let sub_file =
if filename.starts_with("http://") || filename.starts_with("https://") {
PathBuf::from(filename.as_ref())
} else {
requirements_dir.join(filename.as_ref())
};
let sub_constraints = Self::parse(&sub_file, working_dir, client)
.await
.map_err(|err| RequirementsTxtParserError::Subfile {
source: Box::new(err),
start,
end,
}
})?;
})?;
// Treat any nested requirements or constraints as constraints. This differs
// from `pip`, which seems to treat `-r` requirements in constraints files as
// _requirements_, but we don't want to support that.
@ -767,6 +793,31 @@ fn parse_value<'a, T>(
}
}
/// Fetch the contents of a URL and return them as a string.
async fn read_url_to_string(
path: impl AsRef<Path>,
client: &RegistryClient,
) -> Result<String, RequirementsTxtParserError> {
// pip would URL-encode the non-UTF-8 bytes of the string; we just don't support them.
let path_utf8 =
path.as_ref()
.to_str()
.ok_or_else(|| RequirementsTxtParserError::NonUnicodeUrl {
url: path.as_ref().to_owned(),
})?;
Ok(client
.cached_client()
.uncached()
.get(path_utf8)
.send()
.await?
.error_for_status()
.map_err(reqwest_middleware::Error::Reqwest)?
.text()
.await
.map_err(reqwest_middleware::Error::Reqwest)?)
}
/// Error parsing requirements.txt, wrapper with filename
#[derive(Debug)]
pub struct RequirementsTxtFileError {
@ -808,6 +859,10 @@ pub enum RequirementsTxtParserError {
start: usize,
end: usize,
},
Reqwest(reqwest_middleware::Error),
NonUnicodeUrl {
url: PathBuf,
},
}
impl RequirementsTxtParserError {
@ -855,6 +910,8 @@ impl RequirementsTxtParserError {
start: start + offset,
end: end + offset,
},
Self::Reqwest(err) => Self::Reqwest(err),
Self::NonUnicodeUrl { url } => Self::NonUnicodeUrl { url },
}
}
}
@ -897,6 +954,16 @@ impl Display for RequirementsTxtParserError {
Self::Subfile { start, .. } => {
write!(f, "Error parsing included file at position {start}")
}
Self::Reqwest(err) => {
write!(f, "Error while accessing remote requirements file {err}")
}
Self::NonUnicodeUrl { url } => {
write!(
f,
"Remote requirements URL contains non-unicode characters: {}",
url.display(),
)
}
}
}
}
@ -914,6 +981,8 @@ impl std::error::Error for RequirementsTxtParserError {
Self::Pep508 { source, .. } => Some(source),
Self::Subfile { source, .. } => Some(source.as_ref()),
Self::Parser { .. } => None,
Self::Reqwest(err) => err.source(),
Self::NonUnicodeUrl { .. } => None,
}
}
}
@ -989,6 +1058,21 @@ impl Display for RequirementsTxtFileError {
self.file.simplified_display(),
)
}
RequirementsTxtParserError::Reqwest(err) => {
write!(
f,
"Error while accessing remote requirements file {}: {err}",
self.file.simplified_display(),
)
}
RequirementsTxtParserError::NonUnicodeUrl { url } => {
write!(
f,
"Remote requirements URL contains non-unicode characters: {}",
url.display(),
)
}
}
}
}
@ -1005,6 +1089,12 @@ impl From<io::Error> for RequirementsTxtParserError {
}
}
impl From<reqwest_middleware::Error> for RequirementsTxtParserError {
fn from(err: reqwest_middleware::Error) -> Self {
Self::Reqwest(err)
}
}
/// Calculates the column and line offset of a given cursor based on the
/// number of Unicode codepoints.
fn calculate_row_column(content: &str, position: usize) -> (usize, usize) {
@ -1057,6 +1147,7 @@ mod test {
use tempfile::tempdir;
use test_case::test_case;
use unscanny::Scanner;
use uv_client::{RegistryClient, RegistryClientBuilder};
use uv_fs::Simplified;
use crate::{calculate_row_column, EditableRequirement, RequirementsTxt};
@ -1065,6 +1156,12 @@ mod test {
PathBuf::from("./test-data")
}
/// Build a registry client for tests: online connectivity, backed by a
/// temporary cache directory.
fn registry_client() -> RegistryClient {
    let cache = uv_cache::Cache::temp().unwrap();
    RegistryClientBuilder::new(cache)
        .connectivity(uv_client::Connectivity::Online)
        .build()
}
#[test_case(Path::new("basic.txt"))]
#[test_case(Path::new("constraints-a.txt"))]
#[test_case(Path::new("constraints-b.txt"))]
@ -1075,11 +1172,14 @@ mod test {
#[test_case(Path::new("poetry-with-hashes.txt"))]
#[test_case(Path::new("small.txt"))]
#[test_case(Path::new("whitespace.txt"))]
fn parse(path: &Path) {
#[tokio::test]
async fn parse(path: &Path) {
let working_dir = workspace_test_data_dir().join("requirements-txt");
let requirements_txt = working_dir.join(path);
let actual = RequirementsTxt::parse(requirements_txt, &working_dir).unwrap();
let actual = RequirementsTxt::parse(requirements_txt, &working_dir, &registry_client())
.await
.unwrap();
let snapshot = format!("parse-{}", path.to_string_lossy());
insta::assert_debug_snapshot!(snapshot, actual);
@ -1096,7 +1196,8 @@ mod test {
#[test_case(Path::new("small.txt"))]
#[test_case(Path::new("whitespace.txt"))]
#[test_case(Path::new("editable.txt"))]
fn line_endings(path: &Path) {
#[tokio::test]
async fn line_endings(path: &Path) {
let working_dir = workspace_test_data_dir().join("requirements-txt");
let requirements_txt = working_dir.join(path);
@ -1120,14 +1221,16 @@ mod test {
let requirements_txt = temp_dir.path().join(path);
fs::write(&requirements_txt, contents).unwrap();
let actual = RequirementsTxt::parse(&requirements_txt, &working_dir).unwrap();
let actual = RequirementsTxt::parse(&requirements_txt, &working_dir, &registry_client())
.await
.unwrap();
let snapshot = format!("line-endings-{}", path.to_string_lossy());
insta::assert_debug_snapshot!(snapshot, actual);
}
#[test]
fn invalid_include_missing_file() -> Result<()> {
#[tokio::test]
async fn invalid_include_missing_file() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let missing_txt = temp_dir.child("missing.txt");
let requirements_txt = temp_dir.child("requirements.txt");
@ -1135,7 +1238,10 @@ mod test {
-r missing.txt
"})?;
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
let error =
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
.await
.unwrap_err();
let errors = anyhow::Error::new(error)
.chain()
// The last error is operating-system specific.
@ -1155,22 +1261,25 @@ mod test {
}, {
insta::assert_snapshot!(errors, @r###"
Error parsing included file in `<REQUIREMENTS_TXT>` at position 0
failed to open file `<MISSING_TXT>`
failed to read from file `<MISSING_TXT>`
"###);
});
Ok(())
}
#[test]
fn invalid_requirement() -> Result<()> {
#[tokio::test]
async fn invalid_requirement() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {"
numpy[ö]==1.29
"})?;
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
let error =
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt =
@ -1193,15 +1302,18 @@ mod test {
Ok(())
}
#[test]
fn unsupported_editable() -> Result<()> {
#[tokio::test]
async fn unsupported_editable() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {"
-e http://localhost:8080/
"})?;
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
let error =
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt =
@ -1219,15 +1331,18 @@ mod test {
Ok(())
}
#[test]
fn invalid_editable_extra() -> Result<()> {
#[tokio::test]
async fn invalid_editable_extra() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {"
-e black[,abcdef]
"})?;
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
let error =
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt =
@ -1247,15 +1362,18 @@ mod test {
Ok(())
}
#[test]
fn invalid_index_url() -> Result<()> {
#[tokio::test]
async fn invalid_index_url() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {"
--index-url 123
"})?;
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
let error =
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt =
@ -1276,8 +1394,8 @@ mod test {
Ok(())
}
#[test]
fn missing_r() -> Result<()> {
#[tokio::test]
async fn missing_r() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let file_txt = temp_dir.child("file.txt");
@ -1289,7 +1407,10 @@ mod test {
file.txt
"})?;
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
let error =
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt =
@ -1307,8 +1428,8 @@ mod test {
Ok(())
}
#[test]
fn relative_requirement() -> Result<()> {
#[tokio::test]
async fn relative_requirement() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
// Create a requirements file with a relative entry, in a subdirectory.
@ -1330,7 +1451,10 @@ mod test {
-r subdir/child.txt
"})?;
let requirements = RequirementsTxt::parse(parent_txt.path(), temp_dir.path()).unwrap();
let requirements =
RequirementsTxt::parse(parent_txt.path(), temp_dir.path(), &registry_client())
.await
.unwrap();
insta::assert_debug_snapshot!(requirements, @r###"
RequirementsTxt {
requirements: [
@ -1359,9 +1483,9 @@ mod test {
Ok(())
}
#[test]
#[tokio::test]
#[cfg(not(windows))]
fn nested_editable() -> Result<()> {
async fn nested_editable() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let requirements_txt = temp_dir.child("requirements.txt");
@ -1381,7 +1505,9 @@ mod test {
"})?;
let requirements =
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap();
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
.await
.unwrap();
insta::assert_debug_snapshot!(requirements, @r###"
RequirementsTxt {
@ -1419,8 +1545,8 @@ mod test {
Ok(())
}
#[test]
fn nested_conflicting_index_url() -> Result<()> {
#[tokio::test]
async fn nested_conflicting_index_url() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let requirements_txt = temp_dir.child("requirements.txt");
@ -1439,7 +1565,10 @@ mod test {
--index-url https://fake.pypi.org/simple
"})?;
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
let error =
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt =
@ -1477,8 +1606,8 @@ mod test {
);
}
#[test]
fn parser_error_line_and_column() -> Result<()> {
#[tokio::test]
async fn parser_error_line_and_column() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {"
@ -1487,7 +1616,10 @@ mod test {
tqdm
"})?;
let error = RequirementsTxt::parse(requirements_txt.path(), temp_dir.path()).unwrap_err();
let error =
RequirementsTxt::parse(requirements_txt.path(), temp_dir.path(), &registry_client())
.await
.unwrap_err();
let errors = anyhow::Error::new(error).chain().join("\n");
let requirement_txt =

View file

@ -177,6 +177,12 @@ impl RegistryClient {
self.timeout
}
/// Set the index URLs to use for fetching packages.
///
/// Consumes `self` and returns the updated client; all other
/// configuration carries over unchanged. Marked `#[must_use]` because
/// discarding the returned client would silently drop the new index URLs.
#[must_use]
pub fn with_index_url(self, index_urls: IndexUrls) -> Self {
Self { index_urls, ..self }
}
/// Fetch a package from the `PyPI` simple API.
///
/// "simple" here refers to [PEP 503 Simple Repository API](https://peps.python.org/pep-0503/)

View file

@ -1,4 +1,5 @@
use std::fmt::Display;
use std::io::Read;
use std::path::{Path, PathBuf};
use fs2::FileExt;
@ -12,35 +13,31 @@ pub use crate::path::*;
mod path;
/// Reads the contents of the file path given into memory.
/// Reads the contents of the file path into memory.
///
/// If the file path is `-`, then contents are read from stdin instead.
pub fn read(path: impl AsRef<Path>) -> std::io::Result<Vec<u8>> {
use std::io::Read;
pub async fn read(path: impl AsRef<Path>) -> std::io::Result<Vec<u8>> {
let path = path.as_ref();
if path == Path::new("-") {
let mut buf = Vec::with_capacity(1024);
std::io::stdin().read_to_end(&mut buf)?;
Ok(buf)
} else {
fs::read(path)
fs_err::tokio::read(path).await
}
}
/// Reads the contents of the file path given into memory as a `String`.
/// Reads the contents of the file path into memory as a `String`.
///
/// If the file path is `-`, then contents are read from stdin instead.
pub fn read_to_string(path: impl AsRef<Path>) -> std::io::Result<String> {
use std::io::Read;
pub async fn read_to_string(path: impl AsRef<Path>) -> std::io::Result<String> {
let path = path.as_ref();
if path == Path::new("-") {
let mut buf = String::with_capacity(1024);
std::io::stdin().read_to_string(&mut buf)?;
Ok(buf)
} else {
fs::read_to_string(path)
fs_err::tokio::read_to_string(path).await
}
}

View file

@ -9,6 +9,7 @@ use std::str::FromStr;
use anstream::{eprint, AutoStream, StripStream};
use anyhow::{anyhow, Context, Result};
use chrono::{DateTime, Utc};
use futures::future::OptionFuture;
use itertools::Itertools;
use owo_colors::OwoColorize;
use rustc_hash::FxHashSet;
@ -82,6 +83,11 @@ pub(crate) async fn pip_compile(
));
}
// Initialize the registry client.
let client = RegistryClientBuilder::new(cache.clone())
.connectivity(connectivity)
.build();
// Read all requirements from the provided sources.
let RequirementsSpecification {
project,
@ -94,11 +100,14 @@ pub(crate) async fn pip_compile(
no_index,
find_links,
extras: used_extras,
} = RequirementsSpecification::from_sources(requirements, constraints, overrides, &extras)?;
// Incorporate any index locations from the provided sources.
let index_locations =
index_locations.combine(index_url, extra_index_urls, find_links, no_index);
} = RequirementsSpecification::from_sources(
requirements,
constraints,
overrides,
&extras,
&client,
)
.await?;
// Check that all provided extras are used
if let ExtrasSpecification::Some(extras) = extras {
@ -117,28 +126,33 @@ pub(crate) async fn pip_compile(
}
}
let preferences: Vec<Requirement> = output_file
// As an optimization, skip reading the lockfile if we're upgrading all packages anyway.
.filter(|_| !upgrade.is_all())
.filter(|output_file| output_file.exists())
.map(Path::to_path_buf)
.map(RequirementsSource::from_path)
.as_ref()
.map(|source| RequirementsSpecification::from_source(source, &extras))
.transpose()?
.map(|spec| spec.requirements)
.map(|requirements| match upgrade {
// Respect all pinned versions from the existing lockfile.
Upgrade::None => requirements,
// Ignore all pinned versions from the existing lockfile.
Upgrade::All => vec![],
// Ignore pinned versions for the specified packages.
Upgrade::Packages(packages) => requirements
.into_iter()
.filter(|requirement| !packages.contains(&requirement.name))
.collect(),
})
.unwrap_or_default();
let preferences: Vec<Requirement> = OptionFuture::from(
output_file
// As an optimization, skip reading the lockfile if we're upgrading all packages anyway.
.filter(|_| !upgrade.is_all())
.filter(|output_file| output_file.exists())
.map(Path::to_path_buf)
.map(RequirementsSource::from_path)
.as_ref()
.map(|source| async {
RequirementsSpecification::from_source(source, &extras, &client).await
}),
)
.await
.transpose()?
.map(|spec| spec.requirements)
.map(|requirements| match upgrade {
// Respect all pinned versions from the existing lockfile.
Upgrade::None => requirements,
// Ignore all pinned versions from the existing lockfile.
Upgrade::All => vec![],
// Ignore pinned versions for the specified packages.
Upgrade::Packages(packages) => requirements
.into_iter()
.filter(|requirement| !packages.contains(&requirement.name))
.collect(),
})
.unwrap_or_default();
// Find an interpreter to use for building distributions
let platform = Platform::current()?;
@ -196,11 +210,13 @@ pub(crate) async fn pip_compile(
|python_version| Cow::Owned(python_version.markers(interpreter.markers())),
);
// Instantiate a client.
let client = RegistryClientBuilder::new(cache.clone())
.index_urls(index_locations.index_urls())
.connectivity(connectivity)
.build();
// Incorporate any index locations from the provided sources.
let index_locations =
index_locations.combine(index_url, extra_index_urls, find_links, no_index);
// Update the index URLs on the client, to take into account any index URLs added by the
// sources (e.g., `--index-url` in a `requirements.txt` file).
let client = client.with_index_url(index_locations.index_urls());
// Resolve the flat indexes from `--find-links`.
let flat_index = {

View file

@ -70,6 +70,11 @@ pub(crate) async fn pip_install(
) -> Result<ExitStatus> {
let start = std::time::Instant::now();
// Initialize the registry client.
let client = RegistryClientBuilder::new(cache.clone())
.connectivity(connectivity)
.build();
// Read all requirements from the provided sources.
let RequirementsSpecification {
project,
@ -82,11 +87,7 @@ pub(crate) async fn pip_install(
no_index,
find_links,
extras: used_extras,
} = specification(requirements, constraints, overrides, extras)?;
// Incorporate any index locations from the provided sources.
let index_locations =
index_locations.combine(index_url, extra_index_urls, find_links, no_index);
} = specification(requirements, constraints, overrides, extras, &client).await?;
// Check that all provided extras are used
if let ExtrasSpecification::Some(extras) = extras {
@ -169,11 +170,13 @@ pub(crate) async fn pip_install(
let tags = venv.interpreter().tags()?;
let markers = venv.interpreter().markers();
// Instantiate a client.
let client = RegistryClientBuilder::new(cache.clone())
.index_urls(index_locations.index_urls())
.connectivity(connectivity)
.build();
// Incorporate any index locations from the provided sources.
let index_locations =
index_locations.combine(index_url, extra_index_urls, find_links, no_index);
// Update the index URLs on the client, to take into account any index URLs added by the
// sources (e.g., `--index-url` in a `requirements.txt` file).
let client = client.with_index_url(index_locations.index_urls());
// Resolve the flat indexes from `--find-links`.
let flat_index = {
@ -316,11 +319,12 @@ pub(crate) async fn pip_install(
}
/// Consolidate the requirements for an installation.
fn specification(
async fn specification(
requirements: &[RequirementsSource],
constraints: &[RequirementsSource],
overrides: &[RequirementsSource],
extras: &ExtrasSpecification<'_>,
client: &RegistryClient,
) -> Result<RequirementsSpecification, Error> {
// If the user requests `extras` but does not provide a pyproject toml source
if !matches!(extras, ExtrasSpecification::None)
@ -332,8 +336,14 @@ fn specification(
}
// Read all requirements from the provided sources.
let spec =
RequirementsSpecification::from_sources(requirements, constraints, overrides, extras)?;
let spec = RequirementsSpecification::from_sources(
requirements,
constraints,
overrides,
extras,
client,
)
.await?;
// Check that all provided extras are used
if let ExtrasSpecification::Some(extras) = extras {

View file

@ -48,6 +48,11 @@ pub(crate) async fn pip_sync(
) -> Result<ExitStatus> {
let start = std::time::Instant::now();
// Initialize the registry client.
let client = RegistryClientBuilder::new(cache.clone())
.connectivity(connectivity)
.build();
// Read all requirements from the provided sources.
let RequirementsSpecification {
project: _project,
@ -60,7 +65,7 @@ pub(crate) async fn pip_sync(
no_index,
find_links,
extras: _extras,
} = RequirementsSpecification::from_simple_sources(sources)?;
} = RequirementsSpecification::from_simple_sources(sources, &client).await?;
let num_requirements = requirements.len() + editables.len();
if num_requirements == 0 {
@ -68,10 +73,6 @@ pub(crate) async fn pip_sync(
return Ok(ExitStatus::Success);
}
// Incorporate any index locations from the provided sources.
let index_locations =
index_locations.combine(index_url, extra_index_urls, find_links, no_index);
// Detect the current Python interpreter.
let platform = Platform::current()?;
let venv = if let Some(python) = python.as_ref() {
@ -108,11 +109,13 @@ pub(crate) async fn pip_sync(
// Determine the current environment markers.
let tags = venv.interpreter().tags()?;
// Prep the registry client.
let client = RegistryClientBuilder::new(cache.clone())
.index_urls(index_locations.index_urls())
.connectivity(connectivity)
.build();
// Incorporate any index locations from the provided sources.
let index_locations =
index_locations.combine(index_url, extra_index_urls, find_links, no_index);
// Update the index URLs on the client, to take into account any index URLs added by the
// sources (e.g., `--index-url` in a `requirements.txt` file).
let client = client.with_index_url(index_locations.index_urls());
// Resolve the flat indexes from `--find-links`.
let flat_index = {

View file

@ -7,6 +7,7 @@ use tracing::debug;
use distribution_types::{InstalledMetadata, Name};
use platform_host::Platform;
use uv_cache::Cache;
use uv_client::{Connectivity, RegistryClientBuilder};
use uv_fs::Simplified;
use uv_interpreter::PythonEnvironment;
@ -20,10 +21,16 @@ pub(crate) async fn pip_uninstall(
python: Option<String>,
system: bool,
cache: Cache,
connectivity: Connectivity,
printer: Printer,
) -> Result<ExitStatus> {
let start = std::time::Instant::now();
// Initialize the registry client.
let client: uv_client::RegistryClient = RegistryClientBuilder::new(cache.clone())
.connectivity(connectivity)
.build();
// Read all requirements from the provided sources.
let RequirementsSpecification {
project: _project,
@ -36,7 +43,7 @@ pub(crate) async fn pip_uninstall(
no_index: _no_index,
find_links: _find_links,
extras: _extras,
} = RequirementsSpecification::from_simple_sources(sources)?;
} = RequirementsSpecification::from_simple_sources(sources, &client).await?;
// Detect the current Python interpreter.
let platform = Platform::current()?;

View file

@ -861,6 +861,10 @@ struct PipUninstallArgs {
/// should be used with caution, as it can modify the system Python installation.
#[clap(long, conflicts_with = "python")]
system: bool,
/// Run offline, i.e., without accessing the network.
#[arg(global = true, long)]
offline: bool,
}
#[derive(Args)]
@ -1455,7 +1459,19 @@ async fn run() -> Result<ExitStatus> {
.map(RequirementsSource::from_path),
)
.collect::<Vec<_>>();
commands::pip_uninstall(&sources, args.python, args.system, cache, printer).await
commands::pip_uninstall(
&sources,
args.python,
args.system,
cache,
if args.offline {
Connectivity::Offline
} else {
Connectivity::Online
},
printer,
)
.await
}
Commands::Pip(PipNamespace {
command: PipCommand::Freeze(args),

View file

@ -12,6 +12,7 @@ use distribution_types::{FlatIndexLocation, IndexUrl};
use pep508_rs::Requirement;
use requirements_txt::{EditableRequirement, FindLink, RequirementsTxt};
use tracing::{instrument, Level};
use uv_client::RegistryClient;
use uv_fs::Simplified;
use uv_normalize::{ExtraName, PackageName};
@ -138,9 +139,10 @@ pub(crate) struct RequirementsSpecification {
impl RequirementsSpecification {
/// Read the requirements and constraints from a source.
#[instrument(skip_all, level = Level::DEBUG, fields(source = % source))]
pub(crate) fn from_source(
pub(crate) async fn from_source(
source: &RequirementsSource,
extras: &ExtrasSpecification,
extras: &ExtrasSpecification<'_>,
client: &RegistryClient,
) -> Result<Self> {
Ok(match source {
RequirementsSource::Package(name) => {
@ -176,7 +178,8 @@ impl RequirementsSpecification {
}
}
RequirementsSource::RequirementsTxt(path) => {
let requirements_txt = RequirementsTxt::parse(path, std::env::current_dir()?)?;
let requirements_txt =
RequirementsTxt::parse(path, std::env::current_dir()?, client).await?;
Self {
project: None,
requirements: requirements_txt
@ -206,7 +209,7 @@ impl RequirementsSpecification {
}
}
RequirementsSource::PyprojectToml(path) => {
let contents = uv_fs::read_to_string(path)?;
let contents = uv_fs::read_to_string(path).await?;
let pyproject_toml = toml::from_str::<pyproject_toml::PyProjectToml>(&contents)
.with_context(|| format!("Failed to parse `{}`", path.simplified_display()))?;
let mut used_extras = FxHashSet::default();
@ -273,11 +276,12 @@ impl RequirementsSpecification {
}
/// Read the combined requirements and constraints from a set of sources.
pub(crate) fn from_sources(
pub(crate) async fn from_sources(
requirements: &[RequirementsSource],
constraints: &[RequirementsSource],
overrides: &[RequirementsSource],
extras: &ExtrasSpecification,
extras: &ExtrasSpecification<'_>,
client: &RegistryClient,
) -> Result<Self> {
let mut spec = Self::default();
@ -285,7 +289,7 @@ impl RequirementsSpecification {
// A `requirements.txt` can contain a `-c constraints.txt` directive within it, so reading
// a requirements file can also add constraints.
for source in requirements {
let source = Self::from_source(source, extras)?;
let source = Self::from_source(source, extras, client).await?;
spec.requirements.extend(source.requirements);
spec.constraints.extend(source.constraints);
spec.overrides.extend(source.overrides);
@ -312,7 +316,7 @@ impl RequirementsSpecification {
// Read all constraints, treating _everything_ as a constraint.
for source in constraints {
let source = Self::from_source(source, extras)?;
let source = Self::from_source(source, extras, client).await?;
spec.constraints.extend(source.requirements);
spec.constraints.extend(source.constraints);
spec.constraints.extend(source.overrides);
@ -332,7 +336,7 @@ impl RequirementsSpecification {
// Read all overrides, treating both requirements _and_ constraints as overrides.
for source in overrides {
let source = Self::from_source(source, extras)?;
let source = Self::from_source(source, extras, client).await?;
spec.overrides.extend(source.requirements);
spec.overrides.extend(source.constraints);
spec.overrides.extend(source.overrides);
@ -354,8 +358,11 @@ impl RequirementsSpecification {
}
/// Read the requirements from a set of sources.
pub(crate) fn from_simple_sources(requirements: &[RequirementsSource]) -> Result<Self> {
Self::from_sources(requirements, &[], &[], &ExtrasSpecification::None)
pub(crate) async fn from_simple_sources(
requirements: &[RequirementsSource],
client: &RegistryClient,
) -> Result<Self> {
Self::from_sources(requirements, &[], &[], &ExtrasSpecification::None, client).await
}
}

View file

@ -115,7 +115,7 @@ fn missing_requirements_in() {
----- stdout -----
----- stderr -----
error: failed to open file `requirements.in`
error: failed to read from file `requirements.in`
Caused by: No such file or directory (os error 2)
"###
);
@ -142,7 +142,7 @@ fn missing_venv() -> Result<()> {
----- stdout -----
----- stderr -----
error: failed to open file `requirements.in`
error: failed to read from file `requirements.in`
Caused by: No such file or directory (os error 2)
"###
);

View file

@ -100,7 +100,7 @@ fn missing_requirements_txt() {
----- stdout -----
----- stderr -----
error: failed to open file `requirements.txt`
error: failed to read from file `requirements.txt`
Caused by: No such file or directory (os error 2)
"###
);
@ -1509,6 +1509,73 @@ fn install_constraints_inline() -> Result<()> {
Ok(())
}
/// Install a package from a `constraints.txt` file on a remote http server.
///
/// Passes an `https://` URL (not a local path) to `-c`, exercising the remote
/// requirements/constraints file support added in this change.
/// NOTE(review): this fetches a live file from raw.githubusercontent.com, so the
/// test needs network access and depends on the pinned airflow constraints branch
/// remaining unchanged — TODO confirm this is acceptable for CI.
#[test]
fn install_constraints_remote() {
let context = TestContext::new("3.12");
// The remote constraints file pins typing-extensions to 4.7.1, so the resolver
// must select that version even though the requirement itself allows >=4.0.
uv_snapshot!(command(&context)
.arg("-c")
.arg("https://raw.githubusercontent.com/apache/airflow/constraints-2-6/constraints-3.11.txt")
.arg("typing_extensions>=4.0"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Downloaded 1 package in [TIME]
Installed 1 package in [TIME]
+ typing-extensions==4.7.1
"###
); // would yield typing-extensions==4.8.2 without constraint file
}
/// Install a package from a `requirements.txt` file, with an inline constraint, which points
/// to a remote http server.
///
/// Unlike `install_constraints_remote`, the URL is not given on the CLI: it appears
/// as a `-c https://…` directive *inside* a local requirements file, exercising the
/// recursive requirements-txt parser's handling of remote includes.
/// NOTE(review): depends on live network access to raw.githubusercontent.com —
/// TODO confirm this is acceptable for CI.
#[test]
fn install_constraints_inline_remote() -> Result<()> {
let context = TestContext::new("3.12");
let requirementstxt = context.temp_dir.child("requirements.txt");
// Requirement plus an inline remote constraint; the remote file pins
// typing-extensions to 4.7.1, which the resolver must honor.
requirementstxt.write_str("typing-extensions>=4.0\n-c https://raw.githubusercontent.com/apache/airflow/constraints-2-6/constraints-3.11.txt")?;
uv_snapshot!(command(&context)
.arg("-r")
.arg("requirements.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Downloaded 1 package in [TIME]
Installed 1 package in [TIME]
+ typing-extensions==4.7.1
"### // would yield typing-extensions==4.8.2 without constraint file
);
Ok(())
}
/// A remote `-r` URL combined with `--offline` must fail with a clear error
/// rather than silently hitting the network: the offline connectivity setting
/// is expected to propagate into the client used for fetching remote
/// requirements files (exit code 2, middleware "connectivity is disabled" error).
/// No network is actually contacted, so this test is safe to run offline.
#[test]
fn install_constraints_respects_offline_mode() {
let context = TestContext::new("3.12");
uv_snapshot!(command(&context)
.arg("--offline")
.arg("-r")
.arg("http://example.com/requirements.txt"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Error while accessing remote requirements file http://example.com/requirements.txt: Middleware error: Network connectivity is disabled, but the requested data wasn't found in the cache for: `http://example.com/requirements.txt`
Caused by: Network connectivity is disabled, but the requested data wasn't found in the cache for: `http://example.com/requirements.txt`
"###
);
}
/// Tests that we can install `polars==0.14.0`, which has this odd dependency
/// requirement in its wheel metadata: `pyarrow>=4.0.*; extra == 'pyarrow'`.
///

View file

@ -124,7 +124,7 @@ fn missing_requirements_txt() {
----- stdout -----
----- stderr -----
error: failed to open file `requirements.txt`
error: failed to read from file `requirements.txt`
Caused by: No such file or directory (os error 2)
"###);
@ -151,7 +151,7 @@ fn missing_venv() -> Result<()> {
----- stdout -----
----- stderr -----
error: failed to open file `requirements.txt`
error: failed to read from file `requirements.txt`
Caused by: No such file or directory (os error 2)
"###);

View file

@ -75,7 +75,7 @@ fn missing_requirements_txt() -> Result<()> {
----- stdout -----
----- stderr -----
error: failed to open file `requirements.txt`
error: failed to read from file `requirements.txt`
Caused by: No such file or directory (os error 2)
"###
);
@ -125,7 +125,7 @@ fn missing_pyproject_toml() -> Result<()> {
----- stdout -----
----- stderr -----
error: failed to open file `pyproject.toml`
error: failed to read from file `pyproject.toml`
Caused by: No such file or directory (os error 2)
"###
);