uv/crates/uv/tests/pip_compile.rs
Charlie Marsh c7d7b07408
Add test for extra-in-constraint (#3231)
## Summary

We have the wrong behavior here, so starting by adding a test for it.
2024-04-23 21:52:41 -04:00

#![cfg(all(feature = "python", feature = "pypi"))]
#![cfg_attr(feature = "cargo-clippy", allow(clippy::disallowed_types))]
use std::env::current_dir;
use std::fs;
use std::path::PathBuf;
use std::process::Command;
use anyhow::{bail, Context, Result};
use assert_fs::prelude::*;
use assert_fs::TempDir;
use indoc::indoc;
use url::Url;
use common::{uv_snapshot, TestContext};
use uv_fs::Simplified;
use crate::common::get_bin;
mod common;
/// Resolve a specific version of `anyio` from a `requirements.in` file.
#[test]
fn compile_requirements_in() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio==3.7.0")?;
uv_snapshot!(context
.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio==3.7.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
Ok(())
}
/// Resolve a specific version of `anyio` from a `requirements.in` file using the `--annotation-style=line` flag.
#[test]
fn compile_requirements_in_annotation_line() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio==3.7.0")?;
uv_snapshot!(context
.compile()
.arg("--annotation-style=line")
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z --annotation-style=line requirements.in
anyio==3.7.0
idna==3.6 # via anyio
sniffio==1.3.1 # via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
Ok(())
}
/// Resolve a specific version of `anyio` from a `requirements.in` file on stdin
/// when passed a path of `-`.
#[test]
fn compile_requirements_in_stdin() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio==3.7.0")?;
uv_snapshot!(context
.compile()
.stdin(fs::File::open(requirements_in)?)
.arg("-"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z -
anyio==3.7.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
Ok(())
}
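/// Fail to resolve when the `requirements.in` file does not exist.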
#[test]
fn missing_requirements_in() {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: failed to read from file `requirements.in`
Caused by: No such file or directory (os error 2)
"###
);
requirements_in.assert(predicates::path::missing());
}
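/// Fail to resolve when `requirements.in` is missing; the nonexistent virtual environment referenced by
/// `VIRTUAL_ENV` should not be created as a side effect.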
#[test]
fn missing_venv() -> Result<()> {
let temp_dir = TempDir::new()?;
let cache_dir = TempDir::new()?;
let venv = temp_dir.child(".venv");
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("compile")
.arg("requirements.in")
.arg("--cache-dir")
.arg(cache_dir.path())
.env("VIRTUAL_ENV", venv.as_os_str())
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: failed to read from file `requirements.in`
Caused by: No such file or directory (os error 2)
"###
);
venv.assert(predicates::path::missing());
Ok(())
}
/// Resolve a specific version of `anyio` from a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = [
"anyio==3.7.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z pyproject.toml
anyio==3.7.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file.
#[test]
fn compile_constraints_txt() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio==3.7.0")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("idna<3.4")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --constraint constraints.txt
anyio==3.7.0
idna==3.3
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with an inline constraint.
#[test]
fn compile_constraints_inline() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio==3.7.0")?;
requirements_in.write_str("-c constraints.txt")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("idna<3.4")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
----- stderr -----
Resolved 0 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file that
/// uses markers.
#[test]
fn compile_constraints_markers() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
// Constrain a transitive dependency based on the Python version
let constraints_txt = context.temp_dir.child("constraints.txt");
// If the markers on the constraints are ignored, these two pins will conflict
constraints_txt.write_str("sniffio==1.2.0;python_version<='3.7'\nsniffio==1.3.0;python_version>'3.7'")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --constraint constraints.txt
anyio==4.3.0
idna==3.6
# via anyio
sniffio==1.3.0
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file that uses an
/// extra. The constraint should be enforced, but the extra should _not_ be included in the output
/// (though it currently _is_ included).
#[test]
fn compile_constraint_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask")?;
// Constrain `flask` itself, via a constraint that requests an extra
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("flask[dotenv]<24.3.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --constraint constraints.txt
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask==3.0.2
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
python-dotenv==1.0.1
# via flask
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 8 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from an optional dependency group in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("foo"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z pyproject.toml --extra foo
anyio==3.7.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from an extra with non-normalized names in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra_name_normalization() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = []
optional-dependencies."FrIeNdLy-._.-bArD" = [
"anyio==3.7.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("FRiENDlY-...-_-BARd"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z pyproject.toml --extra FRiENDlY-...-_-BARd
anyio==3.7.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Request an extra that does not exist as a dependency group in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra_missing() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("bar"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requested extra not found: bar
"###
);
Ok(())
}
/// Compile a `pyproject.toml` file with a `poetry` section.
#[test]
fn compile_pyproject_toml_poetry() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[tool.poetry]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]
[tool.poetry.dependencies]
python = "^3.10"
anyio = "^3"
pytest = { version = "*", optional = true }
[tool.poetry.extras]
test = ["pytest"]
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("test"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z pyproject.toml --extra test
anyio==3.7.1
idna==3.6
# via anyio
iniconfig==2.0.0
# via pytest
packaging==24.0
# via pytest
pluggy==1.4.0
# via pytest
pytest==8.1.1
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Compile a `pyproject.toml` file that uses setuptools as the build backend.
#[test]
fn compile_pyproject_toml_setuptools() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
"#,
)?;
let setup_cfg = context.temp_dir.child("setup.cfg");
setup_cfg.write_str(
r#"[options]
packages = find:
install_requires=
anyio
[options.extras_require]
dev =
iniconfig; python_version >= "3.7"
mypy; python_version <= "3.8"
"#,
)?;
let setup_py = context.temp_dir.child("setup.py");
setup_py.write_str(
r#"# setup.py
from setuptools import setup
setup(
name="dummypkg",
description="A dummy package",
)
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("dev"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z pyproject.toml --extra dev
anyio==4.3.0
idna==3.6
# via anyio
iniconfig==2.0.0
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Compile a `setup.cfg` file.
#[test]
fn compile_setup_cfg() -> Result<()> {
let context = TestContext::new("3.12");
let setup_cfg = context.temp_dir.child("setup.cfg");
setup_cfg.write_str(
r#"[options]
packages = find:
install_requires=
anyio
[options.extras_require]
dev =
iniconfig; python_version >= "3.7"
mypy; python_version <= "3.8"
"#,
)?;
let setup_py = context.temp_dir.child("setup.py");
setup_py.write_str(
r#"# setup.py
from setuptools import setup
setup(
name="dummypkg",
description="A dummy package",
)
"#,
)?;
uv_snapshot!(context.compile()
.arg("setup.cfg")
.arg("--extra")
.arg("dev"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z setup.cfg --extra dev
anyio==4.3.0
idna==3.6
# via anyio
iniconfig==2.0.0
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Compile a `setup.py` file.
#[test]
fn compile_setup_py() -> Result<()> {
let context = TestContext::new("3.12");
let setup_py = context.temp_dir.child("setup.py");
setup_py.write_str(
r#"# setup.py
from setuptools import setup
setup(
name="dummypkg",
description="A dummy package",
install_requires=["anyio"],
extras_require={
"dev": ["iniconfig; python_version >= '3.7'", "mypy; python_version <= '3.8'"],
},
)
"#,
)?;
uv_snapshot!(context.compile()
.arg("setup.py")
.arg("--extra")
.arg("dev"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z setup.py --extra dev
anyio==4.3.0
idna==3.6
# via anyio
iniconfig==2.0.0
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a `pyproject.toml` file with an invalid project name.
#[test]
fn compile_pyproject_toml_invalid_name() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "!project"
dependencies = [
"anyio==3.7.0",
]
"#,
)?;
uv_snapshot!(context.filters(), context.compile()
.arg("pyproject.toml"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse `pyproject.toml`
Caused by: TOML parse error at line 5, column 8
|
5 | name = "!project"
| ^^^^^^^^^^
Not a valid package or extra name: "!project". Names must start and end with a letter or digit and may only contain -, _, ., and alphanumeric characters.
"###
);
Ok(())
}
/// Request multiple extras that do not exist as dependency groups in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extras_missing() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("foo")
.arg("--extra")
.arg("bar")
.arg("--extra")
.arg("foobar"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requested extras not found: bar, foobar
"###
);
Ok(())
}
/// Request extras when using a `requirements.in` file which does not support extras.
#[test]
fn compile_requirements_file_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio==3.7.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--all-extras"),
@r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requesting extras requires a `pyproject.toml`, `setup.cfg`, or `setup.py` file.
"###
);
Ok(())
}
/// Request an extra with a name that does not conform to the specification.
#[test]
fn invalid_extra_name() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("invalid name!"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value 'invalid name!' for '--extra <EXTRA>': Extra names must start and end with a letter or digit and may only contain -, _, ., and alphanumeric characters
For more information, try '--help'.
"###
);
Ok(())
}
/// Resolve a specific version of Black at Python 3.12.
#[test]
fn compile_python_312() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--python-version")
.arg("3.12"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --python-version 3.12
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific version of Black at Python 3.12 with `--annotation-style=line`.
#[test]
fn compile_python_312_annotation_line() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("--annotation-style=line")
.arg("requirements.in")
.arg("--python-version")
.arg("3.12"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z --annotation-style=line requirements.in --python-version 3.12
black==23.10.1
click==8.1.7 # via black
mypy-extensions==1.0.0 # via black
packaging==24.0 # via black
pathspec==0.12.1 # via black
platformdirs==4.2.0 # via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific version of Black at Python 3.12 without deps.
#[test]
fn compile_python_312_no_deps() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-deps")
.arg("--python-version")
.arg("3.12"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --no-deps --python-version 3.12
black==23.10.1
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a specific version of Black at Python 3.7.
#[test]
fn compile_python_37() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
let filters: Vec<_> = [
// 3.7 may not be installed
(
"warning: The requested Python version 3.7 is not available; .* will be used to build dependencies instead.\n",
"",
),
]
.into_iter()
.chain(context.filters())
.collect();
uv_snapshot!(filters, context.compile()
.arg("requirements.in")
.arg("--python-version")
.arg("3.7"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because the requested Python version (3.7) does not satisfy Python>=3.8 and black==23.10.1 depends on Python>=3.8, we can conclude that black==23.10.1 cannot be used.
And because you require black==23.10.1, we can conclude that the requirements are unsatisfiable.
"###);
Ok(())
}
/// Resolve a source distribution with `--resolution=lowest-direct`, to ensure that the build
/// requirements aren't resolved at their lowest compatible version.
#[test]
fn compile_sdist_resolution_lowest() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--resolution=lowest-direct")
.arg("--python-version")
.arg("3.12"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --resolution=lowest-direct --python-version 3.12
anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific version of Black against an invalid Python version.
#[test]
fn compile_python_invalid_version() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--python-version")
.arg("3.7.x"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value '3.7.x' for '--python-version <PYTHON_VERSION>': after parsing 3.7, found ".x" after it, which is not part of a valid version
For more information, try '--help'.
"###
);
Ok(())
}
/// Resolve a specific version of Black against a development release of Python.
#[test]
fn compile_python_dev_version() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--python-version")
.arg("3.7-dev"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value '3.7-dev' for '--python-version <PYTHON_VERSION>': Python version 3.7-dev is a development release
For more information, try '--help'.
"###
);
Ok(())
}
/// Test that we select the latest numpy version compatible with Python 3.8, rather than attempting to build an
/// incompatible source distribution. See: <https://github.com/astral-sh/uv/issues/388>
#[test]
fn compile_numpy_py38() -> Result<()> {
let context = TestContext::new("3.8");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("numpy")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-build"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --no-build
numpy==1.24.4
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a specific Flask wheel via a URL dependency.
#[test]
fn compile_wheel_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific Flask source distribution via a URL dependency.
///
/// Exercises the `prepare_metadata_for_build_wheel` hooks.
#[test]
fn compile_sdist_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a specific source distribution via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_https_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(
"uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage",
)?;
// In addition to the standard filters, remove the `main` commit, which will change frequently.
let filters: Vec<_> = [(r"@(\d|\w){40}", "@[COMMIT]")]
.into_iter()
.chain(context.filters())
.collect();
uv_snapshot!(filters, context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@[COMMIT]
----- stderr -----
Resolved 1 package in [TIME]
"###);
Ok(())
}
/// Resolve a specific branch via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_branch_https_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(
"uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@test-branch",
)?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a specific tag via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_tag_https_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(
"uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@test-tag",
)?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a specific tag via a Git HTTPS dependency.
///
/// In this case, the tag is a date, and thus could plausibly be mistaken for a short commit hash.
#[test]
#[cfg(feature = "git")]
fn compile_git_date_tag_https_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(
"uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@20240402",
)?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a specific commit via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_long_commit_https_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(
"uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979",
)?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a specific commit via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_short_commit_https_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(
"uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd6",
)?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a specific ref via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_refs_https_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in
.write_str("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@refs/pull/4/head")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@9d01a806f17ddacb9c7b66b1b68574adf790b63f
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a specific Git dependency with a subdirectory.
#[test]
#[cfg(feature = "git")]
fn compile_git_subdirectory_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve two packages from a `requirements.in` file that share the same Git HTTPS repository (different subdirectories).
#[test]
#[cfg(feature = "git")]
fn compile_git_concurrent_access() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in
.write_str("example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a\nexample-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a
example-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Resolve two unnamed URL requirements from a `requirements.in` file that share the same Git HTTPS repository.
#[test]
#[cfg(feature = "git")]
fn compile_git_unnamed_concurrent_access() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in
.write_str("git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a\ngit+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a
example-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a Git dependency with a declared name that differs from the true name of the package.
#[test]
#[cfg(feature = "git")]
fn compile_git_mismatched_name() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in
.write_str("flask @ git+https://github.com/pallets/flask.git@2.0.0\ndask @ git+https://github.com/pallets/flask.git@3.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to download and build: `dask @ git+https://github.com/pallets/flask.git@3.0.0`
Caused by: Package metadata name `flask` does not match given name `dask`
"###
);
Ok(())
}
/// Resolve a specific Git dependency with a subdirectory, where the root directory contains a
/// static `pyproject.toml` file.
#[test]
fn compile_git_subdirectory_static_metadata() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("uv-public-pypackage @ git+https://github.com/astral-test/uv-workspace-pypackage#subdirectory=uv-public-pypackage")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
uv-public-pypackage @ git+https://github.com/astral-test/uv-workspace-pypackage#subdirectory=uv-public-pypackage@b8c4e192456d736c27f2c84c61175c896dba8373
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Request Flask, but include a URL dependency for Werkzeug, which should avoid adding a
/// duplicate dependency from `PyPI`.
#[test]
fn mixed_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask==3.0.0
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Request Werkzeug via both a version and a URL dependency at a _different_ version, which
/// should result in a conflict.
#[test]
fn conflicting_direct_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because there is no version of werkzeug==3.0.0 and you require werkzeug==3.0.0, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Request Werkzeug via both a version and a URL dependency at _the same_ version, which
/// should prefer the direct URL dependency.
#[test]
fn compatible_direct_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug==2.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Request Werkzeug via two different URLs at different versions, which should result in a conflict.
#[test]
fn conflicting_repeated_url_dependency_version_mismatch() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug @ https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requirements contain conflicting URLs for package `werkzeug`:
- https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl
- https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl
"###
);
Ok(())
}
/// Request Werkzeug via two different URLs at different versions. However, only one of the
/// URLs is compatible with the requested Python version, so there shouldn't be any conflict.
#[test]
fn conflicting_repeated_url_dependency_markers() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
werkzeug @ https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl ; python_version >= '3.10'
werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl ; python_version < '3.10'
"})?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
werkzeug @ https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Request Werkzeug via two different URLs at the same version. Despite mapping to the same
/// version, it should still result in a conflict.
#[test]
#[cfg(feature = "git")]
fn conflicting_repeated_url_dependency_version_match() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@2.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requirements contain conflicting URLs for package `werkzeug`:
- git+https://github.com/pallets/werkzeug.git@2.0.0
- https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl
"###
);
Ok(())
}
/// Request Flask, but include a URL dependency for a conflicting version of Werkzeug.
#[test]
fn conflicting_transitive_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because only werkzeug<3.0.0 is available and flask==3.0.0 depends on werkzeug>=3.0.0, we can conclude that flask==3.0.0 cannot be used.
And because you require flask==3.0.0, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Request `anyio` via two different URLs which resolve to the same canonical version.
#[test]
fn compatible_repeated_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
anyio @ git+https://github.com/agronholm/anyio.git@4.3.0
anyio @ git+https://github.com/agronholm/anyio@4.3.0
"})?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio @ git+https://github.com/agronholm/anyio@437a7e310925a962cab4a58fcd2455fbcd578d51
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Request `anyio` via two different URLs which resolve to the same repository, but different
/// commits.
#[test]
fn conflicting_repeated_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
anyio @ git+https://github.com/agronholm/anyio.git@4.3.0
anyio @ git+https://github.com/agronholm/anyio.git@4.0.0
"})?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requirements contain conflicting URLs for package `anyio`:
- git+https://github.com/agronholm/anyio.git@4.3.0
- git+https://github.com/agronholm/anyio.git@4.0.0
"###
);
Ok(())
}
/// Request `anyio` via three different URLs: `4.3.0`, a short SHA, and a precise SHA. All three
/// are compatible, since they resolve to the same canonical version.
#[test]
fn compatible_narrowed_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
anyio @ git+https://github.com/agronholm/anyio.git@4.3.0
anyio @ git+https://github.com/agronholm/anyio@437a7e31
anyio @ git+https://github.com/agronholm/anyio@437a7e310925a962cab4a58fcd2455fbcd578d51
"})?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio @ git+https://github.com/agronholm/anyio@437a7e310925a962cab4a58fcd2455fbcd578d51
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Request `anyio` via three different URLs: a precise SHA, a short SHA, and `4.3.0`. All three
/// are compatible, since they resolve to the same canonical version.
#[test]
fn compatible_broader_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
anyio @ git+https://github.com/agronholm/anyio@437a7e310925a962cab4a58fcd2455fbcd578d51
anyio @ git+https://github.com/agronholm/anyio@437a7e31
anyio @ git+https://github.com/agronholm/anyio.git@4.3.0
"})?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio @ git+https://github.com/agronholm/anyio.git@437a7e310925a962cab4a58fcd2455fbcd578d51
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Request `anyio` three times, via two distinct URLs: `4.3.0`, then a precise SHA, then `4.3.0` again.
/// All three are compatible, since they resolve to the same canonical version.
#[test]
fn compatible_repeated_narrowed_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
anyio @ git+https://github.com/agronholm/anyio.git@4.3.0
anyio @ git+https://github.com/agronholm/anyio@437a7e310925a962cab4a58fcd2455fbcd578d51
anyio @ git+https://github.com/agronholm/anyio.git@4.3.0
"})?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio @ git+https://github.com/agronholm/anyio.git@437a7e310925a962cab4a58fcd2455fbcd578d51
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Request `anyio` via three different URLs: `4.3.0`, a precise SHA, and `master`.
///
/// Although `4.3.0` and the precise SHA resolve to the same canonical version, `master` resolves to
/// a different version, so there should be a conflict.
#[test]
fn incompatible_narrowed_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
anyio @ git+https://github.com/agronholm/anyio.git@4.3.0
anyio @ git+https://github.com/agronholm/anyio@437a7e310925a962cab4a58fcd2455fbcd578d51
anyio @ git+https://github.com/agronholm/anyio.git@master
"})?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requirements contain conflicting URLs for package `anyio`:
- git+https://github.com/agronholm/anyio@437a7e310925a962cab4a58fcd2455fbcd578d51
- git+https://github.com/agronholm/anyio.git@master
"###
);
Ok(())
}
/// Request `hatchling_editable`, which depends on `iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4`.
#[test]
#[cfg(feature = "git")]
fn allowed_transitive_git_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("hatchling_editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
hatchling-editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip
iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
# via hatchling-editable
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Request `hatchling_editable`, which depends on `iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4`.
/// Since this URL is declared as a constraint, we should accept it.
#[test]
#[cfg(feature = "git")]
fn allowed_transitive_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("hatchling_editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --constraint constraints.txt
hatchling-editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip
iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
# via hatchling-editable
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Request `hatchling_editable`, which depends on `iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4`.
/// Since `iniconfig @ git+https://github.com/pytest-dev/iniconfig.git@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4` is declared as a constraint, and
/// the two map to the same canonical URL, we should accept it.
#[test]
#[cfg(feature = "git")]
fn allowed_transitive_canonical_url_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("hatchling_editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("iniconfig @ git+https://github.com/pytest-dev/iniconfig.git@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --constraint constraints.txt
hatchling-editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip
iniconfig @ git+https://github.com/pytest-dev/iniconfig.git@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
# via hatchling-editable
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Request `hatchling_editable`, which depends on `iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4`.
/// Since `hatchling_editable` is a local (path) dependency, we should accept its transitive URL.
#[test]
#[cfg(feature = "git")]
fn allowed_transitive_url_path_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("hatchling_editable @ ${HATCH_PATH}")?;
let hatchling_path = current_dir()?.join("../../scripts/packages/hatchling_editable");
uv_snapshot!(context.compile()
.arg("requirements.in")
.env("HATCH_PATH", hatchling_path.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
hatchling-editable @ ${HATCH_PATH}
iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
# via hatchling-editable
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// A dependency with conflicting URLs in `requirements.in` and `constraints.txt` should be ignored
/// if the dependency has an override.
#[test]
fn requirement_constraint_override_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("anyio==3.7.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt")
.arg("--override")
.arg("overrides.txt"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because there is no version of anyio==3.7.0 and you require anyio==3.7.0, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// A dependency that uses a pre-release marker in `requirements.in` should be overridden by a
/// non-pre-release version in `overrides.txt`. We should _not_ allow Flask to be resolved to
/// a pre-release version.
#[test]
fn requirement_override_prerelease() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask<2.0.0rc4")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("flask<2.0.1,!=2.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --override overrides.txt
click==7.1.2
# via flask
flask==1.1.4
itsdangerous==1.1.0
# via flask
jinja2==2.11.3
# via flask
markupsafe==2.1.5
# via jinja2
werkzeug==1.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve packages from all optional dependency groups in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_all_extras() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = ["anyio==3.7.0"]
optional-dependencies.foo = [
"iniconfig==1.1.1",
]
optional-dependencies.bar = [
"httpcore==0.18.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--all-extras"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z pyproject.toml --all-extras
anyio==3.7.0
# via httpcore
certifi==2024.2.2
# via httpcore
h11==0.14.0
# via httpcore
httpcore==0.18.0
idna==3.6
# via anyio
iniconfig==1.1.1
sniffio==1.3.1
# via
# anyio
# httpcore
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
#[test]
fn compile_pyproject_toml_all_extras_annotation_line() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = ["anyio==3.7.0"]
optional-dependencies.foo = [
"iniconfig==1.1.1",
]
optional-dependencies.bar = [
"httpcore==0.18.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("--annotation-style=line")
.arg("pyproject.toml")
.arg("--all-extras"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z --annotation-style=line pyproject.toml --all-extras
anyio==3.7.0 # via httpcore
certifi==2024.2.2 # via httpcore
h11==0.14.0 # via httpcore
httpcore==0.18.0
idna==3.6 # via anyio
iniconfig==1.1.1
sniffio==1.3.1 # via anyio, httpcore
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Ensure that `--all-extras` cannot be used together with `--extra`.
#[test]
fn compile_does_not_allow_both_extra_and_all_extras() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = ["anyio==3.7.0"]
optional-dependencies.foo = [
"iniconfig==1.1.1",
]
optional-dependencies.bar = [
"httpcore==0.18.0",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--all-extras")
.arg("--extra")
.arg("foo"),
@r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: the argument '--all-extras' cannot be used with '--extra <EXTRA>'
Usage: uv pip compile --cache-dir [CACHE_DIR] --exclude-newer <EXCLUDE_NEWER> --all-extras <SRC_FILE>...
For more information, try '--help'.
"###
);
Ok(())
}
/// Compile requirements that cannot be solved due to a conflict in a `pyproject.toml` file.
#[test]
fn compile_unsolvable_requirements() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "my-project"
dependencies = ["anyio==3.7.0", "anyio==4.0.0"]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because my-project depends on anyio==3.7.0 and my-project depends on anyio==4.0.0, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Compile requirements in a `pyproject.toml` file that cannot be resolved due to
/// a requirement with a version that is not available online.
#[test]
fn compile_unsolvable_requirements_version_not_available() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "my-project"
dependencies = ["anyio==300.1.4"]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because there is no version of anyio==300.1.4 and my-project depends on anyio==300.1.4, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Resolve packages as of a specific time in the past, using `--exclude-newer`.
#[test]
fn compile_exclude_newer() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("tqdm")?;
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("compile")
.arg("requirements.in")
.arg("--exclude-newer")
        // 4.64.0: 2022-04-04T01:48:46.194635Z
// 4.64.1: 2022-09-03T11:10:27.148080Z
.arg("2022-04-04T12:00:00Z")
.arg("--cache-dir")
.arg(context.cache_dir.path())
.env("VIRTUAL_ENV", context.venv.as_os_str())
.env("UV_NO_WRAP", "1")
.current_dir(context.temp_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --exclude-newer 2022-04-04T12:00:00Z --cache-dir [CACHE_DIR]
tqdm==4.64.0
----- stderr -----
Resolved 1 package in [TIME]
"###
);
// Use a date as input instead.
    // A bare date is interpreted as including that entire day
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("compile")
.arg("requirements.in")
.arg("--exclude-newer")
.arg("2022-04-04")
.arg("--cache-dir")
.arg(context.cache_dir.path())
.env("VIRTUAL_ENV", context.venv.as_os_str())
.env("UV_NO_WRAP", "1")
.current_dir(context.temp_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --exclude-newer 2022-04-04 --cache-dir [CACHE_DIR]
tqdm==4.64.0
----- stderr -----
Resolved 1 package in [TIME]
"###
);
// Check the error message for invalid datetime
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("compile")
.arg("requirements.in")
.arg("--exclude-newer")
.arg("2022-04-04+02:00")
.arg("--cache-dir")
.arg(context.cache_dir.path())
.env("VIRTUAL_ENV", context.venv.as_os_str())
.env("UV_NO_WRAP", "1")
.current_dir(context.temp_dir.path()), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value '2022-04-04+02:00' for '--exclude-newer <EXCLUDE_NEWER>': `2022-04-04+02:00` is neither a valid date (trailing input) nor a valid datetime (input contains invalid characters)
For more information, try '--help'.
"###
);
Ok(())
}
/// Resolve a local path dependency on a specific wheel.
#[test]
fn compile_wheel_path_dependency() -> Result<()> {
let context = TestContext::new("3.12");
// Download a wheel.
let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
let flask_wheel = context.temp_dir.child("flask-3.0.0-py3-none-any.whl");
let mut flask_wheel_file = fs::File::create(&flask_wheel)?;
std::io::copy(&mut response.bytes()?.as_ref(), &mut flask_wheel_file)?;
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&format!(
"flask @ {}",
Url::from_file_path(flask_wheel.path()).unwrap()
))?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ file://[TEMP_DIR]/flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###);
// Run the same operation, but this time with a relative path, omitting the `//`.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ file:flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ file:flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
// Run the same operation, but this time with a relative path, including the `//`.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ file://flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ file://flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
// Run the same operation, but this time with a relative path, exclusive of any scheme.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ ./flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ ./flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
// Run the same operation, but this time with an absolute path (rather than a URL).
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&format!("flask @ {}", flask_wheel.path().display()))?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ [TEMP_DIR]/flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
// Run the same operation, but this time with an absolute path (rather than a URL), including
// the `file://` prefix.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&format!("flask @ file://{}", flask_wheel.path().display()))?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ file://[TEMP_DIR]/flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
// Run the same operation, but this time with an absolute path (rather than a URL), including
// the `file://localhost/` prefix.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&format!(
"flask @ file://localhost/{}",
flask_wheel.path().display()
))?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ file://localhost/[TEMP_DIR]/flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a local path dependency on a specific source distribution.
#[test]
fn compile_source_distribution_path_dependency() -> Result<()> {
let context = TestContext::new("3.12");
// Download a source distribution.
let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz")?;
let flask_wheel = context.temp_dir.child("flask-3.0.0.tar.gz");
let mut flask_wheel_file = std::fs::File::create(&flask_wheel)?;
std::io::copy(&mut response.bytes()?.as_ref(), &mut flask_wheel_file)?;
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&format!(
"flask @ {}",
Url::from_file_path(flask_wheel.path()).unwrap()
))?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ file://[TEMP_DIR]/flask-3.0.0.tar.gz
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###);
Ok(())
}
/// Resolve a local path dependency to a non-existent file.
#[test]
fn compile_wheel_path_dependency_missing() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&format!(
"flask @ {}",
context
.temp_dir
.join("flask-3.0.0-py3-none-any.whl")
.simplified_display()
))?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Distribution not found at: file://[TEMP_DIR]/flask-3.0.0-py3-none-any.whl
"###);
Ok(())
}
/// Resolve a yanked version of `attrs` by specifying the version directly.
#[test]
fn compile_yanked_version_direct() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("attrs==21.1.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
attrs==21.1.0
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Fail to resolve `attrs` due to the indirect use of a yanked version (`21.1.0`).
#[test]
fn compile_yanked_version_indirect() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("attrs>20.3.0,<21.2.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because only the following versions of attrs are available:
attrs<=20.3.0
attrs==21.1.0
attrs>=21.2.0
and attrs==21.1.0 is unusable because it was yanked (reason: Installable but not importable on Python 3.4), we can conclude that attrs>20.3.0,<21.2.0 cannot be used.
And because you require attrs>20.3.0,<21.2.0, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Flask==3.0.0 depends on Werkzeug>=3.0.0. Demonstrate that we can override this
/// requirement with an incompatible version.
#[test]
fn override_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask==3.0.0")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("werkzeug==2.3.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --override overrides.txt
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask==3.0.0
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==2.3.0
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Black==23.10.1 depends on tomli>=1.1.0 for Python versions below 3.11. Demonstrate that we can
/// override it with a multi-line override.
#[test]
fn override_multi_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str(
"tomli>=1.1.0; python_version >= '3.11'\ntomli<1.0.0; python_version < '3.11'",
)?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --override overrides.txt
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
tomli==2.0.1
# via black
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Flask==3.0.0 depends on Werkzeug>=3.0.0. Demonstrate that we can override this
/// requirement with a URL.
#[test]
fn override_dependency_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask==3.0.0")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("werkzeug @ https://files.pythonhosted.org/packages/cc/94/5f7079a0e00bd6863ef8f1da638721e9da21e5bacee597595b318f71d62e/Werkzeug-1.0.1-py2.py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --override overrides.txt
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask==3.0.0
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via jinja2
werkzeug @ https://files.pythonhosted.org/packages/cc/94/5f7079a0e00bd6863ef8f1da638721e9da21e5bacee597595b318f71d62e/Werkzeug-1.0.1-py2.py3-none-any.whl
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Flask==3.0.0 depends on Werkzeug>=3.0.0. Demonstrate that we can override this
/// requirement with an unnamed URL.
#[test]
fn override_dependency_unnamed_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask==3.0.0")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("https://files.pythonhosted.org/packages/cc/94/5f7079a0e00bd6863ef8f1da638721e9da21e5bacee597595b318f71d62e/Werkzeug-1.0.1-py2.py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --override overrides.txt
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask==3.0.0
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via jinja2
werkzeug @ https://files.pythonhosted.org/packages/cc/94/5f7079a0e00bd6863ef8f1da638721e9da21e5bacee597595b318f71d62e/Werkzeug-1.0.1-py2.py3-none-any.whl
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Request an extra that doesn't exist on the specified package.
#[test]
fn missing_registry_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black[tensorboard]==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
warning: The package `black==23.10.1` does not have an extra named `tensorboard`.
"###
);
Ok(())
}
/// Request an extra that doesn't exist on the specified package.
#[test]
fn missing_url_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask[tensorboard] @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
warning: The package `flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl` does not have an extra named `tensorboard`.
"###
);
Ok(())
}
/// Resolve a dependency from a URL, preserving the exact casing of the URL as specified in the
/// requirements file.
#[test]
fn preserve_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ https://files.PYTHONHOSTED.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ https://files.PYTHONHOSTED.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a dependency from a URL, preserving the unexpanded environment variable as specified in
/// the requirements file.
#[test]
fn preserve_project_root() -> Result<()> {
let context = TestContext::new("3.12");
// Download a wheel.
let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
let flask_wheel = context.temp_dir.child("flask-3.0.0-py3-none-any.whl");
let mut flask_wheel_file = std::fs::File::create(flask_wheel)?;
std::io::copy(&mut response.bytes()?.as_ref(), &mut flask_wheel_file)?;
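    // `${PROJECT_ROOT}` points at the wheel downloaded above; it is expanded for resolution,
    // but the output should keep the variable unexpanded.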
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ file://${PROJECT_ROOT}/flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ file://${PROJECT_ROOT}/flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a dependency from a URL, passing in the entire URL as an environment variable.
#[test]
fn respect_http_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ ${URL}")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.env("URL", "https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ ${URL}
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// A requirement defined as a single unnamed environment variable should be parsed as such.
#[test]
fn respect_unnamed_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("${URL}")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.env("URL", "https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ ${URL}
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// A requirement defined as a single unnamed environment variable should error if the environment
/// variable is not set.
#[test]
fn error_missing_unnamed_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("${URL}")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Couldn't parse requirement in `requirements.in` at position 0
Caused by: Expected package name starting with an alphanumeric character, found '$'
${URL}
^
"###
);
Ok(())
}
/// Resolve a dependency from a file path, passing in the entire path as an environment variable.
#[test]
fn respect_file_env_var() -> Result<()> {
let context = TestContext::new("3.12");
// Download a wheel.
let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
let flask_wheel = context.temp_dir.child("flask-3.0.0-py3-none-any.whl");
let mut flask_wheel_file = std::fs::File::create(flask_wheel)?;
std::io::copy(&mut response.bytes()?.as_ref(), &mut flask_wheel_file)?;
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ ${FILE_PATH}")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.env("FILE_PATH", context.temp_dir.join("flask-3.0.0-py3-none-any.whl")), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ ${FILE_PATH}
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
#[test]
#[cfg(feature = "maturin")]
fn compile_editable() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
-e ../../scripts/packages/poetry_editable
-e ${PROJECT_ROOT}/../../scripts/packages/maturin_editable
-e file://../../scripts/packages/black_editable[dev]
boltons # normal dependency for comparison
"
})?;
uv_snapshot!(context.filters(), context.compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z [TEMP_DIR]/requirements.in
-e ${PROJECT_ROOT}/../../scripts/packages/maturin_editable
-e ../../scripts/packages/poetry_editable
-e file://../../scripts/packages/black_editable
aiohttp==3.9.3
# via black
aiosignal==1.3.1
# via aiohttp
anyio==4.3.0
# via poetry-editable
attrs==23.2.0
# via aiohttp
boltons==23.1.1
frozenlist==1.4.1
# via
# aiohttp
# aiosignal
idna==3.6
# via
# anyio
# yarl
multidict==6.0.5
# via
# aiohttp
# yarl
sniffio==1.3.1
# via anyio
uvloop==0.19.0
# via black
yarl==1.9.4
# via aiohttp
----- stderr -----
Built 3 editables in [TIME]
Resolved 14 packages in [TIME]
"###);
Ok(())
}
/// If an editable is repeated, it should only be built once.
#[test]
fn deduplicate_editable() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
-e file://../../scripts/packages/black_editable
-e ${PROJECT_ROOT}/../../scripts/packages/black_editable
-e file://../../scripts/packages/black_editable[dev]
"
})?;
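    // All three entries point at the same local project, so only a single editable build is
    // expected.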
uv_snapshot!(context.filters(), context.compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z [TEMP_DIR]/requirements.in
-e file://../../scripts/packages/black_editable
aiohttp==3.9.3
# via black
aiosignal==1.3.1
# via aiohttp
attrs==23.2.0
# via aiohttp
frozenlist==1.4.1
# via
# aiohttp
# aiosignal
idna==3.6
# via yarl
multidict==6.0.5
# via
# aiohttp
# yarl
uvloop==0.19.0
# via black
yarl==1.9.4
# via aiohttp
----- stderr -----
Built 1 editable in [TIME]
Resolved 9 packages in [TIME]
"###);
Ok(())
}
#[test]
fn recursive_extras_direct_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black[dev] @ ../../scripts/packages/black_editable")?;
let mut command = context.compile();
if cfg!(all(windows, debug_assertions)) {
// TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the
// default windows stack of 1MB
command.env("UV_STACK_SIZE", (2 * 1024 * 1024).to_string());
}
uv_snapshot!(context.filters(), command
.arg(requirements_in.path())
.current_dir(current_dir().unwrap()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z [TEMP_DIR]/requirements.in
aiohttp==3.9.3
# via black
aiosignal==1.3.1
# via aiohttp
attrs==23.2.0
# via aiohttp
black @ ../../scripts/packages/black_editable
frozenlist==1.4.1
# via
# aiohttp
# aiosignal
idna==3.6
# via yarl
multidict==6.0.5
# via
# aiohttp
# yarl
uvloop==0.19.0
# via black
yarl==1.9.4
# via aiohttp
----- stderr -----
Resolved 9 packages in [TIME]
"###);
Ok(())
}
/// Compile an editable package with a direct URL requirement.
#[test]
fn compile_editable_url_requirement() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e ../../scripts/packages/hatchling_editable")?;
uv_snapshot!(context.filters(), context.compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z [TEMP_DIR]/requirements.in
-e ../../scripts/packages/hatchling_editable
iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
# via hatchling-editable
----- stderr -----
Built 1 editable in [TIME]
Resolved 2 packages in [TIME]
"###);
Ok(())
}
#[test]
#[ignore]
fn cache_errors_are_non_fatal() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
// No git dep, git has its own locking strategy
requirements_in.write_str(indoc! {r"
# pypi wheel
pandas
# url wheel
flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
# url source dist
werkzeug @ https://files.pythonhosted.org/packages/0d/cc/ff1904eb5eb4b455e442834dabf9427331ac0fa02853bf83db817a7dd53d/werkzeug-3.0.1.tar.gz
"
})?;
// Pick a file from each kind of cache
let interpreter_cache = context
.cache_dir
.path()
.join("interpreter-v0")
.read_dir()?
.next()
.context("Expected a python interpreter cache file")??
.path();
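    // One entry per cache bucket: the simple index, registry wheels, URL wheels, and built
    // wheels, plus the interpreter cache selected above.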
let cache_files = [
PathBuf::from("simple-v0/pypi/numpy.msgpack"),
PathBuf::from(
"wheels-v0/pypi/python-dateutil/python_dateutil-2.8.2-py2.py3-none-any.msgpack",
),
PathBuf::from("wheels-v0/url/4b8be67c801a7ecb/flask/flask-3.0.0-py3-none-any.msgpack"),
PathBuf::from("built-wheels-v0/url/6781bd6440ae72c2/werkzeug/metadata.msgpack"),
interpreter_cache,
];
let check = || {
        uv_snapshot!(context.compile()
            .arg(requirements_in.path())
            // It's sufficient to check that we resolve to a fixed number of packages
            .stdout(std::process::Stdio::null()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 13 packages in [TIME]
"###
);
};
insta::allow_duplicates! {
check();
// Replace some cache files with invalid contents
for file in &cache_files {
let file = context.cache_dir.join(file);
if !file.is_file() {
bail!("Missing cache file {}", file.user_display());
}
fs_err::write(file, "I borken you cache")?;
}
check();
#[cfg(unix)]
{
use fs_err::os::unix::fs::OpenOptionsExt;
            // Make some files unreadable, so that the read (rather than the deserialization) fails
for file in cache_files {
let file = context.cache_dir.join(file);
if !file.is_file() {
bail!("Missing cache file {}", file.user_display());
}
fs_err::OpenOptions::new()
.create(true)
.write(true)
.mode(0o000)
.open(file)?;
}
}
check();
Ok(())
}
}
/// Resolve a distribution from an HTML-only registry.
#[test]
#[cfg(not(target_env = "musl"))] // No musllinux wheels in the torch index
fn compile_html() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("jinja2<=3.1.2")?;
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("compile")
.arg("requirements.in")
.arg("--cache-dir")
.arg(context.cache_dir.path())
.arg("--index-url")
.arg("https://download.pytorch.org/whl")
.env("VIRTUAL_ENV", context.venv.as_os_str())
.env("UV_NO_WRAP", "1")
.current_dir(context.temp_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --cache-dir [CACHE_DIR]
jinja2==3.1.2
markupsafe==2.1.5
# via jinja2
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a distribution from a registry with and without a trailing slash.
#[test]
fn trailing_slash() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("jinja2")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--index-url")
.arg("https://test.pypi.org/simple"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
jinja2==3.1.3
markupsafe==2.1.5
# via jinja2
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--index-url")
.arg("https://test.pypi.org/simple/"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
jinja2==3.1.3
markupsafe==2.1.5
# via jinja2
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a project without a `pyproject.toml`, using the PEP 517 build backend (default).
#[test]
fn compile_legacy_sdist_pep_517() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz
mccabe==0.7.0
# via flake8
pycodestyle==2.10.0
# via flake8
pyflakes==3.0.1
# via flake8
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a project without a `pyproject.toml`, using `setuptools` directly.
#[test]
fn compile_legacy_sdist_setuptools() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--legacy-setup-py"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --legacy-setup-py
flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz
mccabe==0.7.0
# via flake8
pycodestyle==2.10.0
# via flake8
pyflakes==3.0.1
# via flake8
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Include hashes from the registry in the generated output.
#[test]
fn generate_hashes_registry() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio==4.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --generate-hashes
anyio==4.0.0 \
--hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f \
--hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Include hashes from the URL in the generated output.
#[test]
fn generate_hashes_source_distribution_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --generate-hashes
anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz \
--hash=sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Include hashes from the URL in the generated output.
#[test]
fn generate_hashes_built_distribution_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --generate-hashes
anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl \
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Given a VCS dependency, include hashes for its dependencies, but not the repository itself.
#[test]
fn generate_hashes_git() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio @ git+https://github.com/agronholm/anyio@4.3.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --generate-hashes
anyio @ git+https://github.com/agronholm/anyio@437a7e310925a962cab4a58fcd2455fbcd578d51
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Given an unnamed URL, include hashes for the URL and its dependencies.
#[test]
fn generate_hashes_unnamed_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --generate-hashes
anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl \
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Given a local directory, include hashes for its dependencies, but not the directory itself.
#[test]
fn generate_hashes_local_directory() -> Result<()> {
let _context = TestContext::new("3.12");
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
../../scripts/packages/poetry_editable
"
})?;
uv_snapshot!(context.filters(), context.compile()
.arg(requirements_in.path())
.arg("--generate-hashes")
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z [TEMP_DIR]/requirements.in --generate-hashes
anyio==4.3.0 \
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8 \
--hash=sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6
# via poetry-editable
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
poetry-editable @ ../../scripts/packages/poetry_editable
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###);
Ok(())
}
/// Given an editable dependency, include hashes for its dependencies, but not the directory itself.
#[test]
fn generate_hashes_editable() -> Result<()> {
let _context = TestContext::new("3.12");
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
-e ../../scripts/packages/poetry_editable
"
})?;
uv_snapshot!(context.filters(), context.compile()
.arg(requirements_in.path())
.arg("--generate-hashes")
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z [TEMP_DIR]/requirements.in --generate-hashes
-e ../../scripts/packages/poetry_editable
anyio==4.3.0 \
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8 \
--hash=sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6
# via poetry-editable
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Built 1 editable in [TIME]
Resolved 4 packages in [TIME]
"###);
Ok(())
}
/// Compile using `--find-links` with a local directory.
#[test]
fn find_links_directory() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
tqdm
numpy
werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl
"})?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in")
.arg("--find-links")
.arg(context.workspace_root.join("scripts").join("links")), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
markupsafe==2.1.5
# via werkzeug
numpy==1.26.4
tqdm==1000.0.0
werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl
----- stderr -----
Resolved 4 packages in [TIME]
"###);
Ok(())
}
/// Compile using `--find-links` with a URL by resolving `tqdm` from the `PyTorch` wheels index.
#[test]
fn find_links_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("tqdm")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-index")
.arg("--find-links")
.arg("https://download.pytorch.org/whl/torch_stable.html"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --no-index
tqdm==4.64.1
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Compile using `--find-links` with a URL passed via an environment variable.
#[test]
fn find_links_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("tqdm\n--find-links ${URL}")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-index")
.env("URL", "https://download.pytorch.org/whl/torch_stable.html"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --no-index
tqdm==4.64.1
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Compile using `--find-links` with a URL by resolving `tqdm` from the `PyTorch` wheels index,
/// with the URL itself provided in the requirements file rather than on the command line.
#[test]
fn find_links_requirements_txt() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-f https://download.pytorch.org/whl/torch_stable.html\ntqdm")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-index")
.arg("--emit-find-links"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --no-index --emit-find-links
--find-links https://download.pytorch.org/whl/torch_stable.html
tqdm==4.64.1
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// `extras==0.0.2` fails to build (i.e., it always throws). Since `extras==0.0.1` is pinned, we
/// should never even attempt to build `extras==0.0.2`, despite an unpinned `extras[dev]`
/// requirement.
#[test]
fn avoid_irrelevant_extras() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
extras==0.0.1
extras[dev]
"})?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--find-links")
.arg(context.workspace_root.join("scripts").join("links")), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio==4.3.0
# via extras
extras==0.0.1
idna==3.6
# via anyio
iniconfig==2.0.0
# via extras
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 5 packages in [TIME]
"###);
Ok(())
}
/// Use an existing resolution for `black==23.10.1`, with stale versions of `click` and `pathspec`.
/// Nothing should change.
#[test]
fn upgrade_none() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
black==23.10.1
click==8.1.2
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.0
# via black
platformdirs==4.0.0
# via black
"})?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--output-file")
.arg("requirements.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --output-file requirements.txt
black==23.10.1
click==8.1.2
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.0
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Use an existing resolution for `black==23.10.1`, with stale versions of `click`, `packaging`,
/// `pathspec`, and `platformdirs`. With `--upgrade`, all of the stale pins should be refreshed.
#[test]
fn upgrade_all() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
let requirements_txt = context.temp_dir.child("requirements.txt");
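    // Seed the output file with stale pins; `--upgrade` should refresh all of them.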
requirements_txt.write_str(indoc! {r"
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
black==23.10.1
click==8.1.2
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.0
# via black
platformdirs==4.0.0
# via black
"})?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--output-file")
.arg("requirements.txt")
.arg("--upgrade"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --output-file requirements.txt
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Use an existing resolution for `black==23.10.1`, with stale versions of `click` and `pathspec`.
/// Only `click` should be upgraded.
#[test]
fn upgrade_package() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
let requirements_txt = context.temp_dir.child("requirements.txt");
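    // Seed the output file with stale pins; `--upgrade-package click` should refresh only `click`.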
requirements_txt.write_str(indoc! {r"
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
black==23.10.1
click==8.1.2
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.0
# via black
platformdirs==4.0.0
# via black
"})?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--output-file")
.arg("requirements.txt")
.arg("--upgrade-package")
.arg("click"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --output-file requirements.txt --upgrade-package click
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.0
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Attempt to resolve a requirement at a path that doesn't exist.
#[test]
fn missing_path_requirement() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
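    // Reference an archive at a path that does not exist, using a platform-appropriate `file://` URL.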
requirements_in.write_str(if cfg!(windows) {
"anyio @ file://C:/tmp/anyio-3.7.0.tar.gz"
} else {
"anyio @ file:///tmp/anyio-3.7.0.tar.gz"
})?;
let filters: Vec<_> = [(r"/C:/", "/")]
.into_iter()
.chain(context.filters())
.collect();
uv_snapshot!(filters, context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Distribution not found at: file://tmp/anyio-3.7.0.tar.gz
"###);
Ok(())
}
/// Attempt to resolve an editable requirement at a path that doesn't exist.
#[test]
fn missing_editable_requirement() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e foo/anyio-3.7.0.tar.gz")?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to build editables
Caused by: Source distribution not found at: [TEMP_DIR]/foo/anyio-3.7.0.tar.gz
"###);
Ok(())
}
/// Attempt to resolve a URL requirement without a package name. The package name can be extracted
/// from the URL.
#[test]
fn unnamed_requirement_with_package_name() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Exclude annotations from the output.
#[test]
fn no_annotate() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-annotate"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --no-annotate
black==23.10.1
click==8.1.7
mypy-extensions==1.0.0
packaging==24.0
pathspec==0.12.1
platformdirs==4.2.0
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Exclude header from the output.
#[test]
fn no_header() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-header"), @r###"
success: true
exit_code: 0
----- stdout -----
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Include custom compile command in the header.
#[test]
fn custom_compile_command() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--custom-compile-command")
.arg("./custom-uv-compile.sh"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# ./custom-uv-compile.sh
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
// with env var
uv_snapshot!(context.compile()
.arg("requirements.in")
.env("UV_CUSTOM_COMPILE_COMMAND", "./custom-uv-compile.sh"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# ./custom-uv-compile.sh
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Emit warnings when users pass redundant options from `pip-compile`.
#[test]
fn allow_unsafe() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug==3.0.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--allow-unsafe"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --allow-unsafe
markupsafe==2.1.5
# via werkzeug
werkzeug==3.0.1
----- stderr -----
warning: pip-compile's `--allow-unsafe` has no effect (uv can safely pin `pip` and other packages).
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Emit an error when users pass unsupported options from `pip-compile` (the legacy resolver).
#[test]
fn resolver_legacy() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("werkzeug==3.0.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--resolver=legacy"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: pip-compile's `--resolver=legacy` is unsupported (uv always backtracks).
"###
);
Ok(())
}
/// Emit the `--index-url` and `--extra-index-url` locations.
/// Also, preserve the `--index-url` and `--extra-index-url` flags in the command in the header.
#[test]
fn emit_index_urls() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-index-url")
.arg("--index-url")
.arg("https://test.pypi.org/simple/")
.arg("--extra-index-url")
.arg("https://pypi.org/simple"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --emit-index-url --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple
--index-url https://test.pypi.org/simple/
--extra-index-url https://pypi.org/simple
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Emit the `--find-links` locations.
#[test]
fn emit_find_links() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-find-links")
.arg("--find-links")
.arg("./"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --emit-find-links --find-links ./
--find-links ./
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Respect the `--no-index` flag in a `requirements.txt` file.
#[test]
fn no_index_requirements_txt() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--no-index\ntqdm")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because tqdm was not found in the provided package locations and you require tqdm, we can conclude that the requirements are unsatisfiable.
hint: Packages were unavailable because index lookups were disabled and no additional package locations were provided (try: `--find-links <uri>`)
"###
);
Ok(())
}
/// Prefer the `--index-url` from the command line over the `--index-url` in a `requirements.txt`
/// file. Also, `--index-url` and `--extra-index-url` should not be presented in the output
/// unless we specify `--emit-index-url`.
#[test]
fn index_url_requirements_txt() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--index-url https://google.com\ntqdm")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--index-url")
.arg("https://pypi.org/simple"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
tqdm==4.66.2
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Raise an error when multiple `requirements.txt` files include different `--index-url` flags.
#[test]
fn conflicting_index_urls_requirements_txt() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--index-url https://google.com\ntqdm")?;
let constraints_in = context.temp_dir.child("constraints.in");
constraints_in.write_str("--index-url https://wikipedia.org\nflask")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Multiple index URLs specified: `https://google.com/` vs. `https://wikipedia.org/`
"###
);
Ok(())
}
/// Doesn't raise an error when multiple `requirements.txt` files include matching `--index-url` flags.
#[test]
fn matching_index_urls_requirements_txt() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--index-url https://pypi.org/simple")?;
let constraints_in = context.temp_dir.child("constraints.in");
constraints_in.write_str("--index-url https://pypi.org/simple")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --constraint constraints.in
----- stderr -----
Resolved 0 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a registry package without network access via the `--offline` flag.
#[test]
fn offline_registry() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
// Resolve with `--offline` with an empty cache.
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--offline"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because black==23.10.1 was not found in the cache and you require black==23.10.1, we can conclude that the requirements are unsatisfiable.
hint: Packages were unavailable because the network was disabled
"###
);
// Populate the cache.
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
// Resolve with `--offline` with a populated cache.
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--offline"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --offline
black==23.10.1
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a registry package without network access via the `--offline` flag. We should backtrack
/// to the latest version of the package that's available in the cache.
#[test]
fn offline_registry_backtrack() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("iniconfig==1.1.1")?;
// Populate the cache.
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
iniconfig==1.1.1
----- stderr -----
Resolved 1 package in [TIME]
"###
);
// Resolve with `--offline`, with a looser requirement. We should backtrack to `1.1.1`.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("iniconfig")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--offline"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --offline
iniconfig==1.1.1
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a package without network access via the `--offline` flag, using `--find-links` for an
/// HTML registry.
#[test]
fn offline_find_links() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("tqdm")?;
// Resolve with `--offline` and `--find-links`. We indicate that the network was disabled,
// since both the `--find-links` and the registry lookups fail (but, importantly, we don't error
// when failing to fetch the `--find-links` URL).
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--find-links")
.arg("https://download.pytorch.org/whl/torch_stable.html")
.arg("--offline"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because tqdm was not found in the cache and you require tqdm, we can conclude that the requirements are unsatisfiable.
hint: Packages were unavailable because the network was disabled
"###
);
// Resolve with `--offline`, `--find-links`, and `--no-index`.
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--find-links")
.arg("https://download.pytorch.org/whl/torch_stable.html")
.arg("--no-index")
.arg("--offline"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because tqdm was not found in the cache and you require tqdm, we can conclude that the requirements are unsatisfiable.
hint: Packages were unavailable because the network was disabled
"###
);
Ok(())
}
/// Resolve a direct URL package without network access via the `--offline` flag.
#[test]
fn offline_direct_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl")?;
// Resolve with `--offline` with an empty cache.
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--offline"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to download: `iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl`
Caused by: Network connectivity is disabled, but the requested data wasn't found in the cache for: `https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl`
"###
);
// Populate the cache.
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl
----- stderr -----
Resolved 1 package in [TIME]
"###
);
// Resolve with `--offline` with a populated cache.
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--offline"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --offline
iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a package with invalid metadata, by way of an invalid `Requires-Python` field in the
/// `METADATA` file.
#[test]
fn invalid_metadata_requires_python() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("validation==2.0.0")?;
// `2.0.0` has invalid metadata.
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-index")
.arg("--find-links")
.arg(context.workspace_root.join("scripts").join("links")), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because validation==2.0.0 is unusable because the package metadata could not be parsed and you require validation==2.0.0, we can conclude that the requirements are unsatisfiable.
hint: Metadata for validation==2.0.0 could not be parsed:
Failed to parse version: Unexpected end of version specifier, expected operator:
12
^^
"###
);
Ok(())
}
/// Resolve a package with multiple `.dist-info` directories.
#[test]
fn invalid_metadata_multiple_dist_info() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("validation==3.0.0")?;
// `3.0.0` has an invalid structure (multiple `.dist-info` directories).
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-index")
.arg("--find-links")
.arg(context.workspace_root.join("scripts").join("links")), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because validation==3.0.0 is unusable because the package has an invalid format and you require validation==3.0.0, we can conclude that the requirements are unsatisfiable.
hint: The structure of validation==3.0.0 was invalid:
Multiple .dist-info directories found: validation-2.0.0, validation-3.0.0
"###
);
Ok(())
}
/// Resolve a package, but backtrack past versions with invalid metadata.
#[test]
fn invalid_metadata_backtrack() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("validation")?;
// `2.0.0` and `3.0.0` have invalid metadata. We should backtrack to `1.0.0` (the preceding
// version, which has valid metadata).
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-index")
.arg("--find-links")
.arg(context.workspace_root.join("scripts").join("links")), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --no-index
validation==1.0.0
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve nested `-r` requirements files with relative paths.
#[test]
fn compile_relative_subfile() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-r subdir/requirements.in")?;
let subdir = context.temp_dir.child("subdir");
let requirements_in = subdir.child("requirements.in");
requirements_in.write_str("-r requirements-dev.in")?;
let requirements_dev_in = subdir.child("requirements-dev.in");
requirements_dev_in.write_str("anyio")?;
uv_snapshot!(context
.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio==4.3.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
Ok(())
}
/// Resolve a package with an invalid extra named `.none`.
#[test]
fn compile_none_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("entrypoints==0.3")?;
uv_snapshot!(context
.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
entrypoints==0.3
----- stderr -----
Resolved 1 package in [TIME]
"###);
Ok(())
}
/// Resolve a package (`types-pytz`) with a preference that omits a trailing zero.
///
/// See: <https://github.com/astral-sh/uv/issues/1536>
#[test]
fn compile_types_pytz() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("types-pytz")?;
let requirements_txt = context.temp_dir.child("requirements.txt");
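    // Seed the output file with a preference that omits the trailing zero (`2021.1` rather than `2021.1.0`).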
requirements_txt.write_str("types-pytz==2021.1")?;
uv_snapshot!(context
.compile()
.arg("requirements.in")
.arg("-o")
.arg("requirements.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in -o requirements.txt
types-pytz==2021.1.0
----- stderr -----
Resolved 1 package in [TIME]
"###);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` pinning that package
/// to a specific URL.
#[test]
fn compile_constraints_compatible_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio>4")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --constraint constraints.txt
anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a direct URL package from a `requirements.in` file, with a `constraints.txt` file
/// pinning it to a specific version.
#[test]
fn compile_constraints_compatible_url_version() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("anyio>4")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --constraint constraints.txt
anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file pinning it to
/// a specific URL with an incompatible version.
#[test]
fn compile_constraints_incompatible_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio<4")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because only anyio>=4 is available and you require anyio<4, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, respecting the `--index-url` in a
/// `requirements.in` file. The resolution should fail, since the package doesn't exist at the
/// provided index.
#[test]
fn index_url_in_requirements() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--index-url https://download.pytorch.org/whl\nanyio<4")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because anyio<4 was not found in the package registry and you require anyio<4, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, respecting the `--index-url` passed via the
/// command line over that in a `requirements.in` file.
#[test]
fn index_url_from_command_line() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--index-url https://download.pytorch.org/whl\nanyio<4")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--index-url")
.arg("https://pypi.org/simple"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio==3.7.1
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file with a requirement that uses an unsupported
/// URL scheme (`bzr+https`).
#[test]
fn unsupported_scheme() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio @ bzr+https://example.com/anyio")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Unsupported scheme `bzr+https` on URL: bzr+https://example.com/anyio (Bazaar is not supported)
"###
);
Ok(())
}
/// Resolve a package with `--no-deps`, including a valid extra.
#[test]
fn no_deps_valid_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask[dotenv]")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-deps"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --no-deps
flask==3.0.2
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a package with `--no-deps`, including an invalid extra.
#[test]
fn no_deps_invalid_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask[empty]")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-deps"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --no-deps
flask==3.0.2
----- stderr -----
Resolved 1 package in [TIME]
warning: The package `flask==3.0.2` does not have an extra named `empty`.
"###
);
Ok(())
}
/// Resolve a pair of editable requirements with `--no-deps`, where the requirements conflict in
/// their transitive dependencies. The resolution should succeed, since `--no-deps` ignores the
/// transitive dependencies.
#[test]
fn no_deps_transitive_conflict() -> Result<()> {
let context = TestContext::new("3.12");
// Create an editable with a dependency on `anyio` at a dedicated URL.
let editable_dir1 = context.temp_dir.child("editable1");
editable_dir1.create_dir_all()?;
let pyproject_toml = editable_dir1.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "editable1"
version = "0.0.1"
dependencies = [
"anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl"
]
"#,
)?;
// Create an editable with a dependency on `anyio` at a different, dedicated URL.
let editable_dir2 = context.temp_dir.child("editable2");
editable_dir2.create_dir_all()?;
let pyproject_toml = editable_dir2.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "editable2"
version = "0.0.1"
dependencies = [
"anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl"
]
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&indoc::formatdoc! {r#"
-e {}
-e {}
"#,
editable_dir1.path().display(),
editable_dir2.path().display()
})?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in")
.arg("--no-deps"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --no-deps
-e [TEMP_DIR]/editable1
-e [TEMP_DIR]/editable2
----- stderr -----
Built 2 editables in [TIME]
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Resolve an editable package with an invalid extra.
#[test]
fn editable_invalid_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e ../../scripts/packages/black_editable[empty]")?;
uv_snapshot!(context.filters(), context.compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z [TEMP_DIR]/requirements.in
-e ../../scripts/packages/black_editable
----- stderr -----
Built 1 editable in [TIME]
Resolved 1 package in [TIME]
warning: The package `black @ file://[WORKSPACE]/scripts/packages/black_editable` does not have an extra named `empty`.
"###);
Ok(())
}
/// Resolve a package with `--no-strip-extras`.
#[test]
fn no_strip_extra() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask[dotenv]")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-strip-extras"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --no-strip-extras
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask[dotenv]==3.0.2
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
python-dotenv==1.0.1
# via flask
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 8 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package with multiple extras using `--no-strip-extras`.
#[test]
#[cfg(not(windows))]
fn no_strip_extras() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio[trio]\nanyio[doc]")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--no-strip-extras"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --no-strip-extras
alabaster==0.7.16
# via sphinx
anyio[doc, trio]==4.3.0
attrs==23.2.0
# via
# outcome
# trio
babel==2.14.0
# via sphinx
certifi==2024.2.2
# via requests
charset-normalizer==3.3.2
# via requests
docutils==0.20.1
# via
# sphinx
# sphinx-rtd-theme
idna==3.6
# via
# anyio
# requests
# trio
imagesize==1.4.1
# via sphinx
jinja2==3.1.3
# via sphinx
markupsafe==2.1.5
# via jinja2
outcome==1.3.0.post0
# via trio
packaging==24.0
# via
# anyio
# sphinx
pygments==2.17.2
# via sphinx
requests==2.31.0
# via sphinx
sniffio==1.3.1
# via
# anyio
# trio
snowballstemmer==2.2.0
# via sphinx
sortedcontainers==2.4.0
# via trio
sphinx==7.2.6
# via
# anyio
# sphinx-autodoc-typehints
# sphinx-rtd-theme
# sphinxcontrib-jquery
sphinx-autodoc-typehints==2.0.0
# via anyio
sphinx-rtd-theme==2.0.0
# via anyio
sphinxcontrib-applehelp==1.0.8
# via sphinx
sphinxcontrib-devhelp==1.0.6
# via sphinx
sphinxcontrib-htmlhelp==2.0.5
# via sphinx
sphinxcontrib-jquery==4.1
# via sphinx-rtd-theme
sphinxcontrib-jsmath==1.0.1
# via sphinx
sphinxcontrib-qthelp==1.0.7
# via sphinx
sphinxcontrib-serializinghtml==1.1.10
# via sphinx
trio==0.25.0
# via anyio
urllib3==2.2.1
# via requests
----- stderr -----
Resolved 30 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file pinning one of
/// its transitive dependencies to a specific version.
#[test]
fn compile_constraints_compatible_version() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("virtualenv")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("filelock==3.8.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --constraint constraints.txt
distlib==0.3.8
# via virtualenv
filelock==3.8.0
# via virtualenv
platformdirs==3.11.0
# via virtualenv
virtualenv==20.21.1
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file pinning the
/// requested package itself to an incompatible version.
#[test]
fn compile_constraints_incompatible_version() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("filelock==1.0.0")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("filelock==3.8.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because you require filelock==1.0.0 and you require filelock==3.8.0, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file pinning the
/// requested package itself to an incompatible version.
#[test]
fn conflicting_url_markers() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("filelock==1.0.0")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("filelock==3.8.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because you require filelock==1.0.0 and you require filelock==3.8.0, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Override a regular package with an editable.
///
/// At present, this incorrectly resolves to the regular package.
#[test]
fn editable_override() -> Result<()> {
let context = TestContext::new("3.12");
// Add a non-editable requirement.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black")?;
// Add an editable override.
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("-e file://../../scripts/packages/black_editable")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --override overrides.txt
black==24.3.0
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Override an editable with a regular package.
///
/// At present, this incorrectly resolves to the editable.
#[test]
fn override_editable() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e ../../scripts/packages/black_editable")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("black==23.10.1")?;
uv_snapshot!(context.filters(), context.compile()
.arg(requirements_in.path())
.arg("--override")
.arg(overrides_txt.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z [TEMP_DIR]/requirements.in --override [TEMP_DIR]/overrides.txt
-e ../../scripts/packages/black_editable
----- stderr -----
Built 1 editable in [TIME]
Resolved 1 package in [TIME]
"###);
Ok(())
}
/// Resolve a package with both a constraint _and_ an override. The override and the constraint are
/// compatible, and together they admit exactly one version (`3.0.0`).
#[test]
fn override_with_compatible_constraint() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("anyio<=3.0.0")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("anyio>=3.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt")
.arg("--override")
.arg("overrides.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --constraint constraints.txt --override overrides.txt
anyio==3.0.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package with both a constraint _and_ an override. The override and the constraint are
/// incompatible, and so should error. (The correctness of this behavior is subject to debate.)
#[test]
fn override_with_incompatible_constraint() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("anyio<3.0.0")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("anyio>=3.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt")
.arg("--override")
.arg("overrides.txt"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because you require anyio>=3.0.0 and you require anyio<3.0.0, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Resolve a package, marking one of its dependencies (`jinja2`) as unsafe, along with a package
/// (`pydantic`) that isn't part of the resolution.
#[test]
fn unsafe_package() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--unsafe-package")
.arg("jinja2")
.arg("--unsafe-package")
.arg("pydantic"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --unsafe-package jinja2 --unsafe-package pydantic
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask==3.0.2
itsdangerous==2.1.2
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
# The following packages were excluded from the output:
# jinja2
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package with a strict upper bound, allowing pre-releases. Per PEP 440, pre-releases
/// that match the bound (e.g., `2.0.0rc1`) should _not_ be allowed.
#[test]
fn pre_release_upper_bound_exclude() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask<2.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--prerelease=allow"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --prerelease=allow
click==7.1.2
# via flask
flask==1.1.4
itsdangerous==1.1.0
# via flask
jinja2==2.11.3
# via flask
markupsafe==2.1.5
# via jinja2
werkzeug==1.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package with a strict upper bound that includes a pre-release. Per PEP 440,
/// pre-releases _should_ be allowed.
#[test]
fn pre_release_upper_bound_include() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
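    // The `<2.0.0rc4` bound itself names a pre-release, so pre-releases below it are eligible.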
requirements_in.write_str("flask<2.0.0rc4")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--prerelease=allow"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --prerelease=allow
click==8.1.7
# via flask
flask==2.0.0rc2
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Allow `--pre` as an alias for `--prerelease=allow`.
#[test]
fn pre_alias() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask<2.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--pre"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --pre
click==7.1.2
# via flask
flask==1.1.4
itsdangerous==1.1.0
# via flask
jinja2==2.11.3
# via flask
markupsafe==2.1.5
# via jinja2
werkzeug==1.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Allow a pre-release for a version specifier in a constraint file.
#[test]
fn pre_release_constraint() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("flask<=2.0.0rc2")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --constraint constraints.txt
click==8.1.7
# via flask
flask==2.0.0rc2
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve from a `pyproject.toml` file with a recursive extra.
#[test]
fn compile_pyproject_toml_recursive_extra() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
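    // Define `test` and `dev` extras that reference each other recursively.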
pyproject_toml.write_str(
r#"
[project]
name = "my-project"
version = "0.0.1"
dependencies = [
"tomli",
]
[project.optional-dependencies]
test = [
"pep517",
"my-project[dev]"
]
dev = [
"my-project[test]",
]
"#,
)?;
uv_snapshot!(context.compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("dev"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z pyproject.toml --extra dev
pep517==0.13.1
tomli==2.0.1
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// The dependencies of a local editable dependency should be considered "direct" dependencies.
#[test]
fn editable_direct_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
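    // With `--resolution=lowest-direct`, the editable's `iniconfig` dependency should be treated
    // as a direct dependency and resolve to its lowest available version.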
requirements_in.write_str("-e ../../scripts/packages/setuptools_editable")?;
uv_snapshot!(context.filters(), context.compile()
.arg(requirements_in.path())
.arg("--resolution")
.arg("lowest-direct")
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z [TEMP_DIR]/requirements.in --resolution lowest-direct
-e ../../scripts/packages/setuptools_editable
iniconfig==0.1
# via setuptools-editable
----- stderr -----
Built 1 editable in [TIME]
Resolved 2 packages in [TIME]
"###);
Ok(())
}
/// Setting `UV_INDEX_URL` to the empty string should treat it as "unset".
#[test]
fn empty_index_url_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-index-url")
.env("UV_INDEX_URL", ""), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --emit-index-url
--index-url https://pypi.org/simple
anyio==4.3.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Setting `UV_EXTRA_INDEX_URL` to the empty string should treat it as "unset".
#[test]
fn empty_extra_index_url_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-index-url")
.env("EXTRA_UV_INDEX_URL", ""), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --emit-index-url
--index-url https://pypi.org/simple
anyio==4.3.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Setting `UV_INDEX_URL` to the empty string should treat it as "unset", and so should be
/// overridden by an `--index-url` in a requirements file.
#[test]
fn empty_index_url_env_var_override() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--index-url https://test.pypi.org/simple\nidna")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-index-url")
.env("UV_INDEX_URL", ""), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --emit-index-url
--index-url https://test.pypi.org/simple
idna==2.7
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// The `UV_INDEX_URL` should override an `--index-url` in a requirements file.
#[test]
fn index_url_env_var_override() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--index-url https://pypi.org/simple\nidna")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-index-url")
.env("UV_INDEX_URL", "https://test.pypi.org/simple"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --emit-index-url
--index-url https://test.pypi.org/simple
idna==2.7
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Expand an environment variable in a `-r` path within a `requirements.in` file.
#[test]
fn expand_env_var_requirements_txt() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
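    // Reference the nested requirements file via the `${PROJECT_ROOT}` environment variable.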
requirements_in.write_str("-r ${PROJECT_ROOT}/requirements-dev.in")?;
let requirements_dev_in = context.temp_dir.child("requirements-dev.in");
requirements_dev_in.write_str("anyio")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio==4.3.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Raise an error when an editable's `Requires-Python` constraint is not met.
#[test]
fn requires_python_editable() -> Result<()> {
let context = TestContext::new("3.12");
// Create an editable package with a `Requires-Python` constraint that is not met.
let editable_dir = context.temp_dir.child("editable");
editable_dir.create_dir_all()?;
let pyproject_toml = editable_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"anyio==4.0.0"
]
requires-python = "<=3.8"
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&format!("-e {}", editable_dir.path().display()))?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Editable `example` requires Python <=3.8, but resolution targets Python 3.12.[X]
"###
);
Ok(())
}
/// Raise an error when an editable's `Requires-Python` constraint is not met by the requested
/// `--python-version` target.
#[test]
fn requires_python_editable_target_version() -> Result<()> {
let context = TestContext::new("3.12");
// Create an editable package with a `Requires-Python` constraint that is not met.
let editable_dir = context.temp_dir.child("editable");
editable_dir.create_dir_all()?;
let pyproject_toml = editable_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"anyio==4.0.0"
]
requires-python = "<=3.8"
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&format!("-e {}", editable_dir.path().display()))?;
let filters: Vec<_> = [
// 3.11 may not be installed
(
"warning: The requested Python version 3.11 is not available; .* will be used to build dependencies instead.\n",
"",
),
]
.into_iter()
.chain(context.filters())
.collect();
uv_snapshot!(filters, context.compile()
.arg("requirements.in")
.arg("--python-version=3.11"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Editable `example` requires Python <=3.8, but resolution targets Python 3.11
"###
);
Ok(())
}
#[test]
fn editable_optional_url() -> Result<()> {
let context = TestContext::new("3.12");
// Create an editable package with an optional URL dependency.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = []
requires-python = '>=3.8'
[project.optional-dependencies]
dev = [
"anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl"
]
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e .[dev]")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
-e .
anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl
# via example
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Built 1 editable in [TIME]
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Under `--resolution=lowest-direct`, ignore optional dependencies.
///
/// In the example below, ensure that `setuptools` does not resolve to the lowest available version.
#[test]
fn editable_optional_lowest_direct() -> Result<()> {
let context = TestContext::new("3.12");
// Create an editable package with an optional dependency.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = ["setuptools-scm>=8.0.0"]
requires-python = '>=3.8'
[project.optional-dependencies]
dev = ["setuptools"]
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e .")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--resolution=lowest-direct"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --resolution=lowest-direct
-e .
packaging==24.0
# via setuptools-scm
setuptools==69.2.0
# via setuptools-scm
setuptools-scm==8.0.1
# via example
----- stderr -----
Built 1 editable in [TIME]
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a source distribution that leverages Metadata 2.2.
#[test]
fn metadata_2_2() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("pyo3-mixed @ https://files.pythonhosted.org/packages/2b/b8/e04b783d3569d5b61b1dcdfda683ac2e3617340539aecd0f099fbade0b4a/pyo3_mixed-2.1.5.tar.gz")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
boltons==23.1.1
# via pyo3-mixed
pyo3-mixed @ https://files.pythonhosted.org/packages/2b/b8/e04b783d3569d5b61b1dcdfda683ac2e3617340539aecd0f099fbade0b4a/pyo3_mixed-2.1.5.tar.gz
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Resolve packages from an index that "doesn't support" zip file streaming (by way of using
/// data descriptors).
#[test]
fn no_stream() -> Result<()> {
let context = TestContext::new("3.12");
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in
.write_str("hashb_foxglove_protocolbuffers_python==25.3.0.1.20240226043130+465630478360")?;
let constraints_in = context.temp_dir.child("constraints.in");
constraints_in.write_str("protobuf<=5.26.0")?;
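// Wheels on this index are zipped with "data descriptors" (entry sizes and CRCs are written
// after the file contents), so metadata can't be read from a streamed prefix of the archive;
// this exercises the non-streaming download path.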
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("compile")
.arg("requirements.in")
.arg("-c")
.arg("constraints.in")
.arg("--extra-index-url")
.arg("https://buf.build/gen/python")
.arg("--cache-dir")
.arg(context.cache_dir.path())
.env("VIRTUAL_ENV", context.venv.as_os_str())
.env("UV_NO_WRAP", "1")
.current_dir(&context.temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in -c constraints.in --cache-dir [CACHE_DIR]
hashb-foxglove-protocolbuffers-python==25.3.0.1.20240226043130+465630478360
protobuf==5.26.0
# via hashb-foxglove-protocolbuffers-python
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a direct URL package with a URL that doesn't exist (i.e., returns a 404).
#[test]
fn not_found_direct_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("iniconfig @ https://files.pythonhosted.org/packages/ef/a6/fake/iniconfig-2.0.0-py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to download: `iniconfig @ https://files.pythonhosted.org/packages/ef/a6/fake/iniconfig-2.0.0-py3-none-any.whl`
Caused by: HTTP status client error (404 Not Found) for url (https://files.pythonhosted.org/packages/ef/a6/fake/iniconfig-2.0.0-py3-none-any.whl)
"###
);
Ok(())
}
/// Raise an error when a direct URL dependency's `Requires-Python` constraint is not met.
#[test]
fn requires_python_direct_url() -> Result<()> {
let context = TestContext::new("3.12");
// Create a local package with a `Requires-Python` constraint that is not met.
let editable_dir = context.temp_dir.child("editable");
editable_dir.create_dir_all()?;
let pyproject_toml = editable_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"anyio==4.0.0"
]
requires-python = "<=3.8"
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(&format!("example @ {}", editable_dir.path().display()))?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because the current Python version (3.12.[X]) does not satisfy Python<=3.8 and example==0.0.0 depends on Python<=3.8, we can conclude that example==0.0.0 cannot be used.
And because only example==0.0.0 is available and you require example, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Build an editable package with Hatchling's {root:uri} feature.
#[test]
fn compile_root_uri_editable() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e ${ROOT_PATH}")?;
let root_path = current_dir()?.join("../../scripts/packages/root_editable");
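// The `root_editable` fixture is assumed to declare its dependency via Hatchling's
// `{root:uri}` placeholder (something like `black @ {root:uri}/../black_editable`), which
// Hatchling expands to a `file://` URL rooted at the project directory, as reflected in the
// snapshot below.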
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in")
.env("ROOT_PATH", root_path.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
-e ${ROOT_PATH}
black @ file://[WORKSPACE]/scripts/packages/root_editable/../black_editable
# via root-editable
----- stderr -----
Built 1 editable in [TIME]
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Build a non-editable package with Hatchling's {root:uri} feature.
#[test]
fn compile_root_uri_non_editable() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("${ROOT_PATH}\n${BLACK_PATH}")?;
let root_path = current_dir()?.join("../../scripts/packages/root_editable");
let black_path = current_dir()?.join("../../scripts/packages/black_editable");
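// For non-editable requirements, the environment variable references (`${ROOT_PATH}`,
// `${BLACK_PATH}`) are preserved verbatim in the compiled output rather than expanded to
// absolute paths.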
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in")
.env("ROOT_PATH", root_path.as_os_str())
.env("BLACK_PATH", black_path.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
black @ ${BLACK_PATH}
# via root-editable
root-editable @ ${ROOT_PATH}
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Request a remote wheel with a mismatched package name.
#[test]
fn requirement_wheel_name_mismatch() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("dateutil @ https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requested package name `dateutil` does not match `python-dateutil` in the distribution filename: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl
"###
);
Ok(())
}
/// `--generate-hashes` should not update the hashes in the "lockfile" if the package is not
/// upgraded.
#[test]
fn preserve_hashes_no_upgrade() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("markupsafe")?;
// Write a subset of the hashes to the "lockfile".
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
"})?;
// Avoid adding any additional hashes to the "lockfile".
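// Because `markupsafe` stays at 2.1.2 and `--upgrade` is not passed, the three hashes already
// present in the "lockfile" should be carried over unchanged.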
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--output-file")
.arg("requirements.txt")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --output-file requirements.txt --generate-hashes
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// `--generate-hashes` should update the hashes in the "lockfile" if the package is upgraded via
/// `--upgrade`.
#[test]
fn preserve_hashes_upgrade() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("markupsafe==2.1.2")?;
// Write a subset of the hashes to the "lockfile".
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
"})?;
// Requesting an upgrade should update the hashes, even if the version didn't change.
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--output-file")
.arg("requirements.txt")
.arg("--generate-hashes")
.arg("--upgrade"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --output-file requirements.txt --generate-hashes
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \
--hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \
--hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \
--hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \
--hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \
--hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \
--hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \
--hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \
--hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \
--hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \
--hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \
--hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \
--hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \
--hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \
--hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \
--hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \
--hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \
--hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \
--hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \
--hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \
--hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \
--hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \
--hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \
--hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \
--hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \
--hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \
--hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \
--hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \
--hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \
--hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \
--hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \
--hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \
--hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \
--hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \
--hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \
--hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \
--hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \
--hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \
--hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \
--hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \
--hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \
--hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \
--hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \
--hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \
--hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \
--hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \
--hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \
--hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// `--generate-hashes` should update the hashes in the "lockfile" if the package does not have
/// hashes, even if `--upgrade` is _not_ specified.
#[test]
fn preserve_hashes_no_existing_hashes() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("markupsafe")?;
// Write the "lockfile" without any hashes.
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
markupsafe==2.1.2
"})?;
// Since the existing entry has no hashes, the full set of hashes should be added to the
// "lockfile".
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--output-file")
.arg("requirements.txt")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --output-file requirements.txt --generate-hashes
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \
--hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \
--hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \
--hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \
--hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \
--hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \
--hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \
--hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \
--hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \
--hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \
--hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \
--hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \
--hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \
--hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \
--hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \
--hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \
--hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \
--hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \
--hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \
--hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \
--hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \
--hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \
--hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \
--hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \
--hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \
--hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \
--hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \
--hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \
--hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \
--hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \
--hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \
--hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \
--hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \
--hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \
--hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \
--hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \
--hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \
--hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \
--hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \
--hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \
--hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \
--hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \
--hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \
--hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \
--hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \
--hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \
--hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \
--hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// `--generate-hashes` should update the hashes in the "lockfile" if the package is upgraded due
/// to a change in requirements.
#[test]
fn preserve_hashes_newer_version() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("markupsafe==2.1.3")?;
// Write a subset of the hashes to the "lockfile".
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
"})?;
// Requesting a newer version should update the hashes.
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--output-file")
.arg("requirements.txt")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --output-file requirements.txt --generate-hashes
markupsafe==2.1.3 \
--hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
--hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \
--hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \
--hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \
--hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \
--hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \
--hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \
--hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \
--hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \
--hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \
--hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \
--hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \
--hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \
--hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \
--hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \
--hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \
--hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \
--hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \
--hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \
--hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \
--hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \
--hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \
--hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \
--hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \
--hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \
--hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \
--hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \
--hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \
--hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \
--hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \
--hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \
--hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \
--hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \
--hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \
--hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \
--hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \
--hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \
--hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \
--hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \
--hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \
--hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \
--hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \
--hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \
--hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \
--hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \
--hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \
--hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \
--hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \
--hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \
--hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \
--hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \
--hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \
--hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \
--hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \
--hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \
--hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \
--hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \
--hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \
--hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \
--hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Detect the package name from the metadata sources of local directories.
#[test]
fn unnamed_path_requirement() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
../../scripts/packages/poetry_editable
../../scripts/packages/black_editable
../../scripts/packages/setup_py_editable
../../scripts/packages/setup_cfg_editable
"
})?;
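// As the directory names suggest, these fixtures cover name detection from `pyproject.toml`
// (Poetry and static metadata), `setup.py`, and `setup.cfg` sources.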
uv_snapshot!(context.filters(), context.compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z [TEMP_DIR]/requirements.in
anyio==4.3.0
# via
# httpx
# poetry-editable
black @ ../../scripts/packages/black_editable
certifi==2024.2.2
# via
# httpcore
# httpx
# requests
charset-normalizer==3.3.2
# via requests
h11==0.14.0
# via httpcore
httpcore==1.0.4
# via httpx
httpx==0.27.0
# via setup-py-editable
idna==3.6
# via
# anyio
# httpx
# requests
poetry-editable @ ../../scripts/packages/poetry_editable
requests==2.31.0
# via setup-cfg-editable
setup-cfg-editable @ ../../scripts/packages/setup_cfg_editable
setup-py-editable @ ../../scripts/packages/setup_py_editable
sniffio==1.3.1
# via
# anyio
# httpx
urllib3==2.2.1
# via requests
----- stderr -----
Resolved 14 packages in [TIME]
"###);
Ok(())
}
/// Detect the package name from an unnamed Git requirement.
#[test]
fn unnamed_git_requirement() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("git+https://github.com/pallets/flask.git@3.0.0")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ git+https://github.com/pallets/flask.git@735a4701d6d5e848241e7d7535db898efb62d400
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###);
Ok(())
}
/// Detect the package name from an unnamed HTTPS requirement.
#[test]
fn unnamed_https_requirement() -> Result<()> {
// Given the filename `3.0.2.tar.gz`, we need to download the file to determine the package name.
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("https://github.com/pallets/flask/archive/refs/tags/3.0.2.tar.gz")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ https://github.com/pallets/flask/archive/refs/tags/3.0.2.tar.gz
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###);
Ok(())
}
/// Resolve a local package whose dependencies are declared dynamically.
#[test]
fn dynamic_dependencies() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("hatchling-dynamic @ ../../scripts/packages/hatchling_dynamic")?;
uv_snapshot!(context.filters(), context.compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z [TEMP_DIR]/requirements.in
anyio==4.3.0
# via hatchling-dynamic
hatchling-dynamic @ ../../scripts/packages/hatchling_dynamic
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###);
Ok(())
}
/// This tests the marker expressions emitted when depending on a package with
/// exciting markers like 'anyio'.
///
/// NOTE: This test runs on `linux` only because some of `anyio`'s markers
/// involve querying the specific platform being used to run `uv pip compile`.
/// Since this test was developed on Linux, the marker expression generated is
/// coupled with the Linux platform. Other tests for other platforms could be
/// added.
#[cfg(target_os = "linux")]
#[test]
fn emit_marker_expression_exciting_linux() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
uv_snapshot!(context
.compile()
.arg("requirements.in")
.arg("--emit-marker-expression"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --emit-marker-expression
# Pinned dependencies known to be valid for:
# platform_system == 'Linux' and python_version == '3.12' and platform_python_implementation == 'CPython'
anyio==4.3.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
Ok(())
}
/// This tests that the marker expression emitted accounts for markers directly
/// in `requirements.in`.
///
/// NOTE: This test runs on `linux` only because it requires that `sys_platform
/// == 'linux'` evaluates to `true`.
#[cfg(target_os = "linux")]
#[test]
fn emit_marker_expression_direct() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio ; sys_platform == 'linux'")?;
uv_snapshot!(context
.compile()
.arg("requirements.in")
.arg("--emit-marker-expression"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --emit-marker-expression
# Pinned dependencies known to be valid for:
# python_version == '3.12' and platform_python_implementation == 'CPython' and sys_platform == 'linux' and platform_system == 'Linux'
anyio==4.3.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
Ok(())
}
/// This tests that the marker expression emitted accounts for markers directly
/// in `requirements.in`, even when the marker evaluates to false on the
/// current platform. In this case, we set `sys_platform == 'macos'` so that on
/// Linux, this dependency is ignored. But the marker expression generated must
/// have `sys_platform == 'linux'`, since the locked set of packages might be
/// different (and indeed are different) on other platforms.
///
/// NOTE: This test runs on `linux` because it requires that `sys_platform
/// == 'macos'` evaluates to `false`. While this technically only requires
/// `not(target_os = "macos")`, the marker expression generated during test
/// development was on Linux. So we require Linux.
#[cfg(target_os = "linux")]
#[test]
fn emit_marker_expression_conditional() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio ; sys_platform == 'macos'")?;
uv_snapshot!(context
.compile()
.arg("requirements.in")
.arg("--emit-marker-expression"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --emit-marker-expression
# Pinned dependencies known to be valid for:
# sys_platform == 'linux'
----- stderr -----
Resolved 0 packages in [TIME]
"###);
Ok(())
}
/// This tests the marker expressions emitted when depending on a package with
/// a non-PyPy dependency. Specifically, `pendulum` depends on `time-machine`,
/// but not when using PyPy.
///
/// NOTE: This test runs on `linux` because it was written on Linux. While the
/// marker expression itself doesn't have anything in it that couples it to
/// Linux, it is possible for the resolution to change on other platforms. For
/// example, on Windows, the `tzdata` dependency is excluded. (It's actually
/// not clear why. The `tzdata` dependency appears to be an unconditional
/// dependency. And if anything, I'd expect it to be included on Windows and
/// excluded everywhere else... Odd.)
#[cfg(target_os = "linux")]
#[test]
fn emit_marker_expression_pypy() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("pendulum")?;
uv_snapshot!(context
.compile()
.arg("requirements.in")
.arg("--emit-marker-expression"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --emit-marker-expression
# Pinned dependencies known to be valid for:
# python_version == '3.12' and implementation_name == 'cpython'
pendulum==3.0.0
python-dateutil==2.9.0.post0
# via
# pendulum
# time-machine
six==1.16.0
# via python-dateutil
time-machine==2.14.1
# via pendulum
tzdata==2024.1
# via pendulum
----- stderr -----
Resolved 5 packages in [TIME]
"###);
Ok(())
}
/// A local version of a package is installed, shadowing the remote package of the same name.
#[test]
fn local_version_of_remote_package() -> Result<()> {
let context = TestContext::new("3.12");
let root_path = context.workspace_root.join("scripts/packages");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
uv_snapshot!(context.filters(), context.compile()
.arg(requirements_in.canonicalize()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio==4.3.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
// Actually install the local dependency
let mut command = context.install();
command.arg(root_path.join("anyio_local"));
uv_snapshot!(
context.filters(),
command, @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Downloaded 1 package in [TIME]
Installed 1 package in [TIME]
+ anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local)
"###
);
// The local version should _not_ be included in the resolution
uv_snapshot!(context.filters(), context.compile()
.arg(requirements_in.canonicalize()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio==4.3.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
// Write a lock file with the local version
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(&indoc::formatdoc! {r"
anyio @ {workspace_root}/scripts/packages/anyio_local
",
workspace_root = context.workspace_root.simplified_display(),
})?;
// The local version is _still_ excluded from the resolution
// `uv pip compile` does not have access to an environment and cannot consider installed packages
// We may want to allow the lock file to be preserved in this case in the future, but right now
// we require the URL to always be in the input file.
uv_snapshot!(context.filters(), context.compile()
.arg(requirements_in.canonicalize()?)
.arg("--output-file")
.arg(requirements_txt.canonicalize()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --output-file requirements.txt
anyio==4.3.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
Ok(())
}
#[test]
fn pendulum_no_tzdata_on_windows() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("pendulum")?;
uv_snapshot!(
context.filters(),
windows_filters=false,
context.compile().arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
pendulum==3.0.0
python-dateutil==2.9.0.post0
# via
# pendulum
# time-machine
six==1.16.0
# via python-dateutil
time-machine==2.14.1
# via pendulum
tzdata==2024.1
# via pendulum
----- stderr -----
Resolved 5 packages in [TIME]
"###);
Ok(())
}
/// Allow URL dependencies recursively for local source trees.
#[test]
fn allow_recursive_url_local_path() -> Result<()> {
let context = TestContext::new("3.12");
// Create a standalone library named "anyio".
let anyio = context.temp_dir.child("anyio");
anyio.create_dir_all()?;
let pyproject_toml = anyio.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "anyio"
version = "0.0.0"
dependencies = [
"idna"
]
requires-python = ">3.8"
"#,
)?;
// Create a library that depends on the standalone library.
let lib = context.temp_dir.child("lib");
lib.create_dir_all()?;
let pyproject_toml = lib.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "lib"
version = "0.0.0"
dependencies = [
"anyio @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(anyio.path()).unwrap().as_str(),
))?;
// Create an application that depends on the library.
let app = context.temp_dir.child("app");
app.create_dir_all()?;
let pyproject_toml = app.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"lib @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(lib.path()).unwrap().as_str(),
))?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("./app")?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio @ file://[TEMP_DIR]/anyio/
# via lib
example @ ./app
idna==3.6
# via anyio
lib @ file://[TEMP_DIR]/lib/
# via example
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Allow URL dependencies recursively for local source trees, but respect overrides.
#[test]
fn allow_recursive_url_local_path_override() -> Result<()> {
let context = TestContext::new("3.12");
// Create a standalone library named "anyio".
let anyio = context.temp_dir.child("anyio");
anyio.create_dir_all()?;
let pyproject_toml = anyio.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "anyio"
version = "0.0.0"
dependencies = [
"idna"
]
requires-python = ">3.8"
"#,
)?;
// Create a library that depends on the standalone library.
let lib = context.temp_dir.child("lib");
lib.create_dir_all()?;
let pyproject_toml = lib.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "lib"
version = "0.0.0"
dependencies = [
"anyio @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(anyio.path()).unwrap().as_str(),
))?;
// Create an application that depends on the library.
let app = context.temp_dir.child("app");
app.create_dir_all()?;
let pyproject_toml = app.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"lib @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(lib.path()).unwrap().as_str(),
))?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("./app")?;
// Create an override that pulls `anyio` from PyPI.
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("anyio==3.7.0")?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --override overrides.txt
anyio==3.7.0
# via lib
example @ ./app
idna==3.6
# via anyio
lib @ file://[TEMP_DIR]/lib/
# via example
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 5 packages in [TIME]
"###
);
Ok(())
}
/// Allow URL dependencies recursively for local source trees, but respect both overrides _and_
/// constraints.
#[test]
fn allow_recursive_url_local_path_override_constraint() -> Result<()> {
let context = TestContext::new("3.12");
// Create a standalone library named "anyio".
let anyio = context.temp_dir.child("anyio");
anyio.create_dir_all()?;
let pyproject_toml = anyio.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "anyio"
version = "0.0.0"
dependencies = [
"idna"
]
requires-python = ">3.8"
"#,
)?;
// Create a library that depends on the standalone library.
let lib = context.temp_dir.child("lib");
lib.create_dir_all()?;
let pyproject_toml = lib.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "lib"
version = "0.0.0"
dependencies = [
"anyio @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(anyio.path()).unwrap().as_str(),
))?;
// Create an application that depends on the library.
let app = context.temp_dir.child("app");
app.create_dir_all()?;
let pyproject_toml = app.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"lib @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(lib.path()).unwrap().as_str(),
))?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("./app")?;
// Create an override that pulls `anyio` from PyPI.
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("anyio==0.0.0")?;
// Ensure that resolution fails, since `0.0.0` does not exist on PyPI.
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because there is no version of anyio==0.0.0 and lib==0.0.0 depends on anyio==0.0.0, we can conclude that lib==0.0.0 cannot be used.
And because only lib==0.0.0 is available and example==0.0.0 depends on lib, we can conclude that example==0.0.0 cannot be used.
And because only example==0.0.0 is available and you require example, we can conclude that the requirements are unsatisfiable.
"###
);
// Now constrain `anyio` to the local version.
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("anyio @ ./anyio")?;
uv_snapshot!(context.filters(), context.compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --override overrides.txt --constraint constraints.txt
anyio @ ./anyio
# via lib
example @ ./app
idna==3.6
# via anyio
lib @ file://[TEMP_DIR]/lib
# via example
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Allow pre-releases for dependencies of source path requirements.
#[test]
fn pre_release_path_requirement() -> Result<()> {
let context = TestContext::new("3.12");
// Create a package that requires a pre-release version of `flask`.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"flask==2.0.0rc1"
]
requires-python = ">3.8"
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(".")?;
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
click==8.1.7
# via flask
example @ .
flask==2.0.0rc1
# via example
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Allow pre-releases for dependencies of editable requirements.
#[test]
fn pre_release_editable_requirement() -> Result<()> {
let context = TestContext::new("3.12");
// Create a package that requires a pre-release version of `flask`.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"flask==2.0.0rc1"
]
requires-python = ">3.8"
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e .")?;
uv_snapshot!( context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
-e .
click==8.1.7
# via flask
flask==2.0.0rc1
# via example
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Built 1 editable in [TIME]
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package via `--extra-index-url`.
///
/// If the package exists on the "extra" index, but at an incompatible version, the resolution
/// should fail by default (even though a compatible version exists on the "primary" index).
#[test]
fn compile_index_url_first_match() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("jinja2==3.1.0")?;
uv_snapshot!(context.compile()
.arg("--index-url")
.arg("https://pypi.org/simple")
.arg("--extra-index-url")
.arg("https://download.pytorch.org/whl/cpu")
.arg("requirements.in")
.arg("--no-deps"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because there is no version of jinja2==3.1.0 and you require jinja2==3.1.0, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Resolve a package via `--extra-index-url`.
///
/// If the package exists on the "extra" index, but at an incompatible version, the resolution
/// should fall back to the "primary" index when `--index-strategy unsafe-any-match` is provided.
#[test]
fn compile_index_url_fallback() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("jinja2==3.1.0")?;
uv_snapshot!(context.compile()
.arg("--index-strategy")
.arg("unsafe-any-match")
.arg("--index-url")
.arg("https://pypi.org/simple")
.arg("--extra-index-url")
.arg("https://download.pytorch.org/whl/cpu")
.arg("requirements.in")
.arg("--no-deps"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z --index-strategy unsafe-any-match requirements.in --no-deps
jinja2==3.1.0
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a package via `--extra-index-url`.
///
/// If the package exists on the "extra" index at a compatible version, the resolver should
/// prefer it, even if a newer version exists on the "primary" index.
///
/// In this case, Jinja 3.1.2 is hosted on the "extra" index, but newer versions are available on
/// the "primary" index.
#[test]
fn compile_index_url_fallback_prefer_primary() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("jinja2")?;
uv_snapshot!(context.compile_without_exclude_newer()
.arg("--index-strategy")
.arg("unsafe-any-match")
.arg("--index-url")
.arg("https://pypi.org/simple")
.arg("--extra-index-url")
.arg("https://download.pytorch.org/whl/cpu")
.arg("requirements.in")
.arg("--no-deps"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --index-strategy unsafe-any-match requirements.in --no-deps
jinja2==3.1.3
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Ensure that the username and the password are omitted when
/// index annotations are displayed via `--emit-index-annotation`.
#[test]
fn emit_index_annotation_hide_password() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("requests")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-index-annotation")
.env("UV_INDEX_URL", "https://test-user:test-password@pypi.org/simple"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --emit-index-annotation
certifi==2024.2.2
# via requests
# from https://pypi.org/simple
charset-normalizer==3.3.2
# via requests
# from https://pypi.org/simple
idna==3.6
# via requests
# from https://pypi.org/simple
requests==2.31.0
# from https://pypi.org/simple
urllib3==2.2.1
# via requests
# from https://pypi.org/simple
----- stderr -----
Resolved 5 packages in [TIME]
"###
);
Ok(())
}
/// Ensure that `--emit-index-annotation` prints the index URL for each package.
#[test]
fn emit_index_annotation_pypi_org_simple() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("requests")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-index-annotation"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --emit-index-annotation
certifi==2024.2.2
# via requests
# from https://pypi.org/simple
charset-normalizer==3.3.2
# via requests
# from https://pypi.org/simple
idna==3.6
# via requests
# from https://pypi.org/simple
requests==2.31.0
# from https://pypi.org/simple
urllib3==2.2.1
# via requests
# from https://pypi.org/simple
----- stderr -----
Resolved 5 packages in [TIME]
"###
);
Ok(())
}
/// Ensure that `--emit-index-annotation` plays nicely with `--no-annotate`.
///
/// For now, `--no-annotate` doesn't affect `--emit-index-annotation`: the index annotation is
/// still emitted, and `--no-annotate` only suppresses the package _source_ annotations.
#[test]
fn emit_index_annotation_no_annotate() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("requests")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-index-annotation")
.arg("--no-annotate"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --emit-index-annotation --no-annotate
certifi==2024.2.2
# from https://pypi.org/simple
charset-normalizer==3.3.2
# from https://pypi.org/simple
idna==3.6
# from https://pypi.org/simple
requests==2.31.0
# from https://pypi.org/simple
urllib3==2.2.1
# from https://pypi.org/simple
----- stderr -----
Resolved 5 packages in [TIME]
"###
);
Ok(())
}

/// Ensure that `--emit-index-annotation` plays nicely with `--annotation-style=line`.
#[test]
fn emit_index_annotation_line() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("requests")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--emit-index-annotation")
.arg("--annotation-style")
.arg("line"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --emit-index-annotation --annotation-style line
certifi==2024.2.2 # via requests
# from https://pypi.org/simple
charset-normalizer==3.3.2 # via requests
# from https://pypi.org/simple
idna==3.6 # via requests
# from https://pypi.org/simple
requests==2.31.0
# from https://pypi.org/simple
urllib3==2.2.1 # via requests
# from https://pypi.org/simple
----- stderr -----
Resolved 5 packages in [TIME]
"###
);
Ok(())
}

/// Test `--emit-index-annotation` when packages are pulled from two distinct indexes.
#[test]
fn emit_index_annotation_multiple_indexes() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("uv\nrequests")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--extra-index-url")
.arg("https://test.pypi.org/simple")
.arg("--emit-index-annotation"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --emit-index-annotation
requests==2.5.4.1
# from https://test.pypi.org/simple
uv==0.1.24
# from https://pypi.org/simple
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}

/// Test the error message when a direct dependency has an empty (unsatisfiable) version set.
#[test]
fn no_version_for_direct_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("pypyp==1,>=1.2")?;
uv_snapshot!(context.compile()
.arg("requirements.in")
// Must error before we make any network requests
.arg("--offline"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ you require pypyp ∅
"###
);
Ok(())
}

/// Compile against an explicit target platform, which may differ from the current platform.
#[test]
fn python_platform() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black")?;
uv_snapshot!(context.filters(),
windows_filters=false,
context.compile()
.arg("requirements.in")
.arg("--python-platform")
.arg("aarch64-unknown-linux-gnu"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --python-platform aarch64-unknown-linux-gnu
black==24.3.0
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
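    // The Windows resolution below additionally includes `colorama`, which `click` only requires
    // on Windows (guarded by an environment marker).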
uv_snapshot!(context.filters(),
windows_filters=false,
context.compile()
.arg("requirements.in")
.arg("--python-platform")
.arg("x86_64-pc-windows-msvc"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --python-platform x86_64-pc-windows-msvc
black==24.3.0
click==8.1.7
# via black
colorama==0.4.6
# via click
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}

/// Verify that command-line arguments take precedence over on-disk configuration.
#[test]
fn resolve_configuration() -> Result<()> {
let context = TestContext::new("3.12");
// Write a `uv.toml` file to the directory.
let config = context.temp_dir.child("uv.toml");
config.write_str(indoc::indoc! {r#"
[pip]
resolution = "lowest-direct"
generate-hashes = true
"#})?;
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio>3.0.0")?;
// Resolution should use the lowest direct version, and generate hashes.
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio==3.0.1 \
--hash=sha256:1ef7622396ab55829d4236a6f75e2199df6d26a4ba79bea0cb942a5fd2f79a23 \
--hash=sha256:ed71f7542ef39875b65def219794d9dcb0a48c571317b13612c12b1f292701b5
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
    // The `--resolution=highest` flag should override the `uv.toml` setting, while `generate-hashes`
    // from `uv.toml` still applies: highest version, with hashes.
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--resolution=highest"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --resolution=highest
anyio==4.3.0 \
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8 \
--hash=sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
    // With both `--resolution=highest` and `--no-generate-hashes` passed on the command line,
    // resolution should use the highest version and omit hashes.
uv_snapshot!(context.compile()
.arg("requirements.in")
.arg("--resolution=highest")
.arg("--no-generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in --resolution=highest --no-generate-hashes
anyio==4.3.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
// Write a `pyproject.toml` file to the directory.
let pyproject = context.temp_dir.child("pyproject.toml");
pyproject.write_str(indoc::indoc! {r#"
[project]
name = "example"
version = "0.0.0"
"#})?;
// Resolution should use the lowest direct version, and generate hashes.
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio==3.0.1 \
--hash=sha256:1ef7622396ab55829d4236a6f75e2199df6d26a4ba79bea0cb942a5fd2f79a23 \
--hash=sha256:ed71f7542ef39875b65def219794d9dcb0a48c571317b13612c12b1f292701b5
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
// Remove the `uv.toml` file.
fs_err::remove_file(config.path())?;
    // With the `uv.toml` removed (and no `[tool.uv]` configuration in `pyproject.toml`), resolution
    // should fall back to the defaults: highest version, no hashes.
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio==4.3.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
// Add configuration to the `pyproject.toml` file.
pyproject.write_str(indoc::indoc! {r#"
[project]
name = "example"
version = "0.0.0"
[tool.uv.pip]
resolution = "lowest-direct"
generate-hashes = true
"#})?;
    // The `[tool.uv.pip]` settings in `pyproject.toml` should now apply: lowest direct version,
    // with hashes.
uv_snapshot!(context.compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --exclude-newer 2024-03-25T00:00:00Z requirements.in
anyio==3.0.1 \
--hash=sha256:1ef7622396ab55829d4236a6f75e2199df6d26a4ba79bea0cb942a5fd2f79a23 \
--hash=sha256:ed71f7542ef39875b65def219794d9dcb0a48c571317b13612c12b1f292701b5
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}