uv/crates/uv/tests/pip_compile.rs
Di-Is 32ad3323a1
Add constraint dependencies to pyproject.toml (#5248)
Resolves #4467.

## Summary

This PR implements the following

1. Add `tool.uv.constraint-dependencies` to pyproject.toml
1. Support referencing `tool.uv.constraint-dependencies` in `uv lock`
1. Support referencing `tool.uv.constraint-dependencies` in `uv pip
compile/install`

These are analogues of the override features implemented in #3839 and
#4369.

## Test Plan

Add tests covering the new behavior.
2024-07-21 19:45:04 -04:00

11041 lines
345 KiB
Rust
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

#![cfg(all(feature = "python", feature = "pypi"))]
#![allow(clippy::disallowed_types)]
use std::env::current_dir;
use std::fs;
use std::path::PathBuf;
use anyhow::{bail, Context, Result};
use assert_fs::prelude::*;
use indoc::indoc;
use url::Url;
use common::{uv_snapshot, TestContext};
use uv_fs::Simplified;
mod common;
/// Resolve a specific version of `anyio` from a `requirements.in` file.
#[test]
fn compile_requirements_in() -> Result<()> {
    let ctx = TestContext::new("3.12");
    // Pin a single direct requirement; the resolver should pull in its
    // transitive dependencies (`idna`, `sniffio`).
    ctx.temp_dir
        .child("requirements.in")
        .write_str("anyio==3.7.0")?;
    uv_snapshot!(ctx.pip_compile().arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
anyio==3.7.0
# via -r requirements.in
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
    Ok(())
}
/// Resolve a specific version of `anyio` from a `requirements.in` file with a `--annotation-style=line` flag.
#[test]
fn compile_requirements_in_annotation_line() -> Result<()> {
    let ctx = TestContext::new("3.12");
    ctx.temp_dir
        .child("requirements.in")
        .write_str("anyio==3.7.0")?;
    // With line-style annotations, each `# via …` comment is appended to the
    // requirement line rather than placed on its own line.
    uv_snapshot!(ctx
        .pip_compile()
        .arg("--annotation-style=line")
        .arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --annotation-style=line requirements.in
anyio==3.7.0 # via -r requirements.in
idna==3.6 # via anyio
sniffio==1.3.1 # via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
    Ok(())
}
/// Resolve a specific version of `anyio` from a `requirements.in` file on stdin
/// when passed a path of `-`.
#[test]
fn compile_requirements_in_stdin() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio==3.7.0")?;
    // Feed the requirements file via stdin; `-` tells uv to read from it.
    // Note: no `# via -r …` annotation is attached to `anyio` here, since the
    // requirement came from stdin rather than a named file.
    uv_snapshot!(context
        .pip_compile()
        .stdin(fs::File::open(requirements_in)?)
        .arg("-"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] -
anyio==3.7.0
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
    Ok(())
}
/// Compiling a `requirements.in` path that does not exist should fail, and
/// must not create the file as a side effect.
#[test]
fn missing_requirements_in() {
    let ctx = TestContext::new("3.12");
    uv_snapshot!(ctx.pip_compile().arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: File not found: `requirements.in`
"###
    );
    // The failed run must leave no `requirements.in` behind.
    ctx.temp_dir
        .child("requirements.in")
        .assert(predicates::path::missing());
}
/// `pip compile` should succeed even when the virtual environment has been
/// deleted, and must not recreate it.
#[test]
fn missing_venv() -> Result<()> {
    let ctx = TestContext::new("3.12");
    ctx.temp_dir.child("requirements.in").touch()?;
    // Remove the environment the test context created.
    fs_err::remove_dir_all(ctx.venv.path())?;
    uv_snapshot!(ctx.filters(), ctx.pip_compile().arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
----- stderr -----
warning: Requirements file requirements.in does not contain any dependencies
Resolved 0 packages in [TIME]
"###
    );
    // The environment must still be absent after the run.
    ctx.venv.assert(predicates::path::missing());
    Ok(())
}
/// Resolve a specific version of `anyio` from a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // A minimal static project: requirements can be read without a build.
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
version = "0.1.0"
dependencies = [
"anyio==3.7.0",
]
"#,
    )?;
    // Direct dependencies are annotated with the project name and source file.
    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
anyio==3.7.0
# via project (pyproject.toml)
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
    );
    Ok(())
}
/// Resolve a specific version of `anyio` from a `pyproject.toml` file. Despite the version being
/// dynamic, we shouldn't need to build the package, since the requirements are static.
#[test]
fn compile_pyproject_toml_dynamic_version() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // `version` is declared dynamic, but `dependencies` remains static, so the
    // metadata can be extracted without invoking the build backend.
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dynamic = ["version"]
dependencies = [
"anyio==3.7.0",
]
"#,
    )?;
    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
anyio==3.7.0
# via project (pyproject.toml)
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
    );
    Ok(())
}
/// Resolve a specific version of `anyio` from a `pyproject.toml` file with `--annotation-style=line`.
#[test]
fn compile_pyproject_toml_with_line_annotation() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
version = "0.1.0"
dependencies = [
"anyio==3.7.0",
]
"#,
    )?;
    // `--annotation-style=line` appends the `# via …` annotation to each
    // requirement line instead of emitting it on following lines.
    uv_snapshot!(context.pip_compile()
        .arg("--annotation-style=line")
        .arg("pyproject.toml"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --annotation-style=line pyproject.toml
anyio==3.7.0 # via project (pyproject.toml)
idna==3.6 # via anyio
sniffio==1.3.1 # via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
    );
    Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file.
#[test]
fn compile_constraints_txt() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio==3.7.0")?;
    // Constrain the transitive dependency `idna`; without the constraint the
    // resolver would pick 3.6 (as in `compile_requirements_in`), with it 3.3.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("idna<3.4")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
anyio==3.7.0
# via -r requirements.in
idna==3.3
# via
# -c constraints.txt
# anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
    );
    Ok(())
}
/// Resolve a package from a `requirements.in` file, with an inline constraint.
#[test]
fn compile_constraints_inline() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio==3.7.0")?;
    // NOTE(review): `write_str` replaces the file's contents, so this second
    // call discards the `anyio==3.7.0` line above and the file ends up
    // containing only `-c constraints.txt`. That is why the snapshot below
    // resolves 0 packages: constraints alone contribute no requirements.
    // If the intent was to test an inline constraint alongside a requirement,
    // both lines should be written in a single call — but that would change
    // the snapshot, so it is only flagged here. TODO confirm intent.
    requirements_in.write_str("-c constraints.txt")?;
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("idna<3.4")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
----- stderr -----
Resolved 0 packages in [TIME]
"###
    );
    Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file that
/// uses markers.
#[test]
fn compile_constraints_markers() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio")?;
    // Constrain a transitive dependency based on the Python version.
    // If constraints are ignored, these will conflict.
    //
    // Both constraint lines must be written in a single call: `write_str`
    // replaces the file's contents, so two consecutive calls would leave only
    // the second line in the file. On Python 3.12, only the
    // `python_version>'3.7'` constraint applies, pinning `sniffio==1.3.0`.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str(
        "sniffio==1.2.0;python_version<='3.7'\nsniffio==1.3.0;python_version>'3.7'",
    )?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
anyio==4.3.0
# via -r requirements.in
idna==3.6
# via anyio
sniffio==1.3.0
# via
# -c constraints.txt
# anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
    );
    Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file that uses an
/// extra. The constraint should be enforced, but the extra should _not_ be included in the output
/// (though it currently _is_ included).
#[test]
fn compile_constraint_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask")?;
    // Constrain `flask` itself via a constraint that names an extra
    // (`[dotenv]`); the version bound should apply regardless of the extra.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("flask[dotenv]<24.3.0")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask==3.0.2
# via
# -c constraints.txt
# -r requirements.in
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
    );
    Ok(())
}
/// Resolve a package from an optional extra in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // No base dependencies; `anyio` is only reachable through the `foo` extra.
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
version = "0.1.0"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
]
"#,
    )?;
    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .arg("--extra")
        .arg("foo"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra foo
anyio==3.7.0
# via project (pyproject.toml)
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
    );
    Ok(())
}
/// Resolve a package from an extra with non-normalized names in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra_name_normalization() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
version = "0.1.0"
dependencies = []
optional-dependencies."FrIeNdLy-._.-bArD" = [
"anyio==3.7.0",
]
"#,
    )?;
    // The requested extra differs from the declared one in case and separator
    // characters; both should normalize to the same extra name and match.
    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .arg("--extra")
        .arg("FRiENDlY-...-_-BARd"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra FRiENDlY-...-_-BARd
anyio==3.7.0
# via project (pyproject.toml)
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
    );
    Ok(())
}
/// Request an extra that does not exist in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra_missing() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
version = "0.1.0"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
]
"#,
    )?;
    // Only `foo` is declared; requesting `bar` must be a hard error.
    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .arg("--extra")
        .arg("bar"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requested extra not found: bar
"###
    );
    Ok(())
}
/// Compile a `pyproject.toml` file with a `poetry` section.
#[test]
fn compile_pyproject_toml_poetry() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // Poetry-style metadata only (no `[project]` table): dependencies live
    // under `[tool.poetry.dependencies]` and extras under `[tool.poetry.extras]`.
    pyproject_toml.write_str(
        r#"[tool.poetry]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]
[tool.poetry.dependencies]
python = "^3.10"
anyio = "^3"
pytest = { version = "*", optional = true }
[tool.poetry.extras]
test = ["pytest"]
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
"#,
    )?;
    // Requesting the `test` extra should pull in `pytest` and its dependencies.
    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .arg("--extra")
        .arg("test"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra test
anyio==3.7.1
# via poetry-editable (pyproject.toml)
idna==3.6
# via anyio
iniconfig==2.0.0
# via pytest
packaging==24.0
# via pytest
pluggy==1.4.0
# via pytest
pytest==8.1.1
# via poetry-editable (pyproject.toml)
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 7 packages in [TIME]
"###
    );
    Ok(())
}
/// Compile a `pyproject.toml` file with a `poetry` section and a `project` section without a
/// `dependencies` field, which should be treated as an empty list.
#[test]
fn compile_pyproject_toml_poetry_empty_dependencies() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // The `[project]` table has no `dependencies`; the Poetry dependencies
    // should still be honored (resolving `anyio` via `[tool.poetry.dependencies]`).
    pyproject_toml.write_str(
        r#"[project]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]
[tool.poetry]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]
[tool.poetry.dependencies]
python = "^3.10"
anyio = "^3"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
"#,
    )?;
    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
anyio==3.7.1
# via poetry-editable (pyproject.toml)
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
    );
    Ok(())
}
/// Compile a `pyproject.toml` file with a `poetry` section and a `project` section with an invalid
/// `dependencies` field.
#[test]
fn compile_pyproject_toml_poetry_invalid_dependencies() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // `[project.dependencies]` is written as a TOML table here, but PEP 621
    // requires it to be an array of requirement strings — parsing must fail.
    pyproject_toml.write_str(
        r#"[project]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]
[tool.poetry]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]
[project.dependencies]
python = "^3.12"
msgspec = "^0.18.4"
[tool.poetry.dependencies]
python = "^3.10"
anyio = "^3"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
"#,
    )?;
    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to extract static metadata from `pyproject.toml`
Caused by: TOML parse error at line 13, column 1
|
13 | [project.dependencies]
| ^^^^^^^^^^^^^^^^^^^^^^
invalid type: map, expected a sequence
"###
    );
    Ok(())
}
/// Compile a `pyproject.toml` file that uses setuptools as the build backend.
#[test]
fn compile_pyproject_toml_setuptools() -> Result<()> {
    let context = TestContext::new("3.12");
    // The `pyproject.toml` carries no `[project]` metadata, so the
    // requirements come from `setup.cfg` via the setuptools backend.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
"#,
    )?;
    // `dev` extra mixes a marker that matches 3.12 (`iniconfig`) with one that
    // does not (`mypy`), so only `iniconfig` appears in the output.
    let setup_cfg = context.temp_dir.child("setup.cfg");
    setup_cfg.write_str(
        r#"[options]
packages = find:
install_requires=
anyio
[options.extras_require]
dev =
iniconfig; python_version >= "3.7"
mypy; python_version <= "3.8"
"#,
    )?;
    let setup_py = context.temp_dir.child("setup.py");
    setup_py.write_str(
        r#"# setup.py
from setuptools import setup
setup(
name="dummypkg",
description="A dummy package",
)
"#,
    )?;
    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .arg("--extra")
        .arg("dev"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra dev
anyio==4.3.0
# via dummypkg (pyproject.toml)
idna==3.6
# via anyio
iniconfig==2.0.0
# via dummypkg (pyproject.toml)
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###
    );
    Ok(())
}
/// Compile a `setup.cfg` file.
#[test]
fn compile_setup_cfg() -> Result<()> {
    let context = TestContext::new("3.12");
    // Same fixture as `compile_pyproject_toml_setuptools`, but `setup.cfg` is
    // passed directly on the command line (note the `(setup.cfg)` annotations).
    let setup_cfg = context.temp_dir.child("setup.cfg");
    setup_cfg.write_str(
        r#"[options]
packages = find:
install_requires=
anyio
[options.extras_require]
dev =
iniconfig; python_version >= "3.7"
mypy; python_version <= "3.8"
"#,
    )?;
    let setup_py = context.temp_dir.child("setup.py");
    setup_py.write_str(
        r#"# setup.py
from setuptools import setup
setup(
name="dummypkg",
description="A dummy package",
)
"#,
    )?;
    uv_snapshot!(context.pip_compile()
        .arg("setup.cfg")
        .arg("--extra")
        .arg("dev"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] setup.cfg --extra dev
anyio==4.3.0
# via dummypkg (setup.cfg)
idna==3.6
# via anyio
iniconfig==2.0.0
# via dummypkg (setup.cfg)
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###
    );
    Ok(())
}
/// Compile a `setup.py` file.
#[test]
fn compile_setup_py() -> Result<()> {
    let context = TestContext::new("3.12");
    // All metadata lives in `setup.py` (no `setup.cfg`/`pyproject.toml`), so
    // extracting requirements requires invoking the build backend.
    let setup_py = context.temp_dir.child("setup.py");
    setup_py.write_str(
        r#"# setup.py
from setuptools import setup
setup(
name="dummypkg",
description="A dummy package",
install_requires=["anyio"],
extras_require={
"dev": ["iniconfig; python_version >= '3.7'", "mypy; python_version <= '3.8'"],
},
)
"#,
    )?;
    // On 3.12, the `mypy` marker is inactive; only `iniconfig` joins the output.
    uv_snapshot!(context.pip_compile()
        .arg("setup.py")
        .arg("--extra")
        .arg("dev"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] setup.py --extra dev
anyio==4.3.0
# via dummypkg (setup.py)
idna==3.6
# via anyio
iniconfig==2.0.0
# via dummypkg (setup.py)
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###
    );
    Ok(())
}
/// Resolve a `pyproject.toml` file with an invalid project name.
#[test]
fn compile_pyproject_toml_invalid_name() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // `!project` violates PEP 503/508 name rules (must start with a letter or
    // digit), so parsing the file should fail with a TOML-level error.
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "!project"
dependencies = [
"anyio==3.7.0",
]
"#,
    )?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("pyproject.toml"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse: `pyproject.toml`
Caused by: TOML parse error at line 5, column 8
|
5 | name = "!project"
| ^^^^^^^^^^
Not a valid package or extra name: "!project". Names must start and end with a letter or digit and may only contain -, _, ., and alphanumeric characters.
"###
    );
    Ok(())
}
/// Request multiple extras that do not exist in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extras_missing() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
version = "0.1.0"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
]
"#,
    )?;
    // `foo` exists but `bar` and `foobar` do not; the error message should
    // list all of the missing extras, not just the first one.
    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .arg("--extra")
        .arg("foo")
        .arg("--extra")
        .arg("bar")
        .arg("--extra")
        .arg("foobar"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requested extras not found: bar, foobar
"###
    );
    Ok(())
}
/// Request extras when using a `requirements.in` file which does not support extras.
#[test]
fn compile_requirements_file_extra() -> Result<()> {
    let ctx = TestContext::new("3.12");
    ctx.temp_dir
        .child("requirements.in")
        .write_str("anyio==3.7.0")?;
    // Extras only make sense for project metadata sources; a plain
    // requirements file must be rejected with a clear error.
    uv_snapshot!(ctx.pip_compile().arg("requirements.in").arg("--all-extras"),
        @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requesting extras requires a `pyproject.toml`, `setup.cfg`, or `setup.py` file.
"###
    );
    Ok(())
}
/// Request an extra with a name that does not conform to the specification.
#[test]
fn invalid_extra_name() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools", "wheel"]
[project]
name = "project"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
]
"#,
    )?;
    // The malformed extra name is rejected during CLI argument parsing,
    // before any resolution takes place.
    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .arg("--extra")
        .arg("invalid name!"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value 'invalid name!' for '--extra <EXTRA>': Extra names must start and end with a letter or digit and may only contain -, _, ., and alphanumeric characters
For more information, try '--help'.
"###
    );
    Ok(())
}
/// Resolve a specific version of Black at Python 3.12.
#[test]
fn compile_python_312() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;
    // Target Python 3.12 explicitly; with it, black's conditional dependencies
    // (e.g. typing backports) are excluded from the resolution.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--python-version")
        .arg("3.12"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-version 3.12
black==23.10.1
# via -r requirements.in
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
    );
    Ok(())
}
/// Resolve a specific version of Black at Python 3.12 with `--annotation-style=line`.
#[test]
fn compile_python_312_annotation_line() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;
    // Same resolution as `compile_python_312`, but with inline `# via` comments.
    uv_snapshot!(context.pip_compile()
        .arg("--annotation-style=line")
        .arg("requirements.in")
        .arg("--python-version")
        .arg("3.12"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --annotation-style=line requirements.in --python-version 3.12
black==23.10.1 # via -r requirements.in
click==8.1.7 # via black
mypy-extensions==1.0.0 # via black
packaging==24.0 # via black
pathspec==0.12.1 # via black
platformdirs==4.2.0 # via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
    );
    Ok(())
}
/// Resolve a specific version of Black at Python 3.12 without deps.
#[test]
fn compile_python_312_no_deps() -> Result<()> {
    let ctx = TestContext::new("3.12");
    ctx.temp_dir
        .child("requirements.in")
        .write_str("black==23.10.1")?;
    // With `--no-deps`, only the direct requirement is emitted; black's own
    // dependencies are not resolved.
    uv_snapshot!(ctx
        .pip_compile()
        .arg("requirements.in")
        .arg("--no-deps")
        .arg("--python-version")
        .arg("3.12"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-deps --python-version 3.12
black==23.10.1
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###
    );
    Ok(())
}
/// Resolve a specific version of Black at Python 3.7.
#[test]
fn compile_python_37() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;
    // Extra filters make the output stable across environments:
    let filters: Vec<_> = [
        // 3.7 may not be installed
        (
            "warning: The requested Python version 3.7 is not available; .* will be used to build dependencies instead.\n",
            "",
        ),
        // uv itself warns that it no longer supports 3.7.
        (r"warning: uv is only compatible with Python 3\.8\+, found Python 3\.7.*\n", "")
    ]
    .into_iter()
    .chain(context.filters())
    .collect();
    // black 23.10.1 requires Python>=3.8, so resolution must fail for 3.7.
    uv_snapshot!(filters, context.pip_compile()
        .arg("requirements.in")
        .arg("--python-version")
        .arg("3.7"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because the requested Python version (3.7.0) does not satisfy Python>=3.8 and black==23.10.1 depends on Python>=3.8, we can conclude that black==23.10.1 cannot be used.
And because you require black==23.10.1, we can conclude that the requirements are unsatisfiable.
"###);
    Ok(())
}
/// Resolve a source distribution with `--resolution=lowest-direct`, to ensure that the build
/// requirements aren't resolved at their lowest compatible version.
#[test]
fn compile_sdist_resolution_lowest() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    // A direct URL to an sdist: building it exercises the build-requirement
    // resolution path, which must not be affected by `lowest-direct`.
    requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--resolution=lowest-direct")
        .arg("--python-version")
        .arg("3.12"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --resolution=lowest-direct --python-version 3.12
anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz
# via -r requirements.in
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
    );
    Ok(())
}
/// Resolve a specific version of Black against an invalid Python version.
#[test]
fn compile_python_invalid_version() -> Result<()> {
    let ctx = TestContext::new("3.12");
    ctx.temp_dir
        .child("requirements.in")
        .write_str("black==23.10.1")?;
    // `3.7.x` is not a parseable version; the CLI rejects it before resolving.
    uv_snapshot!(ctx
        .pip_compile()
        .arg("requirements.in")
        .arg("--python-version")
        .arg("3.7.x"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value '3.7.x' for '--python-version <PYTHON_VERSION>': Python version `3.7.x` could not be parsed: after parsing '3.7', found '.x', which is not part of a valid version
For more information, try '--help'.
"###
    );
    Ok(())
}
/// Resolve a specific version of Black against a development Python version,
/// which is rejected at argument-parsing time.
#[test]
fn compile_python_dev_version() -> Result<()> {
    let ctx = TestContext::new("3.12");
    ctx.temp_dir
        .child("requirements.in")
        .write_str("black==23.10.1")?;
    uv_snapshot!(ctx
        .pip_compile()
        .arg("requirements.in")
        .arg("--python-version")
        .arg("3.7-dev"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: invalid value '3.7-dev' for '--python-version <PYTHON_VERSION>': Python version `3.7-dev` is a development release
For more information, try '--help'.
"###
    );
    Ok(())
}
/// Omit the constraint annotation (e.g., `# from -c constraints.txt`) when the constraint is not
/// applicable due to a marker expression.
#[test]
fn omit_non_matching_annotation() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio")?;
    // The constraint is gated on `python_version < '3.7'`, which is false on
    // 3.12, so `idna` resolves unconstrained (3.6) and carries no
    // `-c constraints.txt` annotation.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("idna <3.7; python_version < '3.7'")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("-c")
        .arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in -c constraints.txt
anyio==4.3.0
# via -r requirements.in
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
    );
    Ok(())
}
/// Test that we select the last 3.8 compatible numpy version instead of trying to compile an
/// incompatible sdist <https://github.com/astral-sh/uv/issues/388>
#[test]
fn compile_numpy_py38() -> Result<()> {
    let ctx = TestContext::new("3.8");
    ctx.temp_dir.child("requirements.in").write_str("numpy")?;
    // `--no-build` forbids sdists, so the resolver must back off to the newest
    // numpy that still ships a 3.8-compatible wheel (1.24.4).
    uv_snapshot!(ctx
        .pip_compile()
        .arg("requirements.in")
        .arg("--no-build"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-build
numpy==1.24.4
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###
    );
    Ok(())
}
/// Resolve a specific Flask wheel via a URL dependency.
#[test]
fn compile_wheel_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    // A direct URL to a built wheel; metadata is read from the wheel, no build
    // step is needed.
    requirements_in.write_str("flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
    // The URL requirement is echoed verbatim; transitive deps resolve normally.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
# via -r requirements.in
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
    );
    Ok(())
}
/// Resolve a specific Flask source distribution via a URL dependency.
///
/// Exercises the `prepare_metadata_for_build_wheel` hooks.
#[test]
fn compile_sdist_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    // A direct URL to an sdist: metadata must be produced via the PEP 517
    // `prepare_metadata_for_build_wheel` hook rather than read from a wheel.
    requirements_in.write_str("flask @ https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz
# via -r requirements.in
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
    );
    Ok(())
}
/// Resolve a specific source distribution via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_https_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(
        "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage",
    )?;

    // In addition to the standard filters, remove the `main` commit, which will change frequently.
    let filters: Vec<_> = [(r"@(\d|\w){40}", "@[COMMIT]")]
        .into_iter()
        .chain(context.filters())
        .collect();

    uv_snapshot!(filters, context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@[COMMIT]
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}
/// Resolve a specific branch via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_branch_https_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(
        "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@test-branch",
    )?;

    // The `test-branch` ref is resolved to its pinned commit SHA in the output.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a specific tag via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_tag_https_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(
        "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@test-tag",
    )?;

    // The `test-tag` ref is resolved to its pinned commit SHA in the output.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a specific tag via a Git HTTPS dependency.
///
/// In this case, the tag is a date, and thus could feasibly refer to a short commit hash.
#[test]
#[cfg(feature = "git")]
fn compile_git_date_tag_https_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(
        "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@20240402",
    )?;

    // `20240402` must be treated as a tag (not an abbreviated SHA) and pinned to its commit.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a specific commit via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_long_commit_https_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(
        "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979",
    )?;

    // A full 40-character SHA is already precise, so the output echoes it unchanged.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a specific commit via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_short_commit_https_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(
        "uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd6",
    )?;

    // The abbreviated SHA `0dacfd6` is expanded to the full 40-character commit in the output.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a specific ref via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_refs_https_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in
        .write_str("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@refs/pull/4/head")?;

    // A non-branch, non-tag ref (a GitHub pull-request head) is pinned to its commit SHA.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@9d01a806f17ddacb9c7b66b1b68574adf790b63f
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a specific Git dependency with a subdirectory.
#[test]
#[cfg(feature = "git")]
fn compile_git_subdirectory_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a")?;

    // The `#subdirectory=` fragment must be preserved in the resolved output.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Resolve two packages from a `requirements.in` file with the same Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_concurrent_access() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // Two named requirements from the same repository/commit, differing only in subdirectory;
    // exercises concurrent access to a single Git checkout.
    requirements_in
        .write_str("example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a\nexample-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a
        # via -r requirements.in
    example-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b
        # via -r requirements.in

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}
/// Resolve two packages from a `requirements.in` file with the same Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn compile_git_unnamed_concurrent_access() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // Same as `compile_git_concurrent_access`, but the requirements are unnamed (bare URLs),
    // so the package names must be discovered from the repository metadata.
    requirements_in
        .write_str("git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a\ngit+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a
        # via -r requirements.in
    example-pkg-b @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_b
        # via -r requirements.in

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a Git dependency with a declared name that differs from the true name of the package.
#[test]
#[cfg(feature = "git")]
fn compile_git_mismatched_name() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // The second requirement claims to be `dask` but points at the Flask repository; the
    // resolver must reject it once the built metadata reveals the true name.
    requirements_in
        .write_str("flask @ git+https://github.com/pallets/flask.git@2.0.0\ndask @ git+https://github.com/pallets/flask.git@3.0.0")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Failed to download and build: `dask @ git+https://github.com/pallets/flask.git@3.0.0`
      Caused by: Package metadata name `flask` does not match given name `dask`
    "###
    );

    Ok(())
}
/// Resolve a specific Git dependency with a subdirectory, where the root directory contains a
/// static `pyproject.toml` file.
#[test]
fn compile_git_subdirectory_static_metadata() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("uv-public-pypackage @ git+https://github.com/astral-test/uv-workspace-pypackage#subdirectory=uv-public-pypackage")?;

    // The default branch is pinned to a commit SHA, and the subdirectory fragment is retained.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-workspace-pypackage@b8c4e192456d736c27f2c84c61175c896dba8373#subdirectory=uv-public-pypackage
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Request Flask, but include a URL dependency for Werkzeug, which should avoid adding a
/// duplicate dependency from `PyPI`.
#[test]
fn mixed_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl")?;

    // Werkzeug appears exactly once, at the direct URL, satisfying both the explicit
    // requirement and Flask's transitive dependency.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask==3.0.0
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl
        # via
        #   -r requirements.in
        #   flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// Request Werkzeug via both a version and a URL dependency at a _different_ version, which
/// should result in a conflict.
#[test]
fn conflicting_direct_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // `werkzeug==3.0.0` vs. a direct URL pinned to 2.0.0: unsatisfiable.
    requirements_in.write_str("werkzeug==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because there is no version of werkzeug==3.0.0 and you require werkzeug==3.0.0, we can conclude that the requirements are unsatisfiable.
    "###
    );

    Ok(())
}
/// Request Werkzeug via both a version and a URL dependency at _the same_ version, which
/// should prefer the direct URL dependency.
#[test]
fn compatible_direct_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // Both requirements pin 2.0.0, so the URL form wins without conflict.
    requirements_in.write_str("werkzeug==2.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Request Werkzeug via two different URLs at different versions, which should result in a conflict.
#[test]
fn conflicting_repeated_url_dependency_version_mismatch() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // Two direct URLs for the same package (2.0.1 and 2.0.0 wheels) cannot coexist.
    requirements_in.write_str("werkzeug @ https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Requirements contain conflicting URLs for package `werkzeug`:
    - https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl
    - https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl
    "###
    );

    Ok(())
}
/// Request Werkzeug via two different URLs at different versions. However, only one of the
/// URLs is compatible with the requested Python version, so there shouldn't be any conflict.
#[test]
fn conflicting_repeated_url_dependency_markers() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // On Python 3.12, only the `python_version >= '3.10'` requirement applies.
    requirements_in.write_str(indoc! {r"
        werkzeug @ https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl ; python_version >= '3.10'
        werkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl ; python_version < '3.10'
    "})?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    werkzeug @ https://files.pythonhosted.org/packages/bd/24/11c3ea5a7e866bf2d97f0501d0b4b1c9bbeade102bb4b588f0d2919a5212/Werkzeug-2.0.1-py3-none-any.whl
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Request Werkzeug via two different URLs at the same version. Despite mapping to the same
/// version, it should still result in a conflict.
#[test]
#[cfg(feature = "git")]
fn conflicting_repeated_url_dependency_version_match() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // A Git URL and a wheel URL are distinct sources even when both are 2.0.0.
    requirements_in.write_str("werkzeug @ git+https://github.com/pallets/werkzeug.git@2.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Requirements contain conflicting URLs for package `werkzeug`:
    - git+https://github.com/pallets/werkzeug.git@2.0.0
    - https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl
    "###
    );

    Ok(())
}
/// Request Flask, but include a URL dependency for a conflicting version of Werkzeug.
#[test]
fn conflicting_transitive_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // The Werkzeug 2.0.0 URL pin is incompatible with flask 3.0.0's `werkzeug>=3.0.0`.
    requirements_in.write_str("flask==3.0.0\nwerkzeug @ https://files.pythonhosted.org/packages/ff/1d/960bb4017c68674a1cb099534840f18d3def3ce44aed12b5ed8b78e0153e/Werkzeug-2.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because only werkzeug<3.0.0 is available and flask==3.0.0 depends on werkzeug>=3.0.0, we can conclude that flask==3.0.0 cannot be used.
          And because you require flask==3.0.0, we can conclude that the requirements are unsatisfiable.
    "###
    );

    Ok(())
}
/// Request `uv-public-pypackage` via two different URLs which resolve to the same canonical version.
#[test]
fn compatible_repeated_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // The URLs differ only in the `.git` suffix, so they share a canonical URL.
    requirements_in.write_str(indoc! {r"
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0.0.2
    "})?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Request `uv-public-pypackage` via two different URLs which resolve to the same repository, but
/// different commits.
#[test]
fn conflicting_repeated_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // Same repository, but tags `0.0.2` and `0.0.1` refer to different commits.
    requirements_in.write_str(indoc! {r"
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.1
    "})?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Requirements contain conflicting URLs for package `uv-public-pypackage`:
    - git+https://github.com/astral-test/uv-public-pypackage.git@0.0.1
    - git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
    "###
    );

    Ok(())
}
/// Request `uv-public-pypackage` via three different URLs: `0.0.2`, a short SHA, and a precise SHA.
/// All three are compatible, since they resolve to the same canonical version.
#[test]
fn compatible_narrowed_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // Each successive requirement narrows the previous one toward the precise SHA.
    requirements_in.write_str(indoc! {r"
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
    "})?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Request `uv-public-pypackage` via three different URLs: a precise SHA, a short SHA, and `0.0.2`.
/// All three are compatible, since they resolve to the same canonical version.
#[test]
fn compatible_broader_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // Mirror of `compatible_narrowed_url_dependency` with the requirements in the
    // opposite (broadening) order.
    requirements_in.write_str(indoc! {r"
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
    "})?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Request `uv-public-pypackage` via two different URLs: `0.0.2`, and a precise SHA, followed by
/// `0.0.2` again. All three are compatible, since they resolve to the same canonical version.
#[test]
fn compatible_repeated_narrowed_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // Repeating the `0.0.2` tag after the precise SHA must not reintroduce a conflict.
    requirements_in.write_str(indoc! {r"
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
    "})?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Request `uv-public-pypackage` via three different URLs: `0.0.2`, a precise SHA, and
/// `test-branch`.
///
/// Although `0.0.2` and the precise SHA resolve to the same canonical version, `test-branch`
/// resolves to a different version, so there should be a conflict.
#[test]
fn incompatible_narrowed_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@test-branch
    "})?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Requirements contain conflicting URLs for package `uv-public-pypackage`:
    - git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389
    - git+https://github.com/astral-test/uv-public-pypackage@test-branch
    "###
    );

    Ok(())
}
/// Request `hatchling_editable`, which depends on
/// `iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4`.
/// Since `hatchling_editable` is itself a direct URL requirement, its transitive Git
/// dependency is allowed.
#[test]
#[cfg(feature = "git")]
fn allowed_transitive_git_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("hatchling_editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    hatchling-editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip
        # via -r requirements.in
    iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
        # via hatchling-editable

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}
/// Request `hatchling_editable`, which depends on
/// `iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4`.
/// Since this URL is declared as a constraint, we should accept it.
#[test]
#[cfg(feature = "git")]
fn allowed_transitive_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("hatchling_editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip")?;

    // Declaring the exact transitive Git URL as a constraint makes it acceptable.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    hatchling-editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip
        # via -r requirements.in
    iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
        # via
        #   -c constraints.txt
        #   hatchling-editable

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}
/// Request `hatchling_editable`, which depends on `iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4`.
/// Since `iniconfig @ git+https://github.com/pytest-dev/iniconfig.git@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4` is declared as a constraint, and
/// those map to the same canonical URL, we should accept it.
#[test]
#[cfg(feature = "git")]
fn allowed_transitive_canonical_url_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("hatchling_editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip")?;

    // The constraint uses the `.git`-suffixed form of the URL; canonicalization must match them.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("iniconfig @ git+https://github.com/pytest-dev/iniconfig.git@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    hatchling-editable @ https://github.com/astral-sh/uv/files/14762645/hatchling_editable.zip
        # via -r requirements.in
    iniconfig @ git+https://github.com/pytest-dev/iniconfig.git@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
        # via
        #   -c constraints.txt
        #   hatchling-editable

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}
/// Request `hatchling_editable`, which depends on
/// `iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4`.
/// Since `hatchling_editable` is a path (local) dependency, we should accept it.
#[test]
#[cfg(feature = "git")]
fn allowed_transitive_url_path_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    // The local path is injected via the `HATCH_PATH` environment variable below.
    requirements_in.write_str("hatchling_editable @ ${HATCH_PATH}")?;

    let hatchling_path = current_dir()?.join("../../scripts/packages/hatchling_editable");

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .env("HATCH_PATH", hatchling_path.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    ${HATCH_PATH}
        # via -r requirements.in
    iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
        # via hatchling-editable

    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}
/// A dependency with conflicting URLs in `requirements.in` and `constraints.txt` should be ignored
/// if the dependency has an override.
#[test]
fn requirement_constraint_override_url() -> Result<()> {
    let context = TestContext::new("3.12");

    // Requirement and constraint pin `anyio` to two different URLs (sdist vs. wheel)...
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz")?;

    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl")?;

    // ...but the override replaces both with a registry version, so no URL conflict is raised.
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("anyio==3.7.0")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.txt")
        .arg("--override")
        .arg("overrides.txt"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because there is no version of anyio==3.7.0 and you require anyio==3.7.0, we can conclude that the requirements are unsatisfiable.
    "###
    );

    Ok(())
}
/// A dependency that uses a pre-release marker in `requirements.in` should be overridden by a
/// non-pre-release version in `overrides.txt`. We should _not_ allow Flask to be resolved to
/// a pre-release version.
#[test]
fn requirement_override_prerelease() -> Result<()> {
    let context = TestContext::new("3.12");

    // `flask<2.0.0rc4` mentions a pre-release version, which would normally allow
    // pre-releases for Flask...
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask<2.0.0rc4")?;

    // ...but the override contains no pre-release specifier, so Flask resolves to 1.1.4.
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("flask<2.0.1,!=2.0.0")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--override")
        .arg("overrides.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt
    click==7.1.2
        # via flask
    flask==1.1.4
        # via
        #   --override overrides.txt
        #   -r requirements.in
    itsdangerous==1.1.0
        # via flask
    jinja2==2.11.3
        # via flask
    markupsafe==2.1.5
        # via jinja2
    werkzeug==1.0.1
        # via flask

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}
/// Resolve packages from all extras in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_all_extras() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools", "wheel"]

[project]
name = "project"
version = "0.1.0"
dependencies = ["anyio==3.7.0"]
optional-dependencies.foo = [
    "iniconfig==1.1.1",
]
optional-dependencies.bar = [
    "httpcore==0.18.0",
]
"#,
    )?;

    // `--all-extras` should pull in both the `foo` and `bar` optional dependency groups.
    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .arg("--all-extras"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --all-extras
    anyio==3.7.0
        # via
        #   project (pyproject.toml)
        #   httpcore
    certifi==2024.2.2
        # via httpcore
    h11==0.14.0
        # via httpcore
    httpcore==0.18.0
        # via project (pyproject.toml)
    idna==3.6
        # via anyio
    iniconfig==1.1.1
        # via project (pyproject.toml)
    sniffio==1.3.1
        # via
        #   anyio
        #   httpcore

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// Same as `compile_pyproject_toml_all_extras`, but with single-line (`--annotation-style=line`)
/// "via" annotations instead of the default multi-line split style.
#[test]
fn compile_pyproject_toml_all_extras_annotation_line() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools", "wheel"]

[project]
name = "project"
version = "0.1.0"
dependencies = ["anyio==3.7.0"]
optional-dependencies.foo = [
    "iniconfig==1.1.1",
]
optional-dependencies.bar = [
    "httpcore==0.18.0",
]
"#,
    )?;

    uv_snapshot!(context.pip_compile()
        .arg("--annotation-style=line")
        .arg("pyproject.toml")
        .arg("--all-extras"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] --annotation-style=line pyproject.toml --all-extras
    anyio==3.7.0              # via httpcore, project (pyproject.toml)
    certifi==2024.2.2         # via httpcore
    h11==0.14.0               # via httpcore
    httpcore==0.18.0          # via project (pyproject.toml)
    idna==3.6                 # via anyio
    iniconfig==1.1.1          # via project (pyproject.toml)
    sniffio==1.3.1            # via anyio, httpcore

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// `--all-extras` and `--extra` are mutually exclusive; passing both should be rejected by the
/// CLI parser before any resolution happens.
#[test]
fn compile_does_not_allow_both_extra_and_all_extras() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools", "wheel"]

[project]
name = "project"
dependencies = ["anyio==3.7.0"]
optional-dependencies.foo = [
    "iniconfig==1.1.1",
]
optional-dependencies.bar = [
    "httpcore==0.18.0",
]
"#,
    )?;

    // Expect a clap usage error (exit code 2), not a resolver error.
    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .arg("--all-extras")
        .arg("--extra")
        .arg("foo"),
        @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: the argument '--all-extras' cannot be used with '--extra <EXTRA>'

    Usage: uv pip compile --cache-dir [CACHE_DIR] --all-extras --exclude-newer <EXCLUDE_NEWER> <SRC_FILE>...

    For more information, try '--help'.
    "###
    );

    Ok(())
}
/// Compile requirements that cannot be solved due to conflict in a `pyproject.toml` file.
#[test]
fn compile_unsolvable_requirements() -> Result<()> {
    let context = TestContext::new("3.12");

    // Two mutually exclusive pins on the same package make the project unsatisfiable.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools", "wheel"]

[project]
name = "my-project"
version = "0.1.0"
dependencies = ["anyio==3.7.0", "anyio==4.0.0"]
"#,
    )?;

    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because my-project depends on anyio==3.7.0 and anyio==4.0.0, we can conclude that the requirements are unsatisfiable.
    "###
    );

    Ok(())
}
/// Compile requirements in a `pyproject.toml` file that cannot be resolved due to
/// a requirement with a version that is not available online.
#[test]
fn compile_unsolvable_requirements_version_not_available() -> Result<()> {
    let context = TestContext::new("3.12");

    // `anyio==300.1.4` does not exist on the registry, so resolution must fail.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
requires = ["setuptools", "wheel"]

[project]
name = "my-project"
version = "0.1.0"
dependencies = ["anyio==300.1.4"]
"#,
    )?;

    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because there is no version of anyio==300.1.4 and my-project depends on anyio==300.1.4, we can conclude that the requirements are unsatisfiable.
    "###
    );

    Ok(())
}
/// Resolve at a specific time in the past
#[test]
fn compile_exclude_newer() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("tqdm")?;

    uv_snapshot!(context
        .pip_compile()
        // Clear the ambient environment override so the `--exclude-newer` flag is what's tested.
        .env_remove("UV_EXCLUDE_NEWER")
        .arg("requirements.in")
        .arg("--exclude-newer")
        // 4.64.0: 2022-04-04T01:48:46.194635Z
        // 4.64.1: 2022-09-03T11:10:27.148080Z
        .arg("2022-04-04T12:00:00Z"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --exclude-newer 2022-04-04T12:00:00Z
    tqdm==4.64.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // Use a date as input instead.
    // We interpret a date as including this day
    uv_snapshot!(context
        .pip_compile()
        .env_remove("UV_EXCLUDE_NEWER")
        .arg("requirements.in")
        .arg("--exclude-newer")
        .arg("2022-04-04"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --exclude-newer 2022-04-04
    tqdm==4.64.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // Check the error message for invalid datetime
    uv_snapshot!(context
        .pip_compile()
        .env_remove("UV_EXCLUDE_NEWER")
        .arg("requirements.in")
        .arg("--exclude-newer")
        .arg("2022-04-04+02:00"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: invalid value '2022-04-04+02:00' for '--exclude-newer <EXCLUDE_NEWER>': `2022-04-04+02:00` is neither a valid date (trailing input) nor a valid datetime (input contains invalid characters)

    For more information, try '--help'.
    "###
    );

    Ok(())
}
/// Resolve a local path dependency on a specific wheel.
///
/// Exercises every spelling of a local file reference that `uv pip compile` accepts:
/// absolute `file://` URL, `file:` relative, `file://` relative, `./` relative path,
/// bare absolute path, and `file://localhost/` URL. The output should echo back the exact
/// form the user wrote.
#[test]
fn compile_wheel_path_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    // Download a wheel.
    let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
    let flask_wheel = context.temp_dir.child("flask-3.0.0-py3-none-any.whl");
    let mut flask_wheel_file = fs::File::create(&flask_wheel)?;
    std::io::copy(&mut response.bytes()?.as_ref(), &mut flask_wheel_file)?;

    // Case 1: absolute `file://` URL.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!(
        "flask @ {}",
        Url::from_file_path(flask_wheel.path()).unwrap()
    ))?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ file://[TEMP_DIR]/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###);

    // Run the same operation, but this time with a relative path, omitting the `//`.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ file:flask-3.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    file:flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    // Run the same operation, but this time with a relative path, including the `//`.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ file://flask-3.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    // Run the same operation, but this time with a relative path, exclusive of any scheme.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ ./flask-3.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    ./flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    // Run the same operation, but this time with an absolute path (rather than a URL).
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!("flask @ {}", flask_wheel.path().display()))?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    [TEMP_DIR]/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    // Run the same operation, but this time with an absolute path (rather than a URL), including
    // the `file://` prefix.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!("flask @ file://{}", flask_wheel.path().display()))?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ file://[TEMP_DIR]/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    // Run the same operation, but this time with an absolute path (rather than a URL), including
    // the `file://localhost/` prefix.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!(
        "flask @ file://localhost/{}",
        flask_wheel.path().display()
    ))?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ file://localhost/[TEMP_DIR]/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a local path dependency on a specific source distribution.
#[test]
fn compile_source_distribution_path_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    // Download a source distribution into the test's temp directory.
    let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/d8/09/c1a7354d3925a3c6c8cfdebf4245bae67d633ffda1ba415add06ffc839c5/flask-3.0.0.tar.gz")?;
    let flask_sdist = context.temp_dir.child("flask-3.0.0.tar.gz");
    let mut sdist_file = std::fs::File::create(&flask_sdist)?;
    std::io::copy(&mut response.bytes()?.as_ref(), &mut sdist_file)?;

    // Reference the sdist via an absolute `file://` URL; the URL should round-trip into the
    // compiled output verbatim (modulo the `[TEMP_DIR]` filter).
    let url = Url::from_file_path(flask_sdist.path()).unwrap();
    context
        .temp_dir
        .child("requirements.in")
        .write_str(&format!("flask @ {url}"))?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ file://[TEMP_DIR]/flask-3.0.0.tar.gz
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###);

    Ok(())
}
/// Resolve a local path dependency to a non-existent file.
#[test]
fn compile_wheel_path_dependency_missing() -> Result<()> {
    let context = TestContext::new("3.12");

    // Point the requirement at a wheel path that was never created.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!(
        "flask @ {}",
        context
            .temp_dir
            .join("flask-3.0.0-py3-none-any.whl")
            .simplified_display()
    ))?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Distribution not found at: file://[TEMP_DIR]/flask-3.0.0-py3-none-any.whl
    "###);

    Ok(())
}
/// Resolve a yanked version of `attrs` by specifying the version directly.
#[test]
fn compile_yanked_version_direct() -> Result<()> {
    let context = TestContext::new("3.12");

    // A direct `==` pin on a yanked release is honored, but a warning is emitted.
    context
        .temp_dir
        .child("requirements.in")
        .write_str("attrs==21.1.0")?;

    uv_snapshot!(context.pip_compile().arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    attrs==21.1.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    warning: `attrs==21.1.0` is yanked (reason: "Installable but not importable on Python 3.4.")
    "###
    );

    Ok(())
}
/// Fail to resolve `attrs` due to the indirect use of a yanked version (`21.1.0`).
#[test]
fn compile_yanked_version_indirect() -> Result<()> {
    let context = TestContext::new("3.12");

    // The only release in this range is yanked; unlike a direct `==` pin, a range is not
    // allowed to select a yanked version, so resolution fails.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("attrs>20.3.0,<21.2.0")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because only the following versions of attrs are available:
              attrs<=20.3.0
              attrs==21.1.0
              attrs>=21.2.0
          and attrs==21.1.0 was yanked (reason: Installable but not importable on Python 3.4), we can conclude that attrs>20.3.0,<21.2.0 cannot be used.
          And because you require attrs>20.3.0,<21.2.0, we can conclude that the requirements are unsatisfiable.
    "###
    );

    Ok(())
}
/// Flask==3.0.0 depends on Werkzeug>=3.0.0. Demonstrate that we can override this
/// requirement with an incompatible version.
#[test]
fn override_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask==3.0.0")?;

    // Unlike a constraint, an override replaces Flask's own `werkzeug>=3.0.0` requirement.
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("werkzeug==2.3.0")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--override")
        .arg("overrides.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask==3.0.0
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==2.3.0
        # via
        #   --override overrides.txt
        #   flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// Check that `tool.uv.override-dependencies` in `pyproject.toml` is respected.
#[test]
fn override_dependency_from_pyproject() -> Result<()> {
    let context = TestContext::new("3.12");

    // The override comes from workspace settings rather than an `--override` file, so it is
    // annotated as `--override (workspace)` in the output.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
    name = "example"
    version = "0.0.0"
    dependencies = [
        "flask==3.0.0"
    ]

    [tool.uv]
    override-dependencies = [
        "werkzeug==2.3.0"
    ]
    "#,
    )?;

    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .current_dir(&context.temp_dir)
        , @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask==3.0.0
        # via example (pyproject.toml)
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==2.3.0
        # via
        #   --override (workspace)
        #   flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// Check that `tool.uv.constraint-dependencies` in `pyproject.toml` is respected.
#[test]
fn constraint_dependency_from_pyproject() -> Result<()> {
    let context = TestContext::new("3.12");

    // `idna<3.4` is not a requirement itself; it only constrains the version chosen for
    // `anyio`'s `idna` dependency, and is annotated as `-c (workspace)`.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
    name = "example"
    version = "0.0.0"
    dependencies = [
        "anyio==3.7.0"
    ]

    [tool.uv]
    constraint-dependencies = [
        "idna<3.4"
    ]
    "#,
    )?;

    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    anyio==3.7.0
        # via example (pyproject.toml)
    idna==3.3
        # via
        #   -c (workspace)
        #   anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}
/// Check that `override-dependencies` in `uv.toml` is respected.
#[test]
fn override_dependency_from_specific_uv_toml() -> Result<()> {
    let context = TestContext::new("3.12");

    // The project lives in one directory...
    let _ = context.temp_dir.child("project").create_dir_all();
    let pyproject_toml = context.temp_dir.child("project/pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
    name = "example"
    version = "0.0.0"
    dependencies = [
        "flask==3.0.0"
    ]
    "#,
    )?;

    // ...while the overrides come from a `uv.toml` in a sibling directory, selected
    // explicitly via `--config-file`.
    let _ = context.temp_dir.child("uv").create_dir_all();
    let uv_toml: assert_fs::fixture::ChildPath = context.temp_dir.child("uv").child("uv.toml");
    uv_toml.write_str(
        r#"
    override-dependencies = [
        "werkzeug==2.3.0"
    ]
    "#,
    )?;

    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .arg("--config-file")
        .arg("../uv/uv.toml")
        .current_dir(&context.temp_dir.child("project"))
        , @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --config-file ../uv/uv.toml
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask==3.0.0
        # via example (pyproject.toml)
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==2.3.0
        # via
        #   --override (workspace)
        #   flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// Black==23.10.1 depends on tomli>=1.1.0 for Python versions below 3.11. Demonstrate that we can
/// override it with a multi-line override.
#[test]
fn override_multi_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // Two override entries for `tomli`, disambiguated by Python-version markers; only the
    // `python_version >= '3.11'` branch applies on 3.12.
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str(
        "tomli>=1.1.0; python_version >= '3.11'\ntomli<1.0.0; python_version < '3.11'",
    )?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--override")
        .arg("overrides.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black
    tomli==2.0.1
        # via
        #   --override overrides.txt
        #   black

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// `urllib3==2.2.1` has an optional dependency on `pysocks!=1.5.7,<2.0,>=1.5.6; extra == 'socks'`,
/// So we shouldn't apply the `pysocks==1.7.1` override without the `socks` extra.
#[test]
fn dont_add_override_for_non_activated_extra() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("urllib3==2.2.1")?;

    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("pysocks==1.7.1")?;

    // `pysocks` must not appear in the output: overrides replace requirements but never
    // introduce packages that weren't requested.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--override")
        .arg("overrides.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt
    urllib3==2.2.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Check how invalid `tool.uv.override-dependencies` is handled in `pyproject.toml`.
#[test]
fn override_dependency_from_workspace_invalid_syntax() -> Result<()> {
    let context = TestContext::new("3.12");

    // `werkzeug=2.3.0` uses `=`, which is not a valid PEP 508 comparison operator; both the
    // settings-discovery pass (warning) and the requirements pass (hard error) should report it.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
    name = "example"
    version = "0.0.0"
    dependencies = [
        "flask==3.0.0"
    ]

    [tool.uv]
    override-dependencies = [
        "werkzeug=2.3.0"
    ]
    "#,
    )?;

    uv_snapshot!(context.pip_compile()
        .arg("pyproject.toml")
        .current_dir(&context.temp_dir)
        , @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    warning: Failed to parse `pyproject.toml` during settings discovery; skipping...
    error: Failed to parse: `pyproject.toml`
      Caused by: TOML parse error at line 9, column 29
      |
    9 |     override-dependencies = [
      |                             ^
    no such comparison operator "=", must be one of ~= == != <= >= < > ===
    werkzeug=2.3.0
            ^^^^^^
    "###
    );

    Ok(())
}
/// Flask==3.0.0 depends on Werkzeug>=3.0.0. Demonstrate that we can override this
/// requirement with a URL.
#[test]
fn override_dependency_url() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask==3.0.0")?;

    // A named direct-URL override: Werkzeug 1.0.1 would violate Flask's `>=3.0.0` bound, but
    // overrides are exempt from the dependent's specifier.
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("werkzeug @ https://files.pythonhosted.org/packages/cc/94/5f7079a0e00bd6863ef8f1da638721e9da21e5bacee597595b318f71d62e/Werkzeug-1.0.1-py2.py3-none-any.whl")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--override")
        .arg("overrides.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask==3.0.0
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via jinja2
    werkzeug @ https://files.pythonhosted.org/packages/cc/94/5f7079a0e00bd6863ef8f1da638721e9da21e5bacee597595b318f71d62e/Werkzeug-1.0.1-py2.py3-none-any.whl
        # via
        #   --override overrides.txt
        #   flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// Flask==3.0.0 depends on Werkzeug>=3.0.0. Demonstrate that we can override this
/// requirement with an unnamed URL.
#[test]
fn override_dependency_unnamed_url() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask==3.0.0")?;

    // Bare URL with no `name @` prefix: the package name must be inferred from the wheel
    // filename before the override can be matched to `werkzeug`.
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("https://files.pythonhosted.org/packages/cc/94/5f7079a0e00bd6863ef8f1da638721e9da21e5bacee597595b318f71d62e/Werkzeug-1.0.1-py2.py3-none-any.whl")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--override")
        .arg("overrides.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask==3.0.0
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via jinja2
    werkzeug @ https://files.pythonhosted.org/packages/cc/94/5f7079a0e00bd6863ef8f1da638721e9da21e5bacee597595b318f71d62e/Werkzeug-1.0.1-py2.py3-none-any.whl
        # via
        #   --override overrides.txt
        #   flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// Request an extra that doesn't exist on the specified package.
#[test]
fn missing_registry_extra() -> Result<()> {
    let context = TestContext::new("3.12");

    // `black` has no `tensorboard` extra: resolution still succeeds, but a warning is emitted
    // and no extra dependencies are added.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black[tensorboard]==23.10.1")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    warning: The package `black==23.10.1` does not have an extra named `tensorboard`
    "###
    );

    Ok(())
}
/// Request an extra that doesn't exist on the specified package.
#[test]
fn missing_url_extra() -> Result<()> {
    let context = TestContext::new("3.12");

    // Same as `missing_registry_extra`, but for a direct-URL requirement: warn and continue.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask[tensorboard] @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    warning: The package `flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl` does not have an extra named `tensorboard`
    "###
    );

    Ok(())
}
/// Resolve a dependency from a URL, preserving the exact casing of the URL as specified in the
/// requirements file.
#[test]
fn preserve_url() -> Result<()> {
    let context = TestContext::new("3.12");

    // The host is written with unusual casing (`files.PYTHONHOSTED.org`); the output must echo
    // it back unchanged rather than normalizing to lowercase.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ https://files.PYTHONHOSTED.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ https://files.PYTHONHOSTED.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a dependency from a URL, preserving the unexpanded environment variable as specified in
/// the requirements file.
#[test]
fn preserve_project_root() -> Result<()> {
    let context = TestContext::new("3.12");

    // Download a wheel.
    let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
    let flask_wheel = context.temp_dir.child("flask-3.0.0-py3-none-any.whl");
    let mut flask_wheel_file = std::fs::File::create(flask_wheel)?;
    std::io::copy(&mut response.bytes()?.as_ref(), &mut flask_wheel_file)?;

    // `${PROJECT_ROOT}` is expanded during resolution but written back verbatim in the output.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ file://${PROJECT_ROOT}/flask-3.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ file://${PROJECT_ROOT}/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a dependency from a URL, passing in the entire URL as an environment variable.
#[test]
fn respect_http_env_var() -> Result<()> {
    let context = TestContext::new("3.12");

    // `${URL}` is expanded from the process environment for resolution, but the output keeps
    // the unexpanded variable reference.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask @ ${URL}")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .env("URL", "https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ ${URL}
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// A requirement defined as a single unnamed environment variable should be parsed as such:
/// the name (`flask`) is inferred from the expanded URL, but the output echoes `${URL}`.
#[test]
fn respect_unnamed_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("${URL}")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.env("URL", "https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ ${URL}
# via -r requirements.in
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// A requirement defined as a single unnamed environment variable should error if the environment
/// variable is not set: the literal `${URL}` is then not a valid requirement.
#[test]
fn error_missing_unnamed_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("${URL}")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Couldn't parse requirement in `requirements.in` at position 0
Caused by: Expected package name starting with an alphanumeric character, found '$'
${URL}
^
"###
);
Ok(())
}
/// Resolve a dependency from a file path, passing in the entire path as an environment variable.
/// As with `respect_http_env_var`, the named requirement is echoed with the placeholder
/// unexpanded: `flask @ ${FILE_PATH}`.
#[test]
fn respect_file_env_var() -> Result<()> {
let context = TestContext::new("3.12");
// Download a wheel for the environment variable to point at.
let response = reqwest::blocking::get("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;
let flask_wheel = context.temp_dir.child("flask-3.0.0-py3-none-any.whl");
let mut flask_wheel_file = std::fs::File::create(flask_wheel)?;
std::io::copy(&mut response.bytes()?.as_ref(), &mut flask_wheel_file)?;
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask @ ${FILE_PATH}")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.env("FILE_PATH", context.temp_dir.join("flask-3.0.0-py3-none-any.whl")), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ ${FILE_PATH}
# via -r requirements.in
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a mix of editable requirements (relative path and `file://` URL with an extra)
/// alongside a regular registry dependency.
#[test]
fn compile_editable() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
-e ../../scripts/packages/poetry_editable
-e file://../../scripts/packages/black_editable[dev]
boltons # normal dependency for comparison
"
})?;
// Run from the repository checkout so the relative editable paths resolve.
uv_snapshot!(context.filters(), context.pip_compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
-e ../../scripts/packages/poetry_editable
# via -r [TEMP_DIR]/requirements.in
-e file://../../scripts/packages/black_editable
# via -r [TEMP_DIR]/requirements.in
aiohttp==3.9.3
# via black
aiosignal==1.3.1
# via aiohttp
anyio==4.3.0
# via poetry-editable
attrs==23.2.0
# via aiohttp
boltons==23.1.1
# via -r [TEMP_DIR]/requirements.in
frozenlist==1.4.1
# via
# aiohttp
# aiosignal
idna==3.6
# via
# anyio
# yarl
multidict==6.0.5
# via
# aiohttp
# yarl
sniffio==1.3.1
# via anyio
uvloop==0.19.0
# via black
yarl==1.9.4
# via aiohttp
----- stderr -----
Resolved 13 packages in [TIME]
"###);
Ok(())
}
/// If an editable is repeated (via equivalent path, `${PROJECT_ROOT}`, and `file://` spellings),
/// it should only be built once and appear once in the output.
#[test]
fn deduplicate_editable() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
-e file://../../scripts/packages/black_editable
-e ${PROJECT_ROOT}/../../scripts/packages/black_editable
-e file://../../scripts/packages/black_editable[dev]
"
})?;
uv_snapshot!(context.filters(), context.pip_compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
-e file://../../scripts/packages/black_editable
# via -r [TEMP_DIR]/requirements.in
aiohttp==3.9.3
# via black
aiosignal==1.3.1
# via aiohttp
attrs==23.2.0
# via aiohttp
frozenlist==1.4.1
# via
# aiohttp
# aiosignal
idna==3.6
# via yarl
multidict==6.0.5
# via
# aiohttp
# yarl
uvloop==0.19.0
# via black
yarl==1.9.4
# via aiohttp
----- stderr -----
Resolved 9 packages in [TIME]
"###);
Ok(())
}
/// An unnamed local-path requirement with an `#egg=` fragment: the fragment should be stripped
/// when the path is written back to the output.
#[test]
fn strip_fragment_unnamed() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
../../scripts/packages/black_editable#egg=black
"
})?;
uv_snapshot!(context.filters(), context.pip_compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
../../scripts/packages/black_editable
# via -r [TEMP_DIR]/requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###);
Ok(())
}
/// A named local-path requirement with an `#egg=` fragment: the fragment should likewise be
/// stripped from the emitted path.
#[test]
fn strip_fragment_named() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
black @ ../../scripts/packages/black_editable#egg=black
"
})?;
uv_snapshot!(context.filters(), context.pip_compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
../../scripts/packages/black_editable
# via -r [TEMP_DIR]/requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###);
Ok(())
}
/// Resolve a direct-URL (local directory) requirement with an extra (`black[dev]`); the extra's
/// dependencies (aiohttp et al.) must be pulled in, with the directory emitted as a bare path.
#[test]
fn recursive_extras_direct_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black[dev] @ ../../scripts/packages/black_editable")?;
uv_snapshot!(context.filters(), context
.pip_compile()
.arg(requirements_in.path())
.current_dir(current_dir().unwrap()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
aiohttp==3.9.3
# via black
aiosignal==1.3.1
# via aiohttp
attrs==23.2.0
# via aiohttp
../../scripts/packages/black_editable
# via -r [TEMP_DIR]/requirements.in
frozenlist==1.4.1
# via
# aiohttp
# aiosignal
idna==3.6
# via yarl
multidict==6.0.5
# via
# aiohttp
# yarl
uvloop==0.19.0
# via black
yarl==1.9.4
# via aiohttp
----- stderr -----
Resolved 9 packages in [TIME]
"###);
Ok(())
}
/// Compile an editable package with a direct URL requirement (the editable's own dependency is a
/// Git URL, which must be resolved and pinned to a commit).
#[test]
fn compile_editable_url_requirement() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e ../../scripts/packages/hatchling_editable")?;
uv_snapshot!(context.filters(), context.pip_compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
-e ../../scripts/packages/hatchling_editable
# via -r [TEMP_DIR]/requirements.in
iniconfig @ git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4
# via hatchling-editable
----- stderr -----
Resolved 2 packages in [TIME]
"###);
Ok(())
}
/// Corrupted or unreadable cache entries must be treated as cache misses, not fatal errors: the
/// resolution should still succeed after cache files are overwritten with garbage or made
/// unreadable.
#[test]
#[ignore]
fn cache_errors_are_non_fatal() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
// No git dep, git has its own locking strategy
requirements_in.write_str(indoc! {r"
# pypi wheel
pandas
# url wheel
flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
# url source dist
werkzeug @ https://files.pythonhosted.org/packages/0d/cc/ff1904eb5eb4b455e442834dabf9427331ac0fa02853bf83db817a7dd53d/werkzeug-3.0.1.tar.gz
"
})?;
// Pick a file from each kind of cache
let interpreter_cache = context
.cache_dir
.path()
.join("interpreter-v0")
.read_dir()?
.next()
.context("Expected a python interpreter cache file")??
.path();
let cache_files = [
PathBuf::from("simple-v0/pypi/numpy.msgpack"),
PathBuf::from(
"wheels-v0/pypi/python-dateutil/python_dateutil-2.8.2-py2.py3-none-any.msgpack",
),
PathBuf::from("wheels-v0/url/4b8be67c801a7ecb/flask/flask-3.0.0-py3-none-any.msgpack"),
PathBuf::from("built-wheels-v0/url/6781bd6440ae72c2/werkzeug/metadata.msgpack"),
interpreter_cache,
];
let check = || {
// `pip_compile()` already invokes `uv pip compile`; passing `pip`/`compile` as extra
// arguments would be misparsed as requirement files.
uv_snapshot!(context.pip_compile()
.arg(requirements_in.path())
// It's sufficient to check that we resolve to a fixed number of packages
.stdout(std::process::Stdio::null()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 13 packages in [TIME]
"###
);
};
insta::allow_duplicates! {
check();
// Replace some cache files with invalid contents
for file in &cache_files {
let file = context.cache_dir.join(file);
if !file.is_file() {
bail!("Missing cache file {}", file.user_display());
}
fs_err::write(file, "I borken you cache")?;
}
check();
#[cfg(unix)]
{
use fs_err::os::unix::fs::OpenOptionsExt;
// Make some files unreadable, so that the read instead of the deserialization will fail
for file in cache_files {
let file = context.cache_dir.join(file);
if !file.is_file() {
bail!("Missing cache file {}", file.user_display());
}
fs_err::OpenOptions::new()
.create(true)
.write(true)
.mode(0o000)
.open(file)?;
}
}
check();
Ok(())
}
}
/// Resolve a distribution from an HTML-only (PEP 503 HTML, no JSON API) registry.
#[test]
#[cfg(not(target_env = "musl"))] // No musllinux wheels in the torch index
fn compile_html() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("jinja2<=3.1.2")?;
// The torch index predates the default exclude-newer cutoff, so lift it for this test.
uv_snapshot!(context.pip_compile()
.env_remove("UV_EXCLUDE_NEWER")
.arg("requirements.in")
.arg("--index-url")
.arg("https://download.pytorch.org/whl"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
jinja2==3.1.2
# via -r requirements.in
markupsafe==2.1.5
# via jinja2
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a distribution from a registry with and without a trailing slash; both index URL
/// spellings must produce identical resolutions.
#[test]
fn trailing_slash() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("jinja2")?;
// Without the trailing slash.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--index-url")
.arg("https://test.pypi.org/simple"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
jinja2==3.1.3
# via -r requirements.in
markupsafe==2.1.5
# via jinja2
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
// With the trailing slash.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--index-url")
.arg("https://test.pypi.org/simple/"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
jinja2==3.1.3
# via -r requirements.in
markupsafe==2.1.5
# via jinja2
----- stderr -----
Resolved 2 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a project without a `pyproject.toml`, using the PEP 517 build backend (default).
#[test]
fn compile_legacy_sdist_pep_517() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz
# via -r requirements.in
mccabe==0.7.0
# via flake8
pycodestyle==2.10.0
# via flake8
pyflakes==3.0.1
# via flake8
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a project without a `pyproject.toml`, invoking `setuptools` directly via
/// `--legacy-setup-py` instead of the PEP 517 interface.
#[test]
fn compile_legacy_sdist_setuptools() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--legacy-setup-py"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --legacy-setup-py
flake8 @ https://files.pythonhosted.org/packages/66/53/3ad4a3b74d609b3b9008a10075c40e7c8909eae60af53623c3888f7a529a/flake8-6.0.0.tar.gz
# via -r requirements.in
mccabe==0.7.0
# via flake8
pycodestyle==2.10.0
# via flake8
pyflakes==3.0.1
# via flake8
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Include hashes from the registry in the generated output (`--generate-hashes` emits every
/// available hash for each pinned distribution).
#[test]
fn generate_hashes_registry() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio==4.0.0")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --generate-hashes
anyio==4.0.0 \
--hash=sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f \
--hash=sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a
# via -r requirements.in
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Include hashes from the URL in the generated output: a direct-URL source distribution gets a
/// single hash (computed from the archive itself).
#[test]
fn generate_hashes_source_distribution_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --generate-hashes
anyio @ https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz \
--hash=sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f
# via -r requirements.in
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Include hashes from the URL in the generated output: a direct-URL wheel (built distribution)
/// gets a single hash.
#[test]
fn generate_hashes_built_distribution_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --generate-hashes
anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl \
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8
# via -r requirements.in
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Given a VCS dependency, include hashes for its dependencies, but not the repository itself
/// (Git checkouts are pinned to a commit instead of hashed).
#[test]
fn generate_hashes_git() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio @ git+https://github.com/agronholm/anyio@4.3.0")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --generate-hashes
anyio @ git+https://github.com/agronholm/anyio@437a7e310925a962cab4a58fcd2455fbcd578d51
# via -r requirements.in
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Given an unnamed URL, include hashes for the URL and its dependencies; the package name is
/// inferred from the wheel filename.
#[test]
fn generate_hashes_unnamed_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --generate-hashes
anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl \
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8
# via -r requirements.in
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Given a local directory, include hashes for its dependencies, but not the directory itself
/// (source trees are not hashable).
#[test]
fn generate_hashes_local_directory() -> Result<()> {
// A single context is sufficient; a second throwaway `TestContext` would only create an
// unused virtualenv and temp dir.
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
../../scripts/packages/poetry_editable
"
})?;
uv_snapshot!(context.filters(), context.pip_compile()
.arg(requirements_in.path())
.arg("--generate-hashes")
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in --generate-hashes
anyio==4.3.0 \
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8 \
--hash=sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6
# via poetry-editable
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
../../scripts/packages/poetry_editable
# via -r [TEMP_DIR]/requirements.in
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###);
Ok(())
}
/// Given an editable dependency, include hashes for its dependencies, but not the directory
/// itself (editables are not hashable).
#[test]
fn generate_hashes_editable() -> Result<()> {
// A single context is sufficient; a second throwaway `TestContext` would only create an
// unused virtualenv and temp dir.
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
-e ../../scripts/packages/poetry_editable
"
})?;
uv_snapshot!(context.filters(), context.pip_compile()
.arg(requirements_in.path())
.arg("--generate-hashes")
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in --generate-hashes
-e ../../scripts/packages/poetry_editable
# via -r [TEMP_DIR]/requirements.in
anyio==4.3.0 \
--hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8 \
--hash=sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6
# via poetry-editable
idna==3.6 \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via anyio
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###);
Ok(())
}
/// Compile using `--find-links` with a local directory, mixing flat-index packages with a
/// direct-URL requirement.
#[test]
fn find_links_directory() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
tqdm
numpy
werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl
"})?;
uv_snapshot!(context.filters(), context.pip_compile()
.arg("requirements.in")
.arg("--find-links")
.arg(context.workspace_root.join("scripts").join("links")), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
markupsafe==2.1.5
# via werkzeug
numpy==1.26.4
# via -r requirements.in
tqdm==1000.0.0
# via -r requirements.in
werkzeug @ https://files.pythonhosted.org/packages/c3/fc/254c3e9b5feb89ff5b9076a23218dafbc99c96ac5941e900b71206e6313b/werkzeug-3.0.1-py3-none-any.whl
# via -r requirements.in
----- stderr -----
Resolved 4 packages in [TIME]
"###);
Ok(())
}
/// Compile using `--find-links` with a URL by resolving `tqdm` from the `PyTorch` wheels index
/// (with `--no-index`, the flat index is the only source).
#[test]
fn find_links_url() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("tqdm")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--no-index")
.arg("--find-links")
.arg("https://download.pytorch.org/whl/torch_stable.html"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-index
tqdm==4.64.1
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Compile using `--find-links` with a URL passed via an environment variable embedded in the
/// requirements file itself (`--find-links ${URL}`).
#[test]
fn find_links_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("tqdm\n--find-links ${URL}")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--no-index")
.env("URL", "https://download.pytorch.org/whl/torch_stable.html"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-index
tqdm==4.64.1
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Compile using `--find-links` with a URL by resolving `tqdm` from the `PyTorch` wheels index,
/// with the URL itself provided in a `requirements.txt` file; `--emit-find-links` should echo it.
#[test]
fn find_links_requirements_txt() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-f https://download.pytorch.org/whl/torch_stable.html\ntqdm")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--no-index")
.arg("--emit-find-links"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-index --emit-find-links
--find-links https://download.pytorch.org/whl/torch_stable.html
tqdm==4.64.1
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// `extras==0.0.2` fails to build (i.e., it always throws). Since `extras==0.0.1` is pinned, we
/// should never even attempt to build `extras==0.0.2`, despite an unpinned `extras[dev]`
/// requirement.
#[test]
fn avoid_irrelevant_extras() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {r"
extras==0.0.1
extras[dev]
"})?;
// The local flat index (`scripts/links`) provides both `extras` versions.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--find-links")
.arg(context.workspace_root.join("scripts").join("links")), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
anyio==4.3.0
# via extras
extras==0.0.1
# via -r requirements.in
idna==3.6
# via anyio
iniconfig==2.0.0
# via extras
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 5 packages in [TIME]
"###);
Ok(())
}
/// `extras==0.0.2` fails to build (i.e., it always throws). `extras==0.0.1` is the only version
/// that resolves the constraints, but if we don't visit `example[test]` prior to `extras==0.0.2`,
/// we'll end up with a broken build.
#[test]
fn avoid_irrelevant_recursive_extras() -> Result<()> {
let context = TestContext::new("3.12");
// Create an editable package with an optional URL dependency.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = []
requires-python = '>=3.8'
[project.optional-dependencies]
test = ["extras<0.0.2"]
coverage = ["example[test]", "extras>=0.0.1,<=0.0.2"]
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e .[test,coverage]")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--find-links")
.arg(context.workspace_root.join("scripts").join("links")), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
-e .
# via -r requirements.in
extras==0.0.1
# via example
iniconfig==2.0.0
# via extras
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Respect `requires-python` when prefetching.
///
/// `voluptuous==0.15.1` requires Python 3.9 or later; on Python 3.8 we should resolve to an
/// earlier version and avoid building 0.15.1 at all.
#[test]
fn requires_python_prefetch() -> Result<()> {
// Deliberately uses Python 3.8 so that 0.15.1 is incompatible.
let context = TestContext::new("3.8");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("voluptuous<=0.15.1")?;
uv_snapshot!(context
.pip_compile()
.env_remove("UV_EXCLUDE_NEWER")
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
voluptuous==0.14.2
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###);
Ok(())
}
/// Use an existing resolution for `black==23.10.1`, with stale versions of `click` and `pathspec`.
/// Without `--upgrade`, the existing pins in `requirements.txt` are preserved; nothing changes.
#[test]
fn upgrade_none() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black==23.10.1")?;
// Pre-seed the output file with a (stale) prior resolution.
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
black==23.10.1
click==8.1.2
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.0
# via black
platformdirs==4.0.0
# via black
"})?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--output-file")
.arg("requirements.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt
black==23.10.1
# via -r requirements.in
click==8.1.2
# via black
mypy-extensions==1.0.0
# via black
packaging==23.2
# via black
pathspec==0.11.0
# via black
platformdirs==4.0.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Use an existing resolution for `black==23.10.1`, with stale versions of `click` and `pathspec`.
/// Both packages should be upgraded.
#[test]
fn upgrade_all() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // Seed an existing output file with stale transitive pins.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
        # This file was autogenerated by uv via the following command:
        #    uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
        black==23.10.1
        click==8.1.2
            # via black
        mypy-extensions==1.0.0
            # via black
        packaging==23.2
            # via black
        pathspec==0.11.0
            # via black
        platformdirs==4.0.0
            # via black
    "})?;

    // `--upgrade` discards all existing pins, so every stale package moves to its latest version.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--output-file")
        .arg("requirements.txt")
        .arg("--upgrade"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}
/// Use an existing resolution for `black==23.10.1`, with stale versions of `click` and `pathspec`.
/// Only `click` should be upgraded.
#[test]
fn upgrade_package() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // Seed an existing output file with stale pins for both `click` and `pathspec`.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
        # This file was autogenerated by uv via the following command:
        #    uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
        black==23.10.1
        click==8.1.2
            # via black
        mypy-extensions==1.0.0
            # via black
        packaging==23.2
            # via black
        pathspec==0.11.0
            # via black
        platformdirs==4.0.0
            # via black
    "})?;

    // `--upgrade-package click` releases only `click`'s preference; the other stale pins
    // (e.g., `pathspec==0.11.0`) remain untouched.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--output-file")
        .arg("requirements.txt")
        .arg("--upgrade-package")
        .arg("click"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==23.2
        # via black
    pathspec==0.11.0
        # via black
    platformdirs==4.0.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}
/// Upgrade a package with a constraint on the allowed upgrade.
#[test]
fn upgrade_constraint() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("iniconfig")?;

    // Seed an existing output file pinning `iniconfig` to an old version.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
        # This file was autogenerated by uv via the following command:
        #    uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
        iniconfig==1.0.0
    "})?;

    // `--upgrade-package iniconfig<2` allows an upgrade, but only within the `<2` bound.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--output-file")
        .arg("requirements.txt")
        .arg("--upgrade-package")
        .arg("iniconfig<2"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt
    iniconfig==1.1.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // An unconstrained `-P iniconfig` (short form of `--upgrade-package`) then upgrades to the
    // latest version.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--output-file")
        .arg("requirements.txt")
        .arg("-P")
        .arg("iniconfig"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt
    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Attempt to resolve a requirement at a path that doesn't exist.
#[test]
fn missing_path_requirement() -> Result<()> {
    let context = TestContext::new("3.12");

    // Use a platform-appropriate absolute `file://` URL that points at nothing.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(if cfg!(windows) {
        "anyio @ file://C:/tmp/anyio-3.7.0.tar.gz"
    } else {
        "anyio @ file:///tmp/anyio-3.7.0.tar.gz"
    })?;

    // Strip the Windows drive prefix so both platforms produce the same snapshot.
    let filters: Vec<_> = [(r"/C:/", "/")]
        .into_iter()
        .chain(context.filters())
        .collect();

    uv_snapshot!(filters, context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Distribution not found at: file://tmp/anyio-3.7.0.tar.gz
    "###);

    Ok(())
}
/// Attempt to resolve an editable requirement at a file path that doesn't exist.
#[test]
fn missing_editable_file() -> Result<()> {
    let context = TestContext::new("3.12");

    // Point `-e` at an archive path: editables must refer to directories, so this is
    // rejected with a dedicated error rather than a generic "not found".
    context
        .temp_dir
        .child("requirements.in")
        .write_str("-e foo/anyio-3.7.0.tar.gz")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Unsupported editable requirement in `requirements.in`
      Caused by: Editable must refer to a local directory, not an archive: `file://[TEMP_DIR]/foo/anyio-3.7.0.tar.gz`
    "###);

    Ok(())
}
/// Attempt to resolve an editable requirement at a directory path that doesn't exist.
#[test]
fn missing_editable_directory() -> Result<()> {
    let context = TestContext::new("3.12");

    // The editable target is a directory path that was never created.
    context
        .temp_dir
        .child("requirements.in")
        .write_str("-e foo/bar")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Distribution not found at: file://[TEMP_DIR]/foo/bar
    "###);

    Ok(())
}
/// Attempt to resolve a URL requirement without a package name. The package name can be extracted
/// from the URL.
#[test]
fn unnamed_requirement_with_package_name() -> Result<()> {
    let context = TestContext::new("3.12");

    // A bare wheel URL with no `name @` prefix; the name (`flask`) comes from the filename.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl
        # via -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// Exclude annotations from the output.
#[test]
fn no_annotate() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // With `--no-annotate`, the `# via ...` provenance comments are omitted.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--no-annotate"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-annotate
    black==23.10.1
    click==8.1.7
    mypy-extensions==1.0.0
    packaging==24.0
    pathspec==0.12.1
    platformdirs==4.2.0

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}
/// Exclude header from the output.
#[test]
fn no_header() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // With `--no-header`, the "autogenerated by uv" banner is omitted from the output.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--no-header"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}
/// Include custom compile command in the header.
#[test]
fn custom_compile_command() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // The `--custom-compile-command` flag replaces the command shown in the header.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--custom-compile-command")
        .arg("./custom-uv-compile.sh"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    ./custom-uv-compile.sh
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // with env var: `UV_CUSTOM_COMPILE_COMMAND` has the same effect as the flag.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .env("UV_CUSTOM_COMPILE_COMMAND", "./custom-uv-compile.sh"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    ./custom-uv-compile.sh
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}
/// Emit warnings when users pass redundant options from `pip-compile`.
#[test]
fn allow_unsafe() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("werkzeug==3.0.1")?;

    // `--allow-unsafe` is accepted for pip-compile compatibility but only emits a warning.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--allow-unsafe"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --allow-unsafe
    markupsafe==2.1.5
        # via werkzeug
    werkzeug==3.0.1
        # via -r requirements.in

    ----- stderr -----
    warning: pip-compile's `--allow-unsafe` has no effect (uv can safely pin `pip` and other packages)
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}
/// Emit warnings when users pass redundant options from `pip-compile`.
#[test]
fn resolver_legacy() -> Result<()> {
    let context = TestContext::new("3.12");

    // The legacy pip resolver has no uv equivalent, so requesting it is a hard error
    // (unlike `--allow-unsafe`, which merely warns).
    context
        .temp_dir
        .child("requirements.in")
        .write_str("werkzeug==3.0.1")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--resolver=legacy"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: pip-compile's `--resolver=legacy` is unsupported (uv always backtracks)
    "###
    );

    Ok(())
}
/// Emit the `--index-url` and `--extra-index-url` locations.
/// Also, preserve the `--index-url` and `--extra-index-url` flags in the command in the header.
#[test]
fn emit_index_urls() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // With `--emit-index-url`, both index flags appear as directives at the top of the output.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--emit-index-url")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .arg("--extra-index-url")
        .arg("https://pypi.org/simple"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple
    --index-url https://test.pypi.org/simple/
    --extra-index-url https://pypi.org/simple
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}
/// Emit the `--find-links` locations.
#[test]
fn emit_find_links() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // With `--emit-find-links`, the `--find-links` location is written into the output file.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--emit-find-links")
        .arg("--find-links")
        .arg("./"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-find-links --find-links ./
    --find-links ./
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}
/// Emit the `--find-links` locations using a relative path in a requirements file. The verbatim
/// path should be preserved.
#[test]
fn emit_find_links_relative() -> Result<()> {
    let context = TestContext::new("3.12");

    // The `-f ./` directive comes from the requirements file itself, not the command line.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-f ./\niniconfig")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--emit-find-links"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-find-links
    --find-links ./
    iniconfig==2.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Emit the `--no-binary` and `--only-binary` options.
#[test]
fn emit_build_options() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // With `--emit-build-options`, the binary/source selection flags are echoed into the output.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--emit-build-options")
        .arg("--only-binary")
        .arg("black")
        .arg("--no-binary")
        .arg(":all:"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-build-options --only-binary black --no-binary :all:
    --no-binary :all:
    --only-binary black
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}
/// Respect the `--no-index` flag in a `requirements.txt` file.
#[test]
fn no_index_requirements_txt() -> Result<()> {
    let context = TestContext::new("3.12");

    // `--no-index` inside the requirements file disables registry lookups entirely,
    // so `tqdm` cannot be found anywhere.
    context
        .temp_dir
        .child("requirements.in")
        .write_str("--no-index\ntqdm")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because tqdm was not found in the provided package locations and you require tqdm, we can conclude that the requirements are unsatisfiable.

          hint: Packages were unavailable because index lookups were disabled and no additional package locations were provided (try: `--find-links <uri>`)
    "###
    );

    Ok(())
}
/// Prefer the `--index-url` from the command line over the `--index-url` in a `requirements.txt`
/// file. Also, `--index-url` and `--extra-index-url` should not be presented in the output
/// unless we specify `--emit-index-url`.
#[test]
fn index_url_requirements_txt() -> Result<()> {
    let context = TestContext::new("3.12");

    // The requirements file points at a bogus index; the CLI flag must win for this to resolve.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("--index-url https://google.com\ntqdm")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--index-url")
        .arg("https://pypi.org/simple"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    tqdm==4.66.2
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Raise an error when multiple `requirements.txt` files include different `--index-url` flags.
#[test]
fn conflicting_index_urls_requirements_txt() -> Result<()> {
    let context = TestContext::new("3.12");

    // The requirements file and the constraints file each declare a different index.
    context
        .temp_dir
        .child("requirements.in")
        .write_str("--index-url https://google.com\ntqdm")?;
    context
        .temp_dir
        .child("constraints.in")
        .write_str("--index-url https://wikipedia.org\nflask")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Multiple index URLs specified: `https://google.com/` vs. `https://wikipedia.org/`
    "###
    );

    Ok(())
}
/// Doesn't raise an error when multiple `requirements.txt` files include matching `--index-url` flags.
#[test]
fn matching_index_urls_requirements_txt() -> Result<()> {
    let context = TestContext::new("3.12");

    // Both files declare the same index, so no conflict is reported.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("--index-url https://pypi.org/simple")?;

    let constraints_in = context.temp_dir.child("constraints.in");
    constraints_in.write_str("--index-url https://pypi.org/simple")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.in

    ----- stderr -----
    Resolved 0 packages in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a registry package without network access via the `--offline` flag.
#[test]
fn offline_registry() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black==23.10.1")?;

    // Resolve with `--offline` with an empty cache.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--offline"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because black was not found in the cache and you require black==23.10.1, we can conclude that the requirements are unsatisfiable.

          hint: Packages were unavailable because the network was disabled
    "###
    );

    // Populate the cache.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // Resolve with `--offline` with a populated cache: the same resolution now succeeds.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--offline"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --offline
    black==23.10.1
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a registry package without network access via the `--offline` flag. We should backtrack
/// to the latest version of the package that's available in the cache.
#[test]
fn offline_registry_backtrack() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("iniconfig==1.1.1")?;

    // Populate the cache (with `iniconfig==1.1.1` only).
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    iniconfig==1.1.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // Resolve with `--offline`, with a looser requirement. We should backtrack to `1.1.1`.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("iniconfig")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--offline"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --offline
    iniconfig==1.1.1
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a package without network access via the `--offline` flag, using `--find-links` for an
/// HTML registry.
#[test]
fn offline_find_links() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("tqdm")?;

    // Resolve with `--offline` and `--find-links`. We indicate that the network was disabled,
    // since both the `--find-links` and the registry lookups fail (but, importantly, we don't error
    // when failing to fetch the `--find-links` URL).
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--find-links")
        .arg("https://download.pytorch.org/whl/torch_stable.html")
        .arg("--offline"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because tqdm was not found in the cache and you require tqdm, we can conclude that the requirements are unsatisfiable.

          hint: Packages were unavailable because the network was disabled
    "###
    );

    // Resolve with `--offline`, `--find-links`, and `--no-index`: same outcome without
    // registry lookups.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--find-links")
        .arg("https://download.pytorch.org/whl/torch_stable.html")
        .arg("--no-index")
        .arg("--offline"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because tqdm was not found in the cache and you require tqdm, we can conclude that the requirements are unsatisfiable.

          hint: Packages were unavailable because the network was disabled
    "###
    );

    Ok(())
}
/// Resolve a direct URL package without network access via the `--offline` flag.
#[test]
fn offline_direct_url() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl")?;

    // Resolve with `--offline` with an empty cache: the wheel download fails hard.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--offline"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: Failed to download: `iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl`
      Caused by: Network connectivity is disabled, but the requested data wasn't found in the cache for: `https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl`
    "###
    );

    // Populate the cache.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    // Resolve with `--offline` with a populated cache.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--offline"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --offline
    iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a package with invalid metadata, by way of an invalid `Requires-Python` field in the
/// `METADATA` file.
#[test]
fn invalid_metadata_requires_python() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("validation==2.0.0")?;

    // `2.0.0` has invalid metadata. Resolve against the local `scripts/links` fixtures only.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--no-index")
        .arg("--find-links")
        .arg(context.workspace_root.join("scripts").join("links")), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because validation==2.0.0 has invalid metadata and you require validation==2.0.0, we can conclude that the requirements are unsatisfiable.

          hint: Metadata for validation==2.0.0 could not be parsed:
            Failed to parse version: Unexpected end of version specifier, expected operator:
            12
              ^^
    "###
    );

    Ok(())
}
/// Resolve a package with multiple `.dist-info` directories.
#[test]
fn invalid_metadata_multiple_dist_info() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("validation==3.0.0")?;

    // `3.0.0` has an invalid structure (multiple `.dist-info` directories). Resolve against
    // the local `scripts/links` fixtures only.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--no-index")
        .arg("--find-links")
        .arg(context.workspace_root.join("scripts").join("links")), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because validation==3.0.0 has an invalid package format and you require validation==3.0.0, we can conclude that the requirements are unsatisfiable.

          hint: The structure of validation==3.0.0 was invalid:
            Multiple .dist-info directories found: validation-2.0.0, validation-3.0.0
    "###
    );

    Ok(())
}
/// Resolve a package, but backtrack past versions with invalid metadata.
#[test]
fn invalid_metadata_backtrack() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("validation")?;

    // `2.0.0` and `3.0.0` have invalid metadata. We should backtrack to `1.0.0` (the preceding
    // version, which has valid metadata).
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--no-index")
        .arg("--find-links")
        .arg(context.workspace_root.join("scripts").join("links")), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-index
    validation==1.0.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Resolve nested `-r` requirements files with relative paths.
#[test]
fn compile_relative_subfile() -> Result<()> {
    let context = TestContext::new("3.12");

    // The top-level file includes a file in a subdirectory...
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-r subdir/requirements.in")?;

    // ...which in turn includes a sibling file via a path relative to itself.
    let subdir = context.temp_dir.child("subdir");
    let requirements_in = subdir.child("requirements.in");
    requirements_in.write_str("-r requirements-dev.in")?;

    let requirements_dev_in = subdir.child("requirements-dev.in");
    requirements_dev_in.write_str("anyio")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    anyio==4.3.0
        # via -r subdir/requirements-dev.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###);

    Ok(())
}
/// Resolve a package with an invalid extra named `.none`.
#[test]
fn compile_none_extra() -> Result<()> {
    let context = TestContext::new("3.12");

    // `entrypoints==0.3` is the fixture with the `.none` extra; resolution should
    // still succeed.
    context
        .temp_dir
        .child("requirements.in")
        .write_str("entrypoints==0.3")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    entrypoints==0.3
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}
/// Resolve a package (`pytz`) with a preference that omits a trailing zero.
///
/// See: <https://github.com/astral-sh/uv/issues/1536>
#[test]
fn compile_types_pytz() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("types-pytz")?;

    // The existing pin `2021.1` is the same release as `2021.1.0`; the preference should
    // still apply despite the missing trailing zero.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("types-pytz==2021.1")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("-o")
        .arg("requirements.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -o requirements.txt
    types-pytz==2021.1.0
        # via -r requirements.in

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}
/// Resolve a registry package (`black`) with an unnamed URL preference. The preference should be
/// ignored.
#[test]
fn compile_unnamed_preference() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black")?;

    // The existing output contains an unnamed path entry, which can't be matched to `black`
    // and so must not influence the resolution.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("./scripts/packages/black_editable")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("-o")
        .arg("requirements.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in -o requirements.txt
    black==24.3.0
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###);

    Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` pinning that package
/// to a specific URL.
#[test]
fn compile_constraints_compatible_url() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio>4")?;

    // The constraint pins `anyio` to a wheel URL whose version (4.2.0) satisfies `anyio>4`.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl
        # via
        #   -c constraints.txt
        #   -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a direct URL package from a `requirements.in` file, with a `constraints.txt` file
/// pinning it to a specific version.
#[test]
fn compile_constraints_compatible_url_version() -> Result<()> {
    let context = TestContext::new("3.12");

    // The direct-URL requirement (4.2.0) satisfies the `anyio>4` version constraint.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl")?;

    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("anyio>4")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl
        # via
        #   -c constraints.txt
        #   -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file pinning it to
/// a specific URL with an incompatible version.
#[test]
fn compile_constraints_incompatible_url() -> Result<()> {
    let context = TestContext::new("3.12");
    // The requirement demands `anyio<4`, but the constraint pins anyio to a 4.2.0 wheel; the
    // resolver should report an unsatisfiable conflict rather than ignoring either input.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio<4")?;
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.txt"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
    × No solution found when resolving dependencies:
    ╰─▶ Because only anyio>=4 is available and you require anyio<4, we can conclude that the requirements are unsatisfiable.
    "###
    );
    Ok(())
}
/// Resolve a package from a `requirements.in` file, respecting the `--index-url` in a
/// `requirements.in` file. The resolution should fail, since the package doesn't exist at the
/// configured index (the PyTorch wheel index doesn't host `anyio`).
#[test]
fn index_url_in_requirements() -> Result<()> {
    let context = TestContext::new("3.12");
    // The `--index-url` directive inside the requirements file replaces the default index.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("--index-url https://download.pytorch.org/whl\nanyio<4")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
    × No solution found when resolving dependencies:
    ╰─▶ Because anyio was not found in the package registry and you require anyio<4, we can conclude that the requirements are unsatisfiable.
    "###
    );
    Ok(())
}
/// Resolve a package from a `requirements.in` file, respecting the `--index-url` passed via the
/// command line over that in a `requirements.in` file.
#[test]
fn index_url_from_command_line() -> Result<()> {
    let context = TestContext::new("3.12");
    // The file points at the PyTorch index (which lacks `anyio`), but the CLI flag overrides it
    // with PyPI, so resolution succeeds.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("--index-url https://download.pytorch.org/whl\nanyio<4")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--index-url")
        .arg("https://pypi.org/simple"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in
    anyio==3.7.1
    # via -r requirements.in
    idna==3.6
    # via anyio
    sniffio==1.3.1
    # via anyio
    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );
    Ok(())
}
/// Resolve a package from a `requirements.in` file with a dependency that uses an unsupported
/// scheme.
#[test]
fn unsupported_scheme() -> Result<()> {
    let context = TestContext::new("3.12");
    // `bzr+https` (Bazaar) is not a VCS scheme uv supports; parsing the requirement should fail
    // with exit code 2 (usage error) and a caret pointing at the offending URL.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio @ bzr+https://example.com/anyio")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Couldn't parse requirement in `requirements.in` at position 0
    Caused by: Unsupported URL prefix `bzr` in URL: `bzr+https://example.com/anyio` (Bazaar is not supported)
    anyio @ bzr+https://example.com/anyio
    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    "###
    );
    Ok(())
}
/// Resolve a package with `--no-deps`, including a valid extra.
#[test]
fn no_deps_valid_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    // With `--no-deps`, only `flask` itself is resolved; the `dotenv` extra's dependencies are
    // not pulled in.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask[dotenv]")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--no-deps"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-deps
    flask==3.0.2
    # via -r requirements.in
    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );
    Ok(())
}
/// Resolve a package with `--no-deps`, including an invalid extra.
#[test]
fn no_deps_invalid_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    // A nonexistent extra is not an error: resolution succeeds, but a warning is emitted on
    // stderr naming the missing extra.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask[empty]")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--no-deps"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-deps
    flask==3.0.2
    # via -r requirements.in
    ----- stderr -----
    Resolved 1 package in [TIME]
    warning: The package `flask==3.0.2` does not have an extra named `empty`
    "###
    );
    Ok(())
}
/// Resolve a package with `--no-deps` in which the requirements have a conflict in their
/// transitive dependencies. The resolution should succeed, since `--no-deps` ignores the
/// transitive dependencies.
#[test]
fn no_deps_transitive_conflict() -> Result<()> {
    let context = TestContext::new("3.12");
    // Create an editable with a dependency on `anyio` at a dedicated URL.
    let editable_dir1 = context.temp_dir.child("editable1");
    editable_dir1.create_dir_all()?;
    let pyproject_toml = editable_dir1.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
[project]
name = "editable1"
version = "0.0.1"
dependencies = [
  "anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl"
]
"#,
    )?;
    // Create an editable with a dependency on `anyio` at a different, dedicated URL.
    // The two URLs conflict (4.2.0 vs. 4.3.0), but only transitively.
    let editable_dir2 = context.temp_dir.child("editable2");
    editable_dir2.create_dir_all()?;
    let pyproject_toml = editable_dir2.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
[project]
name = "editable2"
version = "0.0.1"
dependencies = [
  "anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl"
]
"#,
    )?;
    // Write to a requirements file.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&indoc::formatdoc! {r#"
        -e {}
        -e {}
        "#,
        editable_dir1.path().display(),
        editable_dir2.path().display()
    })?;
    // With `--no-deps`, the conflicting `anyio` pins are never consulted, so both editables
    // resolve successfully.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--no-deps"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-deps
    -e [TEMP_DIR]/editable1
    # via -r requirements.in
    -e [TEMP_DIR]/editable2
    # via -r requirements.in
    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );
    Ok(())
}
/// Resolve an editable package with an invalid extra.
#[test]
fn editable_invalid_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    // The editable path is relative to the repository checkout, so the command runs from the
    // crate's working directory rather than the test's temp dir.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-e ../../scripts/packages/black_editable[empty]")?;
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(requirements_in.path())
        .current_dir(current_dir()?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
    -e ../../scripts/packages/black_editable
    # via -r [TEMP_DIR]/requirements.in
    ----- stderr -----
    Resolved 1 package in [TIME]
    warning: The package `black @ file://[WORKSPACE]/scripts/packages/black_editable` does not have an extra named `empty`
    "###);
    Ok(())
}
/// Resolve a package with `--no-strip-extras`, where a single extra is requested.
#[test]
fn no_strip_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    // With `--no-strip-extras`, the output preserves `flask[dotenv]` (rather than plain `flask`),
    // and the extra's dependency (`python-dotenv`) is included.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask[dotenv]")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--no-strip-extras"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-strip-extras
    blinker==1.7.0
    # via flask
    click==8.1.7
    # via flask
    flask[dotenv]==3.0.2
    # via -r requirements.in
    itsdangerous==2.1.2
    # via flask
    jinja2==3.1.3
    # via flask
    markupsafe==2.1.5
    # via
    # jinja2
    # werkzeug
    python-dotenv==1.0.1
    # via flask
    werkzeug==3.0.1
    # via flask
    ----- stderr -----
    Resolved 8 packages in [TIME]
    "###
    );
    Ok(())
}
/// Resolve a package with `--no-strip-extras`, where multiple extras of the same package are
/// requested across separate lines and should be merged in the output (`anyio[doc, trio]`).
#[test]
#[cfg(not(windows))]
fn no_strip_extras() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio[trio]\nanyio[doc]")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--no-strip-extras"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-strip-extras
    alabaster==0.7.16
    # via sphinx
    anyio[doc, trio]==4.3.0
    # via -r requirements.in
    attrs==23.2.0
    # via
    # outcome
    # trio
    babel==2.14.0
    # via sphinx
    certifi==2024.2.2
    # via requests
    charset-normalizer==3.3.2
    # via requests
    docutils==0.20.1
    # via
    # sphinx
    # sphinx-rtd-theme
    idna==3.6
    # via
    # anyio
    # requests
    # trio
    imagesize==1.4.1
    # via sphinx
    jinja2==3.1.3
    # via sphinx
    markupsafe==2.1.5
    # via jinja2
    outcome==1.3.0.post0
    # via trio
    packaging==24.0
    # via
    # anyio
    # sphinx
    pygments==2.17.2
    # via sphinx
    requests==2.31.0
    # via sphinx
    sniffio==1.3.1
    # via
    # anyio
    # trio
    snowballstemmer==2.2.0
    # via sphinx
    sortedcontainers==2.4.0
    # via trio
    sphinx==7.2.6
    # via
    # anyio
    # sphinx-autodoc-typehints
    # sphinx-rtd-theme
    # sphinxcontrib-jquery
    sphinx-autodoc-typehints==2.0.0
    # via anyio
    sphinx-rtd-theme==2.0.0
    # via anyio
    sphinxcontrib-applehelp==1.0.8
    # via sphinx
    sphinxcontrib-devhelp==1.0.6
    # via sphinx
    sphinxcontrib-htmlhelp==2.0.5
    # via sphinx
    sphinxcontrib-jquery==4.1
    # via sphinx-rtd-theme
    sphinxcontrib-jsmath==1.0.1
    # via sphinx
    sphinxcontrib-qthelp==1.0.7
    # via sphinx
    sphinxcontrib-serializinghtml==1.1.10
    # via sphinx
    trio==0.25.0
    # via anyio
    urllib3==2.2.1
    # via requests
    ----- stderr -----
    Resolved 30 packages in [TIME]
    "###
    );
    Ok(())
}
/// Resolve a package with `--no-strip-markers`.
#[test]
fn no_strip_markers() -> Result<()> {
    let context = TestContext::new("3.12");
    // The requirement's marker should be retained in the output and propagated onto each
    // transitive dependency (`idna`, `sniffio`).
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio ; python_version > '3.11'")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--no-strip-markers")
        .arg("--python-platform")
        .arg("linux"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-strip-markers --python-platform linux
    anyio==4.3.0 ; python_version > '3.11'
    # via -r requirements.in
    idna==3.6 ; python_version > '3.11'
    # via anyio
    sniffio==1.3.1 ; python_version > '3.11'
    # via anyio
    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );
    Ok(())
}
/// Resolve a package with `--no-strip-markers`. In this case, a single package is included with
/// multiple markers.
#[test]
fn no_strip_markers_multiple_markers() -> Result<()> {
    let context = TestContext::new("3.12");
    // The same package is requested under two markers; the output should combine them with `or`
    // on `trio` and on every transitive dependency.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        trio ; python_version > '3.11'
        trio ; sys_platform == 'win32'
        "})?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--no-strip-markers")
        .arg("--python-platform")
        .arg("windows"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-strip-markers --python-platform windows
    attrs==23.2.0 ; python_version > '3.11' or sys_platform == 'win32'
    # via
    # outcome
    # trio
    cffi==1.16.0 ; implementation_name != 'pypy' and os_name == 'nt' and (python_version > '3.11' or sys_platform == 'win32')
    # via trio
    idna==3.6 ; python_version > '3.11' or sys_platform == 'win32'
    # via trio
    outcome==1.3.0.post0 ; python_version > '3.11' or sys_platform == 'win32'
    # via trio
    pycparser==2.21 ; implementation_name != 'pypy' and os_name == 'nt' and (python_version > '3.11' or sys_platform == 'win32')
    # via cffi
    sniffio==1.3.1 ; python_version > '3.11' or sys_platform == 'win32'
    # via trio
    sortedcontainers==2.4.0 ; python_version > '3.11' or sys_platform == 'win32'
    # via trio
    trio==0.25.0 ; python_version > '3.11' or sys_platform == 'win32'
    # via -r requirements.in
    ----- stderr -----
    Resolved 8 packages in [TIME]
    "###
    );
    Ok(())
}
/// Resolve a package with `--no-strip-markers`. In this case, one of the dependencies has markers
/// on its own requirements.
#[test]
fn no_strip_markers_transitive_marker() -> Result<()> {
    let context = TestContext::new("3.12");
    // `trio`'s own conditional dependency on `cffi` should have its marker combined (with `and`)
    // with the root requirement's marker.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("trio ; python_version > '3.11'")?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--no-strip-markers")
        .arg("--python-platform")
        .arg("windows"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --no-strip-markers --python-platform windows
    attrs==23.2.0 ; python_version > '3.11'
    # via
    # outcome
    # trio
    cffi==1.16.0 ; python_version > '3.11' and implementation_name != 'pypy' and os_name == 'nt'
    # via trio
    idna==3.6 ; python_version > '3.11'
    # via trio
    outcome==1.3.0.post0 ; python_version > '3.11'
    # via trio
    pycparser==2.21 ; python_version > '3.11' and implementation_name != 'pypy' and os_name == 'nt'
    # via cffi
    sniffio==1.3.1 ; python_version > '3.11'
    # via trio
    sortedcontainers==2.4.0 ; python_version > '3.11'
    # via trio
    trio==0.25.0 ; python_version > '3.11'
    # via -r requirements.in
    ----- stderr -----
    Resolved 8 packages in [TIME]
    "###
    );
    Ok(())
}
/// Perform a universal resolution with a package that has a marker.
#[test]
fn universal() -> Result<()> {
    let context = TestContext::new("3.12");
    // In `--universal` mode the resolution covers all platforms at once, so the two marker
    // branches collapse into a single `trio` entry and most markers are dropped.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        trio ; python_version > '3.11'
        trio ; sys_platform == 'win32'
        "})?;
    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
        .arg("requirements.in")
        .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    attrs==23.2.0
    # via
    # outcome
    # trio
    cffi==1.16.0 ; implementation_name != 'pypy' and os_name == 'nt'
    # via trio
    idna==3.6
    # via trio
    outcome==1.3.0.post0
    # via trio
    pycparser==2.21 ; implementation_name != 'pypy' and os_name == 'nt'
    # via cffi
    sniffio==1.3.1
    # via trio
    sortedcontainers==2.4.0
    # via trio
    trio==0.25.0
    # via -r requirements.in
    ----- stderr -----
    Resolved 8 packages in [TIME]
    "###
    );
    Ok(())
}
/// Perform a universal resolution with conflicting versions and markers.
#[test]
fn universal_conflicting() -> Result<()> {
    let context = TestContext::new("3.12");
    // The two `trio` pins are mutually exclusive by platform marker, so universal resolution
    // forks and emits both versions, each gated on its own marker.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        trio==0.25.0 ; sys_platform == 'darwin'
        trio==0.10.0 ; sys_platform == 'win32'
        "})?;
    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
        .arg("requirements.in")
        .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    async-generator==1.10 ; sys_platform == 'win32'
    # via trio
    attrs==23.2.0 ; sys_platform == 'darwin' or sys_platform == 'win32'
    # via
    # outcome
    # trio
    cffi==1.16.0 ; (implementation_name != 'pypy' and os_name == 'nt' and sys_platform == 'darwin') or (os_name == 'nt' and sys_platform == 'win32')
    # via trio
    idna==3.6 ; sys_platform == 'darwin' or sys_platform == 'win32'
    # via trio
    outcome==1.3.0.post0 ; sys_platform == 'darwin' or sys_platform == 'win32'
    # via trio
    pycparser==2.21 ; (implementation_name != 'pypy' and os_name == 'nt' and sys_platform == 'darwin') or (os_name == 'nt' and sys_platform == 'win32')
    # via cffi
    sniffio==1.3.1 ; sys_platform == 'darwin' or sys_platform == 'win32'
    # via trio
    sortedcontainers==2.4.0 ; sys_platform == 'darwin' or sys_platform == 'win32'
    # via trio
    trio==0.10.0 ; sys_platform == 'win32'
    # via -r requirements.in
    trio==0.25.0 ; sys_platform == 'darwin'
    # via -r requirements.in
    ----- stderr -----
    Resolved 10 packages in [TIME]
    "###
    );
    Ok(())
}
/// Perform a universal resolution with a package that contains cycles in its dependency graph.
#[test]
fn universal_cycles() -> Result<()> {
    let context = TestContext::new("3.12");
    // `testtools` and `fixtures` depend on each other; the resolver must terminate and report
    // both as roots and as mutual dependencies in the `via` annotations.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        testtools==2.3.0
        fixtures==3.0.0
        "})?;
    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
        .arg("requirements.in")
        .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    argparse==1.4.0
    # via unittest2
    extras==1.0.0
    # via testtools
    fixtures==3.0.0
    # via
    # -r requirements.in
    # testtools
    linecache2==1.0.0
    # via traceback2
    pbr==6.0.0
    # via
    # fixtures
    # testtools
    python-mimeparse==1.6.0
    # via testtools
    six==1.16.0
    # via
    # fixtures
    # testtools
    # unittest2
    testtools==2.3.0
    # via
    # -r requirements.in
    # fixtures
    traceback2==1.4.0
    # via
    # testtools
    # unittest2
    unittest2==1.1.0
    # via testtools
    ----- stderr -----
    Resolved 10 packages in [TIME]
    "###
    );
    Ok(())
}
/// Perform a universal resolution with a constraint.
#[test]
fn universal_constraint() -> Result<()> {
    let context = TestContext::new("3.12");
    // The unmarked constraint pins `anyio` to 3.0.0; the requirement's platform marker is
    // retained on the pinned result and its dependencies.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        anyio ; sys_platform == 'win32'
        "})?;
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str(indoc::indoc! {r"
        anyio==3.0.0
        "})?;
    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
        .arg("requirements.in")
        .arg("-c")
        .arg("constraints.txt")
        .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in -c constraints.txt --universal
    anyio==3.0.0 ; sys_platform == 'win32'
    # via
    # -c constraints.txt
    # -r requirements.in
    idna==3.6 ; sys_platform == 'win32'
    # via anyio
    sniffio==1.3.1 ; sys_platform == 'win32'
    # via anyio
    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );
    Ok(())
}
/// Perform a universal resolution with a constraint, where the constraint itself has a marker.
#[test]
fn universal_constraint_marker() -> Result<()> {
    let context = TestContext::new("3.12");
    // The constraint's own marker (`os_name == 'nt'`) still applies the 3.0.0 pin here; the
    // output carries the requirement's marker, not the constraint's.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        anyio ; sys_platform == 'win32'
        "})?;
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str(indoc::indoc! {r"
        anyio==3.0.0 ; os_name == 'nt'
        "})?;
    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
        .arg("requirements.in")
        .arg("-c")
        .arg("constraints.txt")
        .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in -c constraints.txt --universal
    anyio==3.0.0 ; sys_platform == 'win32'
    # via
    # -c constraints.txt
    # -r requirements.in
    idna==3.6 ; sys_platform == 'win32'
    # via anyio
    sniffio==1.3.1 ; sys_platform == 'win32'
    # via anyio
    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );
    Ok(())
}
/// Perform a universal resolution with a divergent requirement, and a third requirement that's
/// compatible with both forks.
///
/// This currently fails, but should succeed.
///
/// See: <https://github.com/astral-sh/uv/issues/4640>
#[test]
fn universal_multi_version() -> Result<()> {
    let context = TestContext::new("3.12");
    // The bare `iniconfig` should be satisfiable in either fork, but the resolver currently
    // reports a conflict; the snapshot pins the (known-wrong) failure for regression tracking.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        iniconfig
        iniconfig==2.0.0 ; python_version > '3.12'
        iniconfig==1.0.0 ; python_version == '3.12'
        "})?;
    // NOTE(review): this constraint names `anyio`, which none of the requirements reference, so
    // it appears inert — presumably copied from the neighboring tests.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str(indoc::indoc! {r"
        anyio==3.0.0 ; os_name == 'nt'
        "})?;
    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
        .arg("requirements.in")
        .arg("-c")
        .arg("constraints.txt")
        .arg("--universal"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
    × No solution found when resolving dependencies:
    ╰─▶ Because iniconfig{python_version > '3.12'}==2.0.0 depends on iniconfig==2.0.0 and you require iniconfig{python_version > '3.12'}==2.0.0, we can conclude that your requirements and iniconfig{python_version == '3.12'}==1.0.0 are incompatible.
    And because you require iniconfig{python_version == '3.12'}==1.0.0, we can conclude that the requirements are unsatisfiable.
    "###
    );
    Ok(())
}
// Requested distinct local versions with disjoint markers.
#[test]
fn universal_disjoint_locals() -> Result<()> {
    let context = TestContext::new("3.12");
    // The `+cu118` and `+cpu` local builds of torch 2.0.0 are requested under mutually exclusive
    // markers; universal resolution should emit both, each gated on its marker.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        --find-links https://download.pytorch.org/whl/torch_stable.html
        torch==2.0.0+cu118 ; platform_machine == 'x86_64'
        torch==2.0.0+cpu ; platform_machine != 'x86_64'
        "})?;
    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
        .arg("requirements.in")
        .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cmake==3.28.4 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via triton
    filelock==3.13.1
    # via
    # torch
    # triton
    jinja2==3.1.3
    # via torch
    lit==18.1.2 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via triton
    markupsafe==2.1.5
    # via jinja2
    mpmath==1.3.0
    # via sympy
    networkx==3.2.1
    # via torch
    sympy==1.12
    # via torch
    torch==2.0.0+cpu ; platform_machine != 'x86_64'
    # via -r requirements.in
    torch==2.0.0+cu118 ; platform_machine == 'x86_64'
    # via
    # -r requirements.in
    # triton
    triton==2.0.0 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via torch
    typing-extensions==4.10.0
    # via torch
    ----- stderr -----
    Resolved 12 packages in [TIME]
    "###
    );
    Ok(())
}
// Requested distinct local versions with disjoint markers of a package
// that is also present as a transitive dependency.
#[test]
fn universal_transitive_disjoint_locals() -> Result<()> {
    let context = TestContext::new("3.12");
    // `torchvision` also depends on `torch`, so the forked local versions must each satisfy the
    // transitive requirement as well.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc::indoc! {r"
        --find-links https://download.pytorch.org/whl/torch_stable.html
        torch==2.0.0+cu118 ; platform_machine == 'x86_64'
        torch==2.0.0+cpu ; platform_machine != 'x86_64'
        torchvision==0.15.1
        "})?;
    // Some marker expressions on the output here are missing due to https://github.com/astral-sh/uv/issues/5086,
    // but the local versions are still respected correctly.
    uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
        .arg("requirements.in")
        .arg("--universal"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    certifi==2024.2.2
    # via requests
    charset-normalizer==3.3.2
    # via requests
    cmake==3.28.4 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via triton
    filelock==3.13.1
    # via
    # torch
    # triton
    idna==3.6
    # via requests
    jinja2==3.1.3
    # via torch
    lit==18.1.2 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via triton
    markupsafe==2.1.5
    # via jinja2
    mpmath==1.3.0
    # via sympy
    networkx==3.2.1
    # via torch
    numpy==1.26.4
    # via torchvision
    pillow==10.2.0
    # via torchvision
    requests==2.31.0
    # via torchvision
    sympy==1.12
    # via torch
    torch==2.0.0+cpu
    # via
    # -r requirements.in
    # torchvision
    torch==2.0.0+cu118
    # via
    # -r requirements.in
    # torchvision
    # triton
    torchvision==0.15.1
    # via -r requirements.in
    triton==2.0.0 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via torch
    typing-extensions==4.10.0
    # via torch
    urllib3==2.2.1
    # via requests
    ----- stderr -----
    Resolved 20 packages in [TIME]
    "###
    );
    Ok(())
}
/// Prefer local versions for dependencies of path requirements.
#[test]
fn universal_local_path_requirement() -> Result<()> {
    let context = TestContext::new("3.12");
    // The local project (`.`) pins `torch==2.0.0+cu118`; the plain `torch==2.0.0` requirement
    // should resolve to that local build rather than the base 2.0.0.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "example"
        version = "0.0.0"
        dependencies = [
          "torch==2.0.0+cu118"
        ]
        requires-python = ">=3.11"
        "#})?;
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {"
        torch==2.0.0
        .
        "})?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--universal")
        .arg("--find-links")
        .arg("https://download.pytorch.org/whl/torch_stable.html"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cmake==3.28.4 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via triton
    .
    # via -r requirements.in
    filelock==3.13.1
    # via
    # torch
    # triton
    jinja2==3.1.3
    # via torch
    lit==18.1.2 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via triton
    markupsafe==2.1.5
    # via jinja2
    mpmath==1.3.0
    # via sympy
    networkx==3.2.1
    # via torch
    sympy==1.12
    # via torch
    torch==2.0.0+cu118
    # via
    # -r requirements.in
    # example
    # triton
    triton==2.0.0 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via torch
    typing-extensions==4.10.0
    # via torch
    ----- stderr -----
    Resolved 12 packages in [TIME]
    "###
    );
    Ok(())
}
/// If a dependency requests a local version with an overlapping marker expression,
/// we should prefer the local in all cases.
#[test]
fn universal_overlapping_local_requirement() -> Result<()> {
    let context = TestContext::new("3.12");
    // The local pin is marker-gated but overlaps (not disjoint) with the unmarked `torch==2.0.0`
    // requirement, so a single `+cu118` entry should be emitted, with no fork.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "example"
        version = "0.0.0"
        dependencies = [
          "torch==2.0.0+cu118 ; platform_machine == 'x86_64'"
        ]
        requires-python = ">=3.11"
        "#})?;
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {"
        torch==2.0.0
        .
        "})?;
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--universal")
        .arg("--find-links")
        .arg("https://download.pytorch.org/whl/torch_stable.html"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cmake==3.28.4 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via triton
    .
    # via -r requirements.in
    filelock==3.13.1
    # via
    # torch
    # triton
    jinja2==3.1.3
    # via torch
    lit==18.1.2 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via triton
    markupsafe==2.1.5
    # via jinja2
    mpmath==1.3.0
    # via sympy
    networkx==3.2.1
    # via torch
    sympy==1.12
    # via torch
    torch==2.0.0+cu118
    # via
    # -r requirements.in
    # example
    # triton
    triton==2.0.0 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via torch
    typing-extensions==4.10.0
    # via torch
    ----- stderr -----
    Resolved 12 packages in [TIME]
    "###
    );
    Ok(())
}
/// If a dependency requests distinct local versions with disjoint marker expressions,
/// we should fork the root requirement.
#[test]
fn universal_disjoint_local_requirement() -> Result<()> {
    let context = TestContext::new("3.12");
    // The local project pins two disjoint local builds of torch; the bare `torch==2.0.0`
    // requirement must fork so both `+cpu` and `+cu118` appear in the output.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "example"
        version = "0.0.0"
        dependencies = [
          "torch==2.0.0+cu118 ; platform_machine == 'x86_64'",
          "torch==2.0.0+cpu ; platform_machine != 'x86_64'"
        ]
        requires-python = ">=3.11"
        "#})?;
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {"
        torch==2.0.0
        .
        "})?;
    // Some marker expressions on the output here are missing due to https://github.com/astral-sh/uv/issues/5086,
    // but the local versions are still respected correctly.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--universal")
        .arg("--find-links")
        .arg("https://download.pytorch.org/whl/torch_stable.html"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cmake==3.28.4 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via triton
    .
    # via -r requirements.in
    filelock==3.13.1
    # via
    # torch
    # triton
    jinja2==3.1.3
    # via torch
    lit==18.1.2 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via triton
    markupsafe==2.1.5
    # via jinja2
    mpmath==1.3.0
    # via sympy
    networkx==3.2.1
    # via torch
    sympy==1.12
    # via torch
    torch==2.0.0+cpu
    # via
    # -r requirements.in
    # example
    torch==2.0.0+cu118
    # via
    # -r requirements.in
    # example
    # triton
    triton==2.0.0 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via torch
    typing-extensions==4.10.0
    # via torch
    ----- stderr -----
    Resolved 13 packages in [TIME]
    "###
    );
    Ok(())
}
/// If a dependency requests distinct local versions and non-local versions with disjoint marker
/// expressions, we should fork the root requirement.
#[test]
fn universal_disjoint_base_or_local_requirement() -> Result<()> {
    let context = TestContext::new("3.12");
    // Three disjoint Python-version branches: a base 2.0.0 (below `requires-python`, so never
    // active), a `+cu118` build, and a `+cpu` build.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "example"
        version = "0.0.0"
        dependencies = [
          "torch==2.0.0; python_version < '3.10'",
          "torch==2.0.0+cu118 ; python_version >= '3.10' and python_version <= '3.12'",
          "torch==2.0.0+cpu ; python_version > '3.12'"
        ]
        requires-python = ">=3.11"
        "#})?;
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {"
        torch==2.0.0
        .
        "})?;
    // Some marker expressions on the output here are missing due to https://github.com/astral-sh/uv/issues/5086,
    // but the local versions are still respected correctly.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--universal")
        .arg("--find-links")
        .arg("https://download.pytorch.org/whl/torch_stable.html"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
    cmake==3.28.4 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via triton
    .
    # via -r requirements.in
    filelock==3.13.1
    # via
    # torch
    # triton
    jinja2==3.1.3
    # via torch
    lit==18.1.2 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via triton
    markupsafe==2.1.5
    # via jinja2
    mpmath==1.3.0
    # via sympy
    networkx==3.2.1
    # via torch
    sympy==1.12
    # via torch
    torch==2.0.0+cpu
    # via
    # -r requirements.in
    # example
    torch==2.0.0+cu118
    # via
    # -r requirements.in
    # example
    # triton
    triton==2.0.0 ; platform_machine == 'x86_64' and platform_system == 'Linux'
    # via torch
    typing-extensions==4.10.0
    # via torch
    ----- stderr -----
    Resolved 13 packages in [TIME]
    "###
    );
    Ok(())
}
/// If a dependency requests a local version with an overlapping marker expression
/// that form a nested fork, we should prefer the local in both children of the outer
/// fork.
#[test]
fn universal_nested_overlapping_local_requirement() -> Result<()> {
let context = TestContext::new("3.12");
// The project requests the local `+cu118` build under a marker that overlaps
// (but is narrower than) the `platform_machine == 'x86_64'` fork below.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "example"
version = "0.0.0"
dependencies = [
"torch==2.0.0+cu118 ; platform_machine == 'x86_64' and os_name == 'Linux'"
]
requires-python = ">=3.11"
"#})?;
// `requirements.in` forks on `platform_machine`; the `x86_64` branch overlaps
// with the project's local requirement above.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {"
torch==2.0.0 ; platform_machine == 'x86_64'
torch==2.3.0 ; platform_machine != 'x86_64'
.
"})?;
// Expect the local `+cu118` build to win throughout the `x86_64` branch.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--universal")
.arg("--find-links")
.arg("https://download.pytorch.org/whl/torch_stable.html"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
cmake==3.28.4 ; platform_machine == 'x86_64' and platform_system == 'Linux'
# via triton
.
# via -r requirements.in
filelock==3.13.1
# via
# torch
# triton
fsspec==2024.3.1 ; platform_machine != 'x86_64'
# via torch
intel-openmp==2021.4.0 ; platform_machine != 'x86_64' and platform_system == 'Windows'
# via mkl
jinja2==3.1.3
# via torch
lit==18.1.2 ; platform_machine == 'x86_64' and platform_system == 'Linux'
# via triton
markupsafe==2.1.5
# via jinja2
mkl==2021.4.0 ; platform_machine != 'x86_64' and platform_system == 'Windows'
# via torch
mpmath==1.3.0
# via sympy
networkx==3.2.1
# via torch
sympy==1.12
# via torch
tbb==2021.11.0 ; platform_machine != 'x86_64' and platform_system == 'Windows'
# via mkl
torch==2.3.0 ; platform_machine != 'x86_64'
# via -r requirements.in
torch==2.0.0+cu118 ; platform_machine == 'x86_64'
# via
# -r requirements.in
# example
# triton
triton==2.0.0 ; platform_machine == 'x86_64' and platform_system == 'Linux'
# via torch
typing-extensions==4.10.0
# via torch
----- stderr -----
Resolved 17 packages in [TIME]
"###
);
// A similar case, except the nested marker is now on the path requirement.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {"
torch==2.0.0 ; platform_machine == 'x86_64'
torch==2.3.0 ; platform_machine != 'x86_64'
. ; os_name == 'Linux'
"})?;
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "example"
version = "0.0.0"
dependencies = [
"torch==2.0.0+cu118 ; platform_machine == 'x86_64'",
]
requires-python = ">=3.11"
"#})?;
// The resolved set is the same as above, but the `os_name` marker now attaches
// to the local project entry instead of the `torch` requirement.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--universal")
.arg("--find-links")
.arg("https://download.pytorch.org/whl/torch_stable.html"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
cmake==3.28.4 ; platform_machine == 'x86_64' and platform_system == 'Linux'
# via triton
. ; os_name == 'Linux'
# via -r requirements.in
filelock==3.13.1
# via
# torch
# triton
fsspec==2024.3.1 ; platform_machine != 'x86_64'
# via torch
intel-openmp==2021.4.0 ; platform_machine != 'x86_64' and platform_system == 'Windows'
# via mkl
jinja2==3.1.3
# via torch
lit==18.1.2 ; platform_machine == 'x86_64' and platform_system == 'Linux'
# via triton
markupsafe==2.1.5
# via jinja2
mkl==2021.4.0 ; platform_machine != 'x86_64' and platform_system == 'Windows'
# via torch
mpmath==1.3.0
# via sympy
networkx==3.2.1
# via torch
sympy==1.12
# via torch
tbb==2021.11.0 ; platform_machine != 'x86_64' and platform_system == 'Windows'
# via mkl
torch==2.3.0 ; platform_machine != 'x86_64'
# via -r requirements.in
torch==2.0.0+cu118 ; platform_machine == 'x86_64'
# via
# -r requirements.in
# example
# triton
triton==2.0.0 ; platform_machine == 'x86_64' and platform_system == 'Linux'
# via torch
typing-extensions==4.10.0
# via torch
----- stderr -----
Resolved 17 packages in [TIME]
"###
);
Ok(())
}
/// If a dependency requests distinct local versions with disjoint marker expressions
/// that form a nested fork, we should create a nested fork.
#[test]
fn universal_nested_disjoint_local_requirement() -> Result<()> {
let context = TestContext::new("3.12");
// The project forks on `platform_machine` between two local builds of the same
// base version (`+cu118` vs `+cpu`).
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "example"
version = "0.0.0"
dependencies = [
"torch==2.0.0+cu118 ; platform_machine == 'x86_64'",
"torch==2.0.0+cpu ; platform_machine != 'x86_64'"
]
requires-python = ">=3.11"
"#})?;
// `requirements.in` forks on a different variable (`os_name`), nesting the
// project's `platform_machine` fork inside the `os_name == 'Linux'` branch.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc! {"
torch==2.0.0 ; os_name == 'Linux'
torch==2.3.0 ; os_name != 'Linux'
. ; os_name == 'Linux'
"})?;
// Some marker expressions on the output here are missing due to https://github.com/astral-sh/uv/issues/5086,
// but the local versions are still respected correctly.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--universal")
.arg("--find-links")
.arg("https://download.pytorch.org/whl/torch_stable.html"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
cmake==3.28.4 ; os_name == 'Linux' and platform_machine == 'x86_64' and platform_system == 'Linux'
# via triton
. ; os_name == 'Linux'
# via -r requirements.in
filelock==3.13.1
# via
# torch
# triton
fsspec==2024.3.1 ; os_name != 'Linux'
# via torch
intel-openmp==2021.4.0 ; os_name != 'Linux' and platform_system == 'Windows'
# via mkl
jinja2==3.1.3
# via torch
lit==18.1.2 ; os_name == 'Linux' and platform_machine == 'x86_64' and platform_system == 'Linux'
# via triton
markupsafe==2.1.5
# via jinja2
mkl==2021.4.0 ; os_name != 'Linux' and platform_system == 'Windows'
# via torch
mpmath==1.3.0
# via sympy
networkx==3.2.1
# via torch
nvidia-cublas-cu12==12.1.3.1 ; os_name != 'Linux' and platform_machine == 'x86_64' and platform_system == 'Linux'
# via
# nvidia-cudnn-cu12
# nvidia-cusolver-cu12
# torch
nvidia-cuda-cupti-cu12==12.1.105 ; os_name != 'Linux' and platform_machine == 'x86_64' and platform_system == 'Linux'
# via torch
nvidia-cuda-nvrtc-cu12==12.1.105 ; os_name != 'Linux' and platform_machine == 'x86_64' and platform_system == 'Linux'
# via torch
nvidia-cuda-runtime-cu12==12.1.105 ; os_name != 'Linux' and platform_machine == 'x86_64' and platform_system == 'Linux'
# via torch
nvidia-cudnn-cu12==8.9.2.26 ; os_name != 'Linux' and platform_machine == 'x86_64' and platform_system == 'Linux'
# via torch
nvidia-cufft-cu12==11.0.2.54 ; os_name != 'Linux' and platform_machine == 'x86_64' and platform_system == 'Linux'
# via torch
nvidia-curand-cu12==10.3.2.106 ; os_name != 'Linux' and platform_machine == 'x86_64' and platform_system == 'Linux'
# via torch
nvidia-cusolver-cu12==11.4.5.107 ; os_name != 'Linux' and platform_machine == 'x86_64' and platform_system == 'Linux'
# via torch
nvidia-cusparse-cu12==12.1.0.106 ; os_name != 'Linux' and platform_machine == 'x86_64' and platform_system == 'Linux'
# via
# nvidia-cusolver-cu12
# torch
nvidia-nccl-cu12==2.20.5 ; os_name != 'Linux' and platform_machine == 'x86_64' and platform_system == 'Linux'
# via torch
nvidia-nvjitlink-cu12==12.4.99 ; os_name != 'Linux' and platform_machine == 'x86_64' and platform_system == 'Linux'
# via
# nvidia-cusolver-cu12
# nvidia-cusparse-cu12
nvidia-nvtx-cu12==12.1.105 ; os_name != 'Linux' and platform_machine == 'x86_64' and platform_system == 'Linux'
# via torch
sympy==1.12
# via torch
tbb==2021.11.0 ; os_name != 'Linux' and platform_system == 'Windows'
# via mkl
torch==2.0.0+cu118 ; os_name == 'Linux'
# via
# -r requirements.in
# example
# triton
torch==2.3.0 ; os_name != 'Linux'
# via -r requirements.in
torch==2.0.0+cpu ; os_name == 'Linux'
# via
# -r requirements.in
# example
triton==2.0.0 ; os_name == 'Linux' and platform_machine == 'x86_64' and platform_system == 'Linux'
# via torch
typing-extensions==4.10.0
# via torch
----- stderr -----
Resolved 30 packages in [TIME]
"###
);
Ok(())
}
/// Perform a universal resolution that requires narrowing the supported Python range in one of the
/// fork branches.
///
/// Note that this test is currently asserted to be a failed resolution, which is part of
/// a small revert of the PR[1] that added Requires-Python version narrowing. This test
/// should ideally pass, but we aren't sure how to make it pass without producing
/// incorrect answers in other cases[2].
///
/// [1]: https://github.com/astral-sh/uv/pull/4707
/// [2]: https://github.com/astral-sh/uv/issues/4885
#[test]
fn universal_requires_python() -> Result<()> {
let context = TestContext::new("3.12");
// Fork on `python_version` at 3.9; resolving the `>= '3.9'` branch would need
// the `-p 3.8` requested range to be narrowed, which is currently unsupported.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc::indoc! {r"
numpy >=1.26 ; python_version >= '3.9'
numpy <1.26 ; python_version < '3.9'
"})?;
uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
.arg("requirements.in")
.arg("-p")
.arg("3.8")
.arg("--universal"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
warning: The requested Python version 3.8 is not available; 3.12.[X] will be used to build dependencies instead.
× No solution found when resolving dependencies for split (python_version >= '3.9'):
╰─▶ Because only the following versions of numpy{python_version >= '3.9'} are available:
numpy{python_version >= '3.9'}<=1.26.0
numpy{python_version >= '3.9'}==1.26.1
numpy{python_version >= '3.9'}==1.26.2
numpy{python_version >= '3.9'}==1.26.3
numpy{python_version >= '3.9'}==1.26.4
and the requested Python version (>=3.8) does not satisfy Python>=3.9, we can conclude that any of:
numpy{python_version >= '3.9'}>=1.26.0,<1.26.2
numpy{python_version >= '3.9'}>1.26.2,<1.26.3
numpy{python_version >= '3.9'}>1.26.3,<1.26.4
numpy{python_version >= '3.9'}>1.26.4
are incompatible.
And because the requested Python version (>=3.8) does not satisfy Python>=3.9 and you require numpy{python_version >= '3.9'}>=1.26, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
// This test captures a case[1] that was broken by Requires-Python version
// narrowing[2] in the universal resolver. When version narrowing is enabled
// (at time of writing), the `requirements.txt` generated includes several
// duplicate and unconditional dependencies without marker expressions.
//
// [1]: https://github.com/astral-sh/uv/issues/4885
// [2]: https://github.com/astral-sh/uv/pull/4707
#[test]
fn universal_no_repeated_unconditional_distributions() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc::indoc! {r"
pylint
sphinx
"})?;
// Each distribution must appear exactly once; version-conditional entries
// (e.g. `importlib-metadata`, `tomli`) carry `python_version` markers instead
// of being duplicated unconditionally.
uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
.arg("requirements.in")
.arg("-p")
.arg("3.8")
.arg("--universal"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in -p 3.8 --universal
alabaster==0.7.13
# via sphinx
astroid==3.1.0
# via pylint
babel==2.14.0
# via sphinx
certifi==2024.2.2
# via requests
charset-normalizer==3.3.2
# via requests
colorama==0.4.6 ; sys_platform == 'win32'
# via
# pylint
# sphinx
dill==0.3.8
# via pylint
docutils==0.20.1
# via sphinx
idna==3.6
# via requests
imagesize==1.4.1
# via sphinx
importlib-metadata==7.1.0 ; python_version < '3.10'
# via sphinx
isort==5.13.2
# via pylint
jinja2==3.1.3
# via sphinx
markupsafe==2.1.5
# via jinja2
mccabe==0.7.0
# via pylint
packaging==24.0
# via sphinx
platformdirs==4.2.0
# via pylint
pygments==2.17.2
# via sphinx
pylint==3.1.0
# via -r requirements.in
pytz==2024.1 ; python_version < '3.9'
# via babel
requests==2.31.0
# via sphinx
snowballstemmer==2.2.0
# via sphinx
sphinx==7.1.2
# via -r requirements.in
sphinxcontrib-applehelp==1.0.4
# via sphinx
sphinxcontrib-devhelp==1.0.2
# via sphinx
sphinxcontrib-htmlhelp==2.0.1
# via sphinx
sphinxcontrib-jsmath==1.0.1
# via sphinx
sphinxcontrib-qthelp==1.0.3
# via sphinx
sphinxcontrib-serializinghtml==1.1.5
# via sphinx
tomli==2.0.1 ; python_version < '3.11'
# via pylint
tomlkit==0.12.4
# via pylint
typing-extensions==4.10.0 ; python_version < '3.11'
# via
# astroid
# pylint
urllib3==2.2.1
# via requests
zipp==3.18.1 ; python_version < '3.10'
# via importlib-metadata
----- stderr -----
warning: The requested Python version 3.8 is not available; 3.12.[X] will be used to build dependencies instead.
Resolved 34 packages in [TIME]
"###
);
Ok(())
}
/// Remove `python_version` markers that are always true.
#[test]
fn universal_unnecessary_python() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(indoc::indoc! {r"
iniconfig ; python_version >= '3.7'
"})?;
// With `-p 3.8`, `python_version >= '3.7'` is vacuously true, so the marker
// must be dropped from the output entry entirely.
uv_snapshot!(context.filters(), windows_filters=false, context.pip_compile()
.arg("requirements.in")
.arg("-p")
.arg("3.8")
.arg("--universal"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in -p 3.8 --universal
iniconfig==2.0.0
# via -r requirements.in
----- stderr -----
warning: The requested Python version 3.8 is not available; 3.12.[X] will be used to build dependencies instead.
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file pinning one of
/// its transitive dependencies to a specific version.
#[test]
fn compile_constraints_compatible_version() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("virtualenv")?;
// `filelock` is a transitive dependency of `virtualenv`; the constraint pins
// it to an older-but-compatible release.
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("filelock==3.8.0")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
distlib==0.3.8
# via virtualenv
filelock==3.8.0
# via
# -c constraints.txt
# virtualenv
platformdirs==3.11.0
# via virtualenv
virtualenv==20.21.1
# via -r requirements.in
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// A direct requirement pinned to one exact version, combined with a constraints
/// file pinning the same package to a different exact version, must fail to
/// resolve.
#[test]
fn compile_constraints_incompatible_version() -> Result<()> {
let context = TestContext::new("3.12");
// Pin `filelock` to two mutually exclusive versions: one via the requirement,
// one via the constraint.
context
.temp_dir
.child("requirements.in")
.write_str("filelock==1.0.0")?;
context
.temp_dir
.child("constraints.txt")
.write_str("filelock==3.8.0")?;
// Resolution must fail with a conflict naming both pins.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because you require filelock==1.0.0 and filelock==3.8.0, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file pinning one of
/// its direct dependencies to an incompatible version.
///
/// NOTE(review): despite its name, this test's body is identical to
/// `compile_constraints_incompatible_version` above and never exercises URL
/// markers — it looks like a copy-paste placeholder. Confirm the intended
/// scenario and either rewrite the body or remove the duplicate.
#[test]
fn conflicting_url_markers() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("filelock==1.0.0")?;
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("filelock==3.8.0")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because you require filelock==1.0.0 and filelock==3.8.0, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Override a regular package with an editable. This should resolve to the editable package.
#[test]
fn editable_override() -> Result<()> {
let context = TestContext::new("3.12");
// Add a non-editable requirement.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("black")?;
// Add an editable override.
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("-e ../../scripts/packages/black_editable")?;
// Run from the current working directory so the relative editable path in the
// overrides file resolves against the repository checkout.
uv_snapshot!(context.filters(), context.pip_compile()
.arg(requirements_in.path())
.arg("--override")
.arg(overrides_txt.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in --override [TEMP_DIR]/overrides.txt
-e ../../scripts/packages/black_editable
# via
# --override [TEMP_DIR]/overrides.txt
# -r [TEMP_DIR]/requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Override an editable with a regular package. This should resolve to the regular package.
#[test]
fn override_editable() -> Result<()> {
let context = TestContext::new("3.12");
// The requirement is editable; the override replaces it with a pinned release.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e ../../scripts/packages/black_editable")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("black==23.10.1")?;
// Run from the current working directory so the relative editable path
// resolves against the repository checkout.
uv_snapshot!(context.filters(), context.pip_compile()
.arg(requirements_in.path())
.arg("--override")
.arg(overrides_txt.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in --override [TEMP_DIR]/overrides.txt
black==23.10.1
# via
# --override [TEMP_DIR]/overrides.txt
# -r [TEMP_DIR]/requirements.in
click==8.1.7
# via black
mypy-extensions==1.0.0
# via black
packaging==24.0
# via black
pathspec==0.12.1
# via black
platformdirs==4.2.0
# via black
----- stderr -----
Resolved 6 packages in [TIME]
"###);
Ok(())
}
/// Resolve a package with both a constraint _and_ an override. The override and the constraint are
/// compatible, but resolve to exactly the same version.
#[test]
fn override_with_compatible_constraint() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
// Constraint caps at 3.0.0; override floors at 3.0.0 — their intersection is
// exactly `anyio==3.0.0`.
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("anyio<=3.0.0")?;
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("anyio>=3.0.0")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt")
.arg("--override")
.arg("overrides.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt --override overrides.txt
anyio==3.0.0
# via
# -c constraints.txt
# --override overrides.txt
# -r requirements.in
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// When the constraint (`anyio<3.0.0`) and the override (`anyio>=3.0.0`) are
/// mutually exclusive, resolution must fail. (The correctness of this behavior
/// is subject to debate.)
#[test]
fn override_with_incompatible_constraint() -> Result<()> {
let context = TestContext::new("3.12");
// Write the three inputs: the bare requirement, a constraint capping the
// version, and an override flooring it above that cap.
context.temp_dir.child("requirements.in").write_str("anyio")?;
context
.temp_dir
.child("constraints.txt")
.write_str("anyio<3.0.0")?;
context
.temp_dir
.child("overrides.txt")
.write_str("anyio>=3.0.0")?;
// The conflict between override and constraint should surface as an error.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt")
.arg("--override")
.arg("overrides.txt"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because you require anyio>=3.0.0 and anyio<3.0.0, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Resolve a package, marking a dependency as unsafe.
#[test]
fn unsafe_package() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask")?;
// `jinja2` is in the resolution graph and should be moved to the trailing
// "excluded" comment block; `pydantic` is not in the graph at all, so it
// should simply never appear.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--unsafe-package")
.arg("jinja2")
.arg("--unsafe-package")
.arg("pydantic"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --unsafe-package jinja2 --unsafe-package pydantic
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask==3.0.2
# via -r requirements.in
itsdangerous==2.1.2
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
# The following packages were excluded from the output:
# jinja2
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package with a strict upper bound, allowing pre-releases. Per PEP 440, pre-releases
/// that match the bound (e.g., `2.0.0rc1`) should be _not_ allowed.
#[test]
fn pre_release_upper_bound_exclude() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask<2.0.0")?;
// Even with pre-releases allowed, `<2.0.0` must not admit `2.0.0rc*`, so
// resolution falls back to the last 1.x release.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--prerelease=allow"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --prerelease=allow
click==7.1.2
# via flask
flask==1.1.4
# via -r requirements.in
itsdangerous==1.1.0
# via flask
jinja2==2.11.3
# via flask
markupsafe==2.1.5
# via jinja2
werkzeug==1.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package with a strict upper bound that includes a pre-release. Per PEP 440,
/// pre-releases _should_ be allowed.
#[test]
fn pre_release_upper_bound_include() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask<2.0.0rc4")?;
// Because the bound itself names a pre-release, earlier pre-releases of 2.0.0
// are admissible; expect `2.0.0rc2` to be selected.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--prerelease=allow"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --prerelease=allow
click==8.1.7
# via flask
flask==2.0.0rc2
# via -r requirements.in
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Allow `--pre` as an alias for `--prerelease=allow`.
#[test]
fn pre_alias() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask<2.0.0")?;
// Same resolution as `pre_release_upper_bound_exclude`, but spelled with the
// `--pre` alias instead of `--prerelease=allow`.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--pre"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --pre
click==7.1.2
# via flask
flask==1.1.4
# via -r requirements.in
itsdangerous==1.1.0
# via flask
jinja2==2.11.3
# via flask
markupsafe==2.1.5
# via jinja2
werkzeug==1.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Allow a pre-release for a version specifier in a constraint file.
#[test]
fn pre_release_constraint() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("flask")?;
// The constraint's `<=2.0.0rc2` bound names a pre-release, which opts `flask`
// into pre-release selection without any `--prerelease` flag.
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("flask<=2.0.0rc2")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
click==8.1.7
# via flask
flask==2.0.0rc2
# via
# -c constraints.txt
# -r requirements.in
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 6 packages in [TIME]
"###
);
Ok(())
}
/// Resolve from a `pyproject.toml` file with a mutually recursive extra.
#[test]
fn compile_pyproject_toml_mutually_recursive_extra() -> Result<()> {
let context = TestContext::new("3.12");
// The `test` and `dev` extras reference each other through self-references
// (`project[dev]` / `project[test]`); resolution must terminate rather than
// loop on the cycle.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.0.1"
dependencies = [
"anyio"
]
[project.optional-dependencies]
test = [
"iniconfig",
"project[dev]"
]
dev = [
"project[test]",
]
"#,
)?;
uv_snapshot!(context.pip_compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("dev"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra dev
anyio==4.3.0
# via project (pyproject.toml)
idna==3.6
# via anyio
iniconfig==2.0.0
# via project (pyproject.toml)
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Resolve from a `pyproject.toml` file with a recursive extra.
#[test]
fn compile_pyproject_toml_recursive_extra() -> Result<()> {
let context = TestContext::new("3.12");
// The `dev` extra self-references the project with the `test` extra; enabling
// `dev` must transitively pull in `test`'s dependencies (`iniconfig`).
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.0.1"
dependencies = [
"anyio"
]
[project.optional-dependencies]
test = [
"iniconfig",
]
dev = [
"project[test]",
]
"#,
)?;
uv_snapshot!(context.pip_compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("dev"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra dev
anyio==4.3.0
# via project (pyproject.toml)
idna==3.6
# via anyio
iniconfig==2.0.0
# via project (pyproject.toml)
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// The dependencies of a local editable dependency should be considered "direct" dependencies.
#[test]
fn editable_direct_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e ../../scripts/packages/setuptools_editable")?;
// Under `--resolution lowest-direct`, the editable's own dependency
// (`iniconfig`) is treated as direct and therefore resolved to its lowest
// version (0.1) rather than the latest.
uv_snapshot!(context.filters(), context.pip_compile()
.arg(requirements_in.path())
.arg("--resolution")
.arg("lowest-direct")
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in --resolution lowest-direct
-e ../../scripts/packages/setuptools_editable
# via -r [TEMP_DIR]/requirements.in
iniconfig==0.1
# via setuptools-editable
----- stderr -----
Resolved 2 packages in [TIME]
"###);
Ok(())
}
/// Setting `UV_INDEX_URL` to the empty string should treat it as "unset".
#[test]
fn empty_index_url_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
// With `UV_INDEX_URL=""`, the default PyPI index must be emitted, proving the
// empty value was ignored rather than used verbatim.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--emit-index-url")
.env("UV_INDEX_URL", ""), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url
--index-url https://pypi.org/simple
anyio==4.3.0
# via -r requirements.in
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Setting `UV_EXTRA_INDEX_URL` to the empty string should treat it as "unset".
#[test]
fn empty_extra_index_url_env_var() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
// An empty `UV_EXTRA_INDEX_URL` must be ignored entirely: only the default
// `--index-url` is emitted and no `--extra-index-url` appears.
//
// Fix: this test previously set `EXTRA_UV_INDEX_URL`, a typo for uv's actual
// `UV_EXTRA_INDEX_URL` environment variable, so it exercised nothing. The
// expected output is unchanged because an empty value is treated as unset.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--emit-index-url")
.env("UV_EXTRA_INDEX_URL", ""), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url
--index-url https://pypi.org/simple
anyio==4.3.0
# via -r requirements.in
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Setting `UV_INDEX_URL` to the empty string should treat it as "unset", and so should be
/// overridden by an `--index-url` in a requirements file.
#[test]
fn empty_index_url_env_var_override() -> Result<()> {
let context = TestContext::new("3.12");
// The requirements file carries its own `--index-url` (test PyPI); the empty
// env var must not shadow it.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--index-url https://test.pypi.org/simple\nidna")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--emit-index-url")
.env("UV_INDEX_URL", ""), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url
--index-url https://test.pypi.org/simple
idna==2.7
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// The `UV_INDEX_URL` should override an `--index-url` in a requirements file.
#[test]
fn index_url_env_var_override() -> Result<()> {
let context = TestContext::new("3.12");
// The requirements file points at PyPI, but the non-empty env var (test PyPI)
// takes precedence — confirmed by `idna==2.7`, test PyPI's latest.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("--index-url https://pypi.org/simple\nidna")?;
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--emit-index-url")
.env("UV_INDEX_URL", "https://test.pypi.org/simple"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-url
--index-url https://test.pypi.org/simple
idna==2.7
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Expand an environment variable in a `-r` path within a `requirements.in` file.
#[test]
fn expand_env_var_requirements_txt() -> Result<()> {
    let context = TestContext::new("3.12");

    // Reference the nested requirements file through `${PROJECT_ROOT}`; the snapshot's
    // `# via -r requirements-dev.in` annotation confirms the path was expanded and read.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-r ${PROJECT_ROOT}/requirements-dev.in")?;

    let requirements_dev_in = context.temp_dir.child("requirements-dev.in");
    requirements_dev_in.write_str("anyio")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
anyio==4.3.0
# via -r requirements-dev.in
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
    );

    Ok(())
}
/// Raise an error when an editable's `Requires-Python` constraint is not met.
#[test]
fn requires_python_editable() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create an editable package with a `Requires-Python` constraint that is not met
    // (`<=3.8`, while the interpreter is 3.12).
    let editable_dir = context.temp_dir.child("editable");
    editable_dir.create_dir_all()?;
    let pyproject_toml = editable_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"anyio==4.0.0"
]
requires-python = "<=3.8"
"#,
    )?;

    // Write to a requirements file.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!("-e {}", editable_dir.path().display()))?;

    // Resolution must fail with a `Requires-Python` conflict, not succeed or panic.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because the current Python version (3.12.[X]) does not satisfy Python<=3.8 and example==0.0.0 depends on Python<=3.8, we can conclude that example==0.0.0 cannot be used.
And because only example==0.0.0 is available and you require example, we can conclude that the requirements are unsatisfiable.
"###
    );

    Ok(())
}
/// Raise an error when an editable's `Requires-Python` constraint is not met, with the target
/// version requested via `--python-version` rather than taken from the current interpreter.
#[test]
fn requires_python_editable_target_version() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create an editable package with a `Requires-Python` constraint that is not met
    // (`<=3.8`, while `--python-version=3.11` is requested below).
    let editable_dir = context.temp_dir.child("editable");
    editable_dir.create_dir_all()?;
    let pyproject_toml = editable_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"anyio==4.0.0"
]
requires-python = "<=3.8"
"#,
    )?;

    // Write to a requirements file.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!("-e {}", editable_dir.path().display()))?;

    // Filter out the fallback warning so the snapshot is stable whether or not a 3.11
    // interpreter exists on the test machine.
    let filters: Vec<_> = [
        // 3.11 may not be installed
        (
            "warning: The requested Python version 3.11 is not available; .* will be used to build dependencies instead.\n",
            "",
        ),
    ]
    .into_iter()
    .chain(context.filters())
    .collect();

    uv_snapshot!(filters, context.pip_compile()
        .arg("requirements.in")
        .arg("--python-version=3.11"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because the requested Python version (3.11.0) does not satisfy Python<=3.8 and example==0.0.0 depends on Python<=3.8, we can conclude that example==0.0.0 cannot be used.
And because only example==0.0.0 is available and you require example, we can conclude that the requirements are unsatisfiable.
"###
    );

    Ok(())
}
/// Resolve an editable whose optional (`dev`) extra carries a direct-URL dependency.
#[test]
fn editable_optional_url() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create an editable package with an optional URL dependency.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = []
requires-python = '>=3.8'
[project.optional-dependencies]
dev = [
"anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl"
]
"#,
    )?;

    // Write to a requirements file, requesting the `dev` extra so the URL dependency
    // participates in resolution.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-e .[dev]")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
-e .
# via -r requirements.in
anyio @ https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl
# via example
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###
    );

    Ok(())
}
/// Under `--resolution=lowest-direct`, ignore optional dependencies.
///
/// In the below example, ensure that `setuptools` does not resolve to the lowest-available version.
#[test]
fn editable_optional_lowest_direct() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create an editable package with `setuptools` only under an (unrequested) extra;
    // `setuptools` should therefore be treated as transitive, not direct.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = ["setuptools-scm>=8.0.0"]
requires-python = '>=3.8'
[project.optional-dependencies]
dev = ["setuptools"]
"#,
    )?;

    // Write to a requirements file (without the `dev` extra).
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-e .")?;

    // `setuptools` resolves to 69.2.0 (not the lowest available), while the direct
    // `setuptools-scm>=8.0.0` is pinned to its lowest satisfying version, 8.0.1.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--resolution=lowest-direct"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --resolution=lowest-direct
-e .
# via -r requirements.in
packaging==24.0
# via setuptools-scm
setuptools==69.2.0
# via setuptools-scm
setuptools-scm==8.0.1
# via example
----- stderr -----
Resolved 4 packages in [TIME]
"###
    );

    Ok(())
}
/// Resolve a source distribution that leverages Metadata 2.2.
#[test]
fn metadata_2_2() -> Result<()> {
    let context = TestContext::new("3.12");

    // `pyo3_mixed-2.1.5.tar.gz` ships Metadata 2.2; its `boltons` dependency must be
    // discovered from the sdist and included in the resolution.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("pyo3-mixed @ https://files.pythonhosted.org/packages/2b/b8/e04b783d3569d5b61b1dcdfda683ac2e3617340539aecd0f099fbade0b4a/pyo3_mixed-2.1.5.tar.gz")?;

    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
boltons==23.1.1
# via pyo3-mixed
pyo3-mixed @ https://files.pythonhosted.org/packages/2b/b8/e04b783d3569d5b61b1dcdfda683ac2e3617340539aecd0f099fbade0b4a/pyo3_mixed-2.1.5.tar.gz
# via -r requirements.in
----- stderr -----
Resolved 2 packages in [TIME]
"###
    );

    Ok(())
}
/// Resolve packages from an index that "doesn't support" zip file streaming (by way of using
/// data descriptors).
#[test]
fn no_stream() -> Result<()> {
    let context = TestContext::new("3.12");

    // Write to a requirements file: this package is served from the `buf.build` index,
    // which produces wheels with data descriptors.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in
        .write_str("hashb_foxglove_protocolbuffers_python==25.3.0.1.20240226043130+465630478360")?;

    // Constrain `protobuf` so the resolution stays stable.
    let constraints_in = context.temp_dir.child("constraints.in");
    constraints_in.write_str("protobuf==5.26.0")?;

    // `UV_EXCLUDE_NEWER` is removed because the requested build is newer than the
    // test suite's default exclusion cutoff — presumably; confirm against `TestContext`.
    uv_snapshot!(context
        .pip_compile()
        .env_remove("UV_EXCLUDE_NEWER")
        .arg("requirements.in")
        .arg("-c")
        .arg("constraints.in")
        .arg("--extra-index-url")
        .arg("https://buf.build/gen/python"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in -c constraints.in
hashb-foxglove-protocolbuffers-python==25.3.0.1.20240226043130+465630478360
# via -r requirements.in
protobuf==5.26.0
# via
# -c constraints.in
# hashb-foxglove-protocolbuffers-python
----- stderr -----
Resolved 2 packages in [TIME]
"###
    );

    Ok(())
}
/// Resolve a direct URL package with a URL that doesn't exist (i.e., returns a 404).
#[test]
fn not_found_direct_url() -> Result<()> {
    let context = TestContext::new("3.12");

    // The `/fake/` path segment guarantees a 404 from PyPI's file host.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("iniconfig @ https://files.pythonhosted.org/packages/ef/a6/fake/iniconfig-2.0.0-py3-none-any.whl")?;

    // The failure should surface as a download error (exit code 2), not a resolver error.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to download: `iniconfig @ https://files.pythonhosted.org/packages/ef/a6/fake/iniconfig-2.0.0-py3-none-any.whl`
Caused by: HTTP status client error (404 Not Found) for url (https://files.pythonhosted.org/packages/ef/a6/fake/iniconfig-2.0.0-py3-none-any.whl)
"###
    );

    Ok(())
}
/// Raise an error when a direct URL dependency's `Requires-Python` constraint is not met.
#[test]
fn requires_python_direct_url() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create a local package with a `Requires-Python` constraint that is not met.
    // (Unlike `requires_python_editable`, it's referenced below as a direct URL,
    // not installed with `-e`.)
    let editable_dir = context.temp_dir.child("editable");
    editable_dir.create_dir_all()?;
    let pyproject_toml = editable_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"anyio==4.0.0"
]
requires-python = "<=3.8"
"#,
    )?;

    // Write to a requirements file as a named direct-URL requirement.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(&format!("example @ {}", editable_dir.path().display()))?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because the current Python version (3.12.[X]) does not satisfy Python<=3.8 and example==0.0.0 depends on Python<=3.8, we can conclude that example==0.0.0 cannot be used.
And because only example==0.0.0 is available and you require example, we can conclude that the requirements are unsatisfiable.
"###
    );

    Ok(())
}
/// Build an editable package with Hatchling's {root:uri} feature.
#[test]
fn compile_root_uri_editable() -> Result<()> {
    let context = TestContext::new("3.12");

    // The editable path itself is provided via an environment variable, which uv
    // preserves verbatim (`-e ${ROOT_PATH}`) in the compiled output.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("-e ${ROOT_PATH}")?;

    // `root_editable` is a fixture package that depends on a sibling package via
    // Hatchling's `{root:uri}` relative-URI substitution.
    let root_path = current_dir()?.join("../../scripts/packages/root_editable");

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .env("ROOT_PATH", root_path.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
-e ${ROOT_PATH}
# via -r requirements.in
black @ file://[WORKSPACE]/scripts/packages/root_editable/../black_editable
# via root-editable
----- stderr -----
Resolved 2 packages in [TIME]
"###
    );

    Ok(())
}
/// Build a non-editable package with Hatchling's {root:uri} feature.
#[test]
fn compile_root_uri_non_editable() -> Result<()> {
    let context = TestContext::new("3.12");

    // Both packages are passed as plain (non-editable) path requirements through
    // environment variables, which uv preserves verbatim in the output.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("${ROOT_PATH}\n${BLACK_PATH}")?;

    let root_path = current_dir()?.join("../../scripts/packages/root_editable");
    let black_path = current_dir()?.join("../../scripts/packages/black_editable");

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .env("ROOT_PATH", root_path.as_os_str())
        .env("BLACK_PATH", black_path.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
${BLACK_PATH}
# via
# -r requirements.in
# root-editable
${ROOT_PATH}
# via -r requirements.in
----- stderr -----
Resolved 2 packages in [TIME]
"###
    );

    Ok(())
}
/// Request a local wheel with a mismatched package name.
#[test]
fn requirement_wheel_name_mismatch() -> Result<()> {
    let context = TestContext::new("3.12");

    // The requested name (`dateutil`) intentionally disagrees with the wheel's
    // filename-encoded name (`python_dateutil`).
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("dateutil @ https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl")?;

    // The mismatch must be rejected eagerly with a clear error (exit code 2).
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requested package name `dateutil` does not match `python-dateutil` in the distribution filename: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl
"###
    );

    Ok(())
}
/// `--generate-hashes` should not update the hashes in the "lockfile" if the package is not
/// upgraded.
#[test]
fn preserve_hashes_no_upgrade() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("markupsafe")?;

    // Write a subset of the hashes to the "lockfile" (the full 2.1.2 release has many more).
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
"})?;

    // Avoid adding any additional hashes to the "lockfile": without `--upgrade`, the
    // pinned version is preserved and so is its (partial) hash set.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--output-file")
        .arg("requirements.txt")
        .arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt --generate-hashes
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###
    );

    Ok(())
}
/// `--generate-hashes` should update the hashes in the "lockfile" if the package is upgraded via
/// `--upgrade`.
#[test]
fn preserve_hashes_upgrade() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("markupsafe==2.1.2")?;

    // Write a subset of the hashes to the "lockfile" (the full 2.1.2 release has many more).
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
"})?;

    // Requesting an upgrade should update the hashes, even if the version didn't change:
    // the output now contains the complete hash set for 2.1.2, not just the seeded subset.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--output-file")
        .arg("requirements.txt")
        .arg("--generate-hashes")
        .arg("--upgrade"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt --generate-hashes
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \
--hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \
--hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \
--hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \
--hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \
--hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \
--hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \
--hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \
--hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \
--hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \
--hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \
--hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \
--hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \
--hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \
--hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \
--hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \
--hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \
--hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \
--hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \
--hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \
--hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \
--hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \
--hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \
--hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \
--hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \
--hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \
--hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \
--hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \
--hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \
--hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \
--hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \
--hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \
--hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \
--hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \
--hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \
--hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \
--hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \
--hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \
--hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \
--hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \
--hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \
--hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \
--hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \
--hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \
--hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \
--hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \
--hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \
--hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###
    );

    Ok(())
}
/// `--generate-hashes` should update the hashes in the "lockfile" if the package does not have
/// hashes, even if `--upgrade` is _not_ specified.
#[test]
fn preserve_hashes_no_existing_hashes() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("markupsafe")?;

    // Seed the "lockfile" with a pinned version but no hashes at all.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
markupsafe==2.1.2
"})?;

    // Even without `--upgrade`, the missing hashes are generated: the pin is kept at
    // 2.1.2 but the full hash set for that version is added to the "lockfile".
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--output-file")
        .arg("requirements.txt")
        .arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt --generate-hashes
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \
--hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \
--hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \
--hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \
--hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \
--hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \
--hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \
--hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \
--hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \
--hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \
--hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \
--hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \
--hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \
--hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \
--hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \
--hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \
--hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \
--hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \
--hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \
--hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \
--hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \
--hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \
--hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \
--hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \
--hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \
--hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \
--hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \
--hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \
--hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \
--hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \
--hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \
--hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \
--hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \
--hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \
--hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \
--hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \
--hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \
--hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \
--hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \
--hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \
--hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \
--hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \
--hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \
--hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \
--hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \
--hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \
--hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \
--hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###
    );

    Ok(())
}
/// `--generate-hashes` should update the hashes in the "lockfile" if the package is upgraded due
/// to a change in requirements.
#[test]
fn preserve_hashes_newer_version() -> Result<()> {
    let context = TestContext::new("3.12");

    // The input now requests 2.1.3, newer than the 2.1.2 seeded in the "lockfile" below.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("markupsafe==2.1.3")?;

    // Write a subset of the (2.1.2) hashes to the "lockfile".
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --python-version 3.12 --cache-dir [CACHE_DIR]
markupsafe==2.1.2 \
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \
--hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \
--hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2
"})?;

    // Requesting a more specific version should update the hashes: the output pins
    // 2.1.3 with that version's complete hash set, discarding the stale 2.1.2 hashes.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--output-file")
        .arg("requirements.txt")
        .arg("--generate-hashes"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt --generate-hashes
markupsafe==2.1.3 \
--hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
--hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \
--hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \
--hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \
--hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \
--hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \
--hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \
--hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \
--hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \
--hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \
--hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \
--hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \
--hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \
--hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \
--hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \
--hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \
--hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \
--hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \
--hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \
--hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \
--hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \
--hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \
--hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \
--hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \
--hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \
--hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \
--hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \
--hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \
--hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \
--hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \
--hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \
--hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \
--hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \
--hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \
--hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \
--hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \
--hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \
--hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \
--hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \
--hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \
--hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \
--hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \
--hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \
--hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \
--hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \
--hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \
--hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \
--hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \
--hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \
--hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \
--hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \
--hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \
--hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \
--hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \
--hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \
--hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \
--hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \
--hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \
--hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \
--hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###
    );

    Ok(())
}
/// Detect the package name from metadata sources from local directories.
#[test]
fn unnamed_path_requirement() -> Result<()> {
    let context = TestContext::new("3.12");

    // Four unnamed path requirements covering the supported metadata sources:
    // pyproject.toml (poetry/black fixtures), setup.py, and setup.cfg.
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str(indoc! {r"
../../scripts/packages/poetry_editable
../../scripts/packages/black_editable
../../scripts/packages/setup_py_editable
../../scripts/packages/setup_cfg_editable
"
})?;

    // Run from the crate directory so the relative fixture paths resolve; the
    // requirements file is referenced by its absolute temp-dir path.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg(requirements_in.path())
        .current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
anyio==4.3.0
# via
# httpx
# poetry-editable
../../scripts/packages/black_editable
# via -r [TEMP_DIR]/requirements.in
certifi==2024.2.2
# via
# httpcore
# httpx
# requests
charset-normalizer==3.3.2
# via requests
h11==0.14.0
# via httpcore
httpcore==1.0.4
# via httpx
httpx==0.27.0
# via setup-py-editable
idna==3.6
# via
# anyio
# httpx
# requests
../../scripts/packages/poetry_editable
# via -r [TEMP_DIR]/requirements.in
requests==2.31.0
# via setup-cfg-editable
../../scripts/packages/setup_cfg_editable
# via -r [TEMP_DIR]/requirements.in
../../scripts/packages/setup_py_editable
# via -r [TEMP_DIR]/requirements.in
sniffio==1.3.1
# via
# anyio
# httpx
urllib3==2.2.1
# via requests
----- stderr -----
Resolved 14 packages in [TIME]
"###);

    Ok(())
}
/// Detect the package name for a Git requirement that is given without a name.
#[test]
fn unnamed_git_requirement() -> Result<()> {
let context = TestContext::new("3.12");
// Write the unnamed Git requirement; the name (`flask`) must be discovered
// from the repository's own metadata.
context
.temp_dir
.child("requirements.in")
.write_str("git+https://github.com/pallets/flask.git@3.0.0")?;
uv_snapshot!(context.pip_compile().arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ git+https://github.com/pallets/flask.git@735a4701d6d5e848241e7d7535db898efb62d400
# via -r requirements.in
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###);
Ok(())
}
/// Detect the package name from an unnamed HTTPS requirement.
#[test]
fn unnamed_https_requirement() -> Result<()> {
// Given the filename `3.0.2.tar.gz`, we need to download the file to determine the package name.
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("https://github.com/pallets/flask/archive/refs/tags/3.0.2.tar.gz")?;
// The archive resolves as `flask`, named from the downloaded metadata.
uv_snapshot!(context.pip_compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
blinker==1.7.0
# via flask
click==8.1.7
# via flask
flask @ https://github.com/pallets/flask/archive/refs/tags/3.0.2.tar.gz
# via -r requirements.in
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###);
Ok(())
}
/// Resolve a local directory requirement (`hatchling-dynamic`) whose
/// dependencies are declared dynamically rather than statically in its
/// metadata.
#[test]
fn dynamic_dependencies() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("hatchling-dynamic @ ../../scripts/packages/hatchling_dynamic")?;
uv_snapshot!(context.filters(), context.pip_compile()
.arg(requirements_in.path())
.current_dir(current_dir()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
anyio==4.3.0
# via hatchling-dynamic
../../scripts/packages/hatchling_dynamic
# via -r [TEMP_DIR]/requirements.in
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###);
Ok(())
}
/// This tests the marker expressions emitted when depending on a package with
/// exciting markers like 'anyio'.
///
/// NOTE: This test runs on `linux` only because some of `anyio`'s markers
/// involve querying the specific platform being used to run `uv pip compile`.
/// Since this test was developed on Linux, the marker expression generated is
/// coupled with the Linux platform. Other tests for other platforms could be
/// added.
#[cfg(target_os = "linux")]
#[test]
fn emit_marker_expression_exciting_linux() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
// `--emit-marker-expression` prepends a header describing the environment the
// pinned set is known to be valid for.
uv_snapshot!(context
.pip_compile()
.arg("requirements.in")
.arg("--emit-marker-expression"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-marker-expression
# Pinned dependencies known to be valid for:
# python_version == '3.12' and platform_python_implementation == 'CPython' and platform_system == 'Linux'
anyio==4.3.0
# via -r requirements.in
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
Ok(())
}
/// This tests that the marker expression emitted accounts for markers directly
/// in `requirements.in`.
///
/// NOTE: This test runs on `linux` only because it requires that `sys_platform
/// == 'linux'` evaluates to `true`.
#[cfg(target_os = "linux")]
#[test]
fn emit_marker_expression_direct() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio ; sys_platform == 'linux'")?;
// The marker on the input requirement is folded into the emitted expression.
uv_snapshot!(context
.pip_compile()
.arg("requirements.in")
.arg("--emit-marker-expression"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-marker-expression
# Pinned dependencies known to be valid for:
# python_version == '3.12' and platform_python_implementation == 'CPython' and platform_system == 'Linux' and sys_platform == 'linux'
anyio==4.3.0
# via -r requirements.in
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
Ok(())
}
/// This tests that the marker expression emitted accounts for markers directly
/// in `requirements.in`, even when the marker evaluates to false on the
/// current platform. In this case, we set `sys_platform == 'macos'` so that on
/// Linux, this dependency is ignored. But the marker expression generated must
/// have `sys_platform == 'linux'`, since the locked set of packages might be
/// different (and indeed are different) on other platforms.
///
/// NOTE: This test runs on `linux` because it requires that `sys_platform
/// == 'macos'` evaluates to `false`. While this technically only requires
/// `not(target_os = "macos")`, the marker expression generated during test
/// development was on Linux. So we require Linux.
#[cfg(target_os = "linux")]
#[test]
fn emit_marker_expression_conditional() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio ; sys_platform == 'macos'")?;
uv_snapshot!(context
.pip_compile()
.arg("requirements.in")
.arg("--emit-marker-expression"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-marker-expression
# Pinned dependencies known to be valid for:
# sys_platform == 'linux'
----- stderr -----
Resolved 0 packages in [TIME]
"###);
Ok(())
}
/// This tests the marker expressions emitted when depending on a package with
/// a non-pypy dependency. Specifically, `pendulum` depends on `time-machine`,
/// but not when using pypy.
///
/// NOTE: This test runs on `linux` because it was written on Linux. While the
/// marker expression itself doesn't have anything in it that couples it to
/// Linux, it is possible for the resolution to change on other platforms. For
/// example, on Windows, the `tzdata` dependency is excluded. (It's actually
/// not clear why. The `tzdata` dependency appears to be an unconditional
/// dependency. And if anything, I'd expect it to be included on Windows and
/// excluded everywhere else... Odd.)
#[cfg(target_os = "linux")]
#[test]
fn emit_marker_expression_pypy() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("pendulum")?;
// Since `time-machine` is skipped under PyPy, the emitted expression carries
// an `implementation_name == 'cpython'` term.
uv_snapshot!(context
.pip_compile()
.arg("requirements.in")
.arg("--emit-marker-expression"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-marker-expression
# Pinned dependencies known to be valid for:
# python_version == '3.12' and implementation_name == 'cpython'
pendulum==3.0.0
# via -r requirements.in
python-dateutil==2.9.0.post0
# via
# pendulum
# time-machine
six==1.16.0
# via python-dateutil
time-machine==2.14.1
# via pendulum
tzdata==2024.1
# via pendulum
----- stderr -----
Resolved 5 packages in [TIME]
"###);
Ok(())
}
/// A local version of a package shadowing a remote package is installed.
#[test]
fn local_version_of_remote_package() -> Result<()> {
let context = TestContext::new("3.12");
// `scripts/packages/anyio_local` provides `anyio==4.3.0+foo`, a local version
// of the PyPI package.
let root_path = context.workspace_root.join("scripts/packages");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
uv_snapshot!(context.filters(), context.pip_compile()
.arg(requirements_in.canonicalize()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
anyio==4.3.0
# via -r requirements.in
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
// Actually install the local dependency
let mut command = context.pip_install();
command.arg(root_path.join("anyio_local"));
uv_snapshot!(
context.filters(),
command, @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ anyio==4.3.0+foo (from file://[WORKSPACE]/scripts/packages/anyio_local)
"###
);
// The local version should _not_ be included in the resolution
uv_snapshot!(context.filters(), context.pip_compile()
.arg(requirements_in.canonicalize()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in
anyio==4.3.0
# via -r requirements.in
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
// Write a lock file with the local version
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(&indoc::formatdoc! {r"
anyio @ {workspace_root}/scripts/packages/anyio_local
",
workspace_root = context.workspace_root.simplified_display(),
})?;
// The local version is _still_ excluded from the resolution
// `uv pip compile` does not have access to an environment and cannot consider installed packages
// We may want to allow the lock file to be preserved in this case in the future, but right now
// we require the URL to always be in the input file.
uv_snapshot!(context.filters(), context.pip_compile()
.arg(requirements_in.canonicalize()?)
.arg("--output-file")
.arg(requirements_txt.canonicalize()?), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] [TEMP_DIR]/requirements.in --output-file [TEMP_DIR]/requirements.txt
anyio==4.3.0
# via -r requirements.in
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###);
Ok(())
}
/// Resolve `pendulum`, whose `tzdata` dependency is excluded on Windows.
///
/// NOTE(review): the snapshot is taken with `windows_filters=false`;
/// presumably the Windows-specific output difference is the point of this
/// test — confirm against the snapshot macro's filter behavior.
#[test]
fn pendulum_no_tzdata_on_windows() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("pendulum")?;
uv_snapshot!(
context.filters(),
windows_filters=false,
context.pip_compile().arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
pendulum==3.0.0
# via -r requirements.in
python-dateutil==2.9.0.post0
# via
# pendulum
# time-machine
six==1.16.0
# via python-dateutil
time-machine==2.14.1
# via pendulum
tzdata==2024.1
# via pendulum
----- stderr -----
Resolved 5 packages in [TIME]
"###);
Ok(())
}
/// Allow URL dependencies recursively for local source trees.
///
/// The chain is `app -> lib -> anyio`, with each link expressed as a local
/// directory URL.
#[test]
fn allow_recursive_url_local_path() -> Result<()> {
let context = TestContext::new("3.12");
// Create a standalone library named "anyio".
let anyio = context.temp_dir.child("anyio");
anyio.create_dir_all()?;
let pyproject_toml = anyio.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "anyio"
version = "0.0.0"
dependencies = [
"idna"
]
requires-python = ">3.8"
"#,
)?;
// Create a library that depends on the standalone library.
let lib = context.temp_dir.child("lib");
lib.create_dir_all()?;
let pyproject_toml = lib.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "lib"
version = "0.0.0"
dependencies = [
"anyio @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(anyio.path()).unwrap().as_str(),
))?;
// Create an application that depends on the library.
let app = context.temp_dir.child("app");
app.create_dir_all()?;
let pyproject_toml = app.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"lib @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(lib.path()).unwrap().as_str(),
))?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("./app")?;
uv_snapshot!(context.filters(), context.pip_compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
anyio @ file://[TEMP_DIR]/anyio/
# via lib
./app
# via -r requirements.in
idna==3.6
# via anyio
lib @ file://[TEMP_DIR]/lib/
# via example
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Allow URL dependencies recursively for local source trees, but respect overrides.
///
/// Here the override replaces the recursive `file://` `anyio` with
/// `anyio==3.7.0` from the registry.
#[test]
fn allow_recursive_url_local_path_override() -> Result<()> {
let context = TestContext::new("3.12");
// Create a standalone library named "anyio".
let anyio = context.temp_dir.child("anyio");
anyio.create_dir_all()?;
let pyproject_toml = anyio.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "anyio"
version = "0.0.0"
dependencies = [
"idna"
]
requires-python = ">3.8"
"#,
)?;
// Create a library that depends on the standalone library.
let lib = context.temp_dir.child("lib");
lib.create_dir_all()?;
let pyproject_toml = lib.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "lib"
version = "0.0.0"
dependencies = [
"anyio @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(anyio.path()).unwrap().as_str(),
))?;
// Create an application that depends on the library.
let app = context.temp_dir.child("app");
app.create_dir_all()?;
let pyproject_toml = app.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"lib @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(lib.path()).unwrap().as_str(),
))?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("./app")?;
// Create an override that pulls `anyio` from PyPI.
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("anyio==3.7.0")?;
uv_snapshot!(context.filters(), context.pip_compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt
anyio==3.7.0
# via
# --override overrides.txt
# lib
./app
# via -r requirements.in
idna==3.6
# via anyio
lib @ file://[TEMP_DIR]/lib/
# via example
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 5 packages in [TIME]
"###
);
Ok(())
}
/// Allow URL dependencies recursively for local source trees, but respect both overrides _and_
/// constraints.
///
/// We have app -> lib -> anyio and root has a directory requirement on app.
#[test]
fn allow_recursive_url_local_path_override_constraint() -> Result<()> {
let context = TestContext::new("3.12");
// Create a standalone library named "anyio".
let anyio = context.temp_dir.child("anyio");
anyio.create_dir_all()?;
let pyproject_toml = anyio.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "anyio"
version = "0.0.0"
dependencies = [
"idna"
]
requires-python = ">3.8"
"#,
)?;
// Create a library that depends on the standalone library.
let lib = context.temp_dir.child("lib");
lib.create_dir_all()?;
let pyproject_toml = lib.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "lib"
version = "0.0.0"
dependencies = [
"anyio @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(anyio.path()).unwrap().as_str(),
))?;
// Create an application that depends on the library.
let app = context.temp_dir.child("app");
app.create_dir_all()?;
let pyproject_toml = app.child("pyproject.toml");
pyproject_toml.write_str(&format!(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"lib @ {}"
]
requires-python = ">3.8"
"#,
Url::from_directory_path(lib.path()).unwrap().as_str(),
))?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("./app")?;
// Create an override that pins `anyio` to `0.0.0`, a version that does not
// exist on PyPI.
let overrides_txt = context.temp_dir.child("overrides.txt");
overrides_txt.write_str("anyio==0.0.0")?;
// Ensure that resolution fails, since `0.0.0` does not exist on PyPI.
uv_snapshot!(context.filters(), context.pip_compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because there is no version of anyio==0.0.0 and lib==0.0.0 depends on anyio==0.0.0, we can conclude that lib==0.0.0 cannot be used.
And because only lib==0.0.0 is available and example==0.0.0 depends on lib, we can conclude that example==0.0.0 cannot be used.
And because only example==0.0.0 is available and you require example, we can conclude that the requirements are unsatisfiable.
"###
);
// Now constrain `anyio` to the local version.
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str("anyio @ ./anyio")?;
uv_snapshot!(context.filters(), context.pip_compile()
.arg("requirements.in")
.arg("--override")
.arg("overrides.txt")
.arg("--constraint")
.arg("constraints.txt"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --override overrides.txt --constraint constraints.txt
./anyio
# via
# -c constraints.txt
# --override overrides.txt
# lib
./app
# via -r requirements.in
idna==3.6
# via anyio
lib @ file://[TEMP_DIR]/lib/
# via example
----- stderr -----
Resolved 4 packages in [TIME]
"###
);
Ok(())
}
/// Allow pre-releases for dependencies of source path requirements.
#[test]
fn pre_release_path_requirement() -> Result<()> {
let context = TestContext::new("3.12");
// Create a package that requires a pre-release version of `flask`.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"flask==2.0.0rc1"
]
requires-python = ">3.8"
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(".")?;
uv_snapshot!(context.filters(), context.pip_compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
click==8.1.7
# via flask
.
# via -r requirements.in
flask==2.0.0rc1
# via example
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Allow pre-releases for dependencies of editable requirements.
#[test]
fn pre_release_editable_requirement() -> Result<()> {
let context = TestContext::new("3.12");
// Create a package that requires a pre-release version of `flask`.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "example"
version = "0.0.0"
dependencies = [
"flask==2.0.0rc1"
]
requires-python = ">3.8"
"#,
)?;
// Write to a requirements file.
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("-e .")?;
uv_snapshot!( context.pip_compile()
.arg("requirements.in"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
-e .
# via -r requirements.in
click==8.1.7
# via flask
flask==2.0.0rc1
# via example
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
markupsafe==2.1.5
# via
# jinja2
# werkzeug
werkzeug==3.0.1
# via flask
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package via `--extra-index-url`.
///
/// If the package exists on the "extra" index, but at an incompatible version, the resolution
/// should fail by default (even though a compatible version exists on the "primary" index).
#[test]
fn compile_index_url_first_match() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("jinja2==3.1.0")?;
uv_snapshot!(context.pip_compile()
.arg("--index-url")
.arg("https://pypi.org/simple")
.arg("--extra-index-url")
.arg("https://download.pytorch.org/whl/cpu")
.arg("requirements.in")
.arg("--no-deps"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because there is no version of jinja2==3.1.0 and you require jinja2==3.1.0, we can conclude that the requirements are unsatisfiable.
"###
);
Ok(())
}
/// Resolve a package via `--extra-index-url`.
///
/// If the package exists on the "extra" index, but at an incompatible version, the
/// resolution should fall back to the "primary" index when `--index-strategy unsafe-any-match`
/// is provided.
#[test]
fn compile_index_url_fallback() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("jinja2==3.1.0")?;
uv_snapshot!(context.pip_compile()
.arg("--index-strategy")
.arg("unsafe-any-match")
.arg("--index-url")
.arg("https://pypi.org/simple")
.arg("--extra-index-url")
.arg("https://download.pytorch.org/whl/cpu")
.arg("requirements.in")
.arg("--no-deps"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --index-strategy unsafe-any-match requirements.in --no-deps
jinja2==3.1.0
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a package via `--extra-index-url`.
///
/// If the package exists on the "extra" index at a compatible version, the resolver should
/// prefer it, even if a newer version exists on the "primary" index.
///
/// In this case, anyio 3.5.0 is hosted on the "extra" index, but newer versions are available on
/// the "primary" index. We should prefer the older version from the "extra" index, since it's the
/// preferred index.
#[test]
fn compile_index_url_fallback_prefer_primary() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
uv_snapshot!(context.pip_compile()
.arg("--index-strategy")
.arg("unsafe-any-match")
.arg("--index-url")
.arg("https://pypi.org/simple")
.arg("--extra-index-url")
.arg("https://test.pypi.org/simple")
.arg("requirements.in")
.arg("--no-deps"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --index-strategy unsafe-any-match requirements.in --no-deps
anyio==3.5.0
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a package via `--extra-index-url`.
///
/// With `unsafe-best-match`, the resolver should prefer the highest compatible version,
/// regardless of which index it comes from.
///
/// In this case, anyio 3.5.0 is hosted on the "extra" index, but newer versions are available on
/// the "primary" index. We should prefer the newer version from the "primary" index, despite the
/// "extra" index being the preferred index.
#[test]
fn compile_index_url_unsafe_highest() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio")?;
uv_snapshot!(context.pip_compile()
.arg("--index-strategy")
.arg("unsafe-best-match")
.arg("--index-url")
.arg("https://pypi.org/simple")
.arg("--extra-index-url")
.arg("https://test.pypi.org/simple")
.arg("requirements.in")
.arg("--no-deps"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --index-strategy unsafe-best-match requirements.in --no-deps
anyio==4.3.0
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Resolve a package via `--extra-index-url`.
///
/// With `unsafe-best-match` and `--resolution lowest`, the resolver should prefer the lowest
/// compatible version, regardless of which index it comes from.
///
/// In this case, anyio 3.5.0 is hosted on the "extra" index, but older versions are available on
/// the "primary" index. We should prefer the older version from the "primary" index, despite the
/// "extra" index being the preferred index.
///
/// We also test here that a warning is raised for missing lower bounds on direct dependencies with
/// `--resolution lowest`.
#[test]
fn compile_index_url_unsafe_lowest() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio<100")?;
uv_snapshot!(context.pip_compile()
.arg("--resolution")
.arg("lowest")
.arg("--index-strategy")
.arg("unsafe-best-match")
.arg("--index-url")
.arg("https://pypi.org/simple")
.arg("--extra-index-url")
.arg("https://test.pypi.org/simple")
.arg("requirements.in")
.arg("--no-deps"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] --resolution lowest --index-strategy unsafe-best-match requirements.in --no-deps
anyio==1.0.0
# via -r requirements.in
----- stderr -----
warning: The direct dependency `anyio` is unpinned. Consider setting a lower bound when using `--resolution-strategy lowest` to avoid using outdated versions.
Resolved 1 package in [TIME]
"###
);
Ok(())
}
/// Ensure that the username and the password are omitted when
/// index annotations are displayed via `--emit-index-annotation`.
#[test]
fn emit_index_annotation_hide_password() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("requests")?;
// The credentials embedded in `UV_INDEX_URL` must not leak into the emitted
// `# from <index>` annotations.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--emit-index-annotation")
.env("UV_INDEX_URL", "https://test-user:test-password@pypi.org/simple"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-annotation
certifi==2024.2.2
# via requests
# from https://pypi.org/simple
charset-normalizer==3.3.2
# via requests
# from https://pypi.org/simple
idna==3.6
# via requests
# from https://pypi.org/simple
requests==2.31.0
# via -r requirements.in
# from https://pypi.org/simple
urllib3==2.2.1
# via requests
# from https://pypi.org/simple
----- stderr -----
Resolved 5 packages in [TIME]
"###
);
Ok(())
}
/// Ensure that `--emit-index-annotation` prints the index URL for each package.
#[test]
fn emit_index_annotation_pypi_org_simple() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("requests")?;
// Every resolved package is annotated with the index it was sourced from.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--emit-index-annotation"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-annotation
certifi==2024.2.2
# via requests
# from https://pypi.org/simple
charset-normalizer==3.3.2
# via requests
# from https://pypi.org/simple
idna==3.6
# via requests
# from https://pypi.org/simple
requests==2.31.0
# via -r requirements.in
# from https://pypi.org/simple
urllib3==2.2.1
# via requests
# from https://pypi.org/simple
----- stderr -----
Resolved 5 packages in [TIME]
"###
);
Ok(())
}
/// Ensure that `--emit-index-annotation` plays nicely with `--no-annotate`.
///
/// For now, `--no-annotate` doesn't affect `--emit-index-annotation`, in that we still emit the
/// index annotation, and leave `--no-annotate` to only affect the package _source_ annotations.
#[test]
fn emit_index_annotation_no_annotate() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("requests")?;
// The `# via` source annotations are dropped, but the `# from <index>`
// annotations remain.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--emit-index-annotation")
.arg("--no-annotate"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-annotation --no-annotate
certifi==2024.2.2
# from https://pypi.org/simple
charset-normalizer==3.3.2
# from https://pypi.org/simple
idna==3.6
# from https://pypi.org/simple
requests==2.31.0
# from https://pypi.org/simple
urllib3==2.2.1
# from https://pypi.org/simple
----- stderr -----
Resolved 5 packages in [TIME]
"###
);
Ok(())
}
/// Ensure that `--emit-index-annotation` plays nicely with `--annotation-style=line`.
#[test]
fn emit_index_annotation_line() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("requests")?;
// With line-style annotations, `# via` shares the package's line while the
// index annotation keeps its own line.
uv_snapshot!(context.pip_compile()
.arg("requirements.in")
.arg("--emit-index-annotation")
.arg("--annotation-style")
.arg("line"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-annotation --annotation-style line
certifi==2024.2.2 # via requests
# from https://pypi.org/simple
charset-normalizer==3.3.2 # via requests
# from https://pypi.org/simple
idna==3.6 # via requests
# from https://pypi.org/simple
requests==2.31.0 # via -r requirements.in
# from https://pypi.org/simple
urllib3==2.2.1 # via requests
# from https://pypi.org/simple
----- stderr -----
Resolved 5 packages in [TIME]
"###
);
Ok(())
}
/// `--emit-index-annotation` where packages are pulled from two distinct indexes.
#[test]
fn emit_index_annotation_multiple_indexes() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("uv\nrequests")?;

    // With `--extra-index-url`, `requests` resolves from the test index while `uv` resolves
    // from the default index, so the two packages carry different `# from` annotations.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--extra-index-url")
        .arg("https://test.pypi.org/simple")
        .arg("--emit-index-annotation"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-annotation
    requests==2.5.4.1
        # via -r requirements.in
        # from https://test.pypi.org/simple
    uv==0.1.24
        # via -r requirements.in
        # from https://pypi.org/simple
    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}
/// Test error message when direct dependency is an empty set.
#[test]
fn no_version_for_direct_dependency() -> Result<()> {
    let context = TestContext::new("3.12");

    // `==1` and `>=1.2` are mutually exclusive, so the requirement denotes the empty set.
    let requirements = context.temp_dir.child("requirements.in");
    requirements.write_str("pypyp==1,>=1.2")?;

    // `--offline` guarantees the failure is diagnosed before any network requests are made.
    uv_snapshot!(context.pip_compile()
        .arg("requirements.in")
        .arg("--offline"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ you require pypyp ∅
    "###
    );

    Ok(())
}
/// Compile against a dedicated platform, which may differ from the current platform.
#[test]
fn python_platform() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("black")?;

    // Targeting Linux: the resolution omits `colorama` (6 packages).
    uv_snapshot!(context.filters(),
        windows_filters=false,
        context.pip_compile()
        .arg("requirements.in")
        .arg("--python-platform")
        .arg("aarch64-unknown-linux-gnu"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-platform aarch64-unknown-linux-gnu
    black==24.3.0
        # via -r requirements.in
    click==8.1.7
        # via black
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black
    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // Targeting Windows: `colorama` is additionally pulled in via `click` (7 packages).
    uv_snapshot!(context.filters(),
        windows_filters=false,
        context.pip_compile()
        .arg("requirements.in")
        .arg("--python-platform")
        .arg("x86_64-pc-windows-msvc"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --python-platform x86_64-pc-windows-msvc
    black==24.3.0
        # via -r requirements.in
    click==8.1.7
        # via black
    colorama==0.4.6
        # via click
    mypy-extensions==1.0.0
        # via black
    packaging==24.0
        # via black
    pathspec==0.12.1
        # via black
    platformdirs==4.2.0
        # via black
    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a specific source distribution via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn git_source_default_branch() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // No `rev`, `tag`, or `branch`: the source resolves to the repository's default branch.
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "uv-public-pypackage",
        ]

        [tool.uv.sources]
        uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage" }
    "#})?;

    // In addition to the standard filters, remove the `main` commit, which will change frequently.
    let filters: Vec<_> = [(r"@(\d|\w){40}", "@[COMMIT]")]
        .into_iter()
        .chain(context.filters())
        .collect();

    uv_snapshot!(filters, context.pip_compile()
        .arg("--preview")
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] --preview pyproject.toml
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@[COMMIT]
        # via project (pyproject.toml)
    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}
/// Resolve a specific branch via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn git_source_branch() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "uv-public-pypackage",
        ]

        [tool.uv.sources]
        uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", branch = "test-branch" }
    "#})?;

    // The branch name is pinned to its current commit SHA in the output.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--preview")
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] --preview pyproject.toml
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via project (pyproject.toml)
    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}
/// Resolve a specific tag via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn git_source_tag() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "uv-public-pypackage",
        ]

        [tool.uv.sources]
        uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", tag = "test-tag" }
    "#})?;

    // The tag is pinned to its commit SHA in the output.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--preview")
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] --preview pyproject.toml
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via project (pyproject.toml)
    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}
/// Resolve a specific commit via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn git_source_long_commit() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // `rev` is a full 40-character commit SHA.
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "uv-public-pypackage",
        ]

        [tool.uv.sources]
        uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", rev = "0dacfd662c64cb4ceb16e6cf65a157a8b715b979" }
    "#})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--preview")
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] --preview pyproject.toml
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via project (pyproject.toml)
    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}
/// Resolve a specific commit via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn git_source_short_commit() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // `rev` is an abbreviated commit SHA; the output pins the full 40-character SHA.
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "uv-public-pypackage",
        ]

        [tool.uv.sources]
        uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", rev = "0dacfd6" }
    "#})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--preview")
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] --preview pyproject.toml
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979
        # via project (pyproject.toml)
    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}
/// Resolve a specific ref via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
fn git_source_refs() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // `rev` is a non-branch, non-tag ref (a GitHub pull-request head).
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "uv-public-pypackage",
        ]

        [tool.uv.sources]
        uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", rev = "refs/pull/4/head" }
    "#})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--preview")
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] --preview pyproject.toml
    uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@9d01a806f17ddacb9c7b66b1b68574adf790b63f
        # via project (pyproject.toml)
    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}
/// Request a non-existent tag via a Git HTTPS dependency.
#[test]
#[cfg(feature = "git")]
#[cfg_attr(windows, ignore = "Git error messages differ on Windows")]
fn git_source_missing_tag() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "uv-public-pypackage",
        ]

        [tool.uv.sources]
        uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", tag = "missing" }
    "#})?;

    // Expect a hard failure (exit code 2) with the full `Caused by` chain from the Git fetch.
    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--preview")
        .arg("pyproject.toml"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Failed to download and build: `uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@missing`
      Caused by: Git operation failed
      Caused by: failed to clone into: [CACHE_DIR]/git-v0/db/8dab139913c4b566
      Caused by: failed to fetch tag `missing`
      Caused by: process didn't exit successfully: `git fetch --force --update-head-ok 'https://github.com/astral-test/uv-public-pypackage' '+refs/tags/missing:refs/remotes/origin/tags/missing'` (exit status: 128)
    --- stderr
    fatal: couldn't find remote ref refs/tags/missing
    "###);

    Ok(())
}
/// A direct dependency without any version constraint should trigger a warning when
/// `[tool.uv.sources]` is in use (here, `tqdm` is unconstrained while `anyio` is pinned).
#[test]
fn warn_missing_constraint() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "foo"
        version = "0.0.0"
        dependencies = [
          "tqdm",
          "anyio==4.3.0",
        ]

        [tool.uv.sources]
        anyio = { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl" }
    "#})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--preview")
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] --preview pyproject.toml
    anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl
        # via foo (pyproject.toml)
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio
    tqdm==4.66.2
        # via foo (pyproject.toml)
    ----- stderr -----
    warning: Missing version constraint (e.g., a lower bound) for `tqdm`
    Resolved 4 packages in [TIME]
    "###);

    Ok(())
}
/// Ensure that no missing-constraint warning is emitted when `[tool.uv.sources]` is absent:
/// `tqdm` is unconstrained here, yet the resolution (run with `--preview`) produces no warning.
#[test]
fn dont_warn_missing_constraint_without_sources() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "foo"
        version = "0.0.0"
        dependencies = [
          "tqdm",
          "anyio==4.3.0",
        ]
    "#})?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--preview")
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] --preview pyproject.toml
    anyio==4.3.0
        # via foo (pyproject.toml)
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio
    tqdm==4.66.2
        # via foo (pyproject.toml)
    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###);

    Ok(())
}
/// Resolve a `pyproject.toml` that maps dependencies through `[tool.uv.sources]`, covering a
/// direct-URL source (`tqdm`), a Git source with a pinned rev (`boltons`, behind the `utils`
/// extra), and an editable path source (`poetry_editable`).
#[test]
fn tool_uv_sources() -> Result<()> {
    let context = TestContext::new("3.12");
    // Use a subdir to test path normalization.
    let require_path = "some_dir/pyproject.toml";
    let pyproject_toml = context.temp_dir.child(require_path);
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.0.0"
        dependencies = [
          "tqdm>4,<=5",
          "packaging @ git+https://github.com/pypa/packaging@32deafe8668a2130a3366b98154914d188f3718e",
          "poetry_editable",
          "urllib3 @ https://files.pythonhosted.org/packages/a2/73/a68704750a7679d0b6d3ad7aa8d4da8e14e151ae82e6fee774e6e0d05ec8/urllib3-2.2.1-py3-none-any.whl",
        ]

        [project.optional-dependencies]
        utils = [
            "boltons==24.0.0"
        ]
        dont_install_me = [
            "broken @ https://example.org/does/not/exist"
        ]

        [tool.uv.sources]
        tqdm = { url = "https://files.pythonhosted.org/packages/a5/d6/502a859bac4ad5e274255576cd3e15ca273cdb91731bc39fb840dd422ee9/tqdm-4.66.0-py3-none-any.whl" }
        boltons = { git = "https://github.com/mahmoud/boltons", rev = "57fbaa9b673ed85b32458b31baeeae230520e4a0" }
        poetry_editable = { path = "../poetry_editable", editable = true }
    "#})?;

    // Copy the `poetry_editable` fixture from the repository's `scripts/packages` directory
    // into the temp dir so the relative `../poetry_editable` path source resolves.
    let project_root = fs_err::canonicalize(std::env::current_dir()?.join("../.."))?;
    fs_err::create_dir_all(context.temp_dir.join("poetry_editable/poetry_editable"))?;
    fs_err::copy(
        project_root.join("scripts/packages/poetry_editable/pyproject.toml"),
        context.temp_dir.join("poetry_editable/pyproject.toml"),
    )?;
    fs_err::copy(
        project_root.join("scripts/packages/poetry_editable/poetry_editable/__init__.py"),
        context
            .temp_dir
            .join("poetry_editable/poetry_editable/__init__.py"),
    )?;

    // Install the editable packages.
    uv_snapshot!(context.pip_compile()
        .arg("--preview")
        .arg(require_path)
        .arg("--extra")
        .arg("utils"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] --preview some_dir/pyproject.toml --extra utils
    -e ../poetry_editable
        # via project (some_dir/pyproject.toml)
    anyio==4.3.0
        # via poetry-editable
    boltons @ git+https://github.com/mahmoud/boltons@57fbaa9b673ed85b32458b31baeeae230520e4a0
        # via project (some_dir/pyproject.toml)
    idna==3.6
        # via anyio
    packaging @ git+https://github.com/pypa/packaging@32deafe8668a2130a3366b98154914d188f3718e
        # via project (some_dir/pyproject.toml)
    sniffio==1.3.1
        # via anyio
    tqdm @ https://files.pythonhosted.org/packages/a5/d6/502a859bac4ad5e274255576cd3e15ca273cdb91731bc39fb840dd422ee9/tqdm-4.66.0-py3-none-any.whl
        # via project (some_dir/pyproject.toml)
    urllib3 @ https://files.pythonhosted.org/packages/a2/73/a68704750a7679d0b6d3ad7aa8d4da8e14e151ae82e6fee774e6e0d05ec8/urllib3-2.2.1-py3-none-any.whl
        # via project (some_dir/pyproject.toml)
    ----- stderr -----
    Resolved 8 packages in [TIME]
    "###
    );

    Ok(())
}
/// Check that a dynamic `pyproject.toml` is supported a compile input file.
#[test]
fn dynamic_pyproject_toml() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // `dependencies` is declared dynamic, so metadata must be produced via the build backend.
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "bird-feeder"
        version = "1.0.0"
        dynamic = ["dependencies"]

        [build-system]
        requires = ["hatchling"]
        build-backend = "hatchling.build"
    "#})?;

    // Provide a minimal package directory so the hatchling build succeeds.
    let bird_feeder = context.temp_dir.child("bird_feeder/__init__.py");
    bird_feeder.write_str("__all__= []")?;

    uv_snapshot!(context.pip_compile().arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    ----- stderr -----
    Resolved 0 packages in [TIME]
    "###);

    Ok(())
}
/// Accept `file://` URLs as installation sources.
#[test]
fn file_url() -> Result<()> {
    let context = TestContext::new("3.12");

    // The filename contains a space to exercise percent-encoding in the `file://` URL.
    let requirements_file = context.temp_dir.child("requirements file.txt");
    requirements_file.write_str("iniconfig")?;

    let requirements_url =
        Url::from_file_path(requirements_file.simple_canonicalize()?).expect("valid file URL");

    uv_snapshot!(context.filters(), context.pip_compile().arg(requirements_url.as_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] file://[TEMP_DIR]/requirements%20file.txt
    iniconfig==2.0.0
        # via -r requirements file.txt
    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    Ok(())
}
/// Allow `--no-binary` to override `--only-binary`, to allow select source distributions.
#[test]
fn no_binary_only_binary() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("source-distribution")?;

    // With `--only-binary :all:` alone, a package that ships no wheels cannot resolve.
    uv_snapshot!(context
        .pip_compile()
        .env_remove("UV_EXCLUDE_NEWER")
        .arg("requirements.in")
        .arg("--only-binary")
        .arg(":all:"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because only source-distribution==0.0.1 is available and source-distribution==0.0.1 has no usable wheels and building from source is disabled, we can conclude that all versions of source-distribution cannot be used.
          And because you require source-distribution, we can conclude that the requirements are unsatisfiable.
    "###
    );

    // Adding a per-package `--no-binary` carve-out re-enables building that one source
    // distribution, so the resolution succeeds.
    uv_snapshot!(context
        .pip_compile()
        .env_remove("UV_EXCLUDE_NEWER")
        .arg("requirements.in")
        .arg("--only-binary")
        .arg(":all:")
        .arg("--no-binary")
        .arg("source-distribution"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in --only-binary :all: --no-binary source-distribution
    source-distribution==0.0.1
        # via -r requirements.in
    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// `gunicorn` only depends on `eventlet` via an extra, so the resolution should succeed despite
/// the nonsensical extra.
#[test]
fn ignore_invalid_constraint() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("gunicorn>=20")?;

    // Constrain `eventlet` to a version that doesn't exist; since `eventlet` never enters the
    // resolution, the constraint should be ignored rather than cause a failure.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("eventlet==9999.0.1.2.3.4.5")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in")
        .arg("-c")
        .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    # uv pip compile --cache-dir [CACHE_DIR] requirements.in -c constraints.txt
    gunicorn==21.2.0
        # via -r requirements.in
    packaging==24.0
        # via gunicorn
    ----- stderr -----
    Resolved 2 packages in [TIME]
    "###
    );

    Ok(())
}