mirror of
https://github.com/astral-sh/uv.git
synced 2025-07-09 22:35:01 +00:00
Compare commits
46 commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
2514203964 | ||
![]() |
e798b09aa4 | ||
![]() |
812a3e7c34 | ||
![]() |
1958aa26bd | ||
![]() |
57338e558c | ||
![]() |
4d061a6fc3 | ||
![]() |
b1dc2b71a3 | ||
![]() |
2709c441a8 | ||
![]() |
afcbcc7498 | ||
![]() |
5e2dc5a9aa | ||
![]() |
7e48292fac | ||
![]() |
e31f556205 | ||
![]() |
dedced3265 | ||
![]() |
5c6d76ca8b | ||
![]() |
1d20530f2d | ||
![]() |
ddb1577a93 | ||
![]() |
d31e6ad7c7 | ||
![]() |
3a77b9cdd9 | ||
![]() |
1d027bd92a | ||
![]() |
bb738aeb44 | ||
![]() |
fc758bb755 | ||
![]() |
1308c85efe | ||
![]() |
f609e1ddaf | ||
![]() |
eaf517efd8 | ||
![]() |
e8bc3950ef | ||
![]() |
06af93fce7 | ||
![]() |
8afbd86f03 | ||
![]() |
a1cda6213c | ||
![]() |
39cdfe9981 | ||
![]() |
85c0fc963b | ||
![]() |
c3f13d2505 | ||
![]() |
38ee6ec800 | ||
![]() |
71b5ba13d7 | ||
![]() |
5f2857a1c7 | ||
![]() |
a58969feef | ||
![]() |
3bb8ac610c | ||
![]() |
ec54dce919 | ||
![]() |
a6bb65c78d | ||
![]() |
743260b1f5 | ||
![]() |
2f53ea5c5c | ||
![]() |
a9ea756d14 | ||
![]() |
43f67a4a4c | ||
![]() |
a7aa46acc5 | ||
![]() |
b0db548c80 | ||
![]() |
bf5dcf9929 | ||
![]() |
e40d3d5dff |
100 changed files with 4257 additions and 1456 deletions
|
@ -1,4 +1,4 @@
|
||||||
[profile.default]
|
[profile.default]
|
||||||
# Mark tests that take longer than 10s as slow.
|
# Mark tests that take longer than 10s as slow.
|
||||||
# Terminate after 90s as a stop-gap measure to terminate on deadlock.
|
# Terminate after 120s as a stop-gap measure to terminate on deadlock.
|
||||||
slow-timeout = { period = "10s", terminate-after = 9 }
|
slow-timeout = { period = "10s", terminate-after = 12 }
|
||||||
|
|
60
.github/workflows/ci.yml
vendored
60
.github/workflows/ci.yml
vendored
|
@ -470,6 +470,31 @@ jobs:
|
||||||
./target/debug/uvx
|
./target/debug/uvx
|
||||||
retention-days: 1
|
retention-days: 1
|
||||||
|
|
||||||
|
build-binary-linux-aarch64:
|
||||||
|
timeout-minutes: 10
|
||||||
|
needs: determine_changes
|
||||||
|
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||||
|
runs-on: github-ubuntu-24.04-aarch64-4
|
||||||
|
name: "build binary | linux aarch64"
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||||
|
|
||||||
|
- uses: rui314/setup-mold@v1
|
||||||
|
|
||||||
|
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||||
|
|
||||||
|
- name: "Build"
|
||||||
|
run: cargo build
|
||||||
|
|
||||||
|
- name: "Upload binary"
|
||||||
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
|
with:
|
||||||
|
name: uv-linux-aarch64-${{ github.sha }}
|
||||||
|
path: |
|
||||||
|
./target/debug/uv
|
||||||
|
./target/debug/uvx
|
||||||
|
retention-days: 1
|
||||||
|
|
||||||
build-binary-linux-musl:
|
build-binary-linux-musl:
|
||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
needs: determine_changes
|
needs: determine_changes
|
||||||
|
@ -770,6 +795,33 @@ jobs:
|
||||||
eval "$(./uv generate-shell-completion bash)"
|
eval "$(./uv generate-shell-completion bash)"
|
||||||
eval "$(./uvx --generate-shell-completion bash)"
|
eval "$(./uvx --generate-shell-completion bash)"
|
||||||
|
|
||||||
|
smoke-test-linux-aarch64:
|
||||||
|
timeout-minutes: 10
|
||||||
|
needs: build-binary-linux-aarch64
|
||||||
|
name: "smoke test | linux aarch64"
|
||||||
|
runs-on: github-ubuntu-24.04-aarch64-2
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||||
|
|
||||||
|
- name: "Download binary"
|
||||||
|
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
|
||||||
|
with:
|
||||||
|
name: uv-linux-aarch64-${{ github.sha }}
|
||||||
|
|
||||||
|
- name: "Prepare binary"
|
||||||
|
run: |
|
||||||
|
chmod +x ./uv
|
||||||
|
chmod +x ./uvx
|
||||||
|
|
||||||
|
- name: "Smoke test"
|
||||||
|
run: |
|
||||||
|
./uv run scripts/smoke-test
|
||||||
|
|
||||||
|
- name: "Test shell completions"
|
||||||
|
run: |
|
||||||
|
eval "$(./uv generate-shell-completion bash)"
|
||||||
|
eval "$(./uvx --generate-shell-completion bash)"
|
||||||
|
|
||||||
smoke-test-linux-musl:
|
smoke-test-linux-musl:
|
||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
needs: build-binary-linux-musl
|
needs: build-binary-linux-musl
|
||||||
|
@ -1533,7 +1585,7 @@ jobs:
|
||||||
run: chmod +x ./uv
|
run: chmod +x ./uv
|
||||||
|
|
||||||
- name: "Configure AWS credentials"
|
- name: "Configure AWS credentials"
|
||||||
uses: aws-actions/configure-aws-credentials@3d8cba388a057b13744d61818a337e40a119b1a7
|
uses: aws-actions/configure-aws-credentials@f503a1870408dcf2c35d5c2b8a68e69211042c7d
|
||||||
with:
|
with:
|
||||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||||
|
@ -1570,9 +1622,9 @@ jobs:
|
||||||
run: ./uv run -p ${{ env.PYTHON_VERSION }} scripts/registries-test.py --uv ./uv --color always --all
|
run: ./uv run -p ${{ env.PYTHON_VERSION }} scripts/registries-test.py --uv ./uv --color always --all
|
||||||
env:
|
env:
|
||||||
RUST_LOG: uv=debug
|
RUST_LOG: uv=debug
|
||||||
# UV_TEST_ARTIFACTORY_TOKEN: ${{ secrets.UV_TEST_ARTIFACTORY_TOKEN }}
|
UV_TEST_ARTIFACTORY_TOKEN: ${{ secrets.UV_TEST_ARTIFACTORY_TOKEN }}
|
||||||
# UV_TEST_ARTIFACTORY_URL: ${{ secrets.UV_TEST_ARTIFACTORY_URL }}
|
UV_TEST_ARTIFACTORY_URL: ${{ secrets.UV_TEST_ARTIFACTORY_URL }}
|
||||||
# UV_TEST_ARTIFACTORY_USERNAME: ${{ secrets.UV_TEST_ARTIFACTORY_USERNAME }}
|
UV_TEST_ARTIFACTORY_USERNAME: ${{ secrets.UV_TEST_ARTIFACTORY_USERNAME }}
|
||||||
UV_TEST_AWS_URL: ${{ secrets.UV_TEST_AWS_URL }}
|
UV_TEST_AWS_URL: ${{ secrets.UV_TEST_AWS_URL }}
|
||||||
UV_TEST_AWS_USERNAME: aws
|
UV_TEST_AWS_USERNAME: aws
|
||||||
UV_TEST_AZURE_TOKEN: ${{ secrets.UV_TEST_AZURE_TOKEN }}
|
UV_TEST_AZURE_TOKEN: ${{ secrets.UV_TEST_AZURE_TOKEN }}
|
||||||
|
|
|
@ -12,7 +12,7 @@ repos:
|
||||||
- id: validate-pyproject
|
- id: validate-pyproject
|
||||||
|
|
||||||
- repo: https://github.com/crate-ci/typos
|
- repo: https://github.com/crate-ci/typos
|
||||||
rev: v1.33.1
|
rev: v1.34.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: typos
|
- id: typos
|
||||||
|
|
||||||
|
@ -42,7 +42,7 @@ repos:
|
||||||
types_or: [yaml, json5]
|
types_or: [yaml, json5]
|
||||||
|
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
rev: v0.12.1
|
rev: v0.12.2
|
||||||
hooks:
|
hooks:
|
||||||
- id: ruff-format
|
- id: ruff-format
|
||||||
- id: ruff
|
- id: ruff
|
||||||
|
|
76
CHANGELOG.md
76
CHANGELOG.md
|
@ -3,6 +3,80 @@
|
||||||
<!-- prettier-ignore-start -->
|
<!-- prettier-ignore-start -->
|
||||||
|
|
||||||
|
|
||||||
|
## 0.7.20
|
||||||
|
|
||||||
|
### Python
|
||||||
|
|
||||||
|
- Add Python 3.14.0b4
|
||||||
|
- Add zstd support to Python 3.14 on Unix (it already was available on Windows)
|
||||||
|
- Add PyPy 7.3.20 (for Python 3.11.13)
|
||||||
|
|
||||||
|
See the [PyPy](https://pypy.org/posts/2025/07/pypy-v7320-release.html) and [`python-build-standalone`](https://github.com/astral-sh/python-build-standalone/releases/tag/20250708) release notes for more details.
|
||||||
|
|
||||||
|
### Enhancements
|
||||||
|
|
||||||
|
- Add `--workspace` flag to `uv add` ([#14496](https://github.com/astral-sh/uv/pull/14496))
|
||||||
|
- Add auto-detection for Intel GPUs ([#14386](https://github.com/astral-sh/uv/pull/14386))
|
||||||
|
- Drop trailing arguments when writing shebangs ([#14519](https://github.com/astral-sh/uv/pull/14519))
|
||||||
|
- Add debug message when skipping Python downloads ([#14509](https://github.com/astral-sh/uv/pull/14509))
|
||||||
|
- Add support for declaring multiple modules in namespace packages ([#14460](https://github.com/astral-sh/uv/pull/14460))
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
- Revert normalization of trailing slashes on index URLs ([#14511](https://github.com/astral-sh/uv/pull/14511))
|
||||||
|
- Fix forced resolution with all extras in `uv version` ([#14434](https://github.com/astral-sh/uv/pull/14434))
|
||||||
|
- Fix handling of pre-releases in preferences ([#14498](https://github.com/astral-sh/uv/pull/14498))
|
||||||
|
- Remove transparent variants in `uv-extract` to enable retries ([#14450](https://github.com/astral-sh/uv/pull/14450))
|
||||||
|
|
||||||
|
### Rust API
|
||||||
|
|
||||||
|
- Add method to get packages involved in a `NoSolutionError` ([#14457](https://github.com/astral-sh/uv/pull/14457))
|
||||||
|
- Make `ErrorTree` for `NoSolutionError` public ([#14444](https://github.com/astral-sh/uv/pull/14444))
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
|
||||||
|
- Finish incomplete sentence in pip migration guide ([#14432](https://github.com/astral-sh/uv/pull/14432))
|
||||||
|
- Remove `cache-dependency-glob` examples for `setup-uv` ([#14493](https://github.com/astral-sh/uv/pull/14493))
|
||||||
|
- Remove `uv pip sync` suggestion with `pyproject.toml` ([#14510](https://github.com/astral-sh/uv/pull/14510))
|
||||||
|
- Update documentation for GitHub to use `setup-uv@v6` ([#14490](https://github.com/astral-sh/uv/pull/14490))
|
||||||
|
|
||||||
|
## 0.7.19
|
||||||
|
|
||||||
|
The **[uv build backend](https://docs.astral.sh/uv/concepts/build-backend/) is now stable**, and considered ready for production use.
|
||||||
|
|
||||||
|
The uv build backend is a great choice for pure Python projects. It has reasonable defaults, with the goal of requiring zero configuration for most users, but provides flexible configuration to accommodate most Python project structures. It integrates tightly with uv, to improve messaging and user experience. It validates project metadata and structures, preventing common mistakes. And, finally, it's very fast — `uv sync` on a new project (from `uv init`) is 10-30x faster than with other build backends.
|
||||||
|
|
||||||
|
To use uv as a build backend in an existing project, add `uv_build` to the `[build-system]` section in your `pyproject.toml`:
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[build-system]
|
||||||
|
requires = ["uv_build>=0.7.19,<0.8.0"]
|
||||||
|
build-backend = "uv_build"
|
||||||
|
```
|
||||||
|
|
||||||
|
In a future release, it will replace `hatchling` as the default in `uv init`. As before, uv will remain compatible with all standards-compliant build backends.
|
||||||
|
|
||||||
|
### Python
|
||||||
|
|
||||||
|
- Add PGO distributions of Python for aarch64 Linux, which are more optimized for better performance
|
||||||
|
|
||||||
|
See the [python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250702) for more details.
|
||||||
|
|
||||||
|
### Enhancements
|
||||||
|
|
||||||
|
- Ignore Python patch version for `--universal` pip compile ([#14405](https://github.com/astral-sh/uv/pull/14405))
|
||||||
|
- Update the tilde version specifier warning to include more context ([#14335](https://github.com/astral-sh/uv/pull/14335))
|
||||||
|
- Clarify behavior and hint on tool install when no executables are available ([#14423](https://github.com/astral-sh/uv/pull/14423))
|
||||||
|
|
||||||
|
### Bug fixes
|
||||||
|
|
||||||
|
- Make project and interpreter lock acquisition non-fatal ([#14404](https://github.com/astral-sh/uv/pull/14404))
|
||||||
|
- Includes `sys.prefix` in cached environment keys to avoid `--with` collisions across projects ([#14403](https://github.com/astral-sh/uv/pull/14403))
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
|
||||||
|
- Add a migration guide from pip to uv projects ([#12382](https://github.com/astral-sh/uv/pull/12382))
|
||||||
|
|
||||||
## 0.7.18
|
## 0.7.18
|
||||||
|
|
||||||
### Python
|
### Python
|
||||||
|
@ -12,6 +86,8 @@
|
||||||
These are not downloaded by default, since x86-64 Python has broader ecosystem support on Windows.
|
These are not downloaded by default, since x86-64 Python has broader ecosystem support on Windows.
|
||||||
However, they can be requested with `cpython-<version>-windows-aarch64`.
|
However, they can be requested with `cpython-<version>-windows-aarch64`.
|
||||||
|
|
||||||
|
See the [python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250630) for more details.
|
||||||
|
|
||||||
### Enhancements
|
### Enhancements
|
||||||
|
|
||||||
- Keep track of retries in `ManagedPythonDownload::fetch_with_retry` ([#14378](https://github.com/astral-sh/uv/pull/14378))
|
- Keep track of retries in `ManagedPythonDownload::fetch_with_retry` ([#14378](https://github.com/astral-sh/uv/pull/14378))
|
||||||
|
|
122
Cargo.lock
generated
122
Cargo.lock
generated
|
@ -189,9 +189,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "async-channel"
|
name = "async-channel"
|
||||||
version = "2.3.1"
|
version = "2.5.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a"
|
checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"concurrent-queue",
|
"concurrent-queue",
|
||||||
"event-listener-strategy",
|
"event-listener-strategy",
|
||||||
|
@ -1165,9 +1165,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "event-listener-strategy"
|
name = "event-listener-strategy"
|
||||||
version = "0.5.3"
|
version = "0.5.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2"
|
checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"event-listener",
|
"event-listener",
|
||||||
"pin-project-lite",
|
"pin-project-lite",
|
||||||
|
@ -1698,7 +1698,7 @@ dependencies = [
|
||||||
"tokio",
|
"tokio",
|
||||||
"tokio-rustls",
|
"tokio-rustls",
|
||||||
"tower-service",
|
"tower-service",
|
||||||
"webpki-roots",
|
"webpki-roots 0.26.8",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -1707,6 +1707,7 @@ version = "0.1.14"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "dc2fdfdbff08affe55bb779f33b053aa1fe5dd5b54c257343c17edfa55711bdb"
|
checksum = "dc2fdfdbff08affe55bb779f33b053aa1fe5dd5b54c257343c17edfa55711bdb"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
|
"base64 0.22.1",
|
||||||
"bytes",
|
"bytes",
|
||||||
"futures-channel",
|
"futures-channel",
|
||||||
"futures-core",
|
"futures-core",
|
||||||
|
@ -1714,7 +1715,9 @@ dependencies = [
|
||||||
"http",
|
"http",
|
||||||
"http-body",
|
"http-body",
|
||||||
"hyper",
|
"hyper",
|
||||||
|
"ipnet",
|
||||||
"libc",
|
"libc",
|
||||||
|
"percent-encoding",
|
||||||
"pin-project-lite",
|
"pin-project-lite",
|
||||||
"socket2",
|
"socket2",
|
||||||
"tokio",
|
"tokio",
|
||||||
|
@ -1945,6 +1948,16 @@ version = "2.11.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
|
checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "iri-string"
|
||||||
|
version = "0.7.8"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2"
|
||||||
|
dependencies = [
|
||||||
|
"memchr",
|
||||||
|
"serde",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "is-terminal"
|
name = "is-terminal"
|
||||||
version = "0.4.15"
|
version = "0.4.15"
|
||||||
|
@ -3062,9 +3075,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "reqwest"
|
name = "reqwest"
|
||||||
version = "0.12.15"
|
version = "0.12.22"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb"
|
checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"async-compression",
|
"async-compression",
|
||||||
"base64 0.22.1",
|
"base64 0.22.1",
|
||||||
|
@ -3079,18 +3092,14 @@ dependencies = [
|
||||||
"hyper",
|
"hyper",
|
||||||
"hyper-rustls",
|
"hyper-rustls",
|
||||||
"hyper-util",
|
"hyper-util",
|
||||||
"ipnet",
|
|
||||||
"js-sys",
|
"js-sys",
|
||||||
"log",
|
"log",
|
||||||
"mime",
|
|
||||||
"mime_guess",
|
"mime_guess",
|
||||||
"once_cell",
|
|
||||||
"percent-encoding",
|
"percent-encoding",
|
||||||
"pin-project-lite",
|
"pin-project-lite",
|
||||||
"quinn",
|
"quinn",
|
||||||
"rustls",
|
"rustls",
|
||||||
"rustls-native-certs",
|
"rustls-native-certs",
|
||||||
"rustls-pemfile",
|
|
||||||
"rustls-pki-types",
|
"rustls-pki-types",
|
||||||
"serde",
|
"serde",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
|
@ -3098,17 +3107,16 @@ dependencies = [
|
||||||
"sync_wrapper",
|
"sync_wrapper",
|
||||||
"tokio",
|
"tokio",
|
||||||
"tokio-rustls",
|
"tokio-rustls",
|
||||||
"tokio-socks",
|
|
||||||
"tokio-util",
|
"tokio-util",
|
||||||
"tower",
|
"tower",
|
||||||
|
"tower-http",
|
||||||
"tower-service",
|
"tower-service",
|
||||||
"url",
|
"url",
|
||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
"wasm-bindgen-futures",
|
"wasm-bindgen-futures",
|
||||||
"wasm-streams",
|
"wasm-streams",
|
||||||
"web-sys",
|
"web-sys",
|
||||||
"webpki-roots",
|
"webpki-roots 1.0.1",
|
||||||
"windows-registry 0.4.0",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -3351,15 +3359,6 @@ dependencies = [
|
||||||
"security-framework",
|
"security-framework",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rustls-pemfile"
|
|
||||||
version = "2.2.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50"
|
|
||||||
dependencies = [
|
|
||||||
"rustls-pki-types",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rustls-pki-types"
|
name = "rustls-pki-types"
|
||||||
version = "1.11.0"
|
version = "1.11.0"
|
||||||
|
@ -3428,9 +3427,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "schemars"
|
name = "schemars"
|
||||||
version = "1.0.3"
|
version = "1.0.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "1375ba8ef45a6f15d83fa8748f1079428295d403d6ea991d09ab100155fbc06d"
|
checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"dyn-clone",
|
"dyn-clone",
|
||||||
"ref-cast",
|
"ref-cast",
|
||||||
|
@ -3442,9 +3441,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "schemars_derive"
|
name = "schemars_derive"
|
||||||
version = "1.0.3"
|
version = "1.0.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "2b13ed22d6d49fe23712e068770b5c4df4a693a2b02eeff8e7ca3135627a24f6"
|
checksum = "33d020396d1d138dc19f1165df7545479dcd58d93810dc5d646a16e55abefa80"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
|
@ -3968,9 +3967,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "test-log"
|
name = "test-log"
|
||||||
version = "0.2.17"
|
version = "0.2.18"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e7f46083d221181166e5b6f6b1e5f1d499f3a76888826e6cb1d057554157cd0f"
|
checksum = "1e33b98a582ea0be1168eba097538ee8dd4bbe0f2b01b22ac92ea30054e5be7b"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"test-log-macros",
|
"test-log-macros",
|
||||||
"tracing-subscriber",
|
"tracing-subscriber",
|
||||||
|
@ -3978,9 +3977,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "test-log-macros"
|
name = "test-log-macros"
|
||||||
version = "0.2.17"
|
version = "0.2.18"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "888d0c3c6db53c0fdab160d2ed5e12ba745383d3e85813f2ea0f2b1475ab553f"
|
checksum = "451b374529930d7601b1eef8d32bc79ae870b6079b069401709c2a8bf9e75f36"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
|
@ -4172,18 +4171,6 @@ dependencies = [
|
||||||
"tokio",
|
"tokio",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "tokio-socks"
|
|
||||||
version = "0.5.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "0d4770b8024672c1101b3f6733eab95b18007dbe0847a8afe341fcf79e06043f"
|
|
||||||
dependencies = [
|
|
||||||
"either",
|
|
||||||
"futures-util",
|
|
||||||
"thiserror 1.0.69",
|
|
||||||
"tokio",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tokio-stream"
|
name = "tokio-stream"
|
||||||
version = "0.1.17"
|
version = "0.1.17"
|
||||||
|
@ -4266,6 +4253,24 @@ dependencies = [
|
||||||
"tower-service",
|
"tower-service",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "tower-http"
|
||||||
|
version = "0.6.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
|
||||||
|
dependencies = [
|
||||||
|
"bitflags 2.9.1",
|
||||||
|
"bytes",
|
||||||
|
"futures-util",
|
||||||
|
"http",
|
||||||
|
"http-body",
|
||||||
|
"iri-string",
|
||||||
|
"pin-project-lite",
|
||||||
|
"tower",
|
||||||
|
"tower-layer",
|
||||||
|
"tower-service",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tower-layer"
|
name = "tower-layer"
|
||||||
version = "0.3.3"
|
version = "0.3.3"
|
||||||
|
@ -4603,7 +4608,7 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "uv"
|
name = "uv"
|
||||||
version = "0.7.18"
|
version = "0.7.20"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anstream",
|
"anstream",
|
||||||
"anyhow",
|
"anyhow",
|
||||||
|
@ -4767,7 +4772,7 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "uv-build"
|
name = "uv-build"
|
||||||
version = "0.7.18"
|
version = "0.7.20"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"uv-build-backend",
|
"uv-build-backend",
|
||||||
|
@ -4785,6 +4790,7 @@ dependencies = [
|
||||||
"indoc",
|
"indoc",
|
||||||
"insta",
|
"insta",
|
||||||
"itertools 0.14.0",
|
"itertools 0.14.0",
|
||||||
|
"rustc-hash",
|
||||||
"schemars",
|
"schemars",
|
||||||
"serde",
|
"serde",
|
||||||
"sha2",
|
"sha2",
|
||||||
|
@ -5635,7 +5641,7 @@ dependencies = [
|
||||||
"uv-trampoline-builder",
|
"uv-trampoline-builder",
|
||||||
"uv-warnings",
|
"uv-warnings",
|
||||||
"which",
|
"which",
|
||||||
"windows-registry 0.5.3",
|
"windows-registry",
|
||||||
"windows-result 0.3.4",
|
"windows-result 0.3.4",
|
||||||
"windows-sys 0.59.0",
|
"windows-sys 0.59.0",
|
||||||
]
|
]
|
||||||
|
@ -5839,7 +5845,7 @@ dependencies = [
|
||||||
"tracing",
|
"tracing",
|
||||||
"uv-fs",
|
"uv-fs",
|
||||||
"uv-static",
|
"uv-static",
|
||||||
"windows-registry 0.5.3",
|
"windows-registry",
|
||||||
"windows-result 0.3.4",
|
"windows-result 0.3.4",
|
||||||
"windows-sys 0.59.0",
|
"windows-sys 0.59.0",
|
||||||
]
|
]
|
||||||
|
@ -5957,7 +5963,7 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "uv-version"
|
name = "uv-version"
|
||||||
version = "0.7.18"
|
version = "0.7.20"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "uv-virtualenv"
|
name = "uv-virtualenv"
|
||||||
|
@ -6221,6 +6227,15 @@ dependencies = [
|
||||||
"rustls-pki-types",
|
"rustls-pki-types",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "webpki-roots"
|
||||||
|
version = "1.0.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "8782dd5a41a24eed3a4f40b606249b3e236ca61adf1f25ea4d45c73de122b502"
|
||||||
|
dependencies = [
|
||||||
|
"rustls-pki-types",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "weezl"
|
name = "weezl"
|
||||||
version = "0.1.8"
|
version = "0.1.8"
|
||||||
|
@ -6448,17 +6463,6 @@ dependencies = [
|
||||||
"windows-link",
|
"windows-link",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "windows-registry"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3"
|
|
||||||
dependencies = [
|
|
||||||
"windows-result 0.3.4",
|
|
||||||
"windows-strings 0.3.1",
|
|
||||||
"windows-targets 0.53.0",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows-registry"
|
name = "windows-registry"
|
||||||
version = "0.5.3"
|
version = "0.5.3"
|
||||||
|
|
|
@ -142,7 +142,7 @@ ref-cast = { version = "1.0.24" }
|
||||||
reflink-copy = { version = "0.1.19" }
|
reflink-copy = { version = "0.1.19" }
|
||||||
regex = { version = "1.10.6" }
|
regex = { version = "1.10.6" }
|
||||||
regex-automata = { version = "0.4.8", default-features = false, features = ["dfa-build", "dfa-search", "perf", "std", "syntax"] }
|
regex-automata = { version = "0.4.8", default-features = false, features = ["dfa-build", "dfa-search", "perf", "std", "syntax"] }
|
||||||
reqwest = { version = "=0.12.15", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] }
|
reqwest = { version = "0.12.22", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] }
|
||||||
reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8", features = ["multipart"] }
|
reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8", features = ["multipart"] }
|
||||||
reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" }
|
reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" }
|
||||||
rkyv = { version = "0.8.8", features = ["bytecheck"] }
|
rkyv = { version = "0.8.8", features = ["bytecheck"] }
|
||||||
|
|
|
@ -31,6 +31,7 @@ flate2 = { workspace = true, default-features = false }
|
||||||
fs-err = { workspace = true }
|
fs-err = { workspace = true }
|
||||||
globset = { workspace = true }
|
globset = { workspace = true }
|
||||||
itertools = { workspace = true }
|
itertools = { workspace = true }
|
||||||
|
rustc-hash = { workspace = true }
|
||||||
schemars = { workspace = true, optional = true }
|
schemars = { workspace = true, optional = true }
|
||||||
serde = { workspace = true }
|
serde = { workspace = true }
|
||||||
sha2 = { workspace = true }
|
sha2 = { workspace = true }
|
||||||
|
|
|
@ -22,6 +22,7 @@ use uv_normalize::PackageName;
|
||||||
use uv_pypi_types::{Identifier, IdentifierParseError};
|
use uv_pypi_types::{Identifier, IdentifierParseError};
|
||||||
|
|
||||||
use crate::metadata::ValidationError;
|
use crate::metadata::ValidationError;
|
||||||
|
use crate::settings::ModuleName;
|
||||||
|
|
||||||
#[derive(Debug, Error)]
|
#[derive(Debug, Error)]
|
||||||
pub enum Error {
|
pub enum Error {
|
||||||
|
@ -184,7 +185,7 @@ fn check_metadata_directory(
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the source root and the module path with the `__init__.py[i]` below to it while
|
/// Returns the source root and the module path(s) with the `__init__.py[i]` below to it while
|
||||||
/// checking the project layout and names.
|
/// checking the project layout and names.
|
||||||
///
|
///
|
||||||
/// Some target platforms have case-sensitive filesystems, while others have case-insensitive
|
/// Some target platforms have case-sensitive filesystems, while others have case-insensitive
|
||||||
|
@ -198,13 +199,15 @@ fn check_metadata_directory(
|
||||||
/// dist-info-normalization, the rules are lowercasing, replacing `.` with `_` and
|
/// dist-info-normalization, the rules are lowercasing, replacing `.` with `_` and
|
||||||
/// replace `-` with `_`. Since `.` and `-` are not allowed in identifiers, we can use a string
|
/// replace `-` with `_`. Since `.` and `-` are not allowed in identifiers, we can use a string
|
||||||
/// comparison with the module name.
|
/// comparison with the module name.
|
||||||
|
///
|
||||||
|
/// While we recommend one module per package, it is possible to declare a list of modules.
|
||||||
fn find_roots(
|
fn find_roots(
|
||||||
source_tree: &Path,
|
source_tree: &Path,
|
||||||
pyproject_toml: &PyProjectToml,
|
pyproject_toml: &PyProjectToml,
|
||||||
relative_module_root: &Path,
|
relative_module_root: &Path,
|
||||||
module_name: Option<&str>,
|
module_name: Option<&ModuleName>,
|
||||||
namespace: bool,
|
namespace: bool,
|
||||||
) -> Result<(PathBuf, PathBuf), Error> {
|
) -> Result<(PathBuf, Vec<PathBuf>), Error> {
|
||||||
let relative_module_root = uv_fs::normalize_path(relative_module_root);
|
let relative_module_root = uv_fs::normalize_path(relative_module_root);
|
||||||
let src_root = source_tree.join(&relative_module_root);
|
let src_root = source_tree.join(&relative_module_root);
|
||||||
if !src_root.starts_with(source_tree) {
|
if !src_root.starts_with(source_tree) {
|
||||||
|
@ -215,22 +218,45 @@ fn find_roots(
|
||||||
|
|
||||||
if namespace {
|
if namespace {
|
||||||
// `namespace = true` disables module structure checks.
|
// `namespace = true` disables module structure checks.
|
||||||
let module_relative = if let Some(module_name) = module_name {
|
let modules_relative = if let Some(module_name) = module_name {
|
||||||
module_name.split('.').collect::<PathBuf>()
|
match module_name {
|
||||||
|
ModuleName::Name(name) => {
|
||||||
|
vec![name.split('.').collect::<PathBuf>()]
|
||||||
|
}
|
||||||
|
ModuleName::Names(names) => names
|
||||||
|
.iter()
|
||||||
|
.map(|name| name.split('.').collect::<PathBuf>())
|
||||||
|
.collect(),
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
PathBuf::from(pyproject_toml.name().as_dist_info_name().to_string())
|
vec![PathBuf::from(
|
||||||
|
pyproject_toml.name().as_dist_info_name().to_string(),
|
||||||
|
)]
|
||||||
};
|
};
|
||||||
|
for module_relative in &modules_relative {
|
||||||
debug!("Namespace module path: {}", module_relative.user_display());
|
debug!("Namespace module path: {}", module_relative.user_display());
|
||||||
return Ok((src_root, module_relative));
|
}
|
||||||
|
return Ok((src_root, modules_relative));
|
||||||
}
|
}
|
||||||
|
|
||||||
let module_relative = if let Some(module_name) = module_name {
|
let modules_relative = if let Some(module_name) = module_name {
|
||||||
module_path_from_module_name(&src_root, module_name)?
|
match module_name {
|
||||||
|
ModuleName::Name(name) => vec![module_path_from_module_name(&src_root, name)?],
|
||||||
|
ModuleName::Names(names) => names
|
||||||
|
.iter()
|
||||||
|
.map(|name| module_path_from_module_name(&src_root, name))
|
||||||
|
.collect::<Result<_, _>>()?,
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
find_module_path_from_package_name(&src_root, pyproject_toml.name())?
|
vec![find_module_path_from_package_name(
|
||||||
|
&src_root,
|
||||||
|
pyproject_toml.name(),
|
||||||
|
)?]
|
||||||
};
|
};
|
||||||
|
for module_relative in &modules_relative {
|
||||||
debug!("Module path: {}", module_relative.user_display());
|
debug!("Module path: {}", module_relative.user_display());
|
||||||
Ok((src_root, module_relative))
|
}
|
||||||
|
Ok((src_root, modules_relative))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Infer stubs packages from package name alone.
|
/// Infer stubs packages from package name alone.
|
||||||
|
@ -410,6 +436,15 @@ mod tests {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn build_err(source_root: &Path) -> String {
|
||||||
|
let dist = TempDir::new().unwrap();
|
||||||
|
let build_err = build(source_root, dist.path()).unwrap_err();
|
||||||
|
let err_message: String = format_err(&build_err)
|
||||||
|
.replace(&source_root.user_display().to_string(), "[TEMP_PATH]")
|
||||||
|
.replace('\\', "/");
|
||||||
|
err_message
|
||||||
|
}
|
||||||
|
|
||||||
fn sdist_contents(source_dist_path: &Path) -> Vec<String> {
|
fn sdist_contents(source_dist_path: &Path) -> Vec<String> {
|
||||||
let sdist_reader = BufReader::new(File::open(source_dist_path).unwrap());
|
let sdist_reader = BufReader::new(File::open(source_dist_path).unwrap());
|
||||||
let mut source_dist = tar::Archive::new(GzDecoder::new(sdist_reader));
|
let mut source_dist = tar::Archive::new(GzDecoder::new(sdist_reader));
|
||||||
|
@ -998,13 +1033,8 @@ mod tests {
|
||||||
fs_err::create_dir_all(src.path().join("src").join("simple_namespace").join("part"))
|
fs_err::create_dir_all(src.path().join("src").join("simple_namespace").join("part"))
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
let dist = TempDir::new().unwrap();
|
|
||||||
let build_err = build(src.path(), dist.path()).unwrap_err();
|
|
||||||
let err_message = format_err(&build_err)
|
|
||||||
.replace(&src.path().user_display().to_string(), "[TEMP_PATH]")
|
|
||||||
.replace('\\', "/");
|
|
||||||
assert_snapshot!(
|
assert_snapshot!(
|
||||||
err_message,
|
build_err(src.path()),
|
||||||
@"Expected a Python module at: `[TEMP_PATH]/src/simple_namespace/part/__init__.py`"
|
@"Expected a Python module at: `[TEMP_PATH]/src/simple_namespace/part/__init__.py`"
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -1025,16 +1055,13 @@ mod tests {
|
||||||
.join("simple_namespace")
|
.join("simple_namespace")
|
||||||
.join("__init__.py");
|
.join("__init__.py");
|
||||||
File::create(&bogus_init_py).unwrap();
|
File::create(&bogus_init_py).unwrap();
|
||||||
let build_err = build(src.path(), dist.path()).unwrap_err();
|
|
||||||
let err_message = format_err(&build_err)
|
|
||||||
.replace(&src.path().user_display().to_string(), "[TEMP_PATH]")
|
|
||||||
.replace('\\', "/");
|
|
||||||
assert_snapshot!(
|
assert_snapshot!(
|
||||||
err_message,
|
build_err(src.path()),
|
||||||
@"For namespace packages, `__init__.py[i]` is not allowed in parent directory: `[TEMP_PATH]/src/simple_namespace`"
|
@"For namespace packages, `__init__.py[i]` is not allowed in parent directory: `[TEMP_PATH]/src/simple_namespace`"
|
||||||
);
|
);
|
||||||
fs_err::remove_file(bogus_init_py).unwrap();
|
fs_err::remove_file(bogus_init_py).unwrap();
|
||||||
|
|
||||||
|
let dist = TempDir::new().unwrap();
|
||||||
let build1 = build(src.path(), dist.path()).unwrap();
|
let build1 = build(src.path(), dist.path()).unwrap();
|
||||||
assert_snapshot!(build1.source_dist_contents.join("\n"), @r"
|
assert_snapshot!(build1.source_dist_contents.join("\n"), @r"
|
||||||
simple_namespace_part-1.0.0/
|
simple_namespace_part-1.0.0/
|
||||||
|
@ -1209,4 +1236,117 @@ mod tests {
|
||||||
cloud_db_schema_stubs-1.0.0.dist-info/WHEEL
|
cloud_db_schema_stubs-1.0.0.dist-info/WHEEL
|
||||||
");
|
");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// A package with multiple modules, one a regular module and two namespace modules.
|
||||||
|
#[test]
|
||||||
|
fn multiple_module_names() {
|
||||||
|
let src = TempDir::new().unwrap();
|
||||||
|
let pyproject_toml = indoc! {r#"
|
||||||
|
[project]
|
||||||
|
name = "simple-namespace-part"
|
||||||
|
version = "1.0.0"
|
||||||
|
|
||||||
|
[tool.uv.build-backend]
|
||||||
|
module-name = ["foo", "simple_namespace.part_a", "simple_namespace.part_b"]
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["uv_build>=0.5.15,<0.6"]
|
||||||
|
build-backend = "uv_build"
|
||||||
|
"#
|
||||||
|
};
|
||||||
|
fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap();
|
||||||
|
fs_err::create_dir_all(src.path().join("src").join("foo")).unwrap();
|
||||||
|
fs_err::create_dir_all(
|
||||||
|
src.path()
|
||||||
|
.join("src")
|
||||||
|
.join("simple_namespace")
|
||||||
|
.join("part_a"),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
fs_err::create_dir_all(
|
||||||
|
src.path()
|
||||||
|
.join("src")
|
||||||
|
.join("simple_namespace")
|
||||||
|
.join("part_b"),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
// Most of these checks exist in other tests too, but we want to ensure that they apply
|
||||||
|
// with multiple modules too.
|
||||||
|
|
||||||
|
// The first module is missing an `__init__.py`.
|
||||||
|
assert_snapshot!(
|
||||||
|
build_err(src.path()),
|
||||||
|
@"Expected a Python module at: `[TEMP_PATH]/src/foo/__init__.py`"
|
||||||
|
);
|
||||||
|
|
||||||
|
// Create the first correct `__init__.py` file
|
||||||
|
File::create(src.path().join("src").join("foo").join("__init__.py")).unwrap();
|
||||||
|
|
||||||
|
// The second module, a namespace, is missing an `__init__.py`.
|
||||||
|
assert_snapshot!(
|
||||||
|
build_err(src.path()),
|
||||||
|
@"Expected a Python module at: `[TEMP_PATH]/src/simple_namespace/part_a/__init__.py`"
|
||||||
|
);
|
||||||
|
|
||||||
|
// Create the other two correct `__init__.py` files
|
||||||
|
File::create(
|
||||||
|
src.path()
|
||||||
|
.join("src")
|
||||||
|
.join("simple_namespace")
|
||||||
|
.join("part_a")
|
||||||
|
.join("__init__.py"),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
File::create(
|
||||||
|
src.path()
|
||||||
|
.join("src")
|
||||||
|
.join("simple_namespace")
|
||||||
|
.join("part_b")
|
||||||
|
.join("__init__.py"),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
// For the second module, a namespace, there must not be an `__init__.py` here.
|
||||||
|
let bogus_init_py = src
|
||||||
|
.path()
|
||||||
|
.join("src")
|
||||||
|
.join("simple_namespace")
|
||||||
|
.join("__init__.py");
|
||||||
|
File::create(&bogus_init_py).unwrap();
|
||||||
|
assert_snapshot!(
|
||||||
|
build_err(src.path()),
|
||||||
|
@"For namespace packages, `__init__.py[i]` is not allowed in parent directory: `[TEMP_PATH]/src/simple_namespace`"
|
||||||
|
);
|
||||||
|
fs_err::remove_file(bogus_init_py).unwrap();
|
||||||
|
|
||||||
|
let dist = TempDir::new().unwrap();
|
||||||
|
let build = build(src.path(), dist.path()).unwrap();
|
||||||
|
assert_snapshot!(build.source_dist_contents.join("\n"), @r"
|
||||||
|
simple_namespace_part-1.0.0/
|
||||||
|
simple_namespace_part-1.0.0/PKG-INFO
|
||||||
|
simple_namespace_part-1.0.0/pyproject.toml
|
||||||
|
simple_namespace_part-1.0.0/src
|
||||||
|
simple_namespace_part-1.0.0/src/foo
|
||||||
|
simple_namespace_part-1.0.0/src/foo/__init__.py
|
||||||
|
simple_namespace_part-1.0.0/src/simple_namespace
|
||||||
|
simple_namespace_part-1.0.0/src/simple_namespace/part_a
|
||||||
|
simple_namespace_part-1.0.0/src/simple_namespace/part_a/__init__.py
|
||||||
|
simple_namespace_part-1.0.0/src/simple_namespace/part_b
|
||||||
|
simple_namespace_part-1.0.0/src/simple_namespace/part_b/__init__.py
|
||||||
|
");
|
||||||
|
assert_snapshot!(build.wheel_contents.join("\n"), @r"
|
||||||
|
foo/
|
||||||
|
foo/__init__.py
|
||||||
|
simple_namespace/
|
||||||
|
simple_namespace/part_a/
|
||||||
|
simple_namespace/part_a/__init__.py
|
||||||
|
simple_namespace/part_b/
|
||||||
|
simple_namespace/part_b/__init__.py
|
||||||
|
simple_namespace_part-1.0.0.dist-info/
|
||||||
|
simple_namespace_part-1.0.0.dist-info/METADATA
|
||||||
|
simple_namespace_part-1.0.0.dist-info/RECORD
|
||||||
|
simple_namespace_part-1.0.0.dist-info/WHEEL
|
||||||
|
");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -4,10 +4,6 @@ use uv_macros::OptionsMetadata;
|
||||||
|
|
||||||
/// Settings for the uv build backend (`uv_build`).
|
/// Settings for the uv build backend (`uv_build`).
|
||||||
///
|
///
|
||||||
/// !!! note
|
|
||||||
///
|
|
||||||
/// The uv build backend is currently in preview and may change in any future release.
|
|
||||||
///
|
|
||||||
/// Note that those settings only apply when using the `uv_build` backend, other build backends
|
/// Note that those settings only apply when using the `uv_build` backend, other build backends
|
||||||
/// (such as hatchling) have their own configuration.
|
/// (such as hatchling) have their own configuration.
|
||||||
///
|
///
|
||||||
|
@ -38,15 +34,19 @@ pub struct BuildBackendSettings {
|
||||||
/// For namespace packages with a single module, the path can be dotted, e.g., `foo.bar` or
|
/// For namespace packages with a single module, the path can be dotted, e.g., `foo.bar` or
|
||||||
/// `foo-stubs.bar`.
|
/// `foo-stubs.bar`.
|
||||||
///
|
///
|
||||||
|
/// For namespace packages with multiple modules, the path can be a list, e.g.,
|
||||||
|
/// `["foo", "bar"]`. We recommend using a single module per package, splitting multiple
|
||||||
|
/// packages into a workspace.
|
||||||
|
///
|
||||||
/// Note that using this option runs the risk of creating two packages with different names but
|
/// Note that using this option runs the risk of creating two packages with different names but
|
||||||
/// the same module names. Installing such packages together leads to unspecified behavior,
|
/// the same module names. Installing such packages together leads to unspecified behavior,
|
||||||
/// often with corrupted files or directory trees.
|
/// often with corrupted files or directory trees.
|
||||||
#[option(
|
#[option(
|
||||||
default = r#"None"#,
|
default = r#"None"#,
|
||||||
value_type = "str",
|
value_type = "str | list[str]",
|
||||||
example = r#"module-name = "sklearn""#
|
example = r#"module-name = "sklearn""#
|
||||||
)]
|
)]
|
||||||
pub module_name: Option<String>,
|
pub module_name: Option<ModuleName>,
|
||||||
|
|
||||||
/// Glob expressions which files and directories to additionally include in the source
|
/// Glob expressions which files and directories to additionally include in the source
|
||||||
/// distribution.
|
/// distribution.
|
||||||
|
@ -185,6 +185,17 @@ impl Default for BuildBackendSettings {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Whether to include a single module or multiple modules.
|
||||||
|
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)]
|
||||||
|
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||||
|
#[serde(untagged)]
|
||||||
|
pub enum ModuleName {
|
||||||
|
/// A single module name.
|
||||||
|
Name(String),
|
||||||
|
/// Multiple module names, which are all included.
|
||||||
|
Names(Vec<String>),
|
||||||
|
}
|
||||||
|
|
||||||
/// Data includes for wheels.
|
/// Data includes for wheels.
|
||||||
///
|
///
|
||||||
/// See `BuildBackendSettings::data`.
|
/// See `BuildBackendSettings::data`.
|
||||||
|
|
|
@ -68,13 +68,14 @@ fn source_dist_matcher(
|
||||||
includes.push(globset::escape("pyproject.toml"));
|
includes.push(globset::escape("pyproject.toml"));
|
||||||
|
|
||||||
// Check that the source tree contains a module.
|
// Check that the source tree contains a module.
|
||||||
let (src_root, module_relative) = find_roots(
|
let (src_root, modules_relative) = find_roots(
|
||||||
source_tree,
|
source_tree,
|
||||||
pyproject_toml,
|
pyproject_toml,
|
||||||
&settings.module_root,
|
&settings.module_root,
|
||||||
settings.module_name.as_deref(),
|
settings.module_name.as_ref(),
|
||||||
settings.namespace,
|
settings.namespace,
|
||||||
)?;
|
)?;
|
||||||
|
for module_relative in modules_relative {
|
||||||
// The wheel must not include any files included by the source distribution (at least until we
|
// The wheel must not include any files included by the source distribution (at least until we
|
||||||
// have files generated in the source dist -> wheel build step).
|
// have files generated in the source dist -> wheel build step).
|
||||||
let import_path = uv_fs::normalize_path(
|
let import_path = uv_fs::normalize_path(
|
||||||
|
@ -84,6 +85,7 @@ fn source_dist_matcher(
|
||||||
.portable_display()
|
.portable_display()
|
||||||
.to_string();
|
.to_string();
|
||||||
includes.push(format!("{}/**", globset::escape(&import_path)));
|
includes.push(format!("{}/**", globset::escape(&import_path)));
|
||||||
|
}
|
||||||
for include in includes {
|
for include in includes {
|
||||||
let glob = PortableGlobParser::Uv
|
let glob = PortableGlobParser::Uv
|
||||||
.parse(&include)
|
.parse(&include)
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
use fs_err::File;
|
use fs_err::File;
|
||||||
use globset::{GlobSet, GlobSetBuilder};
|
use globset::{GlobSet, GlobSetBuilder};
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
|
use rustc_hash::FxHashSet;
|
||||||
use sha2::{Digest, Sha256};
|
use sha2::{Digest, Sha256};
|
||||||
use std::io::{BufReader, Read, Write};
|
use std::io::{BufReader, Read, Write};
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
|
@ -127,19 +128,24 @@ fn write_wheel(
|
||||||
source_tree,
|
source_tree,
|
||||||
pyproject_toml,
|
pyproject_toml,
|
||||||
&settings.module_root,
|
&settings.module_root,
|
||||||
settings.module_name.as_deref(),
|
settings.module_name.as_ref(),
|
||||||
settings.namespace,
|
settings.namespace,
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
|
let mut files_visited = 0;
|
||||||
|
let mut prefix_directories = FxHashSet::default();
|
||||||
|
for module_relative in module_relative {
|
||||||
// For convenience, have directories for the whole tree in the wheel
|
// For convenience, have directories for the whole tree in the wheel
|
||||||
for ancestor in module_relative.ancestors().skip(1) {
|
for ancestor in module_relative.ancestors().skip(1) {
|
||||||
if ancestor == Path::new("") {
|
if ancestor == Path::new("") {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
// Avoid duplicate directories in the zip.
|
||||||
|
if prefix_directories.insert(ancestor.to_path_buf()) {
|
||||||
wheel_writer.write_directory(&ancestor.portable_display().to_string())?;
|
wheel_writer.write_directory(&ancestor.portable_display().to_string())?;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
let mut files_visited = 0;
|
|
||||||
for entry in WalkDir::new(src_root.join(module_relative))
|
for entry in WalkDir::new(src_root.join(module_relative))
|
||||||
.sort_by_file_name()
|
.sort_by_file_name()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
|
@ -177,6 +183,7 @@ fn write_wheel(
|
||||||
debug!("Adding to wheel: {entry_path}");
|
debug!("Adding to wheel: {entry_path}");
|
||||||
wheel_writer.write_dir_entry(&entry, &entry_path)?;
|
wheel_writer.write_dir_entry(&entry, &entry_path)?;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
debug!("Visited {files_visited} files for wheel build");
|
debug!("Visited {files_visited} files for wheel build");
|
||||||
|
|
||||||
// Add the license files
|
// Add the license files
|
||||||
|
@ -269,7 +276,7 @@ pub fn build_editable(
|
||||||
source_tree,
|
source_tree,
|
||||||
&pyproject_toml,
|
&pyproject_toml,
|
||||||
&settings.module_root,
|
&settings.module_root,
|
||||||
settings.module_name.as_deref(),
|
settings.module_name.as_ref(),
|
||||||
settings.namespace,
|
settings.namespace,
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
|
|
|
@ -25,7 +25,7 @@ use tempfile::TempDir;
|
||||||
use tokio::io::AsyncBufReadExt;
|
use tokio::io::AsyncBufReadExt;
|
||||||
use tokio::process::Command;
|
use tokio::process::Command;
|
||||||
use tokio::sync::{Mutex, Semaphore};
|
use tokio::sync::{Mutex, Semaphore};
|
||||||
use tracing::{Instrument, debug, info_span, instrument};
|
use tracing::{Instrument, debug, info_span, instrument, warn};
|
||||||
|
|
||||||
use uv_cache_key::cache_digest;
|
use uv_cache_key::cache_digest;
|
||||||
use uv_configuration::PreviewMode;
|
use uv_configuration::PreviewMode;
|
||||||
|
@ -456,8 +456,12 @@ impl SourceBuild {
|
||||||
"uv-setuptools-{}.lock",
|
"uv-setuptools-{}.lock",
|
||||||
cache_digest(&canonical_source_path)
|
cache_digest(&canonical_source_path)
|
||||||
));
|
));
|
||||||
source_tree_lock =
|
source_tree_lock = LockedFile::acquire(lock_path, self.source_tree.to_string_lossy())
|
||||||
Some(LockedFile::acquire(lock_path, self.source_tree.to_string_lossy()).await?);
|
.await
|
||||||
|
.inspect_err(|err| {
|
||||||
|
warn!("Failed to acquire build lock: {err}");
|
||||||
|
})
|
||||||
|
.ok();
|
||||||
}
|
}
|
||||||
Ok(source_tree_lock)
|
Ok(source_tree_lock)
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
[package]
|
[package]
|
||||||
name = "uv-build"
|
name = "uv-build"
|
||||||
version = "0.7.18"
|
version = "0.7.20"
|
||||||
edition.workspace = true
|
edition.workspace = true
|
||||||
rust-version.workspace = true
|
rust-version.workspace = true
|
||||||
homepage.workspace = true
|
homepage.workspace = true
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
[project]
|
[project]
|
||||||
name = "uv-build"
|
name = "uv-build"
|
||||||
version = "0.7.18"
|
version = "0.7.20"
|
||||||
description = "The uv build backend"
|
description = "The uv build backend"
|
||||||
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
|
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
|
||||||
requires-python = ">=3.8"
|
requires-python = ">=3.8"
|
||||||
|
|
|
@ -3632,7 +3632,8 @@ pub struct AddArgs {
|
||||||
long,
|
long,
|
||||||
conflicts_with = "dev",
|
conflicts_with = "dev",
|
||||||
conflicts_with = "optional",
|
conflicts_with = "optional",
|
||||||
conflicts_with = "package"
|
conflicts_with = "package",
|
||||||
|
conflicts_with = "workspace"
|
||||||
)]
|
)]
|
||||||
pub script: Option<PathBuf>,
|
pub script: Option<PathBuf>,
|
||||||
|
|
||||||
|
@ -3648,6 +3649,13 @@ pub struct AddArgs {
|
||||||
value_parser = parse_maybe_string,
|
value_parser = parse_maybe_string,
|
||||||
)]
|
)]
|
||||||
pub python: Option<Maybe<String>>,
|
pub python: Option<Maybe<String>>,
|
||||||
|
|
||||||
|
/// Add the dependency as a workspace member.
|
||||||
|
///
|
||||||
|
/// When used with a path dependency, the package will be added to the workspace's `members`
|
||||||
|
/// list in the root `pyproject.toml` file.
|
||||||
|
#[arg(long)]
|
||||||
|
pub workspace: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Args)]
|
#[derive(Args)]
|
||||||
|
|
|
@ -982,6 +982,45 @@ mod tests {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_redirect_preserves_fragment() -> Result<()> {
|
||||||
|
for status in &[301, 302, 303, 307, 308] {
|
||||||
|
let server = MockServer::start().await;
|
||||||
|
Mock::given(method("GET"))
|
||||||
|
.respond_with(
|
||||||
|
ResponseTemplate::new(*status)
|
||||||
|
.insert_header("location", format!("{}/redirect", server.uri())),
|
||||||
|
)
|
||||||
|
.mount(&server)
|
||||||
|
.await;
|
||||||
|
|
||||||
|
let request = Client::new()
|
||||||
|
.get(format!("{}#fragment", server.uri()))
|
||||||
|
.build()
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let response = Client::builder()
|
||||||
|
.redirect(reqwest::redirect::Policy::none())
|
||||||
|
.build()
|
||||||
|
.unwrap()
|
||||||
|
.execute(request.try_clone().unwrap())
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let redirect_request =
|
||||||
|
request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)?
|
||||||
|
.unwrap();
|
||||||
|
assert!(
|
||||||
|
redirect_request
|
||||||
|
.url()
|
||||||
|
.fragment()
|
||||||
|
.is_some_and(|fragment| fragment == "fragment")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_redirect_removes_authorization_header_on_cross_origin() -> Result<()> {
|
async fn test_redirect_removes_authorization_header_on_cross_origin() -> Result<()> {
|
||||||
for status in &[301, 302, 303, 307, 308] {
|
for status in &[301, 302, 303, 307, 308] {
|
||||||
|
|
|
@ -1416,44 +1416,6 @@ mod tests {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_redirect_preserve_fragment() -> Result<(), Error> {
|
|
||||||
let redirect_server = MockServer::start().await;
|
|
||||||
|
|
||||||
// Configure the redirect server to respond with a 307 with a relative URL.
|
|
||||||
Mock::given(method("GET"))
|
|
||||||
.respond_with(ResponseTemplate::new(307).insert_header("Location", "/foo".to_string()))
|
|
||||||
.mount(&redirect_server)
|
|
||||||
.await;
|
|
||||||
|
|
||||||
Mock::given(method("GET"))
|
|
||||||
.and(path_regex("/foo"))
|
|
||||||
.respond_with(ResponseTemplate::new(200))
|
|
||||||
.mount(&redirect_server)
|
|
||||||
.await;
|
|
||||||
|
|
||||||
let cache = Cache::temp()?;
|
|
||||||
let registry_client = RegistryClientBuilder::new(cache).build();
|
|
||||||
let client = registry_client.cached_client().uncached();
|
|
||||||
|
|
||||||
let mut url = DisplaySafeUrl::parse(&redirect_server.uri())?;
|
|
||||||
url.set_fragment(Some("fragment"));
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
client
|
|
||||||
.for_host(&url)
|
|
||||||
.get(Url::from(url.clone()))
|
|
||||||
.send()
|
|
||||||
.await?
|
|
||||||
.url()
|
|
||||||
.to_string(),
|
|
||||||
format!("{}/foo#fragment", redirect_server.uri()),
|
|
||||||
"Requests should preserve fragment"
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn ignore_failing_files() {
|
fn ignore_failing_files() {
|
||||||
// 1.7.7 has an invalid requires-python field (double comma), 1.7.8 is valid
|
// 1.7.7 has an invalid requires-python field (double comma), 1.7.8 is valid
|
||||||
|
|
|
@ -1,4 +1,6 @@
|
||||||
use std::{borrow::Cow, str::FromStr};
|
#[cfg(feature = "schemars")]
|
||||||
|
use std::borrow::Cow;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
use uv_pep508::PackageName;
|
use uv_pep508::PackageName;
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
use std::str::FromStr;
|
#[cfg(feature = "schemars")]
|
||||||
use std::{borrow::Cow, fmt::Formatter};
|
use std::borrow::Cow;
|
||||||
|
use std::{fmt::Formatter, str::FromStr};
|
||||||
|
|
||||||
use uv_pep440::{Version, VersionSpecifier, VersionSpecifiers, VersionSpecifiersParseError};
|
use uv_pep440::{Version, VersionSpecifier, VersionSpecifiers, VersionSpecifiersParseError};
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,7 @@
|
||||||
use serde::{Deserialize, Deserializer};
|
use serde::{Deserialize, Deserializer};
|
||||||
use std::{borrow::Cow, str::FromStr};
|
#[cfg(feature = "schemars")]
|
||||||
|
use std::borrow::Cow;
|
||||||
|
use std::str::FromStr;
|
||||||
use url::Url;
|
use url::Url;
|
||||||
|
|
||||||
/// A host specification (wildcard, or host, with optional scheme and/or port) for which
|
/// A host specification (wildcard, or host, with optional scheme and/or port) for which
|
||||||
|
|
|
@ -11,7 +11,7 @@ use crate::ROOT_DIR;
|
||||||
use crate::generate_all::Mode;
|
use crate::generate_all::Mode;
|
||||||
|
|
||||||
/// Contains current supported targets
|
/// Contains current supported targets
|
||||||
const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250630/cpython-unix/targets.yml";
|
const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250702/cpython-unix/targets.yml";
|
||||||
|
|
||||||
#[derive(clap::Args)]
|
#[derive(clap::Args)]
|
||||||
pub(crate) struct Args {
|
pub(crate) struct Args {
|
||||||
|
@ -130,7 +130,7 @@ async fn generate() -> Result<String> {
|
||||||
output.push_str("//! DO NOT EDIT\n");
|
output.push_str("//! DO NOT EDIT\n");
|
||||||
output.push_str("//!\n");
|
output.push_str("//!\n");
|
||||||
output.push_str("//! Generated with `cargo run dev generate-sysconfig-metadata`\n");
|
output.push_str("//! Generated with `cargo run dev generate-sysconfig-metadata`\n");
|
||||||
output.push_str("//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250630/cpython-unix/targets.yml>\n");
|
output.push_str("//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250702/cpython-unix/targets.yml>\n");
|
||||||
output.push_str("//!\n");
|
output.push_str("//!\n");
|
||||||
|
|
||||||
// Disable clippy/fmt
|
// Disable clippy/fmt
|
||||||
|
|
|
@ -453,12 +453,6 @@ impl BuildContext for BuildDispatch<'_> {
|
||||||
build_kind: BuildKind,
|
build_kind: BuildKind,
|
||||||
version_id: Option<&'data str>,
|
version_id: Option<&'data str>,
|
||||||
) -> Result<Option<DistFilename>, BuildDispatchError> {
|
) -> Result<Option<DistFilename>, BuildDispatchError> {
|
||||||
// Direct builds are a preview feature with the uv build backend.
|
|
||||||
if self.preview.is_disabled() {
|
|
||||||
trace!("Preview is disabled, not checking for direct build");
|
|
||||||
return Ok(None);
|
|
||||||
}
|
|
||||||
|
|
||||||
let source_tree = if let Some(subdir) = subdirectory {
|
let source_tree = if let Some(subdir) = subdirectory {
|
||||||
source.join(subdir)
|
source.join(subdir)
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -169,26 +169,6 @@ impl UrlString {
|
||||||
.map(|(path, _)| Cow::Owned(UrlString(SmallString::from(path))))
|
.map(|(path, _)| Cow::Owned(UrlString(SmallString::from(path))))
|
||||||
.unwrap_or(Cow::Borrowed(self))
|
.unwrap_or(Cow::Borrowed(self))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Return the [`UrlString`] (as a [`Cow`]) with trailing slash removed.
|
|
||||||
///
|
|
||||||
/// This matches the semantics of [`Url::pop_if_empty`], which will not trim a trailing slash if
|
|
||||||
/// it's the only path segment, e.g., `https://example.com/` would be unchanged.
|
|
||||||
#[must_use]
|
|
||||||
pub fn without_trailing_slash(&self) -> Cow<'_, Self> {
|
|
||||||
self.as_ref()
|
|
||||||
.strip_suffix('/')
|
|
||||||
.filter(|path| {
|
|
||||||
// Only strip the trailing slash if there's _another_ trailing slash that isn't a
|
|
||||||
// part of the scheme.
|
|
||||||
path.split_once("://")
|
|
||||||
.map(|(_scheme, rest)| rest)
|
|
||||||
.unwrap_or(path)
|
|
||||||
.contains('/')
|
|
||||||
})
|
|
||||||
.map(|path| Cow::Owned(UrlString(SmallString::from(path))))
|
|
||||||
.unwrap_or(Cow::Borrowed(self))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AsRef<str> for UrlString {
|
impl AsRef<str> for UrlString {
|
||||||
|
@ -283,38 +263,4 @@ mod tests {
|
||||||
);
|
);
|
||||||
assert!(matches!(url.without_fragment(), Cow::Owned(_)));
|
assert!(matches!(url.without_fragment(), Cow::Owned(_)));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn without_trailing_slash() {
|
|
||||||
// Borrows a URL without a slash
|
|
||||||
let url = UrlString("https://example.com/path".into());
|
|
||||||
assert_eq!(&*url.without_trailing_slash(), &url);
|
|
||||||
assert!(matches!(url.without_trailing_slash(), Cow::Borrowed(_)));
|
|
||||||
|
|
||||||
// Removes the trailing slash if present on the URL
|
|
||||||
let url = UrlString("https://example.com/path/".into());
|
|
||||||
assert_eq!(
|
|
||||||
&*url.without_trailing_slash(),
|
|
||||||
&UrlString("https://example.com/path".into())
|
|
||||||
);
|
|
||||||
assert!(matches!(url.without_trailing_slash(), Cow::Owned(_)));
|
|
||||||
|
|
||||||
// Does not remove a trailing slash if it's the only path segment
|
|
||||||
let url = UrlString("https://example.com/".into());
|
|
||||||
assert_eq!(&*url.without_trailing_slash(), &url);
|
|
||||||
assert!(matches!(url.without_trailing_slash(), Cow::Borrowed(_)));
|
|
||||||
|
|
||||||
// Does not remove a trailing slash if it's the only path segment with a missing scheme
|
|
||||||
let url = UrlString("example.com/".into());
|
|
||||||
assert_eq!(&*url.without_trailing_slash(), &url);
|
|
||||||
assert!(matches!(url.without_trailing_slash(), Cow::Borrowed(_)));
|
|
||||||
|
|
||||||
// Removes the trailing slash when the scheme is missing
|
|
||||||
let url = UrlString("example.com/path/".into());
|
|
||||||
assert_eq!(
|
|
||||||
&*url.without_trailing_slash(),
|
|
||||||
&UrlString("example.com/path".into())
|
|
||||||
);
|
|
||||||
assert!(matches!(url.without_trailing_slash(), Cow::Owned(_)));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -38,8 +38,6 @@ impl IndexUrl {
|
||||||
///
|
///
|
||||||
/// If no root directory is provided, relative paths are resolved against the current working
|
/// If no root directory is provided, relative paths are resolved against the current working
|
||||||
/// directory.
|
/// directory.
|
||||||
///
|
|
||||||
/// Normalizes non-file URLs by removing trailing slashes for consistency.
|
|
||||||
pub fn parse(path: &str, root_dir: Option<&Path>) -> Result<Self, IndexUrlError> {
|
pub fn parse(path: &str, root_dir: Option<&Path>) -> Result<Self, IndexUrlError> {
|
||||||
let url = match split_scheme(path) {
|
let url = match split_scheme(path) {
|
||||||
Some((scheme, ..)) => {
|
Some((scheme, ..)) => {
|
||||||
|
@ -258,23 +256,16 @@ impl<'de> serde::de::Deserialize<'de> for IndexUrl {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<VerbatimUrl> for IndexUrl {
|
impl From<VerbatimUrl> for IndexUrl {
|
||||||
fn from(mut url: VerbatimUrl) -> Self {
|
fn from(url: VerbatimUrl) -> Self {
|
||||||
if url.scheme() == "file" {
|
if url.scheme() == "file" {
|
||||||
Self::Path(Arc::new(url))
|
Self::Path(Arc::new(url))
|
||||||
} else {
|
} else if *url.raw() == *PYPI_URL {
|
||||||
// Remove trailing slashes for consistency. They'll be re-added if necessary when
|
|
||||||
// querying the Simple API.
|
|
||||||
if let Ok(mut path_segments) = url.raw_mut().path_segments_mut() {
|
|
||||||
path_segments.pop_if_empty();
|
|
||||||
}
|
|
||||||
if *url.raw() == *PYPI_URL {
|
|
||||||
Self::Pypi(Arc::new(url))
|
Self::Pypi(Arc::new(url))
|
||||||
} else {
|
} else {
|
||||||
Self::Url(Arc::new(url))
|
Self::Url(Arc::new(url))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
impl From<IndexUrl> for DisplaySafeUrl {
|
impl From<IndexUrl> for DisplaySafeUrl {
|
||||||
fn from(index: IndexUrl) -> Self {
|
fn from(index: IndexUrl) -> Self {
|
||||||
|
@ -462,6 +453,19 @@ impl<'a> IndexLocations {
|
||||||
indexes
|
indexes
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Add all authenticated sources to the cache.
|
||||||
|
pub fn cache_index_credentials(&self) {
|
||||||
|
for index in self.allowed_indexes() {
|
||||||
|
if let Some(credentials) = index.credentials() {
|
||||||
|
let credentials = Arc::new(credentials);
|
||||||
|
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
||||||
|
if let Some(root_url) = index.root_url() {
|
||||||
|
uv_auth::store_credentials(&root_url, credentials.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<&IndexLocations> for uv_auth::Indexes {
|
impl From<&IndexLocations> for uv_auth::Indexes {
|
||||||
|
|
|
@ -365,7 +365,7 @@ impl InstalledDist {
|
||||||
pub fn installer(&self) -> Result<Option<String>, InstalledDistError> {
|
pub fn installer(&self) -> Result<Option<String>, InstalledDistError> {
|
||||||
let path = self.install_path().join("INSTALLER");
|
let path = self.install_path().join("INSTALLER");
|
||||||
match fs::read_to_string(path) {
|
match fs::read_to_string(path) {
|
||||||
Ok(installer) => Ok(Some(installer)),
|
Ok(installer) => Ok(Some(installer.trim().to_owned())),
|
||||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
|
Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
|
||||||
Err(err) => Err(err.into()),
|
Err(err) => Err(err.into()),
|
||||||
}
|
}
|
||||||
|
|
|
@ -3,7 +3,9 @@
|
||||||
//! flags set.
|
//! flags set.
|
||||||
|
|
||||||
use serde::{Deserialize, Deserializer, Serialize};
|
use serde::{Deserialize, Deserializer, Serialize};
|
||||||
use std::{borrow::Cow, path::Path};
|
#[cfg(feature = "schemars")]
|
||||||
|
use std::borrow::Cow;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
use crate::{Index, IndexUrl};
|
use crate::{Index, IndexUrl};
|
||||||
|
|
||||||
|
|
|
@ -5,11 +5,10 @@ use version_ranges::Ranges;
|
||||||
use uv_distribution_filename::WheelFilename;
|
use uv_distribution_filename::WheelFilename;
|
||||||
use uv_pep440::{
|
use uv_pep440::{
|
||||||
LowerBound, UpperBound, Version, VersionSpecifier, VersionSpecifiers,
|
LowerBound, UpperBound, Version, VersionSpecifier, VersionSpecifiers,
|
||||||
release_specifier_to_range, release_specifiers_to_ranges,
|
release_specifiers_to_ranges,
|
||||||
};
|
};
|
||||||
use uv_pep508::{MarkerExpression, MarkerTree, MarkerValueVersion};
|
use uv_pep508::{MarkerExpression, MarkerTree, MarkerValueVersion};
|
||||||
use uv_platform_tags::{AbiTag, LanguageTag};
|
use uv_platform_tags::{AbiTag, LanguageTag};
|
||||||
use uv_warnings::warn_user_once;
|
|
||||||
|
|
||||||
/// The `Requires-Python` requirement specifier.
|
/// The `Requires-Python` requirement specifier.
|
||||||
///
|
///
|
||||||
|
@ -67,27 +66,7 @@ impl RequiresPython {
|
||||||
) -> Option<Self> {
|
) -> Option<Self> {
|
||||||
// Convert to PubGrub range and perform an intersection.
|
// Convert to PubGrub range and perform an intersection.
|
||||||
let range = specifiers
|
let range = specifiers
|
||||||
.map(|specs| {
|
.map(|specs| release_specifiers_to_ranges(specs.clone()))
|
||||||
// Warn if there’s exactly one `~=` specifier without a patch.
|
|
||||||
if let [spec] = &specs[..] {
|
|
||||||
if spec.is_tilde_without_patch() {
|
|
||||||
if let Some((lo_b, hi_b)) = release_specifier_to_range(spec.clone(), false)
|
|
||||||
.bounding_range()
|
|
||||||
.map(|(l, u)| (l.cloned(), u.cloned()))
|
|
||||||
{
|
|
||||||
let lo_spec = LowerBound::new(lo_b).specifier().unwrap();
|
|
||||||
let hi_spec = UpperBound::new(hi_b).specifier().unwrap();
|
|
||||||
warn_user_once!(
|
|
||||||
"The release specifier (`{spec}`) contains a compatible release \
|
|
||||||
match without a patch version. This will be interpreted as \
|
|
||||||
`{lo_spec}, {hi_spec}`. Did you mean `{spec}.0` to freeze the \
|
|
||||||
minor version?"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
release_specifiers_to_ranges(specs.clone())
|
|
||||||
})
|
|
||||||
.reduce(|acc, r| acc.intersection(&r))?;
|
.reduce(|acc, r| acc.intersection(&r))?;
|
||||||
|
|
||||||
// If the intersection is empty, return `None`.
|
// If the intersection is empty, return `None`.
|
||||||
|
|
|
@ -1,4 +1,6 @@
|
||||||
use std::{borrow::Cow, ops::Deref};
|
#[cfg(feature = "schemars")]
|
||||||
|
use std::borrow::Cow;
|
||||||
|
use std::ops::Deref;
|
||||||
|
|
||||||
use http::StatusCode;
|
use http::StatusCode;
|
||||||
use rustc_hash::FxHashSet;
|
use rustc_hash::FxHashSet;
|
||||||
|
|
|
@ -2,11 +2,11 @@ use std::{ffi::OsString, path::PathBuf};
|
||||||
|
|
||||||
#[derive(Debug, thiserror::Error)]
|
#[derive(Debug, thiserror::Error)]
|
||||||
pub enum Error {
|
pub enum Error {
|
||||||
#[error(transparent)]
|
#[error("Failed to read from zip file")]
|
||||||
Zip(#[from] zip::result::ZipError),
|
Zip(#[from] zip::result::ZipError),
|
||||||
#[error(transparent)]
|
#[error("Failed to read from zip file")]
|
||||||
AsyncZip(#[from] async_zip::error::ZipError),
|
AsyncZip(#[from] async_zip::error::ZipError),
|
||||||
#[error(transparent)]
|
#[error("I/O operation failed during extraction")]
|
||||||
Io(#[from] std::io::Error),
|
Io(#[from] std::io::Error),
|
||||||
#[error(
|
#[error(
|
||||||
"The top-level of the archive must only contain a list directory, but it contains: {0:?}"
|
"The top-level of the archive must only contain a list directory, but it contains: {0:?}"
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::io;
|
use std::io;
|
||||||
use std::io::{BufReader, Read, Seek, Write};
|
use std::io::{BufReader, Read, Write};
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
use data_encoding::BASE64URL_NOPAD;
|
use data_encoding::BASE64URL_NOPAD;
|
||||||
|
@ -144,7 +144,7 @@ fn format_shebang(executable: impl AsRef<Path>, os_name: &str, relocatable: bool
|
||||||
///
|
///
|
||||||
/// <https://github.com/pypa/pip/blob/76e82a43f8fb04695e834810df64f2d9a2ff6020/src/pip/_vendor/distlib/scripts.py#L121-L126>
|
/// <https://github.com/pypa/pip/blob/76e82a43f8fb04695e834810df64f2d9a2ff6020/src/pip/_vendor/distlib/scripts.py#L121-L126>
|
||||||
fn get_script_executable(python_executable: &Path, is_gui: bool) -> PathBuf {
|
fn get_script_executable(python_executable: &Path, is_gui: bool) -> PathBuf {
|
||||||
// Only check for pythonw.exe on Windows
|
// Only check for `pythonw.exe` on Windows.
|
||||||
if cfg!(windows) && is_gui {
|
if cfg!(windows) && is_gui {
|
||||||
python_executable
|
python_executable
|
||||||
.file_name()
|
.file_name()
|
||||||
|
@ -431,22 +431,41 @@ fn install_script(
|
||||||
Err(err) => return Err(Error::Io(err)),
|
Err(err) => return Err(Error::Io(err)),
|
||||||
}
|
}
|
||||||
let size_and_encoded_hash = if start == placeholder_python {
|
let size_and_encoded_hash = if start == placeholder_python {
|
||||||
let is_gui = {
|
// Read the rest of the first line, one byte at a time, until we hit a newline.
|
||||||
let mut buf = vec![0; 1];
|
let mut is_gui = false;
|
||||||
script.read_exact(&mut buf)?;
|
let mut first = true;
|
||||||
if buf == b"w" {
|
let mut byte = [0u8; 1];
|
||||||
true
|
loop {
|
||||||
} else {
|
match script.read_exact(&mut byte) {
|
||||||
script.seek_relative(-1)?;
|
Ok(()) => {
|
||||||
false
|
if byte[0] == b'\n' || byte[0] == b'\r' {
|
||||||
|
break;
|
||||||
}
|
}
|
||||||
};
|
|
||||||
|
// Check if this is a GUI script (starts with 'w').
|
||||||
|
if first {
|
||||||
|
is_gui = byte[0] == b'w';
|
||||||
|
first = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(err) if err.kind() == io::ErrorKind::UnexpectedEof => break,
|
||||||
|
Err(err) => return Err(Error::Io(err)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
let executable = get_script_executable(&layout.sys_executable, is_gui);
|
let executable = get_script_executable(&layout.sys_executable, is_gui);
|
||||||
let executable = get_relocatable_executable(executable, layout, relocatable)?;
|
let executable = get_relocatable_executable(executable, layout, relocatable)?;
|
||||||
let start = format_shebang(&executable, &layout.os_name, relocatable)
|
let mut start = format_shebang(&executable, &layout.os_name, relocatable)
|
||||||
.as_bytes()
|
.as_bytes()
|
||||||
.to_vec();
|
.to_vec();
|
||||||
|
|
||||||
|
// Use appropriate line ending for the platform.
|
||||||
|
if layout.os_name == "nt" {
|
||||||
|
start.extend_from_slice(b"\r\n");
|
||||||
|
} else {
|
||||||
|
start.push(b'\n');
|
||||||
|
}
|
||||||
|
|
||||||
let mut target = uv_fs::tempfile_in(&layout.scheme.scripts)?;
|
let mut target = uv_fs::tempfile_in(&layout.scheme.scripts)?;
|
||||||
let size_and_encoded_hash = copy_and_hash(&mut start.chain(script), &mut target)?;
|
let size_and_encoded_hash = copy_and_hash(&mut start.chain(script), &mut target)?;
|
||||||
|
|
||||||
|
|
|
@ -34,7 +34,7 @@ pub use {
|
||||||
VersionPatternParseError,
|
VersionPatternParseError,
|
||||||
},
|
},
|
||||||
version_specifier::{
|
version_specifier::{
|
||||||
VersionSpecifier, VersionSpecifierBuildError, VersionSpecifiers,
|
TildeVersionSpecifier, VersionSpecifier, VersionSpecifierBuildError, VersionSpecifiers,
|
||||||
VersionSpecifiersParseError,
|
VersionSpecifiersParseError,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
|
@ -665,11 +665,6 @@ impl VersionSpecifier {
|
||||||
| Operator::NotEqual => false,
|
| Operator::NotEqual => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns true if this is a `~=` specifier without a patch version (e.g. `~=3.11`).
|
|
||||||
pub fn is_tilde_without_patch(&self) -> bool {
|
|
||||||
self.operator == Operator::TildeEqual && self.version.release().len() == 2
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FromStr for VersionSpecifier {
|
impl FromStr for VersionSpecifier {
|
||||||
|
@ -893,6 +888,90 @@ pub(crate) fn parse_version_specifiers(
|
||||||
Ok(version_ranges)
|
Ok(version_ranges)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// A simple `~=` version specifier with a major, minor and (optional) patch version, e.g., `~=3.13`
|
||||||
|
/// or `~=3.13.0`.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct TildeVersionSpecifier<'a> {
|
||||||
|
inner: Cow<'a, VersionSpecifier>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> TildeVersionSpecifier<'a> {
|
||||||
|
/// Create a new [`TildeVersionSpecifier`] from a [`VersionSpecifier`] value.
|
||||||
|
///
|
||||||
|
/// If a [`Operator::TildeEqual`] is not used, or the version includes more than minor and patch
|
||||||
|
/// segments, this will return [`None`].
|
||||||
|
pub fn from_specifier(specifier: VersionSpecifier) -> Option<TildeVersionSpecifier<'a>> {
|
||||||
|
TildeVersionSpecifier::new(Cow::Owned(specifier))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a new [`TildeVersionSpecifier`] from a [`VersionSpecifier`] reference.
|
||||||
|
///
|
||||||
|
/// See [`TildeVersionSpecifier::from_specifier`].
|
||||||
|
pub fn from_specifier_ref(
|
||||||
|
specifier: &'a VersionSpecifier,
|
||||||
|
) -> Option<TildeVersionSpecifier<'a>> {
|
||||||
|
TildeVersionSpecifier::new(Cow::Borrowed(specifier))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn new(specifier: Cow<'a, VersionSpecifier>) -> Option<Self> {
|
||||||
|
if specifier.operator != Operator::TildeEqual {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
if specifier.version().release().len() < 2 || specifier.version().release().len() > 3 {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
if specifier.version().any_prerelease()
|
||||||
|
|| specifier.version().is_local()
|
||||||
|
|| specifier.version().is_post()
|
||||||
|
{
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
Some(Self { inner: specifier })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether a patch version is present in this tilde version specifier.
|
||||||
|
pub fn has_patch(&self) -> bool {
|
||||||
|
self.inner.version.release().len() == 3
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Construct the lower and upper bounding version specifiers for this tilde version specifier,
|
||||||
|
/// e.g., for `~=3.13` this would return `>=3.13` and `<4` and for `~=3.13.0` it would
|
||||||
|
/// return `>=3.13.0` and `<3.14`.
|
||||||
|
pub fn bounding_specifiers(&self) -> (VersionSpecifier, VersionSpecifier) {
|
||||||
|
let release = self.inner.version().release();
|
||||||
|
let lower = self.inner.version.clone();
|
||||||
|
let upper = if self.has_patch() {
|
||||||
|
Version::new([release[0], release[1] + 1])
|
||||||
|
} else {
|
||||||
|
Version::new([release[0] + 1])
|
||||||
|
};
|
||||||
|
(
|
||||||
|
VersionSpecifier::greater_than_equal_version(lower),
|
||||||
|
VersionSpecifier::less_than_version(upper),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Construct a new tilde `VersionSpecifier` with the given patch version appended.
|
||||||
|
pub fn with_patch_version(&self, patch: u64) -> TildeVersionSpecifier {
|
||||||
|
let mut release = self.inner.version.release().to_vec();
|
||||||
|
if self.has_patch() {
|
||||||
|
release.pop();
|
||||||
|
}
|
||||||
|
release.push(patch);
|
||||||
|
TildeVersionSpecifier::from_specifier(
|
||||||
|
VersionSpecifier::from_version(Operator::TildeEqual, Version::new(release))
|
||||||
|
.expect("We should always derive a valid new version specifier"),
|
||||||
|
)
|
||||||
|
.expect("We should always derive a new tilde version specifier")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for TildeVersionSpecifier<'_> {
|
||||||
|
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||||
|
write!(f, "{}", self.inner)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use std::{cmp::Ordering, str::FromStr};
|
use std::{cmp::Ordering, str::FromStr};
|
||||||
|
|
|
@ -18,11 +18,16 @@ use uv_redacted::DisplaySafeUrl;
|
||||||
use crate::Pep508Url;
|
use crate::Pep508Url;
|
||||||
|
|
||||||
/// A wrapper around [`Url`] that preserves the original string.
|
/// A wrapper around [`Url`] that preserves the original string.
|
||||||
|
///
|
||||||
|
/// The original string is not preserved after serialization/deserialization.
|
||||||
#[derive(Debug, Clone, Eq)]
|
#[derive(Debug, Clone, Eq)]
|
||||||
pub struct VerbatimUrl {
|
pub struct VerbatimUrl {
|
||||||
/// The parsed URL.
|
/// The parsed URL.
|
||||||
url: DisplaySafeUrl,
|
url: DisplaySafeUrl,
|
||||||
/// The URL as it was provided by the user.
|
/// The URL as it was provided by the user.
|
||||||
|
///
|
||||||
|
/// Even if originally set, this will be [`None`] after
|
||||||
|
/// serialization/deserialization.
|
||||||
given: Option<ArcStr>,
|
given: Option<ArcStr>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -166,11 +171,6 @@ impl VerbatimUrl {
|
||||||
&self.url
|
&self.url
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Return a mutable reference to the underlying [`DisplaySafeUrl`].
|
|
||||||
pub fn raw_mut(&mut self) -> &mut DisplaySafeUrl {
|
|
||||||
&mut self.url
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Convert a [`VerbatimUrl`] into a [`DisplaySafeUrl`].
|
/// Convert a [`VerbatimUrl`] into a [`DisplaySafeUrl`].
|
||||||
pub fn to_url(&self) -> DisplaySafeUrl {
|
pub fn to_url(&self) -> DisplaySafeUrl {
|
||||||
self.url.clone()
|
self.url.clone()
|
||||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -112,6 +112,7 @@ impl PythonInstallation {
|
||||||
&& client_builder.connectivity.is_online();
|
&& client_builder.connectivity.is_online();
|
||||||
|
|
||||||
if !downloads_enabled {
|
if !downloads_enabled {
|
||||||
|
debug!("Python downloads are disabled. Skipping check for available downloads...");
|
||||||
return Err(err);
|
return Err(err);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
#[cfg(feature = "schemars")]
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
use std::fmt::{Display, Formatter};
|
use std::fmt::{Display, Formatter};
|
||||||
use std::ops::Deref;
|
use std::ops::Deref;
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
//! DO NOT EDIT
|
//! DO NOT EDIT
|
||||||
//!
|
//!
|
||||||
//! Generated with `cargo run dev generate-sysconfig-metadata`
|
//! Generated with `cargo run dev generate-sysconfig-metadata`
|
||||||
//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250630/cpython-unix/targets.yml>
|
//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250702/cpython-unix/targets.yml>
|
||||||
//!
|
//!
|
||||||
#![allow(clippy::all)]
|
#![allow(clippy::all)]
|
||||||
#![cfg_attr(any(), rustfmt::skip)]
|
#![cfg_attr(any(), rustfmt::skip)]
|
||||||
|
@ -15,7 +15,6 @@ use crate::sysconfig::replacements::{ReplacementEntry, ReplacementMode};
|
||||||
pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock<BTreeMap<String, Vec<ReplacementEntry>>> = LazyLock::new(|| {
|
pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock<BTreeMap<String, Vec<ReplacementEntry>>> = LazyLock::new(|| {
|
||||||
BTreeMap::from_iter([
|
BTreeMap::from_iter([
|
||||||
("BLDSHARED".to_string(), vec![
|
("BLDSHARED".to_string(), vec![
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/aarch64-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() },
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() },
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
||||||
|
@ -28,7 +27,6 @@ pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock<BTreeMap<String, Vec<Replac
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "musl-clang".to_string() }, to: "cc".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "musl-clang".to_string() }, to: "cc".to_string() },
|
||||||
]),
|
]),
|
||||||
("CC".to_string(), vec![
|
("CC".to_string(), vec![
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/aarch64-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() },
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() },
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
||||||
|
@ -41,7 +39,6 @@ pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock<BTreeMap<String, Vec<Replac
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "musl-clang".to_string() }, to: "cc".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "musl-clang".to_string() }, to: "cc".to_string() },
|
||||||
]),
|
]),
|
||||||
("CXX".to_string(), vec![
|
("CXX".to_string(), vec![
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/aarch64-linux-gnu-g++".to_string() }, to: "c++".to_string() },
|
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-g++".to_string() }, to: "c++".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-g++".to_string() }, to: "c++".to_string() },
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-g++".to_string() }, to: "c++".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-g++".to_string() }, to: "c++".to_string() },
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-g++".to_string() }, to: "c++".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-g++".to_string() }, to: "c++".to_string() },
|
||||||
|
@ -53,7 +50,6 @@ pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock<BTreeMap<String, Vec<Replac
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "clang++".to_string() }, to: "c++".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "clang++".to_string() }, to: "c++".to_string() },
|
||||||
]),
|
]),
|
||||||
("LDCXXSHARED".to_string(), vec![
|
("LDCXXSHARED".to_string(), vec![
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/aarch64-linux-gnu-g++".to_string() }, to: "c++".to_string() },
|
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-g++".to_string() }, to: "c++".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-g++".to_string() }, to: "c++".to_string() },
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-g++".to_string() }, to: "c++".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-g++".to_string() }, to: "c++".to_string() },
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-g++".to_string() }, to: "c++".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-g++".to_string() }, to: "c++".to_string() },
|
||||||
|
@ -65,7 +61,6 @@ pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock<BTreeMap<String, Vec<Replac
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "clang++".to_string() }, to: "c++".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "clang++".to_string() }, to: "c++".to_string() },
|
||||||
]),
|
]),
|
||||||
("LDSHARED".to_string(), vec![
|
("LDSHARED".to_string(), vec![
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/aarch64-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() },
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() },
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
||||||
|
@ -78,7 +73,6 @@ pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock<BTreeMap<String, Vec<Replac
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "musl-clang".to_string() }, to: "cc".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "musl-clang".to_string() }, to: "cc".to_string() },
|
||||||
]),
|
]),
|
||||||
("LINKCC".to_string(), vec![
|
("LINKCC".to_string(), vec![
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/aarch64-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() },
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() },
|
||||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
||||||
|
|
|
@ -349,7 +349,7 @@ mod tests {
|
||||||
|
|
||||||
// Cross-compiles use GNU
|
// Cross-compiles use GNU
|
||||||
let sysconfigdata = [
|
let sysconfigdata = [
|
||||||
("CC", "/usr/bin/aarch64-linux-gnu-gcc"),
|
("CC", "/usr/bin/riscv64-linux-gnu-gcc"),
|
||||||
("CXX", "/usr/bin/x86_64-linux-gnu-g++"),
|
("CXX", "/usr/bin/x86_64-linux-gnu-g++"),
|
||||||
]
|
]
|
||||||
.into_iter()
|
.into_iter()
|
||||||
|
|
|
@ -13,10 +13,9 @@ use uv_normalize::PackageName;
|
||||||
use uv_pep440::Version;
|
use uv_pep440::Version;
|
||||||
use uv_types::InstalledPackagesProvider;
|
use uv_types::InstalledPackagesProvider;
|
||||||
|
|
||||||
use crate::preferences::{Entry, Preferences};
|
use crate::preferences::{Entry, PreferenceSource, Preferences};
|
||||||
use crate::prerelease::{AllowPrerelease, PrereleaseStrategy};
|
use crate::prerelease::{AllowPrerelease, PrereleaseStrategy};
|
||||||
use crate::resolution_mode::ResolutionStrategy;
|
use crate::resolution_mode::ResolutionStrategy;
|
||||||
use crate::universal_marker::UniversalMarker;
|
|
||||||
use crate::version_map::{VersionMap, VersionMapDistHandle};
|
use crate::version_map::{VersionMap, VersionMapDistHandle};
|
||||||
use crate::{Exclusions, Manifest, Options, ResolverEnvironment};
|
use crate::{Exclusions, Manifest, Options, ResolverEnvironment};
|
||||||
|
|
||||||
|
@ -188,7 +187,7 @@ impl CandidateSelector {
|
||||||
if index.is_some_and(|index| !entry.index().matches(index)) {
|
if index.is_some_and(|index| !entry.index().matches(index)) {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
Either::Left(std::iter::once((entry.marker(), entry.pin().version())))
|
Either::Left(std::iter::once((entry.pin().version(), entry.source())))
|
||||||
}
|
}
|
||||||
[..] => {
|
[..] => {
|
||||||
type Entries<'a> = SmallVec<[&'a Entry; 3]>;
|
type Entries<'a> = SmallVec<[&'a Entry; 3]>;
|
||||||
|
@ -219,7 +218,7 @@ impl CandidateSelector {
|
||||||
Either::Right(
|
Either::Right(
|
||||||
preferences
|
preferences
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|entry| (entry.marker(), entry.pin().version())),
|
.map(|entry| (entry.pin().version(), entry.source())),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -238,7 +237,7 @@ impl CandidateSelector {
|
||||||
/// Return the first preference that satisfies the current range and is allowed.
|
/// Return the first preference that satisfies the current range and is allowed.
|
||||||
fn get_preferred_from_iter<'a, InstalledPackages: InstalledPackagesProvider>(
|
fn get_preferred_from_iter<'a, InstalledPackages: InstalledPackagesProvider>(
|
||||||
&'a self,
|
&'a self,
|
||||||
preferences: impl Iterator<Item = (&'a UniversalMarker, &'a Version)>,
|
preferences: impl Iterator<Item = (&'a Version, PreferenceSource)>,
|
||||||
package_name: &'a PackageName,
|
package_name: &'a PackageName,
|
||||||
range: &Range<Version>,
|
range: &Range<Version>,
|
||||||
version_maps: &'a [VersionMap],
|
version_maps: &'a [VersionMap],
|
||||||
|
@ -246,7 +245,7 @@ impl CandidateSelector {
|
||||||
reinstall: bool,
|
reinstall: bool,
|
||||||
env: &ResolverEnvironment,
|
env: &ResolverEnvironment,
|
||||||
) -> Option<Candidate<'a>> {
|
) -> Option<Candidate<'a>> {
|
||||||
for (marker, version) in preferences {
|
for (version, source) in preferences {
|
||||||
// Respect the version range for this requirement.
|
// Respect the version range for this requirement.
|
||||||
if !range.contains(version) {
|
if !range.contains(version) {
|
||||||
continue;
|
continue;
|
||||||
|
@ -290,9 +289,14 @@ impl CandidateSelector {
|
||||||
let allow = match self.prerelease_strategy.allows(package_name, env) {
|
let allow = match self.prerelease_strategy.allows(package_name, env) {
|
||||||
AllowPrerelease::Yes => true,
|
AllowPrerelease::Yes => true,
|
||||||
AllowPrerelease::No => false,
|
AllowPrerelease::No => false,
|
||||||
// If the pre-release is "global" (i.e., provided via a lockfile, rather than
|
// If the pre-release was provided via an existing file, rather than from the
|
||||||
// a fork), accept it unless pre-releases are completely banned.
|
// current solve, accept it unless pre-releases are completely banned.
|
||||||
AllowPrerelease::IfNecessary => marker.is_true(),
|
AllowPrerelease::IfNecessary => match source {
|
||||||
|
PreferenceSource::Resolver => false,
|
||||||
|
PreferenceSource::Lock
|
||||||
|
| PreferenceSource::Environment
|
||||||
|
| PreferenceSource::RequirementsTxt => true,
|
||||||
|
},
|
||||||
};
|
};
|
||||||
if !allow {
|
if !allow {
|
||||||
continue;
|
continue;
|
||||||
|
|
|
@ -3,6 +3,7 @@ use std::fmt::Formatter;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use indexmap::IndexSet;
|
use indexmap::IndexSet;
|
||||||
|
use itertools::Itertools;
|
||||||
use owo_colors::OwoColorize;
|
use owo_colors::OwoColorize;
|
||||||
use pubgrub::{
|
use pubgrub::{
|
||||||
DefaultStringReporter, DerivationTree, Derived, External, Range, Ranges, Reporter, Term,
|
DefaultStringReporter, DerivationTree, Derived, External, Range, Ranges, Reporter, Term,
|
||||||
|
@ -156,7 +157,7 @@ impl<T> From<tokio::sync::mpsc::error::SendError<T>> for ResolveError {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) type ErrorTree = DerivationTree<PubGrubPackage, Range<Version>, UnavailableReason>;
|
pub type ErrorTree = DerivationTree<PubGrubPackage, Range<Version>, UnavailableReason>;
|
||||||
|
|
||||||
/// A wrapper around [`pubgrub::error::NoSolutionError`] that displays a resolution failure report.
|
/// A wrapper around [`pubgrub::error::NoSolutionError`] that displays a resolution failure report.
|
||||||
pub struct NoSolutionError {
|
pub struct NoSolutionError {
|
||||||
|
@ -367,6 +368,11 @@ impl NoSolutionError {
|
||||||
NoSolutionHeader::new(self.env.clone())
|
NoSolutionHeader::new(self.env.clone())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Get the conflict derivation tree for external analysis
|
||||||
|
pub fn derivation_tree(&self) -> &ErrorTree {
|
||||||
|
&self.error
|
||||||
|
}
|
||||||
|
|
||||||
/// Hint at limiting the resolver environment if universal resolution failed for a target
|
/// Hint at limiting the resolver environment if universal resolution failed for a target
|
||||||
/// that is not the current platform or not the current Python version.
|
/// that is not the current platform or not the current Python version.
|
||||||
fn hint_disjoint_targets(&self, f: &mut Formatter) -> std::fmt::Result {
|
fn hint_disjoint_targets(&self, f: &mut Formatter) -> std::fmt::Result {
|
||||||
|
@ -404,6 +410,15 @@ impl NoSolutionError {
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Get the packages that are involved in this error.
|
||||||
|
pub fn packages(&self) -> impl Iterator<Item = &PackageName> {
|
||||||
|
self.error
|
||||||
|
.packages()
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|p| p.name())
|
||||||
|
.unique()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl std::fmt::Debug for NoSolutionError {
|
impl std::fmt::Debug for NoSolutionError {
|
||||||
|
|
|
@ -1,4 +1,6 @@
|
||||||
use std::{borrow::Cow, str::FromStr};
|
#[cfg(feature = "schemars")]
|
||||||
|
use std::borrow::Cow;
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
use jiff::{Timestamp, ToSpan, tz::TimeZone};
|
use jiff::{Timestamp, ToSpan, tz::TimeZone};
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
pub use dependency_mode::DependencyMode;
|
pub use dependency_mode::DependencyMode;
|
||||||
pub use error::{NoSolutionError, NoSolutionHeader, ResolveError, SentinelRange};
|
pub use error::{ErrorTree, NoSolutionError, NoSolutionHeader, ResolveError, SentinelRange};
|
||||||
pub use exclude_newer::ExcludeNewer;
|
pub use exclude_newer::ExcludeNewer;
|
||||||
pub use exclusions::Exclusions;
|
pub use exclusions::Exclusions;
|
||||||
pub use flat_index::{FlatDistributions, FlatIndex};
|
pub use flat_index::{FlatDistributions, FlatIndex};
|
||||||
|
@ -54,7 +54,7 @@ mod options;
|
||||||
mod pins;
|
mod pins;
|
||||||
mod preferences;
|
mod preferences;
|
||||||
mod prerelease;
|
mod prerelease;
|
||||||
mod pubgrub;
|
pub mod pubgrub;
|
||||||
mod python_requirement;
|
mod python_requirement;
|
||||||
mod redirect;
|
mod redirect;
|
||||||
mod resolution;
|
mod resolution;
|
||||||
|
|
|
@ -1478,11 +1478,9 @@ impl Lock {
|
||||||
if let Source::Registry(index) = &package.id.source {
|
if let Source::Registry(index) = &package.id.source {
|
||||||
match index {
|
match index {
|
||||||
RegistrySource::Url(url) => {
|
RegistrySource::Url(url) => {
|
||||||
// Normalize URL before validating.
|
|
||||||
let url = url.without_trailing_slash();
|
|
||||||
if remotes
|
if remotes
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.is_some_and(|remotes| !remotes.contains(&url))
|
.is_some_and(|remotes| !remotes.contains(url))
|
||||||
{
|
{
|
||||||
let name = &package.id.name;
|
let name = &package.id.name;
|
||||||
let version = &package
|
let version = &package
|
||||||
|
@ -1490,11 +1488,7 @@ impl Lock {
|
||||||
.version
|
.version
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.expect("version for registry source");
|
.expect("version for registry source");
|
||||||
return Ok(SatisfiesResult::MissingRemoteIndex(
|
return Ok(SatisfiesResult::MissingRemoteIndex(name, version, url));
|
||||||
name,
|
|
||||||
version,
|
|
||||||
url.into_owned(),
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
RegistrySource::Path(path) => {
|
RegistrySource::Path(path) => {
|
||||||
|
@ -1799,7 +1793,7 @@ pub enum SatisfiesResult<'lock> {
|
||||||
/// The lockfile is missing a workspace member.
|
/// The lockfile is missing a workspace member.
|
||||||
MissingRoot(PackageName),
|
MissingRoot(PackageName),
|
||||||
/// The lockfile referenced a remote index that was not provided
|
/// The lockfile referenced a remote index that was not provided
|
||||||
MissingRemoteIndex(&'lock PackageName, &'lock Version, UrlString),
|
MissingRemoteIndex(&'lock PackageName, &'lock Version, &'lock UrlString),
|
||||||
/// The lockfile referenced a local index that was not provided
|
/// The lockfile referenced a local index that was not provided
|
||||||
MissingLocalIndex(&'lock PackageName, &'lock Version, &'lock Path),
|
MissingLocalIndex(&'lock PackageName, &'lock Version, &'lock Path),
|
||||||
/// A package in the lockfile contains different `requires-dist` metadata than expected.
|
/// A package in the lockfile contains different `requires-dist` metadata than expected.
|
||||||
|
|
|
@ -34,6 +34,8 @@ pub struct Preference {
|
||||||
/// is part of, otherwise `None`.
|
/// is part of, otherwise `None`.
|
||||||
fork_markers: Vec<UniversalMarker>,
|
fork_markers: Vec<UniversalMarker>,
|
||||||
hashes: HashDigests,
|
hashes: HashDigests,
|
||||||
|
/// The source of the preference.
|
||||||
|
source: PreferenceSource,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Preference {
|
impl Preference {
|
||||||
|
@ -73,6 +75,7 @@ impl Preference {
|
||||||
.map(String::as_str)
|
.map(String::as_str)
|
||||||
.map(HashDigest::from_str)
|
.map(HashDigest::from_str)
|
||||||
.collect::<Result<_, _>>()?,
|
.collect::<Result<_, _>>()?,
|
||||||
|
source: PreferenceSource::RequirementsTxt,
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -91,6 +94,7 @@ impl Preference {
|
||||||
index: PreferenceIndex::from(package.index(install_path)?),
|
index: PreferenceIndex::from(package.index(install_path)?),
|
||||||
fork_markers: package.fork_markers().to_vec(),
|
fork_markers: package.fork_markers().to_vec(),
|
||||||
hashes: HashDigests::empty(),
|
hashes: HashDigests::empty(),
|
||||||
|
source: PreferenceSource::Lock,
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -112,6 +116,7 @@ impl Preference {
|
||||||
// `pylock.toml` doesn't have fork annotations.
|
// `pylock.toml` doesn't have fork annotations.
|
||||||
fork_markers: vec![],
|
fork_markers: vec![],
|
||||||
hashes: HashDigests::empty(),
|
hashes: HashDigests::empty(),
|
||||||
|
source: PreferenceSource::Lock,
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -127,6 +132,7 @@ impl Preference {
|
||||||
index: PreferenceIndex::Any,
|
index: PreferenceIndex::Any,
|
||||||
fork_markers: vec![],
|
fork_markers: vec![],
|
||||||
hashes: HashDigests::empty(),
|
hashes: HashDigests::empty(),
|
||||||
|
source: PreferenceSource::Environment,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -171,11 +177,24 @@ impl From<Option<IndexUrl>> for PreferenceIndex {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||||
|
pub(crate) enum PreferenceSource {
|
||||||
|
/// The preference is from an installed package in the environment.
|
||||||
|
Environment,
|
||||||
|
/// The preference is from a `uv.ock` file.
|
||||||
|
Lock,
|
||||||
|
/// The preference is from a `requirements.txt` file.
|
||||||
|
RequirementsTxt,
|
||||||
|
/// The preference is from the current solve.
|
||||||
|
Resolver,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub(crate) struct Entry {
|
pub(crate) struct Entry {
|
||||||
marker: UniversalMarker,
|
marker: UniversalMarker,
|
||||||
index: PreferenceIndex,
|
index: PreferenceIndex,
|
||||||
pin: Pin,
|
pin: Pin,
|
||||||
|
source: PreferenceSource,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Entry {
|
impl Entry {
|
||||||
|
@ -193,6 +212,11 @@ impl Entry {
|
||||||
pub(crate) fn pin(&self) -> &Pin {
|
pub(crate) fn pin(&self) -> &Pin {
|
||||||
&self.pin
|
&self.pin
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Return the source of the entry.
|
||||||
|
pub(crate) fn source(&self) -> PreferenceSource {
|
||||||
|
self.source
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A set of pinned packages that should be preserved during resolution, if possible.
|
/// A set of pinned packages that should be preserved during resolution, if possible.
|
||||||
|
@ -245,6 +269,7 @@ impl Preferences {
|
||||||
version: preference.version,
|
version: preference.version,
|
||||||
hashes: preference.hashes,
|
hashes: preference.hashes,
|
||||||
},
|
},
|
||||||
|
source: preference.source,
|
||||||
});
|
});
|
||||||
} else {
|
} else {
|
||||||
for fork_marker in preference.fork_markers {
|
for fork_marker in preference.fork_markers {
|
||||||
|
@ -255,6 +280,7 @@ impl Preferences {
|
||||||
version: preference.version.clone(),
|
version: preference.version.clone(),
|
||||||
hashes: preference.hashes.clone(),
|
hashes: preference.hashes.clone(),
|
||||||
},
|
},
|
||||||
|
source: preference.source,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -270,11 +296,13 @@ impl Preferences {
|
||||||
index: Option<IndexUrl>,
|
index: Option<IndexUrl>,
|
||||||
markers: UniversalMarker,
|
markers: UniversalMarker,
|
||||||
pin: impl Into<Pin>,
|
pin: impl Into<Pin>,
|
||||||
|
source: PreferenceSource,
|
||||||
) {
|
) {
|
||||||
self.0.entry(package_name).or_default().push(Entry {
|
self.0.entry(package_name).or_default().push(Entry {
|
||||||
marker: markers,
|
marker: markers,
|
||||||
index: PreferenceIndex::from(index),
|
index: PreferenceIndex::from(index),
|
||||||
pin: pin.into(),
|
pin: pin.into(),
|
||||||
|
source,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
pub(crate) use crate::pubgrub::dependencies::PubGrubDependency;
|
pub(crate) use crate::pubgrub::dependencies::PubGrubDependency;
|
||||||
pub(crate) use crate::pubgrub::distribution::PubGrubDistribution;
|
pub(crate) use crate::pubgrub::distribution::PubGrubDistribution;
|
||||||
pub(crate) use crate::pubgrub::package::{PubGrubPackage, PubGrubPackageInner, PubGrubPython};
|
pub use crate::pubgrub::package::{PubGrubPackage, PubGrubPackageInner, PubGrubPython};
|
||||||
pub(crate) use crate::pubgrub::priority::{PubGrubPriorities, PubGrubPriority, PubGrubTiebreaker};
|
pub(crate) use crate::pubgrub::priority::{PubGrubPriorities, PubGrubPriority, PubGrubTiebreaker};
|
||||||
pub(crate) use crate::pubgrub::report::PubGrubReportFormatter;
|
pub(crate) use crate::pubgrub::report::PubGrubReportFormatter;
|
||||||
|
|
||||||
|
|
|
@ -9,7 +9,7 @@ use crate::python_requirement::PythonRequirement;
|
||||||
|
|
||||||
/// [`Arc`] wrapper around [`PubGrubPackageInner`] to make cloning (inside PubGrub) cheap.
|
/// [`Arc`] wrapper around [`PubGrubPackageInner`] to make cloning (inside PubGrub) cheap.
|
||||||
#[derive(Debug, Clone, Eq, Hash, PartialEq, PartialOrd, Ord)]
|
#[derive(Debug, Clone, Eq, Hash, PartialEq, PartialOrd, Ord)]
|
||||||
pub(crate) struct PubGrubPackage(Arc<PubGrubPackageInner>);
|
pub struct PubGrubPackage(Arc<PubGrubPackageInner>);
|
||||||
|
|
||||||
impl Deref for PubGrubPackage {
|
impl Deref for PubGrubPackage {
|
||||||
type Target = PubGrubPackageInner;
|
type Target = PubGrubPackageInner;
|
||||||
|
@ -39,7 +39,7 @@ impl From<PubGrubPackageInner> for PubGrubPackage {
|
||||||
/// package (e.g., `black[colorama]`), and mark it as a dependency of the real package (e.g.,
|
/// package (e.g., `black[colorama]`), and mark it as a dependency of the real package (e.g.,
|
||||||
/// `black`). We then discard the virtual packages at the end of the resolution process.
|
/// `black`). We then discard the virtual packages at the end of the resolution process.
|
||||||
#[derive(Debug, Clone, Eq, Hash, PartialEq, PartialOrd, Ord)]
|
#[derive(Debug, Clone, Eq, Hash, PartialEq, PartialOrd, Ord)]
|
||||||
pub(crate) enum PubGrubPackageInner {
|
pub enum PubGrubPackageInner {
|
||||||
/// The root package, which is used to start the resolution process.
|
/// The root package, which is used to start the resolution process.
|
||||||
Root(Option<PackageName>),
|
Root(Option<PackageName>),
|
||||||
/// A Python version.
|
/// A Python version.
|
||||||
|
@ -295,7 +295,7 @@ impl PubGrubPackage {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, PartialOrd, Hash, Ord)]
|
#[derive(Debug, Copy, Clone, Eq, PartialEq, PartialOrd, Hash, Ord)]
|
||||||
pub(crate) enum PubGrubPython {
|
pub enum PubGrubPython {
|
||||||
/// The Python version installed in the current environment.
|
/// The Python version installed in the current environment.
|
||||||
Installed,
|
Installed,
|
||||||
/// The Python version for which dependencies are being resolved.
|
/// The Python version for which dependencies are being resolved.
|
||||||
|
|
|
@ -7,7 +7,7 @@ use uv_platform_tags::{AbiTag, Tags};
|
||||||
|
|
||||||
/// The reason why a package or a version cannot be used.
|
/// The reason why a package or a version cannot be used.
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||||
pub(crate) enum UnavailableReason {
|
pub enum UnavailableReason {
|
||||||
/// The entire package cannot be used.
|
/// The entire package cannot be used.
|
||||||
Package(UnavailablePackage),
|
Package(UnavailablePackage),
|
||||||
/// A single version cannot be used.
|
/// A single version cannot be used.
|
||||||
|
@ -29,7 +29,7 @@ impl Display for UnavailableReason {
|
||||||
/// Most variant are from [`MetadataResponse`] without the error source, since we don't format
|
/// Most variant are from [`MetadataResponse`] without the error source, since we don't format
|
||||||
/// the source and we want to merge unavailable messages across versions.
|
/// the source and we want to merge unavailable messages across versions.
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||||
pub(crate) enum UnavailableVersion {
|
pub enum UnavailableVersion {
|
||||||
/// Version is incompatible because it has no usable distributions
|
/// Version is incompatible because it has no usable distributions
|
||||||
IncompatibleDist(IncompatibleDist),
|
IncompatibleDist(IncompatibleDist),
|
||||||
/// The wheel metadata was found, but could not be parsed.
|
/// The wheel metadata was found, but could not be parsed.
|
||||||
|
@ -123,7 +123,7 @@ impl From<&MetadataUnavailable> for UnavailableVersion {
|
||||||
|
|
||||||
/// The package is unavailable and cannot be used.
|
/// The package is unavailable and cannot be used.
|
||||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||||
pub(crate) enum UnavailablePackage {
|
pub enum UnavailablePackage {
|
||||||
/// Index lookups were disabled (i.e., `--no-index`) and the package was not found in a flat index (i.e. from `--find-links`).
|
/// Index lookups were disabled (i.e., `--no-index`) and the package was not found in a flat index (i.e. from `--find-links`).
|
||||||
NoIndex,
|
NoIndex,
|
||||||
/// Network requests were disabled (i.e., `--offline`), and the package was not found in the cache.
|
/// Network requests were disabled (i.e., `--offline`), and the package was not found in the cache.
|
||||||
|
|
|
@ -47,7 +47,7 @@ use crate::fork_strategy::ForkStrategy;
|
||||||
use crate::fork_urls::ForkUrls;
|
use crate::fork_urls::ForkUrls;
|
||||||
use crate::manifest::Manifest;
|
use crate::manifest::Manifest;
|
||||||
use crate::pins::FilePins;
|
use crate::pins::FilePins;
|
||||||
use crate::preferences::Preferences;
|
use crate::preferences::{PreferenceSource, Preferences};
|
||||||
use crate::pubgrub::{
|
use crate::pubgrub::{
|
||||||
PubGrubDependency, PubGrubDistribution, PubGrubPackage, PubGrubPackageInner, PubGrubPriorities,
|
PubGrubDependency, PubGrubDistribution, PubGrubPackage, PubGrubPackageInner, PubGrubPriorities,
|
||||||
PubGrubPython,
|
PubGrubPython,
|
||||||
|
@ -447,6 +447,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
|
||||||
.try_universal_markers()
|
.try_universal_markers()
|
||||||
.unwrap_or(UniversalMarker::TRUE),
|
.unwrap_or(UniversalMarker::TRUE),
|
||||||
version.clone(),
|
version.clone(),
|
||||||
|
PreferenceSource::Resolver,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
use std::path::Path;
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
|
|
||||||
use tracing::debug;
|
use tracing::debug;
|
||||||
|
@ -13,6 +14,8 @@ pub enum AcceleratorError {
|
||||||
Version(#[from] uv_pep440::VersionParseError),
|
Version(#[from] uv_pep440::VersionParseError),
|
||||||
#[error(transparent)]
|
#[error(transparent)]
|
||||||
Utf8(#[from] std::string::FromUtf8Error),
|
Utf8(#[from] std::string::FromUtf8Error),
|
||||||
|
#[error(transparent)]
|
||||||
|
ParseInt(#[from] std::num::ParseIntError),
|
||||||
#[error("Unknown AMD GPU architecture: {0}")]
|
#[error("Unknown AMD GPU architecture: {0}")]
|
||||||
UnknownAmdGpuArchitecture(String),
|
UnknownAmdGpuArchitecture(String),
|
||||||
}
|
}
|
||||||
|
@ -30,6 +33,10 @@ pub enum Accelerator {
|
||||||
Amd {
|
Amd {
|
||||||
gpu_architecture: AmdGpuArchitecture,
|
gpu_architecture: AmdGpuArchitecture,
|
||||||
},
|
},
|
||||||
|
/// The Intel GPU (XPU).
|
||||||
|
///
|
||||||
|
/// Currently, Intel GPUs do not depend on a driver or toolkit version at this level.
|
||||||
|
Xpu,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl std::fmt::Display for Accelerator {
|
impl std::fmt::Display for Accelerator {
|
||||||
|
@ -37,21 +44,28 @@ impl std::fmt::Display for Accelerator {
|
||||||
match self {
|
match self {
|
||||||
Self::Cuda { driver_version } => write!(f, "CUDA {driver_version}"),
|
Self::Cuda { driver_version } => write!(f, "CUDA {driver_version}"),
|
||||||
Self::Amd { gpu_architecture } => write!(f, "AMD {gpu_architecture}"),
|
Self::Amd { gpu_architecture } => write!(f, "AMD {gpu_architecture}"),
|
||||||
|
Self::Xpu => write!(f, "Intel GPU (XPU)"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Accelerator {
|
impl Accelerator {
|
||||||
/// Detect the CUDA driver version from the system.
|
/// Detect the GPU driver and/or architecture version from the system.
|
||||||
///
|
///
|
||||||
/// Query, in order:
|
/// Query, in order:
|
||||||
/// 1. The `UV_CUDA_DRIVER_VERSION` environment variable.
|
/// 1. The `UV_CUDA_DRIVER_VERSION` environment variable.
|
||||||
/// 2. The `UV_AMD_GPU_ARCHITECTURE` environment variable.
|
/// 2. The `UV_AMD_GPU_ARCHITECTURE` environment variable.
|
||||||
/// 2. `/sys/module/nvidia/version`, which contains the driver version (e.g., `550.144.03`).
|
/// 3. `/sys/module/nvidia/version`, which contains the driver version (e.g., `550.144.03`).
|
||||||
/// 3. `/proc/driver/nvidia/version`, which contains the driver version among other information.
|
/// 4. `/proc/driver/nvidia/version`, which contains the driver version among other information.
|
||||||
/// 4. `nvidia-smi --query-gpu=driver_version --format=csv,noheader`.
|
/// 5. `nvidia-smi --query-gpu=driver_version --format=csv,noheader`.
|
||||||
/// 5. `rocm_agent_enumerator`, which lists the AMD GPU architectures.
|
/// 6. `rocm_agent_enumerator`, which lists the AMD GPU architectures.
|
||||||
|
/// 7. `/sys/bus/pci/devices`, filtering for the Intel GPU via PCI.
|
||||||
pub fn detect() -> Result<Option<Self>, AcceleratorError> {
|
pub fn detect() -> Result<Option<Self>, AcceleratorError> {
|
||||||
|
// Constants used for PCI device detection.
|
||||||
|
const PCI_BASE_CLASS_MASK: u32 = 0x00ff_0000;
|
||||||
|
const PCI_BASE_CLASS_DISPLAY: u32 = 0x0003_0000;
|
||||||
|
const PCI_VENDOR_ID_INTEL: u32 = 0x8086;
|
||||||
|
|
||||||
// Read from `UV_CUDA_DRIVER_VERSION`.
|
// Read from `UV_CUDA_DRIVER_VERSION`.
|
||||||
if let Ok(driver_version) = std::env::var(EnvVars::UV_CUDA_DRIVER_VERSION) {
|
if let Ok(driver_version) = std::env::var(EnvVars::UV_CUDA_DRIVER_VERSION) {
|
||||||
let driver_version = Version::from_str(&driver_version)?;
|
let driver_version = Version::from_str(&driver_version)?;
|
||||||
|
@ -150,6 +164,29 @@ impl Accelerator {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Read from `/sys/bus/pci/devices` to filter for Intel GPU via PCI.
|
||||||
|
match fs_err::read_dir("/sys/bus/pci/devices") {
|
||||||
|
Ok(entries) => {
|
||||||
|
for entry in entries.flatten() {
|
||||||
|
match parse_pci_device_ids(&entry.path()) {
|
||||||
|
Ok((class, vendor)) => {
|
||||||
|
if (class & PCI_BASE_CLASS_MASK) == PCI_BASE_CLASS_DISPLAY
|
||||||
|
&& vendor == PCI_VENDOR_ID_INTEL
|
||||||
|
{
|
||||||
|
debug!("Detected Intel GPU from PCI: vendor=0x{:04x}", vendor);
|
||||||
|
return Ok(Some(Self::Xpu));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
debug!("Failed to parse PCI device IDs: {e}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
}
|
||||||
|
|
||||||
debug!("Failed to detect GPU driver version");
|
debug!("Failed to detect GPU driver version");
|
||||||
|
|
||||||
Ok(None)
|
Ok(None)
|
||||||
|
@ -180,6 +217,22 @@ fn parse_proc_driver_nvidia_version(content: &str) -> Result<Option<Version>, Ac
|
||||||
Ok(Some(driver_version))
|
Ok(Some(driver_version))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Reads and parses the PCI class and vendor ID from a given device path under `/sys/bus/pci/devices`.
|
||||||
|
fn parse_pci_device_ids(device_path: &Path) -> Result<(u32, u32), AcceleratorError> {
|
||||||
|
// Parse, e.g.:
|
||||||
|
// ```text
|
||||||
|
// - `class`: a hexadecimal string such as `0x030000`
|
||||||
|
// - `vendor`: a hexadecimal string such as `0x8086`
|
||||||
|
// ```
|
||||||
|
let class_content = fs_err::read_to_string(device_path.join("class"))?;
|
||||||
|
let pci_class = u32::from_str_radix(class_content.trim().trim_start_matches("0x"), 16)?;
|
||||||
|
|
||||||
|
let vendor_content = fs_err::read_to_string(device_path.join("vendor"))?;
|
||||||
|
let pci_vendor = u32::from_str_radix(vendor_content.trim().trim_start_matches("0x"), 16)?;
|
||||||
|
|
||||||
|
Ok((pci_class, pci_vendor))
|
||||||
|
}
|
||||||
|
|
||||||
/// A GPU architecture for AMD GPUs.
|
/// A GPU architecture for AMD GPUs.
|
||||||
///
|
///
|
||||||
/// See: <https://rocm.docs.amd.com/projects/install-on-linux/en/latest/reference/system-requirements.html>
|
/// See: <https://rocm.docs.amd.com/projects/install-on-linux/en/latest/reference/system-requirements.html>
|
||||||
|
|
|
@ -185,6 +185,8 @@ pub enum TorchStrategy {
|
||||||
os: Os,
|
os: Os,
|
||||||
gpu_architecture: AmdGpuArchitecture,
|
gpu_architecture: AmdGpuArchitecture,
|
||||||
},
|
},
|
||||||
|
/// Select the appropriate PyTorch index based on the operating system and Intel GPU presence.
|
||||||
|
Xpu { os: Os },
|
||||||
/// Use the specified PyTorch index.
|
/// Use the specified PyTorch index.
|
||||||
Backend(TorchBackend),
|
Backend(TorchBackend),
|
||||||
}
|
}
|
||||||
|
@ -202,6 +204,7 @@ impl TorchStrategy {
|
||||||
os: os.clone(),
|
os: os.clone(),
|
||||||
gpu_architecture,
|
gpu_architecture,
|
||||||
}),
|
}),
|
||||||
|
Some(Accelerator::Xpu) => Ok(Self::Xpu { os: os.clone() }),
|
||||||
None => Ok(Self::Backend(TorchBackend::Cpu)),
|
None => Ok(Self::Backend(TorchBackend::Cpu)),
|
||||||
},
|
},
|
||||||
TorchMode::Cpu => Ok(Self::Backend(TorchBackend::Cpu)),
|
TorchMode::Cpu => Ok(Self::Backend(TorchBackend::Cpu)),
|
||||||
|
@ -347,9 +350,27 @@ impl TorchStrategy {
|
||||||
Either::Right(Either::Left(std::iter::once(TorchBackend::Cpu.index_url())))
|
Either::Right(Either::Left(std::iter::once(TorchBackend::Cpu.index_url())))
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
TorchStrategy::Backend(backend) => {
|
TorchStrategy::Xpu { os } => match os {
|
||||||
Either::Right(Either::Right(std::iter::once(backend.index_url())))
|
Os::Manylinux { .. } => Either::Right(Either::Right(Either::Left(
|
||||||
|
std::iter::once(TorchBackend::Xpu.index_url()),
|
||||||
|
))),
|
||||||
|
Os::Windows
|
||||||
|
| Os::Musllinux { .. }
|
||||||
|
| Os::Macos { .. }
|
||||||
|
| Os::FreeBsd { .. }
|
||||||
|
| Os::NetBsd { .. }
|
||||||
|
| Os::OpenBsd { .. }
|
||||||
|
| Os::Dragonfly { .. }
|
||||||
|
| Os::Illumos { .. }
|
||||||
|
| Os::Haiku { .. }
|
||||||
|
| Os::Android { .. }
|
||||||
|
| Os::Pyodide { .. } => {
|
||||||
|
Either::Right(Either::Left(std::iter::once(TorchBackend::Cpu.index_url())))
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
TorchStrategy::Backend(backend) => Either::Right(Either::Right(Either::Right(
|
||||||
|
std::iter::once(backend.index_url()),
|
||||||
|
))),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
[package]
|
[package]
|
||||||
name = "uv-version"
|
name = "uv-version"
|
||||||
version = "0.7.18"
|
version = "0.7.20"
|
||||||
edition = { workspace = true }
|
edition = { workspace = true }
|
||||||
rust-version = { workspace = true }
|
rust-version = { workspace = true }
|
||||||
homepage = { workspace = true }
|
homepage = { workspace = true }
|
||||||
|
|
|
@ -6,6 +6,7 @@
|
||||||
//!
|
//!
|
||||||
//! Then lowers them into a dependency specification.
|
//! Then lowers them into a dependency specification.
|
||||||
|
|
||||||
|
#[cfg(feature = "schemars")]
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
use std::collections::BTreeMap;
|
use std::collections::BTreeMap;
|
||||||
use std::fmt::Formatter;
|
use std::fmt::Formatter;
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
[package]
|
[package]
|
||||||
name = "uv"
|
name = "uv"
|
||||||
version = "0.7.18"
|
version = "0.7.20"
|
||||||
edition = { workspace = true }
|
edition = { workspace = true }
|
||||||
rust-version = { workspace = true }
|
rust-version = { workspace = true }
|
||||||
homepage = { workspace = true }
|
homepage = { workspace = true }
|
||||||
|
|
|
@ -3,7 +3,6 @@ use std::fmt::Write as _;
|
||||||
use std::io::Write as _;
|
use std::io::Write as _;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
use std::sync::Arc;
|
|
||||||
use std::{fmt, io};
|
use std::{fmt, io};
|
||||||
|
|
||||||
use anyhow::{Context, Result};
|
use anyhow::{Context, Result};
|
||||||
|
@ -188,15 +187,6 @@ async fn build_impl(
|
||||||
printer: Printer,
|
printer: Printer,
|
||||||
preview: PreviewMode,
|
preview: PreviewMode,
|
||||||
) -> Result<BuildResult> {
|
) -> Result<BuildResult> {
|
||||||
if list && preview.is_disabled() {
|
|
||||||
// We need the direct build for list and that is preview only.
|
|
||||||
writeln!(
|
|
||||||
printer.stderr(),
|
|
||||||
"The `--list` option is only available in preview mode; add the `--preview` flag to use `--list`"
|
|
||||||
)?;
|
|
||||||
return Ok(BuildResult::Failure);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Extract the resolver settings.
|
// Extract the resolver settings.
|
||||||
let ResolverSettings {
|
let ResolverSettings {
|
||||||
index_locations,
|
index_locations,
|
||||||
|
@ -504,16 +494,7 @@ async fn build_package(
|
||||||
.await?
|
.await?
|
||||||
.into_interpreter();
|
.into_interpreter();
|
||||||
|
|
||||||
// Add all authenticated sources to the cache.
|
index_locations.cache_index_credentials();
|
||||||
for index in index_locations.allowed_indexes() {
|
|
||||||
if let Some(credentials) = index.credentials() {
|
|
||||||
let credentials = Arc::new(credentials);
|
|
||||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
|
||||||
if let Some(root_url) = index.root_url() {
|
|
||||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Read build constraints.
|
// Read build constraints.
|
||||||
let build_constraints =
|
let build_constraints =
|
||||||
|
@ -615,10 +596,7 @@ async fn build_package(
|
||||||
}
|
}
|
||||||
|
|
||||||
BuildAction::List
|
BuildAction::List
|
||||||
} else if preview.is_enabled()
|
} else if !force_pep517 && check_direct_build(source.path(), source.path().user_display()) {
|
||||||
&& !force_pep517
|
|
||||||
&& check_direct_build(source.path(), source.path().user_display())
|
|
||||||
{
|
|
||||||
BuildAction::DirectBuild
|
BuildAction::DirectBuild
|
||||||
} else {
|
} else {
|
||||||
BuildAction::Pep517
|
BuildAction::Pep517
|
||||||
|
|
|
@ -3,7 +3,6 @@ use std::env;
|
||||||
use std::ffi::OsStr;
|
use std::ffi::OsStr;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use std::str::FromStr;
|
use std::str::FromStr;
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use anyhow::{Result, anyhow};
|
use anyhow::{Result, anyhow};
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
|
@ -338,13 +337,12 @@ pub(crate) async fn pip_compile(
|
||||||
|
|
||||||
// Determine the Python requirement, if the user requested a specific version.
|
// Determine the Python requirement, if the user requested a specific version.
|
||||||
let python_requirement = if universal {
|
let python_requirement = if universal {
|
||||||
let requires_python = RequiresPython::greater_than_equal_version(
|
let requires_python = if let Some(python_version) = python_version.as_ref() {
|
||||||
if let Some(python_version) = python_version.as_ref() {
|
RequiresPython::greater_than_equal_version(&python_version.version)
|
||||||
&python_version.version
|
|
||||||
} else {
|
} else {
|
||||||
interpreter.python_version()
|
let version = interpreter.python_minor_version();
|
||||||
},
|
RequiresPython::greater_than_equal_version(&version)
|
||||||
);
|
};
|
||||||
PythonRequirement::from_requires_python(&interpreter, requires_python)
|
PythonRequirement::from_requires_python(&interpreter, requires_python)
|
||||||
} else if let Some(python_version) = python_version.as_ref() {
|
} else if let Some(python_version) = python_version.as_ref() {
|
||||||
PythonRequirement::from_python_version(&interpreter, python_version)
|
PythonRequirement::from_python_version(&interpreter, python_version)
|
||||||
|
@ -388,16 +386,7 @@ pub(crate) async fn pip_compile(
|
||||||
no_index,
|
no_index,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Add all authenticated sources to the cache.
|
index_locations.cache_index_credentials();
|
||||||
for index in index_locations.allowed_indexes() {
|
|
||||||
if let Some(credentials) = index.credentials() {
|
|
||||||
let credentials = Arc::new(credentials);
|
|
||||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
|
||||||
if let Some(root_url) = index.root_url() {
|
|
||||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Determine the PyTorch backend.
|
// Determine the PyTorch backend.
|
||||||
let torch_backend = torch_backend
|
let torch_backend = torch_backend
|
||||||
|
|
|
@ -1,12 +1,11 @@
|
||||||
use std::collections::{BTreeMap, BTreeSet};
|
use std::collections::{BTreeMap, BTreeSet};
|
||||||
use std::fmt::Write;
|
use std::fmt::Write;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use owo_colors::OwoColorize;
|
use owo_colors::OwoColorize;
|
||||||
use tracing::{Level, debug, enabled};
|
use tracing::{Level, debug, enabled, warn};
|
||||||
|
|
||||||
use uv_cache::Cache;
|
use uv_cache::Cache;
|
||||||
use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder};
|
use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder};
|
||||||
|
@ -237,7 +236,13 @@ pub(crate) async fn pip_install(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let _lock = environment.lock().await?;
|
let _lock = environment
|
||||||
|
.lock()
|
||||||
|
.await
|
||||||
|
.inspect_err(|err| {
|
||||||
|
warn!("Failed to acquire environment lock: {err}");
|
||||||
|
})
|
||||||
|
.ok();
|
||||||
|
|
||||||
// Determine the markers to use for the resolution.
|
// Determine the markers to use for the resolution.
|
||||||
let interpreter = environment.interpreter();
|
let interpreter = environment.interpreter();
|
||||||
|
@ -334,16 +339,7 @@ pub(crate) async fn pip_install(
|
||||||
no_index,
|
no_index,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Add all authenticated sources to the cache.
|
index_locations.cache_index_credentials();
|
||||||
for index in index_locations.allowed_indexes() {
|
|
||||||
if let Some(credentials) = index.credentials() {
|
|
||||||
let credentials = Arc::new(credentials);
|
|
||||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
|
||||||
if let Some(root_url) = index.root_url() {
|
|
||||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Determine the PyTorch backend.
|
// Determine the PyTorch backend.
|
||||||
let torch_backend = torch_backend
|
let torch_backend = torch_backend
|
||||||
|
|
|
@ -1,10 +1,9 @@
|
||||||
use std::collections::{BTreeMap, BTreeSet};
|
use std::collections::{BTreeMap, BTreeSet};
|
||||||
use std::fmt::Write;
|
use std::fmt::Write;
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use anyhow::{Context, Result};
|
use anyhow::{Context, Result};
|
||||||
use owo_colors::OwoColorize;
|
use owo_colors::OwoColorize;
|
||||||
use tracing::debug;
|
use tracing::{debug, warn};
|
||||||
|
|
||||||
use uv_cache::Cache;
|
use uv_cache::Cache;
|
||||||
use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder};
|
use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder};
|
||||||
|
@ -212,7 +211,13 @@ pub(crate) async fn pip_sync(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let _lock = environment.lock().await?;
|
let _lock = environment
|
||||||
|
.lock()
|
||||||
|
.await
|
||||||
|
.inspect_err(|err| {
|
||||||
|
warn!("Failed to acquire environment lock: {err}");
|
||||||
|
})
|
||||||
|
.ok();
|
||||||
|
|
||||||
let interpreter = environment.interpreter();
|
let interpreter = environment.interpreter();
|
||||||
|
|
||||||
|
@ -267,16 +272,7 @@ pub(crate) async fn pip_sync(
|
||||||
no_index,
|
no_index,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Add all authenticated sources to the cache.
|
index_locations.cache_index_credentials();
|
||||||
for index in index_locations.allowed_indexes() {
|
|
||||||
if let Some(credentials) = index.credentials() {
|
|
||||||
let credentials = Arc::new(credentials);
|
|
||||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
|
||||||
if let Some(root_url) = index.root_url() {
|
|
||||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Determine the PyTorch backend.
|
// Determine the PyTorch backend.
|
||||||
let torch_backend = torch_backend
|
let torch_backend = torch_backend
|
||||||
|
|
|
@ -3,7 +3,7 @@ use std::fmt::Write;
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
use itertools::{Either, Itertools};
|
use itertools::{Either, Itertools};
|
||||||
use owo_colors::OwoColorize;
|
use owo_colors::OwoColorize;
|
||||||
use tracing::debug;
|
use tracing::{debug, warn};
|
||||||
|
|
||||||
use uv_cache::Cache;
|
use uv_cache::Cache;
|
||||||
use uv_client::BaseClientBuilder;
|
use uv_client::BaseClientBuilder;
|
||||||
|
@ -100,7 +100,13 @@ pub(crate) async fn pip_uninstall(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let _lock = environment.lock().await?;
|
let _lock = environment
|
||||||
|
.lock()
|
||||||
|
.await
|
||||||
|
.inspect_err(|err| {
|
||||||
|
warn!("Failed to acquire environment lock: {err}");
|
||||||
|
})
|
||||||
|
.ok();
|
||||||
|
|
||||||
// Index the current `site-packages` directory.
|
// Index the current `site-packages` directory.
|
||||||
let site_packages = uv_installer::SitePackages::from_environment(&environment)?;
|
let site_packages = uv_installer::SitePackages::from_environment(&environment)?;
|
||||||
|
|
|
@ -10,7 +10,7 @@ use anyhow::{Context, Result, bail};
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use owo_colors::OwoColorize;
|
use owo_colors::OwoColorize;
|
||||||
use rustc_hash::{FxBuildHasher, FxHashMap};
|
use rustc_hash::{FxBuildHasher, FxHashMap};
|
||||||
use tracing::debug;
|
use tracing::{debug, warn};
|
||||||
use url::Url;
|
use url::Url;
|
||||||
|
|
||||||
use uv_cache::Cache;
|
use uv_cache::Cache;
|
||||||
|
@ -83,6 +83,7 @@ pub(crate) async fn add(
|
||||||
extras_of_dependency: Vec<ExtraName>,
|
extras_of_dependency: Vec<ExtraName>,
|
||||||
package: Option<PackageName>,
|
package: Option<PackageName>,
|
||||||
python: Option<String>,
|
python: Option<String>,
|
||||||
|
workspace: bool,
|
||||||
install_mirrors: PythonInstallMirrors,
|
install_mirrors: PythonInstallMirrors,
|
||||||
settings: ResolverInstallerSettings,
|
settings: ResolverInstallerSettings,
|
||||||
network_settings: NetworkSettings,
|
network_settings: NetworkSettings,
|
||||||
|
@ -151,7 +152,7 @@ pub(crate) async fn add(
|
||||||
// Default groups we need the actual project for, interpreter discovery will use this!
|
// Default groups we need the actual project for, interpreter discovery will use this!
|
||||||
let defaulted_groups;
|
let defaulted_groups;
|
||||||
|
|
||||||
let target = if let Some(script) = script {
|
let mut target = if let Some(script) = script {
|
||||||
// If we found a PEP 723 script and the user provided a project-only setting, warn.
|
// If we found a PEP 723 script and the user provided a project-only setting, warn.
|
||||||
if package.is_some() {
|
if package.is_some() {
|
||||||
warn_user_once!(
|
warn_user_once!(
|
||||||
|
@ -319,7 +320,13 @@ pub(crate) async fn add(
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let _lock = target.acquire_lock().await?;
|
let _lock = target
|
||||||
|
.acquire_lock()
|
||||||
|
.await
|
||||||
|
.inspect_err(|err| {
|
||||||
|
warn!("Failed to acquire environment lock: {err}");
|
||||||
|
})
|
||||||
|
.ok();
|
||||||
|
|
||||||
let client_builder = BaseClientBuilder::new()
|
let client_builder = BaseClientBuilder::new()
|
||||||
.connectivity(network_settings.connectivity)
|
.connectivity(network_settings.connectivity)
|
||||||
|
@ -374,16 +381,7 @@ pub(crate) async fn add(
|
||||||
let hasher = HashStrategy::default();
|
let hasher = HashStrategy::default();
|
||||||
let sources = SourceStrategy::Enabled;
|
let sources = SourceStrategy::Enabled;
|
||||||
|
|
||||||
// Add all authenticated sources to the cache.
|
settings.resolver.index_locations.cache_index_credentials();
|
||||||
for index in settings.resolver.index_locations.allowed_indexes() {
|
|
||||||
if let Some(credentials) = index.credentials() {
|
|
||||||
let credentials = Arc::new(credentials);
|
|
||||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
|
||||||
if let Some(root_url) = index.root_url() {
|
|
||||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Initialize the registry client.
|
// Initialize the registry client.
|
||||||
let client = RegistryClientBuilder::try_from(client_builder)?
|
let client = RegistryClientBuilder::try_from(client_builder)?
|
||||||
|
@ -481,6 +479,9 @@ pub(crate) async fn add(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Store the content prior to any modifications.
|
||||||
|
let snapshot = target.snapshot().await?;
|
||||||
|
|
||||||
// If the user provides a single, named index, pin all requirements to that index.
|
// If the user provides a single, named index, pin all requirements to that index.
|
||||||
let index = indexes
|
let index = indexes
|
||||||
.first()
|
.first()
|
||||||
|
@ -491,7 +492,72 @@ pub(crate) async fn add(
|
||||||
debug!("Pinning all requirements to index: `{index}`");
|
debug!("Pinning all requirements to index: `{index}`");
|
||||||
});
|
});
|
||||||
|
|
||||||
// Add the requirements to the `pyproject.toml` or script.
|
// Track modification status, for reverts.
|
||||||
|
let mut modified = false;
|
||||||
|
|
||||||
|
// If `--workspace` is provided, add any members to the `workspace` section of the
|
||||||
|
// `pyproject.toml` file.
|
||||||
|
if workspace {
|
||||||
|
let AddTarget::Project(project, python_target) = target else {
|
||||||
|
unreachable!("`--workspace` and `--script` are conflicting options");
|
||||||
|
};
|
||||||
|
|
||||||
|
let workspace = project.workspace();
|
||||||
|
let mut toml = PyProjectTomlMut::from_toml(
|
||||||
|
&workspace.pyproject_toml().raw,
|
||||||
|
DependencyTarget::PyProjectToml,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
// Check each requirement to see if it's a path dependency
|
||||||
|
for requirement in &requirements {
|
||||||
|
if let RequirementSource::Directory { install_path, .. } = &requirement.source {
|
||||||
|
let absolute_path = if install_path.is_absolute() {
|
||||||
|
install_path.to_path_buf()
|
||||||
|
} else {
|
||||||
|
project.root().join(install_path)
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check if the path is not already included in the workspace.
|
||||||
|
if !workspace.includes(&absolute_path)? {
|
||||||
|
let relative_path = absolute_path
|
||||||
|
.strip_prefix(workspace.install_path())
|
||||||
|
.unwrap_or(&absolute_path);
|
||||||
|
|
||||||
|
toml.add_workspace(relative_path)?;
|
||||||
|
modified |= true;
|
||||||
|
|
||||||
|
writeln!(
|
||||||
|
printer.stderr(),
|
||||||
|
"Added `{}` to workspace members",
|
||||||
|
relative_path.user_display().cyan()
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we modified the workspace root, we need to reload it entirely, since this can impact
|
||||||
|
// the discovered members, etc.
|
||||||
|
target = if modified {
|
||||||
|
let workspace_content = toml.to_string();
|
||||||
|
fs_err::write(
|
||||||
|
workspace.install_path().join("pyproject.toml"),
|
||||||
|
&workspace_content,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
AddTarget::Project(
|
||||||
|
VirtualProject::discover(
|
||||||
|
project.root(),
|
||||||
|
&DiscoveryOptions::default(),
|
||||||
|
&WorkspaceCache::default(),
|
||||||
|
)
|
||||||
|
.await?,
|
||||||
|
python_target,
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
AddTarget::Project(project, python_target)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
let mut toml = match &target {
|
let mut toml = match &target {
|
||||||
AddTarget::Script(script, _) => {
|
AddTarget::Script(script, _) => {
|
||||||
PyProjectTomlMut::from_toml(&script.metadata.raw, DependencyTarget::Script)
|
PyProjectTomlMut::from_toml(&script.metadata.raw, DependencyTarget::Script)
|
||||||
|
@ -501,6 +567,7 @@ pub(crate) async fn add(
|
||||||
DependencyTarget::PyProjectToml,
|
DependencyTarget::PyProjectToml,
|
||||||
),
|
),
|
||||||
}?;
|
}?;
|
||||||
|
|
||||||
let edits = edits(
|
let edits = edits(
|
||||||
requirements,
|
requirements,
|
||||||
&target,
|
&target,
|
||||||
|
@ -546,7 +613,7 @@ pub(crate) async fn add(
|
||||||
let content = toml.to_string();
|
let content = toml.to_string();
|
||||||
|
|
||||||
// Save the modified `pyproject.toml` or script.
|
// Save the modified `pyproject.toml` or script.
|
||||||
let modified = target.write(&content)?;
|
modified |= target.write(&content)?;
|
||||||
|
|
||||||
// If `--frozen`, exit early. There's no reason to lock and sync, since we don't need a `uv.lock`
|
// If `--frozen`, exit early. There's no reason to lock and sync, since we don't need a `uv.lock`
|
||||||
// to exist at all.
|
// to exist at all.
|
||||||
|
@ -566,9 +633,6 @@ pub(crate) async fn add(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Store the content prior to any modifications.
|
|
||||||
let snapshot = target.snapshot().await?;
|
|
||||||
|
|
||||||
// Update the `pypackage.toml` in-memory.
|
// Update the `pypackage.toml` in-memory.
|
||||||
let target = target.update(&content)?;
|
let target = target.update(&content)?;
|
||||||
|
|
||||||
|
@ -1299,6 +1363,16 @@ impl AddTargetSnapshot {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
Self::Project(project, lock) => {
|
Self::Project(project, lock) => {
|
||||||
|
// Write the workspace `pyproject.toml` back to disk.
|
||||||
|
let workspace = project.workspace();
|
||||||
|
if workspace.install_path() != project.root() {
|
||||||
|
debug!("Reverting changes to workspace `pyproject.toml`");
|
||||||
|
fs_err::write(
|
||||||
|
workspace.install_path().join("pyproject.toml"),
|
||||||
|
workspace.pyproject_toml().as_ref(),
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
|
||||||
// Write the `pyproject.toml` back to disk.
|
// Write the `pyproject.toml` back to disk.
|
||||||
debug!("Reverting changes to `pyproject.toml`");
|
debug!("Reverting changes to `pyproject.toml`");
|
||||||
fs_err::write(
|
fs_err::write(
|
||||||
|
|
|
@ -44,13 +44,15 @@ impl CachedEnvironment {
|
||||||
printer: Printer,
|
printer: Printer,
|
||||||
preview: PreviewMode,
|
preview: PreviewMode,
|
||||||
) -> Result<Self, ProjectError> {
|
) -> Result<Self, ProjectError> {
|
||||||
let interpreter = Self::base_interpreter(interpreter, cache)?;
|
// Resolve the "base" interpreter, which resolves to an underlying parent interpreter if the
|
||||||
|
// given interpreter is a virtual environment.
|
||||||
|
let base_interpreter = Self::base_interpreter(interpreter, cache)?;
|
||||||
|
|
||||||
// Resolve the requirements with the interpreter.
|
// Resolve the requirements with the interpreter.
|
||||||
let resolution = Resolution::from(
|
let resolution = Resolution::from(
|
||||||
resolve_environment(
|
resolve_environment(
|
||||||
spec,
|
spec,
|
||||||
&interpreter,
|
&base_interpreter,
|
||||||
build_constraints.clone(),
|
build_constraints.clone(),
|
||||||
&settings.resolver,
|
&settings.resolver,
|
||||||
network_settings,
|
network_settings,
|
||||||
|
@ -73,13 +75,34 @@ impl CachedEnvironment {
|
||||||
hash_digest(&distributions)
|
hash_digest(&distributions)
|
||||||
};
|
};
|
||||||
|
|
||||||
// Hash the interpreter based on its path.
|
// Construct a hash for the environment.
|
||||||
// TODO(charlie): Come up with a robust hash for the interpreter.
|
//
|
||||||
let interpreter_hash =
|
// Use the canonicalized base interpreter path since that's the interpreter we performed the
|
||||||
cache_digest(&canonicalize_executable(interpreter.sys_executable())?);
|
// resolution with and the interpreter the environment will be created with.
|
||||||
|
//
|
||||||
|
// We also include the canonicalized `sys.prefix` of the non-base interpreter, that is, the
|
||||||
|
// virtual environment's path. Originally, we shared cached environments independent of the
|
||||||
|
// environment they'd be layered on top of. However, this causes collisions as the overlay
|
||||||
|
// `.pth` file can be overridden by another instance of uv. Including this element in the key
|
||||||
|
// avoids this problem at the cost of creating separate cached environments for identical
|
||||||
|
// `--with` invocations across projects. We use `sys.prefix` rather than `sys.executable` so
|
||||||
|
// we can canonicalize it without invalidating the purpose of the element — it'd probably be
|
||||||
|
// safe to just use the absolute `sys.executable` as well.
|
||||||
|
//
|
||||||
|
// TODO(zanieb): Since we're not sharing these environmments across projects, we should move
|
||||||
|
// [`CachedEvnvironment::set_overlay`] etc. here since the values there should be constant
|
||||||
|
// now.
|
||||||
|
//
|
||||||
|
// TODO(zanieb): We should include the version of the base interpreter in the hash, so if
|
||||||
|
// the interpreter at the canonicalized path changes versions we construct a new
|
||||||
|
// environment.
|
||||||
|
let environment_hash = cache_digest(&(
|
||||||
|
&canonicalize_executable(base_interpreter.sys_executable())?,
|
||||||
|
&interpreter.sys_prefix().canonicalize()?,
|
||||||
|
));
|
||||||
|
|
||||||
// Search in the content-addressed cache.
|
// Search in the content-addressed cache.
|
||||||
let cache_entry = cache.entry(CacheBucket::Environments, interpreter_hash, resolution_hash);
|
let cache_entry = cache.entry(CacheBucket::Environments, environment_hash, resolution_hash);
|
||||||
|
|
||||||
if cache.refresh().is_none() {
|
if cache.refresh().is_none() {
|
||||||
if let Ok(root) = cache.resolve_link(cache_entry.path()) {
|
if let Ok(root) = cache.resolve_link(cache_entry.path()) {
|
||||||
|
@ -93,7 +116,7 @@ impl CachedEnvironment {
|
||||||
let temp_dir = cache.venv_dir()?;
|
let temp_dir = cache.venv_dir()?;
|
||||||
let venv = uv_virtualenv::create_venv(
|
let venv = uv_virtualenv::create_venv(
|
||||||
temp_dir.path(),
|
temp_dir.path(),
|
||||||
interpreter,
|
base_interpreter,
|
||||||
uv_virtualenv::Prompt::None,
|
uv_virtualenv::Prompt::None,
|
||||||
false,
|
false,
|
||||||
false,
|
false,
|
||||||
|
|
|
@ -593,16 +593,7 @@ async fn do_lock(
|
||||||
.keyring(*keyring_provider)
|
.keyring(*keyring_provider)
|
||||||
.allow_insecure_host(network_settings.allow_insecure_host.clone());
|
.allow_insecure_host(network_settings.allow_insecure_host.clone());
|
||||||
|
|
||||||
// Add all authenticated sources to the cache.
|
index_locations.cache_index_credentials();
|
||||||
for index in index_locations.allowed_indexes() {
|
|
||||||
if let Some(credentials) = index.credentials() {
|
|
||||||
let credentials = Arc::new(credentials);
|
|
||||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
|
||||||
if let Some(root_url) = index.root_url() {
|
|
||||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for index in target.indexes() {
|
for index in target.indexes() {
|
||||||
if let Some(credentials) = index.credentials() {
|
if let Some(credentials) = index.credentials() {
|
||||||
|
@ -942,7 +933,7 @@ impl ValidatedLock {
|
||||||
lock.prerelease_mode().cyan(),
|
lock.prerelease_mode().cyan(),
|
||||||
options.prerelease_mode.cyan()
|
options.prerelease_mode.cyan()
|
||||||
);
|
);
|
||||||
return Ok(Self::Unusable(lock));
|
return Ok(Self::Preferable(lock));
|
||||||
}
|
}
|
||||||
if lock.fork_strategy() != options.fork_strategy {
|
if lock.fork_strategy() != options.fork_strategy {
|
||||||
let _ = writeln!(
|
let _ = writeln!(
|
||||||
|
|
|
@ -25,7 +25,7 @@ use uv_fs::{CWD, LockedFile, Simplified};
|
||||||
use uv_git::ResolvedRepositoryReference;
|
use uv_git::ResolvedRepositoryReference;
|
||||||
use uv_installer::{SatisfiesResult, SitePackages};
|
use uv_installer::{SatisfiesResult, SitePackages};
|
||||||
use uv_normalize::{DEV_DEPENDENCIES, DefaultGroups, ExtraName, GroupName, PackageName};
|
use uv_normalize::{DEV_DEPENDENCIES, DefaultGroups, ExtraName, GroupName, PackageName};
|
||||||
use uv_pep440::{Version, VersionSpecifiers};
|
use uv_pep440::{TildeVersionSpecifier, Version, VersionSpecifiers};
|
||||||
use uv_pep508::MarkerTreeContents;
|
use uv_pep508::MarkerTreeContents;
|
||||||
use uv_pypi_types::{ConflictPackage, ConflictSet, Conflicts};
|
use uv_pypi_types::{ConflictPackage, ConflictSet, Conflicts};
|
||||||
use uv_python::{
|
use uv_python::{
|
||||||
|
@ -421,6 +421,30 @@ pub(crate) fn find_requires_python(
|
||||||
if requires_python.is_empty() {
|
if requires_python.is_empty() {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
}
|
}
|
||||||
|
for ((package, group), specifiers) in &requires_python {
|
||||||
|
if let [spec] = &specifiers[..] {
|
||||||
|
if let Some(spec) = TildeVersionSpecifier::from_specifier_ref(spec) {
|
||||||
|
if spec.has_patch() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
let (lower, upper) = spec.bounding_specifiers();
|
||||||
|
let spec_0 = spec.with_patch_version(0);
|
||||||
|
let (lower_0, upper_0) = spec_0.bounding_specifiers();
|
||||||
|
warn_user_once!(
|
||||||
|
"The `requires-python` specifier (`{spec}`) in `{package}{group}` \
|
||||||
|
uses the tilde specifier (`~=`) without a patch version. This will be \
|
||||||
|
interpreted as `{lower}, {upper}`. Did you mean `{spec_0}` to constrain the \
|
||||||
|
version as `{lower_0}, {upper_0}`? We recommend only using \
|
||||||
|
the tilde specifier with a patch version to avoid ambiguity.",
|
||||||
|
group = if let Some(group) = group {
|
||||||
|
format!(":{group}")
|
||||||
|
} else {
|
||||||
|
String::new()
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
match RequiresPython::intersection(requires_python.iter().map(|(.., specifiers)| specifiers)) {
|
match RequiresPython::intersection(requires_python.iter().map(|(.., specifiers)| specifiers)) {
|
||||||
Some(requires_python) => Ok(Some(requires_python)),
|
Some(requires_python) => Ok(Some(requires_python)),
|
||||||
None => Err(ProjectError::DisjointRequiresPython(requires_python)),
|
None => Err(ProjectError::DisjointRequiresPython(requires_python)),
|
||||||
|
@ -1220,7 +1244,12 @@ impl ProjectEnvironment {
|
||||||
preview: PreviewMode,
|
preview: PreviewMode,
|
||||||
) -> Result<Self, ProjectError> {
|
) -> Result<Self, ProjectError> {
|
||||||
// Lock the project environment to avoid synchronization issues.
|
// Lock the project environment to avoid synchronization issues.
|
||||||
let _lock = ProjectInterpreter::lock(workspace).await?;
|
let _lock = ProjectInterpreter::lock(workspace)
|
||||||
|
.await
|
||||||
|
.inspect_err(|err| {
|
||||||
|
warn!("Failed to acquire project environment lock: {err}");
|
||||||
|
})
|
||||||
|
.ok();
|
||||||
|
|
||||||
let upgradeable = preview.is_enabled()
|
let upgradeable = preview.is_enabled()
|
||||||
&& python
|
&& python
|
||||||
|
@ -1438,7 +1467,13 @@ impl ScriptEnvironment {
|
||||||
preview: PreviewMode,
|
preview: PreviewMode,
|
||||||
) -> Result<Self, ProjectError> {
|
) -> Result<Self, ProjectError> {
|
||||||
// Lock the script environment to avoid synchronization issues.
|
// Lock the script environment to avoid synchronization issues.
|
||||||
let _lock = ScriptInterpreter::lock(script).await?;
|
let _lock = ScriptInterpreter::lock(script)
|
||||||
|
.await
|
||||||
|
.inspect_err(|err| {
|
||||||
|
warn!("Failed to acquire script environment lock: {err}");
|
||||||
|
})
|
||||||
|
.ok();
|
||||||
|
|
||||||
let upgradeable = python_request
|
let upgradeable = python_request
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.is_none_or(|request| !request.includes_patch());
|
.is_none_or(|request| !request.includes_patch());
|
||||||
|
@ -1626,16 +1661,7 @@ pub(crate) async fn resolve_names(
|
||||||
.keyring(*keyring_provider)
|
.keyring(*keyring_provider)
|
||||||
.allow_insecure_host(network_settings.allow_insecure_host.clone());
|
.allow_insecure_host(network_settings.allow_insecure_host.clone());
|
||||||
|
|
||||||
// Add all authenticated sources to the cache.
|
index_locations.cache_index_credentials();
|
||||||
for index in index_locations.allowed_indexes() {
|
|
||||||
if let Some(credentials) = index.credentials() {
|
|
||||||
let credentials = Arc::new(credentials);
|
|
||||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
|
||||||
if let Some(root_url) = index.root_url() {
|
|
||||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Initialize the registry client.
|
// Initialize the registry client.
|
||||||
let client = RegistryClientBuilder::try_from(client_builder)?
|
let client = RegistryClientBuilder::try_from(client_builder)?
|
||||||
|
@ -1704,7 +1730,7 @@ pub(crate) async fn resolve_names(
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub(crate) enum PreferenceSource<'lock> {
|
pub(crate) enum PreferenceLocation<'lock> {
|
||||||
/// The preferences should be extracted from a lockfile.
|
/// The preferences should be extracted from a lockfile.
|
||||||
Lock {
|
Lock {
|
||||||
lock: &'lock Lock,
|
lock: &'lock Lock,
|
||||||
|
@ -1719,7 +1745,7 @@ pub(crate) struct EnvironmentSpecification<'lock> {
|
||||||
/// The requirements to include in the environment.
|
/// The requirements to include in the environment.
|
||||||
requirements: RequirementsSpecification,
|
requirements: RequirementsSpecification,
|
||||||
/// The preferences to respect when resolving.
|
/// The preferences to respect when resolving.
|
||||||
preferences: Option<PreferenceSource<'lock>>,
|
preferences: Option<PreferenceLocation<'lock>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<RequirementsSpecification> for EnvironmentSpecification<'_> {
|
impl From<RequirementsSpecification> for EnvironmentSpecification<'_> {
|
||||||
|
@ -1732,9 +1758,9 @@ impl From<RequirementsSpecification> for EnvironmentSpecification<'_> {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'lock> EnvironmentSpecification<'lock> {
|
impl<'lock> EnvironmentSpecification<'lock> {
|
||||||
/// Set the [`PreferenceSource`] for the specification.
|
/// Set the [`PreferenceLocation`] for the specification.
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub(crate) fn with_preferences(self, preferences: PreferenceSource<'lock>) -> Self {
|
pub(crate) fn with_preferences(self, preferences: PreferenceLocation<'lock>) -> Self {
|
||||||
Self {
|
Self {
|
||||||
preferences: Some(preferences),
|
preferences: Some(preferences),
|
||||||
..self
|
..self
|
||||||
|
@ -1797,16 +1823,7 @@ pub(crate) async fn resolve_environment(
|
||||||
let marker_env = interpreter.resolver_marker_environment();
|
let marker_env = interpreter.resolver_marker_environment();
|
||||||
let python_requirement = PythonRequirement::from_interpreter(interpreter);
|
let python_requirement = PythonRequirement::from_interpreter(interpreter);
|
||||||
|
|
||||||
// Add all authenticated sources to the cache.
|
index_locations.cache_index_credentials();
|
||||||
for index in index_locations.allowed_indexes() {
|
|
||||||
if let Some(credentials) = index.credentials() {
|
|
||||||
let credentials = Arc::new(credentials);
|
|
||||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
|
||||||
if let Some(root_url) = index.root_url() {
|
|
||||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Initialize the registry client.
|
// Initialize the registry client.
|
||||||
let client = RegistryClientBuilder::try_from(client_builder)?
|
let client = RegistryClientBuilder::try_from(client_builder)?
|
||||||
|
@ -1852,7 +1869,7 @@ pub(crate) async fn resolve_environment(
|
||||||
|
|
||||||
// If an existing lockfile exists, build up a set of preferences.
|
// If an existing lockfile exists, build up a set of preferences.
|
||||||
let preferences = match spec.preferences {
|
let preferences = match spec.preferences {
|
||||||
Some(PreferenceSource::Lock { lock, install_path }) => {
|
Some(PreferenceLocation::Lock { lock, install_path }) => {
|
||||||
let LockedRequirements { preferences, git } =
|
let LockedRequirements { preferences, git } =
|
||||||
read_lock_requirements(lock, install_path, &upgrade)?;
|
read_lock_requirements(lock, install_path, &upgrade)?;
|
||||||
|
|
||||||
|
@ -1864,7 +1881,7 @@ pub(crate) async fn resolve_environment(
|
||||||
|
|
||||||
preferences
|
preferences
|
||||||
}
|
}
|
||||||
Some(PreferenceSource::Entries(entries)) => entries,
|
Some(PreferenceLocation::Entries(entries)) => entries,
|
||||||
None => vec![],
|
None => vec![],
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -1978,16 +1995,7 @@ pub(crate) async fn sync_environment(
|
||||||
let interpreter = venv.interpreter();
|
let interpreter = venv.interpreter();
|
||||||
let tags = venv.interpreter().tags()?;
|
let tags = venv.interpreter().tags()?;
|
||||||
|
|
||||||
// Add all authenticated sources to the cache.
|
index_locations.cache_index_credentials();
|
||||||
for index in index_locations.allowed_indexes() {
|
|
||||||
if let Some(credentials) = index.credentials() {
|
|
||||||
let credentials = Arc::new(credentials);
|
|
||||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
|
||||||
if let Some(root_url) = index.root_url() {
|
|
||||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Initialize the registry client.
|
// Initialize the registry client.
|
||||||
let client = RegistryClientBuilder::try_from(client_builder)?
|
let client = RegistryClientBuilder::try_from(client_builder)?
|
||||||
|
@ -2193,16 +2201,7 @@ pub(crate) async fn update_environment(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add all authenticated sources to the cache.
|
index_locations.cache_index_credentials();
|
||||||
for index in index_locations.allowed_indexes() {
|
|
||||||
if let Some(credentials) = index.credentials() {
|
|
||||||
let credentials = Arc::new(credentials);
|
|
||||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
|
||||||
if let Some(root_url) = index.root_url() {
|
|
||||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Initialize the registry client.
|
// Initialize the registry client.
|
||||||
let client = RegistryClientBuilder::try_from(client_builder)?
|
let client = RegistryClientBuilder::try_from(client_builder)?
|
||||||
|
|
|
@ -5,7 +5,7 @@ use std::str::FromStr;
|
||||||
|
|
||||||
use anyhow::{Context, Result};
|
use anyhow::{Context, Result};
|
||||||
use owo_colors::OwoColorize;
|
use owo_colors::OwoColorize;
|
||||||
use tracing::debug;
|
use tracing::{debug, warn};
|
||||||
|
|
||||||
use uv_cache::Cache;
|
use uv_cache::Cache;
|
||||||
use uv_configuration::{
|
use uv_configuration::{
|
||||||
|
@ -281,7 +281,13 @@ pub(crate) async fn remove(
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let _lock = target.acquire_lock().await?;
|
let _lock = target
|
||||||
|
.acquire_lock()
|
||||||
|
.await
|
||||||
|
.inspect_err(|err| {
|
||||||
|
warn!("Failed to acquire environment lock: {err}");
|
||||||
|
})
|
||||||
|
.ok();
|
||||||
|
|
||||||
// Determine the lock mode.
|
// Determine the lock mode.
|
||||||
let mode = if locked {
|
let mode = if locked {
|
||||||
|
|
|
@ -49,7 +49,7 @@ use crate::commands::project::install_target::InstallTarget;
|
||||||
use crate::commands::project::lock::LockMode;
|
use crate::commands::project::lock::LockMode;
|
||||||
use crate::commands::project::lock_target::LockTarget;
|
use crate::commands::project::lock_target::LockTarget;
|
||||||
use crate::commands::project::{
|
use crate::commands::project::{
|
||||||
EnvironmentSpecification, PreferenceSource, ProjectEnvironment, ProjectError,
|
EnvironmentSpecification, PreferenceLocation, ProjectEnvironment, ProjectError,
|
||||||
ScriptEnvironment, ScriptInterpreter, UniversalState, WorkspacePython,
|
ScriptEnvironment, ScriptInterpreter, UniversalState, WorkspacePython,
|
||||||
default_dependency_groups, script_specification, update_environment,
|
default_dependency_groups, script_specification, update_environment,
|
||||||
validate_project_requires_python,
|
validate_project_requires_python,
|
||||||
|
@ -240,7 +240,13 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
|
||||||
.await?
|
.await?
|
||||||
.into_environment()?;
|
.into_environment()?;
|
||||||
|
|
||||||
let _lock = environment.lock().await?;
|
let _lock = environment
|
||||||
|
.lock()
|
||||||
|
.await
|
||||||
|
.inspect_err(|err| {
|
||||||
|
warn!("Failed to acquire environment lock: {err}");
|
||||||
|
})
|
||||||
|
.ok();
|
||||||
|
|
||||||
// Determine the lock mode.
|
// Determine the lock mode.
|
||||||
let mode = if frozen {
|
let mode = if frozen {
|
||||||
|
@ -386,7 +392,13 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
|
||||||
)
|
)
|
||||||
});
|
});
|
||||||
|
|
||||||
let _lock = environment.lock().await?;
|
let _lock = environment
|
||||||
|
.lock()
|
||||||
|
.await
|
||||||
|
.inspect_err(|err| {
|
||||||
|
warn!("Failed to acquire environment lock: {err}");
|
||||||
|
})
|
||||||
|
.ok();
|
||||||
|
|
||||||
match update_environment(
|
match update_environment(
|
||||||
environment,
|
environment,
|
||||||
|
@ -699,7 +711,13 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
|
||||||
.map(|lock| (lock, project.workspace().install_path().to_owned()));
|
.map(|lock| (lock, project.workspace().install_path().to_owned()));
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
let _lock = venv.lock().await?;
|
let _lock = venv
|
||||||
|
.lock()
|
||||||
|
.await
|
||||||
|
.inspect_err(|err| {
|
||||||
|
warn!("Failed to acquire environment lock: {err}");
|
||||||
|
})
|
||||||
|
.ok();
|
||||||
|
|
||||||
// Determine the lock mode.
|
// Determine the lock mode.
|
||||||
let mode = if frozen {
|
let mode = if frozen {
|
||||||
|
@ -940,10 +958,10 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
|
||||||
let spec = EnvironmentSpecification::from(spec).with_preferences(
|
let spec = EnvironmentSpecification::from(spec).with_preferences(
|
||||||
if let Some((lock, install_path)) = base_lock.as_ref() {
|
if let Some((lock, install_path)) = base_lock.as_ref() {
|
||||||
// If we have a lockfile, use the locked versions as preferences.
|
// If we have a lockfile, use the locked versions as preferences.
|
||||||
PreferenceSource::Lock { lock, install_path }
|
PreferenceLocation::Lock { lock, install_path }
|
||||||
} else {
|
} else {
|
||||||
// Otherwise, extract preferences from the base environment.
|
// Otherwise, extract preferences from the base environment.
|
||||||
PreferenceSource::Entries(
|
PreferenceLocation::Entries(
|
||||||
base_site_packages
|
base_site_packages
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(Preference::from_installed)
|
.filter_map(Preference::from_installed)
|
||||||
|
|
|
@ -6,6 +6,7 @@ use std::sync::Arc;
|
||||||
use anyhow::{Context, Result};
|
use anyhow::{Context, Result};
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use owo_colors::OwoColorize;
|
use owo_colors::OwoColorize;
|
||||||
|
use tracing::warn;
|
||||||
|
|
||||||
use uv_cache::Cache;
|
use uv_cache::Cache;
|
||||||
use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder};
|
use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder};
|
||||||
|
@ -169,7 +170,13 @@ pub(crate) async fn sync(
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
|
|
||||||
let _lock = environment.lock().await?;
|
let _lock = environment
|
||||||
|
.lock()
|
||||||
|
.await
|
||||||
|
.inspect_err(|err| {
|
||||||
|
warn!("Failed to acquire environment lock: {err}");
|
||||||
|
})
|
||||||
|
.ok();
|
||||||
|
|
||||||
// Notify the user of any environment changes.
|
// Notify the user of any environment changes.
|
||||||
match &environment {
|
match &environment {
|
||||||
|
@ -682,16 +689,7 @@ pub(super) async fn do_sync(
|
||||||
// If necessary, convert editable to non-editable distributions.
|
// If necessary, convert editable to non-editable distributions.
|
||||||
let resolution = apply_editable_mode(resolution, editable);
|
let resolution = apply_editable_mode(resolution, editable);
|
||||||
|
|
||||||
// Add all authenticated sources to the cache.
|
index_locations.cache_index_credentials();
|
||||||
for index in index_locations.allowed_indexes() {
|
|
||||||
if let Some(credentials) = index.credentials() {
|
|
||||||
let credentials = Arc::new(credentials);
|
|
||||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
|
||||||
if let Some(root_url) = index.root_url() {
|
|
||||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Populate credentials from the target.
|
// Populate credentials from the target.
|
||||||
store_credentials_from_target(target);
|
store_credentials_from_target(target);
|
||||||
|
|
|
@ -385,7 +385,7 @@ async fn lock_and_sync(
|
||||||
let default_groups = default_dependency_groups(project.pyproject_toml())?;
|
let default_groups = default_dependency_groups(project.pyproject_toml())?;
|
||||||
let default_extras = DefaultExtras::default();
|
let default_extras = DefaultExtras::default();
|
||||||
let groups = DependencyGroups::default().with_defaults(default_groups);
|
let groups = DependencyGroups::default().with_defaults(default_groups);
|
||||||
let extras = ExtrasSpecification::from_all_extras().with_defaults(default_extras);
|
let extras = ExtrasSpecification::default().with_defaults(default_extras);
|
||||||
let install_options = InstallOptions::default();
|
let install_options = InstallOptions::default();
|
||||||
|
|
||||||
// Convert to an `AddTarget` by attaching the appropriate interpreter or environment.
|
// Convert to an `AddTarget` by attaching the appropriate interpreter or environment.
|
||||||
|
|
|
@ -218,7 +218,7 @@ pub(crate) fn finalize_tool_install(
|
||||||
if target_entry_points.is_empty() {
|
if target_entry_points.is_empty() {
|
||||||
writeln!(
|
writeln!(
|
||||||
printer.stdout(),
|
printer.stdout(),
|
||||||
"No executables are provided by `{from}`",
|
"No executables are provided by package `{from}`; removing tool",
|
||||||
from = name.cyan()
|
from = name.cyan()
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
|
@ -354,7 +354,9 @@ fn hint_executable_from_dependency(
|
||||||
let command = format!("uv tool install {}", package.name());
|
let command = format!("uv tool install {}", package.name());
|
||||||
writeln!(
|
writeln!(
|
||||||
printer.stdout(),
|
printer.stdout(),
|
||||||
"However, an executable with the name `{}` is available via dependency `{}`.\nDid you mean `{}`?",
|
"{}{} An executable with the name `{}` is available via dependency `{}`.\n Did you mean `{}`?",
|
||||||
|
"hint".bold().cyan(),
|
||||||
|
":".bold(),
|
||||||
name.cyan(),
|
name.cyan(),
|
||||||
package.name().cyan(),
|
package.name().cyan(),
|
||||||
command.bold(),
|
command.bold(),
|
||||||
|
@ -363,7 +365,9 @@ fn hint_executable_from_dependency(
|
||||||
packages => {
|
packages => {
|
||||||
writeln!(
|
writeln!(
|
||||||
printer.stdout(),
|
printer.stdout(),
|
||||||
"However, an executable with the name `{}` is available via the following dependencies::",
|
"{}{} An executable with the name `{}` is available via the following dependencies::",
|
||||||
|
"hint".bold().cyan(),
|
||||||
|
":".bold(),
|
||||||
name.cyan(),
|
name.cyan(),
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
|
|
|
@ -242,16 +242,7 @@ async fn venv_impl(
|
||||||
python.into_interpreter()
|
python.into_interpreter()
|
||||||
};
|
};
|
||||||
|
|
||||||
// Add all authenticated sources to the cache.
|
index_locations.cache_index_credentials();
|
||||||
for index in index_locations.allowed_indexes() {
|
|
||||||
if let Some(credentials) = index.credentials() {
|
|
||||||
let credentials = Arc::new(credentials);
|
|
||||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
|
||||||
if let Some(root_url) = index.root_url() {
|
|
||||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if the discovered Python version is incompatible with the current workspace
|
// Check if the discovered Python version is incompatible with the current workspace
|
||||||
if let Some(requires_python) = requires_python {
|
if let Some(requires_python) = requires_python {
|
||||||
|
|
|
@ -1965,6 +1965,7 @@ async fn run_project(
|
||||||
args.extras,
|
args.extras,
|
||||||
args.package,
|
args.package,
|
||||||
args.python,
|
args.python,
|
||||||
|
args.workspace,
|
||||||
args.install_mirrors,
|
args.install_mirrors,
|
||||||
args.settings,
|
args.settings,
|
||||||
globals.network_settings,
|
globals.network_settings,
|
||||||
|
|
|
@ -1326,6 +1326,7 @@ pub(crate) struct AddSettings {
|
||||||
pub(crate) package: Option<PackageName>,
|
pub(crate) package: Option<PackageName>,
|
||||||
pub(crate) script: Option<PathBuf>,
|
pub(crate) script: Option<PathBuf>,
|
||||||
pub(crate) python: Option<String>,
|
pub(crate) python: Option<String>,
|
||||||
|
pub(crate) workspace: bool,
|
||||||
pub(crate) install_mirrors: PythonInstallMirrors,
|
pub(crate) install_mirrors: PythonInstallMirrors,
|
||||||
pub(crate) refresh: Refresh,
|
pub(crate) refresh: Refresh,
|
||||||
pub(crate) indexes: Vec<Index>,
|
pub(crate) indexes: Vec<Index>,
|
||||||
|
@ -1363,6 +1364,7 @@ impl AddSettings {
|
||||||
package,
|
package,
|
||||||
script,
|
script,
|
||||||
python,
|
python,
|
||||||
|
workspace,
|
||||||
} = args;
|
} = args;
|
||||||
|
|
||||||
let dependency_type = if let Some(extra) = optional {
|
let dependency_type = if let Some(extra) = optional {
|
||||||
|
@ -1463,6 +1465,7 @@ impl AddSettings {
|
||||||
package,
|
package,
|
||||||
script,
|
script,
|
||||||
python: python.and_then(Maybe::into_option),
|
python: python.and_then(Maybe::into_option),
|
||||||
|
workspace,
|
||||||
editable: flag(editable, no_editable, "editable"),
|
editable: flag(editable, no_editable, "editable"),
|
||||||
extras: extra.unwrap_or_default(),
|
extras: extra.unwrap_or_default(),
|
||||||
refresh: Refresh::from(refresh),
|
refresh: Refresh::from(refresh),
|
||||||
|
|
|
@ -15,7 +15,7 @@ fn build_basic() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
.chain([(r"\\\.", "")])
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let project = context.temp_dir.child("project");
|
let project = context.temp_dir.child("project");
|
||||||
|
@ -133,7 +133,7 @@ fn build_sdist() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
.chain([(r"\\\.", "")])
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let project = context.temp_dir.child("project");
|
let project = context.temp_dir.child("project");
|
||||||
|
@ -189,7 +189,7 @@ fn build_wheel() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
.chain([(r"\\\.", "")])
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let project = context.temp_dir.child("project");
|
let project = context.temp_dir.child("project");
|
||||||
|
@ -245,7 +245,7 @@ fn build_sdist_wheel() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
.chain([(r"\\\.", "")])
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let project = context.temp_dir.child("project");
|
let project = context.temp_dir.child("project");
|
||||||
|
@ -303,7 +303,7 @@ fn build_wheel_from_sdist() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
.chain([(r"\\\.", "")])
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let project = context.temp_dir.child("project");
|
let project = context.temp_dir.child("project");
|
||||||
|
@ -412,7 +412,7 @@ fn build_fail() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
.chain([(r"\\\.", "")])
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let project = context.temp_dir.child("project");
|
let project = context.temp_dir.child("project");
|
||||||
|
@ -488,7 +488,6 @@ fn build_workspace() -> Result<()> {
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([
|
.chain([
|
||||||
(r"exit code: 1", "exit status: 1"),
|
|
||||||
(r"\\\.", ""),
|
(r"\\\.", ""),
|
||||||
(r"\[project\]", "[PKG]"),
|
(r"\[project\]", "[PKG]"),
|
||||||
(r"\[member\]", "[PKG]"),
|
(r"\[member\]", "[PKG]"),
|
||||||
|
@ -694,7 +693,6 @@ fn build_all_with_failure() -> Result<()> {
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([
|
.chain([
|
||||||
(r"exit code: 1", "exit status: 1"),
|
|
||||||
(r"\\\.", ""),
|
(r"\\\.", ""),
|
||||||
(r"\[project\]", "[PKG]"),
|
(r"\[project\]", "[PKG]"),
|
||||||
(r"\[member-\w+\]", "[PKG]"),
|
(r"\[member-\w+\]", "[PKG]"),
|
||||||
|
@ -840,7 +838,7 @@ fn build_constraints() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
.chain([(r"\\\.", "")])
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let project = context.temp_dir.child("project");
|
let project = context.temp_dir.child("project");
|
||||||
|
@ -901,7 +899,7 @@ fn build_sha() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
.chain([(r"\\\.", "")])
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let project = context.temp_dir.child("project");
|
let project = context.temp_dir.child("project");
|
||||||
|
@ -1187,7 +1185,7 @@ fn build_tool_uv_sources() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
.chain([(r"\\\.", "")])
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let build = context.temp_dir.child("backend");
|
let build = context.temp_dir.child("backend");
|
||||||
|
@ -1337,7 +1335,6 @@ fn build_non_package() -> Result<()> {
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([
|
.chain([
|
||||||
(r"exit code: 1", "exit status: 1"),
|
|
||||||
(r"\\\.", ""),
|
(r"\\\.", ""),
|
||||||
(r"\[project\]", "[PKG]"),
|
(r"\[project\]", "[PKG]"),
|
||||||
(r"\[member\]", "[PKG]"),
|
(r"\[member\]", "[PKG]"),
|
||||||
|
@ -1930,7 +1927,7 @@ fn build_with_nonnormalized_name() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
.chain([(r"\\\.", "")])
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let project = context.temp_dir.child("project");
|
let project = context.temp_dir.child("project");
|
||||||
|
@ -1981,3 +1978,60 @@ fn build_with_nonnormalized_name() -> Result<()> {
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Check that `--force-pep517` is respected.
|
||||||
|
///
|
||||||
|
/// The error messages for a broken project are different for direct builds vs. PEP 517.
|
||||||
|
#[test]
|
||||||
|
fn force_pep517() -> Result<()> {
|
||||||
|
// We need to use a real `uv_build` package.
|
||||||
|
let context = TestContext::new("3.12").with_exclude_newer("2025-05-27T00:00:00Z");
|
||||||
|
|
||||||
|
context
|
||||||
|
.init()
|
||||||
|
.arg("--build-backend")
|
||||||
|
.arg("uv")
|
||||||
|
.assert()
|
||||||
|
.success();
|
||||||
|
|
||||||
|
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||||
|
pyproject_toml.write_str(indoc! {r#"
|
||||||
|
[project]
|
||||||
|
name = "project"
|
||||||
|
version = "1.0.0"
|
||||||
|
|
||||||
|
[tool.uv.build-backend]
|
||||||
|
module-name = "does_not_exist"
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["uv_build>=0.5.15,<10000"]
|
||||||
|
build-backend = "uv_build"
|
||||||
|
"#})?;
|
||||||
|
|
||||||
|
uv_snapshot!(context.filters(), context.build().env("RUST_BACKTRACE", "0"), @r"
|
||||||
|
success: false
|
||||||
|
exit_code: 2
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Building source distribution (uv build backend)...
|
||||||
|
× Failed to build `[TEMP_DIR]/`
|
||||||
|
╰─▶ Expected a Python module at: `src/does_not_exist/__init__.py`
|
||||||
|
");
|
||||||
|
|
||||||
|
uv_snapshot!(context.filters(), context.build().arg("--force-pep517").env("RUST_BACKTRACE", "0"), @r"
|
||||||
|
success: false
|
||||||
|
exit_code: 2
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Building source distribution...
|
||||||
|
Error: Missing module directory for `does_not_exist` in `src`. Found: `temp`
|
||||||
|
× Failed to build `[TEMP_DIR]/`
|
||||||
|
├─▶ The build backend returned an error
|
||||||
|
╰─▶ Call to `uv_build.build_sdist` failed (exit status: 1)
|
||||||
|
hint: This usually indicates a problem with the package or the build environment.
|
||||||
|
");
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
|
@ -224,7 +224,6 @@ fn preserve_executable_bit() -> Result<()> {
|
||||||
.init()
|
.init()
|
||||||
.arg("--build-backend")
|
.arg("--build-backend")
|
||||||
.arg("uv")
|
.arg("uv")
|
||||||
.arg("--preview")
|
|
||||||
.arg(&project_dir)
|
.arg(&project_dir)
|
||||||
.assert()
|
.assert()
|
||||||
.success();
|
.success();
|
||||||
|
@ -316,8 +315,7 @@ fn rename_module() -> Result<()> {
|
||||||
uv_snapshot!(context
|
uv_snapshot!(context
|
||||||
.build_backend()
|
.build_backend()
|
||||||
.arg("build-wheel")
|
.arg("build-wheel")
|
||||||
.arg(temp_dir.path())
|
.arg(temp_dir.path()), @r###"
|
||||||
.env("UV_PREVIEW", "1"), @r###"
|
|
||||||
success: true
|
success: true
|
||||||
exit_code: 0
|
exit_code: 0
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
@ -391,8 +389,7 @@ fn rename_module_editable_build() -> Result<()> {
|
||||||
uv_snapshot!(context
|
uv_snapshot!(context
|
||||||
.build_backend()
|
.build_backend()
|
||||||
.arg("build-editable")
|
.arg("build-editable")
|
||||||
.arg(temp_dir.path())
|
.arg(temp_dir.path()), @r###"
|
||||||
.env("UV_PREVIEW", "1"), @r###"
|
|
||||||
success: true
|
success: true
|
||||||
exit_code: 0
|
exit_code: 0
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
@ -568,8 +565,7 @@ fn build_sdist_with_long_path() -> Result<()> {
|
||||||
uv_snapshot!(context
|
uv_snapshot!(context
|
||||||
.build_backend()
|
.build_backend()
|
||||||
.arg("build-sdist")
|
.arg("build-sdist")
|
||||||
.arg(temp_dir.path())
|
.arg(temp_dir.path()), @r###"
|
||||||
.env("UV_PREVIEW", "1"), @r###"
|
|
||||||
success: true
|
success: true
|
||||||
exit_code: 0
|
exit_code: 0
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
@ -602,8 +598,7 @@ fn sdist_error_without_module() -> Result<()> {
|
||||||
uv_snapshot!(context
|
uv_snapshot!(context
|
||||||
.build_backend()
|
.build_backend()
|
||||||
.arg("build-sdist")
|
.arg("build-sdist")
|
||||||
.arg(temp_dir.path())
|
.arg(temp_dir.path()), @r"
|
||||||
.env("UV_PREVIEW", "1"), @r"
|
|
||||||
success: false
|
success: false
|
||||||
exit_code: 2
|
exit_code: 2
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
@ -617,8 +612,7 @@ fn sdist_error_without_module() -> Result<()> {
|
||||||
uv_snapshot!(context
|
uv_snapshot!(context
|
||||||
.build_backend()
|
.build_backend()
|
||||||
.arg("build-sdist")
|
.arg("build-sdist")
|
||||||
.arg(temp_dir.path())
|
.arg(temp_dir.path()), @r"
|
||||||
.env("UV_PREVIEW", "1"), @r"
|
|
||||||
success: false
|
success: false
|
||||||
exit_code: 2
|
exit_code: 2
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
@ -682,7 +676,6 @@ fn complex_namespace_packages() -> Result<()> {
|
||||||
|
|
||||||
context
|
context
|
||||||
.build()
|
.build()
|
||||||
.arg("--preview")
|
|
||||||
.arg(project.path())
|
.arg(project.path())
|
||||||
.arg("--out-dir")
|
.arg("--out-dir")
|
||||||
.arg(dist.path())
|
.arg(dist.path())
|
||||||
|
@ -731,7 +724,6 @@ fn complex_namespace_packages() -> Result<()> {
|
||||||
context.filters(),
|
context.filters(),
|
||||||
context
|
context
|
||||||
.pip_install()
|
.pip_install()
|
||||||
.arg("--preview")
|
|
||||||
.arg("-e")
|
.arg("-e")
|
||||||
.arg("complex-project-part_a")
|
.arg("complex-project-part_a")
|
||||||
.arg("-e")
|
.arg("-e")
|
||||||
|
@ -778,7 +770,6 @@ fn symlinked_file() -> Result<()> {
|
||||||
let project = context.temp_dir.child("project");
|
let project = context.temp_dir.child("project");
|
||||||
context
|
context
|
||||||
.init()
|
.init()
|
||||||
.arg("--preview")
|
|
||||||
.arg("--build-backend")
|
.arg("--build-backend")
|
||||||
.arg("uv")
|
.arg("uv")
|
||||||
.arg(project.path())
|
.arg(project.path())
|
||||||
|
|
|
@ -517,6 +517,8 @@ impl TestContext {
|
||||||
if cfg!(windows) {
|
if cfg!(windows) {
|
||||||
filters.push((" --link-mode <LINK_MODE>".to_string(), String::new()));
|
filters.push((" --link-mode <LINK_MODE>".to_string(), String::new()));
|
||||||
filters.push((r#"link-mode = "copy"\n"#.to_string(), String::new()));
|
filters.push((r#"link-mode = "copy"\n"#.to_string(), String::new()));
|
||||||
|
// Unix uses "exit status", Windows uses "exit code"
|
||||||
|
filters.push((r"exit code: ".to_string(), "exit status: ".to_string()));
|
||||||
}
|
}
|
||||||
|
|
||||||
filters.extend(
|
filters.extend(
|
||||||
|
|
|
@ -4374,7 +4374,7 @@ fn add_lower_bound_local() -> Result<()> {
|
||||||
filters => context.filters(),
|
filters => context.filters(),
|
||||||
}, {
|
}, {
|
||||||
assert_snapshot!(
|
assert_snapshot!(
|
||||||
pyproject_toml, @r#"
|
pyproject_toml, @r###"
|
||||||
[project]
|
[project]
|
||||||
name = "project"
|
name = "project"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
|
@ -4384,8 +4384,8 @@ fn add_lower_bound_local() -> Result<()> {
|
||||||
]
|
]
|
||||||
|
|
||||||
[[tool.uv.index]]
|
[[tool.uv.index]]
|
||||||
url = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html"
|
url = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/"
|
||||||
"#
|
"###
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -4403,7 +4403,7 @@ fn add_lower_bound_local() -> Result<()> {
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "local-simple-a"
|
name = "local-simple-a"
|
||||||
version = "1.2.3+foo"
|
version = "1.2.3+foo"
|
||||||
source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }
|
source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
|
||||||
sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/local_simple_a-1.2.3+foo.tar.gz", hash = "sha256:ebd55c4a79d0a5759126657cb289ff97558902abcfb142e036b993781497edac" }
|
sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/local_simple_a-1.2.3+foo.tar.gz", hash = "sha256:ebd55c4a79d0a5759126657cb289ff97558902abcfb142e036b993781497edac" }
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/local_simple_a-1.2.3+foo-py3-none-any.whl", hash = "sha256:6f30e2e709b3e171cd734bb58705229a582587c29e0a7041227435583c7224cc" },
|
{ url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/local_simple_a-1.2.3+foo-py3-none-any.whl", hash = "sha256:6f30e2e709b3e171cd734bb58705229a582587c29e0a7041227435583c7224cc" },
|
||||||
|
@ -7210,6 +7210,7 @@ fn remove_include_default_groups() -> Result<()> {
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Revert changes to the `pyproject.toml` and `uv.lock` when the `add` operation fails.
|
/// Revert changes to the `pyproject.toml` and `uv.lock` when the `add` operation fails.
|
||||||
#[test]
|
#[test]
|
||||||
fn fail_to_add_revert_project() -> Result<()> {
|
fn fail_to_add_revert_project() -> Result<()> {
|
||||||
|
@ -7246,10 +7247,7 @@ fn fail_to_add_revert_project() -> Result<()> {
|
||||||
.child("setup.py")
|
.child("setup.py")
|
||||||
.write_str("1/0")?;
|
.write_str("1/0")?;
|
||||||
|
|
||||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
uv_snapshot!(context.filters(), context.add().arg("./child"), @r#"
|
||||||
.chain(context.filters())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
uv_snapshot!(filters, context.add().arg("./child"), @r#"
|
|
||||||
success: false
|
success: false
|
||||||
exit_code: 1
|
exit_code: 1
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
@ -7351,10 +7349,7 @@ fn fail_to_edit_revert_project() -> Result<()> {
|
||||||
.child("setup.py")
|
.child("setup.py")
|
||||||
.write_str("1/0")?;
|
.write_str("1/0")?;
|
||||||
|
|
||||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
uv_snapshot!(context.filters(), context.add().arg("./child"), @r#"
|
||||||
.chain(context.filters())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
uv_snapshot!(filters, context.add().arg("./child"), @r#"
|
|
||||||
success: false
|
success: false
|
||||||
exit_code: 1
|
exit_code: 1
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
@ -7407,6 +7402,256 @@ fn fail_to_edit_revert_project() -> Result<()> {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Revert changes to the root `pyproject.toml` and `uv.lock` when the `add` operation fails.
|
||||||
|
#[test]
|
||||||
|
fn fail_to_add_revert_workspace_root() -> Result<()> {
|
||||||
|
let context = TestContext::new("3.12");
|
||||||
|
|
||||||
|
context
|
||||||
|
.temp_dir
|
||||||
|
.child("pyproject.toml")
|
||||||
|
.write_str(indoc! {r#"
|
||||||
|
[project]
|
||||||
|
name = "parent"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = []
|
||||||
|
"#})?;
|
||||||
|
|
||||||
|
// Add a dependency on a package that declares static metadata (so can always resolve), but
|
||||||
|
// can't be installed.
|
||||||
|
let pyproject_toml = context.temp_dir.child("child/pyproject.toml");
|
||||||
|
pyproject_toml.write_str(indoc! {r#"
|
||||||
|
[project]
|
||||||
|
name = "child"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = ["iniconfig"]
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["setuptools"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
"#})?;
|
||||||
|
context
|
||||||
|
.temp_dir
|
||||||
|
.child("child")
|
||||||
|
.child("setup.py")
|
||||||
|
.write_str("1/0")?;
|
||||||
|
|
||||||
|
// Add a dependency on a package that declares static metadata (so can always resolve), but
|
||||||
|
// can't be installed.
|
||||||
|
let pyproject_toml = context.temp_dir.child("broken").child("pyproject.toml");
|
||||||
|
pyproject_toml.write_str(indoc! {r#"
|
||||||
|
[project]
|
||||||
|
name = "broken"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = ["iniconfig"]
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["setuptools"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
"#})?;
|
||||||
|
context
|
||||||
|
.temp_dir
|
||||||
|
.child("broken")
|
||||||
|
.child("setup.py")
|
||||||
|
.write_str("1/0")?;
|
||||||
|
|
||||||
|
uv_snapshot!(context.filters(), context.add().arg("--workspace").arg("./broken"), @r#"
|
||||||
|
success: false
|
||||||
|
exit_code: 1
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Added `broken` to workspace members
|
||||||
|
Resolved 3 packages in [TIME]
|
||||||
|
× Failed to build `broken @ file://[TEMP_DIR]/broken`
|
||||||
|
├─▶ The build backend returned an error
|
||||||
|
╰─▶ Call to `setuptools.build_meta.build_editable` failed (exit status: 1)
|
||||||
|
|
||||||
|
[stderr]
|
||||||
|
Traceback (most recent call last):
|
||||||
|
File "<string>", line 14, in <module>
|
||||||
|
File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 448, in get_requires_for_build_editable
|
||||||
|
return self.get_requires_for_build_wheel(config_settings)
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel
|
||||||
|
return self._get_build_requires(config_settings, requirements=['wheel'])
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 295, in _get_build_requires
|
||||||
|
self.run_setup()
|
||||||
|
File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 311, in run_setup
|
||||||
|
exec(code, locals())
|
||||||
|
File "<string>", line 1, in <module>
|
||||||
|
ZeroDivisionError: division by zero
|
||||||
|
|
||||||
|
hint: This usually indicates a problem with the package or the build environment.
|
||||||
|
help: If you want to add the package regardless of the failed resolution, provide the `--frozen` flag to skip locking and syncing.
|
||||||
|
"#);
|
||||||
|
|
||||||
|
let pyproject_toml = fs_err::read_to_string(context.temp_dir.join("pyproject.toml"))?;
|
||||||
|
|
||||||
|
insta::with_settings!({
|
||||||
|
filters => context.filters(),
|
||||||
|
}, {
|
||||||
|
assert_snapshot!(
|
||||||
|
pyproject_toml, @r#"
|
||||||
|
[project]
|
||||||
|
name = "parent"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = []
|
||||||
|
"#
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
// The lockfile should not exist, even though resolution succeeded.
|
||||||
|
assert!(!context.temp_dir.join("uv.lock").exists());
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Revert changes to the root `pyproject.toml` and `uv.lock` when the `add` operation fails.
|
||||||
|
#[test]
|
||||||
|
fn fail_to_add_revert_workspace_member() -> Result<()> {
|
||||||
|
let context = TestContext::new("3.12");
|
||||||
|
|
||||||
|
context
|
||||||
|
.temp_dir
|
||||||
|
.child("pyproject.toml")
|
||||||
|
.write_str(indoc! {r#"
|
||||||
|
[project]
|
||||||
|
name = "parent"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = ["child"]
|
||||||
|
|
||||||
|
[tool.uv.workspace]
|
||||||
|
members = ["child"]
|
||||||
|
|
||||||
|
[tool.uv.sources]
|
||||||
|
child = { workspace = true }
|
||||||
|
"#})?;
|
||||||
|
|
||||||
|
// Add a workspace dependency.
|
||||||
|
let project = context.temp_dir.child("child");
|
||||||
|
project.child("pyproject.toml").write_str(indoc! {r#"
|
||||||
|
[project]
|
||||||
|
name = "child"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = ["iniconfig"]
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["hatchling"]
|
||||||
|
build-backend = "hatchling.build"
|
||||||
|
"#})?;
|
||||||
|
project
|
||||||
|
.child("src")
|
||||||
|
.child("child")
|
||||||
|
.child("__init__.py")
|
||||||
|
.touch()?;
|
||||||
|
|
||||||
|
// Add a dependency on a package that declares static metadata (so can always resolve), but
|
||||||
|
// can't be installed.
|
||||||
|
let pyproject_toml = context.temp_dir.child("broken/pyproject.toml");
|
||||||
|
pyproject_toml.write_str(indoc! {r#"
|
||||||
|
[project]
|
||||||
|
name = "broken"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = ["iniconfig"]
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["setuptools"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
"#})?;
|
||||||
|
context
|
||||||
|
.temp_dir
|
||||||
|
.child("broken")
|
||||||
|
.child("setup.py")
|
||||||
|
.write_str("1/0")?;
|
||||||
|
|
||||||
|
uv_snapshot!(context.filters(), context.add().current_dir(&project).arg("--workspace").arg("../broken"), @r#"
|
||||||
|
success: false
|
||||||
|
exit_code: 1
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Added `broken` to workspace members
|
||||||
|
Resolved 4 packages in [TIME]
|
||||||
|
× Failed to build `broken @ file://[TEMP_DIR]/broken`
|
||||||
|
├─▶ The build backend returned an error
|
||||||
|
╰─▶ Call to `setuptools.build_meta.build_editable` failed (exit status: 1)
|
||||||
|
|
||||||
|
[stderr]
|
||||||
|
Traceback (most recent call last):
|
||||||
|
File "<string>", line 14, in <module>
|
||||||
|
File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 448, in get_requires_for_build_editable
|
||||||
|
return self.get_requires_for_build_wheel(config_settings)
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel
|
||||||
|
return self._get_build_requires(config_settings, requirements=['wheel'])
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 295, in _get_build_requires
|
||||||
|
self.run_setup()
|
||||||
|
File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 311, in run_setup
|
||||||
|
exec(code, locals())
|
||||||
|
File "<string>", line 1, in <module>
|
||||||
|
ZeroDivisionError: division by zero
|
||||||
|
|
||||||
|
hint: This usually indicates a problem with the package or the build environment.
|
||||||
|
help: If you want to add the package regardless of the failed resolution, provide the `--frozen` flag to skip locking and syncing.
|
||||||
|
"#);
|
||||||
|
|
||||||
|
let pyproject_toml = fs_err::read_to_string(context.temp_dir.join("pyproject.toml"))?;
|
||||||
|
insta::with_settings!({
|
||||||
|
filters => context.filters(),
|
||||||
|
}, {
|
||||||
|
assert_snapshot!(
|
||||||
|
pyproject_toml, @r#"
|
||||||
|
[project]
|
||||||
|
name = "parent"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = ["child"]
|
||||||
|
|
||||||
|
[tool.uv.workspace]
|
||||||
|
members = ["child"]
|
||||||
|
|
||||||
|
[tool.uv.sources]
|
||||||
|
child = { workspace = true }
|
||||||
|
"#
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
let pyproject_toml =
|
||||||
|
fs_err::read_to_string(context.temp_dir.join("child").join("pyproject.toml"))?;
|
||||||
|
insta::with_settings!({
|
||||||
|
filters => context.filters(),
|
||||||
|
}, {
|
||||||
|
assert_snapshot!(
|
||||||
|
pyproject_toml, @r#"
|
||||||
|
[project]
|
||||||
|
name = "child"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = ["iniconfig"]
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["hatchling"]
|
||||||
|
build-backend = "hatchling.build"
|
||||||
|
"#
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
// The lockfile should not exist, even though resolution succeeded.
|
||||||
|
assert!(!context.temp_dir.join("uv.lock").exists());
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
/// Ensure that the added dependencies are sorted if the dependency list was already sorted prior
|
/// Ensure that the added dependencies are sorted if the dependency list was already sorted prior
|
||||||
/// to the operation.
|
/// to the operation.
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -9265,7 +9510,7 @@ fn add_index_with_trailing_slash() -> Result<()> {
|
||||||
filters => context.filters(),
|
filters => context.filters(),
|
||||||
}, {
|
}, {
|
||||||
assert_snapshot!(
|
assert_snapshot!(
|
||||||
pyproject_toml, @r#"
|
pyproject_toml, @r###"
|
||||||
[project]
|
[project]
|
||||||
name = "project"
|
name = "project"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
|
@ -9278,8 +9523,8 @@ fn add_index_with_trailing_slash() -> Result<()> {
|
||||||
constraint-dependencies = ["markupsafe<3"]
|
constraint-dependencies = ["markupsafe<3"]
|
||||||
|
|
||||||
[[tool.uv.index]]
|
[[tool.uv.index]]
|
||||||
url = "https://pypi.org/simple"
|
url = "https://pypi.org/simple/"
|
||||||
"#
|
"###
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -9303,7 +9548,7 @@ fn add_index_with_trailing_slash() -> Result<()> {
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "iniconfig"
|
name = "iniconfig"
|
||||||
version = "2.0.0"
|
version = "2.0.0"
|
||||||
source = { registry = "https://pypi.org/simple" }
|
source = { registry = "https://pypi.org/simple/" }
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646, upload-time = "2023-01-07T11:08:11.254Z" }
|
sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646, upload-time = "2023-01-07T11:08:11.254Z" }
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892, upload-time = "2023-01-07T11:08:09.864Z" },
|
{ url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892, upload-time = "2023-01-07T11:08:09.864Z" },
|
||||||
|
@ -11200,7 +11445,7 @@ fn repeated_index_cli_reversed() -> Result<()> {
|
||||||
filters => context.filters(),
|
filters => context.filters(),
|
||||||
}, {
|
}, {
|
||||||
assert_snapshot!(
|
assert_snapshot!(
|
||||||
pyproject_toml, @r#"
|
pyproject_toml, @r###"
|
||||||
[project]
|
[project]
|
||||||
name = "project"
|
name = "project"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
|
@ -11210,8 +11455,8 @@ fn repeated_index_cli_reversed() -> Result<()> {
|
||||||
]
|
]
|
||||||
|
|
||||||
[[tool.uv.index]]
|
[[tool.uv.index]]
|
||||||
url = "https://test.pypi.org/simple"
|
url = "https://test.pypi.org/simple/"
|
||||||
"#
|
"###
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -11232,7 +11477,7 @@ fn repeated_index_cli_reversed() -> Result<()> {
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "iniconfig"
|
name = "iniconfig"
|
||||||
version = "2.0.0"
|
version = "2.0.0"
|
||||||
source = { registry = "https://test.pypi.org/simple" }
|
source = { registry = "https://test.pypi.org/simple/" }
|
||||||
sdist = { url = "https://test-files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646, upload-time = "2023-01-07T11:08:16.826Z" }
|
sdist = { url = "https://test-files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646, upload-time = "2023-01-07T11:08:16.826Z" }
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://test-files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892, upload-time = "2023-01-07T11:08:14.843Z" },
|
{ url = "https://test-files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892, upload-time = "2023-01-07T11:08:14.843Z" },
|
||||||
|
@ -12635,3 +12880,163 @@ fn add_bounds_requirement_over_bounds_kind() -> Result<()> {
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Add a path dependency with `--workspace` flag to add it to workspace members. The root already
|
||||||
|
/// contains a workspace definition, so the package should be added to the workspace members.
|
||||||
|
#[test]
|
||||||
|
fn add_path_with_existing_workspace() -> Result<()> {
|
||||||
|
let context = TestContext::new("3.12");
|
||||||
|
|
||||||
|
let workspace_toml = context.temp_dir.child("pyproject.toml");
|
||||||
|
workspace_toml.write_str(indoc! {r#"
|
||||||
|
[project]
|
||||||
|
name = "parent"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
|
[tool.uv.workspace]
|
||||||
|
members = ["project"]
|
||||||
|
"#})?;
|
||||||
|
|
||||||
|
// Create a project within the workspace.
|
||||||
|
let project_dir = context.temp_dir.child("project");
|
||||||
|
project_dir.create_dir_all()?;
|
||||||
|
|
||||||
|
let project_toml = project_dir.child("pyproject.toml");
|
||||||
|
project_toml.write_str(indoc! {r#"
|
||||||
|
[project]
|
||||||
|
name = "project"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = []
|
||||||
|
"#})?;
|
||||||
|
|
||||||
|
// Create a dependency package outside the workspace members.
|
||||||
|
let dep_dir = context.temp_dir.child("dep");
|
||||||
|
dep_dir.create_dir_all()?;
|
||||||
|
|
||||||
|
let dep_toml = dep_dir.child("pyproject.toml");
|
||||||
|
dep_toml.write_str(indoc! {r#"
|
||||||
|
[project]
|
||||||
|
name = "dep"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = []
|
||||||
|
"#})?;
|
||||||
|
|
||||||
|
// Add the dependency with `--workspace` flag from the project directory.
|
||||||
|
uv_snapshot!(context.filters(), context
|
||||||
|
.add()
|
||||||
|
.current_dir(&project_dir)
|
||||||
|
.arg("../dep")
|
||||||
|
.arg("--workspace"), @r"
|
||||||
|
success: true
|
||||||
|
exit_code: 0
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Added `dep` to workspace members
|
||||||
|
Resolved 3 packages in [TIME]
|
||||||
|
Audited in [TIME]
|
||||||
|
");
|
||||||
|
|
||||||
|
let pyproject_toml = context.read("pyproject.toml");
|
||||||
|
assert_snapshot!(
|
||||||
|
pyproject_toml, @r#"
|
||||||
|
[project]
|
||||||
|
name = "parent"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
|
[tool.uv.workspace]
|
||||||
|
members = [
|
||||||
|
"project",
|
||||||
|
"dep",
|
||||||
|
]
|
||||||
|
"#
|
||||||
|
);
|
||||||
|
|
||||||
|
let pyproject_toml = context.read("project/pyproject.toml");
|
||||||
|
assert_snapshot!(
|
||||||
|
pyproject_toml, @r#"
|
||||||
|
[project]
|
||||||
|
name = "project"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = [
|
||||||
|
"dep",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.uv.sources]
|
||||||
|
dep = { workspace = true }
|
||||||
|
"#
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add a path dependency with `--workspace` flag to add it to workspace members. The root doesn't
|
||||||
|
/// contain a workspace definition, so `uv add` should create one.
|
||||||
|
#[test]
|
||||||
|
fn add_path_with_workspace() -> Result<()> {
|
||||||
|
let context = TestContext::new("3.12");
|
||||||
|
|
||||||
|
let workspace_toml = context.temp_dir.child("pyproject.toml");
|
||||||
|
workspace_toml.write_str(indoc! {r#"
|
||||||
|
[project]
|
||||||
|
name = "parent"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
"#})?;
|
||||||
|
|
||||||
|
// Create a dependency package outside the workspace members.
|
||||||
|
let dep_dir = context.temp_dir.child("dep");
|
||||||
|
dep_dir.create_dir_all()?;
|
||||||
|
|
||||||
|
let dep_toml = dep_dir.child("pyproject.toml");
|
||||||
|
dep_toml.write_str(indoc! {r#"
|
||||||
|
[project]
|
||||||
|
name = "dep"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = []
|
||||||
|
"#})?;
|
||||||
|
|
||||||
|
// Add the dependency with `--workspace` flag from the project directory.
|
||||||
|
uv_snapshot!(context.filters(), context
|
||||||
|
.add()
|
||||||
|
.arg("./dep")
|
||||||
|
.arg("--workspace"), @r"
|
||||||
|
success: true
|
||||||
|
exit_code: 0
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Added `dep` to workspace members
|
||||||
|
Resolved 2 packages in [TIME]
|
||||||
|
Audited in [TIME]
|
||||||
|
");
|
||||||
|
|
||||||
|
let pyproject_toml = context.read("pyproject.toml");
|
||||||
|
assert_snapshot!(
|
||||||
|
pyproject_toml, @r#"
|
||||||
|
[project]
|
||||||
|
name = "parent"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = [
|
||||||
|
"dep",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.uv.workspace]
|
||||||
|
members = [
|
||||||
|
"dep",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.uv.sources]
|
||||||
|
dep = { workspace = true }
|
||||||
|
"#
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
|
@ -4551,15 +4551,15 @@ fn lock_requires_python_compatible_specifier() -> Result<()> {
|
||||||
"#,
|
"#,
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
uv_snapshot!(context.filters(), context.lock(), @r###"
|
uv_snapshot!(context.filters(), context.lock(), @r"
|
||||||
success: true
|
success: true
|
||||||
exit_code: 0
|
exit_code: 0
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
warning: The release specifier (`~=3.13`) contains a compatible release match without a patch version. This will be interpreted as `>=3.13, <4`. Did you mean `~=3.13.0` to freeze the minor version?
|
warning: The `requires-python` specifier (`~=3.13`) in `warehouse` uses the tilde specifier (`~=`) without a patch version. This will be interpreted as `>=3.13, <4`. Did you mean `~=3.13.0` to constrain the version as `>=3.13.0, <3.14`? We recommend only using the tilde specifier with a patch version to avoid ambiguity.
|
||||||
Resolved 1 package in [TIME]
|
Resolved 1 package in [TIME]
|
||||||
"###);
|
");
|
||||||
|
|
||||||
pyproject_toml.write_str(
|
pyproject_toml.write_str(
|
||||||
r#"
|
r#"
|
||||||
|
@ -15500,7 +15500,7 @@ fn lock_add_empty_dependency_group() -> Result<()> {
|
||||||
|
|
||||||
/// Use a trailing slash on the declared index.
|
/// Use a trailing slash on the declared index.
|
||||||
#[test]
|
#[test]
|
||||||
fn lock_trailing_slash() -> Result<()> {
|
fn lock_trailing_slash_index_url() -> Result<()> {
|
||||||
let context = TestContext::new("3.12");
|
let context = TestContext::new("3.12");
|
||||||
|
|
||||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||||
|
@ -15543,7 +15543,7 @@ fn lock_trailing_slash() -> Result<()> {
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "anyio"
|
name = "anyio"
|
||||||
version = "3.7.0"
|
version = "3.7.0"
|
||||||
source = { registry = "https://pypi.org/simple" }
|
source = { registry = "https://pypi.org/simple/" }
|
||||||
dependencies = [
|
dependencies = [
|
||||||
{ name = "idna" },
|
{ name = "idna" },
|
||||||
{ name = "sniffio" },
|
{ name = "sniffio" },
|
||||||
|
@ -15556,7 +15556,7 @@ fn lock_trailing_slash() -> Result<()> {
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "idna"
|
name = "idna"
|
||||||
version = "3.6"
|
version = "3.6"
|
||||||
source = { registry = "https://pypi.org/simple" }
|
source = { registry = "https://pypi.org/simple/" }
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" }
|
sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" }
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" },
|
{ url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" },
|
||||||
|
@ -15576,7 +15576,7 @@ fn lock_trailing_slash() -> Result<()> {
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "sniffio"
|
name = "sniffio"
|
||||||
version = "1.3.1"
|
version = "1.3.1"
|
||||||
source = { registry = "https://pypi.org/simple" }
|
source = { registry = "https://pypi.org/simple/" }
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
|
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
|
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
|
||||||
|
@ -23617,10 +23617,7 @@ fn lock_derivation_chain_prod() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([
|
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||||
(r"exit code: 1", "exit status: 1"),
|
|
||||||
(r"/.*/src", "/[TMP]/src"),
|
|
||||||
])
|
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
uv_snapshot!(filters, context.lock(), @r###"
|
uv_snapshot!(filters, context.lock(), @r###"
|
||||||
|
@ -23677,10 +23674,7 @@ fn lock_derivation_chain_extra() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([
|
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||||
(r"exit code: 1", "exit status: 1"),
|
|
||||||
(r"/.*/src", "/[TMP]/src"),
|
|
||||||
])
|
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
uv_snapshot!(filters, context.lock(), @r###"
|
uv_snapshot!(filters, context.lock(), @r###"
|
||||||
|
@ -23739,10 +23733,7 @@ fn lock_derivation_chain_group() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([
|
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||||
(r"exit code: 1", "exit status: 1"),
|
|
||||||
(r"/.*/src", "/[TMP]/src"),
|
|
||||||
])
|
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
uv_snapshot!(filters, context.lock(), @r###"
|
uv_snapshot!(filters, context.lock(), @r###"
|
||||||
|
@ -23812,10 +23803,7 @@ fn lock_derivation_chain_extended() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([
|
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||||
(r"exit code: 1", "exit status: 1"),
|
|
||||||
(r"/.*/src", "/[TMP]/src"),
|
|
||||||
])
|
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
uv_snapshot!(filters, context.lock(), @r###"
|
uv_snapshot!(filters, context.lock(), @r###"
|
||||||
|
@ -28322,10 +28310,10 @@ fn lock_conflict_for_disjoint_platform() -> Result<()> {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Add a package with an `--index` URL with no trailing slash. Run `uv lock --locked`
|
/// Add a package with an `--index` URL with no trailing slash while an index with the same URL
|
||||||
/// with a `pyproject.toml` with that same URL but with a trailing slash.
|
/// exists with a trailing slash in the `pyproject.toml`.
|
||||||
#[test]
|
#[test]
|
||||||
fn lock_with_inconsistent_trailing_slash() -> Result<()> {
|
fn lock_trailing_slash_index_url_in_pyproject_not_index_argument() -> Result<()> {
|
||||||
let context = TestContext::new("3.12");
|
let context = TestContext::new("3.12");
|
||||||
|
|
||||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||||
|
@ -28420,20 +28408,22 @@ fn lock_with_inconsistent_trailing_slash() -> Result<()> {
|
||||||
|
|
||||||
// Re-run with `--locked`.
|
// Re-run with `--locked`.
|
||||||
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
|
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
|
||||||
success: true
|
success: false
|
||||||
exit_code: 0
|
exit_code: 2
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
Resolved 4 packages in [TIME]
|
Resolved 4 packages in [TIME]
|
||||||
|
error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
|
||||||
");
|
");
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Run `uv lock --locked` with a lockfile with trailing slashes on index URLs.
|
/// Run `uv lock --locked` with a lockfile with trailing slashes on the index URL but a
|
||||||
|
/// `pyproject.toml` without a trailing slash on the index URL.
|
||||||
#[test]
|
#[test]
|
||||||
fn lock_with_index_trailing_slashes_in_lockfile() -> Result<()> {
|
fn lock_trailing_slash_index_url_in_lockfile_not_pyproject() -> Result<()> {
|
||||||
let context = TestContext::new("3.12");
|
let context = TestContext::new("3.12");
|
||||||
|
|
||||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||||
|
@ -28509,20 +28499,22 @@ fn lock_with_index_trailing_slashes_in_lockfile() -> Result<()> {
|
||||||
|
|
||||||
// Run `uv lock --locked`.
|
// Run `uv lock --locked`.
|
||||||
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
|
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
|
||||||
success: true
|
success: false
|
||||||
exit_code: 0
|
exit_code: 2
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
Resolved 4 packages in [TIME]
|
Resolved 4 packages in [TIME]
|
||||||
|
error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
|
||||||
");
|
");
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Run `uv lock --locked` with a lockfile with trailing slashes on index URLs.
|
/// Run `uv lock --locked` with `pyproject.toml` with trailing slashes on the index URL but a
|
||||||
|
/// lockfile without trailing slashes on the index URL.
|
||||||
#[test]
|
#[test]
|
||||||
fn lock_with_index_trailing_slashes_in_pyproject_toml() -> Result<()> {
|
fn lock_trailing_slash_index_url_in_pyproject_and_not_lockfile() -> Result<()> {
|
||||||
let context = TestContext::new("3.12");
|
let context = TestContext::new("3.12");
|
||||||
|
|
||||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||||
|
@ -28598,20 +28590,22 @@ fn lock_with_index_trailing_slashes_in_pyproject_toml() -> Result<()> {
|
||||||
|
|
||||||
// Run `uv lock --locked`.
|
// Run `uv lock --locked`.
|
||||||
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
|
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
|
||||||
success: true
|
success: false
|
||||||
exit_code: 0
|
exit_code: 2
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
Resolved 4 packages in [TIME]
|
Resolved 4 packages in [TIME]
|
||||||
|
error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
|
||||||
");
|
");
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Run `uv lock --locked` with a lockfile with trailing slashes on index URLs.
|
/// Run `uv lock --locked` with a lockfile and `pyproject.toml` with trailing slashes on the index
|
||||||
|
/// URL.
|
||||||
#[test]
|
#[test]
|
||||||
fn lock_with_index_trailing_slashes_in_lockfile_and_pyproject_toml() -> Result<()> {
|
fn lock_trailing_slash_index_url_in_lockfile_and_pyproject_toml() -> Result<()> {
|
||||||
let context = TestContext::new("3.12");
|
let context = TestContext::new("3.12");
|
||||||
|
|
||||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||||
|
@ -28698,6 +28692,152 @@ fn lock_with_index_trailing_slashes_in_lockfile_and_pyproject_toml() -> Result<(
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn lock_trailing_slash_find_links() -> Result<()> {
|
||||||
|
let context = TestContext::new("3.12");
|
||||||
|
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||||
|
pyproject_toml.write_str(
|
||||||
|
r#"
|
||||||
|
[project]
|
||||||
|
name = "project"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = ["packaging==23.2"]
|
||||||
|
[tool.uv]
|
||||||
|
no-index = true
|
||||||
|
find-links = ["https://pypi.org/simple/packaging"]
|
||||||
|
"#,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
uv_snapshot!(context.filters(), context.lock(), @r"
|
||||||
|
success: true
|
||||||
|
exit_code: 0
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Resolved 2 packages in [TIME]
|
||||||
|
");
|
||||||
|
|
||||||
|
let lock = context.read("uv.lock");
|
||||||
|
insta::with_settings!({
|
||||||
|
filters => context.filters(),
|
||||||
|
}, {
|
||||||
|
assert_snapshot!(
|
||||||
|
lock, @r#"
|
||||||
|
version = 1
|
||||||
|
revision = 2
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
|
[options]
|
||||||
|
exclude-newer = "2024-03-25T00:00:00Z"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "packaging"
|
||||||
|
version = "23.2"
|
||||||
|
source = { registry = "https://pypi.org/simple/packaging" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/fb/2b/9b9c33ffed44ee921d0967086d653047286054117d584f1b1a7c22ceaf7b/packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ec/1a/610693ac4ee14fcdf2d9bf3c493370e4f2ef7ae2e19217d7a237ff42367d/packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "project"
|
||||||
|
version = "0.1.0"
|
||||||
|
source = { virtual = "." }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "packaging" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.metadata]
|
||||||
|
requires-dist = [{ name = "packaging", specifier = "==23.2" }]
|
||||||
|
"#
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Re-run with `--locked`.
|
||||||
|
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
|
||||||
|
success: true
|
||||||
|
exit_code: 0
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Resolved 2 packages in [TIME]
|
||||||
|
");
|
||||||
|
|
||||||
|
// Add a trailing slash, which should invalidate the lockfile
|
||||||
|
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||||
|
pyproject_toml.write_str(
|
||||||
|
r#"
|
||||||
|
[project]
|
||||||
|
name = "project"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = ["packaging==23.2"]
|
||||||
|
[tool.uv]
|
||||||
|
no-index = true
|
||||||
|
find-links = ["https://pypi.org/simple/packaging/"]
|
||||||
|
"#,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
// Re-run with `--locked`
|
||||||
|
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
|
||||||
|
success: false
|
||||||
|
exit_code: 2
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Resolved 2 packages in [TIME]
|
||||||
|
error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
|
||||||
|
");
|
||||||
|
|
||||||
|
uv_snapshot!(context.filters(), context.lock(), @r"
|
||||||
|
success: true
|
||||||
|
exit_code: 0
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Resolved 2 packages in [TIME]
|
||||||
|
");
|
||||||
|
|
||||||
|
let lock = context.read("uv.lock");
|
||||||
|
insta::with_settings!({
|
||||||
|
filters => context.filters(),
|
||||||
|
}, {
|
||||||
|
assert_snapshot!(
|
||||||
|
lock, @r#"
|
||||||
|
version = 1
|
||||||
|
revision = 2
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
|
[options]
|
||||||
|
exclude-newer = "2024-03-25T00:00:00Z"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "packaging"
|
||||||
|
version = "23.2"
|
||||||
|
source = { registry = "https://pypi.org/simple/packaging/" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/fb/2b/9b9c33ffed44ee921d0967086d653047286054117d584f1b1a7c22ceaf7b/packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ec/1a/610693ac4ee14fcdf2d9bf3c493370e4f2ef7ae2e19217d7a237ff42367d/packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "project"
|
||||||
|
version = "0.1.0"
|
||||||
|
source = { virtual = "." }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "packaging" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.metadata]
|
||||||
|
requires-dist = [{ name = "packaging", specifier = "==23.2" }]
|
||||||
|
"#
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn lock_prefix_match() -> Result<()> {
|
fn lock_prefix_match() -> Result<()> {
|
||||||
let context = TestContext::new("3.12");
|
let context = TestContext::new("3.12");
|
||||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -14679,10 +14679,7 @@ fn compile_derivation_chain() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([
|
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||||
(r"exit code: 1", "exit status: 1"),
|
|
||||||
(r"/.*/src", "/[TMP]/src"),
|
|
||||||
])
|
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
uv_snapshot!(filters, context.pip_compile().arg("pyproject.toml"), @r###"
|
uv_snapshot!(filters, context.pip_compile().arg("pyproject.toml"), @r###"
|
||||||
|
@ -16345,7 +16342,7 @@ fn pep_751_compile_registry_wheel() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "iniconfig"
|
name = "iniconfig"
|
||||||
|
@ -16394,7 +16391,7 @@ fn pep_751_compile_registry_sdist() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "source-distribution"
|
name = "source-distribution"
|
||||||
|
@ -16478,7 +16475,7 @@ fn pep_751_compile_directory() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "anyio"
|
name = "anyio"
|
||||||
|
@ -16549,7 +16546,7 @@ fn pep_751_compile_git() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "uv-public-pypackage"
|
name = "uv-public-pypackage"
|
||||||
|
@ -16599,7 +16596,7 @@ fn pep_751_compile_url_wheel() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "anyio"
|
name = "anyio"
|
||||||
|
@ -16663,7 +16660,7 @@ fn pep_751_compile_url_sdist() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "anyio"
|
name = "anyio"
|
||||||
|
@ -16732,7 +16729,7 @@ fn pep_751_compile_path_wheel() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "iniconfig"
|
name = "iniconfig"
|
||||||
|
@ -16770,7 +16767,7 @@ fn pep_751_compile_path_wheel() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o nested/pylock.toml
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o nested/pylock.toml
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "iniconfig"
|
name = "iniconfig"
|
||||||
|
@ -16811,7 +16808,7 @@ fn pep_751_compile_path_sdist() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "iniconfig"
|
name = "iniconfig"
|
||||||
|
@ -16850,7 +16847,7 @@ fn pep_751_compile_path_sdist() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o nested/pylock.toml
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o nested/pylock.toml
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "iniconfig"
|
name = "iniconfig"
|
||||||
|
@ -16887,7 +16884,7 @@ fn pep_751_compile_preferences() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "anyio"
|
name = "anyio"
|
||||||
|
@ -16928,7 +16925,7 @@ fn pep_751_compile_preferences() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "anyio"
|
name = "anyio"
|
||||||
|
@ -16968,7 +16965,7 @@ fn pep_751_compile_preferences() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "anyio"
|
name = "anyio"
|
||||||
|
@ -17007,7 +17004,7 @@ fn pep_751_compile_preferences() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "anyio"
|
name = "anyio"
|
||||||
|
@ -17055,7 +17052,7 @@ fn pep_751_compile_warn() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml --emit-index-url
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml --emit-index-url
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "iniconfig"
|
name = "iniconfig"
|
||||||
|
@ -17268,7 +17265,7 @@ fn pep_751_compile_no_emit_package() -> Result<()> {
|
||||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml --no-emit-package idna
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml --no-emit-package idna
|
||||||
lock-version = "1.0"
|
lock-version = "1.0"
|
||||||
created-by = "uv"
|
created-by = "uv"
|
||||||
requires-python = ">=3.12.[X]"
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
[[packages]]
|
[[packages]]
|
||||||
name = "anyio"
|
name = "anyio"
|
||||||
|
@ -17562,3 +17559,47 @@ fn git_path_transitive_dependency() -> Result<()> {
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Ensure that `--emit-index-annotation` plays nicely with `--annotation-style=line`.
|
||||||
|
#[test]
|
||||||
|
fn omit_python_patch_universal() -> Result<()> {
|
||||||
|
let context = TestContext::new("3.11");
|
||||||
|
|
||||||
|
let requirements_in = context.temp_dir.child("requirements.in");
|
||||||
|
requirements_in.write_str("redis")?;
|
||||||
|
|
||||||
|
uv_snapshot!(context.filters(), context.pip_compile()
|
||||||
|
.arg("requirements.in"), @r"
|
||||||
|
success: true
|
||||||
|
exit_code: 0
|
||||||
|
----- stdout -----
|
||||||
|
# This file was autogenerated by uv via the following command:
|
||||||
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
|
||||||
|
redis==5.0.3
|
||||||
|
# via -r requirements.in
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Resolved 1 package in [TIME]
|
||||||
|
"
|
||||||
|
);
|
||||||
|
|
||||||
|
uv_snapshot!(context.filters(), context.pip_compile()
|
||||||
|
.arg("requirements.in")
|
||||||
|
.arg("--universal"), @r"
|
||||||
|
success: true
|
||||||
|
exit_code: 0
|
||||||
|
----- stdout -----
|
||||||
|
# This file was autogenerated by uv via the following command:
|
||||||
|
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
|
||||||
|
async-timeout==4.0.3 ; python_full_version < '3.11.[X]'
|
||||||
|
# via redis
|
||||||
|
redis==5.0.3
|
||||||
|
# via -r requirements.in
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Resolved 2 packages in [TIME]
|
||||||
|
"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
|
@ -342,10 +342,7 @@ dependencies = ["flask==1.0.x"]
|
||||||
let requirements_txt = context.temp_dir.child("requirements.txt");
|
let requirements_txt = context.temp_dir.child("requirements.txt");
|
||||||
requirements_txt.write_str("./path_dep")?;
|
requirements_txt.write_str("./path_dep")?;
|
||||||
|
|
||||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
uv_snapshot!(context.filters(), context.pip_install()
|
||||||
.chain(context.filters())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
uv_snapshot!(filters, context.pip_install()
|
|
||||||
.arg("-r")
|
.arg("-r")
|
||||||
.arg("requirements.txt"), @r###"
|
.arg("requirements.txt"), @r###"
|
||||||
success: false
|
success: false
|
||||||
|
@ -4930,10 +4927,7 @@ fn no_build_isolation() -> Result<()> {
|
||||||
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz")?;
|
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz")?;
|
||||||
|
|
||||||
// We expect the build to fail, because `setuptools` is not installed.
|
// We expect the build to fail, because `setuptools` is not installed.
|
||||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
uv_snapshot!(context.filters(), context.pip_install()
|
||||||
.chain(context.filters())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
uv_snapshot!(filters, context.pip_install()
|
|
||||||
.arg("-r")
|
.arg("-r")
|
||||||
.arg("requirements.in")
|
.arg("requirements.in")
|
||||||
.arg("--no-build-isolation"), @r###"
|
.arg("--no-build-isolation"), @r###"
|
||||||
|
@ -5001,10 +4995,7 @@ fn respect_no_build_isolation_env_var() -> Result<()> {
|
||||||
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz")?;
|
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz")?;
|
||||||
|
|
||||||
// We expect the build to fail, because `setuptools` is not installed.
|
// We expect the build to fail, because `setuptools` is not installed.
|
||||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
uv_snapshot!(context.filters(), context.pip_install()
|
||||||
.chain(context.filters())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
uv_snapshot!(filters, context.pip_install()
|
|
||||||
.arg("-r")
|
.arg("-r")
|
||||||
.arg("requirements.in")
|
.arg("requirements.in")
|
||||||
.env(EnvVars::UV_NO_BUILD_ISOLATION, "yes"), @r###"
|
.env(EnvVars::UV_NO_BUILD_ISOLATION, "yes"), @r###"
|
||||||
|
@ -8601,10 +8592,7 @@ fn install_build_isolation_package() -> Result<()> {
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
// Running `uv pip install` should fail for iniconfig.
|
// Running `uv pip install` should fail for iniconfig.
|
||||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
uv_snapshot!(context.filters(), context.pip_install()
|
||||||
.chain(context.filters())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
uv_snapshot!(filters, context.pip_install()
|
|
||||||
.arg("--no-build-isolation-package")
|
.arg("--no-build-isolation-package")
|
||||||
.arg("iniconfig")
|
.arg("iniconfig")
|
||||||
.arg(package.path()), @r###"
|
.arg(package.path()), @r###"
|
||||||
|
@ -8931,10 +8919,7 @@ fn missing_top_level() {
|
||||||
fn sklearn() {
|
fn sklearn() {
|
||||||
let context = TestContext::new("3.12");
|
let context = TestContext::new("3.12");
|
||||||
|
|
||||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
uv_snapshot!(context.filters(), context.pip_install().arg("sklearn"), @r###"
|
||||||
.chain(context.filters())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
uv_snapshot!(filters, context.pip_install().arg("sklearn"), @r###"
|
|
||||||
success: false
|
success: false
|
||||||
exit_code: 1
|
exit_code: 1
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
@ -8984,10 +8969,7 @@ fn resolve_derivation_chain() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([
|
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||||
(r"exit code: 1", "exit status: 1"),
|
|
||||||
(r"/.*/src", "/[TMP]/src"),
|
|
||||||
])
|
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
uv_snapshot!(filters, context.pip_install()
|
uv_snapshot!(filters, context.pip_install()
|
||||||
|
@ -11508,3 +11490,110 @@ fn conflicting_flags_clap_bug() {
|
||||||
"
|
"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Test that shebang arguments are stripped when installing scripts
|
||||||
|
#[test]
|
||||||
|
#[cfg(unix)]
|
||||||
|
fn strip_shebang_arguments() -> Result<()> {
|
||||||
|
let context = TestContext::new("3.12");
|
||||||
|
|
||||||
|
let project_dir = context.temp_dir.child("shebang_test");
|
||||||
|
project_dir.create_dir_all()?;
|
||||||
|
|
||||||
|
// Create a package with scripts that have shebang arguments.
|
||||||
|
let pyproject_toml = project_dir.child("pyproject.toml");
|
||||||
|
pyproject_toml.write_str(indoc! {r#"
|
||||||
|
[project]
|
||||||
|
name = "shebang-test"
|
||||||
|
version = "0.1.0"
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["setuptools>=61.0"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[tool.setuptools]
|
||||||
|
packages = ["shebang_test"]
|
||||||
|
|
||||||
|
[tool.setuptools.data-files]
|
||||||
|
"scripts" = ["scripts/custom_script", "scripts/custom_gui_script"]
|
||||||
|
"#})?;
|
||||||
|
|
||||||
|
// Create the package directory.
|
||||||
|
let package_dir = project_dir.child("shebang_test");
|
||||||
|
package_dir.create_dir_all()?;
|
||||||
|
|
||||||
|
// Create an `__init__.py` file in the package directory.
|
||||||
|
let init_file = package_dir.child("__init__.py");
|
||||||
|
init_file.touch()?;
|
||||||
|
|
||||||
|
// Create scripts directory with scripts that have shebangs with arguments
|
||||||
|
let scripts_dir = project_dir.child("scripts");
|
||||||
|
scripts_dir.create_dir_all()?;
|
||||||
|
|
||||||
|
let script_with_args = scripts_dir.child("custom_script");
|
||||||
|
script_with_args.write_str(indoc! {r#"
|
||||||
|
#!python -E -s
|
||||||
|
# This is a test script with shebang arguments
|
||||||
|
import sys
|
||||||
|
print(f"Hello from {sys.executable}")
|
||||||
|
print(f"Arguments: {sys.argv}")
|
||||||
|
"#})?;
|
||||||
|
|
||||||
|
let gui_script_with_args = scripts_dir.child("custom_gui_script");
|
||||||
|
gui_script_with_args.write_str(indoc! {r#"
|
||||||
|
#!pythonw -E
|
||||||
|
# This is a test GUI script with shebang arguments
|
||||||
|
import sys
|
||||||
|
print(f"Hello from GUI script: {sys.executable}")
|
||||||
|
"#})?;
|
||||||
|
|
||||||
|
// Create a `setup.py` that explicitly handles scripts.
|
||||||
|
let setup_py = project_dir.child("setup.py");
|
||||||
|
setup_py.write_str(indoc! {r"
|
||||||
|
from setuptools import setup
|
||||||
|
setup(scripts=['scripts/custom_script', 'scripts/custom_gui_script'])
|
||||||
|
"})?;
|
||||||
|
|
||||||
|
// Install the package.
|
||||||
|
uv_snapshot!(context.filters(), context.pip_install().arg(project_dir.path()), @r###"
|
||||||
|
success: true
|
||||||
|
exit_code: 0
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Resolved 1 package in [TIME]
|
||||||
|
Prepared 1 package in [TIME]
|
||||||
|
Installed 1 package in [TIME]
|
||||||
|
+ shebang-test==0.1.0 (from file://[TEMP_DIR]/shebang_test)
|
||||||
|
"###);
|
||||||
|
|
||||||
|
// Check the installed scripts have their shebangs stripped of arguments.
|
||||||
|
let custom_script_path = venv_bin_path(&context.venv).join("custom_script");
|
||||||
|
let script_content = fs::read_to_string(&custom_script_path)?;
|
||||||
|
|
||||||
|
insta::with_settings!({filters => context.filters()
|
||||||
|
}, {
|
||||||
|
insta::assert_snapshot!(script_content, @r#"
|
||||||
|
#![VENV]/bin/python3
|
||||||
|
# This is a test script with shebang arguments
|
||||||
|
import sys
|
||||||
|
print(f"Hello from {sys.executable}")
|
||||||
|
print(f"Arguments: {sys.argv}")
|
||||||
|
"#);
|
||||||
|
});
|
||||||
|
|
||||||
|
let custom_gui_script_path = venv_bin_path(&context.venv).join("custom_gui_script");
|
||||||
|
let gui_script_content = fs::read_to_string(&custom_gui_script_path)?;
|
||||||
|
|
||||||
|
insta::with_settings!({filters => context.filters()
|
||||||
|
}, {
|
||||||
|
insta::assert_snapshot!(gui_script_content, @r#"
|
||||||
|
#![VENV]/bin/python3
|
||||||
|
# This is a test GUI script with shebang arguments
|
||||||
|
import sys
|
||||||
|
print(f"Hello from GUI script: {sys.executable}")
|
||||||
|
"#);
|
||||||
|
});
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
|
@ -1444,8 +1444,8 @@ fn python_install_314() {
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
Installed Python 3.14.0b3 in [TIME]
|
Installed Python 3.14.0b4 in [TIME]
|
||||||
+ cpython-3.14.0b3-[PLATFORM]
|
+ cpython-3.14.0b4-[PLATFORM]
|
||||||
");
|
");
|
||||||
|
|
||||||
// Install a specific pre-release
|
// Install a specific pre-release
|
||||||
|
@ -1465,7 +1465,7 @@ fn python_install_314() {
|
||||||
success: true
|
success: true
|
||||||
exit_code: 0
|
exit_code: 0
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
[TEMP_DIR]/managed/cpython-3.14.0b3-[PLATFORM]/[INSTALL-BIN]/python
|
[TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/python
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
");
|
");
|
||||||
|
@ -1475,7 +1475,7 @@ fn python_install_314() {
|
||||||
success: true
|
success: true
|
||||||
exit_code: 0
|
exit_code: 0
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
[TEMP_DIR]/managed/cpython-3.14.0b3-[PLATFORM]/[INSTALL-BIN]/python
|
[TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/python
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
");
|
");
|
||||||
|
@ -1484,7 +1484,7 @@ fn python_install_314() {
|
||||||
success: true
|
success: true
|
||||||
exit_code: 0
|
exit_code: 0
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
[TEMP_DIR]/managed/cpython-3.14.0b3-[PLATFORM]/[INSTALL-BIN]/python
|
[TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/python
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
");
|
");
|
||||||
|
|
|
@ -4876,7 +4876,7 @@ fn exit_status_signal() -> Result<()> {
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn run_repeated() -> Result<()> {
|
fn run_repeated() -> Result<()> {
|
||||||
let context = TestContext::new_with_versions(&["3.13"]);
|
let context = TestContext::new_with_versions(&["3.13", "3.12"]);
|
||||||
|
|
||||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||||
pyproject_toml.write_str(indoc! { r#"
|
pyproject_toml.write_str(indoc! { r#"
|
||||||
|
@ -4923,22 +4923,25 @@ fn run_repeated() -> Result<()> {
|
||||||
Resolved 1 package in [TIME]
|
Resolved 1 package in [TIME]
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
// Re-running as a tool shouldn't require reinstalling `typing-extensions`, since the environment is cached.
|
// Re-running as a tool does require reinstalling `typing-extensions`, since the base venv is
|
||||||
|
// different.
|
||||||
uv_snapshot!(
|
uv_snapshot!(
|
||||||
context.filters(),
|
context.filters(),
|
||||||
context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r###"
|
context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r#"
|
||||||
success: false
|
success: false
|
||||||
exit_code: 1
|
exit_code: 1
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
Resolved 1 package in [TIME]
|
Resolved 1 package in [TIME]
|
||||||
|
Installed 1 package in [TIME]
|
||||||
|
+ typing-extensions==4.10.0
|
||||||
Traceback (most recent call last):
|
Traceback (most recent call last):
|
||||||
File "<string>", line 1, in <module>
|
File "<string>", line 1, in <module>
|
||||||
import typing_extensions; import iniconfig
|
import typing_extensions; import iniconfig
|
||||||
^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^
|
||||||
ModuleNotFoundError: No module named 'iniconfig'
|
ModuleNotFoundError: No module named 'iniconfig'
|
||||||
"###);
|
"#);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
@ -4979,22 +4982,25 @@ fn run_without_overlay() -> Result<()> {
|
||||||
+ typing-extensions==4.10.0
|
+ typing-extensions==4.10.0
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
// Import `iniconfig` in the context of a `tool run` command, which should fail.
|
// Import `iniconfig` in the context of a `tool run` command, which should fail. Note that
|
||||||
|
// typing-extensions gets installed again, because the venv is not shared.
|
||||||
uv_snapshot!(
|
uv_snapshot!(
|
||||||
context.filters(),
|
context.filters(),
|
||||||
context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r###"
|
context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r#"
|
||||||
success: false
|
success: false
|
||||||
exit_code: 1
|
exit_code: 1
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
Resolved 1 package in [TIME]
|
Resolved 1 package in [TIME]
|
||||||
|
Installed 1 package in [TIME]
|
||||||
|
+ typing-extensions==4.10.0
|
||||||
Traceback (most recent call last):
|
Traceback (most recent call last):
|
||||||
File "<string>", line 1, in <module>
|
File "<string>", line 1, in <module>
|
||||||
import typing_extensions; import iniconfig
|
import typing_extensions; import iniconfig
|
||||||
^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^
|
||||||
ModuleNotFoundError: No module named 'iniconfig'
|
ModuleNotFoundError: No module named 'iniconfig'
|
||||||
"###);
|
"#);
|
||||||
|
|
||||||
// Re-running in the context of the project should reset the overlay.
|
// Re-running in the context of the project should reset the overlay.
|
||||||
uv_snapshot!(
|
uv_snapshot!(
|
||||||
|
|
|
@ -3,13 +3,14 @@ use assert_cmd::prelude::*;
|
||||||
use assert_fs::{fixture::ChildPath, prelude::*};
|
use assert_fs::{fixture::ChildPath, prelude::*};
|
||||||
use indoc::{formatdoc, indoc};
|
use indoc::{formatdoc, indoc};
|
||||||
use insta::assert_snapshot;
|
use insta::assert_snapshot;
|
||||||
|
|
||||||
use crate::common::{TestContext, download_to_disk, packse_index_url, uv_snapshot, venv_bin_path};
|
|
||||||
use predicates::prelude::predicate;
|
use predicates::prelude::predicate;
|
||||||
use tempfile::tempdir_in;
|
use tempfile::tempdir_in;
|
||||||
|
|
||||||
use uv_fs::Simplified;
|
use uv_fs::Simplified;
|
||||||
use uv_static::EnvVars;
|
use uv_static::EnvVars;
|
||||||
|
|
||||||
|
use crate::common::{TestContext, download_to_disk, packse_index_url, uv_snapshot, venv_bin_path};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn sync() -> Result<()> {
|
fn sync() -> Result<()> {
|
||||||
let context = TestContext::new("3.12");
|
let context = TestContext::new("3.12");
|
||||||
|
@ -1121,10 +1122,7 @@ fn sync_build_isolation_package() -> Result<()> {
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
// Running `uv sync` should fail for iniconfig.
|
// Running `uv sync` should fail for iniconfig.
|
||||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r###"
|
||||||
.chain(context.filters())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
uv_snapshot!(filters, context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r###"
|
|
||||||
success: false
|
success: false
|
||||||
exit_code: 1
|
exit_code: 1
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
@ -1214,10 +1212,7 @@ fn sync_build_isolation_extra() -> Result<()> {
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
// Running `uv sync` should fail for the `compile` extra.
|
// Running `uv sync` should fail for the `compile` extra.
|
||||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("compile"), @r###"
|
||||||
.chain(context.filters())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
uv_snapshot!(&filters, context.sync().arg("--extra").arg("compile"), @r###"
|
|
||||||
success: false
|
success: false
|
||||||
exit_code: 1
|
exit_code: 1
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
@ -1238,7 +1233,7 @@ fn sync_build_isolation_extra() -> Result<()> {
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
// Running `uv sync` with `--all-extras` should also fail.
|
// Running `uv sync` with `--all-extras` should also fail.
|
||||||
uv_snapshot!(&filters, context.sync().arg("--all-extras"), @r###"
|
uv_snapshot!(context.filters(), context.sync().arg("--all-extras"), @r###"
|
||||||
success: false
|
success: false
|
||||||
exit_code: 1
|
exit_code: 1
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
@ -6984,10 +6979,7 @@ fn sync_derivation_chain() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([
|
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||||
(r"exit code: 1", "exit status: 1"),
|
|
||||||
(r"/.*/src", "/[TMP]/src"),
|
|
||||||
])
|
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
uv_snapshot!(filters, context.sync(), @r###"
|
uv_snapshot!(filters, context.sync(), @r###"
|
||||||
|
@ -7050,10 +7042,7 @@ fn sync_derivation_chain_extra() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([
|
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||||
(r"exit code: 1", "exit status: 1"),
|
|
||||||
(r"/.*/src", "/[TMP]/src"),
|
|
||||||
])
|
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
uv_snapshot!(filters, context.sync().arg("--extra").arg("wsgi"), @r###"
|
uv_snapshot!(filters, context.sync().arg("--extra").arg("wsgi"), @r###"
|
||||||
|
@ -7118,10 +7107,7 @@ fn sync_derivation_chain_group() -> Result<()> {
|
||||||
let filters = context
|
let filters = context
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([
|
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||||
(r"exit code: 1", "exit status: 1"),
|
|
||||||
(r"/.*/src", "/[TMP]/src"),
|
|
||||||
])
|
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
uv_snapshot!(filters, context.sync().arg("--group").arg("wsgi"), @r###"
|
uv_snapshot!(filters, context.sync().arg("--group").arg("wsgi"), @r###"
|
||||||
|
@ -9953,7 +9939,7 @@ fn sync_required_environment_hint() -> Result<()> {
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
Resolved 2 packages in [TIME]
|
Resolved 2 packages in [TIME]
|
||||||
error: Distribution `no-sdist-no-wheels-with-matching-platform-a==1.0.0 @ registry+https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html` can't be installed because it doesn't have a source distribution or wheel for the current platform
|
error: Distribution `no-sdist-no-wheels-with-matching-platform-a==1.0.0 @ registry+https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/` can't be installed because it doesn't have a source distribution or wheel for the current platform
|
||||||
|
|
||||||
hint: You're on [PLATFORM] (`[TAG]`), but `no-sdist-no-wheels-with-matching-platform-a` (v1.0.0) only has wheels for the following platform: `macosx_10_0_ppc64`; consider adding your platform to `tool.uv.required-environments` to ensure uv resolves to a version with compatible wheels
|
hint: You're on [PLATFORM] (`[TAG]`), but `no-sdist-no-wheels-with-matching-platform-a` (v1.0.0) only has wheels for the following platform: `macosx_10_0_ppc64`; consider adding your platform to `tool.uv.required-environments` to ensure uv resolves to a version with compatible wheels
|
||||||
");
|
");
|
||||||
|
@ -9989,3 +9975,54 @@ fn sync_url_with_query_parameters() -> Result<()> {
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
#[cfg(unix)]
|
||||||
|
fn read_only() -> Result<()> {
|
||||||
|
use std::os::unix::fs::PermissionsExt;
|
||||||
|
|
||||||
|
let context = TestContext::new("3.12");
|
||||||
|
|
||||||
|
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||||
|
pyproject_toml.write_str(
|
||||||
|
r#"
|
||||||
|
[project]
|
||||||
|
name = "project"
|
||||||
|
version = "0.1.0"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = ["iniconfig"]
|
||||||
|
"#,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
uv_snapshot!(context.filters(), context.sync(), @r###"
|
||||||
|
success: true
|
||||||
|
exit_code: 0
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Resolved 2 packages in [TIME]
|
||||||
|
Prepared 1 package in [TIME]
|
||||||
|
Installed 1 package in [TIME]
|
||||||
|
+ iniconfig==2.0.0
|
||||||
|
"###);
|
||||||
|
|
||||||
|
assert!(context.temp_dir.child("uv.lock").exists());
|
||||||
|
|
||||||
|
// Remove the flock.
|
||||||
|
fs_err::remove_file(context.venv.child(".lock"))?;
|
||||||
|
|
||||||
|
// Make the virtual environment read and execute (but not write).
|
||||||
|
fs_err::set_permissions(&context.venv, std::fs::Permissions::from_mode(0o555))?;
|
||||||
|
|
||||||
|
uv_snapshot!(context.filters(), context.sync(), @r"
|
||||||
|
success: true
|
||||||
|
exit_code: 0
|
||||||
|
----- stdout -----
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Resolved 2 packages in [TIME]
|
||||||
|
Audited 1 package in [TIME]
|
||||||
|
");
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
|
@ -448,12 +448,12 @@ fn tool_install_suggest_other_packages_with_executable() {
|
||||||
uv_snapshot!(filters, context.tool_install()
|
uv_snapshot!(filters, context.tool_install()
|
||||||
.arg("fastapi==0.111.0")
|
.arg("fastapi==0.111.0")
|
||||||
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
|
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
|
||||||
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
|
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
|
||||||
success: false
|
success: false
|
||||||
exit_code: 1
|
exit_code: 1
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
No executables are provided by `fastapi`
|
No executables are provided by package `fastapi`; removing tool
|
||||||
However, an executable with the name `fastapi` is available via dependency `fastapi-cli`.
|
hint: An executable with the name `fastapi` is available via dependency `fastapi-cli`.
|
||||||
Did you mean `uv tool install fastapi-cli`?
|
Did you mean `uv tool install fastapi-cli`?
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
|
@ -494,7 +494,7 @@ fn tool_install_suggest_other_packages_with_executable() {
|
||||||
+ uvicorn==0.29.0
|
+ uvicorn==0.29.0
|
||||||
+ watchfiles==0.21.0
|
+ watchfiles==0.21.0
|
||||||
+ websockets==12.0
|
+ websockets==12.0
|
||||||
"###);
|
");
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Test installing a tool at a version
|
/// Test installing a tool at a version
|
||||||
|
@ -821,11 +821,11 @@ fn tool_install_remove_on_empty() -> Result<()> {
|
||||||
.arg(black.path())
|
.arg(black.path())
|
||||||
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
|
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
|
||||||
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
|
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
|
||||||
.env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
|
.env(EnvVars::PATH, bin_dir.as_os_str()), @r"
|
||||||
success: false
|
success: false
|
||||||
exit_code: 1
|
exit_code: 1
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
No executables are provided by `black`
|
No executables are provided by package `black`; removing tool
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
Resolved 1 package in [TIME]
|
Resolved 1 package in [TIME]
|
||||||
|
@ -839,7 +839,7 @@ fn tool_install_remove_on_empty() -> Result<()> {
|
||||||
- packaging==24.0
|
- packaging==24.0
|
||||||
- pathspec==0.12.1
|
- pathspec==0.12.1
|
||||||
- platformdirs==4.2.0
|
- platformdirs==4.2.0
|
||||||
"###);
|
");
|
||||||
|
|
||||||
// Re-request `black`. It should reinstall, without requiring `--force`.
|
// Re-request `black`. It should reinstall, without requiring `--force`.
|
||||||
uv_snapshot!(context.filters(), context.tool_install()
|
uv_snapshot!(context.filters(), context.tool_install()
|
||||||
|
@ -1649,18 +1649,18 @@ fn tool_install_no_entrypoints() {
|
||||||
.arg("iniconfig")
|
.arg("iniconfig")
|
||||||
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
|
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
|
||||||
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
|
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
|
||||||
.env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
|
.env(EnvVars::PATH, bin_dir.as_os_str()), @r"
|
||||||
success: false
|
success: false
|
||||||
exit_code: 1
|
exit_code: 1
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
No executables are provided by `iniconfig`
|
No executables are provided by package `iniconfig`; removing tool
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
Resolved 1 package in [TIME]
|
Resolved 1 package in [TIME]
|
||||||
Prepared 1 package in [TIME]
|
Prepared 1 package in [TIME]
|
||||||
Installed 1 package in [TIME]
|
Installed 1 package in [TIME]
|
||||||
+ iniconfig==2.0.0
|
+ iniconfig==2.0.0
|
||||||
"###);
|
");
|
||||||
|
|
||||||
// Ensure the tool environment is not created.
|
// Ensure the tool environment is not created.
|
||||||
tool_dir
|
tool_dir
|
||||||
|
@ -1682,7 +1682,6 @@ fn tool_install_uninstallable() {
|
||||||
.filters()
|
.filters()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.chain([
|
.chain([
|
||||||
(r"exit code: 1", "exit status: 1"),
|
|
||||||
(r"bdist\.[^/\\\s]+(-[^/\\\s]+)?", "bdist.linux-x86_64"),
|
(r"bdist\.[^/\\\s]+(-[^/\\\s]+)?", "bdist.linux-x86_64"),
|
||||||
(r"\\\.", ""),
|
(r"\\\.", ""),
|
||||||
(r"#+", "#"),
|
(r"#+", "#"),
|
||||||
|
|
|
@ -1958,3 +1958,57 @@ fn version_set_evil_constraints() -> Result<()> {
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Bump the version with conflicting extras, to ensure we're activating the correct subset of
|
||||||
|
/// extras during the resolve.
|
||||||
|
#[test]
|
||||||
|
fn version_extras() -> Result<()> {
|
||||||
|
let context = TestContext::new("3.12");
|
||||||
|
|
||||||
|
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||||
|
pyproject_toml.write_str(
|
||||||
|
r#"
|
||||||
|
[project]
|
||||||
|
name = "myproject"
|
||||||
|
version = "1.10.31"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
foo = ["requests"]
|
||||||
|
bar = ["httpx"]
|
||||||
|
baz = ["flask"]
|
||||||
|
|
||||||
|
[tool.uv]
|
||||||
|
conflicts = [[{"extra" = "foo"}, {"extra" = "bar"}]]
|
||||||
|
"#,
|
||||||
|
)?;
|
||||||
|
|
||||||
|
uv_snapshot!(context.filters(), context.version()
|
||||||
|
.arg("--bump").arg("patch"), @r"
|
||||||
|
success: true
|
||||||
|
exit_code: 0
|
||||||
|
----- stdout -----
|
||||||
|
myproject 1.10.31 => 1.10.32
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Resolved 19 packages in [TIME]
|
||||||
|
Audited in [TIME]
|
||||||
|
");
|
||||||
|
|
||||||
|
// Sync an extra, we should not remove it.
|
||||||
|
context.sync().arg("--extra").arg("foo").assert().success();
|
||||||
|
|
||||||
|
uv_snapshot!(context.filters(), context.version()
|
||||||
|
.arg("--bump").arg("patch"), @r"
|
||||||
|
success: true
|
||||||
|
exit_code: 0
|
||||||
|
----- stdout -----
|
||||||
|
myproject 1.10.32 => 1.10.33
|
||||||
|
|
||||||
|
----- stderr -----
|
||||||
|
Resolved 19 packages in [TIME]
|
||||||
|
Audited in [TIME]
|
||||||
|
");
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
|
@ -1351,7 +1351,7 @@ fn workspace_unsatisfiable_member_dependencies() -> Result<()> {
|
||||||
leaf.child("src/__init__.py").touch()?;
|
leaf.child("src/__init__.py").touch()?;
|
||||||
|
|
||||||
// Resolving should fail.
|
// Resolving should fail.
|
||||||
uv_snapshot!(context.filters(), context.lock().current_dir(&workspace), @r###"
|
uv_snapshot!(context.filters(), context.lock().current_dir(&workspace), @r"
|
||||||
success: false
|
success: false
|
||||||
exit_code: 1
|
exit_code: 1
|
||||||
----- stdout -----
|
----- stdout -----
|
||||||
|
@ -1359,9 +1359,9 @@ fn workspace_unsatisfiable_member_dependencies() -> Result<()> {
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
|
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
|
||||||
× No solution found when resolving dependencies:
|
× No solution found when resolving dependencies:
|
||||||
╰─▶ Because only httpx<=1.0.0b0 is available and leaf depends on httpx>9999, we can conclude that leaf's requirements are unsatisfiable.
|
╰─▶ Because only httpx<=0.27.0 is available and leaf depends on httpx>9999, we can conclude that leaf's requirements are unsatisfiable.
|
||||||
And because your workspace requires leaf, we can conclude that your workspace's requirements are unsatisfiable.
|
And because your workspace requires leaf, we can conclude that your workspace's requirements are unsatisfiable.
|
||||||
"###
|
"
|
||||||
);
|
);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
|
|
@ -2,9 +2,8 @@
|
||||||
|
|
||||||
!!! note
|
!!! note
|
||||||
|
|
||||||
The uv build backend is currently in preview and may change without warning.
|
Currently, the default build backend for `uv init` is
|
||||||
|
[hatchling](https://pypi.org/project/hatchling/). This will change to `uv` in a future version.
|
||||||
When preview mode is not enabled, uv uses [hatchling](https://pypi.org/project/hatchling/) as the default build backend.
|
|
||||||
|
|
||||||
A build backend transforms a source tree (i.e., a directory) into a source distribution or a wheel.
|
A build backend transforms a source tree (i.e., a directory) into a source distribution or a wheel.
|
||||||
|
|
||||||
|
@ -12,19 +11,32 @@ uv supports all build backends (as specified by [PEP 517](https://peps.python.or
|
||||||
also provides a native build backend (`uv_build`) that integrates tightly with uv to improve
|
also provides a native build backend (`uv_build`) that integrates tightly with uv to improve
|
||||||
performance and user experience.
|
performance and user experience.
|
||||||
|
|
||||||
|
## Choosing a build backend
|
||||||
|
|
||||||
|
The uv build backend is a great choice for most Python projects. It has reasonable defaults, with
|
||||||
|
the goal of requiring zero configuration for most users, but provides flexible configuration to
|
||||||
|
accommodate most Python project structures. It integrates tightly with uv, to improve messaging and
|
||||||
|
user experience. It validates project metadata and structures, preventing common mistakes. And,
|
||||||
|
finally, it's very fast.
|
||||||
|
|
||||||
|
The uv build backend currently **only supports pure Python code**. An alternative backend is
|
||||||
|
required to build a
|
||||||
|
[library with extension modules](../concepts/projects/init.md#projects-with-extension-modules).
|
||||||
|
|
||||||
|
!!! tip
|
||||||
|
|
||||||
|
While the backend supports a number of options for configuring your project structure, when build scripts or
|
||||||
|
a more flexible project layout are required, consider using the
|
||||||
|
[hatchling](https://hatch.pypa.io/latest/config/build/#build-system) build backend instead.
|
||||||
|
|
||||||
## Using the uv build backend
|
## Using the uv build backend
|
||||||
|
|
||||||
!!! important
|
|
||||||
|
|
||||||
The uv build backend currently **only supports pure Python code**. An alternative backend is to
|
|
||||||
build a [library with extension modules](../concepts/projects/init.md#projects-with-extension-modules).
|
|
||||||
|
|
||||||
To use uv as a build backend in an existing project, add `uv_build` to the
|
To use uv as a build backend in an existing project, add `uv_build` to the
|
||||||
[`[build-system]`](../concepts/projects/config.md#build-systems) section in your `pyproject.toml`:
|
[`[build-system]`](../concepts/projects/config.md#build-systems) section in your `pyproject.toml`:
|
||||||
|
|
||||||
```toml title="pyproject.toml"
|
```toml title="pyproject.toml"
|
||||||
[build-system]
|
[build-system]
|
||||||
requires = ["uv_build>=0.7.18,<0.8.0"]
|
requires = ["uv_build>=0.7.20,<0.8.0"]
|
||||||
build-backend = "uv_build"
|
build-backend = "uv_build"
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -122,16 +134,50 @@ the project structure:
|
||||||
pyproject.toml
|
pyproject.toml
|
||||||
src
|
src
|
||||||
├── foo
|
├── foo
|
||||||
│ └── __init__.py
|
│ └── __init__.py
|
||||||
└── bar
|
└── bar
|
||||||
└── __init__.py
|
└── __init__.py
|
||||||
```
|
```
|
||||||
|
|
||||||
While we do not recommend this structure (i.e., you should use a workspace with multiple packages
|
While we do not recommend this structure (i.e., you should use a workspace with multiple packages
|
||||||
instead), it is supported via the `namespace` option:
|
instead), it is supported by setting `module-name` to a list of names:
|
||||||
|
|
||||||
```toml title="pyproject.toml"
|
```toml title="pyproject.toml"
|
||||||
[tool.uv.build-backend]
|
[tool.uv.build-backend]
|
||||||
|
module-name = ["foo", "bar"]
|
||||||
|
```
|
||||||
|
|
||||||
|
For packages with many modules or complex namespaces, the `namespace = true` option can be used to
|
||||||
|
avoid explicitly declaring each module name, e.g.:
|
||||||
|
|
||||||
|
```toml title="pyproject.toml"
|
||||||
|
[tool.uv.build-backend]
|
||||||
|
namespace = true
|
||||||
|
```
|
||||||
|
|
||||||
|
!!! warning
|
||||||
|
|
||||||
|
Using `namespace = true` disables safety checks. Using an explicit list of module names is
|
||||||
|
strongly recommended outside of legacy projects.
|
||||||
|
|
||||||
|
The `namespace` option can also be used with `module-name` to explicitly declare the root, e.g., for
|
||||||
|
the project structure:
|
||||||
|
|
||||||
|
```text
|
||||||
|
pyproject.toml
|
||||||
|
src
|
||||||
|
└── foo
|
||||||
|
├── bar
|
||||||
|
│ └── __init__.py
|
||||||
|
└── baz
|
||||||
|
└── __init__.py
|
||||||
|
```
|
||||||
|
|
||||||
|
The recommended configuration would be:
|
||||||
|
|
||||||
|
```toml title="pyproject.toml"
|
||||||
|
[tool.uv.build-backend]
|
||||||
|
module-name = "foo"
|
||||||
namespace = true
|
namespace = true
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
@ -25,7 +25,7 @@ uv provides a standalone installer to download and install uv:
|
||||||
Request a specific version by including it in the URL:
|
Request a specific version by including it in the URL:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ curl -LsSf https://astral.sh/uv/0.7.18/install.sh | sh
|
$ curl -LsSf https://astral.sh/uv/0.7.20/install.sh | sh
|
||||||
```
|
```
|
||||||
|
|
||||||
=== "Windows"
|
=== "Windows"
|
||||||
|
@ -41,7 +41,7 @@ uv provides a standalone installer to download and install uv:
|
||||||
Request a specific version by including it in the URL:
|
Request a specific version by including it in the URL:
|
||||||
|
|
||||||
```pwsh-session
|
```pwsh-session
|
||||||
PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.18/install.ps1 | iex"
|
PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.20/install.ps1 | iex"
|
||||||
```
|
```
|
||||||
|
|
||||||
!!! tip
|
!!! tip
|
||||||
|
|
|
@ -92,7 +92,7 @@ the second stage, we'll copy this directory over to the final image, omitting th
|
||||||
other unnecessary files.
|
other unnecessary files.
|
||||||
|
|
||||||
```dockerfile title="Dockerfile"
|
```dockerfile title="Dockerfile"
|
||||||
FROM ghcr.io/astral-sh/uv:0.7.18 AS uv
|
FROM ghcr.io/astral-sh/uv:0.7.20 AS uv
|
||||||
|
|
||||||
# First, bundle the dependencies into the task root.
|
# First, bundle the dependencies into the task root.
|
||||||
FROM public.ecr.aws/lambda/python:3.13 AS builder
|
FROM public.ecr.aws/lambda/python:3.13 AS builder
|
||||||
|
@ -334,7 +334,7 @@ And confirm that opening http://127.0.0.1:8000/ in a web browser displays, "Hell
|
||||||
Finally, we'll update the Dockerfile to include the local library in the deployment package:
|
Finally, we'll update the Dockerfile to include the local library in the deployment package:
|
||||||
|
|
||||||
```dockerfile title="Dockerfile"
|
```dockerfile title="Dockerfile"
|
||||||
FROM ghcr.io/astral-sh/uv:0.7.18 AS uv
|
FROM ghcr.io/astral-sh/uv:0.7.20 AS uv
|
||||||
|
|
||||||
# First, bundle the dependencies into the task root.
|
# First, bundle the dependencies into the task root.
|
||||||
FROM public.ecr.aws/lambda/python:3.13 AS builder
|
FROM public.ecr.aws/lambda/python:3.13 AS builder
|
||||||
|
|
|
@ -31,7 +31,7 @@ $ docker run --rm -it ghcr.io/astral-sh/uv:debian uv --help
|
||||||
The following distroless images are available:
|
The following distroless images are available:
|
||||||
|
|
||||||
- `ghcr.io/astral-sh/uv:latest`
|
- `ghcr.io/astral-sh/uv:latest`
|
||||||
- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.18`
|
- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.20`
|
||||||
- `ghcr.io/astral-sh/uv:{major}.{minor}`, e.g., `ghcr.io/astral-sh/uv:0.7` (the latest patch
|
- `ghcr.io/astral-sh/uv:{major}.{minor}`, e.g., `ghcr.io/astral-sh/uv:0.7` (the latest patch
|
||||||
version)
|
version)
|
||||||
|
|
||||||
|
@ -75,7 +75,7 @@ And the following derived images are available:
|
||||||
|
|
||||||
As with the distroless image, each derived image is published with uv version tags as
|
As with the distroless image, each derived image is published with uv version tags as
|
||||||
`ghcr.io/astral-sh/uv:{major}.{minor}.{patch}-{base}` and
|
`ghcr.io/astral-sh/uv:{major}.{minor}.{patch}-{base}` and
|
||||||
`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.18-alpine`.
|
`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.20-alpine`.
|
||||||
|
|
||||||
For more details, see the [GitHub Container](https://github.com/astral-sh/uv/pkgs/container/uv)
|
For more details, see the [GitHub Container](https://github.com/astral-sh/uv/pkgs/container/uv)
|
||||||
page.
|
page.
|
||||||
|
@ -113,7 +113,7 @@ Note this requires `curl` to be available.
|
||||||
In either case, it is best practice to pin to a specific uv version, e.g., with:
|
In either case, it is best practice to pin to a specific uv version, e.g., with:
|
||||||
|
|
||||||
```dockerfile
|
```dockerfile
|
||||||
COPY --from=ghcr.io/astral-sh/uv:0.7.18 /uv /uvx /bin/
|
COPY --from=ghcr.io/astral-sh/uv:0.7.20 /uv /uvx /bin/
|
||||||
```
|
```
|
||||||
|
|
||||||
!!! tip
|
!!! tip
|
||||||
|
@ -131,7 +131,7 @@ COPY --from=ghcr.io/astral-sh/uv:0.7.18 /uv /uvx /bin/
|
||||||
Or, with the installer:
|
Or, with the installer:
|
||||||
|
|
||||||
```dockerfile
|
```dockerfile
|
||||||
ADD https://astral.sh/uv/0.7.18/install.sh /uv-installer.sh
|
ADD https://astral.sh/uv/0.7.20/install.sh /uv-installer.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
### Installing a project
|
### Installing a project
|
||||||
|
@ -557,5 +557,5 @@ Verified OK
|
||||||
!!! tip
|
!!! tip
|
||||||
|
|
||||||
These examples use `latest`, but best practice is to verify the attestation for a specific
|
These examples use `latest`, but best practice is to verify the attestation for a specific
|
||||||
version tag, e.g., `ghcr.io/astral-sh/uv:0.7.18`, or (even better) the specific image digest,
|
version tag, e.g., `ghcr.io/astral-sh/uv:0.7.20`, or (even better) the specific image digest,
|
||||||
such as `ghcr.io/astral-sh/uv:0.5.27@sha256:5adf09a5a526f380237408032a9308000d14d5947eafa687ad6c6a2476787b4f`.
|
such as `ghcr.io/astral-sh/uv:0.5.27@sha256:5adf09a5a526f380237408032a9308000d14d5947eafa687ad6c6a2476787b4f`.
|
||||||
|
|
|
@ -27,7 +27,7 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install uv
|
- name: Install uv
|
||||||
uses: astral-sh/setup-uv@v5
|
uses: astral-sh/setup-uv@v6
|
||||||
```
|
```
|
||||||
|
|
||||||
It is considered best practice to pin to a specific uv version, e.g., with:
|
It is considered best practice to pin to a specific uv version, e.g., with:
|
||||||
|
@ -44,10 +44,10 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install uv
|
- name: Install uv
|
||||||
uses: astral-sh/setup-uv@v5
|
uses: astral-sh/setup-uv@v6
|
||||||
with:
|
with:
|
||||||
# Install a specific version of uv.
|
# Install a specific version of uv.
|
||||||
version: "0.7.18"
|
version: "0.7.20"
|
||||||
```
|
```
|
||||||
|
|
||||||
## Setting up Python
|
## Setting up Python
|
||||||
|
@ -66,7 +66,7 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install uv
|
- name: Install uv
|
||||||
uses: astral-sh/setup-uv@v5
|
uses: astral-sh/setup-uv@v6
|
||||||
|
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
run: uv python install
|
run: uv python install
|
||||||
|
@ -93,10 +93,10 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install uv
|
- name: Install uv
|
||||||
uses: astral-sh/setup-uv@v5
|
uses: astral-sh/setup-uv@v6
|
||||||
|
|
||||||
- name: "Set up Python"
|
- name: "Set up Python"
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version-file: ".python-version"
|
python-version-file: ".python-version"
|
||||||
```
|
```
|
||||||
|
@ -116,10 +116,10 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install uv
|
- name: Install uv
|
||||||
uses: astral-sh/setup-uv@v5
|
uses: astral-sh/setup-uv@v6
|
||||||
|
|
||||||
- name: "Set up Python"
|
- name: "Set up Python"
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version-file: "pyproject.toml"
|
python-version-file: "pyproject.toml"
|
||||||
```
|
```
|
||||||
|
@ -146,7 +146,7 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install uv and set the python version
|
- name: Install uv and set the python version
|
||||||
uses: astral-sh/setup-uv@v5
|
uses: astral-sh/setup-uv@v6
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
```
|
```
|
||||||
|
@ -187,7 +187,7 @@ jobs:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install uv
|
- name: Install uv
|
||||||
uses: astral-sh/setup-uv@v5
|
uses: astral-sh/setup-uv@v6
|
||||||
|
|
||||||
- name: Install the project
|
- name: Install the project
|
||||||
run: uv sync --locked --all-extras --dev
|
run: uv sync --locked --all-extras --dev
|
||||||
|
@ -212,44 +212,11 @@ persisting the cache:
|
||||||
|
|
||||||
```yaml title="example.yml"
|
```yaml title="example.yml"
|
||||||
- name: Enable caching
|
- name: Enable caching
|
||||||
uses: astral-sh/setup-uv@v5
|
uses: astral-sh/setup-uv@v6
|
||||||
with:
|
with:
|
||||||
enable-cache: true
|
enable-cache: true
|
||||||
```
|
```
|
||||||
|
|
||||||
You can configure the action to use a custom cache directory on the runner:
|
|
||||||
|
|
||||||
```yaml title="example.yml"
|
|
||||||
- name: Define a custom uv cache path
|
|
||||||
uses: astral-sh/setup-uv@v5
|
|
||||||
with:
|
|
||||||
enable-cache: true
|
|
||||||
cache-local-path: "/path/to/cache"
|
|
||||||
```
|
|
||||||
|
|
||||||
Or invalidate it when the lockfile changes:
|
|
||||||
|
|
||||||
```yaml title="example.yml"
|
|
||||||
- name: Define a cache dependency glob
|
|
||||||
uses: astral-sh/setup-uv@v5
|
|
||||||
with:
|
|
||||||
enable-cache: true
|
|
||||||
cache-dependency-glob: "uv.lock"
|
|
||||||
```
|
|
||||||
|
|
||||||
Or when any requirements file changes:
|
|
||||||
|
|
||||||
```yaml title="example.yml"
|
|
||||||
- name: Define a cache dependency glob
|
|
||||||
uses: astral-sh/setup-uv@v5
|
|
||||||
with:
|
|
||||||
enable-cache: true
|
|
||||||
cache-dependency-glob: "requirements**.txt"
|
|
||||||
```
|
|
||||||
|
|
||||||
Note that `astral-sh/setup-uv` will automatically use a separate cache key for each host
|
|
||||||
architecture and platform.
|
|
||||||
|
|
||||||
Alternatively, you can manage the cache manually with the `actions/cache` action:
|
Alternatively, you can manage the cache manually with the `actions/cache` action:
|
||||||
|
|
||||||
```yaml title="example.yml"
|
```yaml title="example.yml"
|
||||||
|
|
|
@ -19,7 +19,7 @@ To make sure your `uv.lock` file is up to date even if your `pyproject.toml` fil
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/astral-sh/uv-pre-commit
|
- repo: https://github.com/astral-sh/uv-pre-commit
|
||||||
# uv version.
|
# uv version.
|
||||||
rev: 0.7.18
|
rev: 0.7.20
|
||||||
hooks:
|
hooks:
|
||||||
- id: uv-lock
|
- id: uv-lock
|
||||||
```
|
```
|
||||||
|
@ -30,7 +30,7 @@ To keep a `requirements.txt` file in sync with your `uv.lock` file:
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/astral-sh/uv-pre-commit
|
- repo: https://github.com/astral-sh/uv-pre-commit
|
||||||
# uv version.
|
# uv version.
|
||||||
rev: 0.7.18
|
rev: 0.7.20
|
||||||
hooks:
|
hooks:
|
||||||
- id: uv-export
|
- id: uv-export
|
||||||
```
|
```
|
||||||
|
@ -41,7 +41,7 @@ To compile requirements files:
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/astral-sh/uv-pre-commit
|
- repo: https://github.com/astral-sh/uv-pre-commit
|
||||||
# uv version.
|
# uv version.
|
||||||
rev: 0.7.18
|
rev: 0.7.20
|
||||||
hooks:
|
hooks:
|
||||||
# Compile requirements
|
# Compile requirements
|
||||||
- id: pip-compile
|
- id: pip-compile
|
||||||
|
@ -54,7 +54,7 @@ To compile alternative requirements files, modify `args` and `files`:
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/astral-sh/uv-pre-commit
|
- repo: https://github.com/astral-sh/uv-pre-commit
|
||||||
# uv version.
|
# uv version.
|
||||||
rev: 0.7.18
|
rev: 0.7.20
|
||||||
hooks:
|
hooks:
|
||||||
# Compile requirements
|
# Compile requirements
|
||||||
- id: pip-compile
|
- id: pip-compile
|
||||||
|
@ -68,7 +68,7 @@ To run the hook over multiple files at the same time, add additional entries:
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/astral-sh/uv-pre-commit
|
- repo: https://github.com/astral-sh/uv-pre-commit
|
||||||
# uv version.
|
# uv version.
|
||||||
rev: 0.7.18
|
rev: 0.7.20
|
||||||
hooks:
|
hooks:
|
||||||
# Compile requirements
|
# Compile requirements
|
||||||
- id: pip-compile
|
- id: pip-compile
|
||||||
|
|
|
@ -444,10 +444,10 @@ $ # With an environment variable.
|
||||||
$ UV_TORCH_BACKEND=auto uv pip install torch
|
$ UV_TORCH_BACKEND=auto uv pip install torch
|
||||||
```
|
```
|
||||||
|
|
||||||
When enabled, uv will query for the installed CUDA driver and AMD GPU versions then use the
|
When enabled, uv will query for the installed CUDA driver, AMD GPU versions, and Intel GPU presence,
|
||||||
most-compatible PyTorch index for all relevant packages (e.g., `torch`, `torchvision`, etc.). If no
|
then use the most-compatible PyTorch index for all relevant packages (e.g., `torch`, `torchvision`,
|
||||||
such GPU is found, uv will fall back to the CPU-only index. uv will continue to respect existing
|
etc.). If no such GPU is found, uv will fall back to the CPU-only index. uv will continue to respect
|
||||||
index configuration for any packages outside the PyTorch ecosystem.
|
existing index configuration for any packages outside the PyTorch ecosystem.
|
||||||
|
|
||||||
You can also select a specific backend (e.g., CUDA 12.6) with `--torch-backend=cu126` (or
|
You can also select a specific backend (e.g., CUDA 12.6) with `--torch-backend=cu126` (or
|
||||||
`UV_TORCH_BACKEND=cu126`):
|
`UV_TORCH_BACKEND=cu126`):
|
||||||
|
@ -460,4 +460,12 @@ $ # With an environment variable.
|
||||||
$ UV_TORCH_BACKEND=cu126 uv pip install torch torchvision
|
$ UV_TORCH_BACKEND=cu126 uv pip install torch torchvision
|
||||||
```
|
```
|
||||||
|
|
||||||
|
On Windows, Intel GPU (XPU) is not automatically selected with `--torch-backend=auto`, but you can
|
||||||
|
manually specify it using `--torch-backend=xpu`:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
$ # Manual selection for Intel GPU.
|
||||||
|
$ uv pip install torch torchvision --torch-backend=xpu
|
||||||
|
```
|
||||||
|
|
||||||
At present, `--torch-backend` is only available in the `uv pip` interface.
|
At present, `--torch-backend` is only available in the `uv pip` interface.
|
||||||
|
|
14
docs/guides/migration/index.md
Normal file
14
docs/guides/migration/index.md
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
# Migration guides
|
||||||
|
|
||||||
|
Learn how to migrate from other tools to uv:
|
||||||
|
|
||||||
|
- [Migrate from pip to uv projects](./pip-to-project.md)
|
||||||
|
|
||||||
|
!!! note
|
||||||
|
|
||||||
|
Other guides, such as migrating from another project management tool, or from pip to `uv pip`
|
||||||
|
are not yet available. See [#5200](https://github.com/astral-sh/uv/issues/5200) to track
|
||||||
|
progress.
|
||||||
|
|
||||||
|
Or, explore the [integration guides](../integration/index.md) to learn how to use uv with other
|
||||||
|
software.
|
472
docs/guides/migration/pip-to-project.md
Normal file
472
docs/guides/migration/pip-to-project.md
Normal file
|
@ -0,0 +1,472 @@
|
||||||
|
# Migrating from pip to a uv project
|
||||||
|
|
||||||
|
This guide will discuss converting from a `pip` and `pip-tools` workflow centered on `requirements`
|
||||||
|
files to uv's project workflow using a `pyproject.toml` and `uv.lock` file.
|
||||||
|
|
||||||
|
!!! note
|
||||||
|
|
||||||
|
If you're looking to migrate from `pip` and `pip-tools` to uv's drop-in interface or from an
|
||||||
|
existing workflow where you're already using a `pyproject.toml`, those guides are not yet
|
||||||
|
written. See [#5200](https://github.com/astral-sh/uv/issues/5200) to track progress.
|
||||||
|
|
||||||
|
We'll start with an overview of developing with `pip`, then discuss migrating to uv.
|
||||||
|
|
||||||
|
!!! tip
|
||||||
|
|
||||||
|
If you're familiar with the ecosystem, you can jump ahead to the
|
||||||
|
[requirements file import](#importing-requirements-files) instructions.
|
||||||
|
|
||||||
|
## Understanding pip workflows
|
||||||
|
|
||||||
|
### Project dependencies
|
||||||
|
|
||||||
|
When you want to use a package in your project, you need to install it first. `pip` supports
|
||||||
|
imperative installation of packages, e.g.:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ pip install fastapi
|
||||||
|
```
|
||||||
|
|
||||||
|
This installs the package into the environment that `pip` is installed in. This may be a virtual
|
||||||
|
environment, or, the global environment of your system's Python installation.
|
||||||
|
|
||||||
|
Then, you can run a Python script that requires the package:
|
||||||
|
|
||||||
|
```python title="example.py"
|
||||||
|
import fastapi
|
||||||
|
```
|
||||||
|
|
||||||
|
It's best practice to create a virtual environment for each project, to avoid mixing packages
|
||||||
|
between them. For example:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ python -m venv
|
||||||
|
$ source .venv/bin/activate
|
||||||
|
$ pip ...
|
||||||
|
```
|
||||||
|
|
||||||
|
We will revisit this topic in the [project environments section](#project-environments) below.
|
||||||
|
|
||||||
|
### Requirements files
|
||||||
|
|
||||||
|
When sharing projects with others, it's useful to declare all the packages you require upfront.
|
||||||
|
`pip` supports installing requirements from a file, e.g.:
|
||||||
|
|
||||||
|
```python title="requirements.txt"
|
||||||
|
fastapi
|
||||||
|
```
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ pip install -r requirements.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
Notice above that `fastapi` is not "locked" to a specific version — each person working on the
|
||||||
|
project may have a different version of `fastapi` installed. `pip-tools` was created to improve this
|
||||||
|
experience.
|
||||||
|
|
||||||
|
When using `pip-tools`, requirements files specify both the dependencies for your project and lock
|
||||||
|
dependencies to a specific version — the file extension is used to differentiate between the two.
|
||||||
|
For example, if you require `fastapi` and `pydantic`, you'd specify these in a `requirements.in`
|
||||||
|
file:
|
||||||
|
|
||||||
|
```python title="requirements.in"
|
||||||
|
fastapi
|
||||||
|
pydantic>2
|
||||||
|
```
|
||||||
|
|
||||||
|
Notice there's a version constraint on `pydantic` — this means only `pydantic` versions later than
|
||||||
|
`2.0.0` can be used. In contrast, `fastapi` does not have a version constraint — any version can be
|
||||||
|
used.
|
||||||
|
|
||||||
|
These dependencies can be compiled into a `requirements.txt` file:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ pip-compile requirements.in -o requirements.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
```python title="requirements.txt"
|
||||||
|
annotated-types==0.7.0
|
||||||
|
# via pydantic
|
||||||
|
anyio==4.8.0
|
||||||
|
# via starlette
|
||||||
|
fastapi==0.115.11
|
||||||
|
# via -r requirements.in
|
||||||
|
idna==3.10
|
||||||
|
# via anyio
|
||||||
|
pydantic==2.10.6
|
||||||
|
# via
|
||||||
|
# -r requirements.in
|
||||||
|
# fastapi
|
||||||
|
pydantic-core==2.27.2
|
||||||
|
# via pydantic
|
||||||
|
sniffio==1.3.1
|
||||||
|
# via anyio
|
||||||
|
starlette==0.46.1
|
||||||
|
# via fastapi
|
||||||
|
typing-extensions==4.12.2
|
||||||
|
# via
|
||||||
|
# fastapi
|
||||||
|
# pydantic
|
||||||
|
# pydantic-core
|
||||||
|
```
|
||||||
|
|
||||||
|
Here, all the versions constraints are _exact_. Only a single version of each package can be used.
|
||||||
|
The above example was generated with `uv pip compile`, but could also be generated with
|
||||||
|
`pip-compile` from `pip-tools`.
|
||||||
|
|
||||||
|
Though less common, the `requirements.txt` can also be generated using `pip freeze`, by first
|
||||||
|
installing the input dependencies into the environment then exporting the installed versions:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ pip install -r requirements.in
|
||||||
|
$ pip freeze > requirements.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
```python title="requirements.txt"
|
||||||
|
annotated-types==0.7.0
|
||||||
|
anyio==4.8.0
|
||||||
|
fastapi==0.115.11
|
||||||
|
idna==3.10
|
||||||
|
pydantic==2.10.6
|
||||||
|
pydantic-core==2.27.2
|
||||||
|
sniffio==1.3.1
|
||||||
|
starlette==0.46.1
|
||||||
|
typing-extensions==4.12.2
|
||||||
|
```
|
||||||
|
|
||||||
|
After compiling dependencies into a locked set of versions, these files are committed to version
|
||||||
|
control and distributed with the project.
|
||||||
|
|
||||||
|
Then, when someone wants to use the project, they install from the requirements file:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ pip install -r requirements.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
<!--- TODO: Discuss equivalent commands for `uv pip compile` and `pip compile` -->
|
||||||
|
|
||||||
|
### Development dependencies
|
||||||
|
|
||||||
|
The requirements file format can only describe a single set of dependencies at once. This means if
|
||||||
|
you have additional _groups_ of dependencies, such as development dependencies, they need separate
|
||||||
|
files. For example, we'll create a `-dev` dependency file:
|
||||||
|
|
||||||
|
```python title="requirements-dev.in"
|
||||||
|
-r requirements.in
|
||||||
|
-c requirements.txt
|
||||||
|
|
||||||
|
pytest
|
||||||
|
```
|
||||||
|
|
||||||
|
Notice the base requirements are included with `-r requirements.in`. This ensures your development
|
||||||
|
environment considers _all_ of the dependencies together. The `-c requirements.txt` _constrains_ the
|
||||||
|
package versions to ensure that the `requirements-dev.txt` uses the same versions as
|
||||||
|
`requirements.txt`.
|
||||||
|
|
||||||
|
!!! note
|
||||||
|
|
||||||
|
It's common to use `-r requirements.txt` directly instead of using both
|
||||||
|
`-r requirements.in` and `-c requirements.txt`. There's no difference in the resulting package
|
||||||
|
versions, but using both files produces annotations which allow you to determine which
|
||||||
|
dependencies are _direct_ (annotated with `-r requirements.in`) and which are _indirect_ (only
|
||||||
|
annotated with `-c requirements.txt`).
|
||||||
|
|
||||||
|
The compiled development dependencies look like:
|
||||||
|
|
||||||
|
```python title="requirements-dev.txt"
|
||||||
|
annotated-types==0.7.0
|
||||||
|
# via
|
||||||
|
# -c requirements.txt
|
||||||
|
# pydantic
|
||||||
|
anyio==4.8.0
|
||||||
|
# via
|
||||||
|
# -c requirements.txt
|
||||||
|
# starlette
|
||||||
|
fastapi==0.115.11
|
||||||
|
# via
|
||||||
|
# -c requirements.txt
|
||||||
|
# -r requirements.in
|
||||||
|
idna==3.10
|
||||||
|
# via
|
||||||
|
# -c requirements.txt
|
||||||
|
# anyio
|
||||||
|
iniconfig==2.0.0
|
||||||
|
# via pytest
|
||||||
|
packaging==24.2
|
||||||
|
# via pytest
|
||||||
|
pluggy==1.5.0
|
||||||
|
# via pytest
|
||||||
|
pydantic==2.10.6
|
||||||
|
# via
|
||||||
|
# -c requirements.txt
|
||||||
|
# -r requirements.in
|
||||||
|
# fastapi
|
||||||
|
pydantic-core==2.27.2
|
||||||
|
# via
|
||||||
|
# -c requirements.txt
|
||||||
|
# pydantic
|
||||||
|
pytest==8.3.5
|
||||||
|
# via -r requirements-dev.in
|
||||||
|
sniffio==1.3.1
|
||||||
|
# via
|
||||||
|
# -c requirements.txt
|
||||||
|
# anyio
|
||||||
|
starlette==0.46.1
|
||||||
|
# via
|
||||||
|
# -c requirements.txt
|
||||||
|
# fastapi
|
||||||
|
typing-extensions==4.12.2
|
||||||
|
# via
|
||||||
|
# -c requirements.txt
|
||||||
|
# fastapi
|
||||||
|
# pydantic
|
||||||
|
# pydantic-core
|
||||||
|
```
|
||||||
|
|
||||||
|
As with the base dependency files, these are committed to version control and distributed with the
|
||||||
|
project. When someone wants to work on the project, they'll install from the requirements file:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ pip install -r requirements-dev.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
### Platform-specific dependencies
|
||||||
|
|
||||||
|
When compiling dependencies with `pip` or `pip-tools`, the result is only usable on the same
|
||||||
|
platform as it is generated on. This poses a problem for projects which need to be usable on
|
||||||
|
multiple platforms, such as Windows and macOS.
|
||||||
|
|
||||||
|
For example, take a simple dependency:
|
||||||
|
|
||||||
|
```python title="requirements.in"
|
||||||
|
tqdm
|
||||||
|
```
|
||||||
|
|
||||||
|
On Linux, this compiles to:
|
||||||
|
|
||||||
|
```python title="requirements-linux.txt"
|
||||||
|
tqdm==4.67.1
|
||||||
|
# via -r requirements.in
|
||||||
|
```
|
||||||
|
|
||||||
|
While on Windows, this compiles to:
|
||||||
|
|
||||||
|
```python title="requirements-win.txt"
|
||||||
|
colorama==0.4.6
|
||||||
|
# via tqdm
|
||||||
|
tqdm==4.67.1
|
||||||
|
# via -r requirements.in
|
||||||
|
```
|
||||||
|
|
||||||
|
`colorama` is a Windows-only dependency of `tqdm`.
|
||||||
|
|
||||||
|
When using `pip` and `pip-tools`, a project needs to declare a requirements lock file for each
|
||||||
|
supported platform.
|
||||||
|
|
||||||
|
!!! note
|
||||||
|
|
||||||
|
uv's resolver can compile dependencies for multiple platforms at once (see ["universal resolution"](../../concepts/resolution.md#universal-resolution)),
|
||||||
|
allowing you to use a single `requirements.txt` for all platforms:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ uv pip compile --universal requirements.in
|
||||||
|
```
|
||||||
|
|
||||||
|
```python title="requirements.txt"
|
||||||
|
colorama==0.4.6 ; sys_platform == 'win32'
|
||||||
|
# via tqdm
|
||||||
|
tqdm==4.67.1
|
||||||
|
# via -r requirements.in
|
||||||
|
```
|
||||||
|
|
||||||
|
This resolution mode is also used when using a `pyproject.toml` and `uv.lock`.
|
||||||
|
|
||||||
|
## Migrating to a uv project
|
||||||
|
|
||||||
|
### The `pyproject.toml`
|
||||||
|
|
||||||
|
The `pyproject.toml` is a standardized file for Python project metadata. It replaces
|
||||||
|
`requirements.in` files, allowing you to represent arbitrary groups of project dependencies. It also
|
||||||
|
provides a centralized location for metadata about your project, such as the build system or tool
|
||||||
|
settings.
|
||||||
|
|
||||||
|
<!-- TODO: Link to the official docs on this or write more -->
|
||||||
|
|
||||||
|
For example, the `requirements.in` and `requirements-dev.in` files above can be translated to a
|
||||||
|
`pyproject.toml` as follows:
|
||||||
|
|
||||||
|
```toml title="pyproject.toml"
|
||||||
|
[project]
|
||||||
|
name = "example"
|
||||||
|
version = "0.0.1"
|
||||||
|
dependencies = [
|
||||||
|
"fastapi",
|
||||||
|
"pydantic>2"
|
||||||
|
]
|
||||||
|
|
||||||
|
[dependency-groups]
|
||||||
|
dev = ["pytest"]
|
||||||
|
```
|
||||||
|
|
||||||
|
We'll discuss the commands necessary to automate these imports below.
|
||||||
|
|
||||||
|
### The uv lockfile
|
||||||
|
|
||||||
|
uv uses a lockfile (`uv.lock`) to lock package versions. The format of this file is specific to
|
||||||
|
uv, allowing uv to support advanced features. It replaces `requirements.txt` files.
|
||||||
|
|
||||||
|
The lockfile will be automatically created and populated when adding dependencies, but you can
|
||||||
|
explicitly create it with `uv lock`.
|
||||||
|
|
||||||
|
Unlike `requirements.txt` files, the `uv.lock` file can represent arbitrary groups of dependencies,
|
||||||
|
so multiple files are not needed to lock development dependencies.
|
||||||
|
|
||||||
|
The uv lockfile is always [universal](../../concepts/resolution.md#universal-resolution), so
|
||||||
|
multiple files are not needed to
|
||||||
|
[lock dependencies for each platform](#platform-specific-dependencies). This ensures that all
|
||||||
|
developers are using consistent, locked versions of dependencies regardless of their machine.
|
||||||
|
|
||||||
|
The uv lockfile also supports concepts like
|
||||||
|
[pinning packages to specific indexes](../../concepts/indexes.md#pinning-a-package-to-an-index),
|
||||||
|
which is not representable in `requirements.txt` files.
|
||||||
|
|
||||||
|
!!! tip
|
||||||
|
|
||||||
|
If you only need to lock for a subset of platforms, use the
|
||||||
|
[`tool.uv.environments`](../../concepts/resolution.md#limited-resolution-environments) setting
|
||||||
|
to limit the resolution and lockfile.
|
||||||
|
|
||||||
|
To learn more, see the [lockfile](../../concepts/projects/layout.md#the-lockfile) documentation.
|
||||||
|
|
||||||
|
### Importing requirements files
|
||||||
|
|
||||||
|
First, create a `pyproject.toml` if you have not already:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ uv init
|
||||||
|
```
|
||||||
|
|
||||||
|
Then, the easiest way to import requirements is with `uv add`:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ uv add -r requirements.in
|
||||||
|
```
|
||||||
|
|
||||||
|
However, there is some nuance to this transition. Notice we used the `requirements.in` file, which
|
||||||
|
does not pin to exact versions of packages, so uv will solve for new versions of these packages. You
|
||||||
|
may want to continue using your previously locked versions from your `requirements.txt` so, when
|
||||||
|
switching over to uv, none of your dependency versions change.
|
||||||
|
|
||||||
|
The solution is to add your locked versions as _constraints_. uv supports using these on `add` to
|
||||||
|
preserve locked versions:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ uv add -r requirements.in -c requirements.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
Your existing versions will be retained when producing a `uv.lock` file.
|
||||||
|
|
||||||
|
#### Importing platform-specific constraints
|
||||||
|
|
||||||
|
If your platform-specific dependencies have been compiled into separate files, you can still
|
||||||
|
transition to a universal lockfile. However, you cannot just use `-c` to specify constraints from
|
||||||
|
your existing platform-specific `requirements.txt` files because they do not include markers
|
||||||
|
describing the environment and will consequently conflict.
|
||||||
|
|
||||||
|
To add the necessary markers, use `uv pip compile` to convert your existing files. For example,
|
||||||
|
given the following:
|
||||||
|
|
||||||
|
```python title="requirements-win.txt"
|
||||||
|
colorama==0.4.6
|
||||||
|
# via tqdm
|
||||||
|
tqdm==4.67.1
|
||||||
|
# via -r requirements.in
|
||||||
|
```
|
||||||
|
|
||||||
|
The markers can be added with:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ uv pip compile requirements.in -o requirements-win.txt --python-platform windows --no-strip-markers
|
||||||
|
```
|
||||||
|
|
||||||
|
Notice the resulting output includes a Windows marker on `colorama`:
|
||||||
|
|
||||||
|
```python title="requirements-win.txt"
|
||||||
|
colorama==0.4.6 ; sys_platform == 'win32'
|
||||||
|
# via tqdm
|
||||||
|
tqdm==4.67.1
|
||||||
|
# via -r requirements.in
|
||||||
|
```
|
||||||
|
|
||||||
|
When using `-o`, uv will constrain the versions to match the existing output file, if it can.
|
||||||
|
|
||||||
|
Markers can be added for other platforms by changing the `--python-platform` and `-o` values for
|
||||||
|
each requirements file you need to import, e.g., to `linux` and `macos`.
|
||||||
|
|
||||||
|
Once each `requirements.txt` file has been transformed, the dependencies can be imported to the
|
||||||
|
`pyproject.toml` and `uv.lock` with `uv add`:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ uv add -r requirements.in -c requirements-win.txt -c requirements-linux.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Importing development dependency files
|
||||||
|
|
||||||
|
As discussed in the [development dependencies](#development-dependencies) section, it's common to
|
||||||
|
have groups of dependencies for development purposes.
|
||||||
|
|
||||||
|
To import development dependencies, use the `--dev` flag during `uv add`:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ uv add --dev -r requirements-dev.in -c requirements-dev.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
If the `requirements-dev.in` includes the parent `requirements.in` via `-r`, it will need to be
|
||||||
|
stripped to avoid adding the base requirements to the `dev` dependency group. The following example
|
||||||
|
uses `sed` to strip lines that start with `-r`, then pipes the result to `uv add`:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ sed '/^-r /d' requirements-dev.in | uv add --dev -r - -c requirements-dev.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
In addition to the `dev` dependency group, uv supports arbitrary group names. For example, if you
|
||||||
|
also have a dedicated set of dependencies for building your documentation, those can be imported to
|
||||||
|
a `docs` group:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ uv add -r requirements-docs.in -c requirements-docs.txt --group docs
|
||||||
|
```
|
||||||
|
|
||||||
|
### Project environments
|
||||||
|
|
||||||
|
Unlike `pip`, uv is not centered around the concept of an "active" virtual environment. Instead, uv
|
||||||
|
uses a dedicated virtual environment for each project in a `.venv` directory. This environment is
|
||||||
|
automatically managed, so when you run a command, like `uv add`, the environment is synced with the
|
||||||
|
project dependencies.
|
||||||
|
|
||||||
|
The preferred way to execute commands in the environment is with `uv run`, e.g.:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ uv run pytest
|
||||||
|
```
|
||||||
|
|
||||||
|
Prior to every `uv run` invocation, uv will verify that the lockfile is up-to-date with the
|
||||||
|
`pyproject.toml`, and that the environment is up-to-date with the lockfile, keeping your project
|
||||||
|
in-sync without the need for manual intervention. `uv run` guarantees that your command is run in a
|
||||||
|
consistent, locked environment.
|
||||||
|
|
||||||
|
The project environment can also be explicitly created with `uv sync`, e.g., for use with editors.
|
||||||
|
|
||||||
|
!!! note
|
||||||
|
|
||||||
|
When in projects, uv will prefer a `.venv` in the project directory and ignore the active
|
||||||
|
environment as declared by the `VIRTUAL_ENV` variable by default. You can opt-in to using the
|
||||||
|
active environment with the `--active` flag.
|
||||||
|
|
||||||
|
To learn more, see the
|
||||||
|
[project environment](../../concepts/projects/layout.md#the-project-environment) documentation.
|
||||||
|
|
||||||
|
## Next steps
|
||||||
|
|
||||||
|
Now that you've migrated to uv, take a look at the
|
||||||
|
[project concept](../../concepts/projects/index.md) page for more details about uv projects.
|
|
@ -127,10 +127,10 @@ To sync an environment with a `requirements.txt` file:
|
||||||
$ uv pip sync requirements.txt
|
$ uv pip sync requirements.txt
|
||||||
```
|
```
|
||||||
|
|
||||||
To sync an environment with a `pyproject.toml` file:
|
To sync an environment with a [PEP 751](https://peps.python.org/pep-0751/) `pylock.toml` file:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ uv pip sync pyproject.toml
|
$ uv pip sync pylock.toml
|
||||||
```
|
```
|
||||||
|
|
||||||
## Adding constraints
|
## Adding constraints
|
||||||
|
|
|
@ -582,6 +582,8 @@ uv add [OPTIONS] <PACKAGES|--requirements <REQUIREMENTS>>
|
||||||
</dd><dt id="uv-add--upgrade-package"><a href="#uv-add--upgrade-package"><code>--upgrade-package</code></a>, <code>-P</code> <i>upgrade-package</i></dt><dd><p>Allow upgrades for a specific package, ignoring pinned versions in any existing output file. Implies <code>--refresh-package</code></p>
|
</dd><dt id="uv-add--upgrade-package"><a href="#uv-add--upgrade-package"><code>--upgrade-package</code></a>, <code>-P</code> <i>upgrade-package</i></dt><dd><p>Allow upgrades for a specific package, ignoring pinned versions in any existing output file. Implies <code>--refresh-package</code></p>
|
||||||
</dd><dt id="uv-add--verbose"><a href="#uv-add--verbose"><code>--verbose</code></a>, <code>-v</code></dt><dd><p>Use verbose output.</p>
|
</dd><dt id="uv-add--verbose"><a href="#uv-add--verbose"><code>--verbose</code></a>, <code>-v</code></dt><dd><p>Use verbose output.</p>
|
||||||
<p>You can configure fine-grained logging using the <code>RUST_LOG</code> environment variable. (<a href="https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives">https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives</a>)</p>
|
<p>You can configure fine-grained logging using the <code>RUST_LOG</code> environment variable. (<a href="https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives">https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives</a>)</p>
|
||||||
|
</dd><dt id="uv-add--workspace"><a href="#uv-add--workspace"><code>--workspace</code></a></dt><dd><p>Add the dependency as a workspace member.</p>
|
||||||
|
<p>When used with a path dependency, the package will be added to the workspace's <code>members</code> list in the root <code>pyproject.toml</code> file.</p>
|
||||||
</dd></dl>
|
</dd></dl>
|
||||||
|
|
||||||
## uv remove
|
## uv remove
|
||||||
|
|
|
@ -396,10 +396,6 @@ pydantic = { path = "/path/to/pydantic", editable = true }
|
||||||
|
|
||||||
Settings for the uv build backend (`uv_build`).
|
Settings for the uv build backend (`uv_build`).
|
||||||
|
|
||||||
!!! note
|
|
||||||
|
|
||||||
The uv build backend is currently in preview and may change in any future release.
|
|
||||||
|
|
||||||
Note that those settings only apply when using the `uv_build` backend, other build backends
|
Note that those settings only apply when using the `uv_build` backend, other build backends
|
||||||
(such as hatchling) have their own configuration.
|
(such as hatchling) have their own configuration.
|
||||||
|
|
||||||
|
@ -478,13 +474,17 @@ being the module name, and which contain a `__init__.pyi` file.
|
||||||
For namespace packages with a single module, the path can be dotted, e.g., `foo.bar` or
|
For namespace packages with a single module, the path can be dotted, e.g., `foo.bar` or
|
||||||
`foo-stubs.bar`.
|
`foo-stubs.bar`.
|
||||||
|
|
||||||
|
For namespace packages with multiple modules, the path can be a list, e.g.,
|
||||||
|
`["foo", "bar"]`. We recommend using a single module per package, splitting multiple
|
||||||
|
packages into a workspace.
|
||||||
|
|
||||||
Note that using this option runs the risk of creating two packages with different names but
|
Note that using this option runs the risk of creating two packages with different names but
|
||||||
the same module names. Installing such packages together leads to unspecified behavior,
|
the same module names. Installing such packages together leads to unspecified behavior,
|
||||||
often with corrupted files or directory trees.
|
often with corrupted files or directory trees.
|
||||||
|
|
||||||
**Default value**: `None`
|
**Default value**: `None`
|
||||||
|
|
||||||
**Type**: `str`
|
**Type**: `str | list[str]`
|
||||||
|
|
||||||
**Example usage**:
|
**Example usage**:
|
||||||
|
|
||||||
|
|
|
@ -174,6 +174,9 @@ nav:
|
||||||
- Using tools: guides/tools.md
|
- Using tools: guides/tools.md
|
||||||
- Working on projects: guides/projects.md
|
- Working on projects: guides/projects.md
|
||||||
- Publishing packages: guides/package.md
|
- Publishing packages: guides/package.md
|
||||||
|
- Migration:
|
||||||
|
- guides/migration/index.md
|
||||||
|
- From pip to a uv project: guides/migration/pip-to-project.md
|
||||||
- Integrations:
|
- Integrations:
|
||||||
- guides/integration/index.md
|
- guides/integration/index.md
|
||||||
- Docker: guides/integration/docker.md
|
- Docker: guides/integration/docker.md
|
||||||
|
|
|
@ -4,7 +4,7 @@ build-backend = "maturin"
|
||||||
|
|
||||||
[project]
|
[project]
|
||||||
name = "uv"
|
name = "uv"
|
||||||
version = "0.7.18"
|
version = "0.7.20"
|
||||||
description = "An extremely fast Python package and project manager, written in Rust."
|
description = "An extremely fast Python package and project manager, written in Rust."
|
||||||
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
|
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
|
||||||
requires-python = ">=3.8"
|
requires-python = ">=3.8"
|
||||||
|
|
|
@ -56,8 +56,7 @@ DEFAULT_TIMEOUT = 30
|
||||||
DEFAULT_PKG_NAME = "astral-registries-test-pkg"
|
DEFAULT_PKG_NAME = "astral-registries-test-pkg"
|
||||||
|
|
||||||
KNOWN_REGISTRIES = [
|
KNOWN_REGISTRIES = [
|
||||||
# TODO(john): Restore this when subscription starts up again
|
"artifactory",
|
||||||
# "artifactory",
|
|
||||||
"azure",
|
"azure",
|
||||||
"aws",
|
"aws",
|
||||||
"cloudsmith",
|
"cloudsmith",
|
||||||
|
|
30
uv.schema.json
generated
30
uv.schema.json
generated
|
@ -644,7 +644,7 @@
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"BuildBackendSettings": {
|
"BuildBackendSettings": {
|
||||||
"description": "Settings for the uv build backend (`uv_build`).\n\n!!! note\n\n The uv build backend is currently in preview and may change in any future release.\n\nNote that those settings only apply when using the `uv_build` backend, other build backends\n(such as hatchling) have their own configuration.\n\nAll options that accept globs use the portable glob patterns from\n[PEP 639](https://packaging.python.org/en/latest/specifications/glob-patterns/).",
|
"description": "Settings for the uv build backend (`uv_build`).\n\nNote that those settings only apply when using the `uv_build` backend, other build backends\n(such as hatchling) have their own configuration.\n\nAll options that accept globs use the portable glob patterns from\n[PEP 639](https://packaging.python.org/en/latest/specifications/glob-patterns/).",
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {
|
"properties": {
|
||||||
"data": {
|
"data": {
|
||||||
|
@ -668,10 +668,14 @@
|
||||||
"default": true
|
"default": true
|
||||||
},
|
},
|
||||||
"module-name": {
|
"module-name": {
|
||||||
"description": "The name of the module directory inside `module-root`.\n\nThe default module name is the package name with dots and dashes replaced by underscores.\n\nPackage names need to be valid Python identifiers, and the directory needs to contain a\n`__init__.py`. An exception are stubs packages, whose name ends with `-stubs`, with the stem\nbeing the module name, and which contain a `__init__.pyi` file.\n\nFor namespace packages with a single module, the path can be dotted, e.g., `foo.bar` or\n`foo-stubs.bar`.\n\nNote that using this option runs the risk of creating two packages with different names but\nthe same module names. Installing such packages together leads to unspecified behavior,\noften with corrupted files or directory trees.",
|
"description": "The name of the module directory inside `module-root`.\n\nThe default module name is the package name with dots and dashes replaced by underscores.\n\nPackage names need to be valid Python identifiers, and the directory needs to contain a\n`__init__.py`. An exception are stubs packages, whose name ends with `-stubs`, with the stem\nbeing the module name, and which contain a `__init__.pyi` file.\n\nFor namespace packages with a single module, the path can be dotted, e.g., `foo.bar` or\n`foo-stubs.bar`.\n\nFor namespace packages with multiple modules, the path can be a list, e.g.,\n`[\"foo\", \"bar\"]`. We recommend using a single module per package, splitting multiple\npackages into a workspace.\n\nNote that using this option runs the risk of creating two packages with different names but\nthe same module names. Installing such packages together leads to unspecified behavior,\noften with corrupted files or directory trees.",
|
||||||
"type": [
|
"anyOf": [
|
||||||
"string",
|
{
|
||||||
"null"
|
"$ref": "#/definitions/ModuleName"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "null"
|
||||||
|
}
|
||||||
],
|
],
|
||||||
"default": null
|
"default": null
|
||||||
},
|
},
|
||||||
|
@ -1052,6 +1056,22 @@
|
||||||
"description": "A PEP 508-compliant marker expression, e.g., `sys_platform == 'Darwin'`",
|
"description": "A PEP 508-compliant marker expression, e.g., `sys_platform == 'Darwin'`",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
|
"ModuleName": {
|
||||||
|
"description": "Whether to include a single module or multiple modules.",
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"description": "A single module name.",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"description": "Multiple module names, which are all included.",
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
"PackageName": {
|
"PackageName": {
|
||||||
"description": "The normalized name of a package.\n\nConverts the name to lowercase and collapses runs of `-`, `_`, and `.` down to a single `-`.\nFor example, `---`, `.`, and `__` are all converted to a single `-`.\n\nSee: <https://packaging.python.org/en/latest/specifications/name-normalization/>",
|
"description": "The normalized name of a package.\n\nConverts the name to lowercase and collapses runs of `-`, `_`, and `.` down to a single `-`.\nFor example, `---`, `.`, and `__` are all converted to a single `-`.\n\nSee: <https://packaging.python.org/en/latest/specifications/name-normalization/>",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue