mirror of
https://github.com/astral-sh/uv.git
synced 2025-07-07 13:25:00 +00:00
Compare commits
31 commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
ddb1577a93 | ||
![]() |
d31e6ad7c7 | ||
![]() |
3a77b9cdd9 | ||
![]() |
1d027bd92a | ||
![]() |
bb738aeb44 | ||
![]() |
fc758bb755 | ||
![]() |
1308c85efe | ||
![]() |
f609e1ddaf | ||
![]() |
eaf517efd8 | ||
![]() |
e8bc3950ef | ||
![]() |
06af93fce7 | ||
![]() |
8afbd86f03 | ||
![]() |
a1cda6213c | ||
![]() |
39cdfe9981 | ||
![]() |
85c0fc963b | ||
![]() |
c3f13d2505 | ||
![]() |
38ee6ec800 | ||
![]() |
71b5ba13d7 | ||
![]() |
5f2857a1c7 | ||
![]() |
a58969feef | ||
![]() |
3bb8ac610c | ||
![]() |
ec54dce919 | ||
![]() |
a6bb65c78d | ||
![]() |
743260b1f5 | ||
![]() |
2f53ea5c5c | ||
![]() |
a9ea756d14 | ||
![]() |
43f67a4a4c | ||
![]() |
a7aa46acc5 | ||
![]() |
b0db548c80 | ||
![]() |
bf5dcf9929 | ||
![]() |
e40d3d5dff |
78 changed files with 1823 additions and 1000 deletions
|
@ -1,4 +1,4 @@
|
|||
[profile.default]
|
||||
# Mark tests that take longer than 10s as slow.
|
||||
# Terminate after 90s as a stop-gap measure to terminate on deadlock.
|
||||
slow-timeout = { period = "10s", terminate-after = 9 }
|
||||
# Terminate after 120s as a stop-gap measure to terminate on deadlock.
|
||||
slow-timeout = { period = "10s", terminate-after = 12 }
|
||||
|
|
60
.github/workflows/ci.yml
vendored
60
.github/workflows/ci.yml
vendored
|
@ -470,6 +470,31 @@ jobs:
|
|||
./target/debug/uvx
|
||||
retention-days: 1
|
||||
|
||||
build-binary-linux-aarch64:
|
||||
timeout-minutes: 10
|
||||
needs: determine_changes
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||
runs-on: github-ubuntu-24.04-aarch64-4
|
||||
name: "build binary | linux aarch64"
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- uses: rui314/setup-mold@v1
|
||||
|
||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||
|
||||
- name: "Build"
|
||||
run: cargo build
|
||||
|
||||
- name: "Upload binary"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: uv-linux-aarch64-${{ github.sha }}
|
||||
path: |
|
||||
./target/debug/uv
|
||||
./target/debug/uvx
|
||||
retention-days: 1
|
||||
|
||||
build-binary-linux-musl:
|
||||
timeout-minutes: 10
|
||||
needs: determine_changes
|
||||
|
@ -770,6 +795,33 @@ jobs:
|
|||
eval "$(./uv generate-shell-completion bash)"
|
||||
eval "$(./uvx --generate-shell-completion bash)"
|
||||
|
||||
smoke-test-linux-aarch64:
|
||||
timeout-minutes: 10
|
||||
needs: build-binary-linux-aarch64
|
||||
name: "smoke test | linux aarch64"
|
||||
runs-on: github-ubuntu-24.04-aarch64-2
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: "Download binary"
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
|
||||
with:
|
||||
name: uv-linux-aarch64-${{ github.sha }}
|
||||
|
||||
- name: "Prepare binary"
|
||||
run: |
|
||||
chmod +x ./uv
|
||||
chmod +x ./uvx
|
||||
|
||||
- name: "Smoke test"
|
||||
run: |
|
||||
./uv run scripts/smoke-test
|
||||
|
||||
- name: "Test shell completions"
|
||||
run: |
|
||||
eval "$(./uv generate-shell-completion bash)"
|
||||
eval "$(./uvx --generate-shell-completion bash)"
|
||||
|
||||
smoke-test-linux-musl:
|
||||
timeout-minutes: 10
|
||||
needs: build-binary-linux-musl
|
||||
|
@ -1533,7 +1585,7 @@ jobs:
|
|||
run: chmod +x ./uv
|
||||
|
||||
- name: "Configure AWS credentials"
|
||||
uses: aws-actions/configure-aws-credentials@3d8cba388a057b13744d61818a337e40a119b1a7
|
||||
uses: aws-actions/configure-aws-credentials@f503a1870408dcf2c35d5c2b8a68e69211042c7d
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
|
@ -1570,9 +1622,9 @@ jobs:
|
|||
run: ./uv run -p ${{ env.PYTHON_VERSION }} scripts/registries-test.py --uv ./uv --color always --all
|
||||
env:
|
||||
RUST_LOG: uv=debug
|
||||
# UV_TEST_ARTIFACTORY_TOKEN: ${{ secrets.UV_TEST_ARTIFACTORY_TOKEN }}
|
||||
# UV_TEST_ARTIFACTORY_URL: ${{ secrets.UV_TEST_ARTIFACTORY_URL }}
|
||||
# UV_TEST_ARTIFACTORY_USERNAME: ${{ secrets.UV_TEST_ARTIFACTORY_USERNAME }}
|
||||
UV_TEST_ARTIFACTORY_TOKEN: ${{ secrets.UV_TEST_ARTIFACTORY_TOKEN }}
|
||||
UV_TEST_ARTIFACTORY_URL: ${{ secrets.UV_TEST_ARTIFACTORY_URL }}
|
||||
UV_TEST_ARTIFACTORY_USERNAME: ${{ secrets.UV_TEST_ARTIFACTORY_USERNAME }}
|
||||
UV_TEST_AWS_URL: ${{ secrets.UV_TEST_AWS_URL }}
|
||||
UV_TEST_AWS_USERNAME: aws
|
||||
UV_TEST_AZURE_TOKEN: ${{ secrets.UV_TEST_AZURE_TOKEN }}
|
||||
|
|
|
@ -12,7 +12,7 @@ repos:
|
|||
- id: validate-pyproject
|
||||
|
||||
- repo: https://github.com/crate-ci/typos
|
||||
rev: v1.33.1
|
||||
rev: v1.34.0
|
||||
hooks:
|
||||
- id: typos
|
||||
|
||||
|
@ -42,7 +42,7 @@ repos:
|
|||
types_or: [yaml, json5]
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.12.1
|
||||
rev: v0.12.2
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
- id: ruff
|
||||
|
|
39
CHANGELOG.md
39
CHANGELOG.md
|
@ -3,6 +3,43 @@
|
|||
<!-- prettier-ignore-start -->
|
||||
|
||||
|
||||
## 0.7.19
|
||||
|
||||
The **[uv build backend](https://docs.astral.sh/uv/concepts/build-backend/) is now stable**, and considered ready for production use.
|
||||
|
||||
The uv build backend is a great choice for pure Python projects. It has reasonable defaults, with the goal of requiring zero configuration for most users, but provides flexible configuration to accommodate most Python project structures. It integrates tightly with uv, to improve messaging and user experience. It validates project metadata and structures, preventing common mistakes. And, finally, it's very fast — `uv sync` on a new project (from `uv init`) is 10-30x faster than with other build backends.
|
||||
|
||||
To use uv as a build backend in an existing project, add `uv_build` to the `[build-system]` section in your `pyproject.toml`:
|
||||
|
||||
```toml
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.7.19,<0.8.0"]
|
||||
build-backend = "uv_build"
|
||||
```
|
||||
|
||||
In a future release, it will replace `hatchling` as the default in `uv init`. As before, uv will remain compatible with all standards-compliant build backends.
|
||||
|
||||
### Python
|
||||
|
||||
- Add PGO distributions of Python for aarch64 Linux, which are more optimized for better performance
|
||||
|
||||
See the [python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250702) for more details.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Ignore Python patch version for `--universal` pip compile ([#14405](https://github.com/astral-sh/uv/pull/14405))
|
||||
- Update the tilde version specifier warning to include more context ([#14335](https://github.com/astral-sh/uv/pull/14335))
|
||||
- Clarify behavior and hint on tool install when no executables are available ([#14423](https://github.com/astral-sh/uv/pull/14423))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Make project and interpreter lock acquisition non-fatal ([#14404](https://github.com/astral-sh/uv/pull/14404))
|
||||
- Includes `sys.prefix` in cached environment keys to avoid `--with` collisions across projects ([#14403](https://github.com/astral-sh/uv/pull/14403))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add a migration guide from pip to uv projects ([#12382](https://github.com/astral-sh/uv/pull/12382))
|
||||
|
||||
## 0.7.18
|
||||
|
||||
### Python
|
||||
|
@ -12,6 +49,8 @@
|
|||
These are not downloaded by default, since x86-64 Python has broader ecosystem support on Windows.
|
||||
However, they can be requested with `cpython-<version>-windows-aarch64`.
|
||||
|
||||
See the [python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250630) for more details.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Keep track of retries in `ManagedPythonDownload::fetch_with_retry` ([#14378](https://github.com/astral-sh/uv/pull/14378))
|
||||
|
|
121
Cargo.lock
generated
121
Cargo.lock
generated
|
@ -189,9 +189,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "async-channel"
|
||||
version = "2.3.1"
|
||||
version = "2.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a"
|
||||
checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2"
|
||||
dependencies = [
|
||||
"concurrent-queue",
|
||||
"event-listener-strategy",
|
||||
|
@ -1165,9 +1165,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "event-listener-strategy"
|
||||
version = "0.5.3"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2"
|
||||
checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93"
|
||||
dependencies = [
|
||||
"event-listener",
|
||||
"pin-project-lite",
|
||||
|
@ -1698,7 +1698,7 @@ dependencies = [
|
|||
"tokio",
|
||||
"tokio-rustls",
|
||||
"tower-service",
|
||||
"webpki-roots",
|
||||
"webpki-roots 0.26.8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1707,6 +1707,7 @@ version = "0.1.14"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dc2fdfdbff08affe55bb779f33b053aa1fe5dd5b54c257343c17edfa55711bdb"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
"futures-channel",
|
||||
"futures-core",
|
||||
|
@ -1714,7 +1715,9 @@ dependencies = [
|
|||
"http",
|
||||
"http-body",
|
||||
"hyper",
|
||||
"ipnet",
|
||||
"libc",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"socket2",
|
||||
"tokio",
|
||||
|
@ -1945,6 +1948,16 @@ version = "2.11.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
|
||||
|
||||
[[package]]
|
||||
name = "iri-string"
|
||||
version = "0.7.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "is-terminal"
|
||||
version = "0.4.15"
|
||||
|
@ -3062,9 +3075,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "reqwest"
|
||||
version = "0.12.15"
|
||||
version = "0.12.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb"
|
||||
checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531"
|
||||
dependencies = [
|
||||
"async-compression",
|
||||
"base64 0.22.1",
|
||||
|
@ -3079,18 +3092,14 @@ dependencies = [
|
|||
"hyper",
|
||||
"hyper-rustls",
|
||||
"hyper-util",
|
||||
"ipnet",
|
||||
"js-sys",
|
||||
"log",
|
||||
"mime",
|
||||
"mime_guess",
|
||||
"once_cell",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"quinn",
|
||||
"rustls",
|
||||
"rustls-native-certs",
|
||||
"rustls-pemfile",
|
||||
"rustls-pki-types",
|
||||
"serde",
|
||||
"serde_json",
|
||||
|
@ -3098,17 +3107,16 @@ dependencies = [
|
|||
"sync_wrapper",
|
||||
"tokio",
|
||||
"tokio-rustls",
|
||||
"tokio-socks",
|
||||
"tokio-util",
|
||||
"tower",
|
||||
"tower-http",
|
||||
"tower-service",
|
||||
"url",
|
||||
"wasm-bindgen",
|
||||
"wasm-bindgen-futures",
|
||||
"wasm-streams",
|
||||
"web-sys",
|
||||
"webpki-roots",
|
||||
"windows-registry 0.4.0",
|
||||
"webpki-roots 1.0.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -3351,15 +3359,6 @@ dependencies = [
|
|||
"security-framework",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustls-pemfile"
|
||||
version = "2.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50"
|
||||
dependencies = [
|
||||
"rustls-pki-types",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustls-pki-types"
|
||||
version = "1.11.0"
|
||||
|
@ -3428,9 +3427,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "schemars"
|
||||
version = "1.0.3"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1375ba8ef45a6f15d83fa8748f1079428295d403d6ea991d09ab100155fbc06d"
|
||||
checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0"
|
||||
dependencies = [
|
||||
"dyn-clone",
|
||||
"ref-cast",
|
||||
|
@ -3442,9 +3441,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "schemars_derive"
|
||||
version = "1.0.3"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2b13ed22d6d49fe23712e068770b5c4df4a693a2b02eeff8e7ca3135627a24f6"
|
||||
checksum = "33d020396d1d138dc19f1165df7545479dcd58d93810dc5d646a16e55abefa80"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -3968,9 +3967,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "test-log"
|
||||
version = "0.2.17"
|
||||
version = "0.2.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e7f46083d221181166e5b6f6b1e5f1d499f3a76888826e6cb1d057554157cd0f"
|
||||
checksum = "1e33b98a582ea0be1168eba097538ee8dd4bbe0f2b01b22ac92ea30054e5be7b"
|
||||
dependencies = [
|
||||
"test-log-macros",
|
||||
"tracing-subscriber",
|
||||
|
@ -3978,9 +3977,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "test-log-macros"
|
||||
version = "0.2.17"
|
||||
version = "0.2.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "888d0c3c6db53c0fdab160d2ed5e12ba745383d3e85813f2ea0f2b1475ab553f"
|
||||
checksum = "451b374529930d7601b1eef8d32bc79ae870b6079b069401709c2a8bf9e75f36"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -4172,18 +4171,6 @@ dependencies = [
|
|||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-socks"
|
||||
version = "0.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0d4770b8024672c1101b3f6733eab95b18007dbe0847a8afe341fcf79e06043f"
|
||||
dependencies = [
|
||||
"either",
|
||||
"futures-util",
|
||||
"thiserror 1.0.69",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-stream"
|
||||
version = "0.1.17"
|
||||
|
@ -4266,6 +4253,24 @@ dependencies = [
|
|||
"tower-service",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tower-http"
|
||||
version = "0.6.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"bytes",
|
||||
"futures-util",
|
||||
"http",
|
||||
"http-body",
|
||||
"iri-string",
|
||||
"pin-project-lite",
|
||||
"tower",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tower-layer"
|
||||
version = "0.3.3"
|
||||
|
@ -4603,7 +4608,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "uv"
|
||||
version = "0.7.18"
|
||||
version = "0.7.19"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anyhow",
|
||||
|
@ -4767,7 +4772,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "uv-build"
|
||||
version = "0.7.18"
|
||||
version = "0.7.19"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"uv-build-backend",
|
||||
|
@ -5635,7 +5640,7 @@ dependencies = [
|
|||
"uv-trampoline-builder",
|
||||
"uv-warnings",
|
||||
"which",
|
||||
"windows-registry 0.5.3",
|
||||
"windows-registry",
|
||||
"windows-result 0.3.4",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
@ -5839,7 +5844,7 @@ dependencies = [
|
|||
"tracing",
|
||||
"uv-fs",
|
||||
"uv-static",
|
||||
"windows-registry 0.5.3",
|
||||
"windows-registry",
|
||||
"windows-result 0.3.4",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
@ -5957,7 +5962,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "uv-version"
|
||||
version = "0.7.18"
|
||||
version = "0.7.19"
|
||||
|
||||
[[package]]
|
||||
name = "uv-virtualenv"
|
||||
|
@ -6221,6 +6226,15 @@ dependencies = [
|
|||
"rustls-pki-types",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "webpki-roots"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8782dd5a41a24eed3a4f40b606249b3e236ca61adf1f25ea4d45c73de122b502"
|
||||
dependencies = [
|
||||
"rustls-pki-types",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "weezl"
|
||||
version = "0.1.8"
|
||||
|
@ -6448,17 +6462,6 @@ dependencies = [
|
|||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-registry"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3"
|
||||
dependencies = [
|
||||
"windows-result 0.3.4",
|
||||
"windows-strings 0.3.1",
|
||||
"windows-targets 0.53.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-registry"
|
||||
version = "0.5.3"
|
||||
|
|
|
@ -142,7 +142,7 @@ ref-cast = { version = "1.0.24" }
|
|||
reflink-copy = { version = "0.1.19" }
|
||||
regex = { version = "1.10.6" }
|
||||
regex-automata = { version = "0.4.8", default-features = false, features = ["dfa-build", "dfa-search", "perf", "std", "syntax"] }
|
||||
reqwest = { version = "=0.12.15", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] }
|
||||
reqwest = { version = "0.12.22", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] }
|
||||
reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8", features = ["multipart"] }
|
||||
reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" }
|
||||
rkyv = { version = "0.8.8", features = ["bytecheck"] }
|
||||
|
|
|
@ -4,10 +4,6 @@ use uv_macros::OptionsMetadata;
|
|||
|
||||
/// Settings for the uv build backend (`uv_build`).
|
||||
///
|
||||
/// !!! note
|
||||
///
|
||||
/// The uv build backend is currently in preview and may change in any future release.
|
||||
///
|
||||
/// Note that those settings only apply when using the `uv_build` backend, other build backends
|
||||
/// (such as hatchling) have their own configuration.
|
||||
///
|
||||
|
|
|
@ -25,7 +25,7 @@ use tempfile::TempDir;
|
|||
use tokio::io::AsyncBufReadExt;
|
||||
use tokio::process::Command;
|
||||
use tokio::sync::{Mutex, Semaphore};
|
||||
use tracing::{Instrument, debug, info_span, instrument};
|
||||
use tracing::{Instrument, debug, info_span, instrument, warn};
|
||||
|
||||
use uv_cache_key::cache_digest;
|
||||
use uv_configuration::PreviewMode;
|
||||
|
@ -456,8 +456,12 @@ impl SourceBuild {
|
|||
"uv-setuptools-{}.lock",
|
||||
cache_digest(&canonical_source_path)
|
||||
));
|
||||
source_tree_lock =
|
||||
Some(LockedFile::acquire(lock_path, self.source_tree.to_string_lossy()).await?);
|
||||
source_tree_lock = LockedFile::acquire(lock_path, self.source_tree.to_string_lossy())
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
warn!("Failed to acquire build lock: {err}");
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
Ok(source_tree_lock)
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "uv-build"
|
||||
version = "0.7.18"
|
||||
version = "0.7.19"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
homepage.workspace = true
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[project]
|
||||
name = "uv-build"
|
||||
version = "0.7.18"
|
||||
version = "0.7.19"
|
||||
description = "The uv build backend"
|
||||
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
|
||||
requires-python = ">=3.8"
|
||||
|
|
|
@ -982,6 +982,45 @@ mod tests {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_redirect_preserves_fragment() -> Result<()> {
|
||||
for status in &[301, 302, 303, 307, 308] {
|
||||
let server = MockServer::start().await;
|
||||
Mock::given(method("GET"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(*status)
|
||||
.insert_header("location", format!("{}/redirect", server.uri())),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let request = Client::new()
|
||||
.get(format!("{}#fragment", server.uri()))
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
let response = Client::builder()
|
||||
.redirect(reqwest::redirect::Policy::none())
|
||||
.build()
|
||||
.unwrap()
|
||||
.execute(request.try_clone().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let redirect_request =
|
||||
request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)?
|
||||
.unwrap();
|
||||
assert!(
|
||||
redirect_request
|
||||
.url()
|
||||
.fragment()
|
||||
.is_some_and(|fragment| fragment == "fragment")
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_redirect_removes_authorization_header_on_cross_origin() -> Result<()> {
|
||||
for status in &[301, 302, 303, 307, 308] {
|
||||
|
|
|
@ -1416,44 +1416,6 @@ mod tests {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_redirect_preserve_fragment() -> Result<(), Error> {
|
||||
let redirect_server = MockServer::start().await;
|
||||
|
||||
// Configure the redirect server to respond with a 307 with a relative URL.
|
||||
Mock::given(method("GET"))
|
||||
.respond_with(ResponseTemplate::new(307).insert_header("Location", "/foo".to_string()))
|
||||
.mount(&redirect_server)
|
||||
.await;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path_regex("/foo"))
|
||||
.respond_with(ResponseTemplate::new(200))
|
||||
.mount(&redirect_server)
|
||||
.await;
|
||||
|
||||
let cache = Cache::temp()?;
|
||||
let registry_client = RegistryClientBuilder::new(cache).build();
|
||||
let client = registry_client.cached_client().uncached();
|
||||
|
||||
let mut url = DisplaySafeUrl::parse(&redirect_server.uri())?;
|
||||
url.set_fragment(Some("fragment"));
|
||||
|
||||
assert_eq!(
|
||||
client
|
||||
.for_host(&url)
|
||||
.get(Url::from(url.clone()))
|
||||
.send()
|
||||
.await?
|
||||
.url()
|
||||
.to_string(),
|
||||
format!("{}/foo#fragment", redirect_server.uri()),
|
||||
"Requests should preserve fragment"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ignore_failing_files() {
|
||||
// 1.7.7 has an invalid requires-python field (double comma), 1.7.8 is valid
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
use std::{borrow::Cow, str::FromStr};
|
||||
#[cfg(feature = "schemars")]
|
||||
use std::borrow::Cow;
|
||||
use std::str::FromStr;
|
||||
|
||||
use uv_pep508::PackageName;
|
||||
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
use std::str::FromStr;
|
||||
use std::{borrow::Cow, fmt::Formatter};
|
||||
#[cfg(feature = "schemars")]
|
||||
use std::borrow::Cow;
|
||||
use std::{fmt::Formatter, str::FromStr};
|
||||
|
||||
use uv_pep440::{Version, VersionSpecifier, VersionSpecifiers, VersionSpecifiersParseError};
|
||||
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
use serde::{Deserialize, Deserializer};
|
||||
use std::{borrow::Cow, str::FromStr};
|
||||
#[cfg(feature = "schemars")]
|
||||
use std::borrow::Cow;
|
||||
use std::str::FromStr;
|
||||
use url::Url;
|
||||
|
||||
/// A host specification (wildcard, or host, with optional scheme and/or port) for which
|
||||
|
|
|
@ -11,7 +11,7 @@ use crate::ROOT_DIR;
|
|||
use crate::generate_all::Mode;
|
||||
|
||||
/// Contains current supported targets
|
||||
const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250630/cpython-unix/targets.yml";
|
||||
const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250702/cpython-unix/targets.yml";
|
||||
|
||||
#[derive(clap::Args)]
|
||||
pub(crate) struct Args {
|
||||
|
@ -130,7 +130,7 @@ async fn generate() -> Result<String> {
|
|||
output.push_str("//! DO NOT EDIT\n");
|
||||
output.push_str("//!\n");
|
||||
output.push_str("//! Generated with `cargo run dev generate-sysconfig-metadata`\n");
|
||||
output.push_str("//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250630/cpython-unix/targets.yml>\n");
|
||||
output.push_str("//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250702/cpython-unix/targets.yml>\n");
|
||||
output.push_str("//!\n");
|
||||
|
||||
// Disable clippy/fmt
|
||||
|
|
|
@ -453,12 +453,6 @@ impl BuildContext for BuildDispatch<'_> {
|
|||
build_kind: BuildKind,
|
||||
version_id: Option<&'data str>,
|
||||
) -> Result<Option<DistFilename>, BuildDispatchError> {
|
||||
// Direct builds are a preview feature with the uv build backend.
|
||||
if self.preview.is_disabled() {
|
||||
trace!("Preview is disabled, not checking for direct build");
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let source_tree = if let Some(subdir) = subdirectory {
|
||||
source.join(subdir)
|
||||
} else {
|
||||
|
|
|
@ -462,6 +462,19 @@ impl<'a> IndexLocations {
|
|||
indexes
|
||||
}
|
||||
}
|
||||
|
||||
/// Add all authenticated sources to the cache.
|
||||
pub fn cache_index_credentials(&self) {
|
||||
for index in self.allowed_indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
let credentials = Arc::new(credentials);
|
||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
||||
if let Some(root_url) = index.root_url() {
|
||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&IndexLocations> for uv_auth::Indexes {
|
||||
|
|
|
@ -3,7 +3,9 @@
|
|||
//! flags set.
|
||||
|
||||
use serde::{Deserialize, Deserializer, Serialize};
|
||||
use std::{borrow::Cow, path::Path};
|
||||
#[cfg(feature = "schemars")]
|
||||
use std::borrow::Cow;
|
||||
use std::path::Path;
|
||||
|
||||
use crate::{Index, IndexUrl};
|
||||
|
||||
|
|
|
@ -5,11 +5,10 @@ use version_ranges::Ranges;
|
|||
use uv_distribution_filename::WheelFilename;
|
||||
use uv_pep440::{
|
||||
LowerBound, UpperBound, Version, VersionSpecifier, VersionSpecifiers,
|
||||
release_specifier_to_range, release_specifiers_to_ranges,
|
||||
release_specifiers_to_ranges,
|
||||
};
|
||||
use uv_pep508::{MarkerExpression, MarkerTree, MarkerValueVersion};
|
||||
use uv_platform_tags::{AbiTag, LanguageTag};
|
||||
use uv_warnings::warn_user_once;
|
||||
|
||||
/// The `Requires-Python` requirement specifier.
|
||||
///
|
||||
|
@ -67,27 +66,7 @@ impl RequiresPython {
|
|||
) -> Option<Self> {
|
||||
// Convert to PubGrub range and perform an intersection.
|
||||
let range = specifiers
|
||||
.map(|specs| {
|
||||
// Warn if there’s exactly one `~=` specifier without a patch.
|
||||
if let [spec] = &specs[..] {
|
||||
if spec.is_tilde_without_patch() {
|
||||
if let Some((lo_b, hi_b)) = release_specifier_to_range(spec.clone(), false)
|
||||
.bounding_range()
|
||||
.map(|(l, u)| (l.cloned(), u.cloned()))
|
||||
{
|
||||
let lo_spec = LowerBound::new(lo_b).specifier().unwrap();
|
||||
let hi_spec = UpperBound::new(hi_b).specifier().unwrap();
|
||||
warn_user_once!(
|
||||
"The release specifier (`{spec}`) contains a compatible release \
|
||||
match without a patch version. This will be interpreted as \
|
||||
`{lo_spec}, {hi_spec}`. Did you mean `{spec}.0` to freeze the \
|
||||
minor version?"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
release_specifiers_to_ranges(specs.clone())
|
||||
})
|
||||
.map(|specs| release_specifiers_to_ranges(specs.clone()))
|
||||
.reduce(|acc, r| acc.intersection(&r))?;
|
||||
|
||||
// If the intersection is empty, return `None`.
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
use std::{borrow::Cow, ops::Deref};
|
||||
#[cfg(feature = "schemars")]
|
||||
use std::borrow::Cow;
|
||||
use std::ops::Deref;
|
||||
|
||||
use http::StatusCode;
|
||||
use rustc_hash::FxHashSet;
|
||||
|
|
|
@ -2,11 +2,11 @@ use std::{ffi::OsString, path::PathBuf};
|
|||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum Error {
|
||||
#[error(transparent)]
|
||||
#[error("Failed to read from zip file")]
|
||||
Zip(#[from] zip::result::ZipError),
|
||||
#[error(transparent)]
|
||||
#[error("Failed to read from zip file")]
|
||||
AsyncZip(#[from] async_zip::error::ZipError),
|
||||
#[error(transparent)]
|
||||
#[error("I/O operation failed during extraction")]
|
||||
Io(#[from] std::io::Error),
|
||||
#[error(
|
||||
"The top-level of the archive must only contain a list directory, but it contains: {0:?}"
|
||||
|
|
|
@ -34,7 +34,7 @@ pub use {
|
|||
VersionPatternParseError,
|
||||
},
|
||||
version_specifier::{
|
||||
VersionSpecifier, VersionSpecifierBuildError, VersionSpecifiers,
|
||||
TildeVersionSpecifier, VersionSpecifier, VersionSpecifierBuildError, VersionSpecifiers,
|
||||
VersionSpecifiersParseError,
|
||||
},
|
||||
};
|
||||
|
|
|
@ -665,11 +665,6 @@ impl VersionSpecifier {
|
|||
| Operator::NotEqual => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns true if this is a `~=` specifier without a patch version (e.g. `~=3.11`).
|
||||
pub fn is_tilde_without_patch(&self) -> bool {
|
||||
self.operator == Operator::TildeEqual && self.version.release().len() == 2
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for VersionSpecifier {
|
||||
|
@ -893,6 +888,90 @@ pub(crate) fn parse_version_specifiers(
|
|||
Ok(version_ranges)
|
||||
}
|
||||
|
||||
/// A simple `~=` version specifier with a major, minor and (optional) patch version, e.g., `~=3.13`
|
||||
/// or `~=3.13.0`.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct TildeVersionSpecifier<'a> {
|
||||
inner: Cow<'a, VersionSpecifier>,
|
||||
}
|
||||
|
||||
impl<'a> TildeVersionSpecifier<'a> {
|
||||
/// Create a new [`TildeVersionSpecifier`] from a [`VersionSpecifier`] value.
|
||||
///
|
||||
/// If a [`Operator::TildeEqual`] is not used, or the version includes more than minor and patch
|
||||
/// segments, this will return [`None`].
|
||||
pub fn from_specifier(specifier: VersionSpecifier) -> Option<TildeVersionSpecifier<'a>> {
|
||||
TildeVersionSpecifier::new(Cow::Owned(specifier))
|
||||
}
|
||||
|
||||
/// Create a new [`TildeVersionSpecifier`] from a [`VersionSpecifier`] reference.
|
||||
///
|
||||
/// See [`TildeVersionSpecifier::from_specifier`].
|
||||
pub fn from_specifier_ref(
|
||||
specifier: &'a VersionSpecifier,
|
||||
) -> Option<TildeVersionSpecifier<'a>> {
|
||||
TildeVersionSpecifier::new(Cow::Borrowed(specifier))
|
||||
}
|
||||
|
||||
fn new(specifier: Cow<'a, VersionSpecifier>) -> Option<Self> {
|
||||
if specifier.operator != Operator::TildeEqual {
|
||||
return None;
|
||||
}
|
||||
if specifier.version().release().len() < 2 || specifier.version().release().len() > 3 {
|
||||
return None;
|
||||
}
|
||||
if specifier.version().any_prerelease()
|
||||
|| specifier.version().is_local()
|
||||
|| specifier.version().is_post()
|
||||
{
|
||||
return None;
|
||||
}
|
||||
Some(Self { inner: specifier })
|
||||
}
|
||||
|
||||
/// Whether a patch version is present in this tilde version specifier.
|
||||
pub fn has_patch(&self) -> bool {
|
||||
self.inner.version.release().len() == 3
|
||||
}
|
||||
|
||||
/// Construct the lower and upper bounding version specifiers for this tilde version specifier,
|
||||
/// e.g., for `~=3.13` this would return `>=3.13` and `<4` and for `~=3.13.0` it would
|
||||
/// return `>=3.13.0` and `<3.14`.
|
||||
pub fn bounding_specifiers(&self) -> (VersionSpecifier, VersionSpecifier) {
|
||||
let release = self.inner.version().release();
|
||||
let lower = self.inner.version.clone();
|
||||
let upper = if self.has_patch() {
|
||||
Version::new([release[0], release[1] + 1])
|
||||
} else {
|
||||
Version::new([release[0] + 1])
|
||||
};
|
||||
(
|
||||
VersionSpecifier::greater_than_equal_version(lower),
|
||||
VersionSpecifier::less_than_version(upper),
|
||||
)
|
||||
}
|
||||
|
||||
/// Construct a new tilde `VersionSpecifier` with the given patch version appended.
|
||||
pub fn with_patch_version(&self, patch: u64) -> TildeVersionSpecifier {
|
||||
let mut release = self.inner.version.release().to_vec();
|
||||
if self.has_patch() {
|
||||
release.pop();
|
||||
}
|
||||
release.push(patch);
|
||||
TildeVersionSpecifier::from_specifier(
|
||||
VersionSpecifier::from_version(Operator::TildeEqual, Version::new(release))
|
||||
.expect("We should always derive a valid new version specifier"),
|
||||
)
|
||||
.expect("We should always derive a new tilde version specifier")
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for TildeVersionSpecifier<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.inner)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::{cmp::Ordering, str::FromStr};
|
||||
|
|
|
@ -18,11 +18,16 @@ use uv_redacted::DisplaySafeUrl;
|
|||
use crate::Pep508Url;
|
||||
|
||||
/// A wrapper around [`Url`] that preserves the original string.
|
||||
///
|
||||
/// The original string is not preserved after serialization/deserialization.
|
||||
#[derive(Debug, Clone, Eq)]
|
||||
pub struct VerbatimUrl {
|
||||
/// The parsed URL.
|
||||
url: DisplaySafeUrl,
|
||||
/// The URL as it was provided by the user.
|
||||
///
|
||||
/// Even if originally set, this will be [`None`] after
|
||||
/// serialization/deserialization.
|
||||
given: Option<ArcStr>,
|
||||
}
|
||||
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,3 +1,4 @@
|
|||
#[cfg(feature = "schemars")]
|
||||
use std::borrow::Cow;
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::ops::Deref;
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
//! DO NOT EDIT
|
||||
//!
|
||||
//! Generated with `cargo run dev generate-sysconfig-metadata`
|
||||
//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250630/cpython-unix/targets.yml>
|
||||
//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250702/cpython-unix/targets.yml>
|
||||
//!
|
||||
#![allow(clippy::all)]
|
||||
#![cfg_attr(any(), rustfmt::skip)]
|
||||
|
@ -15,7 +15,6 @@ use crate::sysconfig::replacements::{ReplacementEntry, ReplacementMode};
|
|||
pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock<BTreeMap<String, Vec<ReplacementEntry>>> = LazyLock::new(|| {
|
||||
BTreeMap::from_iter([
|
||||
("BLDSHARED".to_string(), vec![
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/aarch64-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
||||
|
@ -28,7 +27,6 @@ pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock<BTreeMap<String, Vec<Replac
|
|||
ReplacementEntry { mode: ReplacementMode::Partial { from: "musl-clang".to_string() }, to: "cc".to_string() },
|
||||
]),
|
||||
("CC".to_string(), vec![
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/aarch64-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
||||
|
@ -41,7 +39,6 @@ pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock<BTreeMap<String, Vec<Replac
|
|||
ReplacementEntry { mode: ReplacementMode::Partial { from: "musl-clang".to_string() }, to: "cc".to_string() },
|
||||
]),
|
||||
("CXX".to_string(), vec![
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/aarch64-linux-gnu-g++".to_string() }, to: "c++".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-g++".to_string() }, to: "c++".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-g++".to_string() }, to: "c++".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-g++".to_string() }, to: "c++".to_string() },
|
||||
|
@ -53,7 +50,6 @@ pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock<BTreeMap<String, Vec<Replac
|
|||
ReplacementEntry { mode: ReplacementMode::Partial { from: "clang++".to_string() }, to: "c++".to_string() },
|
||||
]),
|
||||
("LDCXXSHARED".to_string(), vec![
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/aarch64-linux-gnu-g++".to_string() }, to: "c++".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-g++".to_string() }, to: "c++".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-g++".to_string() }, to: "c++".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-g++".to_string() }, to: "c++".to_string() },
|
||||
|
@ -65,7 +61,6 @@ pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock<BTreeMap<String, Vec<Replac
|
|||
ReplacementEntry { mode: ReplacementMode::Partial { from: "clang++".to_string() }, to: "c++".to_string() },
|
||||
]),
|
||||
("LDSHARED".to_string(), vec![
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/aarch64-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
||||
|
@ -78,7 +73,6 @@ pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock<BTreeMap<String, Vec<Replac
|
|||
ReplacementEntry { mode: ReplacementMode::Partial { from: "musl-clang".to_string() }, to: "cc".to_string() },
|
||||
]),
|
||||
("LINKCC".to_string(), vec![
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/aarch64-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() },
|
||||
ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() },
|
||||
|
|
|
@ -349,7 +349,7 @@ mod tests {
|
|||
|
||||
// Cross-compiles use GNU
|
||||
let sysconfigdata = [
|
||||
("CC", "/usr/bin/aarch64-linux-gnu-gcc"),
|
||||
("CC", "/usr/bin/riscv64-linux-gnu-gcc"),
|
||||
("CXX", "/usr/bin/x86_64-linux-gnu-g++"),
|
||||
]
|
||||
.into_iter()
|
||||
|
|
|
@ -3,6 +3,7 @@ use std::fmt::Formatter;
|
|||
use std::sync::Arc;
|
||||
|
||||
use indexmap::IndexSet;
|
||||
use itertools::Itertools;
|
||||
use owo_colors::OwoColorize;
|
||||
use pubgrub::{
|
||||
DefaultStringReporter, DerivationTree, Derived, External, Range, Ranges, Reporter, Term,
|
||||
|
@ -156,7 +157,7 @@ impl<T> From<tokio::sync::mpsc::error::SendError<T>> for ResolveError {
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) type ErrorTree = DerivationTree<PubGrubPackage, Range<Version>, UnavailableReason>;
|
||||
pub type ErrorTree = DerivationTree<PubGrubPackage, Range<Version>, UnavailableReason>;
|
||||
|
||||
/// A wrapper around [`pubgrub::error::NoSolutionError`] that displays a resolution failure report.
|
||||
pub struct NoSolutionError {
|
||||
|
@ -367,6 +368,11 @@ impl NoSolutionError {
|
|||
NoSolutionHeader::new(self.env.clone())
|
||||
}
|
||||
|
||||
/// Get the conflict derivation tree for external analysis
|
||||
pub fn derivation_tree(&self) -> &ErrorTree {
|
||||
&self.error
|
||||
}
|
||||
|
||||
/// Hint at limiting the resolver environment if universal resolution failed for a target
|
||||
/// that is not the current platform or not the current Python version.
|
||||
fn hint_disjoint_targets(&self, f: &mut Formatter) -> std::fmt::Result {
|
||||
|
@ -404,6 +410,15 @@ impl NoSolutionError {
|
|||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get the packages that are involved in this error.
|
||||
pub fn packages(&self) -> impl Iterator<Item = &PackageName> {
|
||||
self.error
|
||||
.packages()
|
||||
.into_iter()
|
||||
.filter_map(|p| p.name())
|
||||
.unique()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for NoSolutionError {
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
use std::{borrow::Cow, str::FromStr};
|
||||
#[cfg(feature = "schemars")]
|
||||
use std::borrow::Cow;
|
||||
use std::str::FromStr;
|
||||
|
||||
use jiff::{Timestamp, ToSpan, tz::TimeZone};
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
pub use dependency_mode::DependencyMode;
|
||||
pub use error::{NoSolutionError, NoSolutionHeader, ResolveError, SentinelRange};
|
||||
pub use error::{ErrorTree, NoSolutionError, NoSolutionHeader, ResolveError, SentinelRange};
|
||||
pub use exclude_newer::ExcludeNewer;
|
||||
pub use exclusions::Exclusions;
|
||||
pub use flat_index::{FlatDistributions, FlatIndex};
|
||||
|
@ -54,7 +54,7 @@ mod options;
|
|||
mod pins;
|
||||
mod preferences;
|
||||
mod prerelease;
|
||||
mod pubgrub;
|
||||
pub mod pubgrub;
|
||||
mod python_requirement;
|
||||
mod redirect;
|
||||
mod resolution;
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
pub(crate) use crate::pubgrub::dependencies::PubGrubDependency;
|
||||
pub(crate) use crate::pubgrub::distribution::PubGrubDistribution;
|
||||
pub(crate) use crate::pubgrub::package::{PubGrubPackage, PubGrubPackageInner, PubGrubPython};
|
||||
pub use crate::pubgrub::package::{PubGrubPackage, PubGrubPackageInner, PubGrubPython};
|
||||
pub(crate) use crate::pubgrub::priority::{PubGrubPriorities, PubGrubPriority, PubGrubTiebreaker};
|
||||
pub(crate) use crate::pubgrub::report::PubGrubReportFormatter;
|
||||
|
||||
|
|
|
@ -9,7 +9,7 @@ use crate::python_requirement::PythonRequirement;
|
|||
|
||||
/// [`Arc`] wrapper around [`PubGrubPackageInner`] to make cloning (inside PubGrub) cheap.
|
||||
#[derive(Debug, Clone, Eq, Hash, PartialEq, PartialOrd, Ord)]
|
||||
pub(crate) struct PubGrubPackage(Arc<PubGrubPackageInner>);
|
||||
pub struct PubGrubPackage(Arc<PubGrubPackageInner>);
|
||||
|
||||
impl Deref for PubGrubPackage {
|
||||
type Target = PubGrubPackageInner;
|
||||
|
@ -39,7 +39,7 @@ impl From<PubGrubPackageInner> for PubGrubPackage {
|
|||
/// package (e.g., `black[colorama]`), and mark it as a dependency of the real package (e.g.,
|
||||
/// `black`). We then discard the virtual packages at the end of the resolution process.
|
||||
#[derive(Debug, Clone, Eq, Hash, PartialEq, PartialOrd, Ord)]
|
||||
pub(crate) enum PubGrubPackageInner {
|
||||
pub enum PubGrubPackageInner {
|
||||
/// The root package, which is used to start the resolution process.
|
||||
Root(Option<PackageName>),
|
||||
/// A Python version.
|
||||
|
@ -295,7 +295,7 @@ impl PubGrubPackage {
|
|||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, PartialOrd, Hash, Ord)]
|
||||
pub(crate) enum PubGrubPython {
|
||||
pub enum PubGrubPython {
|
||||
/// The Python version installed in the current environment.
|
||||
Installed,
|
||||
/// The Python version for which dependencies are being resolved.
|
||||
|
|
|
@ -7,7 +7,7 @@ use uv_platform_tags::{AbiTag, Tags};
|
|||
|
||||
/// The reason why a package or a version cannot be used.
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub(crate) enum UnavailableReason {
|
||||
pub enum UnavailableReason {
|
||||
/// The entire package cannot be used.
|
||||
Package(UnavailablePackage),
|
||||
/// A single version cannot be used.
|
||||
|
@ -29,7 +29,7 @@ impl Display for UnavailableReason {
|
|||
/// Most variant are from [`MetadataResponse`] without the error source, since we don't format
|
||||
/// the source and we want to merge unavailable messages across versions.
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub(crate) enum UnavailableVersion {
|
||||
pub enum UnavailableVersion {
|
||||
/// Version is incompatible because it has no usable distributions
|
||||
IncompatibleDist(IncompatibleDist),
|
||||
/// The wheel metadata was found, but could not be parsed.
|
||||
|
@ -123,7 +123,7 @@ impl From<&MetadataUnavailable> for UnavailableVersion {
|
|||
|
||||
/// The package is unavailable and cannot be used.
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub(crate) enum UnavailablePackage {
|
||||
pub enum UnavailablePackage {
|
||||
/// Index lookups were disabled (i.e., `--no-index`) and the package was not found in a flat index (i.e. from `--find-links`).
|
||||
NoIndex,
|
||||
/// Network requests were disabled (i.e., `--offline`), and the package was not found in the cache.
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "uv-version"
|
||||
version = "0.7.18"
|
||||
version = "0.7.19"
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
homepage = { workspace = true }
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
//!
|
||||
//! Then lowers them into a dependency specification.
|
||||
|
||||
#[cfg(feature = "schemars")]
|
||||
use std::borrow::Cow;
|
||||
use std::collections::BTreeMap;
|
||||
use std::fmt::Formatter;
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "uv"
|
||||
version = "0.7.18"
|
||||
version = "0.7.19"
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
homepage = { workspace = true }
|
||||
|
|
|
@ -3,7 +3,6 @@ use std::fmt::Write as _;
|
|||
use std::io::Write as _;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
use std::{fmt, io};
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
|
@ -188,15 +187,6 @@ async fn build_impl(
|
|||
printer: Printer,
|
||||
preview: PreviewMode,
|
||||
) -> Result<BuildResult> {
|
||||
if list && preview.is_disabled() {
|
||||
// We need the direct build for list and that is preview only.
|
||||
writeln!(
|
||||
printer.stderr(),
|
||||
"The `--list` option is only available in preview mode; add the `--preview` flag to use `--list`"
|
||||
)?;
|
||||
return Ok(BuildResult::Failure);
|
||||
}
|
||||
|
||||
// Extract the resolver settings.
|
||||
let ResolverSettings {
|
||||
index_locations,
|
||||
|
@ -504,16 +494,7 @@ async fn build_package(
|
|||
.await?
|
||||
.into_interpreter();
|
||||
|
||||
// Add all authenticated sources to the cache.
|
||||
for index in index_locations.allowed_indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
let credentials = Arc::new(credentials);
|
||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
||||
if let Some(root_url) = index.root_url() {
|
||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
index_locations.cache_index_credentials();
|
||||
|
||||
// Read build constraints.
|
||||
let build_constraints =
|
||||
|
@ -615,10 +596,7 @@ async fn build_package(
|
|||
}
|
||||
|
||||
BuildAction::List
|
||||
} else if preview.is_enabled()
|
||||
&& !force_pep517
|
||||
&& check_direct_build(source.path(), source.path().user_display())
|
||||
{
|
||||
} else if !force_pep517 && check_direct_build(source.path(), source.path().user_display()) {
|
||||
BuildAction::DirectBuild
|
||||
} else {
|
||||
BuildAction::Pep517
|
||||
|
|
|
@ -3,7 +3,6 @@ use std::env;
|
|||
use std::ffi::OsStr;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{Result, anyhow};
|
||||
use itertools::Itertools;
|
||||
|
@ -338,13 +337,12 @@ pub(crate) async fn pip_compile(
|
|||
|
||||
// Determine the Python requirement, if the user requested a specific version.
|
||||
let python_requirement = if universal {
|
||||
let requires_python = RequiresPython::greater_than_equal_version(
|
||||
if let Some(python_version) = python_version.as_ref() {
|
||||
&python_version.version
|
||||
} else {
|
||||
interpreter.python_version()
|
||||
},
|
||||
);
|
||||
let requires_python = if let Some(python_version) = python_version.as_ref() {
|
||||
RequiresPython::greater_than_equal_version(&python_version.version)
|
||||
} else {
|
||||
let version = interpreter.python_minor_version();
|
||||
RequiresPython::greater_than_equal_version(&version)
|
||||
};
|
||||
PythonRequirement::from_requires_python(&interpreter, requires_python)
|
||||
} else if let Some(python_version) = python_version.as_ref() {
|
||||
PythonRequirement::from_python_version(&interpreter, python_version)
|
||||
|
@ -388,16 +386,7 @@ pub(crate) async fn pip_compile(
|
|||
no_index,
|
||||
);
|
||||
|
||||
// Add all authenticated sources to the cache.
|
||||
for index in index_locations.allowed_indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
let credentials = Arc::new(credentials);
|
||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
||||
if let Some(root_url) = index.root_url() {
|
||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
index_locations.cache_index_credentials();
|
||||
|
||||
// Determine the PyTorch backend.
|
||||
let torch_backend = torch_backend
|
||||
|
|
|
@ -1,12 +1,11 @@
|
|||
use std::collections::{BTreeMap, BTreeSet};
|
||||
use std::fmt::Write;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Context;
|
||||
use itertools::Itertools;
|
||||
use owo_colors::OwoColorize;
|
||||
use tracing::{Level, debug, enabled};
|
||||
use tracing::{Level, debug, enabled, warn};
|
||||
|
||||
use uv_cache::Cache;
|
||||
use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder};
|
||||
|
@ -237,7 +236,13 @@ pub(crate) async fn pip_install(
|
|||
}
|
||||
}
|
||||
|
||||
let _lock = environment.lock().await?;
|
||||
let _lock = environment
|
||||
.lock()
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
warn!("Failed to acquire environment lock: {err}");
|
||||
})
|
||||
.ok();
|
||||
|
||||
// Determine the markers to use for the resolution.
|
||||
let interpreter = environment.interpreter();
|
||||
|
@ -334,16 +339,7 @@ pub(crate) async fn pip_install(
|
|||
no_index,
|
||||
);
|
||||
|
||||
// Add all authenticated sources to the cache.
|
||||
for index in index_locations.allowed_indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
let credentials = Arc::new(credentials);
|
||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
||||
if let Some(root_url) = index.root_url() {
|
||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
index_locations.cache_index_credentials();
|
||||
|
||||
// Determine the PyTorch backend.
|
||||
let torch_backend = torch_backend
|
||||
|
|
|
@ -1,10 +1,9 @@
|
|||
use std::collections::{BTreeMap, BTreeSet};
|
||||
use std::fmt::Write;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use owo_colors::OwoColorize;
|
||||
use tracing::debug;
|
||||
use tracing::{debug, warn};
|
||||
|
||||
use uv_cache::Cache;
|
||||
use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder};
|
||||
|
@ -212,7 +211,13 @@ pub(crate) async fn pip_sync(
|
|||
}
|
||||
}
|
||||
|
||||
let _lock = environment.lock().await?;
|
||||
let _lock = environment
|
||||
.lock()
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
warn!("Failed to acquire environment lock: {err}");
|
||||
})
|
||||
.ok();
|
||||
|
||||
let interpreter = environment.interpreter();
|
||||
|
||||
|
@ -267,16 +272,7 @@ pub(crate) async fn pip_sync(
|
|||
no_index,
|
||||
);
|
||||
|
||||
// Add all authenticated sources to the cache.
|
||||
for index in index_locations.allowed_indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
let credentials = Arc::new(credentials);
|
||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
||||
if let Some(root_url) = index.root_url() {
|
||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
index_locations.cache_index_credentials();
|
||||
|
||||
// Determine the PyTorch backend.
|
||||
let torch_backend = torch_backend
|
||||
|
|
|
@ -3,7 +3,7 @@ use std::fmt::Write;
|
|||
use anyhow::Result;
|
||||
use itertools::{Either, Itertools};
|
||||
use owo_colors::OwoColorize;
|
||||
use tracing::debug;
|
||||
use tracing::{debug, warn};
|
||||
|
||||
use uv_cache::Cache;
|
||||
use uv_client::BaseClientBuilder;
|
||||
|
@ -100,7 +100,13 @@ pub(crate) async fn pip_uninstall(
|
|||
}
|
||||
}
|
||||
|
||||
let _lock = environment.lock().await?;
|
||||
let _lock = environment
|
||||
.lock()
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
warn!("Failed to acquire environment lock: {err}");
|
||||
})
|
||||
.ok();
|
||||
|
||||
// Index the current `site-packages` directory.
|
||||
let site_packages = uv_installer::SitePackages::from_environment(&environment)?;
|
||||
|
|
|
@ -10,7 +10,7 @@ use anyhow::{Context, Result, bail};
|
|||
use itertools::Itertools;
|
||||
use owo_colors::OwoColorize;
|
||||
use rustc_hash::{FxBuildHasher, FxHashMap};
|
||||
use tracing::debug;
|
||||
use tracing::{debug, warn};
|
||||
use url::Url;
|
||||
|
||||
use uv_cache::Cache;
|
||||
|
@ -319,7 +319,13 @@ pub(crate) async fn add(
|
|||
}
|
||||
};
|
||||
|
||||
let _lock = target.acquire_lock().await?;
|
||||
let _lock = target
|
||||
.acquire_lock()
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
warn!("Failed to acquire environment lock: {err}");
|
||||
})
|
||||
.ok();
|
||||
|
||||
let client_builder = BaseClientBuilder::new()
|
||||
.connectivity(network_settings.connectivity)
|
||||
|
@ -374,16 +380,7 @@ pub(crate) async fn add(
|
|||
let hasher = HashStrategy::default();
|
||||
let sources = SourceStrategy::Enabled;
|
||||
|
||||
// Add all authenticated sources to the cache.
|
||||
for index in settings.resolver.index_locations.allowed_indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
let credentials = Arc::new(credentials);
|
||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
||||
if let Some(root_url) = index.root_url() {
|
||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
settings.resolver.index_locations.cache_index_credentials();
|
||||
|
||||
// Initialize the registry client.
|
||||
let client = RegistryClientBuilder::try_from(client_builder)?
|
||||
|
|
|
@ -44,13 +44,15 @@ impl CachedEnvironment {
|
|||
printer: Printer,
|
||||
preview: PreviewMode,
|
||||
) -> Result<Self, ProjectError> {
|
||||
let interpreter = Self::base_interpreter(interpreter, cache)?;
|
||||
// Resolve the "base" interpreter, which resolves to an underlying parent interpreter if the
|
||||
// given interpreter is a virtual environment.
|
||||
let base_interpreter = Self::base_interpreter(interpreter, cache)?;
|
||||
|
||||
// Resolve the requirements with the interpreter.
|
||||
let resolution = Resolution::from(
|
||||
resolve_environment(
|
||||
spec,
|
||||
&interpreter,
|
||||
&base_interpreter,
|
||||
build_constraints.clone(),
|
||||
&settings.resolver,
|
||||
network_settings,
|
||||
|
@ -73,13 +75,34 @@ impl CachedEnvironment {
|
|||
hash_digest(&distributions)
|
||||
};
|
||||
|
||||
// Hash the interpreter based on its path.
|
||||
// TODO(charlie): Come up with a robust hash for the interpreter.
|
||||
let interpreter_hash =
|
||||
cache_digest(&canonicalize_executable(interpreter.sys_executable())?);
|
||||
// Construct a hash for the environment.
|
||||
//
|
||||
// Use the canonicalized base interpreter path since that's the interpreter we performed the
|
||||
// resolution with and the interpreter the environment will be created with.
|
||||
//
|
||||
// We also include the canonicalized `sys.prefix` of the non-base interpreter, that is, the
|
||||
// virtual environment's path. Originally, we shared cached environments independent of the
|
||||
// environment they'd be layered on top of. However, this causes collisions as the overlay
|
||||
// `.pth` file can be overridden by another instance of uv. Including this element in the key
|
||||
// avoids this problem at the cost of creating separate cached environments for identical
|
||||
// `--with` invocations across projects. We use `sys.prefix` rather than `sys.executable` so
|
||||
// we can canonicalize it without invalidating the purpose of the element — it'd probably be
|
||||
// safe to just use the absolute `sys.executable` as well.
|
||||
//
|
||||
// TODO(zanieb): Since we're not sharing these environmments across projects, we should move
|
||||
// [`CachedEvnvironment::set_overlay`] etc. here since the values there should be constant
|
||||
// now.
|
||||
//
|
||||
// TODO(zanieb): We should include the version of the base interpreter in the hash, so if
|
||||
// the interpreter at the canonicalized path changes versions we construct a new
|
||||
// environment.
|
||||
let environment_hash = cache_digest(&(
|
||||
&canonicalize_executable(base_interpreter.sys_executable())?,
|
||||
&interpreter.sys_prefix().canonicalize()?,
|
||||
));
|
||||
|
||||
// Search in the content-addressed cache.
|
||||
let cache_entry = cache.entry(CacheBucket::Environments, interpreter_hash, resolution_hash);
|
||||
let cache_entry = cache.entry(CacheBucket::Environments, environment_hash, resolution_hash);
|
||||
|
||||
if cache.refresh().is_none() {
|
||||
if let Ok(root) = cache.resolve_link(cache_entry.path()) {
|
||||
|
@ -93,7 +116,7 @@ impl CachedEnvironment {
|
|||
let temp_dir = cache.venv_dir()?;
|
||||
let venv = uv_virtualenv::create_venv(
|
||||
temp_dir.path(),
|
||||
interpreter,
|
||||
base_interpreter,
|
||||
uv_virtualenv::Prompt::None,
|
||||
false,
|
||||
false,
|
||||
|
|
|
@ -593,16 +593,7 @@ async fn do_lock(
|
|||
.keyring(*keyring_provider)
|
||||
.allow_insecure_host(network_settings.allow_insecure_host.clone());
|
||||
|
||||
// Add all authenticated sources to the cache.
|
||||
for index in index_locations.allowed_indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
let credentials = Arc::new(credentials);
|
||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
||||
if let Some(root_url) = index.root_url() {
|
||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
index_locations.cache_index_credentials();
|
||||
|
||||
for index in target.indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
|
|
|
@ -25,7 +25,7 @@ use uv_fs::{CWD, LockedFile, Simplified};
|
|||
use uv_git::ResolvedRepositoryReference;
|
||||
use uv_installer::{SatisfiesResult, SitePackages};
|
||||
use uv_normalize::{DEV_DEPENDENCIES, DefaultGroups, ExtraName, GroupName, PackageName};
|
||||
use uv_pep440::{Version, VersionSpecifiers};
|
||||
use uv_pep440::{TildeVersionSpecifier, Version, VersionSpecifiers};
|
||||
use uv_pep508::MarkerTreeContents;
|
||||
use uv_pypi_types::{ConflictPackage, ConflictSet, Conflicts};
|
||||
use uv_python::{
|
||||
|
@ -421,6 +421,30 @@ pub(crate) fn find_requires_python(
|
|||
if requires_python.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
for ((package, group), specifiers) in &requires_python {
|
||||
if let [spec] = &specifiers[..] {
|
||||
if let Some(spec) = TildeVersionSpecifier::from_specifier_ref(spec) {
|
||||
if spec.has_patch() {
|
||||
continue;
|
||||
}
|
||||
let (lower, upper) = spec.bounding_specifiers();
|
||||
let spec_0 = spec.with_patch_version(0);
|
||||
let (lower_0, upper_0) = spec_0.bounding_specifiers();
|
||||
warn_user_once!(
|
||||
"The `requires-python` specifier (`{spec}`) in `{package}{group}` \
|
||||
uses the tilde specifier (`~=`) without a patch version. This will be \
|
||||
interpreted as `{lower}, {upper}`. Did you mean `{spec_0}` to constrain the \
|
||||
version as `{lower_0}, {upper_0}`? We recommend only using \
|
||||
the tilde specifier with a patch version to avoid ambiguity.",
|
||||
group = if let Some(group) = group {
|
||||
format!(":{group}")
|
||||
} else {
|
||||
String::new()
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
match RequiresPython::intersection(requires_python.iter().map(|(.., specifiers)| specifiers)) {
|
||||
Some(requires_python) => Ok(Some(requires_python)),
|
||||
None => Err(ProjectError::DisjointRequiresPython(requires_python)),
|
||||
|
@ -1220,7 +1244,12 @@ impl ProjectEnvironment {
|
|||
preview: PreviewMode,
|
||||
) -> Result<Self, ProjectError> {
|
||||
// Lock the project environment to avoid synchronization issues.
|
||||
let _lock = ProjectInterpreter::lock(workspace).await?;
|
||||
let _lock = ProjectInterpreter::lock(workspace)
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
warn!("Failed to acquire project environment lock: {err}");
|
||||
})
|
||||
.ok();
|
||||
|
||||
let upgradeable = preview.is_enabled()
|
||||
&& python
|
||||
|
@ -1438,7 +1467,13 @@ impl ScriptEnvironment {
|
|||
preview: PreviewMode,
|
||||
) -> Result<Self, ProjectError> {
|
||||
// Lock the script environment to avoid synchronization issues.
|
||||
let _lock = ScriptInterpreter::lock(script).await?;
|
||||
let _lock = ScriptInterpreter::lock(script)
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
warn!("Failed to acquire script environment lock: {err}");
|
||||
})
|
||||
.ok();
|
||||
|
||||
let upgradeable = python_request
|
||||
.as_ref()
|
||||
.is_none_or(|request| !request.includes_patch());
|
||||
|
@ -1626,16 +1661,7 @@ pub(crate) async fn resolve_names(
|
|||
.keyring(*keyring_provider)
|
||||
.allow_insecure_host(network_settings.allow_insecure_host.clone());
|
||||
|
||||
// Add all authenticated sources to the cache.
|
||||
for index in index_locations.allowed_indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
let credentials = Arc::new(credentials);
|
||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
||||
if let Some(root_url) = index.root_url() {
|
||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
index_locations.cache_index_credentials();
|
||||
|
||||
// Initialize the registry client.
|
||||
let client = RegistryClientBuilder::try_from(client_builder)?
|
||||
|
@ -1797,16 +1823,7 @@ pub(crate) async fn resolve_environment(
|
|||
let marker_env = interpreter.resolver_marker_environment();
|
||||
let python_requirement = PythonRequirement::from_interpreter(interpreter);
|
||||
|
||||
// Add all authenticated sources to the cache.
|
||||
for index in index_locations.allowed_indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
let credentials = Arc::new(credentials);
|
||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
||||
if let Some(root_url) = index.root_url() {
|
||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
index_locations.cache_index_credentials();
|
||||
|
||||
// Initialize the registry client.
|
||||
let client = RegistryClientBuilder::try_from(client_builder)?
|
||||
|
@ -1978,16 +1995,7 @@ pub(crate) async fn sync_environment(
|
|||
let interpreter = venv.interpreter();
|
||||
let tags = venv.interpreter().tags()?;
|
||||
|
||||
// Add all authenticated sources to the cache.
|
||||
for index in index_locations.allowed_indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
let credentials = Arc::new(credentials);
|
||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
||||
if let Some(root_url) = index.root_url() {
|
||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
index_locations.cache_index_credentials();
|
||||
|
||||
// Initialize the registry client.
|
||||
let client = RegistryClientBuilder::try_from(client_builder)?
|
||||
|
@ -2193,16 +2201,7 @@ pub(crate) async fn update_environment(
|
|||
}
|
||||
}
|
||||
|
||||
// Add all authenticated sources to the cache.
|
||||
for index in index_locations.allowed_indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
let credentials = Arc::new(credentials);
|
||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
||||
if let Some(root_url) = index.root_url() {
|
||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
index_locations.cache_index_credentials();
|
||||
|
||||
// Initialize the registry client.
|
||||
let client = RegistryClientBuilder::try_from(client_builder)?
|
||||
|
|
|
@ -5,7 +5,7 @@ use std::str::FromStr;
|
|||
|
||||
use anyhow::{Context, Result};
|
||||
use owo_colors::OwoColorize;
|
||||
use tracing::debug;
|
||||
use tracing::{debug, warn};
|
||||
|
||||
use uv_cache::Cache;
|
||||
use uv_configuration::{
|
||||
|
@ -281,7 +281,13 @@ pub(crate) async fn remove(
|
|||
}
|
||||
};
|
||||
|
||||
let _lock = target.acquire_lock().await?;
|
||||
let _lock = target
|
||||
.acquire_lock()
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
warn!("Failed to acquire environment lock: {err}");
|
||||
})
|
||||
.ok();
|
||||
|
||||
// Determine the lock mode.
|
||||
let mode = if locked {
|
||||
|
|
|
@ -240,7 +240,13 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
|
|||
.await?
|
||||
.into_environment()?;
|
||||
|
||||
let _lock = environment.lock().await?;
|
||||
let _lock = environment
|
||||
.lock()
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
warn!("Failed to acquire environment lock: {err}");
|
||||
})
|
||||
.ok();
|
||||
|
||||
// Determine the lock mode.
|
||||
let mode = if frozen {
|
||||
|
@ -386,7 +392,13 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
|
|||
)
|
||||
});
|
||||
|
||||
let _lock = environment.lock().await?;
|
||||
let _lock = environment
|
||||
.lock()
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
warn!("Failed to acquire environment lock: {err}");
|
||||
})
|
||||
.ok();
|
||||
|
||||
match update_environment(
|
||||
environment,
|
||||
|
@ -699,7 +711,13 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
|
|||
.map(|lock| (lock, project.workspace().install_path().to_owned()));
|
||||
}
|
||||
} else {
|
||||
let _lock = venv.lock().await?;
|
||||
let _lock = venv
|
||||
.lock()
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
warn!("Failed to acquire environment lock: {err}");
|
||||
})
|
||||
.ok();
|
||||
|
||||
// Determine the lock mode.
|
||||
let mode = if frozen {
|
||||
|
|
|
@ -6,6 +6,7 @@ use std::sync::Arc;
|
|||
use anyhow::{Context, Result};
|
||||
use itertools::Itertools;
|
||||
use owo_colors::OwoColorize;
|
||||
use tracing::warn;
|
||||
|
||||
use uv_cache::Cache;
|
||||
use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder};
|
||||
|
@ -169,7 +170,13 @@ pub(crate) async fn sync(
|
|||
),
|
||||
};
|
||||
|
||||
let _lock = environment.lock().await?;
|
||||
let _lock = environment
|
||||
.lock()
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
warn!("Failed to acquire environment lock: {err}");
|
||||
})
|
||||
.ok();
|
||||
|
||||
// Notify the user of any environment changes.
|
||||
match &environment {
|
||||
|
@ -682,16 +689,7 @@ pub(super) async fn do_sync(
|
|||
// If necessary, convert editable to non-editable distributions.
|
||||
let resolution = apply_editable_mode(resolution, editable);
|
||||
|
||||
// Add all authenticated sources to the cache.
|
||||
for index in index_locations.allowed_indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
let credentials = Arc::new(credentials);
|
||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
||||
if let Some(root_url) = index.root_url() {
|
||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
index_locations.cache_index_credentials();
|
||||
|
||||
// Populate credentials from the target.
|
||||
store_credentials_from_target(target);
|
||||
|
|
|
@ -385,7 +385,7 @@ async fn lock_and_sync(
|
|||
let default_groups = default_dependency_groups(project.pyproject_toml())?;
|
||||
let default_extras = DefaultExtras::default();
|
||||
let groups = DependencyGroups::default().with_defaults(default_groups);
|
||||
let extras = ExtrasSpecification::from_all_extras().with_defaults(default_extras);
|
||||
let extras = ExtrasSpecification::default().with_defaults(default_extras);
|
||||
let install_options = InstallOptions::default();
|
||||
|
||||
// Convert to an `AddTarget` by attaching the appropriate interpreter or environment.
|
||||
|
|
|
@ -218,7 +218,7 @@ pub(crate) fn finalize_tool_install(
|
|||
if target_entry_points.is_empty() {
|
||||
writeln!(
|
||||
printer.stdout(),
|
||||
"No executables are provided by `{from}`",
|
||||
"No executables are provided by package `{from}`; removing tool",
|
||||
from = name.cyan()
|
||||
)?;
|
||||
|
||||
|
@ -354,7 +354,9 @@ fn hint_executable_from_dependency(
|
|||
let command = format!("uv tool install {}", package.name());
|
||||
writeln!(
|
||||
printer.stdout(),
|
||||
"However, an executable with the name `{}` is available via dependency `{}`.\nDid you mean `{}`?",
|
||||
"{}{} An executable with the name `{}` is available via dependency `{}`.\n Did you mean `{}`?",
|
||||
"hint".bold().cyan(),
|
||||
":".bold(),
|
||||
name.cyan(),
|
||||
package.name().cyan(),
|
||||
command.bold(),
|
||||
|
@ -363,7 +365,9 @@ fn hint_executable_from_dependency(
|
|||
packages => {
|
||||
writeln!(
|
||||
printer.stdout(),
|
||||
"However, an executable with the name `{}` is available via the following dependencies::",
|
||||
"{}{} An executable with the name `{}` is available via the following dependencies::",
|
||||
"hint".bold().cyan(),
|
||||
":".bold(),
|
||||
name.cyan(),
|
||||
)?;
|
||||
|
||||
|
@ -372,7 +376,7 @@ fn hint_executable_from_dependency(
|
|||
}
|
||||
writeln!(
|
||||
printer.stdout(),
|
||||
"Did you mean to install one of them instead?"
|
||||
" Did you mean to install one of them instead?"
|
||||
)?;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -242,16 +242,7 @@ async fn venv_impl(
|
|||
python.into_interpreter()
|
||||
};
|
||||
|
||||
// Add all authenticated sources to the cache.
|
||||
for index in index_locations.allowed_indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
let credentials = Arc::new(credentials);
|
||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
||||
if let Some(root_url) = index.root_url() {
|
||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
index_locations.cache_index_credentials();
|
||||
|
||||
// Check if the discovered Python version is incompatible with the current workspace
|
||||
if let Some(requires_python) = requires_python {
|
||||
|
|
|
@ -15,7 +15,7 @@ fn build_basic() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
||||
.chain([(r"\\\.", "")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let project = context.temp_dir.child("project");
|
||||
|
@ -133,7 +133,7 @@ fn build_sdist() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
||||
.chain([(r"\\\.", "")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let project = context.temp_dir.child("project");
|
||||
|
@ -189,7 +189,7 @@ fn build_wheel() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
||||
.chain([(r"\\\.", "")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let project = context.temp_dir.child("project");
|
||||
|
@ -245,7 +245,7 @@ fn build_sdist_wheel() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
||||
.chain([(r"\\\.", "")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let project = context.temp_dir.child("project");
|
||||
|
@ -303,7 +303,7 @@ fn build_wheel_from_sdist() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
||||
.chain([(r"\\\.", "")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let project = context.temp_dir.child("project");
|
||||
|
@ -412,7 +412,7 @@ fn build_fail() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
||||
.chain([(r"\\\.", "")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let project = context.temp_dir.child("project");
|
||||
|
@ -488,7 +488,6 @@ fn build_workspace() -> Result<()> {
|
|||
.filters()
|
||||
.into_iter()
|
||||
.chain([
|
||||
(r"exit code: 1", "exit status: 1"),
|
||||
(r"\\\.", ""),
|
||||
(r"\[project\]", "[PKG]"),
|
||||
(r"\[member\]", "[PKG]"),
|
||||
|
@ -694,7 +693,6 @@ fn build_all_with_failure() -> Result<()> {
|
|||
.filters()
|
||||
.into_iter()
|
||||
.chain([
|
||||
(r"exit code: 1", "exit status: 1"),
|
||||
(r"\\\.", ""),
|
||||
(r"\[project\]", "[PKG]"),
|
||||
(r"\[member-\w+\]", "[PKG]"),
|
||||
|
@ -840,7 +838,7 @@ fn build_constraints() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
||||
.chain([(r"\\\.", "")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let project = context.temp_dir.child("project");
|
||||
|
@ -901,7 +899,7 @@ fn build_sha() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
||||
.chain([(r"\\\.", "")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let project = context.temp_dir.child("project");
|
||||
|
@ -1187,7 +1185,7 @@ fn build_tool_uv_sources() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
||||
.chain([(r"\\\.", "")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let build = context.temp_dir.child("backend");
|
||||
|
@ -1337,7 +1335,6 @@ fn build_non_package() -> Result<()> {
|
|||
.filters()
|
||||
.into_iter()
|
||||
.chain([
|
||||
(r"exit code: 1", "exit status: 1"),
|
||||
(r"\\\.", ""),
|
||||
(r"\[project\]", "[PKG]"),
|
||||
(r"\[member\]", "[PKG]"),
|
||||
|
@ -1930,7 +1927,7 @@ fn build_with_nonnormalized_name() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")])
|
||||
.chain([(r"\\\.", "")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let project = context.temp_dir.child("project");
|
||||
|
@ -1981,3 +1978,60 @@ fn build_with_nonnormalized_name() -> Result<()> {
|
|||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Check that `--force-pep517` is respected.
|
||||
///
|
||||
/// The error messages for a broken project are different for direct builds vs. PEP 517.
|
||||
#[test]
|
||||
fn force_pep517() -> Result<()> {
|
||||
// We need to use a real `uv_build` package.
|
||||
let context = TestContext::new("3.12").with_exclude_newer("2025-05-27T00:00:00Z");
|
||||
|
||||
context
|
||||
.init()
|
||||
.arg("--build-backend")
|
||||
.arg("uv")
|
||||
.assert()
|
||||
.success();
|
||||
|
||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||
pyproject_toml.write_str(indoc! {r#"
|
||||
[project]
|
||||
name = "project"
|
||||
version = "1.0.0"
|
||||
|
||||
[tool.uv.build-backend]
|
||||
module-name = "does_not_exist"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<10000"]
|
||||
build-backend = "uv_build"
|
||||
"#})?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.build().env("RUST_BACKTRACE", "0"), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Building source distribution (uv build backend)...
|
||||
× Failed to build `[TEMP_DIR]/`
|
||||
╰─▶ Expected a Python module at: `src/does_not_exist/__init__.py`
|
||||
");
|
||||
|
||||
uv_snapshot!(context.filters(), context.build().arg("--force-pep517").env("RUST_BACKTRACE", "0"), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Building source distribution...
|
||||
Error: Missing module directory for `does_not_exist` in `src`. Found: `temp`
|
||||
× Failed to build `[TEMP_DIR]/`
|
||||
├─▶ The build backend returned an error
|
||||
╰─▶ Call to `uv_build.build_sdist` failed (exit status: 1)
|
||||
hint: This usually indicates a problem with the package or the build environment.
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -224,7 +224,6 @@ fn preserve_executable_bit() -> Result<()> {
|
|||
.init()
|
||||
.arg("--build-backend")
|
||||
.arg("uv")
|
||||
.arg("--preview")
|
||||
.arg(&project_dir)
|
||||
.assert()
|
||||
.success();
|
||||
|
@ -316,8 +315,7 @@ fn rename_module() -> Result<()> {
|
|||
uv_snapshot!(context
|
||||
.build_backend()
|
||||
.arg("build-wheel")
|
||||
.arg(temp_dir.path())
|
||||
.env("UV_PREVIEW", "1"), @r###"
|
||||
.arg(temp_dir.path()), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
@ -391,8 +389,7 @@ fn rename_module_editable_build() -> Result<()> {
|
|||
uv_snapshot!(context
|
||||
.build_backend()
|
||||
.arg("build-editable")
|
||||
.arg(temp_dir.path())
|
||||
.env("UV_PREVIEW", "1"), @r###"
|
||||
.arg(temp_dir.path()), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
@ -568,8 +565,7 @@ fn build_sdist_with_long_path() -> Result<()> {
|
|||
uv_snapshot!(context
|
||||
.build_backend()
|
||||
.arg("build-sdist")
|
||||
.arg(temp_dir.path())
|
||||
.env("UV_PREVIEW", "1"), @r###"
|
||||
.arg(temp_dir.path()), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
@ -602,8 +598,7 @@ fn sdist_error_without_module() -> Result<()> {
|
|||
uv_snapshot!(context
|
||||
.build_backend()
|
||||
.arg("build-sdist")
|
||||
.arg(temp_dir.path())
|
||||
.env("UV_PREVIEW", "1"), @r"
|
||||
.arg(temp_dir.path()), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
@ -617,8 +612,7 @@ fn sdist_error_without_module() -> Result<()> {
|
|||
uv_snapshot!(context
|
||||
.build_backend()
|
||||
.arg("build-sdist")
|
||||
.arg(temp_dir.path())
|
||||
.env("UV_PREVIEW", "1"), @r"
|
||||
.arg(temp_dir.path()), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
@ -682,7 +676,6 @@ fn complex_namespace_packages() -> Result<()> {
|
|||
|
||||
context
|
||||
.build()
|
||||
.arg("--preview")
|
||||
.arg(project.path())
|
||||
.arg("--out-dir")
|
||||
.arg(dist.path())
|
||||
|
@ -731,7 +724,6 @@ fn complex_namespace_packages() -> Result<()> {
|
|||
context.filters(),
|
||||
context
|
||||
.pip_install()
|
||||
.arg("--preview")
|
||||
.arg("-e")
|
||||
.arg("complex-project-part_a")
|
||||
.arg("-e")
|
||||
|
@ -778,7 +770,6 @@ fn symlinked_file() -> Result<()> {
|
|||
let project = context.temp_dir.child("project");
|
||||
context
|
||||
.init()
|
||||
.arg("--preview")
|
||||
.arg("--build-backend")
|
||||
.arg("uv")
|
||||
.arg(project.path())
|
||||
|
|
|
@ -517,6 +517,8 @@ impl TestContext {
|
|||
if cfg!(windows) {
|
||||
filters.push((" --link-mode <LINK_MODE>".to_string(), String::new()));
|
||||
filters.push((r#"link-mode = "copy"\n"#.to_string(), String::new()));
|
||||
// Unix uses "exit status", Windows uses "exit code"
|
||||
filters.push((r"exit code: ".to_string(), "exit status: ".to_string()));
|
||||
}
|
||||
|
||||
filters.extend(
|
||||
|
|
|
@ -7246,10 +7246,7 @@ fn fail_to_add_revert_project() -> Result<()> {
|
|||
.child("setup.py")
|
||||
.write_str("1/0")?;
|
||||
|
||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
||||
.chain(context.filters())
|
||||
.collect::<Vec<_>>();
|
||||
uv_snapshot!(filters, context.add().arg("./child"), @r#"
|
||||
uv_snapshot!(context.filters(), context.add().arg("./child"), @r#"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
@ -7351,10 +7348,7 @@ fn fail_to_edit_revert_project() -> Result<()> {
|
|||
.child("setup.py")
|
||||
.write_str("1/0")?;
|
||||
|
||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
||||
.chain(context.filters())
|
||||
.collect::<Vec<_>>();
|
||||
uv_snapshot!(filters, context.add().arg("./child"), @r#"
|
||||
uv_snapshot!(context.filters(), context.add().arg("./child"), @r#"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
|
|
@ -4551,15 +4551,15 @@ fn lock_requires_python_compatible_specifier() -> Result<()> {
|
|||
"#,
|
||||
)?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.lock(), @r###"
|
||||
uv_snapshot!(context.filters(), context.lock(), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
warning: The release specifier (`~=3.13`) contains a compatible release match without a patch version. This will be interpreted as `>=3.13, <4`. Did you mean `~=3.13.0` to freeze the minor version?
|
||||
warning: The `requires-python` specifier (`~=3.13`) in `warehouse` uses the tilde specifier (`~=`) without a patch version. This will be interpreted as `>=3.13, <4`. Did you mean `~=3.13.0` to constrain the version as `>=3.13.0, <3.14`? We recommend only using the tilde specifier with a patch version to avoid ambiguity.
|
||||
Resolved 1 package in [TIME]
|
||||
"###);
|
||||
");
|
||||
|
||||
pyproject_toml.write_str(
|
||||
r#"
|
||||
|
@ -23617,10 +23617,7 @@ fn lock_derivation_chain_prod() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([
|
||||
(r"exit code: 1", "exit status: 1"),
|
||||
(r"/.*/src", "/[TMP]/src"),
|
||||
])
|
||||
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
uv_snapshot!(filters, context.lock(), @r###"
|
||||
|
@ -23677,10 +23674,7 @@ fn lock_derivation_chain_extra() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([
|
||||
(r"exit code: 1", "exit status: 1"),
|
||||
(r"/.*/src", "/[TMP]/src"),
|
||||
])
|
||||
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
uv_snapshot!(filters, context.lock(), @r###"
|
||||
|
@ -23739,10 +23733,7 @@ fn lock_derivation_chain_group() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([
|
||||
(r"exit code: 1", "exit status: 1"),
|
||||
(r"/.*/src", "/[TMP]/src"),
|
||||
])
|
||||
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
uv_snapshot!(filters, context.lock(), @r###"
|
||||
|
@ -23812,10 +23803,7 @@ fn lock_derivation_chain_extended() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([
|
||||
(r"exit code: 1", "exit status: 1"),
|
||||
(r"/.*/src", "/[TMP]/src"),
|
||||
])
|
||||
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
uv_snapshot!(filters, context.lock(), @r###"
|
||||
|
|
|
@ -14679,10 +14679,7 @@ fn compile_derivation_chain() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([
|
||||
(r"exit code: 1", "exit status: 1"),
|
||||
(r"/.*/src", "/[TMP]/src"),
|
||||
])
|
||||
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
uv_snapshot!(filters, context.pip_compile().arg("pyproject.toml"), @r###"
|
||||
|
@ -16345,7 +16342,7 @@ fn pep_751_compile_registry_wheel() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "iniconfig"
|
||||
|
@ -16394,7 +16391,7 @@ fn pep_751_compile_registry_sdist() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "source-distribution"
|
||||
|
@ -16478,7 +16475,7 @@ fn pep_751_compile_directory() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "anyio"
|
||||
|
@ -16549,7 +16546,7 @@ fn pep_751_compile_git() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "uv-public-pypackage"
|
||||
|
@ -16599,7 +16596,7 @@ fn pep_751_compile_url_wheel() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "anyio"
|
||||
|
@ -16663,7 +16660,7 @@ fn pep_751_compile_url_sdist() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "anyio"
|
||||
|
@ -16732,7 +16729,7 @@ fn pep_751_compile_path_wheel() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "iniconfig"
|
||||
|
@ -16770,7 +16767,7 @@ fn pep_751_compile_path_wheel() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o nested/pylock.toml
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "iniconfig"
|
||||
|
@ -16811,7 +16808,7 @@ fn pep_751_compile_path_sdist() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "iniconfig"
|
||||
|
@ -16850,7 +16847,7 @@ fn pep_751_compile_path_sdist() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o nested/pylock.toml
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "iniconfig"
|
||||
|
@ -16887,7 +16884,7 @@ fn pep_751_compile_preferences() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "anyio"
|
||||
|
@ -16928,7 +16925,7 @@ fn pep_751_compile_preferences() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "anyio"
|
||||
|
@ -16968,7 +16965,7 @@ fn pep_751_compile_preferences() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "anyio"
|
||||
|
@ -17007,7 +17004,7 @@ fn pep_751_compile_preferences() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "anyio"
|
||||
|
@ -17055,7 +17052,7 @@ fn pep_751_compile_warn() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml --emit-index-url
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "iniconfig"
|
||||
|
@ -17268,7 +17265,7 @@ fn pep_751_compile_no_emit_package() -> Result<()> {
|
|||
# uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml --no-emit-package idna
|
||||
lock-version = "1.0"
|
||||
created-by = "uv"
|
||||
requires-python = ">=3.12.[X]"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[packages]]
|
||||
name = "anyio"
|
||||
|
@ -17562,3 +17559,47 @@ fn git_path_transitive_dependency() -> Result<()> {
|
|||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Ensure that `--emit-index-annotation` plays nicely with `--annotation-style=line`.
|
||||
#[test]
|
||||
fn omit_python_patch_universal() -> Result<()> {
|
||||
let context = TestContext::new("3.11");
|
||||
|
||||
let requirements_in = context.temp_dir.child("requirements.in");
|
||||
requirements_in.write_str("redis")?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.pip_compile()
|
||||
.arg("requirements.in"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
# This file was autogenerated by uv via the following command:
|
||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.in
|
||||
redis==5.0.3
|
||||
# via -r requirements.in
|
||||
|
||||
----- stderr -----
|
||||
Resolved 1 package in [TIME]
|
||||
"
|
||||
);
|
||||
|
||||
uv_snapshot!(context.filters(), context.pip_compile()
|
||||
.arg("requirements.in")
|
||||
.arg("--universal"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
# This file was autogenerated by uv via the following command:
|
||||
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal
|
||||
async-timeout==4.0.3 ; python_full_version < '3.11.[X]'
|
||||
# via redis
|
||||
redis==5.0.3
|
||||
# via -r requirements.in
|
||||
|
||||
----- stderr -----
|
||||
Resolved 2 packages in [TIME]
|
||||
"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -342,10 +342,7 @@ dependencies = ["flask==1.0.x"]
|
|||
let requirements_txt = context.temp_dir.child("requirements.txt");
|
||||
requirements_txt.write_str("./path_dep")?;
|
||||
|
||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
||||
.chain(context.filters())
|
||||
.collect::<Vec<_>>();
|
||||
uv_snapshot!(filters, context.pip_install()
|
||||
uv_snapshot!(context.filters(), context.pip_install()
|
||||
.arg("-r")
|
||||
.arg("requirements.txt"), @r###"
|
||||
success: false
|
||||
|
@ -4930,10 +4927,7 @@ fn no_build_isolation() -> Result<()> {
|
|||
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz")?;
|
||||
|
||||
// We expect the build to fail, because `setuptools` is not installed.
|
||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
||||
.chain(context.filters())
|
||||
.collect::<Vec<_>>();
|
||||
uv_snapshot!(filters, context.pip_install()
|
||||
uv_snapshot!(context.filters(), context.pip_install()
|
||||
.arg("-r")
|
||||
.arg("requirements.in")
|
||||
.arg("--no-build-isolation"), @r###"
|
||||
|
@ -5001,10 +4995,7 @@ fn respect_no_build_isolation_env_var() -> Result<()> {
|
|||
requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz")?;
|
||||
|
||||
// We expect the build to fail, because `setuptools` is not installed.
|
||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
||||
.chain(context.filters())
|
||||
.collect::<Vec<_>>();
|
||||
uv_snapshot!(filters, context.pip_install()
|
||||
uv_snapshot!(context.filters(), context.pip_install()
|
||||
.arg("-r")
|
||||
.arg("requirements.in")
|
||||
.env(EnvVars::UV_NO_BUILD_ISOLATION, "yes"), @r###"
|
||||
|
@ -8601,10 +8592,7 @@ fn install_build_isolation_package() -> Result<()> {
|
|||
)?;
|
||||
|
||||
// Running `uv pip install` should fail for iniconfig.
|
||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
||||
.chain(context.filters())
|
||||
.collect::<Vec<_>>();
|
||||
uv_snapshot!(filters, context.pip_install()
|
||||
uv_snapshot!(context.filters(), context.pip_install()
|
||||
.arg("--no-build-isolation-package")
|
||||
.arg("iniconfig")
|
||||
.arg(package.path()), @r###"
|
||||
|
@ -8931,10 +8919,7 @@ fn missing_top_level() {
|
|||
fn sklearn() {
|
||||
let context = TestContext::new("3.12");
|
||||
|
||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
||||
.chain(context.filters())
|
||||
.collect::<Vec<_>>();
|
||||
uv_snapshot!(filters, context.pip_install().arg("sklearn"), @r###"
|
||||
uv_snapshot!(context.filters(), context.pip_install().arg("sklearn"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
@ -8984,10 +8969,7 @@ fn resolve_derivation_chain() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([
|
||||
(r"exit code: 1", "exit status: 1"),
|
||||
(r"/.*/src", "/[TMP]/src"),
|
||||
])
|
||||
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
uv_snapshot!(filters, context.pip_install()
|
||||
|
|
|
@ -4777,7 +4777,7 @@ fn run_groups_include_requires_python() -> Result<()> {
|
|||
bar = ["iniconfig"]
|
||||
baz = ["iniconfig"]
|
||||
dev = ["sniffio", {include-group = "foo"}, {include-group = "baz"}]
|
||||
|
||||
|
||||
|
||||
[tool.uv.dependency-groups]
|
||||
foo = {requires-python="<3.13"}
|
||||
|
@ -4876,7 +4876,7 @@ fn exit_status_signal() -> Result<()> {
|
|||
|
||||
#[test]
|
||||
fn run_repeated() -> Result<()> {
|
||||
let context = TestContext::new_with_versions(&["3.13"]);
|
||||
let context = TestContext::new_with_versions(&["3.13", "3.12"]);
|
||||
|
||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||
pyproject_toml.write_str(indoc! { r#"
|
||||
|
@ -4923,22 +4923,25 @@ fn run_repeated() -> Result<()> {
|
|||
Resolved 1 package in [TIME]
|
||||
"###);
|
||||
|
||||
// Re-running as a tool shouldn't require reinstalling `typing-extensions`, since the environment is cached.
|
||||
// Re-running as a tool does require reinstalling `typing-extensions`, since the base venv is
|
||||
// different.
|
||||
uv_snapshot!(
|
||||
context.filters(),
|
||||
context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r###"
|
||||
context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r#"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ typing-extensions==4.10.0
|
||||
Traceback (most recent call last):
|
||||
File "<string>", line 1, in <module>
|
||||
import typing_extensions; import iniconfig
|
||||
^^^^^^^^^^^^^^^^
|
||||
ModuleNotFoundError: No module named 'iniconfig'
|
||||
"###);
|
||||
"#);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -4979,22 +4982,25 @@ fn run_without_overlay() -> Result<()> {
|
|||
+ typing-extensions==4.10.0
|
||||
"###);
|
||||
|
||||
// Import `iniconfig` in the context of a `tool run` command, which should fail.
|
||||
// Import `iniconfig` in the context of a `tool run` command, which should fail. Note that
|
||||
// typing-extensions gets installed again, because the venv is not shared.
|
||||
uv_snapshot!(
|
||||
context.filters(),
|
||||
context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r###"
|
||||
context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r#"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ typing-extensions==4.10.0
|
||||
Traceback (most recent call last):
|
||||
File "<string>", line 1, in <module>
|
||||
import typing_extensions; import iniconfig
|
||||
^^^^^^^^^^^^^^^^
|
||||
ModuleNotFoundError: No module named 'iniconfig'
|
||||
"###);
|
||||
"#);
|
||||
|
||||
// Re-running in the context of the project should reset the overlay.
|
||||
uv_snapshot!(
|
||||
|
|
|
@ -3,13 +3,14 @@ use assert_cmd::prelude::*;
|
|||
use assert_fs::{fixture::ChildPath, prelude::*};
|
||||
use indoc::{formatdoc, indoc};
|
||||
use insta::assert_snapshot;
|
||||
|
||||
use crate::common::{TestContext, download_to_disk, packse_index_url, uv_snapshot, venv_bin_path};
|
||||
use predicates::prelude::predicate;
|
||||
use tempfile::tempdir_in;
|
||||
|
||||
use uv_fs::Simplified;
|
||||
use uv_static::EnvVars;
|
||||
|
||||
use crate::common::{TestContext, download_to_disk, packse_index_url, uv_snapshot, venv_bin_path};
|
||||
|
||||
#[test]
|
||||
fn sync() -> Result<()> {
|
||||
let context = TestContext::new("3.12");
|
||||
|
@ -1121,10 +1122,7 @@ fn sync_build_isolation_package() -> Result<()> {
|
|||
)?;
|
||||
|
||||
// Running `uv sync` should fail for iniconfig.
|
||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
||||
.chain(context.filters())
|
||||
.collect::<Vec<_>>();
|
||||
uv_snapshot!(filters, context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r###"
|
||||
uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
@ -1214,10 +1212,7 @@ fn sync_build_isolation_extra() -> Result<()> {
|
|||
)?;
|
||||
|
||||
// Running `uv sync` should fail for the `compile` extra.
|
||||
let filters = std::iter::once((r"exit code: 1", "exit status: 1"))
|
||||
.chain(context.filters())
|
||||
.collect::<Vec<_>>();
|
||||
uv_snapshot!(&filters, context.sync().arg("--extra").arg("compile"), @r###"
|
||||
uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("compile"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
@ -1238,7 +1233,7 @@ fn sync_build_isolation_extra() -> Result<()> {
|
|||
"###);
|
||||
|
||||
// Running `uv sync` with `--all-extras` should also fail.
|
||||
uv_snapshot!(&filters, context.sync().arg("--all-extras"), @r###"
|
||||
uv_snapshot!(context.filters(), context.sync().arg("--all-extras"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
@ -6984,10 +6979,7 @@ fn sync_derivation_chain() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([
|
||||
(r"exit code: 1", "exit status: 1"),
|
||||
(r"/.*/src", "/[TMP]/src"),
|
||||
])
|
||||
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
uv_snapshot!(filters, context.sync(), @r###"
|
||||
|
@ -7050,10 +7042,7 @@ fn sync_derivation_chain_extra() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([
|
||||
(r"exit code: 1", "exit status: 1"),
|
||||
(r"/.*/src", "/[TMP]/src"),
|
||||
])
|
||||
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
uv_snapshot!(filters, context.sync().arg("--extra").arg("wsgi"), @r###"
|
||||
|
@ -7118,10 +7107,7 @@ fn sync_derivation_chain_group() -> Result<()> {
|
|||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([
|
||||
(r"exit code: 1", "exit status: 1"),
|
||||
(r"/.*/src", "/[TMP]/src"),
|
||||
])
|
||||
.chain([(r"/.*/src", "/[TMP]/src")])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
uv_snapshot!(filters, context.sync().arg("--group").arg("wsgi"), @r###"
|
||||
|
@ -9989,3 +9975,54 @@ fn sync_url_with_query_parameters() -> Result<()> {
|
|||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(unix)]
|
||||
fn read_only() -> Result<()> {
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
|
||||
let context = TestContext::new("3.12");
|
||||
|
||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||
pyproject_toml.write_str(
|
||||
r#"
|
||||
[project]
|
||||
name = "project"
|
||||
version = "0.1.0"
|
||||
requires-python = ">=3.12"
|
||||
dependencies = ["iniconfig"]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.sync(), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 2 packages in [TIME]
|
||||
Prepared 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ iniconfig==2.0.0
|
||||
"###);
|
||||
|
||||
assert!(context.temp_dir.child("uv.lock").exists());
|
||||
|
||||
// Remove the flock.
|
||||
fs_err::remove_file(context.venv.child(".lock"))?;
|
||||
|
||||
// Make the virtual environment read and execute (but not write).
|
||||
fs_err::set_permissions(&context.venv, std::fs::Permissions::from_mode(0o555))?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.sync(), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 2 packages in [TIME]
|
||||
Audited 1 package in [TIME]
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -448,13 +448,13 @@ fn tool_install_suggest_other_packages_with_executable() {
|
|||
uv_snapshot!(filters, context.tool_install()
|
||||
.arg("fastapi==0.111.0")
|
||||
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
|
||||
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
|
||||
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
No executables are provided by `fastapi`
|
||||
However, an executable with the name `fastapi` is available via dependency `fastapi-cli`.
|
||||
Did you mean `uv tool install fastapi-cli`?
|
||||
No executables are provided by package `fastapi`; removing tool
|
||||
hint: An executable with the name `fastapi` is available via dependency `fastapi-cli`.
|
||||
Did you mean `uv tool install fastapi-cli`?
|
||||
|
||||
----- stderr -----
|
||||
Resolved 35 packages in [TIME]
|
||||
|
@ -494,7 +494,7 @@ fn tool_install_suggest_other_packages_with_executable() {
|
|||
+ uvicorn==0.29.0
|
||||
+ watchfiles==0.21.0
|
||||
+ websockets==12.0
|
||||
"###);
|
||||
");
|
||||
}
|
||||
|
||||
/// Test installing a tool at a version
|
||||
|
@ -821,11 +821,11 @@ fn tool_install_remove_on_empty() -> Result<()> {
|
|||
.arg(black.path())
|
||||
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
|
||||
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
|
||||
.env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
|
||||
.env(EnvVars::PATH, bin_dir.as_os_str()), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
No executables are provided by `black`
|
||||
No executables are provided by package `black`; removing tool
|
||||
|
||||
----- stderr -----
|
||||
Resolved 1 package in [TIME]
|
||||
|
@ -839,7 +839,7 @@ fn tool_install_remove_on_empty() -> Result<()> {
|
|||
- packaging==24.0
|
||||
- pathspec==0.12.1
|
||||
- platformdirs==4.2.0
|
||||
"###);
|
||||
");
|
||||
|
||||
// Re-request `black`. It should reinstall, without requiring `--force`.
|
||||
uv_snapshot!(context.filters(), context.tool_install()
|
||||
|
@ -1649,18 +1649,18 @@ fn tool_install_no_entrypoints() {
|
|||
.arg("iniconfig")
|
||||
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
|
||||
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
|
||||
.env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
|
||||
.env(EnvVars::PATH, bin_dir.as_os_str()), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
No executables are provided by `iniconfig`
|
||||
No executables are provided by package `iniconfig`; removing tool
|
||||
|
||||
----- stderr -----
|
||||
Resolved 1 package in [TIME]
|
||||
Prepared 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ iniconfig==2.0.0
|
||||
"###);
|
||||
");
|
||||
|
||||
// Ensure the tool environment is not created.
|
||||
tool_dir
|
||||
|
@ -1682,7 +1682,6 @@ fn tool_install_uninstallable() {
|
|||
.filters()
|
||||
.into_iter()
|
||||
.chain([
|
||||
(r"exit code: 1", "exit status: 1"),
|
||||
(r"bdist\.[^/\\\s]+(-[^/\\\s]+)?", "bdist.linux-x86_64"),
|
||||
(r"\\\.", ""),
|
||||
(r"#+", "#"),
|
||||
|
|
|
@ -1958,3 +1958,57 @@ fn version_set_evil_constraints() -> Result<()> {
|
|||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Bump the version with conflicting extras, to ensure we're activating the correct subset of
|
||||
/// extras during the resolve.
|
||||
#[test]
|
||||
fn version_extras() -> Result<()> {
|
||||
let context = TestContext::new("3.12");
|
||||
|
||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||
pyproject_toml.write_str(
|
||||
r#"
|
||||
[project]
|
||||
name = "myproject"
|
||||
version = "1.10.31"
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[project.optional-dependencies]
|
||||
foo = ["requests"]
|
||||
bar = ["httpx"]
|
||||
baz = ["flask"]
|
||||
|
||||
[tool.uv]
|
||||
conflicts = [[{"extra" = "foo"}, {"extra" = "bar"}]]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.version()
|
||||
.arg("--bump").arg("patch"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
myproject 1.10.31 => 1.10.32
|
||||
|
||||
----- stderr -----
|
||||
Resolved 19 packages in [TIME]
|
||||
Audited in [TIME]
|
||||
");
|
||||
|
||||
// Sync an extra, we should not remove it.
|
||||
context.sync().arg("--extra").arg("foo").assert().success();
|
||||
|
||||
uv_snapshot!(context.filters(), context.version()
|
||||
.arg("--bump").arg("patch"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
myproject 1.10.32 => 1.10.33
|
||||
|
||||
----- stderr -----
|
||||
Resolved 19 packages in [TIME]
|
||||
Audited in [TIME]
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -1351,7 +1351,7 @@ fn workspace_unsatisfiable_member_dependencies() -> Result<()> {
|
|||
leaf.child("src/__init__.py").touch()?;
|
||||
|
||||
// Resolving should fail.
|
||||
uv_snapshot!(context.filters(), context.lock().current_dir(&workspace), @r###"
|
||||
uv_snapshot!(context.filters(), context.lock().current_dir(&workspace), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
@ -1359,9 +1359,9 @@ fn workspace_unsatisfiable_member_dependencies() -> Result<()> {
|
|||
----- stderr -----
|
||||
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
|
||||
× No solution found when resolving dependencies:
|
||||
╰─▶ Because only httpx<=1.0.0b0 is available and leaf depends on httpx>9999, we can conclude that leaf's requirements are unsatisfiable.
|
||||
╰─▶ Because only httpx<=0.27.0 is available and leaf depends on httpx>9999, we can conclude that leaf's requirements are unsatisfiable.
|
||||
And because your workspace requires leaf, we can conclude that your workspace's requirements are unsatisfiable.
|
||||
"###
|
||||
"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
|
@ -2,9 +2,8 @@
|
|||
|
||||
!!! note
|
||||
|
||||
The uv build backend is currently in preview and may change without warning.
|
||||
|
||||
When preview mode is not enabled, uv uses [hatchling](https://pypi.org/project/hatchling/) as the default build backend.
|
||||
Currently, the default build backend for `uv init` is
|
||||
[hatchling](https://pypi.org/project/hatchling/). This will change to `uv` in a future version.
|
||||
|
||||
A build backend transforms a source tree (i.e., a directory) into a source distribution or a wheel.
|
||||
|
||||
|
@ -12,19 +11,32 @@ uv supports all build backends (as specified by [PEP 517](https://peps.python.or
|
|||
also provides a native build backend (`uv_build`) that integrates tightly with uv to improve
|
||||
performance and user experience.
|
||||
|
||||
## Choosing a build backend
|
||||
|
||||
The uv build backend is a great choice for most Python projects. It has reasonable defaults, with
|
||||
the goal of requiring zero configuration for most users, but provides flexible configuration to
|
||||
accommodate most Python project structures. It integrates tightly with uv, to improve messaging and
|
||||
user experience. It validates project metadata and structures, preventing common mistakes. And,
|
||||
finally, it's very fast.
|
||||
|
||||
The uv build backend currently **only supports pure Python code**. An alternative backend is
|
||||
required to build a
|
||||
[library with extension modules](../concepts/projects/init.md#projects-with-extension-modules).
|
||||
|
||||
!!! tip
|
||||
|
||||
While the backend supports a number of options for configuring your project structure, when build scripts or
|
||||
a more flexible project layout are required, consider using the
|
||||
[hatchling](https://hatch.pypa.io/latest/config/build/#build-system) build backend instead.
|
||||
|
||||
## Using the uv build backend
|
||||
|
||||
!!! important
|
||||
|
||||
The uv build backend currently **only supports pure Python code**. An alternative backend is to
|
||||
build a [library with extension modules](../concepts/projects/init.md#projects-with-extension-modules).
|
||||
|
||||
To use uv as a build backend in an existing project, add `uv_build` to the
|
||||
[`[build-system]`](../concepts/projects/config.md#build-systems) section in your `pyproject.toml`:
|
||||
|
||||
```toml title="pyproject.toml"
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.7.18,<0.8.0"]
|
||||
requires = ["uv_build>=0.7.19,<0.8.0"]
|
||||
build-backend = "uv_build"
|
||||
```
|
||||
|
||||
|
|
|
@ -25,7 +25,7 @@ uv provides a standalone installer to download and install uv:
|
|||
Request a specific version by including it in the URL:
|
||||
|
||||
```console
|
||||
$ curl -LsSf https://astral.sh/uv/0.7.18/install.sh | sh
|
||||
$ curl -LsSf https://astral.sh/uv/0.7.19/install.sh | sh
|
||||
```
|
||||
|
||||
=== "Windows"
|
||||
|
@ -41,7 +41,7 @@ uv provides a standalone installer to download and install uv:
|
|||
Request a specific version by including it in the URL:
|
||||
|
||||
```pwsh-session
|
||||
PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.18/install.ps1 | iex"
|
||||
PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.19/install.ps1 | iex"
|
||||
```
|
||||
|
||||
!!! tip
|
||||
|
|
|
@ -92,7 +92,7 @@ the second stage, we'll copy this directory over to the final image, omitting th
|
|||
other unnecessary files.
|
||||
|
||||
```dockerfile title="Dockerfile"
|
||||
FROM ghcr.io/astral-sh/uv:0.7.18 AS uv
|
||||
FROM ghcr.io/astral-sh/uv:0.7.19 AS uv
|
||||
|
||||
# First, bundle the dependencies into the task root.
|
||||
FROM public.ecr.aws/lambda/python:3.13 AS builder
|
||||
|
@ -334,7 +334,7 @@ And confirm that opening http://127.0.0.1:8000/ in a web browser displays, "Hell
|
|||
Finally, we'll update the Dockerfile to include the local library in the deployment package:
|
||||
|
||||
```dockerfile title="Dockerfile"
|
||||
FROM ghcr.io/astral-sh/uv:0.7.18 AS uv
|
||||
FROM ghcr.io/astral-sh/uv:0.7.19 AS uv
|
||||
|
||||
# First, bundle the dependencies into the task root.
|
||||
FROM public.ecr.aws/lambda/python:3.13 AS builder
|
||||
|
|
|
@ -31,7 +31,7 @@ $ docker run --rm -it ghcr.io/astral-sh/uv:debian uv --help
|
|||
The following distroless images are available:
|
||||
|
||||
- `ghcr.io/astral-sh/uv:latest`
|
||||
- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.18`
|
||||
- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.19`
|
||||
- `ghcr.io/astral-sh/uv:{major}.{minor}`, e.g., `ghcr.io/astral-sh/uv:0.7` (the latest patch
|
||||
version)
|
||||
|
||||
|
@ -75,7 +75,7 @@ And the following derived images are available:
|
|||
|
||||
As with the distroless image, each derived image is published with uv version tags as
|
||||
`ghcr.io/astral-sh/uv:{major}.{minor}.{patch}-{base}` and
|
||||
`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.18-alpine`.
|
||||
`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.19-alpine`.
|
||||
|
||||
For more details, see the [GitHub Container](https://github.com/astral-sh/uv/pkgs/container/uv)
|
||||
page.
|
||||
|
@ -113,7 +113,7 @@ Note this requires `curl` to be available.
|
|||
In either case, it is best practice to pin to a specific uv version, e.g., with:
|
||||
|
||||
```dockerfile
|
||||
COPY --from=ghcr.io/astral-sh/uv:0.7.18 /uv /uvx /bin/
|
||||
COPY --from=ghcr.io/astral-sh/uv:0.7.19 /uv /uvx /bin/
|
||||
```
|
||||
|
||||
!!! tip
|
||||
|
@ -131,7 +131,7 @@ COPY --from=ghcr.io/astral-sh/uv:0.7.18 /uv /uvx /bin/
|
|||
Or, with the installer:
|
||||
|
||||
```dockerfile
|
||||
ADD https://astral.sh/uv/0.7.18/install.sh /uv-installer.sh
|
||||
ADD https://astral.sh/uv/0.7.19/install.sh /uv-installer.sh
|
||||
```
|
||||
|
||||
### Installing a project
|
||||
|
@ -557,5 +557,5 @@ Verified OK
|
|||
!!! tip
|
||||
|
||||
These examples use `latest`, but best practice is to verify the attestation for a specific
|
||||
version tag, e.g., `ghcr.io/astral-sh/uv:0.7.18`, or (even better) the specific image digest,
|
||||
version tag, e.g., `ghcr.io/astral-sh/uv:0.7.19`, or (even better) the specific image digest,
|
||||
such as `ghcr.io/astral-sh/uv:0.5.27@sha256:5adf09a5a526f380237408032a9308000d14d5947eafa687ad6c6a2476787b4f`.
|
||||
|
|
|
@ -47,7 +47,7 @@ jobs:
|
|||
uses: astral-sh/setup-uv@v5
|
||||
with:
|
||||
# Install a specific version of uv.
|
||||
version: "0.7.18"
|
||||
version: "0.7.19"
|
||||
```
|
||||
|
||||
## Setting up Python
|
||||
|
|
|
@ -19,7 +19,7 @@ To make sure your `uv.lock` file is up to date even if your `pyproject.toml` fil
|
|||
repos:
|
||||
- repo: https://github.com/astral-sh/uv-pre-commit
|
||||
# uv version.
|
||||
rev: 0.7.18
|
||||
rev: 0.7.19
|
||||
hooks:
|
||||
- id: uv-lock
|
||||
```
|
||||
|
@ -30,7 +30,7 @@ To keep a `requirements.txt` file in sync with your `uv.lock` file:
|
|||
repos:
|
||||
- repo: https://github.com/astral-sh/uv-pre-commit
|
||||
# uv version.
|
||||
rev: 0.7.18
|
||||
rev: 0.7.19
|
||||
hooks:
|
||||
- id: uv-export
|
||||
```
|
||||
|
@ -41,7 +41,7 @@ To compile requirements files:
|
|||
repos:
|
||||
- repo: https://github.com/astral-sh/uv-pre-commit
|
||||
# uv version.
|
||||
rev: 0.7.18
|
||||
rev: 0.7.19
|
||||
hooks:
|
||||
# Compile requirements
|
||||
- id: pip-compile
|
||||
|
@ -54,7 +54,7 @@ To compile alternative requirements files, modify `args` and `files`:
|
|||
repos:
|
||||
- repo: https://github.com/astral-sh/uv-pre-commit
|
||||
# uv version.
|
||||
rev: 0.7.18
|
||||
rev: 0.7.19
|
||||
hooks:
|
||||
# Compile requirements
|
||||
- id: pip-compile
|
||||
|
@ -68,7 +68,7 @@ To run the hook over multiple files at the same time, add additional entries:
|
|||
repos:
|
||||
- repo: https://github.com/astral-sh/uv-pre-commit
|
||||
# uv version.
|
||||
rev: 0.7.18
|
||||
rev: 0.7.19
|
||||
hooks:
|
||||
# Compile requirements
|
||||
- id: pip-compile
|
||||
|
|
14
docs/guides/migration/index.md
Normal file
14
docs/guides/migration/index.md
Normal file
|
@ -0,0 +1,14 @@
|
|||
# Migration guides
|
||||
|
||||
Learn how to migrate from other tools to uv:
|
||||
|
||||
- [Migrate from pip to uv projects](./pip-to-project.md)
|
||||
|
||||
!!! note
|
||||
|
||||
Other guides, such as migrating from another project management tool, or from pip to `uv pip`
|
||||
are not yet available. See [#5200](https://github.com/astral-sh/uv/issues/5200) to track
|
||||
progress.
|
||||
|
||||
Or, explore the [integration guides](../integration/index.md) to learn how to use uv with other
|
||||
software.
|
472
docs/guides/migration/pip-to-project.md
Normal file
472
docs/guides/migration/pip-to-project.md
Normal file
|
@ -0,0 +1,472 @@
|
|||
# Migrating from pip to a uv project
|
||||
|
||||
This guide will discuss converting from a `pip` and `pip-tools` workflow centered on `requirements`
|
||||
files to uv's project workflow using a `pyproject.toml` and `uv.lock` file.
|
||||
|
||||
!!! note
|
||||
|
||||
If you're looking to migrate from `pip` and `pip-tools` to uv's drop-in interface or from an
|
||||
existing workflow where you're already using a `pyproject.toml`, those guides are not yet
|
||||
written. See [#5200](https://github.com/astral-sh/uv/issues/5200) to track progress.
|
||||
|
||||
We'll start with an overview of developing with `pip`, then discuss migrating to uv.
|
||||
|
||||
!!! tip
|
||||
|
||||
If you're familiar with the ecosystem, you can jump ahead to the
|
||||
[requirements file import](#importing-requirements-files) instructions.
|
||||
|
||||
## Understanding pip workflows
|
||||
|
||||
### Project dependencies
|
||||
|
||||
When you want to use a package in your project, you need to install it first. `pip` supports
|
||||
imperative installation of packages, e.g.:
|
||||
|
||||
```console
|
||||
$ pip install fastapi
|
||||
```
|
||||
|
||||
This installs the package into the environment that `pip` is installed in. This may be a virtual
|
||||
environment, or, the global environment of your system's Python installation.
|
||||
|
||||
Then, you can run a Python script that requires the package:
|
||||
|
||||
```python title="example.py"
|
||||
import fastapi
|
||||
```
|
||||
|
||||
It's best practice to create a virtual environment for each project, to avoid mixing packages
|
||||
between them. For example:
|
||||
|
||||
```console
|
||||
$ python -m venv
|
||||
$ source .venv/bin/activate
|
||||
$ pip ...
|
||||
```
|
||||
|
||||
We will revisit this topic in the [project environments section](#project-environments) below.
|
||||
|
||||
### Requirements files
|
||||
|
||||
When sharing projects with others, it's useful to declare all the packages you require upfront.
|
||||
`pip` supports installing requirements from a file, e.g.:
|
||||
|
||||
```python title="requirements.txt"
|
||||
fastapi
|
||||
```
|
||||
|
||||
```console
|
||||
$ pip install -r requirements.txt
|
||||
```
|
||||
|
||||
Notice above that `fastapi` is not "locked" to a specific version — each person working on the
|
||||
project may have a different version of `fastapi` installed. `pip-tools` was created to improve this
|
||||
experience.
|
||||
|
||||
When using `pip-tools`, requirements files specify both the dependencies for your project and lock
|
||||
dependencies to a specific version — the file extension is used to differentiate between the two.
|
||||
For example, if you require `fastapi` and `pydantic`, you'd specify these in a `requirements.in`
|
||||
file:
|
||||
|
||||
```python title="requirements.in"
|
||||
fastapi
|
||||
pydantic>2
|
||||
```
|
||||
|
||||
Notice there's a version constraint on `pydantic` — this means only `pydantic` versions later than
|
||||
`2.0.0` can be used. In contrast, `fastapi` does not have a version constraint — any version can be
|
||||
used.
|
||||
|
||||
These dependencies can be compiled into a `requirements.txt` file:
|
||||
|
||||
```console
|
||||
$ pip-compile requirements.in -o requirements.txt
|
||||
```
|
||||
|
||||
```python title="requirements.txt"
|
||||
annotated-types==0.7.0
|
||||
# via pydantic
|
||||
anyio==4.8.0
|
||||
# via starlette
|
||||
fastapi==0.115.11
|
||||
# via -r requirements.in
|
||||
idna==3.10
|
||||
# via anyio
|
||||
pydantic==2.10.6
|
||||
# via
|
||||
# -r requirements.in
|
||||
# fastapi
|
||||
pydantic-core==2.27.2
|
||||
# via pydantic
|
||||
sniffio==1.3.1
|
||||
# via anyio
|
||||
starlette==0.46.1
|
||||
# via fastapi
|
||||
typing-extensions==4.12.2
|
||||
# via
|
||||
# fastapi
|
||||
# pydantic
|
||||
# pydantic-core
|
||||
```
|
||||
|
||||
Here, all the versions constraints are _exact_. Only a single version of each package can be used.
|
||||
The above example was generated with `uv pip compile`, but could also be generated with
|
||||
`pip-compile` from `pip-tools`.
|
||||
|
||||
Though less common, the `requirements.txt` can also be generated using `pip freeze`, by first
|
||||
installing the input dependencies into the environment then exporting the installed versions:
|
||||
|
||||
```console
|
||||
$ pip install -r requirements.in
|
||||
$ pip freeze > requirements.txt
|
||||
```
|
||||
|
||||
```python title="requirements.txt"
|
||||
annotated-types==0.7.0
|
||||
anyio==4.8.0
|
||||
fastapi==0.115.11
|
||||
idna==3.10
|
||||
pydantic==2.10.6
|
||||
pydantic-core==2.27.2
|
||||
sniffio==1.3.1
|
||||
starlette==0.46.1
|
||||
typing-extensions==4.12.2
|
||||
```
|
||||
|
||||
After compiling dependencies into a locked set of versions, these files are committed to version
|
||||
control and distributed with the project.
|
||||
|
||||
Then, when someone wants to use the project, they install from the requirements file:
|
||||
|
||||
```console
|
||||
$ pip install -r requirements.txt
|
||||
```
|
||||
|
||||
<!--- TODO: Discuss equivalent commands for `uv pip compile` and `pip compile` -->
|
||||
|
||||
### Development dependencies
|
||||
|
||||
The requirements file format can only describe a single set of dependencies at once. This means if
|
||||
you have additional _groups_ of dependencies, such as development dependencies, they need separate
|
||||
files. For example, we'll create a `-dev` dependency file:
|
||||
|
||||
```python title="requirements-dev.in"
|
||||
-r requirements.in
|
||||
-c requirements.txt
|
||||
|
||||
pytest
|
||||
```
|
||||
|
||||
Notice the base requirements are included with `-r requirements.in`. This ensures your development
|
||||
environment considers _all_ of the dependencies together. The `-c requirements.txt` _constrains_ the
|
||||
package versions to ensure that the `requirements-dev.txt` uses the same versions as
|
||||
`requirements.txt`.
|
||||
|
||||
!!! note
|
||||
|
||||
It's common to use `-r requirements.txt` directly instead of using both
|
||||
    `-r requirements.in` and `-c requirements.txt`. There's no difference in the resulting package
|
||||
versions, but using both files produces annotations which allow you to determine which
|
||||
dependencies are _direct_ (annotated with `-r requirements.in`) and which are _indirect_ (only
|
||||
annotated with `-c requirements.txt`).
|
||||
|
||||
The compiled development dependencies look like:
|
||||
|
||||
```python title="requirements-dev.txt"
|
||||
annotated-types==0.7.0
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# pydantic
|
||||
anyio==4.8.0
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# starlette
|
||||
fastapi==0.115.11
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# -r requirements.in
|
||||
idna==3.10
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# anyio
|
||||
iniconfig==2.0.0
|
||||
# via pytest
|
||||
packaging==24.2
|
||||
# via pytest
|
||||
pluggy==1.5.0
|
||||
# via pytest
|
||||
pydantic==2.10.6
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# -r requirements.in
|
||||
# fastapi
|
||||
pydantic-core==2.27.2
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# pydantic
|
||||
pytest==8.3.5
|
||||
# via -r requirements-dev.in
|
||||
sniffio==1.3.1
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# anyio
|
||||
starlette==0.46.1
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# fastapi
|
||||
typing-extensions==4.12.2
|
||||
# via
|
||||
# -c requirements.txt
|
||||
# fastapi
|
||||
# pydantic
|
||||
# pydantic-core
|
||||
```
|
||||
|
||||
As with the base dependency files, these are committed to version control and distributed with the
|
||||
project. When someone wants to work on the project, they'll install from the requirements file:
|
||||
|
||||
```console
|
||||
$ pip install -r requirements-dev.txt
|
||||
```
|
||||
|
||||
### Platform-specific dependencies
|
||||
|
||||
When compiling dependencies with `pip` or `pip-tools`, the result is only usable on the same
|
||||
platform as it is generated on. This poses a problem for projects which need to be usable on
|
||||
multiple platforms, such as Windows and macOS.
|
||||
|
||||
For example, take a simple dependency:
|
||||
|
||||
```python title="requirements.in"
|
||||
tqdm
|
||||
```
|
||||
|
||||
On Linux, this compiles to:
|
||||
|
||||
```python title="requirements-linux.txt"
|
||||
tqdm==4.67.1
|
||||
# via -r requirements.in
|
||||
```
|
||||
|
||||
While on Windows, this compiles to:
|
||||
|
||||
```python title="requirements-win.txt"
|
||||
colorama==0.4.6
|
||||
# via tqdm
|
||||
tqdm==4.67.1
|
||||
# via -r requirements.in
|
||||
```
|
||||
|
||||
`colorama` is a Windows-only dependency of `tqdm`.
|
||||
|
||||
When using `pip` and `pip-tools`, a project needs to declare a requirements lock file for each
|
||||
supported platform.
|
||||
|
||||
!!! note
|
||||
|
||||
uv's resolver can compile dependencies for multiple platforms at once (see ["universal resolution"](../../concepts/resolution.md#universal-resolution)),
|
||||
allowing you to use a single `requirements.txt` for all platforms:
|
||||
|
||||
```console
|
||||
$ uv pip compile --universal requirements.in
|
||||
```
|
||||
|
||||
```python title="requirements.txt"
|
||||
colorama==0.4.6 ; sys_platform == 'win32'
|
||||
# via tqdm
|
||||
tqdm==4.67.1
|
||||
# via -r requirements.in
|
||||
```
|
||||
|
||||
This resolution mode is also used when using a `pyproject.toml` and `uv.lock`.
|
||||
|
||||
## Migrating to a uv project
|
||||
|
||||
### The `pyproject.toml`
|
||||
|
||||
The `pyproject.toml` is a standardized file for Python project metadata. It replaces
|
||||
`requirements.in` files, allowing you to represent arbitrary groups of project dependencies. It also
|
||||
provides a centralized location for metadata about your project, such as the build system or tool
|
||||
settings.
|
||||
|
||||
<!-- TODO: Link to the official docs on this or write more -->
|
||||
|
||||
For example, the `requirements.in` and `requirements-dev.in` files above can be translated to a
|
||||
`pyproject.toml` as follows:
|
||||
|
||||
```toml title="pyproject.toml"
|
||||
[project]
|
||||
name = "example"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"fastapi",
|
||||
"pydantic>2"
|
||||
]
|
||||
|
||||
[dependency-groups]
|
||||
dev = ["pytest"]
|
||||
```
|
||||
|
||||
We'll discuss the commands necessary to automate these imports below.
|
||||
|
||||
### The uv lockfile
|
||||
|
||||
uv uses a lockfile (`uv.lock`) to lock package versions. The format of this file is specific to
|
||||
uv, allowing uv to support advanced features. It replaces `requirements.txt` files.
|
||||
|
||||
The lockfile will be automatically created and populated when adding dependencies, but you can
|
||||
explicitly create it with `uv lock`.
|
||||
|
||||
Unlike `requirements.txt` files, the `uv.lock` file can represent arbitrary groups of dependencies,
|
||||
so multiple files are not needed to lock development dependencies.
|
||||
|
||||
The uv lockfile is always [universal](../../concepts/resolution.md#universal-resolution), so
|
||||
multiple files are not needed to
|
||||
[lock dependencies for each platform](#platform-specific-dependencies). This ensures that all
|
||||
developers are using consistent, locked versions of dependencies regardless of their machine.
|
||||
|
||||
The uv lockfile also supports concepts like
|
||||
[pinning packages to specific indexes](../../concepts/indexes.md#pinning-a-package-to-an-index),
|
||||
which is not representable in `requirements.txt` files.
|
||||
|
||||
!!! tip
|
||||
|
||||
If you only need to lock for a subset of platforms, use the
|
||||
[`tool.uv.environments`](../../concepts/resolution.md#limited-resolution-environments) setting
|
||||
to limit the resolution and lockfile.
|
||||
|
||||
To learn more, see the [lockfile](../../concepts/projects/layout.md#the-lockfile) documentation.
|
||||
|
||||
### Importing requirements files
|
||||
|
||||
First, create a `pyproject.toml` if you have not already:
|
||||
|
||||
```console
|
||||
$ uv init
|
||||
```
|
||||
|
||||
Then, the easiest way to import requirements is with `uv add`:
|
||||
|
||||
```console
|
||||
$ uv add -r requirements.in
|
||||
```
|
||||
|
||||
However, there is some nuance to this transition. Notice we used the `requirements.in` file, which
|
||||
does not pin to exact versions of packages, so uv will solve for new versions of these packages. You
|
||||
may want to continue using your previously locked versions from your `requirements.txt` so, when
|
||||
switching over to uv, none of your dependency versions change.
|
||||
|
||||
The solution is to add your locked versions as _constraints_. uv supports using these on `add` to
|
||||
preserve locked versions:
|
||||
|
||||
```console
|
||||
$ uv add -r requirements.in -c requirements.txt
|
||||
```
|
||||
|
||||
Your existing versions will be retained when producing a `uv.lock` file.
|
||||
|
||||
#### Importing platform-specific constraints
|
||||
|
||||
If your platform-specific dependencies have been compiled into separate files, you can still
|
||||
transition to a universal lockfile. However, you cannot just use `-c` to specify constraints from
|
||||
your existing platform-specific `requirements.txt` files because they do not include markers
|
||||
describing the environment and will consequently conflict.
|
||||
|
||||
To add the necessary markers, use `uv pip compile` to convert your existing files. For example,
|
||||
given the following:
|
||||
|
||||
```python title="requirements-win.txt"
|
||||
colorama==0.4.6
|
||||
# via tqdm
|
||||
tqdm==4.67.1
|
||||
# via -r requirements.in
|
||||
```
|
||||
|
||||
The markers can be added with:
|
||||
|
||||
```console
|
||||
$ uv pip compile requirements.in -o requirements-win.txt --python-platform windows --no-strip-markers
|
||||
```
|
||||
|
||||
Notice the resulting output includes a Windows marker on `colorama`:
|
||||
|
||||
```python title="requirements-win.txt"
|
||||
colorama==0.4.6 ; sys_platform == 'win32'
|
||||
# via tqdm
|
||||
tqdm==4.67.1
|
||||
# via -r requirements.in
|
||||
```
|
||||
|
||||
When using `-o`, uv will constrain the versions to match the existing output file, if it can.
|
||||
|
||||
Markers can be added for other platforms by changing the `--python-platform` and `-o` values for
|
||||
each requirements file you need to import, e.g., to `linux` and `macos`.
|
||||
|
||||
Once each `requirements.txt` file has been transformed, the dependencies can be imported to the
|
||||
`pyproject.toml` and `uv.lock` with `uv add`:
|
||||
|
||||
```console
|
||||
$ uv add -r requirements.in -c requirements-win.txt -c requirements-linux.txt
|
||||
```
|
||||
|
||||
#### Importing development dependency files
|
||||
|
||||
As discussed in the [development dependencies](#development-dependencies) section, it's common to
|
||||
have groups of dependencies for development purposes.
|
||||
|
||||
To import development dependencies, use the `--dev` flag during `uv add`:
|
||||
|
||||
```console
|
||||
$ uv add --dev -r requirements-dev.in -c requirements-dev.txt
|
||||
```
|
||||
|
||||
If the `requirements-dev.in` includes the parent `requirements.in` via `-r`, it will need to be
|
||||
stripped to avoid adding the base requirements to the `dev` dependency group. The following example
|
||||
uses `sed` to strip lines that start with `-r`, then pipes the result to `uv add`:
|
||||
|
||||
```console
|
||||
$ sed '/^-r /d' requirements-dev.in | uv add --dev -r - -c requirements-dev.txt
|
||||
```
|
||||
|
||||
In addition to the `dev` dependency group, uv supports arbitrary group names. For example, if you
|
||||
also have a dedicated set of dependencies for building your documentation, those can be imported to
|
||||
a `docs` group:
|
||||
|
||||
```console
|
||||
$ uv add -r requirements-docs.in -c requirements-docs.txt --group docs
|
||||
```
|
||||
|
||||
### Project environments
|
||||
|
||||
Unlike `pip`, uv is not centered around the concept of an "active" virtual environment. Instead, uv
|
||||
uses a dedicated virtual environment for each project in a `.venv` directory. This environment is
|
||||
automatically managed, so when you run a command, like `uv add`, the environment is synced with the
|
||||
project dependencies.
|
||||
|
||||
The preferred way to execute commands in the environment is with `uv run`, e.g.:
|
||||
|
||||
```console
|
||||
$ uv run pytest
|
||||
```
|
||||
|
||||
Prior to every `uv run` invocation, uv will verify that the lockfile is up-to-date with the
|
||||
`pyproject.toml`, and that the environment is up-to-date with the lockfile, keeping your project
|
||||
in sync without the need for manual intervention. `uv run` guarantees that your command is run in a
|
||||
consistent, locked environment.
|
||||
|
||||
The project environment can also be explicitly created with `uv sync`, e.g., for use with editors.
|
||||
|
||||
!!! note
|
||||
|
||||
When in projects, uv will prefer a `.venv` in the project directory and ignore the active
|
||||
environment as declared by the `VIRTUAL_ENV` variable by default. You can opt-in to using the
|
||||
active environment with the `--active` flag.
|
||||
|
||||
To learn more, see the
|
||||
[project environment](../../concepts/projects/layout.md#the-project-environment) documentation.
|
||||
|
||||
## Next steps
|
||||
|
||||
Now that you've migrated to uv, take a look at the
|
||||
[project concept](../../concepts/projects/index.md) page for more details about uv projects.
|
|
@ -396,10 +396,6 @@ pydantic = { path = "/path/to/pydantic", editable = true }
|
|||
|
||||
Settings for the uv build backend (`uv_build`).
|
||||
|
||||
!!! note
|
||||
|
||||
The uv build backend is currently in preview and may change in any future release.
|
||||
|
||||
Note that those settings only apply when using the `uv_build` backend, other build backends
|
||||
(such as hatchling) have their own configuration.
|
||||
|
||||
|
|
|
@ -174,6 +174,9 @@ nav:
|
|||
- Using tools: guides/tools.md
|
||||
- Working on projects: guides/projects.md
|
||||
- Publishing packages: guides/package.md
|
||||
- Migration:
|
||||
- guides/migration/index.md
|
||||
- From pip to a uv project: guides/migration/pip-to-project.md
|
||||
- Integrations:
|
||||
- guides/integration/index.md
|
||||
- Docker: guides/integration/docker.md
|
||||
|
|
|
@ -4,7 +4,7 @@ build-backend = "maturin"
|
|||
|
||||
[project]
|
||||
name = "uv"
|
||||
version = "0.7.18"
|
||||
version = "0.7.19"
|
||||
description = "An extremely fast Python package and project manager, written in Rust."
|
||||
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
|
||||
requires-python = ">=3.8"
|
||||
|
|
|
@ -56,8 +56,7 @@ DEFAULT_TIMEOUT = 30
|
|||
DEFAULT_PKG_NAME = "astral-registries-test-pkg"
|
||||
|
||||
KNOWN_REGISTRIES = [
|
||||
# TODO(john): Restore this when subscription starts up again
|
||||
# "artifactory",
|
||||
"artifactory",
|
||||
"azure",
|
||||
"aws",
|
||||
"cloudsmith",
|
||||
|
|
2
uv.schema.json
generated
2
uv.schema.json
generated
|
@ -644,7 +644,7 @@
|
|||
]
|
||||
},
|
||||
"BuildBackendSettings": {
|
||||
"description": "Settings for the uv build backend (`uv_build`).\n\n!!! note\n\n The uv build backend is currently in preview and may change in any future release.\n\nNote that those settings only apply when using the `uv_build` backend, other build backends\n(such as hatchling) have their own configuration.\n\nAll options that accept globs use the portable glob patterns from\n[PEP 639](https://packaging.python.org/en/latest/specifications/glob-patterns/).",
|
||||
"description": "Settings for the uv build backend (`uv_build`).\n\nNote that those settings only apply when using the `uv_build` backend, other build backends\n(such as hatchling) have their own configuration.\n\nAll options that accept globs use the portable glob patterns from\n[PEP 639](https://packaging.python.org/en/latest/specifications/glob-patterns/).",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"data": {
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue