Mirror of https://github.com/astral-sh/uv.git (synced 2025-07-07 13:25:00 +00:00)

Compare commits (221 commits)
Commits:

ddb1577a93 d31e6ad7c7 3a77b9cdd9 1d027bd92a bb738aeb44 fc758bb755 1308c85efe f609e1ddaf
eaf517efd8 e8bc3950ef 06af93fce7 8afbd86f03 a1cda6213c 39cdfe9981 85c0fc963b c3f13d2505
38ee6ec800 71b5ba13d7 5f2857a1c7 a58969feef 3bb8ac610c ec54dce919 a6bb65c78d 743260b1f5
2f53ea5c5c a9ea756d14 43f67a4a4c a7aa46acc5 b0db548c80 bf5dcf9929 e40d3d5dff 87e9ccfb92
06df95adbf 29fcd6faee d9f9ed4aec 85358fe9c6 c078683217 c777491bf4 9af3e9b6ec 43745d2ecf
3774a656d7 b1812d111a a3db9a9ae4 c5ca240fb7 7bbdc08dae 5f8d4bbf02 9e9505df50 2f9061dcd0
317ce6e245 1c7c174bc8 0372a5b05d ae500c95d2 5cfabd7085 15551a0201 61482da319 b2979d25a8
e44a64ee13 e9533a0e29 40386e438f a8b838dee9 d7e1fced43 7603153f5b d15efb7d91 17b7eec287
c0ebe6871d 41c218a89b 734b228edf f9d3f8ea3b ec18f4813a 0cfbdcec09 608a1020c6 692667cbb0
db14cc3005 731689e503 b6b7409d13 eab938b7b4 6a5d2f1ec4 4eef79e5e8 f892b8564f 74468dac15
880c5e4949 5754f2f2db a824468c8b 56266447e2 efc361223c 9ee34dc69b 326e4497da 05ab266200
c291d4329a d4d6ede23b 60528e3e25 1ff8fc0947 8c27c2b494 d27cec78b4 1e02008d8b 469246d177
a27e60a22f 4b348512c2 4ed9c5791b 177df19f30 5b2c3595a7 283323a78a ac788d7cde 9fba7a4768
fe11ceedfa 61265b0c14 606633d35f f20659e1ce 093e9d6ff0 19c58c7fbb aa2448ef83 d9351d52fc
e7f5967111 92de53f4eb 2d2dd0c1a3 b06dec8398 6481aa3e64 a9a9e71481 ac1405e06c 3e9dbe8b7d
46221b40c3 a52595b61a 7fce3a88b8 a82c210cab f0407e4b6f b18f45db14 8352560b98 0fef253c4b
e59835d50c 0133bcc8ca 1dbe750452 563e9495ba c710246d76 e9d5780369 62365d4ec8 cc8d5a9215
c3e4b63806 e1046242e7 1fc65a1d9d 75d4cd30d6 611a13c841 ee0ba65eb2 4d9c9a1e76 0cac73dc1f
499c8aa808 2fc922144a 47c522f9be 6c096246d8 8808e67cff c25c800367 10e1d17cfc 3d4f0c934e
d653fbb133 e02cd74e64 cf67d9c633 d73d3e8b53 423cfaabf5 5c1ebf902b cd71ad1672 7c90c5be02
77ec5f9b17 5beeda7cdc 9d0d612131 87827b6d82 67c0f93e37 ba6413f81f a0ea520fe3 4d104dd004
4b5da24fe0 f38e96bddd 59f1b4bee4 96cb90a1fd f0e0ad4d09 3c00d6d7df ce16a0fc9b 15256722d8
ff9c2c35d7 5021840919 26db29caac d9b76b97c2 49b450109b da2eca4e2c 881e17600f b95e66019d
e10881d49c 62ed17b230 7316bd01a3 95ad8e5e82 e067118700 f82909ad68 d9c34259b3 a1f9f28762
806cc5cad9 87ab57e902 b3d7f79770 81aebf921d f530565323 357fc91c3c 4c877b7dc6 21986c3fc9
90a7208a73 f20a25f91f 210b579188 1b82a3ac99 28685633c0 c54f131500 9129d2a9a3 619132bd8a
9ff07c113f dc455bfc26 f5382c010b e67dff85cc dd46985e27 2940122e69 1f2bba9b3d 64dacab913
00b517dbd1 2a66349e96 619a0eafa1 ea45acaf4f 5dae9ac5f2
292 changed files with 19710 additions and 6155 deletions
```diff
@@ -1,4 +1,4 @@
 [profile.default]
 # Mark tests that take longer than 10s as slow.
-# Terminate after 90s as a stop-gap measure to terminate on deadlock.
-slow-timeout = { period = "10s", terminate-after = 9 }
+# Terminate after 120s as a stop-gap measure to terminate on deadlock.
+slow-timeout = { period = "10s", terminate-after = 12 }
```
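For context, nextest's `terminate-after` counts slow-timeout periods, so the before/after comments line up with the values: 9 × 10s = 90s, 12 × 10s = 120s. A minimal sketch of exercising this profile locally (assumes cargo-nextest is installed):

```bash
# Run the workspace suite under the profile above: any test exceeding the 10s
# period is reported as slow, and is killed after 12 periods (12 * 10s = 120s).
cargo nextest run --workspace
```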
.github/workflows/build-binaries.yml: 137 changes (vendored)
```diff
@@ -54,7 +54,7 @@ jobs:
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
       - name: "Build sdist"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           command: sdist
           args: --out dist
@@ -74,7 +74,7 @@ jobs:
 
       # uv-build
       - name: "Build sdist uv-build"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           command: sdist
           args: --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
@@ -103,7 +103,7 @@ jobs:
 
       # uv
       - name: "Build wheels - x86_64"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: x86_64
           args: --release --locked --out dist --features self-update
@@ -133,7 +133,7 @@ jobs:
 
       # uv-build
      - name: "Build wheels uv-build - x86_64"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: x86_64
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
@@ -157,7 +157,7 @@ jobs:
 
       # uv
       - name: "Build wheels - aarch64"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: aarch64
           args: --release --locked --out dist --features self-update
@@ -193,7 +193,7 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build - aarch64"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: aarch64
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
@@ -231,7 +231,7 @@ jobs:
 
       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           args: --release --locked --out dist --features self-update,windows-gui-bin
@@ -267,7 +267,7 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
@@ -303,7 +303,7 @@ jobs:
 
       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.target }}
           # Generally, we try to build in a target docker container. In this case however, a
@@ -368,7 +368,7 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.target }}
           manylinux: auto
@@ -412,7 +412,7 @@ jobs:
 
       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`.
@@ -461,7 +461,7 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`.
@@ -509,7 +509,7 @@ jobs:
 
       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           manylinux: auto
@@ -561,7 +561,7 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           manylinux: auto
@@ -614,7 +614,7 @@ jobs:
 
       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           manylinux: auto
@@ -671,7 +671,7 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           manylinux: auto
@@ -691,6 +691,103 @@ jobs:
           name: wheels_uv_build-${{ matrix.platform.target }}
           path: crates/uv-build/dist
 
+  # Like `linux-arm`.
+  linux-riscv64:
+    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
+    timeout-minutes: 30
+    runs-on: depot-ubuntu-latest-4
+    strategy:
+      matrix:
+        platform:
+          - target: riscv64gc-unknown-linux-gnu
+            arch: riscv64
+
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+        with:
+          python-version: ${{ env.PYTHON_VERSION }}
+      - name: "Prep README.md"
+        run: python scripts/transform_readme.py --target pypi
+
+      # uv
+      - name: "Build wheels"
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
+        with:
+          target: ${{ matrix.platform.target }}
+          manylinux: auto
+          docker-options: ${{ matrix.platform.maturin_docker_options }}
+          args: --release --locked --out dist --features self-update
+      - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
+        name: "Test wheel"
+        with:
+          arch: ${{ matrix.platform.arch }}
+          distro: ubuntu20.04
+          githubToken: ${{ github.token }}
+          install: |
+            apt-get update
+            apt-get install -y --no-install-recommends python3 python3-pip python-is-python3
+            pip3 install -U pip
+          run: |
+            pip install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
+            ${{ env.MODULE_NAME }} --help
+            # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
+            # python -m ${{ env.MODULE_NAME }} --help
+            uvx --help
+      - name: "Upload wheels"
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        with:
+          name: wheels_uv-${{ matrix.platform.target }}
+          path: dist
+      - name: "Archive binary"
+        shell: bash
+        run: |
+          TARGET=${{ matrix.platform.target }}
+          ARCHIVE_NAME=uv-$TARGET
+          ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
+
+          mkdir -p $ARCHIVE_NAME
+          cp target/$TARGET/release/uv $ARCHIVE_NAME/uv
+          cp target/$TARGET/release/uvx $ARCHIVE_NAME/uvx
+          tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
+          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
+      - name: "Upload binary"
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        with:
+          name: artifacts-${{ matrix.platform.target }}
+          path: |
+            *.tar.gz
+            *.sha256
+
+      # uv-build
+      - name: "Build wheels uv-build"
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
+        with:
+          target: ${{ matrix.platform.target }}
+          manylinux: auto
+          docker-options: ${{ matrix.platform.maturin_docker_options }}
+          args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
+      - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
+        name: "Test wheel uv-build"
+        with:
+          arch: ${{ matrix.platform.arch }}
+          distro: ubuntu20.04
+          githubToken: ${{ github.token }}
+          install: |
+            apt-get update
+            apt-get install -y --no-install-recommends python3 python3-pip python-is-python3
+            pip3 install -U pip
+          run: |
+            pip install ${{ env.PACKAGE_NAME }}-build --no-index --find-links crates/uv-build/dist --force-reinstall
+            ${{ env.MODULE_NAME }}-build --help
+            # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
+            # python -m ${{ env.MODULE_NAME }}-build --help
+      - name: "Upload wheels uv-build"
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        with:
+          name: wheels_uv_build-${{ matrix.platform.target }}
+          path: crates/uv-build/dist
+
   musllinux:
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
     runs-on: ubuntu-latest
@@ -710,7 +807,7 @@ jobs:
 
       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.target }}
           manylinux: musllinux_1_1
@@ -757,7 +854,7 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.target }}
           manylinux: musllinux_1_1
@@ -804,7 +901,7 @@ jobs:
 
       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           manylinux: musllinux_1_1
@@ -869,7 +966,7 @@ jobs:
 
       # uv-build
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           manylinux: musllinux_1_1
```
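The "Archive binary" step added above records a checksum next to each tarball via `shasum -a 256`. A hedged sketch of how a consumer of those artifacts might verify and unpack one (the target triple here is the riscv64 one from the matrix; any published target works the same way):

```bash
# Verify the checksum the workflow recorded, then unpack and run the binary.
# `shasum -c` re-hashes the file and compares it against the stored digest.
TARGET=riscv64gc-unknown-linux-gnu
shasum -a 256 -c "uv-$TARGET.tar.gz.sha256"
tar xzf "uv-$TARGET.tar.gz"
"./uv-$TARGET/uv" --version
```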
.github/workflows/build-docker.yml: 380 changes (vendored)
```diff
@@ -1,11 +1,19 @@
-# Build and publish a Docker image.
+# Build and publish Docker images.
 #
-# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
-# artifacts job within `cargo-dist`.
+# Uses Depot for multi-platform builds. Includes both a `uv` base image, which
+# is just the binary in a scratch image, and a set of extra, common images with
+# the uv binary installed.
 #
-# TODO(charlie): Ideally, the publish step would happen as a publish job within `cargo-dist`, but
-# sharing the built image as an artifact between jobs is challenging.
-name: "Build Docker image"
+# Images are built on all runs.
+#
+# On release, assumed to run as a subworkflow of .github/workflows/release.yml;
+# specifically, as a local artifacts job within `cargo-dist`. In this case,
+# images are published based on the `plan`.
+#
+# TODO(charlie): Ideally, the publish step would happen as a publish job within
+# `cargo-dist`, but sharing the built image as an artifact between jobs is
+# challenging.
+name: "Docker images"
 
 on:
   workflow_call:
```
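A usage sketch for the two image families the new header comment describes; the second tag is assumed to be one of the derived `image-mapping` tags published by the extra-images job, so treat both invocations as illustrative rather than guaranteed:

```bash
# The base image is just the uv binary in a scratch image, so the image's
# entrypoint is uv itself; derived images layer /uv and /uvx onto a Python base.
docker run --rm ghcr.io/astral-sh/uv:latest --help
docker run --rm ghcr.io/astral-sh/uv:python3.12-bookworm-slim uv --version
```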
```diff
@@ -29,35 +37,67 @@ on:
       - .github/workflows/build-docker.yml
 
 env:
-  UV_BASE_IMG: ghcr.io/${{ github.repository_owner }}/uv
+  UV_GHCR_IMAGE: ghcr.io/${{ github.repository_owner }}/uv
+  UV_DOCKERHUB_IMAGE: docker.io/astral/uv
 
 jobs:
-  docker-build:
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
-    name: Build Docker image (ghcr.io/astral-sh/uv) for ${{ matrix.platform }}
+  docker-plan:
+    name: plan
     runs-on: ubuntu-latest
+    outputs:
+      login: ${{ steps.plan.outputs.login }}
+      push: ${{ steps.plan.outputs.push }}
+      tag: ${{ steps.plan.outputs.tag }}
+      action: ${{ steps.plan.outputs.action }}
+    steps:
+      - name: Set push variable
+        env:
+          DRY_RUN: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
+          TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag }}
+          IS_LOCAL_PR: ${{ github.event.pull_request.head.repo.full_name == 'astral-sh/uv' }}
+        id: plan
+        run: |
+          if [ "${{ env.DRY_RUN }}" == "false" ]; then
+            echo "login=true" >> "$GITHUB_OUTPUT"
+            echo "push=true" >> "$GITHUB_OUTPUT"
+            echo "tag=${{ env.TAG }}" >> "$GITHUB_OUTPUT"
+            echo "action=build and publish" >> "$GITHUB_OUTPUT"
+          else
+            echo "login=${{ env.IS_LOCAL_PR }}" >> "$GITHUB_OUTPUT"
+            echo "push=false" >> "$GITHUB_OUTPUT"
+            echo "tag=dry-run" >> "$GITHUB_OUTPUT"
+            echo "action=build" >> "$GITHUB_OUTPUT"
+          fi
+
+  docker-publish-base:
+    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
+    name: ${{ needs.docker-plan.outputs.action }} uv
+    needs:
+      - docker-plan
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      id-token: write # for Depot OIDC and GHCR signing
+      packages: write # for GHCR image pushes
+      attestations: write # for GHCR attestations
     environment:
-      name: release
-    strategy:
-      fail-fast: false
-      matrix:
-        platform:
-          - linux/amd64
-          - linux/arm64
+      name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }}
+    outputs:
+      image-tags: ${{ steps.meta.outputs.tags }}
+      image-annotations: ${{ steps.meta.outputs.annotations }}
+      image-digest: ${{ steps.build.outputs.digest }}
+      image-version: ${{ steps.meta.outputs.version }}
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           submodules: recursive
 
-      # Login to DockerHub first, to avoid rate-limiting
+      # Login to DockerHub (when not pushing, it's to avoid rate-limiting)
       - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
-        # PRs from forks don't have access to secrets, disable this step in that case.
-        if: ${{ github.event.pull_request.head.repo.full_name == 'astral-sh/uv' }}
+        if: ${{ needs.docker-plan.outputs.login == 'true' }}
         with:
-          username: astralshbot
-          password: ${{ secrets.DOCKERHUB_TOKEN_RO }}
-
-      - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+          username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }}
+          password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }}
 
       - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
```
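The `plan` step above is the single decision point the rest of the workflow keys off: a release plan with an explicit tag flips everything to publish mode, otherwise the run is a dry run that only logs in (to dodge rate limits) when the PR comes from the main repository. A standalone sketch of the same branching, with shell variables standing in for the `inputs.plan` expressions:

```bash
# Stand-ins for the GitHub expressions: DRY_RUN mirrors
# `inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit`.
DRY_RUN=true
TAG=""
IS_LOCAL_PR=true

if [ "$DRY_RUN" == "false" ]; then
  echo "login=true"; echo "push=true"; echo "tag=$TAG"; echo "action=build and publish"
else
  echo "login=$IS_LOCAL_PR"; echo "push=false"; echo "tag=dry-run"; echo "action=build"
fi
```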
```diff
@@ -65,13 +105,15 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
 
+      - uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5
+
       - name: Check tag consistency
-        if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
+        if: ${{ needs.docker-plan.outputs.push == 'true' }}
         run: |
           version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
-          if [ "${{ fromJson(inputs.plan).announcement_tag }}" != "${version}" ]; then
+          if [ "${{ needs.docker-plan.outputs.tag }}" != "${version}" ]; then
             echo "The input tag does not match the version from pyproject.toml:" >&2
-            echo "${{ fromJson(inputs.plan).announcement_tag }}" >&2
+            echo "${{ needs.docker-plan.outputs.tag }}" >&2
             echo "${version}" >&2
             exit 1
           else
@@ -81,107 +123,50 @@ jobs:
       - name: Extract metadata (tags, labels) for Docker
         id: meta
         uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
+        env:
+          DOCKER_METADATA_ANNOTATIONS_LEVELS: index
         with:
-          images: ${{ env.UV_BASE_IMG }}
+          images: |
+            ${{ env.UV_GHCR_IMAGE }}
+            ${{ env.UV_DOCKERHUB_IMAGE }}
           # Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
           tags: |
-            type=raw,value=dry-run,enable=${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
-            type=pep440,pattern={{ version }},value=${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }},enable=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
+            type=raw,value=dry-run,enable=${{ needs.docker-plan.outputs.push == 'false' }}
+            type=pep440,pattern={{ version }},value=${{ needs.docker-plan.outputs.tag }},enable=${{ needs.docker-plan.outputs.push }}
+            type=pep440,pattern={{ major }}.{{ minor }},value=${{ needs.docker-plan.outputs.tag }},enable=${{ needs.docker-plan.outputs.push }}
 
-      - name: Normalize Platform Pair (replace / with -)
-        run: |
-          platform=${{ matrix.platform }}
-          echo "PLATFORM_TUPLE=${platform//\//-}" >> $GITHUB_ENV
-
-      # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
       - name: Build and push by digest
         id: build
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+        uses: depot/build-push-action@2583627a84956d07561420dcc1d0eb1f2af3fac0 # v1.15.0
         with:
+          project: 7hd4vdzmw5 # astral-sh/uv
           context: .
-          platforms: ${{ matrix.platform }}
-          cache-from: type=gha,scope=uv-${{ env.PLATFORM_TUPLE }}
-          cache-to: type=gha,mode=min,scope=uv-${{ env.PLATFORM_TUPLE }}
+          platforms: linux/amd64,linux/arm64
+          push: ${{ needs.docker-plan.outputs.push }}
+          tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
-          outputs: type=image,name=${{ env.UV_BASE_IMG }},push-by-digest=true,name-canonical=true,push=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
+          # TODO(zanieb): Annotations are not supported by Depot yet and are ignored
+          annotations: ${{ steps.meta.outputs.annotations }}
 
-      - name: Export digests
-        run: |
-          mkdir -p /tmp/digests
-          digest="${{ steps.build.outputs.digest }}"
-          touch "/tmp/digests/${digest#sha256:}"
-
-      - name: Upload digests
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+      - name: Generate artifact attestation for base image
+        if: ${{ needs.docker-plan.outputs.push == 'true' }}
+        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
         with:
-          name: digests-${{ env.PLATFORM_TUPLE }}
-          path: /tmp/digests/*
-          if-no-files-found: error
-          retention-days: 1
-
-  docker-publish:
-    name: Publish Docker image (ghcr.io/astral-sh/uv)
-    runs-on: ubuntu-latest
-    environment:
-      name: release
-    needs:
-      - docker-build
-    if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
-    steps:
-      # Login to DockerHub first, to avoid rate-limiting
-      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
-        with:
-          username: astralshbot
-          password: ${{ secrets.DOCKERHUB_TOKEN_RO }}
-
-      - name: Download digests
-        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
-        with:
-          path: /tmp/digests
-          pattern: digests-*
-          merge-multiple: true
-
-      - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
-
-      - name: Extract metadata (tags, labels) for Docker
-        id: meta
-        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
-        with:
-          images: ${{ env.UV_BASE_IMG }}
-          # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
-          tags: |
-            type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
-            type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
-
-      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
-      - name: Create manifest list and push
-        working-directory: /tmp/digests
-        # The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
-        # The printf will expand the base image with the `<UV_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
-        # The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <UV_BASE_IMG>@sha256:<sha256_1> <UV_BASE_IMG>@sha256:<sha256_2> ...`
-        run: |
-          docker buildx imagetools create \
-            $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-            $(printf '${{ env.UV_BASE_IMG }}@sha256:%s ' *)
+          subject-name: ${{ env.UV_GHCR_IMAGE }}
+          subject-digest: ${{ steps.build.outputs.digest }}
 
   docker-publish-extra:
-    name: Publish additional Docker image based on ${{ matrix.image-mapping }}
+    name: ${{ needs.docker-plan.outputs.action }} ${{ matrix.image-mapping }}
     runs-on: ubuntu-latest
     environment:
-      name: release
+      name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }}
     needs:
-      - docker-publish
-    if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
+      - docker-plan
+      - docker-publish-base
     permissions:
-      packages: write
-      attestations: write # needed to push image attestations to the Github attestation store
-      id-token: write # needed for signing the images with GitHub OIDC Token
+      id-token: write # for Depot OIDC and GHCR signing
+      packages: write # for GHCR image pushes
+      attestations: write # for GHCR attestations
     strategy:
       fail-fast: false
       matrix:
@@ -213,13 +198,12 @@ jobs:
           - python:3.9-slim-bookworm,python3.9-bookworm-slim
           - python:3.8-slim-bookworm,python3.8-bookworm-slim
     steps:
-      # Login to DockerHub first, to avoid rate-limiting
+      # Login to DockerHub (when not pushing, it's to avoid rate-limiting)
       - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        if: ${{ needs.docker-plan.outputs.login == 'true' }}
         with:
-          username: astralshbot
-          password: ${{ secrets.DOCKERHUB_TOKEN_RO }}
-
-      - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+          username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }}
+          password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }}
 
       - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
@@ -227,6 +211,8 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
 
+      - uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5
+
       - name: Generate Dynamic Dockerfile Tags
         shell: bash
         run: |
@@ -238,7 +224,7 @@ jobs:
           # Generate Dockerfile content
           cat <<EOF > Dockerfile
           FROM ${BASE_IMAGE}
-          COPY --from=${{ env.UV_BASE_IMG }}:latest /uv /uvx /usr/local/bin/
+          COPY --from=${{ env.UV_GHCR_IMAGE }}:latest /uv /uvx /usr/local/bin/
           ENTRYPOINT []
           CMD ["/usr/local/bin/uv"]
           EOF
@@ -249,17 +235,14 @@ jobs:
           # Loop through all base tags and append its docker metadata pattern to the list
           # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
           IFS=','; for TAG in ${BASE_TAGS}; do
-            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${{ fromJson(inputs.plan).announcement_tag }}\n"
-            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${{ fromJson(inputs.plan).announcement_tag }}\n"
+            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${{ needs.docker-plan.outputs.tag }}\n"
+            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${{ needs.docker-plan.outputs.tag }}\n"
             TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n"
           done
 
           # Remove the trailing newline from the pattern list
           TAG_PATTERNS="${TAG_PATTERNS%\\n}"
 
-          # Export image cache name
-          echo "IMAGE_REF=${BASE_IMAGE//:/-}" >> $GITHUB_ENV
-
           # Export tag patterns using the multiline env var syntax
           {
             echo "TAG_PATTERNS<<EOF"
@@ -274,7 +257,9 @@ jobs:
         env:
           DOCKER_METADATA_ANNOTATIONS_LEVELS: index
         with:
-          images: ${{ env.UV_BASE_IMG }}
+          images: |
+            ${{ env.UV_GHCR_IMAGE }}
+            ${{ env.UV_DOCKERHUB_IMAGE }}
           flavor: |
             latest=false
           tags: |
@@ -282,67 +267,84 @@ jobs:
 
       - name: Build and push
         id: build-and-push
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+        uses: depot/build-push-action@2583627a84956d07561420dcc1d0eb1f2af3fac0 # v1.15.0
         with:
           context: .
+          project: 7hd4vdzmw5 # astral-sh/uv
           platforms: linux/amd64,linux/arm64
-          # We do not really need to cache here as the Dockerfile is tiny
-          #cache-from: type=gha,scope=uv-${{ env.IMAGE_REF }}
-          #cache-to: type=gha,mode=min,scope=uv-${{ env.IMAGE_REF }}
-          push: true
+          push: ${{ needs.docker-plan.outputs.push }}
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
+          # TODO(zanieb): Annotations are not supported by Depot yet and are ignored
+          annotations: ${{ steps.meta.outputs.annotations }}
 
       - name: Generate artifact attestation
-        uses: actions/attest-build-provenance@db473fddc028af60658334401dc6fa3ffd8669fd # v2.3.0
+        if: ${{ needs.docker-plan.outputs.push == 'true' }}
+        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
         with:
-          subject-name: ${{ env.UV_BASE_IMG }}
+          subject-name: ${{ env.UV_GHCR_IMAGE }}
           subject-digest: ${{ steps.build-and-push.outputs.digest }}
           # push-to-registry is explicitly not enabled to maintain full control over the top image
 
-  # This is effectively a duplicate of `docker-publish` to make https://github.com/astral-sh/uv/pkgs/container/uv
-  # show the uv base image first since GitHub always shows the last updated image digests
-  # This works by annotating the original digests (previously non-annotated) which triggers an update to ghcr.io
-  docker-republish:
-    name: Annotate Docker image (ghcr.io/astral-sh/uv)
+      # Push annotations manually.
+      # See `docker-annotate-base` for details.
+      - name: Add annotations to images
+        if: ${{ needs.docker-plan.outputs.push == 'true' }}
+        env:
+          IMAGES: "${{ env.UV_GHCR_IMAGE }} ${{ env.UV_DOCKERHUB_IMAGE }}"
+          DIGEST: ${{ steps.build-and-push.outputs.digest }}
+          TAGS: ${{ steps.meta.outputs.tags }}
+          ANNOTATIONS: ${{ steps.meta.outputs.annotations }}
+        run: |
+          set -x
+          readarray -t lines <<< "$ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
+          for image in $IMAGES; do
+            readarray -t lines < <(grep "^${image}:" <<< "$TAGS"); tags=(); for line in "${lines[@]}"; do tags+=(-t "$line"); done
+            docker buildx imagetools create \
+              "${annotations[@]}" \
+              "${tags[@]}" \
+              "${image}@${DIGEST}"
+          done
+
+      # See `docker-annotate-base` for details.
+      - name: Export manifest digest
+        id: manifest-digest
+        if: ${{ needs.docker-plan.outputs.push == 'true' }}
+        env:
+          IMAGE: ${{ env.UV_GHCR_IMAGE }}
+          VERSION: ${{ steps.meta.outputs.version }}
+        run: |
+          digest="$(
+            docker buildx imagetools inspect \
+              "${IMAGE}:${VERSION}" \
+              --format '{{json .Manifest}}' \
+              | jq -r '.digest'
+          )"
+          echo "digest=${digest}" >> "$GITHUB_OUTPUT"
+
+      # See `docker-annotate-base` for details.
+      - name: Generate artifact attestation
+        if: ${{ needs.docker-plan.outputs.push == 'true' }}
+        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
+        with:
+          subject-name: ${{ env.UV_GHCR_IMAGE }}
+          subject-digest: ${{ steps.manifest-digest.outputs.digest }}
+
+  # Annotate the base image
+  docker-annotate-base:
+    name: annotate uv
     runs-on: ubuntu-latest
     environment:
-      name: release
+      name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }}
     needs:
-      - docker-publish
-    if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
+      - docker-plan
+      - docker-publish-base
+      - docker-publish-extra
     permissions:
       packages: write
       attestations: write # needed to push image attestations to the Github attestation store
       id-token: write # needed for signing the images with GitHub OIDC Token
+    if: ${{ needs.docker-plan.outputs.push == 'true' }}
     steps:
       # Login to DockerHub first, to avoid rate-limiting
       - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
-          username: astralshbot
-          password: ${{ secrets.DOCKERHUB_TOKEN_RO }}
-
-      - name: Download digests
-        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
-        with:
-          path: /tmp/digests
-          pattern: digests-*
-          merge-multiple: true
-
-      - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
-
-      - name: Extract metadata (tags, labels) for Docker
-        id: meta
-        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
-        env:
-          DOCKER_METADATA_ANNOTATIONS_LEVELS: index
-        with:
-          images: ${{ env.UV_BASE_IMG }}
-          # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
-          tags: |
-            type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
-            type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
+          username: astral
+          password: ${{ secrets.DOCKERHUB_TOKEN_RW }}
 
       - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
```
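The new `Add annotations to images` body is easiest to read in isolation. A side-effect-free sketch with made-up tag and annotation values (the final `echo` only prints the command the workflow would run); each input line becomes one quoted `--annotation`/`-t` argument, which is why `readarray` is used instead of plain word splitting:

```bash
# Fake docker/metadata-action outputs: one entry per line, values may contain spaces.
ANNOTATIONS=$'index:org.opencontainers.image.description=uv example\nindex:org.opencontainers.image.version=1.0.0'
TAGS=$'ghcr.io/example/uv:1.0.0\nghcr.io/example/uv:latest'
DIGEST="sha256:0000000000000000000000000000000000000000000000000000000000000000"

# Expand each line into a separate, quoted CLI argument.
readarray -t lines <<< "$ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
readarray -t lines <<< "$TAGS"; tags=(); for line in "${lines[@]}"; do tags+=(-t "$line"); done

# Print the imagetools invocation the workflow would execute for this image.
echo docker buildx imagetools create "${annotations[@]}" "${tags[@]}" "ghcr.io/example/uv@${DIGEST}"
```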
```diff
@@ -350,22 +352,37 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
 
-      # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
-      - name: Create manifest list and push
-        working-directory: /tmp/digests
+      # Depot doesn't support annotating images, so we need to do so manually
+      # afterwards. Mutating the manifest is desirable regardless, because we
+      # want to bump the base image to appear at the top of the list on GHCR.
+      # However, once annotation support is added to Depot, this step can be
+      # minimized to just touch the GHCR manifest.
+      - name: Add annotations to images
+        env:
+          IMAGES: "${{ env.UV_GHCR_IMAGE }} ${{ env.UV_DOCKERHUB_IMAGE }}"
+          DIGEST: ${{ needs.docker-publish-base.outputs.image-digest }}
+          TAGS: ${{ needs.docker-publish-base.outputs.image-tags }}
+          ANNOTATIONS: ${{ needs.docker-publish-base.outputs.image-annotations }}
         # The readarray part is used to make sure the quoting and special characters are preserved on expansion (e.g. spaces)
-        # The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
-        # The printf will expand the base image with the `<UV_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
-        # The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <UV_BASE_IMG>@sha256:<sha256_1> <UV_BASE_IMG>@sha256:<sha256_2> ...`
+        # The final command becomes `docker buildx imagetools create --annotation 'index:foo=1' --annotation 'index:bar=2' ... -t tag1 -t tag2 ... <IMG>@sha256:<sha256>`
         run: |
-          readarray -t lines <<< "$DOCKER_METADATA_OUTPUT_ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
-          docker buildx imagetools create \
-            "${annotations[@]}" \
-            $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-            $(printf '${{ env.UV_BASE_IMG }}@sha256:%s ' *)
+          set -x
+          readarray -t lines <<< "$ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
+          for image in $IMAGES; do
+            readarray -t lines < <(grep "^${image}:" <<< "$TAGS"); tags=(); for line in "${lines[@]}"; do tags+=(-t "$line"); done
+            docker buildx imagetools create \
+              "${annotations[@]}" \
+              "${tags[@]}" \
+              "${image}@${DIGEST}"
+          done
 
-      - name: Share manifest digest
+      # Now that we've modified the manifest, we need to attest it again.
+      # Note we only generate an attestation for GHCR.
+      - name: Export manifest digest
         id: manifest-digest
+        env:
+          IMAGE: ${{ env.UV_GHCR_IMAGE }}
+          VERSION: ${{ needs.docker-publish-base.outputs.image-version }}
         # To sign the manifest, we need its digest. Unfortunately "docker
         # buildx imagetools create" does not (yet) have a clean way of sharing
         # the digest of the manifest it creates (see docker/buildx#2407), so
@@ -377,15 +394,14 @@ jobs:
         run: |
           digest="$(
             docker buildx imagetools inspect \
-              "${UV_BASE_IMG}:${DOCKER_METADATA_OUTPUT_VERSION}" \
+              "${IMAGE}:${VERSION}" \
               --format '{{json .Manifest}}' \
               | jq -r '.digest'
           )"
           echo "digest=${digest}" >> "$GITHUB_OUTPUT"
 
       - name: Generate artifact attestation
-        uses: actions/attest-build-provenance@db473fddc028af60658334401dc6fa3ffd8669fd # v2.3.0
+        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
         with:
-          subject-name: ${{ env.UV_BASE_IMG }}
+          subject-name: ${{ env.UV_GHCR_IMAGE }}
           subject-digest: ${{ steps.manifest-digest.outputs.digest }}
           # push-to-registry is explicitly not enabled to maintain full control over the top image
```
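The digest exported above is the digest of the manifest list that `imagetools create` just pushed, which is the subject the attestation must be bound to. The same lookup can be run by hand against any pushed multi-arch tag; the trailing comment is an illustrative placeholder, not output captured from a real run:

```bash
# Inspect the manifest list for a tag and extract its digest; this is the
# value the step above writes to $GITHUB_OUTPUT as `digest=`.
docker buildx imagetools inspect ghcr.io/astral-sh/uv:latest \
  --format '{{json .Manifest}}' | jq -r '.digest'
# -> sha256:... (the manifest-list digest for that tag)
```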
.github/workflows/ci.yml: 379 changes (vendored)
@ -14,8 +14,9 @@ env:
|
|||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
PYTHON_VERSION: "3.12"
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
RUST_BACKTRACE: 1
|
||||
|
||||
jobs:
|
||||
determine_changes:
|
||||
|
@ -81,7 +82,7 @@ jobs:
|
|||
run: rustup component add rustfmt
|
||||
|
||||
- name: "Install uv"
|
||||
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
|
||||
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
|
||||
- name: "rustfmt"
|
||||
run: cargo fmt --all --check
|
||||
|
@ -125,11 +126,11 @@ jobs:
|
|||
name: "cargo clippy | ubuntu"
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||
with:
|
||||
save-if: ${{ github.ref == 'refs/heads/main' }}
|
||||
- name: "Check uv_build dependencies"
|
||||
uses: EmbarkStudios/cargo-deny-action@34899fc7ba81ca6268d5947a7a16b4649013fea1 # v2.0.11
|
||||
uses: EmbarkStudios/cargo-deny-action@30f817c6f72275c6d54dc744fbca09ebc958599f # v2.0.12
|
||||
with:
|
||||
command: check bans
|
||||
manifest-path: crates/uv-build/Cargo.toml
|
||||
|
@ -155,7 +156,7 @@ jobs:
|
|||
run: |
|
||||
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse
|
||||
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||
with:
|
||||
workspaces: ${{ env.UV_WORKSPACE }}
|
||||
|
||||
|
@ -174,7 +175,7 @@ jobs:
|
|||
name: "cargo dev generate-all"
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||
with:
|
||||
save-if: ${{ github.ref == 'refs/heads/main' }}
|
||||
- name: "Generate all"
|
||||
|
@ -187,7 +188,7 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- name: "Install cargo shear"
|
||||
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
|
||||
uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
|
||||
with:
|
||||
tool: cargo-shear
|
||||
- run: cargo shear
|
||||
|
@ -207,17 +208,17 @@ jobs:
|
|||
|
||||
- uses: rui314/setup-mold@v1
|
||||
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
|
||||
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
|
||||
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
- name: "Install required Python versions"
|
||||
run: uv python install
|
||||
|
||||
- name: "Install cargo nextest"
|
||||
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
|
||||
uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
|
||||
|
@ -239,17 +240,17 @@ jobs:
|
|||
|
||||
- uses: rui314/setup-mold@v1
|
||||
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
|
||||
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
|
||||
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
- name: "Install required Python versions"
|
||||
run: uv python install
|
||||
|
||||
- name: "Install cargo nextest"
|
||||
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
|
||||
uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
|
||||
|
@ -265,7 +266,7 @@ jobs:
|
|||
timeout-minutes: 15
|
||||
needs: determine_changes
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||
runs-on: github-windows-2025-x86_64-16
|
||||
runs-on: depot-windows-2022-16
|
||||
name: "cargo test | windows"
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
@ -278,11 +279,11 @@ jobs:
|
|||
run: |
|
||||
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse
|
||||
|
||||
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
|
||||
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
- name: "Install required Python versions"
|
||||
run: uv python install
|
||||
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||
with:
|
||||
workspaces: ${{ env.UV_WORKSPACE }}
|
||||
|
||||
|
@ -291,27 +292,11 @@ jobs:
|
|||
run: rustup show
|
||||
|
||||
- name: "Install cargo nextest"
|
||||
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
|
||||
uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
|
||||
# Get crash dumps to debug the `exit_code: -1073741819` failures
|
||||
- name: Configure crash dumps
|
||||
if: runner.os == 'Windows'
|
||||
shell: powershell
|
||||
run: |
|
||||
$dumps = "$env:GITHUB_WORKSPACE\dumps"
|
||||
New-Item -Path $dumps -ItemType Directory -Force
|
||||
|
||||
# https://github.com/microsoft/terminal/wiki/Troubleshooting-Tips#capture-automatically
|
||||
$reg = "HKLM:\SOFTWARE\Microsoft\Windows\Windows Error Reporting\LocalDumps"
|
||||
New-Item -Path $reg -Force | Out-Null
|
||||
Set-ItemProperty -Path $reg -Name "DumpFolder" -Value $dumps
|
||||
Set-ItemProperty -Path $reg -Name "DumpType" -Value 2
|
||||
|
||||
- name: "Cargo test"
|
||||
id: test
|
||||
continue-on-error: true
|
||||
working-directory: ${{ env.UV_WORKSPACE }}
|
||||
env:
|
||||
# Avoid permission errors during concurrent tests
|
||||
|
@ -325,42 +310,6 @@ jobs:
|
|||
--workspace \
|
||||
--status-level skip --failure-output immediate-final --no-fail-fast -j 20 --final-status-level slow
|
||||
|
||||
# Get crash dumps to debug the `exit_code: -1073741819` failures (contd.)
|
||||
- name: Analyze crashes
|
||||
if: steps.test.outcome == 'failure'
|
||||
shell: powershell
|
||||
run: |
|
||||
$dumps = Get-ChildItem "$env:GITHUB_WORKSPACE\dumps\*.dmp" -ErrorAction SilentlyContinue
|
||||
if (!$dumps) { exit 0 }
|
||||
|
||||
Write-Host "Found $($dumps.Count) crash dump(s)"
|
||||
|
||||
# Download cdb if needed
|
||||
$cdb = "C:\Program Files (x86)\Windows Kits\10\Debuggers\x64\cdb.exe"
|
||||
if (!(Test-Path $cdb)) {
|
||||
# https://github.com/microsoft/react-native-windows/blob/f1570a5ef1c4fc1e78d0a0ad5af848ab91a4061c/vnext/Scripts/Analyze-Crash.ps1#L44-L56
|
||||
Invoke-WebRequest "https://go.microsoft.com/fwlink/?linkid=2173743" -OutFile "$env:TEMP\sdk.exe"
|
||||
Start-Process "$env:TEMP\sdk.exe" -ArgumentList "/features OptionId.WindowsDesktopDebuggers /quiet" -Wait
|
||||
}
|
||||
|
||||
# Analyze each dump
|
||||
foreach ($dump in $dumps) {
|
||||
Write-Host "`n=== $($dump.Name) ==="
|
||||
& $cdb -z $dump -c "!analyze -v; .ecxr; k; q" 2>&1 | Select-String -Pattern "(ExceptionCode:|SYMBOL_NAME:|IMAGE_NAME:|STACK_TEXT:)" -Context 0,2
|
||||
}
|
||||
|
||||
- name: Upload crash dumps
|
||||
if: steps.test.outcome == 'failure'
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: crash-dumps-${{ github.run_number }}
|
||||
path: dumps/*.dmp
|
||||
if-no-files-found: ignore
|
||||
|
||||
- name: Fail if tests failed
|
||||
if: steps.test.outcome == 'failure'
|
||||
run: exit 1
|
||||
|
||||
# Separate jobs for the nightly crate
|
||||
windows-trampoline-check:
|
||||
timeout-minutes: 15
|
||||
|
@ -383,7 +332,7 @@ jobs:
|
|||
run: |
|
||||
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse
|
||||
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||
with:
|
||||
workspaces: ${{ env.UV_WORKSPACE }}/crates/uv-trampoline
|
||||
|
||||
|
@ -394,7 +343,7 @@ jobs:
|
|||
rustup component add rust-src --target ${{ matrix.target-arch }}-pc-windows-msvc
|
||||
|
||||
- name: "Install cargo-bloat"
|
||||
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
|
||||
uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
|
||||
with:
|
||||
tool: cargo-bloat
|
||||
|
||||
|
@ -439,7 +388,7 @@ jobs:
|
|||
- name: Copy Git Repo to Dev Drive
|
||||
run: |
|
||||
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse
|
||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||
with:
|
||||
workspaces: ${{ env.UV_WORKSPACE }}/crates/uv-trampoline
|
||||
- name: "Install Rust toolchain"
|
||||
|
@ -481,7 +430,7 @@ jobs:
|
|||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
|
||||
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
|
@ -494,7 +443,7 @@ jobs:
|
|||
|
||||
- name: "Build docs (insiders)"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: uvx --with-requirements docs/requirements.txt mkdocs build --strict -f mkdocs.insiders.yml
|
||||
run: uvx --with-requirements docs/requirements-insiders.txt mkdocs build --strict -f mkdocs.insiders.yml

build-binary-linux-libc:
timeout-minutes: 10

@@ -507,7 +456,7 @@ jobs:
- uses: rui314/setup-mold@v1

- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

- name: "Build"
run: cargo build

@@ -521,6 +470,31 @@ jobs:
./target/debug/uvx
retention-days: 1

build-binary-linux-aarch64:
timeout-minutes: 10
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
runs-on: github-ubuntu-24.04-aarch64-4
name: "build binary | linux aarch64"
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- uses: rui314/setup-mold@v1

- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

- name: "Build"
run: cargo build

- name: "Upload binary"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: uv-linux-aarch64-${{ github.sha }}
path: |
./target/debug/uv
./target/debug/uvx
retention-days: 1

build-binary-linux-musl:
timeout-minutes: 10
needs: determine_changes

@@ -537,7 +511,7 @@ jobs:
sudo apt-get install musl-tools
rustup target add x86_64-unknown-linux-musl

- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

- name: "Build"
run: cargo build --target x86_64-unknown-linux-musl --bin uv --bin uvx

@@ -562,7 +536,7 @@ jobs:
- uses: rui314/setup-mold@v1

- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- name: "Build"
run: cargo build --bin uv --bin uvx

@@ -586,7 +560,7 @@ jobs:
- uses: rui314/setup-mold@v1

- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- name: "Build"
run: cargo build --bin uv --bin uvx

@@ -616,7 +590,7 @@ jobs:
run: |
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse

- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
workspaces: ${{ env.UV_WORKSPACE }}

@@ -651,7 +625,7 @@ jobs:
run: |
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse

- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
with:
workspaces: ${{ env.UV_WORKSPACE }}

@@ -688,7 +662,7 @@ jobs:
run: rustup default ${{ steps.msrv.outputs.value }}
- name: "Install mold"
uses: rui314/setup-mold@v1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- run: cargo +${{ steps.msrv.outputs.value }} build
- run: ./target/debug/uv --version

@@ -701,7 +675,7 @@ jobs:
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- name: "Cross build"
run: |
# Install cross from `freebsd-firecracker`

@@ -712,7 +686,7 @@ jobs:
cross build --target x86_64-unknown-freebsd

- name: Test in Firecracker VM
uses: acj/freebsd-firecracker-action@5b4c9938e8b5ff1041c58e21515909a0e1500d59 # v0.4.1
uses: acj/freebsd-firecracker-action@136ca0bce2adade21e526ceb07db643ad23dd2dd # v0.5.1
with:
verbose: false
checkout: false

@@ -821,6 +795,33 @@ jobs:
eval "$(./uv generate-shell-completion bash)"
eval "$(./uvx --generate-shell-completion bash)"
smoke-test-linux-aarch64:
timeout-minutes: 10
needs: build-binary-linux-aarch64
name: "smoke test | linux aarch64"
runs-on: github-ubuntu-24.04-aarch64-2
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: "Download binary"
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: uv-linux-aarch64-${{ github.sha }}

- name: "Prepare binary"
run: |
chmod +x ./uv
chmod +x ./uvx

- name: "Smoke test"
run: |
./uv run scripts/smoke-test

- name: "Test shell completions"
run: |
eval "$(./uv generate-shell-completion bash)"
eval "$(./uvx --generate-shell-completion bash)"

smoke-test-linux-musl:
timeout-minutes: 10
needs: build-binary-linux-musl
@@ -903,7 +904,7 @@ jobs:
timeout-minutes: 10
needs: build-binary-windows-aarch64
name: "smoke test | windows aarch64"
runs-on: github-windows-11-aarch64-4
runs-on: windows-11-arm
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

@@ -934,7 +935,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: conda-incubator/setup-miniconda@505e6394dae86d6a5c7fbb6e3fb8938e3e863830 # v3.1.1
- uses: conda-incubator/setup-miniconda@835234971496cad1653abb28a638a281cf32541f # v3.2.0
with:
miniconda-version: latest
activate-environment: uv

@@ -1051,6 +1052,96 @@ jobs:
./uv run python -c ""
./uv run -p 3.13t python -c ""
integration-test-windows-aarch64-implicit:
timeout-minutes: 10
needs: build-binary-windows-aarch64
name: "integration test | aarch64 windows implicit"
runs-on: windows-11-arm

steps:
- name: "Download binary"
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: uv-windows-aarch64-${{ github.sha }}

- name: "Install Python via uv (implicitly select x64)"
run: |
./uv python install -v 3.13

- name: "Create a virtual environment (stdlib)"
run: |
& (./uv python find 3.13) -m venv .venv

- name: "Check version (stdlib)"
run: |
.venv/Scripts/python --version

- name: "Create a virtual environment (uv)"
run: |
./uv venv -p 3.13 --managed-python

- name: "Check version (uv)"
run: |
.venv/Scripts/python --version

- name: "Check is x64"
run: |
.venv/Scripts/python -c "import sys; exit(1) if 'AMD64' not in sys.version else exit(0)"

- name: "Check install"
run: |
./uv pip install -v anyio

- name: "Check uv run"
run: |
./uv run python -c ""
./uv run -p 3.13 python -c ""

integration-test-windows-aarch64-explicit:
timeout-minutes: 10
needs: build-binary-windows-aarch64
name: "integration test | aarch64 windows explicit"
runs-on: windows-11-arm

steps:
- name: "Download binary"
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: uv-windows-aarch64-${{ github.sha }}

- name: "Install Python via uv (explicitly select aarch64)"
run: |
./uv python install -v cpython-3.13-windows-aarch64-none

- name: "Create a virtual environment (stdlib)"
run: |
& (./uv python find 3.13) -m venv .venv

- name: "Check version (stdlib)"
run: |
.venv/Scripts/python --version

- name: "Create a virtual environment (uv)"
run: |
./uv venv -p 3.13 --managed-python

- name: "Check version (uv)"
run: |
.venv/Scripts/python --version

- name: "Check is NOT x64"
run: |
.venv/Scripts/python -c "import sys; exit(1) if 'AMD64' in sys.version else exit(0)"

- name: "Check install"
run: |
./uv pip install -v anyio

- name: "Check uv run"
run: |
./uv run python -c ""
./uv run -p 3.13 python -c ""

integration-test-pypy-linux:
timeout-minutes: 10
needs: build-binary-linux-libc
@@ -1467,6 +1558,90 @@ jobs:
done <<< "${CHANGED_FILES}"
echo "code_any_changed=${CODE_CHANGED}" >> "${GITHUB_OUTPUT}"

integration-test-registries:
timeout-minutes: 10
needs: build-binary-linux-libc
name: "integration test | registries"
runs-on: ubuntu-latest
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event.pull_request.head.repo.fork != true }}
environment: uv-test-registries
env:
PYTHON_VERSION: 3.12
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0

- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: "${{ env.PYTHON_VERSION }}"

- name: "Download binary"
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: uv-linux-libc-${{ github.sha }}

- name: "Prepare binary"
run: chmod +x ./uv

- name: "Configure AWS credentials"
uses: aws-actions/configure-aws-credentials@f503a1870408dcf2c35d5c2b8a68e69211042c7d
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1

- name: "Get AWS CodeArtifact token"
run: |
UV_TEST_AWS_TOKEN=$(aws codeartifact get-authorization-token \
--domain tests \
--domain-owner ${{ secrets.AWS_ACCOUNT_ID }} \
--region us-east-1 \
--query authorizationToken \
--output text)
echo "::add-mask::$UV_TEST_AWS_TOKEN"
echo "UV_TEST_AWS_TOKEN=$UV_TEST_AWS_TOKEN" >> $GITHUB_ENV

- name: "Authenticate with GCP"
id: "auth"
uses: "google-github-actions/auth@0920706a19e9d22c3d0da43d1db5939c6ad837a8"
with:
credentials_json: "${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}"

- name: "Set up GCP SDK"
uses: "google-github-actions/setup-gcloud@a8b58010a5b2a061afd605f50e88629c9ec7536b"

- name: "Get GCP Artifact Registry token"
id: get_token
run: |
UV_TEST_GCP_TOKEN=$(gcloud auth print-access-token)
echo "::add-mask::$UV_TEST_GCP_TOKEN"
echo "UV_TEST_GCP_TOKEN=$UV_TEST_GCP_TOKEN" >> $GITHUB_ENV

- name: "Run registry tests"
run: ./uv run -p ${{ env.PYTHON_VERSION }} scripts/registries-test.py --uv ./uv --color always --all
env:
RUST_LOG: uv=debug
UV_TEST_ARTIFACTORY_TOKEN: ${{ secrets.UV_TEST_ARTIFACTORY_TOKEN }}
UV_TEST_ARTIFACTORY_URL: ${{ secrets.UV_TEST_ARTIFACTORY_URL }}
UV_TEST_ARTIFACTORY_USERNAME: ${{ secrets.UV_TEST_ARTIFACTORY_USERNAME }}
UV_TEST_AWS_URL: ${{ secrets.UV_TEST_AWS_URL }}
UV_TEST_AWS_USERNAME: aws
UV_TEST_AZURE_TOKEN: ${{ secrets.UV_TEST_AZURE_TOKEN }}
UV_TEST_AZURE_URL: ${{ secrets.UV_TEST_AZURE_URL }}
UV_TEST_AZURE_USERNAME: dummy
UV_TEST_CLOUDSMITH_TOKEN: ${{ secrets.UV_TEST_CLOUDSMITH_TOKEN }}
UV_TEST_CLOUDSMITH_URL: ${{ secrets.UV_TEST_CLOUDSMITH_URL }}
UV_TEST_CLOUDSMITH_USERNAME: ${{ secrets.UV_TEST_CLOUDSMITH_USERNAME }}
UV_TEST_GCP_URL: ${{ secrets.UV_TEST_GCP_URL }}
UV_TEST_GCP_USERNAME: oauth2accesstoken
UV_TEST_GEMFURY_TOKEN: ${{ secrets.UV_TEST_GEMFURY_TOKEN }}
UV_TEST_GEMFURY_URL: ${{ secrets.UV_TEST_GEMFURY_URL }}
UV_TEST_GEMFURY_USERNAME: ${{ secrets.UV_TEST_GEMFURY_USERNAME }}
UV_TEST_GITLAB_TOKEN: ${{ secrets.UV_TEST_GITLAB_TOKEN }}
UV_TEST_GITLAB_URL: ${{ secrets.UV_TEST_GITLAB_URL }}
UV_TEST_GITLAB_USERNAME: token

integration-test-publish:
timeout-minutes: 20
needs: integration-test-publish-changed
@@ -2039,7 +2214,7 @@ jobs:
timeout-minutes: 10
needs: build-binary-windows-aarch64
name: "check system | x86-64 python3.13 on windows aarch64"
runs-on: github-windows-11-aarch64-4
runs-on: windows-11-arm
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

@@ -2057,6 +2232,28 @@ jobs:
- name: "Validate global Python install"
run: py -3.13 ./scripts/check_system_python.py --uv ./uv.exe

system-test-windows-aarch64-aarch64-python-313:
timeout-minutes: 10
needs: build-binary-windows-aarch64
name: "check system | aarch64 python3.13 on windows aarch64"
runs-on: windows-11-arm
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: "3.13"
architecture: "arm64"
allow-prereleases: true

- name: "Download binary"
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: uv-windows-aarch64-${{ github.sha }}

- name: "Validate global Python install"
run: py -3.13 ./scripts/check_system_python.py --uv ./uv.exe

# Test our PEP 514 integration that installs Python into the Windows registry.
system-test-windows-registry:
timeout-minutes: 10

@@ -2202,7 +2399,7 @@ jobs:
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- uses: conda-incubator/setup-miniconda@505e6394dae86d6a5c7fbb6e3fb8938e3e863830 # v3.1.1
- uses: conda-incubator/setup-miniconda@835234971496cad1653abb28a638a281cf32541f # v3.2.0
with:
miniconda-version: "latest"
activate-environment: uv

@@ -2304,13 +2501,13 @@ jobs:
- name: "Checkout Branch"
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

- name: "Install Rust toolchain"
run: rustup show

- name: "Install codspeed"
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
with:
tool: cargo-codspeed

@@ -2341,13 +2538,13 @@ jobs:
- name: "Checkout Branch"
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

- name: "Install Rust toolchain"
run: rustup show

- name: "Install codspeed"
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
with:
tool: cargo-codspeed
.github/workflows/publish-pypi.yml (8 changed lines, vendored)

@@ -22,12 +22,14 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: wheels_uv-*
path: wheels_uv
merge-multiple: true
- name: Remove wheels unsupported by PyPI
run: rm wheels_uv/*riscv*
- name: Publish to PyPI
run: uv publish -v wheels_uv/*

@@ -41,11 +43,13 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: wheels_uv_build-*
path: wheels_uv_build
merge-multiple: true
- name: Remove wheels unsupported by PyPI
run: rm wheels_uv_build/*riscv*
- name: Publish to PyPI
run: uv publish -v wheels_uv_build/*
.github/workflows/release.yml (2 changed lines, vendored)

@@ -69,7 +69,7 @@ jobs:
# we specify bash to get pipefail; it guards against the `curl` command
# failing. otherwise `sh` won't catch that `curl` returned non-0
shell: bash
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.4/cargo-dist-installer.sh | sh"
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.7-prerelease.1/cargo-dist-installer.sh | sh"
- name: Cache dist
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
with:
.github/workflows/setup-dev-drive.ps1 (46 changed lines, vendored)

@@ -1,13 +1,43 @@
# Configures a drive for testing in CI.
#
# When using standard GitHub Actions runners, a `D:` drive is present and has
# similar or better performance characteristics than a ReFS dev drive. Sometimes
# using a larger runner is still more performant (e.g., when running the test
# suite) and we need to create a dev drive. This script automatically configures
# the appropriate drive.
#
# When using GitHub Actions' "larger runners", the `D:` drive is not present and
# we create a DevDrive mount on `C:`. This is purported to be more performant
# than a ReFS drive, though we did not see a change when we switched over.
#
# When using Depot runners, the underlying infrastructure is EC2, which does not
# support Hyper-V. The `New-VHD` cmdlet only works with Hyper-V, but we can
# create a ReFS drive using `diskpart` and `format` directly. We cannot use a
# DevDrive, as that also requires Hyper-V. The Depot runners use `D:` already,
# so we must check if it's a Depot runner first, and we use `V:` as the target
# instead.

# When not using a GitHub Actions "larger runner", the `D:` drive is present and
# has similar or better performance characteristics than a ReFS dev drive.
# Sometimes using a larger runner is still more performant (e.g., when running
# the test suite) and we need to create a dev drive. This script automatically
# configures the appropriate drive.

# Note that using `Get-PSDrive` is not sufficient because the drive letter is assigned.
if (Test-Path "D:\") {
if ($env:DEPOT_RUNNER -eq "1") {
Write-Output "DEPOT_RUNNER detected, setting up custom dev drive..."

# Create VHD and configure drive using diskpart
$vhdPath = "C:\uv_dev_drive.vhdx"
@"
create vdisk file="$vhdPath" maximum=20480 type=expandable
attach vdisk
create partition primary
active
assign letter=V
"@ | diskpart

# Format the drive as ReFS
format V: /fs:ReFS /q /y
$Drive = "V:"

Write-Output "Custom dev drive created at $Drive"
} elseif (Test-Path "D:\") {
# Note `Get-PSDrive` is not sufficient because the drive letter is assigned.
Write-Output "Using existing drive at D:"
$Drive = "D:"
} else {

@@ -55,10 +85,8 @@ Write-Output `
"DEV_DRIVE=$($Drive)" `
"TMP=$($Tmp)" `
"TEMP=$($Tmp)" `
"UV_INTERNAL__TEST_DIR=$($Tmp)" `
"RUSTUP_HOME=$($Drive)/.rustup" `
"CARGO_HOME=$($Drive)/.cargo" `
"UV_WORKSPACE=$($Drive)/uv" `
"PATH=$($Drive)/.cargo/bin;$env:PATH" `
>> $env:GITHUB_ENV
.github/workflows/sync-python-releases.yml (2 changed lines, vendored)

@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
with:
version: "latest"
enable-cache: true

.gitignore (3 changed lines, vendored)

@@ -3,9 +3,10 @@
# Generated by Cargo
# will have compiled files and executables
/vendor/
debug/
target/
target-alpine/
target/

# Bootstrapped Python versions
/bin/
@@ -12,7 +12,7 @@ repos:
- id: validate-pyproject

- repo: https://github.com/crate-ci/typos
rev: v1.32.0
rev: v1.34.0
hooks:
- id: typos

@@ -42,7 +42,7 @@ repos:
types_or: [yaml, json5]

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.11.12
rev: v0.12.2
hooks:
- id: ruff-format
- id: ruff
CHANGELOG.md (216 changed lines)

@@ -3,6 +3,221 @@
<!-- prettier-ignore-start -->

## 0.7.19

The **[uv build backend](https://docs.astral.sh/uv/concepts/build-backend/) is now stable**, and considered ready for production use.

The uv build backend is a great choice for pure Python projects. It has reasonable defaults, with the goal of requiring zero configuration for most users, but provides flexible configuration to accommodate most Python project structures. It integrates tightly with uv to improve messaging and user experience. It validates project metadata and structures, preventing common mistakes. And, finally, it's very fast: `uv sync` on a new project (from `uv init`) is 10-30x faster than with other build backends.

To use uv as a build backend in an existing project, add `uv_build` to the `[build-system]` section in your `pyproject.toml`:

```toml
[build-system]
requires = ["uv_build>=0.7.19,<0.8.0"]
build-backend = "uv_build"
```

In a future release, it will replace `hatchling` as the default in `uv init`. As before, uv will remain compatible with all standards-compliant build backends.
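
As a quick sketch of the workflow this enables (the project name `example` and the `dist/` filenames below are illustrative; since `uv init` still defaults to `hatchling` at this version, the `[build-system]` table above is edited in by hand):

```console
$ uv init example && cd example
# switch [build-system] in pyproject.toml to uv_build as shown above
$ uv build
$ ls dist/
example-0.1.0-py3-none-any.whl  example-0.1.0.tar.gz
```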

### Python

- Add PGO distributions of Python for aarch64 Linux, which are optimized for better performance

  See the [python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250702) for more details.

### Enhancements

- Ignore Python patch version for `--universal` pip compile ([#14405](https://github.com/astral-sh/uv/pull/14405))
- Update the tilde version specifier warning to include more context ([#14335](https://github.com/astral-sh/uv/pull/14335))
- Clarify behavior and hint on tool install when no executables are available ([#14423](https://github.com/astral-sh/uv/pull/14423))

### Bug fixes

- Make project and interpreter lock acquisition non-fatal ([#14404](https://github.com/astral-sh/uv/pull/14404))
- Include `sys.prefix` in cached environment keys to avoid `--with` collisions across projects ([#14403](https://github.com/astral-sh/uv/pull/14403))

### Documentation

- Add a migration guide from pip to uv projects ([#12382](https://github.com/astral-sh/uv/pull/12382))

## 0.7.18

### Python

- Added arm64 Windows Python 3.11, 3.12, 3.13, and 3.14

  These are not downloaded by default, since x86-64 Python has broader ecosystem support on Windows.
  However, they can be requested with `cpython-<version>-windows-aarch64`; see the example after this note.

  See the [python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250630) for more details.
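
For example, requesting the aarch64 build explicitly uses the full version key, mirroring the `integration-test-windows-aarch64-explicit` CI steps earlier in this diff:

```console
$ uv python install -v cpython-3.13-windows-aarch64-none
$ uv venv -p 3.13 --managed-python
```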

### Enhancements

- Keep track of retries in `ManagedPythonDownload::fetch_with_retry` ([#14378](https://github.com/astral-sh/uv/pull/14378))
- Reuse build (virtual) environments across resolution and installation ([#14338](https://github.com/astral-sh/uv/pull/14338))
- Improve trace message for cached Python interpreter query ([#14328](https://github.com/astral-sh/uv/pull/14328))
- Use parsed URLs for conflicting URL error message ([#14380](https://github.com/astral-sh/uv/pull/14380))

### Preview features

- Ignore invalid build backend settings when not building ([#14372](https://github.com/astral-sh/uv/pull/14372))

### Bug fixes

- Fix equals-star and tilde-equals with `python_version` and `python_full_version` ([#14271](https://github.com/astral-sh/uv/pull/14271))
- Include the canonical path in the interpreter query cache key ([#14331](https://github.com/astral-sh/uv/pull/14331))
- Only drop build directories on program exit ([#14304](https://github.com/astral-sh/uv/pull/14304))
- Error instead of panic on conflict between global and subcommand flags ([#14368](https://github.com/astral-sh/uv/pull/14368))
- Consistently normalize trailing slashes on URLs with no path segments ([#14349](https://github.com/astral-sh/uv/pull/14349))

### Documentation

- Add instructions for publishing to JFrog's Artifactory ([#14253](https://github.com/astral-sh/uv/pull/14253))
- Edits to the build backend documentation ([#14376](https://github.com/astral-sh/uv/pull/14376))

## 0.7.17

### Bug fixes

- Apply build constraints when resolving `--with` dependencies ([#14340](https://github.com/astral-sh/uv/pull/14340))
- Drop trailing slashes when converting index URL from URL ([#14346](https://github.com/astral-sh/uv/pull/14346))
- Ignore `UV_PYTHON_CACHE_DIR` when empty ([#14336](https://github.com/astral-sh/uv/pull/14336))
- Fix error message ordering for `pyvenv.cfg` version conflict ([#14329](https://github.com/astral-sh/uv/pull/14329))

## 0.7.16

### Python

- Add Python 3.14.0b3

  See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250626) for more details.

### Enhancements

- Include path or URL when failing to convert in lockfile ([#14292](https://github.com/astral-sh/uv/pull/14292))
- Warn when `~=` is used as a Python version specifier without a patch version ([#14008](https://github.com/astral-sh/uv/pull/14008))

### Preview features

- Ensure preview default Python installs are upgradeable ([#14261](https://github.com/astral-sh/uv/pull/14261))

### Performance

- Share workspace cache between lock and sync operations ([#14321](https://github.com/astral-sh/uv/pull/14321))

### Bug fixes

- Allow local indexes to reference remote files ([#14294](https://github.com/astral-sh/uv/pull/14294))
- Avoid rendering desugared prefix matches in error messages ([#14195](https://github.com/astral-sh/uv/pull/14195))
- Avoid using path URL for workspace Git dependencies in `requirements.txt` ([#14288](https://github.com/astral-sh/uv/pull/14288))
- Normalize index URLs to remove trailing slash ([#14245](https://github.com/astral-sh/uv/pull/14245))
- Respect URL-encoded credentials in redirect location ([#14315](https://github.com/astral-sh/uv/pull/14315))
- Lock the source tree when running setuptools, to protect concurrent builds ([#14174](https://github.com/astral-sh/uv/pull/14174))

### Documentation

- Note that GCP Artifact Registry download URLs must have a `/simple` component ([#14251](https://github.com/astral-sh/uv/pull/14251))

## 0.7.15

### Enhancements

- Consistently use `Ordering::Relaxed` for standalone atomic use cases ([#14190](https://github.com/astral-sh/uv/pull/14190))
- Warn on ambiguous relative paths for `--index` ([#14152](https://github.com/astral-sh/uv/pull/14152))
- Skip GitHub fast path when rate-limited ([#13033](https://github.com/astral-sh/uv/pull/13033))
- Preserve newlines in `schema.json` descriptions ([#13693](https://github.com/astral-sh/uv/pull/13693))

### Bug fixes

- Add check for using minor version link when creating a venv on Windows ([#14252](https://github.com/astral-sh/uv/pull/14252))
- Strip query parameters when parsing source URL ([#14224](https://github.com/astral-sh/uv/pull/14224))

### Documentation

- Add a link to the PyPI FAQ to clarify what a per-project token is ([#14242](https://github.com/astral-sh/uv/pull/14242))

### Preview features

- Allow symlinks in the build backend ([#14212](https://github.com/astral-sh/uv/pull/14212))

## 0.7.14

### Enhancements

- Add XPU to `--torch-backend` ([#14172](https://github.com/astral-sh/uv/pull/14172))
- Add ROCm backends to `--torch-backend` ([#14120](https://github.com/astral-sh/uv/pull/14120))
- Remove preview label from `--torch-backend` ([#14119](https://github.com/astral-sh/uv/pull/14119))
- Add `[tool.uv.dependency-groups].mygroup.requires-python` ([#13735](https://github.com/astral-sh/uv/pull/13735))
- Add auto-detection for AMD GPUs ([#14176](https://github.com/astral-sh/uv/pull/14176)); see the sketch after this list
- Show retries for HTTP status code errors ([#13897](https://github.com/astral-sh/uv/pull/13897))
- Support transparent Python patch version upgrades ([#13954](https://github.com/astral-sh/uv/pull/13954))
- Warn on empty index directory ([#13940](https://github.com/astral-sh/uv/pull/13940))
- Publish to DockerHub ([#14088](https://github.com/astral-sh/uv/pull/14088))
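
A minimal sketch of the `--torch-backend` flag these entries extend: `auto` pairs with the GPU auto-detection above, and a specific backend can be pinned instead (the `cu128` value is an assumption here; exact backend identifiers may vary by uv version):

```console
$ uv pip install torch --torch-backend=auto
$ uv pip install torch --torch-backend=cu128
```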

### Performance

- Make cold resolves about 10% faster ([#14035](https://github.com/astral-sh/uv/pull/14035))

### Bug fixes

- Don't use walrus operator in interpreter query script ([#14108](https://github.com/astral-sh/uv/pull/14108))
- Fix handling of changes to `requires-python` ([#14076](https://github.com/astral-sh/uv/pull/14076))
- Fix implied `platform_machine` marker for `win_amd64` platform tag ([#14041](https://github.com/astral-sh/uv/pull/14041))
- Only update existing symlink directories on preview uninstall ([#14179](https://github.com/astral-sh/uv/pull/14179))
- Serialize Python requests for tools as canonicalized strings ([#14109](https://github.com/astral-sh/uv/pull/14109))
- Support netrc and same-origin credential propagation on index redirects ([#14126](https://github.com/astral-sh/uv/pull/14126))
- Support reading `dependency-groups` from pyproject.tomls with no `[project]` ([#13742](https://github.com/astral-sh/uv/pull/13742))
- Handle an existing shebang in `uv init --script` ([#14141](https://github.com/astral-sh/uv/pull/14141))
- Prevent concurrent updates of the environment in `uv run` ([#14153](https://github.com/astral-sh/uv/pull/14153))
- Filter managed Python distributions by platform before querying when included in request ([#13936](https://github.com/astral-sh/uv/pull/13936))

### Documentation

- Replace cuda124 with cuda128 ([#14168](https://github.com/astral-sh/uv/pull/14168))
- Document the way member sources shadow workspace sources ([#14136](https://github.com/astral-sh/uv/pull/14136))
- Sync documented PyTorch integration index for CUDA and ROCm versions from PyTorch website ([#14100](https://github.com/astral-sh/uv/pull/14100))

## 0.7.13

### Python

- Add Python 3.14.0b2
- Add Python 3.13.5
- Fix stability of `uuid.getnode` on 3.13

  See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250612) for more details.

### Enhancements

- Download versions in `uv python pin` if not found ([#13946](https://github.com/astral-sh/uv/pull/13946))
- Use TTY detection to determine if SIGINT forwarding is enabled ([#13925](https://github.com/astral-sh/uv/pull/13925))
- Avoid fetching an exact, cached Git commit, even if it isn't locked ([#13748](https://github.com/astral-sh/uv/pull/13748))
- Add `zstd` and `deflate` to `Accept-Encoding` ([#13982](https://github.com/astral-sh/uv/pull/13982))
- Build binaries for riscv64 ([#12688](https://github.com/astral-sh/uv/pull/12688))

### Bug fixes

- Check if relative URL is valid directory before treating as index ([#13917](https://github.com/astral-sh/uv/pull/13917))
- Ignore Python discovery errors during `uv python pin` ([#13944](https://github.com/astral-sh/uv/pull/13944))
- Do not allow `uv add --group ... --script` ([#13997](https://github.com/astral-sh/uv/pull/13997))

### Preview changes

- Build backend: Support namespace packages ([#13833](https://github.com/astral-sh/uv/pull/13833))

### Documentation

- Add 3.14 to the supported platform reference ([#13990](https://github.com/astral-sh/uv/pull/13990))
- Add an `llms.txt` to uv ([#13929](https://github.com/astral-sh/uv/pull/13929))
- Add supported macOS version to the platform reference ([#13993](https://github.com/astral-sh/uv/pull/13993))
- Update platform support reference to include Python implementation list ([#13991](https://github.com/astral-sh/uv/pull/13991))
- Update pytorch.md ([#13899](https://github.com/astral-sh/uv/pull/13899))
- Update the CLI help and reference to include references to the Python bin directory ([#13978](https://github.com/astral-sh/uv/pull/13978))

## 0.7.12

### Enhancements

@@ -11,7 +226,6 @@
- Don't hint at versions removed by `excluded-newer` ([#13884](https://github.com/astral-sh/uv/pull/13884))
- Add hint to use `tool.uv.environments` on resolution error ([#13455](https://github.com/astral-sh/uv/pull/13455))
- Add hint to use `tool.uv.required-environments` on resolution error ([#13575](https://github.com/astral-sh/uv/pull/13575))

- Improve `python pin` error messages ([#13862](https://github.com/astral-sh/uv/pull/13862))

### Bug fixes
Cargo.lock (483 changed lines, generated)
File diff suppressed because it is too large.

Cargo.toml (98 changed lines)
@@ -12,7 +12,7 @@ resolver = "2"

[workspace.package]
edition = "2024"
rust-version = "1.85"
rust-version = "1.86"
homepage = "https://pypi.org/project/uv/"
documentation = "https://pypi.org/project/uv/"
repository = "https://github.com/astral-sh/uv"

@@ -142,16 +142,16 @@ ref-cast = { version = "1.0.24" }
reflink-copy = { version = "0.1.19" }
regex = { version = "1.10.6" }
regex-automata = { version = "0.4.8", default-features = false, features = ["dfa-build", "dfa-search", "perf", "std", "syntax"] }
reqwest = { version = "=0.12.15", default-features = false, features = ["json", "gzip", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] }
reqwest-middleware = { version = "0.4.0", features = ["multipart"] }
reqwest-retry = { version = "0.7.0" }
reqwest = { version = "0.12.22", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] }
reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8", features = ["multipart"] }
reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" }
rkyv = { version = "0.8.8", features = ["bytecheck"] }
rmp-serde = { version = "1.3.0" }
rust-netrc = { version = "0.1.2" }
rustc-hash = { version = "2.0.0" }
rustix = { version = "1.0.0", default-features = false, features = ["fs", "std"] }
same-file = { version = "1.0.6" }
schemars = { version = "0.8.21", features = ["url"] }
schemars = { version = "1.0.0", features = ["url2"] }
seahash = { version = "4.1.0" }
self-replace = { version = "1.5.0" }
serde = { version = "1.0.210", features = ["derive", "rc"] }

@@ -183,13 +183,13 @@ unscanny = { version = "0.1.0" }
url = { version = "2.5.2", features = ["serde"] }
version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" }
walkdir = { version = "2.5.0" }
which = { version = "7.0.0", features = ["regex"] }
which = { version = "8.0.0", features = ["regex"] }
windows = { version = "0.59.0", features = ["Win32_Storage_FileSystem"] }
windows-core = { version = "0.59.0" }
windows-registry = { version = "0.5.0" }
windows-result = { version = "0.3.0" }
windows-sys = { version = "0.59.0", features = ["Win32_Foundation", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Ioctl", "Win32_System_IO", "Win32_System_Registry"] }
wiremock = { version = "0.6.2" }
wiremock = { version = "0.6.4" }
xz2 = { version = "0.1.7" }
zip = { version = "2.2.3", default-features = false, features = ["deflate", "zstd", "bzip2", "lzma", "xz"] }

@@ -214,6 +214,7 @@ missing_panics_doc = "allow"
module_name_repetitions = "allow"
must_use_candidate = "allow"
similar_names = "allow"
struct_excessive_bools = "allow"
too_many_arguments = "allow"
too_many_lines = "allow"
used_underscore_binding = "allow"

@@ -296,83 +297,6 @@ codegen-units = 1
[profile.dist]
inherits = "release"

# Config for 'dist'
[workspace.metadata.dist]
# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.28.4"
# make a package being included in our releases opt-in instead of opt-out
dist = false
# CI backends to support
ci = "github"
# The installers to generate for each app
installers = ["shell", "powershell"]
# The archive format to use for windows builds (defaults .zip)
windows-archive = ".zip"
# The archive format to use for non-windows builds (defaults .tar.xz)
unix-archive = ".tar.gz"
# Target platforms to build apps for (Rust target-triple syntax)
targets = [
    "aarch64-apple-darwin",
    "aarch64-pc-windows-msvc",
    "aarch64-unknown-linux-gnu",
    "aarch64-unknown-linux-musl",
    "arm-unknown-linux-musleabihf",
    "armv7-unknown-linux-gnueabihf",
    "armv7-unknown-linux-musleabihf",
    "i686-pc-windows-msvc",
    "i686-unknown-linux-gnu",
    "i686-unknown-linux-musl",
    "powerpc64-unknown-linux-gnu",
    "powerpc64le-unknown-linux-gnu",
    "s390x-unknown-linux-gnu",
    "x86_64-apple-darwin",
    "x86_64-pc-windows-msvc",
    "x86_64-unknown-linux-gnu",
    "x86_64-unknown-linux-musl",
]
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
auto-includes = false
# Whether dist should create a Github Release or use an existing draft
create-release = true
# Which actions to run on pull requests
pr-run-mode = "plan"
# Whether CI should trigger releases with dispatches instead of tag pushes
dispatch-releases = true
# Which phase dist should use to create the GitHub release
github-release = "announce"
# Whether CI should include auto-generated code to build local artifacts
build-local-artifacts = false
# Local artifacts jobs to run in CI
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi"]
# Post-announce jobs to run in CI
post-announce-jobs = ["./publish-docs"]
# Custom permissions for GitHub Jobs
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read", id-token = "write", attestations = "write" } }
# Whether to install an updater program
install-updater = false
# Path that installers should place binaries in
install-path = ["$XDG_BIN_HOME/", "$XDG_DATA_HOME/../bin", "~/.local/bin"]

[workspace.metadata.dist.github-custom-runners]
global = "depot-ubuntu-latest-4"

[workspace.metadata.dist.min-glibc-version]
# Override glibc version for specific target triplets.
aarch64-unknown-linux-gnu = "2.28"
# Override all remaining glibc versions.
"*" = "2.17"

[workspace.metadata.dist.github-action-commits]
"actions/checkout" = "11bd71901bbe5b1630ceea73d27597364c9af683" # v4
"actions/upload-artifact" = "6027e3dd177782cd8ab9af838c04fd81a07f1d47" # v4.6.2
"actions/download-artifact" = "d3f86a106a0bac45b974a628896c90dbdf5c8093" # v4.3.0
"actions/attest-build-provenance" = "c074443f1aee8d4aeeae555aebba3282517141b2" # v2.2.3

[workspace.metadata.dist.binaries]
"*" = ["uv", "uvx"]
# Add "uvw" binary for Windows targets
aarch64-pc-windows-msvc = ["uv", "uvx", "uvw"]
i686-pc-windows-msvc = ["uv", "uvx", "uvw"]
x86_64-pc-windows-msvc = ["uv", "uvx", "uvw"]

[patch.crates-io]
reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" }
reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" }
@@ -6,6 +6,8 @@ doc-valid-idents = [
"GraalPy",
"ReFS",
"PyTorch",
"ROCm",
"XPU",
".." # Include the defaults
]

@@ -35,7 +37,7 @@ disallowed-methods = [
"std::fs::soft_link",
"std::fs::symlink_metadata",
"std::fs::write",
"std::os::unix::fs::symlink",
"std::os::windows::fs::symlink_dir",
"std::os::windows::fs::symlink_file",
{ path = "std::os::unix::fs::symlink", allow-invalid = true },
{ path = "std::os::windows::fs::symlink_dir", allow-invalid = true },
{ path = "std::os::windows::fs::symlink_file", allow-invalid = true },
]
@@ -86,7 +86,7 @@ impl Indexes {
    Self(FxHashSet::default())
}

/// Create a new [`AuthIndexUrls`] from an iterator of [`AuthIndexUrl`]s.
/// Create a new [`Indexes`] instance from an iterator of [`Index`]s.
pub fn from_indexes(urls: impl IntoIterator<Item = Index>) -> Self {
    let mut index_urls = Self::new();
    for url in urls {
@@ -18,11 +18,6 @@ workspace = true
doctest = false
bench = false

[[bench]]
name = "distribution-filename"
path = "benches/distribution_filename.rs"
harness = false

[[bench]]
name = "uv"
path = "benches/uv.rs"

@@ -34,7 +29,6 @@ uv-client = { workspace = true }
uv-configuration = { workspace = true }
uv-dispatch = { workspace = true }
uv-distribution = { workspace = true }
uv-distribution-filename = { workspace = true }
uv-distribution-types = { workspace = true }
uv-extract = { workspace = true, optional = true }
uv-install-wheel = { workspace = true }

@@ -48,8 +42,10 @@ uv-types = { workspace = true }
uv-workspace = { workspace = true }

anyhow = { workspace = true }
codspeed-criterion-compat = { version = "2.7.2", default-features = false, optional = true }
criterion = { version = "0.6.0", default-features = false, features = ["async_tokio"] }
codspeed-criterion-compat = { version = "3.0.2", default-features = false, optional = true }
criterion = { version = "0.6.0", default-features = false, features = [
    "async_tokio",
] }
jiff = { workspace = true }
tokio = { workspace = true }
@@ -1,168 +0,0 @@
use std::str::FromStr;

use uv_bench::criterion::{
    BenchmarkId, Criterion, Throughput, criterion_group, criterion_main, measurement::WallTime,
};
use uv_distribution_filename::WheelFilename;
use uv_platform_tags::{AbiTag, LanguageTag, PlatformTag, Tags};

/// A set of platform tags extracted from burntsushi's Archlinux workstation.
/// We could just re-create these via `Tags::from_env`, but those might differ
/// depending on the platform. This way, we always use the same data. It also
/// lets us assert tag compatibility regardless of where the benchmarks run.
const PLATFORM_TAGS: &[(&str, &str, &str)] = include!("../inputs/platform_tags.rs");

/// A set of wheel names used in the benchmarks below. We pick short and long
/// names, as well as compatible and not-compatible (with `PLATFORM_TAGS`)
/// names.
///
/// The tuple is (name, filename, compatible), where `name` is a descriptive
/// name for humans used in the benchmark definition, `filename` is the
/// actual wheel filename we want to benchmark operations on, and `compatible`
/// indicates whether the tags in the wheel filename are expected to be
/// compatible with the tags in `PLATFORM_TAGS`.
const WHEEL_NAMES: &[(&str, &str, bool)] = &[
    // This tests a case with a very short name that *is* compatible with
    // PLATFORM_TAGS. It only uses one tag for each component (one Python
    // version, one ABI and one platform).
    (
        "flyte-short-compatible",
        "ipython-2.1.0-py3-none-any.whl",
        true,
    ),
    // This tests a case with a long name that is *not* compatible. That
    // is, all platform tags need to be checked against the tags in the
    // wheel filename. This is essentially the worst possible practical
    // case.
    (
        "flyte-long-incompatible",
        "protobuf-3.5.2.post1-cp36-cp36m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl",
        false,
    ),
    // This tests a case with a long name that *is* compatible. We
    // expect this to be (on average) quicker because the compatibility
    // check stops as soon as a positive match is found. (Whereas the
    // incompatible case needs to check all tags.)
    (
        "flyte-long-compatible",
        "coverage-6.6.0b1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
        true,
    ),
];

/// A list of names that are candidates for wheel filenames but will ultimately
/// fail to parse.
const INVALID_WHEEL_NAMES: &[(&str, &str)] = &[
    ("flyte-short-extension", "mock-5.1.0.tar.gz"),
    (
        "flyte-long-extension",
        "Pillow-5.4.0.dev0-py3.7-macosx-10.13-x86_64.egg",
    ),
];

/// Benchmarks the construction of platform tags.
///
/// This only happens ~once per program startup. Originally, construction was
/// trivial. But to speed up `WheelFilename::is_compatible`, we added some
/// extra processing. We thus expect construction to become slower, but we
/// write a benchmark to ensure it is still "reasonable."
fn benchmark_build_platform_tags(c: &mut Criterion<WallTime>) {
    let tags: Vec<(LanguageTag, AbiTag, PlatformTag)> = PLATFORM_TAGS
        .iter()
        .map(|&(py, abi, plat)| {
            (
                LanguageTag::from_str(py).unwrap(),
                AbiTag::from_str(abi).unwrap(),
                PlatformTag::from_str(plat).unwrap(),
            )
        })
        .collect();

    let mut group = c.benchmark_group("build_platform_tags");
    group.bench_function(BenchmarkId::from_parameter("burntsushi-archlinux"), |b| {
        b.iter(|| std::hint::black_box(Tags::new(tags.clone())));
    });
    group.finish();
}

/// Benchmarks `WheelFilename::from_str`. This has been observed to take some
/// non-trivial time in profiling (although, at time of writing, not as much
/// as tag compatibility). In the process of optimizing tag compatibility,
/// we tweaked wheel filename parsing. This benchmark was therefore added to
/// ensure we didn't regress here.
fn benchmark_wheelname_parsing(c: &mut Criterion<WallTime>) {
    let mut group = c.benchmark_group("wheelname_parsing");
    for (name, filename, _) in WHEEL_NAMES.iter().copied() {
        let len = u64::try_from(filename.len()).expect("length fits in u64");
        group.throughput(Throughput::Bytes(len));
        group.bench_function(BenchmarkId::from_parameter(name), |b| {
            b.iter(|| {
                filename
                    .parse::<WheelFilename>()
                    .expect("valid wheel filename");
            });
        });
    }
    group.finish();
}

/// Benchmarks `WheelFilename::from_str` when it fails. This routine is called
/// on every filename in a package's metadata, a non-trivial portion of which
/// are not wheel filenames. Ensuring that the error path is fast is thus
/// probably a good idea.
fn benchmark_wheelname_parsing_failure(c: &mut Criterion<WallTime>) {
    let mut group = c.benchmark_group("wheelname_parsing_failure");
    for (name, filename) in INVALID_WHEEL_NAMES.iter().copied() {
        let len = u64::try_from(filename.len()).expect("length fits in u64");
        group.throughput(Throughput::Bytes(len));
        group.bench_function(BenchmarkId::from_parameter(name), |b| {
            b.iter(|| {
                filename
                    .parse::<WheelFilename>()
                    .expect_err("invalid wheel filename");
            });
        });
    }
    group.finish();
}

/// Benchmarks the `WheelFilename::is_compatible` routine. This was revealed
/// to be the #1 bottleneck in the resolver. The main issue was that the
/// set of platform tags (generated once) is quite large, and the original
/// implementation did an exhaustive search over each of them for each tag in
/// the wheel filename.
fn benchmark_wheelname_tag_compatibility(c: &mut Criterion<WallTime>) {
    let tags: Vec<(LanguageTag, AbiTag, PlatformTag)> = PLATFORM_TAGS
        .iter()
        .map(|&(py, abi, plat)| {
            (
                LanguageTag::from_str(py).unwrap(),
                AbiTag::from_str(abi).unwrap(),
                PlatformTag::from_str(plat).unwrap(),
            )
        })
        .collect();
    let tags = Tags::new(tags);

    let mut group = c.benchmark_group("wheelname_tag_compatibility");
    for (name, filename, expected) in WHEEL_NAMES.iter().copied() {
        let wheelname: WheelFilename = filename.parse().expect("valid wheel filename");
        let len = u64::try_from(filename.len()).expect("length fits in u64");
        group.throughput(Throughput::Bytes(len));
        group.bench_function(BenchmarkId::from_parameter(name), |b| {
            b.iter(|| {
                assert_eq!(expected, wheelname.is_compatible(&tags));
            });
        });
    }
    group.finish();
}

criterion_group!(
    uv_distribution_filename,
    benchmark_build_platform_tags,
    benchmark_wheelname_parsing,
    benchmark_wheelname_parsing_failure,
    benchmark_wheelname_tag_compatibility,
);
criterion_main!(uv_distribution_filename);
@@ -1,6 +1,6 @@
use std::str::FromStr;

use uv_bench::criterion::black_box;
use std::hint::black_box;
use uv_bench::criterion::{Criterion, criterion_group, criterion_main, measurement::WallTime};
use uv_cache::Cache;
use uv_client::RegistryClientBuilder;

@@ -91,7 +91,7 @@ mod resolver {
};
use uv_dispatch::{BuildDispatch, SharedState};
use uv_distribution::DistributionDatabase;
use uv_distribution_types::{DependencyMetadata, IndexLocations};
use uv_distribution_types::{DependencyMetadata, IndexLocations, RequiresPython};
use uv_install_wheel::LinkMode;
use uv_pep440::Version;
use uv_pep508::{MarkerEnvironment, MarkerEnvironmentBuilder};

@@ -99,8 +99,8 @@ mod resolver {
use uv_pypi_types::{Conflicts, ResolverMarkerEnvironment};
use uv_python::Interpreter;
use uv_resolver::{
    FlatIndex, InMemoryIndex, Manifest, OptionsBuilder, PythonRequirement, RequiresPython,
    Resolver, ResolverEnvironment, ResolverOutput,
    FlatIndex, InMemoryIndex, Manifest, OptionsBuilder, PythonRequirement, Resolver,
    ResolverEnvironment, ResolverOutput,
};
use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy};
use uv_workspace::WorkspaceCache;
@ -9,21 +9,19 @@ pub use settings::{BuildBackendSettings, WheelDataIncludes};
|
|||
pub use source_dist::{build_source_dist, list_source_dist};
|
||||
pub use wheel::{build_editable, build_wheel, list_wheel, metadata};
|
||||
|
||||
use std::fs::FileType;
|
||||
use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::str::FromStr;
|
||||
|
||||
use thiserror::Error;
|
||||
use tracing::debug;
|
||||
use walkdir::DirEntry;
|
||||
|
||||
use uv_fs::Simplified;
|
||||
use uv_globfilter::PortableGlobError;
|
||||
use uv_normalize::PackageName;
|
||||
use uv_pypi_types::IdentifierParseError;
|
||||
use uv_pypi_types::{Identifier, IdentifierParseError};
|
||||
|
||||
use crate::metadata::ValidationError;
|
||||
use crate::settings::ModuleName;
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum Error {
|
||||
|
@ -33,8 +31,8 @@ pub enum Error {
|
|||
Toml(#[from] toml::de::Error),
|
||||
#[error("Invalid pyproject.toml")]
|
||||
Validation(#[from] ValidationError),
|
||||
#[error(transparent)]
|
||||
Identifier(#[from] IdentifierParseError),
|
||||
#[error("Invalid module name: {0}")]
|
||||
InvalidModuleName(String, #[source] IdentifierParseError),
|
||||
#[error("Unsupported glob expression in: `{field}`")]
|
||||
PortableGlob {
|
||||
field: String,
|
||||
|
@ -56,33 +54,14 @@ pub enum Error {
|
|||
#[source]
|
||||
err: walkdir::Error,
|
||||
},
|
||||
#[error("Unsupported file type {:?}: `{}`", _1, _0.user_display())]
|
||||
UnsupportedFileType(PathBuf, FileType),
|
||||
#[error("Failed to write wheel zip archive")]
|
||||
Zip(#[from] zip::result::ZipError),
|
||||
#[error("Failed to write RECORD file")]
|
||||
Csv(#[from] csv::Error),
|
||||
#[error(
|
||||
"Missing source directory at: `{}`",
|
||||
_0.user_display()
|
||||
)]
|
||||
MissingSrc(PathBuf),
|
||||
#[error(
|
||||
"Expected a Python module directory at: `{}`",
|
||||
_0.user_display()
|
||||
)]
|
||||
#[error("Expected a Python module at: `{}`", _0.user_display())]
|
||||
MissingInitPy(PathBuf),
|
||||
#[error(
|
||||
"Missing module directory for `{}` in `{}`. Found: `{}`",
|
||||
module_name,
|
||||
src_root.user_display(),
|
||||
dir_listing.join("`, `")
|
||||
)]
|
||||
MissingModuleDir {
|
||||
module_name: String,
|
||||
src_root: PathBuf,
|
||||
dir_listing: Vec<String>,
|
||||
},
|
||||
#[error("For namespace packages, `__init__.py[i]` is not allowed in parent directory: `{}`", _0.user_display())]
|
||||
NotANamespace(PathBuf),
|
||||
/// Either an absolute path or a parent path through `..`.
|
||||
#[error("Module root must be inside the project: `{}`", _0.user_display())]
|
||||
InvalidModuleRoot(PathBuf),
|
||||
|
@ -105,6 +84,16 @@ trait DirectoryWriter {
|
|||
/// Files added through the method are considered generated when listing included files.
|
||||
fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error>;
|
||||
|
||||
/// Add the file or directory to the path.
|
||||
fn write_dir_entry(&mut self, entry: &DirEntry, target_path: &str) -> Result<(), Error> {
|
||||
if entry.file_type().is_dir() {
|
||||
self.write_directory(target_path)?;
|
||||
} else {
|
||||
self.write_file(target_path, entry.path())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Add a local file.
|
||||
fn write_file(&mut self, path: &str, file: &Path) -> Result<(), Error>;
|
||||
|
||||
|
@ -195,12 +184,26 @@ fn check_metadata_directory(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// Resolve the source root, module root and the module name.
|
||||
/// Returns the source root and the module path with the `__init__.py[i]` below to it while
|
||||
/// checking the project layout and names.
|
||||
///
|
||||
/// Some target platforms have case-sensitive filesystems, while others have case-insensitive
|
||||
/// filesystems. We always lower case the package name, our default for the module, while some
|
||||
/// users want uppercase letters in their module names. For example, the package name is `pil_util`,
|
||||
/// but the module `PIL_util`. To make the behavior as consistent as possible across platforms as
|
||||
/// possible, we require that an upper case name is given explicitly through
|
||||
/// `tool.uv.build-backend.module-name`.
|
||||
///
|
||||
/// By default, the dist-info-normalized package name is the module name. For
|
||||
/// dist-info-normalization, the rules are lowercasing, replacing `.` with `_` and
|
||||
/// replace `-` with `_`. Since `.` and `-` are not allowed in identifiers, we can use a string
|
||||
/// comparison with the module name.
|
||||
fn find_roots(
|
||||
source_tree: &Path,
|
||||
pyproject_toml: &PyProjectToml,
|
||||
relative_module_root: &Path,
|
||||
module_name: Option<&ModuleName>,
|
||||
module_name: Option<&str>,
|
||||
namespace: bool,
|
||||
) -> Result<(PathBuf, PathBuf), Error> {
|
||||
let relative_module_root = uv_fs::normalize_path(relative_module_root);
|
||||
let src_root = source_tree.join(&relative_module_root);
|
||||
|
@ -208,93 +211,114 @@ fn find_roots(
        return Err(Error::InvalidModuleRoot(relative_module_root.to_path_buf()));
    }
    let src_root = source_tree.join(&relative_module_root);
    let module_root = find_module_root(&src_root, module_name, pyproject_toml.name())?;
    Ok((src_root, module_root))
    debug!("Source root: {}", src_root.user_display());

    if namespace {
        // `namespace = true` disables module structure checks.
        let module_relative = if let Some(module_name) = module_name {
            module_name.split('.').collect::<PathBuf>()
        } else {
            PathBuf::from(pyproject_toml.name().as_dist_info_name().to_string())
        };
        debug!("Namespace module path: {}", module_relative.user_display());
        return Ok((src_root, module_relative));
    }

    let module_relative = if let Some(module_name) = module_name {
        module_path_from_module_name(&src_root, module_name)?
    } else {
        find_module_path_from_package_name(&src_root, pyproject_toml.name())?
    };
    debug!("Module path: {}", module_relative.user_display());
    Ok((src_root, module_relative))
}
/// Match the module name to its module directory with potentially different casing.
/// Infer stubs packages from the package name alone.
///
/// Some target platforms have case-sensitive filesystems, while others have case-insensitive
/// filesystems, and we always lowercase the package name, our default for the module, while some
/// users want uppercase letters in their module names. For example, the package name is `pil_util`,
/// but the module `PIL_util`.
///
/// By default, the dist-info-normalized package name is the module name. For
/// dist-info normalization, the rules are lowercasing, replacing `.` with `_`, and replacing
/// `-` with `_`. Since `.` and `-` are not allowed in identifiers, we can use a string
/// comparison with the module name.
///
/// To make the behavior as consistent as possible across platforms, we require that an
/// uppercase name is given explicitly through `tool.uv.module-name`.
///
/// Returns the module root path, the directory below which the `__init__.py` lives.
fn find_module_root(
/// There are potential false positives if someone has a regular package whose name ends in
/// `-stubs`. The `Identifier` checks in `module_path_from_module_name` are covered here by the
/// `PackageName` validation.
fn find_module_path_from_package_name(
    src_root: &Path,
    module_name: Option<&ModuleName>,
    package_name: &PackageName,
) -> Result<PathBuf, Error> {
    let (module_name, stubs) = if let Some(module_name) = module_name {
        // This name can be uppercase.
        match module_name {
            ModuleName::Identifier(module_name) => (module_name.to_string(), false),
            ModuleName::Stubs(module_name) => (module_name.to_string(), true),
    if let Some(stem) = package_name.to_string().strip_suffix("-stubs") {
        debug!("Building stubs package instead of a regular package");
        let module_name = PackageName::from_str(stem)
            .expect("non-empty package name prefix must be valid package name")
            .as_dist_info_name()
            .to_string();
        let module_relative = PathBuf::from(format!("{module_name}-stubs"));
        let init_pyi = src_root.join(&module_relative).join("__init__.pyi");
        if !init_pyi.is_file() {
            return Err(Error::MissingInitPy(init_pyi));
        }
        Ok(module_relative)
    } else {
        // Infer stubs packages from package name alone. There are potential false positives if
        // someone had a regular package with `-stubs`.
        if let Some(stem) = package_name.to_string().strip_suffix("-stubs") {
            debug!("Building stubs package instead of a regular package");
            let module_name = PackageName::from_str(stem)
                .expect("non-empty package name prefix must be valid package name")
                .as_dist_info_name()
                .to_string();
            (format!("{module_name}-stubs"), true)
        } else {
            // This name is always lowercase.
            (package_name.as_dist_info_name().to_string(), false)
        // This name is always lowercase.
        let module_relative = PathBuf::from(package_name.as_dist_info_name().to_string());
        let init_py = src_root.join(&module_relative).join("__init__.py");
        if !init_py.is_file() {
            return Err(Error::MissingInitPy(init_py));
        }
        Ok(module_relative)
    }
}

/// Determine the relative module path from an explicit module name.
fn module_path_from_module_name(src_root: &Path, module_name: &str) -> Result<PathBuf, Error> {
    // This name can be uppercase.
    let module_relative = module_name.split('.').collect::<PathBuf>();

    // Check if we have a regular module or a namespace.
    let (root_name, namespace_segments) =
        if let Some((root_name, namespace_segments)) = module_name.split_once('.') {
            (
                root_name,
                namespace_segments.split('.').collect::<Vec<&str>>(),
            )
        } else {
            (module_name, Vec::new())
        };

    // Check if we have an implementation or a stubs package.
    // For stubs for a namespace, the `-stubs` prefix must be on the root.
    let stubs = if let Some(stem) = root_name.strip_suffix("-stubs") {
        // Check that the stubs belong to a valid module.
        Identifier::from_str(stem)
            .map_err(|err| Error::InvalidModuleName(module_name.to_string(), err))?;
        true
    } else {
        Identifier::from_str(root_name)
            .map_err(|err| Error::InvalidModuleName(module_name.to_string(), err))?;
        false
    };

    let dir = match fs_err::read_dir(src_root) {
        Ok(dir_iterator) => dir_iterator.collect::<Result<Vec<_>, _>>()?,
        Err(err) if err.kind() == io::ErrorKind::NotFound => {
            return Err(Error::MissingSrc(src_root.to_path_buf()));
        }
        Err(err) => return Err(Error::Io(err)),
    };
    let module_root = dir.iter().find_map(|entry| {
        // TODO(konsti): Do we ever need to check if `dir/{module_name}/__init__.py` exists because
        // the wrong casing may be recorded on disk?
        if entry
            .file_name()
            .to_str()
            .is_some_and(|file_name| file_name == module_name)
    // For a namespace, check that all names below the root are valid.
    for segment in namespace_segments {
        Identifier::from_str(segment)
            .map_err(|err| Error::InvalidModuleName(module_name.to_string(), err))?;
    }

    // Check that an `__init__.py[i]` exists for the module.
    let init_py = src_root
        .join(&module_relative)
        .join(if stubs { "__init__.pyi" } else { "__init__.py" });
    if !init_py.is_file() {
        return Err(Error::MissingInitPy(init_py));
    }

    // For a namespace, check that the directories above the lowest are namespace directories.
    for namespace_dir in module_relative.ancestors().skip(1) {
        if src_root.join(namespace_dir).join("__init__.py").exists()
            || src_root.join(namespace_dir).join("__init__.pyi").exists()
        {
            Some(entry.path())
        } else {
            None
            return Err(Error::NotANamespace(src_root.join(namespace_dir)));
        }
    });
    let init_py = if stubs { "__init__.pyi" } else { "__init__.py" };
    let module_root = if let Some(module_root) = module_root {
        if module_root.join(init_py).is_file() {
            module_root.clone()
        } else {
            return Err(Error::MissingInitPy(module_root.join(init_py)));
        }
    } else {
        return Err(Error::MissingModuleDir {
            module_name,
            src_root: src_root.to_path_buf(),
            dir_listing: dir
                .into_iter()
                .filter_map(|entry| Some(entry.file_name().to_str()?.to_string()))
                .collect(),
        });
    };
    }

    debug!("Module name: `{}`", module_name);
    Ok(module_root)
    Ok(module_relative)
}
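
For illustration, a minimal sketch of the dist-info normalization rule the doc comments above rely on: lowercase, then map `.` and `-` to `_`. The `dist_info_normalized` helper is hypothetical, not uv's API; in the code above this is done via `PackageName::as_dist_info_name`.

    fn dist_info_normalized(package_name: &str) -> String {
        // Lowercase, then replace the separators `.` and `-` with `_`.
        package_name.to_lowercase().replace(['.', '-'], "_")
    }

    fn main() {
        // `PIL-util` and `pil.util` both normalize to `pil_util`, which is why a
        // plain string comparison against the module name suffices.
        assert_eq!(dist_info_normalized("PIL-util"), "pil_util");
        assert_eq!(dist_info_normalized("pil.util"), "pil_util");
    }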
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -845,7 +869,7 @@ mod tests {
|
|||
.replace('\\', "/");
|
||||
assert_snapshot!(
|
||||
err_message,
|
||||
@"Missing module directory for `camel_case` in `[TEMP_PATH]/src`. Found: `camelCase`"
|
||||
@"Expected a Python module at: `[TEMP_PATH]/src/camel_case/__init__.py`"
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -872,14 +896,10 @@ mod tests {
|
|||
let err_message = format_err(&build_err);
|
||||
assert_snapshot!(
|
||||
err_message,
|
||||
@r#"
|
||||
Invalid pyproject.toml
|
||||
Caused by: TOML parse error at line 10, column 15
|
||||
|
|
||||
10 | module-name = "django@home-stubs"
|
||||
| ^^^^^^^^^^^^^^^^^^^
|
||||
Invalid character `@` at position 7 for identifier `django@home`, expected an underscore or an alphanumeric character
|
||||
"#
|
||||
@r"
|
||||
Invalid module name: django@home-stubs
|
||||
Caused by: Invalid character `@` at position 7 for identifier `django@home`, expected an underscore or an alphanumeric character
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -914,7 +934,7 @@ mod tests {
|
|||
.replace('\\', "/");
|
||||
assert_snapshot!(
|
||||
err_message,
|
||||
@"Expected a Python module directory at: `[TEMP_PATH]/src/stuffed_bird-stubs/__init__.pyi`"
|
||||
@"Expected a Python module at: `[TEMP_PATH]/src/stuffed_bird-stubs/__init__.pyi`"
|
||||
);
|
||||
|
||||
// Create the correct file
|
||||
|
@ -956,4 +976,237 @@ mod tests {
|
|||
let build2 = build(src.path(), dist.path()).unwrap();
|
||||
assert_eq!(build1.wheel_contents, build2.wheel_contents);
|
||||
}
|
||||
|
||||
/// A simple namespace package with a single root `__init__.py`.
|
||||
#[test]
|
||||
fn simple_namespace_package() {
|
||||
let src = TempDir::new().unwrap();
|
||||
let pyproject_toml = indoc! {r#"
|
||||
[project]
|
||||
name = "simple-namespace-part"
|
||||
version = "1.0.0"
|
||||
|
||||
[tool.uv.build-backend]
|
||||
module-name = "simple_namespace.part"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
};
|
||||
fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap();
|
||||
fs_err::create_dir_all(src.path().join("src").join("simple_namespace").join("part"))
|
||||
.unwrap();
|
||||
|
||||
let dist = TempDir::new().unwrap();
|
||||
let build_err = build(src.path(), dist.path()).unwrap_err();
|
||||
let err_message = format_err(&build_err)
|
||||
.replace(&src.path().user_display().to_string(), "[TEMP_PATH]")
|
||||
.replace('\\', "/");
|
||||
assert_snapshot!(
|
||||
err_message,
|
||||
@"Expected a Python module at: `[TEMP_PATH]/src/simple_namespace/part/__init__.py`"
|
||||
);
|
||||
|
||||
// Create the correct file
|
||||
File::create(
|
||||
src.path()
|
||||
.join("src")
|
||||
.join("simple_namespace")
|
||||
.join("part")
|
||||
.join("__init__.py"),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
// For a namespace package, there must not be an `__init__.py` here.
|
||||
let bogus_init_py = src
|
||||
.path()
|
||||
.join("src")
|
||||
.join("simple_namespace")
|
||||
.join("__init__.py");
|
||||
File::create(&bogus_init_py).unwrap();
|
||||
let build_err = build(src.path(), dist.path()).unwrap_err();
|
||||
let err_message = format_err(&build_err)
|
||||
.replace(&src.path().user_display().to_string(), "[TEMP_PATH]")
|
||||
.replace('\\', "/");
|
||||
assert_snapshot!(
|
||||
err_message,
|
||||
@"For namespace packages, `__init__.py[i]` is not allowed in parent directory: `[TEMP_PATH]/src/simple_namespace`"
|
||||
);
|
||||
fs_err::remove_file(bogus_init_py).unwrap();
|
||||
|
||||
let build1 = build(src.path(), dist.path()).unwrap();
|
||||
assert_snapshot!(build1.source_dist_contents.join("\n"), @r"
|
||||
simple_namespace_part-1.0.0/
|
||||
simple_namespace_part-1.0.0/PKG-INFO
|
||||
simple_namespace_part-1.0.0/pyproject.toml
|
||||
simple_namespace_part-1.0.0/src
|
||||
simple_namespace_part-1.0.0/src/simple_namespace
|
||||
simple_namespace_part-1.0.0/src/simple_namespace/part
|
||||
simple_namespace_part-1.0.0/src/simple_namespace/part/__init__.py
|
||||
");
|
||||
assert_snapshot!(build1.wheel_contents.join("\n"), @r"
|
||||
simple_namespace/
|
||||
simple_namespace/part/
|
||||
simple_namespace/part/__init__.py
|
||||
simple_namespace_part-1.0.0.dist-info/
|
||||
simple_namespace_part-1.0.0.dist-info/METADATA
|
||||
simple_namespace_part-1.0.0.dist-info/RECORD
|
||||
simple_namespace_part-1.0.0.dist-info/WHEEL
|
||||
");
|
||||
|
||||
// Check that `namespace = true` works too.
|
||||
let pyproject_toml = indoc! {r#"
|
||||
[project]
|
||||
name = "simple-namespace-part"
|
||||
version = "1.0.0"
|
||||
|
||||
[tool.uv.build-backend]
|
||||
module-name = "simple_namespace.part"
|
||||
namespace = true
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
};
|
||||
fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap();
|
||||
|
||||
let build2 = build(src.path(), dist.path()).unwrap();
|
||||
assert_eq!(build1, build2);
|
||||
}
|
||||
|
||||
/// A complex namespace package with multiple root `__init__.py` files.
|
||||
#[test]
|
||||
fn complex_namespace_package() {
|
||||
let src = TempDir::new().unwrap();
|
||||
let pyproject_toml = indoc! {r#"
|
||||
[project]
|
||||
name = "complex-namespace"
|
||||
version = "1.0.0"
|
||||
|
||||
[tool.uv.build-backend]
|
||||
namespace = true
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
};
|
||||
fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap();
|
||||
fs_err::create_dir_all(
|
||||
src.path()
|
||||
.join("src")
|
||||
.join("complex_namespace")
|
||||
.join("part_a"),
|
||||
)
|
||||
.unwrap();
|
||||
File::create(
|
||||
src.path()
|
||||
.join("src")
|
||||
.join("complex_namespace")
|
||||
.join("part_a")
|
||||
.join("__init__.py"),
|
||||
)
|
||||
.unwrap();
|
||||
fs_err::create_dir_all(
|
||||
src.path()
|
||||
.join("src")
|
||||
.join("complex_namespace")
|
||||
.join("part_b"),
|
||||
)
|
||||
.unwrap();
|
||||
File::create(
|
||||
src.path()
|
||||
.join("src")
|
||||
.join("complex_namespace")
|
||||
.join("part_b")
|
||||
.join("__init__.py"),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let dist = TempDir::new().unwrap();
|
||||
let build1 = build(src.path(), dist.path()).unwrap();
|
||||
assert_snapshot!(build1.wheel_contents.join("\n"), @r"
|
||||
complex_namespace-1.0.0.dist-info/
|
||||
complex_namespace-1.0.0.dist-info/METADATA
|
||||
complex_namespace-1.0.0.dist-info/RECORD
|
||||
complex_namespace-1.0.0.dist-info/WHEEL
|
||||
complex_namespace/
|
||||
complex_namespace/part_a/
|
||||
complex_namespace/part_a/__init__.py
|
||||
complex_namespace/part_b/
|
||||
complex_namespace/part_b/__init__.py
|
||||
");
|
||||
|
||||
// Check that setting the name manually works equally.
|
||||
let pyproject_toml = indoc! {r#"
|
||||
[project]
|
||||
name = "complex-namespace"
|
||||
version = "1.0.0"
|
||||
|
||||
[tool.uv.build-backend]
|
||||
module-name = "complex_namespace"
|
||||
namespace = true
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
};
|
||||
fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap();
|
||||
|
||||
let build2 = build(src.path(), dist.path()).unwrap();
|
||||
assert_eq!(build1, build2);
|
||||
}
|
||||
|
||||
/// Stubs for a namespace package.
|
||||
#[test]
|
||||
fn stubs_namespace() {
|
||||
let src = TempDir::new().unwrap();
|
||||
let pyproject_toml = indoc! {r#"
|
||||
[project]
|
||||
name = "cloud.db.schema-stubs"
|
||||
version = "1.0.0"
|
||||
|
||||
[tool.uv.build-backend]
|
||||
module-name = "cloud-stubs.db.schema"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
};
|
||||
fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap();
|
||||
fs_err::create_dir_all(
|
||||
src.path()
|
||||
.join("src")
|
||||
.join("cloud-stubs")
|
||||
.join("db")
|
||||
.join("schema"),
|
||||
)
|
||||
.unwrap();
|
||||
File::create(
|
||||
src.path()
|
||||
.join("src")
|
||||
.join("cloud-stubs")
|
||||
.join("db")
|
||||
.join("schema")
|
||||
.join("__init__.pyi"),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let dist = TempDir::new().unwrap();
|
||||
let build = build(src.path(), dist.path()).unwrap();
|
||||
assert_snapshot!(build.wheel_contents.join("\n"), @r"
|
||||
cloud-stubs/
|
||||
cloud-stubs/db/
|
||||
cloud-stubs/db/schema/
|
||||
cloud-stubs/db/schema/__init__.pyi
|
||||
cloud_db_schema_stubs-1.0.0.dist-info/
|
||||
cloud_db_schema_stubs-1.0.0.dist-info/METADATA
|
||||
cloud_db_schema_stubs-1.0.0.dist-info/RECORD
|
||||
cloud_db_schema_stubs-1.0.0.dist-info/WHEEL
|
||||
");
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,16 +1,9 @@
|
|||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
use std::fmt::Display;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
use uv_macros::OptionsMetadata;
|
||||
use uv_pypi_types::Identifier;
|
||||
|
||||
/// Settings for the uv build backend (`uv_build`).
|
||||
///
|
||||
/// !!! note
|
||||
///
|
||||
/// The uv build backend is currently in preview and may change in any future release.
|
||||
///
|
||||
/// Note that those settings only apply when using the `uv_build` backend, other build backends
|
||||
/// (such as hatchling) have their own configuration.
|
||||
///
|
||||
|
@ -38,6 +31,9 @@ pub struct BuildBackendSettings {
|
|||
/// `__init__.py`. An exception is stubs packages, whose name ends with `-stubs`, with the stem
/// being the module name, and which contain an `__init__.pyi` file.
|
||||
///
|
||||
/// For namespace packages with a single module, the path can be dotted, e.g., `foo.bar` or
|
||||
/// `foo-stubs.bar`.
|
||||
///
|
||||
/// Note that using this option runs the risk of creating two packages with different names but
|
||||
/// the same module names. Installing such packages together leads to unspecified behavior,
|
||||
/// often with corrupted files or directory trees.
|
||||
|
@ -46,7 +42,7 @@ pub struct BuildBackendSettings {
|
|||
value_type = "str",
|
||||
example = r#"module-name = "sklearn""#
|
||||
)]
|
||||
pub module_name: Option<ModuleName>,
|
||||
pub module_name: Option<String>,
|
||||
|
||||
/// Glob expressions selecting which files and directories to additionally include in the source
/// distribution.
|
||||
|
@ -85,6 +81,56 @@ pub struct BuildBackendSettings {
|
|||
)]
|
||||
pub wheel_exclude: Vec<String>,
|
||||
|
||||
/// Build a namespace package.
|
||||
///
|
||||
/// Build a PEP 420 implicit namespace package, allowing more than one root `__init__.py`.
|
||||
///
|
||||
/// Use this option when the namespace package contains multiple root `__init__.py` files; for
/// namespace packages with a single root `__init__.py`, use a dotted `module-name` instead.
|
||||
///
|
||||
/// To compare dotted `module-name` and `namespace = true`: the first example below can be
/// expressed with `module-name = "cloud.database"`, since there is a single root `__init__.py`,
/// in `database`. In the second example, there are three roots (`cloud.database`,
/// `cloud.database_pro`, `billing.modules.database_pro`), so `namespace = true` is required.
|
||||
///
|
||||
/// ```text
|
||||
/// src
|
||||
/// └── cloud
|
||||
/// └── database
|
||||
/// ├── __init__.py
|
||||
/// ├── query_builder
|
||||
/// │ └── __init__.py
|
||||
/// └── sql
|
||||
/// ├── parser.py
|
||||
/// └── __init__.py
|
||||
/// ```
|
||||
///
|
||||
/// ```text
|
||||
/// src
|
||||
/// ├── cloud
|
||||
/// │ ├── database
|
||||
/// │ │ ├── __init__.py
|
||||
/// │ │ ├── query_builder
|
||||
/// │ │ │ └── __init__.py
|
||||
/// │ │ └── sql
|
||||
/// │ │ ├── __init__.py
|
||||
/// │ │ └── parser.py
|
||||
/// │ └── database_pro
|
||||
/// │ ├── __init__.py
|
||||
/// │ └── query_builder.py
|
||||
/// └── billing
|
||||
/// └── modules
|
||||
/// └── database_pro
|
||||
/// ├── __init__.py
|
||||
/// └── sql.py
|
||||
/// ```
|
||||
#[option(
|
||||
default = r#"false"#,
|
||||
value_type = "bool",
|
||||
example = r#"namespace = true"#
|
||||
)]
|
||||
pub namespace: bool,
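
As a rough illustration of the dotted `module-name` case described above, a dotted name maps onto a nested directory below the source root; this mirrors the `split('.')` logic in `find_roots`, with the standalone helper being hypothetical:

    use std::path::PathBuf;

    // A dotted module name maps to a nested relative path,
    // e.g. `cloud.database` -> `cloud/database`.
    fn module_relative_path(module_name: &str) -> PathBuf {
        module_name.split('.').collect::<PathBuf>()
    }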
|
||||
|
||||
/// Data includes for wheels.
|
||||
///
|
||||
/// Each entry is a directory, whose contents are copied to the matching directory in the wheel
|
||||
|
@ -129,88 +175,12 @@ impl Default for BuildBackendSettings {
|
|||
default_excludes: true,
|
||||
source_exclude: Vec::new(),
|
||||
wheel_exclude: Vec::new(),
|
||||
namespace: false,
|
||||
data: WheelDataIncludes::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Packages come in two kinds: regular packages, where the name must be a valid Python identifier,
/// and stubs packages.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum ModuleName {
|
||||
/// A Python module name, which needs to be a valid Python identifier to be used with `import`.
|
||||
Identifier(Identifier),
|
||||
/// A type stubs package, whose name ends with `-stubs` with the stem being the module name.
|
||||
Stubs(String),
|
||||
}
|
||||
|
||||
impl Display for ModuleName {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
ModuleName::Identifier(module_name) => Display::fmt(module_name, f),
|
||||
ModuleName::Stubs(module_name) => Display::fmt(module_name, f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for ModuleName {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let module_name = String::deserialize(deserializer)?;
|
||||
if let Some(stem) = module_name.strip_suffix("-stubs") {
|
||||
// Check that the stubs belong to a valid module.
|
||||
Identifier::from_str(stem)
|
||||
.map(ModuleName::Identifier)
|
||||
.map_err(serde::de::Error::custom)?;
|
||||
Ok(ModuleName::Stubs(module_name))
|
||||
} else {
|
||||
Identifier::from_str(&module_name)
|
||||
.map(ModuleName::Identifier)
|
||||
.map_err(serde::de::Error::custom)
|
||||
}
|
||||
}
|
||||
}
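
A condensed illustration of the rule the deserializer above enforces, with a simplified identifier check standing in for uv's `Identifier` type (the helper is hypothetical):

    // A module name is either a valid identifier, or a valid identifier
    // followed by `-stubs` for a type stubs package.
    fn is_valid_module_name(name: &str) -> bool {
        let stem = name.strip_suffix("-stubs").unwrap_or(name);
        let mut chars = stem.chars();
        matches!(chars.next(), Some(c) if c == '_' || c.is_alphabetic())
            && chars.all(|c| c == '_' || c.is_alphanumeric())
    }

This corresponds to the JSON schema pattern declared below, `^[_\p{Alphabetic}][_0-9\p{Alphabetic}]*(-stubs)?$`.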
|
||||
|
||||
impl Serialize for ModuleName {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
match self {
|
||||
ModuleName::Identifier(module_name) => module_name.serialize(serializer),
|
||||
ModuleName::Stubs(module_name) => module_name.serialize(serializer),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "schemars")]
|
||||
impl schemars::JsonSchema for ModuleName {
|
||||
fn schema_name() -> String {
|
||||
"ModuleName".to_string()
|
||||
}
|
||||
|
||||
fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
|
||||
schemars::schema::SchemaObject {
|
||||
instance_type: Some(schemars::schema::InstanceType::String.into()),
|
||||
string: Some(Box::new(schemars::schema::StringValidation {
|
||||
// Best-effort Unicode support (https://stackoverflow.com/a/68844380/3549270)
|
||||
pattern: Some(r"^[_\p{Alphabetic}][_0-9\p{Alphabetic}]*(-stubs)?$".to_string()),
|
||||
..schemars::schema::StringValidation::default()
|
||||
})),
|
||||
metadata: Some(Box::new(schemars::schema::Metadata {
|
||||
description: Some(
|
||||
"The name of the module, or the name of a stubs package".to_string(),
|
||||
),
|
||||
..schemars::schema::Metadata::default()
|
||||
})),
|
||||
..schemars::schema::SchemaObject::default()
|
||||
}
|
||||
.into()
|
||||
}
|
||||
}
|
||||
|
||||
/// Data includes for wheels.
|
||||
///
|
||||
/// See `BuildBackendSettings::data`.
|
||||
|
|
|
@ -68,16 +68,18 @@ fn source_dist_matcher(
|
|||
includes.push(globset::escape("pyproject.toml"));
|
||||
|
||||
// Check that the source tree contains a module.
|
||||
let (_, module_root) = find_roots(
|
||||
let (src_root, module_relative) = find_roots(
|
||||
source_tree,
|
||||
pyproject_toml,
|
||||
&settings.module_root,
|
||||
settings.module_name.as_ref(),
|
||||
settings.module_name.as_deref(),
|
||||
settings.namespace,
|
||||
)?;
|
||||
// The wheel must not include any files included by the source distribution (at least until we
|
||||
// have files generated in the source dist -> wheel build step).
|
||||
let import_path = uv_fs::normalize_path(
|
||||
&uv_fs::relative_to(module_root, source_tree).expect("module root is inside source tree"),
|
||||
&uv_fs::relative_to(src_root.join(module_relative), source_tree)
|
||||
.expect("module root is inside source tree"),
|
||||
)
|
||||
.portable_display()
|
||||
.to_string();
|
||||
|
@ -248,32 +250,16 @@ fn write_source_dist(
|
|||
.expect("walkdir starts with root");
|
||||
|
||||
if !include_matcher.match_path(relative) || exclude_matcher.is_match(relative) {
|
||||
trace!("Excluding: `{}`", relative.user_display());
|
||||
trace!("Excluding from sdist: `{}`", relative.user_display());
|
||||
continue;
|
||||
}
|
||||
|
||||
debug!("Including {}", relative.user_display());
|
||||
if entry.file_type().is_dir() {
|
||||
writer.write_directory(
|
||||
&Path::new(&top_level)
|
||||
.join(relative)
|
||||
.portable_display()
|
||||
.to_string(),
|
||||
)?;
|
||||
} else if entry.file_type().is_file() {
|
||||
writer.write_file(
|
||||
&Path::new(&top_level)
|
||||
.join(relative)
|
||||
.portable_display()
|
||||
.to_string(),
|
||||
entry.path(),
|
||||
)?;
|
||||
} else {
|
||||
return Err(Error::UnsupportedFileType(
|
||||
relative.to_path_buf(),
|
||||
entry.file_type(),
|
||||
));
|
||||
}
|
||||
let entry_path = Path::new(&top_level)
|
||||
.join(relative)
|
||||
.portable_display()
|
||||
.to_string();
|
||||
debug!("Adding to sdist: {}", relative.user_display());
|
||||
writer.write_dir_entry(&entry, &entry_path)?;
|
||||
}
|
||||
debug!("Visited {files_visited} files for source dist build");
|
||||
|
||||
|
|
|
@ -17,8 +17,7 @@ use uv_warnings::warn_user_once;
|
|||
|
||||
use crate::metadata::DEFAULT_EXCLUDES;
|
||||
use crate::{
|
||||
BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml,
|
||||
find_module_root, find_roots,
|
||||
BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml, find_roots,
|
||||
};
|
||||
|
||||
/// Build a wheel from the source tree and place it in the output directory.
|
||||
|
@ -124,15 +123,24 @@ fn write_wheel(
|
|||
let exclude_matcher = build_exclude_matcher(excludes)?;
|
||||
|
||||
debug!("Adding content files to wheel");
|
||||
let (src_root, module_root) = find_roots(
|
||||
let (src_root, module_relative) = find_roots(
|
||||
source_tree,
|
||||
pyproject_toml,
|
||||
&settings.module_root,
|
||||
settings.module_name.as_ref(),
|
||||
settings.module_name.as_deref(),
|
||||
settings.namespace,
|
||||
)?;
|
||||
|
||||
// For convenience, have directories for the whole tree in the wheel
|
||||
for ancestor in module_relative.ancestors().skip(1) {
|
||||
if ancestor == Path::new("") {
|
||||
continue;
|
||||
}
|
||||
wheel_writer.write_directory(&ancestor.portable_display().to_string())?;
|
||||
}
|
||||
|
||||
let mut files_visited = 0;
|
||||
for entry in WalkDir::new(module_root)
|
||||
for entry in WalkDir::new(src_root.join(module_relative))
|
||||
.sort_by_file_name()
|
||||
.into_iter()
|
||||
.filter_entry(|entry| !exclude_matcher.is_match(entry.path()))
|
||||
|
@ -156,7 +164,7 @@ fn write_wheel(
|
|||
.path()
|
||||
.strip_prefix(source_tree)
|
||||
.expect("walkdir starts with root");
|
||||
let wheel_path = entry
|
||||
let entry_path = entry
|
||||
.path()
|
||||
.strip_prefix(&src_root)
|
||||
.expect("walkdir starts with root");
|
||||
|
@ -164,21 +172,10 @@ fn write_wheel(
|
|||
trace!("Excluding from module: `{}`", match_path.user_display());
|
||||
continue;
|
||||
}
|
||||
let wheel_path = wheel_path.portable_display().to_string();
|
||||
|
||||
debug!("Adding to wheel: `{wheel_path}`");
|
||||
|
||||
if entry.file_type().is_dir() {
|
||||
wheel_writer.write_directory(&wheel_path)?;
|
||||
} else if entry.file_type().is_file() {
|
||||
wheel_writer.write_file(&wheel_path, entry.path())?;
|
||||
} else {
|
||||
// TODO(konsti): We may want to support symlinks, there is support for installing them.
|
||||
return Err(Error::UnsupportedFileType(
|
||||
entry.path().to_path_buf(),
|
||||
entry.file_type(),
|
||||
));
|
||||
}
|
||||
let entry_path = entry_path.portable_display().to_string();
|
||||
debug!("Adding to wheel: {entry_path}");
|
||||
wheel_writer.write_dir_entry(&entry, &entry_path)?;
|
||||
}
|
||||
debug!("Visited {files_visited} files for wheel build");
|
||||
|
||||
|
@ -267,16 +264,13 @@ pub fn build_editable(
|
|||
let mut wheel_writer = ZipDirectoryWriter::new_wheel(File::create(&wheel_path)?);
|
||||
|
||||
debug!("Adding pth file to {}", wheel_path.user_display());
|
||||
let src_root = source_tree.join(&settings.module_root);
|
||||
if !src_root.starts_with(source_tree) {
|
||||
return Err(Error::InvalidModuleRoot(settings.module_root.clone()));
|
||||
}
|
||||
|
||||
// Check that a module root exists in the directory we're linking from the `.pth` file
|
||||
find_module_root(
|
||||
&src_root,
|
||||
settings.module_name.as_ref(),
|
||||
pyproject_toml.name(),
|
||||
let (src_root, _module_relative) = find_roots(
|
||||
source_tree,
|
||||
&pyproject_toml,
|
||||
&settings.module_root,
|
||||
settings.module_name.as_deref(),
|
||||
settings.namespace,
|
||||
)?;
|
||||
|
||||
wheel_writer.write_bytes(
|
||||
|
@ -514,23 +508,12 @@ fn wheel_subdir_from_globs(
|
|||
continue;
|
||||
}
|
||||
|
||||
let relative_licenses = Path::new(target)
|
||||
let license_path = Path::new(target)
|
||||
.join(relative)
|
||||
.portable_display()
|
||||
.to_string();
|
||||
|
||||
if entry.file_type().is_dir() {
|
||||
wheel_writer.write_directory(&relative_licenses)?;
|
||||
} else if entry.file_type().is_file() {
|
||||
debug!("Adding {} file: `{}`", globs_field, relative.user_display());
|
||||
wheel_writer.write_file(&relative_licenses, entry.path())?;
|
||||
} else {
|
||||
// TODO(konsti): We may want to support symlinks, there is support for installing them.
|
||||
return Err(Error::UnsupportedFileType(
|
||||
entry.path().to_path_buf(),
|
||||
entry.file_type(),
|
||||
));
|
||||
}
|
||||
debug!("Adding for {}: `{}`", globs_field, relative.user_display());
|
||||
wheel_writer.write_dir_entry(&entry, &license_path)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -17,6 +17,7 @@ doctest = false
|
|||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
uv-cache-key = { workspace = true }
|
||||
uv-configuration = { workspace = true }
|
||||
uv-distribution = { workspace = true }
|
||||
uv-distribution-types = { workspace = true }
|
||||
|
|
|
@ -25,11 +25,14 @@ use tempfile::TempDir;
|
|||
use tokio::io::AsyncBufReadExt;
|
||||
use tokio::process::Command;
|
||||
use tokio::sync::{Mutex, Semaphore};
|
||||
use tracing::{Instrument, debug, info_span, instrument};
|
||||
use tracing::{Instrument, debug, info_span, instrument, warn};
|
||||
|
||||
use uv_cache_key::cache_digest;
|
||||
use uv_configuration::PreviewMode;
|
||||
use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, SourceStrategy};
|
||||
use uv_distribution::BuildRequires;
|
||||
use uv_distribution_types::{IndexLocations, Requirement, Resolution};
|
||||
use uv_fs::LockedFile;
|
||||
use uv_fs::{PythonExt, Simplified};
|
||||
use uv_pep440::Version;
|
||||
use uv_pep508::PackageName;
|
||||
|
@ -200,6 +203,11 @@ impl Pep517Backend {
|
|||
{import}
|
||||
"#, backend_path = backend_path_encoded}
|
||||
}
|
||||
|
||||
fn is_setuptools(&self) -> bool {
|
||||
// either `setuptools.build_meta` or `setuptools.build_meta:__legacy__`
|
||||
self.backend.split(':').next() == Some("setuptools.build_meta")
|
||||
}
|
||||
}
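
A self-contained sketch of the check above: both the modern and the legacy setuptools backends share the `setuptools.build_meta` prefix before the optional `:` object reference (written as a free function here for illustration; uv implements it as a method on `Pep517Backend`):

    fn is_setuptools(backend: &str) -> bool {
        // Matches `setuptools.build_meta` and `setuptools.build_meta:__legacy__`.
        backend.split(':').next() == Some("setuptools.build_meta")
    }

    fn main() {
        assert!(is_setuptools("setuptools.build_meta"));
        assert!(is_setuptools("setuptools.build_meta:__legacy__"));
        assert!(!is_setuptools("hatchling.build"));
    }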
|
||||
|
||||
/// Uses an [`Rc`] internally, clone freely.
|
||||
|
@ -278,6 +286,7 @@ impl SourceBuild {
|
|||
mut environment_variables: FxHashMap<OsString, OsString>,
|
||||
level: BuildOutput,
|
||||
concurrent_builds: usize,
|
||||
preview: PreviewMode,
|
||||
) -> Result<Self, Error> {
|
||||
let temp_dir = build_context.cache().venv_dir()?;
|
||||
|
||||
|
@ -325,6 +334,8 @@ impl SourceBuild {
|
|||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
preview,
|
||||
)?
|
||||
};
|
||||
|
||||
|
@ -430,6 +441,31 @@ impl SourceBuild {
|
|||
})
|
||||
}
|
||||
|
||||
/// Acquire a lock on the source tree, if necessary.
|
||||
async fn acquire_lock(&self) -> Result<Option<LockedFile>, Error> {
|
||||
// Depending on the command, setuptools puts `*.egg-info`, `build/`, and `dist/` in the
|
||||
// source tree, and concurrent invocations of setuptools using the same source dir can
|
||||
// stomp on each other. We need to lock something to fix that, but we don't want to dump a
|
||||
// `.lock` file into the source tree that the user will need to .gitignore. Take a global
|
||||
// proxy lock instead.
|
||||
let mut source_tree_lock = None;
|
||||
if self.pep517_backend.is_setuptools() {
|
||||
debug!("Locking the source tree for setuptools");
|
||||
let canonical_source_path = self.source_tree.canonicalize()?;
|
||||
let lock_path = env::temp_dir().join(format!(
|
||||
"uv-setuptools-{}.lock",
|
||||
cache_digest(&canonical_source_path)
|
||||
));
|
||||
source_tree_lock = LockedFile::acquire(lock_path, self.source_tree.to_string_lossy())
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
warn!("Failed to acquire build lock: {err}");
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
Ok(source_tree_lock)
|
||||
}
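
The lock path scheme above keeps the lock file out of the user's source tree by keying a digest of the canonicalized source path into the system temp directory. A reduced sketch, with the digest stubbed as a caller-supplied string (the helper is hypothetical, not uv's API):

    use std::env;
    use std::path::PathBuf;

    // Derive the proxy lock path for a source tree whose canonical path has
    // already been hashed into `source_digest`.
    fn proxy_lock_path(source_digest: &str) -> PathBuf {
        env::temp_dir().join(format!("uv-setuptools-{source_digest}.lock"))
    }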
|
||||
|
||||
async fn get_resolved_requirements(
|
||||
build_context: &impl BuildContext,
|
||||
source_build_context: SourceBuildContext,
|
||||
|
@ -600,6 +636,9 @@ impl SourceBuild {
|
|||
return Ok(Some(metadata_dir.clone()));
|
||||
}
|
||||
|
||||
// Lock the source tree, if necessary.
|
||||
let _lock = self.acquire_lock().await?;
|
||||
|
||||
// Hatch allows for highly dynamic customization of metadata via hooks. In such cases, Hatch
|
||||
// can't uphold the PEP 517 contract, in that the metadata Hatch would return by
|
||||
// `prepare_metadata_for_build_wheel` isn't guaranteed to match that of the built wheel.
|
||||
|
@ -712,16 +751,15 @@ impl SourceBuild {
|
|||
pub async fn build(&self, wheel_dir: &Path) -> Result<String, Error> {
|
||||
// The build scripts run with the extracted root as cwd, so they need the absolute path.
|
||||
let wheel_dir = std::path::absolute(wheel_dir)?;
|
||||
let filename = self.pep517_build(&wheel_dir, &self.pep517_backend).await?;
|
||||
let filename = self.pep517_build(&wheel_dir).await?;
|
||||
Ok(filename)
|
||||
}
|
||||
|
||||
/// Perform a PEP 517 build for a wheel or source distribution (sdist).
|
||||
async fn pep517_build(
|
||||
&self,
|
||||
output_dir: &Path,
|
||||
pep517_backend: &Pep517Backend,
|
||||
) -> Result<String, Error> {
|
||||
async fn pep517_build(&self, output_dir: &Path) -> Result<String, Error> {
|
||||
// Lock the source tree, if necessary.
|
||||
let _lock = self.acquire_lock().await?;
|
||||
|
||||
// Write the hook output to a file so that we can read it back reliably.
|
||||
let outfile = self
|
||||
.temp_dir
|
||||
|
@ -733,7 +771,7 @@ impl SourceBuild {
|
|||
BuildKind::Sdist => {
|
||||
debug!(
|
||||
r#"Calling `{}.build_{}("{}", {})`"#,
|
||||
pep517_backend.backend,
|
||||
self.pep517_backend.backend,
|
||||
self.build_kind,
|
||||
output_dir.escape_for_python(),
|
||||
self.config_settings.escape_for_python(),
|
||||
|
@ -746,7 +784,7 @@ impl SourceBuild {
|
|||
with open("{}", "w") as fp:
|
||||
fp.write(sdist_filename)
|
||||
"#,
|
||||
pep517_backend.backend_import(),
|
||||
self.pep517_backend.backend_import(),
|
||||
self.build_kind,
|
||||
output_dir.escape_for_python(),
|
||||
self.config_settings.escape_for_python(),
|
||||
|
@ -762,7 +800,7 @@ impl SourceBuild {
|
|||
});
|
||||
debug!(
|
||||
r#"Calling `{}.build_{}("{}", {}, {})`"#,
|
||||
pep517_backend.backend,
|
||||
self.pep517_backend.backend,
|
||||
self.build_kind,
|
||||
output_dir.escape_for_python(),
|
||||
self.config_settings.escape_for_python(),
|
||||
|
@ -776,7 +814,7 @@ impl SourceBuild {
|
|||
with open("{}", "w") as fp:
|
||||
fp.write(wheel_filename)
|
||||
"#,
|
||||
pep517_backend.backend_import(),
|
||||
self.pep517_backend.backend_import(),
|
||||
self.build_kind,
|
||||
output_dir.escape_for_python(),
|
||||
self.config_settings.escape_for_python(),
|
||||
|
@ -806,7 +844,7 @@ impl SourceBuild {
|
|||
return Err(Error::from_command_output(
|
||||
format!(
|
||||
"Call to `{}.build_{}` failed",
|
||||
pep517_backend.backend, self.build_kind
|
||||
self.pep517_backend.backend, self.build_kind
|
||||
),
|
||||
&output,
|
||||
self.level,
|
||||
|
@ -821,7 +859,7 @@ impl SourceBuild {
|
|||
return Err(Error::from_command_output(
|
||||
format!(
|
||||
"Call to `{}.build_{}` failed",
|
||||
pep517_backend.backend, self.build_kind
|
||||
self.pep517_backend.backend, self.build_kind
|
||||
),
|
||||
&output,
|
||||
self.level,
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "uv-build"
|
||||
version = "0.7.12"
|
||||
version = "0.7.19"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
homepage.workspace = true
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[project]
|
||||
name = "uv-build"
|
||||
version = "0.7.12"
|
||||
version = "0.7.19"
|
||||
description = "The uv build backend"
|
||||
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
|
||||
requires-python = ">=3.8"
|
||||
|
|
crates/uv-build/ruff.toml (new file, +2)
|
@ -0,0 +1,2 @@
|
|||
# It is important to retain compatibility with old versions in the build backend
|
||||
target-version = "py37"
|
|
@ -13,7 +13,6 @@ pub trait CompatArgs {
|
|||
/// For example, users often pass `--allow-unsafe`, which is unnecessary with uv. But it's a
|
||||
/// nice user experience to warn, rather than fail, when users pass `--allow-unsafe`.
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PipCompileCompatArgs {
|
||||
#[clap(long, hide = true)]
|
||||
allow_unsafe: bool,
|
||||
|
@ -159,7 +158,6 @@ impl CompatArgs for PipCompileCompatArgs {
|
|||
///
|
||||
/// These represent a subset of the `pip list` interface that uv supports by default.
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PipListCompatArgs {
|
||||
#[clap(long, hide = true)]
|
||||
disable_pip_version_check: bool,
|
||||
|
@ -184,7 +182,6 @@ impl CompatArgs for PipListCompatArgs {
|
|||
///
|
||||
/// These represent a subset of the `pip-sync` interface that uv supports by default.
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PipSyncCompatArgs {
|
||||
#[clap(short, long, hide = true)]
|
||||
ask: bool,
|
||||
|
@ -268,7 +265,6 @@ enum Resolver {
|
|||
///
|
||||
/// These represent a subset of the `virtualenv` interface that uv supports by default.
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct VenvCompatArgs {
|
||||
#[clap(long, hide = true)]
|
||||
clear: bool,
|
||||
|
@ -327,7 +323,6 @@ impl CompatArgs for VenvCompatArgs {
|
|||
///
|
||||
/// These represent a subset of the `pip install` interface that uv supports by default.
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PipInstallCompatArgs {
|
||||
#[clap(long, hide = true)]
|
||||
disable_pip_version_check: bool,
|
||||
|
@ -361,7 +356,6 @@ impl CompatArgs for PipInstallCompatArgs {
|
|||
///
|
||||
/// These represent a subset of the `pip` interface that exists on all commands.
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PipGlobalCompatArgs {
|
||||
#[clap(long, hide = true)]
|
||||
disable_pip_version_check: bool,
|
||||
|
|
|
@ -85,7 +85,6 @@ const STYLES: Styles = Styles::styled()
|
|||
disable_version_flag = true
|
||||
)]
|
||||
#[command(styles=STYLES)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct Cli {
|
||||
#[command(subcommand)]
|
||||
pub command: Box<Commands>,
|
||||
|
@ -133,7 +132,6 @@ pub struct TopLevelArgs {
|
|||
|
||||
#[derive(Parser, Debug, Clone)]
|
||||
#[command(next_help_heading = "Global options", next_display_order = 1000)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct GlobalArgs {
|
||||
#[arg(
|
||||
global = true,
|
||||
|
@ -526,7 +524,6 @@ pub struct HelpArgs {
|
|||
|
||||
#[derive(Args)]
|
||||
#[command(group = clap::ArgGroup::new("operation"))]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct VersionArgs {
|
||||
/// Set the project version to this value
|
||||
///
|
||||
|
@ -657,7 +654,6 @@ pub struct SelfUpdateArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct CacheNamespace {
|
||||
#[command(subcommand)]
|
||||
pub command: CacheCommand,
|
||||
|
@ -687,14 +683,12 @@ pub enum CacheCommand {
|
|||
}
|
||||
|
||||
#[derive(Args, Debug)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct CleanArgs {
|
||||
/// The packages to remove from the cache.
|
||||
pub package: Vec<PackageName>,
|
||||
}
|
||||
|
||||
#[derive(Args, Debug)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PruneArgs {
|
||||
/// Optimize the cache for persistence in a continuous integration environment, like GitHub
|
||||
/// Actions.
|
||||
|
@ -714,7 +708,6 @@ pub struct PruneArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PipNamespace {
|
||||
#[command(subcommand)]
|
||||
pub command: PipCommand,
|
||||
|
@ -1095,7 +1088,6 @@ fn parse_maybe_string(input: &str) -> Result<Maybe<String>, String> {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
#[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))]
|
||||
pub struct PipCompileArgs {
|
||||
/// Include all packages listed in the given `requirements.in` files.
|
||||
|
@ -1443,7 +1435,6 @@ pub struct PipCompileArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PipSyncArgs {
|
||||
/// Include all packages listed in the given `requirements.txt` files.
|
||||
///
|
||||
|
@ -1491,7 +1482,7 @@ pub struct PipSyncArgs {
|
|||
/// Hash-checking mode introduces a number of additional constraints:
|
||||
///
|
||||
/// - Git dependencies are not supported.
|
||||
/// - Editable installs are not supported.
|
||||
/// - Editable installations are not supported.
|
||||
/// - Local dependencies are not supported, unless they point to a specific wheel (`.whl`) or
|
||||
/// source archive (`.zip`, `.tar.gz`), as opposed to a directory.
|
||||
#[arg(
|
||||
|
@ -1700,7 +1691,6 @@ pub struct PipSyncArgs {
|
|||
|
||||
#[derive(Args)]
|
||||
#[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PipInstallArgs {
|
||||
/// Install all listed packages.
|
||||
///
|
||||
|
@ -1801,7 +1791,7 @@ pub struct PipInstallArgs {
|
|||
/// Hash-checking mode introduces a number of additional constraints:
|
||||
///
|
||||
/// - Git dependencies are not supported.
|
||||
/// - Editable installs are not supported.
|
||||
/// - Editable installations are not supported.
|
||||
/// - Local dependencies are not supported, unless they point to a specific wheel (`.whl`) or
|
||||
/// source archive (`.zip`, `.tar.gz`), as opposed to a directory.
|
||||
#[arg(
|
||||
|
@ -2015,7 +2005,6 @@ pub struct PipInstallArgs {
|
|||
|
||||
#[derive(Args)]
|
||||
#[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PipUninstallArgs {
|
||||
/// Uninstall all listed packages.
|
||||
#[arg(group = "sources")]
|
||||
|
@ -2104,7 +2093,6 @@ pub struct PipUninstallArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PipFreezeArgs {
|
||||
/// Exclude any editable packages from output.
|
||||
#[arg(long)]
|
||||
|
@ -2159,7 +2147,6 @@ pub struct PipFreezeArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PipListArgs {
|
||||
/// Only include editable projects.
|
||||
#[arg(short, long)]
|
||||
|
@ -2235,7 +2222,6 @@ pub struct PipListArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PipCheckArgs {
|
||||
/// The Python interpreter for which packages should be checked.
|
||||
///
|
||||
|
@ -2271,7 +2257,6 @@ pub struct PipCheckArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PipShowArgs {
|
||||
/// The package(s) to display.
|
||||
pub package: Vec<PackageName>,
|
||||
|
@ -2325,7 +2310,6 @@ pub struct PipShowArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PipTreeArgs {
|
||||
/// Show the version constraint(s) imposed on each package.
|
||||
#[arg(long)]
|
||||
|
@ -2382,7 +2366,6 @@ pub struct PipTreeArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct BuildArgs {
|
||||
/// The directory from which distributions should be built, or a source
|
||||
/// distribution archive to build into a wheel.
|
||||
|
@ -2471,7 +2454,7 @@ pub struct BuildArgs {
|
|||
/// Hash-checking mode introduces a number of additional constraints:
|
||||
///
|
||||
/// - Git dependencies are not supported.
|
||||
/// - Editable installs are not supported.
|
||||
/// - Editable installations are not supported.
|
||||
/// - Local dependencies are not supported, unless they point to a specific wheel (`.whl`) or
|
||||
/// source archive (`.zip`, `.tar.gz`), as opposed to a directory.
|
||||
#[arg(
|
||||
|
@ -2529,7 +2512,6 @@ pub struct BuildArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct VenvArgs {
|
||||
/// The Python interpreter to use for the virtual environment.
|
||||
///
|
||||
|
@ -2725,7 +2707,6 @@ pub enum AuthorFrom {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct InitArgs {
|
||||
/// The path to use for the project/script.
|
||||
///
|
||||
|
@ -2883,7 +2864,6 @@ pub struct InitArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct RunArgs {
|
||||
/// Include optional dependencies from the specified extra name.
|
||||
///
|
||||
|
@ -3170,7 +3150,6 @@ pub struct RunArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct SyncArgs {
|
||||
/// Include optional dependencies from the specified extra name.
|
||||
///
|
||||
|
@ -3427,7 +3406,6 @@ pub struct SyncArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct LockArgs {
|
||||
/// Check if the lockfile is up-to-date.
|
||||
///
|
||||
|
@ -3489,7 +3467,6 @@ pub struct LockArgs {
|
|||
|
||||
#[derive(Args)]
|
||||
#[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct AddArgs {
|
||||
/// The packages to add, as PEP 508 requirements (e.g., `ruff==0.5.0`).
|
||||
#[arg(group = "sources")]
|
||||
|
@ -3516,7 +3493,12 @@ pub struct AddArgs {
|
|||
/// Add the requirements to the development dependency group.
|
||||
///
|
||||
/// This option is an alias for `--group dev`.
|
||||
#[arg(long, conflicts_with("optional"), conflicts_with("group"))]
|
||||
#[arg(
|
||||
long,
|
||||
conflicts_with("optional"),
|
||||
conflicts_with("group"),
|
||||
conflicts_with("script")
|
||||
)]
|
||||
pub dev: bool,
|
||||
|
||||
/// Add the requirements to the package's optional dependencies for the specified extra.
|
||||
|
@ -3530,7 +3512,12 @@ pub struct AddArgs {
|
|||
/// Add the requirements to the specified dependency group.
|
||||
///
|
||||
/// These requirements will not be included in the published metadata for the project.
|
||||
#[arg(long, conflicts_with("dev"), conflicts_with("optional"))]
|
||||
#[arg(
|
||||
long,
|
||||
conflicts_with("dev"),
|
||||
conflicts_with("optional"),
|
||||
conflicts_with("script")
|
||||
)]
|
||||
pub group: Option<GroupName>,
|
||||
|
||||
/// Add the requirements as editable.
|
||||
|
@ -3664,7 +3651,6 @@ pub struct AddArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct RemoveArgs {
|
||||
/// The names of the dependencies to remove (e.g., `ruff`).
|
||||
#[arg(required = true)]
|
||||
|
@ -3677,11 +3663,21 @@ pub struct RemoveArgs {
|
|||
pub dev: bool,
|
||||
|
||||
/// Remove the packages from the project's optional dependencies for the specified extra.
|
||||
#[arg(long, conflicts_with("dev"), conflicts_with("group"))]
|
||||
#[arg(
|
||||
long,
|
||||
conflicts_with("dev"),
|
||||
conflicts_with("group"),
|
||||
conflicts_with("script")
|
||||
)]
|
||||
pub optional: Option<ExtraName>,
|
||||
|
||||
/// Remove the packages from the specified dependency group.
|
||||
#[arg(long, conflicts_with("dev"), conflicts_with("optional"))]
|
||||
#[arg(
|
||||
long,
|
||||
conflicts_with("dev"),
|
||||
conflicts_with("optional"),
|
||||
conflicts_with("script")
|
||||
)]
|
||||
pub group: Option<GroupName>,
|
||||
|
||||
/// Avoid syncing the virtual environment after re-locking the project.
|
||||
|
@ -3749,7 +3745,6 @@ pub struct RemoveArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct TreeArgs {
|
||||
/// Show a platform-independent dependency tree.
|
||||
///
|
||||
|
@ -3889,7 +3884,6 @@ pub struct TreeArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct ExportArgs {
|
||||
/// The format to which `uv.lock` should be exported.
|
||||
///
|
||||
|
@ -4104,7 +4098,6 @@ pub struct ExportArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct ToolNamespace {
|
||||
#[command(subcommand)]
|
||||
pub command: ToolCommand,
|
||||
|
@ -4197,7 +4190,6 @@ pub enum ToolCommand {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct ToolRunArgs {
|
||||
/// The command to run.
|
||||
///
|
||||
|
@ -4316,7 +4308,6 @@ pub struct UvxArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct ToolInstallArgs {
|
||||
/// The package to install commands from.
|
||||
pub package: String,
|
||||
|
@ -4405,7 +4396,6 @@ pub struct ToolInstallArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct ToolListArgs {
|
||||
/// Whether to display the path to each tool environment and installed executable.
|
||||
#[arg(long)]
|
||||
|
@ -4432,7 +4422,6 @@ pub struct ToolListArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct ToolDirArgs {
|
||||
/// Show the directory into which `uv tool` will install executables.
|
||||
///
|
||||
|
@ -4451,7 +4440,6 @@ pub struct ToolDirArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct ToolUninstallArgs {
|
||||
/// The name of the tool to uninstall.
|
||||
#[arg(required = true)]
|
||||
|
@ -4463,7 +4451,6 @@ pub struct ToolUninstallArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct ToolUpgradeArgs {
|
||||
/// The name of the tool to upgrade, along with an optional version specifier.
|
||||
#[arg(required = true)]
|
||||
|
@ -4693,7 +4680,6 @@ pub struct ToolUpgradeArgs {
|
|||
}
|
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct PythonNamespace {
|
||||
#[command(subcommand)]
|
||||
pub command: PythonCommand,
|
||||
|
@ -4728,13 +4714,32 @@ pub enum PythonCommand {
|
|||
///
|
||||
/// A `python` executable is not made globally available; managed Python versions are only used
|
||||
/// in uv commands or in active virtual environments. There is experimental support for adding
|
||||
/// Python executables to the `PATH` — use the `--preview` flag to enable this behavior.
|
||||
/// Python executables to a directory on the path — use the `--preview` flag to enable this
|
||||
/// behavior and `uv python dir --bin` to retrieve the target directory.
|
||||
///
|
||||
/// Multiple Python versions may be requested.
|
||||
///
|
||||
/// See `uv help python` to view supported request formats.
|
||||
Install(PythonInstallArgs),
|
||||
|
||||
/// Upgrade installed Python versions to the latest supported patch release (requires the
|
||||
/// `--preview` flag).
|
||||
///
|
||||
/// A target Python minor version to upgrade may be provided, e.g., `3.13`. Multiple versions
|
||||
/// may be provided to perform more than one upgrade.
|
||||
///
|
||||
/// If no target version is provided, then uv will upgrade all managed CPython versions.
|
||||
///
|
||||
/// During an upgrade, uv will not uninstall outdated patch versions.
|
||||
///
|
||||
/// When an upgrade is performed, virtual environments created by uv will automatically
|
||||
/// use the new version. However, if the virtual environment was created before the
/// upgrade functionality was added, it will continue to use the old Python version; to enable
/// upgrades, the environment must be recreated.
///
/// Upgrades are not yet supported for alternative implementations, like PyPy.
Upgrade(PythonUpgradeArgs),

/// Search for a Python installation.
///
/// Displays the path to the Python executable.

@@ -4763,7 +4768,8 @@ pub enum PythonCommand {
/// The Python installation directory may be overridden with `$UV_PYTHON_INSTALL_DIR`.
///
/// To view the directory where uv installs Python executables instead, use the `--bin` flag.
/// Note that Python executables are only installed when preview mode is enabled.
/// The Python executable directory may be overridden with `$UV_PYTHON_BIN_DIR`. Note that
/// Python executables are only installed when preview mode is enabled.
Dir(PythonDirArgs),

/// Uninstall Python versions.

@@ -4771,7 +4777,6 @@ pub enum PythonCommand {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PythonListArgs {
/// A Python request to filter by.
///

@@ -4826,7 +4831,6 @@ pub struct PythonListArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PythonDirArgs {
/// Show the directory into which `uv python` will install Python executables.
///

@@ -4844,7 +4848,6 @@ pub struct PythonDirArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PythonInstallArgs {
/// The directory to store the Python installation in.
///

@@ -4923,7 +4926,50 @@ pub struct PythonInstallArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PythonUpgradeArgs {
/// The directory Python installations are stored in.
///
/// If provided, `UV_PYTHON_INSTALL_DIR` will need to be set for subsequent operations for uv to
/// discover the Python installation.
///
/// See `uv python dir` to view the current Python installation directory. Defaults to
/// `~/.local/share/uv/python`.
#[arg(long, short, env = EnvVars::UV_PYTHON_INSTALL_DIR)]
pub install_dir: Option<PathBuf>,

/// The Python minor version(s) to upgrade.
///
/// If no target version is provided, then uv will upgrade all managed CPython versions.
#[arg(env = EnvVars::UV_PYTHON)]
pub targets: Vec<String>,

/// Set the URL to use as the source for downloading Python installations.
///
/// The provided URL will replace
/// `https://github.com/astral-sh/python-build-standalone/releases/download` in, e.g.,
/// `https://github.com/astral-sh/python-build-standalone/releases/download/20240713/cpython-3.12.4%2B20240713-aarch64-apple-darwin-install_only.tar.gz`.
///
/// Distributions can be read from a local directory by using the `file://` URL scheme.
#[arg(long, env = EnvVars::UV_PYTHON_INSTALL_MIRROR)]
pub mirror: Option<String>,

/// Set the URL to use as the source for downloading PyPy installations.
///
/// The provided URL will replace `https://downloads.python.org/pypy` in, e.g.,
/// `https://downloads.python.org/pypy/pypy3.8-v7.3.7-osx64.tar.bz2`.
///
/// Distributions can be read from a local directory by using the `file://` URL scheme.
#[arg(long, env = EnvVars::UV_PYPY_INSTALL_MIRROR)]
pub pypy_mirror: Option<String>,

/// URL pointing to JSON of custom Python installations.
///
/// Note that currently, only local paths are supported.
#[arg(long, env = EnvVars::UV_PYTHON_DOWNLOADS_JSON_URL)]
pub python_downloads_json_url: Option<String>,
}
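
// Editor's sketch, not part of the diff: the mirror option documented above
// amounts to a prefix rewrite on the download URL. `apply_mirror` and the
// mirror host are hypothetical; only the default prefix and the sample URL
// come from the doc comment.
const DEFAULT_PREFIX: &str =
    "https://github.com/astral-sh/python-build-standalone/releases/download";

fn apply_mirror(url: &str, mirror: &str) -> Option<String> {
    // Swap the default release prefix for the configured mirror, keeping the
    // release-specific suffix (including the leading slash) intact.
    url.strip_prefix(DEFAULT_PREFIX)
        .map(|rest| format!("{}{rest}", mirror.trim_end_matches('/')))
}

fn main() {
    let url = "https://github.com/astral-sh/python-build-standalone/releases/download/20240713/cpython-3.12.4%2B20240713-aarch64-apple-darwin-install_only.tar.gz";
    let mirrored = apply_mirror(url, "https://mirror.example/pbs").unwrap();
    assert!(mirrored.starts_with("https://mirror.example/pbs/20240713/"));
}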

#[derive(Args)]
pub struct PythonUninstallArgs {
/// The directory where the Python was installed.
#[arg(long, short, env = EnvVars::UV_PYTHON_INSTALL_DIR)]

@@ -4941,7 +4987,6 @@ pub struct PythonUninstallArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PythonFindArgs {
/// The Python request.
///

@@ -4990,7 +5035,6 @@ pub struct PythonFindArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PythonPinArgs {
/// The Python version request.
///

@@ -5039,7 +5083,6 @@ pub struct PythonPinArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct GenerateShellCompletionArgs {
/// The shell to generate the completion script for
pub shell: clap_complete_command::Shell,

@@ -5078,7 +5121,6 @@ pub struct GenerateShellCompletionArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct IndexArgs {
/// The URLs to use when resolving dependencies, in addition to the default index.
///

@@ -5088,6 +5130,9 @@ pub struct IndexArgs {
/// All indexes provided via this flag take priority over the index specified by
/// `--default-index` (which defaults to PyPI). When multiple `--index` flags are provided,
/// earlier values take priority.
///
/// Index names are not supported as values. Relative paths must be disambiguated from index
/// names with `./` or `../` on Unix or `.\\`, `..\\`, `./` or `../` on Windows.
//
// The nested Vec structure (`Vec<Vec<Maybe<Index>>>`) is required for clap's
// value parsing mechanism, which processes one value at a time, in order to handle

@@ -5153,7 +5198,6 @@ pub struct IndexArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct RefreshArgs {
/// Refresh all cached data.
#[arg(

@@ -5179,7 +5223,6 @@ pub struct RefreshArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct BuildOptionsArgs {
/// Don't build source distributions.
///

@@ -5235,7 +5278,6 @@ pub struct BuildOptionsArgs {

/// Arguments that are used by commands that need to install (but not resolve) packages.
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct InstallerArgs {
#[command(flatten)]
pub index_args: IndexArgs,

@@ -5377,7 +5419,6 @@ pub struct InstallerArgs {

/// Arguments that are used by commands that need to resolve (but not install) packages.
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct ResolverArgs {
#[command(flatten)]
pub index_args: IndexArgs,

@@ -5544,7 +5585,6 @@ pub struct ResolverArgs {

/// Arguments that are used by commands that need to resolve and install packages.
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct ResolverInstallerArgs {
#[command(flatten)]
pub index_args: IndexArgs,

@@ -5761,7 +5801,6 @@ pub struct ResolverInstallerArgs {

/// Arguments that are used by commands that need to fetch from the Simple API.
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct FetchArgs {
#[command(flatten)]
pub index_args: IndexArgs,

@@ -1,7 +1,10 @@
use anstream::eprintln;

use uv_cache::Refresh;
use uv_configuration::ConfigSettings;
use uv_resolver::PrereleaseMode;
use uv_settings::{Combine, PipOptions, ResolverInstallerOptions, ResolverOptions};
use uv_warnings::owo_colors::OwoColorize;

use crate::{
BuildOptionsArgs, FetchArgs, IndexArgs, InstallerArgs, Maybe, RefreshArgs, ResolverArgs,

@@ -9,12 +12,27 @@ use crate::{
};

/// Given a boolean flag pair (like `--upgrade` and `--no-upgrade`), resolve the value of the flag.
pub fn flag(yes: bool, no: bool) -> Option<bool> {
pub fn flag(yes: bool, no: bool, name: &str) -> Option<bool> {
match (yes, no) {
(true, false) => Some(true),
(false, true) => Some(false),
(false, false) => None,
(..) => unreachable!("Clap should make this impossible"),
(..) => {
eprintln!(
"{}{} `{}` and `{}` cannot be used together. \
Boolean flags on different levels are currently not supported \
(https://github.com/clap-rs/clap/issues/6049)",
"error".bold().red(),
":".bold(),
format!("--{name}").green(),
format!("--no-{name}").green(),
);
// No error forwarding, since this should eventually be solved on the clap side.
#[allow(clippy::exit)]
{
std::process::exit(2);
}
}
}
}
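
// Editor's sketch, not part of the diff: `flag` collapses a clap flag pair
// into a tri-state value. A standalone version without the conflict error
// path, plus the three resolutions:
fn resolve_flag(yes: bool, no: bool) -> Option<bool> {
    match (yes, no) {
        (true, false) => Some(true),
        (false, true) => Some(false),
        // Neither flag given; the conflicting (true, true) case is rejected
        // with the error shown above.
        _ => None,
    }
}

fn main() {
    assert_eq!(resolve_flag(true, false), Some(true)); // --upgrade
    assert_eq!(resolve_flag(false, true), Some(false)); // --no-upgrade
    assert_eq!(resolve_flag(false, false), None); // neither flag given
}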

@@ -26,7 +44,7 @@ impl From<RefreshArgs> for Refresh {
refresh_package,
} = value;

Self::from_args(flag(refresh, no_refresh), refresh_package)
Self::from_args(flag(refresh, no_refresh, "no-refresh"), refresh_package)
}
}

@@ -53,7 +71,7 @@ impl From<ResolverArgs> for PipOptions {
} = args;

Self {
upgrade: flag(upgrade, no_upgrade),
upgrade: flag(upgrade, no_upgrade, "no-upgrade"),
upgrade_package: Some(upgrade_package),
index_strategy,
keyring_provider,

@@ -66,7 +84,7 @@ impl From<ResolverArgs> for PipOptions {
},
config_settings: config_setting
.map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
no_build_isolation: flag(no_build_isolation, build_isolation),
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: Some(no_build_isolation_package),
exclude_newer,
link_mode,

@@ -96,16 +114,16 @@ impl From<InstallerArgs> for PipOptions {
} = args;

Self {
reinstall: flag(reinstall, no_reinstall),
reinstall: flag(reinstall, no_reinstall, "reinstall"),
reinstall_package: Some(reinstall_package),
index_strategy,
keyring_provider,
config_settings: config_setting
.map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
no_build_isolation: flag(no_build_isolation, build_isolation),
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
exclude_newer,
link_mode,
compile_bytecode: flag(compile_bytecode, no_compile_bytecode),
compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"),
no_sources: if no_sources { Some(true) } else { None },
..PipOptions::from(index_args)
}

@@ -140,9 +158,9 @@ impl From<ResolverInstallerArgs> for PipOptions {
} = args;

Self {
upgrade: flag(upgrade, no_upgrade),
upgrade: flag(upgrade, no_upgrade, "upgrade"),
upgrade_package: Some(upgrade_package),
reinstall: flag(reinstall, no_reinstall),
reinstall: flag(reinstall, no_reinstall, "reinstall"),
reinstall_package: Some(reinstall_package),
index_strategy,
keyring_provider,

@@ -155,11 +173,11 @@ impl From<ResolverInstallerArgs> for PipOptions {
fork_strategy,
config_settings: config_setting
.map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
no_build_isolation: flag(no_build_isolation, build_isolation),
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: Some(no_build_isolation_package),
exclude_newer,
link_mode,
compile_bytecode: flag(compile_bytecode, no_compile_bytecode),
compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"),
no_sources: if no_sources { Some(true) } else { None },
..PipOptions::from(index_args)
}

@@ -289,7 +307,7 @@ pub fn resolver_options(
.filter_map(Maybe::into_option)
.collect()
}),
upgrade: flag(upgrade, no_upgrade),
upgrade: flag(upgrade, no_upgrade, "no-upgrade"),
upgrade_package: Some(upgrade_package),
index_strategy,
keyring_provider,

@@ -303,13 +321,13 @@ pub fn resolver_options(
dependency_metadata: None,
config_settings: config_setting
.map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
no_build_isolation: flag(no_build_isolation, build_isolation),
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: Some(no_build_isolation_package),
exclude_newer,
link_mode,
no_build: flag(no_build, build),
no_build: flag(no_build, build, "build"),
no_build_package: Some(no_build_package),
no_binary: flag(no_binary, binary),
no_binary: flag(no_binary, binary, "binary"),
no_binary_package: Some(no_binary_package),
no_sources: if no_sources { Some(true) } else { None },
}

@@ -386,13 +404,13 @@ pub fn resolver_installer_options(
.filter_map(Maybe::into_option)
.collect()
}),
upgrade: flag(upgrade, no_upgrade),
upgrade: flag(upgrade, no_upgrade, "upgrade"),
upgrade_package: if upgrade_package.is_empty() {
None
} else {
Some(upgrade_package)
},
reinstall: flag(reinstall, no_reinstall),
reinstall: flag(reinstall, no_reinstall, "reinstall"),
reinstall_package: if reinstall_package.is_empty() {
None
} else {

@@ -410,7 +428,7 @@ pub fn resolver_installer_options(
dependency_metadata: None,
config_settings: config_setting
.map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
no_build_isolation: flag(no_build_isolation, build_isolation),
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: if no_build_isolation_package.is_empty() {
None
} else {

@@ -418,14 +436,14 @@ pub fn resolver_installer_options(
},
exclude_newer,
link_mode,
compile_bytecode: flag(compile_bytecode, no_compile_bytecode),
no_build: flag(no_build, build),
compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"),
no_build: flag(no_build, build, "build"),
no_build_package: if no_build_package.is_empty() {
None
} else {
Some(no_build_package)
},
no_binary: flag(no_binary, binary),
no_binary: flag(no_binary, binary, "binary"),
no_binary_package: if no_binary_package.is_empty() {
None
} else {

@@ -65,3 +65,4 @@ hyper = { version = "1.4.1", features = ["server", "http1"] }
hyper-util = { version = "0.1.8", features = ["tokio"] }
insta = { version = "1.40.0", features = ["filters", "json", "redactions"] }
tokio = { workspace = true }
wiremock = { workspace = true }

@@ -6,16 +6,26 @@ use std::sync::Arc;
use std::time::Duration;
use std::{env, io, iter};

use anyhow::anyhow;
use http::{
HeaderMap, HeaderName, HeaderValue, Method, StatusCode,
header::{
AUTHORIZATION, CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TYPE, COOKIE, LOCATION,
PROXY_AUTHORIZATION, REFERER, TRANSFER_ENCODING, WWW_AUTHENTICATE,
},
};
use itertools::Itertools;
use reqwest::{Client, ClientBuilder, Proxy, Response};
use reqwest::{Client, ClientBuilder, IntoUrl, Proxy, Request, Response, multipart};
use reqwest_middleware::{ClientWithMiddleware, Middleware};
use reqwest_retry::policies::ExponentialBackoff;
use reqwest_retry::{
DefaultRetryableStrategy, RetryTransientMiddleware, Retryable, RetryableStrategy,
};
use tracing::{debug, trace};
use url::ParseError;
use url::Url;

use uv_auth::Credentials;
use uv_auth::{AuthMiddleware, Indexes};
use uv_configuration::{KeyringProviderType, TrustedHost};
use uv_fs::Simplified;

@@ -32,6 +42,10 @@ use crate::middleware::OfflineMiddleware;
use crate::tls::read_identity;

pub const DEFAULT_RETRIES: u32 = 3;
/// Maximum number of redirects to follow before giving up.
///
/// This is the default used by [`reqwest`].
const DEFAULT_MAX_REDIRECTS: u32 = 10;

/// Selectively skip parts or the entire auth middleware.
#[derive(Debug, Clone, Copy, Default)]

@@ -61,6 +75,31 @@ pub struct BaseClientBuilder<'a> {
default_timeout: Duration,
extra_middleware: Option<ExtraMiddleware>,
proxies: Vec<Proxy>,
redirect_policy: RedirectPolicy,
/// Whether credentials should be propagated during cross-origin redirects.
///
/// A policy allowing propagation is insecure and should only be available for test code.
cross_origin_credential_policy: CrossOriginCredentialsPolicy,
}

/// The policy for handling HTTP redirects.
#[derive(Debug, Default, Clone, Copy)]
pub enum RedirectPolicy {
/// Use reqwest's built-in redirect handling. This bypasses our custom middleware
/// on redirect.
#[default]
BypassMiddleware,
/// Handle redirects manually, re-triggering our custom middleware for each request.
RetriggerMiddleware,
}

impl RedirectPolicy {
pub fn reqwest_policy(self) -> reqwest::redirect::Policy {
match self {
RedirectPolicy::BypassMiddleware => reqwest::redirect::Policy::default(),
RedirectPolicy::RetriggerMiddleware => reqwest::redirect::Policy::none(),
}
}
}
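
// Editor's note, not part of the diff: the two arms above trade reqwest's
// internal redirect handling for manual hops. A minimal sketch of what each
// arm maps to in the reqwest API:
use reqwest::redirect::Policy;

fn main() {
    // BypassMiddleware: reqwest follows redirects internally (its default
    // policy caps the chain at 10 hops), so middleware never observes the
    // intermediate requests.
    let _bypass = Policy::default();
    // RetriggerMiddleware: reqwest stops at the first 3xx; the wrapper below
    // re-issues the request so the full middleware stack runs on every hop.
    let _retrigger = Policy::none();
}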

/// A list of user-defined middlewares to be applied to the client.

@@ -96,6 +135,8 @@ impl BaseClientBuilder<'_> {
default_timeout: Duration::from_secs(30),
extra_middleware: None,
proxies: vec![],
redirect_policy: RedirectPolicy::default(),
cross_origin_credential_policy: CrossOriginCredentialsPolicy::Secure,
}
}
}

@@ -173,6 +214,24 @@ impl<'a> BaseClientBuilder<'a> {
self
}

#[must_use]
pub fn redirect(mut self, policy: RedirectPolicy) -> Self {
self.redirect_policy = policy;
self
}

/// Allows credentials to be propagated on cross-origin redirects.
///
/// WARNING: This should only be available for tests. In production code, propagating credentials
/// during cross-origin redirects can lead to security vulnerabilities including credential
/// leakage to untrusted domains.
#[cfg(test)]
#[must_use]
pub fn allow_cross_origin_credentials(mut self) -> Self {
self.cross_origin_credential_policy = CrossOriginCredentialsPolicy::Insecure;
self
}

pub fn is_offline(&self) -> bool {
matches!(self.connectivity, Connectivity::Offline)
}

@@ -229,6 +288,7 @@ impl<'a> BaseClientBuilder<'a> {
timeout,
ssl_cert_file_exists,
Security::Secure,
self.redirect_policy,
);

// Create an insecure client that accepts invalid certificates.

@@ -237,11 +297,20 @@ impl<'a> BaseClientBuilder<'a> {
timeout,
ssl_cert_file_exists,
Security::Insecure,
self.redirect_policy,
);

// Wrap in any relevant middleware and handle connectivity.
let client = self.apply_middleware(raw_client.clone());
let dangerous_client = self.apply_middleware(raw_dangerous_client.clone());
let client = RedirectClientWithMiddleware {
client: self.apply_middleware(raw_client.clone()),
redirect_policy: self.redirect_policy,
cross_origin_credentials_policy: self.cross_origin_credential_policy,
};
let dangerous_client = RedirectClientWithMiddleware {
client: self.apply_middleware(raw_dangerous_client.clone()),
redirect_policy: self.redirect_policy,
cross_origin_credentials_policy: self.cross_origin_credential_policy,
};

BaseClient {
connectivity: self.connectivity,

@@ -258,8 +327,16 @@ impl<'a> BaseClientBuilder<'a> {
/// Share the underlying client between two different middleware configurations.
pub fn wrap_existing(&self, existing: &BaseClient) -> BaseClient {
// Wrap in any relevant middleware and handle connectivity.
let client = self.apply_middleware(existing.raw_client.clone());
let dangerous_client = self.apply_middleware(existing.raw_dangerous_client.clone());
let client = RedirectClientWithMiddleware {
client: self.apply_middleware(existing.raw_client.clone()),
redirect_policy: self.redirect_policy,
cross_origin_credentials_policy: self.cross_origin_credential_policy,
};
let dangerous_client = RedirectClientWithMiddleware {
client: self.apply_middleware(existing.raw_dangerous_client.clone()),
redirect_policy: self.redirect_policy,
cross_origin_credentials_policy: self.cross_origin_credential_policy,
};

BaseClient {
connectivity: self.connectivity,

@@ -279,6 +356,7 @@ impl<'a> BaseClientBuilder<'a> {
timeout: Duration,
ssl_cert_file_exists: bool,
security: Security,
redirect_policy: RedirectPolicy,
) -> Client {
// Configure the builder.
let client_builder = ClientBuilder::new()

@@ -286,7 +364,8 @@ impl<'a> BaseClientBuilder<'a> {
.user_agent(user_agent)
.pool_max_idle_per_host(20)
.read_timeout(timeout)
.tls_built_in_root_certs(false);
.tls_built_in_root_certs(false)
.redirect(redirect_policy.reqwest_policy());

// If necessary, accept invalid certificates.
let client_builder = match security {

@@ -381,9 +460,9 @@ impl<'a> BaseClientBuilder<'a> {
#[derive(Debug, Clone)]
pub struct BaseClient {
/// The underlying HTTP client that enforces valid certificates.
client: ClientWithMiddleware,
client: RedirectClientWithMiddleware,
/// The underlying HTTP client that accepts invalid certificates.
dangerous_client: ClientWithMiddleware,
dangerous_client: RedirectClientWithMiddleware,
/// The HTTP client without middleware.
raw_client: Client,
/// The HTTP client that accepts invalid certificates without middleware.

@@ -408,7 +487,7 @@ enum Security {

impl BaseClient {
/// Selects the appropriate client based on the host's trustworthiness.
pub fn for_host(&self, url: &DisplaySafeUrl) -> &ClientWithMiddleware {
pub fn for_host(&self, url: &DisplaySafeUrl) -> &RedirectClientWithMiddleware {
if self.disable_ssl(url) {
&self.dangerous_client
} else {

@@ -416,6 +495,12 @@ impl BaseClient {
}
}

/// Executes a request, applying redirect policy.
pub async fn execute(&self, req: Request) -> reqwest_middleware::Result<Response> {
let client = self.for_host(&DisplaySafeUrl::from(req.url().clone()));
client.execute(req).await
}

/// Returns `true` if the host is trusted to use the insecure client.
pub fn disable_ssl(&self, url: &DisplaySafeUrl) -> bool {
self.allow_insecure_host

@@ -439,6 +524,326 @@ impl BaseClient {
}
}

/// Wrapper around [`ClientWithMiddleware`] that manages redirects.
#[derive(Debug, Clone)]
pub struct RedirectClientWithMiddleware {
client: ClientWithMiddleware,
redirect_policy: RedirectPolicy,
/// Whether credentials should be preserved during cross-origin redirects.
///
/// WARNING: This should only be available for tests. In production code, preserving credentials
/// during cross-origin redirects can lead to security vulnerabilities including credential
/// leakage to untrusted domains.
cross_origin_credentials_policy: CrossOriginCredentialsPolicy,
}

impl RedirectClientWithMiddleware {
/// Convenience method to make a `GET` request to a URL.
pub fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder {
RequestBuilder::new(self.client.get(url), self)
}

/// Convenience method to make a `POST` request to a URL.
pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder {
RequestBuilder::new(self.client.post(url), self)
}

/// Convenience method to make a `HEAD` request to a URL.
pub fn head<U: IntoUrl>(&self, url: U) -> RequestBuilder {
RequestBuilder::new(self.client.head(url), self)
}

/// Executes a request, applying the redirect policy.
pub async fn execute(&self, req: Request) -> reqwest_middleware::Result<Response> {
match self.redirect_policy {
RedirectPolicy::BypassMiddleware => self.client.execute(req).await,
RedirectPolicy::RetriggerMiddleware => self.execute_with_redirect_handling(req).await,
}
}

/// Executes a request. If the response is a redirect (one of HTTP 301, 302, 303, 307, or 308), the
/// request is executed again with the redirect location URL (up to a maximum number of
/// redirects).
///
/// Unlike the built-in reqwest redirect policies, this sends the redirect request through the
/// entire middleware pipeline again.
///
/// See RFC 7231 7.1.2 <https://www.rfc-editor.org/rfc/rfc7231#section-7.1.2> for details on
/// redirect semantics.
async fn execute_with_redirect_handling(
&self,
req: Request,
) -> reqwest_middleware::Result<Response> {
let mut request = req;
let mut redirects = 0;
let max_redirects = DEFAULT_MAX_REDIRECTS;

loop {
let result = self
.client
.execute(request.try_clone().expect("HTTP request must be cloneable"))
.await;
let Ok(response) = result else {
return result;
};

if redirects >= max_redirects {
return Ok(response);
}

let Some(redirect_request) =
request_into_redirect(request, &response, self.cross_origin_credentials_policy)?
else {
return Ok(response);
};

redirects += 1;
request = redirect_request;
}
}

pub fn raw_client(&self) -> &ClientWithMiddleware {
&self.client
}
}

impl From<RedirectClientWithMiddleware> for ClientWithMiddleware {
fn from(item: RedirectClientWithMiddleware) -> ClientWithMiddleware {
item.client
}
}

/// Check if this should be a redirect and, if so, return a new redirect request.
///
/// This implementation is based on the [`reqwest`] crate redirect implementation.
/// It takes ownership of the original [`Request`] and mutates it to create the new
/// redirect [`Request`].
fn request_into_redirect(
mut req: Request,
res: &Response,
cross_origin_credentials_policy: CrossOriginCredentialsPolicy,
) -> reqwest_middleware::Result<Option<Request>> {
let original_req_url = DisplaySafeUrl::from(req.url().clone());
let status = res.status();
let should_redirect = match status {
StatusCode::MOVED_PERMANENTLY
| StatusCode::FOUND
| StatusCode::TEMPORARY_REDIRECT
| StatusCode::PERMANENT_REDIRECT => true,
StatusCode::SEE_OTHER => {
// Per RFC 7231, HTTP 303 is intended for the user agent
// to perform a GET or HEAD request to the redirect target.
// Historically, some browsers also changed method from POST
// to GET on 301 or 302, but this is not required by RFC 7231
// and was not intended by the HTTP spec.
*req.body_mut() = None;
for header in &[
TRANSFER_ENCODING,
CONTENT_ENCODING,
CONTENT_TYPE,
CONTENT_LENGTH,
] {
req.headers_mut().remove(header);
}

match *req.method() {
Method::GET | Method::HEAD => {}
_ => {
*req.method_mut() = Method::GET;
}
}
true
}
_ => false,
};
if !should_redirect {
return Ok(None);
}

let location = res
.headers()
.get(LOCATION)
.ok_or(reqwest_middleware::Error::Middleware(anyhow!(
"Server returned redirect (HTTP {status}) without destination URL. This may indicate a server configuration issue"
)))?
.to_str()
.map_err(|_| {
reqwest_middleware::Error::Middleware(anyhow!(
"Invalid HTTP {status} 'Location' value: must only contain visible ascii characters"
))
})?;

let mut redirect_url = match DisplaySafeUrl::parse(location) {
Ok(url) => url,
// Per RFC 7231, URLs should be resolved against the request URL.
Err(ParseError::RelativeUrlWithoutBase) => original_req_url.join(location).map_err(|err| {
reqwest_middleware::Error::Middleware(anyhow!(
"Invalid HTTP {status} 'Location' value `{location}` relative to `{original_req_url}`: {err}"
))
})?,
Err(err) => {
return Err(reqwest_middleware::Error::Middleware(anyhow!(
"Invalid HTTP {status} 'Location' value `{location}`: {err}"
)));
}
};
// Per RFC 7231, fragments must be propagated
if let Some(fragment) = original_req_url.fragment() {
redirect_url.set_fragment(Some(fragment));
}

// Ensure the URL is a valid HTTP URI.
if let Err(err) = redirect_url.as_str().parse::<http::Uri>() {
return Err(reqwest_middleware::Error::Middleware(anyhow!(
"HTTP {status} 'Location' value `{redirect_url}` is not a valid HTTP URI: {err}"
)));
}

if redirect_url.scheme() != "http" && redirect_url.scheme() != "https" {
return Err(reqwest_middleware::Error::Middleware(anyhow!(
"Invalid HTTP {status} 'Location' value `{redirect_url}`: scheme needs to be https or http"
)));
}

let mut headers = HeaderMap::new();
std::mem::swap(req.headers_mut(), &mut headers);

let cross_host = redirect_url.host_str() != original_req_url.host_str()
|| redirect_url.port_or_known_default() != original_req_url.port_or_known_default();
if cross_host {
if cross_origin_credentials_policy == CrossOriginCredentialsPolicy::Secure {
debug!("Received a cross-origin redirect. Removing sensitive headers.");
headers.remove(AUTHORIZATION);
headers.remove(COOKIE);
headers.remove(PROXY_AUTHORIZATION);
headers.remove(WWW_AUTHENTICATE);
}
// If the redirect request is not a cross-origin request and the original request already
// had a Referer header, attempt to set the Referer header for the redirect request.
} else if headers.contains_key(REFERER) {
if let Some(referer) = make_referer(&redirect_url, &original_req_url) {
headers.insert(REFERER, referer);
}
}

// Check if there are credentials on the redirect location itself.
// If so, move them to Authorization header.
if !redirect_url.username().is_empty() {
if let Some(credentials) = Credentials::from_url(&redirect_url) {
let _ = redirect_url.set_username("");
let _ = redirect_url.set_password(None);
headers.insert(AUTHORIZATION, credentials.to_header_value());
}
}

std::mem::swap(req.headers_mut(), &mut headers);
*req.url_mut() = Url::from(redirect_url);
debug!(
"Received HTTP {status}. Redirecting to {}",
DisplaySafeUrl::ref_cast(req.url())
);
Ok(Some(req))
}
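
// Editor's sketch, not part of the diff: the relative-`Location` fallback in
// `request_into_redirect`, demonstrated with the `url` crate directly. The
// host name is a placeholder.
use url::{ParseError, Url};

fn main() {
    let base = Url::parse("https://pkg.example/foo/").unwrap();
    match Url::parse("bar/baz/") {
        // A bare relative value fails to parse on its own...
        Err(ParseError::RelativeUrlWithoutBase) => {
            // ...and is resolved against the request URL, per RFC 7231.
            let resolved = base.join("bar/baz/").unwrap();
            assert_eq!(resolved.as_str(), "https://pkg.example/foo/bar/baz/");
        }
        other => panic!("unexpected parse result: {other:?}"),
    }
}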

/// Return a Referer [`HeaderValue`] according to RFC 7231.
///
/// Return [`None`] if https has been downgraded in the redirect location.
fn make_referer(
redirect_url: &DisplaySafeUrl,
original_url: &DisplaySafeUrl,
) -> Option<HeaderValue> {
if redirect_url.scheme() == "http" && original_url.scheme() == "https" {
return None;
}

let mut referer = original_url.clone();
referer.remove_credentials();
referer.set_fragment(None);
referer.as_str().parse().ok()
}

#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Hash)]
pub(crate) enum CrossOriginCredentialsPolicy {
/// Do not propagate credentials on cross-origin requests.
#[default]
Secure,

/// Propagate credentials on cross-origin requests.
///
/// WARNING: This should only be available for tests. In production code, preserving credentials
/// during cross-origin redirects can lead to security vulnerabilities including credential
/// leakage to untrusted domains.
#[cfg(test)]
Insecure,
}
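
// Editor's sketch, not part of the diff: the cross-origin check that gates
// the header stripping above, reduced to plain `url` calls. Hosts are
// placeholders and `is_cross_origin` is a hypothetical helper.
use url::Url;

fn is_cross_origin(original: &Url, redirect: &Url) -> bool {
    original.host_str() != redirect.host_str()
        || original.port_or_known_default() != redirect.port_or_known_default()
}

fn main() {
    let original = Url::parse("https://pypi.example/simple/").unwrap();
    let other = Url::parse("https://files.example/simple/").unwrap();
    assert!(is_cross_origin(&original, &other)); // credentials are dropped

    // An explicit default port is still the same origin.
    let explicit = Url::parse("https://pypi.example:443/simple/").unwrap();
    assert!(!is_cross_origin(&original, &explicit));
}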

/// A builder to construct the properties of a `Request`.
///
/// This wraps [`reqwest_middleware::RequestBuilder`] to ensure that the [`BaseClient`]
/// redirect policy is respected if `send()` is called.
#[derive(Debug)]
#[must_use]
pub struct RequestBuilder<'a> {
builder: reqwest_middleware::RequestBuilder,
client: &'a RedirectClientWithMiddleware,
}

impl<'a> RequestBuilder<'a> {
pub fn new(
builder: reqwest_middleware::RequestBuilder,
client: &'a RedirectClientWithMiddleware,
) -> Self {
Self { builder, client }
}

/// Add a `Header` to this Request.
pub fn header<K, V>(mut self, key: K, value: V) -> Self
where
HeaderName: TryFrom<K>,
<HeaderName as TryFrom<K>>::Error: Into<http::Error>,
HeaderValue: TryFrom<V>,
<HeaderValue as TryFrom<V>>::Error: Into<http::Error>,
{
self.builder = self.builder.header(key, value);
self
}

/// Add a set of Headers to the existing ones on this Request.
///
/// The headers will be merged into any already set.
pub fn headers(mut self, headers: HeaderMap) -> Self {
self.builder = self.builder.headers(headers);
self
}

#[cfg(not(target_arch = "wasm32"))]
pub fn version(mut self, version: reqwest::Version) -> Self {
self.builder = self.builder.version(version);
self
}

#[cfg_attr(docsrs, doc(cfg(feature = "multipart")))]
pub fn multipart(mut self, multipart: multipart::Form) -> Self {
self.builder = self.builder.multipart(multipart);
self
}

/// Build a `Request`.
pub fn build(self) -> reqwest::Result<Request> {
self.builder.build()
}

/// Constructs the Request and sends it to the target URL, returning a
/// future Response.
pub async fn send(self) -> reqwest_middleware::Result<Response> {
self.client.execute(self.build()?).await
}

pub fn raw_builder(&self) -> &reqwest_middleware::RequestBuilder {
&self.builder
}
}

/// Extends [`DefaultRetryableStrategy`], to log transient request failures and additional retry cases.
pub struct UvRetryableStrategy;

@@ -528,3 +933,204 @@ fn find_source<E: Error + 'static>(orig: &dyn Error) -> Option<&E> {
fn find_sources<E: Error + 'static>(orig: &dyn Error) -> impl Iterator<Item = &E> {
iter::successors(find_source::<E>(orig), |&err| find_source(err))
}

#[cfg(test)]
mod tests {
use super::*;
use anyhow::Result;

use reqwest::{Client, Method};
use wiremock::matchers::method;
use wiremock::{Mock, MockServer, ResponseTemplate};

use crate::base_client::request_into_redirect;

#[tokio::test]
async fn test_redirect_preserves_authorization_header_on_same_origin() -> Result<()> {
for status in &[301, 302, 303, 307, 308] {
let server = MockServer::start().await;
Mock::given(method("GET"))
.respond_with(
ResponseTemplate::new(*status)
.insert_header("location", format!("{}/redirect", server.uri())),
)
.mount(&server)
.await;

let request = Client::new()
.get(server.uri())
.basic_auth("username", Some("password"))
.build()
.unwrap();

assert!(request.headers().contains_key(AUTHORIZATION));

let response = Client::builder()
.redirect(reqwest::redirect::Policy::none())
.build()
.unwrap()
.execute(request.try_clone().unwrap())
.await
.unwrap();

let redirect_request =
request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)?
.unwrap();
assert!(redirect_request.headers().contains_key(AUTHORIZATION));
}

Ok(())
}

#[tokio::test]
async fn test_redirect_preserves_fragment() -> Result<()> {
for status in &[301, 302, 303, 307, 308] {
let server = MockServer::start().await;
Mock::given(method("GET"))
.respond_with(
ResponseTemplate::new(*status)
.insert_header("location", format!("{}/redirect", server.uri())),
)
.mount(&server)
.await;

let request = Client::new()
.get(format!("{}#fragment", server.uri()))
.build()
.unwrap();

let response = Client::builder()
.redirect(reqwest::redirect::Policy::none())
.build()
.unwrap()
.execute(request.try_clone().unwrap())
.await
.unwrap();

let redirect_request =
request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)?
.unwrap();
assert!(
redirect_request
.url()
.fragment()
.is_some_and(|fragment| fragment == "fragment")
);
}

Ok(())
}

#[tokio::test]
async fn test_redirect_removes_authorization_header_on_cross_origin() -> Result<()> {
for status in &[301, 302, 303, 307, 308] {
let server = MockServer::start().await;
Mock::given(method("GET"))
.respond_with(
ResponseTemplate::new(*status)
.insert_header("location", "https://cross-origin.com/simple"),
)
.mount(&server)
.await;

let request = Client::new()
.get(server.uri())
.basic_auth("username", Some("password"))
.build()
.unwrap();

assert!(request.headers().contains_key(AUTHORIZATION));

let response = Client::builder()
.redirect(reqwest::redirect::Policy::none())
.build()
.unwrap()
.execute(request.try_clone().unwrap())
.await
.unwrap();

let redirect_request =
request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)?
.unwrap();
assert!(!redirect_request.headers().contains_key(AUTHORIZATION));
}

Ok(())
}

#[tokio::test]
async fn test_redirect_303_changes_post_to_get() -> Result<()> {
let server = MockServer::start().await;
Mock::given(method("POST"))
.respond_with(
ResponseTemplate::new(303)
.insert_header("location", format!("{}/redirect", server.uri())),
)
.mount(&server)
.await;

let request = Client::new()
.post(server.uri())
.basic_auth("username", Some("password"))
.build()
.unwrap();

assert_eq!(request.method(), Method::POST);

let response = Client::builder()
.redirect(reqwest::redirect::Policy::none())
.build()
.unwrap()
.execute(request.try_clone().unwrap())
.await
.unwrap();

let redirect_request =
request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)?
.unwrap();
assert_eq!(redirect_request.method(), Method::GET);

Ok(())
}

#[tokio::test]
async fn test_redirect_no_referer_if_disabled() -> Result<()> {
for status in &[301, 302, 303, 307, 308] {
let server = MockServer::start().await;
Mock::given(method("GET"))
.respond_with(
ResponseTemplate::new(*status)
.insert_header("location", format!("{}/redirect", server.uri())),
)
.mount(&server)
.await;

let request = Client::builder()
.referer(false)
.build()
.unwrap()
.get(server.uri())
.basic_auth("username", Some("password"))
.build()
.unwrap();

assert!(!request.headers().contains_key(REFERER));

let response = Client::builder()
.redirect(reqwest::redirect::Policy::none())
.build()
.unwrap()
.execute(request.try_clone().unwrap())
.await
.unwrap();

let redirect_request =
request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)?
.unwrap();

assert!(!redirect_request.headers().contains_key(REFERER));
}

Ok(())
}
}

@@ -1,4 +1,3 @@
use std::fmt::{Debug, Display, Formatter};
use std::time::{Duration, SystemTime};
use std::{borrow::Cow, path::Path};

@@ -100,44 +99,62 @@
}
}

/// Either a cached client error or a (user specified) error from the callback
/// Dispatch type: Either a cached client error or a (user specified) error from the callback
pub enum CachedClientError<CallbackError: std::error::Error + 'static> {
Client(Error),
Callback(CallbackError),
Client {
retries: Option<u32>,
err: Error,
},
Callback {
retries: Option<u32>,
err: CallbackError,
},
}

impl<CallbackError: std::error::Error + 'static> Display for CachedClientError<CallbackError> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
impl<CallbackError: std::error::Error + 'static> CachedClientError<CallbackError> {
/// Attach the number of retries to the error context.
///
/// Adds to existing errors if any, in case different layers retried.
fn with_retries(self, retries: u32) -> Self {
match self {
CachedClientError::Client(err) => write!(f, "{err}"),
CachedClientError::Callback(err) => write!(f, "{err}"),
CachedClientError::Client {
retries: existing_retries,
err,
} => CachedClientError::Client {
retries: Some(existing_retries.unwrap_or_default() + retries),
err,
},
CachedClientError::Callback {
retries: existing_retries,
err,
} => CachedClientError::Callback {
retries: Some(existing_retries.unwrap_or_default() + retries),
err,
},
}
}
}

impl<CallbackError: std::error::Error + 'static> Debug for CachedClientError<CallbackError> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
fn retries(&self) -> Option<u32> {
match self {
CachedClientError::Client(err) => write!(f, "{err:?}"),
CachedClientError::Callback(err) => write!(f, "{err:?}"),
CachedClientError::Client { retries, .. } => *retries,
CachedClientError::Callback { retries, .. } => *retries,
}
}
}

impl<CallbackError: std::error::Error + 'static> std::error::Error
for CachedClientError<CallbackError>
{
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
fn error(&self) -> &dyn std::error::Error {
match self {
CachedClientError::Client(err) => Some(err),
CachedClientError::Callback(err) => Some(err),
CachedClientError::Client { err, .. } => err,
CachedClientError::Callback { err, .. } => err,
}
}
}

impl<CallbackError: std::error::Error + 'static> From<Error> for CachedClientError<CallbackError> {
fn from(error: Error) -> Self {
Self::Client(error)
Self::Client {
retries: None,
err: error,
}
}
}
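
// Editor's sketch, not part of the diff: how `with_retries` accumulates
// counts when several layers retried. A simplified stand-in enum is used so
// the snippet runs on its own.
#[derive(Debug, PartialEq)]
enum MiniError {
    Client { retries: Option<u32> },
}

impl MiniError {
    fn with_retries(self, retries: u32) -> Self {
        match self {
            MiniError::Client { retries: existing } => MiniError::Client {
                retries: Some(existing.unwrap_or_default() + retries),
            },
        }
    }
}

fn main() {
    // Two retries recorded by the middleware, three more by the cache layer.
    let err = MiniError::Client { retries: Some(2) }.with_retries(3);
    assert_eq!(err, MiniError::Client { retries: Some(5) });
}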

@@ -145,15 +162,35 @@ impl<CallbackError: std::error::Error + 'static> From<ErrorKind>
for CachedClientError<CallbackError>
{
fn from(error: ErrorKind) -> Self {
Self::Client(error.into())
Self::Client {
retries: None,
err: error.into(),
}
}
}

impl<E: Into<Self> + std::error::Error + 'static> From<CachedClientError<E>> for Error {
/// Attach retry error context, if there were retries.
fn from(error: CachedClientError<E>) -> Self {
match error {
CachedClientError::Client(error) => error,
CachedClientError::Callback(error) => error.into(),
CachedClientError::Client {
retries: Some(retries),
err,
} => ErrorKind::RequestWithRetries {
source: Box::new(err.into_kind()),
retries,
}
.into(),
CachedClientError::Client { retries: None, err } => err,
CachedClientError::Callback {
retries: Some(retries),
err,
} => ErrorKind::RequestWithRetries {
source: Box::new(err.into().into_kind()),
retries,
}
.into(),
CachedClientError::Callback { retries: None, err } => err.into(),
}
}
}

@@ -385,7 +422,7 @@ impl CachedClient {
let data = response_callback(response)
.boxed_local()
.await
.map_err(|err| CachedClientError::Callback(err))?;
.map_err(|err| CachedClientError::Callback { retries: None, err })?;
let Some(cache_policy) = cache_policy else {
return Ok(data.into_target());
};

@@ -486,7 +523,6 @@ impl CachedClient {
debug!("Sending revalidation request for: {url}");
let response = self
.0
.for_host(&url)
.execute(req)
.instrument(info_span!("revalidation_request", url = url.as_str()))
.await

@@ -527,12 +563,23 @@ impl CachedClient {
let cache_policy_builder = CachePolicyBuilder::new(&req);
let response = self
.0
.for_host(&url)
.execute(req)
.await
.map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?
.error_for_status()
.map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
.map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?;

let retry_count = response
.extensions()
.get::<reqwest_retry::RetryCount>()
.map(|retries| retries.value());

if let Err(status_error) = response.error_for_status_ref() {
return Err(CachedClientError::<Error>::Client {
retries: retry_count,
err: ErrorKind::from_reqwest(url, status_error).into(),
}
.into());
}

let cache_policy = cache_policy_builder.build(&response);
let cache_policy = if cache_policy.to_archived().is_storable() {
Some(Box::new(cache_policy))

@@ -579,7 +626,7 @@ impl CachedClient {
cache_control: CacheControl,
response_callback: Callback,
) -> Result<Payload::Target, CachedClientError<CallBackError>> {
let mut n_past_retries = 0;
let mut past_retries = 0;
let start_time = SystemTime::now();
let retry_policy = self.uncached().retry_policy();
loop {

@@ -587,11 +634,20 @@ impl CachedClient {
let result = self
.get_cacheable(fresh_req, cache_entry, cache_control, &response_callback)
.await;

// Check if the middleware already performed retries
let middleware_retries = match &result {
Err(err) => err.retries().unwrap_or_default(),
Ok(_) => 0,
};

if result
.as_ref()
.is_err_and(|err| is_extended_transient_error(err))
.is_err_and(|err| is_extended_transient_error(err.error()))
{
let retry_decision = retry_policy.should_retry(start_time, n_past_retries);
// If middleware already retried, consider that in our retry budget
let total_retries = past_retries + middleware_retries;
let retry_decision = retry_policy.should_retry(start_time, total_retries);
if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
debug!(
"Transient failure while handling response from {}; retrying...",

@@ -601,10 +657,15 @@ impl CachedClient {
.duration_since(SystemTime::now())
.unwrap_or_else(|_| Duration::default());
tokio::time::sleep(duration).await;
n_past_retries += 1;
past_retries += 1;
continue;
}
}

if past_retries > 0 {
return result.map_err(|err| err.with_retries(past_retries));
}

return result;
}
}
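
// Editor's sketch, not part of the diff: the shared retry budget above,
// exercised against `reqwest_retry` directly. With a 3-retry policy, retries
// already spent inside the middleware count against this layer's budget too.
use std::time::SystemTime;

use reqwest_retry::policies::ExponentialBackoff;
use reqwest_retry::{RetryDecision, RetryPolicy};

fn main() {
    let retry_policy = ExponentialBackoff::builder().build_with_max_retries(3);
    let start_time = SystemTime::now();

    let past_retries = 1; // retried once in this layer
    let middleware_retries = 2; // plus twice inside the middleware
    let total_retries = past_retries + middleware_retries;

    match retry_policy.should_retry(start_time, total_retries) {
        RetryDecision::DoNotRetry => println!("budget exhausted; surface the error"),
        RetryDecision::Retry { execute_after } => println!("retry at {execute_after:?}"),
    }
}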
|
||||
|
@ -622,7 +683,7 @@ impl CachedClient {
|
|||
cache_entry: &CacheEntry,
|
||||
response_callback: Callback,
|
||||
) -> Result<Payload, CachedClientError<CallBackError>> {
|
||||
let mut n_past_retries = 0;
|
||||
let mut past_retries = 0;
|
||||
let start_time = SystemTime::now();
|
||||
let retry_policy = self.uncached().retry_policy();
|
||||
loop {
|
||||
|
@ -630,12 +691,20 @@ impl CachedClient {
|
|||
let result = self
|
||||
.skip_cache(fresh_req, cache_entry, &response_callback)
|
||||
.await;
|
||||
|
||||
// Check if the middleware already performed retries
|
||||
let middleware_retries = match &result {
|
||||
Err(err) => err.retries().unwrap_or_default(),
|
||||
_ => 0,
|
||||
};
|
||||
|
||||
if result
|
||||
.as_ref()
|
||||
.err()
|
||||
.is_some_and(|err| is_extended_transient_error(err))
|
||||
.is_some_and(|err| is_extended_transient_error(err.error()))
|
||||
{
|
||||
let retry_decision = retry_policy.should_retry(start_time, n_past_retries);
|
||||
let total_retries = past_retries + middleware_retries;
|
||||
let retry_decision = retry_policy.should_retry(start_time, total_retries);
|
||||
if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
|
||||
debug!(
|
||||
"Transient failure while handling response from {}; retrying...",
|
||||
|
@ -645,10 +714,15 @@ impl CachedClient {
|
|||
.duration_since(SystemTime::now())
|
||||
.unwrap_or_else(|_| Duration::default());
|
||||
tokio::time::sleep(duration).await;
|
||||
n_past_retries += 1;
|
||||
past_retries += 1;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if past_retries > 0 {
|
||||
return result.map_err(|err| err.with_retries(past_retries));
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -197,6 +197,13 @@ pub enum ErrorKind {
|
|||
#[error("Failed to fetch: `{0}`")]
|
||||
WrappedReqwestError(DisplaySafeUrl, #[source] WrappedReqwestError),
|
||||
|
||||
/// Add the number of failed retries to the error.
|
||||
#[error("Request failed after {retries} retries")]
|
||||
RequestWithRetries {
|
||||
source: Box<ErrorKind>,
|
||||
retries: u32,
|
||||
},
|
||||
|
||||
#[error("Received some unexpected JSON from {}", url)]
|
||||
BadJson {
|
||||
source: serde_json::Error,
|
||||
|
|
|
@ -246,7 +246,7 @@ impl<'a> FlatIndexClient<'a> {
|
|||
.collect();
|
||||
Ok(FlatIndexEntries::from_entries(files))
|
||||
}
|
||||
Err(CachedClientError::Client(err)) if err.is_offline() => {
|
||||
Err(CachedClientError::Client { err, .. }) if err.is_offline() => {
|
||||
Ok(FlatIndexEntries::offline())
|
||||
}
|
||||
Err(err) => Err(err.into()),
|
||||
|
|
|
@ -21,7 +21,6 @@ use crate::rkyvutil::OwnedArchive;
|
|||
rkyv::Serialize,
|
||||
)]
|
||||
#[rkyv(derive(Debug))]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct CacheControl {
|
||||
// directives for requests and responses
|
||||
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-max-age>
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
pub use base_client::{
|
||||
AuthIntegration, BaseClient, BaseClientBuilder, DEFAULT_RETRIES, ExtraMiddleware,
|
||||
UvRetryableStrategy, is_extended_transient_error,
|
||||
RedirectClientWithMiddleware, RequestBuilder, UvRetryableStrategy, is_extended_transient_error,
|
||||
};
|
||||
pub use cached_client::{CacheControl, CachedClient, CachedClientError, DataWithCachePolicy};
|
||||
pub use error::{Error, ErrorKind, WrappedReqwestError};
|
||||
|
|
|
@ -10,7 +10,6 @@ use futures::{FutureExt, StreamExt, TryStreamExt};
|
|||
use http::{HeaderMap, StatusCode};
|
||||
use itertools::Either;
|
||||
use reqwest::{Proxy, Response};
|
||||
use reqwest_middleware::ClientWithMiddleware;
|
||||
use rustc_hash::FxHashMap;
|
||||
use tokio::sync::{Mutex, Semaphore};
|
||||
use tracing::{Instrument, debug, info_span, instrument, trace, warn};
|
||||
|
@ -35,15 +34,15 @@ use uv_redacted::DisplaySafeUrl;
|
|||
use uv_small_str::SmallString;
|
||||
use uv_torch::TorchStrategy;
|
||||
|
||||
use crate::base_client::{BaseClientBuilder, ExtraMiddleware};
|
||||
use crate::base_client::{BaseClientBuilder, ExtraMiddleware, RedirectPolicy};
|
||||
use crate::cached_client::CacheControl;
|
||||
use crate::flat_index::FlatIndexEntry;
|
||||
use crate::html::SimpleHtml;
|
||||
use crate::remote_metadata::wheel_metadata_from_remote_zip;
|
||||
use crate::rkyvutil::OwnedArchive;
|
||||
use crate::{
|
||||
BaseClient, CachedClient, CachedClientError, Error, ErrorKind, FlatIndexClient,
|
||||
FlatIndexEntries,
|
||||
BaseClient, CachedClient, Error, ErrorKind, FlatIndexClient, FlatIndexEntries,
|
||||
RedirectClientWithMiddleware,
|
||||
};
|
||||
|
||||
/// A builder for an [`RegistryClient`].
|
||||
|
@ -152,9 +151,23 @@ impl<'a> RegistryClientBuilder<'a> {
|
|||
self
|
||||
}
|
||||
|
||||
/// Allows credentials to be propagated on cross-origin redirects.
|
||||
///
|
||||
/// WARNING: This should only be available for tests. In production code, propagating credentials
|
||||
/// during cross-origin redirects can lead to security vulnerabilities including credential
|
||||
/// leakage to untrusted domains.
|
||||
#[cfg(test)]
|
||||
#[must_use]
|
||||
pub fn allow_cross_origin_credentials(mut self) -> Self {
|
||||
self.base_client_builder = self.base_client_builder.allow_cross_origin_credentials();
|
||||
self
|
||||
}
|
||||
|
||||
pub fn build(self) -> RegistryClient {
|
||||
// Build a base client
|
||||
let builder = self.base_client_builder;
|
||||
let builder = self
|
||||
.base_client_builder
|
||||
.redirect(RedirectPolicy::RetriggerMiddleware);
|
||||
|
||||
let client = builder.build();
|
||||
|
||||
|
@ -251,7 +264,7 @@ impl RegistryClient {
|
|||
}
|
||||
|
||||
/// Return the [`BaseClient`] used by this client.
|
||||
pub fn uncached_client(&self, url: &DisplaySafeUrl) -> &ClientWithMiddleware {
|
||||
pub fn uncached_client(&self, url: &DisplaySafeUrl) -> &RedirectClientWithMiddleware {
|
||||
self.client.uncached().for_host(url)
|
||||
}
|
||||
|
||||
|
@ -558,7 +571,7 @@ impl RegistryClient {
|
|||
let simple_request = self
|
||||
.uncached_client(url)
|
||||
.get(Url::from(url.clone()))
|
||||
.header("Accept-Encoding", "gzip")
|
||||
.header("Accept-Encoding", "gzip, deflate, zstd")
|
||||
.header("Accept", MediaType::accepts())
|
||||
.build()
|
||||
.map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
|
||||
|
@ -607,18 +620,16 @@ impl RegistryClient {
|
|||
.boxed_local()
|
||||
.instrument(info_span!("parse_simple_api", package = %package_name))
|
||||
};
|
||||
self.cached_client()
|
||||
let simple = self
|
||||
.cached_client()
|
||||
.get_cacheable_with_retry(
|
||||
simple_request,
|
||||
cache_entry,
|
||||
cache_control,
|
||||
parse_simple_response,
|
||||
)
|
||||
.await
|
||||
.map_err(|err| match err {
|
||||
CachedClientError::Client(err) => err,
|
||||
CachedClientError::Callback(err) => err,
|
||||
})
|
||||
.await?;
|
||||
Ok(simple)
|
||||
}
|
||||
|
||||
/// Fetch the [`SimpleMetadata`] from a local file, using a PEP 503-compatible directory
|
||||
|
@ -900,15 +911,13 @@ impl RegistryClient {
|
|||
.map_err(|err| ErrorKind::AsyncHttpRangeReader(url.clone(), err))?;
|
||||
trace!("Getting metadata for {filename} by range request");
|
||||
let text = wheel_metadata_from_remote_zip(filename, url, &mut reader).await?;
|
||||
let metadata =
|
||||
ResolutionMetadata::parse_metadata(text.as_bytes()).map_err(|err| {
|
||||
Error::from(ErrorKind::MetadataParseError(
|
||||
filename.clone(),
|
||||
url.to_string(),
|
||||
Box::new(err),
|
||||
))
|
||||
})?;
|
||||
Ok::<ResolutionMetadata, CachedClientError<Error>>(metadata)
|
||||
ResolutionMetadata::parse_metadata(text.as_bytes()).map_err(|err| {
|
||||
Error::from(ErrorKind::MetadataParseError(
|
||||
filename.clone(),
|
||||
url.to_string(),
|
||||
Box::new(err),
|
||||
))
|
||||
})
|
||||
}
|
||||
.boxed_local()
|
||||
.instrument(info_span!("read_metadata_range_request", wheel = %filename))
|
||||
|
@@ -1222,12 +1231,191 @@ impl Connectivity {
 mod tests {
     use std::str::FromStr;

     use url::Url;
     use uv_normalize::PackageName;
     use uv_pypi_types::{JoinRelativeError, SimpleJson};
     use uv_redacted::DisplaySafeUrl;

     use crate::{SimpleMetadata, SimpleMetadatum, html::SimpleHtml};

+    use uv_cache::Cache;
+    use wiremock::matchers::{basic_auth, method, path_regex};
+    use wiremock::{Mock, MockServer, ResponseTemplate};
+
+    use crate::RegistryClientBuilder;
+
+    type Error = Box<dyn std::error::Error>;
+
+    async fn start_test_server(username: &'static str, password: &'static str) -> MockServer {
+        let server = MockServer::start().await;
+
+        Mock::given(method("GET"))
+            .and(basic_auth(username, password))
+            .respond_with(ResponseTemplate::new(200))
+            .mount(&server)
+            .await;
+
+        Mock::given(method("GET"))
+            .respond_with(ResponseTemplate::new(401))
+            .mount(&server)
+            .await;
+
+        server
+    }
+
+    #[tokio::test]
+    async fn test_redirect_to_server_with_credentials() -> Result<(), Error> {
+        let username = "user";
+        let password = "password";
+
+        let auth_server = start_test_server(username, password).await;
+        let auth_base_url = DisplaySafeUrl::parse(&auth_server.uri())?;
+
+        let redirect_server = MockServer::start().await;
+
+        // Configure the redirect server to respond with a 302 to the auth server
+        Mock::given(method("GET"))
+            .respond_with(
+                ResponseTemplate::new(302).insert_header("Location", format!("{auth_base_url}")),
+            )
+            .mount(&redirect_server)
+            .await;
+
+        let redirect_server_url = DisplaySafeUrl::parse(&redirect_server.uri())?;
+
+        let cache = Cache::temp()?;
+        let registry_client = RegistryClientBuilder::new(cache)
+            .allow_cross_origin_credentials()
+            .build();
+        let client = registry_client.cached_client().uncached();
+
+        assert_eq!(
+            client
+                .for_host(&redirect_server_url)
+                .get(redirect_server.uri())
+                .send()
+                .await?
+                .status(),
+            401,
+            "Requests should fail if credentials are missing"
+        );
+
+        let mut url = redirect_server_url.clone();
+        let _ = url.set_username(username);
+        let _ = url.set_password(Some(password));
+
+        assert_eq!(
+            client
+                .for_host(&redirect_server_url)
+                .get(Url::from(url))
+                .send()
+                .await?
+                .status(),
+            200,
+            "Requests should succeed if credentials are present"
+        );
+
+        Ok(())
+    }
+
+    #[tokio::test]
+    async fn test_redirect_root_relative_url() -> Result<(), Error> {
+        let username = "user";
+        let password = "password";
+
+        let redirect_server = MockServer::start().await;
+
+        // Configure the redirect server to respond with a 307 with a relative URL.
+        Mock::given(method("GET"))
+            .and(path_regex("/foo/"))
+            .respond_with(
+                ResponseTemplate::new(307).insert_header("Location", "/bar/baz/".to_string()),
+            )
+            .mount(&redirect_server)
+            .await;
+
+        Mock::given(method("GET"))
+            .and(path_regex("/bar/baz/"))
+            .and(basic_auth(username, password))
+            .respond_with(ResponseTemplate::new(200))
+            .mount(&redirect_server)
+            .await;
+
+        let redirect_server_url = DisplaySafeUrl::parse(&redirect_server.uri())?.join("foo/")?;
+
+        let cache = Cache::temp()?;
+        let registry_client = RegistryClientBuilder::new(cache)
+            .allow_cross_origin_credentials()
+            .build();
+        let client = registry_client.cached_client().uncached();
+
+        let mut url = redirect_server_url.clone();
+        let _ = url.set_username(username);
+        let _ = url.set_password(Some(password));
+
+        assert_eq!(
+            client
+                .for_host(&url)
+                .get(Url::from(url))
+                .send()
+                .await?
+                .status(),
+            200,
+            "Requests should succeed for relative URL"
+        );
+
+        Ok(())
+    }
+
+    #[tokio::test]
+    async fn test_redirect_relative_url() -> Result<(), Error> {
+        let username = "user";
+        let password = "password";
+
+        let redirect_server = MockServer::start().await;
+
+        // Configure the redirect server to respond with a 307 with a relative URL.
+        Mock::given(method("GET"))
+            .and(path_regex("/foo/bar/baz/"))
+            .and(basic_auth(username, password))
+            .respond_with(ResponseTemplate::new(200))
+            .mount(&redirect_server)
+            .await;
+
+        Mock::given(method("GET"))
+            .and(path_regex("/foo/"))
+            .and(basic_auth(username, password))
+            .respond_with(
+                ResponseTemplate::new(307).insert_header("Location", "bar/baz/".to_string()),
+            )
+            .mount(&redirect_server)
+            .await;
+
+        let cache = Cache::temp()?;
+        let registry_client = RegistryClientBuilder::new(cache)
+            .allow_cross_origin_credentials()
+            .build();
+        let client = registry_client.cached_client().uncached();
+
+        let redirect_server_url = DisplaySafeUrl::parse(&redirect_server.uri())?.join("foo/")?;
+        let mut url = redirect_server_url.clone();
+        let _ = url.set_username(username);
+        let _ = url.set_password(Some(password));
+
+        assert_eq!(
+            client
+                .for_host(&url)
+                .get(Url::from(url))
+                .send()
+                .await?
+                .status(),
+            200,
+            "Requests should succeed for relative URL"
+        );
+
+        Ok(())
+    }
+
     #[test]
     fn ignore_failing_files() {
         // 1.7.7 has an invalid requires-python field (double comma), 1.7.8 is valid
@@ -4,7 +4,7 @@ use uv_pep508::PackageName;

 use crate::{PackageNameSpecifier, PackageNameSpecifiers};

-#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Hash)]
 pub enum BuildKind {
     /// A PEP 517 wheel build.
     #[default]
@@ -295,6 +295,15 @@ pub struct DependencyGroupsWithDefaults {
 }

 impl DependencyGroupsWithDefaults {
+    /// Do not enable any groups
+    ///
+    /// Many places in the code need to know what dependency-groups are active,
+    /// but various commands or subsystems never enable any dependency-groups,
+    /// in which case they want this.
+    pub fn none() -> Self {
+        DependencyGroups::default().with_defaults(DefaultGroups::default())
+    }
+
     /// Returns `true` if the specification was enabled, and *only* because it was a default
     pub fn contains_because_default(&self, group: &GroupName) -> bool {
         self.cur.contains(group) && !self.prev.contains(group)
@@ -263,6 +263,14 @@ pub struct ExtrasSpecificationWithDefaults {
 }

 impl ExtrasSpecificationWithDefaults {
+    /// Do not enable any extras
+    ///
+    /// Many places in the code need to know what extras are active,
+    /// but various commands or subsystems never enable any extras,
+    /// in which case they want this.
+    pub fn none() -> Self {
+        ExtrasSpecification::default().with_defaults(DefaultExtras::default())
+    }
     /// Returns `true` if the specification was enabled, and *only* because it was a default
     pub fn contains_because_default(&self, extra: &ExtraName) -> bool {
         self.cur.contains(extra) && !self.prev.contains(extra)
@@ -1,3 +1,5 @@
+#[cfg(feature = "schemars")]
+use std::borrow::Cow;
 use std::str::FromStr;

 use uv_pep508::PackageName;

@@ -63,28 +65,16 @@ impl<'de> serde::Deserialize<'de> for PackageNameSpecifier {

 #[cfg(feature = "schemars")]
 impl schemars::JsonSchema for PackageNameSpecifier {
-    fn schema_name() -> String {
-        "PackageNameSpecifier".to_string()
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("PackageNameSpecifier")
     }

-    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        schemars::schema::SchemaObject {
-            instance_type: Some(schemars::schema::InstanceType::String.into()),
-            string: Some(Box::new(schemars::schema::StringValidation {
-                // See: https://packaging.python.org/en/latest/specifications/name-normalization/#name-format
-                pattern: Some(
-                    r"^(:none:|:all:|([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9]))$"
-                        .to_string(),
-                ),
-                ..schemars::schema::StringValidation::default()
-            })),
-            metadata: Some(Box::new(schemars::schema::Metadata {
-                description: Some("The name of a package, or `:all:` or `:none:` to select or omit all packages, respectively.".to_string()),
-                ..schemars::schema::Metadata::default()
-            })),
-            ..schemars::schema::SchemaObject::default()
-        }
-        .into()
+    fn json_schema(_gen: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
+        schemars::json_schema!({
+            "type": "string",
+            "pattern": r"^(:none:|:all:|([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9]))$",
+            "description": "The name of a package, or `:all:` or `:none:` to select or omit all packages, respectively.",
+        })
     }
 }
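The hunk above is representative of the schemars 0.8 to 1.0 migration repeated across these files: `schema_name` now returns `Cow<'static, str>`, the generator type moved to `schemars::generate::SchemaGenerator`, and handwritten `SchemaObject` literals collapse into the `schemars::json_schema!` macro. A minimal sketch of the new pattern, using a hypothetical `MyId` newtype rather than any type from this change set:

use std::borrow::Cow;

struct MyId(String);

impl schemars::JsonSchema for MyId {
    fn schema_name() -> Cow<'static, str> {
        Cow::Borrowed("MyId")
    }

    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
        // `json_schema!` takes inline JSON and returns a `schemars::Schema`.
        schemars::json_schema!({
            "type": "string",
            "description": "An opaque identifier (hypothetical example)."
        })
    }
}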
@@ -1,5 +1,6 @@
-use std::fmt::Formatter;
-use std::str::FromStr;
+#[cfg(feature = "schemars")]
+use std::borrow::Cow;
+use std::{fmt::Formatter, str::FromStr};

 use uv_pep440::{Version, VersionSpecifier, VersionSpecifiers, VersionSpecifiersParseError};

@@ -36,20 +37,15 @@ impl FromStr for RequiredVersion {

 #[cfg(feature = "schemars")]
 impl schemars::JsonSchema for RequiredVersion {
-    fn schema_name() -> String {
-        String::from("RequiredVersion")
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("RequiredVersion")
     }

-    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        schemars::schema::SchemaObject {
-            instance_type: Some(schemars::schema::InstanceType::String.into()),
-            metadata: Some(Box::new(schemars::schema::Metadata {
-                description: Some("A version specifier, e.g. `>=0.5.0` or `==0.5.0`.".to_string()),
-                ..schemars::schema::Metadata::default()
-            })),
-            ..schemars::schema::SchemaObject::default()
-        }
-        .into()
+    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
+        schemars::json_schema!({
+            "type": "string",
+            "description": "A version specifier, e.g. `>=0.5.0` or `==0.5.0`."
+        })
     }
 }
@@ -1,4 +1,6 @@
-#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+#[derive(
+    Debug, Default, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize,
+)]
 #[serde(rename_all = "kebab-case", deny_unknown_fields)]
 pub enum SourceStrategy {
     /// Use `tool.uv.sources` when resolving dependencies.
@@ -62,7 +62,7 @@ pub static RAYON_PARALLELISM: AtomicUsize = AtomicUsize::new(0);
 /// `LazyLock::force(&RAYON_INITIALIZE)`.
 pub static RAYON_INITIALIZE: LazyLock<()> = LazyLock::new(|| {
     rayon::ThreadPoolBuilder::new()
-        .num_threads(RAYON_PARALLELISM.load(Ordering::SeqCst))
+        .num_threads(RAYON_PARALLELISM.load(Ordering::Relaxed))
         .stack_size(min_stack_size())
         .build_global()
         .expect("failed to initialize global rayon pool");
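A note on the `SeqCst` to `Relaxed` switch above: the parallelism value is a standalone counter written before the pool is built and read once inside the `LazyLock` initializer, so no cross-thread ordering is established through it and `Relaxed` suffices. A minimal standalone sketch of the same pattern:

use std::sync::LazyLock;
use std::sync::atomic::{AtomicUsize, Ordering};

static PARALLELISM: AtomicUsize = AtomicUsize::new(0);
static INITIALIZE: LazyLock<usize> = LazyLock::new(|| {
    // A single independent load; no other memory is synchronized through it.
    PARALLELISM.load(Ordering::Relaxed)
});

fn main() {
    PARALLELISM.store(8, Ordering::Relaxed);
    assert_eq!(*LazyLock::force(&INITIALIZE), 8);
}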
@@ -1,4 +1,6 @@
 use serde::{Deserialize, Deserializer};
+#[cfg(feature = "schemars")]
+use std::borrow::Cow;
 use std::str::FromStr;
 use url::Url;

@@ -143,20 +145,15 @@ impl std::fmt::Display for TrustedHost {

 #[cfg(feature = "schemars")]
 impl schemars::JsonSchema for TrustedHost {
-    fn schema_name() -> String {
-        "TrustedHost".to_string()
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("TrustedHost")
     }

-    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        schemars::schema::SchemaObject {
-            instance_type: Some(schemars::schema::InstanceType::String.into()),
-            metadata: Some(Box::new(schemars::schema::Metadata {
-                description: Some("A host or host-port pair.".to_string()),
-                ..schemars::schema::Metadata::default()
-            })),
-            ..schemars::schema::SchemaObject::default()
-        }
-        .into()
+    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
+        schemars::json_schema!({
+            "type": "string",
+            "description": "A host or host-port pair."
+        })
     }
 }
@@ -4,7 +4,7 @@ use clap::Parser;
 use tracing::info;

 use uv_cache::{Cache, CacheArgs};
-use uv_configuration::Concurrency;
+use uv_configuration::{Concurrency, PreviewMode};
 use uv_python::{EnvironmentPreference, PythonEnvironment, PythonRequest};

 #[derive(Parser)]

@@ -26,6 +26,7 @@ pub(crate) async fn compile(args: CompileArgs) -> anyhow::Result<()> {
             &PythonRequest::default(),
             EnvironmentPreference::OnlyVirtual,
             &cache,
+            PreviewMode::Disabled,
         )?
         .into_interpreter();
         interpreter.sys_executable().to_path_buf()
@@ -3,7 +3,7 @@ use std::path::PathBuf;
 use anstream::println;
 use anyhow::{Result, bail};
 use pretty_assertions::StrComparison;
-use schemars::{JsonSchema, schema_for};
+use schemars::JsonSchema;
 use serde::Deserialize;

 use uv_settings::Options as SettingsOptions;

@@ -91,7 +91,10 @@ const REPLACEMENTS: &[(&str, &str)] = &[

 /// Generate the JSON schema for the combined options as a string.
 fn generate() -> String {
-    let schema = schema_for!(CombinedOptions);
+    let settings = schemars::generate::SchemaSettings::draft07();
+    let generator = schemars::SchemaGenerator::new(settings);
+    let schema = generator.into_root_schema_for::<CombinedOptions>();

     let mut output = serde_json::to_string_pretty(&schema).unwrap();

     for (value, replacement) in REPLACEMENTS {
@@ -11,7 +11,7 @@ use crate::ROOT_DIR;
 use crate::generate_all::Mode;

 /// Contains current supported targets
-const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250604/cpython-unix/targets.yml";
+const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250702/cpython-unix/targets.yml";

 #[derive(clap::Args)]
 pub(crate) struct Args {

@@ -130,7 +130,7 @@ async fn generate() -> Result<String> {
     output.push_str("//! DO NOT EDIT\n");
     output.push_str("//!\n");
     output.push_str("//! Generated with `cargo run dev generate-sysconfig-metadata`\n");
-    output.push_str("//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250604/cpython-unix/targets.yml>\n");
+    output.push_str("//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250702/cpython-unix/targets.yml>\n");
     output.push_str("//!\n");

     // Disable clippy/fmt
@@ -11,6 +11,7 @@ use itertools::Itertools;
 use rustc_hash::FxHashMap;
 use thiserror::Error;
 use tracing::{debug, instrument, trace};

 use uv_build_backend::check_direct_build;
 use uv_build_frontend::{SourceBuild, SourceBuildContext};
 use uv_cache::Cache;

@@ -35,8 +36,8 @@ use uv_resolver::{
     PythonRequirement, Resolver, ResolverEnvironment,
 };
 use uv_types::{
-    AnyErrorBuild, BuildContext, BuildIsolation, BuildStack, EmptyInstalledPackages, HashStrategy,
-    InFlight,
+    AnyErrorBuild, BuildArena, BuildContext, BuildIsolation, BuildStack, EmptyInstalledPackages,
+    HashStrategy, InFlight,
 };
 use uv_workspace::WorkspaceCache;

@@ -179,6 +180,10 @@ impl BuildContext for BuildDispatch<'_> {
         &self.shared_state.git
     }

+    fn build_arena(&self) -> &BuildArena<SourceBuild> {
+        &self.shared_state.build_arena
+    }
+
     fn capabilities(&self) -> &IndexCapabilities {
         &self.shared_state.capabilities
     }

@@ -433,6 +438,7 @@ impl BuildContext for BuildDispatch<'_> {
             self.build_extra_env_vars.clone(),
             build_output,
             self.concurrency.builds,
+            self.preview,
         )
         .boxed_local()
         .await?;

@@ -447,12 +453,6 @@ impl BuildContext for BuildDispatch<'_> {
         build_kind: BuildKind,
         version_id: Option<&'data str>,
     ) -> Result<Option<DistFilename>, BuildDispatchError> {
-        // Direct builds are a preview feature with the uv build backend.
-        if self.preview.is_disabled() {
-            trace!("Preview is disabled, not checking for direct build");
-            return Ok(None);
-        }
-
         let source_tree = if let Some(subdir) = subdirectory {
             source.join(subdir)
         } else {

@@ -520,6 +520,8 @@ pub struct SharedState {
     index: InMemoryIndex,
     /// The downloaded distributions.
     in_flight: InFlight,
+    /// Build directories for any PEP 517 builds executed during resolution or installation.
+    build_arena: BuildArena<SourceBuild>,
 }

 impl SharedState {

@@ -532,6 +534,7 @@ impl SharedState {
         Self {
             git: self.git.clone(),
             capabilities: self.capabilities.clone(),
+            build_arena: self.build_arena.clone(),
             ..Default::default()
         }
     }

@@ -555,4 +558,9 @@ impl SharedState {
     pub fn capabilities(&self) -> &IndexCapabilities {
         &self.capabilities
     }
+
+    /// Return the [`BuildArena`] used by the [`SharedState`].
+    pub fn build_arena(&self) -> &BuildArena<SourceBuild> {
+        &self.build_arena
+    }
 }
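The `BuildArena` threaded through `SharedState` above is cloned along with the rest of the shared state, so clones observe the same build directories. A simplified stand-in illustrating only the shape of that pattern (the real `BuildArena` lives in `uv_types`; this `Arena` type, its API, and the strings are hypothetical):

use std::sync::{Arc, Mutex};

#[derive(Clone, Default)]
struct Arena<T>(Arc<Mutex<Vec<Arc<T>>>>);

impl<T> Arena<T> {
    fn insert(&self, value: T) -> Arc<T> {
        let value = Arc::new(value);
        self.0.lock().unwrap().push(value.clone());
        value
    }
}

fn main() {
    let arena: Arena<String> = Arena::default();
    let a = arena.insert("sdist build dir".to_string());
    let arena2 = arena.clone(); // clones share the same underlying storage
    let b = arena2.insert("editable build dir".to_string());
    assert_eq!(&*a, "sdist build dir");
    assert_eq!(&*b, "editable build dir");
}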
@ -27,7 +27,6 @@ rkyv = { workspace = true, features = ["smallvec-1"] }
|
|||
serde = { workspace = true }
|
||||
smallvec = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
url = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
insta = { version = "1.40.0" }
|
||||
|
|
|
@@ -5,7 +5,6 @@ use std::str::FromStr;

 use memchr::memchr;
 use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
 use thiserror::Error;
-use url::Url;

 use uv_cache_key::cache_digest;
 use uv_normalize::{InvalidNameError, PackageName};

@@ -300,29 +299,6 @@ impl WheelFilename {
     }
 }

-impl TryFrom<&Url> for WheelFilename {
-    type Error = WheelFilenameError;
-
-    fn try_from(url: &Url) -> Result<Self, Self::Error> {
-        let filename = url
-            .path_segments()
-            .ok_or_else(|| {
-                WheelFilenameError::InvalidWheelFileName(
-                    url.to_string(),
-                    "URL must have a path".to_string(),
-                )
-            })?
-            .next_back()
-            .ok_or_else(|| {
-                WheelFilenameError::InvalidWheelFileName(
-                    url.to_string(),
-                    "URL must contain a filename".to_string(),
-                )
-            })?;
-        Self::from_str(filename)
-    }
-}
-
 impl<'de> Deserialize<'de> for WheelFilename {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
     where
@@ -29,6 +29,7 @@ uv-platform-tags = { workspace = true }
 uv-pypi-types = { workspace = true }
 uv-redacted = { workspace = true }
+uv-small-str = { workspace = true }
 uv-warnings = { workspace = true }

 arcstr = { workspace = true }
 bitflags = { workspace = true }
@@ -26,7 +26,11 @@ impl std::fmt::Display for SourceAnnotation {
                 write!(f, "{project_name} ({})", path.portable_display())
             }
             RequirementOrigin::Group(path, project_name, group) => {
-                write!(f, "{project_name} ({}:{group})", path.portable_display())
+                if let Some(project_name) = project_name {
+                    write!(f, "{project_name} ({}:{group})", path.portable_display())
+                } else {
+                    write!(f, "({}:{group})", path.portable_display())
+                }
             }
             RequirementOrigin::Workspace => {
                 write!(f, "(workspace)")

@@ -45,11 +49,15 @@ impl std::fmt::Display for SourceAnnotation {
             }
             RequirementOrigin::Group(path, project_name, group) => {
                 // Group is not used for override
-                write!(
-                    f,
-                    "--override {project_name} ({}:{group})",
-                    path.portable_display()
-                )
+                if let Some(project_name) = project_name {
+                    write!(
+                        f,
+                        "--override {project_name} ({}:{group})",
+                        path.portable_display()
+                    )
+                } else {
+                    write!(f, "--override ({}:{group})", path.portable_display())
+                }
             }
             RequirementOrigin::Workspace => {
                 write!(f, "--override (workspace)")
@@ -1,3 +1,4 @@
+use std::borrow::Cow;
 use std::fmt::{self, Display, Formatter};
 use std::str::FromStr;

@@ -160,16 +161,33 @@ impl UrlString {
             .unwrap_or(self.as_ref())
     }

-    /// Return the [`UrlString`] with any fragments removed.
+    /// Return the [`UrlString`] (as a [`Cow`]) with any fragments removed.
     #[must_use]
-    pub fn without_fragment(&self) -> Self {
-        Self(
-            self.as_ref()
-                .split_once('#')
-                .map(|(path, _)| path)
-                .map(SmallString::from)
-                .unwrap_or_else(|| self.0.clone()),
-        )
+    pub fn without_fragment(&self) -> Cow<'_, Self> {
+        self.as_ref()
+            .split_once('#')
+            .map(|(path, _)| Cow::Owned(UrlString(SmallString::from(path))))
+            .unwrap_or(Cow::Borrowed(self))
+    }
+
+    /// Return the [`UrlString`] (as a [`Cow`]) with trailing slash removed.
+    ///
+    /// This matches the semantics of [`Url::pop_if_empty`], which will not trim a trailing slash if
+    /// it's the only path segment, e.g., `https://example.com/` would be unchanged.
+    #[must_use]
+    pub fn without_trailing_slash(&self) -> Cow<'_, Self> {
+        self.as_ref()
+            .strip_suffix('/')
+            .filter(|path| {
+                // Only strip the trailing slash if there's _another_ trailing slash that isn't a
+                // part of the scheme.
+                path.split_once("://")
+                    .map(|(_scheme, rest)| rest)
+                    .unwrap_or(path)
+                    .contains('/')
+            })
+            .map(|path| Cow::Owned(UrlString(SmallString::from(path))))
+            .unwrap_or(Cow::Borrowed(self))
     }
 }

@@ -252,16 +270,51 @@ mod tests {

     #[test]
     fn without_fragment() {
+        // Borrows a URL without a fragment
+        let url = UrlString("https://example.com/path".into());
+        assert_eq!(&*url.without_fragment(), &url);
+        assert!(matches!(url.without_fragment(), Cow::Borrowed(_)));
+
+        // Removes the fragment if present on the URL
         let url = UrlString("https://example.com/path?query#fragment".into());
         assert_eq!(
-            url.without_fragment(),
-            UrlString("https://example.com/path?query".into())
+            &*url.without_fragment(),
+            &UrlString("https://example.com/path?query".into())
         );
+        assert!(matches!(url.without_fragment(), Cow::Owned(_)));
+    }

-        let url = UrlString("https://example.com/path#fragment".into());
-        assert_eq!(url.base_str(), "https://example.com/path");
+    #[test]
+    fn without_trailing_slash() {
+        // Borrows a URL without a slash
+        let url = UrlString("https://example.com/path".into());
+        assert_eq!(url.base_str(), "https://example.com/path");
+        assert_eq!(&*url.without_trailing_slash(), &url);
+        assert!(matches!(url.without_trailing_slash(), Cow::Borrowed(_)));
+
+        // Removes the trailing slash if present on the URL
+        let url = UrlString("https://example.com/path/".into());
+        assert_eq!(
+            &*url.without_trailing_slash(),
+            &UrlString("https://example.com/path".into())
+        );
+        assert!(matches!(url.without_trailing_slash(), Cow::Owned(_)));
+
+        // Does not remove a trailing slash if it's the only path segment
+        let url = UrlString("https://example.com/".into());
+        assert_eq!(&*url.without_trailing_slash(), &url);
+        assert!(matches!(url.without_trailing_slash(), Cow::Borrowed(_)));
+
+        // Does not remove a trailing slash if it's the only path segment with a missing scheme
+        let url = UrlString("example.com/".into());
+        assert_eq!(&*url.without_trailing_slash(), &url);
+        assert!(matches!(url.without_trailing_slash(), Cow::Borrowed(_)));
+
+        // Removes the trailing slash when the scheme is missing
+        let url = UrlString("example.com/path/".into());
+        assert_eq!(
+            &*url.without_trailing_slash(),
+            &UrlString("example.com/path".into())
+        );
+        assert!(matches!(url.without_trailing_slash(), Cow::Owned(_)));
+    }
 }
@@ -12,6 +12,7 @@ use url::{ParseError, Url};

 use uv_pep508::{Scheme, VerbatimUrl, VerbatimUrlError, split_scheme};
 use uv_redacted::DisplaySafeUrl;
+use uv_warnings::warn_user;

 use crate::{Index, IndexStatusCodeStrategy, Verbatim};

@@ -37,6 +38,8 @@ impl IndexUrl {
     ///
     /// If no root directory is provided, relative paths are resolved against the current working
     /// directory.
+    ///
+    /// Normalizes non-file URLs by removing trailing slashes for consistency.
     pub fn parse(path: &str, root_dir: Option<&Path>) -> Result<Self, IndexUrlError> {
         let url = match split_scheme(path) {
             Some((scheme, ..)) => {

@@ -92,20 +95,15 @@ impl IndexUrl {

 #[cfg(feature = "schemars")]
 impl schemars::JsonSchema for IndexUrl {
-    fn schema_name() -> String {
-        "IndexUrl".to_string()
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("IndexUrl")
     }

-    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        schemars::schema::SchemaObject {
-            instance_type: Some(schemars::schema::InstanceType::String.into()),
-            metadata: Some(Box::new(schemars::schema::Metadata {
-                description: Some("The URL of an index to use for fetching packages (e.g., `https://pypi.org/simple`), or a local path.".to_string()),
-                ..schemars::schema::Metadata::default()
-            })),
-            ..schemars::schema::SchemaObject::default()
-        }
-        .into()
+    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
+        schemars::json_schema!({
+            "type": "string",
+            "description": "The URL of an index to use for fetching packages (e.g., `https://pypi.org/simple`), or a local path."
+        })
     }
 }

@@ -140,6 +138,30 @@ impl IndexUrl {
             Cow::Owned(url)
         }
     }
+
+    /// Warn user if the given URL was provided as an ambiguous relative path.
+    ///
+    /// This is a temporary warning. Ambiguous values will not be
+    /// accepted in the future.
+    pub fn warn_on_disambiguated_relative_path(&self) {
+        let Self::Path(verbatim_url) = &self else {
+            return;
+        };
+
+        if let Some(path) = verbatim_url.given() {
+            if !is_disambiguated_path(path) {
+                if cfg!(windows) {
+                    warn_user!(
+                        "Relative paths passed to `--index` or `--default-index` should be disambiguated from index names (use `.\\{path}` or `./{path}`). Support for ambiguous values will be removed in the future"
+                    );
+                } else {
+                    warn_user!(
+                        "Relative paths passed to `--index` or `--default-index` should be disambiguated from index names (use `./{path}`). Support for ambiguous values will be removed in the future"
+                    );
+                }
+            }
+        }
+    }
 }

 impl Display for IndexUrl {

@@ -162,6 +184,28 @@ impl Verbatim for IndexUrl {
     }
 }

+/// Checks if a path is disambiguated.
+///
+/// Disambiguated paths are absolute paths, paths with valid schemes,
+/// and paths starting with "./" or "../" on Unix or ".\\", "..\\",
+/// "./", or "../" on Windows.
+fn is_disambiguated_path(path: &str) -> bool {
+    if cfg!(windows) {
+        if path.starts_with(".\\") || path.starts_with("..\\") || path.starts_with('/') {
+            return true;
+        }
+    }
+    if path.starts_with("./") || path.starts_with("../") || Path::new(path).is_absolute() {
+        return true;
+    }
+    // Check if the path has a scheme (like `file://`)
+    if let Some((scheme, _)) = split_scheme(path) {
+        return Scheme::parse(scheme).is_some();
+    }
+    // This is an ambiguous relative path
+    false
+}
+
 /// An error that can occur when parsing an [`IndexUrl`].
 #[derive(Error, Debug)]
 pub enum IndexUrlError {

@@ -214,13 +258,20 @@ impl<'de> serde::de::Deserialize<'de> for IndexUrl {
 }

 impl From<VerbatimUrl> for IndexUrl {
-    fn from(url: VerbatimUrl) -> Self {
+    fn from(mut url: VerbatimUrl) -> Self {
         if url.scheme() == "file" {
             Self::Path(Arc::new(url))
-        } else if *url.raw() == *PYPI_URL {
-            Self::Pypi(Arc::new(url))
         } else {
-            Self::Url(Arc::new(url))
+            // Remove trailing slashes for consistency. They'll be re-added if necessary when
+            // querying the Simple API.
+            if let Ok(mut path_segments) = url.raw_mut().path_segments_mut() {
+                path_segments.pop_if_empty();
+            }
+            if *url.raw() == *PYPI_URL {
+                Self::Pypi(Arc::new(url))
+            } else {
+                Self::Url(Arc::new(url))
+            }
         }
     }
 }
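For reference, the trailing-slash normalization introduced above can be exercised with the `url` crate directly: `pop_if_empty` drops a trailing empty path segment, so `.../simple/` and `.../simple` compare equal after conversion. A standalone sketch (the `normalize` helper and example URL are illustrative, not part of the diff):

use url::Url;

fn normalize(mut url: Url) -> Url {
    // Drop a trailing empty segment, if any (i.e., remove the trailing slash).
    if let Ok(mut segments) = url.path_segments_mut() {
        segments.pop_if_empty();
    }
    url
}

fn main() {
    let url = Url::parse("https://example.com/simple/").unwrap();
    assert_eq!(normalize(url).as_str(), "https://example.com/simple");
}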
@ -411,6 +462,19 @@ impl<'a> IndexLocations {
|
|||
indexes
|
||||
}
|
||||
}
|
||||
|
||||
/// Add all authenticated sources to the cache.
|
||||
pub fn cache_index_credentials(&self) {
|
||||
for index in self.allowed_indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
let credentials = Arc::new(credentials);
|
||||
uv_auth::store_credentials(index.raw_url(), credentials.clone());
|
||||
if let Some(root_url) = index.root_url() {
|
||||
uv_auth::store_credentials(&root_url, credentials.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&IndexLocations> for uv_auth::Indexes {
|
||||
|
@ -511,30 +575,23 @@ impl<'a> IndexUrls {
|
|||
/// iterator.
|
||||
pub fn defined_indexes(&'a self) -> impl Iterator<Item = &'a Index> + 'a {
|
||||
if self.no_index {
|
||||
Either::Left(std::iter::empty())
|
||||
} else {
|
||||
Either::Right(
|
||||
{
|
||||
let mut seen = FxHashSet::default();
|
||||
self.indexes
|
||||
.iter()
|
||||
.filter(move |index| {
|
||||
index.name.as_ref().is_none_or(|name| seen.insert(name))
|
||||
})
|
||||
.filter(|index| !index.default)
|
||||
}
|
||||
.chain({
|
||||
let mut seen = FxHashSet::default();
|
||||
self.indexes
|
||||
.iter()
|
||||
.filter(move |index| {
|
||||
index.name.as_ref().is_none_or(|name| seen.insert(name))
|
||||
})
|
||||
.find(|index| index.default)
|
||||
.into_iter()
|
||||
}),
|
||||
)
|
||||
return Either::Left(std::iter::empty());
|
||||
}
|
||||
|
||||
let mut seen = FxHashSet::default();
|
||||
let (non_default, default) = self
|
||||
.indexes
|
||||
.iter()
|
||||
.filter(move |index| {
|
||||
if let Some(name) = &index.name {
|
||||
seen.insert(name)
|
||||
} else {
|
||||
true
|
||||
}
|
||||
})
|
||||
.partition::<Vec<_>, _>(|index| !index.default);
|
||||
|
||||
Either::Right(non_default.into_iter().chain(default))
|
||||
}
|
||||
|
||||
/// Return the `--no-index` flag.
|
||||
|
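The rewrite above replaces the doubled filter/chain construction with a single de-duplicating pass plus `partition`, which preserves relative order within each side: non-default indexes come out first, default indexes last. A standalone sketch of that ordering, using a hypothetical `Idx` stand-in and made-up index names:

#[derive(Debug)]
struct Idx {
    name: &'static str,
    default: bool,
}

fn main() {
    let indexes = vec![
        Idx { name: "internal", default: false },
        Idx { name: "pypi", default: true },
        Idx { name: "staging", default: false },
    ];
    // `partition` keeps the original order within each bucket.
    let (non_default, default): (Vec<_>, Vec<_>) =
        indexes.into_iter().partition(|index| !index.default);
    let ordered: Vec<_> = non_default.into_iter().chain(default).collect();
    assert_eq!(
        ordered.iter().map(|i| i.name).collect::<Vec<_>>(),
        ["internal", "staging", "pypi"]
    );
}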
@@ -632,3 +689,41 @@ impl IndexCapabilities {
             .insert(Flags::FORBIDDEN);
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_index_url_parse_valid_paths() {
+        // Absolute path
+        assert!(is_disambiguated_path("/absolute/path"));
+        // Relative path
+        assert!(is_disambiguated_path("./relative/path"));
+        assert!(is_disambiguated_path("../../relative/path"));
+        if cfg!(windows) {
+            // Windows absolute path
+            assert!(is_disambiguated_path("C:/absolute/path"));
+            // Windows relative path
+            assert!(is_disambiguated_path(".\\relative\\path"));
+            assert!(is_disambiguated_path("..\\..\\relative\\path"));
+        }
+    }
+
+    #[test]
+    fn test_index_url_parse_ambiguous_paths() {
+        // Test single-segment ambiguous path
+        assert!(!is_disambiguated_path("index"));
+        // Test multi-segment ambiguous path
+        assert!(!is_disambiguated_path("relative/path"));
+    }
+
+    #[test]
+    fn test_index_url_parse_with_schemes() {
+        assert!(is_disambiguated_path("file:///absolute/path"));
+        assert!(is_disambiguated_path("https://registry.com/simple/"));
+        assert!(is_disambiguated_path(
+            "git+https://github.com/example/repo.git"
+        ));
+    }
+}
@@ -73,6 +73,7 @@ pub use crate::pip_index::*;
 pub use crate::prioritized_distribution::*;
 pub use crate::requested::*;
+pub use crate::requirement::*;
 pub use crate::requires_python::*;
 pub use crate::resolution::*;
 pub use crate::resolved::*;
 pub use crate::specified_requirement::*;

@@ -100,6 +101,7 @@ mod pip_index;
 mod prioritized_distribution;
 mod requested;
+mod requirement;
 mod requires_python;
 mod resolution;
 mod resolved;
 mod specified_requirement;
@@ -3,6 +3,8 @@
 //! flags set.

 use serde::{Deserialize, Deserializer, Serialize};
+#[cfg(feature = "schemars")]
+use std::borrow::Cow;
 use std::path::Path;

 use crate::{Index, IndexUrl};

@@ -50,14 +52,14 @@ macro_rules! impl_index {

         #[cfg(feature = "schemars")]
         impl schemars::JsonSchema for $name {
-            fn schema_name() -> String {
+            fn schema_name() -> Cow<'static, str> {
                 IndexUrl::schema_name()
             }

             fn json_schema(
-                r#gen: &mut schemars::r#gen::SchemaGenerator,
-            ) -> schemars::schema::Schema {
-                IndexUrl::json_schema(r#gen)
+                generator: &mut schemars::generate::SchemaGenerator,
+            ) -> schemars::Schema {
+                IndexUrl::json_schema(generator)
             }
         }
     };
@ -831,7 +831,7 @@ pub fn implied_markers(filename: &WheelFilename) -> MarkerTree {
|
|||
tag_marker.and(MarkerTree::expression(MarkerExpression::String {
|
||||
key: MarkerValueString::PlatformMachine,
|
||||
operator: MarkerOperator::Equal,
|
||||
value: arcstr::literal!("x86_64"),
|
||||
value: arcstr::literal!("AMD64"),
|
||||
}));
|
||||
marker.or(tag_marker);
|
||||
}
|
||||
|
@ -925,7 +925,7 @@ mod tests {
|
|||
);
|
||||
assert_markers(
|
||||
"numpy-2.2.1-cp313-cp313t-win_amd64.whl",
|
||||
"sys_platform == 'win32' and platform_machine == 'x86_64'",
|
||||
"sys_platform == 'win32' and platform_machine == 'AMD64'",
|
||||
);
|
||||
assert_markers(
|
||||
"numpy-2.2.1-cp313-cp313t-win_arm64.whl",
|
||||
|
|
|
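Background for the fix above: on 64-bit Windows, CPython's `platform.machine()` reports "AMD64" rather than "x86_64" (and "ARM64" on Windows-on-ARM), so the marker implied by a `win_amd64` wheel tag must compare against "AMD64". A hypothetical mapping function illustrating the convention (not uv's actual implementation):

/// Marker value implied by a Windows wheel platform tag; values follow what
/// CPython's `platform.machine()` reports on Windows.
fn implied_platform_machine(platform_tag: &str) -> Option<&'static str> {
    match platform_tag {
        "win_amd64" => Some("AMD64"),
        "win_arm64" => Some("ARM64"),
        _ => None,
    }
}

fn main() {
    assert_eq!(implied_platform_machine("win_amd64"), Some("AMD64"));
}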
@@ -1,6 +1,6 @@
 use std::collections::Bound;

-use pubgrub::Range;
+use version_ranges::Ranges;

 use uv_distribution_filename::WheelFilename;
 use uv_pep440::{

@@ -66,15 +66,8 @@ impl RequiresPython {
     ) -> Option<Self> {
         // Convert to PubGrub range and perform an intersection.
         let range = specifiers
             .into_iter()
-            .map(|specifier| release_specifiers_to_ranges(specifier.clone()))
-            .fold(None, |range: Option<Range<Version>>, requires_python| {
-                if let Some(range) = range {
-                    Some(range.intersection(&requires_python))
-                } else {
-                    Some(requires_python)
-                }
-            })?;
+            .map(|specs| release_specifiers_to_ranges(specs.clone()))
+            .reduce(|acc, r| acc.intersection(&r))?;

         // If the intersection is empty, return `None`.
         if range.is_empty() {

@@ -97,12 +90,12 @@ impl RequiresPython {
     pub fn split(&self, bound: Bound<Version>) -> Option<(Self, Self)> {
         let RequiresPythonRange(.., upper) = &self.range;

-        let upper = Range::from_range_bounds((bound, upper.clone().into()));
+        let upper = Ranges::from_range_bounds((bound, upper.clone().into()));
         let lower = upper.complement();

         // Intersect left and right with the existing range.
-        let lower = lower.intersection(&Range::from(self.range.clone()));
-        let upper = upper.intersection(&Range::from(self.range.clone()));
+        let lower = lower.intersection(&Ranges::from(self.range.clone()));
+        let upper = upper.intersection(&Ranges::from(self.range.clone()));

         if lower.is_empty() || upper.is_empty() {
             None

@@ -353,7 +346,7 @@ impl RequiresPython {
     /// a lock file are deserialized and turned into a `ResolutionGraph`, the
     /// markers are "complexified" to put the `requires-python` assumption back
     /// into the marker explicitly.
-    pub(crate) fn simplify_markers(&self, marker: MarkerTree) -> MarkerTree {
+    pub fn simplify_markers(&self, marker: MarkerTree) -> MarkerTree {
         let (lower, upper) = (self.range().lower(), self.range().upper());
         marker.simplify_python_versions(lower.as_ref(), upper.as_ref())
     }

@@ -373,7 +366,7 @@ impl RequiresPython {
     /// ```text
     /// python_full_version >= '3.8' and python_full_version < '3.12'
     /// ```
-    pub(crate) fn complexify_markers(&self, marker: MarkerTree) -> MarkerTree {
+    pub fn complexify_markers(&self, marker: MarkerTree) -> MarkerTree {
         let (lower, upper) = (self.range().lower(), self.range().upper());
         marker.complexify_python_versions(lower.as_ref(), upper.as_ref())
     }

@@ -537,7 +530,7 @@ pub struct RequiresPythonRange(LowerBound, UpperBound);

 impl RequiresPythonRange {
     /// Initialize a [`RequiresPythonRange`] from a [`Range`].
-    pub fn from_range(range: &Range<Version>) -> Self {
+    pub fn from_range(range: &Ranges<Version>) -> Self {
         let (lower, upper) = range
             .bounding_range()
             .map(|(lower_bound, upper_bound)| (lower_bound.cloned(), upper_bound.cloned()))

@@ -575,9 +568,9 @@ impl Default for RequiresPythonRange {
     }
 }

-impl From<RequiresPythonRange> for Range<Version> {
+impl From<RequiresPythonRange> for Ranges<Version> {
     fn from(value: RequiresPythonRange) -> Self {
-        Range::from_range_bounds::<(Bound<Version>, Bound<Version>), _>((
+        Ranges::from_range_bounds::<(Bound<Version>, Bound<Version>), _>((
             value.0.into(),
             value.1.into(),
         ))

@@ -592,21 +585,18 @@ impl From<RequiresPythonRange> for Range<Version> {
 /// a simplified marker, one must re-contextualize it by adding the
 /// `requires-python` constraint back to the marker.
 #[derive(Clone, Copy, Debug, Default, Eq, PartialEq, PartialOrd, Ord, serde::Deserialize)]
-pub(crate) struct SimplifiedMarkerTree(MarkerTree);
+pub struct SimplifiedMarkerTree(MarkerTree);

 impl SimplifiedMarkerTree {
     /// Simplifies the given markers by assuming the given `requires-python`
     /// bound is true.
-    pub(crate) fn new(
-        requires_python: &RequiresPython,
-        marker: MarkerTree,
-    ) -> SimplifiedMarkerTree {
+    pub fn new(requires_python: &RequiresPython, marker: MarkerTree) -> SimplifiedMarkerTree {
         SimplifiedMarkerTree(requires_python.simplify_markers(marker))
     }

     /// Complexifies the given markers by adding the given `requires-python` as
     /// a constraint to these simplified markers.
-    pub(crate) fn into_marker(self, requires_python: &RequiresPython) -> MarkerTree {
+    pub fn into_marker(self, requires_python: &RequiresPython) -> MarkerTree {
         requires_python.complexify_markers(self.0)
     }

@@ -614,12 +604,12 @@ impl SimplifiedMarkerTree {
     ///
     /// This only returns `None` when the underlying marker is always true,
     /// i.e., it matches all possible marker environments.
-    pub(crate) fn try_to_string(self) -> Option<String> {
+    pub fn try_to_string(self) -> Option<String> {
         self.0.try_to_string()
     }

     /// Returns the underlying marker tree without re-complexifying them.
-    pub(crate) fn as_simplified_marker_tree(self) -> MarkerTree {
+    pub fn as_simplified_marker_tree(self) -> MarkerTree {
         self.0
     }
 }
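One of the simplifications above replaces a hand-rolled `fold` over an `Option` accumulator with `Iterator::reduce`, which returns `None` exactly when the iterator is empty. A standalone sketch of the same shape, intersecting closed integer ranges instead of version ranges (the `intersect_all` helper is illustrative):

fn intersect_all(ranges: &[(u32, u32)]) -> Option<(u32, u32)> {
    ranges
        .iter()
        .copied()
        // `reduce` seeds with the first element and yields `None` when empty,
        // matching the old `fold(None, ...)` + `?` behavior.
        .reduce(|(lo1, hi1), (lo2, hi2)| (lo1.max(lo2), hi1.min(hi2)))
}

fn main() {
    assert_eq!(intersect_all(&[(1, 10), (5, 8), (0, 7)]), Some((5, 7)));
    assert_eq!(intersect_all(&[]), None);
}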
@@ -1,3 +1,5 @@
+#[cfg(feature = "schemars")]
+use std::borrow::Cow;
 use std::ops::Deref;

 use http::StatusCode;

@@ -136,17 +138,17 @@ impl<'de> Deserialize<'de> for SerializableStatusCode {

 #[cfg(feature = "schemars")]
 impl schemars::JsonSchema for SerializableStatusCode {
-    fn schema_name() -> String {
-        "StatusCode".to_string()
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("StatusCode")
     }

-    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        let mut schema = r#gen.subschema_for::<u16>().into_object();
-        schema.metadata().description = Some("HTTP status code (100-599)".to_string());
-        schema.number().minimum = Some(100.0);
-        schema.number().maximum = Some(599.0);
-
-        schema.into()
+    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
+        schemars::json_schema!({
+            "type": "number",
+            "minimum": 100,
+            "maximum": 599,
+            "description": "HTTP status code (100-599)"
+        })
     }
 }
@ -644,8 +644,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
})
|
||||
.await
|
||||
.map_err(|err| match err {
|
||||
CachedClientError::Callback(err) => err,
|
||||
CachedClientError::Client(err) => Error::Client(err),
|
||||
CachedClientError::Callback { err, .. } => err,
|
||||
CachedClientError::Client { err, .. } => Error::Client(err),
|
||||
})?;
|
||||
|
||||
// If the archive is missing the required hashes, or has since been removed, force a refresh.
|
||||
|
@ -663,8 +663,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
.skip_cache_with_retry(self.request(url)?, &http_entry, download)
|
||||
.await
|
||||
.map_err(|err| match err {
|
||||
CachedClientError::Callback(err) => err,
|
||||
CachedClientError::Client(err) => Error::Client(err),
|
||||
CachedClientError::Callback { err, .. } => err,
|
||||
CachedClientError::Client { err, .. } => Error::Client(err),
|
||||
})
|
||||
})
|
||||
.await?
|
||||
|
@ -811,8 +811,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
})
|
||||
.await
|
||||
.map_err(|err| match err {
|
||||
CachedClientError::Callback(err) => err,
|
||||
CachedClientError::Client(err) => Error::Client(err),
|
||||
CachedClientError::Callback { err, .. } => err,
|
||||
CachedClientError::Client { err, .. } => Error::Client(err),
|
||||
})?;
|
||||
|
||||
// If the archive is missing the required hashes, or has since been removed, force a refresh.
|
||||
|
@ -830,8 +830,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
.skip_cache_with_retry(self.request(url)?, &http_entry, download)
|
||||
.await
|
||||
.map_err(|err| match err {
|
||||
CachedClientError::Callback(err) => err,
|
||||
CachedClientError::Client(err) => Error::Client(err),
|
||||
CachedClientError::Callback { err, .. } => err,
|
||||
CachedClientError::Client { err, .. } => Error::Client(err),
|
||||
})
|
||||
})
|
||||
.await?
|
||||
|
|
|
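These call-site changes imply that `CachedClientError`'s variants went from tuple to struct form, so sites now bind the payload with `{ err, .. }`; the `..` also tolerates any extra fields the real variants carry (which this diff does not show). A simplified stand-in, with generic payload types in place of the real error types:

enum CachedClientError<C, E> {
    Client { err: C },
    Callback { err: E },
}

fn into_callback_err<C, E>(error: CachedClientError<C, E>) -> Result<E, C> {
    match error {
        // `..` matches regardless of how many other fields the variant has.
        CachedClientError::Callback { err, .. } => Ok(err),
        CachedClientError::Client { err, .. } => Err(err),
    }
}

fn main() {
    let error: CachedClientError<&str, &str> = CachedClientError::Callback { err: "callback" };
    assert_eq!(into_callback_err(error), Ok("callback"));
}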
@@ -108,6 +108,8 @@ pub enum Error {
     CacheHeal(String, HashAlgorithm),
     #[error("The source distribution requires Python {0}, but {1} is installed")]
     RequiresPython(VersionSpecifiers, Version),
+    #[error("Failed to identify base Python interpreter")]
+    BaseInterpreter(#[source] std::io::Error),

     /// A generic request middleware error happened while making a request.
     /// Refer to the error message for more details.
@@ -4,7 +4,7 @@ pub use error::Error;
 pub use index::{BuiltWheelIndex, RegistryWheelIndex};
 pub use metadata::{
     ArchiveMetadata, BuildRequires, FlatRequiresDist, LoweredRequirement, LoweringError, Metadata,
-    MetadataError, RequiresDist,
+    MetadataError, RequiresDist, SourcedDependencyGroups,
 };
 pub use reporter::Reporter;
 pub use source::prune;
crates/uv-distribution/src/metadata/dependency_groups.rs (new file, 208 lines)

@@ -0,0 +1,208 @@
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};

use uv_configuration::SourceStrategy;
use uv_distribution_types::{IndexLocations, Requirement};
use uv_normalize::{GroupName, PackageName};
use uv_workspace::dependency_groups::FlatDependencyGroups;
use uv_workspace::pyproject::{Sources, ToolUvSources};
use uv_workspace::{
    DiscoveryOptions, MemberDiscovery, VirtualProject, WorkspaceCache, WorkspaceError,
};

use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError};

/// Like [`crate::RequiresDist`] but only supporting dependency-groups.
///
/// PEP 735 says:
///
/// > A pyproject.toml file with only `[dependency-groups]` and no other tables is valid.
///
/// This is a special carveout to enable users to adopt dependency-groups without having
/// to learn about projects. It is supported by `pip install --group`, and thus interfaces
/// like `uv pip install --group` must also support it for interop and conformance.
///
/// On paper this is trivial to support because dependency-groups are so self-contained
/// that they're basically a `requirements.txt` embedded within a pyproject.toml, so it's
/// fine to just grab that section and handle it independently.
///
/// However several uv extensions make this complicated, notably, as of this writing:
///
/// * tool.uv.sources
/// * tool.uv.index
///
/// These fields may also be present in the pyproject.toml, and, critically,
/// may be defined and inherited in a parent workspace pyproject.toml.
///
/// Therefore, we need to gracefully degrade from a full workspacey situation all
/// the way down to one of these stub pyproject.tomls the PEP defines. This is why
/// we avoid going through `RequiresDist` -- we don't want to muddy up the "compile a package"
/// logic with support for non-project/workspace pyproject.tomls, and we don't want to
/// muddy this logic up with setuptools fallback modes that `RequiresDist` wants.
///
/// (We used to shove this feature into that path, and then we would see there's no metadata
/// and try to run setuptools to try to desperately find any metadata, and then error out.)
#[derive(Debug, Clone)]
pub struct SourcedDependencyGroups {
    pub name: Option<PackageName>,
    pub dependency_groups: BTreeMap<GroupName, Box<[Requirement]>>,
}

impl SourcedDependencyGroups {
    /// Lower by considering `tool.uv` in `pyproject.toml` if present, used for Git and directory
    /// dependencies.
    pub async fn from_virtual_project(
        pyproject_path: &Path,
        git_member: Option<&GitWorkspaceMember<'_>>,
        locations: &IndexLocations,
        source_strategy: SourceStrategy,
        cache: &WorkspaceCache,
    ) -> Result<Self, MetadataError> {
        let discovery = DiscoveryOptions {
            stop_discovery_at: git_member.map(|git_member| {
                git_member
                    .fetch_root
                    .parent()
                    .expect("git checkout has a parent")
                    .to_path_buf()
            }),
            members: match source_strategy {
                SourceStrategy::Enabled => MemberDiscovery::default(),
                SourceStrategy::Disabled => MemberDiscovery::None,
            },
        };

        // The subsequent API takes an absolute path to the dir the pyproject is in
        let empty = PathBuf::new();
        let absolute_pyproject_path =
            std::path::absolute(pyproject_path).map_err(WorkspaceError::Normalize)?;
        let project_dir = absolute_pyproject_path.parent().unwrap_or(&empty);
        let project = VirtualProject::discover_defaulted(project_dir, &discovery, cache).await?;

        // Collect the dependency groups.
        let dependency_groups =
            FlatDependencyGroups::from_pyproject_toml(project.root(), project.pyproject_toml())?;

        // If sources/indexes are disabled we can just stop here
        let SourceStrategy::Enabled = source_strategy else {
            return Ok(Self {
                name: project.project_name().cloned(),
                dependency_groups: dependency_groups
                    .into_iter()
                    .map(|(name, group)| {
                        let requirements = group
                            .requirements
                            .into_iter()
                            .map(Requirement::from)
                            .collect();
                        (name, requirements)
                    })
                    .collect(),
            });
        };

        // Collect any `tool.uv.index` entries.
        let empty = vec![];
        let project_indexes = project
            .pyproject_toml()
            .tool
            .as_ref()
            .and_then(|tool| tool.uv.as_ref())
            .and_then(|uv| uv.index.as_deref())
            .unwrap_or(&empty);

        // Collect any `tool.uv.sources` and `tool.uv.dev_dependencies` from `pyproject.toml`.
        let empty = BTreeMap::default();
        let project_sources = project
            .pyproject_toml()
            .tool
            .as_ref()
            .and_then(|tool| tool.uv.as_ref())
            .and_then(|uv| uv.sources.as_ref())
            .map(ToolUvSources::inner)
            .unwrap_or(&empty);

        // Now that we've resolved the dependency groups, we can validate that each source references
        // a valid extra or group, if present.
        Self::validate_sources(project_sources, &dependency_groups)?;

        // Lower the dependency groups.
        let dependency_groups = dependency_groups
            .into_iter()
            .map(|(name, group)| {
                let requirements = group
                    .requirements
                    .into_iter()
                    .flat_map(|requirement| {
                        let requirement_name = requirement.name.clone();
                        let group = name.clone();
                        let extra = None;
                        LoweredRequirement::from_requirement(
                            requirement,
                            project.project_name(),
                            project.root(),
                            project_sources,
                            project_indexes,
                            extra,
                            Some(&group),
                            locations,
                            project.workspace(),
                            git_member,
                        )
                        .map(move |requirement| match requirement {
                            Ok(requirement) => Ok(requirement.into_inner()),
                            Err(err) => Err(MetadataError::GroupLoweringError(
                                group.clone(),
                                requirement_name.clone(),
                                Box::new(err),
                            )),
                        })
                    })
                    .collect::<Result<Box<_>, _>>()?;
                Ok::<(GroupName, Box<_>), MetadataError>((name, requirements))
            })
            .collect::<Result<BTreeMap<_, _>, _>>()?;

        Ok(Self {
            name: project.project_name().cloned(),
            dependency_groups,
        })
    }

    /// Validate the sources.
    ///
    /// If a source is requested with `group`, ensure that the relevant dependency is
    /// present in the relevant `dependency-groups` section.
    fn validate_sources(
        sources: &BTreeMap<PackageName, Sources>,
        dependency_groups: &FlatDependencyGroups,
    ) -> Result<(), MetadataError> {
        for (name, sources) in sources {
            for source in sources.iter() {
                if let Some(group) = source.group() {
                    // If the group doesn't exist at all, error.
                    let Some(flat_group) = dependency_groups.get(group) else {
                        return Err(MetadataError::MissingSourceGroup(
                            name.clone(),
                            group.clone(),
                        ));
                    };

                    // If there is no such requirement with the group, error.
                    if !flat_group
                        .requirements
                        .iter()
                        .any(|requirement| requirement.name == *name)
                    {
                        return Err(MetadataError::IncompleteSourceGroup(
                            name.clone(),
                            group.clone(),
                        ));
                    }
                }
            }
        }

        Ok(())
    }
}
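As the doc comment in the new file notes, PEP 735 allows a pyproject.toml that contains nothing but `[dependency-groups]`. A minimal example of such a stub, shown as a Rust raw string for illustration (the group names and packages are hypothetical):

// A PEP 735 "stub" pyproject.toml: no `[project]` table at all.
const STUB_PYPROJECT: &str = r#"
[dependency-groups]
test = ["pytest", "pytest-cov"]
lint = ["ruff"]
"#;

fn main() {
    assert!(STUB_PYPROJECT.contains("[dependency-groups]"));
}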
@@ -13,7 +13,7 @@ use uv_git_types::{GitReference, GitUrl, GitUrlParseError};
 use uv_normalize::{ExtraName, GroupName, PackageName};
 use uv_pep440::VersionSpecifiers;
 use uv_pep508::{MarkerTree, VerbatimUrl, VersionOrUrl, looks_like_git_repository};
-use uv_pypi_types::{ConflictItem, ParsedUrlError, VerbatimParsedUrl};
+use uv_pypi_types::{ConflictItem, ParsedGitUrl, ParsedUrlError, VerbatimParsedUrl};
 use uv_redacted::DisplaySafeUrl;
 use uv_workspace::Workspace;
 use uv_workspace::pyproject::{PyProjectToml, Source, Sources};

@@ -700,17 +700,23 @@ fn path_source(
     };
     if is_dir {
         if let Some(git_member) = git_member {
+            let git = git_member.git_source.git.clone();
             let subdirectory = uv_fs::relative_to(install_path, git_member.fetch_root)
                 .expect("Workspace member must be relative");
             let subdirectory = uv_fs::normalize_path_buf(subdirectory);
+            let subdirectory = if subdirectory == PathBuf::new() {
+                None
+            } else {
+                Some(subdirectory.into_boxed_path())
+            };
+            let url = DisplaySafeUrl::from(ParsedGitUrl {
+                url: git.clone(),
+                subdirectory: subdirectory.clone(),
+            });
             return Ok(RequirementSource::Git {
-                git: git_member.git_source.git.clone(),
-                subdirectory: if subdirectory == PathBuf::new() {
-                    None
-                } else {
-                    Some(subdirectory.into_boxed_path())
-                },
-                url,
+                git,
+                subdirectory,
+                url: VerbatimUrl::from_url(url),
             });
         }
@@ -12,11 +12,13 @@ use uv_workspace::dependency_groups::DependencyGroupError;
 use uv_workspace::{WorkspaceCache, WorkspaceError};

 pub use crate::metadata::build_requires::BuildRequires;
+pub use crate::metadata::dependency_groups::SourcedDependencyGroups;
 pub use crate::metadata::lowering::LoweredRequirement;
 pub use crate::metadata::lowering::LoweringError;
 pub use crate::metadata::requires_dist::{FlatRequiresDist, RequiresDist};

 mod build_requires;
+mod dependency_groups;
 mod lowering;
 mod requires_dist;
@@ -6,7 +6,7 @@ use rustc_hash::FxHashSet;

 use uv_configuration::SourceStrategy;
 use uv_distribution_types::{IndexLocations, Requirement};
-use uv_normalize::{DEV_DEPENDENCIES, ExtraName, GroupName, PackageName};
+use uv_normalize::{ExtraName, GroupName, PackageName};
 use uv_pep508::MarkerTree;
 use uv_workspace::dependency_groups::FlatDependencyGroups;
 use uv_workspace::pyproject::{Sources, ToolUvSources};

@@ -107,41 +107,10 @@ impl RequiresDist {
             SourceStrategy::Disabled => &empty,
         };

-        // Collect the dependency groups.
-        let dependency_groups = {
-            // First, collect `tool.uv.dev_dependencies`
-            let dev_dependencies = project_workspace
-                .current_project()
-                .pyproject_toml()
-                .tool
-                .as_ref()
-                .and_then(|tool| tool.uv.as_ref())
-                .and_then(|uv| uv.dev_dependencies.as_ref());
-
-            // Then, collect `dependency-groups`
-            let dependency_groups = project_workspace
-                .current_project()
-                .pyproject_toml()
-                .dependency_groups
-                .iter()
-                .flatten()
-                .collect::<BTreeMap<_, _>>();
-
-            // Flatten the dependency groups.
-            let mut dependency_groups =
-                FlatDependencyGroups::from_dependency_groups(&dependency_groups)
-                    .map_err(|err| err.with_dev_dependencies(dev_dependencies))?;
-
-            // Add the `dev` group, if `dev-dependencies` is defined.
-            if let Some(dev_dependencies) = dev_dependencies {
-                dependency_groups
-                    .entry(DEV_DEPENDENCIES.clone())
-                    .or_insert_with(Vec::new)
-                    .extend(dev_dependencies.clone());
-            }
-
-            dependency_groups
-        };
+        let dependency_groups = FlatDependencyGroups::from_pyproject_toml(
+            project_workspace.current_project().root(),
+            project_workspace.current_project().pyproject_toml(),
+        )?;

         // Now that we've resolved the dependency groups, we can validate that each source references
         // a valid extra or group, if present.

@@ -150,9 +119,10 @@ impl RequiresDist {
         // Lower the dependency groups.
         let dependency_groups = dependency_groups
             .into_iter()
-            .map(|(name, requirements)| {
+            .map(|(name, flat_group)| {
                 let requirements = match source_strategy {
-                    SourceStrategy::Enabled => requirements
+                    SourceStrategy::Enabled => flat_group
+                        .requirements
                         .into_iter()
                         .flat_map(|requirement| {
                             let requirement_name = requirement.name.clone();

@@ -182,9 +152,11 @@ impl RequiresDist {
                         )
                     })
                     .collect::<Result<Box<_>, _>>(),
-                    SourceStrategy::Disabled => {
-                        Ok(requirements.into_iter().map(Requirement::from).collect())
-                    }
+                    SourceStrategy::Disabled => Ok(flat_group
+                        .requirements
+                        .into_iter()
+                        .map(Requirement::from)
+                        .collect()),
                 }?;
                 Ok::<(GroupName, Box<_>), MetadataError>((name, requirements))
             })

@@ -265,7 +237,7 @@ impl RequiresDist {

         if let Some(group) = source.group() {
             // If the group doesn't exist at all, error.
-            let Some(dependencies) = dependency_groups.get(group) else {
+            let Some(flat_group) = dependency_groups.get(group) else {
                 return Err(MetadataError::MissingSourceGroup(
                     name.clone(),
                     group.clone(),

@@ -273,7 +245,8 @@ impl RequiresDist {
         };

         // If there is no such requirement with the group, error.
-        if !dependencies
+        if !flat_group
+            .requirements
             .iter()
             .any(|requirement| requirement.name == *name)
         {
@@ -43,7 +43,7 @@ use uv_normalize::PackageName;
use uv_pep440::{Version, release_specifiers_to_ranges};
use uv_platform_tags::Tags;
use uv_pypi_types::{HashAlgorithm, HashDigest, HashDigests, PyProjectToml, ResolutionMetadata};
use uv_types::{BuildContext, BuildStack, SourceBuildTrait};
use uv_types::{BuildContext, BuildKey, BuildStack, SourceBuildTrait};
use uv_workspace::pyproject::ToolUvSources;
use crate::distribution_database::ManagedClient;

@@ -728,8 +728,8 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
})
.await
.map_err(|err| match err {
CachedClientError::Callback(err) => err,
CachedClientError::Client(err) => Error::Client(err),
CachedClientError::Callback { err, .. } => err,
CachedClientError::Client { err, .. } => Error::Client(err),
})?;
// If the archive is missing the required hashes, force a refresh.

@@ -747,8 +747,8 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
)
.await
.map_err(|err| match err {
CachedClientError::Callback(err) => err,
CachedClientError::Client(err) => Error::Client(err),
CachedClientError::Callback { err, .. } => err,
CachedClientError::Client { err, .. } => Error::Client(err),
})
})
.await

@@ -1583,7 +1583,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
client
.unmanaged
.uncached_client(resource.git.repository())
.clone(),
.raw_client(),
)
.await
{

@@ -1860,13 +1860,22 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
}
};
// If the URL is already precise, return it.
if self.build_context.git().get_precise(git).is_some() {
debug!("Precise commit already known: {source}");
return Ok(());
}
// If this is a GitHub URL, attempt to resolve to a precise commit using the GitHub API.
if self
.build_context
.git()
.github_fast_path(
git,
client.unmanaged.uncached_client(git.repository()).clone(),
client
.unmanaged
.uncached_client(git.repository())
.raw_client(),
)
.await?
.is_some()

@@ -2084,8 +2093,8 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
)
.await
.map_err(|err| match err {
CachedClientError::Callback(err) => err,
CachedClientError::Client(err) => Error::Client(err),
CachedClientError::Callback { err, .. } => err,
CachedClientError::Client { err, .. } => Error::Client(err),
})
})
.await

@@ -2267,6 +2276,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
fs::create_dir_all(&cache_shard)
.await
.map_err(Error::CacheWrite)?;
// Try a direct build if that isn't disabled and the uv build backend is used.
let disk_filename = if let Some(name) = self
.build_context

@@ -2287,27 +2297,73 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// In the uv build backend, the normalized filename and the disk filename are the same.
name.to_string()
} else {
self.build_context
.setup_build(
source_root,
subdirectory,
source_root,
Some(&source.to_string()),
source.as_dist(),
source_strategy,
if source.is_editable() {
BuildKind::Editable
} else {
BuildKind::Wheel
},
BuildOutput::Debug,
self.build_stack.cloned().unwrap_or_default(),
)
.await
.map_err(|err| Error::Build(err.into()))?
.wheel(temp_dir.path())
.await
.map_err(Error::Build)?
// Identify the base Python interpreter to use in the cache key.
let base_python = if cfg!(unix) {
self.build_context
.interpreter()
.find_base_python()
.map_err(Error::BaseInterpreter)?
} else {
self.build_context
.interpreter()
.to_base_python()
.map_err(Error::BaseInterpreter)?
};
let build_kind = if source.is_editable() {
BuildKind::Editable
} else {
BuildKind::Wheel
};
let build_key = BuildKey {
base_python: base_python.into_boxed_path(),
source_root: source_root.to_path_buf().into_boxed_path(),
subdirectory: subdirectory
.map(|subdirectory| subdirectory.to_path_buf().into_boxed_path()),
source_strategy,
build_kind,
};
if let Some(builder) = self.build_context.build_arena().remove(&build_key) {
debug!("Reusing existing build environment for: {source}");
let wheel = builder.wheel(temp_dir.path()).await.map_err(Error::Build)?;
// Store the build context.
self.build_context.build_arena().insert(build_key, builder);
wheel
} else {
debug!("Creating build environment for: {source}");
let builder = self
.build_context
.setup_build(
source_root,
subdirectory,
source_root,
Some(&source.to_string()),
source.as_dist(),
source_strategy,
if source.is_editable() {
BuildKind::Editable
} else {
BuildKind::Wheel
},
BuildOutput::Debug,
self.build_stack.cloned().unwrap_or_default(),
)
.await
.map_err(|err| Error::Build(err.into()))?;
// Build the wheel.
let wheel = builder.wheel(temp_dir.path()).await.map_err(Error::Build)?;
// Store the build context.
self.build_context.build_arena().insert(build_key, builder);
wheel
}
};
// Read the metadata from the wheel.

@@ -2362,6 +2418,26 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
}
}
// Identify the base Python interpreter to use in the cache key.
let base_python = if cfg!(unix) {
self.build_context
.interpreter()
.find_base_python()
.map_err(Error::BaseInterpreter)?
} else {
self.build_context
.interpreter()
.to_base_python()
.map_err(Error::BaseInterpreter)?
};
// Determine whether this is an editable or non-editable build.
let build_kind = if source.is_editable() {
BuildKind::Editable
} else {
BuildKind::Wheel
};
// Set up the builder.
let mut builder = self
.build_context

@@ -2372,11 +2448,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
Some(&source.to_string()),
source.as_dist(),
source_strategy,
if source.is_editable() {
BuildKind::Editable
} else {
BuildKind::Wheel
},
build_kind,
BuildOutput::Debug,
self.build_stack.cloned().unwrap_or_default(),
)

@@ -2385,6 +2457,21 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// Build the metadata.
let dist_info = builder.metadata().await.map_err(Error::Build)?;
// Store the build context.
self.build_context.build_arena().insert(
BuildKey {
base_python: base_python.into_boxed_path(),
source_root: source_root.to_path_buf().into_boxed_path(),
subdirectory: subdirectory
.map(|subdirectory| subdirectory.to_path_buf().into_boxed_path()),
source_strategy,
build_kind,
},
builder,
);
// Return the `.dist-info` directory, if it exists.
let Some(dist_info) = dist_info else {
return Ok(None);
};

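The `BuildKey`/build-arena flow above amounts to a keyed check-out/check-in cache for build environments: take a builder out of the arena if one matches, otherwise set one up, and put it back after producing the wheel or metadata so later stages can reuse it. A minimal standalone sketch of that pattern (hypothetical `Key` fields and `Builder` type, std only, not uv's actual types):

use std::collections::HashMap;

#[derive(Clone, PartialEq, Eq, Hash)]
struct Key {
    source_root: std::path::PathBuf,
    editable: bool,
}

struct Builder; // stand-in for a prepared build environment

#[derive(Default)]
struct Arena(HashMap<Key, Builder>);

impl Arena {
    // Remove a matching builder (or create one), use it, then re-insert it for reuse.
    fn with<T>(&mut self, key: Key, f: impl FnOnce(&Builder) -> T) -> T {
        let builder = self.0.remove(&key).unwrap_or(Builder);
        let out = f(&builder);
        self.0.insert(key, builder);
        out
    }
}
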
@@ -2,11 +2,11 @@ use std::{ffi::OsString, path::PathBuf};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error(transparent)]
#[error("Failed to read from zip file")]
Zip(#[from] zip::result::ZipError),
#[error(transparent)]
#[error("Failed to read from zip file")]
AsyncZip(#[from] async_zip::error::ZipError),
#[error(transparent)]
#[error("I/O operation failed during extraction")]
Io(#[from] std::io::Error),
#[error(
"The top-level of the archive must only contain a list directory, but it contains: {0:?}"

@@ -16,7 +16,6 @@ doctest = false
workspace = true
[dependencies]
dunce = { workspace = true }
either = { workspace = true }
encoding_rs_io = { workspace = true }

@@ -575,8 +575,33 @@ pub fn is_temporary(path: impl AsRef<Path>) -> bool {
.is_some_and(|name| name.starts_with(".tmp"))
}
/// Checks if the grandparent directory of the given executable is the base
/// of a virtual environment.
///
/// The procedure described in PEP 405 includes checking both the parent and
/// grandparent directory of an executable, but in practice we've found this to
/// be unnecessary.
pub fn is_virtualenv_executable(executable: impl AsRef<Path>) -> bool {
executable
.as_ref()
.parent()
.and_then(Path::parent)
.is_some_and(is_virtualenv_base)
}
/// Returns `true` if a path is the base path of a virtual environment,
/// indicated by the presence of a `pyvenv.cfg` file.
///
/// The procedure described in PEP 405 includes scanning `pyvenv.cfg`
/// for a `home` key, but in practice we've found this to be
/// unnecessary.
pub fn is_virtualenv_base(path: impl AsRef<Path>) -> bool {
path.as_ref().join("pyvenv.cfg").is_file()
}
/// A file lock that is automatically released when dropped.
#[derive(Debug)]
#[must_use]
pub struct LockedFile(fs_err::File);
impl LockedFile {

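The two helpers above encode the PEP 405 layout: the interpreter lives two levels below the environment root, and the root is identified by `pyvenv.cfg`. A quick illustration with std only (the `.venv` path is hypothetical):

use std::path::Path;

fn main() {
    // For `.venv/bin/python`, the grandparent is `.venv`;
    // `is_virtualenv_base` then reduces to checking `.venv/pyvenv.cfg`.
    let exe = Path::new(".venv/bin/python");
    let base = exe.parent().and_then(Path::parent).unwrap();
    assert_eq!(base, Path::new(".venv"));
    let _is_venv = base.join("pyvenv.cfg").is_file();
}
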
@@ -277,21 +277,6 @@ fn normalized(path: &Path) -> PathBuf {
normalized
}
/// Like `fs_err::canonicalize`, but avoids attempting to resolve symlinks on Windows.
pub fn canonicalize_executable(path: impl AsRef<Path>) -> std::io::Result<PathBuf> {
let path = path.as_ref();
debug_assert!(
path.is_absolute(),
"path must be absolute: {}",
path.display()
);
if cfg!(windows) {
Ok(path.to_path_buf())
} else {
fs_err::canonicalize(path)
}
}
/// Compute a path describing `path` relative to `base`.
///
/// `lib/python/site-packages/foo/__init__.py` and `lib/python/site-packages` -> `foo/__init__.py`

@@ -345,11 +330,11 @@ pub struct PortablePathBuf(Box<Path>);
#[cfg(feature = "schemars")]
impl schemars::JsonSchema for PortablePathBuf {
fn schema_name() -> String {
PathBuf::schema_name()
fn schema_name() -> Cow<'static, str> {
Cow::Borrowed("PortablePathBuf")
}
fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
fn json_schema(_gen: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
PathBuf::json_schema(_gen)
}
}

@@ -17,7 +17,7 @@ fn get_binary_type(path: &Path) -> windows::core::Result<u32> {
.chain(Some(0))
.collect::<Vec<u16>>();
// SAFETY: winapi call
unsafe { GetBinaryTypeW(PCWSTR(name.as_ptr()), &mut binary_type)? };
unsafe { GetBinaryTypeW(PCWSTR(name.as_ptr()), &raw mut binary_type)? };
Ok(binary_type)
}

@@ -5,31 +5,36 @@ use thiserror::Error;
/// Unique identity of any Git object (commit, tree, blob, tag).
///
/// Note this type does not validate whether the input is a valid hash.
/// This type's `FromStr` implementation validates that it's exactly 40 hex characters, i.e. a
/// full-length git commit.
///
/// If Git's SHA-256 support becomes more widespread in the future (in particular if GitHub ever
/// adds support), we might need to make this an enum.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct GitOid {
len: usize,
bytes: [u8; 40],
}
impl GitOid {
/// Return the string representation of an object ID.
pub fn as_str(&self) -> &str {
str::from_utf8(&self.bytes[..self.len]).unwrap()
str::from_utf8(&self.bytes).unwrap()
}
/// Return a truncated representation, i.e., the first 16 characters of the SHA.
pub fn as_short_str(&self) -> &str {
self.as_str().get(..16).unwrap_or(self.as_str())
&self.as_str()[..16]
}
}
#[derive(Debug, Error, PartialEq)]
pub enum OidParseError {
#[error("Object ID can be at most 40 hex characters")]
TooLong,
#[error("Object ID cannot be parsed from empty string")]
Empty,
#[error("Object ID must be exactly 40 hex characters")]
WrongLength,
#[error("Object ID must be valid hex characters")]
NotHex,
}
impl FromStr for GitOid {

@@ -40,17 +45,17 @@ impl FromStr for GitOid {
return Err(OidParseError::Empty);
}
if s.len() > 40 {
return Err(OidParseError::TooLong);
if s.len() != 40 {
return Err(OidParseError::WrongLength);
}
let mut out = [0; 40];
out[..s.len()].copy_from_slice(s.as_bytes());
if !s.chars().all(|ch| ch.is_ascii_hexdigit()) {
return Err(OidParseError::NotHex);
}
Ok(GitOid {
len: s.len(),
bytes: out,
})
let mut bytes = [0; 40];
bytes.copy_from_slice(s.as_bytes());
Ok(GitOid { bytes })
}
}

@@ -101,11 +106,20 @@ mod tests {
#[test]
fn git_oid() {
GitOid::from_str("4a23745badf5bf5ef7928f1e346e9986bd696d82").unwrap();
GitOid::from_str("4A23745BADF5BF5EF7928F1E346E9986BD696D82").unwrap();
assert_eq!(GitOid::from_str(""), Err(OidParseError::Empty));
assert_eq!(
GitOid::from_str(&str::repeat("a", 41)),
Err(OidParseError::TooLong)
Err(OidParseError::WrongLength)
);
assert_eq!(
GitOid::from_str(&str::repeat("a", 39)),
Err(OidParseError::WrongLength)
);
assert_eq!(
GitOid::from_str(&str::repeat("x", 40)),
Err(OidParseError::NotHex)
);
}
}

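With the stricter `FromStr`, only full-length 40-character hex strings parse, which is why `len` can be dropped from the struct. A sketch of the resulting behavior, assuming the `uv_git_types` API as shown in this diff:

use std::str::FromStr;
use uv_git_types::{GitOid, OidParseError};

fn main() {
    let oid = GitOid::from_str("4a23745badf5bf5ef7928f1e346e9986bd696d82").unwrap();
    assert_eq!(oid.as_short_str(), "4a23745badf5bf5e"); // always the first 16 characters
    // Abbreviated hashes are no longer accepted:
    assert_eq!(GitOid::from_str("4a23745"), Err(OidParseError::WrongLength));
}
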
@@ -20,6 +20,8 @@ use uv_redacted::DisplaySafeUrl;
use uv_static::EnvVars;
use uv_version::version;
use crate::rate_limit::{GITHUB_RATE_LIMIT_STATUS, is_github_rate_limited};
/// A file indicates that if present, `git reset` has been done and a repo
/// checkout is ready to go. See [`GitCheckout::reset`] for why we need this.
const CHECKOUT_READY_LOCK: &str = ".ok";

@@ -787,7 +789,15 @@ fn github_fast_path(
}
};
let url = format!("https://api.github.com/repos/{owner}/{repo}/commits/{github_branch_name}");
// Check if we're rate-limited by GitHub before determining the FastPathRev
if GITHUB_RATE_LIMIT_STATUS.is_active() {
debug!("Skipping GitHub fast path attempt for: {url} (rate-limited)");
return Ok(FastPathRev::Indeterminate);
}
let base_url = std::env::var(EnvVars::UV_GITHUB_FAST_PATH_URL)
.unwrap_or("https://api.github.com/repos".to_owned());
let url = format!("{base_url}/{owner}/{repo}/commits/{github_branch_name}");
let runtime = tokio::runtime::Builder::new_current_thread()
.enable_all()

@@ -807,6 +817,11 @@ fn github_fast_path(
let response = request.send().await?;
if is_github_rate_limited(&response) {
// Mark that we are being rate-limited by GitHub
GITHUB_RATE_LIMIT_STATUS.activate();
}
// GitHub returns a 404 if the repository does not exist, and a 422 if it exists but GitHub
// is unable to resolve the requested revision.
response.error_for_status_ref()?;

@@ -7,5 +7,6 @@ pub use crate::source::{Fetch, GitSource, Reporter};
mod credentials;
mod git;
mod rate_limit;
mod resolver;
mod source;

crates/uv-git/src/rate_limit.rs (new file)

@@ -0,0 +1,37 @@
use reqwest::{Response, StatusCode};
use std::sync::atomic::{AtomicBool, Ordering};
/// A global state on whether we are being rate-limited by GitHub's REST API.
/// If we are, avoid "fast-path" attempts.
pub(crate) static GITHUB_RATE_LIMIT_STATUS: GitHubRateLimitStatus = GitHubRateLimitStatus::new();
/// GitHub REST API rate limit status tracker.
///
/// ## Assumptions
///
/// The rate limit timeout duration is much longer than the runtime of a `uv` command.
/// And so we do not need to invalidate this state based on `x-ratelimit-reset`.
#[derive(Debug)]
pub(crate) struct GitHubRateLimitStatus(AtomicBool);
impl GitHubRateLimitStatus {
const fn new() -> Self {
Self(AtomicBool::new(false))
}
pub(crate) fn activate(&self) {
self.0.store(true, Ordering::Relaxed);
}
pub(crate) fn is_active(&self) -> bool {
self.0.load(Ordering::Relaxed)
}
}
/// Determine if GitHub is applying rate-limiting based on the response
pub(crate) fn is_github_rate_limited(response: &Response) -> bool {
// HTTP 403 and 429 are possible status codes in the event of a primary or secondary rate limit.
// Source: https://docs.github.com/en/rest/using-the-rest-api/troubleshooting-the-rest-api?apiVersion=2022-11-28#rate-limit-errors
let status_code = response.status();
status_code == StatusCode::FORBIDDEN || status_code == StatusCode::TOO_MANY_REQUESTS
}

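The tracker is a one-way latch: any rate-limited response flips a global flag, and every later fast-path attempt in the same process checks it first. A standalone sketch of the same pattern with std only:

use std::sync::atomic::{AtomicBool, Ordering};

static RATE_LIMITED: AtomicBool = AtomicBool::new(false);

fn record_response(status: u16) {
    // 403 and 429 are GitHub's documented rate-limit status codes.
    if status == 403 || status == 429 {
        RATE_LIMITED.store(true, Ordering::Relaxed);
    }
}

fn fast_path_allowed() -> bool {
    !RATE_LIMITED.load(Ordering::Relaxed)
}

fn main() {
    assert!(fast_path_allowed());
    record_response(429);
    assert!(!fast_path_allowed()); // latched for the rest of the command
}
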
@@ -12,9 +12,13 @@ use tracing::debug;
use uv_cache_key::{RepositoryUrl, cache_digest};
use uv_fs::LockedFile;
use uv_git_types::{GitHubRepository, GitOid, GitReference, GitUrl};
use uv_static::EnvVars;
use uv_version::version;
use crate::{Fetch, GitSource, Reporter};
use crate::{
Fetch, GitSource, Reporter,
rate_limit::{GITHUB_RATE_LIMIT_STATUS, is_github_rate_limited},
};
#[derive(Debug, thiserror::Error)]
pub enum GitResolverError {

@@ -45,6 +49,21 @@ impl GitResolver {
self.0.get(reference)
}
pub fn get_precise(&self, url: &GitUrl) -> Option<GitOid> {
// If the URL is already precise, return it.
if let Some(precise) = url.precise() {
return Some(precise);
}
// If we know the precise commit already, return it.
let reference = RepositoryReference::from(url);
if let Some(precise) = self.get(&reference) {
return Some(*precise);
}
None
}
/// Resolve a Git URL to a specific commit without performing any Git operations.
///
/// Returns a [`GitOid`] if the URL has already been resolved (i.e., is available in the cache),

@@ -52,18 +71,15 @@ impl GitResolver {
pub async fn github_fast_path(
&self,
url: &GitUrl,
client: ClientWithMiddleware,
client: &ClientWithMiddleware,
) -> Result<Option<GitOid>, GitResolverError> {
let reference = RepositoryReference::from(url);
// If the URL is already precise, return it.
if let Some(precise) = url.precise() {
return Ok(Some(precise));
if std::env::var_os(EnvVars::UV_NO_GITHUB_FAST_PATH).is_some() {
return Ok(None);
}
// If we know the precise commit already, return it.
if let Some(precise) = self.get(&reference) {
return Ok(Some(*precise));
// If the URL is already precise or we know the precise commit, return it.
if let Some(precise) = self.get_precise(url) {
return Ok(Some(precise));
}
// If the URL is a GitHub URL, attempt to resolve it via the GitHub API.

@@ -72,13 +88,21 @@ impl GitResolver {
return Ok(None);
};
// Check if we're rate-limited by GitHub, before determining the Git reference
if GITHUB_RATE_LIMIT_STATUS.is_active() {
debug!("Rate-limited by GitHub. Skipping GitHub fast path attempt for: {url}");
return Ok(None);
}
// Determine the Git reference.
let rev = url.reference().as_rev();
let url = format!("https://api.github.com/repos/{owner}/{repo}/commits/{rev}");
let github_api_base_url = std::env::var(EnvVars::UV_GITHUB_FAST_PATH_URL)
.unwrap_or("https://api.github.com/repos".to_owned());
let github_api_url = format!("{github_api_base_url}/{owner}/{repo}/commits/{rev}");
debug!("Querying GitHub for commit at: {url}");
let mut request = client.get(&url);
debug!("Querying GitHub for commit at: {github_api_url}");
let mut request = client.get(&github_api_url);
request = request.header("Accept", "application/vnd.github.3.sha");
request = request.header(
"User-Agent",

@@ -86,13 +110,20 @@ impl GitResolver {
);
let response = request.send().await?;
if !response.status().is_success() {
let status = response.status();
if !status.is_success() {
// Returns a 404 if the repository does not exist, and a 422 if GitHub is unable to
// resolve the requested rev.
debug!(
"GitHub API request failed for: {url} ({})",
"GitHub API request failed for: {github_api_url} ({})",
response.status()
);
if is_github_rate_limited(&response) {
// Mark that we are being rate-limited by GitHub
GITHUB_RATE_LIMIT_STATUS.activate();
}
return Ok(None);
}

@@ -103,7 +134,7 @@ impl GitResolver {
// Insert the resolved URL into the in-memory cache. This ensures that subsequent fetches
// resolve to the same precise commit.
self.insert(reference, precise);
self.insert(RepositoryReference::from(url), precise);
Ok(Some(precise))
}

@@ -112,7 +143,7 @@ impl GitResolver {
pub async fn fetch(
&self,
url: &GitUrl,
client: ClientWithMiddleware,
client: impl Into<ClientWithMiddleware>,
disable_ssl: bool,
offline: bool,
cache: PathBuf,

@@ -11,11 +11,11 @@ use reqwest_middleware::ClientWithMiddleware;
use tracing::{debug, instrument};
use uv_cache_key::{RepositoryUrl, cache_digest};
use uv_git_types::GitUrl;
use uv_git_types::{GitOid, GitReference, GitUrl};
use uv_redacted::DisplaySafeUrl;
use crate::GIT_STORE;
use crate::git::GitRemote;
use crate::git::{GitDatabase, GitRemote};
/// A remote Git source that can be checked out locally.
pub struct GitSource {

@@ -86,40 +86,59 @@ impl GitSource {
Cow::Borrowed(self.git.repository())
};
let remote = GitRemote::new(&remote);
let (db, actual_rev, task) = match (self.git.precise(), remote.db_at(&db_path).ok()) {
// If we have a locked revision, and we have a preexisting database
// which has that revision, then no update needs to happen.
(Some(rev), Some(db)) if db.contains(rev) => {
debug!("Using existing Git source `{}`", self.git.repository());
(db, rev, None)
// Fetch the commit, if we don't already have it. Wrapping this section in a closure makes
// it easier to short-circuit this in the cases where we do have the commit.
let (db, actual_rev, maybe_task) = || -> Result<(GitDatabase, GitOid, Option<usize>)> {
let git_remote = GitRemote::new(&remote);
let maybe_db = git_remote.db_at(&db_path).ok();
// If we have a locked revision, and we have a pre-existing database which has that
// revision, then no update needs to happen.
if let (Some(rev), Some(db)) = (self.git.precise(), &maybe_db) {
if db.contains(rev) {
debug!("Using existing Git source `{}`", self.git.repository());
return Ok((maybe_db.unwrap(), rev, None));
}
}
// ... otherwise we use this state to update the git database. Note
// that we still check for being offline here, for example in the
// situation that we have a locked revision but the database
// doesn't have it.
(locked_rev, db) => {
debug!("Updating Git source `{}`", self.git.repository());
// Report the checkout operation to the reporter.
let task = self.reporter.as_ref().map(|reporter| {
reporter.on_checkout_start(remote.url(), self.git.reference().as_rev())
});
let (db, actual_rev) = remote.checkout(
&db_path,
db,
self.git.reference(),
locked_rev,
&self.client,
self.disable_ssl,
self.offline,
)?;
(db, actual_rev, task)
// If the revision isn't locked, but it looks like it might be an exact commit hash,
// and we do have a pre-existing database, then check whether it is, in fact, a commit
// hash. If so, treat it like it's locked.
if let Some(db) = &maybe_db {
if let GitReference::BranchOrTagOrCommit(maybe_commit) = self.git.reference() {
if let Ok(oid) = maybe_commit.parse::<GitOid>() {
if db.contains(oid) {
// This reference is an exact commit. Treat it like it's
// locked.
debug!("Using existing Git source `{}`", self.git.repository());
return Ok((maybe_db.unwrap(), oid, None));
}
}
}
}
};
// ... otherwise, we use this state to update the Git database. Note that we still check
// for being offline here, for example in the situation that we have a locked revision
// but the database doesn't have it.
debug!("Updating Git source `{}`", self.git.repository());
// Report the checkout operation to the reporter.
let task = self.reporter.as_ref().map(|reporter| {
reporter.on_checkout_start(git_remote.url(), self.git.reference().as_rev())
});
let (db, actual_rev) = git_remote.checkout(
&db_path,
maybe_db,
self.git.reference(),
self.git.precise(),
&self.client,
self.disable_ssl,
self.offline,
)?;
Ok((db, actual_rev, task))
}()?;
// Don’t use the full hash, in order to contribute less to reaching the
// path length limit on Windows.

@@ -137,9 +156,9 @@ impl GitSource {
db.copy_to(actual_rev, &checkout_path)?;
// Report the checkout operation to the reporter.
if let Some(task) = task {
if let Some(task) = maybe_task {
if let Some(reporter) = self.reporter.as_ref() {
reporter.on_checkout_complete(remote.url(), actual_rev.as_str(), task);
reporter.on_checkout_complete(remote.as_ref(), actual_rev.as_str(), task);
}
}

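The `|| -> Result<(...)> { ... }()?` wrapper above is an immediately-invoked closure: `return` inside it short-circuits only the fetch logic, not the whole enclosing function. A minimal sketch of the idiom:

fn lookup(cache: Option<u32>) -> Result<u32, String> {
    let value = || -> Result<u32, String> {
        if let Some(hit) = cache {
            return Ok(hit); // exits the closure, not `lookup`
        }
        Ok(42) // fall through to the slow path
    }()?;
    Ok(value + 1)
}

fn main() {
    assert_eq!(lookup(Some(7)), Ok(8));
    assert_eq!(lookup(None), Ok(43));
}
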
@@ -34,7 +34,7 @@ pub use {
VersionPatternParseError,
},
version_specifier::{
VersionSpecifier, VersionSpecifierBuildError, VersionSpecifiers,
TildeVersionSpecifier, VersionSpecifier, VersionSpecifierBuildError, VersionSpecifiers,
VersionSpecifiersParseError,
},
};

@@ -610,6 +610,24 @@ impl Version {
Self::new(self.release().iter().copied())
}
/// Return the version with any segments apart from the release removed, with trailing zeroes
/// trimmed.
#[inline]
#[must_use]
pub fn only_release_trimmed(&self) -> Self {
if let Some(last_non_zero) = self.release().iter().rposition(|segment| *segment != 0) {
if last_non_zero == self.release().len() - 1 {
// Already trimmed.
self.clone()
} else {
Self::new(self.release().iter().take(last_non_zero + 1).copied())
}
} else {
// `0` is a valid version.
Self::new([0])
}
}
/// Return the version with trailing `.0` release segments removed.
///
/// # Panics

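The motivation for trimming: `Version` equality already ignores trailing zeros, but `Display` preserves them, so `3.9` and `3.9.0` would otherwise render differently. A sketch of the intended behavior, assuming the `uv_pep440` API in this diff:

use std::str::FromStr;
use uv_pep440::Version;

fn main() {
    let v = Version::from_str("3.9.0").unwrap();
    assert_eq!(v, Version::from_str("3.9").unwrap()); // equality ignores trailing zeros
    assert_eq!(v.to_string(), "3.9.0");               // but Display keeps them
    assert_eq!(v.only_release_trimmed().to_string(), "3.9");
}
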
@@ -132,7 +132,7 @@ impl From<VersionSpecifier> for Ranges<Version> {
pub fn release_specifiers_to_ranges(specifiers: VersionSpecifiers) -> Ranges<Version> {
let mut range = Ranges::full();
for specifier in specifiers {
range = range.intersection(&release_specifier_to_range(specifier));
range = range.intersection(&release_specifier_to_range(specifier, false));
}
range
}

@@ -148,67 +148,57 @@ pub fn release_specifiers_to_ranges(specifiers: VersionSpecifiers) -> Ranges<Version> {
/// is allowed for projects that declare `requires-python = ">3.13"`.
///
/// See: <https://github.com/pypa/pip/blob/a432c7f4170b9ef798a15f035f5dfdb4cc939f35/src/pip/_internal/resolution/resolvelib/candidates.py#L540>
pub fn release_specifier_to_range(specifier: VersionSpecifier) -> Ranges<Version> {
pub fn release_specifier_to_range(specifier: VersionSpecifier, trim: bool) -> Ranges<Version> {
let VersionSpecifier { operator, version } = specifier;
// Note(konsti): We switched strategies to trimmed for the markers, but we don't want to cause
// churn in lockfile requires-python, so we only trim for markers.
let version_trimmed = if trim {
version.only_release_trimmed()
} else {
version.only_release()
};
match operator {
Operator::Equal => {
let version = version.only_release();
Ranges::singleton(version)
}
Operator::ExactEqual => {
let version = version.only_release();
Ranges::singleton(version)
}
Operator::NotEqual => {
let version = version.only_release();
Ranges::singleton(version).complement()
}
// Trailing zeroes are not semantically relevant.
Operator::Equal => Ranges::singleton(version_trimmed),
Operator::ExactEqual => Ranges::singleton(version_trimmed),
Operator::NotEqual => Ranges::singleton(version_trimmed).complement(),
Operator::LessThan => Ranges::strictly_lower_than(version_trimmed),
Operator::LessThanEqual => Ranges::lower_than(version_trimmed),
Operator::GreaterThan => Ranges::strictly_higher_than(version_trimmed),
Operator::GreaterThanEqual => Ranges::higher_than(version_trimmed),
// Trailing zeroes are semantically relevant.
Operator::TildeEqual => {
let release = version.release();
let [rest @ .., last, _] = &*release else {
unreachable!("~= must have at least two segments");
};
let upper = Version::new(rest.iter().chain([&(last + 1)]));
let version = version.only_release();
Ranges::from_range_bounds(version..upper)
}
Operator::LessThan => {
let version = version.only_release();
Ranges::strictly_lower_than(version)
}
Operator::LessThanEqual => {
let version = version.only_release();
Ranges::lower_than(version)
}
Operator::GreaterThan => {
let version = version.only_release();
Ranges::strictly_higher_than(version)
}
Operator::GreaterThanEqual => {
let version = version.only_release();
Ranges::higher_than(version)
Ranges::from_range_bounds(version_trimmed..upper)
}
Operator::EqualStar => {
let low = version.only_release();
// For (not-)equal-star, trailing zeroes are still before the star.
let low_full = version.only_release();
let high = {
let mut high = low.clone();
let mut high = low_full.clone();
let mut release = high.release().to_vec();
*release.last_mut().unwrap() += 1;
high = high.with_release(release);
high
};
Ranges::from_range_bounds(low..high)
Ranges::from_range_bounds(version..high)
}
Operator::NotEqualStar => {
let low = version.only_release();
// For (not-)equal-star, trailing zeroes are still before the star.
let low_full = version.only_release();
let high = {
let mut high = low.clone();
let mut high = low_full.clone();
let mut release = high.release().to_vec();
*release.last_mut().unwrap() += 1;
high = high.with_release(release);
high
};
Ranges::from_range_bounds(low..high).complement()
Ranges::from_range_bounds(version..high).complement()
}
}
}

@@ -223,8 +213,8 @@ impl LowerBound {
/// These bounds use release-only semantics when comparing versions.
pub fn new(bound: Bound<Version>) -> Self {
Self(match bound {
Bound::Included(version) => Bound::Included(version.only_release()),
Bound::Excluded(version) => Bound::Excluded(version.only_release()),
Bound::Included(version) => Bound::Included(version.only_release_trimmed()),
Bound::Excluded(version) => Bound::Excluded(version.only_release_trimmed()),
Bound::Unbounded => Bound::Unbounded,
})
}

@@ -358,8 +348,8 @@ impl UpperBound {
/// These bounds use release-only semantics when comparing versions.
pub fn new(bound: Bound<Version>) -> Self {
Self(match bound {
Bound::Included(version) => Bound::Included(version.only_release()),
Bound::Excluded(version) => Bound::Excluded(version.only_release()),
Bound::Included(version) => Bound::Included(version.only_release_trimmed()),
Bound::Excluded(version) => Bound::Excluded(version.only_release_trimmed()),
Bound::Unbounded => Bound::Unbounded,
})
}

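The reason the (not-)equal-star arms above keep the untrimmed release: a trailing zero before the star narrows the series, so `==3.0.*` must not collapse to `==3.*`. For example (an illustration, assuming `uv_pep440`'s `VersionSpecifier::contains`):

use std::str::FromStr;
use uv_pep440::{Version, VersionSpecifier};

fn main() {
    let star_major = VersionSpecifier::from_str("==3.*").unwrap();
    let star_minor = VersionSpecifier::from_str("==3.0.*").unwrap();
    let v = Version::from_str("3.1").unwrap();
    assert!(star_major.contains(&v));  // any 3.x release
    assert!(!star_minor.contains(&v)); // only the 3.0.x series
}
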
@@ -80,24 +80,38 @@ impl VersionSpecifiers {
// Add specifiers for the holes between the bounds.
for (lower, upper) in bounds {
match (next, lower) {
let specifier = match (next, lower) {
// Ex) [3.7, 3.8.5), (3.8.5, 3.9] -> >=3.7,!=3.8.5,<=3.9
(Bound::Excluded(prev), Bound::Excluded(lower)) if prev == lower => {
specifiers.push(VersionSpecifier::not_equals_version(prev.clone()));
Some(VersionSpecifier::not_equals_version(prev.clone()))
}
// Ex) [3.7, 3.8), (3.8, 3.9] -> >=3.7,!=3.8.*,<=3.9
(Bound::Excluded(prev), Bound::Included(lower))
if prev.release().len() == 2
&& *lower.release() == [prev.release()[0], prev.release()[1] + 1] =>
{
specifiers.push(VersionSpecifier::not_equals_star_version(prev.clone()));
}
_ => {
#[cfg(feature = "tracing")]
warn!(
"Ignoring unsupported gap in `requires-python` version: {next:?} -> {lower:?}"
);
(Bound::Excluded(prev), Bound::Included(lower)) => {
match *prev.only_release_trimmed().release() {
[major] if *lower.only_release_trimmed().release() == [major, 1] => {
Some(VersionSpecifier::not_equals_star_version(Version::new([
major, 0,
])))
}
[major, minor]
if *lower.only_release_trimmed().release() == [major, minor + 1] =>
{
Some(VersionSpecifier::not_equals_star_version(Version::new([
major, minor,
])))
}
_ => None,
}
}
_ => None,
};
if let Some(specifier) = specifier {
specifiers.push(specifier);
} else {
#[cfg(feature = "tracing")]
warn!(
"Ignoring unsupported gap in `requires-python` version: {next:?} -> {lower:?}"
);
}
next = upper;
}

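Restated, a hole between an excluded upper bound and the next included lower bound collapses to a `!=X.Y.*` specifier exactly when the lower bound is the next minor. A standalone restatement of the new match arms (hypothetical helper, std only):

fn gap_as_not_equal_star(prev: &[u64], lower: &[u64]) -> Option<(u64, u64)> {
    match prev {
        [major] if *lower == [*major, 1] => Some((*major, 0)),
        [major, minor] if *lower == [*major, *minor + 1] => Some((*major, *minor)),
        _ => None,
    }
}

fn main() {
    assert_eq!(gap_as_not_equal_star(&[3, 8], &[3, 9]), Some((3, 8))); // != 3.8.*
    assert_eq!(gap_as_not_equal_star(&[3], &[3, 1]), Some((3, 0)));    // != 3.0.*
    assert_eq!(gap_as_not_equal_star(&[3, 8], &[3, 10]), None);        // unsupported gap
}
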
@@ -348,6 +362,33 @@ impl VersionSpecifier {
Ok(Self { operator, version })
}
/// Remove all non-release parts of the version.
///
/// The marker decision diagram relies on the assumption that the negation of a marker tree is
/// the complement of the marker space. However, pre-release versions violate this assumption.
///
/// For example, the marker `python_full_version > '3.9' or python_full_version <= '3.9'`
/// does not match `python_full_version == 3.9.0a0` and so cannot simplify to `true`. However,
/// its negation, `python_full_version > '3.9' and python_full_version <= '3.9'`, also does not
/// match `3.9.0a0` and simplifies to `false`, which violates the algebra decision diagrams
/// rely on. For this reason we ignore pre-release versions entirely when evaluating markers.
///
/// Note that `python_version` cannot take on pre-release values as it is truncated to just the
/// major and minor version segments. Thus using release-only specifiers is definitely necessary
/// for `python_version` to fully simplify any ranges, such as
/// `python_version > '3.9' or python_version <= '3.9'`, which is always `true` for
/// `python_version`. For `python_full_version` however, this decision is a semantic change.
///
/// For Python versions, the major.minor is considered the API version, so unlike the rules
/// for package versions in PEP 440, Python `3.9.0a0` is acceptable for `>= "3.9"`.
#[must_use]
pub fn only_release(self) -> Self {
Self {
operator: self.operator,
version: self.version.only_release(),
}
}
/// `==<version>`
pub fn equals_version(version: Version) -> Self {
Self {

@@ -416,7 +457,7 @@ impl VersionSpecifier {
&self.operator
}
/// Get the version, e.g. `<=` in `<= 2.0.0`
/// Get the version, e.g. `2.0.0` in `<= 2.0.0`
pub fn version(&self) -> &Version {
&self.version
}

@@ -442,14 +483,23 @@ impl VersionSpecifier {
(Some(VersionSpecifier::equals_version(v1.clone())), None)
}
// `v >= 3.7 && v < 3.8` is equivalent to `v == 3.7.*`
(Bound::Included(v1), Bound::Excluded(v2))
if v1.release().len() == 2
&& *v2.release() == [v1.release()[0], v1.release()[1] + 1] =>
{
(
Some(VersionSpecifier::equals_star_version(v1.clone())),
None,
)
(Bound::Included(v1), Bound::Excluded(v2)) => {
match *v1.only_release_trimmed().release() {
[major] if *v2.only_release_trimmed().release() == [major, 1] => {
let version = Version::new([major, 0]);
(Some(VersionSpecifier::equals_star_version(version)), None)
}
[major, minor]
if *v2.only_release_trimmed().release() == [major, minor + 1] =>
{
let version = Version::new([major, minor]);
(Some(VersionSpecifier::equals_star_version(version)), None)
}
_ => (
VersionSpecifier::from_lower_bound(&Bound::Included(v1.clone())),
VersionSpecifier::from_upper_bound(&Bound::Excluded(v2.clone())),
),
}
}
(lower, upper) => (
VersionSpecifier::from_lower_bound(lower),

@@ -838,6 +888,90 @@ pub(crate) fn parse_version_specifiers(
Ok(version_ranges)
}
/// A simple `~=` version specifier with a major, minor and (optional) patch version, e.g., `~=3.13`
/// or `~=3.13.0`.
#[derive(Clone, Debug)]
pub struct TildeVersionSpecifier<'a> {
inner: Cow<'a, VersionSpecifier>,
}
impl<'a> TildeVersionSpecifier<'a> {
/// Create a new [`TildeVersionSpecifier`] from a [`VersionSpecifier`] value.
///
/// If a [`Operator::TildeEqual`] is not used, or the version includes more than minor and patch
/// segments, this will return [`None`].
pub fn from_specifier(specifier: VersionSpecifier) -> Option<TildeVersionSpecifier<'a>> {
TildeVersionSpecifier::new(Cow::Owned(specifier))
}
/// Create a new [`TildeVersionSpecifier`] from a [`VersionSpecifier`] reference.
///
/// See [`TildeVersionSpecifier::from_specifier`].
pub fn from_specifier_ref(
specifier: &'a VersionSpecifier,
) -> Option<TildeVersionSpecifier<'a>> {
TildeVersionSpecifier::new(Cow::Borrowed(specifier))
}
fn new(specifier: Cow<'a, VersionSpecifier>) -> Option<Self> {
if specifier.operator != Operator::TildeEqual {
return None;
}
if specifier.version().release().len() < 2 || specifier.version().release().len() > 3 {
return None;
}
if specifier.version().any_prerelease()
|| specifier.version().is_local()
|| specifier.version().is_post()
{
return None;
}
Some(Self { inner: specifier })
}
/// Whether a patch version is present in this tilde version specifier.
pub fn has_patch(&self) -> bool {
self.inner.version.release().len() == 3
}
/// Construct the lower and upper bounding version specifiers for this tilde version specifier,
/// e.g., for `~=3.13` this would return `>=3.13` and `<4` and for `~=3.13.0` it would
/// return `>=3.13.0` and `<3.14`.
pub fn bounding_specifiers(&self) -> (VersionSpecifier, VersionSpecifier) {
let release = self.inner.version().release();
let lower = self.inner.version.clone();
let upper = if self.has_patch() {
Version::new([release[0], release[1] + 1])
} else {
Version::new([release[0] + 1])
};
(
VersionSpecifier::greater_than_equal_version(lower),
VersionSpecifier::less_than_version(upper),
)
}
/// Construct a new tilde `VersionSpecifier` with the given patch version appended.
pub fn with_patch_version(&self, patch: u64) -> TildeVersionSpecifier {
let mut release = self.inner.version.release().to_vec();
if self.has_patch() {
release.pop();
}
release.push(patch);
TildeVersionSpecifier::from_specifier(
VersionSpecifier::from_version(Operator::TildeEqual, Version::new(release))
.expect("We should always derive a valid new version specifier"),
)
.expect("We should always derive a new tilde version specifier")
}
}
impl std::fmt::Display for TildeVersionSpecifier<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.inner)
}
}
#[cfg(test)]
mod tests {
use std::{cmp::Ordering, str::FromStr};

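Intended usage of the new type, assuming the `uv_pep440` API above:

use std::str::FromStr;
use uv_pep440::{TildeVersionSpecifier, VersionSpecifier};

fn main() {
    let spec = VersionSpecifier::from_str("~=3.13").unwrap();
    let tilde = TildeVersionSpecifier::from_specifier(spec).unwrap();
    assert!(!tilde.has_patch());
    let (lower, upper) = tilde.bounding_specifiers();
    assert_eq!(lower.to_string(), ">=3.13");
    assert_eq!(upper.to_string(), "<4");
    // Appending a patch narrows the implied upper bound to the next minor.
    assert_eq!(tilde.with_patch_version(2).to_string(), "~=3.13.2");
}
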
@@ -41,7 +41,7 @@ version-ranges = { workspace = true }
[dev-dependencies]
insta = { version = "1.40.0" }
serde_json = { version = "1.0.128" }
serde_json = { workspace = true }
tracing-test = { version = "0.2.5" }
[features]

@@ -16,6 +16,8 @@
#![warn(missing_docs)]
#[cfg(feature = "schemars")]
use std::borrow::Cow;
use std::error::Error;
use std::fmt::{Debug, Display, Formatter};
use std::path::Path;

@@ -334,22 +336,15 @@ impl Reporter for TracingReporter {
#[cfg(feature = "schemars")]
impl<T: Pep508Url> schemars::JsonSchema for Requirement<T> {
fn schema_name() -> String {
"Requirement".to_string()
fn schema_name() -> Cow<'static, str> {
Cow::Borrowed("Requirement")
}
fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
schemars::schema::SchemaObject {
instance_type: Some(schemars::schema::InstanceType::String.into()),
metadata: Some(Box::new(schemars::schema::Metadata {
description: Some(
"A PEP 508 dependency specifier, e.g., `ruff >= 0.6.0`".to_string(),
),
..schemars::schema::Metadata::default()
})),
..schemars::schema::SchemaObject::default()
}
.into()
fn json_schema(_gen: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
schemars::json_schema!({
"type": "string",
"description": "A PEP 508 dependency specifier, e.g., `ruff >= 0.6.0`"
})
}
}

@@ -172,7 +172,7 @@ impl InternerGuard<'_> {
),
// Normalize `python_version` markers to `python_full_version` nodes.
MarkerValueVersion::PythonVersion => {
match python_version_to_full_version(normalize_specifier(specifier)) {
match python_version_to_full_version(specifier.only_release()) {
Ok(specifier) => (
Variable::Version(CanonicalMarkerValueVersion::PythonFullVersion),
Edges::from_specifier(specifier),

@@ -1214,7 +1214,7 @@ impl Edges {
/// Returns the [`Edges`] for a version specifier.
fn from_specifier(specifier: VersionSpecifier) -> Edges {
let specifier = release_specifier_to_range(normalize_specifier(specifier));
let specifier = release_specifier_to_range(specifier.only_release(), true);
Edges::Version {
edges: Edges::from_range(&specifier),
}

@@ -1227,9 +1227,9 @@ impl Edges {
let mut range: Ranges<Version> = versions
.into_iter()
.map(|version| {
let specifier = VersionSpecifier::equals_version(version.clone());
let specifier = VersionSpecifier::equals_version(version.only_release());
let specifier = python_version_to_full_version(specifier)?;
Ok(release_specifier_to_range(normalize_specifier(specifier)))
Ok(release_specifier_to_range(specifier, true))
})
.flatten_ok()
.collect::<Result<Ranges<_>, NodeId>>()?;

@@ -1526,57 +1526,62 @@ impl Edges {
}
}
// Normalize a [`VersionSpecifier`] before adding it to the tree.
fn normalize_specifier(specifier: VersionSpecifier) -> VersionSpecifier {
let (operator, version) = specifier.into_parts();
// The decision diagram relies on the assumption that the negation of a marker tree is
// the complement of the marker space. However, pre-release versions violate this assumption.
//
// For example, the marker `python_full_version > '3.9' or python_full_version <= '3.9'`
// does not match `python_full_version == 3.9.0a0` and so cannot simplify to `true`. However,
// its negation, `python_full_version > '3.9' and python_full_version <= '3.9'`, also does not
// match `3.9.0a0` and simplifies to `false`, which violates the algebra decision diagrams
// rely on. For this reason we ignore pre-release versions entirely when evaluating markers.
//
// Note that `python_version` cannot take on pre-release values as it is truncated to just the
// major and minor version segments. Thus using release-only specifiers is definitely necessary
// for `python_version` to fully simplify any ranges, such as `python_version > '3.9' or python_version <= '3.9'`,
// which is always `true` for `python_version`. For `python_full_version` however, this decision
// is a semantic change.
let mut release = &*version.release();
// Strip any trailing `0`s.
//
// The [`Version`] type ignores trailing `0`s for equality, but still preserves them in its
// [`Display`] output. We must normalize all versions by stripping trailing `0`s to remove the
// distinction between versions like `3.9` and `3.9.0`. Otherwise, their output would depend on
// which form was added to the global marker interner first.
//
// Note that we cannot strip trailing `0`s for star equality, as `==3.0.*` is different from `==3.*`.
if !operator.is_star() {
if let Some(end) = release.iter().rposition(|segment| *segment != 0) {
if end > 0 {
release = &release[..=end];
}
}
}
VersionSpecifier::from_version(operator, Version::new(release)).unwrap()
}
/// Returns the equivalent `python_full_version` specifier for a `python_version` specifier.
///
/// Returns `Err` with a constant node if the equivalent comparison is always `true` or `false`.
fn python_version_to_full_version(specifier: VersionSpecifier) -> Result<VersionSpecifier, NodeId> {
// Trailing zeroes matter only for (not-)equals-star and tilde-equals. This means that below
// the next two blocks, we can use the trimmed release as the release.
if specifier.operator().is_star() {
// Input python_version python_full_version
// ==3.* 3.* 3.*
// ==3.0.* 3.0 3.0.*
// ==3.0.0.* 3.0 3.0.*
// ==3.9.* 3.9 3.9.*
// ==3.9.0.* 3.9 3.9.*
// ==3.9.0.0.* 3.9 3.9.*
// ==3.9.1.* FALSE FALSE
// ==3.9.1.0.* FALSE FALSE
// ==3.9.1.0.0.* FALSE FALSE
return match &*specifier.version().release() {
// `3.*`
[_major] => Ok(specifier),
// Ex) `3.9.*`, `3.9.0.*`, or `3.9.0.0.*`
[major, minor, rest @ ..] if rest.iter().all(|x| *x == 0) => {
let python_version = Version::new([major, minor]);
// Unwrap safety: A star operator with two version segments is always valid.
Ok(VersionSpecifier::from_version(*specifier.operator(), python_version).unwrap())
}
// Ex) `3.9.1.*` or `3.9.0.1.*`
_ => Err(NodeId::FALSE),
};
}
if *specifier.operator() == Operator::TildeEqual {
// python_version python_full_version
// ~=3 (not possible)
// ~= 3.0 >= 3.0, < 4.0
// ~= 3.9 >= 3.9, < 4.0
// ~= 3.9.0 == 3.9.*
// ~= 3.9.1 FALSE
// ~= 3.9.0.0 == 3.9.*
// ~= 3.9.0.1 FALSE
return match &*specifier.version().release() {
// Ex) `3.0`, `3.7`
[_major, _minor] => Ok(specifier),
// Ex) `3.9`, `3.9.0`, or `3.9.0.0`
[major, minor, rest @ ..] if rest.iter().all(|x| *x == 0) => {
let python_version = Version::new([major, minor]);
Ok(VersionSpecifier::equals_star_version(python_version))
}
// Ex) `3.9.1` or `3.9.0.1`
_ => Err(NodeId::FALSE),
};
}
// Extract the major and minor version segments if the specifier contains exactly
// those segments, or if it contains a major segment with an implied minor segment of `0`.
let major_minor = match *specifier.version().release() {
// For star operators, we cannot add a trailing `0`.
//
// `python_version == 3.*` is equivalent to `python_full_version == 3.*`. Adding a
// trailing `0` would result in `python_version == 3.0.*`, which is incorrect.
[_major] if specifier.operator().is_star() => return Ok(specifier),
let major_minor = match *specifier.version().only_release_trimmed().release() {
// Add a trailing `0` for the minor version, which is implied.
// For example, `python_version == 3` matches `3.0.1`, `3.0.2`, etc.
[major] => Some((major, 0)),

@@ -1614,9 +1619,10 @@ fn python_version_to_full_version(specifier: VersionSpecifier) -> Result<VersionSpecifier, NodeId> {
VersionSpecifier::less_than_version(Version::new([major, minor + 1]))
}
// `==3.7.*`, `!=3.7.*`, `~=3.7` already represent the equivalent `python_full_version`
// comparison.
Operator::EqualStar | Operator::NotEqualStar | Operator::TildeEqual => specifier,
Operator::EqualStar | Operator::NotEqualStar | Operator::TildeEqual => {
// Handled above.
unreachable!()
}
})
} else {
let [major, minor, ..] = *specifier.version().release() else {

@@ -1624,13 +1630,14 @@ fn python_version_to_full_version(specifier: VersionSpecifier) -> Result<VersionSpecifier, NodeId> {
};
Ok(match specifier.operator() {
// `python_version` cannot have more than two release segments, so equality is impossible.
Operator::Equal | Operator::ExactEqual | Operator::EqualStar | Operator::TildeEqual => {
// `python_version` cannot have more than two release segments, and we know
// that the following release segments aren't purely zeroes so equality is impossible.
Operator::Equal | Operator::ExactEqual => {
return Err(NodeId::FALSE);
}
// Similarly, inequalities are always `true`.
Operator::NotEqual | Operator::NotEqualStar => return Err(NodeId::TRUE),
Operator::NotEqual => return Err(NodeId::TRUE),
// `python_version {<,<=} 3.7.8` is equivalent to `python_full_version < 3.8`.
Operator::LessThan | Operator::LessThanEqual => {

@@ -1641,6 +1648,11 @@ fn python_version_to_full_version(specifier: VersionSpecifier) -> Result<VersionSpecifier, NodeId> {
Operator::GreaterThan | Operator::GreaterThanEqual => {
VersionSpecifier::greater_than_equal_version(Version::new([major, minor + 1]))
}
Operator::EqualStar | Operator::NotEqualStar | Operator::TildeEqual => {
// Handled above.
unreachable!()
}
})
}
}

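The star table above, restated as a standalone helper (hypothetical, std only): trailing zeros before the star trim down to major.minor, and any non-zero segment past the minor can never be satisfied by a `python_version` value:

fn normalize_version_star(release: &[u64]) -> Option<Vec<u64>> {
    match release {
        [major] => Some(vec![*major]), // `3.*` stays `3.*`
        [major, minor, rest @ ..] if rest.iter().all(|x| *x == 0) => {
            Some(vec![*major, *minor]) // `3.9.0.*` -> `3.9.*`
        }
        _ => None, // `3.9.1.*` is always false
    }
}

fn main() {
    assert_eq!(normalize_version_star(&[3, 9, 0, 0]), Some(vec![3, 9]));
    assert_eq!(normalize_version_star(&[3, 9, 1]), None);
}
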
@@ -64,8 +64,8 @@ fn collect_dnf(
continue;
}
// Detect whether the range for this edge can be simplified as a star inequality.
if let Some(specifier) = star_range_inequality(&range) {
// Detect whether the range for this edge can be simplified as a star specifier.
if let Some(specifier) = star_range_specifier(&range) {
path.push(MarkerExpression::Version {
key: marker.key().into(),
specifier,

@@ -343,22 +343,34 @@ where
Some(excluded)
}
/// Returns `Some` if the version expression can be simplified as a star inequality with the given
/// specifier.
/// Returns `Some` if the version range can be simplified as a star specifier.
///
/// For example, `python_full_version < '3.8' or python_full_version >= '3.9'` can be simplified to
/// `python_full_version != '3.8.*'`.
fn star_range_inequality(range: &Ranges<Version>) -> Option<VersionSpecifier> {
/// Only for the two bounds case not covered by [`VersionSpecifier::from_release_only_bounds`].
///
/// For negative ranges like `python_full_version < '3.8' or python_full_version >= '3.9'`,
/// returns `!= '3.8.*'`.
fn star_range_specifier(range: &Ranges<Version>) -> Option<VersionSpecifier> {
if range.iter().count() != 2 {
return None;
}
// Check for negative star range: two segments [(Unbounded, Excluded(v1)), (Included(v2), Unbounded)]
let (b1, b2) = range.iter().collect_tuple()?;
match (b1, b2) {
((Bound::Unbounded, Bound::Excluded(v1)), (Bound::Included(v2), Bound::Unbounded))
if v1.release().len() == 2
&& *v2.release() == [v1.release()[0], v1.release()[1] + 1] =>
{
Some(VersionSpecifier::not_equals_star_version(v1.clone()))
if let ((Bound::Unbounded, Bound::Excluded(v1)), (Bound::Included(v2), Bound::Unbounded)) =
(b1, b2)
{
match *v1.only_release_trimmed().release() {
[major] if *v2.release() == [major, 1] => {
Some(VersionSpecifier::not_equals_star_version(Version::new([
major, 0,
])))
}
[major, minor] if *v2.release() == [major, minor + 1] => {
Some(VersionSpecifier::not_equals_star_version(v1.clone()))
}
_ => None,
}
_ => None,
} else {
None
}
}

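The two-bound shape recognized above is exactly the complement of one `.*` series; checking against the specifier semantics (an illustration, assuming `uv_pep440`'s `VersionSpecifier::contains`):

use std::str::FromStr;
use uv_pep440::{Version, VersionSpecifier};

fn main() {
    // `!= 3.8.*` excludes exactly [3.8, 3.9): below 3.8 or at/above 3.9 still matches.
    let spec = VersionSpecifier::from_str("!=3.8.*").unwrap();
    assert!(spec.contains(&Version::from_str("3.7.10").unwrap()));
    assert!(!spec.contains(&Version::from_str("3.8.2").unwrap()));
    assert!(spec.contains(&Version::from_str("3.9").unwrap()));
}
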
@@ -1707,23 +1707,15 @@ impl Display for MarkerTreeContents {

#[cfg(feature = "schemars")]
impl schemars::JsonSchema for MarkerTree {
    fn schema_name() -> String {
        "MarkerTree".to_string()
    fn schema_name() -> Cow<'static, str> {
        Cow::Borrowed("MarkerTree")
    }

    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
        schemars::schema::SchemaObject {
            instance_type: Some(schemars::schema::InstanceType::String.into()),
            metadata: Some(Box::new(schemars::schema::Metadata {
                description: Some(
                    "A PEP 508-compliant marker expression, e.g., `sys_platform == 'Darwin'`"
                        .to_string(),
                ),
                ..schemars::schema::Metadata::default()
            })),
            ..schemars::schema::SchemaObject::default()
        }
        .into()
    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
        schemars::json_schema!({
            "type": "string",
            "description": "A PEP 508-compliant marker expression, e.g., `sys_platform == 'Darwin'`"
        })
    }
}

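The schemars 1.x migration above replaces the `SchemaObject` builder with the `json_schema!` macro, which produces a `Schema` backed by a plain JSON value. A small sketch of what the new invocation yields at runtime, assuming `schemars` 1.x and `serde_json` are available as dependencies:

// Print the JSON produced by a `json_schema!` invocation like the one above
// (a sketch; `Schema` serializes transparently as its underlying JSON value).
fn main() {
    let schema = schemars::json_schema!({
        "type": "string",
        "description": "A PEP 508-compliant marker expression, e.g., `sys_platform == 'Darwin'`"
    });
    println!("{}", serde_json::to_string_pretty(&schema).unwrap());
}
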
@@ -2279,13 +2271,13 @@ mod test {
    #[test]
    fn test_marker_simplification() {
        assert_false("python_version == '3.9.1'");
        assert_false("python_version == '3.9.0.*'");
        assert_true("python_version != '3.9.1'");

        // Technically these are valid substring comparisons, but we do not allow them.
        // e.g., using a version with patch components with `python_version` is considered
        // impossible to satisfy since the value is truncated at the minor version
        assert_false("python_version in '3.9.0'");
        // This is an edge case that happens to be supported, but is not critical to support.
        assert_simplifies(
            "python_version in '3.9.0'",
            "python_full_version == '3.9.*'",
        );
        // e.g., using a version that is not PEP 440 compliant is considered arbitrary
        assert_true("python_version in 'foo'");
        // e.g., including `*` versions, which would require tracking a version specifier

@@ -2295,16 +2287,25 @@ mod test {
        assert_true("python_version in '3.9,3.10'");
        assert_true("python_version in '3.9 or 3.10'");

        // e.g., when one of the values cannot be true
        // TODO(zanieb): This seems like a quirk of the `python_full_version` normalization; this
        // should just act as though the patch version isn't present
        assert_false("python_version in '3.9 3.10.0 3.11'");
        // This is an edge case that happens to be supported, but is not critical to support.
        assert_simplifies(
            "python_version in '3.9 3.10.0 3.11'",
            "python_full_version >= '3.9' and python_full_version < '3.12'",
        );

        assert_simplifies("python_version == '3.9'", "python_full_version == '3.9.*'");
        assert_simplifies(
            "python_version == '3.9.0'",
            "python_full_version == '3.9.*'",
        );
        assert_simplifies(
            "python_version == '3.9.0.*'",
            "python_full_version == '3.9.*'",
        );
        assert_simplifies(
            "python_version == '3.*'",
            "python_full_version >= '3' and python_full_version < '4'",
        );

        // `<version> in`
        // e.g., when the range is not contiguous

@@ -2515,7 +2516,7 @@ mod test {
    #[test]
    fn test_simplification_extra_versus_other() {
        // Here, the `extra != 'foo'` cannot be simplified out, because
        // `extra == 'foo'` can be true even when `extra == 'bar`' is true.
        // `extra == 'foo'` can be true even when `extra == 'bar'` is true.
        assert_simplifies(
            r#"extra != "foo" and (extra == "bar" or extra == "baz")"#,
            "(extra == 'bar' and extra != 'foo') or (extra == 'baz' and extra != 'foo')",

@@ -2536,6 +2537,68 @@ mod test {
        );
    }

    #[test]
    fn test_python_version_equal_star() {
        // Input, equivalent with python_version, equivalent with python_full_version
        let cases = [
            ("3.*", "3.*", "3.*"),
            ("3.0.*", "3.0", "3.0.*"),
            ("3.0.0.*", "3.0", "3.0.*"),
            ("3.9.*", "3.9", "3.9.*"),
            ("3.9.0.*", "3.9", "3.9.*"),
            ("3.9.0.0.*", "3.9", "3.9.*"),
        ];
        for (input, equal_python_version, equal_python_full_version) in cases {
            assert_eq!(
                m(&format!("python_version == '{input}'")),
                m(&format!("python_version == '{equal_python_version}'")),
                "{input} {equal_python_version}"
            );
            assert_eq!(
                m(&format!("python_version == '{input}'")),
                m(&format!(
                    "python_full_version == '{equal_python_full_version}'"
                )),
                "{input} {equal_python_full_version}"
            );
        }

        let cases_false = ["3.9.1.*", "3.9.1.0.*", "3.9.1.0.0.*"];
        for input in cases_false {
            assert!(
                m(&format!("python_version == '{input}'")).is_false(),
                "{input}"
            );
        }
    }

    #[test]
    fn test_tilde_equal_normalization() {
        assert_eq!(
            m("python_version ~= '3.10.0'"),
            m("python_version >= '3.10.0' and python_version < '3.11.0'")
        );

        // Two-digit versions such as `python_version` get padded with a zero, so they can never
        // match
        assert_eq!(m("python_version ~= '3.10.1'"), MarkerTree::FALSE);

        assert_eq!(
            m("python_version ~= '3.10'"),
            m("python_version >= '3.10' and python_version < '4.0'")
        );

        assert_eq!(
            m("python_full_version ~= '3.10.0'"),
            m("python_full_version >= '3.10.0' and python_full_version < '3.11.0'")
        );

        assert_eq!(
            m("python_full_version ~= '3.10'"),
            m("python_full_version >= '3.10' and python_full_version < '4.0'")
        );
    }

    /// This tests marker implication.
    ///
    /// Specifically, these test cases come from a [bug] where `foo` and `bar`

@@ -3332,4 +3395,32 @@ mod test {
            ]
        );
    }

    /// Case a: There is no version `3` (no trailing zero) in the interner yet.
    #[test]
    fn marker_normalization_a() {
        let left_tree = m("python_version == '3.0.*'");
        let left = left_tree.try_to_string().unwrap();
        let right = "python_full_version == '3.0.*'";
        assert_eq!(left, right, "{left} != {right}");
    }

    /// Case b: There is already a version `3` (no trailing zero) in the interner.
    #[test]
    fn marker_normalization_b() {
        m("python_version >= '3' and python_version <= '3.0'");

        let left_tree = m("python_version == '3.0.*'");
        let left = left_tree.try_to_string().unwrap();
        let right = "python_full_version == '3.0.*'";
        assert_eq!(left, right, "{left} != {right}");
    }

    #[test]
    fn marker_normalization_c() {
        let left_tree = MarkerTree::from_str("python_version == '3.10.0.*'").unwrap();
        let left = left_tree.try_to_string().unwrap();
        let right = "python_full_version == '3.10.*'";
        assert_eq!(left, right, "{left} != {right}");
    }
}

@@ -12,8 +12,8 @@ pub enum RequirementOrigin {
    File(PathBuf),
    /// The requirement was provided via a local project (e.g., a `pyproject.toml` file).
    Project(PathBuf, PackageName),
    /// The requirement was provided via a local project (e.g., a `pyproject.toml` file).
    Group(PathBuf, PackageName, GroupName),
    /// The requirement was provided via a local project's group (e.g., a `pyproject.toml` file).
    Group(PathBuf, Option<PackageName>, GroupName),
    /// The requirement was provided via a workspace.
    Workspace,
}

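The widened `Group` variant can be sketched standalone. Stand-in `String` types replace uv's `PackageName` and `GroupName` here, and it is an assumption that `None` covers requirements from a `pyproject.toml` that declares dependency groups without a project name:

use std::path::PathBuf;

// Stand-in definitions for illustration only; uv's real types differ.
enum RequirementOrigin {
    File(PathBuf),
    Project(PathBuf, String),
    Group(PathBuf, Option<String>, String),
    Workspace,
}

fn describe(origin: &RequirementOrigin) -> String {
    match origin {
        RequirementOrigin::Group(path, Some(package), group) => {
            format!("{}:{package}:{group}", path.display())
        }
        // The package name is now optional for group origins.
        RequirementOrigin::Group(path, None, group) => format!("{}:{group}", path.display()),
        _ => "other".to_string(),
    }
}

fn main() {
    let origin = RequirementOrigin::Group(PathBuf::from("pyproject.toml"), None, "dev".to_string());
    assert_eq!(describe(&origin), "pyproject.toml:dev");
}
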
@@ -18,11 +18,16 @@ use uv_redacted::DisplaySafeUrl;
use crate::Pep508Url;

/// A wrapper around [`Url`] that preserves the original string.
///
/// The original string is not preserved after serialization/deserialization.
#[derive(Debug, Clone, Eq)]
pub struct VerbatimUrl {
    /// The parsed URL.
    url: DisplaySafeUrl,
    /// The URL as it was provided by the user.
    ///
    /// Even if originally set, this will be [`None`] after
    /// serialization/deserialization.
    given: Option<ArcStr>,
}

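The doc comments describe the pattern at work: the parsed URL carries the semantics, the `given` string preserves the user's spelling, and only the parsed half survives a serialization round-trip. A minimal sketch of that pattern with stand-in types (uv's actual `VerbatimUrl` API may differ):

// `parsed` stands in for the DisplaySafeUrl; `given` is the user's spelling.
struct Verbatim {
    parsed: String,
    given: Option<String>,
}

impl Verbatim {
    // Prefer the user's spelling for display while it is still known.
    fn display(&self) -> &str {
        self.given.as_deref().unwrap_or(&self.parsed)
    }
}

fn main() {
    let url = Verbatim {
        parsed: "file:///home/user/pkg".to_string(),
        given: Some("./pkg".to_string()),
    };
    assert_eq!(url.display(), "./pkg");

    // After a round-trip, only the parsed form remains.
    let roundtripped = Verbatim { given: None, ..url };
    assert_eq!(roundtripped.display(), "file:///home/user/pkg");
}
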
@@ -106,10 +111,8 @@ impl VerbatimUrl {
        let (path, fragment) = split_fragment(&path);

        // Convert to a URL.
        let mut url = DisplaySafeUrl::from(
            Url::from_file_path(path.clone())
                .unwrap_or_else(|()| panic!("path is absolute: {}", path.display())),
        );
        let mut url = DisplaySafeUrl::from_file_path(path.clone())
            .unwrap_or_else(|()| panic!("path is absolute: {}", path.display()));

        // Set the fragment, if it exists.
        if let Some(fragment) = fragment {

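The simplification above assumes `DisplaySafeUrl` exposes `from_file_path` directly instead of routing through `Url`. For orientation, the underlying `url` crate behavior that both forms rely on (a sketch; the paths shown assume a Unix layout):

use url::Url;

fn main() {
    // `from_file_path` fails with `Err(())` unless the path is absolute,
    // which is why the call sites above pair it with `unwrap_or_else`.
    assert!(Url::from_file_path("relative/path").is_err());

    let url = Url::from_file_path("/tmp/pkg").unwrap();
    assert_eq!(url.as_str(), "file:///tmp/pkg");
}
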
@@ -168,6 +171,11 @@ impl VerbatimUrl {
        &self.url
    }

    /// Return a mutable reference to the underlying [`DisplaySafeUrl`].
    pub fn raw_mut(&mut self) -> &mut DisplaySafeUrl {
        &mut self.url
    }

    /// Convert a [`VerbatimUrl`] into a [`DisplaySafeUrl`].
    pub fn to_url(&self) -> DisplaySafeUrl {
        self.url.clone()

@@ -19,9 +19,9 @@ checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"

[[package]]
name = "libmimalloc-sys"
version = "0.1.42"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec9d6fac27761dabcd4ee73571cdb06b7022dc99089acbe5435691edffaac0f4"
checksum = "bf88cd67e9de251c1781dbe2f641a1a3ad66eaae831b8a2c38fbdc5ddae16d4d"
dependencies = [
 "cc",
 "libc",

@@ -29,9 +29,9 @@ dependencies = [

[[package]]
name = "mimalloc"
version = "0.1.46"
version = "0.1.47"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "995942f432bbb4822a7e9c3faa87a695185b0d09273ba85f097b54f4e458f2af"
checksum = "b1791cbe101e95af5764f06f20f6760521f7158f69dbf9d6baf941ee1bf6bc40"
dependencies = [
 "libmimalloc-sys",
]

@@ -12,7 +12,6 @@ use itertools::Itertools;
use reqwest::header::AUTHORIZATION;
use reqwest::multipart::Part;
use reqwest::{Body, Response, StatusCode};
use reqwest_middleware::RequestBuilder;
use reqwest_retry::policies::ExponentialBackoff;
use reqwest_retry::{RetryPolicy, Retryable, RetryableStrategy};
use rustc_hash::FxHashSet;

@@ -29,7 +28,7 @@ use uv_auth::Credentials;
use uv_cache::{Cache, Refresh};
use uv_client::{
    BaseClient, DEFAULT_RETRIES, MetadataFormat, OwnedArchive, RegistryClientBuilder,
    UvRetryableStrategy,
    RequestBuilder, UvRetryableStrategy,
};
use uv_configuration::{KeyringProviderType, TrustedPublishing};
use uv_distribution_filename::{DistFilename, SourceDistExtension, SourceDistFilename};

@@ -330,7 +329,9 @@ pub async fn check_trusted_publishing(
            debug!(
                "Running on GitHub Actions without explicit credentials, checking for trusted publishing"
            );
            match trusted_publishing::get_token(registry, client.for_host(registry)).await {
            match trusted_publishing::get_token(registry, client.for_host(registry).raw_client())
                .await
            {
                Ok(token) => Ok(TrustedPublishResult::Configured(token)),
                Err(err) => {
                    // TODO(konsti): It would be useful if we could differentiate between actual errors

@@ -364,7 +365,9 @@ pub async fn check_trusted_publishing(
                );
            }

            let token = trusted_publishing::get_token(registry, client.for_host(registry)).await?;
            let token =
                trusted_publishing::get_token(registry, client.for_host(registry).raw_client())
                    .await?;
            Ok(TrustedPublishResult::Configured(token))
        }
        TrustedPublishing::Never => Ok(TrustedPublishResult::Skipped),

@@ -387,7 +390,7 @@ pub async fn upload(
    download_concurrency: &Semaphore,
    reporter: Arc<impl Reporter>,
) -> Result<bool, PublishError> {
    let form_metadata = form_metadata(file, filename)
    let form_metadata = FormMetadata::read_from_file(file, filename)
        .await
        .map_err(|err| PublishError::PublishPrepare(file.to_path_buf(), Box::new(err)))?;

@@ -641,125 +644,143 @@ async fn metadata(file: &Path, filename: &DistFilename) -> Result<Metadata23, Pu
    Ok(Metadata23::parse(&contents)?)
}

/// Collect the non-file fields for the multipart request from the package METADATA.
///
/// Reference implementation: <https://github.com/pypi/warehouse/blob/d2c36d992cf9168e0518201d998b2707a3ef1e72/warehouse/forklift/legacy.py#L1376-L1430>
async fn form_metadata(
    file: &Path,
    filename: &DistFilename,
) -> Result<Vec<(&'static str, String)>, PublishPrepareError> {
    let hash_hex = hash_file(file, Hasher::from(HashAlgorithm::Sha256)).await?;
#[derive(Debug, Clone)]
struct FormMetadata(Vec<(&'static str, String)>);

    let Metadata23 {
        metadata_version,
        name,
        version,
        platforms,
        // Not used by PyPI legacy upload
        supported_platforms: _,
        summary,
        description,
        description_content_type,
        keywords,
        home_page,
        download_url,
        author,
        author_email,
        maintainer,
        maintainer_email,
        license,
        license_expression,
        license_files,
        classifiers,
        requires_dist,
        provides_dist,
        obsoletes_dist,
        requires_python,
        requires_external,
        project_urls,
        provides_extras,
        dynamic,
    } = metadata(file, filename).await?;
impl FormMetadata {
    /// Collect the non-file fields for the multipart request from the package METADATA.
    ///
    /// Reference implementation: <https://github.com/pypi/warehouse/blob/d2c36d992cf9168e0518201d998b2707a3ef1e72/warehouse/forklift/legacy.py#L1376-L1430>
    async fn read_from_file(
        file: &Path,
        filename: &DistFilename,
    ) -> Result<Self, PublishPrepareError> {
        let hash_hex = hash_file(file, Hasher::from(HashAlgorithm::Sha256)).await?;

    let mut form_metadata = vec![
        (":action", "file_upload".to_string()),
        ("sha256_digest", hash_hex.digest.to_string()),
        ("protocol_version", "1".to_string()),
        ("metadata_version", metadata_version.clone()),
        // Twine transforms the name with `re.sub("[^A-Za-z0-9.]+", "-", name)`
        // * <https://github.com/pypa/twine/issues/743>
        // * <https://github.com/pypa/twine/blob/5bf3f38ff3d8b2de47b7baa7b652c697d7a64776/twine/package.py#L57-L65>
        // warehouse seems to call `packaging.utils.canonicalize_name` nowadays and has a separate
        // `normalized_name`, so we'll start with this and we'll readjust if there are user reports.
        ("name", name.clone()),
        ("version", version.clone()),
        ("filetype", filename.filetype().to_string()),
    ];
        let Metadata23 {
            metadata_version,
            name,
            version,
            platforms,
            // Not used by PyPI legacy upload
            supported_platforms: _,
            summary,
            description,
            description_content_type,
            keywords,
            home_page,
            download_url,
            author,
            author_email,
            maintainer,
            maintainer_email,
            license,
            license_expression,
            license_files,
            classifiers,
            requires_dist,
            provides_dist,
            obsoletes_dist,
            requires_python,
            requires_external,
            project_urls,
            provides_extras,
            dynamic,
        } = metadata(file, filename).await?;

    if let DistFilename::WheelFilename(wheel) = filename {
        form_metadata.push(("pyversion", wheel.python_tags().iter().join(".")));
    } else {
        form_metadata.push(("pyversion", "source".to_string()));
        let mut form_metadata = vec![
            (":action", "file_upload".to_string()),
            ("sha256_digest", hash_hex.digest.to_string()),
            ("protocol_version", "1".to_string()),
            ("metadata_version", metadata_version.clone()),
            // Twine transforms the name with `re.sub("[^A-Za-z0-9.]+", "-", name)`
            // * <https://github.com/pypa/twine/issues/743>
            // * <https://github.com/pypa/twine/blob/5bf3f38ff3d8b2de47b7baa7b652c697d7a64776/twine/package.py#L57-L65>
            // warehouse seems to call `packaging.utils.canonicalize_name` nowadays and has a separate
            // `normalized_name`, so we'll start with this and we'll readjust if there are user reports.
            ("name", name.clone()),
            ("version", version.clone()),
            ("filetype", filename.filetype().to_string()),
        ];

        if let DistFilename::WheelFilename(wheel) = filename {
            form_metadata.push(("pyversion", wheel.python_tags().iter().join(".")));
        } else {
            form_metadata.push(("pyversion", "source".to_string()));
        }

        let mut add_option = |name, value: Option<String>| {
            if let Some(some) = value.clone() {
                form_metadata.push((name, some));
            }
        };

        add_option("author", author);
        add_option("author_email", author_email);
        add_option("description", description);
        add_option("description_content_type", description_content_type);
        add_option("download_url", download_url);
        add_option("home_page", home_page);
        add_option("keywords", keywords);
        add_option("license", license);
        add_option("license_expression", license_expression);
        add_option("maintainer", maintainer);
        add_option("maintainer_email", maintainer_email);
        add_option("summary", summary);

        // The GitLab PyPI repository API implementation requires this metadata field and twine always
        // includes it in the request, even when it's empty.
        form_metadata.push(("requires_python", requires_python.unwrap_or(String::new())));

        let mut add_vec = |name, values: Vec<String>| {
            for i in values {
                form_metadata.push((name, i.clone()));
            }
        };

        add_vec("classifiers", classifiers);
        add_vec("dynamic", dynamic);
        add_vec("license_file", license_files);
        add_vec("obsoletes_dist", obsoletes_dist);
        add_vec("platform", platforms);
        add_vec("project_urls", project_urls);
        add_vec("provides_dist", provides_dist);
        add_vec("provides_extra", provides_extras);
        add_vec("requires_dist", requires_dist);
        add_vec("requires_external", requires_external);

        Ok(Self(form_metadata))
    }

    let mut add_option = |name, value: Option<String>| {
        if let Some(some) = value.clone() {
            form_metadata.push((name, some));
        }
    };
    /// Returns an iterator over the metadata fields.
    fn iter(&self) -> std::slice::Iter<'_, (&'static str, String)> {
        self.0.iter()
    }
}

    add_option("author", author);
    add_option("author_email", author_email);
    add_option("description", description);
    add_option("description_content_type", description_content_type);
    add_option("download_url", download_url);
    add_option("home_page", home_page);
    add_option("keywords", keywords);
    add_option("license", license);
    add_option("license_expression", license_expression);
    add_option("maintainer", maintainer);
    add_option("maintainer_email", maintainer_email);
    add_option("summary", summary);

    // The GitLab PyPI repository API implementation requires this metadata field and twine always
    // includes it in the request, even when it's empty.
    form_metadata.push(("requires_python", requires_python.unwrap_or(String::new())));

    let mut add_vec = |name, values: Vec<String>| {
        for i in values {
            form_metadata.push((name, i.clone()));
        }
    };

    add_vec("classifiers", classifiers);
    add_vec("dynamic", dynamic);
    add_vec("license_file", license_files);
    add_vec("obsoletes_dist", obsoletes_dist);
    add_vec("platform", platforms);
    add_vec("project_urls", project_urls);
    add_vec("provides_dist", provides_dist);
    add_vec("provides_extra", provides_extras);
    add_vec("requires_dist", requires_dist);
    add_vec("requires_external", requires_external);

    Ok(form_metadata)
impl<'a> IntoIterator for &'a FormMetadata {
    type Item = &'a (&'a str, String);
    type IntoIter = std::slice::Iter<'a, (&'a str, String)>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

/// Build the upload request.
///
/// Returns the request and the reporter progress bar id.
async fn build_request(
async fn build_request<'a>(
    file: &Path,
    raw_filename: &str,
    filename: &DistFilename,
    registry: &DisplaySafeUrl,
    client: &BaseClient,
    client: &'a BaseClient,
    credentials: &Credentials,
    form_metadata: &[(&'static str, String)],
    form_metadata: &FormMetadata,
    reporter: Arc<impl Reporter>,
) -> Result<(RequestBuilder, usize), PublishPrepareError> {
) -> Result<(RequestBuilder<'a>, usize), PublishPrepareError> {
    let mut form = reqwest::multipart::Form::new();
    for (key, value) in form_metadata {
    for (key, value) in form_metadata.iter() {
        form = form.text(*key, value.clone());
    }

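The collection logic keeps its shape inside `FormMetadata::read_from_file`: optional fields are pushed only when present, repeated fields once per value, and the resulting pairs feed `form.text(...)` one by one in `build_request`. A standalone sketch of that pattern with a plain `Vec` in place of uv's types:

// Sketch of the field-collection pattern used by `FormMetadata::read_from_file`.
fn main() {
    let mut form_metadata: Vec<(&'static str, String)> =
        vec![(":action", "file_upload".to_string())];

    // Optional fields are included only when present.
    let mut add_option = |name, value: Option<String>| {
        if let Some(some) = value {
            form_metadata.push((name, some));
        }
    };
    add_option("summary", Some("An example summary".to_string()));
    add_option("keywords", None); // omitted entirely

    // Repeated fields are pushed once per value under the same key.
    let mut add_vec = |name, values: Vec<String>| {
        for value in values {
            form_metadata.push((name, value));
        }
    };
    add_vec("classifiers", vec!["A".to_string(), "B".to_string()]);

    assert_eq!(form_metadata.len(), 4);
}
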
@@ -885,16 +906,19 @@ async fn handle_response(registry: &Url, response: Response) -> Result<(), Publi

#[cfg(test)]
mod tests {
    use crate::{Reporter, build_request, form_metadata};
    use insta::{assert_debug_snapshot, assert_snapshot};
    use itertools::Itertools;
    use std::path::PathBuf;
    use std::sync::Arc;

    use insta::{assert_debug_snapshot, assert_snapshot};
    use itertools::Itertools;

    use uv_auth::Credentials;
    use uv_client::BaseClientBuilder;
    use uv_distribution_filename::DistFilename;
    use uv_redacted::DisplaySafeUrl;

    use crate::{FormMetadata, Reporter, build_request};

    struct DummyReporter;

    impl Reporter for DummyReporter {

@@ -913,7 +937,9 @@ mod tests {
        let file = PathBuf::from("../../scripts/links/").join(raw_filename);
        let filename = DistFilename::try_from_normalized_filename(raw_filename).unwrap();

        let form_metadata = form_metadata(&file, &filename).await.unwrap();
        let form_metadata = FormMetadata::read_from_file(&file, &filename)
            .await
            .unwrap();

        let formatted_metadata = form_metadata
            .iter()

@@ -969,12 +995,13 @@ mod tests {
            project_urls: Source, https://github.com/unknown/tqdm
        "###);

        let client = BaseClientBuilder::new().build();
        let (request, _) = build_request(
            &file,
            raw_filename,
            &filename,
            &DisplaySafeUrl::parse("https://example.org/upload").unwrap(),
            &BaseClientBuilder::new().build(),
            &client,
            &Credentials::basic(Some("ferris".to_string()), Some("F3RR!S".to_string())),
            &form_metadata,
            Arc::new(DummyReporter),

@@ -985,7 +1012,7 @@ mod tests {
        insta::with_settings!({
            filters => [("boundary=[0-9a-f-]+", "boundary=[...]")],
        }, {
            assert_debug_snapshot!(&request, @r#"
            assert_debug_snapshot!(&request.raw_builder(), @r#"
            RequestBuilder {
                inner: RequestBuilder {
                    method: POST,

@@ -1024,7 +1051,9 @@ mod tests {
        let file = PathBuf::from("../../scripts/links/").join(raw_filename);
        let filename = DistFilename::try_from_normalized_filename(raw_filename).unwrap();

        let form_metadata = form_metadata(&file, &filename).await.unwrap();
        let form_metadata = FormMetadata::read_from_file(&file, &filename)
            .await
            .unwrap();

        let formatted_metadata = form_metadata
            .iter()

@@ -1118,12 +1147,13 @@ mod tests {
            requires_dist: requests ; extra == 'telegram'
        "###);

        let client = BaseClientBuilder::new().build();
        let (request, _) = build_request(
            &file,
            raw_filename,
            &filename,
            &DisplaySafeUrl::parse("https://example.org/upload").unwrap(),
            &BaseClientBuilder::new().build(),
            &client,
            &Credentials::basic(Some("ferris".to_string()), Some("F3RR!S".to_string())),
            &form_metadata,
            Arc::new(DummyReporter),

@@ -1134,7 +1164,7 @@ mod tests {
        insta::with_settings!({
            filters => [("boundary=[0-9a-f-]+", "boundary=[...]")],
        }, {
            assert_debug_snapshot!(&request, @r#"
            assert_debug_snapshot!(&request.raw_builder(), @r#"
            RequestBuilder {
                inner: RequestBuilder {
                    method: POST,

@@ -3,6 +3,8 @@ use petgraph::{
    graph::{DiGraph, NodeIndex},
};
use rustc_hash::{FxHashMap, FxHashSet};
#[cfg(feature = "schemars")]
use std::borrow::Cow;
use std::{collections::BTreeSet, hash::Hash, rc::Rc};
use uv_normalize::{ExtraName, GroupName, PackageName};

@@ -638,12 +640,12 @@ pub struct SchemaConflictItem {

#[cfg(feature = "schemars")]
impl schemars::JsonSchema for SchemaConflictItem {
    fn schema_name() -> String {
        "SchemaConflictItem".to_string()
    fn schema_name() -> Cow<'static, str> {
        Cow::Borrowed("SchemaConflictItem")
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
        <ConflictItemWire as schemars::JsonSchema>::json_schema(r#gen)
    fn json_schema(generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
        <ConflictItemWire as schemars::JsonSchema>::json_schema(generator)
    }
}

@@ -1,4 +1,6 @@
use serde::{Serialize, Serializer};
#[cfg(feature = "schemars")]
use std::borrow::Cow;
use std::fmt::Display;
use std::str::FromStr;
use thiserror::Error;

@@ -99,25 +101,16 @@ impl Serialize for Identifier {

#[cfg(feature = "schemars")]
impl schemars::JsonSchema for Identifier {
    fn schema_name() -> String {
        "Identifier".to_string()
    fn schema_name() -> Cow<'static, str> {
        Cow::Borrowed("Identifier")
    }

    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
        schemars::schema::SchemaObject {
            instance_type: Some(schemars::schema::InstanceType::String.into()),
            string: Some(Box::new(schemars::schema::StringValidation {
                // Best-effort Unicode support (https://stackoverflow.com/a/68844380/3549270)
                pattern: Some(r"^[_\p{Alphabetic}][_0-9\p{Alphabetic}]*$".to_string()),
                ..schemars::schema::StringValidation::default()
            })),
            metadata: Some(Box::new(schemars::schema::Metadata {
                description: Some("An identifier in Python".to_string()),
                ..schemars::schema::Metadata::default()
            })),
            ..schemars::schema::SchemaObject::default()
        }
        .into()
    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
        schemars::json_schema!({
            "type": "string",
            "pattern": r"^[_\p{Alphabetic}][_0-9\p{Alphabetic}]*$",
            "description": "An identifier in Python"
        })
    }
}

@@ -20,6 +20,7 @@ uv-cache = { workspace = true }
uv-cache-info = { workspace = true }
uv-cache-key = { workspace = true }
uv-client = { workspace = true }
uv-configuration = { workspace = true }
uv-dirs = { workspace = true }
uv-distribution-filename = { workspace = true }
uv-extract = { workspace = true }

@@ -38,11 +39,14 @@ uv-warnings = { workspace = true }
anyhow = { workspace = true }
clap = { workspace = true, optional = true }
configparser = { workspace = true }
dunce = { workspace = true }
fs-err = { workspace = true, features = ["tokio"] }
futures = { workspace = true }
goblin = { workspace = true, default-features = false }
indexmap = { workspace = true }
itertools = { workspace = true }
owo-colors = { workspace = true }
ref-cast = { workspace = true }
regex = { workspace = true }
reqwest = { workspace = true }
reqwest-middleware = { workspace = true }

File diff suppressed because it is too large

@@ -630,7 +630,9 @@ class GraalPyFinder(Finder):
            for download in batch:
                url = download.url + ".sha256"
                checksum_requests.append(self.client.get(url))
            for download, resp in zip(batch, await asyncio.gather(*checksum_requests)):
            for download, resp in zip(
                batch, await asyncio.gather(*checksum_requests), strict=False
            ):
                try:
                    resp.raise_for_status()
                except httpx.HTTPStatusError as e:

@@ -729,7 +731,7 @@ async def find() -> None:
    }
    if token:
        headers["Authorization"] = "Bearer " + token
    client = httpx.AsyncClient(follow_redirects=True, headers=headers, timeout=15)
    client = httpx.AsyncClient(follow_redirects=True, headers=headers, timeout=60)

    finders = [
        CPythonFinder(client),

@@ -39,10 +39,9 @@ if hasattr(sys, "implementation"):
        # GraalPy reports the CPython version as sys.implementation.version,
        # so we need to discover the GraalPy version from the cache_tag
        import re

        implementation_version = re.sub(
            r"graalpy(\d)(\d+)-\d+",
            r"\1.\2",
            sys.implementation.cache_tag
            r"graalpy(\d)(\d+)-\d+", r"\1.\2", sys.implementation.cache_tag
        )
    else:
        implementation_version = format_full_version(sys.implementation.version)

@@ -583,7 +582,6 @@ def main() -> None:
    elif os_and_arch["os"]["name"] == "musllinux":
        manylinux_compatible = True


    # By default, pip uses sysconfig on Python 3.10+.
    # But Python distributors can override this decision by setting:
    #     sysconfig._PIP_USE_SYSCONFIG = True / False

@@ -608,7 +606,7 @@ def main() -> None:
        except (ImportError, AttributeError):
            pass

        import distutils.dist
        import distutils.dist  # noqa: F401
    except ImportError:
        # We require distutils, but it's not installed; this is fairly
        # common in, e.g., deadsnakes where distutils is packaged

@@ -641,7 +639,10 @@ def main() -> None:
        # Prior to the introduction of `sysconfig` patching, python-build-standalone installations would always use
        # "/install" as the prefix. With `sysconfig` patching, we rewrite the prefix to match the actual installation
        # location. So in newer versions, we also write a dedicated flag to indicate standalone builds.
        "standalone": sysconfig.get_config_var("prefix") == "/install" or bool(sysconfig.get_config_var("PYTHON_BUILD_STANDALONE")),
        "standalone": (
            sysconfig.get_config_var("prefix") == "/install"
            or bool(sysconfig.get_config_var("PYTHON_BUILD_STANDALONE"))
        ),
        "scheme": get_scheme(use_sysconfig_scheme),
        "virtualenv": get_virtualenv(),
        "platform": os_and_arch,

@@ -69,8 +69,7 @@ class ELFFile:
            }[(self.capacity, self.encoding)]
        except KeyError:
            raise ELFInvalid(
                f"unrecognized capacity ({self.capacity}) or "
                f"encoding ({self.encoding})"
                f"unrecognized capacity ({self.capacity}) or encoding ({self.encoding})"
            )

        try:

Some files were not shown because too many files have changed in this diff.