diff --git a/.config/nextest.toml b/.config/nextest.toml index c063bb861..cc1a18dbe 100644 --- a/.config/nextest.toml +++ b/.config/nextest.toml @@ -1,4 +1,4 @@ [profile.default] # Mark tests that take longer than 10s as slow. -# Terminate after 90s as a stop-gap measure to terminate on deadlock. -slow-timeout = { period = "10s", terminate-after = 9 } +# Terminate after 120s as a stop-gap measure to terminate on deadlock. +slow-timeout = { period = "10s", terminate-after = 12 } diff --git a/.github/workflows/build-binaries.yml b/.github/workflows/build-binaries.yml index 1cb50b27e..ccd3ef3ee 100644 --- a/.github/workflows/build-binaries.yml +++ b/.github/workflows/build-binaries.yml @@ -54,7 +54,7 @@ jobs: - name: "Prep README.md" run: python scripts/transform_readme.py --target pypi - name: "Build sdist" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: command: sdist args: --out dist @@ -74,7 +74,7 @@ jobs: # uv-build - name: "Build sdist uv-build" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: command: sdist args: --out crates/uv-build/dist -m crates/uv-build/Cargo.toml @@ -103,7 +103,7 @@ jobs: # uv - name: "Build wheels - x86_64" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: x86_64 args: --release --locked --out dist --features self-update @@ -133,7 +133,7 @@ jobs: # uv-build - name: "Build wheels uv-build - x86_64" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: x86_64 args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml @@ -157,7 +157,7 @@ jobs: # uv - name: "Build wheels - aarch64" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: aarch64 args: --release --locked --out dist --features self-update @@ -193,7 +193,7 @@ jobs: # uv-build - name: "Build wheels uv-build - aarch64" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: aarch64 args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml @@ -231,10 +231,10 @@ jobs: # uv - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} - args: --release --locked --out dist --features self-update + args: --release --locked --out dist --features self-update,windows-gui-bin - name: "Test wheel" if: ${{ !startsWith(matrix.platform.target, 'aarch64') }} shell: bash @@ -243,6 +243,7 @@ jobs: ${{ env.MODULE_NAME }} --help python -m ${{ env.MODULE_NAME }} --help uvx --help + uvw --help - name: "Upload wheels" uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: @@ -254,6 +255,7 @@ jobs: ARCHIVE_FILE=uv-${{ matrix.platform.target }}.zip 7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/uv.exe 7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target 
}}/release/uvx.exe + 7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/uvw.exe sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 - name: "Upload binary" uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 @@ -265,7 +267,7 @@ jobs: # uv-build - name: "Build wheels uv-build" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml @@ -301,7 +303,7 @@ jobs: # uv - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.target }} # Generally, we try to build in a target docker container. In this case however, a @@ -366,7 +368,7 @@ jobs: # uv-build - name: "Build wheels uv-build" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.target }} manylinux: auto @@ -410,19 +412,18 @@ jobs: # uv - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`. manylinux: ${{ matrix.platform.arch == 'aarch64' && '2_28' || 'auto' }} docker-options: ${{ matrix.platform.maturin_docker_options }} args: --release --locked --out dist --features self-update - - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2 + - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1 name: "Test wheel" with: arch: ${{ matrix.platform.arch == 'arm' && 'armv6' || matrix.platform.arch }} distro: ${{ matrix.platform.arch == 'arm' && 'bullseye' || 'ubuntu20.04' }} - githubToken: ${{ github.token }} install: | apt-get update apt-get install -y --no-install-recommends python3 python3-pip python-is-python3 @@ -460,19 +461,18 @@ jobs: # uv-build - name: "Build wheels uv-build" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`. 
manylinux: ${{ matrix.platform.arch == 'aarch64' && '2_28' || 'auto' }} docker-options: ${{ matrix.platform.maturin_docker_options }} args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml - - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2 + - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1 name: "Test wheel uv-build" with: arch: ${{ matrix.platform.arch == 'arm' && 'armv6' || matrix.platform.arch }} distro: ${{ matrix.platform.arch == 'arm' && 'bullseye' || 'ubuntu20.04' }} - githubToken: ${{ github.token }} install: | apt-get update apt-get install -y --no-install-recommends python3 python3-pip python-is-python3 @@ -509,19 +509,21 @@ jobs: # uv - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} manylinux: auto docker-options: ${{ matrix.platform.maturin_docker_options }} args: --release --locked --out dist --features self-update - - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2 + # Until the llvm updates hit stable + # https://github.com/rust-lang/rust/issues/141287 + rust-toolchain: nightly-2025-05-25 + - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1 if: matrix.platform.arch != 'ppc64' name: "Test wheel" with: arch: ${{ matrix.platform.arch }} distro: ubuntu20.04 - githubToken: ${{ github.token }} install: | apt-get update apt-get install -y --no-install-recommends python3 python3-pip python-is-python3 @@ -559,19 +561,18 @@ jobs: # uv-build - name: "Build wheels uv-build" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} manylinux: auto docker-options: ${{ matrix.platform.maturin_docker_options }} args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml - - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2 + - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1 if: matrix.platform.arch != 'ppc64' name: "Test wheel uv-build" with: arch: ${{ matrix.platform.arch }} distro: ubuntu20.04 - githubToken: ${{ github.token }} install: | apt-get update apt-get install -y --no-install-recommends python3 python3-pip python-is-python3 @@ -613,7 +614,7 @@ jobs: # uv - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} manylinux: auto @@ -627,13 +628,12 @@ jobs: yum install -y gcc-powerpc64-linux-gnu fi # TODO(charlie): Re-enable testing for PPC wheels. 
- # - uses: uraimo/run-on-arch-action@v2 + # - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1 # if: matrix.platform.arch != 'ppc64' # name: "Test wheel" # with: # arch: ${{ matrix.platform.arch }} # distro: ubuntu20.04 - # githubToken: ${{ github.token }} # install: | # apt-get update # apt-get install -y --no-install-recommends python3 python3-pip python-is-python3 @@ -671,7 +671,7 @@ jobs: # uv-build - name: "Build wheels uv-build" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} manylinux: auto @@ -691,6 +691,103 @@ jobs: name: wheels_uv_build-${{ matrix.platform.target }} path: crates/uv-build/dist + # Like `linux-arm`. + linux-riscv64: + if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }} + timeout-minutes: 30 + runs-on: depot-ubuntu-latest-4 + strategy: + matrix: + platform: + - target: riscv64gc-unknown-linux-gnu + arch: riscv64 + + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + with: + python-version: ${{ env.PYTHON_VERSION }} + - name: "Prep README.md" + run: python scripts/transform_readme.py --target pypi + + # uv + - name: "Build wheels" + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 + with: + target: ${{ matrix.platform.target }} + manylinux: auto + docker-options: ${{ matrix.platform.maturin_docker_options }} + args: --release --locked --out dist --features self-update + - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2 + name: "Test wheel" + with: + arch: ${{ matrix.platform.arch }} + distro: ubuntu20.04 + githubToken: ${{ github.token }} + install: | + apt-get update + apt-get install -y --no-install-recommends python3 python3-pip python-is-python3 + pip3 install -U pip + run: | + pip install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall + ${{ env.MODULE_NAME }} --help + # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here. 
+ # python -m ${{ env.MODULE_NAME }} --help + uvx --help + - name: "Upload wheels" + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: wheels_uv-${{ matrix.platform.target }} + path: dist + - name: "Archive binary" + shell: bash + run: | + TARGET=${{ matrix.platform.target }} + ARCHIVE_NAME=uv-$TARGET + ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz + + mkdir -p $ARCHIVE_NAME + cp target/$TARGET/release/uv $ARCHIVE_NAME/uv + cp target/$TARGET/release/uvx $ARCHIVE_NAME/uvx + tar czvf $ARCHIVE_FILE $ARCHIVE_NAME + shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 + - name: "Upload binary" + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: artifacts-${{ matrix.platform.target }} + path: | + *.tar.gz + *.sha256 + + # uv-build + - name: "Build wheels uv-build" + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 + with: + target: ${{ matrix.platform.target }} + manylinux: auto + docker-options: ${{ matrix.platform.maturin_docker_options }} + args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml + - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2 + name: "Test wheel uv-build" + with: + arch: ${{ matrix.platform.arch }} + distro: ubuntu20.04 + githubToken: ${{ github.token }} + install: | + apt-get update + apt-get install -y --no-install-recommends python3 python3-pip python-is-python3 + pip3 install -U pip + run: | + pip install ${{ env.PACKAGE_NAME }}-build --no-index --find-links crates/uv-build/dist --force-reinstall + ${{ env.MODULE_NAME }}-build --help + # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here. + # python -m ${{ env.MODULE_NAME }}-build --help + - name: "Upload wheels uv-build" + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: wheels_uv_build-${{ matrix.platform.target }} + path: crates/uv-build/dist + musllinux: if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }} runs-on: ubuntu-latest @@ -710,7 +807,7 @@ jobs: # uv - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.target }} manylinux: musllinux_1_1 @@ -757,7 +854,7 @@ jobs: # uv-build - name: "Build wheels uv-build" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.target }} manylinux: musllinux_1_1 @@ -804,19 +901,18 @@ jobs: # uv - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} manylinux: musllinux_1_1 args: --release --locked --out dist --features self-update ${{ matrix.platform.arch == 'aarch64' && '--compatibility 2_17' || ''}} docker-options: ${{ matrix.platform.maturin_docker_options }} rust-toolchain: ${{ matrix.platform.toolchain || null }} - - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2 + - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1 name: "Test wheel" with: arch: ${{ matrix.platform.arch }} distro: alpine_latest - githubToken: ${{ github.token }} install: | apk add python3 run: | @@ -826,13 
+922,12 @@ jobs: # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here. # .venv/bin/python -m ${{ env.MODULE_NAME }} --help .venv/bin/uvx --help - - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2 + - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1 name: "Test wheel (manylinux)" if: matrix.platform.arch == 'aarch64' with: arch: aarch64 distro: ubuntu20.04 - githubToken: ${{ github.token }} install: | apt-get update apt-get install -y --no-install-recommends python3 python3-pip python-is-python3 @@ -871,19 +966,18 @@ jobs: # uv-build - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} manylinux: musllinux_1_1 args: --profile minimal-size --locked ${{ matrix.platform.arch == 'aarch64' && '--compatibility 2_17' || ''}} --out crates/uv-build/dist -m crates/uv-build/Cargo.toml docker-options: ${{ matrix.platform.maturin_docker_options }} rust-toolchain: ${{ matrix.platform.toolchain || null }} - - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2 + - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1 name: "Test wheel" with: arch: ${{ matrix.platform.arch }} distro: alpine_latest - githubToken: ${{ github.token }} install: | apk add python3 run: | @@ -892,13 +986,12 @@ jobs: .venv/bin/${{ env.MODULE_NAME }}-build --help # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here. # .venv/bin/python -m ${{ env.MODULE_NAME }}_build --help - - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2 + - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1 name: "Test wheel (manylinux)" if: matrix.platform.arch == 'aarch64' with: arch: aarch64 distro: ubuntu20.04 - githubToken: ${{ github.token }} install: | apt-get update apt-get install -y --no-install-recommends python3 python3-pip python-is-python3 diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml index 7b3834cdb..843ee8dfb 100644 --- a/.github/workflows/build-docker.yml +++ b/.github/workflows/build-docker.yml @@ -1,11 +1,19 @@ -# Build and publish a Docker image. +# Build and publish Docker images. # -# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local -# artifacts job within `cargo-dist`. +# Uses Depot for multi-platform builds. Includes both a `uv` base image, which +# is just the binary in a scratch image, and a set of extra, common images with +# the uv binary installed. # -# TODO(charlie): Ideally, the publish step would happen as a publish job within `cargo-dist`, but -# sharing the built image as an artifact between jobs is challenging. -name: "Build Docker image" +# Images are built on all runs. +# +# On release, assumed to run as a subworkflow of .github/workflows/release.yml; +# specifically, as a local artifacts job within `cargo-dist`. In this case, +# images are published based on the `plan`. +# +# TODO(charlie): Ideally, the publish step would happen as a publish job within +# `cargo-dist`, but sharing the built image as an artifact between jobs is +# challenging. 
+name: "Docker images" on: workflow_call: @@ -29,35 +37,67 @@ on: - .github/workflows/build-docker.yml env: - UV_BASE_IMG: ghcr.io/${{ github.repository_owner }}/uv + UV_GHCR_IMAGE: ghcr.io/${{ github.repository_owner }}/uv + UV_DOCKERHUB_IMAGE: docker.io/astral/uv jobs: - docker-build: - if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }} - name: Build Docker image (ghcr.io/astral-sh/uv) for ${{ matrix.platform }} + docker-plan: + name: plan runs-on: ubuntu-latest + outputs: + login: ${{ steps.plan.outputs.login }} + push: ${{ steps.plan.outputs.push }} + tag: ${{ steps.plan.outputs.tag }} + action: ${{ steps.plan.outputs.action }} + steps: + - name: Set push variable + env: + DRY_RUN: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }} + TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag }} + IS_LOCAL_PR: ${{ github.event.pull_request.head.repo.full_name == 'astral-sh/uv' }} + id: plan + run: | + if [ "${{ env.DRY_RUN }}" == "false" ]; then + echo "login=true" >> "$GITHUB_OUTPUT" + echo "push=true" >> "$GITHUB_OUTPUT" + echo "tag=${{ env.TAG }}" >> "$GITHUB_OUTPUT" + echo "action=build and publish" >> "$GITHUB_OUTPUT" + else + echo "login=${{ env.IS_LOCAL_PR }}" >> "$GITHUB_OUTPUT" + echo "push=false" >> "$GITHUB_OUTPUT" + echo "tag=dry-run" >> "$GITHUB_OUTPUT" + echo "action=build" >> "$GITHUB_OUTPUT" + fi + + docker-publish-base: + if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }} + name: ${{ needs.docker-plan.outputs.action }} uv + needs: + - docker-plan + runs-on: ubuntu-latest + permissions: + contents: read + id-token: write # for Depot OIDC and GHCR signing + packages: write # for GHCR image pushes + attestations: write # for GHCR attestations environment: - name: release - strategy: - fail-fast: false - matrix: - platform: - - linux/amd64 - - linux/arm64 + name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }} + outputs: + image-tags: ${{ steps.meta.outputs.tags }} + image-annotations: ${{ steps.meta.outputs.annotations }} + image-digest: ${{ steps.build.outputs.digest }} + image-version: ${{ steps.meta.outputs.version }} steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: submodules: recursive - # Login to DockerHub first, to avoid rate-limiting + # Login to DockerHub (when not pushing, it's to avoid rate-limiting) - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 - # PRs from forks don't have access to secrets, disable this step in that case. 
- if: ${{ github.event.pull_request.head.repo.full_name == 'astral-sh/uv' }} + if: ${{ needs.docker-plan.outputs.login == 'true' }} with: - username: astralshbot - password: ${{ secrets.DOCKERHUB_TOKEN_RO }} - - - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 + username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }} + password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }} - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 with: @@ -65,13 +105,15 @@ jobs: username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} + - uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 + - name: Check tag consistency - if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }} + if: ${{ needs.docker-plan.outputs.push == 'true' }} run: | version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g') - if [ "${{ fromJson(inputs.plan).announcement_tag }}" != "${version}" ]; then + if [ "${{ needs.docker-plan.outputs.tag }}" != "${version}" ]; then echo "The input tag does not match the version from pyproject.toml:" >&2 - echo "${{ fromJson(inputs.plan).announcement_tag }}" >&2 + echo "${{ needs.docker-plan.outputs.tag }}" >&2 echo "${version}" >&2 exit 1 else @@ -81,107 +123,50 @@ jobs: - name: Extract metadata (tags, labels) for Docker id: meta uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0 + env: + DOCKER_METADATA_ANNOTATIONS_LEVELS: index with: - images: ${{ env.UV_BASE_IMG }} + images: | + ${{ env.UV_GHCR_IMAGE }} + ${{ env.UV_DOCKERHUB_IMAGE }} # Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name tags: | - type=raw,value=dry-run,enable=${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }} - type=pep440,pattern={{ version }},value=${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }},enable=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }} + type=raw,value=dry-run,enable=${{ needs.docker-plan.outputs.push == 'false' }} + type=pep440,pattern={{ version }},value=${{ needs.docker-plan.outputs.tag }},enable=${{ needs.docker-plan.outputs.push }} + type=pep440,pattern={{ major }}.{{ minor }},value=${{ needs.docker-plan.outputs.tag }},enable=${{ needs.docker-plan.outputs.push }} - - name: Normalize Platform Pair (replace / with -) - run: | - platform=${{ matrix.platform }} - echo "PLATFORM_TUPLE=${platform//\//-}" >> $GITHUB_ENV - - # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/ - name: Build and push by digest id: build - uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0 + uses: depot/build-push-action@2583627a84956d07561420dcc1d0eb1f2af3fac0 # v1.15.0 with: + project: 7hd4vdzmw5 # astral-sh/uv context: . 
- platforms: ${{ matrix.platform }} - cache-from: type=gha,scope=uv-${{ env.PLATFORM_TUPLE }} - cache-to: type=gha,mode=min,scope=uv-${{ env.PLATFORM_TUPLE }} + platforms: linux/amd64,linux/arm64 + push: ${{ needs.docker-plan.outputs.push }} + tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - outputs: type=image,name=${{ env.UV_BASE_IMG }},push-by-digest=true,name-canonical=true,push=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }} + # TODO(zanieb): Annotations are not supported by Depot yet and are ignored + annotations: ${{ steps.meta.outputs.annotations }} - - name: Export digests - run: | - mkdir -p /tmp/digests - digest="${{ steps.build.outputs.digest }}" - touch "/tmp/digests/${digest#sha256:}" - - - name: Upload digests - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + - name: Generate artifact attestation for base image + if: ${{ needs.docker-plan.outputs.push == 'true' }} + uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 with: - name: digests-${{ env.PLATFORM_TUPLE }} - path: /tmp/digests/* - if-no-files-found: error - retention-days: 1 - - docker-publish: - name: Publish Docker image (ghcr.io/astral-sh/uv) - runs-on: ubuntu-latest - environment: - name: release - needs: - - docker-build - if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }} - steps: - # Login to DockerHub first, to avoid rate-limiting - - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 - with: - username: astralshbot - password: ${{ secrets.DOCKERHUB_TOKEN_RO }} - - - name: Download digests - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 - with: - path: /tmp/digests - pattern: digests-* - merge-multiple: true - - - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 - - - name: Extract metadata (tags, labels) for Docker - id: meta - uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0 - with: - images: ${{ env.UV_BASE_IMG }} - # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version - tags: | - type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }} - type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }} - - - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 - with: - registry: ghcr.io - username: ${{ github.repository_owner }} - password: ${{ secrets.GITHUB_TOKEN }} - - # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/ - - name: Create manifest list and push - working-directory: /tmp/digests - # The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array - # The printf will expand the base image with the `@sha256: ...` for each sha256 in the directory - # The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... @sha256: @sha256: ...` - run: | - docker buildx imagetools create \ - $(jq -cr '.tags | map("-t " + .) 
| join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ - $(printf '${{ env.UV_BASE_IMG }}@sha256:%s ' *) + subject-name: ${{ env.UV_GHCR_IMAGE }} + subject-digest: ${{ steps.build.outputs.digest }} docker-publish-extra: - name: Publish additional Docker image based on ${{ matrix.image-mapping }} + name: ${{ needs.docker-plan.outputs.action }} ${{ matrix.image-mapping }} runs-on: ubuntu-latest environment: - name: release + name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }} needs: - - docker-publish - if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }} + - docker-plan + - docker-publish-base permissions: - packages: write - attestations: write # needed to push image attestations to the Github attestation store - id-token: write # needed for signing the images with GitHub OIDC Token + id-token: write # for Depot OIDC and GHCR signing + packages: write # for GHCR image pushes + attestations: write # for GHCR attestations strategy: fail-fast: false matrix: @@ -213,13 +198,12 @@ jobs: - python:3.9-slim-bookworm,python3.9-bookworm-slim - python:3.8-slim-bookworm,python3.8-bookworm-slim steps: - # Login to DockerHub first, to avoid rate-limiting + # Login to DockerHub (when not pushing, it's to avoid rate-limiting) - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 + if: ${{ needs.docker-plan.outputs.login == 'true' }} with: - username: astralshbot - password: ${{ secrets.DOCKERHUB_TOKEN_RO }} - - - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 + username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }} + password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }} - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 with: @@ -227,6 +211,8 @@ jobs: username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} + - uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 + - name: Generate Dynamic Dockerfile Tags shell: bash run: | @@ -238,7 +224,7 @@ jobs: # Generate Dockerfile content cat < Dockerfile FROM ${BASE_IMAGE} - COPY --from=${{ env.UV_BASE_IMG }}:latest /uv /uvx /usr/local/bin/ + COPY --from=${{ env.UV_GHCR_IMAGE }}:latest /uv /uvx /usr/local/bin/ ENTRYPOINT [] CMD ["/usr/local/bin/uv"] EOF @@ -249,17 +235,14 @@ jobs: # Loop through all base tags and append its docker metadata pattern to the list # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version IFS=','; for TAG in ${BASE_TAGS}; do - TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${{ fromJson(inputs.plan).announcement_tag }}\n" - TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${{ fromJson(inputs.plan).announcement_tag }}\n" + TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${{ needs.docker-plan.outputs.tag }}\n" + TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${{ needs.docker-plan.outputs.tag }}\n" TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n" done # Remove the trailing newline from the pattern list TAG_PATTERNS="${TAG_PATTERNS%\\n}" - # Export image cache name - echo "IMAGE_REF=${BASE_IMAGE//:/-}" >> $GITHUB_ENV - # Export tag patterns using the multiline env var syntax { echo "TAG_PATTERNS<> "$GITHUB_OUTPUT" + + # See `docker-annotate-base` for 
details. + - name: Generate artifact attestation + if: ${{ needs.docker-plan.outputs.push == 'true' }} + uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 + with: + subject-name: ${{ env.UV_GHCR_IMAGE }} + subject-digest: ${{ steps.manifest-digest.outputs.digest }} + + # Annotate the base image + docker-annotate-base: + name: annotate uv runs-on: ubuntu-latest environment: - name: release + name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }} needs: + - docker-plan + - docker-publish-base - docker-publish-extra - if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }} - permissions: - packages: write - attestations: write # needed to push image attestations to the Github attestation store - id-token: write # needed for signing the images with GitHub OIDC Token + if: ${{ needs.docker-plan.outputs.push == 'true' }} steps: - # Login to DockerHub first, to avoid rate-limiting - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 with: - username: astralshbot - password: ${{ secrets.DOCKERHUB_TOKEN_RO }} - - - name: Download digests - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 - with: - path: /tmp/digests - pattern: digests-* - merge-multiple: true - - - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 - - - name: Extract metadata (tags, labels) for Docker - id: meta - uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0 - env: - DOCKER_METADATA_ANNOTATIONS_LEVELS: index - with: - images: ${{ env.UV_BASE_IMG }} - # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version - tags: | - type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }} - type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }} + username: astral + password: ${{ secrets.DOCKERHUB_TOKEN_RW }} - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 with: @@ -350,22 +352,37 @@ jobs: username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/ - - name: Create manifest list and push - working-directory: /tmp/digests + # Depot doesn't support annotating images, so we need to do so manually + # afterwards. Mutating the manifest is desirable regardless, because we + # want to bump the base image to appear at the top of the list on GHCR. + # However, once annotation support is added to Depot, this step can be + # minimized to just touch the GHCR manifest. + - name: Add annotations to images + env: + IMAGES: "${{ env.UV_GHCR_IMAGE }} ${{ env.UV_DOCKERHUB_IMAGE }}" + DIGEST: ${{ needs.docker-publish-base.outputs.image-digest }} + TAGS: ${{ needs.docker-publish-base.outputs.image-tags }} + ANNOTATIONS: ${{ needs.docker-publish-base.outputs.image-annotations }} # The readarray part is used to make sure the quoting and special characters are preserved on expansion (e.g. spaces) - # The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array - # The printf will expand the base image with the `@sha256: ...` for each sha256 in the directory - # The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... 
@sha256: @sha256: ...` + # The final command becomes `docker buildx imagetools create --annotation 'index:foo=1' --annotation 'index:bar=2' ... -t tag1 -t tag2 ... @sha256:` run: | - readarray -t lines <<< "$DOCKER_METADATA_OUTPUT_ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done - docker buildx imagetools create \ - "${annotations[@]}" \ - $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ - $(printf '${{ env.UV_BASE_IMG }}@sha256:%s ' *) + set -x + readarray -t lines <<< "$ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done + for image in $IMAGES; do + readarray -t lines < <(grep "^${image}:" <<< "$TAGS"); tags=(); for line in "${lines[@]}"; do tags+=(-t "$line"); done + docker buildx imagetools create \ + "${annotations[@]}" \ + "${tags[@]}" \ + "${image}@${DIGEST}" + done - - name: Share manifest digest + # Now that we've modified the manifest, we need to attest it again. + # Note we only generate an attestation for GHCR. + - name: Export manifest digest id: manifest-digest + env: + IMAGE: ${{ env.UV_GHCR_IMAGE }} + VERSION: ${{ needs.docker-publish-base.outputs.image-version }} # To sign the manifest, we need it's digest. Unfortunately "docker # buildx imagetools create" does not (yet) have a clean way of sharing # the digest of the manifest it creates (see docker/buildx#2407), so @@ -377,15 +394,14 @@ jobs: run: | digest="$( docker buildx imagetools inspect \ - "${UV_BASE_IMG}:${DOCKER_METADATA_OUTPUT_VERSION}" \ + "${IMAGE}:${VERSION}" \ --format '{{json .Manifest}}' \ | jq -r '.digest' )" echo "digest=${digest}" >> "$GITHUB_OUTPUT" - name: Generate artifact attestation - uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3 + uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 with: - subject-name: ${{ env.UV_BASE_IMG }} + subject-name: ${{ env.UV_GHCR_IMAGE }} subject-digest: ${{ steps.manifest-digest.outputs.digest }} - # push-to-registry is explicitly not enabled to maintain full control over the top image diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index aa9b97675..ba7a4b4d1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,8 +14,9 @@ env: CARGO_INCREMENTAL: 0 CARGO_NET_RETRY: 10 CARGO_TERM_COLOR: always - RUSTUP_MAX_RETRIES: 10 PYTHON_VERSION: "3.12" + RUSTUP_MAX_RETRIES: 10 + RUST_BACKTRACE: 1 jobs: determine_changes: @@ -39,7 +40,7 @@ jobs: while IFS= read -r file; do # Generated markdown and JSON files are checked during test runs. - if [[ "${file}" =~ ^docs/ && ! "${file}" =~ ^docs/reference/(cli|settings).md && ! "${file}" =~ ^docs/configuration/environment.md ]]; then + if [[ "${file}" =~ ^docs/ && ! "${file}" =~ ^docs/reference/(cli|settings).md && ! 
"${file}" =~ ^docs/reference/environment.md ]]; then echo "Skipping ${file} (matches docs/ pattern)" continue fi @@ -81,7 +82,7 @@ jobs: run: rustup component add rustfmt - name: "Install uv" - uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1 + uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 - name: "rustfmt" run: cargo fmt --all --check @@ -125,11 +126,11 @@ jobs: name: "cargo clippy | ubuntu" steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Check uv_build dependencies" - uses: EmbarkStudios/cargo-deny-action@34899fc7ba81ca6268d5947a7a16b4649013fea1 # v2.0.11 + uses: EmbarkStudios/cargo-deny-action@30f817c6f72275c6d54dc744fbca09ebc958599f # v2.0.12 with: command: check bans manifest-path: crates/uv-build/Cargo.toml @@ -155,7 +156,7 @@ jobs: run: | Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: workspaces: ${{ env.UV_WORKSPACE }} @@ -174,7 +175,7 @@ jobs: name: "cargo dev generate-all" steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Generate all" @@ -187,7 +188,7 @@ jobs: steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: "Install cargo shear" - uses: taiki-e/install-action@ab3728c7ba6948b9b429627f4d55a68842b27f18 # v2.50.3 + uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 with: tool: cargo-shear - run: cargo shear @@ -207,21 +208,24 @@ jobs: - uses: rui314/setup-mold@v1 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Install Rust toolchain" run: rustup show - - uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1 + - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 - name: "Install required Python versions" run: uv python install - name: "Install cargo nextest" - uses: taiki-e/install-action@ab3728c7ba6948b9b429627f4d55a68842b27f18 # v2.50.3 + uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 with: tool: cargo-nextest - name: "Cargo test" + env: + # Retry more than default to reduce flakes in CI + UV_HTTP_RETRIES: 5 run: | cargo nextest run \ --features python-patch \ @@ -229,9 +233,10 @@ jobs: --status-level skip --failure-output immediate-final --no-fail-fast -j 20 --final-status-level slow cargo-test-macos: - timeout-minutes: 10 + timeout-minutes: 15 needs: determine_changes - if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} + # Only run macOS tests on main without opt-in + if: ${{ contains(github.event.pull_request.labels.*.name, 'test:macos') || github.ref == 'refs/heads/main' }} runs-on: macos-latest-xlarge # github-macos-14-aarch64-6 name: "cargo 
test | macos" steps: @@ -239,21 +244,24 @@ jobs: - uses: rui314/setup-mold@v1 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Install Rust toolchain" run: rustup show - - uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1 + - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 - name: "Install required Python versions" run: uv python install - name: "Install cargo nextest" - uses: taiki-e/install-action@ab3728c7ba6948b9b429627f4d55a68842b27f18 # v2.50.3 + uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 with: tool: cargo-nextest - name: "Cargo test" + env: + # Retry more than default to reduce flakes in CI + UV_HTTP_RETRIES: 5 run: | cargo nextest run \ --no-default-features \ @@ -265,7 +273,7 @@ jobs: timeout-minutes: 15 needs: determine_changes if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} - runs-on: github-windows-2025-x86_64-16 + runs-on: depot-windows-2022-16 name: "cargo test | windows" steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 @@ -278,11 +286,11 @@ jobs: run: | Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse - - uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1 + - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 - name: "Install required Python versions" run: uv python install - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: workspaces: ${{ env.UV_WORKSPACE }} @@ -291,13 +299,15 @@ jobs: run: rustup show - name: "Install cargo nextest" - uses: taiki-e/install-action@ab3728c7ba6948b9b429627f4d55a68842b27f18 # v2.50.3 + uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 with: tool: cargo-nextest - name: "Cargo test" working-directory: ${{ env.UV_WORKSPACE }} env: + # Retry more than default to reduce flakes in CI + UV_HTTP_RETRIES: 5 # Avoid permission errors during concurrent tests # See https://github.com/astral-sh/uv/issues/6940 UV_LINK_MODE: copy @@ -331,7 +341,7 @@ jobs: run: | Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: workspaces: ${{ env.UV_WORKSPACE }}/crates/uv-trampoline @@ -342,7 +352,7 @@ jobs: rustup component add rust-src --target ${{ matrix.target-arch }}-pc-windows-msvc - name: "Install cargo-bloat" - uses: taiki-e/install-action@ab3728c7ba6948b9b429627f4d55a68842b27f18 # v2.50.3 + uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 with: tool: cargo-bloat @@ -387,7 +397,7 @@ jobs: - name: Copy Git Repo to Dev Drive run: | Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: workspaces: ${{ env.UV_WORKSPACE }}/crates/uv-trampoline - name: "Install Rust toolchain" @@ -429,7 +439,7 @@ jobs: - uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 0 - - uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1 + - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 - name: "Add SSH key" if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }} @@ -442,7 +452,7 @@ jobs: - name: "Build docs (insiders)" if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }} - run: uvx --with-requirements docs/requirements.txt mkdocs build --strict -f mkdocs.insiders.yml + run: uvx --with-requirements docs/requirements-insiders.txt mkdocs build --strict -f mkdocs.insiders.yml build-binary-linux-libc: timeout-minutes: 10 @@ -455,7 +465,7 @@ jobs: - uses: rui314/setup-mold@v1 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Build" run: cargo build @@ -469,6 +479,31 @@ jobs: ./target/debug/uvx retention-days: 1 + build-binary-linux-aarch64: + timeout-minutes: 10 + needs: determine_changes + if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} + runs-on: github-ubuntu-24.04-aarch64-4 + name: "build binary | linux aarch64" + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + + - uses: rui314/setup-mold@v1 + + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 + + - name: "Build" + run: cargo build + + - name: "Upload binary" + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: uv-linux-aarch64-${{ github.sha }} + path: | + ./target/debug/uv + ./target/debug/uvx + retention-days: 1 + build-binary-linux-musl: timeout-minutes: 10 needs: determine_changes @@ -485,7 +520,7 @@ jobs: sudo apt-get install musl-tools rustup target add x86_64-unknown-linux-musl - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Build" run: cargo build --target x86_64-unknown-linux-musl --bin uv --bin uvx @@ -510,7 +545,7 @@ jobs: - uses: rui314/setup-mold@v1 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Build" run: cargo build --bin uv --bin uvx @@ -534,7 +569,7 @@ jobs: - uses: rui314/setup-mold@v1 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Build" run: cargo build --bin uv --bin uvx @@ -564,7 +599,7 @@ jobs: run: | Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: workspaces: ${{ env.UV_WORKSPACE }} @@ -599,7 +634,7 @@ jobs: run: | Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: workspaces: ${{ env.UV_WORKSPACE }} @@ -636,7 +671,7 @@ jobs: run: rustup default ${{ steps.msrv.outputs.value }} 
- name: "Install mold" uses: rui314/setup-mold@v1 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - run: cargo +${{ steps.msrv.outputs.value }} build - run: ./target/debug/uv --version @@ -649,7 +684,7 @@ jobs: steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Cross build" run: | # Install cross from `freebsd-firecracker` @@ -660,7 +695,7 @@ jobs: cross build --target x86_64-unknown-freebsd - name: Test in Firecracker VM - uses: acj/freebsd-firecracker-action@4d93174d9eea32cd2b2650f964af69f8c72eaff2 # v0.3.0 + uses: acj/freebsd-firecracker-action@136ca0bce2adade21e526ceb07db643ad23dd2dd # v0.5.1 with: verbose: false checkout: false @@ -769,6 +804,33 @@ jobs: eval "$(./uv generate-shell-completion bash)" eval "$(./uvx --generate-shell-completion bash)" + smoke-test-linux-aarch64: + timeout-minutes: 10 + needs: build-binary-linux-aarch64 + name: "smoke test | linux aarch64" + runs-on: github-ubuntu-24.04-aarch64-2 + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + + - name: "Download binary" + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + with: + name: uv-linux-aarch64-${{ github.sha }} + + - name: "Prepare binary" + run: | + chmod +x ./uv + chmod +x ./uvx + + - name: "Smoke test" + run: | + ./uv run scripts/smoke-test + + - name: "Test shell completions" + run: | + eval "$(./uv generate-shell-completion bash)" + eval "$(./uvx --generate-shell-completion bash)" + smoke-test-linux-musl: timeout-minutes: 10 needs: build-binary-linux-musl @@ -851,7 +913,7 @@ jobs: timeout-minutes: 10 needs: build-binary-windows-aarch64 name: "smoke test | windows aarch64" - runs-on: github-windows-11-aarch64-4 + runs-on: windows-11-arm steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 @@ -882,7 +944,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: conda-incubator/setup-miniconda@505e6394dae86d6a5c7fbb6e3fb8938e3e863830 # v3.1.1 + - uses: conda-incubator/setup-miniconda@835234971496cad1653abb28a638a281cf32541f # v3.2.0 with: miniconda-version: latest activate-environment: uv @@ -907,7 +969,7 @@ jobs: ./uv pip install anyio integration-test-deadsnakes-39-linux: - timeout-minutes: 5 + timeout-minutes: 15 needs: build-binary-linux-libc name: "integration test | deadsnakes python3.9 on ubuntu" runs-on: ubuntu-latest @@ -954,59 +1016,6 @@ jobs: run: | ./uv pip install -v anyio - integration-test-free-threaded-linux: - timeout-minutes: 5 - needs: build-binary-linux-libc - name: "integration test | free-threaded on linux" - runs-on: ubuntu-latest - steps: - - name: "Install python3.13-nogil" - run: | - sudo add-apt-repository ppa:deadsnakes - sudo apt-get update - sudo apt-get install python3.13-nogil - - - name: "Download binary" - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 - with: - name: uv-linux-libc-${{ github.sha }} - - - name: "Prepare binary" - run: chmod +x ./uv - - - name: "Create a virtual environment" - run: | - ./uv venv -p 3.13t --python-preference only-system - - - name: "Check version" - run: | - .venv/bin/python --version - - - name: "Check is free-threaded" - run: | - 
.venv/bin/python -c "import sys; exit(1) if sys._is_gil_enabled() else exit(0)" - - - name: "Check install" - run: | - ./uv pip install -v anyio - - - name: "Install free-threaded Python via uv" - run: | - ./uv python install -v 3.13t - ./uv venv -p 3.13t --managed-python - - - name: "Check version" - run: | - .venv/bin/python --version - - - name: "Check is free-threaded" - run: | - .venv/bin/python -c "import sys; exit(1) if sys._is_gil_enabled() else exit(0)" - - - name: "Check install" - run: | - ./uv pip install -v anyio - integration-test-free-threaded-windows-x86_64: timeout-minutes: 10 needs: build-binary-windows-x86_64 @@ -1052,6 +1061,96 @@ jobs: ./uv run python -c "" ./uv run -p 3.13t python -c "" + integration-test-windows-aarch64-implicit: + timeout-minutes: 10 + needs: build-binary-windows-aarch64 + name: "integration test | aarch64 windows implicit" + runs-on: windows-11-arm + + steps: + - name: "Download binary" + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + with: + name: uv-windows-aarch64-${{ github.sha }} + + - name: "Install Python via uv (implicitly select x64)" + run: | + ./uv python install -v 3.13 + + - name: "Create a virtual environment (stdlib)" + run: | + & (./uv python find 3.13) -m venv .venv + + - name: "Check version (stdlib)" + run: | + .venv/Scripts/python --version + + - name: "Create a virtual environment (uv)" + run: | + ./uv venv -p 3.13 --managed-python + + - name: "Check version (uv)" + run: | + .venv/Scripts/python --version + + - name: "Check is x64" + run: | + .venv/Scripts/python -c "import sys; exit(1) if 'AMD64' not in sys.version else exit(0)" + + - name: "Check install" + run: | + ./uv pip install -v anyio + + - name: "Check uv run" + run: | + ./uv run python -c "" + ./uv run -p 3.13 python -c "" + + integration-test-windows-aarch64-explicit: + timeout-minutes: 10 + needs: build-binary-windows-aarch64 + name: "integration test | aarch64 windows explicit" + runs-on: windows-11-arm + + steps: + - name: "Download binary" + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + with: + name: uv-windows-aarch64-${{ github.sha }} + + - name: "Install Python via uv (explicitly select aarch64)" + run: | + ./uv python install -v cpython-3.13-windows-aarch64-none + + - name: "Create a virtual environment (stdlib)" + run: | + & (./uv python find 3.13) -m venv .venv + + - name: "Check version (stdlib)" + run: | + .venv/Scripts/python --version + + - name: "Create a virtual environment (uv)" + run: | + ./uv venv -p 3.13 --managed-python + + - name: "Check version (uv)" + run: | + .venv/Scripts/python --version + + - name: "Check is NOT x64" + run: | + .venv/Scripts/python -c "import sys; exit(1) if 'AMD64' in sys.version else exit(0)" + + - name: "Check install" + run: | + ./uv pip install -v anyio + + - name: "Check uv run" + run: | + ./uv run python -c "" + ./uv run -p 3.13 python -c "" + integration-test-pypy-linux: timeout-minutes: 10 needs: build-binary-linux-libc @@ -1295,6 +1394,45 @@ jobs: run: | .\uv.exe pip install anyio + integration-test-pyodide-linux: + timeout-minutes: 10 + needs: build-binary-linux-libc + name: "integration test | pyodide on ubuntu" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + + - name: "Download binary" + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + with: + name: uv-linux-libc-${{ github.sha }} + + - name: "Prepare binary" + run: chmod +x ./uv + + - 
name: "Create a native virtual environment" + run: | + ./uv venv venv-native -p 3.12 + # We use features added in 0.30.3 but there is no known breakage in + # newer versions. + ./uv pip install -p venv-native/bin/python pyodide-build==0.30.3 pip + + - name: "Install pyodide interpreter" + run: | + source ./venv-native/bin/activate + pyodide xbuildenv install 0.27.5 + PYODIDE_PYTHON=$(pyodide config get interpreter) + PYODIDE_INDEX=$(pyodide config get package_index) + echo "PYODIDE_PYTHON=$PYODIDE_PYTHON" >> $GITHUB_ENV + echo "PYODIDE_INDEX=$PYODIDE_INDEX" >> $GITHUB_ENV + + - name: "Create pyodide virtual environment" + run: | + ./uv venv -p $PYODIDE_PYTHON venv-pyodide + source ./venv-pyodide/bin/activate + ./uv pip install --extra-index-url=$PYODIDE_INDEX --no-build numpy + python -c 'import numpy' + integration-test-github-actions: timeout-minutes: 10 needs: build-binary-linux-libc @@ -1384,6 +1522,14 @@ jobs: run: | ./uv pip install anyio --system --python 3.13t + - name: "Create a virtual environment" + run: | + ./uv venv -p 3.13t --python-preference only-system + + - name: "Check is free-threaded" + run: | + .venv/bin/python -c "import sys; exit(1) if sys._is_gil_enabled() else exit(0)" + integration-test-publish-changed: timeout-minutes: 10 needs: build-binary-linux-libc @@ -1421,8 +1567,92 @@ jobs: done <<< "${CHANGED_FILES}" echo "code_any_changed=${CODE_CHANGED}" >> "${GITHUB_OUTPUT}" - integration-test-publish: + integration-test-registries: timeout-minutes: 10 + needs: build-binary-linux-libc + name: "integration test | registries" + runs-on: ubuntu-latest + if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event.pull_request.head.repo.fork != true }} + environment: uv-test-registries + env: + PYTHON_VERSION: 3.12 + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + fetch-depth: 0 + + - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: "Download binary" + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + with: + name: uv-linux-libc-${{ github.sha }} + + - name: "Prepare binary" + run: chmod +x ./uv + + - name: "Configure AWS credentials" + uses: aws-actions/configure-aws-credentials@f503a1870408dcf2c35d5c2b8a68e69211042c7d + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-1 + + - name: "Get AWS CodeArtifact token" + run: | + UV_TEST_AWS_TOKEN=$(aws codeartifact get-authorization-token \ + --domain tests \ + --domain-owner ${{ secrets.AWS_ACCOUNT_ID }} \ + --region us-east-1 \ + --query authorizationToken \ + --output text) + echo "::add-mask::$UV_TEST_AWS_TOKEN" + echo "UV_TEST_AWS_TOKEN=$UV_TEST_AWS_TOKEN" >> $GITHUB_ENV + + - name: "Authenticate with GCP" + id: "auth" + uses: "google-github-actions/auth@0920706a19e9d22c3d0da43d1db5939c6ad837a8" + with: + credentials_json: "${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}" + + - name: "Set up GCP SDK" + uses: "google-github-actions/setup-gcloud@a8b58010a5b2a061afd605f50e88629c9ec7536b" + + - name: "Get GCP Artifact Registry token" + id: get_token + run: | + UV_TEST_GCP_TOKEN=$(gcloud auth print-access-token) + echo "::add-mask::$UV_TEST_GCP_TOKEN" + echo "UV_TEST_GCP_TOKEN=$UV_TEST_GCP_TOKEN" >> $GITHUB_ENV + + - name: "Run registry tests" + run: ./uv run -p ${{ env.PYTHON_VERSION }} scripts/registries-test.py --uv ./uv --color always --all + 
env: + RUST_LOG: uv=debug + UV_TEST_ARTIFACTORY_TOKEN: ${{ secrets.UV_TEST_ARTIFACTORY_TOKEN }} + UV_TEST_ARTIFACTORY_URL: ${{ secrets.UV_TEST_ARTIFACTORY_URL }} + UV_TEST_ARTIFACTORY_USERNAME: ${{ secrets.UV_TEST_ARTIFACTORY_USERNAME }} + UV_TEST_AWS_URL: ${{ secrets.UV_TEST_AWS_URL }} + UV_TEST_AWS_USERNAME: aws + UV_TEST_AZURE_TOKEN: ${{ secrets.UV_TEST_AZURE_TOKEN }} + UV_TEST_AZURE_URL: ${{ secrets.UV_TEST_AZURE_URL }} + UV_TEST_AZURE_USERNAME: dummy + UV_TEST_CLOUDSMITH_TOKEN: ${{ secrets.UV_TEST_CLOUDSMITH_TOKEN }} + UV_TEST_CLOUDSMITH_URL: ${{ secrets.UV_TEST_CLOUDSMITH_URL }} + UV_TEST_CLOUDSMITH_USERNAME: ${{ secrets.UV_TEST_CLOUDSMITH_USERNAME }} + UV_TEST_GCP_URL: ${{ secrets.UV_TEST_GCP_URL }} + UV_TEST_GCP_USERNAME: oauth2accesstoken + UV_TEST_GEMFURY_TOKEN: ${{ secrets.UV_TEST_GEMFURY_TOKEN }} + UV_TEST_GEMFURY_URL: ${{ secrets.UV_TEST_GEMFURY_URL }} + UV_TEST_GEMFURY_USERNAME: ${{ secrets.UV_TEST_GEMFURY_USERNAME }} + UV_TEST_GITLAB_TOKEN: ${{ secrets.UV_TEST_GITLAB_TOKEN }} + UV_TEST_GITLAB_URL: ${{ secrets.UV_TEST_GITLAB_URL }} + UV_TEST_GITLAB_USERNAME: token + + integration-test-publish: + timeout-minutes: 20 needs: integration-test-publish-changed name: "integration test | uv publish" runs-on: ubuntu-latest @@ -1470,6 +1700,7 @@ jobs: # With this GitHub action, we can't do as rigid checks as with our custom Python script, so we publish more # leniently skip-existing: "true" + verbose: "true" repository-url: "https://test.pypi.org/legacy/" packages-dir: "astral-test-pypa-gh-action/dist" @@ -1667,38 +1898,39 @@ jobs: - name: "Validate global Python install" run: python scripts/check_system_python.py --uv ./uv - system-test-opensuse: - timeout-minutes: 5 - needs: build-binary-linux-libc - name: "check system | python on opensuse" - runs-on: ubuntu-latest - container: opensuse/tumbleweed - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + # Currently failing, see https://github.com/astral-sh/uv/issues/13811 + # system-test-opensuse: + # timeout-minutes: 5 + # needs: build-binary-linux-libc + # name: "check system | python on opensuse" + # runs-on: ubuntu-latest + # container: opensuse/tumbleweed + # steps: + # - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - name: "Install Python" - run: > - until - zypper install -y python310 which && python3.10 -m ensurepip && mv /usr/bin/python3.10 /usr/bin/python3; - do sleep 10; - done + # - name: "Install Python" + # run: > + # until + # zypper install -y python310 which && python3.10 -m ensurepip && mv /usr/bin/python3.10 /usr/bin/python3; + # do sleep 10; + # done - # We retry because `zypper` can fail during remote repository updates - # The above will not sleep forever due to the job level timeout + # # We retry because `zypper` can fail during remote repository updates + # # The above will not sleep forever due to the job level timeout - - name: "Download binary" - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 - with: - name: uv-linux-libc-${{ github.sha }} + # - name: "Download binary" + # uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + # with: + # name: uv-linux-libc-${{ github.sha }} - - name: "Prepare binary" - run: chmod +x ./uv + # - name: "Prepare binary" + # run: chmod +x ./uv - - name: "Print Python path" - run: echo $(which python3) + # - name: "Print Python path" + # run: echo $(which python3) - - name: "Validate global Python install" - run: python3 scripts/check_system_python.py 
--uv ./uv + # - name: "Validate global Python install" + # run: python3 scripts/check_system_python.py --uv ./uv # Note: rockylinux is a 1-1 code compatible distro to rhel # rockylinux mimics centos but with added maintenance stability @@ -1753,7 +1985,7 @@ jobs: python-version: "graalpy24.1" - name: "Download binary" - uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: uv-linux-libc-${{ github.sha }} @@ -1991,7 +2223,7 @@ jobs: timeout-minutes: 10 needs: build-binary-windows-aarch64 name: "check system | x86-64 python3.13 on windows aarch64" - runs-on: github-windows-11-aarch64-4 + runs-on: windows-11-arm steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 @@ -2009,6 +2241,28 @@ jobs: - name: "Validate global Python install" run: py -3.13 ./scripts/check_system_python.py --uv ./uv.exe + system-test-windows-aarch64-aarch64-python-313: + timeout-minutes: 10 + needs: build-binary-windows-aarch64 + name: "check system | aarch64 python3.13 on windows aarch64" + runs-on: windows-11-arm + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + + - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + with: + python-version: "3.13" + architecture: "arm64" + allow-prereleases: true + + - name: "Download binary" + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + with: + name: uv-windows-aarch64-${{ github.sha }} + + - name: "Validate global Python install" + run: py -3.13 ./scripts/check_system_python.py --uv ./uv.exe + # Test our PEP 514 integration that installs Python into the Windows registry. system-test-windows-registry: timeout-minutes: 10 @@ -2154,7 +2408,7 @@ jobs: steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: conda-incubator/setup-miniconda@505e6394dae86d6a5c7fbb6e3fb8938e3e863830 # v3.1.1 + - uses: conda-incubator/setup-miniconda@835234971496cad1653abb28a638a281cf32541f # v3.2.0 with: miniconda-version: "latest" activate-environment: uv @@ -2246,8 +2500,9 @@ jobs: - name: "Validate embedded Python install" run: python ./scripts/check_embedded_python.py --uv ./uv.exe - benchmarks: - runs-on: ubuntu-latest + benchmarks-walltime: + name: "benchmarks | walltime aarch64 linux" + runs-on: codspeed-macro needs: determine_changes if: ${{ github.repository == 'astral-sh/uv' && !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} timeout-minutes: 20 @@ -2255,13 +2510,50 @@ jobs: - name: "Checkout Branch" uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Install Rust toolchain" run: rustup show - name: "Install codspeed" - uses: taiki-e/install-action@ab3728c7ba6948b9b429627f4d55a68842b27f18 # v2.50.3 + uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 + with: + tool: cargo-codspeed + + - name: "Install requirements and prime cache" + run: | + sudo apt-get update + sudo apt-get install -y libsasl2-dev libldap2-dev libkrb5-dev + cargo run --bin uv -- venv --cache-dir .cache + cargo run --bin uv -- pip compile scripts/requirements/jupyter.in --universal --exclude-newer 2024-08-08 --cache-dir .cache 
+ cargo run --bin uv -- pip compile scripts/requirements/airflow.in --universal --exclude-newer 2024-08-08 --cache-dir .cache + + - name: "Build benchmarks" + run: cargo codspeed build --profile profiling --features codspeed -p uv-bench + + - name: "Run benchmarks" + uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0 + with: + run: cargo codspeed run + token: ${{ secrets.CODSPEED_TOKEN }} + + benchmarks-instrumented: + name: "benchmarks | instrumented" + runs-on: ubuntu-latest + needs: determine_changes + if: ${{ github.repository == 'astral-sh/uv' && !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} + timeout-minutes: 20 + steps: + - name: "Checkout Branch" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 + + - name: "Install Rust toolchain" + run: rustup show + + - name: "Install codspeed" + uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 with: tool: cargo-codspeed diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index ce4c19fd9..e4435ff17 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -22,12 +22,14 @@ jobs: id-token: write steps: - name: "Install uv" - uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1 + uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: pattern: wheels_uv-* path: wheels_uv merge-multiple: true + - name: Remove wheels unsupported by PyPI + run: rm wheels_uv/*riscv* - name: Publish to PyPI run: uv publish -v wheels_uv/* @@ -41,11 +43,13 @@ jobs: id-token: write steps: - name: "Install uv" - uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1 + uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: pattern: wheels_uv_build-* path: wheels_uv_build merge-multiple: true + - name: Remove wheels unsupported by PyPI + run: rm wheels_uv_build/*riscv* - name: Publish to PyPI run: uv publish -v wheels_uv_build/* diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b1c77c316..2688c3fc8 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -69,7 +69,7 @@ jobs: # we specify bash to get pipefail; it guards against the `curl` command # failing. otherwise `sh` won't catch that `curl` returned non-0 shell: bash - run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.4/cargo-dist-installer.sh | sh" + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.7-prerelease.1/cargo-dist-installer.sh | sh" - name: Cache dist uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47 with: diff --git a/.github/workflows/setup-dev-drive.ps1 b/.github/workflows/setup-dev-drive.ps1 index e0e2a765b..474b082dc 100644 --- a/.github/workflows/setup-dev-drive.ps1 +++ b/.github/workflows/setup-dev-drive.ps1 @@ -1,13 +1,43 @@ # Configures a drive for testing in CI. +# +# When using standard GitHub Actions runners, a `D:` drive is present and has +# similar or better performance characteristics than a ReFS dev drive. 
Sometimes +# using a larger runner is still more performant (e.g., when running the test +# suite) and we need to create a dev drive. This script automatically configures +# the appropriate drive. +# +# When using GitHub Actions' "larger runners", the `D:` drive is not present and +# we create a DevDrive mount on `C:`. This is purported to be more performant +# than an ReFS drive, though we did not see a change when we switched over. +# +# When using Depot runners, the underlying infrastructure is EC2, which does not +# support Hyper-V. The `New-VHD` commandlet only works with Hyper-V, but we can +# create a ReFS drive using `diskpart` and `format` directly. We cannot use a +# DevDrive, as that also requires Hyper-V. The Depot runners use `D:` already, +# so we must check if it's a Depot runner first, and we use `V:` as the target +# instead. -# When not using a GitHub Actions "larger runner", the `D:` drive is present and -# has similar or better performance characteristics than a ReFS dev drive. -# Sometimes using a larger runner is still more performant (e.g., when running -# the test suite) and we need to create a dev drive. This script automatically -# configures the appropriate drive. -# Note we use `Get-PSDrive` is not sufficient because the drive letter is assigned. -if (Test-Path "D:\") { +if ($env:DEPOT_RUNNER -eq "1") { + Write-Output "DEPOT_RUNNER detected, setting up custom dev drive..." + + # Create VHD and configure drive using diskpart + $vhdPath = "C:\uv_dev_drive.vhdx" + @" +create vdisk file="$vhdPath" maximum=20480 type=expandable +attach vdisk +create partition primary +active +assign letter=V +"@ | diskpart + + # Format the drive as ReFS + format V: /fs:ReFS /q /y + $Drive = "V:" + + Write-Output "Custom dev drive created at $Drive" +} elseif (Test-Path "D:\") { + # Note `Get-PSDrive` is not sufficient because the drive letter is assigned. Write-Output "Using existing drive at D:" $Drive = "D:" } else { @@ -55,10 +85,8 @@ Write-Output ` "DEV_DRIVE=$($Drive)" ` "TMP=$($Tmp)" ` "TEMP=$($Tmp)" ` - "UV_INTERNAL__TEST_DIR=$($Tmp)" ` "RUSTUP_HOME=$($Drive)/.rustup" ` "CARGO_HOME=$($Drive)/.cargo" ` "UV_WORKSPACE=$($Drive)/uv" ` "PATH=$($Drive)/.cargo/bin;$env:PATH" ` >> $env:GITHUB_ENV - diff --git a/.github/workflows/sync-python-releases.yml b/.github/workflows/sync-python-releases.yml index 8029c9305..166458507 100644 --- a/.github/workflows/sync-python-releases.yml +++ b/.github/workflows/sync-python-releases.yml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1 + - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 with: version: "latest" enable-cache: true @@ -28,12 +28,20 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Sync Sysconfig Targets + run: ${{ github.workspace }}/crates/uv-dev/sync_sysconfig_targets.sh + working-directory: ./crates/uv-dev + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: "Create Pull Request" uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8 with: commit-message: "Sync latest Python releases" add-paths: | crates/uv-python/download-metadata.json + crates/uv-dev/src/generate_sysconfig_mappings.rs + crates/uv-python/src/sysconfig/generated_mappings.rs branch: "sync-python-releases" title: "Sync latest Python releases" body: "Automated update for Python releases."
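For reference outside of CI, the new "Sync Sysconfig Targets" step amounts to running the sync script from the `uv-dev` crate directory with a GitHub token in the environment; a minimal local sketch (assuming the token is already exported, as the workflow step does via `env`):

```console
$ cd crates/uv-dev
$ ./sync_sysconfig_targets.sh   # assumes GITHUB_TOKEN is exported, mirroring the workflow step
```

Per the new `add-paths` entries, the files expected to change are `crates/uv-dev/src/generate_sysconfig_mappings.rs` and `crates/uv-python/src/sysconfig/generated_mappings.rs`, which the "Create Pull Request" step then picks up.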
diff --git a/.gitignore b/.gitignore index 97dfd0c29..8ccf60790 100644 --- a/.gitignore +++ b/.gitignore @@ -3,9 +3,10 @@ # Generated by Cargo # will have compiled files and executables +/vendor/ debug/ -target/ target-alpine/ +target/ # Bootstrapped Python versions /bin/ @@ -31,6 +32,7 @@ flamegraph.svg perf.data perf.data.old profile.json +profile.json.gz # MkDocs /site diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 42d5b30bb..1c8965c0f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,7 +12,7 @@ repos: - id: validate-pyproject - repo: https://github.com/crate-ci/typos - rev: v1.32.0 + rev: v1.34.0 hooks: - id: typos @@ -42,7 +42,7 @@ repos: types_or: [yaml, json5] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.11.10 + rev: v0.12.2 hooks: - id: ruff-format - id: ruff diff --git a/.prettierignore b/.prettierignore index dc54f51ff..1f89dc11b 100644 --- a/.prettierignore +++ b/.prettierignore @@ -3,6 +3,6 @@ CHANGELOG.md PREVIEW-CHANGELOG.md docs/reference/cli.md docs/reference/settings.md -docs/configuration/environment.md +docs/reference/environment.md ecosystem/home-assistant-core/LICENSE.md docs/guides/integration/gitlab.md diff --git a/.python-versions b/.python-versions index 1f8f24173..957687cb4 100644 --- a/.python-versions +++ b/.python-versions @@ -5,7 +5,8 @@ 3.9.21 3.8.20 # The following are required for packse scenarios -3.8.18 -3.8.12 +3.9.20 +3.9.18 +3.9.12 # The following is needed for `==3.13` request tests 3.13.0 diff --git a/CHANGELOG.md b/CHANGELOG.md index 1ac1c71eb..9fa93fb03 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,420 @@ +## 0.7.20 + +### Python + +- Add Python 3.14.0b4 +- Add zstd support to Python 3.14 on Unix (it already was available on Windows) +- Add PyPy 7.3.20 (for Python 3.11.13) + +See the [PyPy](https://pypy.org/posts/2025/07/pypy-v7320-release.html) and [`python-build-standalone`](https://github.com/astral-sh/python-build-standalone/releases/tag/20250708) release notes for more details. 
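As a quick illustration of the new builds listed above (a sketch; the exact request strings are illustrative, not taken from the release notes), they can be requested through uv's managed-Python commands:

```console
$ uv python install 3.14.0b4    # request the new CPython beta explicitly
$ uv python install pypy@3.11   # PyPy 7.3.20 provides Python 3.11.13
```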
+ +### Enhancements + +- Add `--workspace` flag to `uv add` ([#14496](https://github.com/astral-sh/uv/pull/14496)) +- Add auto-detection for Intel GPUs ([#14386](https://github.com/astral-sh/uv/pull/14386)) +- Drop trailing arguments when writing shebangs ([#14519](https://github.com/astral-sh/uv/pull/14519)) +- Add debug message when skipping Python downloads ([#14509](https://github.com/astral-sh/uv/pull/14509)) +- Add support for declaring multiple modules in namespace packages ([#14460](https://github.com/astral-sh/uv/pull/14460)) + +### Bug fixes + +- Revert normalization of trailing slashes on index URLs ([#14511](https://github.com/astral-sh/uv/pull/14511)) +- Fix forced resolution with all extras in `uv version` ([#14434](https://github.com/astral-sh/uv/pull/14434)) +- Fix handling of pre-releases in preferences ([#14498](https://github.com/astral-sh/uv/pull/14498)) +- Remove transparent variants in `uv-extract` to enable retries ([#14450](https://github.com/astral-sh/uv/pull/14450)) + +### Rust API + +- Add method to get packages involved in a `NoSolutionError` ([#14457](https://github.com/astral-sh/uv/pull/14457)) +- Make `ErrorTree` for `NoSolutionError` public ([#14444](https://github.com/astral-sh/uv/pull/14444)) + +### Documentation + +- Finish incomplete sentence in pip migration guide ([#14432](https://github.com/astral-sh/uv/pull/14432)) +- Remove `cache-dependency-glob` examples for `setup-uv` ([#14493](https://github.com/astral-sh/uv/pull/14493)) +- Remove `uv pip sync` suggestion with `pyproject.toml` ([#14510](https://github.com/astral-sh/uv/pull/14510)) +- Update documentation for GitHub to use `setup-uv@v6` ([#14490](https://github.com/astral-sh/uv/pull/14490)) + +## 0.7.19 + +The **[uv build backend](https://docs.astral.sh/uv/concepts/build-backend/) is now stable**, and considered ready for production use. + +The uv build backend is a great choice for pure Python projects. It has reasonable defaults, with the goal of requiring zero configuration for most users, but provides flexible configuration to accommodate most Python project structures. It integrates tightly with uv, to improve messaging and user experience. It validates project metadata and structures, preventing common mistakes. And, finally, it's very fast — `uv sync` on a new project (from `uv init`) is 10-30x faster than with other build backends. + +To use uv as a build backend in an existing project, add `uv_build` to the `[build-system]` section in your `pyproject.toml`: + +```toml +[build-system] +requires = ["uv_build>=0.7.19,<0.8.0"] +build-backend = "uv_build" +``` + +In a future release, it will replace `hatchling` as the default in `uv init`. As before, uv will remain compatible with all standards-compliant build backends. + +### Python + +- Add PGO distributions of Python for aarch64 Linux, which are more optimized for better performance + +See the [python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250702) for more details. 
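As a usage sketch to accompany the build backend section above (assuming the current `uv init` flags; the project name is a placeholder), a new project can opt into `uv_build` at creation time and then be built as usual:

```console
$ uv init --build-backend uv example-project   # "example-project" is a placeholder name
$ cd example-project
$ uv build                                     # builds an sdist and a wheel into dist/
```

Building once is a convenient way to confirm the backend is wired up correctly.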
+ +### Enhancements + +- Ignore Python patch version for `--universal` pip compile ([#14405](https://github.com/astral-sh/uv/pull/14405)) +- Update the tilde version specifier warning to include more context ([#14335](https://github.com/astral-sh/uv/pull/14335)) +- Clarify behavior and hint on tool install when no executables are available ([#14423](https://github.com/astral-sh/uv/pull/14423)) + +### Bug fixes + +- Make project and interpreter lock acquisition non-fatal ([#14404](https://github.com/astral-sh/uv/pull/14404)) +- Includes `sys.prefix` in cached environment keys to avoid `--with` collisions across projects ([#14403](https://github.com/astral-sh/uv/pull/14403)) + +### Documentation + +- Add a migration guide from pip to uv projects ([#12382](https://github.com/astral-sh/uv/pull/12382)) + +## 0.7.18 + +### Python + +- Added arm64 Windows Python 3.11, 3.12, 3.13, and 3.14 + + These are not downloaded by default, since x86-64 Python has broader ecosystem support on Windows. +However, they can be requested with `cpython-<version>-windows-aarch64`. + +See the [python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250630) for more details. + +### Enhancements + +- Keep track of retries in `ManagedPythonDownload::fetch_with_retry` ([#14378](https://github.com/astral-sh/uv/pull/14378)) +- Reuse build (virtual) environments across resolution and installation ([#14338](https://github.com/astral-sh/uv/pull/14338)) +- Improve trace message for cached Python interpreter query ([#14328](https://github.com/astral-sh/uv/pull/14328)) +- Use parsed URLs for conflicting URL error message ([#14380](https://github.com/astral-sh/uv/pull/14380)) + +### Preview features + +- Ignore invalid build backend settings when not building ([#14372](https://github.com/astral-sh/uv/pull/14372)) + +### Bug fixes + +- Fix equals-star and tilde-equals with `python_version` and `python_full_version` ([#14271](https://github.com/astral-sh/uv/pull/14271)) +- Include the canonical path in the interpreter query cache key ([#14331](https://github.com/astral-sh/uv/pull/14331)) +- Only drop build directories on program exit ([#14304](https://github.com/astral-sh/uv/pull/14304)) +- Error instead of panic on conflict between global and subcommand flags ([#14368](https://github.com/astral-sh/uv/pull/14368)) +- Consistently normalize trailing slashes on URLs with no path segments ([#14349](https://github.com/astral-sh/uv/pull/14349)) + +### Documentation + +- Add instructions for publishing to JFrog's Artifactory ([#14253](https://github.com/astral-sh/uv/pull/14253)) +- Edits to the build backend documentation ([#14376](https://github.com/astral-sh/uv/pull/14376)) + +## 0.7.17 + +### Bug fixes + +- Apply build constraints when resolving `--with` dependencies ([#14340](https://github.com/astral-sh/uv/pull/14340)) +- Drop trailing slashes when converting index URL from URL ([#14346](https://github.com/astral-sh/uv/pull/14346)) +- Ignore `UV_PYTHON_CACHE_DIR` when empty ([#14336](https://github.com/astral-sh/uv/pull/14336)) +- Fix error message ordering for `pyvenv.cfg` version conflict ([#14329](https://github.com/astral-sh/uv/pull/14329)) + +## 0.7.16 + +### Python + +- Add Python 3.14.0b3 + +See the +[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250626) +for more details.
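Tying the 0.7.18 note above to a concrete command: the same explicit request form appears in the aarch64 Windows workflow job earlier in this diff (the version segment here is illustrative):

```console
$ uv python install cpython-3.13-windows-aarch64-none   # same request used in the new workflow job
```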
+ +### Enhancements + +- Include path or URL when failing to convert in lockfile ([#14292](https://github.com/astral-sh/uv/pull/14292)) +- Warn when `~=` is used as a Python version specifier without a patch version ([#14008](https://github.com/astral-sh/uv/pull/14008)) + +### Preview features + +- Ensure preview default Python installs are upgradeable ([#14261](https://github.com/astral-sh/uv/pull/14261)) + +### Performance + +- Share workspace cache between lock and sync operations ([#14321](https://github.com/astral-sh/uv/pull/14321)) + +### Bug fixes + +- Allow local indexes to reference remote files ([#14294](https://github.com/astral-sh/uv/pull/14294)) +- Avoid rendering desugared prefix matches in error messages ([#14195](https://github.com/astral-sh/uv/pull/14195)) +- Avoid using path URL for workspace Git dependencies in `requirements.txt` ([#14288](https://github.com/astral-sh/uv/pull/14288)) +- Normalize index URLs to remove trailing slash ([#14245](https://github.com/astral-sh/uv/pull/14245)) +- Respect URL-encoded credentials in redirect location ([#14315](https://github.com/astral-sh/uv/pull/14315)) +- Lock the source tree when running setuptools, to protect concurrent builds ([#14174](https://github.com/astral-sh/uv/pull/14174)) + +### Documentation + +- Note that GCP Artifact Registry download URLs must have `/simple` component ([#14251](https://github.com/astral-sh/uv/pull/14251)) + +## 0.7.15 + +### Enhancements + +- Consistently use `Ordering::Relaxed` for standalone atomic use cases ([#14190](https://github.com/astral-sh/uv/pull/14190)) +- Warn on ambiguous relative paths for `--index` ([#14152](https://github.com/astral-sh/uv/pull/14152)) +- Skip GitHub fast path when rate-limited ([#13033](https://github.com/astral-sh/uv/pull/13033)) +- Preserve newlines in `schema.json` descriptions ([#13693](https://github.com/astral-sh/uv/pull/13693)) + +### Bug fixes + +- Add check for using minor version link when creating a venv on Windows ([#14252](https://github.com/astral-sh/uv/pull/14252)) +- Strip query parameters when parsing source URL ([#14224](https://github.com/astral-sh/uv/pull/14224)) + +### Documentation + +- Add a link to PyPI FAQ to clarify what per-project token is ([#14242](https://github.com/astral-sh/uv/pull/14242)) + +### Preview features + +- Allow symlinks in the build backend ([#14212](https://github.com/astral-sh/uv/pull/14212)) + +## 0.7.14 + +### Enhancements + +- Add XPU to `--torch-backend` ([#14172](https://github.com/astral-sh/uv/pull/14172)) +- Add ROCm backends to `--torch-backend` ([#14120](https://github.com/astral-sh/uv/pull/14120)) +- Remove preview label from `--torch-backend` ([#14119](https://github.com/astral-sh/uv/pull/14119)) +- Add `[tool.uv.dependency-groups].mygroup.requires-python` ([#13735](https://github.com/astral-sh/uv/pull/13735)) +- Add auto-detection for AMD GPUs ([#14176](https://github.com/astral-sh/uv/pull/14176)) +- Show retries for HTTP status code errors ([#13897](https://github.com/astral-sh/uv/pull/13897)) +- Support transparent Python patch version upgrades ([#13954](https://github.com/astral-sh/uv/pull/13954)) +- Warn on empty index directory ([#13940](https://github.com/astral-sh/uv/pull/13940)) +- Publish to DockerHub ([#14088](https://github.com/astral-sh/uv/pull/14088)) + +### Performance + +- Make cold resolves about 10% faster ([#14035](https://github.com/astral-sh/uv/pull/14035)) + +### Bug fixes + +- Don't use walrus operator in interpreter query script ([#14108](https://github.com/astral-sh/uv/pull/14108)) 
+- Fix handling of changes to `requires-python` ([#14076](https://github.com/astral-sh/uv/pull/14076)) +- Fix implied `platform_machine` marker for `win_amd64` platform tag ([#14041](https://github.com/astral-sh/uv/pull/14041)) +- Only update existing symlink directories on preview uninstall ([#14179](https://github.com/astral-sh/uv/pull/14179)) +- Serialize Python requests for tools as canonicalized strings ([#14109](https://github.com/astral-sh/uv/pull/14109)) +- Support netrc and same-origin credential propagation on index redirects ([#14126](https://github.com/astral-sh/uv/pull/14126)) +- Support reading `dependency-groups` from pyproject.tomls with no `[project]` ([#13742](https://github.com/astral-sh/uv/pull/13742)) +- Handle an existing shebang in `uv init --script` ([#14141](https://github.com/astral-sh/uv/pull/14141)) +- Prevent concurrent updates of the environment in `uv run` ([#14153](https://github.com/astral-sh/uv/pull/14153)) +- Filter managed Python distributions by platform before querying when included in request ([#13936](https://github.com/astral-sh/uv/pull/13936)) + +### Documentation + +- Replace cuda124 with cuda128 ([#14168](https://github.com/astral-sh/uv/pull/14168)) +- Document the way member sources shadow workspace sources ([#14136](https://github.com/astral-sh/uv/pull/14136)) +- Sync documented PyTorch integration index for CUDA and ROCm versions from PyTorch website ([#14100](https://github.com/astral-sh/uv/pull/14100)) + +## 0.7.13 + +### Python + +- Add Python 3.14.0b2 +- Add Python 3.13.5 +- Fix stability of `uuid.getnode` on 3.13 + +See the +[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250612) +for more details. + +### Enhancements + +- Download versions in `uv python pin` if not found ([#13946](https://github.com/astral-sh/uv/pull/13946)) +- Use TTY detection to determine if SIGINT forwarding is enabled ([#13925](https://github.com/astral-sh/uv/pull/13925)) +- Avoid fetching an exact, cached Git commit, even if it isn't locked ([#13748](https://github.com/astral-sh/uv/pull/13748)) +- Add `zstd` and `deflate` to `Accept-Encoding` ([#13982](https://github.com/astral-sh/uv/pull/13982)) +- Build binaries for riscv64 ([#12688](https://github.com/astral-sh/uv/pull/12688)) + +### Bug fixes + +- Check if relative URL is valid directory before treating as index ([#13917](https://github.com/astral-sh/uv/pull/13917)) +- Ignore Python discovery errors during `uv python pin` ([#13944](https://github.com/astral-sh/uv/pull/13944)) +- Do not allow `uv add --group ... 
--script` ([#13997](https://github.com/astral-sh/uv/pull/13997)) + +### Preview changes + +- Build backend: Support namespace packages ([#13833](https://github.com/astral-sh/uv/pull/13833)) + +### Documentation + +- Add 3.14 to the supported platform reference ([#13990](https://github.com/astral-sh/uv/pull/13990)) +- Add an `llms.txt` to uv ([#13929](https://github.com/astral-sh/uv/pull/13929)) +- Add supported macOS version to the platform reference ([#13993](https://github.com/astral-sh/uv/pull/13993)) +- Update platform support reference to include Python implementation list ([#13991](https://github.com/astral-sh/uv/pull/13991)) +- Update pytorch.md ([#13899](https://github.com/astral-sh/uv/pull/13899)) +- Update the CLI help and reference to include references to the Python bin directory ([#13978](https://github.com/astral-sh/uv/pull/13978)) + +## 0.7.12 + +### Enhancements + +- Add `uv python pin --rm` to remove `.python-version` pins ([#13860](https://github.com/astral-sh/uv/pull/13860)) +- Don't hint at versions removed by `excluded-newer` ([#13884](https://github.com/astral-sh/uv/pull/13884)) +- Add hint to use `tool.uv.environments` on resolution error ([#13455](https://github.com/astral-sh/uv/pull/13455)) +- Add hint to use `tool.uv.required-environments` on resolution error ([#13575](https://github.com/astral-sh/uv/pull/13575)) +- Improve `python pin` error messages ([#13862](https://github.com/astral-sh/uv/pull/13862)) + +### Bug fixes + +- Lock environments during `uv sync`, `uv add` and `uv remove` to prevent race conditions ([#13869](https://github.com/astral-sh/uv/pull/13869)) +- Add `--no-editable` to `uv export` for `pylock.toml` ([#13852](https://github.com/astral-sh/uv/pull/13852)) + +### Documentation + +- List `.gitignore` in project init files ([#13855](https://github.com/astral-sh/uv/pull/13855)) +- Move the pip interface documentation into the concepts section ([#13841](https://github.com/astral-sh/uv/pull/13841)) +- Remove the configuration section in favor of concepts / reference ([#13842](https://github.com/astral-sh/uv/pull/13842)) +- Update Git and GitHub Actions docs to mention `gh auth login` ([#13850](https://github.com/astral-sh/uv/pull/13850)) + +### Preview + +- Fix directory glob traversal fallback preventing exclusion of all files ([#13882](https://github.com/astral-sh/uv/pull/13882)) + +## 0.7.11 + +### Python + +- Add Python 3.14.0b1 +- Add Python 3.13.4 +- Add Python 3.12.11 +- Add Python 3.11.13 +- Add Python 3.10.18 +- Add Python 3.9.23 + +### Enhancements + +- Add Pyodide support ([#12731](https://github.com/astral-sh/uv/pull/12731)) +- Better error message for version specifier with missing operator ([#13803](https://github.com/astral-sh/uv/pull/13803)) + +### Bug fixes + +- Downgrade `reqwest` and `hyper-util` to resolve connection reset errors over IPv6 ([#13835](https://github.com/astral-sh/uv/pull/13835)) +- Prefer `uv`'s binary's version when checking if it's up to date ([#13840](https://github.com/astral-sh/uv/pull/13840)) + +### Documentation + +- Use "terminal driver" instead of "shell" in `SIGINT` docs ([#13787](https://github.com/astral-sh/uv/pull/13787)) + +## 0.7.10 + +### Enhancements + +- Add `--show-extras` to `uv tool list` ([#13783](https://github.com/astral-sh/uv/pull/13783)) +- Add dynamically generated sysconfig replacement mappings ([#13441](https://github.com/astral-sh/uv/pull/13441)) +- Add data locations to install wheel logs ([#13797](https://github.com/astral-sh/uv/pull/13797)) + +### Bug fixes + +- Avoid redaction of 
placeholder `git` username when using SSH authentication ([#13799](https://github.com/astral-sh/uv/pull/13799)) +- Propagate credentials to files on devpi indexes ending in `/+simple` ([#13743](https://github.com/astral-sh/uv/pull/13743)) +- Restore retention of credentials for direct URLs in `uv export` ([#13809](https://github.com/astral-sh/uv/pull/13809)) + +## 0.7.9 + +### Python + +The changes reverted in [0.7.8](#078) have been restored. + +See the +[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250529) +for more details. + +### Enhancements + +- Improve obfuscation of credentials in URLs ([#13560](https://github.com/astral-sh/uv/pull/13560)) +- Allow running non-default Python implementations via `uvx` ([#13583](https://github.com/astral-sh/uv/pull/13583)) +- Add `uvw` as alias for `uv` without console window on Windows ([#11786](https://github.com/astral-sh/uv/pull/11786)) +- Allow discovery of x86-64 managed Python builds on macOS ([#13722](https://github.com/astral-sh/uv/pull/13722)) +- Differentiate between implicit vs explicit architecture requests ([#13723](https://github.com/astral-sh/uv/pull/13723)) +- Implement ordering for Python architectures to prefer native installations ([#13709](https://github.com/astral-sh/uv/pull/13709)) +- Only show the first match per platform (and architecture) by default in `uv python list` ([#13721](https://github.com/astral-sh/uv/pull/13721)) +- Write the path of the parent environment to an `extends-environment` key in the `pyvenv.cfg` file of an ephemeral environment ([#13598](https://github.com/astral-sh/uv/pull/13598)) +- Improve the error message when libc cannot be found, e.g., when using the distroless containers ([#13549](https://github.com/astral-sh/uv/pull/13549)) + +### Performance + +- Avoid rendering info log level ([#13642](https://github.com/astral-sh/uv/pull/13642)) +- Improve performance of `uv-python` crate's manylinux submodule ([#11131](https://github.com/astral-sh/uv/pull/11131)) +- Optimize `Version` display ([#13643](https://github.com/astral-sh/uv/pull/13643)) +- Reduce number of reference-checks for `uv cache clean` ([#13669](https://github.com/astral-sh/uv/pull/13669)) + +### Bug fixes + +- Avoid reinstalling dependency group members with `--all-packages` ([#13678](https://github.com/astral-sh/uv/pull/13678)) +- Don't fail direct URL hash checking with dependency metadata ([#13736](https://github.com/astral-sh/uv/pull/13736)) +- Exit early on `self update` if global `--offline` is set ([#13663](https://github.com/astral-sh/uv/pull/13663)) +- Fix cases where the uv lock is incorrectly marked as out of date ([#13635](https://github.com/astral-sh/uv/pull/13635)) +- Include pre-release versions in `uv python install --reinstall` ([#13645](https://github.com/astral-sh/uv/pull/13645)) +- Set `LC_ALL=C` for git when checking git worktree ([#13637](https://github.com/astral-sh/uv/pull/13637)) +- Avoid rejecting Windows paths for remote Python download JSON targets ([#13625](https://github.com/astral-sh/uv/pull/13625)) + +### Preview + +- Add `uv add --bounds` to configure version constraints ([#12946](https://github.com/astral-sh/uv/pull/12946)) + +### Documentation + +- Add documentation about Python versions to Tools concept page ([#7673](https://github.com/astral-sh/uv/pull/7673)) +- Add example of enabling Dependabot ([#13692](https://github.com/astral-sh/uv/pull/13692)) +- Fix `exclude-newer` date format for persistent configuration files 
([#13706](https://github.com/astral-sh/uv/pull/13706)) +- Quote versions variables in GitLab documentation ([#13679](https://github.com/astral-sh/uv/pull/13679)) +- Update Dependabot support status ([#13690](https://github.com/astral-sh/uv/pull/13690)) +- Explicitly specify to add a new repo entry to the repos list item in the `.pre-commit-config.yaml` ([#10243](https://github.com/astral-sh/uv/pull/10243)) +- Add integration with marimo guide ([#13691](https://github.com/astral-sh/uv/pull/13691)) +- Add pronunciation to README ([#5336](https://github.com/astral-sh/uv/pull/5336)) + +## 0.7.8 + +### Python + +We are reverting most of our Python changes from `uv 0.7.6` and `uv 0.7.7` due to +a miscompilation that makes the Python interpreter behave incorrectly, resulting +in spurious type-errors involving str. This issue seems to be isolated to +x86_64 Linux, and affected at least Python 3.12, 3.13, and 3.14. + +The following changes that were introduced in those versions of uv are temporarily +being reverted while we test and deploy a proper fix for the miscompilation: + +- Add Python 3.14 on musl +- free-threaded Python on musl +- Add Python 3.14.0a7 +- Statically link `libpython` into the interpreter on Linux for a significant performance boost + +See [the issue for details](https://github.com/astral-sh/uv/issues/13610). + +### Documentation + +- Remove misleading line in pin documentation ([#13611](https://github.com/astral-sh/uv/pull/13611)) + +## 0.7.7 + +### Python + +- Work around third-party packages that (incorrectly) assume the interpreter is dynamically linking libpython +- Allow the experimental JIT to be enabled at runtime on Python 3.13 and 3.14 on macOS on aarch64 aka Apple Silicon + +See the +[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250521) +for more details. + +### Bug fixes + +- Make `uv version` lock and sync ([#13317](https://github.com/astral-sh/uv/pull/13317)) +- Fix references to `ldd` in diagnostics to correctly refer to `ld.so` ([#13552](https://github.com/astral-sh/uv/pull/13552)) + +### Documentation + +- Clarify adding SSH Git dependencies ([#13534](https://github.com/astral-sh/uv/pull/13534)) + ## 0.7.6 ### Python diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8565a7ec0..f7be958a4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -74,7 +74,7 @@ system just resolving requirements. To prevent this, there's a Docker container you can run commands in: ```console -$ docker buildx build -t uv-builder -f builder.dockerfile --load . +$ docker build -t uv-builder -f crates/uv-dev/builder.dockerfile --load . # Build for musl to avoid glibc errors, might not be required with your OS version cargo build --target x86_64-unknown-linux-musl --profile profiling docker run --rm -it -v $(pwd):/app uv-builder /app/target/x86_64-unknown-linux-musl/profiling/uv-dev resolve-many --cache-dir /app/cache-docker /app/scripts/popular_packages/pypi_10k_most_dependents.txt @@ -165,6 +165,13 @@ After making changes to the documentation, format the markdown files with: npx prettier --prose-wrap always --write "**/*.md" ``` +Note that the command above requires Node.js and npm to be installed on your system. As an +alternative, you can run this command using Docker: + +```console +$ docker run --rm -v .:/src/ -w /src/ node:alpine npx prettier --prose-wrap always --write "**/*.md" +``` + ## Releases Releases can only be performed by Astral team members. 
diff --git a/Cargo.lock b/Cargo.lock index 802af3261..bc42e30af 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -40,9 +40,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstream" -version = "0.6.18" +version = "0.6.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +checksum = "301af1932e46185686725e0fad2f8f2aa7da69dd70bf6ecc44d6b703844a3933" dependencies = [ "anstyle", "anstyle-parse", @@ -94,6 +94,15 @@ version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" +[[package]] +name = "approx" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6" +dependencies = [ + "num-traits", +] + [[package]] name = "arbitrary" version = "1.4.1" @@ -180,9 +189,9 @@ dependencies = [ [[package]] name = "async-channel" -version = "2.3.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" dependencies = [ "concurrent-queue", "event-listener-strategy", @@ -196,7 +205,7 @@ version = "0.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df895a515f70646414f4b45c0b79082783b80552b373a68283012928df56f522" dependencies = [ - "bzip2", + "bzip2 0.4.4", "flate2", "futures-core", "futures-io", @@ -242,7 +251,7 @@ dependencies = [ [[package]] name = "async_zip" version = "0.0.17" -source = "git+https://github.com/charliermarsh/rs-async-zip?rev=c909fda63fcafe4af496a07bfda28a5aae97e58d#c909fda63fcafe4af496a07bfda28a5aae97e58d" +source = "git+https://github.com/astral-sh/rs-async-zip?rev=c909fda63fcafe4af496a07bfda28a5aae97e58d#c909fda63fcafe4af496a07bfda28a5aae97e58d" dependencies = [ "async-compression", "crc32fast", @@ -328,9 +337,9 @@ dependencies = [ [[package]] name = "backon" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd0b50b1b78dbadd44ab18b3c794e496f3a139abb9fbc27d9c94c4eebbb96496" +checksum = "302eaff5357a264a2c42f127ecb8bac761cf99749fc3dc95677e2743991f99e7" dependencies = [ "fastrand", "gloo-timers", @@ -364,6 +373,15 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + [[package]] name = "bisection" version = "0.1.0" @@ -402,9 +420,9 @@ dependencies = [ [[package]] name = "boxcar" -version = "0.2.12" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66bb12751a83493ef4b8da1120451a262554e216a247f14b48cb5e8fe7ed8bdf" +checksum = "26c4925bc979b677330a8c7fe7a8c94af2dbb4a2d37b4a20a80d884400f46baa" [[package]] name = "bstr" @@ -480,6 +498,16 @@ dependencies = [ "libc", ] +[[package]] +name = "bzip2" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bafdbf26611df8c14810e268ddceda071c297570a5fb360ceddf617fe417ef58" +dependencies = [ + "bzip2-sys", 
+ "libc", +] + [[package]] name = "bzip2-sys" version = "0.1.11+1.0.8" @@ -502,9 +530,9 @@ dependencies = [ [[package]] name = "cargo-util" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d767bc85f367f6483a6072430b56f5c0d6ee7636751a21a800526d0711753d76" +checksum = "c95ec8b2485b20aed818bd7460f8eecc6c87c35c84191b353a3aba9aa1736c36" dependencies = [ "anyhow", "core-foundation", @@ -591,9 +619,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.38" +version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed93b9805f8ba930df42c2590f05453d5ec36cbb85d018868a5b24d31f6ac000" +checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f" dependencies = [ "clap_builder", "clap_derive", @@ -601,9 +629,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.38" +version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "379026ff283facf611b0ea629334361c4211d1b12ee01024eec1591133b04120" +checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e" dependencies = [ "anstream", "anstyle", @@ -644,9 +672,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.32" +version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" +checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce" dependencies = [ "heck", "proc-macro2", @@ -662,22 +690,27 @@ checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "codspeed" -version = "2.10.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93f4cce9c27c49c4f101fffeebb1826f41a9df2e7498b7cd4d95c0658b796c6c" +checksum = "922018102595f6668cdd09c03f4bff2d951ce2318c6dca4fe11bdcb24b65b2bf" dependencies = [ + "anyhow", + "bincode", "colored", + "glob", "libc", + "nix 0.29.0", "serde", "serde_json", + "statrs", "uuid", ] [[package]] name = "codspeed-criterion-compat" -version = "2.10.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3c23d880a28a2aab52d38ca8481dd7a3187157d0a952196b6db1db3c8499725" +checksum = "24d8ad82d2383cb74995f58993cbdd2914aed57b2f91f46580310dd81dc3d05a" dependencies = [ "codspeed", "codspeed-criterion-compat-walltime", @@ -686,9 +719,9 @@ dependencies = [ [[package]] name = "codspeed-criterion-compat-walltime" -version = "2.10.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b0a2f7365e347f4f22a67e9ea689bf7bc89900a354e22e26cf8a531a42c8fbb" +checksum = "61badaa6c452d192a29f8387147888f0ab358553597c3fe9bf8a162ef7c2fa64" dependencies = [ "anes", "cast", @@ -755,7 +788,7 @@ dependencies = [ "encode_unicode", "libc", "once_cell", - "unicode-width 0.2.0", + "unicode-width 0.2.1", "windows-sys 0.59.0", ] @@ -784,6 +817,21 @@ dependencies = [ "libc", ] +[[package]] +name = "crc" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + [[package]] name = "crc32fast" version = "1.4.2" @@ -795,24 
+843,20 @@ dependencies = [ [[package]] name = "criterion" -version = "0.5.1" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" +checksum = "3bf7af66b0989381bd0be551bd7cc91912a655a58c6918420c9527b1fd8b4679" dependencies = [ "anes", "cast", "ciborium", "clap", "criterion-plot", - "futures", - "is-terminal", - "itertools 0.10.5", + "itertools 0.13.0", "num-traits", - "once_cell", "oorandom", "regex", "serde", - "serde_derive", "serde_json", "tinytemplate", "tokio", @@ -912,7 +956,7 @@ dependencies = [ "hashbrown 0.14.5", "lock_api", "once_cell", - "parking_lot_core 0.9.10", + "parking_lot_core", ] [[package]] @@ -1121,9 +1165,9 @@ dependencies = [ [[package]] name = "event-listener-strategy" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" dependencies = [ "event-listener", "pin-project-lite", @@ -1164,9 +1208,9 @@ checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" [[package]] name = "flate2" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece" +checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" dependencies = [ "crc32fast", "libz-rs-sys", @@ -1241,9 +1285,9 @@ dependencies = [ [[package]] name = "fs-err" -version = "3.1.0" +version = "3.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f89bda4c2a21204059a977ed3bfe746677dfd137b83c339e702b0ac91d482aa" +checksum = "88d7be93788013f265201256d58f04936a8079ad5dc898743aa20525f503b683" dependencies = [ "autocfg", "tokio", @@ -1456,9 +1500,9 @@ dependencies = [ [[package]] name = "goblin" -version = "0.9.3" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daa0a64d21a7eb230583b4c5f4e23b7e4e57974f96620f42a7e75e08ae66d745" +checksum = "0e961b33649994dcf69303af6b3a332c1228549e604d455d61ec5d2ab5e68d3a" dependencies = [ "log", "plain", @@ -1502,9 +1546,9 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" [[package]] name = "hashbrown" -version = "0.15.3" +version = "0.15.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3" +checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" dependencies = [ "allocator-api2", "equivalent", @@ -1654,22 +1698,26 @@ dependencies = [ "tokio", "tokio-rustls", "tower-service", - "webpki-roots", + "webpki-roots 0.26.8", ] [[package]] name = "hyper-util" -version = "0.1.11" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497bbc33a26fdd4af9ed9c70d63f61cf56a938375fbb32df34db9b1cd6d643f2" +checksum = "dc2fdfdbff08affe55bb779f33b053aa1fe5dd5b54c257343c17edfa55711bdb" dependencies = [ + "base64 0.22.1", "bytes", "futures-channel", + "futures-core", "futures-util", "http", "http-body", "hyper", + "ipnet", "libc", + "percent-encoding", "pin-project-lite", "socket2", "tokio", @@ -1851,12 +1899,12 @@ checksum = "b72ad49b554c1728b1e83254a1b1565aea4161e28dabbfa171fc15fe62299caf" [[package]] name = "indexmap" -version = "2.9.0" +version = "2.10.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" +checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" dependencies = [ "equivalent", - "hashbrown 0.15.3", + "hashbrown 0.15.4", "serde", ] @@ -1869,7 +1917,7 @@ dependencies = [ "console", "number_prefix", "portable-atomic", - "unicode-width 0.2.0", + "unicode-width 0.2.1", "web-time", ] @@ -1894,24 +1942,22 @@ dependencies = [ "similar", ] -[[package]] -name = "instant" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" -dependencies = [ - "cfg-if", - "js-sys", - "wasm-bindgen", - "web-sys", -] - [[package]] name = "ipnet" version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" +[[package]] +name = "iri-string" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "is-terminal" version = "0.4.15" @@ -1970,9 +2016,9 @@ checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "jiff" -version = "0.2.13" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f02000660d30638906021176af16b17498bd0d12813dbfe7b276d8bc7f3c0806" +checksum = "be1f93b8b1eb69c77f24bbb0afdf66f54b632ee39af40ca21c4365a1d7347e49" dependencies = [ "jiff-static", "jiff-tzdb-platform", @@ -1985,9 +2031,9 @@ dependencies = [ [[package]] name = "jiff-static" -version = "0.2.13" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c30758ddd7188629c6713fc45d1188af4f44c90582311d0c8d8c9907f60c48" +checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4" dependencies = [ "proc-macro2", "quote", @@ -2092,14 +2138,14 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags 2.9.1", "libc", - "redox_syscall 0.5.8", + "redox_syscall", ] [[package]] name = "libz-rs-sys" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6489ca9bd760fe9642d7644e827b0c9add07df89857b0416ee15c1cc1a3b8c5a" +checksum = "172a788537a2221661b480fee8dc5f96c580eb34fa88764d3205dc356c7e4221" dependencies = [ "zlib-rs", ] @@ -2144,6 +2190,16 @@ version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" +[[package]] +name = "lzma-rs" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "297e814c836ae64db86b36cf2a557ba54368d03f6afcd7d947c266692f71115e" +dependencies = [ + "byteorder", + "crc", +] + [[package]] name = "lzma-sys" version = "0.1.20" @@ -2168,13 +2224,11 @@ dependencies = [ [[package]] name = "markdown" -version = "0.3.0" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef3aab6a1d529b112695f72beec5ee80e729cb45af58663ec902c8fac764ecdd" +checksum = "a5cab8f2cadc416a82d2e783a1946388b31654d391d1c7d92cc1f03e295b1deb" dependencies = [ - "lazy_static", - "pipeline", - "regex", + "unicode-id", ] [[package]] @@ -2198,9 +2252,9 @@ dependencies = [ [[package]] name = 
"memchr" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" [[package]] name = "memmap2" @@ -2456,9 +2510,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "owo-colors" -version = "4.2.1" +version = "4.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26995317201fa17f3656c36716aed4a7c81743a9634ac4c99c0eeda495db0cec" +checksum = "48dd4f4a2c8405440fd0462561f0e5806bd0f77e86f51c761481bdd4018b545e" [[package]] name = "parking" @@ -2466,17 +2520,6 @@ version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.6", -] - [[package]] name = "parking_lot" version = "0.12.3" @@ -2484,21 +2527,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", - "parking_lot_core 0.9.10", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" -dependencies = [ - "cfg-if", - "instant", - "libc", - "redox_syscall 0.2.16", - "smallvec", - "winapi", + "parking_lot_core", ] [[package]] @@ -2509,7 +2538,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.8", + "redox_syscall", "smallvec", "windows-targets 0.52.6", ] @@ -2585,12 +2614,12 @@ dependencies = [ [[package]] name = "petgraph" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a98c6720655620a521dcc722d0ad66cd8afd5d86e34a89ef691c50b7b24de06" +checksum = "54acf3a685220b533e437e264e4d932cfbdc4cc7ec0cd232ed73c08d03b8a7ca" dependencies = [ "fixedbitset", - "hashbrown 0.15.3", + "hashbrown 0.15.4", "indexmap", "serde", ] @@ -2633,12 +2662,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" -[[package]] -name = "pipeline" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d15b6607fa632996eb8a17c9041cb6071cb75ac057abd45dece578723ea8c7c0" - [[package]] name = "pkg-config" version = "0.3.31" @@ -2803,7 +2826,7 @@ dependencies = [ [[package]] name = "pubgrub" version = "0.3.0" -source = "git+https://github.com/astral-sh/pubgrub?rev=73d6ecf5a4e4eb1c754b8c3255c4d31bdc266fdb#73d6ecf5a4e4eb1c754b8c3255c4d31bdc266fdb" +source = "git+https://github.com/astral-sh/pubgrub?rev=06ec5a5f59ffaeb6cf5079c6cb184467da06c9db#06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" dependencies = [ "indexmap", "log", @@ -2945,15 +2968,6 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b42e27ef78c35d3998403c1d26f3efd9e135d3e5121b0a4845cc5cc27547f4f" -[[package]] -name = "redox_syscall" -version = "0.2.16" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "redox_syscall" version = "0.5.8" @@ -2974,6 +2988,26 @@ dependencies = [ "thiserror 1.0.69", ] +[[package]] +name = "ref-cast" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "reflink-copy" version = "0.1.26" @@ -3041,9 +3075,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.15" +version = "0.12.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb" +checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531" dependencies = [ "async-compression", "base64 0.22.1", @@ -3058,18 +3092,14 @@ dependencies = [ "hyper", "hyper-rustls", "hyper-util", - "ipnet", "js-sys", "log", - "mime", "mime_guess", - "once_cell", "percent-encoding", "pin-project-lite", "quinn", "rustls", "rustls-native-certs", - "rustls-pemfile", "rustls-pki-types", "serde", "serde_json", @@ -3077,24 +3107,22 @@ dependencies = [ "sync_wrapper", "tokio", "tokio-rustls", - "tokio-socks", "tokio-util", "tower", + "tower-http", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots", - "windows-registry 0.4.0", + "webpki-roots 1.0.1", ] [[package]] name = "reqwest-middleware" version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57f17d28a6e6acfe1733fe24bcd30774d13bffa4b8a22535b4c8c98423088d4e" +source = "git+https://github.com/astral-sh/reqwest-middleware?rev=ad8b9d332d1773fde8b4cd008486de5973e0a3f8#ad8b9d332d1773fde8b4cd008486de5973e0a3f8" dependencies = [ "anyhow", "async-trait", @@ -3108,8 +3136,7 @@ dependencies = [ [[package]] name = "reqwest-retry" version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29c73e4195a6bfbcb174b790d9b3407ab90646976c55de58a6515da25d851178" +source = "git+https://github.com/astral-sh/reqwest-middleware?rev=ad8b9d332d1773fde8b4cd008486de5973e0a3f8#ad8b9d332d1773fde8b4cd008486de5973e0a3f8" dependencies = [ "anyhow", "async-trait", @@ -3117,14 +3144,13 @@ dependencies = [ "getrandom 0.2.15", "http", "hyper", - "parking_lot 0.11.2", "reqwest", "reqwest-middleware", "retry-policies", "thiserror 1.0.69", "tokio", "tracing", - "wasm-timer", + "wasmtimer", ] [[package]] @@ -3186,7 +3212,7 @@ checksum = "1e147371c75553e1e2fcdb483944a8540b8438c31426279553b9a8182a9b7b65" dependencies = [ "bytecheck", "bytes", - "hashbrown 0.15.3", + "hashbrown 0.15.4", "indexmap", "munge", "ptr_meta", @@ -3333,15 +3359,6 @@ dependencies = [ "security-framework", ] -[[package]] -name = "rustls-pemfile" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" -dependencies = [ - "rustls-pki-types", -] - [[package]] name = "rustls-pki-types" version = "1.11.0" @@ -3410,11 +3427,12 @@ dependencies = [ [[package]] name = "schemars" -version 
= "0.8.22" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615" +checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" dependencies = [ "dyn-clone", + "ref-cast", "schemars_derive", "serde", "serde_json", @@ -3423,9 +3441,9 @@ dependencies = [ [[package]] name = "schemars_derive" -version = "0.8.22" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d" +checksum = "33d020396d1d138dc19f1165df7545479dcd58d93810dc5d646a16e55abefa80" dependencies = [ "proc-macro2", "quote", @@ -3441,18 +3459,18 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "scroll" -version = "0.12.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ab8598aa408498679922eff7fa985c25d58a90771bd6be794434c5277eab1a6" +checksum = "c1257cd4248b4132760d6524d6dda4e053bc648c9070b960929bf50cfb1e7add" dependencies = [ "scroll_derive", ] [[package]] name = "scroll_derive" -version = "0.12.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f81c2fde025af7e69b1d1420531c8a8811ca898919db177141a85313b1cb932" +checksum = "22fc4f90c27b57691bbaf11d8ecc7cfbfe98a4da6dbe60226115d322aa80c06e" dependencies = [ "proc-macro2", "quote", @@ -3561,9 +3579,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "0.6.8" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" dependencies = [ "serde", ] @@ -3580,6 +3598,19 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_yaml" +version = "0.9.34+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +dependencies = [ + "indexmap", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + [[package]] name = "sha2" version = "0.10.9" @@ -3676,9 +3707,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.15.0" +version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "smawk" @@ -3711,6 +3742,16 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +[[package]] +name = "statrs" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a3fe7c28c6512e766b0874335db33c94ad7b8f9054228ae1c2abd47ce7d335e" +dependencies = [ + "approx", + "num-traits", +] + [[package]] name = "strict-num" version = "0.1.1" @@ -3791,9 +3832,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.101" +version = "2.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" +checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" dependencies = [ "proc-macro2", "quote", @@ -3859,14 +3900,14 @@ version = "0.3.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "96374855068f47402c3121c6eed88d29cb1de8f3ab27090e273e420bdabcf050" dependencies = [ - "parking_lot 0.12.3", + "parking_lot", ] [[package]] name = "tempfile" -version = "3.19.1" +version = "3.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" +checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" dependencies = [ "fastrand", "getrandom 0.3.1", @@ -3926,9 +3967,9 @@ dependencies = [ [[package]] name = "test-log" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7f46083d221181166e5b6f6b1e5f1d499f3a76888826e6cb1d057554157cd0f" +checksum = "1e33b98a582ea0be1168eba097538ee8dd4bbe0f2b01b22ac92ea30054e5be7b" dependencies = [ "test-log-macros", "tracing-subscriber", @@ -3936,9 +3977,9 @@ dependencies = [ [[package]] name = "test-log-macros" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "888d0c3c6db53c0fdab160d2ed5e12ba745383d3e85813f2ea0f2b1475ab553f" +checksum = "451b374529930d7601b1eef8d32bc79ae870b6079b069401709c2a8bf9e75f36" dependencies = [ "proc-macro2", "quote", @@ -3953,7 +3994,7 @@ checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057" dependencies = [ "smawk", "unicode-linebreak", - "unicode-width 0.2.0", + "unicode-width 0.2.1", ] [[package]] @@ -4093,15 +4134,15 @@ source = "git+https://github.com/astral-sh/tl.git?rev=6e25b2ee2513d75385101a8ff9 [[package]] name = "tokio" -version = "1.44.2" +version = "1.45.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48" +checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779" dependencies = [ "backtrace", "bytes", "libc", "mio", - "parking_lot 0.12.3", + "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", @@ -4130,18 +4171,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-socks" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d4770b8024672c1101b3f6733eab95b18007dbe0847a8afe341fcf79e06043f" -dependencies = [ - "either", - "futures-util", - "thiserror 1.0.69", - "tokio", -] - [[package]] name = "tokio-stream" version = "0.1.17" @@ -4170,9 +4199,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.22" +version = "0.8.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05ae329d1f08c4d17a59bed7ff5b5a769d062e64a62d34a3261b219e62cd5aae" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" dependencies = [ "serde", "serde_spanned", @@ -4182,18 +4211,18 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.9" +version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.22.26" +version = "0.22.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ "indexmap", "serde", @@ -4205,9 +4234,9 @@ 
dependencies = [ [[package]] name = "toml_write" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfb942dfe1d8e29a7ee7fcbde5bd2b9a25fb89aa70caea2eba3bee836ff41076" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" [[package]] name = "tower" @@ -4224,6 +4253,24 @@ dependencies = [ "tower-service", ] +[[package]] +name = "tower-http" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +dependencies = [ + "bitflags 2.9.1", + "bytes", + "futures-util", + "http", + "http-body", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + [[package]] name = "tower-layer" version = "0.3.3" @@ -4421,6 +4468,12 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2281c8c1d221438e373249e065ca4989c4c36952c211ff21a0ee91c44a3869e7" +[[package]] +name = "unicode-id" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10103c57044730945224467c09f71a4db0071c123a0648cc3e818913bde6b561" + [[package]] name = "unicode-ident" version = "1.0.16" @@ -4453,9 +4506,15 @@ checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" [[package]] name = "unicode-width" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" +checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" + +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" [[package]] name = "unscanny" @@ -4549,7 +4608,7 @@ dependencies = [ [[package]] name = "uv" -version = "0.7.6" +version = "0.7.20" dependencies = [ "anstream", "anyhow", @@ -4562,20 +4621,21 @@ dependencies = [ "console", "ctrlc", "dotenvy", - "etcetera", + "dunce", "filetime", "flate2", - "fs-err 3.1.0", + "fs-err 3.1.1", "futures", "http", "ignore", + "indexmap", "indicatif", "indoc", "insta", "itertools 0.14.0", "jiff", "miette", - "nix 0.29.0", + "nix 0.30.1", "owo-colors", "petgraph", "predicates", @@ -4598,7 +4658,7 @@ dependencies = [ "tracing-durations-export", "tracing-subscriber", "tracing-tree", - "unicode-width 0.2.0", + "unicode-width 0.2.1", "url", "uv-auth", "uv-build-backend", @@ -4648,6 +4708,7 @@ dependencies = [ "version-ranges", "walkdir", "which", + "whoami", "wiremock", "zip", ] @@ -4673,9 +4734,9 @@ dependencies = [ "test-log", "tokio", "tracing", - "tracing-test", "url", "uv-once-map", + "uv-redacted", "uv-small-str", "uv-static", "uv-warnings", @@ -4696,7 +4757,6 @@ dependencies = [ "uv-configuration", "uv-dispatch", "uv-distribution", - "uv-distribution-filename", "uv-distribution-types", "uv-extract", "uv-install-wheel", @@ -4712,7 +4772,7 @@ dependencies = [ [[package]] name = "uv-build" -version = "0.7.6" +version = "0.7.20" dependencies = [ "anyhow", "uv-build-backend", @@ -4725,11 +4785,12 @@ version = "0.1.0" dependencies = [ "csv", "flate2", - "fs-err 3.1.0", + "fs-err 3.1.1", "globset", "indoc", "insta", "itertools 0.14.0", + "rustc-hash", "schemars", "serde", "sha2", @@ -4761,7 +4822,7 @@ name = "uv-build-frontend" version = "0.0.1" dependencies = [ "anstream", - "fs-err 3.1.0", + "fs-err 3.1.1", "indoc", "insta", 
"itertools 0.14.0", @@ -4775,6 +4836,7 @@ dependencies = [ "tokio", "toml_edit", "tracing", + "uv-cache-key", "uv-configuration", "uv-distribution", "uv-distribution-types", @@ -4795,7 +4857,7 @@ name = "uv-cache" version = "0.0.1" dependencies = [ "clap", - "fs-err 3.1.0", + "fs-err 3.1.1", "nanoid", "rmp-serde", "rustc-hash", @@ -4803,7 +4865,6 @@ dependencies = [ "serde", "tempfile", "tracing", - "url", "uv-cache-info", "uv-cache-key", "uv-dirs", @@ -4811,6 +4872,7 @@ dependencies = [ "uv-fs", "uv-normalize", "uv-pypi-types", + "uv-redacted", "uv-static", "walkdir", ] @@ -4819,7 +4881,7 @@ dependencies = [ name = "uv-cache-info" version = "0.0.1" dependencies = [ - "fs-err 3.1.0", + "fs-err 3.1.1", "globwalk", "schemars", "serde", @@ -4838,6 +4900,7 @@ dependencies = [ "percent-encoding", "seahash", "url", + "uv-redacted", ] [[package]] @@ -4848,7 +4911,7 @@ dependencies = [ "anyhow", "clap", "clap_complete_command", - "fs-err 3.1.0", + "fs-err 3.1.1", "insta", "serde", "url", @@ -4860,12 +4923,14 @@ dependencies = [ "uv-pep508", "uv-pypi-types", "uv-python", + "uv-redacted", "uv-resolver", "uv-settings", "uv-static", "uv-torch", "uv-version", "uv-warnings", + "uv-workspace", ] [[package]] @@ -4877,7 +4942,7 @@ dependencies = [ "async_http_range_reader", "async_zip", "bytecheck", - "fs-err 3.1.0", + "fs-err 3.1.1", "futures", "html-escape", "http", @@ -4922,6 +4987,7 @@ dependencies = [ "uv-torch", "uv-version", "uv-warnings", + "wiremock", ] [[package]] @@ -4931,7 +4997,7 @@ dependencies = [ "anyhow", "clap", "either", - "fs-err 3.1.0", + "fs-err 3.1.1", "rayon", "rustc-hash", "same-file", @@ -4969,16 +5035,18 @@ dependencies = [ "anstream", "anyhow", "clap", - "fs-err 3.1.0", + "fs-err 3.1.1", "itertools 0.14.0", "markdown", "owo-colors", "poloto", "pretty_assertions", + "reqwest", "resvg", "schemars", "serde", "serde_json", + "serde_yaml", "tagu", "textwrap", "tokio", @@ -5011,7 +5079,7 @@ version = "0.0.1" dependencies = [ "assert_fs", "etcetera", - "fs-err 3.1.0", + "fs-err 3.1.1", "indoc", "tracing", "uv-static", @@ -5054,7 +5122,7 @@ version = "0.0.1" dependencies = [ "anyhow", "either", - "fs-err 3.1.0", + "fs-err 3.1.1", "futures", "indoc", "insta", @@ -5089,6 +5157,7 @@ dependencies = [ "uv-pep508", "uv-platform-tags", "uv-pypi-types", + "uv-redacted", "uv-types", "uv-workspace", "walkdir", @@ -5105,7 +5174,6 @@ dependencies = [ "serde", "smallvec", "thiserror 2.0.12", - "url", "uv-cache-key", "uv-normalize", "uv-pep440", @@ -5119,7 +5187,7 @@ version = "0.0.1" dependencies = [ "arcstr", "bitflags 2.9.1", - "fs-err 3.1.0", + "fs-err 3.1.1", "http", "itertools 0.14.0", "jiff", @@ -5146,7 +5214,9 @@ dependencies = [ "uv-pep508", "uv-platform-tags", "uv-pypi-types", + "uv-redacted", "uv-small-str", + "uv-warnings", "version-ranges", ] @@ -5158,7 +5228,7 @@ dependencies = [ "async-compression", "async_zip", "blake2", - "fs-err 3.1.0", + "fs-err 3.1.1", "futures", "md-5", "rayon", @@ -5184,7 +5254,7 @@ dependencies = [ "dunce", "either", "encoding_rs_io", - "fs-err 3.1.0", + "fs-err 3.1.1", "fs2", "junction", "path-slash", @@ -5196,7 +5266,8 @@ dependencies = [ "tempfile", "tokio", "tracing", - "winsafe 0.0.23", + "windows 0.59.0", + "windows-core 0.59.0", ] [[package]] @@ -5206,7 +5277,7 @@ dependencies = [ "anyhow", "cargo-util", "dashmap", - "fs-err 3.1.0", + "fs-err 3.1.1", "reqwest", "reqwest-middleware", "thiserror 2.0.12", @@ -5239,7 +5310,7 @@ name = "uv-globfilter" version = "0.1.0" dependencies = [ "anstream", - "fs-err 3.1.0", + "fs-err 3.1.1", "globset", "insta", 
"owo-colors", @@ -5261,7 +5332,7 @@ dependencies = [ "configparser", "csv", "data-encoding", - "fs-err 3.1.0", + "fs-err 3.1.1", "indoc", "mailparse", "pathdiff", @@ -5295,7 +5366,7 @@ version = "0.0.1" dependencies = [ "anyhow", "async-channel", - "fs-err 3.1.0", + "fs-err 3.1.1", "futures", "rayon", "rustc-hash", @@ -5320,6 +5391,7 @@ dependencies = [ "uv-platform-tags", "uv-pypi-types", "uv-python", + "uv-redacted", "uv-static", "uv-types", "uv-warnings", @@ -5341,7 +5413,7 @@ name = "uv-metadata" version = "0.1.0" dependencies = [ "async_zip", - "fs-err 3.1.0", + "fs-err 3.1.1", "futures", "thiserror 2.0.12", "tokio", @@ -5387,7 +5459,7 @@ dependencies = [ "rkyv", "serde", "tracing", - "unicode-width 0.2.0", + "unicode-width 0.2.1", "unscanny", "version-ranges", ] @@ -5410,11 +5482,12 @@ dependencies = [ "thiserror 2.0.12", "tracing", "tracing-test", - "unicode-width 0.2.0", + "unicode-width 0.2.1", "url", "uv-fs", "uv-normalize", "uv-pep440", + "uv-redacted", "version-ranges", ] @@ -5446,7 +5519,7 @@ dependencies = [ "astral-tokio-tar", "async-compression", "base64 0.22.1", - "fs-err 3.1.0", + "fs-err 3.1.1", "futures", "glob", "insta", @@ -5472,6 +5545,7 @@ dependencies = [ "uv-fs", "uv-metadata", "uv-pypi-types", + "uv-redacted", "uv-static", "uv-warnings", ] @@ -5481,7 +5555,7 @@ name = "uv-pypi-types" version = "0.0.1" dependencies = [ "anyhow", - "hashbrown 0.15.3", + "hashbrown 0.15.4", "indexmap", "insta", "itertools 0.14.0", @@ -5503,6 +5577,7 @@ dependencies = [ "uv-normalize", "uv-pep440", "uv-pep508", + "uv-redacted", "uv-small-str", ] @@ -5514,15 +5589,18 @@ dependencies = [ "assert_fs", "clap", "configparser", - "fs-err 3.1.0", + "dunce", + "fs-err 3.1.1", "futures", "goblin", + "indexmap", "indoc", "insta", "itertools 0.14.0", "once_cell", "owo-colors", "procfs", + "ref-cast", "regex", "reqwest", "reqwest-middleware", @@ -5547,6 +5625,7 @@ dependencies = [ "uv-cache-info", "uv-cache-key", "uv-client", + "uv-configuration", "uv-dirs", "uv-distribution-filename", "uv-extract", @@ -5556,13 +5635,14 @@ dependencies = [ "uv-pep508", "uv-platform-tags", "uv-pypi-types", + "uv-redacted", "uv-state", "uv-static", "uv-trampoline-builder", "uv-warnings", "which", - "windows-registry 0.5.2", - "windows-result 0.3.3", + "windows-registry", + "windows-result 0.3.4", "windows-sys 0.59.0", ] @@ -5570,6 +5650,9 @@ dependencies = [ name = "uv-redacted" version = "0.0.1" dependencies = [ + "ref-cast", + "schemars", + "serde", "url", ] @@ -5580,7 +5663,7 @@ dependencies = [ "anyhow", "configparser", "console", - "fs-err 3.1.0", + "fs-err 3.1.1", "futures", "rustc-hash", "serde", @@ -5600,6 +5683,7 @@ dependencies = [ "uv-normalize", "uv-pep508", "uv-pypi-types", + "uv-redacted", "uv-requirements-txt", "uv-resolver", "uv-types", @@ -5613,7 +5697,7 @@ version = "0.0.1" dependencies = [ "anyhow", "assert_fs", - "fs-err 3.1.0", + "fs-err 3.1.1", "indoc", "insta", "itertools 0.14.0", @@ -5635,6 +5719,7 @@ dependencies = [ "uv-normalize", "uv-pep508", "uv-pypi-types", + "uv-redacted", "uv-warnings", ] @@ -5647,7 +5732,7 @@ dependencies = [ "dashmap", "either", "futures", - "hashbrown 0.15.3", + "hashbrown 0.15.4", "indexmap", "insta", "itertools 0.14.0", @@ -5686,6 +5771,7 @@ dependencies = [ "uv-platform-tags", "uv-pypi-types", "uv-python", + "uv-redacted", "uv-requirements-txt", "uv-small-str", "uv-static", @@ -5699,9 +5785,10 @@ dependencies = [ name = "uv-scripts" version = "0.0.1" dependencies = [ - "fs-err 3.1.0", + "fs-err 3.1.1", "indoc", "memchr", + "regex", "serde", "thiserror 2.0.12", 
"toml", @@ -5709,7 +5796,9 @@ dependencies = [ "uv-pep440", "uv-pep508", "uv-pypi-types", + "uv-redacted", "uv-settings", + "uv-warnings", "uv-workspace", ] @@ -5718,7 +5807,7 @@ name = "uv-settings" version = "0.0.1" dependencies = [ "clap", - "fs-err 3.1.0", + "fs-err 3.1.1", "schemars", "serde", "textwrap", @@ -5738,10 +5827,12 @@ dependencies = [ "uv-pep508", "uv-pypi-types", "uv-python", + "uv-redacted", "uv-resolver", "uv-static", "uv-torch", "uv-warnings", + "uv-workspace", ] [[package]] @@ -5754,8 +5845,8 @@ dependencies = [ "tracing", "uv-fs", "uv-static", - "windows-registry 0.5.2", - "windows-result 0.3.3", + "windows-registry", + "windows-result 0.3.4", "windows-sys 0.59.0", ] @@ -5773,7 +5864,7 @@ dependencies = [ name = "uv-state" version = "0.0.1" dependencies = [ - "fs-err 3.1.0", + "fs-err 3.1.1", "tempfile", "uv-dirs", ] @@ -5789,7 +5880,7 @@ dependencies = [ name = "uv-tool" version = "0.0.1" dependencies = [ - "fs-err 3.1.0", + "fs-err 3.1.1", "pathdiff", "self-replace", "serde", @@ -5798,6 +5889,7 @@ dependencies = [ "toml_edit", "tracing", "uv-cache", + "uv-configuration", "uv-dirs", "uv-distribution-types", "uv-fs", @@ -5819,7 +5911,7 @@ version = "0.1.0" dependencies = [ "clap", "either", - "fs-err 3.1.0", + "fs-err 3.1.1", "schemars", "serde", "thiserror 2.0.12", @@ -5839,7 +5931,7 @@ dependencies = [ "anyhow", "assert_cmd", "assert_fs", - "fs-err 3.1.0", + "fs-err 3.1.1", "thiserror 2.0.12", "uv-fs", "which", @@ -5851,9 +5943,9 @@ name = "uv-types" version = "0.0.1" dependencies = [ "anyhow", + "dashmap", "rustc-hash", "thiserror 2.0.12", - "url", "uv-cache", "uv-configuration", "uv-distribution-filename", @@ -5865,23 +5957,25 @@ dependencies = [ "uv-pep508", "uv-pypi-types", "uv-python", + "uv-redacted", "uv-workspace", ] [[package]] name = "uv-version" -version = "0.7.6" +version = "0.7.20" [[package]] name = "uv-virtualenv" version = "0.0.4" dependencies = [ - "fs-err 3.1.0", + "fs-err 3.1.1", "itertools 0.14.0", "pathdiff", "self-replace", "thiserror 2.0.12", "tracing", + "uv-configuration", "uv-fs", "uv-pypi-types", "uv-python", @@ -5904,7 +5998,8 @@ version = "0.0.1" dependencies = [ "anyhow", "assert_fs", - "fs-err 3.1.0", + "clap", + "fs-err 3.1.1", "glob", "insta", "itertools 0.14.0", @@ -5919,9 +6014,9 @@ dependencies = [ "toml", "toml_edit", "tracing", - "url", "uv-build-backend", "uv-cache-key", + "uv-configuration", "uv-distribution-types", "uv-fs", "uv-git-types", @@ -5931,6 +6026,7 @@ dependencies = [ "uv-pep440", "uv-pep508", "uv-pypi-types", + "uv-redacted", "uv-static", "uv-warnings", ] @@ -5944,7 +6040,7 @@ checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" [[package]] name = "version-ranges" version = "0.1.1" -source = "git+https://github.com/astral-sh/pubgrub?rev=73d6ecf5a4e4eb1c754b8c3255c4d31bdc266fdb#73d6ecf5a4e4eb1c754b8c3255c4d31bdc266fdb" +source = "git+https://github.com/astral-sh/pubgrub?rev=06ec5a5f59ffaeb6cf5079c6cb184467da06c9db#06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" dependencies = [ "smallvec", ] @@ -5998,6 +6094,12 @@ dependencies = [ "wit-bindgen-rt", ] +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + [[package]] name = "wasm-bindgen" version = "0.2.100" @@ -6083,18 +6185,17 @@ dependencies = [ ] [[package]] -name = "wasm-timer" -version = "0.2.5" +name = "wasmtimer" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "be0ecb0db480561e9a7642b5d3e4187c128914e58aa84330b9493e3eb68c5e7f" +checksum = "0048ad49a55b9deb3953841fa1fc5858f0efbcb7a18868c899a360269fac1b23" dependencies = [ "futures", "js-sys", - "parking_lot 0.11.2", + "parking_lot", "pin-utils", + "slab", "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", ] [[package]] @@ -6126,6 +6227,15 @@ dependencies = [ "rustls-pki-types", ] +[[package]] +name = "webpki-roots" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8782dd5a41a24eed3a4f40b606249b3e236ca61adf1f25ea4d45c73de122b502" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "weezl" version = "0.1.8" @@ -6134,15 +6244,25 @@ checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082" [[package]] name = "which" -version = "7.0.3" +version = "8.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d643ce3fd3e5b54854602a080f34fb10ab75e0b813ee32d00ca2b44fa74762" +checksum = "d3fabb953106c3c8eea8306e4393700d7657561cb43122571b172bbfb7c7ba1d" dependencies = [ - "either", "env_home", "regex", "rustix 1.0.7", - "winsafe 0.0.19", + "winsafe", +] + +[[package]] +name = "whoami" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6994d13118ab492c3c80c1f81928718159254c53c472bf9ce36f8dae4add02a7" +dependencies = [ + "redox_syscall", + "wasite", + "web-sys", ] [[package]] @@ -6192,6 +6312,16 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f919aee0a93304be7f62e8e5027811bbba96bcb1de84d6618be56e43f8a32a1" +dependencies = [ + "windows-core 0.59.0", + "windows-targets 0.53.0", +] + [[package]] name = "windows" version = "0.61.1" @@ -6226,6 +6356,19 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-core" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "810ce18ed2112484b0d4e15d022e5f598113e220c53e373fb31e67e21670c1ce" +dependencies = [ + "windows-implement 0.59.0", + "windows-interface 0.59.1", + "windows-result 0.3.4", + "windows-strings 0.3.1", + "windows-targets 0.53.0", +] + [[package]] name = "windows-core" version = "0.61.0" @@ -6235,8 +6378,8 @@ dependencies = [ "windows-implement 0.60.0", "windows-interface 0.59.1", "windows-link", - "windows-result 0.3.3", - "windows-strings 0.4.1", + "windows-result 0.3.4", + "windows-strings 0.4.2", ] [[package]] @@ -6260,6 +6403,17 @@ dependencies = [ "syn", ] +[[package]] +name = "windows-implement" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83577b051e2f49a058c308f17f273b570a6a758386fc291b5f6a934dd84e48c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "windows-implement" version = "0.60.0" @@ -6295,9 +6449,9 @@ dependencies = [ [[package]] name = "windows-link" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" [[package]] name = "windows-numerics" @@ -6311,24 +6465,13 @@ dependencies = [ [[package]] name = "windows-registry" -version = "0.4.0" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3" -dependencies = [ - "windows-result 0.3.3", - "windows-strings 0.3.1", - "windows-targets 0.53.0", -] - -[[package]] -name = "windows-registry" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3bab093bdd303a1240bb99b8aba8ea8a69ee19d34c9e2ef9594e708a4878820" +checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" dependencies = [ "windows-link", - "windows-result 0.3.3", - "windows-strings 0.4.1", + "windows-result 0.3.4", + "windows-strings 0.4.2", ] [[package]] @@ -6342,9 +6485,9 @@ dependencies = [ [[package]] name = "windows-result" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b895b5356fc36103d0f64dd1e94dfa7ac5633f1c9dd6e80fe9ec4adef69e09d" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" dependencies = [ "windows-link", ] @@ -6360,9 +6503,9 @@ dependencies = [ [[package]] name = "windows-strings" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a7ab927b2637c19b3dbe0965e75d8f2d30bdd697a1516191cad2ec4df8fb28a" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" dependencies = [ "windows-link", ] @@ -6581,9 +6724,9 @@ checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" [[package]] name = "winnow" -version = "0.7.7" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6cb8234a863ea0e8cd7284fcdd4f145233eb00fee02bbdd9861aec44e6477bc5" +checksum = "74c7b26e3480b707944fc872477815d29a8e429d2f93a1ce000f5fa84a15cbcd" dependencies = [ "memchr", ] @@ -6594,17 +6737,11 @@ version = "0.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" -[[package]] -name = "winsafe" -version = "0.0.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a096fc628cb2c601e13c401ca0c354806424a7f5716000d69b76044eb8e624b9" - [[package]] name = "wiremock" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "101681b74cd87b5899e87bcf5a64e83334dd313fcd3053ea72e6dba18928e301" +checksum = "a2b8b99d4cdbf36b239a9532e31fe4fb8acc38d1897c1761e161550a7dc78e6a" dependencies = [ "assert-json-diff", "async-trait", @@ -6778,21 +6915,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "84e9a772a54b54236b9b744aaaf8d7be01b4d6e99725523cb82cb32d1c81b1d7" dependencies = [ "arbitrary", + "bzip2 0.5.0", "crc32fast", "crossbeam-utils", "displaydoc", "flate2", "indexmap", + "lzma-rs", "memchr", "thiserror 2.0.12", + "xz2", "zopfli", + "zstd", ] [[package]] name = "zlib-rs" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "868b928d7949e09af2f6086dfc1e01936064cc7a819253bce650d4e2a2d63ba8" +checksum = "626bd9fa9734751fc50d6060752170984d7053f5a39061f524cda68023d4db8a" [[package]] name = "zopfli" diff --git a/Cargo.toml b/Cargo.toml index cc6e4a7b1..ecdc11701 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,7 +12,7 @@ resolver = "2" [workspace.package] edition = "2024" -rust-version = "1.85" +rust-version = "1.86" homepage = "https://pypi.org/project/uv/" documentation = "https://pypi.org/project/uv/" repository = "https://github.com/astral-sh/uv" @@ -80,7 +80,7 @@ 
async-channel = { version = "2.3.1" } async-compression = { version = "0.4.12", features = ["bzip2", "gzip", "xz", "zstd"] } async-trait = { version = "0.1.82" } async_http_range_reader = { version = "0.9.1" } -async_zip = { git = "https://github.com/charliermarsh/rs-async-zip", rev = "c909fda63fcafe4af496a07bfda28a5aae97e58d", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] } +async_zip = { git = "https://github.com/astral-sh/rs-async-zip", rev = "c909fda63fcafe4af496a07bfda28a5aae97e58d", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] } axoupdater = { version = "0.9.0", default-features = false } backon = { version = "1.3.0" } base64 = { version = "0.22.1" } @@ -109,7 +109,7 @@ futures = { version = "0.3.30" } glob = { version = "0.3.1" } globset = { version = "0.4.15" } globwalk = { version = "0.9.1" } -goblin = { version = "0.9.0", default-features = false, features = ["std", "elf32", "elf64", "endian_fd"] } +goblin = { version = "0.10.0", default-features = false, features = ["std", "elf32", "elf64", "endian_fd"] } hashbrown = { version = "0.15.1" } hex = { version = "0.4.3" } home = { version = "0.5.9" } @@ -126,7 +126,7 @@ md-5 = { version = "0.10.6" } memchr = { version = "2.7.4" } miette = { version = "7.2.0", features = ["fancy-no-backtrace"] } nanoid = { version = "0.4.0" } -nix = { version = "0.29.0", features = ["signal"] } +nix = { version = "0.30.0", features = ["signal"] } once_cell = { version = "1.20.2" } owo-colors = { version = "4.1.0" } path-slash = { version = "0.2.1" } @@ -135,22 +135,23 @@ percent-encoding = { version = "2.3.1" } petgraph = { version = "0.8.0" } proc-macro2 = { version = "1.0.86" } procfs = { version = "0.17.0", default-features = false, features = ["flate2"] } -pubgrub = { git = "https://github.com/astral-sh/pubgrub", rev = "73d6ecf5a4e4eb1c754b8c3255c4d31bdc266fdb" } +pubgrub = { git = "https://github.com/astral-sh/pubgrub", rev = "06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" } quote = { version = "1.0.37" } rayon = { version = "1.10.0" } +ref-cast = { version = "1.0.24" } reflink-copy = { version = "0.1.19" } regex = { version = "1.10.6" } regex-automata = { version = "0.4.8", default-features = false, features = ["dfa-build", "dfa-search", "perf", "std", "syntax"] } -reqwest = { version = "0.12.7", default-features = false, features = ["json", "gzip", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] } -reqwest-middleware = { version = "0.4.0", features = ["multipart"] } -reqwest-retry = { version = "0.7.0" } +reqwest = { version = "0.12.22", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] } +reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8", features = ["multipart"] } +reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" } rkyv = { version = "0.8.8", features = ["bytecheck"] } rmp-serde = { version = "1.3.0" } rust-netrc = { version = "0.1.2" } rustc-hash = { version = "2.0.0" } rustix = { version = "1.0.0", default-features = false, features = ["fs", "std"] } same-file = { version = "1.0.6" } -schemars = { version = "0.8.21", features = ["url"] } +schemars = { version = "1.0.0", features = ["url2"] } seahash = { version = "4.1.0" } self-replace = { version = "1.5.0" } serde = { version = "1.0.210", features = 
["derive", "rc"] } @@ -180,16 +181,17 @@ tracing-tree = { version = "0.4.0" } unicode-width = { version = "0.2.0" } unscanny = { version = "0.1.0" } url = { version = "2.5.2", features = ["serde"] } -version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "73d6ecf5a4e4eb1c754b8c3255c4d31bdc266fdb" } +version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" } walkdir = { version = "2.5.0" } -which = { version = "7.0.0", features = ["regex"] } +which = { version = "8.0.0", features = ["regex"] } +windows = { version = "0.59.0", features = ["Win32_Storage_FileSystem"] } +windows-core = { version = "0.59.0" } windows-registry = { version = "0.5.0" } windows-result = { version = "0.3.0" } windows-sys = { version = "0.59.0", features = ["Win32_Foundation", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Ioctl", "Win32_System_IO", "Win32_System_Registry"] } -winsafe = { version = "0.0.23", features = ["kernel"] } -wiremock = { version = "0.6.2" } +wiremock = { version = "0.6.4" } xz2 = { version = "0.1.7" } -zip = { version = "2.2.3", default-features = false, features = ["deflate"] } +zip = { version = "2.2.3", default-features = false, features = ["deflate", "zstd", "bzip2", "lzma", "xz"] } [workspace.metadata.cargo-shear] ignored = ["flate2", "xz2"] @@ -212,6 +214,7 @@ missing_panics_doc = "allow" module_name_repetitions = "allow" must_use_candidate = "allow" similar_names = "allow" +struct_excessive_bools = "allow" too_many_arguments = "allow" too_many_lines = "allow" used_underscore_binding = "allow" @@ -294,76 +297,6 @@ codegen-units = 1 [profile.dist] inherits = "release" -# Config for 'dist' -[workspace.metadata.dist] -# The preferred dist version to use in CI (Cargo.toml SemVer syntax) -cargo-dist-version = "0.28.4" -# make a package being included in our releases opt-in instead of opt-out -dist = false -# CI backends to support -ci = "github" -# The installers to generate for each app -installers = ["shell", "powershell"] -# The archive format to use for windows builds (defaults .zip) -windows-archive = ".zip" -# The archive format to use for non-windows builds (defaults .tar.xz) -unix-archive = ".tar.gz" -# Target platforms to build apps for (Rust target-triple syntax) -targets = [ - "aarch64-apple-darwin", - "aarch64-pc-windows-msvc", - "aarch64-unknown-linux-gnu", - "aarch64-unknown-linux-musl", - "arm-unknown-linux-musleabihf", - "armv7-unknown-linux-gnueabihf", - "armv7-unknown-linux-musleabihf", - "i686-pc-windows-msvc", - "i686-unknown-linux-gnu", - "i686-unknown-linux-musl", - "powerpc64-unknown-linux-gnu", - "powerpc64le-unknown-linux-gnu", - "s390x-unknown-linux-gnu", - "x86_64-apple-darwin", - "x86_64-pc-windows-msvc", - "x86_64-unknown-linux-gnu", - "x86_64-unknown-linux-musl", -] -# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true) -auto-includes = false -# Whether dist should create a Github Release or use an existing draft -create-release = true -# Which actions to run on pull requests -pr-run-mode = "plan" -# Whether CI should trigger releases with dispatches instead of tag pushes -dispatch-releases = true -# Which phase dist should use to create the GitHub release -github-release = "announce" -# Whether CI should include auto-generated code to build local artifacts -build-local-artifacts = false -# Local artifacts jobs to run in CI -local-artifacts-jobs = ["./build-binaries", "./build-docker"] -# Publish jobs to run in CI -publish-jobs = 
["./publish-pypi"] -# Post-announce jobs to run in CI -post-announce-jobs = ["./publish-docs"] -# Custom permissions for GitHub Jobs -github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read", id-token = "write", attestations = "write" } } -# Whether to install an updater program -install-updater = false -# Path that installers should place binaries in -install-path = ["$XDG_BIN_HOME/", "$XDG_DATA_HOME/../bin", "~/.local/bin"] - -[workspace.metadata.dist.github-custom-runners] -global = "depot-ubuntu-latest-4" - -[workspace.metadata.dist.min-glibc-version] -# Override glibc version for specific target triplets. -aarch64-unknown-linux-gnu = "2.28" -# Override all remaining glibc versions. -"*" = "2.17" - -[workspace.metadata.dist.github-action-commits] -"actions/checkout" = "11bd71901bbe5b1630ceea73d27597364c9af683" # v4 -"actions/upload-artifact" = "6027e3dd177782cd8ab9af838c04fd81a07f1d47" # v4.6.2 -"actions/download-artifact" = "d3f86a106a0bac45b974a628896c90dbdf5c8093" # v4.3.0 -"actions/attest-build-provenance" = "c074443f1aee8d4aeeae555aebba3282517141b2" #v2.2.3 +[patch.crates-io] +reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" } +reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" } diff --git a/README.md b/README.md index 0a66420d9..405095461 100644 --- a/README.md +++ b/README.md @@ -284,6 +284,16 @@ We are passionate about supporting contributors of all levels of experience and you get involved in the project. See the [contributing guide](https://github.com/astral-sh/uv/blob/main/CONTRIBUTING.md) to get started. +## FAQ + +#### How do you pronounce uv? + +It's pronounced as "you - vee" ([`/juː viː/`](https://en.wikipedia.org/wiki/Help:IPA/English#Key)) + +#### How should I stylize uv? + +Just "uv", please. See the [style guide](./STYLE.md#styling-uv) for details. + ## Acknowledgements uv's dependency resolver uses [PubGrub](https://github.com/pubgrub-rs/pubgrub) under the hood. We're diff --git a/changelogs/0.4.x.md b/changelogs/0.4.x.md index 335780fef..e5f996f69 100644 --- a/changelogs/0.4.x.md +++ b/changelogs/0.4.x.md @@ -960,7 +960,7 @@ argument (or the `UV_INDEX` environment variable); to replace the default index These changes are entirely backwards-compatible with the deprecated `--index-url` and `--extra-index-url` options, which continue to work as before. -See the [Index](https://docs.astral.sh/uv/configuration/indexes/) documentation for more. +See the [Index](https://docs.astral.sh/uv/concepts/indexes/) documentation for more. ### Enhancements diff --git a/clippy.toml b/clippy.toml index 191195e33..1151d773d 100644 --- a/clippy.toml +++ b/clippy.toml @@ -6,6 +6,8 @@ doc-valid-idents = [ "GraalPy", "ReFS", "PyTorch", + "ROCm", + "XPU", ".." 
# Include the defaults ] @@ -35,7 +37,7 @@ disallowed-methods = [ "std::fs::soft_link", "std::fs::symlink_metadata", "std::fs::write", - "std::os::unix::fs::symlink", - "std::os::windows::fs::symlink_dir", - "std::os::windows::fs::symlink_file", + { path = "std::os::unix::fs::symlink", allow-invalid = true }, + { path = "std::os::windows::fs::symlink_dir", allow-invalid = true }, + { path = "std::os::windows::fs::symlink_file", allow-invalid = true }, ] diff --git a/crates/uv-auth/Cargo.toml b/crates/uv-auth/Cargo.toml index 2717254d9..e63fb1a50 100644 --- a/crates/uv-auth/Cargo.toml +++ b/crates/uv-auth/Cargo.toml @@ -11,6 +11,7 @@ workspace = true [dependencies] uv-once-map = { workspace = true } +uv-redacted = { workspace = true } uv-small-str = { workspace = true } uv-static = { workspace = true } uv-warnings = { workspace = true } @@ -36,5 +37,4 @@ insta = { version = "1.40.0" } tempfile = { workspace = true } test-log = { version = "0.2.16", features = ["trace"], default-features = false } tokio = { workspace = true } -tracing-test = { workspace = true } wiremock = { workspace = true } diff --git a/crates/uv-auth/src/cache.rs b/crates/uv-auth/src/cache.rs index 5c57c8c18..274efab60 100644 --- a/crates/uv-auth/src/cache.rs +++ b/crates/uv-auth/src/cache.rs @@ -9,6 +9,7 @@ use tracing::trace; use url::Url; use uv_once_map::OnceMap; +use uv_redacted::DisplaySafeUrl; use crate::Realm; use crate::credentials::{Credentials, Username}; @@ -18,7 +19,7 @@ type FxOnceMap = OnceMap>; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub(crate) enum FetchUrl { /// A full index URL - Index(Url), + Index(DisplaySafeUrl), /// A realm URL Realm(Realm), } diff --git a/crates/uv-auth/src/credentials.rs b/crates/uv-auth/src/credentials.rs index ed27334f1..68d98b2e3 100644 --- a/crates/uv-auth/src/credentials.rs +++ b/crates/uv-auth/src/credentials.rs @@ -3,6 +3,7 @@ use base64::read::DecoderReader; use base64::write::EncoderWriter; use std::borrow::Cow; use std::fmt; +use uv_redacted::DisplaySafeUrl; use netrc::Netrc; use reqwest::Request; @@ -141,7 +142,11 @@ impl Credentials { /// Return [`Credentials`] for a [`Url`] from a [`Netrc`] file, if any. /// /// If a username is provided, it must match the login in the netrc file or [`None`] is returned. - pub(crate) fn from_netrc(netrc: &Netrc, url: &Url, username: Option<&str>) -> Option { + pub(crate) fn from_netrc( + netrc: &Netrc, + url: &DisplaySafeUrl, + username: Option<&str>, + ) -> Option { let host = url.host_str()?; let entry = netrc .hosts @@ -299,7 +304,7 @@ impl Credentials { /// /// Any existing credentials will be overridden. #[must_use] - pub fn apply(&self, mut url: Url) -> Url { + pub fn apply(&self, mut url: DisplaySafeUrl) -> DisplaySafeUrl { if let Some(username) = self.username() { let _ = url.set_username(username); } diff --git a/crates/uv-auth/src/index.rs b/crates/uv-auth/src/index.rs index 9419a9a22..b71bc9a62 100644 --- a/crates/uv-auth/src/index.rs +++ b/crates/uv-auth/src/index.rs @@ -2,6 +2,7 @@ use std::fmt::{self, Display, Formatter}; use rustc_hash::FxHashSet; use url::Url; +use uv_redacted::DisplaySafeUrl; /// When to use authentication. #[derive( @@ -53,10 +54,10 @@ impl Display for AuthPolicy { // could potentially make sense for a future refactor. #[derive(Debug, Clone, Hash, Eq, PartialEq)] pub struct Index { - pub url: Url, + pub url: DisplaySafeUrl, /// The root endpoint where authentication is applied. /// For PEP 503 endpoints, this excludes `/simple`. 
- pub root_url: Url, + pub root_url: DisplaySafeUrl, pub auth_policy: AuthPolicy, } @@ -85,7 +86,7 @@ impl Indexes { Self(FxHashSet::default()) } - /// Create a new [`AuthIndexUrls`] from an iterator of [`AuthIndexUrl`]s. + /// Create a new [`Indexes`] instance from an iterator of [`Index`]s. pub fn from_indexes(urls: impl IntoIterator) -> Self { let mut index_urls = Self::new(); for url in urls { @@ -95,7 +96,7 @@ impl Indexes { } /// Get the index URL prefix for a URL if one exists. - pub fn index_url_for(&self, url: &Url) -> Option<&Url> { + pub fn index_url_for(&self, url: &Url) -> Option<&DisplaySafeUrl> { self.find_prefix_index(url).map(|index| &index.url) } diff --git a/crates/uv-auth/src/keyring.rs b/crates/uv-auth/src/keyring.rs index 842f2853a..41b92114a 100644 --- a/crates/uv-auth/src/keyring.rs +++ b/crates/uv-auth/src/keyring.rs @@ -1,7 +1,7 @@ use std::{io::Write, process::Stdio}; use tokio::process::Command; use tracing::{instrument, trace, warn}; -use url::Url; +use uv_redacted::DisplaySafeUrl; use uv_warnings::warn_user_once; use crate::credentials::Credentials; @@ -36,7 +36,7 @@ impl KeyringProvider { /// Returns [`None`] if no password was found for the username or if any errors /// are encountered in the keyring backend. #[instrument(skip_all, fields(url = % url.to_string(), username))] - pub async fn fetch(&self, url: &Url, username: Option<&str>) -> Option { + pub async fn fetch(&self, url: &DisplaySafeUrl, username: Option<&str>) -> Option { // Validate the request debug_assert!( url.host_str().is_some(), @@ -217,15 +217,18 @@ impl KeyringProvider { mod tests { use super::*; use futures::FutureExt; + use url::Url; #[tokio::test] async fn fetch_url_no_host() { let url = Url::parse("file:/etc/bin/").unwrap(); let keyring = KeyringProvider::empty(); // Panics due to debug assertion; returns `None` in production - let result = std::panic::AssertUnwindSafe(keyring.fetch(&url, Some("user"))) - .catch_unwind() - .await; + let result = std::panic::AssertUnwindSafe( + keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some("user")), + ) + .catch_unwind() + .await; assert!(result.is_err()); } @@ -234,9 +237,11 @@ mod tests { let url = Url::parse("https://user:password@example.com").unwrap(); let keyring = KeyringProvider::empty(); // Panics due to debug assertion; returns `None` in production - let result = std::panic::AssertUnwindSafe(keyring.fetch(&url, Some(url.username()))) - .catch_unwind() - .await; + let result = std::panic::AssertUnwindSafe( + keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username())), + ) + .catch_unwind() + .await; assert!(result.is_err()); } @@ -245,17 +250,20 @@ mod tests { let url = Url::parse("https://example.com").unwrap(); let keyring = KeyringProvider::empty(); // Panics due to debug assertion; returns `None` in production - let result = std::panic::AssertUnwindSafe(keyring.fetch(&url, Some(url.username()))) - .catch_unwind() - .await; + let result = std::panic::AssertUnwindSafe( + keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username())), + ) + .catch_unwind() + .await; assert!(result.is_err()); } #[tokio::test] async fn fetch_url_no_auth() { let url = Url::parse("https://example.com").unwrap(); + let url = DisplaySafeUrl::ref_cast(&url); let keyring = KeyringProvider::empty(); - let credentials = keyring.fetch(&url, Some("user")); + let credentials = keyring.fetch(url, Some("user")); assert!(credentials.await.is_none()); } @@ -264,7 +272,9 @@ mod tests { let url = Url::parse("https://example.com").unwrap(); let keyring = 
KeyringProvider::dummy([(url.host_str().unwrap(), "user", "password")]); assert_eq!( - keyring.fetch(&url, Some("user")).await, + keyring + .fetch(DisplaySafeUrl::ref_cast(&url), Some("user")) + .await, Some(Credentials::basic( Some("user".to_string()), Some("password".to_string()) @@ -272,7 +282,10 @@ mod tests { ); assert_eq!( keyring - .fetch(&url.join("test").unwrap(), Some("user")) + .fetch( + DisplaySafeUrl::ref_cast(&url.join("test").unwrap()), + Some("user") + ) .await, Some(Credentials::basic( Some("user".to_string()), @@ -285,7 +298,9 @@ mod tests { async fn fetch_url_no_match() { let url = Url::parse("https://example.com").unwrap(); let keyring = KeyringProvider::dummy([("other.com", "user", "password")]); - let credentials = keyring.fetch(&url, Some("user")).await; + let credentials = keyring + .fetch(DisplaySafeUrl::ref_cast(&url), Some("user")) + .await; assert_eq!(credentials, None); } @@ -297,21 +312,33 @@ mod tests { (url.host_str().unwrap(), "user", "other-password"), ]); assert_eq!( - keyring.fetch(&url.join("foo").unwrap(), Some("user")).await, + keyring + .fetch( + DisplaySafeUrl::ref_cast(&url.join("foo").unwrap()), + Some("user") + ) + .await, Some(Credentials::basic( Some("user".to_string()), Some("password".to_string()) )) ); assert_eq!( - keyring.fetch(&url, Some("user")).await, + keyring + .fetch(DisplaySafeUrl::ref_cast(&url), Some("user")) + .await, Some(Credentials::basic( Some("user".to_string()), Some("other-password".to_string()) )) ); assert_eq!( - keyring.fetch(&url.join("bar").unwrap(), Some("user")).await, + keyring + .fetch( + DisplaySafeUrl::ref_cast(&url.join("bar").unwrap()), + Some("user") + ) + .await, Some(Credentials::basic( Some("user".to_string()), Some("other-password".to_string()) @@ -323,7 +350,9 @@ mod tests { async fn fetch_url_username() { let url = Url::parse("https://example.com").unwrap(); let keyring = KeyringProvider::dummy([(url.host_str().unwrap(), "user", "password")]); - let credentials = keyring.fetch(&url, Some("user")).await; + let credentials = keyring + .fetch(DisplaySafeUrl::ref_cast(&url), Some("user")) + .await; assert_eq!( credentials, Some(Credentials::basic( @@ -337,7 +366,7 @@ mod tests { async fn fetch_url_no_username() { let url = Url::parse("https://example.com").unwrap(); let keyring = KeyringProvider::dummy([(url.host_str().unwrap(), "user", "password")]); - let credentials = keyring.fetch(&url, None).await; + let credentials = keyring.fetch(DisplaySafeUrl::ref_cast(&url), None).await; assert_eq!( credentials, Some(Credentials::basic( @@ -351,12 +380,16 @@ mod tests { async fn fetch_url_username_no_match() { let url = Url::parse("https://example.com").unwrap(); let keyring = KeyringProvider::dummy([(url.host_str().unwrap(), "foo", "password")]); - let credentials = keyring.fetch(&url, Some("bar")).await; + let credentials = keyring + .fetch(DisplaySafeUrl::ref_cast(&url), Some("bar")) + .await; assert_eq!(credentials, None); // Still fails if we have `foo` in the URL itself let url = Url::parse("https://foo@example.com").unwrap(); - let credentials = keyring.fetch(&url, Some("bar")).await; + let credentials = keyring + .fetch(DisplaySafeUrl::ref_cast(&url), Some("bar")) + .await; assert_eq!(credentials, None); } } diff --git a/crates/uv-auth/src/lib.rs b/crates/uv-auth/src/lib.rs index 6aa96a245..90a957630 100644 --- a/crates/uv-auth/src/lib.rs +++ b/crates/uv-auth/src/lib.rs @@ -1,7 +1,6 @@ use std::sync::{Arc, LazyLock}; use tracing::trace; -use url::Url; use cache::CredentialsCache; pub use 
credentials::Credentials; @@ -9,6 +8,7 @@ pub use index::{AuthPolicy, Index, Indexes}; pub use keyring::KeyringProvider; pub use middleware::AuthMiddleware; use realm::Realm; +use uv_redacted::DisplaySafeUrl; mod cache; mod credentials; @@ -28,7 +28,7 @@ pub(crate) static CREDENTIALS_CACHE: LazyLock = /// Populate the global authentication store with credentials on a URL, if there are any. /// /// Returns `true` if the store was updated. -pub fn store_credentials_from_url(url: &Url) -> bool { +pub fn store_credentials_from_url(url: &DisplaySafeUrl) -> bool { if let Some(credentials) = Credentials::from_url(url) { trace!("Caching credentials for {url}"); CREDENTIALS_CACHE.insert(url, Arc::new(credentials)); @@ -41,7 +41,7 @@ pub fn store_credentials_from_url(url: &Url) -> bool { /// Populate the global authentication store with credentials on a URL, if there are any. /// /// Returns `true` if the store was updated. -pub fn store_credentials(url: &Url, credentials: Arc) { +pub fn store_credentials(url: &DisplaySafeUrl, credentials: Arc) { trace!("Caching credentials for {url}"); CREDENTIALS_CACHE.insert(url, credentials); } diff --git a/crates/uv-auth/src/middleware.rs b/crates/uv-auth/src/middleware.rs index f31a01540..1842effb3 100644 --- a/crates/uv-auth/src/middleware.rs +++ b/crates/uv-auth/src/middleware.rs @@ -1,7 +1,11 @@ use std::sync::{Arc, LazyLock}; +use anyhow::{anyhow, format_err}; use http::{Extensions, StatusCode}; -use url::Url; +use netrc::Netrc; +use reqwest::{Request, Response}; +use reqwest_middleware::{Error, Middleware, Next}; +use tracing::{debug, trace, warn}; use crate::{ CREDENTIALS_CACHE, CredentialsCache, KeyringProvider, @@ -10,11 +14,7 @@ use crate::{ index::{AuthPolicy, Indexes}, realm::Realm, }; -use anyhow::{anyhow, format_err}; -use netrc::Netrc; -use reqwest::{Request, Response}; -use reqwest_middleware::{Error, Middleware, Next}; -use tracing::{debug, trace, warn}; +use uv_redacted::DisplaySafeUrl; /// Strategy for loading netrc files. enum NetrcMode { @@ -274,6 +274,7 @@ impl Middleware for AuthMiddleware { trace!("Checking for credentials for {url}"); (request, None) }; + let retry_request_url = DisplaySafeUrl::ref_cast(retry_request.url()); let username = credentials .as_ref() @@ -282,13 +283,13 @@ impl Middleware for AuthMiddleware { let credentials = if let Some(index_url) = maybe_index_url { self.cache().get_url(index_url, &username).or_else(|| { self.cache() - .get_realm(Realm::from(retry_request.url()), username) + .get_realm(Realm::from(&**retry_request_url), username) }) } else { // Since there is no known index for this URL, check if there are credentials in // the realm-level cache. 
self.cache() - .get_realm(Realm::from(retry_request.url()), username) + .get_realm(Realm::from(&**retry_request_url), username) } .or(credentials); @@ -307,7 +308,7 @@ impl Middleware for AuthMiddleware { if let Some(credentials) = self .fetch_credentials( credentials.as_deref(), - retry_request.url(), + retry_request_url, maybe_index_url, auth_policy, ) @@ -362,7 +363,7 @@ impl AuthMiddleware { // Nothing to insert into the cache if we don't have credentials return next.run(request, extensions).await; }; - let url = request.url().clone(); + let url = DisplaySafeUrl::from(request.url().clone()); if matches!(auth_policy, AuthPolicy::Always) && credentials.password().is_none() { return Err(Error::Middleware(format_err!("Missing password for {url}"))); } @@ -387,8 +388,8 @@ impl AuthMiddleware { mut request: Request, extensions: &mut Extensions, next: Next<'_>, - url: &str, - index_url: Option<&Url>, + url: &DisplaySafeUrl, + index_url: Option<&DisplaySafeUrl>, auth_policy: AuthPolicy, ) -> reqwest_middleware::Result { let credentials = Arc::new(credentials); @@ -430,7 +431,12 @@ impl AuthMiddleware { // Do not insert already-cached credentials None } else if let Some(credentials) = self - .fetch_credentials(Some(&credentials), request.url(), index_url, auth_policy) + .fetch_credentials( + Some(&credentials), + DisplaySafeUrl::ref_cast(request.url()), + index_url, + auth_policy, + ) .await { request = credentials.authenticate(request); @@ -462,8 +468,8 @@ impl AuthMiddleware { async fn fetch_credentials( &self, credentials: Option<&Credentials>, - url: &Url, - maybe_index_url: Option<&Url>, + url: &DisplaySafeUrl, + maybe_index_url: Option<&DisplaySafeUrl>, auth_policy: AuthPolicy, ) -> Option> { let username = Username::from( @@ -475,7 +481,7 @@ impl AuthMiddleware { let key = if let Some(index_url) = maybe_index_url { (FetchUrl::Index(index_url.clone()), username) } else { - (FetchUrl::Realm(Realm::from(url)), username) + (FetchUrl::Realm(Realm::from(&**url)), username) }; if !self.cache().fetches.register(key.clone()) { let credentials = self @@ -523,7 +529,7 @@ impl AuthMiddleware { if let Some(username) = credentials.and_then(|credentials| credentials.username()) { if let Some(index_url) = maybe_index_url { debug!("Checking keyring for credentials for index URL {}@{}", username, index_url); - keyring.fetch(index_url, Some(username)).await + keyring.fetch(DisplaySafeUrl::ref_cast(index_url), Some(username)).await } else { debug!("Checking keyring for credentials for full URL {}@{}", username, url); keyring.fetch(url, Some(username)).await @@ -533,7 +539,7 @@ impl AuthMiddleware { debug!( "Checking keyring for credentials for index URL {index_url} without username due to `authenticate = always`" ); - keyring.fetch(index_url, None).await + keyring.fetch(DisplaySafeUrl::ref_cast(index_url), None).await } else { None } @@ -558,24 +564,17 @@ impl AuthMiddleware { } } -fn tracing_url(request: &Request, credentials: Option<&Credentials>) -> String { - if !tracing::enabled!(tracing::Level::DEBUG) { - return request.url().to_string(); - } - - let mut url = request.url().clone(); +fn tracing_url(request: &Request, credentials: Option<&Credentials>) -> DisplaySafeUrl { + let mut url = DisplaySafeUrl::from(request.url().clone()); if let Some(creds) = credentials { - if creds.password().is_some() { - if let Some(username) = creds.username() { - let _ = url.set_username(username); - } - let _ = url.set_password(Some("****")); - // A username on its own might be a secret token. 
- } else if creds.username().is_some() { - let _ = url.set_username("****"); + if let Some(username) = creds.username() { + let _ = url.set_username(username); + } + if let Some(password) = creds.password() { + let _ = url.set_password(Some(password)); } } - url.to_string() + url } #[cfg(test)] @@ -1749,13 +1748,13 @@ mod tests { let base_url_2 = base_url.join("prefix_2")?; let indexes = Indexes::from_indexes(vec![ Index { - url: base_url_1.clone(), - root_url: base_url_1.clone(), + url: DisplaySafeUrl::from(base_url_1.clone()), + root_url: DisplaySafeUrl::from(base_url_1.clone()), auth_policy: AuthPolicy::Auto, }, Index { - url: base_url_2.clone(), - root_url: base_url_2.clone(), + url: DisplaySafeUrl::from(base_url_2.clone()), + root_url: DisplaySafeUrl::from(base_url_2.clone()), auth_policy: AuthPolicy::Auto, }, ]); @@ -1857,8 +1856,8 @@ mod tests { let base_url = Url::parse(&server.uri())?; let index_url = base_url.join("prefix_1")?; let indexes = Indexes::from_indexes(vec![Index { - url: index_url.clone(), - root_url: index_url.clone(), + url: DisplaySafeUrl::from(index_url.clone()), + root_url: DisplaySafeUrl::from(index_url.clone()), auth_policy: AuthPolicy::Auto, }]); @@ -1912,7 +1911,7 @@ mod tests { } fn indexes_for(url: &Url, policy: AuthPolicy) -> Indexes { - let mut url = url.clone(); + let mut url = DisplaySafeUrl::from(url.clone()); url.set_password(None).ok(); url.set_username("").ok(); Indexes::from_indexes(vec![Index { @@ -2104,16 +2103,14 @@ mod tests { } #[test] - #[tracing_test::traced_test(level = "debug")] fn test_tracing_url() { // No credentials let req = create_request("https://pypi-proxy.fly.dev/basic-auth/simple"); assert_eq!( tracing_url(&req, None), - "https://pypi-proxy.fly.dev/basic-auth/simple" + DisplaySafeUrl::parse("https://pypi-proxy.fly.dev/basic-auth/simple").unwrap() ); - // Mask username if there is a username but no password let creds = Credentials::Basic { username: Username::new(Some(String::from("user"))), password: None, @@ -2121,10 +2118,9 @@ mod tests { let req = create_request("https://pypi-proxy.fly.dev/basic-auth/simple"); assert_eq!( tracing_url(&req, Some(&creds)), - "https://****@pypi-proxy.fly.dev/basic-auth/simple" + DisplaySafeUrl::parse("https://user@pypi-proxy.fly.dev/basic-auth/simple").unwrap() ); - // Log username but mask password if a password is present let creds = Credentials::Basic { username: Username::new(Some(String::from("user"))), password: Some(Password::new(String::from("password"))), @@ -2132,7 +2128,8 @@ mod tests { let req = create_request("https://pypi-proxy.fly.dev/basic-auth/simple"); assert_eq!( tracing_url(&req, Some(&creds)), - "https://user:****@pypi-proxy.fly.dev/basic-auth/simple" + DisplaySafeUrl::parse("https://user:password@pypi-proxy.fly.dev/basic-auth/simple") + .unwrap() ); } diff --git a/crates/uv-bench/Cargo.toml b/crates/uv-bench/Cargo.toml index 70fe7a98a..8c08d4dd2 100644 --- a/crates/uv-bench/Cargo.toml +++ b/crates/uv-bench/Cargo.toml @@ -18,11 +18,6 @@ workspace = true doctest = false bench = false -[[bench]] -name = "distribution-filename" -path = "benches/distribution_filename.rs" -harness = false - [[bench]] name = "uv" path = "benches/uv.rs" @@ -34,7 +29,6 @@ uv-client = { workspace = true } uv-configuration = { workspace = true } uv-dispatch = { workspace = true } uv-distribution = { workspace = true } -uv-distribution-filename = { workspace = true } uv-distribution-types = { workspace = true } uv-extract = { workspace = true, optional = true } uv-install-wheel = { workspace = true } @@ 
-48,8 +42,10 @@ uv-types = { workspace = true } uv-workspace = { workspace = true } anyhow = { workspace = true } -codspeed-criterion-compat = { version = "2.7.2", default-features = false, optional = true } -criterion = { version = "0.5.1", default-features = false, features = ["async_tokio"] } +codspeed-criterion-compat = { version = "3.0.2", default-features = false, optional = true } +criterion = { version = "0.6.0", default-features = false, features = [ + "async_tokio", +] } jiff = { workspace = true } tokio = { workspace = true } diff --git a/crates/uv-bench/benches/distribution_filename.rs b/crates/uv-bench/benches/distribution_filename.rs deleted file mode 100644 index 99d72cf05..000000000 --- a/crates/uv-bench/benches/distribution_filename.rs +++ /dev/null @@ -1,168 +0,0 @@ -use std::str::FromStr; - -use uv_bench::criterion::{ - BenchmarkId, Criterion, Throughput, criterion_group, criterion_main, measurement::WallTime, -}; -use uv_distribution_filename::WheelFilename; -use uv_platform_tags::{AbiTag, LanguageTag, PlatformTag, Tags}; - -/// A set of platform tags extracted from burntsushi's Archlinux workstation. -/// We could just re-create these via `Tags::from_env`, but those might differ -/// depending on the platform. This way, we always use the same data. It also -/// lets us assert tag compatibility regardless of where the benchmarks run. -const PLATFORM_TAGS: &[(&str, &str, &str)] = include!("../inputs/platform_tags.rs"); - -/// A set of wheel names used in the benchmarks below. We pick short and long -/// names, as well as compatible and not-compatibles (with `PLATFORM_TAGS`) -/// names. -/// -/// The tuple is (name, filename, compatible) where `name` is a descriptive -/// name for humans used in the benchmark definition. And `filename` is the -/// actual wheel filename we want to benchmark operation on. And `compatible` -/// indicates whether the tags in the wheel filename are expected to be -/// compatible with the tags in `PLATFORM_TAGS`. -const WHEEL_NAMES: &[(&str, &str, bool)] = &[ - // This tests a case with a very short name that *is* compatible with - // PLATFORM_TAGS. It only uses one tag for each component (one Python - // version, one ABI and one platform). - ( - "flyte-short-compatible", - "ipython-2.1.0-py3-none-any.whl", - true, - ), - // This tests a case with a long name that is *not* compatible. That - // is, all platform tags need to be checked against the tags in the - // wheel filename. This is essentially the worst possible practical - // case. - ( - "flyte-long-incompatible", - "protobuf-3.5.2.post1-cp36-cp36m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", - false, - ), - // This tests a case with a long name that *is* compatible. We - // expect this to be (on average) quicker because the compatibility - // check stops as soon as a positive match is found. (Where as the - // incompatible case needs to check all tags.) - ( - "flyte-long-compatible", - "coverage-6.6.0b1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - true, - ), -]; - -/// A list of names that are candidates for wheel filenames but will ultimately -/// fail to parse. -const INVALID_WHEEL_NAMES: &[(&str, &str)] = &[ - ("flyte-short-extension", "mock-5.1.0.tar.gz"), - ( - "flyte-long-extension", - "Pillow-5.4.0.dev0-py3.7-macosx-10.13-x86_64.egg", - ), -]; - -/// Benchmarks the construction of platform tags. -/// -/// This only happens ~once per program startup. 
Originally, construction was -/// trivial. But to speed up `WheelFilename::is_compatible`, we added some -/// extra processing. We thus expect construction to become slower, but we -/// write a benchmark to ensure it is still "reasonable." -fn benchmark_build_platform_tags(c: &mut Criterion) { - let tags: Vec<(LanguageTag, AbiTag, PlatformTag)> = PLATFORM_TAGS - .iter() - .map(|&(py, abi, plat)| { - ( - LanguageTag::from_str(py).unwrap(), - AbiTag::from_str(abi).unwrap(), - PlatformTag::from_str(plat).unwrap(), - ) - }) - .collect(); - - let mut group = c.benchmark_group("build_platform_tags"); - group.bench_function(BenchmarkId::from_parameter("burntsushi-archlinux"), |b| { - b.iter(|| std::hint::black_box(Tags::new(tags.clone()))); - }); - group.finish(); -} - -/// Benchmarks `WheelFilename::from_str`. This has been observed to take some -/// non-trivial time in profiling (although, at time of writing, not as much -/// as tag compatibility). In the process of optimizing tag compatibility, -/// we tweaked wheel filename parsing. This benchmark was therefore added to -/// ensure we didn't regress here. -fn benchmark_wheelname_parsing(c: &mut Criterion) { - let mut group = c.benchmark_group("wheelname_parsing"); - for (name, filename, _) in WHEEL_NAMES.iter().copied() { - let len = u64::try_from(filename.len()).expect("length fits in u64"); - group.throughput(Throughput::Bytes(len)); - group.bench_function(BenchmarkId::from_parameter(name), |b| { - b.iter(|| { - filename - .parse::() - .expect("valid wheel filename"); - }); - }); - } - group.finish(); -} - -/// Benchmarks `WheelFilename::from_str` when it fails. This routine is called -/// on every filename in a package's metadata. A non-trivial portion of which -/// are not wheel filenames. Ensuring that the error path is fast is thus -/// probably a good idea. -fn benchmark_wheelname_parsing_failure(c: &mut Criterion) { - let mut group = c.benchmark_group("wheelname_parsing_failure"); - for (name, filename) in INVALID_WHEEL_NAMES.iter().copied() { - let len = u64::try_from(filename.len()).expect("length fits in u64"); - group.throughput(Throughput::Bytes(len)); - group.bench_function(BenchmarkId::from_parameter(name), |b| { - b.iter(|| { - filename - .parse::() - .expect_err("invalid wheel filename"); - }); - }); - } - group.finish(); -} - -/// Benchmarks the `WheelFilename::is_compatible` routine. This was revealed -/// to be the #1 bottleneck in the resolver. The main issue was that the -/// set of platform tags (generated once) is quite large, and the original -/// implementation did an exhaustive search over each of them for each tag in -/// the wheel filename. 
-fn benchmark_wheelname_tag_compatibility(c: &mut Criterion) { - let tags: Vec<(LanguageTag, AbiTag, PlatformTag)> = PLATFORM_TAGS - .iter() - .map(|&(py, abi, plat)| { - ( - LanguageTag::from_str(py).unwrap(), - AbiTag::from_str(abi).unwrap(), - PlatformTag::from_str(plat).unwrap(), - ) - }) - .collect(); - let tags = Tags::new(tags); - - let mut group = c.benchmark_group("wheelname_tag_compatibility"); - for (name, filename, expected) in WHEEL_NAMES.iter().copied() { - let wheelname: WheelFilename = filename.parse().expect("valid wheel filename"); - let len = u64::try_from(filename.len()).expect("length fits in u64"); - group.throughput(Throughput::Bytes(len)); - group.bench_function(BenchmarkId::from_parameter(name), |b| { - b.iter(|| { - assert_eq!(expected, wheelname.is_compatible(&tags)); - }); - }); - } - group.finish(); -} - -criterion_group!( - uv_distribution_filename, - benchmark_build_platform_tags, - benchmark_wheelname_parsing, - benchmark_wheelname_parsing_failure, - benchmark_wheelname_tag_compatibility, -); -criterion_main!(uv_distribution_filename); diff --git a/crates/uv-bench/benches/uv.rs b/crates/uv-bench/benches/uv.rs index 03a360ad5..9bdd7adb9 100644 --- a/crates/uv-bench/benches/uv.rs +++ b/crates/uv-bench/benches/uv.rs @@ -1,6 +1,6 @@ use std::str::FromStr; -use uv_bench::criterion::black_box; +use std::hint::black_box; use uv_bench::criterion::{Criterion, criterion_group, criterion_main, measurement::WallTime}; use uv_cache::Cache; use uv_client::RegistryClientBuilder; @@ -91,7 +91,7 @@ mod resolver { }; use uv_dispatch::{BuildDispatch, SharedState}; use uv_distribution::DistributionDatabase; - use uv_distribution_types::{DependencyMetadata, IndexLocations}; + use uv_distribution_types::{DependencyMetadata, IndexLocations, RequiresPython}; use uv_install_wheel::LinkMode; use uv_pep440::Version; use uv_pep508::{MarkerEnvironment, MarkerEnvironmentBuilder}; @@ -99,8 +99,8 @@ mod resolver { use uv_pypi_types::{Conflicts, ResolverMarkerEnvironment}; use uv_python::Interpreter; use uv_resolver::{ - FlatIndex, InMemoryIndex, Manifest, OptionsBuilder, PythonRequirement, RequiresPython, - Resolver, ResolverEnvironment, ResolverOutput, + FlatIndex, InMemoryIndex, Manifest, OptionsBuilder, PythonRequirement, Resolver, + ResolverEnvironment, ResolverOutput, }; use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy}; use uv_workspace::WorkspaceCache; @@ -206,6 +206,7 @@ mod resolver { options, &python_requirement, markers, + interpreter.markers(), conflicts, Some(&TAGS), &flat_index, diff --git a/crates/uv-build-backend/Cargo.toml b/crates/uv-build-backend/Cargo.toml index f23581662..7714423d4 100644 --- a/crates/uv-build-backend/Cargo.toml +++ b/crates/uv-build-backend/Cargo.toml @@ -31,6 +31,7 @@ flate2 = { workspace = true, default-features = false } fs-err = { workspace = true } globset = { workspace = true } itertools = { workspace = true } +rustc-hash = { workspace = true } schemars = { workspace = true, optional = true } serde = { workspace = true } sha2 = { workspace = true } diff --git a/crates/uv-build-backend/src/lib.rs b/crates/uv-build-backend/src/lib.rs index c193f3a0a..2ec11aeeb 100644 --- a/crates/uv-build-backend/src/lib.rs +++ b/crates/uv-build-backend/src/lib.rs @@ -9,13 +9,12 @@ pub use settings::{BuildBackendSettings, WheelDataIncludes}; pub use source_dist::{build_source_dist, list_source_dist}; pub use wheel::{build_editable, build_wheel, list_wheel, metadata}; -use std::fs::FileType; use std::io; use std::path::{Path, PathBuf}; use 
std::str::FromStr; - use thiserror::Error; use tracing::debug; +use walkdir::DirEntry; use uv_fs::Simplified; use uv_globfilter::PortableGlobError; @@ -23,6 +22,7 @@ use uv_normalize::PackageName; use uv_pypi_types::{Identifier, IdentifierParseError}; use crate::metadata::ValidationError; +use crate::settings::ModuleName; #[derive(Debug, Error)] pub enum Error { @@ -32,8 +32,8 @@ pub enum Error { Toml(#[from] toml::de::Error), #[error("Invalid pyproject.toml")] Validation(#[from] ValidationError), - #[error(transparent)] - Identifier(#[from] IdentifierParseError), + #[error("Invalid module name: {0}")] + InvalidModuleName(String, #[source] IdentifierParseError), #[error("Unsupported glob expression in: `{field}`")] PortableGlob { field: String, @@ -55,33 +55,14 @@ pub enum Error { #[source] err: walkdir::Error, }, - #[error("Unsupported file type {:?}: `{}`", _1, _0.user_display())] - UnsupportedFileType(PathBuf, FileType), #[error("Failed to write wheel zip archive")] Zip(#[from] zip::result::ZipError), #[error("Failed to write RECORD file")] Csv(#[from] csv::Error), - #[error( - "Missing source directory at: `{}`", - _0.user_display() - )] - MissingSrc(PathBuf), - #[error( - "Expected a Python module directory at: `{}`", - _0.user_display() - )] + #[error("Expected a Python module at: `{}`", _0.user_display())] MissingInitPy(PathBuf), - #[error( - "Missing module directory for `{}` in `{}`. Found: `{}`", - module_name, - src_root.user_display(), - dir_listing.join("`, `") - )] - MissingModuleDir { - module_name: String, - src_root: PathBuf, - dir_listing: Vec, - }, + #[error("For namespace packages, `__init__.py[i]` is not allowed in parent directory: `{}`", _0.user_display())] + NotANamespace(PathBuf), /// Either an absolute path or a parent path through `..`. #[error("Module root must be inside the project: `{}`", _0.user_display())] InvalidModuleRoot(PathBuf), @@ -104,6 +85,16 @@ trait DirectoryWriter { /// Files added through the method are considered generated when listing included files. fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error>; + /// Add the file or directory to the path. + fn write_dir_entry(&mut self, entry: &DirEntry, target_path: &str) -> Result<(), Error> { + if entry.file_type().is_dir() { + self.write_directory(target_path)?; + } else { + self.write_file(target_path, entry.path())?; + } + Ok(()) + } + /// Add a local file. fn write_file(&mut self, path: &str, file: &Path) -> Result<(), Error>; @@ -194,93 +185,166 @@ fn check_metadata_directory( Ok(()) } -/// Resolve the source root, module root and the module name. -fn find_roots( - source_tree: &Path, - pyproject_toml: &PyProjectToml, - relative_module_root: &Path, - module_name: Option<&Identifier>, -) -> Result<(PathBuf, PathBuf), Error> { - let relative_module_root = uv_fs::normalize_path(relative_module_root); - let src_root = source_tree.join(&relative_module_root); - if !src_root.starts_with(source_tree) { - return Err(Error::InvalidModuleRoot(relative_module_root.to_path_buf())); - } - let src_root = source_tree.join(&relative_module_root); - let module_root = find_module_root(&src_root, module_name, pyproject_toml.name())?; - Ok((src_root, module_root)) -} - -/// Match the module name to its module directory with potentially different casing. +/// Returns the source root and the module path(s) with the `__init__.py[i]` below to it while +/// checking the project layout and names. 
/// /// Some target platforms have case-sensitive filesystems, while others have case-insensitive -/// filesystems and we always lower case the package name, our default for the module, while some +/// filesystems. We always lower case the package name, our default for the module, while some /// users want uppercase letters in their module names. For example, the package name is `pil_util`, -/// but the module `PIL_util`. +/// but the module `PIL_util`. To make the behavior as consistent as possible across platforms as +/// possible, we require that an upper case name is given explicitly through +/// `tool.uv.build-backend.module-name`. /// /// By default, the dist-info-normalized package name is the module name. For /// dist-info-normalization, the rules are lowercasing, replacing `.` with `_` and /// replace `-` with `_`. Since `.` and `-` are not allowed in identifiers, we can use a string /// comparison with the module name. /// -/// To make the behavior as consistent as possible across platforms as possible, we require that an -/// upper case name is given explicitly through `tool.uv.module-name`. +/// While we recommend one module per package, it is possible to declare a list of modules. +fn find_roots( + source_tree: &Path, + pyproject_toml: &PyProjectToml, + relative_module_root: &Path, + module_name: Option<&ModuleName>, + namespace: bool, +) -> Result<(PathBuf, Vec), Error> { + let relative_module_root = uv_fs::normalize_path(relative_module_root); + let src_root = source_tree.join(&relative_module_root); + if !src_root.starts_with(source_tree) { + return Err(Error::InvalidModuleRoot(relative_module_root.to_path_buf())); + } + let src_root = source_tree.join(&relative_module_root); + debug!("Source root: {}", src_root.user_display()); + + if namespace { + // `namespace = true` disables module structure checks. + let modules_relative = if let Some(module_name) = module_name { + match module_name { + ModuleName::Name(name) => { + vec![name.split('.').collect::()] + } + ModuleName::Names(names) => names + .iter() + .map(|name| name.split('.').collect::()) + .collect(), + } + } else { + vec![PathBuf::from( + pyproject_toml.name().as_dist_info_name().to_string(), + )] + }; + for module_relative in &modules_relative { + debug!("Namespace module path: {}", module_relative.user_display()); + } + return Ok((src_root, modules_relative)); + } + + let modules_relative = if let Some(module_name) = module_name { + match module_name { + ModuleName::Name(name) => vec![module_path_from_module_name(&src_root, name)?], + ModuleName::Names(names) => names + .iter() + .map(|name| module_path_from_module_name(&src_root, name)) + .collect::>()?, + } + } else { + vec![find_module_path_from_package_name( + &src_root, + pyproject_toml.name(), + )?] + }; + for module_relative in &modules_relative { + debug!("Module path: {}", module_relative.user_display()); + } + Ok((src_root, modules_relative)) +} + +/// Infer stubs packages from package name alone. /// -/// Returns the module root path, the directory below which the `__init__.py` lives. -fn find_module_root( +/// There are potential false positives if someone had a regular package with `-stubs`. +/// The `Identifier` checks in `module_path_from_module_name` are here covered by the `PackageName` +/// validation. +fn find_module_path_from_package_name( src_root: &Path, - module_name: Option<&Identifier>, package_name: &PackageName, ) -> Result { - let module_name = if let Some(module_name) = module_name { - // This name can be uppercase. 
- module_name.to_string() + if let Some(stem) = package_name.to_string().strip_suffix("-stubs") { + debug!("Building stubs package instead of a regular package"); + let module_name = PackageName::from_str(stem) + .expect("non-empty package name prefix must be valid package name") + .as_dist_info_name() + .to_string(); + let module_relative = PathBuf::from(format!("{module_name}-stubs")); + let init_pyi = src_root.join(&module_relative).join("__init__.pyi"); + if !init_pyi.is_file() { + return Err(Error::MissingInitPy(init_pyi)); + } + Ok(module_relative) } else { - // Should never error, the rules for package names (in dist-info formatting) are stricter - // than those for identifiers. // This name is always lowercase. - Identifier::from_str(package_name.as_dist_info_name().as_ref())?.to_string() - }; + let module_relative = PathBuf::from(package_name.as_dist_info_name().to_string()); + let init_py = src_root.join(&module_relative).join("__init__.py"); + if !init_py.is_file() { + return Err(Error::MissingInitPy(init_py)); + } + Ok(module_relative) + } +} - let dir = match fs_err::read_dir(src_root) { - Ok(dir_iterator) => dir_iterator.collect::, _>>()?, - Err(err) if err.kind() == io::ErrorKind::NotFound => { - return Err(Error::MissingSrc(src_root.to_path_buf())); - } - Err(err) => return Err(Error::Io(err)), - }; - let module_root = dir.iter().find_map(|entry| { - // TODO(konsti): Do we ever need to check if `dir/{module_name}/__init__.py` exists because - // the wrong casing may be recorded on disk? - if entry - .file_name() - .to_str() - .is_some_and(|file_name| file_name == module_name) - { - Some(entry.path()) +/// Determine the relative module path from an explicit module name. +fn module_path_from_module_name(src_root: &Path, module_name: &str) -> Result { + // This name can be uppercase. + let module_relative = module_name.split('.').collect::(); + + // Check if we have a regular module or a namespace. + let (root_name, namespace_segments) = + if let Some((root_name, namespace_segments)) = module_name.split_once('.') { + ( + root_name, + namespace_segments.split('.').collect::>(), + ) } else { - None - } - }); - let module_root = if let Some(module_root) = module_root { - if module_root.join("__init__.py").is_file() { - module_root.clone() - } else { - return Err(Error::MissingInitPy(module_root.join("__init__.py"))); - } + (module_name, Vec::new()) + }; + + // Check if we have an implementation or a stubs package. + // For stubs for a namespace, the `-stubs` prefix must be on the root. + let stubs = if let Some(stem) = root_name.strip_suffix("-stubs") { + // Check that the stubs belong to a valid module. + Identifier::from_str(stem) + .map_err(|err| Error::InvalidModuleName(module_name.to_string(), err))?; + true } else { - return Err(Error::MissingModuleDir { - module_name, - src_root: src_root.to_path_buf(), - dir_listing: dir - .into_iter() - .filter_map(|entry| Some(entry.file_name().to_str()?.to_string())) - .collect(), - }); + Identifier::from_str(root_name) + .map_err(|err| Error::InvalidModuleName(module_name.to_string(), err))?; + false }; - debug!("Module name: `{}`", module_name); - Ok(module_root) + // For a namespace, check that all names below the root is valid. + for segment in namespace_segments { + Identifier::from_str(segment) + .map_err(|err| Error::InvalidModuleName(module_name.to_string(), err))?; + } + + // Check that an `__init__.py[i]` exists for the module. 
+ let init_py = + src_root + .join(&module_relative) + .join(if stubs { "__init__.pyi" } else { "__init__.py" }); + if !init_py.is_file() { + return Err(Error::MissingInitPy(init_py)); + } + + // For a namespace, check that the directories above the lowest are namespace directories. + for namespace_dir in module_relative.ancestors().skip(1) { + if src_root.join(namespace_dir).join("__init__.py").exists() + || src_root.join(namespace_dir).join("__init__.pyi").exists() + { + return Err(Error::NotANamespace(src_root.join(namespace_dir))); + } + } + + Ok(module_relative) } #[cfg(test)] @@ -293,10 +357,18 @@ mod tests { use itertools::Itertools; use sha2::Digest; use std::io::{BufReader, Read}; + use std::iter; use tempfile::TempDir; use uv_distribution_filename::{SourceDistFilename, WheelFilename}; use uv_fs::{copy_dir_all, relative_to}; + fn format_err(err: &Error) -> String { + let context = iter::successors(std::error::Error::source(&err), |&err| err.source()) + .map(|err| format!(" Caused by: {err}")) + .join("\n"); + err.to_string() + "\n" + &context + } + /// File listings, generated archives and archive contents for both a build with /// source tree -> wheel /// and a build with @@ -364,6 +436,15 @@ mod tests { }) } + fn build_err(source_root: &Path) -> String { + let dist = TempDir::new().unwrap(); + let build_err = build(source_root, dist.path()).unwrap_err(); + let err_message: String = format_err(&build_err) + .replace(&source_root.user_display().to_string(), "[TEMP_PATH]") + .replace('\\', "/"); + err_message + } + fn sdist_contents(source_dist_path: &Path) -> Vec { let sdist_reader = BufReader::new(File::open(source_dist_path).unwrap()); let mut source_dist = tar::Archive::new(GzDecoder::new(sdist_reader)); @@ -818,13 +899,454 @@ mod tests { ) .unwrap(); let build_err = build(src.path(), dist.path()).unwrap_err(); - let err_message = build_err - .to_string() + let err_message = format_err(&build_err) .replace(&src.path().user_display().to_string(), "[TEMP_PATH]") .replace('\\', "/"); assert_snapshot!( err_message, - @"Missing module directory for `camel_case` in `[TEMP_PATH]/src`. Found: `camelCase`" + @"Expected a Python module at: `[TEMP_PATH]/src/camel_case/__init__.py`" ); } + + #[test] + fn invalid_stubs_name() { + let src = TempDir::new().unwrap(); + let pyproject_toml = indoc! {r#" + [project] + name = "camelcase" + version = "1.0.0" + + [build-system] + requires = ["uv_build>=0.5.15,<0.6"] + build-backend = "uv_build" + + [tool.uv.build-backend] + module-name = "django@home-stubs" + "# + }; + fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap(); + + let dist = TempDir::new().unwrap(); + let build_err = build(src.path(), dist.path()).unwrap_err(); + let err_message = format_err(&build_err); + assert_snapshot!( + err_message, + @r" + Invalid module name: django@home-stubs + Caused by: Invalid character `@` at position 7 for identifier `django@home`, expected an underscore or an alphanumeric character + " + ); + } + + /// Stubs packages use a special name and `__init__.pyi`. + #[test] + fn stubs_package() { + let src = TempDir::new().unwrap(); + let pyproject_toml = indoc! {r#" + [project] + name = "stuffed-bird-stubs" + version = "1.0.0" + + [build-system] + requires = ["uv_build>=0.5.15,<0.6"] + build-backend = "uv_build" + "# + }; + fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap(); + fs_err::create_dir_all(src.path().join("src").join("stuffed_bird-stubs")).unwrap(); + // That's the wrong file, we're expecting a `__init__.pyi`. 
+ let regular_init_py = src + .path() + .join("src") + .join("stuffed_bird-stubs") + .join("__init__.py"); + File::create(®ular_init_py).unwrap(); + + let dist = TempDir::new().unwrap(); + let build_err = build(src.path(), dist.path()).unwrap_err(); + let err_message = format_err(&build_err) + .replace(&src.path().user_display().to_string(), "[TEMP_PATH]") + .replace('\\', "/"); + assert_snapshot!( + err_message, + @"Expected a Python module at: `[TEMP_PATH]/src/stuffed_bird-stubs/__init__.pyi`" + ); + + // Create the correct file + fs_err::remove_file(regular_init_py).unwrap(); + File::create( + src.path() + .join("src") + .join("stuffed_bird-stubs") + .join("__init__.pyi"), + ) + .unwrap(); + + let build1 = build(src.path(), dist.path()).unwrap(); + assert_snapshot!(build1.wheel_contents.join("\n"), @r" + stuffed_bird-stubs/ + stuffed_bird-stubs/__init__.pyi + stuffed_bird_stubs-1.0.0.dist-info/ + stuffed_bird_stubs-1.0.0.dist-info/METADATA + stuffed_bird_stubs-1.0.0.dist-info/RECORD + stuffed_bird_stubs-1.0.0.dist-info/WHEEL + "); + + // Check that setting the name manually works equally. + let pyproject_toml = indoc! {r#" + [project] + name = "stuffed-bird-stubs" + version = "1.0.0" + + [build-system] + requires = ["uv_build>=0.5.15,<0.6"] + build-backend = "uv_build" + + [tool.uv.build-backend] + module-name = "stuffed_bird-stubs" + "# + }; + fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap(); + + let build2 = build(src.path(), dist.path()).unwrap(); + assert_eq!(build1.wheel_contents, build2.wheel_contents); + } + + /// A simple namespace package with a single root `__init__.py`. + #[test] + fn simple_namespace_package() { + let src = TempDir::new().unwrap(); + let pyproject_toml = indoc! {r#" + [project] + name = "simple-namespace-part" + version = "1.0.0" + + [tool.uv.build-backend] + module-name = "simple_namespace.part" + + [build-system] + requires = ["uv_build>=0.5.15,<0.6"] + build-backend = "uv_build" + "# + }; + fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap(); + fs_err::create_dir_all(src.path().join("src").join("simple_namespace").join("part")) + .unwrap(); + + assert_snapshot!( + build_err(src.path()), + @"Expected a Python module at: `[TEMP_PATH]/src/simple_namespace/part/__init__.py`" + ); + + // Create the correct file + File::create( + src.path() + .join("src") + .join("simple_namespace") + .join("part") + .join("__init__.py"), + ) + .unwrap(); + + // For a namespace package, there must not be an `__init__.py` here. 
+ let bogus_init_py = src + .path() + .join("src") + .join("simple_namespace") + .join("__init__.py"); + File::create(&bogus_init_py).unwrap(); + assert_snapshot!( + build_err(src.path()), + @"For namespace packages, `__init__.py[i]` is not allowed in parent directory: `[TEMP_PATH]/src/simple_namespace`" + ); + fs_err::remove_file(bogus_init_py).unwrap(); + + let dist = TempDir::new().unwrap(); + let build1 = build(src.path(), dist.path()).unwrap(); + assert_snapshot!(build1.source_dist_contents.join("\n"), @r" + simple_namespace_part-1.0.0/ + simple_namespace_part-1.0.0/PKG-INFO + simple_namespace_part-1.0.0/pyproject.toml + simple_namespace_part-1.0.0/src + simple_namespace_part-1.0.0/src/simple_namespace + simple_namespace_part-1.0.0/src/simple_namespace/part + simple_namespace_part-1.0.0/src/simple_namespace/part/__init__.py + "); + assert_snapshot!(build1.wheel_contents.join("\n"), @r" + simple_namespace/ + simple_namespace/part/ + simple_namespace/part/__init__.py + simple_namespace_part-1.0.0.dist-info/ + simple_namespace_part-1.0.0.dist-info/METADATA + simple_namespace_part-1.0.0.dist-info/RECORD + simple_namespace_part-1.0.0.dist-info/WHEEL + "); + + // Check that `namespace = true` works too. + let pyproject_toml = indoc! {r#" + [project] + name = "simple-namespace-part" + version = "1.0.0" + + [tool.uv.build-backend] + module-name = "simple_namespace.part" + namespace = true + + [build-system] + requires = ["uv_build>=0.5.15,<0.6"] + build-backend = "uv_build" + "# + }; + fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap(); + + let build2 = build(src.path(), dist.path()).unwrap(); + assert_eq!(build1, build2); + } + + /// A complex namespace package with a multiple root `__init__.py`. + #[test] + fn complex_namespace_package() { + let src = TempDir::new().unwrap(); + let pyproject_toml = indoc! {r#" + [project] + name = "complex-namespace" + version = "1.0.0" + + [tool.uv.build-backend] + namespace = true + + [build-system] + requires = ["uv_build>=0.5.15,<0.6"] + build-backend = "uv_build" + "# + }; + fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap(); + fs_err::create_dir_all( + src.path() + .join("src") + .join("complex_namespace") + .join("part_a"), + ) + .unwrap(); + File::create( + src.path() + .join("src") + .join("complex_namespace") + .join("part_a") + .join("__init__.py"), + ) + .unwrap(); + fs_err::create_dir_all( + src.path() + .join("src") + .join("complex_namespace") + .join("part_b"), + ) + .unwrap(); + File::create( + src.path() + .join("src") + .join("complex_namespace") + .join("part_b") + .join("__init__.py"), + ) + .unwrap(); + + let dist = TempDir::new().unwrap(); + let build1 = build(src.path(), dist.path()).unwrap(); + assert_snapshot!(build1.wheel_contents.join("\n"), @r" + complex_namespace-1.0.0.dist-info/ + complex_namespace-1.0.0.dist-info/METADATA + complex_namespace-1.0.0.dist-info/RECORD + complex_namespace-1.0.0.dist-info/WHEEL + complex_namespace/ + complex_namespace/part_a/ + complex_namespace/part_a/__init__.py + complex_namespace/part_b/ + complex_namespace/part_b/__init__.py + "); + + // Check that setting the name manually works equally. + let pyproject_toml = indoc! 
{r#" + [project] + name = "complex-namespace" + version = "1.0.0" + + [tool.uv.build-backend] + module-name = "complex_namespace" + namespace = true + + [build-system] + requires = ["uv_build>=0.5.15,<0.6"] + build-backend = "uv_build" + "# + }; + fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap(); + + let build2 = build(src.path(), dist.path()).unwrap(); + assert_eq!(build1, build2); + } + + /// Stubs for a namespace package. + #[test] + fn stubs_namespace() { + let src = TempDir::new().unwrap(); + let pyproject_toml = indoc! {r#" + [project] + name = "cloud.db.schema-stubs" + version = "1.0.0" + + [tool.uv.build-backend] + module-name = "cloud-stubs.db.schema" + + [build-system] + requires = ["uv_build>=0.5.15,<0.6"] + build-backend = "uv_build" + "# + }; + fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap(); + fs_err::create_dir_all( + src.path() + .join("src") + .join("cloud-stubs") + .join("db") + .join("schema"), + ) + .unwrap(); + File::create( + src.path() + .join("src") + .join("cloud-stubs") + .join("db") + .join("schema") + .join("__init__.pyi"), + ) + .unwrap(); + + let dist = TempDir::new().unwrap(); + let build = build(src.path(), dist.path()).unwrap(); + assert_snapshot!(build.wheel_contents.join("\n"), @r" + cloud-stubs/ + cloud-stubs/db/ + cloud-stubs/db/schema/ + cloud-stubs/db/schema/__init__.pyi + cloud_db_schema_stubs-1.0.0.dist-info/ + cloud_db_schema_stubs-1.0.0.dist-info/METADATA + cloud_db_schema_stubs-1.0.0.dist-info/RECORD + cloud_db_schema_stubs-1.0.0.dist-info/WHEEL + "); + } + + /// A package with multiple modules, one a regular module and two namespace modules. + #[test] + fn multiple_module_names() { + let src = TempDir::new().unwrap(); + let pyproject_toml = indoc! {r#" + [project] + name = "simple-namespace-part" + version = "1.0.0" + + [tool.uv.build-backend] + module-name = ["foo", "simple_namespace.part_a", "simple_namespace.part_b"] + + [build-system] + requires = ["uv_build>=0.5.15,<0.6"] + build-backend = "uv_build" + "# + }; + fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap(); + fs_err::create_dir_all(src.path().join("src").join("foo")).unwrap(); + fs_err::create_dir_all( + src.path() + .join("src") + .join("simple_namespace") + .join("part_a"), + ) + .unwrap(); + fs_err::create_dir_all( + src.path() + .join("src") + .join("simple_namespace") + .join("part_b"), + ) + .unwrap(); + + // Most of these checks exist in other tests too, but we want to ensure that they apply + // with multiple modules too. + + // The first module is missing an `__init__.py`. + assert_snapshot!( + build_err(src.path()), + @"Expected a Python module at: `[TEMP_PATH]/src/foo/__init__.py`" + ); + + // Create the first correct `__init__.py` file + File::create(src.path().join("src").join("foo").join("__init__.py")).unwrap(); + + // The second module, a namespace, is missing an `__init__.py`. + assert_snapshot!( + build_err(src.path()), + @"Expected a Python module at: `[TEMP_PATH]/src/simple_namespace/part_a/__init__.py`" + ); + + // Create the other two correct `__init__.py` files + File::create( + src.path() + .join("src") + .join("simple_namespace") + .join("part_a") + .join("__init__.py"), + ) + .unwrap(); + File::create( + src.path() + .join("src") + .join("simple_namespace") + .join("part_b") + .join("__init__.py"), + ) + .unwrap(); + + // For the second module, a namespace, there must not be an `__init__.py` here. 
+ let bogus_init_py = src + .path() + .join("src") + .join("simple_namespace") + .join("__init__.py"); + File::create(&bogus_init_py).unwrap(); + assert_snapshot!( + build_err(src.path()), + @"For namespace packages, `__init__.py[i]` is not allowed in parent directory: `[TEMP_PATH]/src/simple_namespace`" + ); + fs_err::remove_file(bogus_init_py).unwrap(); + + let dist = TempDir::new().unwrap(); + let build = build(src.path(), dist.path()).unwrap(); + assert_snapshot!(build.source_dist_contents.join("\n"), @r" + simple_namespace_part-1.0.0/ + simple_namespace_part-1.0.0/PKG-INFO + simple_namespace_part-1.0.0/pyproject.toml + simple_namespace_part-1.0.0/src + simple_namespace_part-1.0.0/src/foo + simple_namespace_part-1.0.0/src/foo/__init__.py + simple_namespace_part-1.0.0/src/simple_namespace + simple_namespace_part-1.0.0/src/simple_namespace/part_a + simple_namespace_part-1.0.0/src/simple_namespace/part_a/__init__.py + simple_namespace_part-1.0.0/src/simple_namespace/part_b + simple_namespace_part-1.0.0/src/simple_namespace/part_b/__init__.py + "); + assert_snapshot!(build.wheel_contents.join("\n"), @r" + foo/ + foo/__init__.py + simple_namespace/ + simple_namespace/part_a/ + simple_namespace/part_a/__init__.py + simple_namespace/part_b/ + simple_namespace/part_b/__init__.py + simple_namespace_part-1.0.0.dist-info/ + simple_namespace_part-1.0.0.dist-info/METADATA + simple_namespace_part-1.0.0.dist-info/RECORD + simple_namespace_part-1.0.0.dist-info/WHEEL + "); + } } diff --git a/crates/uv-build-backend/src/settings.rs b/crates/uv-build-backend/src/settings.rs index fb9803799..9e9e44961 100644 --- a/crates/uv-build-backend/src/settings.rs +++ b/crates/uv-build-backend/src/settings.rs @@ -1,14 +1,9 @@ use serde::{Deserialize, Serialize}; use std::path::PathBuf; use uv_macros::OptionsMetadata; -use uv_pypi_types::Identifier; /// Settings for the uv build backend (`uv_build`). /// -/// !!! note -/// -/// The uv build backend is currently in preview and may change in any future release. -/// /// Note that those settings only apply when using the `uv_build` backend, other build backends /// (such as hatchling) have their own configuration. /// @@ -32,15 +27,26 @@ pub struct BuildBackendSettings { /// /// The default module name is the package name with dots and dashes replaced by underscores. /// + /// Package names need to be valid Python identifiers, and the directory needs to contain a + /// `__init__.py`. An exception are stubs packages, whose name ends with `-stubs`, with the stem + /// being the module name, and which contain a `__init__.pyi` file. + /// + /// For namespace packages with a single module, the path can be dotted, e.g., `foo.bar` or + /// `foo-stubs.bar`. + /// + /// For namespace packages with multiple modules, the path can be a list, e.g., + /// `["foo", "bar"]`. We recommend using a single module per package, splitting multiple + /// packages into a workspace. + /// /// Note that using this option runs the risk of creating two packages with different names but /// the same module names. Installing such packages together leads to unspecified behavior, /// often with corrupted files or directory trees. #[option( default = r#"None"#, - value_type = "str", + value_type = "str | list[str]", example = r#"module-name = "sklearn""# )] - pub module_name: Option, + pub module_name: Option, /// Glob expressions which files and directories to additionally include in the source /// distribution. 
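For reference, a minimal standalone sketch (not part of this diff) of how the new `str | list[str]` form of `module-name` maps onto the untagged `ModuleName` enum added further below in settings.rs. The enum is redefined locally, and the `serde`/`toml` dependencies as well as the kebab-case field rename are assumptions made for this example only.

use serde::Deserialize;

#[derive(Debug, Deserialize, PartialEq)]
#[serde(untagged)]
enum ModuleName {
    // A single module name, e.g. `module-name = "foo.bar"`.
    Name(String),
    // Multiple module names, e.g. `module-name = ["foo", "bar"]`.
    Names(Vec<String>),
}

#[derive(Debug, Deserialize)]
struct Settings {
    // Mirrors `tool.uv.build-backend.module-name`; the kebab-case rename is assumed here.
    #[serde(rename = "module-name")]
    module_name: Option<ModuleName>,
}

fn main() {
    // A single dotted name selects one (possibly namespaced) module.
    let single: Settings = toml::from_str(r#"module-name = "simple_namespace.part""#).unwrap();
    assert_eq!(
        single.module_name,
        Some(ModuleName::Name("simple_namespace.part".to_string()))
    );

    // A list selects several modules, as exercised by the `multiple_module_names` test above.
    let multiple: Settings =
        toml::from_str(r#"module-name = ["foo", "simple_namespace.part_a"]"#).unwrap();
    assert_eq!(
        multiple.module_name,
        Some(ModuleName::Names(vec![
            "foo".to_string(),
            "simple_namespace.part_a".to_string(),
        ]))
    );
}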
@@ -79,6 +85,56 @@ pub struct BuildBackendSettings { )] pub wheel_exclude: Vec, + /// Build a namespace package. + /// + /// Build a PEP 420 implicit namespace package, allowing more than one root `__init__.py`. + /// + /// Use this option when the namespace package contains multiple root `__init__.py`, for + /// namespace packages with a single root `__init__.py` use a dotted `module-name` instead. + /// + /// To compare dotted `module-name` and `namespace = true`, the first example below can be + /// expressed with `module-name = "cloud.database"`: There is one root `__init__.py` `database`. + /// In the second example, we have three roots (`cloud.database`, `cloud.database_pro`, + /// `billing.modules.database_pro`), so `namespace = true` is required. + /// + /// ```text + /// src + /// └── cloud + /// └── database + /// ├── __init__.py + /// ├── query_builder + /// │ └── __init__.py + /// └── sql + /// ├── parser.py + /// └── __init__.py + /// ``` + /// + /// ```text + /// src + /// ├── cloud + /// │ ├── database + /// │ │ ├── __init__.py + /// │ │ ├── query_builder + /// │ │ │ └── __init__.py + /// │ │ └── sql + /// │ │ ├── __init__.py + /// │ │ └── parser.py + /// │ └── database_pro + /// │ ├── __init__.py + /// │ └── query_builder.py + /// └── billing + /// └── modules + /// └── database_pro + /// ├── __init__.py + /// └── sql.py + /// ``` + #[option( + default = r#"false"#, + value_type = "bool", + example = r#"namespace = true"# + )] + pub namespace: bool, + /// Data includes for wheels. /// /// Each entry is a directory, whose contents are copied to the matching directory in the wheel @@ -123,11 +179,23 @@ impl Default for BuildBackendSettings { default_excludes: true, source_exclude: Vec::new(), wheel_exclude: Vec::new(), + namespace: false, data: WheelDataIncludes::default(), } } } +/// Whether to include a single module or multiple modules. +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[serde(untagged)] +pub enum ModuleName { + /// A single module name. + Name(String), + /// Multiple module names, which are all included. + Names(Vec), +} + /// Data includes for wheels. /// /// See `BuildBackendSettings::data`. diff --git a/crates/uv-build-backend/src/source_dist.rs b/crates/uv-build-backend/src/source_dist.rs index c39a27e44..3b6d11ba4 100644 --- a/crates/uv-build-backend/src/source_dist.rs +++ b/crates/uv-build-backend/src/source_dist.rs @@ -68,20 +68,24 @@ fn source_dist_matcher( includes.push(globset::escape("pyproject.toml")); // Check that the source tree contains a module. - let (_, module_root) = find_roots( + let (src_root, modules_relative) = find_roots( source_tree, pyproject_toml, &settings.module_root, settings.module_name.as_ref(), + settings.namespace, )?; - // The wheel must not include any files included by the source distribution (at least until we - // have files generated in the source dist -> wheel build step). - let import_path = uv_fs::normalize_path( - &uv_fs::relative_to(module_root, source_tree).expect("module root is inside source tree"), - ) - .portable_display() - .to_string(); - includes.push(format!("{}/**", globset::escape(&import_path))); + for module_relative in modules_relative { + // The wheel must not include any files included by the source distribution (at least until we + // have files generated in the source dist -> wheel build step). 
+ let import_path = uv_fs::normalize_path( + &uv_fs::relative_to(src_root.join(module_relative), source_tree) + .expect("module root is inside source tree"), + ) + .portable_display() + .to_string(); + includes.push(format!("{}/**", globset::escape(&import_path))); + } for include in includes { let glob = PortableGlobParser::Uv .parse(&include) @@ -248,32 +252,16 @@ fn write_source_dist( .expect("walkdir starts with root"); if !include_matcher.match_path(relative) || exclude_matcher.is_match(relative) { - trace!("Excluding: `{}`", relative.user_display()); + trace!("Excluding from sdist: `{}`", relative.user_display()); continue; } - debug!("Including {}", relative.user_display()); - if entry.file_type().is_dir() { - writer.write_directory( - &Path::new(&top_level) - .join(relative) - .portable_display() - .to_string(), - )?; - } else if entry.file_type().is_file() { - writer.write_file( - &Path::new(&top_level) - .join(relative) - .portable_display() - .to_string(), - entry.path(), - )?; - } else { - return Err(Error::UnsupportedFileType( - relative.to_path_buf(), - entry.file_type(), - )); - } + let entry_path = Path::new(&top_level) + .join(relative) + .portable_display() + .to_string(); + debug!("Adding to sdist: {}", relative.user_display()); + writer.write_dir_entry(&entry, &entry_path)?; } debug!("Visited {files_visited} files for source dist build"); diff --git a/crates/uv-build-backend/src/wheel.rs b/crates/uv-build-backend/src/wheel.rs index 69af1e203..6eeb899d0 100644 --- a/crates/uv-build-backend/src/wheel.rs +++ b/crates/uv-build-backend/src/wheel.rs @@ -1,6 +1,7 @@ use fs_err::File; use globset::{GlobSet, GlobSetBuilder}; use itertools::Itertools; +use rustc_hash::FxHashSet; use sha2::{Digest, Sha256}; use std::io::{BufReader, Read, Write}; use std::path::{Path, PathBuf}; @@ -17,8 +18,7 @@ use uv_warnings::warn_user_once; use crate::metadata::DEFAULT_EXCLUDES; use crate::{ - BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml, - find_module_root, find_roots, + BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml, find_roots, }; /// Build a wheel from the source tree and place it in the output directory. @@ -124,60 +124,64 @@ fn write_wheel( let exclude_matcher = build_exclude_matcher(excludes)?; debug!("Adding content files to wheel"); - let (src_root, module_root) = find_roots( + let (src_root, module_relative) = find_roots( source_tree, pyproject_toml, &settings.module_root, settings.module_name.as_ref(), + settings.namespace, )?; let mut files_visited = 0; - for entry in WalkDir::new(module_root) - .sort_by_file_name() - .into_iter() - .filter_entry(|entry| !exclude_matcher.is_match(entry.path())) - { - let entry = entry.map_err(|err| Error::WalkDir { - root: source_tree.to_path_buf(), - err, - })?; + let mut prefix_directories = FxHashSet::default(); + for module_relative in module_relative { + // For convenience, have directories for the whole tree in the wheel + for ancestor in module_relative.ancestors().skip(1) { + if ancestor == Path::new("") { + continue; + } + // Avoid duplicate directories in the zip. + if prefix_directories.insert(ancestor.to_path_buf()) { + wheel_writer.write_directory(&ancestor.portable_display().to_string())?; + } + } - files_visited += 1; - if files_visited > 10000 { - warn_user_once!( - "Visited more than 10,000 files for wheel build. 
\ + for entry in WalkDir::new(src_root.join(module_relative)) + .sort_by_file_name() + .into_iter() + .filter_entry(|entry| !exclude_matcher.is_match(entry.path())) + { + let entry = entry.map_err(|err| Error::WalkDir { + root: source_tree.to_path_buf(), + err, + })?; + + files_visited += 1; + if files_visited > 10000 { + warn_user_once!( + "Visited more than 10,000 files for wheel build. \ Consider using more constrained includes or more excludes." - ); - } + ); + } - // We only want to take the module root, but since excludes start at the source tree root, - // we strip higher than we iterate. - let match_path = entry - .path() - .strip_prefix(source_tree) - .expect("walkdir starts with root"); - let wheel_path = entry - .path() - .strip_prefix(&src_root) - .expect("walkdir starts with root"); - if exclude_matcher.is_match(match_path) { - trace!("Excluding from module: `{}`", match_path.user_display()); - continue; - } - let wheel_path = wheel_path.portable_display().to_string(); + // We only want to take the module root, but since excludes start at the source tree root, + // we strip higher than we iterate. + let match_path = entry + .path() + .strip_prefix(source_tree) + .expect("walkdir starts with root"); + let entry_path = entry + .path() + .strip_prefix(&src_root) + .expect("walkdir starts with root"); + if exclude_matcher.is_match(match_path) { + trace!("Excluding from module: `{}`", match_path.user_display()); + continue; + } - debug!("Adding to wheel: `{wheel_path}`"); - - if entry.file_type().is_dir() { - wheel_writer.write_directory(&wheel_path)?; - } else if entry.file_type().is_file() { - wheel_writer.write_file(&wheel_path, entry.path())?; - } else { - // TODO(konsti): We may want to support symlinks, there is support for installing them. - return Err(Error::UnsupportedFileType( - entry.path().to_path_buf(), - entry.file_type(), - )); + let entry_path = entry_path.portable_display().to_string(); + debug!("Adding to wheel: {entry_path}"); + wheel_writer.write_dir_entry(&entry, &entry_path)?; } } debug!("Visited {files_visited} files for wheel build"); @@ -267,16 +271,13 @@ pub fn build_editable( let mut wheel_writer = ZipDirectoryWriter::new_wheel(File::create(&wheel_path)?); debug!("Adding pth file to {}", wheel_path.user_display()); - let src_root = source_tree.join(&settings.module_root); - if !src_root.starts_with(source_tree) { - return Err(Error::InvalidModuleRoot(settings.module_root.clone())); - } - // Check that a module root exists in the directory we're linking from the `.pth` file - find_module_root( - &src_root, + let (src_root, _module_relative) = find_roots( + source_tree, + &pyproject_toml, + &settings.module_root, settings.module_name.as_ref(), - pyproject_toml.name(), + settings.namespace, )?; wheel_writer.write_bytes( @@ -514,23 +515,12 @@ fn wheel_subdir_from_globs( continue; } - let relative_licenses = Path::new(target) + let license_path = Path::new(target) .join(relative) .portable_display() .to_string(); - - if entry.file_type().is_dir() { - wheel_writer.write_directory(&relative_licenses)?; - } else if entry.file_type().is_file() { - debug!("Adding {} file: `{}`", globs_field, relative.user_display()); - wheel_writer.write_file(&relative_licenses, entry.path())?; - } else { - // TODO(konsti): We may want to support symlinks, there is support for installing them. 
- return Err(Error::UnsupportedFileType( - entry.path().to_path_buf(), - entry.file_type(), - )); - } + debug!("Adding for {}: `{}`", globs_field, relative.user_display()); + wheel_writer.write_dir_entry(&entry, &license_path)?; } Ok(()) } diff --git a/crates/uv-build-frontend/Cargo.toml b/crates/uv-build-frontend/Cargo.toml index 83f8008d9..748e7bb28 100644 --- a/crates/uv-build-frontend/Cargo.toml +++ b/crates/uv-build-frontend/Cargo.toml @@ -17,6 +17,7 @@ doctest = false workspace = true [dependencies] +uv-cache-key = { workspace = true } uv-configuration = { workspace = true } uv-distribution = { workspace = true } uv-distribution-types = { workspace = true } diff --git a/crates/uv-build-frontend/src/lib.rs b/crates/uv-build-frontend/src/lib.rs index 1c29b2c31..5cbaece2e 100644 --- a/crates/uv-build-frontend/src/lib.rs +++ b/crates/uv-build-frontend/src/lib.rs @@ -25,11 +25,14 @@ use tempfile::TempDir; use tokio::io::AsyncBufReadExt; use tokio::process::Command; use tokio::sync::{Mutex, Semaphore}; -use tracing::{Instrument, debug, info_span, instrument}; +use tracing::{Instrument, debug, info_span, instrument, warn}; +use uv_cache_key::cache_digest; +use uv_configuration::PreviewMode; use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, SourceStrategy}; use uv_distribution::BuildRequires; use uv_distribution_types::{IndexLocations, Requirement, Resolution}; +use uv_fs::LockedFile; use uv_fs::{PythonExt, Simplified}; use uv_pep440::Version; use uv_pep508::PackageName; @@ -200,6 +203,11 @@ impl Pep517Backend { {import} "#, backend_path = backend_path_encoded} } + + fn is_setuptools(&self) -> bool { + // either `setuptools.build_meta` or `setuptools.build_meta:__legacy__` + self.backend.split(':').next() == Some("setuptools.build_meta") + } } /// Uses an [`Rc`] internally, clone freely. @@ -278,6 +286,7 @@ impl SourceBuild { mut environment_variables: FxHashMap, level: BuildOutput, concurrent_builds: usize, + preview: PreviewMode, ) -> Result { let temp_dir = build_context.cache().venv_dir()?; @@ -325,6 +334,8 @@ impl SourceBuild { false, false, false, + false, + preview, )? }; @@ -430,6 +441,31 @@ impl SourceBuild { }) } + /// Acquire a lock on the source tree, if necessary. + async fn acquire_lock(&self) -> Result, Error> { + // Depending on the command, setuptools puts `*.egg-info`, `build/`, and `dist/` in the + // source tree, and concurrent invocations of setuptools using the same source dir can + // stomp on each other. We need to lock something to fix that, but we don't want to dump a + // `.lock` file into the source tree that the user will need to .gitignore. Take a global + // proxy lock instead. + let mut source_tree_lock = None; + if self.pep517_backend.is_setuptools() { + debug!("Locking the source tree for setuptools"); + let canonical_source_path = self.source_tree.canonicalize()?; + let lock_path = env::temp_dir().join(format!( + "uv-setuptools-{}.lock", + cache_digest(&canonical_source_path) + )); + source_tree_lock = LockedFile::acquire(lock_path, self.source_tree.to_string_lossy()) + .await + .inspect_err(|err| { + warn!("Failed to acquire build lock: {err}"); + }) + .ok(); + } + Ok(source_tree_lock) + } + async fn get_resolved_requirements( build_context: &impl BuildContext, source_build_context: SourceBuildContext, @@ -600,6 +636,9 @@ impl SourceBuild { return Ok(Some(metadata_dir.clone())); } + // Lock the source tree, if necessary. + let _lock = self.acquire_lock().await?; + // Hatch allows for highly dynamic customization of metadata via hooks. 
In such cases, Hatch // can't uphold the PEP 517 contract, in that the metadata Hatch would return by // `prepare_metadata_for_build_wheel` isn't guaranteed to match that of the built wheel. @@ -712,16 +751,15 @@ impl SourceBuild { pub async fn build(&self, wheel_dir: &Path) -> Result { // The build scripts run with the extracted root as cwd, so they need the absolute path. let wheel_dir = std::path::absolute(wheel_dir)?; - let filename = self.pep517_build(&wheel_dir, &self.pep517_backend).await?; + let filename = self.pep517_build(&wheel_dir).await?; Ok(filename) } /// Perform a PEP 517 build for a wheel or source distribution (sdist). - async fn pep517_build( - &self, - output_dir: &Path, - pep517_backend: &Pep517Backend, - ) -> Result { + async fn pep517_build(&self, output_dir: &Path) -> Result { + // Lock the source tree, if necessary. + let _lock = self.acquire_lock().await?; + // Write the hook output to a file so that we can read it back reliably. let outfile = self .temp_dir @@ -733,7 +771,7 @@ impl SourceBuild { BuildKind::Sdist => { debug!( r#"Calling `{}.build_{}("{}", {})`"#, - pep517_backend.backend, + self.pep517_backend.backend, self.build_kind, output_dir.escape_for_python(), self.config_settings.escape_for_python(), @@ -746,7 +784,7 @@ impl SourceBuild { with open("{}", "w") as fp: fp.write(sdist_filename) "#, - pep517_backend.backend_import(), + self.pep517_backend.backend_import(), self.build_kind, output_dir.escape_for_python(), self.config_settings.escape_for_python(), @@ -762,7 +800,7 @@ impl SourceBuild { }); debug!( r#"Calling `{}.build_{}("{}", {}, {})`"#, - pep517_backend.backend, + self.pep517_backend.backend, self.build_kind, output_dir.escape_for_python(), self.config_settings.escape_for_python(), @@ -776,7 +814,7 @@ impl SourceBuild { with open("{}", "w") as fp: fp.write(wheel_filename) "#, - pep517_backend.backend_import(), + self.pep517_backend.backend_import(), self.build_kind, output_dir.escape_for_python(), self.config_settings.escape_for_python(), @@ -806,7 +844,7 @@ impl SourceBuild { return Err(Error::from_command_output( format!( "Call to `{}.build_{}` failed", - pep517_backend.backend, self.build_kind + self.pep517_backend.backend, self.build_kind ), &output, self.level, @@ -821,7 +859,7 @@ impl SourceBuild { return Err(Error::from_command_output( format!( "Call to `{}.build_{}` failed", - pep517_backend.backend, self.build_kind + self.pep517_backend.backend, self.build_kind ), &output, self.level, diff --git a/crates/uv-build/Cargo.toml b/crates/uv-build/Cargo.toml index d380520cf..ffbea0ea9 100644 --- a/crates/uv-build/Cargo.toml +++ b/crates/uv-build/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv-build" -version = "0.7.6" +version = "0.7.20" edition.workspace = true rust-version.workspace = true homepage.workspace = true diff --git a/crates/uv-build/pyproject.toml b/crates/uv-build/pyproject.toml index b86157079..13c21edd8 100644 --- a/crates/uv-build/pyproject.toml +++ b/crates/uv-build/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "uv-build" -version = "0.7.6" +version = "0.7.20" description = "The uv build backend" authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }] requires-python = ">=3.8" diff --git a/crates/uv-build/ruff.toml b/crates/uv-build/ruff.toml new file mode 100644 index 000000000..e480507a2 --- /dev/null +++ b/crates/uv-build/ruff.toml @@ -0,0 +1,2 @@ +# It is important retain compatibility with old versions in the build backend +target-version = "py37" diff --git a/crates/uv-cache-info/src/cache_info.rs 
b/crates/uv-cache-info/src/cache_info.rs index ce98cc513..27a98ab54 100644 --- a/crates/uv-cache-info/src/cache_info.rs +++ b/crates/uv-cache-info/src/cache_info.rs @@ -7,6 +7,7 @@ use serde::Deserialize; use tracing::{debug, warn}; use crate::git_info::{Commit, Tags}; +use crate::glob::cluster_globs; use crate::timestamp::Timestamp; #[derive(Debug, thiserror::Error)] @@ -212,34 +213,39 @@ impl CacheInfo { } } - // If we have any globs, process them in a single pass. + // If we have any globs, first cluster them using LCP and then do a single pass on each group. if !globs.is_empty() { - let walker = globwalk::GlobWalkerBuilder::from_patterns(directory, &globs) + for (glob_base, glob_patterns) in cluster_globs(&globs) { + let walker = globwalk::GlobWalkerBuilder::from_patterns( + directory.join(glob_base), + &glob_patterns, + ) .file_type(globwalk::FileType::FILE | globwalk::FileType::SYMLINK) .build()?; - for entry in walker { - let entry = match entry { - Ok(entry) => entry, - Err(err) => { - warn!("Failed to read glob entry: {err}"); + for entry in walker { + let entry = match entry { + Ok(entry) => entry, + Err(err) => { + warn!("Failed to read glob entry: {err}"); + continue; + } + }; + let metadata = match entry.metadata() { + Ok(metadata) => metadata, + Err(err) => { + warn!("Failed to read metadata for glob entry: {err}"); + continue; + } + }; + if !metadata.is_file() { + warn!( + "Expected file for cache key, but found directory: `{}`", + entry.path().display() + ); continue; } - }; - let metadata = match entry.metadata() { - Ok(metadata) => metadata, - Err(err) => { - warn!("Failed to read metadata for glob entry: {err}"); - continue; - } - }; - if !metadata.is_file() { - warn!( - "Expected file for cache key, but found directory: `{}`", - entry.path().display() - ); - continue; + timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata))); } - timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata))); } } diff --git a/crates/uv-cache-info/src/glob.rs b/crates/uv-cache-info/src/glob.rs new file mode 100644 index 000000000..e9c85897f --- /dev/null +++ b/crates/uv-cache-info/src/glob.rs @@ -0,0 +1,318 @@ +use std::{ + collections::BTreeMap, + path::{Component, Components, Path, PathBuf}, +}; + +/// Check if a component of the path looks like it may be a glob pattern. +/// +/// Note: this function is being used when splitting a glob pattern into a long possible +/// base and the glob remainder (scanning through components until we hit the first component +/// for which this function returns true). It is acceptable for this function to return +/// false positives (e.g. patterns like 'foo[bar' or 'foo{bar') in which case correctness +/// will not be affected but efficiency might be (because we'll traverse more than we should), +/// however it should not return false negatives. +fn is_glob_like(part: Component) -> bool { + matches!(part, Component::Normal(_)) + && part.as_os_str().to_str().is_some_and(|part| { + ["*", "{", "}", "?", "[", "]"] + .into_iter() + .any(|c| part.contains(c)) + }) +} + +#[derive(Debug, Default, Clone, PartialEq, Eq)] +struct GlobParts { + base: PathBuf, + pattern: PathBuf, +} + +/// Split a glob into longest possible base + shortest possible glob pattern. 
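For intuition: `split_glob` peels off the longest glob-free prefix as the base, and `cluster_globs` below then merges patterns by longest common base so that each directory is walked at most once. Expected shapes, taken directly from the unit tests later in this file (comment-only sketch, since both helpers are private to the crate):

    // split_glob("c/d/*")     => base: "c/d", pattern: "*"
    // split_glob("a/{b,c}/d") => base: "a",   pattern: "{b,c}/d"
    //
    // cluster_globs(&["a/b/*", "a/c/*"])
    //     => [("a/b", ["*"]), ("a/c", ["*"])]
    // cluster_globs(&["docs/important/*.{doc,xls}", "docs/important/very/*"])
    //     => [("docs/important", ["*.{doc,xls}", "very/*"])]
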
+fn split_glob(pattern: impl AsRef) -> GlobParts { + let pattern: &Path = pattern.as_ref().as_ref(); + + let mut glob = GlobParts::default(); + let mut globbing = false; + let mut last = None; + + for part in pattern.components() { + if let Some(last) = last { + if last != Component::CurDir { + if globbing { + glob.pattern.push(last); + } else { + glob.base.push(last); + } + } + } + if !globbing { + globbing = is_glob_like(part); + } + // we don't know if this part is the last one, defer handling it by one iteration + last = Some(part); + } + + if let Some(last) = last { + // defer handling the last component to prevent draining entire pattern into base + if globbing || matches!(last, Component::Normal(_)) { + glob.pattern.push(last); + } else { + glob.base.push(last); + } + } + glob +} + +/// Classic trie with edges being path components and values being glob patterns. +#[derive(Default)] +struct Trie<'a> { + children: BTreeMap, Trie<'a>>, + patterns: Vec<&'a Path>, +} + +impl<'a> Trie<'a> { + fn insert(&mut self, mut components: Components<'a>, pattern: &'a Path) { + if let Some(part) = components.next() { + self.children + .entry(part) + .or_default() + .insert(components, pattern); + } else { + self.patterns.push(pattern); + } + } + + #[allow(clippy::needless_pass_by_value)] + fn collect_patterns( + &self, + pattern_prefix: PathBuf, + group_prefix: PathBuf, + patterns: &mut Vec, + groups: &mut Vec<(PathBuf, Vec)>, + ) { + // collect all patterns beneath and including this node + for pattern in &self.patterns { + patterns.push(pattern_prefix.join(pattern)); + } + for (part, child) in &self.children { + if let Component::Normal(_) = part { + // for normal components, collect all descendant patterns ('normal' edges only) + child.collect_patterns( + pattern_prefix.join(part), + group_prefix.join(part), + patterns, + groups, + ); + } else { + // for non-normal component edges, kick off separate group collection at this node + child.collect_groups(group_prefix.join(part), groups); + } + } + } + + #[allow(clippy::needless_pass_by_value)] + fn collect_groups(&self, prefix: PathBuf, groups: &mut Vec<(PathBuf, Vec)>) { + // LCP-style grouping of patterns + if self.patterns.is_empty() { + // no patterns in this node; child nodes can form independent groups + for (part, child) in &self.children { + child.collect_groups(prefix.join(part), groups); + } + } else { + // pivot point, we've hit a pattern node; we have to stop here and form a group + let mut group = Vec::new(); + self.collect_patterns(PathBuf::new(), prefix.clone(), &mut group, groups); + groups.push((prefix, group)); + } + } +} + +/// Given a collection of globs, cluster them into (base, globs) groups so that: +/// - base doesn't contain any glob symbols +/// - each directory would only be walked at most once +/// - base of each group is the longest common prefix of globs in the group +pub(crate) fn cluster_globs(patterns: &[impl AsRef]) -> Vec<(PathBuf, Vec)> { + // split all globs into base/pattern + let globs: Vec<_> = patterns.iter().map(split_glob).collect(); + + // construct a path trie out of all split globs + let mut trie = Trie::default(); + for glob in &globs { + trie.insert(glob.base.components(), &glob.pattern); + } + + // run LCP-style aggregation of patterns in the trie into groups + let mut groups = Vec::new(); + trie.collect_groups(PathBuf::new(), &mut groups); + + // finally, convert resulting patterns to strings + groups + .into_iter() + .map(|(base, patterns)| { + ( + base, + patterns + .iter() + // NOTE: this unwrap 
is ok because input patterns are valid utf-8 + .map(|p| p.to_str().unwrap().to_owned()) + .collect(), + ) + }) + .collect() +} + +#[cfg(test)] +mod tests { + use super::{GlobParts, cluster_globs, split_glob}; + + fn windowsify(path: &str) -> String { + if cfg!(windows) { + path.replace('/', "\\") + } else { + path.to_owned() + } + } + + #[test] + fn test_split_glob() { + #[track_caller] + fn check(input: &str, base: &str, pattern: &str) { + let result = split_glob(input); + let expected = GlobParts { + base: base.into(), + pattern: pattern.into(), + }; + assert_eq!(result, expected, "{input:?} != {base:?} + {pattern:?}"); + } + + check("", "", ""); + check("a", "", "a"); + check("a/b", "a", "b"); + check("a/b/", "a", "b"); + check("a/.//b/", "a", "b"); + check("./a/b/c", "a/b", "c"); + check("c/d/*", "c/d", "*"); + check("c/d/*/../*", "c/d", "*/../*"); + check("a/?b/c", "a", "?b/c"); + check("/a/b/*", "/a/b", "*"); + check("../x/*", "../x", "*"); + check("a/{b,c}/d", "a", "{b,c}/d"); + check("a/[bc]/d", "a", "[bc]/d"); + check("*", "", "*"); + check("*/*", "", "*/*"); + check("..", "..", ""); + check("/", "/", ""); + } + + #[test] + fn test_cluster_globs() { + #[track_caller] + fn check(input: &[&str], expected: &[(&str, &[&str])]) { + let input = input.iter().map(|s| windowsify(s)).collect::>(); + + let mut result_sorted = cluster_globs(&input); + for (_, patterns) in &mut result_sorted { + patterns.sort_unstable(); + } + result_sorted.sort_unstable(); + + let mut expected_sorted = Vec::new(); + for (base, patterns) in expected { + let mut patterns_sorted = Vec::new(); + for pattern in *patterns { + patterns_sorted.push(windowsify(pattern)); + } + patterns_sorted.sort_unstable(); + expected_sorted.push((windowsify(base).into(), patterns_sorted)); + } + expected_sorted.sort_unstable(); + + assert_eq!( + result_sorted, expected_sorted, + "{input:?} != {expected_sorted:?} (got: {result_sorted:?})" + ); + } + + check(&["a/b/*", "a/c/*"], &[("a/b", &["*"]), ("a/c", &["*"])]); + check(&["./a/b/*", "a/c/*"], &[("a/b", &["*"]), ("a/c", &["*"])]); + check(&["/a/b/*", "/a/c/*"], &[("/a/b", &["*"]), ("/a/c", &["*"])]); + check( + &["../a/b/*", "../a/c/*"], + &[("../a/b", &["*"]), ("../a/c", &["*"])], + ); + check(&["x/*", "y/*"], &[("x", &["*"]), ("y", &["*"])]); + check(&[], &[]); + check( + &["./*", "a/*", "../foo/*.png"], + &[("", &["*", "a/*"]), ("../foo", &["*.png"])], + ); + check( + &[ + "?", + "/foo/?", + "/foo/bar/*", + "../bar/*.png", + "../bar/../baz/*.jpg", + ], + &[ + ("", &["?"]), + ("/foo", &["?", "bar/*"]), + ("../bar", &["*.png"]), + ("../bar/../baz", &["*.jpg"]), + ], + ); + check(&["/abs/path/*"], &[("/abs/path", &["*"])]); + check(&["/abs/*", "rel/*"], &[("/abs", &["*"]), ("rel", &["*"])]); + check(&["a/{b,c}/*", "a/d?/*"], &[("a", &["{b,c}/*", "d?/*"])]); + check( + &[ + "../shared/a/[abc].png", + "../shared/a/b/*", + "../shared/b/c/?x/d", + "docs/important/*.{doc,xls}", + "docs/important/very/*", + ], + &[ + ("../shared/a", &["[abc].png", "b/*"]), + ("../shared/b/c", &["?x/d"]), + ("docs/important", &["*.{doc,xls}", "very/*"]), + ], + ); + check(&["file.txt"], &[("", &["file.txt"])]); + check(&["/"], &[("/", &[""])]); + check(&[".."], &[("..", &[""])]); + check( + &["file1.txt", "file2.txt"], + &[("", &["file1.txt", "file2.txt"])], + ); + check( + &["a/file1.txt", "a/file2.txt"], + &[("a", &["file1.txt", "file2.txt"])], + ); + check( + &["*", "a/b/*", "a/../c/*.jpg", "a/../c/*.png", "/a/*", "/b/*"], + &[ + ("", &["*", "a/b/*"]), + ("a/../c", &["*.jpg", "*.png"]), + ("/a", 
&["*"]), + ("/b", &["*"]), + ], + ); + + if cfg!(windows) { + check( + &[ + r"\\foo\bar\shared/a/[abc].png", + r"\\foo\bar\shared/a/b/*", + r"\\foo\bar/shared/b/c/?x/d", + r"D:\docs\important/*.{doc,xls}", + r"D:\docs/important/very/*", + ], + &[ + (r"\\foo\bar\shared\a", &["[abc].png", r"b\*"]), + (r"\\foo\bar\shared\b\c", &[r"?x\d"]), + (r"D:\docs\important", &["*.{doc,xls}", r"very\*"]), + ], + ); + } + } +} diff --git a/crates/uv-cache-info/src/lib.rs b/crates/uv-cache-info/src/lib.rs index 286411f68..092d40652 100644 --- a/crates/uv-cache-info/src/lib.rs +++ b/crates/uv-cache-info/src/lib.rs @@ -3,4 +3,5 @@ pub use crate::timestamp::*; mod cache_info; mod git_info; +mod glob; mod timestamp; diff --git a/crates/uv-cache-key/Cargo.toml b/crates/uv-cache-key/Cargo.toml index a50f3ca65..931a24db3 100644 --- a/crates/uv-cache-key/Cargo.toml +++ b/crates/uv-cache-key/Cargo.toml @@ -17,6 +17,8 @@ doctest = false workspace = true [dependencies] +uv-redacted = { workspace = true } + hex = { workspace = true } memchr = { workspace = true } percent-encoding = { workspace = true } diff --git a/crates/uv-cache-key/src/canonical_url.rs b/crates/uv-cache-key/src/canonical_url.rs index 50300f666..19f5a3d7c 100644 --- a/crates/uv-cache-key/src/canonical_url.rs +++ b/crates/uv-cache-key/src/canonical_url.rs @@ -4,6 +4,7 @@ use std::hash::{Hash, Hasher}; use std::ops::Deref; use url::Url; +use uv_redacted::DisplaySafeUrl; use crate::cache_key::{CacheKey, CacheKeyHasher}; @@ -16,10 +17,10 @@ use crate::cache_key::{CacheKey, CacheKeyHasher}; /// string value of the `Url` it contains. This is intentional, because all fetching should still /// happen within the context of the original URL. #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] -pub struct CanonicalUrl(Url); +pub struct CanonicalUrl(DisplaySafeUrl); impl CanonicalUrl { - pub fn new(url: &Url) -> Self { + pub fn new(url: &DisplaySafeUrl) -> Self { let mut url = url.clone(); // If the URL cannot be a base, then it's not a valid URL anyway. @@ -42,8 +43,8 @@ impl CanonicalUrl { // almost certainly not using the same case conversion rules that GitHub // does. (See issue #84) if url.host_str() == Some("github.com") { - url.set_scheme(url.scheme().to_lowercase().as_str()) - .unwrap(); + let scheme = url.scheme().to_lowercase(); + url.set_scheme(&scheme).unwrap(); let path = url.path().to_lowercase(); url.set_path(&path); } @@ -56,7 +57,8 @@ impl CanonicalUrl { .is_some_and(|ext| ext.eq_ignore_ascii_case("git")); if needs_chopping { let prefix = &prefix[..prefix.len() - 4]; - url.set_path(&format!("{prefix}@{suffix}")); + let path = format!("{prefix}@{suffix}"); + url.set_path(&path); } } else { // Ex) `git+https://github.com/pypa/sample-namespace-packages.git` @@ -97,7 +99,7 @@ impl CanonicalUrl { } pub fn parse(url: &str) -> Result { - Ok(Self::new(&Url::parse(url)?)) + Ok(Self::new(&DisplaySafeUrl::parse(url)?)) } } @@ -117,7 +119,7 @@ impl Hash for CanonicalUrl { } } -impl From for Url { +impl From for DisplaySafeUrl { fn from(value: CanonicalUrl) -> Self { value.0 } @@ -138,10 +140,10 @@ impl std::fmt::Display for CanonicalUrl { /// [`CanonicalUrl`] values, but the same [`RepositoryUrl`], since they map to the same /// resource. 
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] -pub struct RepositoryUrl(Url); +pub struct RepositoryUrl(DisplaySafeUrl); impl RepositoryUrl { - pub fn new(url: &Url) -> Self { + pub fn new(url: &DisplaySafeUrl) -> Self { let mut url = CanonicalUrl::new(url).0; // If a Git URL ends in a reference (like a branch, tag, or commit), remove it. @@ -163,7 +165,7 @@ impl RepositoryUrl { } pub fn parse(url: &str) -> Result { - Ok(Self::new(&Url::parse(url)?)) + Ok(Self::new(&DisplaySafeUrl::parse(url)?)) } } diff --git a/crates/uv-cache/Cargo.toml b/crates/uv-cache/Cargo.toml index a3a9ab76b..779309f0f 100644 --- a/crates/uv-cache/Cargo.toml +++ b/crates/uv-cache/Cargo.toml @@ -24,6 +24,7 @@ uv-distribution-types = { workspace = true } uv-fs = { workspace = true, features = ["tokio"] } uv-normalize = { workspace = true } uv-pypi-types = { workspace = true } +uv-redacted = { workspace = true } uv-static = { workspace = true } clap = { workspace = true, features = ["derive", "env"], optional = true } @@ -35,5 +36,4 @@ same-file = { workspace = true } serde = { workspace = true, features = ["derive"] } tempfile = { workspace = true } tracing = { workspace = true } -url = { workspace = true } walkdir = { workspace = true } diff --git a/crates/uv-cache/src/lib.rs b/crates/uv-cache/src/lib.rs index 18407a2b6..af28bb26c 100644 --- a/crates/uv-cache/src/lib.rs +++ b/crates/uv-cache/src/lib.rs @@ -6,7 +6,7 @@ use std::path::{Path, PathBuf}; use std::str::FromStr; use std::sync::Arc; -use rustc_hash::FxHashSet; +use rustc_hash::FxHashMap; use tracing::debug; pub use archive::ArchiveId; @@ -375,7 +375,7 @@ impl Cache { /// Returns the number of entries removed from the cache. pub fn remove(&self, name: &PackageName) -> Result { // Collect the set of referenced archives. - let before = self.find_archive_references()?; + let references = self.find_archive_references()?; // Remove any entries for the package from the cache. let mut summary = Removal::default(); @@ -383,18 +383,11 @@ impl Cache { summary += bucket.remove(self, name)?; } - // Collect the set of referenced archives after the removal. - let after = self.find_archive_references()?; - - if before != after { - // Remove any archives that are no longer referenced. - for entry in fs_err::read_dir(self.bucket(CacheBucket::Archive))? { - let entry = entry?; - let path = fs_err::canonicalize(entry.path())?; - if !after.contains(&path) && before.contains(&path) { - debug!("Removing dangling cache entry: {}", path.display()); - summary += rm_rf(path)?; - } + // Remove any archives that are no longer referenced. + for (target, references) in references { + if references.iter().all(|path| !path.exists()) { + debug!("Removing dangling cache entry: {}", target.display()); + summary += rm_rf(target)?; } } @@ -513,7 +506,7 @@ impl Cache { for entry in entries { let entry = entry?; let path = fs_err::canonicalize(entry.path())?; - if !references.contains(&path) { + if !references.contains_key(&path) { debug!("Removing dangling cache archive: {}", path.display()); summary += rm_rf(path)?; } @@ -530,33 +523,52 @@ impl Cache { /// /// Archive entries are often referenced by symlinks in other cache buckets. This method /// searches for all such references. - fn find_archive_references(&self) -> Result, io::Error> { - let mut references = FxHashSet::default(); - for bucket in CacheBucket::iter() { - // As an optimization, skip the archive bucket itself. 
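The reworked cleanup above reduces to one rule: an archive is dangling once every entry that pointed at it has been removed. A standalone sketch of that check (types simplified; the real code tallies removed bytes into a `Removal` via `rm_rf`):

    use std::collections::HashMap;
    use std::path::PathBuf;

    /// Remove every archive whose referencing cache entries no longer exist.
    fn remove_dangling(references: HashMap<PathBuf, Vec<PathBuf>>) -> std::io::Result<()> {
        for (archive, links) in references {
            if links.iter().all(|link| !link.exists()) {
                fs_err::remove_dir_all(&archive)?;
            }
        }
        Ok(())
    }
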
- if matches!(bucket, CacheBucket::Archive) { - continue; - } - + /// + /// Returns a map from archive path to paths that reference it. + fn find_archive_references(&self) -> Result>, io::Error> { + let mut references = FxHashMap::>::default(); + for bucket in [CacheBucket::SourceDistributions, CacheBucket::Wheels] { let bucket_path = self.bucket(bucket); if bucket_path.is_dir() { - for entry in walkdir::WalkDir::new(bucket_path) { + let walker = walkdir::WalkDir::new(&bucket_path).into_iter(); + for entry in walker.filter_entry(|entry| { + !( + // As an optimization, ignore any `.lock`, `.whl`, `.msgpack`, `.rev`, or + // `.http` files, along with the `src` directory, which represents the + // unpacked source distribution. + entry.file_name() == "src" + || entry.file_name() == ".lock" + || entry.file_name() == ".gitignore" + || entry.path().extension().is_some_and(|ext| { + ext.eq_ignore_ascii_case("lock") + || ext.eq_ignore_ascii_case("whl") + || ext.eq_ignore_ascii_case("http") + || ext.eq_ignore_ascii_case("rev") + || ext.eq_ignore_ascii_case("msgpack") + }) + ) + }) { let entry = entry?; - // As an optimization, ignore any `.lock`, `.whl`, `.msgpack`, `.rev`, or - // `.http` files. - if entry.path().extension().is_some_and(|ext| { - ext.eq_ignore_ascii_case("lock") - || ext.eq_ignore_ascii_case("whl") - || ext.eq_ignore_ascii_case("http") - || ext.eq_ignore_ascii_case("rev") - || ext.eq_ignore_ascii_case("msgpack") - }) { - continue; + // On Unix, archive references use symlinks. + if cfg!(unix) { + if !entry.file_type().is_symlink() { + continue; + } + } + + // On Windows, archive references are files containing structured data. + if cfg!(windows) { + if !entry.file_type().is_file() { + continue; + } } if let Ok(target) = self.resolve_link(entry.path()) { - references.insert(target); + references + .entry(target) + .or_default() + .push(entry.path().to_path_buf()); } } } diff --git a/crates/uv-cache/src/wheel.rs b/crates/uv-cache/src/wheel.rs index d00a2895c..76103f0ff 100644 --- a/crates/uv-cache/src/wheel.rs +++ b/crates/uv-cache/src/wheel.rs @@ -1,9 +1,8 @@ use std::path::{Path, PathBuf}; -use url::Url; - use uv_cache_key::{CanonicalUrl, cache_digest}; use uv_distribution_types::IndexUrl; +use uv_redacted::DisplaySafeUrl; /// Cache wheels and their metadata, both from remote wheels and built from source distributions. #[derive(Debug, Clone)] @@ -11,16 +10,16 @@ pub enum WheelCache<'a> { /// Either PyPI or an alternative index, which we key by index URL. Index(&'a IndexUrl), /// A direct URL dependency, which we key by URL. - Url(&'a Url), + Url(&'a DisplaySafeUrl), /// A path dependency, which we key by URL. - Path(&'a Url), + Path(&'a DisplaySafeUrl), /// An editable dependency, which we key by URL. - Editable(&'a Url), + Editable(&'a DisplaySafeUrl), /// A Git dependency, which we key by URL and SHA. /// /// Note that this variant only exists for source distributions; wheels can't be delivered /// through Git. 
- Git(&'a Url, &'a str), + Git(&'a DisplaySafeUrl, &'a str), } impl WheelCache<'_> { @@ -30,7 +29,7 @@ impl WheelCache<'_> { WheelCache::Index(IndexUrl::Pypi(_)) => WheelCacheKind::Pypi.root(), WheelCache::Index(url) => WheelCacheKind::Index .root() - .join(cache_digest(&CanonicalUrl::new(url))), + .join(cache_digest(&CanonicalUrl::new(url.url()))), WheelCache::Url(url) => WheelCacheKind::Url .root() .join(cache_digest(&CanonicalUrl::new(url))), diff --git a/crates/uv-cli/Cargo.toml b/crates/uv-cli/Cargo.toml index debc21a74..fa8453662 100644 --- a/crates/uv-cli/Cargo.toml +++ b/crates/uv-cli/Cargo.toml @@ -25,12 +25,14 @@ uv-normalize = { workspace = true } uv-pep508 = { workspace = true } uv-pypi-types = { workspace = true } uv-python = { workspace = true, features = ["clap", "schemars"]} +uv-redacted = { workspace = true } uv-resolver = { workspace = true, features = ["clap"] } uv-settings = { workspace = true, features = ["schemars"] } uv-static = { workspace = true } uv-torch = { workspace = true, features = ["clap"] } uv-version = { workspace = true } uv-warnings = { workspace = true } +uv-workspace = { workspace = true } anstream = { workspace = true } anyhow = { workspace = true } diff --git a/crates/uv-cli/src/compat.rs b/crates/uv-cli/src/compat.rs index 50f4c173d..d29afa760 100644 --- a/crates/uv-cli/src/compat.rs +++ b/crates/uv-cli/src/compat.rs @@ -13,7 +13,6 @@ pub trait CompatArgs { /// For example, users often pass `--allow-unsafe`, which is unnecessary with uv. But it's a /// nice user experience to warn, rather than fail, when users pass `--allow-unsafe`. #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipCompileCompatArgs { #[clap(long, hide = true)] allow_unsafe: bool, @@ -159,7 +158,6 @@ impl CompatArgs for PipCompileCompatArgs { /// /// These represent a subset of the `pip list` interface that uv supports by default. #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipListCompatArgs { #[clap(long, hide = true)] disable_pip_version_check: bool, @@ -184,7 +182,6 @@ impl CompatArgs for PipListCompatArgs { /// /// These represent a subset of the `pip-sync` interface that uv supports by default. #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipSyncCompatArgs { #[clap(short, long, hide = true)] ask: bool, @@ -268,7 +265,6 @@ enum Resolver { /// /// These represent a subset of the `virtualenv` interface that uv supports by default. #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct VenvCompatArgs { #[clap(long, hide = true)] clear: bool, @@ -327,7 +323,6 @@ impl CompatArgs for VenvCompatArgs { /// /// These represent a subset of the `pip install` interface that uv supports by default. #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipInstallCompatArgs { #[clap(long, hide = true)] disable_pip_version_check: bool, @@ -361,7 +356,6 @@ impl CompatArgs for PipInstallCompatArgs { /// /// These represent a subset of the `pip` interface that exists on all commands. 
#[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipGlobalCompatArgs { #[clap(long, hide = true)] disable_pip_version_check: bool, diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index 039d9ccf0..056447959 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -8,7 +8,6 @@ use clap::builder::Styles; use clap::builder::styling::{AnsiColor, Effects, Style}; use clap::{Args, Parser, Subcommand}; -use url::Url; use uv_cache::CacheArgs; use uv_configuration::{ ConfigSettingEntry, ExportFormat, IndexStrategy, KeyringProviderType, PackageNameSpecifier, @@ -19,9 +18,11 @@ use uv_normalize::{ExtraName, GroupName, PackageName, PipGroupName}; use uv_pep508::{MarkerTree, Requirement}; use uv_pypi_types::VerbatimParsedUrl; use uv_python::{PythonDownloads, PythonPreference, PythonVersion}; +use uv_redacted::DisplaySafeUrl; use uv_resolver::{AnnotationStyle, ExcludeNewer, ForkStrategy, PrereleaseMode, ResolutionMode}; use uv_static::EnvVars; use uv_torch::TorchMode; +use uv_workspace::pyproject_mut::AddBoundsKind; pub mod comma; pub mod compat; @@ -84,7 +85,6 @@ const STYLES: Styles = Styles::styled() disable_version_flag = true )] #[command(styles=STYLES)] -#[allow(clippy::struct_excessive_bools)] pub struct Cli { #[command(subcommand)] pub command: Box, @@ -132,7 +132,6 @@ pub struct TopLevelArgs { #[derive(Parser, Debug, Clone)] #[command(next_help_heading = "Global options", next_display_order = 1000)] -#[allow(clippy::struct_excessive_bools)] pub struct GlobalArgs { #[arg( global = true, @@ -493,8 +492,6 @@ pub enum Commands { /// Clear the cache, removing all entries or those linked to specific packages. #[command(hide = true)] Clean(CleanArgs), - /// Read or update the project's version. - Version(VersionArgs), /// Generate shell completion #[command(alias = "--generate-shell-completion", hide = true)] GenerateShellCompletion(GenerateShellCompletionArgs), @@ -525,7 +522,7 @@ pub struct HelpArgs { pub command: Option>, } -#[derive(Args, Debug)] +#[derive(Args)] #[command(group = clap::ArgGroup::new("operation"))] pub struct VersionArgs { /// Set the project version to this value @@ -533,31 +530,136 @@ pub struct VersionArgs { /// To update the project using semantic versioning components instead, use `--bump`. #[arg(group = "operation")] pub value: Option, + /// Update the project version using the given semantics + /// + /// This flag can be passed multiple times. #[arg(group = "operation", long)] - pub bump: Option, + pub bump: Vec, + /// Don't write a new version to the `pyproject.toml` /// /// Instead, the version will be displayed. #[arg(long)] pub dry_run: bool, + /// Only show the version /// /// By default, uv will show the project name before the version. #[arg(long)] pub short: bool, + + /// The format of the output #[arg(long, value_enum, default_value = "text")] pub output_format: VersionFormat, + + /// Avoid syncing the virtual environment after re-locking the project. + #[arg(long, env = EnvVars::UV_NO_SYNC, value_parser = clap::builder::BoolishValueParser::new(), conflicts_with = "frozen")] + pub no_sync: bool, + + /// Prefer the active virtual environment over the project's virtual environment. + /// + /// If the project virtual environment is active or no virtual environment is active, this has + /// no effect. + #[arg(long, overrides_with = "no_active")] + pub active: bool, + + /// Prefer project's virtual environment over an active environment. + /// + /// This is the default behavior. 
+ #[arg(long, overrides_with = "active", hide = true)] + pub no_active: bool, + + /// Assert that the `uv.lock` will remain unchanged. + /// + /// Requires that the lockfile is up-to-date. If the lockfile is missing or needs to be updated, + /// uv will exit with an error. + #[arg(long, env = EnvVars::UV_LOCKED, value_parser = clap::builder::BoolishValueParser::new(), conflicts_with_all = ["frozen", "upgrade"])] + pub locked: bool, + + /// Update the version without re-locking the project. + /// + /// The project environment will not be synced. + #[arg(long, env = EnvVars::UV_FROZEN, value_parser = clap::builder::BoolishValueParser::new(), conflicts_with_all = ["locked", "upgrade", "no_sources"])] + pub frozen: bool, + + #[command(flatten)] + pub installer: ResolverInstallerArgs, + + #[command(flatten)] + pub build: BuildOptionsArgs, + + #[command(flatten)] + pub refresh: RefreshArgs, + + /// Update the version of a specific package in the workspace. + #[arg(long, conflicts_with = "isolated")] + pub package: Option, + + /// The Python interpreter to use for resolving and syncing. + /// + /// See `uv help python` for details on Python discovery and supported request formats. + #[arg( + long, + short, + env = EnvVars::UV_PYTHON, + verbatim_doc_comment, + help_heading = "Python options", + value_parser = parse_maybe_string, + )] + pub python: Option>, } -#[derive(Debug, Copy, Clone, PartialEq, clap::ValueEnum)] +// Note that the ordering of the variants is significant, as when given a list of operations +// to perform, we sort them and apply them in order, so users don't have to think too hard about it. +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, clap::ValueEnum)] pub enum VersionBump { - /// Increase the major version (1.2.3 => 2.0.0) + /// Increase the major version (e.g., 1.2.3 => 2.0.0) Major, - /// Increase the minor version (1.2.3 => 1.3.0) + /// Increase the minor version (e.g., 1.2.3 => 1.3.0) Minor, - /// Increase the patch version (1.2.3 => 1.2.4) + /// Increase the patch version (e.g., 1.2.3 => 1.2.4) Patch, + /// Move from a pre-release to stable version (e.g., 1.2.3b4.post5.dev6 => 1.2.3) + /// + /// Removes all pre-release components, but will not remove "local" components. + Stable, + /// Increase the alpha version (e.g., 1.2.3a4 => 1.2.3a5) + /// + /// To move from a stable to a pre-release version, combine this with a stable component, e.g., + /// for 1.2.3 => 2.0.0a1, you'd also include [`VersionBump::Major`]. + Alpha, + /// Increase the beta version (e.g., 1.2.3b4 => 1.2.3b5) + /// + /// To move from a stable to a pre-release version, combine this with a stable component, e.g., + /// for 1.2.3 => 2.0.0b1, you'd also include [`VersionBump::Major`]. + Beta, + /// Increase the rc version (e.g., 1.2.3rc4 => 1.2.3rc5) + /// + /// To move from a stable to a pre-release version, combine this with a stable component, e.g., + /// for 1.2.3 => 2.0.0rc1, you'd also include [`VersionBump::Major`].] 
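As the note above says, the variant order doubles as the application order: when several `--bump` components are given they are sorted before being applied, so stable components land before pre-release ones no matter how the flags were typed. A tiny sketch of the derived ordering (the CLI behavior it enables, 1.2.3 with `--bump major --bump alpha` giving 2.0.0a1, is the example from the docs above):

    use uv_cli::VersionBump;

    fn main() {
        let mut bumps = vec![VersionBump::Alpha, VersionBump::Major];
        // The derived `Ord` follows declaration order, so Major sorts before Alpha.
        bumps.sort();
        assert_eq!(bumps, vec![VersionBump::Major, VersionBump::Alpha]);
    }
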
+ Rc, + /// Increase the post version (e.g., 1.2.3.post5 => 1.2.3.post6) + Post, + /// Increase the dev version (e.g., 1.2.3a4.dev6 => 1.2.3.dev7) + Dev, +} + +impl std::fmt::Display for VersionBump { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let string = match self { + VersionBump::Major => "major", + VersionBump::Minor => "minor", + VersionBump::Patch => "patch", + VersionBump::Stable => "stable", + VersionBump::Alpha => "alpha", + VersionBump::Beta => "beta", + VersionBump::Rc => "rc", + VersionBump::Post => "post", + VersionBump::Dev => "dev", + }; + string.fmt(f) + } } #[derive(Args)] @@ -596,7 +698,6 @@ pub struct SelfUpdateArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct CacheNamespace { #[command(subcommand)] pub command: CacheCommand, @@ -626,14 +727,12 @@ pub enum CacheCommand { } #[derive(Args, Debug)] -#[allow(clippy::struct_excessive_bools)] pub struct CleanArgs { /// The packages to remove from the cache. pub package: Vec, } #[derive(Args, Debug)] -#[allow(clippy::struct_excessive_bools)] pub struct PruneArgs { /// Optimize the cache for persistence in a continuous integration environment, like GitHub /// Actions. @@ -653,7 +752,6 @@ pub struct PruneArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipNamespace { #[command(subcommand)] pub command: PipCommand, @@ -776,10 +874,6 @@ pub enum ProjectCommand { /// it includes markers that differ from the existing specifier in which case another entry for /// the dependency will be added. /// - /// If no constraint or URL is provided for a dependency, a lower bound is added equal to the - /// latest compatible version of the package, e.g., `>=1.2.3`, unless `--frozen` is provided, in - /// which case no resolution is performed. - /// /// The lockfile and project environment will be updated to reflect the added dependencies. To /// skip updating the lockfile, use `--frozen`. To skip updating the environment, use /// `--no-sync`. @@ -819,6 +913,8 @@ pub enum ProjectCommand { after_long_help = "" )] Remove(RemoveArgs), + /// Read or update the project's version. + Version(VersionArgs), /// Update the project's environment. /// /// Syncing ensures that all project dependencies are installed and up-to-date with the @@ -1036,7 +1132,6 @@ fn parse_maybe_string(input: &str) -> Result, String> { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] #[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))] pub struct PipCompileArgs { /// Include all packages listed in the given `requirements.in` files. @@ -1384,7 +1479,6 @@ pub struct PipCompileArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipSyncArgs { /// Include all packages listed in the given `requirements.txt` files. /// @@ -1432,7 +1526,7 @@ pub struct PipSyncArgs { /// Hash-checking mode introduces a number of additional constraints: /// /// - Git dependencies are not supported. - /// - Editable installs are not supported. + /// - Editable installations are not supported. /// - Local dependencies are not supported, unless they point to a specific wheel (`.whl`) or /// source archive (`.zip`, `.tar.gz`), as opposed to a directory. #[arg( @@ -1641,7 +1735,6 @@ pub struct PipSyncArgs { #[derive(Args)] #[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))] -#[allow(clippy::struct_excessive_bools)] pub struct PipInstallArgs { /// Install all listed packages. 
/// @@ -1742,7 +1835,7 @@ pub struct PipInstallArgs { /// Hash-checking mode introduces a number of additional constraints: /// /// - Git dependencies are not supported. - /// - Editable installs are not supported. + /// - Editable installations are not supported. /// - Local dependencies are not supported, unless they point to a specific wheel (`.whl`) or /// source archive (`.zip`, `.tar.gz`), as opposed to a directory. #[arg( @@ -1956,7 +2049,6 @@ pub struct PipInstallArgs { #[derive(Args)] #[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))] -#[allow(clippy::struct_excessive_bools)] pub struct PipUninstallArgs { /// Uninstall all listed packages. #[arg(group = "sources")] @@ -2045,7 +2137,6 @@ pub struct PipUninstallArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipFreezeArgs { /// Exclude any editable packages from output. #[arg(long)] @@ -2100,7 +2191,6 @@ pub struct PipFreezeArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipListArgs { /// Only include editable projects. #[arg(short, long)] @@ -2176,7 +2266,6 @@ pub struct PipListArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipCheckArgs { /// The Python interpreter for which packages should be checked. /// @@ -2212,7 +2301,6 @@ pub struct PipCheckArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipShowArgs { /// The package(s) to display. pub package: Vec, @@ -2266,7 +2354,6 @@ pub struct PipShowArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipTreeArgs { /// Show the version constraint(s) imposed on each package. #[arg(long)] @@ -2323,7 +2410,6 @@ pub struct PipTreeArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct BuildArgs { /// The directory from which distributions should be built, or a source /// distribution archive to build into a wheel. @@ -2412,7 +2498,7 @@ pub struct BuildArgs { /// Hash-checking mode introduces a number of additional constraints: /// /// - Git dependencies are not supported. - /// - Editable installs are not supported. + /// - Editable installations are not supported. /// - Local dependencies are not supported, unless they point to a specific wheel (`.whl`) or /// source archive (`.zip`, `.tar.gz`), as opposed to a directory. #[arg( @@ -2470,7 +2556,6 @@ pub struct BuildArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct VenvArgs { /// The Python interpreter to use for the virtual environment. /// @@ -2666,7 +2751,6 @@ pub enum AuthorFrom { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct InitArgs { /// The path to use for the project/script. /// @@ -2824,7 +2908,6 @@ pub struct InitArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct RunArgs { /// Include optional dependencies from the specified extra name. /// @@ -2962,7 +3045,7 @@ pub struct RunArgs { /// When used in a project, these dependencies will be layered on top of the project environment /// in a separate, ephemeral environment. These dependencies are allowed to conflict with those /// specified by the project. - #[arg(long)] + #[arg(short = 'w', long)] pub with: Vec, /// Run with the given packages installed in editable mode. @@ -3111,7 +3194,6 @@ pub struct RunArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct SyncArgs { /// Include optional dependencies from the specified extra name. 
/// @@ -3357,6 +3439,23 @@ pub struct SyncArgs { )] pub python: Option>, + /// The platform for which requirements should be installed. + /// + /// Represented as a "target triple", a string that describes the target platform in terms of + /// its CPU, vendor, and operating system name, like `x86_64-unknown-linux-gnu` or + /// `aarch64-apple-darwin`. + /// + /// When targeting macOS (Darwin), the default minimum version is `12.0`. Use + /// `MACOSX_DEPLOYMENT_TARGET` to specify a different minimum version, e.g., `13.0`. + /// + /// WARNING: When specified, uv will select wheels that are compatible with the _target_ + /// platform; as a result, the installed distributions may not be compatible with the _current_ + /// platform. Conversely, any distributions that are built from source may be incompatible with + /// the _target_ platform, as they will be built for the _current_ platform. The + /// `--python-platform` option is intended for advanced use cases. + #[arg(long)] + pub python_platform: Option, + /// Check if the Python environment is synchronized with the project. /// /// If the environment is not up to date, uv will exit with an error. @@ -3368,7 +3467,6 @@ pub struct SyncArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct LockArgs { /// Check if the lockfile is up-to-date. /// @@ -3430,7 +3528,6 @@ pub struct LockArgs { #[derive(Args)] #[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))] -#[allow(clippy::struct_excessive_bools)] pub struct AddArgs { /// The packages to add, as PEP 508 requirements (e.g., `ruff==0.5.0`). #[arg(group = "sources")] @@ -3457,7 +3554,12 @@ pub struct AddArgs { /// Add the requirements to the development dependency group. /// /// This option is an alias for `--group dev`. - #[arg(long, conflicts_with("optional"), conflicts_with("group"))] + #[arg( + long, + conflicts_with("optional"), + conflicts_with("group"), + conflicts_with("script") + )] pub dev: bool, /// Add the requirements to the package's optional dependencies for the specified extra. @@ -3471,7 +3573,12 @@ pub struct AddArgs { /// Add the requirements to the specified dependency group. /// /// These requirements will not be included in the published metadata for the project. - #[arg(long, conflicts_with("dev"), conflicts_with("optional"))] + #[arg( + long, + conflicts_with("dev"), + conflicts_with("optional"), + conflicts_with("script") + )] pub group: Option, /// Add the requirements as editable. @@ -3500,6 +3607,19 @@ pub struct AddArgs { )] pub raw: bool, + /// The kind of version specifier to use when adding dependencies. + /// + /// When adding a dependency to the project, if no constraint or URL is provided, a constraint + /// is added based on the latest compatible version of the package. By default, a lower bound + /// constraint is used, e.g., `>=1.2.3`. + /// + /// When `--frozen` is provided, no resolution is performed, and dependencies are always added + /// without constraints. + /// + /// This option is in preview and may change in any future release. + #[arg(long, value_enum)] + pub bounds: Option, + /// Commit to use when adding a dependency from Git. 
#[arg(long, group = "git-ref", action = clap::ArgAction::Set)] pub rev: Option, @@ -3573,7 +3693,8 @@ pub struct AddArgs { long, conflicts_with = "dev", conflicts_with = "optional", - conflicts_with = "package" + conflicts_with = "package", + conflicts_with = "workspace" )] pub script: Option, @@ -3589,10 +3710,16 @@ pub struct AddArgs { value_parser = parse_maybe_string, )] pub python: Option>, + + /// Add the dependency as a workspace member. + /// + /// When used with a path dependency, the package will be added to the workspace's `members` + /// list in the root `pyproject.toml` file. + #[arg(long)] + pub workspace: bool, } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct RemoveArgs { /// The names of the dependencies to remove (e.g., `ruff`). #[arg(required = true)] @@ -3605,11 +3732,21 @@ pub struct RemoveArgs { pub dev: bool, /// Remove the packages from the project's optional dependencies for the specified extra. - #[arg(long, conflicts_with("dev"), conflicts_with("group"))] + #[arg( + long, + conflicts_with("dev"), + conflicts_with("group"), + conflicts_with("script") + )] pub optional: Option, /// Remove the packages from the specified dependency group. - #[arg(long, conflicts_with("dev"), conflicts_with("optional"))] + #[arg( + long, + conflicts_with("dev"), + conflicts_with("optional"), + conflicts_with("script") + )] pub group: Option, /// Avoid syncing the virtual environment after re-locking the project. @@ -3677,7 +3814,6 @@ pub struct RemoveArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct TreeArgs { /// Show a platform-independent dependency tree. /// @@ -3817,7 +3953,6 @@ pub struct TreeArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ExportArgs { /// The format to which `uv.lock` should be exported. /// @@ -4032,7 +4167,6 @@ pub struct ExportArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ToolNamespace { #[command(subcommand)] pub command: ToolCommand, @@ -4125,7 +4259,6 @@ pub enum ToolCommand { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ToolRunArgs { /// The command to run. /// @@ -4140,7 +4273,7 @@ pub struct ToolRunArgs { pub from: Option, /// Run with the given packages installed. - #[arg(long)] + #[arg(short = 'w', long)] pub with: Vec, /// Run with the given packages installed in editable mode @@ -4244,7 +4377,6 @@ pub struct UvxArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ToolInstallArgs { /// The package to install commands from. pub package: String, @@ -4256,7 +4388,7 @@ pub struct ToolInstallArgs { pub from: Option, /// Include the following additional requirements. - #[arg(long)] + #[arg(short = 'w', long)] pub with: Vec, /// Include all requirements listed in the given `requirements.txt` files. @@ -4333,7 +4465,6 @@ pub struct ToolInstallArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ToolListArgs { /// Whether to display the path to each tool environment and installed executable. #[arg(long)] @@ -4347,6 +4478,10 @@ pub struct ToolListArgs { #[arg(long)] pub show_with: bool, + /// Whether to display the extra requirements installed with each tool. + #[arg(long)] + pub show_extras: bool, + // Hide unused global Python options. 
#[arg(long, hide = true)] pub python_preference: Option, @@ -4356,7 +4491,6 @@ pub struct ToolListArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ToolDirArgs { /// Show the directory into which `uv tool` will install executables. /// @@ -4375,7 +4509,6 @@ pub struct ToolDirArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ToolUninstallArgs { /// The name of the tool to uninstall. #[arg(required = true)] @@ -4387,7 +4520,6 @@ pub struct ToolUninstallArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ToolUpgradeArgs { /// The name of the tool to upgrade, along with an optional version specifier. #[arg(required = true)] @@ -4617,7 +4749,6 @@ pub struct ToolUpgradeArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PythonNamespace { #[command(subcommand)] pub command: PythonCommand, @@ -4652,13 +4783,32 @@ pub enum PythonCommand { /// /// A `python` executable is not made globally available, managed Python versions are only used /// in uv commands or in active virtual environments. There is experimental support for adding - /// Python executables to the `PATH` — use the `--preview` flag to enable this behavior. + /// Python executables to a directory on the path — use the `--preview` flag to enable this + /// behavior and `uv python dir --bin` to retrieve the target directory. /// /// Multiple Python versions may be requested. /// /// See `uv help python` to view supported request formats. Install(PythonInstallArgs), + /// Upgrade installed Python versions to the latest supported patch release (requires the + /// `--preview` flag). + /// + /// A target Python minor version to upgrade may be provided, e.g., `3.13`. Multiple versions + /// may be provided to perform more than one upgrade. + /// + /// If no target version is provided, then uv will upgrade all managed CPython versions. + /// + /// During an upgrade, uv will not uninstall outdated patch versions. + /// + /// When an upgrade is performed, virtual environments created by uv will automatically + /// use the new version. However, if the virtual environment was created before the + /// upgrade functionality was added, it will continue to use the old Python version; to enable + /// upgrades, the environment must be recreated. + /// + /// Upgrades are not yet supported for alternative implementations, like PyPy. + Upgrade(PythonUpgradeArgs), + /// Search for a Python installation. /// /// Displays the path to the Python executable. @@ -4687,7 +4837,8 @@ pub enum PythonCommand { /// The Python installation directory may be overridden with `$UV_PYTHON_INSTALL_DIR`. /// /// To view the directory where uv installs Python executables instead, use the `--bin` flag. - /// Note that Python executables are only installed when preview mode is enabled. + /// The Python executable directory may be overridden with `$UV_PYTHON_BIN_DIR`. Note that + /// Python executables are only installed when preview mode is enabled. Dir(PythonDirArgs), /// Uninstall Python versions. @@ -4695,7 +4846,6 @@ pub enum PythonCommand { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PythonListArgs { /// A Python request to filter by. /// @@ -4750,7 +4900,6 @@ pub struct PythonListArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PythonDirArgs { /// Show the directory into which `uv python` will install Python executables. 
/// @@ -4768,7 +4917,6 @@ pub struct PythonDirArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PythonInstallArgs { /// The directory to store the Python installation in. /// @@ -4847,7 +4995,50 @@ pub struct PythonInstallArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] +pub struct PythonUpgradeArgs { + /// The directory Python installations are stored in. + /// + /// If provided, `UV_PYTHON_INSTALL_DIR` will need to be set for subsequent operations for uv to + /// discover the Python installation. + /// + /// See `uv python dir` to view the current Python installation directory. Defaults to + /// `~/.local/share/uv/python`. + #[arg(long, short, env = EnvVars::UV_PYTHON_INSTALL_DIR)] + pub install_dir: Option, + + /// The Python minor version(s) to upgrade. + /// + /// If no target version is provided, then uv will upgrade all managed CPython versions. + #[arg(env = EnvVars::UV_PYTHON)] + pub targets: Vec, + + /// Set the URL to use as the source for downloading Python installations. + /// + /// The provided URL will replace + /// `https://github.com/astral-sh/python-build-standalone/releases/download` in, e.g., + /// `https://github.com/astral-sh/python-build-standalone/releases/download/20240713/cpython-3.12.4%2B20240713-aarch64-apple-darwin-install_only.tar.gz`. + /// + /// Distributions can be read from a local directory by using the `file://` URL scheme. + #[arg(long, env = EnvVars::UV_PYTHON_INSTALL_MIRROR)] + pub mirror: Option, + + /// Set the URL to use as the source for downloading PyPy installations. + /// + /// The provided URL will replace `https://downloads.python.org/pypy` in, e.g., + /// `https://downloads.python.org/pypy/pypy3.8-v7.3.7-osx64.tar.bz2`. + /// + /// Distributions can be read from a local directory by using the `file://` URL scheme. + #[arg(long, env = EnvVars::UV_PYPY_INSTALL_MIRROR)] + pub pypy_mirror: Option, + + /// URL pointing to JSON of custom Python installations. + /// + /// Note that currently, only local paths are supported. + #[arg(long, env = EnvVars::UV_PYTHON_DOWNLOADS_JSON_URL)] + pub python_downloads_json_url: Option, +} + +#[derive(Args)] pub struct PythonUninstallArgs { /// The directory where the Python was installed. #[arg(long, short, env = EnvVars::UV_PYTHON_INSTALL_DIR)] @@ -4865,7 +5056,6 @@ pub struct PythonUninstallArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PythonFindArgs { /// The Python request. /// @@ -4914,7 +5104,6 @@ pub struct PythonFindArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PythonPinArgs { /// The Python version request. /// @@ -4954,15 +5143,15 @@ pub struct PythonPinArgs { /// /// When a local Python version pin is not found in the working directory or an ancestor /// directory, this version will be used instead. - /// - /// Unlike local version pins, this version is used as the default for commands that mutate - /// global state, like `uv tool install`. #[arg(long)] pub global: bool, + + /// Remove the Python version pin. 
+ #[arg(long, conflicts_with = "request", conflicts_with = "resolved")] + pub rm: bool, } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct GenerateShellCompletionArgs { /// The shell to generate the completion script for pub shell: clap_complete_command::Shell, @@ -5001,7 +5190,6 @@ pub struct GenerateShellCompletionArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct IndexArgs { /// The URLs to use when resolving dependencies, in addition to the default index. /// @@ -5011,6 +5199,9 @@ pub struct IndexArgs { /// All indexes provided via this flag take priority over the index specified by /// `--default-index` (which defaults to PyPI). When multiple `--index` flags are provided, /// earlier values take priority. + /// + /// Index names are not supported as values. Relative paths must be disambiguated from index + /// names with `./` or `../` on Unix or `.\\`, `..\\`, `./` or `../` on Windows. // // The nested Vec structure (`Vec>>`) is required for clap's // value parsing mechanism, which processes one value at a time, in order to handle @@ -5076,7 +5267,6 @@ pub struct IndexArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct RefreshArgs { /// Refresh all cached data. #[arg( @@ -5102,7 +5292,6 @@ pub struct RefreshArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct BuildOptionsArgs { /// Don't build source distributions. /// @@ -5158,7 +5347,6 @@ pub struct BuildOptionsArgs { /// Arguments that are used by commands that need to install (but not resolve) packages. #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct InstallerArgs { #[command(flatten)] pub index_args: IndexArgs, @@ -5300,7 +5488,6 @@ pub struct InstallerArgs { /// Arguments that are used by commands that need to resolve (but not install) packages. #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ResolverArgs { #[command(flatten)] pub index_args: IndexArgs, @@ -5467,7 +5654,6 @@ pub struct ResolverArgs { /// Arguments that are used by commands that need to resolve and install packages. #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ResolverInstallerArgs { #[command(flatten)] pub index_args: IndexArgs, @@ -5684,7 +5870,6 @@ pub struct ResolverInstallerArgs { /// Arguments that are used by commands that need to fetch from the Simple API. #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct FetchArgs { #[command(flatten)] pub index_args: IndexArgs, @@ -5838,7 +6023,7 @@ pub struct PublishArgs { /// /// Defaults to PyPI's publish URL (). #[arg(long, env = EnvVars::UV_PUBLISH_URL)] - pub publish_url: Option, + pub publish_url: Option, /// Check an index URL for existing files to skip duplicate uploads. /// diff --git a/crates/uv-cli/src/options.rs b/crates/uv-cli/src/options.rs index 656edd43c..f522022a1 100644 --- a/crates/uv-cli/src/options.rs +++ b/crates/uv-cli/src/options.rs @@ -1,7 +1,10 @@ +use anstream::eprintln; + use uv_cache::Refresh; use uv_configuration::ConfigSettings; use uv_resolver::PrereleaseMode; use uv_settings::{Combine, PipOptions, ResolverInstallerOptions, ResolverOptions}; +use uv_warnings::owo_colors::OwoColorize; use crate::{ BuildOptionsArgs, FetchArgs, IndexArgs, InstallerArgs, Maybe, RefreshArgs, ResolverArgs, @@ -9,12 +12,27 @@ use crate::{ }; /// Given a boolean flag pair (like `--upgrade` and `--no-upgrade`), resolve the value of the flag. 
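For reference, the resolution this helper performs once the `name` parameter lands (snippet; assumes `flag` is in scope):

    // --upgrade on its own:
    assert_eq!(flag(true, false, "upgrade"), Some(true));
    // --no-upgrade on its own:
    assert_eq!(flag(false, true, "upgrade"), Some(false));
    // Neither flag passed, fall back to configuration/defaults:
    assert_eq!(flag(false, false, "upgrade"), None);
    // Both flags passed: prints an error naming `--upgrade`/`--no-upgrade` and exits
    // with status 2, since clap cannot yet express the conflict across nesting levels.
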
-pub fn flag(yes: bool, no: bool) -> Option { +pub fn flag(yes: bool, no: bool, name: &str) -> Option { match (yes, no) { (true, false) => Some(true), (false, true) => Some(false), (false, false) => None, - (..) => unreachable!("Clap should make this impossible"), + (..) => { + eprintln!( + "{}{} `{}` and `{}` cannot be used together. \ + Boolean flags on different levels are currently not supported \ + (https://github.com/clap-rs/clap/issues/6049)", + "error".bold().red(), + ":".bold(), + format!("--{name}").green(), + format!("--no-{name}").green(), + ); + // No error forwarding since should eventually be solved on the clap side. + #[allow(clippy::exit)] + { + std::process::exit(2); + } + } } } @@ -26,7 +44,7 @@ impl From for Refresh { refresh_package, } = value; - Self::from_args(flag(refresh, no_refresh), refresh_package) + Self::from_args(flag(refresh, no_refresh, "no-refresh"), refresh_package) } } @@ -53,7 +71,7 @@ impl From for PipOptions { } = args; Self { - upgrade: flag(upgrade, no_upgrade), + upgrade: flag(upgrade, no_upgrade, "no-upgrade"), upgrade_package: Some(upgrade_package), index_strategy, keyring_provider, @@ -66,7 +84,7 @@ impl From for PipOptions { }, config_settings: config_setting .map(|config_settings| config_settings.into_iter().collect::()), - no_build_isolation: flag(no_build_isolation, build_isolation), + no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"), no_build_isolation_package: Some(no_build_isolation_package), exclude_newer, link_mode, @@ -96,16 +114,16 @@ impl From for PipOptions { } = args; Self { - reinstall: flag(reinstall, no_reinstall), + reinstall: flag(reinstall, no_reinstall, "reinstall"), reinstall_package: Some(reinstall_package), index_strategy, keyring_provider, config_settings: config_setting .map(|config_settings| config_settings.into_iter().collect::()), - no_build_isolation: flag(no_build_isolation, build_isolation), + no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"), exclude_newer, link_mode, - compile_bytecode: flag(compile_bytecode, no_compile_bytecode), + compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"), no_sources: if no_sources { Some(true) } else { None }, ..PipOptions::from(index_args) } @@ -140,9 +158,9 @@ impl From for PipOptions { } = args; Self { - upgrade: flag(upgrade, no_upgrade), + upgrade: flag(upgrade, no_upgrade, "upgrade"), upgrade_package: Some(upgrade_package), - reinstall: flag(reinstall, no_reinstall), + reinstall: flag(reinstall, no_reinstall, "reinstall"), reinstall_package: Some(reinstall_package), index_strategy, keyring_provider, @@ -155,11 +173,11 @@ impl From for PipOptions { fork_strategy, config_settings: config_setting .map(|config_settings| config_settings.into_iter().collect::()), - no_build_isolation: flag(no_build_isolation, build_isolation), + no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"), no_build_isolation_package: Some(no_build_isolation_package), exclude_newer, link_mode, - compile_bytecode: flag(compile_bytecode, no_compile_bytecode), + compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"), no_sources: if no_sources { Some(true) } else { None }, ..PipOptions::from(index_args) } @@ -289,7 +307,7 @@ pub fn resolver_options( .filter_map(Maybe::into_option) .collect() }), - upgrade: flag(upgrade, no_upgrade), + upgrade: flag(upgrade, no_upgrade, "no-upgrade"), upgrade_package: Some(upgrade_package), index_strategy, keyring_provider, @@ -303,13 
+321,13 @@ pub fn resolver_options( dependency_metadata: None, config_settings: config_setting .map(|config_settings| config_settings.into_iter().collect::()), - no_build_isolation: flag(no_build_isolation, build_isolation), + no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"), no_build_isolation_package: Some(no_build_isolation_package), exclude_newer, link_mode, - no_build: flag(no_build, build), + no_build: flag(no_build, build, "build"), no_build_package: Some(no_build_package), - no_binary: flag(no_binary, binary), + no_binary: flag(no_binary, binary, "binary"), no_binary_package: Some(no_binary_package), no_sources: if no_sources { Some(true) } else { None }, } @@ -386,13 +404,13 @@ pub fn resolver_installer_options( .filter_map(Maybe::into_option) .collect() }), - upgrade: flag(upgrade, no_upgrade), + upgrade: flag(upgrade, no_upgrade, "upgrade"), upgrade_package: if upgrade_package.is_empty() { None } else { Some(upgrade_package) }, - reinstall: flag(reinstall, no_reinstall), + reinstall: flag(reinstall, no_reinstall, "reinstall"), reinstall_package: if reinstall_package.is_empty() { None } else { @@ -410,7 +428,7 @@ pub fn resolver_installer_options( dependency_metadata: None, config_settings: config_setting .map(|config_settings| config_settings.into_iter().collect::()), - no_build_isolation: flag(no_build_isolation, build_isolation), + no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"), no_build_isolation_package: if no_build_isolation_package.is_empty() { None } else { @@ -418,14 +436,14 @@ pub fn resolver_installer_options( }, exclude_newer, link_mode, - compile_bytecode: flag(compile_bytecode, no_compile_bytecode), - no_build: flag(no_build, build), + compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"), + no_build: flag(no_build, build, "build"), no_build_package: if no_build_package.is_empty() { None } else { Some(no_build_package) }, - no_binary: flag(no_binary, binary), + no_binary: flag(no_binary, binary, "binary"), no_binary_package: if no_binary_package.is_empty() { None } else { diff --git a/crates/uv-client/Cargo.toml b/crates/uv-client/Cargo.toml index 81d1909fe..bc7fc611f 100644 --- a/crates/uv-client/Cargo.toml +++ b/crates/uv-client/Cargo.toml @@ -65,3 +65,4 @@ hyper = { version = "1.4.1", features = ["server", "http1"] } hyper-util = { version = "0.1.8", features = ["tokio"] } insta = { version = "1.40.0", features = ["filters", "json", "redactions"] } tokio = { workspace = true } +wiremock = { workspace = true } diff --git a/crates/uv-client/src/base_client.rs b/crates/uv-client/src/base_client.rs index 2db0a920e..9ddc30e75 100644 --- a/crates/uv-client/src/base_client.rs +++ b/crates/uv-client/src/base_client.rs @@ -6,21 +6,33 @@ use std::sync::Arc; use std::time::Duration; use std::{env, io, iter}; +use anyhow::Context; +use anyhow::anyhow; +use http::{ + HeaderMap, HeaderName, HeaderValue, Method, StatusCode, + header::{ + AUTHORIZATION, CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TYPE, COOKIE, LOCATION, + PROXY_AUTHORIZATION, REFERER, TRANSFER_ENCODING, WWW_AUTHENTICATE, + }, +}; use itertools::Itertools; -use reqwest::{Client, ClientBuilder, Proxy, Response}; +use reqwest::{Client, ClientBuilder, IntoUrl, Proxy, Request, Response, multipart}; use reqwest_middleware::{ClientWithMiddleware, Middleware}; use reqwest_retry::policies::ExponentialBackoff; use reqwest_retry::{ DefaultRetryableStrategy, RetryTransientMiddleware, Retryable, RetryableStrategy, }; use 
tracing::{debug, trace}; +use url::ParseError; use url::Url; +use uv_auth::Credentials; use uv_auth::{AuthMiddleware, Indexes}; use uv_configuration::{KeyringProviderType, TrustedHost}; use uv_fs::Simplified; use uv_pep508::MarkerEnvironment; use uv_platform_tags::Platform; +use uv_redacted::DisplaySafeUrl; use uv_static::EnvVars; use uv_version::version; use uv_warnings::warn_user_once; @@ -31,6 +43,10 @@ use crate::middleware::OfflineMiddleware; use crate::tls::read_identity; pub const DEFAULT_RETRIES: u32 = 3; +/// Maximum number of redirects to follow before giving up. +/// +/// This is the default used by [`reqwest`]. +const DEFAULT_MAX_REDIRECTS: u32 = 10; /// Selectively skip parts or the entire auth middleware. #[derive(Debug, Clone, Copy, Default)] @@ -60,6 +76,31 @@ pub struct BaseClientBuilder<'a> { default_timeout: Duration, extra_middleware: Option, proxies: Vec, + redirect_policy: RedirectPolicy, + /// Whether credentials should be propagated during cross-origin redirects. + /// + /// A policy allowing propagation is insecure and should only be available for test code. + cross_origin_credential_policy: CrossOriginCredentialsPolicy, +} + +/// The policy for handling HTTP redirects. +#[derive(Debug, Default, Clone, Copy)] +pub enum RedirectPolicy { + /// Use reqwest's built-in redirect handling. This bypasses our custom middleware + /// on redirect. + #[default] + BypassMiddleware, + /// Handle redirects manually, re-triggering our custom middleware for each request. + RetriggerMiddleware, +} + +impl RedirectPolicy { + pub fn reqwest_policy(self) -> reqwest::redirect::Policy { + match self { + RedirectPolicy::BypassMiddleware => reqwest::redirect::Policy::default(), + RedirectPolicy::RetriggerMiddleware => reqwest::redirect::Policy::none(), + } + } } /// A list of user-defined middlewares to be applied to the client. @@ -95,6 +136,8 @@ impl BaseClientBuilder<'_> { default_timeout: Duration::from_secs(30), extra_middleware: None, proxies: vec![], + redirect_policy: RedirectPolicy::default(), + cross_origin_credential_policy: CrossOriginCredentialsPolicy::Secure, } } } @@ -124,6 +167,25 @@ impl<'a> BaseClientBuilder<'a> { self } + /// Read the retry count from [`EnvVars::UV_HTTP_RETRIES`] if set, otherwise, make no change. + /// + /// Errors when [`EnvVars::UV_HTTP_RETRIES`] is not a valid u32. + pub fn retries_from_env(self) -> anyhow::Result { + // TODO(zanieb): We should probably parse this in another layer, but there's not a natural + // fit for it right now + if let Some(value) = env::var_os(EnvVars::UV_HTTP_RETRIES) { + Ok(self.retries( + value + .to_string_lossy() + .as_ref() + .parse::() + .context("Failed to parse `UV_HTTP_RETRIES`")?, + )) + } else { + Ok(self) + } + } + #[must_use] pub fn native_tls(mut self, native_tls: bool) -> Self { self.native_tls = native_tls; @@ -172,13 +234,35 @@ impl<'a> BaseClientBuilder<'a> { self } + #[must_use] + pub fn redirect(mut self, policy: RedirectPolicy) -> Self { + self.redirect_policy = policy; + self + } + + /// Allows credentials to be propagated on cross-origin redirects. + /// + /// WARNING: This should only be available for tests. In production code, propagating credentials + /// during cross-origin redirects can lead to security vulnerabilities including credential + /// leakage to untrusted domains. 
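// Illustrative sketch, not part of the patch: the env-driven retry count introduced
// above boils down to parsing `UV_HTTP_RETRIES` as a `u32` and keeping the existing
// default when the variable is unset. The free function below is a hypothetical
// stand-in for `BaseClientBuilder::retries_from_env`.
use std::env;

use anyhow::Context;

fn http_retries_from_env(default: u32) -> anyhow::Result<u32> {
    match env::var_os("UV_HTTP_RETRIES") {
        // An override is present: parse it, surfacing a readable error for
        // non-numeric values.
        Some(value) => value
            .to_string_lossy()
            .as_ref()
            .parse::<u32>()
            .context("Failed to parse `UV_HTTP_RETRIES`"),
        // No override: keep the caller-provided default (`DEFAULT_RETRIES` above).
        None => Ok(default),
    }
}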
+ #[cfg(test)] + #[must_use] + pub fn allow_cross_origin_credentials(mut self) -> Self { + self.cross_origin_credential_policy = CrossOriginCredentialsPolicy::Insecure; + self + } + pub fn is_offline(&self) -> bool { matches!(self.connectivity, Connectivity::Offline) } /// Create a [`RetryPolicy`] for the client. fn retry_policy(&self) -> ExponentialBackoff { - ExponentialBackoff::builder().build_with_max_retries(self.retries) + let mut builder = ExponentialBackoff::builder(); + if env::var_os(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY).is_some() { + builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0)); + } + builder.build_with_max_retries(self.retries) } pub fn build(&self) -> BaseClient { @@ -228,6 +312,7 @@ impl<'a> BaseClientBuilder<'a> { timeout, ssl_cert_file_exists, Security::Secure, + self.redirect_policy, ); // Create an insecure client that accepts invalid certificates. @@ -236,11 +321,20 @@ impl<'a> BaseClientBuilder<'a> { timeout, ssl_cert_file_exists, Security::Insecure, + self.redirect_policy, ); // Wrap in any relevant middleware and handle connectivity. - let client = self.apply_middleware(raw_client.clone()); - let dangerous_client = self.apply_middleware(raw_dangerous_client.clone()); + let client = RedirectClientWithMiddleware { + client: self.apply_middleware(raw_client.clone()), + redirect_policy: self.redirect_policy, + cross_origin_credentials_policy: self.cross_origin_credential_policy, + }; + let dangerous_client = RedirectClientWithMiddleware { + client: self.apply_middleware(raw_dangerous_client.clone()), + redirect_policy: self.redirect_policy, + cross_origin_credentials_policy: self.cross_origin_credential_policy, + }; BaseClient { connectivity: self.connectivity, @@ -257,8 +351,16 @@ impl<'a> BaseClientBuilder<'a> { /// Share the underlying client between two different middleware configurations. pub fn wrap_existing(&self, existing: &BaseClient) -> BaseClient { // Wrap in any relevant middleware and handle connectivity. - let client = self.apply_middleware(existing.raw_client.clone()); - let dangerous_client = self.apply_middleware(existing.raw_dangerous_client.clone()); + let client = RedirectClientWithMiddleware { + client: self.apply_middleware(existing.raw_client.clone()), + redirect_policy: self.redirect_policy, + cross_origin_credentials_policy: self.cross_origin_credential_policy, + }; + let dangerous_client = RedirectClientWithMiddleware { + client: self.apply_middleware(existing.raw_dangerous_client.clone()), + redirect_policy: self.redirect_policy, + cross_origin_credentials_policy: self.cross_origin_credential_policy, + }; BaseClient { connectivity: self.connectivity, @@ -278,6 +380,7 @@ impl<'a> BaseClientBuilder<'a> { timeout: Duration, ssl_cert_file_exists: bool, security: Security, + redirect_policy: RedirectPolicy, ) -> Client { // Configure the builder. let client_builder = ClientBuilder::new() @@ -285,7 +388,8 @@ impl<'a> BaseClientBuilder<'a> { .user_agent(user_agent) .pool_max_idle_per_host(20) .read_timeout(timeout) - .tls_built_in_root_certs(false); + .tls_built_in_root_certs(false) + .redirect(redirect_policy.reqwest_policy()); // If necessary, accept invalid certificates. let client_builder = match security { @@ -380,9 +484,9 @@ impl<'a> BaseClientBuilder<'a> { #[derive(Debug, Clone)] pub struct BaseClient { /// The underlying HTTP client that enforces valid certificates. - client: ClientWithMiddleware, + client: RedirectClientWithMiddleware, /// The underlying HTTP client that accepts invalid certificates. 
- dangerous_client: ClientWithMiddleware, + dangerous_client: RedirectClientWithMiddleware, /// The HTTP client without middleware. raw_client: Client, /// The HTTP client that accepts invalid certificates without middleware. @@ -407,7 +511,7 @@ enum Security { impl BaseClient { /// Selects the appropriate client based on the host's trustworthiness. - pub fn for_host(&self, url: &Url) -> &ClientWithMiddleware { + pub fn for_host(&self, url: &DisplaySafeUrl) -> &RedirectClientWithMiddleware { if self.disable_ssl(url) { &self.dangerous_client } else { @@ -415,8 +519,14 @@ impl BaseClient { } } + /// Executes a request, applying redirect policy. + pub async fn execute(&self, req: Request) -> reqwest_middleware::Result { + let client = self.for_host(&DisplaySafeUrl::from(req.url().clone())); + client.execute(req).await + } + /// Returns `true` if the host is trusted to use the insecure client. - pub fn disable_ssl(&self, url: &Url) -> bool { + pub fn disable_ssl(&self, url: &DisplaySafeUrl) -> bool { self.allow_insecure_host .iter() .any(|allow_insecure_host| allow_insecure_host.matches(url)) @@ -438,6 +548,326 @@ impl BaseClient { } } +/// Wrapper around [`ClientWithMiddleware`] that manages redirects. +#[derive(Debug, Clone)] +pub struct RedirectClientWithMiddleware { + client: ClientWithMiddleware, + redirect_policy: RedirectPolicy, + /// Whether credentials should be preserved during cross-origin redirects. + /// + /// WARNING: This should only be available for tests. In production code, preserving credentials + /// during cross-origin redirects can lead to security vulnerabilities including credential + /// leakage to untrusted domains. + cross_origin_credentials_policy: CrossOriginCredentialsPolicy, +} + +impl RedirectClientWithMiddleware { + /// Convenience method to make a `GET` request to a URL. + pub fn get(&self, url: U) -> RequestBuilder { + RequestBuilder::new(self.client.get(url), self) + } + + /// Convenience method to make a `POST` request to a URL. + pub fn post(&self, url: U) -> RequestBuilder { + RequestBuilder::new(self.client.post(url), self) + } + + /// Convenience method to make a `HEAD` request to a URL. + pub fn head(&self, url: U) -> RequestBuilder { + RequestBuilder::new(self.client.head(url), self) + } + + /// Executes a request, applying the redirect policy. + pub async fn execute(&self, req: Request) -> reqwest_middleware::Result { + match self.redirect_policy { + RedirectPolicy::BypassMiddleware => self.client.execute(req).await, + RedirectPolicy::RetriggerMiddleware => self.execute_with_redirect_handling(req).await, + } + } + + /// Executes a request. If the response is a redirect (one of HTTP 301, 302, 303, 307, or 308), the + /// request is executed again with the redirect location URL (up to a maximum number of + /// redirects). + /// + /// Unlike the built-in reqwest redirect policies, this sends the redirect request through the + /// entire middleware pipeline again. + /// + /// See RFC 7231 7.1.2 for details on + /// redirect semantics. 
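// Illustrative sketch, not part of the patch: callers opt into the middleware-aware
// redirect handling through the builder, as `RegistryClientBuilder::build` does later
// in this diff. Assumes the types introduced above are in scope.
fn build_redirect_aware_client(builder: BaseClientBuilder<'_>) -> BaseClient {
    // Re-run the auth and retry middleware on every redirect hop instead of letting
    // reqwest follow redirects internally.
    builder.redirect(RedirectPolicy::RetriggerMiddleware).build()
}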
+ async fn execute_with_redirect_handling( + &self, + req: Request, + ) -> reqwest_middleware::Result { + let mut request = req; + let mut redirects = 0; + let max_redirects = DEFAULT_MAX_REDIRECTS; + + loop { + let result = self + .client + .execute(request.try_clone().expect("HTTP request must be cloneable")) + .await; + let Ok(response) = result else { + return result; + }; + + if redirects >= max_redirects { + return Ok(response); + } + + let Some(redirect_request) = + request_into_redirect(request, &response, self.cross_origin_credentials_policy)? + else { + return Ok(response); + }; + + redirects += 1; + request = redirect_request; + } + } + + pub fn raw_client(&self) -> &ClientWithMiddleware { + &self.client + } +} + +impl From for ClientWithMiddleware { + fn from(item: RedirectClientWithMiddleware) -> ClientWithMiddleware { + item.client + } +} + +/// Check if this is should be a redirect and, if so, return a new redirect request. +/// +/// This implementation is based on the [`reqwest`] crate redirect implementation. +/// It takes ownership of the original [`Request`] and mutates it to create the new +/// redirect [`Request`]. +fn request_into_redirect( + mut req: Request, + res: &Response, + cross_origin_credentials_policy: CrossOriginCredentialsPolicy, +) -> reqwest_middleware::Result> { + let original_req_url = DisplaySafeUrl::from(req.url().clone()); + let status = res.status(); + let should_redirect = match status { + StatusCode::MOVED_PERMANENTLY + | StatusCode::FOUND + | StatusCode::TEMPORARY_REDIRECT + | StatusCode::PERMANENT_REDIRECT => true, + StatusCode::SEE_OTHER => { + // Per RFC 7231, HTTP 303 is intended for the user agent + // to perform a GET or HEAD request to the redirect target. + // Historically, some browsers also changed method from POST + // to GET on 301 or 302, but this is not required by RFC 7231 + // and was not intended by the HTTP spec. + *req.body_mut() = None; + for header in &[ + TRANSFER_ENCODING, + CONTENT_ENCODING, + CONTENT_TYPE, + CONTENT_LENGTH, + ] { + req.headers_mut().remove(header); + } + + match *req.method() { + Method::GET | Method::HEAD => {} + _ => { + *req.method_mut() = Method::GET; + } + } + true + } + _ => false, + }; + if !should_redirect { + return Ok(None); + } + + let location = res + .headers() + .get(LOCATION) + .ok_or(reqwest_middleware::Error::Middleware(anyhow!( + "Server returned redirect (HTTP {status}) without destination URL. This may indicate a server configuration issue" + )))? + .to_str() + .map_err(|_| { + reqwest_middleware::Error::Middleware(anyhow!( + "Invalid HTTP {status} 'Location' value: must only contain visible ascii characters" + )) + })?; + + let mut redirect_url = match DisplaySafeUrl::parse(location) { + Ok(url) => url, + // Per RFC 7231, URLs should be resolved against the request URL. + Err(ParseError::RelativeUrlWithoutBase) => original_req_url.join(location).map_err(|err| { + reqwest_middleware::Error::Middleware(anyhow!( + "Invalid HTTP {status} 'Location' value `{location}` relative to `{original_req_url}`: {err}" + )) + })?, + Err(err) => { + return Err(reqwest_middleware::Error::Middleware(anyhow!( + "Invalid HTTP {status} 'Location' value `{location}`: {err}" + ))); + } + }; + // Per RFC 7231, fragments must be propagated + if let Some(fragment) = original_req_url.fragment() { + redirect_url.set_fragment(Some(fragment)); + } + + // Ensure the URL is a valid HTTP URI. 
+ if let Err(err) = redirect_url.as_str().parse::() { + return Err(reqwest_middleware::Error::Middleware(anyhow!( + "HTTP {status} 'Location' value `{redirect_url}` is not a valid HTTP URI: {err}" + ))); + } + + if redirect_url.scheme() != "http" && redirect_url.scheme() != "https" { + return Err(reqwest_middleware::Error::Middleware(anyhow!( + "Invalid HTTP {status} 'Location' value `{redirect_url}`: scheme needs to be https or http" + ))); + } + + let mut headers = HeaderMap::new(); + std::mem::swap(req.headers_mut(), &mut headers); + + let cross_host = redirect_url.host_str() != original_req_url.host_str() + || redirect_url.port_or_known_default() != original_req_url.port_or_known_default(); + if cross_host { + if cross_origin_credentials_policy == CrossOriginCredentialsPolicy::Secure { + debug!("Received a cross-origin redirect. Removing sensitive headers."); + headers.remove(AUTHORIZATION); + headers.remove(COOKIE); + headers.remove(PROXY_AUTHORIZATION); + headers.remove(WWW_AUTHENTICATE); + } + // If the redirect request is not a cross-origin request and the original request already + // had a Referer header, attempt to set the Referer header for the redirect request. + } else if headers.contains_key(REFERER) { + if let Some(referer) = make_referer(&redirect_url, &original_req_url) { + headers.insert(REFERER, referer); + } + } + + // Check if there are credentials on the redirect location itself. + // If so, move them to Authorization header. + if !redirect_url.username().is_empty() { + if let Some(credentials) = Credentials::from_url(&redirect_url) { + let _ = redirect_url.set_username(""); + let _ = redirect_url.set_password(None); + headers.insert(AUTHORIZATION, credentials.to_header_value()); + } + } + + std::mem::swap(req.headers_mut(), &mut headers); + *req.url_mut() = Url::from(redirect_url); + debug!( + "Received HTTP {status}. Redirecting to {}", + DisplaySafeUrl::ref_cast(req.url()) + ); + Ok(Some(req)) +} + +/// Return a Referer [`HeaderValue`] according to RFC 7231. +/// +/// Return [`None`] if https has been downgraded in the redirect location. +fn make_referer( + redirect_url: &DisplaySafeUrl, + original_url: &DisplaySafeUrl, +) -> Option { + if redirect_url.scheme() == "http" && original_url.scheme() == "https" { + return None; + } + + let mut referer = original_url.clone(); + referer.remove_credentials(); + referer.set_fragment(None); + referer.as_str().parse().ok() +} + +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Hash)] +pub(crate) enum CrossOriginCredentialsPolicy { + /// Do not propagate credentials on cross-origin requests. + #[default] + Secure, + + /// Propagate credentials on cross-origin requests. + /// + /// WARNING: This should only be available for tests. In production code, preserving credentials + /// during cross-origin redirects can lead to security vulnerabilities including credential + /// leakage to untrusted domains. + #[cfg(test)] + Insecure, +} + +/// A builder to construct the properties of a `Request`. +/// +/// This wraps [`reqwest_middleware::RequestBuilder`] to ensure that the [`BaseClient`] +/// redirect policy is respected if `send()` is called. +#[derive(Debug)] +#[must_use] +pub struct RequestBuilder<'a> { + builder: reqwest_middleware::RequestBuilder, + client: &'a RedirectClientWithMiddleware, +} + +impl<'a> RequestBuilder<'a> { + pub fn new( + builder: reqwest_middleware::RequestBuilder, + client: &'a RedirectClientWithMiddleware, + ) -> Self { + Self { builder, client } + } + + /// Add a `Header` to this Request. 
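// Illustrative sketch, not part of the patch: the wrapper reads like a plain reqwest
// builder, but `send()` funnels through `RedirectClientWithMiddleware::execute`, so
// the configured redirect policy is honored. The URL and header values below are
// placeholders.
async fn fetch_simple_page(
    client: &RedirectClientWithMiddleware,
) -> reqwest_middleware::Result<reqwest::Response> {
    client
        .get("https://example.org/simple/foo/")
        .header("Accept", "text/html")
        .send()
        .await
}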
+ pub fn header(mut self, key: K, value: V) -> Self + where + HeaderName: TryFrom, + >::Error: Into, + HeaderValue: TryFrom, + >::Error: Into, + { + self.builder = self.builder.header(key, value); + self + } + + /// Add a set of Headers to the existing ones on this Request. + /// + /// The headers will be merged in to any already set. + pub fn headers(mut self, headers: HeaderMap) -> Self { + self.builder = self.builder.headers(headers); + self + } + + #[cfg(not(target_arch = "wasm32"))] + pub fn version(mut self, version: reqwest::Version) -> Self { + self.builder = self.builder.version(version); + self + } + + #[cfg_attr(docsrs, doc(cfg(feature = "multipart")))] + pub fn multipart(mut self, multipart: multipart::Form) -> Self { + self.builder = self.builder.multipart(multipart); + self + } + + /// Build a `Request`. + pub fn build(self) -> reqwest::Result { + self.builder.build() + } + + /// Constructs the Request and sends it to the target URL, returning a + /// future Response. + pub async fn send(self) -> reqwest_middleware::Result { + self.client.execute(self.build()?).await + } + + pub fn raw_builder(&self) -> &reqwest_middleware::RequestBuilder { + &self.builder + } +} + /// Extends [`DefaultRetryableStrategy`], to log transient request failures and additional retry cases. pub struct UvRetryableStrategy; @@ -527,3 +957,204 @@ fn find_source(orig: &dyn Error) -> Option<&E> { fn find_sources(orig: &dyn Error) -> impl Iterator { iter::successors(find_source::(orig), |&err| find_source(err)) } + +#[cfg(test)] +mod tests { + use super::*; + use anyhow::Result; + + use reqwest::{Client, Method}; + use wiremock::matchers::method; + use wiremock::{Mock, MockServer, ResponseTemplate}; + + use crate::base_client::request_into_redirect; + + #[tokio::test] + async fn test_redirect_preserves_authorization_header_on_same_origin() -> Result<()> { + for status in &[301, 302, 303, 307, 308] { + let server = MockServer::start().await; + Mock::given(method("GET")) + .respond_with( + ResponseTemplate::new(*status) + .insert_header("location", format!("{}/redirect", server.uri())), + ) + .mount(&server) + .await; + + let request = Client::new() + .get(server.uri()) + .basic_auth("username", Some("password")) + .build() + .unwrap(); + + assert!(request.headers().contains_key(AUTHORIZATION)); + + let response = Client::builder() + .redirect(reqwest::redirect::Policy::none()) + .build() + .unwrap() + .execute(request.try_clone().unwrap()) + .await + .unwrap(); + + let redirect_request = + request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)? + .unwrap(); + assert!(redirect_request.headers().contains_key(AUTHORIZATION)); + } + + Ok(()) + } + + #[tokio::test] + async fn test_redirect_preserves_fragment() -> Result<()> { + for status in &[301, 302, 303, 307, 308] { + let server = MockServer::start().await; + Mock::given(method("GET")) + .respond_with( + ResponseTemplate::new(*status) + .insert_header("location", format!("{}/redirect", server.uri())), + ) + .mount(&server) + .await; + + let request = Client::new() + .get(format!("{}#fragment", server.uri())) + .build() + .unwrap(); + + let response = Client::builder() + .redirect(reqwest::redirect::Policy::none()) + .build() + .unwrap() + .execute(request.try_clone().unwrap()) + .await + .unwrap(); + + let redirect_request = + request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)? 
+ .unwrap(); + assert!( + redirect_request + .url() + .fragment() + .is_some_and(|fragment| fragment == "fragment") + ); + } + + Ok(()) + } + + #[tokio::test] + async fn test_redirect_removes_authorization_header_on_cross_origin() -> Result<()> { + for status in &[301, 302, 303, 307, 308] { + let server = MockServer::start().await; + Mock::given(method("GET")) + .respond_with( + ResponseTemplate::new(*status) + .insert_header("location", "https://cross-origin.com/simple"), + ) + .mount(&server) + .await; + + let request = Client::new() + .get(server.uri()) + .basic_auth("username", Some("password")) + .build() + .unwrap(); + + assert!(request.headers().contains_key(AUTHORIZATION)); + + let response = Client::builder() + .redirect(reqwest::redirect::Policy::none()) + .build() + .unwrap() + .execute(request.try_clone().unwrap()) + .await + .unwrap(); + + let redirect_request = + request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)? + .unwrap(); + assert!(!redirect_request.headers().contains_key(AUTHORIZATION)); + } + + Ok(()) + } + + #[tokio::test] + async fn test_redirect_303_changes_post_to_get() -> Result<()> { + let server = MockServer::start().await; + Mock::given(method("POST")) + .respond_with( + ResponseTemplate::new(303) + .insert_header("location", format!("{}/redirect", server.uri())), + ) + .mount(&server) + .await; + + let request = Client::new() + .post(server.uri()) + .basic_auth("username", Some("password")) + .build() + .unwrap(); + + assert_eq!(request.method(), Method::POST); + + let response = Client::builder() + .redirect(reqwest::redirect::Policy::none()) + .build() + .unwrap() + .execute(request.try_clone().unwrap()) + .await + .unwrap(); + + let redirect_request = + request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)? + .unwrap(); + assert_eq!(redirect_request.method(), Method::GET); + + Ok(()) + } + + #[tokio::test] + async fn test_redirect_no_referer_if_disabled() -> Result<()> { + for status in &[301, 302, 303, 307, 308] { + let server = MockServer::start().await; + Mock::given(method("GET")) + .respond_with( + ResponseTemplate::new(*status) + .insert_header("location", format!("{}/redirect", server.uri())), + ) + .mount(&server) + .await; + + let request = Client::builder() + .referer(false) + .build() + .unwrap() + .get(server.uri()) + .basic_auth("username", Some("password")) + .build() + .unwrap(); + + assert!(!request.headers().contains_key(REFERER)); + + let response = Client::builder() + .redirect(reqwest::redirect::Policy::none()) + .build() + .unwrap() + .execute(request.try_clone().unwrap()) + .await + .unwrap(); + + let redirect_request = + request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)? 
+ .unwrap(); + + assert!(!redirect_request.headers().contains_key(REFERER)); + } + + Ok(()) + } +} diff --git a/crates/uv-client/src/cached_client.rs b/crates/uv-client/src/cached_client.rs index f5ade702b..ee3314d1c 100644 --- a/crates/uv-client/src/cached_client.rs +++ b/crates/uv-client/src/cached_client.rs @@ -1,4 +1,3 @@ -use std::fmt::{Debug, Display, Formatter}; use std::time::{Duration, SystemTime}; use std::{borrow::Cow, path::Path}; @@ -12,6 +11,7 @@ use tracing::{Instrument, debug, info_span, instrument, trace, warn}; use uv_cache::{CacheEntry, Freshness}; use uv_fs::write_atomic; +use uv_redacted::DisplaySafeUrl; use crate::BaseClient; use crate::base_client::is_extended_transient_error; @@ -99,44 +99,62 @@ where } } -/// Either a cached client error or a (user specified) error from the callback +/// Dispatch type: Either a cached client error or a (user specified) error from the callback pub enum CachedClientError { - Client(Error), - Callback(CallbackError), + Client { + retries: Option, + err: Error, + }, + Callback { + retries: Option, + err: CallbackError, + }, } -impl Display for CachedClientError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { +impl CachedClientError { + /// Attach the number of retries to the error context. + /// + /// Adds to existing errors if any, in case different layers retried. + fn with_retries(self, retries: u32) -> Self { match self { - CachedClientError::Client(err) => write!(f, "{err}"), - CachedClientError::Callback(err) => write!(f, "{err}"), + CachedClientError::Client { + retries: existing_retries, + err, + } => CachedClientError::Client { + retries: Some(existing_retries.unwrap_or_default() + retries), + err, + }, + CachedClientError::Callback { + retries: existing_retries, + err, + } => CachedClientError::Callback { + retries: Some(existing_retries.unwrap_or_default() + retries), + err, + }, } } -} -impl Debug for CachedClientError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn retries(&self) -> Option { match self { - CachedClientError::Client(err) => write!(f, "{err:?}"), - CachedClientError::Callback(err) => write!(f, "{err:?}"), + CachedClientError::Client { retries, .. } => *retries, + CachedClientError::Callback { retries, .. } => *retries, } } -} -impl std::error::Error - for CachedClientError -{ - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + fn error(&self) -> &dyn std::error::Error { match self { - CachedClientError::Client(err) => Some(err), - CachedClientError::Callback(err) => Some(err), + CachedClientError::Client { err, .. } => err, + CachedClientError::Callback { err, .. } => err, } } } impl From for CachedClientError { fn from(error: Error) -> Self { - Self::Client(error) + Self::Client { + retries: None, + err: error, + } } } @@ -144,15 +162,35 @@ impl From for CachedClientError { fn from(error: ErrorKind) -> Self { - Self::Client(error.into()) + Self::Client { + retries: None, + err: error.into(), + } } } impl + std::error::Error + 'static> From> for Error { + /// Attach retry error context, if there were retries. 
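// Illustrative sketch, not part of the patch: the bookkeeping introduced above feeds
// the sum of middleware retries and the cached client's own retries into the policy,
// so the two layers share a single retry budget. Signatures follow the
// `reqwest_retry` calls used elsewhere in this diff.
fn should_retry_again(
    retry_policy: &reqwest_retry::policies::ExponentialBackoff,
    start_time: std::time::SystemTime,
    past_retries: u32,
    middleware_retries: u32,
) -> bool {
    use reqwest_retry::{RetryDecision, RetryPolicy};
    // Ask the policy whether the combined retry count still fits the budget.
    matches!(
        retry_policy.should_retry(start_time, past_retries + middleware_retries),
        RetryDecision::Retry { .. }
    )
}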
fn from(error: CachedClientError) -> Self { match error { - CachedClientError::Client(error) => error, - CachedClientError::Callback(error) => error.into(), + CachedClientError::Client { + retries: Some(retries), + err, + } => ErrorKind::RequestWithRetries { + source: Box::new(err.into_kind()), + retries, + } + .into(), + CachedClientError::Client { retries: None, err } => err, + CachedClientError::Callback { + retries: Some(retries), + err, + } => ErrorKind::RequestWithRetries { + source: Box::new(err.into().into_kind()), + retries, + } + .into(), + CachedClientError::Callback { retries: None, err } => err.into(), } } } @@ -384,7 +422,7 @@ impl CachedClient { let data = response_callback(response) .boxed_local() .await - .map_err(|err| CachedClientError::Callback(err))?; + .map_err(|err| CachedClientError::Callback { retries: None, err })?; let Some(cache_policy) = cache_policy else { return Ok(data.into_target()); }; @@ -481,11 +519,10 @@ impl CachedClient { cached: DataWithCachePolicy, new_cache_policy_builder: CachePolicyBuilder, ) -> Result { - let url = req.url().clone(); + let url = DisplaySafeUrl::from(req.url().clone()); debug!("Sending revalidation request for: {url}"); let response = self .0 - .for_host(req.url()) .execute(req) .instrument(info_span!("revalidation_request", url = url.as_str())) .await @@ -521,17 +558,28 @@ impl CachedClient { &self, req: Request, ) -> Result<(Response, Option>), Error> { - let url = req.url().clone(); + let url = DisplaySafeUrl::from(req.url().clone()); trace!("Sending fresh {} request for {}", req.method(), url); let cache_policy_builder = CachePolicyBuilder::new(&req); let response = self .0 - .for_host(&url) .execute(req) .await - .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))? - .error_for_status() - .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?; + .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?; + + let retry_count = response + .extensions() + .get::() + .map(|retries| retries.value()); + + if let Err(status_error) = response.error_for_status_ref() { + return Err(CachedClientError::::Client { + retries: retry_count, + err: ErrorKind::from_reqwest(url, status_error).into(), + } + .into()); + } + let cache_policy = cache_policy_builder.build(&response); let cache_policy = if cache_policy.to_archived().is_storable() { Some(Box::new(cache_policy)) @@ -578,7 +626,7 @@ impl CachedClient { cache_control: CacheControl, response_callback: Callback, ) -> Result> { - let mut n_past_retries = 0; + let mut past_retries = 0; let start_time = SystemTime::now(); let retry_policy = self.uncached().retry_policy(); loop { @@ -586,11 +634,20 @@ impl CachedClient { let result = self .get_cacheable(fresh_req, cache_entry, cache_control, &response_callback) .await; + + // Check if the middleware already performed retries + let middleware_retries = match &result { + Err(err) => err.retries().unwrap_or_default(), + Ok(_) => 0, + }; + if result .as_ref() - .is_err_and(|err| is_extended_transient_error(err)) + .is_err_and(|err| is_extended_transient_error(err.error())) { - let retry_decision = retry_policy.should_retry(start_time, n_past_retries); + // If middleware already retried, consider that in our retry budget + let total_retries = past_retries + middleware_retries; + let retry_decision = retry_policy.should_retry(start_time, total_retries); if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision { debug!( "Transient failure while handling response from {}; retrying...", @@ -600,10 
+657,15 @@ impl CachedClient { .duration_since(SystemTime::now()) .unwrap_or_else(|_| Duration::default()); tokio::time::sleep(duration).await; - n_past_retries += 1; + past_retries += 1; continue; } } + + if past_retries > 0 { + return result.map_err(|err| err.with_retries(past_retries)); + } + return result; } } @@ -621,7 +683,7 @@ impl CachedClient { cache_entry: &CacheEntry, response_callback: Callback, ) -> Result> { - let mut n_past_retries = 0; + let mut past_retries = 0; let start_time = SystemTime::now(); let retry_policy = self.uncached().retry_policy(); loop { @@ -629,12 +691,20 @@ impl CachedClient { let result = self .skip_cache(fresh_req, cache_entry, &response_callback) .await; + + // Check if the middleware already performed retries + let middleware_retries = match &result { + Err(err) => err.retries().unwrap_or_default(), + _ => 0, + }; + if result .as_ref() .err() - .is_some_and(|err| is_extended_transient_error(err)) + .is_some_and(|err| is_extended_transient_error(err.error())) { - let retry_decision = retry_policy.should_retry(start_time, n_past_retries); + let total_retries = past_retries + middleware_retries; + let retry_decision = retry_policy.should_retry(start_time, total_retries); if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision { debug!( "Transient failure while handling response from {}; retrying...", @@ -644,10 +714,15 @@ impl CachedClient { .duration_since(SystemTime::now()) .unwrap_or_else(|_| Duration::default()); tokio::time::sleep(duration).await; - n_past_retries += 1; + past_retries += 1; continue; } } + + if past_retries > 0 { + return result.map_err(|err| err.with_retries(past_retries)); + } + return result; } } diff --git a/crates/uv-client/src/error.rs b/crates/uv-client/src/error.rs index ad1e06823..754237fe2 100644 --- a/crates/uv-client/src/error.rs +++ b/crates/uv-client/src/error.rs @@ -3,11 +3,10 @@ use std::ops::Deref; use async_http_range_reader::AsyncHttpRangeReaderError; use async_zip::error::ZipError; -use url::Url; use uv_distribution_filename::{WheelFilename, WheelFilenameError}; use uv_normalize::PackageName; -use uv_redacted::redacted_url; +use uv_redacted::DisplaySafeUrl; use crate::middleware::OfflineError; use crate::{FlatIndexError, html}; @@ -30,12 +29,12 @@ impl Error { } /// Create a new error from a JSON parsing error. - pub(crate) fn from_json_err(err: serde_json::Error, url: Url) -> Self { + pub(crate) fn from_json_err(err: serde_json::Error, url: DisplaySafeUrl) -> Self { ErrorKind::BadJson { source: err, url }.into() } /// Create a new error from an HTML parsing error. - pub(crate) fn from_html_err(err: html::Error, url: Url) -> Self { + pub(crate) fn from_html_err(err: html::Error, url: DisplaySafeUrl) -> Self { ErrorKind::BadHtml { source: err, url }.into() } @@ -153,17 +152,14 @@ pub enum ErrorKind { #[error(transparent)] InvalidUrl(#[from] uv_distribution_types::ToUrlError), - #[error(transparent)] - JoinRelativeUrl(#[from] uv_pypi_types::JoinRelativeError), - #[error(transparent)] Flat(#[from] FlatIndexError), #[error("Expected a file URL, but received: {0}")] - NonFileUrl(Url), + NonFileUrl(DisplaySafeUrl), #[error("Expected an index URL, but received non-base URL: {0}")] - CannotBeABase(Url), + CannotBeABase(DisplaySafeUrl), #[error("Failed to read metadata: `{0}`")] Metadata(String, #[source] uv_metadata::Error), @@ -196,16 +192,29 @@ pub enum ErrorKind { /// An error that happened while making a request or in a reqwest middleware. 
#[error("Failed to fetch: `{0}`")] - WrappedReqwestError(Url, #[source] WrappedReqwestError), + WrappedReqwestError(DisplaySafeUrl, #[source] WrappedReqwestError), - #[error("Received some unexpected JSON from {}", redacted_url(url))] - BadJson { source: serde_json::Error, url: Url }, + /// Add the number of failed retries to the error. + #[error("Request failed after {retries} retries")] + RequestWithRetries { + source: Box, + retries: u32, + }, - #[error("Received some unexpected HTML from {}", redacted_url(url))] - BadHtml { source: html::Error, url: Url }, + #[error("Received some unexpected JSON from {}", url)] + BadJson { + source: serde_json::Error, + url: DisplaySafeUrl, + }, + + #[error("Received some unexpected HTML from {}", url)] + BadHtml { + source: html::Error, + url: DisplaySafeUrl, + }, #[error("Failed to read zip with range requests: `{0}`")] - AsyncHttpRangeReader(Url, #[source] AsyncHttpRangeReaderError), + AsyncHttpRangeReader(DisplaySafeUrl, #[source] AsyncHttpRangeReaderError), #[error("{0} is not a valid wheel filename")] WheelFilename(#[source] WheelFilenameError), @@ -232,13 +241,13 @@ pub enum ErrorKind { Encode(#[source] rmp_serde::encode::Error), #[error("Missing `Content-Type` header for {0}")] - MissingContentType(Url), + MissingContentType(DisplaySafeUrl), #[error("Invalid `Content-Type` header for {0}")] - InvalidContentTypeHeader(Url, #[source] http::header::ToStrError), + InvalidContentTypeHeader(DisplaySafeUrl, #[source] http::header::ToStrError), #[error("Unsupported `Content-Type` \"{1}\" for {0}. Expected JSON or HTML.")] - UnsupportedMediaType(Url, String), + UnsupportedMediaType(DisplaySafeUrl, String), #[error("Reading from cache archive failed: {0}")] ArchiveRead(String), @@ -253,11 +262,14 @@ pub enum ErrorKind { } impl ErrorKind { - pub(crate) fn from_reqwest(url: Url, error: reqwest::Error) -> Self { + pub(crate) fn from_reqwest(url: DisplaySafeUrl, error: reqwest::Error) -> Self { Self::WrappedReqwestError(url, WrappedReqwestError::from(error)) } - pub(crate) fn from_reqwest_middleware(url: Url, err: reqwest_middleware::Error) -> Self { + pub(crate) fn from_reqwest_middleware( + url: DisplaySafeUrl, + err: reqwest_middleware::Error, + ) -> Self { if let reqwest_middleware::Error::Middleware(ref underlying) = err { if let Some(err) = underlying.downcast_ref::() { return Self::Offline(err.url().to_string()); diff --git a/crates/uv-client/src/flat_index.rs b/crates/uv-client/src/flat_index.rs index ca2166e18..91668c5c4 100644 --- a/crates/uv-client/src/flat_index.rs +++ b/crates/uv-client/src/flat_index.rs @@ -10,7 +10,7 @@ use uv_cache_key::cache_digest; use uv_distribution_filename::DistFilename; use uv_distribution_types::{File, FileLocation, IndexUrl, UrlString}; use uv_pypi_types::HashDigests; -use uv_redacted::redacted_url; +use uv_redacted::DisplaySafeUrl; use uv_small_str::SmallString; use crate::cached_client::{CacheControl, CachedClientError}; @@ -20,13 +20,13 @@ use crate::{CachedClient, Connectivity, Error, ErrorKind, OwnedArchive}; #[derive(Debug, thiserror::Error)] pub enum FlatIndexError { #[error("Expected a file URL, but received: {0}")] - NonFileUrl(Url), + NonFileUrl(DisplaySafeUrl), #[error("Failed to read `--find-links` directory: {0}")] FindLinksDirectory(PathBuf, #[source] FindLinksDirectoryError), #[error("Failed to read `--find-links` URL: {0}")] - FindLinksUrl(Url, #[source] Error), + FindLinksUrl(DisplaySafeUrl, #[source] Error), } #[derive(Debug, thiserror::Error)] @@ -159,7 +159,7 @@ impl<'a> FlatIndexClient<'a> { /// 
Read a flat remote index from a `--find-links` URL. async fn read_from_url( &self, - url: &Url, + url: &DisplaySafeUrl, flat_index: &IndexUrl, ) -> Result { let cache_entry = self.cache.entry( @@ -180,7 +180,7 @@ impl<'a> FlatIndexClient<'a> { .client .uncached() .for_host(url) - .get(url.clone()) + .get(Url::from(url.clone())) .header("Accept-Encoding", "gzip") .header("Accept", "text/html") .build() @@ -189,7 +189,7 @@ impl<'a> FlatIndexClient<'a> { async { // Use the response URL, rather than the request URL, as the base for relative URLs. // This ensures that we handle redirects and other URL transformations correctly. - let url = response.url().clone(); + let url = DisplaySafeUrl::from(response.url().clone()); let text = response .text() @@ -208,7 +208,7 @@ impl<'a> FlatIndexClient<'a> { Ok(file) => Some(file), Err(err) => { // Ignore files with unparsable version specifiers. - warn!("Skipping file in {}: {err}", redacted_url(&url)); + warn!("Skipping file in {}: {err}", &url); None } } @@ -246,7 +246,7 @@ impl<'a> FlatIndexClient<'a> { .collect(); Ok(FlatIndexEntries::from_entries(files)) } - Err(CachedClientError::Client(err)) if err.is_offline() => { + Err(CachedClientError::Client { err, .. }) if err.is_offline() => { Ok(FlatIndexEntries::offline()) } Err(err) => Err(err.into()), @@ -294,7 +294,7 @@ impl<'a> FlatIndexClient<'a> { }; // SAFETY: The index path is itself constructed from a URL. - let url = Url::from_file_path(entry.path()).unwrap(); + let url = DisplaySafeUrl::from_file_path(entry.path()).unwrap(); let file = File { dist_info_metadata: false, @@ -303,7 +303,7 @@ impl<'a> FlatIndexClient<'a> { requires_python: None, size: None, upload_time_utc_ms: None, - url: FileLocation::AbsoluteUrl(UrlString::from(&url)), + url: FileLocation::AbsoluteUrl(UrlString::from(url)), yanked: None, }; diff --git a/crates/uv-client/src/html.rs b/crates/uv-client/src/html.rs index 106bd35a3..1b6b01347 100644 --- a/crates/uv-client/src/html.rs +++ b/crates/uv-client/src/html.rs @@ -8,6 +8,7 @@ use url::Url; use uv_pep440::VersionSpecifiers; use uv_pypi_types::{BaseUrl, CoreMetadata, File, Hashes, Yanked}; use uv_pypi_types::{HashError, LenientVersionSpecifiers}; +use uv_redacted::DisplaySafeUrl; /// A parsed structure from PyPI "HTML" index format for a single package. #[derive(Debug, Clone)] @@ -27,7 +28,7 @@ impl SimpleHtml { // Parse the first `` tag, if any, to determine the base URL to which all // relative URLs should be resolved. The HTML spec requires that the `` tag // appear before other tags with attribute values of URLs. - let base = BaseUrl::from( + let base = BaseUrl::from(DisplaySafeUrl::from( dom.nodes() .iter() .filter_map(|node| node.as_tag()) @@ -37,7 +38,7 @@ impl SimpleHtml { .transpose()? .flatten() .unwrap_or_else(|| url.clone()), - ); + )); // Parse each `` tag, to extract the filename, hash, and URL. 
let mut files: Vec = dom @@ -278,7 +279,7 @@ mod tests { insta::assert_debug_snapshot!(result, @r#" SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -335,7 +336,7 @@ mod tests { insta::assert_debug_snapshot!(result, @r#" SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -395,7 +396,7 @@ mod tests { insta::assert_debug_snapshot!(result, @r#" SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -452,7 +453,7 @@ mod tests { insta::assert_debug_snapshot!(result, @r#" SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -509,7 +510,7 @@ mod tests { insta::assert_debug_snapshot!(result, @r#" SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -566,7 +567,7 @@ mod tests { insta::assert_debug_snapshot!(result, @r#" SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -621,7 +622,7 @@ mod tests { insta::assert_debug_snapshot!(result, @r#" SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -673,10 +674,10 @@ mod tests { "; let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap(); let result = SimpleHtml::parse(text, &base).unwrap(); - insta::assert_debug_snapshot!(result, @r###" + insta::assert_debug_snapshot!(result, @r#" SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -694,7 +695,7 @@ mod tests { ), files: [], } - "###); + "#); } #[test] @@ -711,10 +712,10 @@ mod tests { "#; let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap(); let result = SimpleHtml::parse(text, &base).unwrap(); - insta::assert_debug_snapshot!(result, @r###" + insta::assert_debug_snapshot!(result, @r#" SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -732,7 +733,7 @@ mod tests { ), files: [], } - "###); + "#); } #[test] @@ -752,7 +753,7 @@ mod tests { insta::assert_debug_snapshot!(result, @r#" SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -807,7 +808,7 @@ mod tests { insta::assert_debug_snapshot!(result, @r#" SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -863,7 +864,7 @@ mod tests { Ok( SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -920,7 +921,7 @@ mod tests { Ok( SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -994,7 +995,7 @@ mod tests { insta::assert_debug_snapshot!(result, @r#" SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -1076,7 +1077,7 @@ mod tests { insta::assert_debug_snapshot!(result, @r#" SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -1179,7 +1180,7 @@ mod tests { insta::assert_debug_snapshot!(result, @r#" SimpleHtml { base: BaseUrl( - Url { + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -1252,7 +1253,7 @@ mod tests { insta::assert_debug_snapshot!(result, @r#" SimpleHtml { base: BaseUrl( - Url 
{ + DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", diff --git a/crates/uv-client/src/httpcache/control.rs b/crates/uv-client/src/httpcache/control.rs index 724683188..ddac9d1bc 100644 --- a/crates/uv-client/src/httpcache/control.rs +++ b/crates/uv-client/src/httpcache/control.rs @@ -21,7 +21,6 @@ use crate::rkyvutil::OwnedArchive; rkyv::Serialize, )] #[rkyv(derive(Debug))] -#[allow(clippy::struct_excessive_bools)] pub struct CacheControl { // directives for requests and responses /// * diff --git a/crates/uv-client/src/lib.rs b/crates/uv-client/src/lib.rs index 3ea33204c..e42c86620 100644 --- a/crates/uv-client/src/lib.rs +++ b/crates/uv-client/src/lib.rs @@ -1,6 +1,6 @@ pub use base_client::{ AuthIntegration, BaseClient, BaseClientBuilder, DEFAULT_RETRIES, ExtraMiddleware, - UvRetryableStrategy, is_extended_transient_error, + RedirectClientWithMiddleware, RequestBuilder, UvRetryableStrategy, is_extended_transient_error, }; pub use cached_client::{CacheControl, CachedClient, CachedClientError, DataWithCachePolicy}; pub use error::{Error, ErrorKind, WrappedReqwestError}; diff --git a/crates/uv-client/src/middleware.rs b/crates/uv-client/src/middleware.rs index 13bd94904..f3a899b73 100644 --- a/crates/uv-client/src/middleware.rs +++ b/crates/uv-client/src/middleware.rs @@ -1,19 +1,19 @@ use http::Extensions; use std::fmt::Debug; +use uv_redacted::DisplaySafeUrl; use reqwest::{Request, Response}; use reqwest_middleware::{Middleware, Next}; -use url::Url; /// A custom error type for the offline middleware. #[derive(Debug, Clone, PartialEq, Eq)] pub(crate) struct OfflineError { - url: Url, + url: DisplaySafeUrl, } impl OfflineError { /// Returns the URL that caused the error. - pub(crate) fn url(&self) -> &Url { + pub(crate) fn url(&self) -> &DisplaySafeUrl { &self.url } } @@ -43,7 +43,7 @@ impl Middleware for OfflineMiddleware { ) -> reqwest_middleware::Result { Err(reqwest_middleware::Error::Middleware( OfflineError { - url: req.url().clone(), + url: DisplaySafeUrl::from(req.url().clone()), } .into(), )) diff --git a/crates/uv-client/src/registry_client.rs b/crates/uv-client/src/registry_client.rs index 9831fb81d..afa1b03ae 100644 --- a/crates/uv-client/src/registry_client.rs +++ b/crates/uv-client/src/registry_client.rs @@ -10,7 +10,6 @@ use futures::{FutureExt, StreamExt, TryStreamExt}; use http::{HeaderMap, StatusCode}; use itertools::Either; use reqwest::{Proxy, Response}; -use reqwest_middleware::ClientWithMiddleware; use rustc_hash::FxHashMap; use tokio::sync::{Mutex, Semaphore}; use tracing::{Instrument, debug, info_span, instrument, trace, warn}; @@ -22,8 +21,8 @@ use uv_configuration::KeyringProviderType; use uv_configuration::{IndexStrategy, TrustedHost}; use uv_distribution_filename::{DistFilename, SourceDistFilename, WheelFilename}; use uv_distribution_types::{ - BuiltDist, File, FileLocation, IndexCapabilities, IndexFormat, IndexLocations, - IndexMetadataRef, IndexStatusCodeDecision, IndexStatusCodeStrategy, IndexUrl, IndexUrls, Name, + BuiltDist, File, IndexCapabilities, IndexFormat, IndexLocations, IndexMetadataRef, + IndexStatusCodeDecision, IndexStatusCodeStrategy, IndexUrl, IndexUrls, Name, }; use uv_metadata::{read_metadata_async_seek, read_metadata_async_stream}; use uv_normalize::PackageName; @@ -31,19 +30,19 @@ use uv_pep440::Version; use uv_pep508::MarkerEnvironment; use uv_platform_tags::Platform; use uv_pypi_types::{ResolutionMetadata, SimpleJson}; -use uv_redacted::redacted_url; +use uv_redacted::DisplaySafeUrl; use 
uv_small_str::SmallString; use uv_torch::TorchStrategy; -use crate::base_client::{BaseClientBuilder, ExtraMiddleware}; +use crate::base_client::{BaseClientBuilder, ExtraMiddleware, RedirectPolicy}; use crate::cached_client::CacheControl; use crate::flat_index::FlatIndexEntry; use crate::html::SimpleHtml; use crate::remote_metadata::wheel_metadata_from_remote_zip; use crate::rkyvutil::OwnedArchive; use crate::{ - BaseClient, CachedClient, CachedClientError, Error, ErrorKind, FlatIndexClient, - FlatIndexEntries, + BaseClient, CachedClient, Error, ErrorKind, FlatIndexClient, FlatIndexEntries, + RedirectClientWithMiddleware, }; /// A builder for an [`RegistryClient`]. @@ -116,6 +115,11 @@ impl<'a> RegistryClientBuilder<'a> { self } + pub fn retries_from_env(mut self) -> anyhow::Result { + self.base_client_builder = self.base_client_builder.retries_from_env()?; + Ok(self) + } + #[must_use] pub fn native_tls(mut self, native_tls: bool) -> Self { self.base_client_builder = self.base_client_builder.native_tls(native_tls); @@ -152,9 +156,23 @@ impl<'a> RegistryClientBuilder<'a> { self } + /// Allows credentials to be propagated on cross-origin redirects. + /// + /// WARNING: This should only be available for tests. In production code, propagating credentials + /// during cross-origin redirects can lead to security vulnerabilities including credential + /// leakage to untrusted domains. + #[cfg(test)] + #[must_use] + pub fn allow_cross_origin_credentials(mut self) -> Self { + self.base_client_builder = self.base_client_builder.allow_cross_origin_credentials(); + self + } + pub fn build(self) -> RegistryClient { // Build a base client - let builder = self.base_client_builder; + let builder = self + .base_client_builder + .redirect(RedirectPolicy::RetriggerMiddleware); let client = builder.build(); @@ -251,12 +269,12 @@ impl RegistryClient { } /// Return the [`BaseClient`] used by this client. - pub fn uncached_client(&self, url: &Url) -> &ClientWithMiddleware { + pub fn uncached_client(&self, url: &DisplaySafeUrl) -> &RedirectClientWithMiddleware { self.client.uncached().for_host(url) } /// Returns `true` if SSL verification is disabled for the given URL. - pub fn disable_ssl(&self, url: &Url) -> bool { + pub fn disable_ssl(&self, url: &DisplaySafeUrl) -> bool { self.client.uncached().disable_ssl(url) } @@ -485,10 +503,7 @@ impl RegistryClient { // ref https://github.com/servo/rust-url/issues/333 .push(""); - trace!( - "Fetching metadata for {package_name} from {}", - redacted_url(&url) - ); + trace!("Fetching metadata for {package_name} from {url}"); let cache_entry = self.cache.entry( CacheBucket::Simple, @@ -554,14 +569,14 @@ impl RegistryClient { async fn fetch_remote_index( &self, package_name: &PackageName, - url: &Url, + url: &DisplaySafeUrl, cache_entry: &CacheEntry, cache_control: CacheControl, ) -> Result, Error> { let simple_request = self .uncached_client(url) - .get(url.clone()) - .header("Accept-Encoding", "gzip") + .get(Url::from(url.clone())) + .header("Accept-Encoding", "gzip, deflate, zstd") .header("Accept", MediaType::accepts()) .build() .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?; @@ -569,7 +584,7 @@ impl RegistryClient { async { // Use the response URL, rather than the request URL, as the base for relative URLs. // This ensures that we handle redirects and other URL transformations correctly. 
- let url = response.url().clone(); + let url = DisplaySafeUrl::from(response.url().clone()); let content_type = response .headers() @@ -610,18 +625,16 @@ impl RegistryClient { .boxed_local() .instrument(info_span!("parse_simple_api", package = %package_name)) }; - self.cached_client() + let simple = self + .cached_client() .get_cacheable_with_retry( simple_request, cache_entry, cache_control, parse_simple_response, ) - .await - .map_err(|err| match err { - CachedClientError::Client(err) => err, - CachedClientError::Callback(err) => err, - }) + .await?; + Ok(simple) } /// Fetch the [`SimpleMetadata`] from a local file, using a PEP 503-compatible directory @@ -629,7 +642,7 @@ impl RegistryClient { async fn fetch_local_index( &self, package_name: &PackageName, - url: &Url, + url: &DisplaySafeUrl, ) -> Result, Error> { let path = url .to_file_path() @@ -669,35 +682,19 @@ impl RegistryClient { /// A local file path. Path(PathBuf), /// A remote URL. - Url(Url), + Url(DisplaySafeUrl), } let wheel = wheels.best_wheel(); - let location = match &wheel.file.url { - FileLocation::RelativeUrl(base, url) => { - let url = uv_pypi_types::base_url_join_relative(base, url) - .map_err(ErrorKind::JoinRelativeUrl)?; - if url.scheme() == "file" { - let path = url - .to_file_path() - .map_err(|()| ErrorKind::NonFileUrl(url.clone()))?; - WheelLocation::Path(path) - } else { - WheelLocation::Url(url) - } - } - FileLocation::AbsoluteUrl(url) => { - let url = url.to_url().map_err(ErrorKind::InvalidUrl)?; - if url.scheme() == "file" { - let path = url - .to_file_path() - .map_err(|()| ErrorKind::NonFileUrl(url.clone()))?; - WheelLocation::Path(path) - } else { - WheelLocation::Url(url) - } - } + let url = wheel.file.url.to_url().map_err(ErrorKind::InvalidUrl)?; + let location = if url.scheme() == "file" { + let path = url + .to_file_path() + .map_err(|()| ErrorKind::NonFileUrl(url.clone()))?; + WheelLocation::Path(path) + } else { + WheelLocation::Url(url) }; match location { @@ -770,14 +767,15 @@ impl RegistryClient { &self, index: &IndexUrl, file: &File, - url: &Url, + url: &DisplaySafeUrl, capabilities: &IndexCapabilities, ) -> Result { // If the metadata file is available at its own url (PEP 658), download it from there. 
let filename = WheelFilename::from_str(&file.filename).map_err(ErrorKind::WheelFilename)?; if file.dist_info_metadata { let mut url = url.clone(); - url.set_path(&format!("{}.metadata", url.path())); + let path = format!("{}.metadata", url.path()); + url.set_path(&path); let cache_entry = self.cache.entry( CacheBucket::Wheels, @@ -818,7 +816,7 @@ impl RegistryClient { }; let req = self .uncached_client(&url) - .get(url.clone()) + .get(Url::from(url.clone())) .build() .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?; Ok(self @@ -844,7 +842,7 @@ impl RegistryClient { async fn wheel_metadata_no_pep658<'data>( &self, filename: &'data WheelFilename, - url: &'data Url, + url: &'data DisplaySafeUrl, index: Option<&'data IndexUrl>, cache_shard: WheelCache<'data>, capabilities: &'data IndexCapabilities, @@ -874,7 +872,7 @@ impl RegistryClient { if index.is_none_or(|index| capabilities.supports_range_requests(index)) { let req = self .uncached_client(url) - .head(url.clone()) + .head(Url::from(url.clone())) .header( "accept-encoding", http::HeaderValue::from_static("identity"), @@ -895,22 +893,20 @@ impl RegistryClient { let mut reader = AsyncHttpRangeReader::from_head_response( self.uncached_client(url).clone(), response, - url.clone(), + Url::from(url.clone()), headers.clone(), ) .await .map_err(|err| ErrorKind::AsyncHttpRangeReader(url.clone(), err))?; trace!("Getting metadata for {filename} by range request"); let text = wheel_metadata_from_remote_zip(filename, url, &mut reader).await?; - let metadata = - ResolutionMetadata::parse_metadata(text.as_bytes()).map_err(|err| { - Error::from(ErrorKind::MetadataParseError( - filename.clone(), - url.to_string(), - Box::new(err), - )) - })?; - Ok::>(metadata) + ResolutionMetadata::parse_metadata(text.as_bytes()).map_err(|err| { + Error::from(ErrorKind::MetadataParseError( + filename.clone(), + url.to_string(), + Box::new(err), + )) + }) } .boxed_local() .instrument(info_span!("read_metadata_range_request", wheel = %filename)) @@ -949,7 +945,7 @@ impl RegistryClient { // Create a request to stream the file. let req = self .uncached_client(url) - .get(url.clone()) + .get(Url::from(url.clone())) .header( // `reqwest` defaults to accepting compressed responses. // Specify identity encoding to get consistent .whl downloading @@ -1141,7 +1137,11 @@ impl SimpleMetadata { } /// Read the [`SimpleMetadata`] from an HTML index. 
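// Illustrative sketch, not part of the patch: PEP 658 serves wheel metadata at the
// wheel URL with a `.metadata` suffix, which is exactly what the `set_path` call
// above constructs. Plain `url::Url` is used here for brevity.
use url::Url;

fn pep_658_metadata_url(wheel_url: &Url) -> Url {
    let mut url = wheel_url.clone();
    // Append `.metadata` to the wheel's path, e.g. `/pkg-1.0-py3-none-any.whl.metadata`.
    let path = format!("{}.metadata", url.path());
    url.set_path(&path);
    url
}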
- fn from_html(text: &str, package_name: &PackageName, url: &Url) -> Result { + fn from_html( + text: &str, + package_name: &PackageName, + url: &DisplaySafeUrl, + ) -> Result { let SimpleHtml { base, files } = SimpleHtml::parse(text, url).map_err(|err| Error::from_html_err(err, url.clone()))?; @@ -1221,12 +1221,191 @@ mod tests { use std::str::FromStr; use url::Url; - use uv_normalize::PackageName; - use uv_pypi_types::{JoinRelativeError, SimpleJson}; + use uv_pypi_types::SimpleJson; + use uv_redacted::DisplaySafeUrl; use crate::{SimpleMetadata, SimpleMetadatum, html::SimpleHtml}; + use crate::RegistryClientBuilder; + use uv_cache::Cache; + use uv_distribution_types::{FileLocation, ToUrlError}; + use uv_small_str::SmallString; + use wiremock::matchers::{basic_auth, method, path_regex}; + use wiremock::{Mock, MockServer, ResponseTemplate}; + + type Error = Box; + + async fn start_test_server(username: &'static str, password: &'static str) -> MockServer { + let server = MockServer::start().await; + + Mock::given(method("GET")) + .and(basic_auth(username, password)) + .respond_with(ResponseTemplate::new(200)) + .mount(&server) + .await; + + Mock::given(method("GET")) + .respond_with(ResponseTemplate::new(401)) + .mount(&server) + .await; + + server + } + + #[tokio::test] + async fn test_redirect_to_server_with_credentials() -> Result<(), Error> { + let username = "user"; + let password = "password"; + + let auth_server = start_test_server(username, password).await; + let auth_base_url = DisplaySafeUrl::parse(&auth_server.uri())?; + + let redirect_server = MockServer::start().await; + + // Configure the redirect server to respond with a 302 to the auth server + Mock::given(method("GET")) + .respond_with( + ResponseTemplate::new(302).insert_header("Location", format!("{auth_base_url}")), + ) + .mount(&redirect_server) + .await; + + let redirect_server_url = DisplaySafeUrl::parse(&redirect_server.uri())?; + + let cache = Cache::temp()?; + let registry_client = RegistryClientBuilder::new(cache) + .allow_cross_origin_credentials() + .build(); + let client = registry_client.cached_client().uncached(); + + assert_eq!( + client + .for_host(&redirect_server_url) + .get(redirect_server.uri()) + .send() + .await? + .status(), + 401, + "Requests should fail if credentials are missing" + ); + + let mut url = redirect_server_url.clone(); + let _ = url.set_username(username); + let _ = url.set_password(Some(password)); + + assert_eq!( + client + .for_host(&redirect_server_url) + .get(Url::from(url)) + .send() + .await? + .status(), + 200, + "Requests should succeed if credentials are present" + ); + + Ok(()) + } + + #[tokio::test] + async fn test_redirect_root_relative_url() -> Result<(), Error> { + let username = "user"; + let password = "password"; + + let redirect_server = MockServer::start().await; + + // Configure the redirect server to respond with a 307 with a relative URL. 
+ Mock::given(method("GET")) + .and(path_regex("/foo/")) + .respond_with( + ResponseTemplate::new(307).insert_header("Location", "/bar/baz/".to_string()), + ) + .mount(&redirect_server) + .await; + + Mock::given(method("GET")) + .and(path_regex("/bar/baz/")) + .and(basic_auth(username, password)) + .respond_with(ResponseTemplate::new(200)) + .mount(&redirect_server) + .await; + + let redirect_server_url = DisplaySafeUrl::parse(&redirect_server.uri())?.join("foo/")?; + + let cache = Cache::temp()?; + let registry_client = RegistryClientBuilder::new(cache) + .allow_cross_origin_credentials() + .build(); + let client = registry_client.cached_client().uncached(); + + let mut url = redirect_server_url.clone(); + let _ = url.set_username(username); + let _ = url.set_password(Some(password)); + + assert_eq!( + client + .for_host(&url) + .get(Url::from(url)) + .send() + .await? + .status(), + 200, + "Requests should succeed for relative URL" + ); + + Ok(()) + } + + #[tokio::test] + async fn test_redirect_relative_url() -> Result<(), Error> { + let username = "user"; + let password = "password"; + + let redirect_server = MockServer::start().await; + + // Configure the redirect server to respond with a 307 with a relative URL. + Mock::given(method("GET")) + .and(path_regex("/foo/bar/baz/")) + .and(basic_auth(username, password)) + .respond_with(ResponseTemplate::new(200)) + .mount(&redirect_server) + .await; + + Mock::given(method("GET")) + .and(path_regex("/foo/")) + .and(basic_auth(username, password)) + .respond_with( + ResponseTemplate::new(307).insert_header("Location", "bar/baz/".to_string()), + ) + .mount(&redirect_server) + .await; + + let cache = Cache::temp()?; + let registry_client = RegistryClientBuilder::new(cache) + .allow_cross_origin_credentials() + .build(); + let client = registry_client.cached_client().uncached(); + + let redirect_server_url = DisplaySafeUrl::parse(&redirect_server.uri())?.join("foo/")?; + let mut url = redirect_server_url.clone(); + let _ = url.set_username(username); + let _ = url.set_password(Some(password)); + + assert_eq!( + client + .for_host(&url) + .get(Url::from(url)) + .send() + .await? 
+ .status(), + 200, + "Requests should succeed for relative URL" + ); + + Ok(()) + } + #[test] fn ignore_failing_files() { // 1.7.7 has an invalid requires-python field (double comma), 1.7.8 is valid @@ -1263,7 +1442,7 @@ mod tests { } "#; let data: SimpleJson = serde_json::from_str(response).unwrap(); - let base = Url::parse("https://pypi.org/simple/pyflyby/").unwrap(); + let base = DisplaySafeUrl::parse("https://pypi.org/simple/pyflyby/").unwrap(); let simple_metadata = SimpleMetadata::from_files( data.files, &PackageName::from_str("pyflyby").unwrap(), @@ -1280,7 +1459,7 @@ mod tests { /// /// See: #[test] - fn relative_urls_code_artifact() -> Result<(), JoinRelativeError> { + fn relative_urls_code_artifact() -> Result<(), ToUrlError> { let text = r#" @@ -1300,16 +1479,20 @@ mod tests { "#; // Note the lack of a trailing `/` here is important for coverage of url-join behavior - let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask") + let base = DisplaySafeUrl::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask") .unwrap(); let SimpleHtml { base, files } = SimpleHtml::parse(text, &base).unwrap(); + let base = SmallString::from(base.as_str()); // Test parsing of the file urls let urls = files + .into_iter() + .map(|file| FileLocation::new(file.url, &base).to_url()) + .collect::, _>>()?; + let urls = urls .iter() - .map(|file| uv_pypi_types::base_url_join_relative(base.as_url().as_str(), &file.url)) - .collect::, JoinRelativeError>>()?; - let urls = urls.iter().map(Url::as_str).collect::>(); + .map(DisplaySafeUrl::to_string) + .collect::>(); insta::assert_debug_snapshot!(urls, @r#" [ "https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/0.1/Flask-0.1.tar.gz", diff --git a/crates/uv-client/tests/it/remote_metadata.rs b/crates/uv-client/tests/it/remote_metadata.rs index 72bc73222..1dbdf1bad 100644 --- a/crates/uv-client/tests/it/remote_metadata.rs +++ b/crates/uv-client/tests/it/remote_metadata.rs @@ -1,13 +1,13 @@ use std::str::FromStr; use anyhow::Result; -use url::Url; use uv_cache::Cache; use uv_client::RegistryClientBuilder; use uv_distribution_filename::WheelFilename; use uv_distribution_types::{BuiltDist, DirectUrlBuiltDist, IndexCapabilities}; use uv_pep508::VerbatimUrl; +use uv_redacted::DisplaySafeUrl; #[tokio::test] async fn remote_metadata_with_and_without_cache() -> Result<()> { @@ -21,7 +21,7 @@ async fn remote_metadata_with_and_without_cache() -> Result<()> { let filename = WheelFilename::from_str(url.rsplit_once('/').unwrap().1)?; let dist = BuiltDist::DirectUrl(DirectUrlBuiltDist { filename, - location: Box::new(Url::parse(url).unwrap()), + location: Box::new(DisplaySafeUrl::parse(url).unwrap()), url: VerbatimUrl::from_str(url).unwrap(), }); let capabilities = IndexCapabilities::default(); diff --git a/crates/uv-client/tests/it/user_agent_version.rs b/crates/uv-client/tests/it/user_agent_version.rs index 42eebf3be..b10249154 100644 --- a/crates/uv-client/tests/it/user_agent_version.rs +++ b/crates/uv-client/tests/it/user_agent_version.rs @@ -16,6 +16,7 @@ use uv_client::LineHaul; use uv_client::RegistryClientBuilder; use uv_pep508::{MarkerEnvironment, MarkerEnvironmentBuilder}; use uv_platform_tags::{Arch, Os, Platform}; +use uv_redacted::DisplaySafeUrl; use uv_version::version; #[tokio::test] @@ -54,12 +55,12 @@ async fn test_user_agent_has_version() -> Result<()> { let client = RegistryClientBuilder::new(cache).build(); // Send 
request to our dummy server - let url = Url::from_str(&format!("http://{addr}"))?; + let url = DisplaySafeUrl::from_str(&format!("http://{addr}"))?; let res = client .cached_client() .uncached() .for_host(&url) - .get(url) + .get(Url::from(url)) .send() .await?; @@ -151,12 +152,12 @@ async fn test_user_agent_has_linehaul() -> Result<()> { let client = builder.build(); // Send request to our dummy server - let url = Url::from_str(&format!("http://{addr}"))?; + let url = DisplaySafeUrl::from_str(&format!("http://{addr}"))?; let res = client .cached_client() .uncached() .for_host(&url) - .get(url) + .get(Url::from(url)) .send() .await?; diff --git a/crates/uv-configuration/src/build_options.rs b/crates/uv-configuration/src/build_options.rs index 1a62a1a12..8b493cbf0 100644 --- a/crates/uv-configuration/src/build_options.rs +++ b/crates/uv-configuration/src/build_options.rs @@ -4,7 +4,7 @@ use uv_pep508::PackageName; use crate::{PackageNameSpecifier, PackageNameSpecifiers}; -#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Hash)] pub enum BuildKind { /// A PEP 517 wheel build. #[default] diff --git a/crates/uv-configuration/src/dependency_groups.rs b/crates/uv-configuration/src/dependency_groups.rs index 345f4077c..a3b90ea5f 100644 --- a/crates/uv-configuration/src/dependency_groups.rs +++ b/crates/uv-configuration/src/dependency_groups.rs @@ -295,6 +295,15 @@ pub struct DependencyGroupsWithDefaults { } impl DependencyGroupsWithDefaults { + /// Do not enable any groups + /// + /// Many places in the code need to know what dependency-groups are active, + /// but various commands or subsystems never enable any dependency-groups, + /// in which case they want this. + pub fn none() -> Self { + DependencyGroups::default().with_defaults(DefaultGroups::default()) + } + /// Returns `true` if the specification was enabled, and *only* because it was a default pub fn contains_because_default(&self, group: &GroupName) -> bool { self.cur.contains(group) && !self.prev.contains(group) diff --git a/crates/uv-configuration/src/extras.rs b/crates/uv-configuration/src/extras.rs index 3bc9da21a..e39fc72ef 100644 --- a/crates/uv-configuration/src/extras.rs +++ b/crates/uv-configuration/src/extras.rs @@ -263,6 +263,14 @@ pub struct ExtrasSpecificationWithDefaults { } impl ExtrasSpecificationWithDefaults { + /// Do not enable any extras + /// + /// Many places in the code need to know what extras are active, + /// but various commands or subsystems never enable any extras, + /// in which case they want this. 
+ pub fn none() -> Self { + ExtrasSpecification::default().with_defaults(DefaultExtras::default()) + } /// Returns `true` if the specification was enabled, and *only* because it was a default pub fn contains_because_default(&self, extra: &ExtraName) -> bool { self.cur.contains(extra) && !self.prev.contains(extra) diff --git a/crates/uv-configuration/src/name_specifiers.rs b/crates/uv-configuration/src/name_specifiers.rs index 5ff209948..3efeee1f2 100644 --- a/crates/uv-configuration/src/name_specifiers.rs +++ b/crates/uv-configuration/src/name_specifiers.rs @@ -1,3 +1,5 @@ +#[cfg(feature = "schemars")] +use std::borrow::Cow; use std::str::FromStr; use uv_pep508::PackageName; @@ -63,28 +65,16 @@ impl<'de> serde::Deserialize<'de> for PackageNameSpecifier { #[cfg(feature = "schemars")] impl schemars::JsonSchema for PackageNameSpecifier { - fn schema_name() -> String { - "PackageNameSpecifier".to_string() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("PackageNameSpecifier") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - schemars::schema::SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - string: Some(Box::new(schemars::schema::StringValidation { - // See: https://packaging.python.org/en/latest/specifications/name-normalization/#name-format - pattern: Some( - r"^(:none:|:all:|([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9]))$" - .to_string(), - ), - ..schemars::schema::StringValidation::default() - })), - metadata: Some(Box::new(schemars::schema::Metadata { - description: Some("The name of a package, or `:all:` or `:none:` to select or omit all packages, respectively.".to_string()), - ..schemars::schema::Metadata::default() - })), - ..schemars::schema::SchemaObject::default() - } - .into() + fn json_schema(_gen: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "string", + "pattern": r"^(:none:|:all:|([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9]))$", + "description": "The name of a package, or `:all:` or `:none:` to select or omit all packages, respectively.", + }) } } diff --git a/crates/uv-configuration/src/required_version.rs b/crates/uv-configuration/src/required_version.rs index a0138a46e..70c69eaf3 100644 --- a/crates/uv-configuration/src/required_version.rs +++ b/crates/uv-configuration/src/required_version.rs @@ -1,5 +1,6 @@ -use std::fmt::Formatter; -use std::str::FromStr; +#[cfg(feature = "schemars")] +use std::borrow::Cow; +use std::{fmt::Formatter, str::FromStr}; use uv_pep440::{Version, VersionSpecifier, VersionSpecifiers, VersionSpecifiersParseError}; @@ -36,20 +37,15 @@ impl FromStr for RequiredVersion { #[cfg(feature = "schemars")] impl schemars::JsonSchema for RequiredVersion { - fn schema_name() -> String { - String::from("RequiredVersion") + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("RequiredVersion") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - schemars::schema::SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - metadata: Some(Box::new(schemars::schema::Metadata { - description: Some("A version specifier, e.g. 
`>=0.5.0` or `==0.5.0`.".to_string()), - ..schemars::schema::Metadata::default() - })), - ..schemars::schema::SchemaObject::default() - } - .into() + fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "string", + "description": "A version specifier, e.g. `>=0.5.0` or `==0.5.0`." + }) } } diff --git a/crates/uv-configuration/src/sources.rs b/crates/uv-configuration/src/sources.rs index c60d69ef4..f8d0c3367 100644 --- a/crates/uv-configuration/src/sources.rs +++ b/crates/uv-configuration/src/sources.rs @@ -1,4 +1,6 @@ -#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)] +#[derive( + Debug, Default, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize, +)] #[serde(rename_all = "kebab-case", deny_unknown_fields)] pub enum SourceStrategy { /// Use `tool.uv.sources` when resolving dependencies. diff --git a/crates/uv-configuration/src/target_triple.rs b/crates/uv-configuration/src/target_triple.rs index c0c651409..81499deff 100644 --- a/crates/uv-configuration/src/target_triple.rs +++ b/crates/uv-configuration/src/target_triple.rs @@ -226,6 +226,10 @@ pub enum TargetTriple { #[serde(rename = "aarch64-manylinux_2_40")] #[serde(alias = "aarch64-manylinux240")] Aarch64Manylinux240, + + /// A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12. + #[cfg_attr(feature = "clap", value(name = "wasm32-pyodide2024"))] + Wasm32Pyodide2024, } impl TargetTriple { @@ -450,6 +454,13 @@ impl TargetTriple { }, Arch::Aarch64, ), + Self::Wasm32Pyodide2024 => Platform::new( + Os::Pyodide { + major: 2024, + minor: 0, + }, + Arch::Wasm32, + ), } } @@ -490,6 +501,7 @@ impl TargetTriple { Self::Aarch64Manylinux238 => "aarch64", Self::Aarch64Manylinux239 => "aarch64", Self::Aarch64Manylinux240 => "aarch64", + Self::Wasm32Pyodide2024 => "wasm32", } } @@ -530,6 +542,7 @@ impl TargetTriple { Self::Aarch64Manylinux238 => "Linux", Self::Aarch64Manylinux239 => "Linux", Self::Aarch64Manylinux240 => "Linux", + Self::Wasm32Pyodide2024 => "Emscripten", } } @@ -570,6 +583,10 @@ impl TargetTriple { Self::Aarch64Manylinux238 => "", Self::Aarch64Manylinux239 => "", Self::Aarch64Manylinux240 => "", + // This is the value Emscripten gives for its version: + // https://github.com/emscripten-core/emscripten/blob/4.0.8/system/lib/libc/emscripten_syscall_stubs.c#L63 + // It doesn't really seem to mean anything? But for completeness we include it here. + Self::Wasm32Pyodide2024 => "#1", } } @@ -610,6 +627,9 @@ impl TargetTriple { Self::Aarch64Manylinux238 => "", Self::Aarch64Manylinux239 => "", Self::Aarch64Manylinux240 => "", + // This is the Emscripten compiler version for Pyodide 2024. 
+ // See https://pyodide.org/en/stable/development/abi.html#pyodide-2024-0 + Self::Wasm32Pyodide2024 => "3.1.58", } } @@ -650,6 +670,7 @@ impl TargetTriple { Self::Aarch64Manylinux238 => "posix", Self::Aarch64Manylinux239 => "posix", Self::Aarch64Manylinux240 => "posix", + Self::Wasm32Pyodide2024 => "posix", } } @@ -690,6 +711,7 @@ impl TargetTriple { Self::Aarch64Manylinux238 => "linux", Self::Aarch64Manylinux239 => "linux", Self::Aarch64Manylinux240 => "linux", + Self::Wasm32Pyodide2024 => "emscripten", } } @@ -730,6 +752,7 @@ impl TargetTriple { Self::Aarch64Manylinux238 => true, Self::Aarch64Manylinux239 => true, Self::Aarch64Manylinux240 => true, + Self::Wasm32Pyodide2024 => false, } } diff --git a/crates/uv-configuration/src/threading.rs b/crates/uv-configuration/src/threading.rs index 58b6190a6..2f70b5d81 100644 --- a/crates/uv-configuration/src/threading.rs +++ b/crates/uv-configuration/src/threading.rs @@ -62,7 +62,7 @@ pub static RAYON_PARALLELISM: AtomicUsize = AtomicUsize::new(0); /// `LazyLock::force(&RAYON_INITIALIZE)`. pub static RAYON_INITIALIZE: LazyLock<()> = LazyLock::new(|| { rayon::ThreadPoolBuilder::new() - .num_threads(RAYON_PARALLELISM.load(Ordering::SeqCst)) + .num_threads(RAYON_PARALLELISM.load(Ordering::Relaxed)) .stack_size(min_stack_size()) .build_global() .expect("failed to initialize global rayon pool"); diff --git a/crates/uv-configuration/src/trusted_host.rs b/crates/uv-configuration/src/trusted_host.rs index 64fb14169..07ff2998a 100644 --- a/crates/uv-configuration/src/trusted_host.rs +++ b/crates/uv-configuration/src/trusted_host.rs @@ -1,4 +1,6 @@ use serde::{Deserialize, Deserializer}; +#[cfg(feature = "schemars")] +use std::borrow::Cow; use std::str::FromStr; use url::Url; @@ -143,20 +145,15 @@ impl std::fmt::Display for TrustedHost { #[cfg(feature = "schemars")] impl schemars::JsonSchema for TrustedHost { - fn schema_name() -> String { - "TrustedHost".to_string() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("TrustedHost") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - schemars::schema::SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - metadata: Some(Box::new(schemars::schema::Metadata { - description: Some("A host or host-port pair.".to_string()), - ..schemars::schema::Metadata::default() - })), - ..schemars::schema::SchemaObject::default() - } - .into() + fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "string", + "description": "A host or host-port pair." 
+ }) } } diff --git a/crates/uv-dev/Cargo.toml b/crates/uv-dev/Cargo.toml index c9d218360..c778d842b 100644 --- a/crates/uv-dev/Cargo.toml +++ b/crates/uv-dev/Cargo.toml @@ -40,14 +40,16 @@ anyhow = { workspace = true } clap = { workspace = true, features = ["derive", "wrap_help"] } fs-err = { workspace = true, features = ["tokio"] } itertools = { workspace = true } -markdown = { version = "0.3.0" } +markdown = { version = "1.0.0" } owo-colors = { workspace = true } poloto = { version = "19.1.2", optional = true } pretty_assertions = { version = "1.4.1" } +reqwest = { workspace = true } resvg = { version = "0.29.0", optional = true } schemars = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } +serde_yaml = { version = "0.9.34" } tagu = { version = "0.1.6", optional = true } textwrap = { workspace = true } tokio = { workspace = true } diff --git a/crates/uv-dev/src/compile.rs b/crates/uv-dev/src/compile.rs index 434b5e791..d2b685b23 100644 --- a/crates/uv-dev/src/compile.rs +++ b/crates/uv-dev/src/compile.rs @@ -4,7 +4,7 @@ use clap::Parser; use tracing::info; use uv_cache::{Cache, CacheArgs}; -use uv_configuration::Concurrency; +use uv_configuration::{Concurrency, PreviewMode}; use uv_python::{EnvironmentPreference, PythonEnvironment, PythonRequest}; #[derive(Parser)] @@ -26,6 +26,7 @@ pub(crate) async fn compile(args: CompileArgs) -> anyhow::Result<()> { &PythonRequest::default(), EnvironmentPreference::OnlyVirtual, &cache, + PreviewMode::Disabled, )? .into_interpreter(); interpreter.sys_executable().to_path_buf() diff --git a/crates/uv-dev/src/generate_all.rs b/crates/uv-dev/src/generate_all.rs index 1655f9a21..96f906830 100644 --- a/crates/uv-dev/src/generate_all.rs +++ b/crates/uv-dev/src/generate_all.rs @@ -4,7 +4,7 @@ use anyhow::Result; use crate::{ generate_cli_reference, generate_env_vars_reference, generate_json_schema, - generate_options_reference, + generate_options_reference, generate_sysconfig_mappings, }; #[derive(clap::Args)] @@ -26,10 +26,12 @@ pub(crate) enum Mode { DryRun, } -pub(crate) fn main(args: &Args) -> Result<()> { +pub(crate) async fn main(args: &Args) -> Result<()> { generate_json_schema::main(&generate_json_schema::Args { mode: args.mode })?; generate_options_reference::main(&generate_options_reference::Args { mode: args.mode })?; generate_cli_reference::main(&generate_cli_reference::Args { mode: args.mode })?; generate_env_vars_reference::main(&generate_env_vars_reference::Args { mode: args.mode })?; + generate_sysconfig_mappings::main(&generate_sysconfig_mappings::Args { mode: args.mode }) + .await?; Ok(()) } diff --git a/crates/uv-dev/src/generate_cli_reference.rs b/crates/uv-dev/src/generate_cli_reference.rs index 7ae8ba9cb..5ebedab8a 100644 --- a/crates/uv-dev/src/generate_cli_reference.rs +++ b/crates/uv-dev/src/generate_cli_reference.rs @@ -24,7 +24,7 @@ const REPLACEMENTS: &[(&str, &str)] = &[ // TODO(zanieb): In general, we should show all of the environment variables in the reference // but this one is non-standard so it's the only one included right now. When we tackle the rest // we can fix the formatting. 
- (" [env: "UVPYTHONDOWNLOADS=never"]", ""), + (" [env: "UV_PYTHON_DOWNLOADS=never"]", ""), ]; const SHOW_HIDDEN_COMMANDS: &[&str] = &["generate-shell-completion"]; diff --git a/crates/uv-dev/src/generate_env_vars_reference.rs b/crates/uv-dev/src/generate_env_vars_reference.rs index 7265200c1..1004b00ef 100644 --- a/crates/uv-dev/src/generate_env_vars_reference.rs +++ b/crates/uv-dev/src/generate_env_vars_reference.rs @@ -21,7 +21,7 @@ pub(crate) fn main(args: &Args) -> anyhow::Result<()> { let filename = "environment.md"; let reference_path = PathBuf::from(ROOT_DIR) .join("docs") - .join("configuration") + .join("reference") .join(filename); match args.mode { diff --git a/crates/uv-dev/src/generate_json_schema.rs b/crates/uv-dev/src/generate_json_schema.rs index 75465f429..8a4ff47d5 100644 --- a/crates/uv-dev/src/generate_json_schema.rs +++ b/crates/uv-dev/src/generate_json_schema.rs @@ -3,7 +3,7 @@ use std::path::PathBuf; use anstream::println; use anyhow::{Result, bail}; use pretty_assertions::StrComparison; -use schemars::{JsonSchema, schema_for}; +use schemars::JsonSchema; use serde::Deserialize; use uv_settings::Options as SettingsOptions; @@ -91,7 +91,10 @@ const REPLACEMENTS: &[(&str, &str)] = &[ /// Generate the JSON schema for the combined options as a string. fn generate() -> String { - let schema = schema_for!(CombinedOptions); + let settings = schemars::generate::SchemaSettings::draft07(); + let generator = schemars::SchemaGenerator::new(settings); + let schema = generator.into_root_schema_for::(); + let mut output = serde_json::to_string_pretty(&schema).unwrap(); for (value, replacement) in REPLACEMENTS { diff --git a/crates/uv-dev/src/generate_sysconfig_mappings.rs b/crates/uv-dev/src/generate_sysconfig_mappings.rs new file mode 100644 index 000000000..b9f58dd92 --- /dev/null +++ b/crates/uv-dev/src/generate_sysconfig_mappings.rs @@ -0,0 +1,198 @@ +//! Generate sysconfig mappings for supported python-build-standalone *nix platforms. 
+use anstream::println;
+use anyhow::{Result, bail};
+use pretty_assertions::StrComparison;
+use serde::Deserialize;
+use std::collections::BTreeMap;
+use std::fmt::Write;
+use std::path::PathBuf;
+
+use crate::ROOT_DIR;
+use crate::generate_all::Mode;
+
+/// Contains current supported targets
+const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250708/cpython-unix/targets.yml";
+
+#[derive(clap::Args)]
+pub(crate) struct Args {
+    #[arg(long, default_value_t, value_enum)]
+    pub(crate) mode: Mode,
+}
+
+#[derive(Debug, Deserialize)]
+struct TargetConfig {
+    host_cc: Option<String>,
+    host_cxx: Option<String>,
+    target_cc: Option<String>,
+    target_cxx: Option<String>,
+}
+
+pub(crate) async fn main(args: &Args) -> Result<()> {
+    let reference_string = generate().await?;
+    let filename = "generated_mappings.rs";
+    let reference_path = PathBuf::from(ROOT_DIR)
+        .join("crates")
+        .join("uv-python")
+        .join("src")
+        .join("sysconfig")
+        .join(filename);
+
+    match args.mode {
+        Mode::DryRun => {
+            println!("{reference_string}");
+        }
+        Mode::Check => match fs_err::read_to_string(reference_path) {
+            Ok(current) => {
+                if current == reference_string {
+                    println!("Up-to-date: {filename}");
+                } else {
+                    let comparison = StrComparison::new(&current, &reference_string);
+                    bail!(
+                        "{filename} changed, please run `cargo dev generate-sysconfig-metadata`:\n{comparison}"
+                    );
+                }
+            }
+            Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
+                bail!("{filename} not found, please run `cargo dev generate-sysconfig-metadata`");
+            }
+            Err(err) => {
+                bail!(
+                    "{filename} changed, please run `cargo dev generate-sysconfig-metadata`:\n{err}"
+                );
+            }
+        },
+        Mode::Write => match fs_err::read_to_string(&reference_path) {
+            Ok(current) => {
+                if current == reference_string {
+                    println!("Up-to-date: {filename}");
+                } else {
+                    println!("Updating: {filename}");
+                    fs_err::write(reference_path, reference_string.as_bytes())?;
+                }
+            }
+            Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
+                println!("Updating: {filename}");
+                fs_err::write(reference_path, reference_string.as_bytes())?;
+            }
+            Err(err) => {
+                bail!(
+                    "{filename} changed, please run `cargo dev generate-sysconfig-metadata`:\n{err}"
+                );
+            }
+        },
+    }
+
+    Ok(())
+}
+
+async fn generate() -> Result<String> {
+    println!("Downloading python-build-standalone cpython-unix/targets.yml ...");
+    let body = reqwest::get(TARGETS_YML_URL).await?.text().await?;
+
+    let parsed: BTreeMap<String, TargetConfig> = serde_yaml::from_str(&body)?;
+
+    let mut replacements: BTreeMap<&str, BTreeMap<String, String>> = BTreeMap::new();
+
+    for targets_config in parsed.values() {
+        for sysconfig_cc_entry in ["CC", "LDSHARED", "BLDSHARED", "LINKCC"] {
+            if let Some(ref from_cc) = targets_config.host_cc {
+                replacements
+                    .entry(sysconfig_cc_entry)
+                    .or_default()
+                    .insert(from_cc.to_string(), "cc".to_string());
+            }
+            if let Some(ref from_cc) = targets_config.target_cc {
+                replacements
+                    .entry(sysconfig_cc_entry)
+                    .or_default()
+                    .insert(from_cc.to_string(), "cc".to_string());
+            }
+        }
+        for sysconfig_cxx_entry in ["CXX", "LDCXXSHARED"] {
+            if let Some(ref from_cxx) = targets_config.host_cxx {
+                replacements
+                    .entry(sysconfig_cxx_entry)
+                    .or_default()
+                    .insert(from_cxx.to_string(), "c++".to_string());
+            }
+            if let Some(ref from_cxx) = targets_config.target_cxx {
+                replacements
+                    .entry(sysconfig_cxx_entry)
+                    .or_default()
+                    .insert(from_cxx.to_string(), "c++".to_string());
+            }
+        }
+    }
+
+    let mut output = String::new();
+
+    // Opening statements
+    output.push_str("//! DO NOT EDIT\n");
+    output.push_str("//!\n");
+    output.push_str("//! Generated with `cargo run dev generate-sysconfig-metadata`\n");
+    output.push_str("//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250708/cpython-unix/targets.yml>\n");
+    output.push_str("//!\n");
+
+    // Disable clippy/fmt
+    output.push_str("#![allow(clippy::all)]\n");
+    output.push_str("#![cfg_attr(any(), rustfmt::skip)]\n\n");
+
+    // Begin main code
+    output.push_str("use std::collections::BTreeMap;\n");
+    output.push_str("use std::sync::LazyLock;\n\n");
+    output.push_str("use crate::sysconfig::replacements::{ReplacementEntry, ReplacementMode};\n\n");
+
+    output.push_str(
+        "/// Mapping for sysconfig keys to lookup and replace with the appropriate entry.\n",
+    );
+    output.push_str("pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock<BTreeMap<String, Vec<ReplacementEntry>>> = LazyLock::new(|| {\n");
+    output.push_str("    BTreeMap::from_iter([\n");
+
+    // Add Replacement Entries for CC, CXX, etc.
+    for (key, entries) in &replacements {
+        writeln!(output, "        (\"{key}\".to_string(), vec![")?;
+        for (from, to) in entries {
+            writeln!(
+                output,
+                "            ReplacementEntry {{ mode: ReplacementMode::Partial {{ from: \"{from}\".to_string() }}, to: \"{to}\".to_string() }},"
+            )?;
+        }
+        writeln!(output, "        ]),")?;
+    }
+
+    // Add AR case last
+    output.push_str("        (\"AR\".to_string(), vec![\n");
+    output.push_str("            ReplacementEntry {\n");
+    output.push_str("                mode: ReplacementMode::Full,\n");
+    output.push_str("                to: \"ar\".to_string(),\n");
+    output.push_str("            },\n");
+    output.push_str("        ]),\n");
+
+    // Closing
+    output.push_str("    ])\n});\n");
+
+    Ok(output)
+}
+
+#[cfg(test)]
+mod tests {
+    use std::env;
+
+    use anyhow::Result;
+
+    use uv_static::EnvVars;
+
+    use crate::generate_all::Mode;
+
+    use super::{Args, main};
+
+    #[tokio::test]
+    async fn test_generate_sysconfig_mappings() -> Result<()> {
+        let mode = if env::var(EnvVars::UV_UPDATE_SCHEMA).as_deref() == Ok("1") {
+            Mode::Write
+        } else {
+            Mode::Check
+        };
+        main(&Args { mode }).await
+    }
+}
diff --git a/crates/uv-dev/src/lib.rs b/crates/uv-dev/src/lib.rs
index 8ad97130a..c01cc62c4 100644
--- a/crates/uv-dev/src/lib.rs
+++ b/crates/uv-dev/src/lib.rs
@@ -11,6 +11,7 @@ use crate::generate_cli_reference::Args as GenerateCliReferenceArgs;
 use crate::generate_env_vars_reference::Args as GenerateEnvVarsReferenceArgs;
 use crate::generate_json_schema::Args as GenerateJsonSchemaArgs;
 use crate::generate_options_reference::Args as GenerateOptionsReferenceArgs;
+use crate::generate_sysconfig_mappings::Args as GenerateSysconfigMetadataArgs;
 #[cfg(feature = "render")]
 use crate::render_benchmarks::RenderBenchmarksArgs;
 use crate::wheel_metadata::WheelMetadataArgs;
@@ -22,6 +23,7 @@ mod generate_cli_reference;
 mod generate_env_vars_reference;
 mod generate_json_schema;
 mod generate_options_reference;
+mod generate_sysconfig_mappings;
 mod render_benchmarks;
 mod wheel_metadata;
@@ -45,6 +47,8 @@ enum Cli {
     GenerateCliReference(GenerateCliReferenceArgs),
     /// Generate the environment variables reference for the documentation.
     GenerateEnvVarsReference(GenerateEnvVarsReferenceArgs),
+    /// Generate the sysconfig metadata from derived targets.
+    GenerateSysconfigMetadata(GenerateSysconfigMetadataArgs),
     #[cfg(feature = "render")]
     /// Render the benchmarks.
RenderBenchmarks(RenderBenchmarksArgs), @@ -57,11 +61,12 @@ pub async fn run() -> Result<()> { Cli::WheelMetadata(args) => wheel_metadata::wheel_metadata(args).await?, Cli::Compile(args) => compile::compile(args).await?, Cli::ClearCompile(args) => clear_compile::clear_compile(&args)?, - Cli::GenerateAll(args) => generate_all::main(&args)?, + Cli::GenerateAll(args) => generate_all::main(&args).await?, Cli::GenerateJSONSchema(args) => generate_json_schema::main(&args)?, Cli::GenerateOptionsReference(args) => generate_options_reference::main(&args)?, Cli::GenerateCliReference(args) => generate_cli_reference::main(&args)?, Cli::GenerateEnvVarsReference(args) => generate_env_vars_reference::main(&args)?, + Cli::GenerateSysconfigMetadata(args) => generate_sysconfig_mappings::main(&args).await?, #[cfg(feature = "render")] Cli::RenderBenchmarks(args) => render_benchmarks::render_benchmarks(&args)?, } diff --git a/crates/uv-dev/sync_sysconfig_targets.sh b/crates/uv-dev/sync_sysconfig_targets.sh new file mode 100755 index 000000000..f388fad21 --- /dev/null +++ b/crates/uv-dev/sync_sysconfig_targets.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Fetch latest python-build-standalone tag +latest_tag=$(curl -fsSL -H "Accept: application/json" https://github.com/astral-sh/python-build-standalone/releases/latest | jq -r .tag_name) + +# Validate we got a tag name back +if [[ -z "${latest_tag}" ]]; then + echo "Error: Failed to fetch the latest tag from astral-sh/python-build-standalone." >&2 + exit 1 +fi + +# Edit the sysconfig mapping endpoints +sed -i.bak "s|refs/tags/[^/]\+/cpython-unix|refs/tags/${latest_tag}/cpython-unix|g" src/generate_sysconfig_mappings.rs && rm -f src/generate_sysconfig_mappings.rs.bak +sed -i.bak "s|blob/[^/]\+/cpython-unix|blob/${latest_tag}/cpython-unix|g" src/generate_sysconfig_mappings.rs && rm -f src/generate_sysconfig_mappings.rs.bak + +# Regenerate mappings in case there's any changes +cargo dev generate-sysconfig-metadata diff --git a/crates/uv-dispatch/src/lib.rs b/crates/uv-dispatch/src/lib.rs index 891ca8a38..874e412e5 100644 --- a/crates/uv-dispatch/src/lib.rs +++ b/crates/uv-dispatch/src/lib.rs @@ -11,6 +11,7 @@ use itertools::Itertools; use rustc_hash::FxHashMap; use thiserror::Error; use tracing::{debug, instrument, trace}; + use uv_build_backend::check_direct_build; use uv_build_frontend::{SourceBuild, SourceBuildContext}; use uv_cache::Cache; @@ -35,8 +36,8 @@ use uv_resolver::{ PythonRequirement, Resolver, ResolverEnvironment, }; use uv_types::{ - AnyErrorBuild, BuildContext, BuildIsolation, BuildStack, EmptyInstalledPackages, HashStrategy, - InFlight, + AnyErrorBuild, BuildArena, BuildContext, BuildIsolation, BuildStack, EmptyInstalledPackages, + HashStrategy, InFlight, }; use uv_workspace::WorkspaceCache; @@ -179,6 +180,10 @@ impl BuildContext for BuildDispatch<'_> { &self.shared_state.git } + fn build_arena(&self) -> &BuildArena { + &self.shared_state.build_arena + } + fn capabilities(&self) -> &IndexCapabilities { &self.shared_state.capabilities } @@ -226,6 +231,7 @@ impl BuildContext for BuildDispatch<'_> { .build(), &python_requirement, ResolverEnvironment::specific(marker_env), + self.interpreter.markers(), // Conflicting groups only make sense when doing universal resolution. 
Conflicts::empty(), Some(tags), @@ -432,6 +438,7 @@ impl BuildContext for BuildDispatch<'_> { self.build_extra_env_vars.clone(), build_output, self.concurrency.builds, + self.preview, ) .boxed_local() .await?; @@ -446,12 +453,6 @@ impl BuildContext for BuildDispatch<'_> { build_kind: BuildKind, version_id: Option<&'data str>, ) -> Result, BuildDispatchError> { - // Direct builds are a preview feature with the uv build backend. - if self.preview.is_disabled() { - trace!("Preview is disabled, not checking for direct build"); - return Ok(None); - } - let source_tree = if let Some(subdir) = subdirectory { source.join(subdir) } else { @@ -519,6 +520,8 @@ pub struct SharedState { index: InMemoryIndex, /// The downloaded distributions. in_flight: InFlight, + /// Build directories for any PEP 517 builds executed during resolution or installation. + build_arena: BuildArena, } impl SharedState { @@ -531,6 +534,7 @@ impl SharedState { Self { git: self.git.clone(), capabilities: self.capabilities.clone(), + build_arena: self.build_arena.clone(), ..Default::default() } } @@ -554,4 +558,9 @@ impl SharedState { pub fn capabilities(&self) -> &IndexCapabilities { &self.capabilities } + + /// Return the [`BuildArena`] used by the [`SharedState`]. + pub fn build_arena(&self) -> &BuildArena { + &self.build_arena + } } diff --git a/crates/uv-distribution-filename/Cargo.toml b/crates/uv-distribution-filename/Cargo.toml index f30e79b3b..0dfdd623e 100644 --- a/crates/uv-distribution-filename/Cargo.toml +++ b/crates/uv-distribution-filename/Cargo.toml @@ -27,7 +27,6 @@ rkyv = { workspace = true, features = ["smallvec-1"] } serde = { workspace = true } smallvec = { workspace = true } thiserror = { workspace = true } -url = { workspace = true } [dev-dependencies] insta = { version = "1.40.0" } diff --git a/crates/uv-distribution-filename/src/wheel.rs b/crates/uv-distribution-filename/src/wheel.rs index d7dc7dfca..2ac0ef7d9 100644 --- a/crates/uv-distribution-filename/src/wheel.rs +++ b/crates/uv-distribution-filename/src/wheel.rs @@ -5,7 +5,6 @@ use std::str::FromStr; use memchr::memchr; use serde::{Deserialize, Deserializer, Serialize, Serializer, de}; use thiserror::Error; -use url::Url; use uv_cache_key::cache_digest; use uv_normalize::{InvalidNameError, PackageName}; @@ -300,29 +299,6 @@ impl WheelFilename { } } -impl TryFrom<&Url> for WheelFilename { - type Error = WheelFilenameError; - - fn try_from(url: &Url) -> Result { - let filename = url - .path_segments() - .ok_or_else(|| { - WheelFilenameError::InvalidWheelFileName( - url.to_string(), - "URL must have a path".to_string(), - ) - })? 
- .next_back() - .ok_or_else(|| { - WheelFilenameError::InvalidWheelFileName( - url.to_string(), - "URL must contain a filename".to_string(), - ) - })?; - Self::from_str(filename) - } -} - impl<'de> Deserialize<'de> for WheelFilename { fn deserialize(deserializer: D) -> Result where diff --git a/crates/uv-distribution-types/Cargo.toml b/crates/uv-distribution-types/Cargo.toml index 91943937e..1ca28c5ed 100644 --- a/crates/uv-distribution-types/Cargo.toml +++ b/crates/uv-distribution-types/Cargo.toml @@ -27,7 +27,9 @@ uv-pep440 = { workspace = true } uv-pep508 = { workspace = true } uv-platform-tags = { workspace = true } uv-pypi-types = { workspace = true } +uv-redacted = { workspace = true } uv-small-str = { workspace = true } +uv-warnings = { workspace = true } arcstr = { workspace = true } bitflags = { workspace = true } @@ -50,3 +52,6 @@ version-ranges = { workspace = true } [dev-dependencies] toml = { workspace = true } + +[features] +schemars = ["dep:schemars", "uv-redacted/schemars"] diff --git a/crates/uv-distribution-types/src/annotation.rs b/crates/uv-distribution-types/src/annotation.rs index 673d23c17..398bcb6b4 100644 --- a/crates/uv-distribution-types/src/annotation.rs +++ b/crates/uv-distribution-types/src/annotation.rs @@ -26,7 +26,11 @@ impl std::fmt::Display for SourceAnnotation { write!(f, "{project_name} ({})", path.portable_display()) } RequirementOrigin::Group(path, project_name, group) => { - write!(f, "{project_name} ({}:{group})", path.portable_display()) + if let Some(project_name) = project_name { + write!(f, "{project_name} ({}:{group})", path.portable_display()) + } else { + write!(f, "({}:{group})", path.portable_display()) + } } RequirementOrigin::Workspace => { write!(f, "(workspace)") @@ -45,11 +49,15 @@ impl std::fmt::Display for SourceAnnotation { } RequirementOrigin::Group(path, project_name, group) => { // Group is not used for override - write!( - f, - "--override {project_name} ({}:{group})", - path.portable_display() - ) + if let Some(project_name) = project_name { + write!( + f, + "--override {project_name} ({}:{group})", + path.portable_display() + ) + } else { + write!(f, "--override ({}:{group})", path.portable_display()) + } } RequirementOrigin::Workspace => { write!(f, "--override (workspace)") diff --git a/crates/uv-distribution-types/src/buildable.rs b/crates/uv-distribution-types/src/buildable.rs index 3fe6fe8db..c97bb362f 100644 --- a/crates/uv-distribution-types/src/buildable.rs +++ b/crates/uv-distribution-types/src/buildable.rs @@ -1,13 +1,13 @@ use std::borrow::Cow; use std::path::Path; -use url::Url; use uv_distribution_filename::SourceDistExtension; use uv_git_types::GitUrl; use uv_pep440::{Version, VersionSpecifiers}; use uv_pep508::VerbatimUrl; use uv_normalize::PackageName; +use uv_redacted::DisplaySafeUrl; use crate::{DirectorySourceDist, GitSourceDist, Name, PathSourceDist, SourceDist}; @@ -102,8 +102,8 @@ pub enum SourceUrl<'a> { } impl SourceUrl<'_> { - /// Return the [`Url`] of the source. - pub fn url(&self) -> &Url { + /// Return the [`DisplaySafeUrl`] of the source. 
+ pub fn url(&self) -> &DisplaySafeUrl { match self { Self::Direct(dist) => dist.url, Self::Git(dist) => dist.url, @@ -147,7 +147,7 @@ impl std::fmt::Display for SourceUrl<'_> { #[derive(Debug, Clone)] pub struct DirectSourceUrl<'a> { - pub url: &'a Url, + pub url: &'a DisplaySafeUrl, pub subdirectory: Option<&'a Path>, pub ext: SourceDistExtension, } @@ -185,7 +185,7 @@ impl<'a> From<&'a GitSourceDist> for GitSourceUrl<'a> { #[derive(Debug, Clone)] pub struct PathSourceUrl<'a> { - pub url: &'a Url, + pub url: &'a DisplaySafeUrl, pub path: Cow<'a, Path>, pub ext: SourceDistExtension, } @@ -208,7 +208,7 @@ impl<'a> From<&'a PathSourceDist> for PathSourceUrl<'a> { #[derive(Debug, Clone)] pub struct DirectorySourceUrl<'a> { - pub url: &'a Url, + pub url: &'a DisplaySafeUrl, pub install_path: Cow<'a, Path>, pub editable: bool, } diff --git a/crates/uv-distribution-types/src/error.rs b/crates/uv-distribution-types/src/error.rs index fc1c4f588..34dc29e78 100644 --- a/crates/uv-distribution-types/src/error.rs +++ b/crates/uv-distribution-types/src/error.rs @@ -1,6 +1,5 @@ -use url::Url; - use uv_normalize::PackageName; +use uv_redacted::DisplaySafeUrl; #[derive(thiserror::Error, Debug)] pub enum Error { @@ -17,7 +16,7 @@ pub enum Error { MissingPathSegments(String), #[error("Distribution not found at: {0}")] - NotFound(Url), + NotFound(DisplaySafeUrl), #[error("Requested package name `{0}` does not match `{1}` in the distribution filename: {2}")] PackageNameMismatch(PackageName, PackageName, String), diff --git a/crates/uv-distribution-types/src/file.rs b/crates/uv-distribution-types/src/file.rs index 02b981768..b16305f73 100644 --- a/crates/uv-distribution-types/src/file.rs +++ b/crates/uv-distribution-types/src/file.rs @@ -1,13 +1,14 @@ +use std::borrow::Cow; use std::fmt::{self, Display, Formatter}; use std::str::FromStr; use jiff::Timestamp; use serde::{Deserialize, Serialize}; -use url::Url; use uv_pep440::{VersionSpecifiers, VersionSpecifiersParseError}; use uv_pep508::split_scheme; use uv_pypi_types::{CoreMetadata, HashDigests, Yanked}; +use uv_redacted::DisplaySafeUrl; use uv_small_str::SmallString; /// Error converting [`uv_pypi_types::File`] to [`distribution_type::File`]. @@ -56,10 +57,7 @@ impl File { .map_err(|err| FileConversionError::RequiresPython(err.line().clone(), err))?, size: file.size, upload_time_utc_ms: file.upload_time.map(Timestamp::as_millisecond), - url: match split_scheme(&file.url) { - Some(..) => FileLocation::AbsoluteUrl(UrlString::new(file.url)), - None => FileLocation::RelativeUrl(base.clone(), file.url), - }, + url: FileLocation::new(file.url, base), yanked: file.yanked, }) } @@ -76,6 +74,17 @@ pub enum FileLocation { } impl FileLocation { + /// Parse a relative or absolute URL on a page with a base URL. + /// + /// This follows the HTML semantics where a link on a page is resolved relative to the URL of + /// that page. + pub fn new(url: SmallString, base: &SmallString) -> Self { + match split_scheme(&url) { + Some(..) => FileLocation::AbsoluteUrl(UrlString::new(url)), + None => FileLocation::RelativeUrl(base.clone(), url), + } + } + /// Convert this location to a URL. /// /// A relative URL has its base joined to the path. An absolute URL is @@ -87,13 +96,14 @@ impl FileLocation { /// This returns an error if any of the URL parsing fails, or if, for /// example, the location is a path and the path isn't valid UTF-8. /// (Because URLs must be valid UTF-8.) 
- pub fn to_url(&self) -> Result { + pub fn to_url(&self) -> Result { match *self { FileLocation::RelativeUrl(ref base, ref path) => { - let base_url = Url::parse(base).map_err(|err| ToUrlError::InvalidBase { - base: base.to_string(), - err, - })?; + let base_url = + DisplaySafeUrl::parse(base).map_err(|err| ToUrlError::InvalidBase { + base: base.to_string(), + err, + })?; let joined = base_url.join(path).map_err(|err| ToUrlError::InvalidJoin { base: base.to_string(), path: path.to_string(), @@ -142,9 +152,9 @@ impl UrlString { Self(url) } - /// Converts a [`UrlString`] to a [`Url`]. - pub fn to_url(&self) -> Result { - Url::from_str(&self.0).map_err(|err| ToUrlError::InvalidAbsolute { + /// Converts a [`UrlString`] to a [`DisplaySafeUrl`]. + pub fn to_url(&self) -> Result { + DisplaySafeUrl::from_str(&self.0).map_err(|err| ToUrlError::InvalidAbsolute { absolute: self.0.to_string(), err, }) @@ -159,16 +169,13 @@ impl UrlString { .unwrap_or(self.as_ref()) } - /// Return the [`UrlString`] with any fragments removed. + /// Return the [`UrlString`] (as a [`Cow`]) with any fragments removed. #[must_use] - pub fn without_fragment(&self) -> Self { - Self( - self.as_ref() - .split_once('#') - .map(|(path, _)| path) - .map(SmallString::from) - .unwrap_or_else(|| self.0.clone()), - ) + pub fn without_fragment(&self) -> Cow<'_, Self> { + self.as_ref() + .split_once('#') + .map(|(path, _)| Cow::Owned(UrlString(SmallString::from(path)))) + .unwrap_or(Cow::Borrowed(self)) } } @@ -178,14 +185,14 @@ impl AsRef for UrlString { } } -impl From for UrlString { - fn from(value: Url) -> Self { +impl From for UrlString { + fn from(value: DisplaySafeUrl) -> Self { Self(value.as_str().into()) } } -impl From<&Url> for UrlString { - fn from(value: &Url) -> Self { +impl From<&DisplaySafeUrl> for UrlString { + fn from(value: &DisplaySafeUrl) -> Self { Self(value.as_str().into()) } } @@ -251,16 +258,17 @@ mod tests { #[test] fn without_fragment() { + // Borrows a URL without a fragment + let url = UrlString("https://example.com/path".into()); + assert_eq!(&*url.without_fragment(), &url); + assert!(matches!(url.without_fragment(), Cow::Borrowed(_))); + + // Removes the fragment if present on the URL let url = UrlString("https://example.com/path?query#fragment".into()); assert_eq!( - url.without_fragment(), - UrlString("https://example.com/path?query".into()) + &*url.without_fragment(), + &UrlString("https://example.com/path?query".into()) ); - - let url = UrlString("https://example.com/path#fragment".into()); - assert_eq!(url.base_str(), "https://example.com/path"); - - let url = UrlString("https://example.com/path".into()); - assert_eq!(url.base_str(), "https://example.com/path"); + assert!(matches!(url.without_fragment(), Cow::Owned(_))); } } diff --git a/crates/uv-distribution-types/src/id.rs b/crates/uv-distribution-types/src/id.rs index 43fb355e4..b68b4f24c 100644 --- a/crates/uv-distribution-types/src/id.rs +++ b/crates/uv-distribution-types/src/id.rs @@ -1,12 +1,12 @@ use std::fmt::{Display, Formatter}; use std::path::PathBuf; -use url::Url; use uv_cache_key::{CanonicalUrl, RepositoryUrl}; use uv_normalize::PackageName; use uv_pep440::Version; use uv_pypi_types::HashDigest; +use uv_redacted::DisplaySafeUrl; /// A unique identifier for a package. A package can either be identified by a name (e.g., `black`) /// or a URL (e.g., `git+https://github.com/psf/black`). @@ -25,7 +25,7 @@ impl PackageId { } /// Create a new [`PackageId`] from a URL. 
- pub fn from_url(url: &Url) -> Self { + pub fn from_url(url: &DisplaySafeUrl) -> Self { Self::Url(CanonicalUrl::new(url)) } } @@ -55,7 +55,7 @@ impl VersionId { } /// Create a new [`VersionId`] from a URL. - pub fn from_url(url: &Url) -> Self { + pub fn from_url(url: &DisplaySafeUrl) -> Self { Self::Url(CanonicalUrl::new(url)) } } diff --git a/crates/uv-distribution-types/src/index.rs b/crates/uv-distribution-types/src/index.rs index d3f974ad3..8ac7c3cd4 100644 --- a/crates/uv-distribution-types/src/index.rs +++ b/crates/uv-distribution-types/src/index.rs @@ -3,9 +3,9 @@ use std::str::FromStr; use serde::{Deserialize, Serialize}; use thiserror::Error; -use url::Url; use uv_auth::{AuthPolicy, Credentials}; +use uv_redacted::DisplaySafeUrl; use crate::index_name::{IndexName, IndexNameError}; use crate::origin::Origin; @@ -82,7 +82,7 @@ pub struct Index { /// url = "https://pypi.org/simple" /// publish-url = "https://upload.pypi.org/legacy/" /// ``` - pub publish_url: Option, + pub publish_url: Option, /// When uv should use authentication for requests to the index. /// /// ```toml @@ -193,7 +193,7 @@ impl Index { } /// Return the raw [`Url`] of the index. - pub fn raw_url(&self) -> &Url { + pub fn raw_url(&self) -> &DisplaySafeUrl { self.url.url() } @@ -201,7 +201,7 @@ impl Index { /// /// For indexes with a `/simple` endpoint, this is simply the URL with the final segment /// removed. This is useful, e.g., for credential propagation to other endpoints on the index. - pub fn root_url(&self) -> Option { + pub fn root_url(&self) -> Option { self.url.root() } diff --git a/crates/uv-distribution-types/src/index_url.rs b/crates/uv-distribution-types/src/index_url.rs index f67063a41..1c8cd0a76 100644 --- a/crates/uv-distribution-types/src/index_url.rs +++ b/crates/uv-distribution-types/src/index_url.rs @@ -11,10 +11,13 @@ use thiserror::Error; use url::{ParseError, Url}; use uv_pep508::{Scheme, VerbatimUrl, VerbatimUrlError, split_scheme}; +use uv_redacted::DisplaySafeUrl; +use uv_warnings::warn_user; use crate::{Index, IndexStatusCodeStrategy, Verbatim}; -static PYPI_URL: LazyLock = LazyLock::new(|| Url::parse("https://pypi.org/simple").unwrap()); +static PYPI_URL: LazyLock = + LazyLock::new(|| DisplaySafeUrl::parse("https://pypi.org/simple").unwrap()); static DEFAULT_INDEX: LazyLock = LazyLock::new(|| { Index::from_index_url(IndexUrl::Pypi(Arc::new(VerbatimUrl::from_url( @@ -69,7 +72,7 @@ impl IndexUrl { /// /// For indexes with a `/simple` endpoint, this is simply the URL with the final segment /// removed. This is useful, e.g., for credential propagation to other endpoints on the index. - pub fn root(&self) -> Option { + pub fn root(&self) -> Option { let mut segments = self.url().path_segments()?; let last = match segments.next_back()? 
{ // If the last segment is empty due to a trailing `/`, skip it (as in `pop_if_empty`) @@ -77,7 +80,8 @@ impl IndexUrl { segment => segment, }; - if !last.eq_ignore_ascii_case("simple") { + // We also handle `/+simple` as it's used in devpi + if !(last.eq_ignore_ascii_case("simple") || last.eq_ignore_ascii_case("+simple")) { return None; } @@ -89,26 +93,21 @@ impl IndexUrl { #[cfg(feature = "schemars")] impl schemars::JsonSchema for IndexUrl { - fn schema_name() -> String { - "IndexUrl".to_string() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("IndexUrl") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - schemars::schema::SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - metadata: Some(Box::new(schemars::schema::Metadata { - description: Some("The URL of an index to use for fetching packages (e.g., `https://pypi.org/simple`), or a local path.".to_string()), - ..schemars::schema::Metadata::default() - })), - ..schemars::schema::SchemaObject::default() - } - .into() + fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "string", + "description": "The URL of an index to use for fetching packages (e.g., `https://pypi.org/simple`), or a local path." + }) } } impl IndexUrl { /// Return the raw URL for the index. - pub fn url(&self) -> &Url { + pub fn url(&self) -> &DisplaySafeUrl { match self { Self::Pypi(url) => url.raw(), Self::Url(url) => url.raw(), @@ -116,8 +115,8 @@ impl IndexUrl { } } - /// Convert the index URL into a [`Url`]. - pub fn into_url(self) -> Url { + /// Convert the index URL into a [`DisplaySafeUrl`]. + pub fn into_url(self) -> DisplaySafeUrl { match self { Self::Pypi(url) => url.to_url(), Self::Url(url) => url.to_url(), @@ -126,7 +125,7 @@ impl IndexUrl { } /// Return the redacted URL for the index, omitting any sensitive credentials. - pub fn redacted(&self) -> Cow<'_, Url> { + pub fn without_credentials(&self) -> Cow<'_, DisplaySafeUrl> { let url = self.url(); if url.username().is_empty() && url.password().is_none() { Cow::Borrowed(url) @@ -137,6 +136,30 @@ impl IndexUrl { Cow::Owned(url) } } + + /// Warn user if the given URL was provided as an ambiguous relative path. + /// + /// This is a temporary warning. Ambiguous values will not be + /// accepted in the future. + pub fn warn_on_disambiguated_relative_path(&self) { + let Self::Path(verbatim_url) = &self else { + return; + }; + + if let Some(path) = verbatim_url.given() { + if !is_disambiguated_path(path) { + if cfg!(windows) { + warn_user!( + "Relative paths passed to `--index` or `--default-index` should be disambiguated from index names (use `.\\{path}` or `./{path}`). Support for ambiguous values will be removed in the future" + ); + } else { + warn_user!( + "Relative paths passed to `--index` or `--default-index` should be disambiguated from index names (use `./{path}`). Support for ambiguous values will be removed in the future" + ); + } + } + } + } } impl Display for IndexUrl { @@ -159,6 +182,28 @@ impl Verbatim for IndexUrl { } } +/// Checks if a path is disambiguated. +/// +/// Disambiguated paths are absolute paths, paths with valid schemes, +/// and paths starting with "./" or "../" on Unix or ".\\", "..\\", +/// "./", or "../" on Windows. 
+fn is_disambiguated_path(path: &str) -> bool { + if cfg!(windows) { + if path.starts_with(".\\") || path.starts_with("..\\") || path.starts_with('/') { + return true; + } + } + if path.starts_with("./") || path.starts_with("../") || Path::new(path).is_absolute() { + return true; + } + // Check if the path has a scheme (like `file://`) + if let Some((scheme, _)) = split_scheme(path) { + return Scheme::parse(scheme).is_some(); + } + // This is an ambiguous relative path + false +} + /// An error that can occur when parsing an [`IndexUrl`]. #[derive(Error, Debug)] pub enum IndexUrlError { @@ -222,7 +267,7 @@ impl From for IndexUrl { } } -impl From for Url { +impl From for DisplaySafeUrl { fn from(index: IndexUrl) -> Self { match index { IndexUrl::Pypi(url) => url.to_url(), @@ -408,6 +453,19 @@ impl<'a> IndexLocations { indexes } } + + /// Add all authenticated sources to the cache. + pub fn cache_index_credentials(&self) { + for index in self.allowed_indexes() { + if let Some(credentials) = index.credentials() { + let credentials = Arc::new(credentials); + uv_auth::store_credentials(index.raw_url(), credentials.clone()); + if let Some(root_url) = index.root_url() { + uv_auth::store_credentials(&root_url, credentials.clone()); + } + } + } + } } impl From<&IndexLocations> for uv_auth::Indexes { @@ -508,30 +566,23 @@ impl<'a> IndexUrls { /// iterator. pub fn defined_indexes(&'a self) -> impl Iterator + 'a { if self.no_index { - Either::Left(std::iter::empty()) - } else { - Either::Right( - { - let mut seen = FxHashSet::default(); - self.indexes - .iter() - .filter(move |index| { - index.name.as_ref().is_none_or(|name| seen.insert(name)) - }) - .filter(|index| !index.default) - } - .chain({ - let mut seen = FxHashSet::default(); - self.indexes - .iter() - .filter(move |index| { - index.name.as_ref().is_none_or(|name| seen.insert(name)) - }) - .find(|index| index.default) - .into_iter() - }), - ) + return Either::Left(std::iter::empty()); } + + let mut seen = FxHashSet::default(); + let (non_default, default) = self + .indexes + .iter() + .filter(move |index| { + if let Some(name) = &index.name { + seen.insert(name) + } else { + true + } + }) + .partition::, _>(|index| !index.default); + + Either::Right(non_default.into_iter().chain(default)) } /// Return the `--no-index` flag. 
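Editor's note: the rewritten `defined_indexes` above deduplicates named indexes in a single pass and then partitions the result so non-default indexes are yielded before the default one. A standalone sketch of that ordering behavior, using a hypothetical `Idx` type and `ordered` helper rather than uv's `Index` API:

use std::collections::HashSet;

#[derive(Debug, Clone)]
struct Idx {
    name: Option<&'static str>,
    default: bool,
}

// Deduplicate by name (first occurrence wins), then yield non-default
// indexes before the default one, mirroring the partition above.
fn ordered(indexes: &[Idx]) -> Vec<Idx> {
    let mut seen = HashSet::new();
    let (non_default, default): (Vec<_>, Vec<_>) = indexes
        .iter()
        .filter(|idx| idx.name.is_none_or(|name| seen.insert(name)))
        .cloned()
        .partition(|idx| !idx.default);
    non_default.into_iter().chain(default).collect()
}

fn main() {
    let indexes = [
        Idx { name: Some("internal"), default: false },
        Idx { name: Some("internal"), default: false }, // duplicate name: dropped
        Idx { name: Some("pypi"), default: true },
    ];
    // Prints the "internal" index first, then the default "pypi" index.
    for idx in ordered(&indexes) {
        println!("{idx:?}");
    }
}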
@@ -629,3 +680,41 @@ impl IndexCapabilities { .insert(Flags::FORBIDDEN); } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_index_url_parse_valid_paths() { + // Absolute path + assert!(is_disambiguated_path("/absolute/path")); + // Relative path + assert!(is_disambiguated_path("./relative/path")); + assert!(is_disambiguated_path("../../relative/path")); + if cfg!(windows) { + // Windows absolute path + assert!(is_disambiguated_path("C:/absolute/path")); + // Windows relative path + assert!(is_disambiguated_path(".\\relative\\path")); + assert!(is_disambiguated_path("..\\..\\relative\\path")); + } + } + + #[test] + fn test_index_url_parse_ambiguous_paths() { + // Test single-segment ambiguous path + assert!(!is_disambiguated_path("index")); + // Test multi-segment ambiguous path + assert!(!is_disambiguated_path("relative/path")); + } + + #[test] + fn test_index_url_parse_with_schemes() { + assert!(is_disambiguated_path("file:///absolute/path")); + assert!(is_disambiguated_path("https://registry.com/simple/")); + assert!(is_disambiguated_path( + "git+https://github.com/example/repo.git" + )); + } +} diff --git a/crates/uv-distribution-types/src/installed.rs b/crates/uv-distribution-types/src/installed.rs index c96002729..a285fc1db 100644 --- a/crates/uv-distribution-types/src/installed.rs +++ b/crates/uv-distribution-types/src/installed.rs @@ -14,6 +14,7 @@ use uv_fs::Simplified; use uv_normalize::PackageName; use uv_pep440::Version; use uv_pypi_types::{DirectUrl, MetadataError}; +use uv_redacted::DisplaySafeUrl; use crate::{DistributionMetadata, InstalledMetadata, InstalledVersion, Name, VersionOrUrlRef}; @@ -86,7 +87,7 @@ pub struct InstalledDirectUrlDist { pub name: PackageName, pub version: Version, pub direct_url: Box, - pub url: Url, + pub url: DisplaySafeUrl, pub editable: bool, pub path: Box, pub cache_info: Option, @@ -112,7 +113,7 @@ pub struct InstalledLegacyEditable { pub version: Version, pub egg_link: Box, pub target: Box, - pub target_url: Url, + pub target_url: DisplaySafeUrl, pub egg_info: Box, } @@ -144,7 +145,7 @@ impl InstalledDist { version, editable: matches!(&direct_url, DirectUrl::LocalDirectory { dir_info, .. 
} if dir_info.editable == Some(true)), direct_url: Box::new(direct_url), - url, + url: DisplaySafeUrl::from(url), path: path.to_path_buf().into_boxed_path(), cache_info, }))), @@ -272,7 +273,7 @@ impl InstalledDist { version: Version::from_str(&egg_metadata.version)?, egg_link: path.to_path_buf().into_boxed_path(), target: target.into_boxed_path(), - target_url: url, + target_url: DisplaySafeUrl::from(url), egg_info: egg_info.into_boxed_path(), }))); } @@ -364,7 +365,7 @@ impl InstalledDist { pub fn installer(&self) -> Result, InstalledDistError> { let path = self.install_path().join("INSTALLER"); match fs::read_to_string(path) { - Ok(installer) => Ok(Some(installer)), + Ok(installer) => Ok(Some(installer.trim().to_owned())), Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None), Err(err) => Err(err.into()), } diff --git a/crates/uv-distribution-types/src/lib.rs b/crates/uv-distribution-types/src/lib.rs index c062e8b01..1e3ad7eba 100644 --- a/crates/uv-distribution-types/src/lib.rs +++ b/crates/uv-distribution-types/src/lib.rs @@ -50,6 +50,7 @@ use uv_pep508::{Pep508Url, VerbatimUrl}; use uv_pypi_types::{ ParsedArchiveUrl, ParsedDirectoryUrl, ParsedGitUrl, ParsedPathUrl, ParsedUrl, VerbatimParsedUrl, }; +use uv_redacted::DisplaySafeUrl; pub use crate::annotation::*; pub use crate::any::*; @@ -72,6 +73,7 @@ pub use crate::pip_index::*; pub use crate::prioritized_distribution::*; pub use crate::requested::*; pub use crate::requirement::*; +pub use crate::requires_python::*; pub use crate::resolution::*; pub use crate::resolved::*; pub use crate::specified_requirement::*; @@ -99,6 +101,7 @@ mod pip_index; mod prioritized_distribution; mod requested; mod requirement; +mod requires_python; mod resolution; mod resolved; mod specified_requirement; @@ -147,12 +150,12 @@ pub enum InstalledVersion<'a> { Version(&'a Version), /// A URL, used to identify a distribution at an arbitrary location, along with the version /// specifier to which it resolved. - Url(&'a Url, &'a Version), + Url(&'a DisplaySafeUrl, &'a Version), } impl InstalledVersion<'_> { /// If it is a URL, return its value. - pub fn url(&self) -> Option<&Url> { + pub fn url(&self) -> Option<&DisplaySafeUrl> { match self { InstalledVersion::Version(_) => None, InstalledVersion::Url(url, _) => Some(url), @@ -258,7 +261,7 @@ pub struct DirectUrlBuiltDist { /// `https://example.org/packages/flask-3.0.0-py3-none-any.whl` pub filename: WheelFilename, /// The URL without the subdirectory fragment. - pub location: Box, + pub location: Box, /// The URL as it was provided by the user. pub url: VerbatimUrl, } @@ -299,7 +302,7 @@ pub struct DirectUrlSourceDist { /// like using e.g. `foo @ https://github.com/org/repo/archive/master.zip` pub name: PackageName, /// The URL without the subdirectory fragment. - pub location: Box, + pub location: Box, /// The subdirectory within the archive in which the source distribution is located. pub subdirectory: Option>, /// The file extension, e.g. `tar.gz`, `zip`, etc. 
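Editor's note: one behavioral fix in the hunk above is easy to miss: `InstalledDist::installer()` now trims the contents of the `INSTALLER` file. Installers conventionally write the tool name followed by a trailing newline, so the untrimmed value would compare unequal to plain names like "uv". A minimal sketch of the effect, using a hypothetical `read_installer` helper rather than the real method:

fn read_installer(contents: &str) -> String {
    // Mirror the fix: strip the trailing newline and surrounding whitespace.
    contents.trim().to_owned()
}

fn main() {
    assert_eq!(read_installer("uv\n"), "uv");
    assert_eq!(read_installer("pip"), "pip");
}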
@@ -353,7 +356,7 @@ impl Dist { pub fn from_http_url( name: PackageName, url: VerbatimUrl, - location: Url, + location: DisplaySafeUrl, subdirectory: Option>, ext: DistExtension, ) -> Result { @@ -1168,7 +1171,7 @@ impl RemoteSource for Dist { } } -impl Identifier for Url { +impl Identifier for DisplaySafeUrl { fn distribution_id(&self) -> DistributionId { DistributionId::Url(uv_cache_key::CanonicalUrl::new(self)) } @@ -1461,7 +1464,7 @@ impl Identifier for BuildableSource<'_> { #[cfg(test)] mod test { use crate::{BuiltDist, Dist, RemoteSource, SourceDist, UrlString}; - use url::Url; + use uv_redacted::DisplaySafeUrl; /// Ensure that we don't accidentally grow the `Dist` sizes. #[test] @@ -1485,7 +1488,7 @@ mod test { "https://example.com/foo-0.1.0.tar.gz?query=1/2#fragment", "https://example.com/foo-0.1.0.tar.gz?query=1/2#fragment/3", ] { - let url = Url::parse(url).unwrap(); + let url = DisplaySafeUrl::parse(url).unwrap(); assert_eq!(url.filename().unwrap(), "foo-0.1.0.tar.gz", "{url}"); let url = UrlString::from(url.clone()); assert_eq!(url.filename().unwrap(), "foo-0.1.0.tar.gz", "{url}"); diff --git a/crates/uv-distribution-types/src/pip_index.rs b/crates/uv-distribution-types/src/pip_index.rs index 6ce22abd2..18671e42f 100644 --- a/crates/uv-distribution-types/src/pip_index.rs +++ b/crates/uv-distribution-types/src/pip_index.rs @@ -3,6 +3,8 @@ //! flags set. use serde::{Deserialize, Deserializer, Serialize}; +#[cfg(feature = "schemars")] +use std::borrow::Cow; use std::path::Path; use crate::{Index, IndexUrl}; @@ -50,14 +52,14 @@ macro_rules! impl_index { #[cfg(feature = "schemars")] impl schemars::JsonSchema for $name { - fn schema_name() -> String { + fn schema_name() -> Cow<'static, str> { IndexUrl::schema_name() } fn json_schema( - r#gen: &mut schemars::r#gen::SchemaGenerator, - ) -> schemars::schema::Schema { - IndexUrl::json_schema(r#gen) + generator: &mut schemars::generate::SchemaGenerator, + ) -> schemars::Schema { + IndexUrl::json_schema(generator) } } }; diff --git a/crates/uv-distribution-types/src/prioritized_distribution.rs b/crates/uv-distribution-types/src/prioritized_distribution.rs index b19061095..52ac2fbd1 100644 --- a/crates/uv-distribution-types/src/prioritized_distribution.rs +++ b/crates/uv-distribution-types/src/prioritized_distribution.rs @@ -11,7 +11,7 @@ use uv_platform_tags::{AbiTag, IncompatibleTag, LanguageTag, PlatformTag, TagPri use uv_pypi_types::{HashDigest, Yanked}; use crate::{ - InstalledDist, KnownPlatform, RegistryBuiltDist, RegistryBuiltWheel, RegistrySourceDist, + File, InstalledDist, KnownPlatform, RegistryBuiltDist, RegistryBuiltWheel, RegistrySourceDist, ResolvedDistRef, }; @@ -557,6 +557,20 @@ impl PrioritizedDist { self.0.best_wheel_index.map(|i| &self.0.wheels[i]) } + /// Returns an iterator of all wheels and the source distribution, if any. + pub fn files(&self) -> impl Iterator { + self.0 + .wheels + .iter() + .map(|(wheel, _)| wheel.file.as_ref()) + .chain( + self.0 + .source + .as_ref() + .map(|(source_dist, _)| source_dist.file.as_ref()), + ) + } + /// Returns an iterator over all Python tags for the distribution. 
pub fn python_tags(&self) -> impl Iterator + '_ { self.0 @@ -817,7 +831,7 @@ pub fn implied_markers(filename: &WheelFilename) -> MarkerTree { tag_marker.and(MarkerTree::expression(MarkerExpression::String { key: MarkerValueString::PlatformMachine, operator: MarkerOperator::Equal, - value: arcstr::literal!("x86_64"), + value: arcstr::literal!("AMD64"), })); marker.or(tag_marker); } @@ -911,7 +925,7 @@ mod tests { ); assert_markers( "numpy-2.2.1-cp313-cp313t-win_amd64.whl", - "sys_platform == 'win32' and platform_machine == 'x86_64'", + "sys_platform == 'win32' and platform_machine == 'AMD64'", ); assert_markers( "numpy-2.2.1-cp313-cp313t-win_arm64.whl", diff --git a/crates/uv-distribution-types/src/requirement.rs b/crates/uv-distribution-types/src/requirement.rs index 9a2686273..432cc4e12 100644 --- a/crates/uv-distribution-types/src/requirement.rs +++ b/crates/uv-distribution-types/src/requirement.rs @@ -4,7 +4,6 @@ use std::path::Path; use std::str::FromStr; use thiserror::Error; -use url::Url; use uv_distribution_filename::DistExtension; use uv_fs::{CWD, PortablePath, PortablePathBuf, relative_to}; @@ -14,6 +13,7 @@ use uv_pep440::VersionSpecifiers; use uv_pep508::{ MarkerEnvironment, MarkerTree, RequirementOrigin, VerbatimUrl, VersionOrUrl, marker, }; +use uv_redacted::DisplaySafeUrl; use crate::{IndexMetadata, IndexUrl}; @@ -391,7 +391,7 @@ pub enum RequirementSource { /// e.g.`foo @ https://example.org/foo-1.0.zip`. Url { /// The remote location of the archive file, without subdirectory fragment. - location: Url, + location: DisplaySafeUrl, /// For source distributions, the path to the distribution if it is not in the archive /// root. subdirectory: Option>, @@ -682,7 +682,7 @@ enum RequirementSourceWire { Git { git: String }, /// Ex) `source = { url = "" }` Direct { - url: Url, + url: DisplaySafeUrl, subdirectory: Option, }, /// Ex) `source = { path = "/home/ferris/iniconfig-2.0.0-py3-none-any.whl" }` @@ -697,7 +697,7 @@ enum RequirementSourceWire { Registry { #[serde(skip_serializing_if = "VersionSpecifiers::is_empty", default)] specifier: VersionSpecifiers, - index: Option, + index: Option, conflict: Option, }, } @@ -711,7 +711,7 @@ impl From for RequirementSourceWire { conflict, } => { let index = index.map(|index| index.url.into_url()).map(|mut index| { - redact_credentials(&mut index); + index.remove_credentials(); index }); Self::Registry { @@ -736,8 +736,8 @@ impl From for RequirementSourceWire { } => { let mut url = git.repository().clone(); - // Redact the credentials. - redact_credentials(&mut url); + // Remove the credentials. + url.remove_credentials(); // Clear out any existing state. url.set_fragment(None); @@ -826,7 +826,7 @@ impl TryFrom for RequirementSource { conflict, }), RequirementSourceWire::Git { git } => { - let mut repository = Url::parse(&git)?; + let mut repository = DisplaySafeUrl::parse(&git)?; let mut reference = GitReference::DefaultBranch; let mut subdirectory: Option = None; @@ -848,13 +848,14 @@ impl TryFrom for RequirementSource { repository.set_fragment(None); repository.set_query(None); - // Redact the credentials. - redact_credentials(&mut repository); + // Remove the credentials. + repository.remove_credentials(); // Create a PEP 508-compatible URL. 
- let mut url = Url::parse(&format!("git+{repository}"))?; + let mut url = DisplaySafeUrl::parse(&format!("git+{repository}"))?; if let Some(rev) = reference.as_str() { - url.set_path(&format!("{}@{}", url.path(), rev)); + let path = format!("{}@{}", url.path(), rev); + url.set_path(&path); } if let Some(subdirectory) = subdirectory.as_ref() { url.set_fragment(Some(&format!("subdirectory={subdirectory}"))); @@ -940,18 +941,6 @@ impl TryFrom for RequirementSource { } } -/// Remove the credentials from a URL, allowing the generic `git` username (without a password) -/// in SSH URLs, as in, `ssh://git@github.com/...`. -pub fn redact_credentials(url: &mut Url) { - // For URLs that use the `git` convention (i.e., `ssh://git@github.com/...`), avoid dropping the - // username. - if url.scheme() == "ssh" && url.username() == "git" && url.password().is_none() { - return; - } - let _ = url.set_password(None); - let _ = url.set_username(""); -} - #[cfg(test)] mod tests { use std::path::PathBuf; diff --git a/crates/uv-resolver/src/requires_python.rs b/crates/uv-distribution-types/src/requires_python.rs similarity index 96% rename from crates/uv-resolver/src/requires_python.rs rename to crates/uv-distribution-types/src/requires_python.rs index 8e4d33213..49a4fd5c4 100644 --- a/crates/uv-resolver/src/requires_python.rs +++ b/crates/uv-distribution-types/src/requires_python.rs @@ -1,6 +1,6 @@ use std::collections::Bound; -use pubgrub::Range; +use version_ranges::Ranges; use uv_distribution_filename::WheelFilename; use uv_pep440::{ @@ -66,15 +66,8 @@ impl RequiresPython { ) -> Option { // Convert to PubGrub range and perform an intersection. let range = specifiers - .into_iter() - .map(|specifier| release_specifiers_to_ranges(specifier.clone())) - .fold(None, |range: Option>, requires_python| { - if let Some(range) = range { - Some(range.intersection(&requires_python)) - } else { - Some(requires_python) - } - })?; + .map(|specs| release_specifiers_to_ranges(specs.clone())) + .reduce(|acc, r| acc.intersection(&r))?; // If the intersection is empty, return `None`. if range.is_empty() { @@ -97,12 +90,12 @@ impl RequiresPython { pub fn split(&self, bound: Bound) -> Option<(Self, Self)> { let RequiresPythonRange(.., upper) = &self.range; - let upper = Range::from_range_bounds((bound, upper.clone().into())); + let upper = Ranges::from_range_bounds((bound, upper.clone().into())); let lower = upper.complement(); // Intersect left and right with the existing range. - let lower = lower.intersection(&Range::from(self.range.clone())); - let upper = upper.intersection(&Range::from(self.range.clone())); + let lower = lower.intersection(&Ranges::from(self.range.clone())); + let upper = upper.intersection(&Ranges::from(self.range.clone())); if lower.is_empty() || upper.is_empty() { None @@ -353,7 +346,7 @@ impl RequiresPython { /// a lock file are deserialized and turned into a `ResolutionGraph`, the /// markers are "complexified" to put the `requires-python` assumption back /// into the marker explicitly. 
- pub(crate) fn simplify_markers(&self, marker: MarkerTree) -> MarkerTree { + pub fn simplify_markers(&self, marker: MarkerTree) -> MarkerTree { let (lower, upper) = (self.range().lower(), self.range().upper()); marker.simplify_python_versions(lower.as_ref(), upper.as_ref()) } @@ -373,7 +366,7 @@ impl RequiresPython { /// ```text /// python_full_version >= '3.8' and python_full_version < '3.12' /// ``` - pub(crate) fn complexify_markers(&self, marker: MarkerTree) -> MarkerTree { + pub fn complexify_markers(&self, marker: MarkerTree) -> MarkerTree { let (lower, upper) = (self.range().lower(), self.range().upper()); marker.complexify_python_versions(lower.as_ref(), upper.as_ref()) } @@ -537,7 +530,7 @@ pub struct RequiresPythonRange(LowerBound, UpperBound); impl RequiresPythonRange { /// Initialize a [`RequiresPythonRange`] from a [`Range`]. - pub fn from_range(range: &Range) -> Self { + pub fn from_range(range: &Ranges) -> Self { let (lower, upper) = range .bounding_range() .map(|(lower_bound, upper_bound)| (lower_bound.cloned(), upper_bound.cloned())) @@ -575,9 +568,9 @@ impl Default for RequiresPythonRange { } } -impl From for Range { +impl From for Ranges { fn from(value: RequiresPythonRange) -> Self { - Range::from_range_bounds::<(Bound, Bound), _>(( + Ranges::from_range_bounds::<(Bound, Bound), _>(( value.0.into(), value.1.into(), )) @@ -592,21 +585,18 @@ impl From for Range { /// a simplified marker, one must re-contextualize it by adding the /// `requires-python` constraint back to the marker. #[derive(Clone, Copy, Debug, Default, Eq, PartialEq, PartialOrd, Ord, serde::Deserialize)] -pub(crate) struct SimplifiedMarkerTree(MarkerTree); +pub struct SimplifiedMarkerTree(MarkerTree); impl SimplifiedMarkerTree { /// Simplifies the given markers by assuming the given `requires-python` /// bound is true. - pub(crate) fn new( - requires_python: &RequiresPython, - marker: MarkerTree, - ) -> SimplifiedMarkerTree { + pub fn new(requires_python: &RequiresPython, marker: MarkerTree) -> SimplifiedMarkerTree { SimplifiedMarkerTree(requires_python.simplify_markers(marker)) } /// Complexifies the given markers by adding the given `requires-python` as /// a constraint to these simplified markers. - pub(crate) fn into_marker(self, requires_python: &RequiresPython) -> MarkerTree { + pub fn into_marker(self, requires_python: &RequiresPython) -> MarkerTree { requires_python.complexify_markers(self.0) } @@ -614,12 +604,12 @@ impl SimplifiedMarkerTree { /// /// This only returns `None` when the underlying marker is always true, /// i.e., it matches all possible marker environments. - pub(crate) fn try_to_string(self) -> Option { + pub fn try_to_string(self) -> Option { self.0.try_to_string() } /// Returns the underlying marker tree without re-complexifying them. 
- pub(crate) fn as_simplified_marker_tree(self) -> MarkerTree { + pub fn as_simplified_marker_tree(self) -> MarkerTree { self.0 } } diff --git a/crates/uv-distribution-types/src/status_code_strategy.rs b/crates/uv-distribution-types/src/status_code_strategy.rs index a2940a23a..b019d0329 100644 --- a/crates/uv-distribution-types/src/status_code_strategy.rs +++ b/crates/uv-distribution-types/src/status_code_strategy.rs @@ -1,3 +1,5 @@ +#[cfg(feature = "schemars")] +use std::borrow::Cow; use std::ops::Deref; use http::StatusCode; @@ -136,17 +138,17 @@ impl<'de> Deserialize<'de> for SerializableStatusCode { #[cfg(feature = "schemars")] impl schemars::JsonSchema for SerializableStatusCode { - fn schema_name() -> String { - "StatusCode".to_string() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("StatusCode") } - fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - let mut schema = r#gen.subschema_for::().into_object(); - schema.metadata().description = Some("HTTP status code (100-599)".to_string()); - schema.number().minimum = Some(100.0); - schema.number().maximum = Some(599.0); - - schema.into() + fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "number", + "minimum": 100, + "maximum": 599, + "description": "HTTP status code (100-599)" + }) } } diff --git a/crates/uv-distribution/Cargo.toml b/crates/uv-distribution/Cargo.toml index 55975fcc4..2c490590b 100644 --- a/crates/uv-distribution/Cargo.toml +++ b/crates/uv-distribution/Cargo.toml @@ -33,6 +33,7 @@ uv-pep440 = { workspace = true } uv-pep508 = { workspace = true } uv-platform-tags = { workspace = true } uv-pypi-types = { workspace = true } +uv-redacted = { workspace = true } uv-types = { workspace = true } uv-workspace = { workspace = true } diff --git a/crates/uv-distribution/src/distribution_database.rs b/crates/uv-distribution/src/distribution_database.rs index 1e3d8d9ba..d18269730 100644 --- a/crates/uv-distribution/src/distribution_database.rs +++ b/crates/uv-distribution/src/distribution_database.rs @@ -20,13 +20,13 @@ use uv_client::{ }; use uv_distribution_filename::WheelFilename; use uv_distribution_types::{ - BuildableSource, BuiltDist, Dist, FileLocation, HashPolicy, Hashed, InstalledDist, Name, - SourceDist, + BuildableSource, BuiltDist, Dist, HashPolicy, Hashed, InstalledDist, Name, SourceDist, }; use uv_extract::hash::Hasher; use uv_fs::write_atomic; use uv_platform_tags::Tags; use uv_pypi_types::{HashDigest, HashDigests}; +use uv_redacted::DisplaySafeUrl; use uv_types::{BuildContext, BuildStack}; use crate::archive::Archive; @@ -178,12 +178,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { match dist { BuiltDist::Registry(wheels) => { let wheel = wheels.best_wheel(); - let url = match &wheel.file.url { - FileLocation::RelativeUrl(base, url) => { - uv_pypi_types::base_url_join_relative(base, url)? - } - FileLocation::AbsoluteUrl(url) => url.to_url()?, - }; + let url = wheel.file.url.to_url()?; // Create a cache entry for the wheel. let wheel_entry = self.build_context.cache().entry( @@ -417,15 +412,6 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { dist: &BuiltDist, hashes: HashPolicy<'_>, ) -> Result { - // If the metadata was provided by the user directly, prefer it. 
- if let Some(metadata) = self - .build_context - .dependency_metadata() - .get(dist.name(), Some(dist.version())) - { - return Ok(ArchiveMetadata::from_metadata23(metadata.clone())); - } - // If hash generation is enabled, and the distribution isn't hosted on a registry, get the // entire wheel to ensure that the hashes are included in the response. If the distribution // is hosted on an index, the hashes will be included in the simple metadata response. @@ -442,7 +428,16 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { // TODO(charlie): Request the hashes via a separate method, to reduce the coupling in this API. if hashes.is_generate(dist) { let wheel = self.get_wheel(dist, hashes).await?; - let metadata = wheel.metadata()?; + // If the metadata was provided by the user directly, prefer it. + let metadata = if let Some(metadata) = self + .build_context + .dependency_metadata() + .get(dist.name(), Some(dist.version())) + { + metadata.clone() + } else { + wheel.metadata()? + }; let hashes = wheel.hashes; return Ok(ArchiveMetadata { metadata: Metadata::from_metadata23(metadata), @@ -450,6 +445,15 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { }); } + // If the metadata was provided by the user directly, prefer it. + if let Some(metadata) = self + .build_context + .dependency_metadata() + .get(dist.name(), Some(dist.version())) + { + return Ok(ArchiveMetadata::from_metadata23(metadata.clone())); + } + let result = self .client .managed(|client| { @@ -529,7 +533,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { /// Stream a wheel from a URL, unzipping it into the cache as it's downloaded. async fn stream_wheel( &self, - url: Url, + url: DisplaySafeUrl, filename: &WheelFilename, size: Option, wheel_entry: &CacheEntry, @@ -592,7 +596,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { let id = self .build_context .cache() - .persist(temp_dir.into_path(), wheel_entry.path()) + .persist(temp_dir.keep(), wheel_entry.path()) .await .map_err(Error::CacheRead)?; @@ -634,8 +638,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { }) .await .map_err(|err| match err { - CachedClientError::Callback(err) => err, - CachedClientError::Client(err) => Error::Client(err), + CachedClientError::Callback { err, .. } => err, + CachedClientError::Client { err, .. } => Error::Client(err), })?; // If the archive is missing the required hashes, or has since been removed, force a refresh. @@ -653,8 +657,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { .skip_cache_with_retry(self.request(url)?, &http_entry, download) .await .map_err(|err| match err { - CachedClientError::Callback(err) => err, - CachedClientError::Client(err) => Error::Client(err), + CachedClientError::Callback { err, .. } => err, + CachedClientError::Client { err, .. } => Error::Client(err), }) }) .await? @@ -666,7 +670,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { /// Download a wheel from a URL, then unzip it into the cache. 
async fn download_wheel( &self, - url: Url, + url: DisplaySafeUrl, filename: &WheelFilename, size: Option, wheel_entry: &CacheEntry, @@ -763,7 +767,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { let id = self .build_context .cache() - .persist(temp_dir.into_path(), wheel_entry.path()) + .persist(temp_dir.keep(), wheel_entry.path()) .await .map_err(Error::CacheRead)?; @@ -801,8 +805,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { }) .await .map_err(|err| match err { - CachedClientError::Callback(err) => err, - CachedClientError::Client(err) => Error::Client(err), + CachedClientError::Callback { err, .. } => err, + CachedClientError::Client { err, .. } => Error::Client(err), })?; // If the archive is missing the required hashes, or has since been removed, force a refresh. @@ -820,8 +824,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { .skip_cache_with_retry(self.request(url)?, &http_entry, download) .await .map_err(|err| match err { - CachedClientError::Callback(err) => err, - CachedClientError::Client(err) => Error::Client(err), + CachedClientError::Callback { err, .. } => err, + CachedClientError::Client { err, .. } => Error::Client(err), }) }) .await? @@ -924,7 +928,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { let id = self .build_context .cache() - .persist(temp_dir.into_path(), wheel_entry.path()) + .persist(temp_dir.keep(), wheel_entry.path()) .await .map_err(Error::CacheWrite)?; @@ -972,7 +976,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { let id = self .build_context .cache() - .persist(temp_dir.into_path(), target) + .persist(temp_dir.keep(), target) .await .map_err(Error::CacheWrite)?; @@ -980,11 +984,11 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { } /// Returns a GET [`reqwest::Request`] for the given URL. - fn request(&self, url: Url) -> Result { + fn request(&self, url: DisplaySafeUrl) -> Result { self.client .unmanaged .uncached_client(&url) - .get(url) + .get(Url::from(url)) .header( // `reqwest` defaults to accepting compressed responses. 
// Specify identity encoding to get consistent .whl downloading diff --git a/crates/uv-distribution/src/error.rs b/crates/uv-distribution/src/error.rs index 2e9dccae6..7d0a9593b 100644 --- a/crates/uv-distribution/src/error.rs +++ b/crates/uv-distribution/src/error.rs @@ -2,7 +2,6 @@ use std::path::PathBuf; use owo_colors::OwoColorize; use tokio::task::JoinError; -use url::Url; use zip::result::ZipError; use crate::metadata::MetadataError; @@ -13,6 +12,7 @@ use uv_fs::Simplified; use uv_normalize::PackageName; use uv_pep440::{Version, VersionSpecifiers}; use uv_pypi_types::{HashAlgorithm, HashDigest}; +use uv_redacted::DisplaySafeUrl; use uv_types::AnyErrorBuild; #[derive(Debug, thiserror::Error)] @@ -25,10 +25,8 @@ pub enum Error { RelativePath(PathBuf), #[error(transparent)] InvalidUrl(#[from] uv_distribution_types::ToUrlError), - #[error(transparent)] - JoinRelativeUrl(#[from] uv_pypi_types::JoinRelativeError), #[error("Expected a file URL, but received: {0}")] - NonFileUrl(Url), + NonFileUrl(DisplaySafeUrl), #[error(transparent)] Git(#[from] uv_git::GitResolverError), #[error(transparent)] @@ -89,7 +87,7 @@ pub enum Error { #[error("The source distribution is missing a `PKG-INFO` file")] MissingPkgInfo, #[error("The source distribution `{}` has no subdirectory `{}`", _0, _1.display())] - MissingSubdirectory(Url, PathBuf), + MissingSubdirectory(DisplaySafeUrl, PathBuf), #[error("Failed to extract static metadata from `PKG-INFO`")] PkgInfo(#[source] uv_pypi_types::MetadataError), #[error("Failed to extract metadata from `requires.txt`")] @@ -103,11 +101,13 @@ pub enum Error { #[error(transparent)] MetadataLowering(#[from] MetadataError), #[error("Distribution not found at: {0}")] - NotFound(Url), + NotFound(DisplaySafeUrl), #[error("Attempted to re-extract the source distribution for `{}`, but the {} hash didn't match. Run `{}` to clear the cache.", _0, _1, "uv cache clean".green())] CacheHeal(String, HashAlgorithm), #[error("The source distribution requires Python {0}, but {1} is installed")] RequiresPython(VersionSpecifiers, Version), + #[error("Failed to identify base Python interpreter")] + BaseInterpreter(#[source] std::io::Error), /// A generic request middleware error happened while making a request. /// Refer to the error message for more details. 
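For context on the credential handling above: the free function `redact_credentials` removed from `uv-distribution-types/src/requirement.rs` is superseded by `DisplaySafeUrl::remove_credentials`. The removed helper's behavior, reproduced here as a standalone sketch (whether the new method matches it exactly is an assumption), was:

    use url::Url;

    /// Strip credentials from a URL, but keep the conventional `git` username (with no
    /// password) in SSH URLs such as `ssh://git@github.com/...`.
    fn remove_credentials(url: &mut Url) {
        if url.scheme() == "ssh" && url.username() == "git" && url.password().is_none() {
            return;
        }
        let _ = url.set_password(None);
        let _ = url.set_username("");
    }

    fn main() {
        let mut url = Url::parse("https://user:secret@example.com/repo.git").unwrap();
        remove_credentials(&mut url);
        assert_eq!(url.as_str(), "https://example.com/repo.git");
    }
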
diff --git a/crates/uv-distribution/src/lib.rs b/crates/uv-distribution/src/lib.rs index d7679a5fb..07958f715 100644 --- a/crates/uv-distribution/src/lib.rs +++ b/crates/uv-distribution/src/lib.rs @@ -4,7 +4,7 @@ pub use error::Error; pub use index::{BuiltWheelIndex, RegistryWheelIndex}; pub use metadata::{ ArchiveMetadata, BuildRequires, FlatRequiresDist, LoweredRequirement, LoweringError, Metadata, - MetadataError, RequiresDist, + MetadataError, RequiresDist, SourcedDependencyGroups, }; pub use reporter::Reporter; pub use source::prune; diff --git a/crates/uv-distribution/src/metadata/dependency_groups.rs b/crates/uv-distribution/src/metadata/dependency_groups.rs new file mode 100644 index 000000000..7fb69b516 --- /dev/null +++ b/crates/uv-distribution/src/metadata/dependency_groups.rs @@ -0,0 +1,208 @@ +use std::collections::BTreeMap; +use std::path::{Path, PathBuf}; + +use uv_configuration::SourceStrategy; +use uv_distribution_types::{IndexLocations, Requirement}; +use uv_normalize::{GroupName, PackageName}; +use uv_workspace::dependency_groups::FlatDependencyGroups; +use uv_workspace::pyproject::{Sources, ToolUvSources}; +use uv_workspace::{ + DiscoveryOptions, MemberDiscovery, VirtualProject, WorkspaceCache, WorkspaceError, +}; + +use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError}; + +/// Like [`crate::RequiresDist`] but only supporting dependency-groups. +/// +/// PEP 735 says: +/// +/// > A pyproject.toml file with only `[dependency-groups]` and no other tables is valid. +/// +/// This is a special carveout to enable users to adopt dependency-groups without having +/// to learn about projects. It is supported by `pip install --group`, and thus interfaces +/// like `uv pip install --group` must also support it for interop and conformance. +/// +/// On paper this is trivial to support because dependency-groups are so self-contained +/// that they're basically a `requirements.txt` embedded within a pyproject.toml, so it's +/// fine to just grab that section and handle it independently. +/// +/// However several uv extensions make this complicated, notably, as of this writing: +/// +/// * tool.uv.sources +/// * tool.uv.index +/// +/// These fields may also be present in the pyproject.toml, and, critically, +/// may be defined and inherited in a parent workspace pyproject.toml. +/// +/// Therefore, we need to gracefully degrade from a full workspacey situation all +/// the way down to one of these stub pyproject.tomls the PEP defines. This is why +/// we avoid going through `RequiresDist` -- we don't want to muddy up the "compile a package" +/// logic with support for non-project/workspace pyproject.tomls, and we don't want to +/// muddy this logic up with setuptools fallback modes that `RequiresDist` wants. +/// +/// (We used to shove this feature into that path, and then we would see there's no metadata +/// and try to run setuptools to try to desperately find any metadata, and then error out.) +#[derive(Debug, Clone)] +pub struct SourcedDependencyGroups { + pub name: Option, + pub dependency_groups: BTreeMap>, +} + +impl SourcedDependencyGroups { + /// Lower by considering `tool.uv` in `pyproject.toml` if present, used for Git and directory + /// dependencies. 
+ pub async fn from_virtual_project( + pyproject_path: &Path, + git_member: Option<&GitWorkspaceMember<'_>>, + locations: &IndexLocations, + source_strategy: SourceStrategy, + cache: &WorkspaceCache, + ) -> Result { + let discovery = DiscoveryOptions { + stop_discovery_at: git_member.map(|git_member| { + git_member + .fetch_root + .parent() + .expect("git checkout has a parent") + .to_path_buf() + }), + members: match source_strategy { + SourceStrategy::Enabled => MemberDiscovery::default(), + SourceStrategy::Disabled => MemberDiscovery::None, + }, + }; + + // The subsequent API takes an absolute path to the dir the pyproject is in + let empty = PathBuf::new(); + let absolute_pyproject_path = + std::path::absolute(pyproject_path).map_err(WorkspaceError::Normalize)?; + let project_dir = absolute_pyproject_path.parent().unwrap_or(&empty); + let project = VirtualProject::discover_defaulted(project_dir, &discovery, cache).await?; + + // Collect the dependency groups. + let dependency_groups = + FlatDependencyGroups::from_pyproject_toml(project.root(), project.pyproject_toml())?; + + // If sources/indexes are disabled we can just stop here + let SourceStrategy::Enabled = source_strategy else { + return Ok(Self { + name: project.project_name().cloned(), + dependency_groups: dependency_groups + .into_iter() + .map(|(name, group)| { + let requirements = group + .requirements + .into_iter() + .map(Requirement::from) + .collect(); + (name, requirements) + }) + .collect(), + }); + }; + + // Collect any `tool.uv.index` entries. + let empty = vec![]; + let project_indexes = project + .pyproject_toml() + .tool + .as_ref() + .and_then(|tool| tool.uv.as_ref()) + .and_then(|uv| uv.index.as_deref()) + .unwrap_or(&empty); + + // Collect any `tool.uv.sources` and `tool.uv.dev_dependencies` from `pyproject.toml`. + let empty = BTreeMap::default(); + let project_sources = project + .pyproject_toml() + .tool + .as_ref() + .and_then(|tool| tool.uv.as_ref()) + .and_then(|uv| uv.sources.as_ref()) + .map(ToolUvSources::inner) + .unwrap_or(&empty); + + // Now that we've resolved the dependency groups, we can validate that each source references + // a valid extra or group, if present. + Self::validate_sources(project_sources, &dependency_groups)?; + + // Lower the dependency groups. + let dependency_groups = dependency_groups + .into_iter() + .map(|(name, group)| { + let requirements = group + .requirements + .into_iter() + .flat_map(|requirement| { + let requirement_name = requirement.name.clone(); + let group = name.clone(); + let extra = None; + LoweredRequirement::from_requirement( + requirement, + project.project_name(), + project.root(), + project_sources, + project_indexes, + extra, + Some(&group), + locations, + project.workspace(), + git_member, + ) + .map(move |requirement| match requirement { + Ok(requirement) => Ok(requirement.into_inner()), + Err(err) => Err(MetadataError::GroupLoweringError( + group.clone(), + requirement_name.clone(), + Box::new(err), + )), + }) + }) + .collect::, _>>()?; + Ok::<(GroupName, Box<_>), MetadataError>((name, requirements)) + }) + .collect::, _>>()?; + + Ok(Self { + name: project.project_name().cloned(), + dependency_groups, + }) + } + + /// Validate the sources. + /// + /// If a source is requested with `group`, ensure that the relevant dependency is + /// present in the relevant `dependency-groups` section. 
+ fn validate_sources( + sources: &BTreeMap, + dependency_groups: &FlatDependencyGroups, + ) -> Result<(), MetadataError> { + for (name, sources) in sources { + for source in sources.iter() { + if let Some(group) = source.group() { + // If the group doesn't exist at all, error. + let Some(flat_group) = dependency_groups.get(group) else { + return Err(MetadataError::MissingSourceGroup( + name.clone(), + group.clone(), + )); + }; + + // If there is no such requirement with the group, error. + if !flat_group + .requirements + .iter() + .any(|requirement| requirement.name == *name) + { + return Err(MetadataError::IncompleteSourceGroup( + name.clone(), + group.clone(), + )); + } + } + } + } + + Ok(()) + } +} diff --git a/crates/uv-distribution/src/metadata/lowering.rs b/crates/uv-distribution/src/metadata/lowering.rs index 197b9de06..330075842 100644 --- a/crates/uv-distribution/src/metadata/lowering.rs +++ b/crates/uv-distribution/src/metadata/lowering.rs @@ -4,7 +4,6 @@ use std::path::{Path, PathBuf}; use either::Either; use thiserror::Error; -use url::Url; use uv_distribution_filename::DistExtension; use uv_distribution_types::{ @@ -14,7 +13,8 @@ use uv_git_types::{GitReference, GitUrl, GitUrlParseError}; use uv_normalize::{ExtraName, GroupName, PackageName}; use uv_pep440::VersionSpecifiers; use uv_pep508::{MarkerTree, VerbatimUrl, VersionOrUrl, looks_like_git_repository}; -use uv_pypi_types::{ConflictItem, ParsedUrlError, VerbatimParsedUrl}; +use uv_pypi_types::{ConflictItem, ParsedGitUrl, ParsedUrlError, VerbatimParsedUrl}; +use uv_redacted::DisplaySafeUrl; use uv_workspace::Workspace; use uv_workspace::pyproject::{PyProjectToml, Source, Sources}; @@ -528,11 +528,11 @@ pub enum LoweringError { #[error(transparent)] InvalidVerbatimUrl(#[from] uv_pep508::VerbatimUrlError), #[error("Fragments are not allowed in URLs: `{0}`")] - ForbiddenFragment(Url), + ForbiddenFragment(DisplaySafeUrl), #[error( "`{0}` is associated with a URL source, but references a Git repository. Consider using a Git source instead (e.g., `{0} = {{ git = \"{1}\" }}`)" )] - MissingGitSource(PackageName, Url), + MissingGitSource(PackageName, DisplaySafeUrl), #[error("`workspace = false` is not yet supported")] WorkspaceFalse, #[error("Source with `editable = true` must refer to a local directory, not a file: `{0}`")] @@ -572,7 +572,7 @@ impl std::fmt::Display for SourceKind { /// Convert a Git source into a [`RequirementSource`]. fn git_source( - git: &Url, + git: &DisplaySafeUrl, subdirectory: Option>, rev: Option, tag: Option, @@ -587,9 +587,10 @@ fn git_source( }; // Create a PEP 508-compatible URL. - let mut url = Url::parse(&format!("git+{git}"))?; + let mut url = DisplaySafeUrl::parse(&format!("git+{git}"))?; if let Some(rev) = reference.as_str() { - url.set_path(&format!("{}@{}", url.path(), rev)); + let path = format!("{}@{}", url.path(), rev); + url.set_path(&path); } if let Some(subdirectory) = subdirectory.as_ref() { let subdirectory = subdirectory @@ -611,7 +612,7 @@ fn git_source( /// Convert a URL source into a [`RequirementSource`]. 
fn url_source( requirement: &uv_pep508::Requirement, - url: Url, + url: DisplaySafeUrl, subdirectory: Option>, ) -> Result { let mut verbatim_url = url.clone(); @@ -699,17 +700,23 @@ fn path_source( }; if is_dir { if let Some(git_member) = git_member { + let git = git_member.git_source.git.clone(); let subdirectory = uv_fs::relative_to(install_path, git_member.fetch_root) .expect("Workspace member must be relative"); let subdirectory = uv_fs::normalize_path_buf(subdirectory); + let subdirectory = if subdirectory == PathBuf::new() { + None + } else { + Some(subdirectory.into_boxed_path()) + }; + let url = DisplaySafeUrl::from(ParsedGitUrl { + url: git.clone(), + subdirectory: subdirectory.clone(), + }); return Ok(RequirementSource::Git { - git: git_member.git_source.git.clone(), - subdirectory: if subdirectory == PathBuf::new() { - None - } else { - Some(subdirectory.into_boxed_path()) - }, - url, + git, + subdirectory, + url: VerbatimUrl::from_url(url), }); } diff --git a/crates/uv-distribution/src/metadata/mod.rs b/crates/uv-distribution/src/metadata/mod.rs index 8a6fd4064..a56a1c354 100644 --- a/crates/uv-distribution/src/metadata/mod.rs +++ b/crates/uv-distribution/src/metadata/mod.rs @@ -12,11 +12,13 @@ use uv_workspace::dependency_groups::DependencyGroupError; use uv_workspace::{WorkspaceCache, WorkspaceError}; pub use crate::metadata::build_requires::BuildRequires; +pub use crate::metadata::dependency_groups::SourcedDependencyGroups; pub use crate::metadata::lowering::LoweredRequirement; pub use crate::metadata::lowering::LoweringError; pub use crate::metadata::requires_dist::{FlatRequiresDist, RequiresDist}; mod build_requires; +mod dependency_groups; mod lowering; mod requires_dist; @@ -142,11 +144,6 @@ impl ArchiveMetadata { hashes: HashDigests::empty(), } } - - /// Create an [`ArchiveMetadata`] with the given metadata and hashes. - pub fn with_hashes(metadata: Metadata, hashes: HashDigests) -> Self { - Self { metadata, hashes } - } } impl From for ArchiveMetadata { diff --git a/crates/uv-distribution/src/metadata/requires_dist.rs b/crates/uv-distribution/src/metadata/requires_dist.rs index d728ed58b..e9f36f174 100644 --- a/crates/uv-distribution/src/metadata/requires_dist.rs +++ b/crates/uv-distribution/src/metadata/requires_dist.rs @@ -6,7 +6,7 @@ use rustc_hash::FxHashSet; use uv_configuration::SourceStrategy; use uv_distribution_types::{IndexLocations, Requirement}; -use uv_normalize::{DEV_DEPENDENCIES, ExtraName, GroupName, PackageName}; +use uv_normalize::{ExtraName, GroupName, PackageName}; use uv_pep508::MarkerTree; use uv_workspace::dependency_groups::FlatDependencyGroups; use uv_workspace::pyproject::{Sources, ToolUvSources}; @@ -107,41 +107,10 @@ impl RequiresDist { SourceStrategy::Disabled => &empty, }; - // Collect the dependency groups. - let dependency_groups = { - // First, collect `tool.uv.dev_dependencies` - let dev_dependencies = project_workspace - .current_project() - .pyproject_toml() - .tool - .as_ref() - .and_then(|tool| tool.uv.as_ref()) - .and_then(|uv| uv.dev_dependencies.as_ref()); - - // Then, collect `dependency-groups` - let dependency_groups = project_workspace - .current_project() - .pyproject_toml() - .dependency_groups - .iter() - .flatten() - .collect::>(); - - // Flatten the dependency groups. - let mut dependency_groups = - FlatDependencyGroups::from_dependency_groups(&dependency_groups) - .map_err(|err| err.with_dev_dependencies(dev_dependencies))?; - - // Add the `dev` group, if `dev-dependencies` is defined. 
- if let Some(dev_dependencies) = dev_dependencies { - dependency_groups - .entry(DEV_DEPENDENCIES.clone()) - .or_insert_with(Vec::new) - .extend(dev_dependencies.clone()); - } - - dependency_groups - }; + let dependency_groups = FlatDependencyGroups::from_pyproject_toml( + project_workspace.current_project().root(), + project_workspace.current_project().pyproject_toml(), + )?; // Now that we've resolved the dependency groups, we can validate that each source references // a valid extra or group, if present. @@ -150,9 +119,10 @@ impl RequiresDist { // Lower the dependency groups. let dependency_groups = dependency_groups .into_iter() - .map(|(name, requirements)| { + .map(|(name, flat_group)| { let requirements = match source_strategy { - SourceStrategy::Enabled => requirements + SourceStrategy::Enabled => flat_group + .requirements .into_iter() .flat_map(|requirement| { let requirement_name = requirement.name.clone(); @@ -182,9 +152,11 @@ impl RequiresDist { ) }) .collect::, _>>(), - SourceStrategy::Disabled => { - Ok(requirements.into_iter().map(Requirement::from).collect()) - } + SourceStrategy::Disabled => Ok(flat_group + .requirements + .into_iter() + .map(Requirement::from) + .collect()), }?; Ok::<(GroupName, Box<_>), MetadataError>((name, requirements)) }) @@ -265,7 +237,7 @@ impl RequiresDist { if let Some(group) = source.group() { // If the group doesn't exist at all, error. - let Some(dependencies) = dependency_groups.get(group) else { + let Some(flat_group) = dependency_groups.get(group) else { return Err(MetadataError::MissingSourceGroup( name.clone(), group.clone(), @@ -273,7 +245,8 @@ impl RequiresDist { }; // If there is no such requirement with the group, error. - if !dependencies + if !flat_group + .requirements .iter() .any(|requirement| requirement.name == *name) { diff --git a/crates/uv-distribution/src/reporter.rs b/crates/uv-distribution/src/reporter.rs index 9be3a5fa4..befc21a18 100644 --- a/crates/uv-distribution/src/reporter.rs +++ b/crates/uv-distribution/src/reporter.rs @@ -1,9 +1,8 @@ use std::sync::Arc; -use url::Url; - use uv_distribution_types::BuildableSource; use uv_pep508::PackageName; +use uv_redacted::DisplaySafeUrl; pub trait Reporter: Send + Sync { /// Callback to invoke when a source distribution build is kicked off. @@ -13,10 +12,10 @@ pub trait Reporter: Send + Sync { fn on_build_complete(&self, source: &BuildableSource, id: usize); /// Callback to invoke when a repository checkout begins. - fn on_checkout_start(&self, url: &Url, rev: &str) -> usize; + fn on_checkout_start(&self, url: &DisplaySafeUrl, rev: &str) -> usize; /// Callback to invoke when a repository checkout completes. - fn on_checkout_complete(&self, url: &Url, rev: &str, id: usize); + fn on_checkout_complete(&self, url: &DisplaySafeUrl, rev: &str, id: usize); /// Callback to invoke when a download is kicked off. 
fn on_download_start(&self, name: &PackageName, size: Option) -> usize; @@ -44,11 +43,11 @@ struct Facade { } impl uv_git::Reporter for Facade { - fn on_checkout_start(&self, url: &Url, rev: &str) -> usize { + fn on_checkout_start(&self, url: &DisplaySafeUrl, rev: &str) -> usize { self.reporter.on_checkout_start(url, rev) } - fn on_checkout_complete(&self, url: &Url, rev: &str, id: usize) { + fn on_checkout_complete(&self, url: &DisplaySafeUrl, rev: &str, id: usize) { self.reporter.on_checkout_complete(url, rev, id); } } diff --git a/crates/uv-distribution/src/source/mod.rs b/crates/uv-distribution/src/source/mod.rs index 9173106dc..92d83e6ce 100644 --- a/crates/uv-distribution/src/source/mod.rs +++ b/crates/uv-distribution/src/source/mod.rs @@ -20,6 +20,7 @@ use reqwest::{Response, StatusCode}; use tokio_util::compat::FuturesAsyncReadCompatExt; use tracing::{Instrument, debug, info_span, instrument, warn}; use url::Url; +use uv_redacted::DisplaySafeUrl; use zip::ZipArchive; use uv_cache::{Cache, CacheBucket, CacheEntry, CacheShard, Removal, WheelCache}; @@ -31,8 +32,8 @@ use uv_client::{ use uv_configuration::{BuildKind, BuildOutput, SourceStrategy}; use uv_distribution_filename::{SourceDistExtension, WheelFilename}; use uv_distribution_types::{ - BuildableSource, DirectorySourceUrl, FileLocation, GitSourceUrl, HashPolicy, Hashed, - PathSourceUrl, SourceDist, SourceUrl, + BuildableSource, DirectorySourceUrl, GitSourceUrl, HashPolicy, Hashed, PathSourceUrl, + SourceDist, SourceUrl, }; use uv_extract::hash::Hasher; use uv_fs::{rename_with_retry, write_atomic}; @@ -42,7 +43,7 @@ use uv_normalize::PackageName; use uv_pep440::{Version, release_specifiers_to_ranges}; use uv_platform_tags::Tags; use uv_pypi_types::{HashAlgorithm, HashDigest, HashDigests, PyProjectToml, ResolutionMetadata}; -use uv_types::{BuildContext, BuildStack, SourceBuildTrait}; +use uv_types::{BuildContext, BuildKey, BuildStack, SourceBuildTrait}; use uv_workspace::pyproject::ToolUvSources; use crate::distribution_database::ManagedClient; @@ -121,12 +122,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { .join(dist.version.to_string()), ); - let url = match &dist.file.url { - FileLocation::RelativeUrl(base, url) => { - uv_pypi_types::base_url_join_relative(base, url)? - } - FileLocation::AbsoluteUrl(url) => url.to_url()?, - }; + let url = dist.file.url.to_url()?; // If the URL is a file URL, use the local path directly. if url.scheme() == "file" { @@ -270,12 +266,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { .join(dist.version.to_string()), ); - let url = match &dist.file.url { - FileLocation::RelativeUrl(base, url) => { - uv_pypi_types::base_url_join_relative(base, url)? - } - FileLocation::AbsoluteUrl(url) => url.to_url()?, - }; + let url = dist.file.url.to_url()?; // If the URL is a file URL, use the local path directly. 
if url.scheme() == "file" { @@ -386,7 +377,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { async fn url<'data>( &self, source: &BuildableSource<'data>, - url: &'data Url, + url: &'data DisplaySafeUrl, cache_shard: &CacheShard, subdirectory: Option<&'data Path>, ext: SourceDistExtension, @@ -582,7 +573,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { if let Some(subdirectory) = subdirectory { if !source_dist_entry.path().join(subdirectory).is_dir() { return Err(Error::MissingSubdirectory( - url.clone(), + DisplaySafeUrl::from(url.clone()), subdirectory.to_path_buf(), )); } @@ -715,7 +706,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { .boxed_local() .instrument(info_span!("download", source_dist = %source)) }; - let req = Self::request(url.clone(), client.unmanaged)?; + let req = Self::request(DisplaySafeUrl::from(url.clone()), client.unmanaged)?; let revision = client .managed(|client| { client.cached_client().get_serde_with_retry( @@ -727,8 +718,8 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { }) .await .map_err(|err| match err { - CachedClientError::Callback(err) => err, - CachedClientError::Client(err) => Error::Client(err), + CachedClientError::Callback { err, .. } => err, + CachedClientError::Client { err, .. } => Error::Client(err), })?; // If the archive is missing the required hashes, force a refresh. @@ -740,14 +731,14 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { client .cached_client() .skip_cache_with_retry( - Self::request(url.clone(), client)?, + Self::request(DisplaySafeUrl::from(url.clone()), client)?, &cache_entry, download, ) .await .map_err(|err| match err { - CachedClientError::Callback(err) => err, - CachedClientError::Client(err) => Error::Client(err), + CachedClientError::Callback { err, .. } => err, + CachedClientError::Client { err, .. } => Error::Client(err), }) }) .await @@ -1582,7 +1573,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { client .unmanaged .uncached_client(resource.git.repository()) - .clone(), + .raw_client(), ) .await { @@ -1859,13 +1850,22 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { } }; + // If the URL is already precise, return it. + if self.build_context.git().get_precise(git).is_some() { + debug!("Precise commit already known: {source}"); + return Ok(()); + } + // If this is GitHub URL, attempt to resolve to a precise commit using the GitHub API. if self .build_context .git() .github_fast_path( git, - client.unmanaged.uncached_client(git.repository()).clone(), + client + .unmanaged + .uncached_client(git.repository()) + .raw_client(), ) .await? .is_some() @@ -2077,14 +2077,14 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { client .cached_client() .skip_cache_with_retry( - Self::request(url.clone(), client)?, + Self::request(DisplaySafeUrl::from(url.clone()), client)?, &cache_entry, download, ) .await .map_err(|err| match err { - CachedClientError::Callback(err) => err, - CachedClientError::Client(err) => Error::Client(err), + CachedClientError::Callback { err, .. } => err, + CachedClientError::Client { err, .. } => Error::Client(err), }) }) .await @@ -2135,7 +2135,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { // Extract the top-level directory. 
let extracted = match uv_extract::strip_component(temp_dir.path()) { Ok(top_level) => top_level, - Err(uv_extract::Error::NonSingularArchive(_)) => temp_dir.into_path(), + Err(uv_extract::Error::NonSingularArchive(_)) => temp_dir.keep(), Err(err) => { return Err(Error::Extract( temp_dir.path().to_string_lossy().into_owned(), @@ -2266,6 +2266,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { fs::create_dir_all(&cache_shard) .await .map_err(Error::CacheWrite)?; + // Try a direct build if that isn't disabled and the uv build backend is used. let disk_filename = if let Some(name) = self .build_context @@ -2286,27 +2287,73 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { // In the uv build backend, the normalized filename and the disk filename are the same. name.to_string() } else { - self.build_context - .setup_build( - source_root, - subdirectory, - source_root, - Some(&source.to_string()), - source.as_dist(), - source_strategy, - if source.is_editable() { - BuildKind::Editable - } else { - BuildKind::Wheel - }, - BuildOutput::Debug, - self.build_stack.cloned().unwrap_or_default(), - ) - .await - .map_err(|err| Error::Build(err.into()))? - .wheel(temp_dir.path()) - .await - .map_err(Error::Build)? + // Identify the base Python interpreter to use in the cache key. + let base_python = if cfg!(unix) { + self.build_context + .interpreter() + .find_base_python() + .map_err(Error::BaseInterpreter)? + } else { + self.build_context + .interpreter() + .to_base_python() + .map_err(Error::BaseInterpreter)? + }; + + let build_kind = if source.is_editable() { + BuildKind::Editable + } else { + BuildKind::Wheel + }; + + let build_key = BuildKey { + base_python: base_python.into_boxed_path(), + source_root: source_root.to_path_buf().into_boxed_path(), + subdirectory: subdirectory + .map(|subdirectory| subdirectory.to_path_buf().into_boxed_path()), + source_strategy, + build_kind, + }; + + if let Some(builder) = self.build_context.build_arena().remove(&build_key) { + debug!("Creating build environment for: {source}"); + let wheel = builder.wheel(temp_dir.path()).await.map_err(Error::Build)?; + + // Store the build context. + self.build_context.build_arena().insert(build_key, builder); + + wheel + } else { + debug!("Reusing existing build environment for: {source}"); + + let builder = self + .build_context + .setup_build( + source_root, + subdirectory, + source_root, + Some(&source.to_string()), + source.as_dist(), + source_strategy, + if source.is_editable() { + BuildKind::Editable + } else { + BuildKind::Wheel + }, + BuildOutput::Debug, + self.build_stack.cloned().unwrap_or_default(), + ) + .await + .map_err(|err| Error::Build(err.into()))?; + + // Build the wheel. + let wheel = builder.wheel(temp_dir.path()).await.map_err(Error::Build)?; + + // Store the build context. + self.build_context.build_arena().insert(build_key, builder); + + wheel + } }; // Read the metadata from the wheel. @@ -2361,6 +2408,26 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { } } + // Identify the base Python interpreter to use in the cache key. + let base_python = if cfg!(unix) { + self.build_context + .interpreter() + .find_base_python() + .map_err(Error::BaseInterpreter)? + } else { + self.build_context + .interpreter() + .to_base_python() + .map_err(Error::BaseInterpreter)? + }; + + // Determine whether this is an editable or non-editable build. + let build_kind = if source.is_editable() { + BuildKind::Editable + } else { + BuildKind::Wheel + }; + // Set up the builder. 
let mut builder = self .build_context @@ -2371,11 +2438,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { Some(&source.to_string()), source.as_dist(), source_strategy, - if source.is_editable() { - BuildKind::Editable - } else { - BuildKind::Wheel - }, + build_kind, BuildOutput::Debug, self.build_stack.cloned().unwrap_or_default(), ) @@ -2384,6 +2447,21 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { // Build the metadata. let dist_info = builder.metadata().await.map_err(Error::Build)?; + + // Store the build context. + self.build_context.build_arena().insert( + BuildKey { + base_python: base_python.into_boxed_path(), + source_root: source_root.to_path_buf().into_boxed_path(), + subdirectory: subdirectory + .map(|subdirectory| subdirectory.to_path_buf().into_boxed_path()), + source_strategy, + build_kind, + }, + builder, + ); + + // Return the `.dist-info` directory, if it exists. let Some(dist_info) = dist_info else { return Ok(None); }; @@ -2402,10 +2480,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { } /// Returns a GET [`reqwest::Request`] for the given URL. - fn request(url: Url, client: &RegistryClient) -> Result { + fn request( + url: DisplaySafeUrl, + client: &RegistryClient, + ) -> Result { client .uncached_client(&url) - .get(url) + .get(Url::from(url)) .header( // `reqwest` defaults to accepting compressed responses. // Specify identity encoding to get consistent .whl downloading diff --git a/crates/uv-extract/src/error.rs b/crates/uv-extract/src/error.rs index 09191bb0a..ae2fdff1a 100644 --- a/crates/uv-extract/src/error.rs +++ b/crates/uv-extract/src/error.rs @@ -2,11 +2,11 @@ use std::{ffi::OsString, path::PathBuf}; #[derive(Debug, thiserror::Error)] pub enum Error { - #[error(transparent)] + #[error("Failed to read from zip file")] Zip(#[from] zip::result::ZipError), - #[error(transparent)] + #[error("Failed to read from zip file")] AsyncZip(#[from] async_zip::error::ZipError), - #[error(transparent)] + #[error("I/O operation failed during extraction")] Io(#[from] std::io::Error), #[error( "The top-level of the archive must only contain a list directory, but it contains: {0:?}" diff --git a/crates/uv-fs/Cargo.toml b/crates/uv-fs/Cargo.toml index 47271be54..fba4910e6 100644 --- a/crates/uv-fs/Cargo.toml +++ b/crates/uv-fs/Cargo.toml @@ -16,7 +16,6 @@ doctest = false workspace = true [dependencies] - dunce = { workspace = true } either = { workspace = true } encoding_rs_io = { workspace = true } @@ -31,15 +30,14 @@ tempfile = { workspace = true } tokio = { workspace = true, optional = true} tracing = { workspace = true } -[target.'cfg(target_os = "windows")'.dependencies] -winsafe = { workspace = true } - [target.'cfg(any(unix, target_os = "wasi", target_os = "redox"))'.dependencies] rustix = { workspace = true } [target.'cfg(windows)'.dependencies] backon = { workspace = true } junction = { workspace = true } +windows = { workspace = true } +windows-core = { workspace = true } [features] default = [] diff --git a/crates/uv-fs/src/lib.rs b/crates/uv-fs/src/lib.rs index 0b5055b40..dcc0f00b2 100644 --- a/crates/uv-fs/src/lib.rs +++ b/crates/uv-fs/src/lib.rs @@ -575,8 +575,33 @@ pub fn is_temporary(path: impl AsRef) -> bool { .is_some_and(|name| name.starts_with(".tmp")) } +/// Checks if the grandparent directory of the given executable is the base +/// of a virtual environment. 
+/// +/// The procedure described in PEP 405 includes checking both the parent and +/// grandparent directory of an executable, but in practice we've found this to +/// be unnecessary. +pub fn is_virtualenv_executable(executable: impl AsRef) -> bool { + executable + .as_ref() + .parent() + .and_then(Path::parent) + .is_some_and(is_virtualenv_base) +} + +/// Returns `true` if a path is the base path of a virtual environment, +/// indicated by the presence of a `pyvenv.cfg` file. +/// +/// The procedure described in PEP 405 includes scanning `pyvenv.cfg` +/// for a `home` key, but in practice we've found this to be +/// unnecessary. +pub fn is_virtualenv_base(path: impl AsRef) -> bool { + path.as_ref().join("pyvenv.cfg").is_file() +} + /// A file lock that is automatically released when dropped. #[derive(Debug)] +#[must_use] pub struct LockedFile(fs_err::File); impl LockedFile { diff --git a/crates/uv-fs/src/path.rs b/crates/uv-fs/src/path.rs index 7a75c76c3..40e579f8e 100644 --- a/crates/uv-fs/src/path.rs +++ b/crates/uv-fs/src/path.rs @@ -277,21 +277,6 @@ fn normalized(path: &Path) -> PathBuf { normalized } -/// Like `fs_err::canonicalize`, but avoids attempting to resolve symlinks on Windows. -pub fn canonicalize_executable(path: impl AsRef) -> std::io::Result { - let path = path.as_ref(); - debug_assert!( - path.is_absolute(), - "path must be absolute: {}", - path.display() - ); - if cfg!(windows) { - Ok(path.to_path_buf()) - } else { - fs_err::canonicalize(path) - } -} - /// Compute a path describing `path` relative to `base`. /// /// `lib/python/site-packages/foo/__init__.py` and `lib/python/site-packages` -> `foo/__init__.py` @@ -345,11 +330,11 @@ pub struct PortablePathBuf(Box); #[cfg(feature = "schemars")] impl schemars::JsonSchema for PortablePathBuf { - fn schema_name() -> String { - PathBuf::schema_name() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("PortablePathBuf") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { + fn json_schema(_gen: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { PathBuf::json_schema(_gen) } } diff --git a/crates/uv-fs/src/which.rs b/crates/uv-fs/src/which.rs index d8342c313..e63174a17 100644 --- a/crates/uv-fs/src/which.rs +++ b/crates/uv-fs/src/which.rs @@ -1,5 +1,26 @@ use std::path::Path; +#[cfg(windows)] +#[allow(unsafe_code)] // We need to do an FFI call through the windows-* crates. +fn get_binary_type(path: &Path) -> windows::core::Result { + use std::os::windows::ffi::OsStrExt; + use windows::Win32::Storage::FileSystem::GetBinaryTypeW; + use windows_core::PCWSTR; + + // References: + // https://github.com/denoland/deno/blob/01a6379505712be34ebf2cdc874fa7f54a6e9408/runtime/permissions/which.rs#L131-L154 + // https://github.com/conradkleinespel/rooster/blob/afa78dc9918535752c4af59d2f812197ad754e5a/src/quale.rs#L51-L77 + let mut binary_type = 0u32; + let name = path + .as_os_str() + .encode_wide() + .chain(Some(0)) + .collect::>(); + // SAFETY: winapi call + unsafe { GetBinaryTypeW(PCWSTR(name.as_ptr()), &raw mut binary_type)? }; + Ok(binary_type) +} + /// Check whether a path in PATH is a valid executable. /// /// Derived from `which`'s `Checker`. 
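Aside on the uv-fs additions above: `is_virtualenv_executable` and `is_virtualenv_base` reduce to a `pyvenv.cfg` existence check two levels above the interpreter. A self-contained sketch of that check (simplified from the generic `impl AsRef<Path>` signatures in the diff):

    use std::path::Path;

    /// Treat an executable as belonging to a virtual environment when its grandparent
    /// directory contains a `pyvenv.cfg` file; the PEP 405 `home`-key scan is skipped.
    fn is_virtualenv_executable(executable: &Path) -> bool {
        executable
            .parent()
            .and_then(Path::parent)
            .is_some_and(|base| base.join("pyvenv.cfg").is_file())
    }

    fn main() {
        // For `.venv/bin/python3`, this checks `.venv/pyvenv.cfg`.
        println!("{}", is_virtualenv_executable(Path::new(".venv/bin/python3")));
    }
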
@@ -20,9 +41,7 @@ pub fn is_executable(path: &Path) -> bool { if !file_type.is_file() && !file_type.is_symlink() { return false; } - if path.extension().is_none() - && winsafe::GetBinaryType(&path.display().to_string()).is_err() - { + if path.extension().is_none() && get_binary_type(path).is_err() { return false; } } diff --git a/crates/uv-git-types/Cargo.toml b/crates/uv-git-types/Cargo.toml index 7158879de..a374d7cbe 100644 --- a/crates/uv-git-types/Cargo.toml +++ b/crates/uv-git-types/Cargo.toml @@ -16,9 +16,9 @@ doctest = false workspace = true [dependencies] +uv-redacted = { workspace = true } + serde = { workspace = true } thiserror = { workspace = true } tracing = { workspace = true } url = { workspace = true } - -uv-redacted = { workspace = true } diff --git a/crates/uv-git-types/src/lib.rs b/crates/uv-git-types/src/lib.rs index 7e2328db0..dbfa02ea3 100644 --- a/crates/uv-git-types/src/lib.rs +++ b/crates/uv-git-types/src/lib.rs @@ -3,9 +3,7 @@ pub use crate::oid::{GitOid, OidParseError}; pub use crate::reference::GitReference; use thiserror::Error; -use url::Url; - -use uv_redacted::redacted_url; +use uv_redacted::DisplaySafeUrl; mod github; mod oid; @@ -16,7 +14,7 @@ pub enum GitUrlParseError { #[error( "Unsupported Git URL scheme `{0}:` in `{1}` (expected one of `https:`, `ssh:`, or `file:`)" )] - UnsupportedGitScheme(String, Url), + UnsupportedGitScheme(String, DisplaySafeUrl), } /// A URL reference to a Git repository. @@ -24,7 +22,7 @@ pub enum GitUrlParseError { pub struct GitUrl { /// The URL of the Git repository, with any query parameters, fragments, and leading `git+` /// removed. - repository: Url, + repository: DisplaySafeUrl, /// The reference to the commit to use, which could be a branch, tag or revision. reference: GitReference, /// The precise commit to use, if known. @@ -34,7 +32,7 @@ pub struct GitUrl { impl GitUrl { /// Create a new [`GitUrl`] from a repository URL and a reference. pub fn from_reference( - repository: Url, + repository: DisplaySafeUrl, reference: GitReference, ) -> Result { Self::from_fields(repository, reference, None) @@ -42,7 +40,7 @@ impl GitUrl { /// Create a new [`GitUrl`] from a repository URL and a precise commit. pub fn from_commit( - repository: Url, + repository: DisplaySafeUrl, reference: GitReference, precise: GitOid, ) -> Result { @@ -51,7 +49,7 @@ impl GitUrl { /// Create a new [`GitUrl`] from a repository URL and a precise commit, if known. pub fn from_fields( - repository: Url, + repository: DisplaySafeUrl, reference: GitReference, precise: Option, ) -> Result { @@ -86,7 +84,7 @@ impl GitUrl { } /// Return the [`Url`] of the Git repository. - pub fn repository(&self) -> &Url { + pub fn repository(&self) -> &DisplaySafeUrl { &self.repository } @@ -101,11 +99,11 @@ impl GitUrl { } } -impl TryFrom for GitUrl { +impl TryFrom for GitUrl { type Error = GitUrlParseError; /// Initialize a [`GitUrl`] source from a URL. - fn try_from(mut url: Url) -> Result { + fn try_from(mut url: DisplaySafeUrl) -> Result { // Remove any query parameters and fragments. url.set_fragment(None); url.set_query(None); @@ -126,13 +124,14 @@ impl TryFrom for GitUrl { } } -impl From for Url { +impl From for DisplaySafeUrl { fn from(git: GitUrl) -> Self { let mut url = git.repository; // If we have a precise commit, add `@` and the commit hash to the URL. 
if let Some(precise) = git.precise { - url.set_path(&format!("{}@{}", url.path(), precise)); + let path = format!("{}@{}", url.path(), precise); + url.set_path(&path); } else { // Otherwise, add the branch or tag name. match git.reference { @@ -141,7 +140,8 @@ impl From for Url { | GitReference::BranchOrTag(rev) | GitReference::NamedRef(rev) | GitReference::BranchOrTagOrCommit(rev) => { - url.set_path(&format!("{}@{}", url.path(), rev)); + let path = format!("{}@{}", url.path(), rev); + url.set_path(&path); } GitReference::DefaultBranch => {} } @@ -153,6 +153,6 @@ impl From for Url { impl std::fmt::Display for GitUrl { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", redacted_url(&self.repository)) + write!(f, "{}", &self.repository) } } diff --git a/crates/uv-git-types/src/oid.rs b/crates/uv-git-types/src/oid.rs index 2ccd376b7..9319f5e34 100644 --- a/crates/uv-git-types/src/oid.rs +++ b/crates/uv-git-types/src/oid.rs @@ -5,31 +5,36 @@ use thiserror::Error; /// Unique identity of any Git object (commit, tree, blob, tag). /// -/// Note this type does not validate whether the input is a valid hash. +/// This type's `FromStr` implementation validates that it's exactly 40 hex characters, i.e. a +/// full-length git commit. +/// +/// If Git's SHA-256 support becomes more widespread in the future (in particular if GitHub ever +/// adds support), we might need to make this an enum. #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct GitOid { - len: usize, bytes: [u8; 40], } impl GitOid { /// Return the string representation of an object ID. pub fn as_str(&self) -> &str { - str::from_utf8(&self.bytes[..self.len]).unwrap() + str::from_utf8(&self.bytes).unwrap() } /// Return a truncated representation, i.e., the first 16 characters of the SHA. 
pub fn as_short_str(&self) -> &str { - self.as_str().get(..16).unwrap_or(self.as_str()) + &self.as_str()[..16] } } #[derive(Debug, Error, PartialEq)] pub enum OidParseError { - #[error("Object ID can be at most 40 hex characters")] - TooLong, #[error("Object ID cannot be parsed from empty string")] Empty, + #[error("Object ID must be exactly 40 hex characters")] + WrongLength, + #[error("Object ID must be valid hex characters")] + NotHex, } impl FromStr for GitOid { @@ -40,17 +45,17 @@ impl FromStr for GitOid { return Err(OidParseError::Empty); } - if s.len() > 40 { - return Err(OidParseError::TooLong); + if s.len() != 40 { + return Err(OidParseError::WrongLength); } - let mut out = [0; 40]; - out[..s.len()].copy_from_slice(s.as_bytes()); + if !s.chars().all(|ch| ch.is_ascii_hexdigit()) { + return Err(OidParseError::NotHex); + } - Ok(GitOid { - len: s.len(), - bytes: out, - }) + let mut bytes = [0; 40]; + bytes.copy_from_slice(s.as_bytes()); + Ok(GitOid { bytes }) } } @@ -101,11 +106,20 @@ mod tests { #[test] fn git_oid() { GitOid::from_str("4a23745badf5bf5ef7928f1e346e9986bd696d82").unwrap(); + GitOid::from_str("4A23745BADF5BF5EF7928F1E346E9986BD696D82").unwrap(); assert_eq!(GitOid::from_str(""), Err(OidParseError::Empty)); assert_eq!( GitOid::from_str(&str::repeat("a", 41)), - Err(OidParseError::TooLong) + Err(OidParseError::WrongLength) + ); + assert_eq!( + GitOid::from_str(&str::repeat("a", 39)), + Err(OidParseError::WrongLength) + ); + assert_eq!( + GitOid::from_str(&str::repeat("x", 40)), + Err(OidParseError::NotHex) ); } } diff --git a/crates/uv-git/src/credentials.rs b/crates/uv-git/src/credentials.rs index d8a67a250..051560980 100644 --- a/crates/uv-git/src/credentials.rs +++ b/crates/uv-git/src/credentials.rs @@ -1,10 +1,9 @@ use std::collections::HashMap; use std::sync::{Arc, LazyLock, RwLock}; use tracing::trace; -use url::Url; use uv_auth::Credentials; use uv_cache_key::RepositoryUrl; -use uv_redacted::redacted_url; +use uv_redacted::DisplaySafeUrl; /// Global authentication cache for a uv invocation. /// @@ -30,9 +29,9 @@ impl GitStore { /// Populate the global authentication store with credentials on a Git URL, if there are any. /// /// Returns `true` if the store was updated. -pub fn store_credentials_from_url(url: &Url) -> bool { +pub fn store_credentials_from_url(url: &DisplaySafeUrl) -> bool { if let Some(credentials) = Credentials::from_url(url) { - trace!("Caching credentials for {}", redacted_url(url)); + trace!("Caching credentials for {url}"); GIT_STORE.insert(RepositoryUrl::new(url), credentials); true } else { diff --git a/crates/uv-git/src/git.rs b/crates/uv-git/src/git.rs index d31444081..4ee4c2670 100644 --- a/crates/uv-git/src/git.rs +++ b/crates/uv-git/src/git.rs @@ -16,9 +16,12 @@ use url::Url; use uv_fs::Simplified; use uv_git_types::{GitHubRepository, GitOid, GitReference}; +use uv_redacted::DisplaySafeUrl; use uv_static::EnvVars; use uv_version::version; +use crate::rate_limit::{GITHUB_RATE_LIMIT_STATUS, is_github_rate_limited}; + /// A file indicates that if present, `git reset` has been done and a repo /// checkout is ready to go. See [`GitCheckout::reset`] for why we need this. const CHECKOUT_READY_LOCK: &str = ".ok"; @@ -132,7 +135,7 @@ impl Display for ReferenceOrOid<'_> { #[derive(PartialEq, Clone, Debug)] pub(crate) struct GitRemote { /// URL to a remote repository. - url: Url, + url: DisplaySafeUrl, } /// A local clone of a remote repository's database. 
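The tightened `GitOid` parsing above accepts only full-length object IDs: exactly 40 ASCII hex digits. The same three error cases in a self-contained sketch (hypothetical `parse_oid` and `OidError` names):

#[derive(Debug, PartialEq)]
enum OidError {
    Empty,
    WrongLength,
    NotHex,
}

// Mirrors the validation order in the diff: empty, then length, then hex digits.
fn parse_oid(s: &str) -> Result<[u8; 40], OidError> {
    if s.is_empty() {
        return Err(OidError::Empty);
    }
    if s.len() != 40 {
        return Err(OidError::WrongLength);
    }
    if !s.chars().all(|c| c.is_ascii_hexdigit()) {
        return Err(OidError::NotHex);
    }
    let mut bytes = [0u8; 40];
    bytes.copy_from_slice(s.as_bytes());
    Ok(bytes)
}

fn main() {
    assert!(parse_oid(&"a".repeat(40)).is_ok());
    assert_eq!(parse_oid(&"a".repeat(39)), Err(OidError::WrongLength));
    assert_eq!(parse_oid(&"x".repeat(40)), Err(OidError::NotHex));
    assert_eq!(parse_oid(""), Err(OidError::Empty));
}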
Multiple [`GitCheckout`]s @@ -205,12 +208,12 @@ impl GitRepository { impl GitRemote { /// Creates an instance for a remote repository URL. - pub(crate) fn new(url: &Url) -> Self { + pub(crate) fn new(url: &DisplaySafeUrl) -> Self { Self { url: url.clone() } } /// Gets the remote repository URL. - pub(crate) fn url(&self) -> &Url { + pub(crate) fn url(&self) -> &DisplaySafeUrl { &self.url } @@ -786,7 +789,15 @@ fn github_fast_path( } }; - let url = format!("https://api.github.com/repos/{owner}/{repo}/commits/{github_branch_name}"); + // Check if we're rate-limited by GitHub before determining the FastPathRev + if GITHUB_RATE_LIMIT_STATUS.is_active() { + debug!("Skipping GitHub fast path attempt for: {url} (rate-limited)"); + return Ok(FastPathRev::Indeterminate); + } + + let base_url = std::env::var(EnvVars::UV_GITHUB_FAST_PATH_URL) + .unwrap_or("https://api.github.com/repos".to_owned()); + let url = format!("{base_url}/{owner}/{repo}/commits/{github_branch_name}"); let runtime = tokio::runtime::Builder::new_current_thread() .enable_all() @@ -806,6 +817,11 @@ fn github_fast_path( let response = request.send().await?; + if is_github_rate_limited(&response) { + // Mark that we are being rate-limited by GitHub + GITHUB_RATE_LIMIT_STATUS.activate(); + } + // GitHub returns a 404 if the repository does not exist, and a 422 if it exists but GitHub // is unable to resolve the requested revision. response.error_for_status_ref()?; diff --git a/crates/uv-git/src/lib.rs b/crates/uv-git/src/lib.rs index ef23e58c2..716eb7538 100644 --- a/crates/uv-git/src/lib.rs +++ b/crates/uv-git/src/lib.rs @@ -7,5 +7,6 @@ pub use crate::source::{Fetch, GitSource, Reporter}; mod credentials; mod git; +mod rate_limit; mod resolver; mod source; diff --git a/crates/uv-git/src/rate_limit.rs b/crates/uv-git/src/rate_limit.rs new file mode 100644 index 000000000..4d277e652 --- /dev/null +++ b/crates/uv-git/src/rate_limit.rs @@ -0,0 +1,37 @@ +use reqwest::{Response, StatusCode}; +use std::sync::atomic::{AtomicBool, Ordering}; + +/// A global state on whether we are being rate-limited by GitHub's REST API. +/// If we are, avoid "fast-path" attempts. +pub(crate) static GITHUB_RATE_LIMIT_STATUS: GitHubRateLimitStatus = GitHubRateLimitStatus::new(); + +/// GitHub REST API rate limit status tracker. +/// +/// ## Assumptions +/// +/// The rate limit timeout duration is much longer than the runtime of a `uv` command. +/// And so we do not need to invalidate this state based on `x-ratelimit-reset`. +#[derive(Debug)] +pub(crate) struct GitHubRateLimitStatus(AtomicBool); + +impl GitHubRateLimitStatus { + const fn new() -> Self { + Self(AtomicBool::new(false)) + } + + pub(crate) fn activate(&self) { + self.0.store(true, Ordering::Relaxed); + } + + pub(crate) fn is_active(&self) -> bool { + self.0.load(Ordering::Relaxed) + } +} + +/// Determine if GitHub is applying rate-limiting based on the response +pub(crate) fn is_github_rate_limited(response: &Response) -> bool { + // HTTP 403 and 429 are possible status codes in the event of a primary or secondary rate limit. 
+ // Source: https://docs.github.com/en/rest/using-the-rest-api/troubleshooting-the-rest-api?apiVersion=2022-11-28#rate-limit-errors + let status_code = response.status(); + status_code == StatusCode::FORBIDDEN || status_code == StatusCode::TOO_MANY_REQUESTS +} diff --git a/crates/uv-git/src/resolver.rs b/crates/uv-git/src/resolver.rs index 9335aed4d..3c12fc589 100644 --- a/crates/uv-git/src/resolver.rs +++ b/crates/uv-git/src/resolver.rs @@ -12,9 +12,13 @@ use tracing::debug; use uv_cache_key::{RepositoryUrl, cache_digest}; use uv_fs::LockedFile; use uv_git_types::{GitHubRepository, GitOid, GitReference, GitUrl}; +use uv_static::EnvVars; use uv_version::version; -use crate::{Fetch, GitSource, Reporter}; +use crate::{ + Fetch, GitSource, Reporter, + rate_limit::{GITHUB_RATE_LIMIT_STATUS, is_github_rate_limited}, +}; #[derive(Debug, thiserror::Error)] pub enum GitResolverError { @@ -45,6 +49,21 @@ impl GitResolver { self.0.get(reference) } + pub fn get_precise(&self, url: &GitUrl) -> Option { + // If the URL is already precise, return it. + if let Some(precise) = url.precise() { + return Some(precise); + } + + // If we know the precise commit already, return it. + let reference = RepositoryReference::from(url); + if let Some(precise) = self.get(&reference) { + return Some(*precise); + } + + None + } + /// Resolve a Git URL to a specific commit without performing any Git operations. /// /// Returns a [`GitOid`] if the URL has already been resolved (i.e., is available in the cache), @@ -52,18 +71,15 @@ impl GitResolver { pub async fn github_fast_path( &self, url: &GitUrl, - client: ClientWithMiddleware, + client: &ClientWithMiddleware, ) -> Result, GitResolverError> { - let reference = RepositoryReference::from(url); - - // If the URL is already precise, return it. - if let Some(precise) = url.precise() { - return Ok(Some(precise)); + if std::env::var_os(EnvVars::UV_NO_GITHUB_FAST_PATH).is_some() { + return Ok(None); } - // If we know the precise commit already, return it. - if let Some(precise) = self.get(&reference) { - return Ok(Some(*precise)); + // If the URL is already precise or we know the precise commit, return it. + if let Some(precise) = self.get_precise(url) { + return Ok(Some(precise)); } // If the URL is a GitHub URL, attempt to resolve it via the GitHub API. @@ -72,13 +88,21 @@ impl GitResolver { return Ok(None); }; + // Check if we're rate-limited by GitHub, before determining the Git reference + if GITHUB_RATE_LIMIT_STATUS.is_active() { + debug!("Rate-limited by GitHub. Skipping GitHub fast path attempt for: {url}"); + return Ok(None); + } + // Determine the Git reference. 
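The new `rate_limit` module acts as a process-global, one-way latch: once GitHub answers with a rate-limit status, every later fast-path attempt in the same invocation is skipped. A standalone sketch of that pattern:

use std::sync::atomic::{AtomicBool, Ordering};

// One-way latch: once set, it stays set for the remainder of the process.
static RATE_LIMITED: AtomicBool = AtomicBool::new(false);

fn mark_rate_limited() {
    RATE_LIMITED.store(true, Ordering::Relaxed);
}

fn is_rate_limited() -> bool {
    RATE_LIMITED.load(Ordering::Relaxed)
}

fn main() {
    assert!(!is_rate_limited());
    // e.g. after observing an HTTP 403 or 429 from the GitHub API:
    mark_rate_limited();
    assert!(is_rate_limited());
}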
let rev = url.reference().as_rev(); - let url = format!("https://api.github.com/repos/{owner}/{repo}/commits/{rev}"); + let github_api_base_url = std::env::var(EnvVars::UV_GITHUB_FAST_PATH_URL) + .unwrap_or("https://api.github.com/repos".to_owned()); + let github_api_url = format!("{github_api_base_url}/{owner}/{repo}/commits/{rev}"); - debug!("Querying GitHub for commit at: {url}"); - let mut request = client.get(&url); + debug!("Querying GitHub for commit at: {github_api_url}"); + let mut request = client.get(&github_api_url); request = request.header("Accept", "application/vnd.github.3.sha"); request = request.header( "User-Agent", @@ -86,13 +110,20 @@ impl GitResolver { ); let response = request.send().await?; - if !response.status().is_success() { + let status = response.status(); + if !status.is_success() { // Returns a 404 if the repository does not exist, and a 422 if GitHub is unable to // resolve the requested rev. debug!( - "GitHub API request failed for: {url} ({})", + "GitHub API request failed for: {github_api_url} ({})", response.status() ); + + if is_github_rate_limited(&response) { + // Mark that we are being rate-limited by GitHub + GITHUB_RATE_LIMIT_STATUS.activate(); + } + return Ok(None); } @@ -103,7 +134,7 @@ impl GitResolver { // Insert the resolved URL into the in-memory cache. This ensures that subsequent fetches // resolve to the same precise commit. - self.insert(reference, precise); + self.insert(RepositoryReference::from(url), precise); Ok(Some(precise)) } @@ -112,7 +143,7 @@ impl GitResolver { pub async fn fetch( &self, url: &GitUrl, - client: ClientWithMiddleware, + client: impl Into, disable_ssl: bool, offline: bool, cache: PathBuf, diff --git a/crates/uv-git/src/source.rs b/crates/uv-git/src/source.rs index 6c6b38072..cb6d0a24f 100644 --- a/crates/uv-git/src/source.rs +++ b/crates/uv-git/src/source.rs @@ -9,14 +9,13 @@ use std::sync::Arc; use anyhow::Result; use reqwest_middleware::ClientWithMiddleware; use tracing::{debug, instrument}; -use url::Url; use uv_cache_key::{RepositoryUrl, cache_digest}; -use uv_git_types::GitUrl; -use uv_redacted::redacted_url; +use uv_git_types::{GitOid, GitReference, GitUrl}; +use uv_redacted::DisplaySafeUrl; use crate::GIT_STORE; -use crate::git::GitRemote; +use crate::git::{GitDatabase, GitRemote}; /// A remote Git source that can be checked out locally. pub struct GitSource { @@ -87,43 +86,59 @@ impl GitSource { Cow::Borrowed(self.git.repository()) }; - let remote = GitRemote::new(&remote); - let (db, actual_rev, task) = match (self.git.precise(), remote.db_at(&db_path).ok()) { - // If we have a locked revision, and we have a preexisting database - // which has that revision, then no update needs to happen. - (Some(rev), Some(db)) if db.contains(rev) => { - debug!("Using existing Git source `{}`", self.git.repository()); - (db, rev, None) + // Fetch the commit, if we don't already have it. Wrapping this section in a closure makes + // it easier to short-circuit this in the cases where we do have the commit. + let (db, actual_rev, maybe_task) = || -> Result<(GitDatabase, GitOid, Option)> { + let git_remote = GitRemote::new(&remote); + let maybe_db = git_remote.db_at(&db_path).ok(); + + // If we have a locked revision, and we have a pre-existing database which has that + // revision, then no update needs to happen. 
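The fast-path endpoint itself is just an overridable base (`UV_GITHUB_FAST_PATH_URL`, falling back to `https://api.github.com/repos`) joined with `owner/repo/commits/rev`. A small sketch of that assembly, with a hypothetical helper name:

fn github_commit_url(base: Option<&str>, owner: &str, repo: &str, rev: &str) -> String {
    // `base` stands in for the UV_GITHUB_FAST_PATH_URL override.
    let base = base.unwrap_or("https://api.github.com/repos");
    format!("{base}/{owner}/{repo}/commits/{rev}")
}

fn main() {
    assert_eq!(
        github_commit_url(None, "astral-sh", "uv", "main"),
        "https://api.github.com/repos/astral-sh/uv/commits/main"
    );
    assert_eq!(
        github_commit_url(Some("http://localhost:3000/repos"), "astral-sh", "uv", "main"),
        "http://localhost:3000/repos/astral-sh/uv/commits/main"
    );
}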
+ if let (Some(rev), Some(db)) = (self.git.precise(), &maybe_db) { + if db.contains(rev) { + debug!("Using existing Git source `{}`", self.git.repository()); + return Ok((maybe_db.unwrap(), rev, None)); + } } - // ... otherwise we use this state to update the git database. Note - // that we still check for being offline here, for example in the - // situation that we have a locked revision but the database - // doesn't have it. - (locked_rev, db) => { - debug!( - "Updating Git source `{}`", - redacted_url(self.git.repository()) - ); - - // Report the checkout operation to the reporter. - let task = self.reporter.as_ref().map(|reporter| { - reporter.on_checkout_start(remote.url(), self.git.reference().as_rev()) - }); - - let (db, actual_rev) = remote.checkout( - &db_path, - db, - self.git.reference(), - locked_rev, - &self.client, - self.disable_ssl, - self.offline, - )?; - - (db, actual_rev, task) + // If the revision isn't locked, but it looks like it might be an exact commit hash, + // and we do have a pre-existing database, then check whether it is, in fact, a commit + // hash. If so, treat it like it's locked. + if let Some(db) = &maybe_db { + if let GitReference::BranchOrTagOrCommit(maybe_commit) = self.git.reference() { + if let Ok(oid) = maybe_commit.parse::() { + if db.contains(oid) { + // This reference is an exact commit. Treat it like it's + // locked. + debug!("Using existing Git source `{}`", self.git.repository()); + return Ok((maybe_db.unwrap(), oid, None)); + } + } + } } - }; + + // ... otherwise, we use this state to update the Git database. Note that we still check + // for being offline here, for example in the situation that we have a locked revision + // but the database doesn't have it. + debug!("Updating Git source `{}`", self.git.repository()); + + // Report the checkout operation to the reporter. + let task = self.reporter.as_ref().map(|reporter| { + reporter.on_checkout_start(git_remote.url(), self.git.reference().as_rev()) + }); + + let (db, actual_rev) = git_remote.checkout( + &db_path, + maybe_db, + self.git.reference(), + self.git.precise(), + &self.client, + self.disable_ssl, + self.offline, + )?; + + Ok((db, actual_rev, task)) + }()?; // Don’t use the full hash, in order to contribute less to reaching the // path length limit on Windows. @@ -141,9 +156,9 @@ impl GitSource { db.copy_to(actual_rev, &checkout_path)?; // Report the checkout operation to the reporter. - if let Some(task) = task { + if let Some(task) = maybe_task { if let Some(reporter) = self.reporter.as_ref() { - reporter.on_checkout_complete(remote.url(), actual_rev.as_str(), task); + reporter.on_checkout_complete(remote.as_ref(), actual_rev.as_str(), task); } } @@ -181,8 +196,8 @@ impl Fetch { pub trait Reporter: Send + Sync { /// Callback to invoke when a repository checkout begins. - fn on_checkout_start(&self, url: &Url, rev: &str) -> usize; + fn on_checkout_start(&self, url: &DisplaySafeUrl, rev: &str) -> usize; /// Callback to invoke when a repository checkout completes. - fn on_checkout_complete(&self, url: &Url, rev: &str, index: usize); + fn on_checkout_complete(&self, url: &DisplaySafeUrl, rev: &str, index: usize); } diff --git a/crates/uv-globfilter/src/glob_dir_filter.rs b/crates/uv-globfilter/src/glob_dir_filter.rs index eaca7ee0e..e963ee86e 100644 --- a/crates/uv-globfilter/src/glob_dir_filter.rs +++ b/crates/uv-globfilter/src/glob_dir_filter.rs @@ -85,7 +85,7 @@ impl GlobDirFilter { /// don't end up including any child. 
pub fn match_directory(&self, path: &Path) -> bool { let Some(dfa) = &self.dfa else { - return false; + return true; }; // Allow the root path diff --git a/crates/uv-install-wheel/src/wheel.rs b/crates/uv-install-wheel/src/wheel.rs index 0661aa6b2..250143016 100644 --- a/crates/uv-install-wheel/src/wheel.rs +++ b/crates/uv-install-wheel/src/wheel.rs @@ -1,6 +1,6 @@ use std::collections::HashMap; use std::io; -use std::io::{BufReader, Read, Seek, Write}; +use std::io::{BufReader, Read, Write}; use std::path::{Path, PathBuf}; use data_encoding::BASE64URL_NOPAD; @@ -144,7 +144,7 @@ fn format_shebang(executable: impl AsRef, os_name: &str, relocatable: bool /// /// fn get_script_executable(python_executable: &Path, is_gui: bool) -> PathBuf { - // Only check for pythonw.exe on Windows + // Only check for `pythonw.exe` on Windows. if cfg!(windows) && is_gui { python_executable .file_name() @@ -431,22 +431,41 @@ fn install_script( Err(err) => return Err(Error::Io(err)), } let size_and_encoded_hash = if start == placeholder_python { - let is_gui = { - let mut buf = vec![0; 1]; - script.read_exact(&mut buf)?; - if buf == b"w" { - true - } else { - script.seek_relative(-1)?; - false + // Read the rest of the first line, one byte at a time, until we hit a newline. + let mut is_gui = false; + let mut first = true; + let mut byte = [0u8; 1]; + loop { + match script.read_exact(&mut byte) { + Ok(()) => { + if byte[0] == b'\n' || byte[0] == b'\r' { + break; + } + + // Check if this is a GUI script (starts with 'w'). + if first { + is_gui = byte[0] == b'w'; + first = false; + } + } + Err(err) if err.kind() == io::ErrorKind::UnexpectedEof => break, + Err(err) => return Err(Error::Io(err)), } - }; + } + let executable = get_script_executable(&layout.sys_executable, is_gui); let executable = get_relocatable_executable(executable, layout, relocatable)?; - let start = format_shebang(&executable, &layout.os_name, relocatable) + let mut start = format_shebang(&executable, &layout.os_name, relocatable) .as_bytes() .to_vec(); + // Use appropriate line ending for the platform. + if layout.os_name == "nt" { + start.extend_from_slice(b"\r\n"); + } else { + start.push(b'\n'); + } + let mut target = uv_fs::tempfile_in(&layout.scheme.scripts)?; let size_and_encoded_hash = copy_and_hash(&mut start.chain(script), &mut target)?; @@ -569,12 +588,20 @@ pub(crate) fn install_data( match path.file_name().and_then(|name| name.to_str()) { Some("data") => { - trace!(?dist_name, "Installing data/data"); + trace!( + ?dist_name, + "Installing data/data to {}", + layout.scheme.data.user_display() + ); // Move the content of the folder to the root of the venv move_folder_recorded(&path, &layout.scheme.data, site_packages, record)?; } Some("scripts") => { - trace!(?dist_name, "Installing data/scripts"); + trace!( + ?dist_name, + "Installing data/scripts to {}", + layout.scheme.scripts.user_display() + ); let mut rename_or_copy = RenameOrCopy::default(); let mut initialized = false; for file in fs::read_dir(path)? 
{ @@ -613,16 +640,28 @@ pub(crate) fn install_data( } } Some("headers") => { - trace!(?dist_name, "Installing data/headers"); let target_path = layout.scheme.include.join(dist_name.as_str()); + trace!( + ?dist_name, + "Installing data/headers to {}", + target_path.user_display() + ); move_folder_recorded(&path, &target_path, site_packages, record)?; } Some("purelib") => { - trace!(?dist_name, "Installing data/purelib"); + trace!( + ?dist_name, + "Installing data/purelib to {}", + layout.scheme.purelib.user_display() + ); move_folder_recorded(&path, &layout.scheme.purelib, site_packages, record)?; } Some("platlib") => { - trace!(?dist_name, "Installing data/platlib"); + trace!( + ?dist_name, + "Installing data/platlib to {}", + layout.scheme.platlib.user_display() + ); move_folder_recorded(&path, &layout.scheme.platlib, site_packages, record)?; } _ => { diff --git a/crates/uv-installer/Cargo.toml b/crates/uv-installer/Cargo.toml index 02ab10b43..a78dec23b 100644 --- a/crates/uv-installer/Cargo.toml +++ b/crates/uv-installer/Cargo.toml @@ -31,6 +31,7 @@ uv-pep508 = { workspace = true } uv-platform-tags = { workspace = true } uv-pypi-types = { workspace = true } uv-python = { workspace = true } +uv-redacted = { workspace = true } uv-static = { workspace = true } uv-types = { workspace = true } uv-warnings = { workspace = true } diff --git a/crates/uv-installer/src/preparer.rs b/crates/uv-installer/src/preparer.rs index eaf3b5b6d..7181fa454 100644 --- a/crates/uv-installer/src/preparer.rs +++ b/crates/uv-installer/src/preparer.rs @@ -3,7 +3,6 @@ use std::sync::Arc; use futures::{FutureExt, Stream, TryFutureExt, TryStreamExt, stream::FuturesUnordered}; use tracing::{debug, instrument}; -use url::Url; use uv_cache::Cache; use uv_configuration::BuildOptions; @@ -14,6 +13,7 @@ use uv_distribution_types::{ }; use uv_pep508::PackageName; use uv_platform_tags::Tags; +use uv_redacted::DisplaySafeUrl; use uv_types::{BuildContext, HashStrategy, InFlight}; /// Prepare distributions for installation. @@ -268,10 +268,10 @@ pub trait Reporter: Send + Sync { fn on_build_complete(&self, source: &BuildableSource, id: usize); /// Callback to invoke when a repository checkout begins. - fn on_checkout_start(&self, url: &Url, rev: &str) -> usize; + fn on_checkout_start(&self, url: &DisplaySafeUrl, rev: &str) -> usize; /// Callback to invoke when a repository checkout completes. 
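Back in the `uv-install-wheel` change, the placeholder shebang line is now consumed one byte at a time up to the first newline, and a leading `w` marks a GUI script. A sketch of that loop over any `Read` source (illustrative names, not the crate's API):

use std::io::{self, Read};

fn read_gui_flag_and_skip_line(reader: &mut impl Read) -> io::Result<bool> {
    let mut is_gui = false;
    let mut first = true;
    let mut byte = [0u8; 1];
    loop {
        match reader.read_exact(&mut byte) {
            Ok(()) => {
                if byte[0] == b'\n' || byte[0] == b'\r' {
                    break;
                }
                // Only the first byte after the placeholder decides GUI vs. console.
                if first {
                    is_gui = byte[0] == b'w';
                    first = false;
                }
            }
            // A script with no newline at all still parses; we just hit EOF.
            Err(err) if err.kind() == io::ErrorKind::UnexpectedEof => break,
            Err(err) => return Err(err),
        }
    }
    Ok(is_gui)
}

fn main() -> io::Result<()> {
    // The `#!python` placeholder has already been consumed; `w.exe` marks a GUI script.
    let mut rest = &b"w.exe\nimport sys"[..];
    assert!(read_gui_flag_and_skip_line(&mut rest)?);
    Ok(())
}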
- fn on_checkout_complete(&self, url: &Url, rev: &str, index: usize); + fn on_checkout_complete(&self, url: &DisplaySafeUrl, rev: &str, index: usize); } impl dyn Reporter { @@ -299,11 +299,11 @@ impl uv_distribution::Reporter for Facade { self.reporter.on_build_complete(source, id); } - fn on_checkout_start(&self, url: &Url, rev: &str) -> usize { + fn on_checkout_start(&self, url: &DisplaySafeUrl, rev: &str) -> usize { self.reporter.on_checkout_start(url, rev) } - fn on_checkout_complete(&self, url: &Url, rev: &str, index: usize) { + fn on_checkout_complete(&self, url: &DisplaySafeUrl, rev: &str, index: usize) { self.reporter.on_checkout_complete(url, rev, index); } diff --git a/crates/uv-installer/src/site_packages.rs b/crates/uv-installer/src/site_packages.rs index 5351e04a8..d0ed782ff 100644 --- a/crates/uv-installer/src/site_packages.rs +++ b/crates/uv-installer/src/site_packages.rs @@ -6,7 +6,6 @@ use std::path::PathBuf; use anyhow::{Context, Result}; use fs_err as fs; use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet}; -use url::Url; use uv_distribution_types::{ Diagnostic, InstalledDist, Name, NameRequirementSpecification, Requirement, @@ -18,6 +17,7 @@ use uv_pep440::{Version, VersionSpecifiers}; use uv_pep508::VersionOrUrl; use uv_pypi_types::{ResolverMarkerEnvironment, VerbatimParsedUrl}; use uv_python::{Interpreter, PythonEnvironment}; +use uv_redacted::DisplaySafeUrl; use uv_types::InstalledPackagesProvider; use uv_warnings::warn_user; @@ -38,7 +38,7 @@ pub struct SitePackages { /// virtual environment, which we handle gracefully. by_name: FxHashMap>, /// The installed editable distributions, keyed by URL. - by_url: FxHashMap>, + by_url: FxHashMap>, } impl SitePackages { @@ -174,7 +174,7 @@ impl SitePackages { } /// Returns the distributions installed from the given URL, if any. - pub fn get_urls(&self, url: &Url) -> Vec<&InstalledDist> { + pub fn get_urls(&self, url: &DisplaySafeUrl) -> Vec<&InstalledDist> { let Some(indexes) = self.by_url.get(url) else { return Vec::new(); }; diff --git a/crates/uv-pep440/Readme.md b/crates/uv-pep440/Readme.md index b8fd4015f..cc9281738 100644 --- a/crates/uv-pep440/Readme.md +++ b/crates/uv-pep440/Readme.md @@ -26,20 +26,19 @@ PEP 440 has a lot of unintuitive features, including: - An epoch that you can prefix the version with, e.g., `1!1.2.3`. Lower epoch always means lower version (`1.0 <=2!0.1`) - -* post versions, which can be attached to both stable releases and pre-releases -* dev versions, which can be attached to sbpth table releases and pre-releases. When attached to a +- Post versions, which can be attached to both stable releases and pre-releases +- Dev versions, which can be attached to sbpth table releases and pre-releases. When attached to a pre-release the dev version is ordered just below the normal pre-release, however when attached to a stable version, the dev version is sorted before a pre-releases -* pre-release handling is a mess: "Pre-releases of any kind, including developmental releases, are +- Pre-release handling is a mess: "Pre-releases of any kind, including developmental releases, are implicitly excluded from all version specifiers, unless they are already present on the system, explicitly requested by the user, or if the only available version that satisfies the version specifier is a pre-release.". This means that we can't say whether a specifier matches without also looking at the environment -* pre-release vs. pre-release incl. 
dev is fuzzy -* local versions on top of all the others, which are added with a + and have implicitly typed string +- Pre-release vs. pre-release incl. dev is fuzzy +- Local versions on top of all the others, which are added with a + and have implicitly typed string and number segments -* no semver-caret (`^`), but a pseudo-semver tilde (`~=`) -* ordering contradicts matching: We have, e.g., `1.0+local > 1.0` when sorting, but `==1.0` matches +- No semver-caret (`^`), but a pseudo-semver tilde (`~=`) +- Ordering contradicts matching: We have, e.g., `1.0+local > 1.0` when sorting, but `==1.0` matches `1.0+local`. While the ordering of versions itself is a total order the version matching needs to catch all sorts of special cases diff --git a/crates/uv-pep440/src/lib.rs b/crates/uv-pep440/src/lib.rs index 3d2e256ae..40c7d97c6 100644 --- a/crates/uv-pep440/src/lib.rs +++ b/crates/uv-pep440/src/lib.rs @@ -29,12 +29,12 @@ pub use version_ranges::{ }; pub use { version::{ - LocalSegment, LocalVersion, LocalVersionSlice, MIN_VERSION, Operator, OperatorParseError, - Prerelease, PrereleaseKind, Version, VersionParseError, VersionPattern, + BumpCommand, LocalSegment, LocalVersion, LocalVersionSlice, MIN_VERSION, Operator, + OperatorParseError, Prerelease, PrereleaseKind, Version, VersionParseError, VersionPattern, VersionPatternParseError, }, version_specifier::{ - VersionSpecifier, VersionSpecifierBuildError, VersionSpecifiers, + TildeVersionSpecifier, VersionSpecifier, VersionSpecifierBuildError, VersionSpecifiers, VersionSpecifiersParseError, }, }; diff --git a/crates/uv-pep440/src/version.rs b/crates/uv-pep440/src/version.rs index 59f090927..223701692 100644 --- a/crates/uv-pep440/src/version.rs +++ b/crates/uv-pep440/src/version.rs @@ -610,6 +610,24 @@ impl Version { Self::new(self.release().iter().copied()) } + /// Return the version with any segments apart from the release removed, with trailing zeroes + /// trimmed. + #[inline] + #[must_use] + pub fn only_release_trimmed(&self) -> Self { + if let Some(last_non_zero) = self.release().iter().rposition(|segment| *segment != 0) { + if last_non_zero == self.release().len() { + // Already trimmed. + self.clone() + } else { + Self::new(self.release().iter().take(last_non_zero + 1).copied()) + } + } else { + // `0` is a valid version. + Self::new([0]) + } + } + /// Return the version with trailing `.0` release segments removed. /// /// # Panics @@ -625,6 +643,90 @@ impl Version { self.with_release(release) } + /// Various "increment the version" operations + pub fn bump(&mut self, bump: BumpCommand) { + // This code operates on the understanding that the components of a version form + // the following hierarchy: + // + // major > minor > patch > stable > pre > post > dev + // + // Any updates to something earlier in the hierarchy should clear all values lower + // in the hierarchy. So for instance: + // + // if you bump `minor`, then clear: patch, pre, post, dev + // if you bump `pre`, then clear: post, dev + // + // ...and so on. + // + // If you bump a value that doesn't exist, it will be set to "1". + // + // The special "stable" mode has no value, bumping it clears: pre, post, dev. 
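The hierarchy spelled out in the comment above (major > minor > patch > stable > pre > post > dev) means that bumping a release segment zeroes everything to its right and clears the pre/post/dev components. A toy model of just the release-bump case, independent of uv's `Version` type:

#[derive(Debug)]
struct Parts {
    release: Vec<u64>,
    pre: Option<u64>,
    post: Option<u64>,
    dev: Option<u64>,
}

fn bump_release(parts: &mut Parts, index: usize) {
    // Anything below the release component is cleared.
    parts.pre = None;
    parts.post = None;
    parts.dev = None;
    let old = std::mem::take(&mut parts.release);
    let len = old.len().max(index + 1);
    parts.release = (0..len)
        .map(|i| match i.cmp(&index) {
            // Segments before the bumped one are preserved (or implicitly 0).
            std::cmp::Ordering::Less => old.get(i).copied().unwrap_or(0),
            // The bumped segment itself (could be an implicit 0).
            std::cmp::Ordering::Equal => old.get(i).copied().unwrap_or(0) + 1,
            // Everything after it becomes 0.
            std::cmp::Ordering::Greater => 0,
        })
        .collect();
}

fn main() {
    let mut v = Parts { release: vec![1, 7, 3], pre: Some(2), post: None, dev: Some(4) };
    bump_release(&mut v, 1); // "minor" bump: 1.7.3b2.dev4 -> 1.8.0
    assert_eq!(v.release, vec![1, 8, 0]);
    assert_eq!((v.pre, v.post, v.dev), (None, None, None));
}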
+ let full = self.make_full(); + + match bump { + BumpCommand::BumpRelease { index } => { + // Clear all sub-release items + full.pre = None; + full.post = None; + full.dev = None; + + // Use `max` here to try to do 0.2 => 0.3 instead of 0.2 => 0.3.0 + let old_parts = &full.release; + let len = old_parts.len().max(index + 1); + let new_release_vec = (0..len) + .map(|i| match i.cmp(&index) { + // Everything before the bumped value is preserved (or is an implicit 0) + Ordering::Less => old_parts.get(i).copied().unwrap_or(0), + // This is the value to bump (could be implicit 0) + Ordering::Equal => old_parts.get(i).copied().unwrap_or(0) + 1, + // Everything after the bumped value becomes 0 + Ordering::Greater => 0, + }) + .collect::>(); + full.release = new_release_vec; + } + BumpCommand::MakeStable => { + // Clear all sub-release items + full.pre = None; + full.post = None; + full.dev = None; + } + BumpCommand::BumpPrerelease { kind } => { + // Clear all sub-prerelease items + full.post = None; + full.dev = None; + + // Either bump the matching kind or set to 1 + if let Some(prerelease) = &mut full.pre { + if prerelease.kind == kind { + prerelease.number += 1; + return; + } + } + full.pre = Some(Prerelease { kind, number: 1 }); + } + BumpCommand::BumpPost => { + // Clear sub-post items + full.dev = None; + + // Either bump or set to 1 + if let Some(post) = &mut full.post { + *post += 1; + } else { + full.post = Some(1); + } + } + BumpCommand::BumpDev => { + // Either bump or set to 1 + if let Some(dev) = &mut full.dev { + *dev += 1; + } else { + full.dev = Some(1); + } + } + } + } + /// Set the min-release component and return the updated version. /// /// The "min" component is internal-only, and does not exist in PEP 440. @@ -762,41 +864,38 @@ impl Serialize for Version { /// Shows normalized version impl std::fmt::Display for Version { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let epoch = if self.epoch() == 0 { - String::new() - } else { - format!("{}!", self.epoch()) - }; - let release = self - .release() - .iter() - .map(ToString::to_string) - .collect::>() - .join("."); - let pre = self - .pre() - .as_ref() - .map(|Prerelease { kind, number }| format!("{kind}{number}")) - .unwrap_or_default(); - let post = self - .post() - .map(|post| format!(".post{post}")) - .unwrap_or_default(); - let dev = self - .dev() - .map(|dev| format!(".dev{dev}")) - .unwrap_or_default(); - let local = if self.local().is_empty() { - String::new() - } else { + if self.epoch() != 0 { + write!(f, "{}!", self.epoch())?; + } + let release = self.release(); + let mut release_iter = release.iter(); + if let Some(first) = release_iter.next() { + write!(f, "{first}")?; + for n in release_iter { + write!(f, ".{n}")?; + } + } + + if let Some(Prerelease { kind, number }) = self.pre() { + write!(f, "{kind}{number}")?; + } + if let Some(post) = self.post() { + write!(f, ".post{post}")?; + } + if let Some(dev) = self.dev() { + write!(f, ".dev{dev}")?; + } + if !self.local().is_empty() { match self.local() { LocalVersionSlice::Segments(_) => { - format!("+{}", self.local()) + write!(f, "+{}", self.local())?; + } + LocalVersionSlice::Max => { + write!(f, "+")?; } - LocalVersionSlice::Max => "+".to_string(), } - }; - write!(f, "{epoch}{release}{pre}{post}{dev}{local}") + } + Ok(()) } } @@ -864,6 +963,27 @@ impl FromStr for Version { } } +/// Various ways to "bump" a version +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +pub enum BumpCommand { + /// Bump the release component + 
BumpRelease { + /// The release component to bump (0 is major, 1 is minor, 2 is patch) + index: usize, + }, + /// Bump the prerelease component + BumpPrerelease { + /// prerelease component to bump + kind: PrereleaseKind, + }, + /// Bump to the associated stable release + MakeStable, + /// Bump the post component + BumpPost, + /// Bump the dev component + BumpDev, +} + /// A small representation of a version. /// /// This representation is used for a (very common) subset of versions: the @@ -4028,4 +4148,351 @@ mod tests { assert_eq!(size_of::(), size_of::() * 2); assert_eq!(size_of::(), size_of::() * 2); } + + /// Test major bumping + /// Explicitly using the string display because we want to preserve formatting where possible! + #[test] + fn bump_major() { + // one digit + let mut version = "0".parse::().unwrap(); + version.bump(BumpCommand::BumpRelease { index: 0 }); + assert_eq!(version.to_string().as_str(), "1"); + + // two digit + let mut version = "1.5".parse::().unwrap(); + version.bump(BumpCommand::BumpRelease { index: 0 }); + assert_eq!(version.to_string().as_str(), "2.0"); + + // three digit (zero major) + let mut version = "0.1.2".parse::().unwrap(); + version.bump(BumpCommand::BumpRelease { index: 0 }); + assert_eq!(version.to_string().as_str(), "1.0.0"); + + // three digit (non-zero major) + let mut version = "1.2.3".parse::().unwrap(); + version.bump(BumpCommand::BumpRelease { index: 0 }); + assert_eq!(version.to_string().as_str(), "2.0.0"); + + // four digit + let mut version = "1.2.3.4".parse::().unwrap(); + version.bump(BumpCommand::BumpRelease { index: 0 }); + assert_eq!(version.to_string().as_str(), "2.0.0.0"); + + // All the version junk + let mut version = "5!1.7.3.5b2.post345.dev456+local" + .parse::() + .unwrap(); + version.bump(BumpCommand::BumpRelease { index: 0 }); + assert_eq!(version.to_string().as_str(), "5!2.0.0.0+local"); + version.bump(BumpCommand::BumpRelease { index: 0 }); + assert_eq!(version.to_string().as_str(), "5!3.0.0.0+local"); + } + + /// Test minor bumping + /// Explicitly using the string display because we want to preserve formatting where possible! + #[test] + fn bump_minor() { + // one digit + let mut version = "0".parse::().unwrap(); + version.bump(BumpCommand::BumpRelease { index: 1 }); + assert_eq!(version.to_string().as_str(), "0.1"); + + // two digit + let mut version = "1.5".parse::().unwrap(); + version.bump(BumpCommand::BumpRelease { index: 1 }); + assert_eq!(version.to_string().as_str(), "1.6"); + + // three digit (non-zero major) + let mut version = "5.3.6".parse::().unwrap(); + version.bump(BumpCommand::BumpRelease { index: 1 }); + assert_eq!(version.to_string().as_str(), "5.4.0"); + + // four digit + let mut version = "1.2.3.4".parse::().unwrap(); + version.bump(BumpCommand::BumpRelease { index: 1 }); + assert_eq!(version.to_string().as_str(), "1.3.0.0"); + + // All the version junk + let mut version = "5!1.7.3.5b2.post345.dev456+local" + .parse::() + .unwrap(); + version.bump(BumpCommand::BumpRelease { index: 1 }); + assert_eq!(version.to_string().as_str(), "5!1.8.0.0+local"); + version.bump(BumpCommand::BumpRelease { index: 1 }); + assert_eq!(version.to_string().as_str(), "5!1.9.0.0+local"); + } + + /// Test patch bumping + /// Explicitly using the string display because we want to preserve formatting where possible! 
+ #[test] + fn bump_patch() { + // one digit + let mut version = "0".parse::().unwrap(); + version.bump(BumpCommand::BumpRelease { index: 2 }); + assert_eq!(version.to_string().as_str(), "0.0.1"); + + // two digit + let mut version = "1.5".parse::().unwrap(); + version.bump(BumpCommand::BumpRelease { index: 2 }); + assert_eq!(version.to_string().as_str(), "1.5.1"); + + // three digit + let mut version = "5.3.6".parse::().unwrap(); + version.bump(BumpCommand::BumpRelease { index: 2 }); + assert_eq!(version.to_string().as_str(), "5.3.7"); + + // four digit + let mut version = "1.2.3.4".parse::().unwrap(); + version.bump(BumpCommand::BumpRelease { index: 2 }); + assert_eq!(version.to_string().as_str(), "1.2.4.0"); + + // All the version junk + let mut version = "5!1.7.3.5b2.post345.dev456+local" + .parse::() + .unwrap(); + version.bump(BumpCommand::BumpRelease { index: 2 }); + assert_eq!(version.to_string().as_str(), "5!1.7.4.0+local"); + version.bump(BumpCommand::BumpRelease { index: 2 }); + assert_eq!(version.to_string().as_str(), "5!1.7.5.0+local"); + } + + /// Test alpha bumping + /// Explicitly using the string display because we want to preserve formatting where possible! + #[test] + fn bump_alpha() { + // one digit + let mut version = "0".parse::().unwrap(); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Alpha, + }); + assert_eq!(version.to_string().as_str(), "0a1"); + + // two digit + let mut version = "1.5".parse::().unwrap(); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Alpha, + }); + assert_eq!(version.to_string().as_str(), "1.5a1"); + + // three digit + let mut version = "5.3.6".parse::().unwrap(); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Alpha, + }); + assert_eq!(version.to_string().as_str(), "5.3.6a1"); + + // four digit + let mut version = "1.2.3.4".parse::().unwrap(); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Alpha, + }); + assert_eq!(version.to_string().as_str(), "1.2.3.4a1"); + + // All the version junk + let mut version = "5!1.7.3.5b2.post345.dev456+local" + .parse::() + .unwrap(); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Alpha, + }); + assert_eq!(version.to_string().as_str(), "5!1.7.3.5a1+local"); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Alpha, + }); + assert_eq!(version.to_string().as_str(), "5!1.7.3.5a2+local"); + } + + /// Test beta bumping + /// Explicitly using the string display because we want to preserve formatting where possible! 
+ #[test] + fn bump_beta() { + // one digit + let mut version = "0".parse::().unwrap(); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Beta, + }); + assert_eq!(version.to_string().as_str(), "0b1"); + + // two digit + let mut version = "1.5".parse::().unwrap(); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Beta, + }); + assert_eq!(version.to_string().as_str(), "1.5b1"); + + // three digit + let mut version = "5.3.6".parse::().unwrap(); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Beta, + }); + assert_eq!(version.to_string().as_str(), "5.3.6b1"); + + // four digit + let mut version = "1.2.3.4".parse::().unwrap(); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Beta, + }); + assert_eq!(version.to_string().as_str(), "1.2.3.4b1"); + + // All the version junk + let mut version = "5!1.7.3.5a2.post345.dev456+local" + .parse::() + .unwrap(); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Beta, + }); + assert_eq!(version.to_string().as_str(), "5!1.7.3.5b1+local"); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Beta, + }); + assert_eq!(version.to_string().as_str(), "5!1.7.3.5b2+local"); + } + + /// Test rc bumping + /// Explicitly using the string display because we want to preserve formatting where possible! + #[test] + fn bump_rc() { + // one digit + let mut version = "0".parse::().unwrap(); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Rc, + }); + assert_eq!(version.to_string().as_str(), "0rc1"); + + // two digit + let mut version = "1.5".parse::().unwrap(); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Rc, + }); + assert_eq!(version.to_string().as_str(), "1.5rc1"); + + // three digit + let mut version = "5.3.6".parse::().unwrap(); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Rc, + }); + assert_eq!(version.to_string().as_str(), "5.3.6rc1"); + + // four digit + let mut version = "1.2.3.4".parse::().unwrap(); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Rc, + }); + assert_eq!(version.to_string().as_str(), "1.2.3.4rc1"); + + // All the version junk + let mut version = "5!1.7.3.5b2.post345.dev456+local" + .parse::() + .unwrap(); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Rc, + }); + assert_eq!(version.to_string().as_str(), "5!1.7.3.5rc1+local"); + version.bump(BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Rc, + }); + assert_eq!(version.to_string().as_str(), "5!1.7.3.5rc2+local"); + } + + /// Test post bumping + /// Explicitly using the string display because we want to preserve formatting where possible! 
+ #[test] + fn bump_post() { + // one digit + let mut version = "0".parse::().unwrap(); + version.bump(BumpCommand::BumpPost); + assert_eq!(version.to_string().as_str(), "0.post1"); + + // two digit + let mut version = "1.5".parse::().unwrap(); + version.bump(BumpCommand::BumpPost); + assert_eq!(version.to_string().as_str(), "1.5.post1"); + + // three digit + let mut version = "5.3.6".parse::().unwrap(); + version.bump(BumpCommand::BumpPost); + assert_eq!(version.to_string().as_str(), "5.3.6.post1"); + + // four digit + let mut version = "1.2.3.4".parse::().unwrap(); + version.bump(BumpCommand::BumpPost); + assert_eq!(version.to_string().as_str(), "1.2.3.4.post1"); + + // All the version junk + let mut version = "5!1.7.3.5b2.dev123+local".parse::().unwrap(); + version.bump(BumpCommand::BumpPost); + assert_eq!(version.to_string().as_str(), "5!1.7.3.5b2.post1+local"); + version.bump(BumpCommand::BumpPost); + assert_eq!(version.to_string().as_str(), "5!1.7.3.5b2.post2+local"); + } + + /// Test dev bumping + /// Explicitly using the string display because we want to preserve formatting where possible! + #[test] + fn bump_dev() { + // one digit + let mut version = "0".parse::().unwrap(); + version.bump(BumpCommand::BumpDev); + assert_eq!(version.to_string().as_str(), "0.dev1"); + + // two digit + let mut version = "1.5".parse::().unwrap(); + version.bump(BumpCommand::BumpDev); + assert_eq!(version.to_string().as_str(), "1.5.dev1"); + + // three digit + let mut version = "5.3.6".parse::().unwrap(); + version.bump(BumpCommand::BumpDev); + assert_eq!(version.to_string().as_str(), "5.3.6.dev1"); + + // four digit + let mut version = "1.2.3.4".parse::().unwrap(); + version.bump(BumpCommand::BumpDev); + assert_eq!(version.to_string().as_str(), "1.2.3.4.dev1"); + + // All the version junk + let mut version = "5!1.7.3.5b2.post345+local".parse::().unwrap(); + version.bump(BumpCommand::BumpDev); + assert_eq!( + version.to_string().as_str(), + "5!1.7.3.5b2.post345.dev1+local" + ); + version.bump(BumpCommand::BumpDev); + assert_eq!( + version.to_string().as_str(), + "5!1.7.3.5b2.post345.dev2+local" + ); + } + + /// Test stable setting + /// Explicitly using the string display because we want to preserve formatting where possible! 
+ #[test] + fn make_stable() { + // one digit + let mut version = "0".parse::().unwrap(); + version.bump(BumpCommand::MakeStable); + assert_eq!(version.to_string().as_str(), "0"); + + // two digit + let mut version = "1.5".parse::().unwrap(); + version.bump(BumpCommand::MakeStable); + assert_eq!(version.to_string().as_str(), "1.5"); + + // three digit + let mut version = "5.3.6".parse::().unwrap(); + version.bump(BumpCommand::MakeStable); + assert_eq!(version.to_string().as_str(), "5.3.6"); + + // four digit + let mut version = "1.2.3.4".parse::().unwrap(); + version.bump(BumpCommand::MakeStable); + assert_eq!(version.to_string().as_str(), "1.2.3.4"); + + // All the version junk + let mut version = "5!1.7.3.5b2.post345+local".parse::().unwrap(); + version.bump(BumpCommand::MakeStable); + assert_eq!(version.to_string().as_str(), "5!1.7.3.5+local"); + version.bump(BumpCommand::MakeStable); + assert_eq!(version.to_string().as_str(), "5!1.7.3.5+local"); + } } diff --git a/crates/uv-pep440/src/version_ranges.rs b/crates/uv-pep440/src/version_ranges.rs index 2bd7dcd4d..38038ffcf 100644 --- a/crates/uv-pep440/src/version_ranges.rs +++ b/crates/uv-pep440/src/version_ranges.rs @@ -132,7 +132,7 @@ impl From for Ranges { pub fn release_specifiers_to_ranges(specifiers: VersionSpecifiers) -> Ranges { let mut range = Ranges::full(); for specifier in specifiers { - range = range.intersection(&release_specifier_to_range(specifier)); + range = range.intersection(&release_specifier_to_range(specifier, false)); } range } @@ -148,67 +148,57 @@ pub fn release_specifiers_to_ranges(specifiers: VersionSpecifiers) -> Ranges -pub fn release_specifier_to_range(specifier: VersionSpecifier) -> Ranges { +pub fn release_specifier_to_range(specifier: VersionSpecifier, trim: bool) -> Ranges { let VersionSpecifier { operator, version } = specifier; + // Note(konsti): We switched strategies to trimmed for the markers, but we don't want to cause + // churn in lockfile requires-python, so we only trim for markers. + let version_trimmed = if trim { + version.only_release_trimmed() + } else { + version.only_release() + }; match operator { - Operator::Equal => { - let version = version.only_release(); - Ranges::singleton(version) - } - Operator::ExactEqual => { - let version = version.only_release(); - Ranges::singleton(version) - } - Operator::NotEqual => { - let version = version.only_release(); - Ranges::singleton(version).complement() - } + // Trailing zeroes are not semantically relevant. + Operator::Equal => Ranges::singleton(version_trimmed), + Operator::ExactEqual => Ranges::singleton(version_trimmed), + Operator::NotEqual => Ranges::singleton(version_trimmed).complement(), + Operator::LessThan => Ranges::strictly_lower_than(version_trimmed), + Operator::LessThanEqual => Ranges::lower_than(version_trimmed), + Operator::GreaterThan => Ranges::strictly_higher_than(version_trimmed), + Operator::GreaterThanEqual => Ranges::higher_than(version_trimmed), + + // Trailing zeroes are semantically relevant. 
Operator::TildeEqual => { let release = version.release(); let [rest @ .., last, _] = &*release else { unreachable!("~= must have at least two segments"); }; let upper = Version::new(rest.iter().chain([&(last + 1)])); - let version = version.only_release(); - Ranges::from_range_bounds(version..upper) - } - Operator::LessThan => { - let version = version.only_release(); - Ranges::strictly_lower_than(version) - } - Operator::LessThanEqual => { - let version = version.only_release(); - Ranges::lower_than(version) - } - Operator::GreaterThan => { - let version = version.only_release(); - Ranges::strictly_higher_than(version) - } - Operator::GreaterThanEqual => { - let version = version.only_release(); - Ranges::higher_than(version) + Ranges::from_range_bounds(version_trimmed..upper) } Operator::EqualStar => { - let low = version.only_release(); + // For (not-)equal-star, trailing zeroes are still before the star. + let low_full = version.only_release(); let high = { - let mut high = low.clone(); + let mut high = low_full.clone(); let mut release = high.release().to_vec(); *release.last_mut().unwrap() += 1; high = high.with_release(release); high }; - Ranges::from_range_bounds(low..high) + Ranges::from_range_bounds(version..high) } Operator::NotEqualStar => { - let low = version.only_release(); + // For (not-)equal-star, trailing zeroes are still before the star. + let low_full = version.only_release(); let high = { - let mut high = low.clone(); + let mut high = low_full.clone(); let mut release = high.release().to_vec(); *release.last_mut().unwrap() += 1; high = high.with_release(release); high }; - Ranges::from_range_bounds(low..high).complement() + Ranges::from_range_bounds(version..high).complement() } } } @@ -223,8 +213,8 @@ impl LowerBound { /// These bounds use release-only semantics when comparing versions. pub fn new(bound: Bound) -> Self { Self(match bound { - Bound::Included(version) => Bound::Included(version.only_release()), - Bound::Excluded(version) => Bound::Excluded(version.only_release()), + Bound::Included(version) => Bound::Included(version.only_release_trimmed()), + Bound::Excluded(version) => Bound::Excluded(version.only_release_trimmed()), Bound::Unbounded => Bound::Unbounded, }) } @@ -358,8 +348,8 @@ impl UpperBound { /// These bounds use release-only semantics when comparing versions. pub fn new(bound: Bound) -> Self { Self(match bound { - Bound::Included(version) => Bound::Included(version.only_release()), - Bound::Excluded(version) => Bound::Excluded(version.only_release()), + Bound::Included(version) => Bound::Included(version.only_release_trimmed()), + Bound::Excluded(version) => Bound::Excluded(version.only_release_trimmed()), Bound::Unbounded => Bound::Unbounded, }) } diff --git a/crates/uv-pep440/src/version_specifier.rs b/crates/uv-pep440/src/version_specifier.rs index 47bde78b7..e111c5118 100644 --- a/crates/uv-pep440/src/version_specifier.rs +++ b/crates/uv-pep440/src/version_specifier.rs @@ -80,24 +80,38 @@ impl VersionSpecifiers { // Add specifiers for the holes between the bounds. 
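The trimmed comparison used for marker ranges treats `3.9.0` and `3.9` as the same release by dropping trailing zero segments while keeping a lone `0`. As a standalone sketch:

fn trimmed(release: &[u64]) -> Vec<u64> {
    match release.iter().rposition(|segment| *segment != 0) {
        Some(last_non_zero) => release[..=last_non_zero].to_vec(),
        // An all-zero (or empty) release trims down to plain `0`.
        None => vec![0],
    }
}

fn main() {
    assert_eq!(trimmed(&[3, 9, 0]), vec![3, 9]);
    assert_eq!(trimmed(&[3, 9]), vec![3, 9]);
    assert_eq!(trimmed(&[0, 0]), vec![0]);
}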
for (lower, upper) in bounds { - match (next, lower) { + let specifier = match (next, lower) { // Ex) [3.7, 3.8.5), (3.8.5, 3.9] -> >=3.7,!=3.8.5,<=3.9 (Bound::Excluded(prev), Bound::Excluded(lower)) if prev == lower => { - specifiers.push(VersionSpecifier::not_equals_version(prev.clone())); + Some(VersionSpecifier::not_equals_version(prev.clone())) } // Ex) [3.7, 3.8), (3.8, 3.9] -> >=3.7,!=3.8.*,<=3.9 - (Bound::Excluded(prev), Bound::Included(lower)) - if prev.release().len() == 2 - && *lower.release() == [prev.release()[0], prev.release()[1] + 1] => - { - specifiers.push(VersionSpecifier::not_equals_star_version(prev.clone())); - } - _ => { - #[cfg(feature = "tracing")] - warn!( - "Ignoring unsupported gap in `requires-python` version: {next:?} -> {lower:?}" - ); + (Bound::Excluded(prev), Bound::Included(lower)) => { + match *prev.only_release_trimmed().release() { + [major] if *lower.only_release_trimmed().release() == [major, 1] => { + Some(VersionSpecifier::not_equals_star_version(Version::new([ + major, 0, + ]))) + } + [major, minor] + if *lower.only_release_trimmed().release() == [major, minor + 1] => + { + Some(VersionSpecifier::not_equals_star_version(Version::new([ + major, minor, + ]))) + } + _ => None, + } } + _ => None, + }; + if let Some(specifier) = specifier { + specifiers.push(specifier); + } else { + #[cfg(feature = "tracing")] + warn!( + "Ignoring unsupported gap in `requires-python` version: {next:?} -> {lower:?}" + ); } next = upper; } @@ -348,6 +362,33 @@ impl VersionSpecifier { Ok(Self { operator, version }) } + /// Remove all non-release parts of the version. + /// + /// The marker decision diagram relies on the assumption that the negation of a marker tree is + /// the complement of the marker space. However, pre-release versions violate this assumption. + /// + /// For example, the marker `python_full_version > '3.9' or python_full_version <= '3.9'` + /// does not match `python_full_version == 3.9.0a0` and so cannot simplify to `true`. However, + /// its negation, `python_full_version > '3.9' and python_full_version <= '3.9'`, also does not + /// match `3.9.0a0` and simplifies to `false`, which violates the algebra decision diagrams + /// rely on. For this reason we ignore pre-release versions entirely when evaluating markers. + /// + /// Note that `python_version` cannot take on pre-release values as it is truncated to just the + /// major and minor version segments. Thus using release-only specifiers is definitely necessary + /// for `python_version` to fully simplify any ranges, such as + /// `python_version > '3.9' or python_version <= '3.9'`, which is always `true` for + /// `python_version`. For `python_full_version` however, this decision is a semantic change. + /// + /// For Python versions, the major.minor is considered the API version, so unlike the rules + /// for package versions in PEP 440, we Python `3.9.0a0` is acceptable for `>= "3.9"`. + #[must_use] + pub fn only_release(self) -> Self { + Self { + operator: self.operator, + version: self.version.only_release(), + } + } + /// `==` pub fn equals_version(version: Version) -> Self { Self { @@ -416,7 +457,7 @@ impl VersionSpecifier { &self.operator } - /// Get the version, e.g. `<=` in `<= 2.0.0` + /// Get the version, e.g. 
`2.0.0` in `<= 2.0.0` pub fn version(&self) -> &Version { &self.version } @@ -442,14 +483,23 @@ impl VersionSpecifier { (Some(VersionSpecifier::equals_version(v1.clone())), None) } // `v >= 3.7 && v < 3.8` is equivalent to `v == 3.7.*` - (Bound::Included(v1), Bound::Excluded(v2)) - if v1.release().len() == 2 - && *v2.release() == [v1.release()[0], v1.release()[1] + 1] => - { - ( - Some(VersionSpecifier::equals_star_version(v1.clone())), - None, - ) + (Bound::Included(v1), Bound::Excluded(v2)) => { + match *v1.only_release_trimmed().release() { + [major] if *v2.only_release_trimmed().release() == [major, 1] => { + let version = Version::new([major, 0]); + (Some(VersionSpecifier::equals_star_version(version)), None) + } + [major, minor] + if *v2.only_release_trimmed().release() == [major, minor + 1] => + { + let version = Version::new([major, minor]); + (Some(VersionSpecifier::equals_star_version(version)), None) + } + _ => ( + VersionSpecifier::from_lower_bound(&Bound::Included(v1.clone())), + VersionSpecifier::from_upper_bound(&Bound::Excluded(v2.clone())), + ), + } } (lower, upper) => ( VersionSpecifier::from_lower_bound(lower), @@ -627,7 +677,15 @@ impl FromStr for VersionSpecifier { // operator but we don't know yet if it has a star let operator = s.eat_while(['=', '!', '~', '<', '>']); if operator.is_empty() { - return Err(ParseErrorKind::MissingOperator.into()); + // Attempt to parse the version from the rest of the scanner to provide a more useful error message in MissingOperator. + // If it is not able to be parsed (i.e. not a valid version), it will just be None and no additional info will be added to the error message. + s.eat_while(|c: char| c.is_whitespace()); + let version = s.eat_while(|c: char| !c.is_whitespace()); + s.eat_while(|c: char| c.is_whitespace()); + return Err(ParseErrorKind::MissingOperator(VersionOperatorBuildError { + version_pattern: VersionPattern::from_str(version).ok(), + }) + .into()); } let operator = Operator::from_str(operator).map_err(ParseErrorKind::InvalidOperator)?; s.eat_while(|c: char| c.is_whitespace()); @@ -695,6 +753,25 @@ impl std::fmt::Display for VersionSpecifierBuildError { } } +#[derive(Clone, Debug, Eq, PartialEq)] +struct VersionOperatorBuildError { + version_pattern: Option, +} + +impl std::error::Error for VersionOperatorBuildError {} + +impl std::fmt::Display for VersionOperatorBuildError { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(f, "Unexpected end of version specifier, expected operator")?; + if let Some(version_pattern) = &self.version_pattern { + let version_specifier = + VersionSpecifier::from_pattern(Operator::Equal, version_pattern.clone()).unwrap(); + write!(f, ". Did you mean `{version_specifier}`?")?; + } + Ok(()) + } +} + /// The specific kind of error that can occur when building a version specifier /// from an operator and version pair. 
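`bounding_specifiers` above follows the usual PEP 440 reading of `~=`: `~=3.13` behaves like `>=3.13, <4`, and `~=3.13.0` like `>=3.13.0, <3.14`. A sketch of the upper-bound computation on raw release segments (hypothetical helper):

fn tilde_upper_bound(release: &[u64]) -> Vec<u64> {
    // Drop the last segment and increment the one before it.
    assert!(release.len() >= 2, "~= requires at least two release segments");
    let mut upper = release[..release.len() - 1].to_vec();
    *upper.last_mut().unwrap() += 1;
    upper
}

fn main() {
    assert_eq!(tilde_upper_bound(&[3, 13]), vec![4]); // ~=3.13   => <4
    assert_eq!(tilde_upper_bound(&[3, 13, 0]), vec![3, 14]); // ~=3.13.0 => <3.14
}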
#[derive(Clone, Debug, Eq, PartialEq)] @@ -748,9 +825,7 @@ impl std::fmt::Display for VersionSpecifierParseError { ParseErrorKind::InvalidOperator(ref err) => err.fmt(f), ParseErrorKind::InvalidVersion(ref err) => err.fmt(f), ParseErrorKind::InvalidSpecifier(ref err) => err.fmt(f), - ParseErrorKind::MissingOperator => { - write!(f, "Unexpected end of version specifier, expected operator") - } + ParseErrorKind::MissingOperator(ref err) => err.fmt(f), ParseErrorKind::MissingVersion => { write!(f, "Unexpected end of version specifier, expected version") } @@ -768,7 +843,7 @@ enum ParseErrorKind { InvalidOperator(OperatorParseError), InvalidVersion(VersionPatternParseError), InvalidSpecifier(VersionSpecifierBuildError), - MissingOperator, + MissingOperator(VersionOperatorBuildError), MissingVersion, InvalidTrailing(String), } @@ -813,6 +888,90 @@ pub(crate) fn parse_version_specifiers( Ok(version_ranges) } +/// A simple `~=` version specifier with a major, minor and (optional) patch version, e.g., `~=3.13` +/// or `~=3.13.0`. +#[derive(Clone, Debug)] +pub struct TildeVersionSpecifier<'a> { + inner: Cow<'a, VersionSpecifier>, +} + +impl<'a> TildeVersionSpecifier<'a> { + /// Create a new [`TildeVersionSpecifier`] from a [`VersionSpecifier`] value. + /// + /// If a [`Operator::TildeEqual`] is not used, or the version includes more than minor and patch + /// segments, this will return [`None`]. + pub fn from_specifier(specifier: VersionSpecifier) -> Option> { + TildeVersionSpecifier::new(Cow::Owned(specifier)) + } + + /// Create a new [`TildeVersionSpecifier`] from a [`VersionSpecifier`] reference. + /// + /// See [`TildeVersionSpecifier::from_specifier`]. + pub fn from_specifier_ref( + specifier: &'a VersionSpecifier, + ) -> Option> { + TildeVersionSpecifier::new(Cow::Borrowed(specifier)) + } + + fn new(specifier: Cow<'a, VersionSpecifier>) -> Option { + if specifier.operator != Operator::TildeEqual { + return None; + } + if specifier.version().release().len() < 2 || specifier.version().release().len() > 3 { + return None; + } + if specifier.version().any_prerelease() + || specifier.version().is_local() + || specifier.version().is_post() + { + return None; + } + Some(Self { inner: specifier }) + } + + /// Whether a patch version is present in this tilde version specifier. + pub fn has_patch(&self) -> bool { + self.inner.version.release().len() == 3 + } + + /// Construct the lower and upper bounding version specifiers for this tilde version specifier, + /// e.g., for `~=3.13` this would return `>=3.13` and `<4` and for `~=3.13.0` it would + /// return `>=3.13.0` and `<3.14`. + pub fn bounding_specifiers(&self) -> (VersionSpecifier, VersionSpecifier) { + let release = self.inner.version().release(); + let lower = self.inner.version.clone(); + let upper = if self.has_patch() { + Version::new([release[0], release[1] + 1]) + } else { + Version::new([release[0] + 1]) + }; + ( + VersionSpecifier::greater_than_equal_version(lower), + VersionSpecifier::less_than_version(upper), + ) + } + + /// Construct a new tilde `VersionSpecifier` with the given patch version appended. 
+ pub fn with_patch_version(&self, patch: u64) -> TildeVersionSpecifier { + let mut release = self.inner.version.release().to_vec(); + if self.has_patch() { + release.pop(); + } + release.push(patch); + TildeVersionSpecifier::from_specifier( + VersionSpecifier::from_version(Operator::TildeEqual, Version::new(release)) + .expect("We should always derive a valid new version specifier"), + ) + .expect("We should always derive a new tilde version specifier") + } +} + +impl std::fmt::Display for TildeVersionSpecifier<'_> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.inner) + } +} + #[cfg(test)] mod tests { use std::{cmp::Ordering, str::FromStr}; @@ -1356,6 +1515,32 @@ mod tests { ); } + #[test] + fn test_parse_specifier_missing_operator_error() { + let result = VersionSpecifiers::from_str("3.12"); + assert_eq!( + result.unwrap_err().to_string(), + indoc! {" + Failed to parse version: Unexpected end of version specifier, expected operator. Did you mean `==3.12`?: + 3.12 + ^^^^ + "} + ); + } + + #[test] + fn test_parse_specifier_missing_operator_invalid_version_error() { + let result = VersionSpecifiers::from_str("blergh"); + assert_eq!( + result.unwrap_err().to_string(), + indoc! {r" + Failed to parse version: Unexpected end of version specifier, expected operator: + blergh + ^^^^^^ + "} + ); + } + #[test] fn test_non_star_after_star() { let result = VersionSpecifiers::from_str("== 0.9.*.1"); @@ -1386,7 +1571,10 @@ mod tests { let result = VersionSpecifiers::from_str("blergh"); assert_eq!( result.unwrap_err().inner.err, - ParseErrorKind::MissingOperator.into(), + ParseErrorKind::MissingOperator(VersionOperatorBuildError { + version_pattern: None + }) + .into(), ); } @@ -1395,7 +1583,13 @@ mod tests { fn test_invalid_specifier() { let specifiers = [ // Operator-less specifier - ("2.0", ParseErrorKind::MissingOperator.into()), + ( + "2.0", + ParseErrorKind::MissingOperator(VersionOperatorBuildError { + version_pattern: VersionPattern::from_str("2.0").ok(), + }) + .into(), + ), // Invalid operator ( "=>2.0", @@ -1735,7 +1929,9 @@ mod tests { fn error_message_version_specifiers_parse_error() { let specs = ">=1.2.3, 5.4.3, >=3.4.5"; let err = VersionSpecifierParseError { - kind: Box::new(ParseErrorKind::MissingOperator), + kind: Box::new(ParseErrorKind::MissingOperator(VersionOperatorBuildError { + version_pattern: VersionPattern::from_str("5.4.3").ok(), + })), }; let inner = Box::new(VersionSpecifiersParseErrorInner { err, @@ -1748,7 +1944,7 @@ mod tests { assert_eq!( err.to_string(), "\ -Failed to parse version: Unexpected end of version specifier, expected operator: +Failed to parse version: Unexpected end of version specifier, expected operator. 
Did you mean `==5.4.3`?: >=1.2.3, 5.4.3, >=3.4.5 ^^^^^^ " diff --git a/crates/uv-pep508/Cargo.toml b/crates/uv-pep508/Cargo.toml index 1fff96287..e9306da00 100644 --- a/crates/uv-pep508/Cargo.toml +++ b/crates/uv-pep508/Cargo.toml @@ -22,6 +22,7 @@ workspace = true uv-fs = { workspace = true } uv-normalize = { workspace = true } uv-pep440 = { workspace = true } +uv-redacted = { workspace = true } arcstr = { workspace = true} boxcar = { workspace = true } @@ -40,7 +41,7 @@ version-ranges = { workspace = true } [dev-dependencies] insta = { version = "1.40.0" } -serde_json = { version = "1.0.128" } +serde_json = { workspace = true } tracing-test = { version = "0.2.5" } [features] diff --git a/crates/uv-pep508/src/lib.rs b/crates/uv-pep508/src/lib.rs index a78678d92..e2945743b 100644 --- a/crates/uv-pep508/src/lib.rs +++ b/crates/uv-pep508/src/lib.rs @@ -16,6 +16,8 @@ #![warn(missing_docs)] +#[cfg(feature = "schemars")] +use std::borrow::Cow; use std::error::Error; use std::fmt::{Debug, Display, Formatter}; use std::path::Path; @@ -144,23 +146,50 @@ impl Requirement { self.version_or_url = None; } } + + /// Returns a [`Display`] implementation that doesn't mask credentials. + pub fn displayable_with_credentials(&self) -> impl Display { + RequirementDisplay { + requirement: self, + display_credentials: true, + } + } } impl Display for Requirement { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.name)?; - if !self.extras.is_empty() { + RequirementDisplay { + requirement: self, + display_credentials: false, + } + .fmt(f) + } +} + +struct RequirementDisplay<'a, T> +where + T: Pep508Url + Display, +{ + requirement: &'a Requirement, + display_credentials: bool, +} + +impl Display for RequirementDisplay<'_, T> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.requirement.name)?; + if !self.requirement.extras.is_empty() { write!( f, "[{}]", - self.extras + self.requirement + .extras .iter() .map(ToString::to_string) .collect::>() .join(",") )?; } - if let Some(version_or_url) = &self.version_or_url { + if let Some(version_or_url) = &self.requirement.version_or_url { match version_or_url { VersionOrUrl::VersionSpecifier(version_specifier) => { let version_specifier: Vec = @@ -168,12 +197,17 @@ impl Display for Requirement { write!(f, "{}", version_specifier.join(","))?; } VersionOrUrl::Url(url) => { + let url_string = if self.display_credentials { + url.displayable_with_credentials().to_string() + } else { + url.to_string() + }; // We add the space for markers later if necessary - write!(f, " @ {url}")?; + write!(f, " @ {url_string}")?; } } } - if let Some(marker) = self.marker.contents() { + if let Some(marker) = self.requirement.marker.contents() { write!(f, " ; {marker}")?; } Ok(()) @@ -255,6 +289,9 @@ pub trait Pep508Url: Display + Debug + Sized { /// Parse a url from `name @ `. Defaults to [`Url::parse_url`]. fn parse_url(url: &str, working_dir: Option<&Path>) -> Result; + + /// Returns a [`Display`] implementation that doesn't mask credentials. + fn displayable_with_credentials(&self) -> impl Display; } impl Pep508Url for Url { @@ -263,6 +300,10 @@ impl Pep508Url for Url { fn parse_url(url: &str, _working_dir: Option<&Path>) -> Result { Url::parse(url) } + + fn displayable_with_credentials(&self) -> impl Display { + self + } } /// A reporter for warnings that occur during marker parsing or evaluation. 
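// A rough usage sketch (hypothetical `log_requirement` helper, not in this diff)
// of the two display paths added here: the default `Display` impl goes through
// `RequirementDisplay` with credential masking, while the opt-in
// `displayable_with_credentials()` keeps any user:password embedded in the URL.
// The exact masked rendering is delegated to `DisplaySafeUrl` (uv-redacted) and
// is not spelled out here.
use uv_pep508::{Pep508Url, Requirement};

fn log_requirement<T: Pep508Url>(req: &Requirement<T>) {
    // Safe for logs: URL credentials are masked.
    println!("{req}");
    // Opt-in: credentials preserved, e.g. when writing a requirement back to a
    // file the user already controls.
    println!("{}", req.displayable_with_credentials());
}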
@@ -295,22 +336,15 @@ impl Reporter for TracingReporter { #[cfg(feature = "schemars")] impl schemars::JsonSchema for Requirement { - fn schema_name() -> String { - "Requirement".to_string() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("Requirement") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - schemars::schema::SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - metadata: Some(Box::new(schemars::schema::Metadata { - description: Some( - "A PEP 508 dependency specifier, e.g., `ruff >= 0.6.0`".to_string(), - ), - ..schemars::schema::Metadata::default() - })), - ..schemars::schema::SchemaObject::default() - } - .into() + fn json_schema(_gen: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "string", + "description": "A PEP 508 dependency specifier, e.g., `ruff >= 0.6.0`" + }) } } diff --git a/crates/uv-pep508/src/marker/algebra.rs b/crates/uv-pep508/src/marker/algebra.rs index f421a8fa3..2a3f82f27 100644 --- a/crates/uv-pep508/src/marker/algebra.rs +++ b/crates/uv-pep508/src/marker/algebra.rs @@ -172,7 +172,7 @@ impl InternerGuard<'_> { ), // Normalize `python_version` markers to `python_full_version` nodes. MarkerValueVersion::PythonVersion => { - match python_version_to_full_version(normalize_specifier(specifier)) { + match python_version_to_full_version(specifier.only_release()) { Ok(specifier) => ( Variable::Version(CanonicalMarkerValueVersion::PythonFullVersion), Edges::from_specifier(specifier), @@ -1214,7 +1214,7 @@ impl Edges { /// Returns the [`Edges`] for a version specifier. fn from_specifier(specifier: VersionSpecifier) -> Edges { - let specifier = release_specifier_to_range(normalize_specifier(specifier)); + let specifier = release_specifier_to_range(specifier.only_release(), true); Edges::Version { edges: Edges::from_range(&specifier), } @@ -1227,9 +1227,9 @@ impl Edges { let mut range: Ranges = versions .into_iter() .map(|version| { - let specifier = VersionSpecifier::equals_version(version.clone()); + let specifier = VersionSpecifier::equals_version(version.only_release()); let specifier = python_version_to_full_version(specifier)?; - Ok(release_specifier_to_range(normalize_specifier(specifier))) + Ok(release_specifier_to_range(specifier, true)) }) .flatten_ok() .collect::, NodeId>>()?; @@ -1526,57 +1526,62 @@ impl Edges { } } -// Normalize a [`VersionSpecifier`] before adding it to the tree. -fn normalize_specifier(specifier: VersionSpecifier) -> VersionSpecifier { - let (operator, version) = specifier.into_parts(); - - // The decision diagram relies on the assumption that the negation of a marker tree is - // the complement of the marker space. However, pre-release versions violate this assumption. - // - // For example, the marker `python_full_version > '3.9' or python_full_version <= '3.9'` - // does not match `python_full_version == 3.9.0a0` and so cannot simplify to `true`. However, - // its negation, `python_full_version > '3.9' and python_full_version <= '3.9'`, also does not - // match `3.9.0a0` and simplifies to `false`, which violates the algebra decision diagrams - // rely on. For this reason we ignore pre-release versions entirely when evaluating markers. - // - // Note that `python_version` cannot take on pre-release values as it is truncated to just the - // major and minor version segments. 
Thus using release-only specifiers is definitely necessary - // for `python_version` to fully simplify any ranges, such as `python_version > '3.9' or python_version <= '3.9'`, - // which is always `true` for `python_version`. For `python_full_version` however, this decision - // is a semantic change. - let mut release = &*version.release(); - - // Strip any trailing `0`s. - // - // The [`Version`] type ignores trailing `0`s for equality, but still preserves them in its - // [`Display`] output. We must normalize all versions by stripping trailing `0`s to remove the - // distinction between versions like `3.9` and `3.9.0`. Otherwise, their output would depend on - // which form was added to the global marker interner first. - // - // Note that we cannot strip trailing `0`s for star equality, as `==3.0.*` is different from `==3.*`. - if !operator.is_star() { - if let Some(end) = release.iter().rposition(|segment| *segment != 0) { - if end > 0 { - release = &release[..=end]; - } - } - } - - VersionSpecifier::from_version(operator, Version::new(release)).unwrap() -} - /// Returns the equivalent `python_full_version` specifier for a `python_version` specifier. /// /// Returns `Err` with a constant node if the equivalent comparison is always `true` or `false`. fn python_version_to_full_version(specifier: VersionSpecifier) -> Result { + // Trailing zeroes matter only for (not-)equals-star and tilde-equals. This means that below + // the next two blocks, we can use the trimmed release as the release. + if specifier.operator().is_star() { + // Input python_version python_full_version + // ==3.* 3.* 3.* + // ==3.0.* 3.0 3.0.* + // ==3.0.0.* 3.0 3.0.* + // ==3.9.* 3.9 3.9.* + // ==3.9.0.* 3.9 3.9.* + // ==3.9.0.0.* 3.9 3.9.* + // ==3.9.1.* FALSE FALSE + // ==3.9.1.0.* FALSE FALSE + // ==3.9.1.0.0.* FALSE FALSE + return match &*specifier.version().release() { + // `3.*` + [_major] => Ok(specifier), + // Ex) `3.9.*`, `3.9.0.*`, or `3.9.0.0.*` + [major, minor, rest @ ..] if rest.iter().all(|x| *x == 0) => { + let python_version = Version::new([major, minor]); + // Unwrap safety: A star operator with two version segments is always valid. + Ok(VersionSpecifier::from_version(*specifier.operator(), python_version).unwrap()) + } + // Ex) `3.9.1.*` or `3.9.0.1.*` + _ => Err(NodeId::FALSE), + }; + } + + if *specifier.operator() == Operator::TildeEqual { + // python_version python_full_version + // ~=3 (not possible) + // ~= 3.0 >= 3.0, < 4.0 + // ~= 3.9 >= 3.9, < 4.0 + // ~= 3.9.0 == 3.9.* + // ~= 3.9.1 FALSE + // ~= 3.9.0.0 == 3.9.* + // ~= 3.9.0.1 FALSE + return match &*specifier.version().release() { + // Ex) `3.0`, `3.7` + [_major, _minor] => Ok(specifier), + // Ex) `3.9`, `3.9.0`, or `3.9.0.0` + [major, minor, rest @ ..] if rest.iter().all(|x| *x == 0) => { + let python_version = Version::new([major, minor]); + Ok(VersionSpecifier::equals_star_version(python_version)) + } + // Ex) `3.9.1` or `3.9.0.1` + _ => Err(NodeId::FALSE), + }; + } + // Extract the major and minor version segments if the specifier contains exactly // those segments, or if it contains a major segment with an implied minor segment of `0`. - let major_minor = match *specifier.version().release() { - // For star operators, we cannot add a trailing `0`. - // - // `python_version == 3.*` is equivalent to `python_full_version == 3.*`. Adding a - // trailing `0` would result in `python_version == 3.0.*`, which is incorrect. 
- [_major] if specifier.operator().is_star() => return Ok(specifier), + let major_minor = match *specifier.version().only_release_trimmed().release() { // Add a trailing `0` for the minor version, which is implied. // For example, `python_version == 3` matches `3.0.1`, `3.0.2`, etc. [major] => Some((major, 0)), @@ -1614,9 +1619,10 @@ fn python_version_to_full_version(specifier: VersionSpecifier) -> Result specifier, + Operator::EqualStar | Operator::NotEqualStar | Operator::TildeEqual => { + // Handled above. + unreachable!() + } }) } else { let [major, minor, ..] = *specifier.version().release() else { @@ -1624,13 +1630,14 @@ fn python_version_to_full_version(specifier: VersionSpecifier) -> Result { + // `python_version` cannot have more than two release segments, and we know + // that the following release segments aren't purely zeroes so equality is impossible. + Operator::Equal | Operator::ExactEqual => { return Err(NodeId::FALSE); } // Similarly, inequalities are always `true`. - Operator::NotEqual | Operator::NotEqualStar => return Err(NodeId::TRUE), + Operator::NotEqual => return Err(NodeId::TRUE), // `python_version {<,<=} 3.7.8` is equivalent to `python_full_version < 3.8`. Operator::LessThan | Operator::LessThanEqual => { @@ -1641,6 +1648,11 @@ fn python_version_to_full_version(specifier: VersionSpecifier) -> Result { VersionSpecifier::greater_than_equal_version(Version::new([major, minor + 1])) } + + Operator::EqualStar | Operator::NotEqualStar | Operator::TildeEqual => { + // Handled above. + unreachable!() + } }) } } diff --git a/crates/uv-pep508/src/marker/simplify.rs b/crates/uv-pep508/src/marker/simplify.rs index 34c095b09..3dc03693a 100644 --- a/crates/uv-pep508/src/marker/simplify.rs +++ b/crates/uv-pep508/src/marker/simplify.rs @@ -64,8 +64,8 @@ fn collect_dnf( continue; } - // Detect whether the range for this edge can be simplified as a star inequality. - if let Some(specifier) = star_range_inequality(&range) { + // Detect whether the range for this edge can be simplified as a star specifier. + if let Some(specifier) = star_range_specifier(&range) { path.push(MarkerExpression::Version { key: marker.key().into(), specifier, @@ -343,22 +343,34 @@ where Some(excluded) } -/// Returns `Some` if the version expression can be simplified as a star inequality with the given -/// specifier. +/// Returns `Some` if the version range can be simplified as a star specifier. /// -/// For example, `python_full_version < '3.8' or python_full_version >= '3.9'` can be simplified to -/// `python_full_version != '3.8.*'`. -fn star_range_inequality(range: &Ranges) -> Option { +/// Only for the two bounds case not covered by [`VersionSpecifier::from_release_only_bounds`]. +/// +/// For negative ranges like `python_full_version < '3.8' or python_full_version >= '3.9'`, +/// returns `!= '3.8.*'`. 
+fn star_range_specifier(range: &Ranges) -> Option { + if range.iter().count() != 2 { + return None; + } + // Check for negative star range: two segments [(Unbounded, Excluded(v1)), (Included(v2), Unbounded)] let (b1, b2) = range.iter().collect_tuple()?; - - match (b1, b2) { - ((Bound::Unbounded, Bound::Excluded(v1)), (Bound::Included(v2), Bound::Unbounded)) - if v1.release().len() == 2 - && *v2.release() == [v1.release()[0], v1.release()[1] + 1] => - { - Some(VersionSpecifier::not_equals_star_version(v1.clone())) + if let ((Bound::Unbounded, Bound::Excluded(v1)), (Bound::Included(v2), Bound::Unbounded)) = + (b1, b2) + { + match *v1.only_release_trimmed().release() { + [major] if *v2.release() == [major, 1] => { + Some(VersionSpecifier::not_equals_star_version(Version::new([ + major, 0, + ]))) + } + [major, minor] if *v2.release() == [major, minor + 1] => { + Some(VersionSpecifier::not_equals_star_version(v1.clone())) + } + _ => None, } - _ => None, + } else { + None } } diff --git a/crates/uv-pep508/src/marker/tree.rs b/crates/uv-pep508/src/marker/tree.rs index 070a24b26..5739d7c98 100644 --- a/crates/uv-pep508/src/marker/tree.rs +++ b/crates/uv-pep508/src/marker/tree.rs @@ -1707,23 +1707,15 @@ impl Display for MarkerTreeContents { #[cfg(feature = "schemars")] impl schemars::JsonSchema for MarkerTree { - fn schema_name() -> String { - "MarkerTree".to_string() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("MarkerTree") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - schemars::schema::SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - metadata: Some(Box::new(schemars::schema::Metadata { - description: Some( - "A PEP 508-compliant marker expression, e.g., `sys_platform == 'Darwin'`" - .to_string(), - ), - ..schemars::schema::Metadata::default() - })), - ..schemars::schema::SchemaObject::default() - } - .into() + fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "string", + "description": "A PEP 508-compliant marker expression, e.g., `sys_platform == 'Darwin'`" + }) } } @@ -2279,13 +2271,13 @@ mod test { #[test] fn test_marker_simplification() { assert_false("python_version == '3.9.1'"); - assert_false("python_version == '3.9.0.*'"); assert_true("python_version != '3.9.1'"); - // Technically these is are valid substring comparison, but we do not allow them. - // e.g., using a version with patch components with `python_version` is considered - // impossible to satisfy since the value it is truncated at the minor version - assert_false("python_version in '3.9.0'"); + // This is an edge case that happens to be supported, but is not critical to support. + assert_simplifies( + "python_version in '3.9.0'", + "python_full_version == '3.9.*'", + ); // e.g., using a version that is not PEP 440 compliant is considered arbitrary assert_true("python_version in 'foo'"); // e.g., including `*` versions, which would require tracking a version specifier @@ -2295,16 +2287,25 @@ mod test { assert_true("python_version in '3.9,3.10'"); assert_true("python_version in '3.9 or 3.10'"); - // e.g, when one of the values cannot be true - // TODO(zanieb): This seems like a quirk of the `python_full_version` normalization, this - // should just act as though the patch version isn't present - assert_false("python_version in '3.9 3.10.0 3.11'"); + // This is an edge case that happens to be supported, but is not critical to support. 
+ assert_simplifies( + "python_version in '3.9 3.10.0 3.11'", + "python_full_version >= '3.9' and python_full_version < '3.12'", + ); assert_simplifies("python_version == '3.9'", "python_full_version == '3.9.*'"); assert_simplifies( "python_version == '3.9.0'", "python_full_version == '3.9.*'", ); + assert_simplifies( + "python_version == '3.9.0.*'", + "python_full_version == '3.9.*'", + ); + assert_simplifies( + "python_version == '3.*'", + "python_full_version >= '3' and python_full_version < '4'", + ); // ` in` // e.g., when the range is not contiguous @@ -2515,7 +2516,7 @@ mod test { #[test] fn test_simplification_extra_versus_other() { // Here, the `extra != 'foo'` cannot be simplified out, because - // `extra == 'foo'` can be true even when `extra == 'bar`' is true. + // `extra == 'foo'` can be true even when `extra == 'bar'`' is true. assert_simplifies( r#"extra != "foo" and (extra == "bar" or extra == "baz")"#, "(extra == 'bar' and extra != 'foo') or (extra == 'baz' and extra != 'foo')", @@ -2536,6 +2537,68 @@ mod test { ); } + #[test] + fn test_python_version_equal_star() { + // Input, equivalent with python_version, equivalent with python_full_version + let cases = [ + ("3.*", "3.*", "3.*"), + ("3.0.*", "3.0", "3.0.*"), + ("3.0.0.*", "3.0", "3.0.*"), + ("3.9.*", "3.9", "3.9.*"), + ("3.9.0.*", "3.9", "3.9.*"), + ("3.9.0.0.*", "3.9", "3.9.*"), + ]; + for (input, equal_python_version, equal_python_full_version) in cases { + assert_eq!( + m(&format!("python_version == '{input}'")), + m(&format!("python_version == '{equal_python_version}'")), + "{input} {equal_python_version}" + ); + assert_eq!( + m(&format!("python_version == '{input}'")), + m(&format!( + "python_full_version == '{equal_python_full_version}'" + )), + "{input} {equal_python_full_version}" + ); + } + + let cases_false = ["3.9.1.*", "3.9.1.0.*", "3.9.1.0.0.*"]; + for input in cases_false { + assert!( + m(&format!("python_version == '{input}'")).is_false(), + "{input}" + ); + } + } + + #[test] + fn test_tilde_equal_normalization() { + assert_eq!( + m("python_version ~= '3.10.0'"), + m("python_version >= '3.10.0' and python_version < '3.11.0'") + ); + + // Two digit versions such as `python_version` get padded with a zero, so they can never + // match + assert_eq!(m("python_version ~= '3.10.1'"), MarkerTree::FALSE); + + assert_eq!( + m("python_version ~= '3.10'"), + m("python_version >= '3.10' and python_version < '4.0'") + ); + + assert_eq!( + m("python_full_version ~= '3.10.0'"), + m("python_full_version >= '3.10.0' and python_full_version < '3.11.0'") + ); + + assert_eq!( + m("python_full_version ~= '3.10'"), + m("python_full_version >= '3.10' and python_full_version < '4.0'") + ); + } + /// This tests marker implication. /// /// Specifically, these test cases come from a [bug] where `foo` and `bar` @@ -3332,4 +3395,32 @@ mod test { ] ); } + + /// Case a: There is no version `3` (no trailing zero) in the interner yet. + #[test] + fn marker_normalization_a() { + let left_tree = m("python_version == '3.0.*'"); + let left = left_tree.try_to_string().unwrap(); + let right = "python_full_version == '3.0.*'"; + assert_eq!(left, right, "{left} != {right}"); + } + + /// Case b: There is already a version `3` (no trailing zero) in the interner. 
+ #[test] + fn marker_normalization_b() { + m("python_version >= '3' and python_version <= '3.0'"); + + let left_tree = m("python_version == '3.0.*'"); + let left = left_tree.try_to_string().unwrap(); + let right = "python_full_version == '3.0.*'"; + assert_eq!(left, right, "{left} != {right}"); + } + + #[test] + fn marker_normalization_c() { + let left_tree = MarkerTree::from_str("python_version == '3.10.0.*'").unwrap(); + let left = left_tree.try_to_string().unwrap(); + let right = "python_full_version == '3.10.*'"; + assert_eq!(left, right, "{left} != {right}"); + } } diff --git a/crates/uv-pep508/src/origin.rs b/crates/uv-pep508/src/origin.rs index 91a88f59a..4619e6f2e 100644 --- a/crates/uv-pep508/src/origin.rs +++ b/crates/uv-pep508/src/origin.rs @@ -12,8 +12,8 @@ pub enum RequirementOrigin { File(PathBuf), /// The requirement was provided via a local project (e.g., a `pyproject.toml` file). Project(PathBuf, PackageName), - /// The requirement was provided via a local project (e.g., a `pyproject.toml` file). - Group(PathBuf, PackageName, GroupName), + /// The requirement was provided via a local project's group (e.g., a `pyproject.toml` file). + Group(PathBuf, Option, GroupName), /// The requirement was provided via a workspace. Workspace, } diff --git a/crates/uv-pep508/src/unnamed.rs b/crates/uv-pep508/src/unnamed.rs index cbde5fb06..d5c1820bb 100644 --- a/crates/uv-pep508/src/unnamed.rs +++ b/crates/uv-pep508/src/unnamed.rs @@ -66,9 +66,9 @@ impl UnnamedRequirementUrl for VerbatimUrl { /// dependencies. This isn't compliant with PEP 508, but is common in `requirements.txt`, which /// is implementation-defined. #[derive(Hash, Debug, Clone, Eq, PartialEq)] -pub struct UnnamedRequirement { +pub struct UnnamedRequirement { /// The direct URL that defines the version specifier. - pub url: Url, + pub url: ReqUrl, /// The list of extras such as `security`, `tests` in /// `requests [security,tests] >= 2.8.1, == 2.8.* ; python_version > "3.8"`. pub extras: Box<[ExtraName]>, diff --git a/crates/uv-pep508/src/verbatim_url.rs b/crates/uv-pep508/src/verbatim_url.rs index 67580ec62..37d07b40b 100644 --- a/crates/uv-pep508/src/verbatim_url.rs +++ b/crates/uv-pep508/src/verbatim_url.rs @@ -1,6 +1,6 @@ use std::borrow::Cow; use std::cmp::Ordering; -use std::fmt::Debug; +use std::fmt::{Debug, Display}; use std::hash::Hash; use std::ops::Deref; use std::path::{Path, PathBuf}; @@ -13,15 +13,21 @@ use url::{ParseError, Url}; #[cfg_attr(not(feature = "non-pep508-extensions"), allow(unused_imports))] use uv_fs::{normalize_absolute_path, normalize_url_path}; +use uv_redacted::DisplaySafeUrl; use crate::Pep508Url; /// A wrapper around [`Url`] that preserves the original string. +/// +/// The original string is not preserved after serialization/deserialization. #[derive(Debug, Clone, Eq)] pub struct VerbatimUrl { /// The parsed URL. - url: Url, + url: DisplaySafeUrl, /// The URL as it was provided by the user. + /// + /// Even if originally set, this will be [`None`] after + /// serialization/deserialization. given: Option, } @@ -39,14 +45,17 @@ impl PartialEq for VerbatimUrl { impl VerbatimUrl { /// Create a [`VerbatimUrl`] from a [`Url`]. - pub fn from_url(url: Url) -> Self { + pub fn from_url(url: DisplaySafeUrl) -> Self { Self { url, given: None } } /// Parse a URL from a string. 
pub fn parse_url(given: impl AsRef) -> Result { let url = Url::parse(given.as_ref())?; - Ok(Self { url, given: None }) + Ok(Self { + url: DisplaySafeUrl::from(url), + given: None, + }) } /// Parse a URL from an absolute or relative path. @@ -72,7 +81,7 @@ impl VerbatimUrl { let (path, fragment) = split_fragment(&path); // Convert to a URL. - let mut url = Url::from_file_path(path.clone()) + let mut url = DisplaySafeUrl::from_file_path(path.clone()) .map_err(|()| VerbatimUrlError::UrlConversion(path.to_path_buf()))?; // Set the fragment, if it exists. @@ -102,7 +111,7 @@ impl VerbatimUrl { let (path, fragment) = split_fragment(&path); // Convert to a URL. - let mut url = Url::from_file_path(path.clone()) + let mut url = DisplaySafeUrl::from_file_path(path.clone()) .unwrap_or_else(|()| panic!("path is absolute: {}", path.display())); // Set the fragment, if it exists. @@ -130,8 +139,10 @@ impl VerbatimUrl { let (path, fragment) = split_fragment(path); // Convert to a URL. - let mut url = Url::from_file_path(path.clone()) - .unwrap_or_else(|()| panic!("path is absolute: {}", path.display())); + let mut url = DisplaySafeUrl::from( + Url::from_file_path(path.clone()) + .unwrap_or_else(|()| panic!("path is absolute: {}", path.display())), + ); // Set the fragment, if it exists. if let Some(fragment) = fragment { @@ -155,18 +166,18 @@ impl VerbatimUrl { self.given.as_deref() } - /// Return the underlying [`Url`]. - pub fn raw(&self) -> &Url { + /// Return the underlying [`DisplaySafeUrl`]. + pub fn raw(&self) -> &DisplaySafeUrl { &self.url } - /// Convert a [`VerbatimUrl`] into a [`Url`]. - pub fn to_url(&self) -> Url { + /// Convert a [`VerbatimUrl`] into a [`DisplaySafeUrl`]. + pub fn to_url(&self) -> DisplaySafeUrl { self.url.clone() } - /// Convert a [`VerbatimUrl`] into a [`Url`]. - pub fn into_url(self) -> Url { + /// Convert a [`VerbatimUrl`] into a [`DisplaySafeUrl`]. + pub fn into_url(self) -> DisplaySafeUrl { self.url } @@ -206,7 +217,7 @@ impl std::fmt::Display for VerbatimUrl { } impl Deref for VerbatimUrl { - type Target = Url; + type Target = DisplaySafeUrl; fn deref(&self) -> &Self::Target { &self.url @@ -215,10 +226,22 @@ impl Deref for VerbatimUrl { impl From for VerbatimUrl { fn from(url: Url) -> Self { + VerbatimUrl::from_url(DisplaySafeUrl::from(url)) + } +} + +impl From for VerbatimUrl { + fn from(url: DisplaySafeUrl) -> Self { VerbatimUrl::from_url(url) } } +impl From for Url { + fn from(url: VerbatimUrl) -> Self { + Url::from(url.url) + } +} + #[cfg(feature = "serde")] impl serde::Serialize for VerbatimUrl { fn serialize(&self, serializer: S) -> Result @@ -235,7 +258,7 @@ impl<'de> serde::Deserialize<'de> for VerbatimUrl { where D: serde::Deserializer<'de>, { - let url = Url::deserialize(deserializer)?; + let url = DisplaySafeUrl::deserialize(deserializer)?; Ok(VerbatimUrl::from_url(url)) } } @@ -314,6 +337,10 @@ impl Pep508Url for VerbatimUrl { Err(Self::Err::NotAUrl(expanded.to_string())) } } + + fn displayable_with_credentials(&self) -> impl Display { + self.url.displayable_with_credentials() + } } /// An error that can occur when parsing a [`VerbatimUrl`]. 
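// A minimal sketch (hypothetical `roundtrip` helper, not in this diff) of the
// conversions this change sets up: `VerbatimUrl` now stores and derefs to a
// `DisplaySafeUrl`, while plain `url::Url` values convert in and out via the
// `From` impls above.
use url::Url;
use uv_pep508::VerbatimUrl;
use uv_redacted::DisplaySafeUrl;

fn roundtrip(url: Url) -> Url {
    // `From<Url>` wraps the value in a `DisplaySafeUrl` internally.
    let verbatim = VerbatimUrl::from(url);
    // `raw()` (and `Deref`) now expose `&DisplaySafeUrl` rather than `&Url`.
    let _safe: &DisplaySafeUrl = verbatim.raw();
    // The new `From<VerbatimUrl> for Url` converts back for APIs that still
    // take a plain `url::Url`.
    Url::from(verbatim)
}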
diff --git a/crates/uv-performance-memory-allocator/Cargo.lock b/crates/uv-performance-memory-allocator/Cargo.lock index 831d5a0f9..e1650c824 100644 --- a/crates/uv-performance-memory-allocator/Cargo.lock +++ b/crates/uv-performance-memory-allocator/Cargo.lock @@ -19,9 +19,9 @@ checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" [[package]] name = "libmimalloc-sys" -version = "0.1.42" +version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec9d6fac27761dabcd4ee73571cdb06b7022dc99089acbe5435691edffaac0f4" +checksum = "bf88cd67e9de251c1781dbe2f641a1a3ad66eaae831b8a2c38fbdc5ddae16d4d" dependencies = [ "cc", "libc", @@ -29,9 +29,9 @@ dependencies = [ [[package]] name = "mimalloc" -version = "0.1.46" +version = "0.1.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "995942f432bbb4822a7e9c3faa87a695185b0d09273ba85f097b54f4e458f2af" +checksum = "b1791cbe101e95af5764f06f20f6760521f7158f69dbf9d6baf941ee1bf6bc40" dependencies = [ "libmimalloc-sys", ] diff --git a/crates/uv-platform-tags/src/platform.rs b/crates/uv-platform-tags/src/platform.rs index c2b849e0f..88c36714b 100644 --- a/crates/uv-platform-tags/src/platform.rs +++ b/crates/uv-platform-tags/src/platform.rs @@ -43,6 +43,7 @@ pub enum Os { Manylinux { major: u16, minor: u16 }, Musllinux { major: u16, minor: u16 }, Windows, + Pyodide { major: u16, minor: u16 }, Macos { major: u16, minor: u16 }, FreeBsd { release: String }, NetBsd { release: String }, @@ -67,6 +68,7 @@ impl fmt::Display for Os { Self::Illumos { .. } => write!(f, "illumos"), Self::Haiku { .. } => write!(f, "haiku"), Self::Android { .. } => write!(f, "android"), + Self::Pyodide { .. } => write!(f, "pyodide"), } } } @@ -109,6 +111,7 @@ pub enum Arch { S390X, LoongArch64, Riscv64, + Wasm32, } impl fmt::Display for Arch { @@ -126,6 +129,7 @@ impl fmt::Display for Arch { Self::S390X => write!(f, "s390x"), Self::LoongArch64 => write!(f, "loongarch64"), Self::Riscv64 => write!(f, "riscv64"), + Self::Wasm32 => write!(f, "wasm32"), } } } @@ -168,7 +172,7 @@ impl Arch { // manylinux_2_36 Self::LoongArch64 => Some(36), // unsupported - Self::Powerpc | Self::Armv5TEL | Self::Armv6L => None, + Self::Powerpc | Self::Armv5TEL | Self::Armv6L | Self::Wasm32 => None, } } @@ -187,6 +191,7 @@ impl Arch { Self::S390X => "s390x", Self::LoongArch64 => "loongarch64", Self::Riscv64 => "riscv64", + Self::Wasm32 => "wasm32", } } diff --git a/crates/uv-platform-tags/src/platform_tag.rs b/crates/uv-platform-tags/src/platform_tag.rs index 0ba46200d..1162a83d7 100644 --- a/crates/uv-platform-tags/src/platform_tag.rs +++ b/crates/uv-platform-tags/src/platform_tag.rs @@ -71,6 +71,8 @@ pub enum PlatformTag { Illumos { release_arch: SmallString }, /// Ex) `solaris_11_4_x86_64` Solaris { release_arch: SmallString }, + /// Ex) `pyodide_2024_0_wasm32` + Pyodide { major: u16, minor: u16 }, } impl PlatformTag { @@ -97,6 +99,7 @@ impl PlatformTag { PlatformTag::Haiku { .. } => Some("Haiku"), PlatformTag::Illumos { .. } => Some("Illumos"), PlatformTag::Solaris { .. } => Some("Solaris"), + PlatformTag::Pyodide { .. 
} => Some("Pyodide"), } } } @@ -262,6 +265,7 @@ impl std::fmt::Display for PlatformTag { Self::Haiku { release_arch } => write!(f, "haiku_{release_arch}"), Self::Illumos { release_arch } => write!(f, "illumos_{release_arch}"), Self::Solaris { release_arch } => write!(f, "solaris_{release_arch}_64bit"), + Self::Pyodide { major, minor } => write!(f, "pyodide_{major}_{minor}_wasm32"), } } } @@ -616,6 +620,35 @@ impl FromStr for PlatformTag { }); } + if let Some(rest) = s.strip_prefix("pyodide_") { + let mid = + rest.strip_suffix("_wasm32") + .ok_or_else(|| ParsePlatformTagError::InvalidArch { + platform: "pyodide", + tag: s.to_string(), + })?; + let underscore = memchr::memchr(b'_', mid.as_bytes()).ok_or_else(|| { + ParsePlatformTagError::InvalidFormat { + platform: "pyodide", + tag: s.to_string(), + } + })?; + let major: u16 = mid[..underscore].parse().map_err(|_| { + ParsePlatformTagError::InvalidMajorVersion { + platform: "pyodide", + tag: s.to_string(), + } + })?; + + let minor: u16 = mid[underscore + 1..].parse().map_err(|_| { + ParsePlatformTagError::InvalidMinorVersion { + platform: "pyodide", + tag: s.to_string(), + } + })?; + return Ok(Self::Pyodide { major, minor }); + } + Err(ParsePlatformTagError::UnknownFormat(s.to_string())) } } @@ -900,6 +933,27 @@ mod tests { ); } + #[test] + fn pyodide_platform() { + let tag = PlatformTag::Pyodide { + major: 2024, + minor: 0, + }; + assert_eq!( + PlatformTag::from_str("pyodide_2024_0_wasm32").as_ref(), + Ok(&tag) + ); + assert_eq!(tag.to_string(), "pyodide_2024_0_wasm32"); + + assert_eq!( + PlatformTag::from_str("pyodide_2024_0_wasm64"), + Err(ParsePlatformTagError::InvalidArch { + platform: "pyodide", + tag: "pyodide_2024_0_wasm64".to_string() + }) + ); + } + #[test] fn unknown_platform() { assert_eq!( diff --git a/crates/uv-platform-tags/src/tags.rs b/crates/uv-platform-tags/src/tags.rs index 1e20c348c..7381f5dd5 100644 --- a/crates/uv-platform-tags/src/tags.rs +++ b/crates/uv-platform-tags/src/tags.rs @@ -617,6 +617,12 @@ fn compatible_tags(platform: &Platform) -> Result, PlatformErro arch, }] } + (Os::Pyodide { major, minor }, Arch::Wasm32) => { + vec![PlatformTag::Pyodide { + major: *major, + minor: *minor, + }] + } _ => { return Err(PlatformError::OsVersionDetectionError(format!( "Unsupported operating system and architecture combination: {os} {arch}" diff --git a/crates/uv-publish/Cargo.toml b/crates/uv-publish/Cargo.toml index 254d83d03..c3dfeef39 100644 --- a/crates/uv-publish/Cargo.toml +++ b/crates/uv-publish/Cargo.toml @@ -23,6 +23,7 @@ uv-extract = { workspace = true } uv-fs = { workspace = true } uv-metadata = { workspace = true } uv-pypi-types = { workspace = true } +uv-redacted = { workspace = true } uv-static = { workspace = true } uv-warnings = { workspace = true } diff --git a/crates/uv-publish/src/lib.rs b/crates/uv-publish/src/lib.rs index 72e4d102a..f3dc768c6 100644 --- a/crates/uv-publish/src/lib.rs +++ b/crates/uv-publish/src/lib.rs @@ -12,7 +12,6 @@ use itertools::Itertools; use reqwest::header::AUTHORIZATION; use reqwest::multipart::Part; use reqwest::{Body, Response, StatusCode}; -use reqwest_middleware::RequestBuilder; use reqwest_retry::policies::ExponentialBackoff; use reqwest_retry::{RetryPolicy, Retryable, RetryableStrategy}; use rustc_hash::FxHashSet; @@ -29,7 +28,7 @@ use uv_auth::Credentials; use uv_cache::{Cache, Refresh}; use uv_client::{ BaseClient, DEFAULT_RETRIES, MetadataFormat, OwnedArchive, RegistryClientBuilder, - UvRetryableStrategy, + RequestBuilder, UvRetryableStrategy, }; use 
uv_configuration::{KeyringProviderType, TrustedPublishing}; use uv_distribution_filename::{DistFilename, SourceDistExtension, SourceDistFilename}; @@ -38,6 +37,7 @@ use uv_extract::hash::{HashReader, Hasher}; use uv_fs::{ProgressReader, Simplified}; use uv_metadata::read_metadata_async_seek; use uv_pypi_types::{HashAlgorithm, HashDigest, Metadata23, MetadataError}; +use uv_redacted::DisplaySafeUrl; use uv_static::EnvVars; use uv_warnings::{warn_user, warn_user_once}; @@ -59,7 +59,7 @@ pub enum PublishError { #[error("Failed to publish: `{}`", _0.user_display())] PublishPrepare(PathBuf, #[source] Box), #[error("Failed to publish `{}` to {}", _0.user_display(), _1)] - PublishSend(PathBuf, Url, #[source] PublishSendError), + PublishSend(PathBuf, DisplaySafeUrl, #[source] PublishSendError), #[error("Failed to obtain token for trusted publishing")] TrustedPublishing(#[from] TrustedPublishingError), #[error("{0} are not allowed when using trusted publishing")] @@ -308,7 +308,7 @@ pub async fn check_trusted_publishing( password: Option<&str>, keyring_provider: KeyringProviderType, trusted_publishing: TrustedPublishing, - registry: &Url, + registry: &DisplaySafeUrl, client: &BaseClient, ) -> Result { match trusted_publishing { @@ -329,7 +329,9 @@ pub async fn check_trusted_publishing( debug!( "Running on GitHub Actions without explicit credentials, checking for trusted publishing" ); - match trusted_publishing::get_token(registry, client.for_host(registry)).await { + match trusted_publishing::get_token(registry, client.for_host(registry).raw_client()) + .await + { Ok(token) => Ok(TrustedPublishResult::Configured(token)), Err(err) => { // TODO(konsti): It would be useful if we could differentiate between actual errors @@ -363,7 +365,9 @@ pub async fn check_trusted_publishing( ); } - let token = trusted_publishing::get_token(registry, client.for_host(registry)).await?; + let token = + trusted_publishing::get_token(registry, client.for_host(registry).raw_client()) + .await?; Ok(TrustedPublishResult::Configured(token)) } TrustedPublishing::Never => Ok(TrustedPublishResult::Skipped), @@ -379,14 +383,14 @@ pub async fn upload( file: &Path, raw_filename: &str, filename: &DistFilename, - registry: &Url, + registry: &DisplaySafeUrl, client: &BaseClient, credentials: &Credentials, check_url_client: Option<&CheckUrlClient<'_>>, download_concurrency: &Semaphore, reporter: Arc, ) -> Result { - let form_metadata = form_metadata(file, filename) + let form_metadata = FormMetadata::read_from_file(file, filename) .await .map_err(|err| PublishError::PublishPrepare(file.to_path_buf(), Box::new(err)))?; @@ -640,125 +644,143 @@ async fn metadata(file: &Path, filename: &DistFilename) -> Result -async fn form_metadata( - file: &Path, - filename: &DistFilename, -) -> Result, PublishPrepareError> { - let hash_hex = hash_file(file, Hasher::from(HashAlgorithm::Sha256)).await?; +#[derive(Debug, Clone)] +struct FormMetadata(Vec<(&'static str, String)>); - let Metadata23 { - metadata_version, - name, - version, - platforms, - // Not used by PyPI legacy upload - supported_platforms: _, - summary, - description, - description_content_type, - keywords, - home_page, - download_url, - author, - author_email, - maintainer, - maintainer_email, - license, - license_expression, - license_files, - classifiers, - requires_dist, - provides_dist, - obsoletes_dist, - requires_python, - requires_external, - project_urls, - provides_extras, - dynamic, - } = metadata(file, filename).await?; +impl FormMetadata { + /// Collect the non-file fields 
for the multipart request from the package METADATA. + /// + /// Reference implementation: + async fn read_from_file( + file: &Path, + filename: &DistFilename, + ) -> Result { + let hash_hex = hash_file(file, Hasher::from(HashAlgorithm::Sha256)).await?; - let mut form_metadata = vec![ - (":action", "file_upload".to_string()), - ("sha256_digest", hash_hex.digest.to_string()), - ("protocol_version", "1".to_string()), - ("metadata_version", metadata_version.clone()), - // Twine transforms the name with `re.sub("[^A-Za-z0-9.]+", "-", name)` - // * - // * - // warehouse seems to call `packaging.utils.canonicalize_name` nowadays and has a separate - // `normalized_name`, so we'll start with this and we'll readjust if there are user reports. - ("name", name.clone()), - ("version", version.clone()), - ("filetype", filename.filetype().to_string()), - ]; + let Metadata23 { + metadata_version, + name, + version, + platforms, + // Not used by PyPI legacy upload + supported_platforms: _, + summary, + description, + description_content_type, + keywords, + home_page, + download_url, + author, + author_email, + maintainer, + maintainer_email, + license, + license_expression, + license_files, + classifiers, + requires_dist, + provides_dist, + obsoletes_dist, + requires_python, + requires_external, + project_urls, + provides_extras, + dynamic, + } = metadata(file, filename).await?; - if let DistFilename::WheelFilename(wheel) = filename { - form_metadata.push(("pyversion", wheel.python_tags().iter().join("."))); - } else { - form_metadata.push(("pyversion", "source".to_string())); + let mut form_metadata = vec![ + (":action", "file_upload".to_string()), + ("sha256_digest", hash_hex.digest.to_string()), + ("protocol_version", "1".to_string()), + ("metadata_version", metadata_version.clone()), + // Twine transforms the name with `re.sub("[^A-Za-z0-9.]+", "-", name)` + // * + // * + // warehouse seems to call `packaging.utils.canonicalize_name` nowadays and has a separate + // `normalized_name`, so we'll start with this and we'll readjust if there are user reports. + ("name", name.clone()), + ("version", version.clone()), + ("filetype", filename.filetype().to_string()), + ]; + + if let DistFilename::WheelFilename(wheel) = filename { + form_metadata.push(("pyversion", wheel.python_tags().iter().join("."))); + } else { + form_metadata.push(("pyversion", "source".to_string())); + } + + let mut add_option = |name, value: Option| { + if let Some(some) = value.clone() { + form_metadata.push((name, some)); + } + }; + + add_option("author", author); + add_option("author_email", author_email); + add_option("description", description); + add_option("description_content_type", description_content_type); + add_option("download_url", download_url); + add_option("home_page", home_page); + add_option("keywords", keywords); + add_option("license", license); + add_option("license_expression", license_expression); + add_option("maintainer", maintainer); + add_option("maintainer_email", maintainer_email); + add_option("summary", summary); + + // The GitLab PyPI repository API implementation requires this metadata field and twine always + // includes it in the request, even when it's empty. 
+ form_metadata.push(("requires_python", requires_python.unwrap_or(String::new()))); + + let mut add_vec = |name, values: Vec| { + for i in values { + form_metadata.push((name, i.clone())); + } + }; + + add_vec("classifiers", classifiers); + add_vec("dynamic", dynamic); + add_vec("license_file", license_files); + add_vec("obsoletes_dist", obsoletes_dist); + add_vec("platform", platforms); + add_vec("project_urls", project_urls); + add_vec("provides_dist", provides_dist); + add_vec("provides_extra", provides_extras); + add_vec("requires_dist", requires_dist); + add_vec("requires_external", requires_external); + + Ok(Self(form_metadata)) } - let mut add_option = |name, value: Option| { - if let Some(some) = value.clone() { - form_metadata.push((name, some)); - } - }; + /// Returns an iterator over the metadata fields. + fn iter(&self) -> std::slice::Iter<'_, (&'static str, String)> { + self.0.iter() + } +} - add_option("author", author); - add_option("author_email", author_email); - add_option("description", description); - add_option("description_content_type", description_content_type); - add_option("download_url", download_url); - add_option("home_page", home_page); - add_option("keywords", keywords); - add_option("license", license); - add_option("license_expression", license_expression); - add_option("maintainer", maintainer); - add_option("maintainer_email", maintainer_email); - add_option("summary", summary); - - // The GitLab PyPI repository API implementation requires this metadata field and twine always - // includes it in the request, even when it's empty. - form_metadata.push(("requires_python", requires_python.unwrap_or(String::new()))); - - let mut add_vec = |name, values: Vec| { - for i in values { - form_metadata.push((name, i.clone())); - } - }; - - add_vec("classifiers", classifiers); - add_vec("dynamic", dynamic); - add_vec("license_file", license_files); - add_vec("obsoletes_dist", obsoletes_dist); - add_vec("platform", platforms); - add_vec("project_urls", project_urls); - add_vec("provides_dist", provides_dist); - add_vec("provides_extra", provides_extras); - add_vec("requires_dist", requires_dist); - add_vec("requires_external", requires_external); - - Ok(form_metadata) +impl<'a> IntoIterator for &'a FormMetadata { + type Item = &'a (&'a str, String); + type IntoIter = std::slice::Iter<'a, (&'a str, String)>; + fn into_iter(self) -> Self::IntoIter { + self.iter() + } } /// Build the upload request. /// /// Returns the request and the reporter progress bar id. -async fn build_request( +async fn build_request<'a>( file: &Path, raw_filename: &str, filename: &DistFilename, - registry: &Url, - client: &BaseClient, + registry: &DisplaySafeUrl, + client: &'a BaseClient, credentials: &Credentials, - form_metadata: &[(&'static str, String)], + form_metadata: &FormMetadata, reporter: Arc, -) -> Result<(RequestBuilder, usize), PublishPrepareError> { +) -> Result<(RequestBuilder<'a>, usize), PublishPrepareError> { let mut form = reqwest::multipart::Form::new(); - for (key, value) in form_metadata { + for (key, value) in form_metadata.iter() { form = form.text(*key, value.clone()); } @@ -790,7 +812,7 @@ async fn build_request( let mut request = client .for_host(&url) - .post(url) + .post(Url::from(url)) .multipart(form) // Ask PyPI for a structured error messages instead of HTML-markup error messages. // For other registries, we ask them to return plain text over HTML. 
See @@ -884,15 +906,18 @@ async fn handle_response(registry: &Url, response: Response) -> Result<(), Publi #[cfg(test)] mod tests { - use crate::{Reporter, build_request, form_metadata}; - use insta::{assert_debug_snapshot, assert_snapshot}; - use itertools::Itertools; use std::path::PathBuf; use std::sync::Arc; - use url::Url; + + use insta::{assert_debug_snapshot, assert_snapshot}; + use itertools::Itertools; + use uv_auth::Credentials; use uv_client::BaseClientBuilder; use uv_distribution_filename::DistFilename; + use uv_redacted::DisplaySafeUrl; + + use crate::{FormMetadata, Reporter, build_request}; struct DummyReporter; @@ -912,7 +937,9 @@ mod tests { let file = PathBuf::from("../../scripts/links/").join(raw_filename); let filename = DistFilename::try_from_normalized_filename(raw_filename).unwrap(); - let form_metadata = form_metadata(&file, &filename).await.unwrap(); + let form_metadata = FormMetadata::read_from_file(&file, &filename) + .await + .unwrap(); let formatted_metadata = form_metadata .iter() @@ -968,12 +995,13 @@ mod tests { project_urls: Source, https://github.com/unknown/tqdm "###); + let client = BaseClientBuilder::new().build(); let (request, _) = build_request( &file, raw_filename, &filename, - &Url::parse("https://example.org/upload").unwrap(), - &BaseClientBuilder::new().build(), + &DisplaySafeUrl::parse("https://example.org/upload").unwrap(), + &client, &Credentials::basic(Some("ferris".to_string()), Some("F3RR!S".to_string())), &form_metadata, Arc::new(DummyReporter), @@ -984,7 +1012,7 @@ mod tests { insta::with_settings!({ filters => [("boundary=[0-9a-f-]+", "boundary=[...]")], }, { - assert_debug_snapshot!(&request, @r#" + assert_debug_snapshot!(&request.raw_builder(), @r#" RequestBuilder { inner: RequestBuilder { method: POST, @@ -1023,7 +1051,9 @@ mod tests { let file = PathBuf::from("../../scripts/links/").join(raw_filename); let filename = DistFilename::try_from_normalized_filename(raw_filename).unwrap(); - let form_metadata = form_metadata(&file, &filename).await.unwrap(); + let form_metadata = FormMetadata::read_from_file(&file, &filename) + .await + .unwrap(); let formatted_metadata = form_metadata .iter() @@ -1117,12 +1147,13 @@ mod tests { requires_dist: requests ; extra == 'telegram' "###); + let client = BaseClientBuilder::new().build(); let (request, _) = build_request( &file, raw_filename, &filename, - &Url::parse("https://example.org/upload").unwrap(), - &BaseClientBuilder::new().build(), + &DisplaySafeUrl::parse("https://example.org/upload").unwrap(), + &client, &Credentials::basic(Some("ferris".to_string()), Some("F3RR!S".to_string())), &form_metadata, Arc::new(DummyReporter), @@ -1133,7 +1164,7 @@ mod tests { insta::with_settings!({ filters => [("boundary=[0-9a-f-]+", "boundary=[...]")], }, { - assert_debug_snapshot!(&request, @r#" + assert_debug_snapshot!(&request.raw_builder(), @r#" RequestBuilder { inner: RequestBuilder { method: POST, diff --git a/crates/uv-publish/src/trusted_publishing.rs b/crates/uv-publish/src/trusted_publishing.rs index bdbc8077d..4e45f924a 100644 --- a/crates/uv-publish/src/trusted_publishing.rs +++ b/crates/uv-publish/src/trusted_publishing.rs @@ -12,6 +12,7 @@ use std::fmt::Display; use thiserror::Error; use tracing::{debug, trace}; use url::Url; +use uv_redacted::DisplaySafeUrl; use uv_static::EnvVars; #[derive(Debug, Error)] @@ -23,9 +24,9 @@ pub enum TrustedPublishingError { #[error(transparent)] Url(#[from] url::ParseError), #[error("Failed to fetch: `{0}`")] - Reqwest(Url, #[source] reqwest::Error), + 
Reqwest(DisplaySafeUrl, #[source] reqwest::Error), #[error("Failed to fetch: `{0}`")] - ReqwestMiddleware(Url, #[source] reqwest_middleware::Error), + ReqwestMiddleware(DisplaySafeUrl, #[source] reqwest_middleware::Error), #[error(transparent)] SerdeJson(#[from] serde_json::error::Error), #[error( @@ -94,7 +95,7 @@ pub struct OidcTokenClaims { /// Returns the short-lived token to use for uploading. pub(crate) async fn get_token( - registry: &Url, + registry: &DisplaySafeUrl, client: &ClientWithMiddleware, ) -> Result { // If this fails, we can skip the audience request. @@ -124,15 +125,16 @@ pub(crate) async fn get_token( } async fn get_audience( - registry: &Url, + registry: &DisplaySafeUrl, client: &ClientWithMiddleware, ) -> Result { // `pypa/gh-action-pypi-publish` uses `netloc` (RFC 1808), which is deprecated for authority // (RFC 3986). - let audience_url = Url::parse(&format!("https://{}/_/oidc/audience", registry.authority()))?; + let audience_url = + DisplaySafeUrl::parse(&format!("https://{}/_/oidc/audience", registry.authority()))?; debug!("Querying the trusted publishing audience from {audience_url}"); let response = client - .get(audience_url.clone()) + .get(Url::from(audience_url.clone())) .send() .await .map_err(|err| TrustedPublishingError::ReqwestMiddleware(audience_url.clone(), err))?; @@ -154,14 +156,14 @@ async fn get_oidc_token( let oidc_token_url = env::var(EnvVars::ACTIONS_ID_TOKEN_REQUEST_URL).map_err(|err| { TrustedPublishingError::from_var_err(EnvVars::ACTIONS_ID_TOKEN_REQUEST_URL, err) })?; - let mut oidc_token_url = Url::parse(&oidc_token_url)?; + let mut oidc_token_url = DisplaySafeUrl::parse(&oidc_token_url)?; oidc_token_url .query_pairs_mut() .append_pair("audience", audience); debug!("Querying the trusted publishing OIDC token from {oidc_token_url}"); let authorization = format!("bearer {oidc_token_request_token}"); let response = client - .get(oidc_token_url.clone()) + .get(Url::from(oidc_token_url.clone())) .header(header::AUTHORIZATION, authorization) .send() .await @@ -188,11 +190,11 @@ fn decode_oidc_token(oidc_token: &str) -> Option { } async fn get_publish_token( - registry: &Url, + registry: &DisplaySafeUrl, oidc_token: &str, client: &ClientWithMiddleware, ) -> Result { - let mint_token_url = Url::parse(&format!( + let mint_token_url = DisplaySafeUrl::parse(&format!( "https://{}/_/oidc/mint-token", registry.authority() ))?; @@ -201,7 +203,7 @@ async fn get_publish_token( token: oidc_token.to_string(), }; let response = client - .post(mint_token_url.clone()) + .post(Url::from(mint_token_url.clone())) .body(serde_json::to_vec(&mint_token_payload)?) .send() .await diff --git a/crates/uv-pypi-types/Cargo.toml b/crates/uv-pypi-types/Cargo.toml index 2393240b9..0a94cc9ad 100644 --- a/crates/uv-pypi-types/Cargo.toml +++ b/crates/uv-pypi-types/Cargo.toml @@ -21,6 +21,7 @@ uv-git-types = { workspace = true } uv-normalize = { workspace = true } uv-pep440 = { workspace = true } uv-pep508 = { workspace = true } +uv-redacted = { workspace = true } uv-small-str = { workspace = true } hashbrown = { workspace = true } diff --git a/crates/uv-pypi-types/src/base_url.rs b/crates/uv-pypi-types/src/base_url.rs index 535407cf6..e8dae7968 100644 --- a/crates/uv-pypi-types/src/base_url.rs +++ b/crates/uv-pypi-types/src/base_url.rs @@ -1,57 +1,29 @@ use serde::{Deserialize, Serialize}; -use url::Url; - -/// Join a relative URL to a base URL. 
-pub fn base_url_join_relative(base: &str, relative: &str) -> Result { - let base_url = Url::parse(base).map_err(|err| JoinRelativeError::ParseError { - original: base.to_string(), - source: err, - })?; - - base_url - .join(relative) - .map_err(|err| JoinRelativeError::ParseError { - original: format!("{base}/{relative}"), - source: err, - }) -} - -/// An error that occurs when `base_url_join_relative` fails. -/// -/// The error message includes the URL (`base` or `maybe_relative`) passed to -/// `base_url_join_relative` that provoked the error. -#[derive(Clone, Debug, thiserror::Error)] -pub enum JoinRelativeError { - #[error("Failed to parse URL: `{original}`")] - ParseError { - original: String, - source: url::ParseError, - }, -} +use uv_redacted::DisplaySafeUrl; #[derive(Debug, Clone, Hash, Eq, PartialEq, Serialize, Deserialize)] pub struct BaseUrl( #[serde( - serialize_with = "Url::serialize_internal", - deserialize_with = "Url::deserialize_internal" + serialize_with = "DisplaySafeUrl::serialize_internal", + deserialize_with = "DisplaySafeUrl::deserialize_internal" )] - Url, + DisplaySafeUrl, ); impl BaseUrl { - /// Return the underlying [`Url`]. - pub fn as_url(&self) -> &Url { + /// Return the underlying [`DisplaySafeUrl`]. + pub fn as_url(&self) -> &DisplaySafeUrl { &self.0 } - /// Return the underlying [`Url`] as a serialized string. + /// Return the underlying [`DisplaySafeUrl`] as a serialized string. pub fn as_str(&self) -> &str { self.0.as_str() } } -impl From for BaseUrl { - fn from(url: Url) -> Self { +impl From for BaseUrl { + fn from(url: DisplaySafeUrl) -> Self { Self(url) } } diff --git a/crates/uv-pypi-types/src/conflicts.rs b/crates/uv-pypi-types/src/conflicts.rs index 94366bfd2..81064955a 100644 --- a/crates/uv-pypi-types/src/conflicts.rs +++ b/crates/uv-pypi-types/src/conflicts.rs @@ -3,6 +3,8 @@ use petgraph::{ graph::{DiGraph, NodeIndex}, }; use rustc_hash::{FxHashMap, FxHashSet}; +#[cfg(feature = "schemars")] +use std::borrow::Cow; use std::{collections::BTreeSet, hash::Hash, rc::Rc}; use uv_normalize::{ExtraName, GroupName, PackageName}; @@ -638,12 +640,12 @@ pub struct SchemaConflictItem { #[cfg(feature = "schemars")] impl schemars::JsonSchema for SchemaConflictItem { - fn schema_name() -> String { - "SchemaConflictItem".to_string() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("SchemaConflictItem") } - fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - ::json_schema(r#gen) + fn json_schema(generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + ::json_schema(generator) } } diff --git a/crates/uv-pypi-types/src/identifier.rs b/crates/uv-pypi-types/src/identifier.rs index b0c78d5b2..47439f2c9 100644 --- a/crates/uv-pypi-types/src/identifier.rs +++ b/crates/uv-pypi-types/src/identifier.rs @@ -1,4 +1,6 @@ use serde::{Serialize, Serializer}; +#[cfg(feature = "schemars")] +use std::borrow::Cow; use std::fmt::Display; use std::str::FromStr; use thiserror::Error; @@ -99,25 +101,16 @@ impl Serialize for Identifier { #[cfg(feature = "schemars")] impl schemars::JsonSchema for Identifier { - fn schema_name() -> String { - "Identifier".to_string() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("Identifier") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - schemars::schema::SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - string: Some(Box::new(schemars::schema::StringValidation { - // Best-effort Unicode 
support (https://stackoverflow.com/a/68844380/3549270) - pattern: Some(r"^[_\p{Alphabetic}][_0-9\p{Alphabetic}]*$".to_string()), - ..schemars::schema::StringValidation::default() - })), - metadata: Some(Box::new(schemars::schema::Metadata { - description: Some("An identifier in Python".to_string()), - ..schemars::schema::Metadata::default() - })), - ..schemars::schema::SchemaObject::default() - } - .into() + fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "string", + "pattern": r"^[_\p{Alphabetic}][_0-9\p{Alphabetic}]*$", + "description": "An identifier in Python" + }) } } diff --git a/crates/uv-pypi-types/src/parsed_url.rs b/crates/uv-pypi-types/src/parsed_url.rs index c918c1cf1..9517dfdc6 100644 --- a/crates/uv-pypi-types/src/parsed_url.rs +++ b/crates/uv-pypi-types/src/parsed_url.rs @@ -9,6 +9,7 @@ use uv_git_types::{GitUrl, GitUrlParseError}; use uv_pep508::{ Pep508Url, UnnamedRequirementUrl, VerbatimUrl, VerbatimUrlError, looks_like_git_repository, }; +use uv_redacted::DisplaySafeUrl; use crate::{ArchiveInfo, DirInfo, DirectUrl, VcsInfo, VcsKind}; @@ -61,6 +62,10 @@ impl Pep508Url for VerbatimParsedUrl { verbatim, }) } + + fn displayable_with_credentials(&self) -> impl Display { + self.verbatim.displayable_with_credentials() + } } impl UnnamedRequirementUrl for VerbatimParsedUrl { @@ -194,7 +199,7 @@ impl ParsedUrl { /// * `file:///home/ferris/my_project/my_project-0.1.0-py3-none-any.whl` #[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Hash, Ord)] pub struct ParsedPathUrl { - pub url: Url, + pub url: DisplaySafeUrl, /// The absolute path to the distribution which we use for installing. pub install_path: Box, /// The file extension, e.g. `tar.gz`, `zip`, etc. @@ -203,7 +208,7 @@ pub struct ParsedPathUrl { impl ParsedPathUrl { /// Construct a [`ParsedPathUrl`] from a path requirement source. - pub fn from_source(install_path: Box, ext: DistExtension, url: Url) -> Self { + pub fn from_source(install_path: Box, ext: DistExtension, url: DisplaySafeUrl) -> Self { Self { url, install_path, @@ -218,7 +223,7 @@ impl ParsedPathUrl { /// * `file:///home/ferris/my_project` #[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Hash, Ord)] pub struct ParsedDirectoryUrl { - pub url: Url, + pub url: DisplaySafeUrl, /// The absolute path to the distribution which we use for installing. pub install_path: Box, pub editable: bool, @@ -227,7 +232,12 @@ pub struct ParsedDirectoryUrl { impl ParsedDirectoryUrl { /// Construct a [`ParsedDirectoryUrl`] from a path requirement source. - pub fn from_source(install_path: Box, editable: bool, r#virtual: bool, url: Url) -> Self { + pub fn from_source( + install_path: Box, + editable: bool, + r#virtual: bool, + url: DisplaySafeUrl, + ) -> Self { Self { url, install_path, @@ -255,21 +265,22 @@ impl ParsedGitUrl { } } -impl TryFrom for ParsedGitUrl { +impl TryFrom for ParsedGitUrl { type Error = ParsedUrlError; /// Supports URLs with and without the `git+` prefix. /// /// When the URL includes a prefix, it's presumed to come from a PEP 508 requirement; when it's /// excluded, it's presumed to come from `tool.uv.sources`. 
diff --git a/crates/uv-pypi-types/src/parsed_url.rs b/crates/uv-pypi-types/src/parsed_url.rs
index c918c1cf1..9517dfdc6 100644
--- a/crates/uv-pypi-types/src/parsed_url.rs
+++ b/crates/uv-pypi-types/src/parsed_url.rs
@@ -9,6 +9,7 @@ use uv_git_types::{GitUrl, GitUrlParseError};
 use uv_pep508::{
     Pep508Url, UnnamedRequirementUrl, VerbatimUrl, VerbatimUrlError, looks_like_git_repository,
 };
+use uv_redacted::DisplaySafeUrl;
 
 use crate::{ArchiveInfo, DirInfo, DirectUrl, VcsInfo, VcsKind};
 
@@ -61,6 +62,10 @@ impl Pep508Url for VerbatimParsedUrl {
             verbatim,
         })
     }
+
+    fn displayable_with_credentials(&self) -> impl Display {
+        self.verbatim.displayable_with_credentials()
+    }
 }
 
 impl UnnamedRequirementUrl for VerbatimParsedUrl {
@@ -194,7 +199,7 @@ impl ParsedUrl {
 /// * `file:///home/ferris/my_project/my_project-0.1.0-py3-none-any.whl`
 #[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Hash, Ord)]
 pub struct ParsedPathUrl {
-    pub url: Url,
+    pub url: DisplaySafeUrl,
     /// The absolute path to the distribution which we use for installing.
     pub install_path: Box<Path>,
     /// The file extension, e.g. `tar.gz`, `zip`, etc.
@@ -203,7 +208,7 @@ pub struct ParsedPathUrl {
 impl ParsedPathUrl {
     /// Construct a [`ParsedPathUrl`] from a path requirement source.
-    pub fn from_source(install_path: Box<Path>, ext: DistExtension, url: Url) -> Self {
+    pub fn from_source(install_path: Box<Path>, ext: DistExtension, url: DisplaySafeUrl) -> Self {
         Self {
             url,
             install_path,
@@ -218,7 +223,7 @@ impl ParsedPathUrl {
 /// * `file:///home/ferris/my_project`
 #[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Hash, Ord)]
 pub struct ParsedDirectoryUrl {
-    pub url: Url,
+    pub url: DisplaySafeUrl,
     /// The absolute path to the distribution which we use for installing.
     pub install_path: Box<Path>,
     pub editable: bool,
@@ -227,7 +232,12 @@ pub struct ParsedDirectoryUrl {
 impl ParsedDirectoryUrl {
     /// Construct a [`ParsedDirectoryUrl`] from a path requirement source.
-    pub fn from_source(install_path: Box<Path>, editable: bool, r#virtual: bool, url: Url) -> Self {
+    pub fn from_source(
+        install_path: Box<Path>,
+        editable: bool,
+        r#virtual: bool,
+        url: DisplaySafeUrl,
+    ) -> Self {
         Self {
             url,
             install_path,
@@ -255,21 +265,22 @@ impl ParsedGitUrl {
     }
 }
 
-impl TryFrom<Url> for ParsedGitUrl {
+impl TryFrom<DisplaySafeUrl> for ParsedGitUrl {
     type Error = ParsedUrlError;
 
     /// Supports URLs with and without the `git+` prefix.
     ///
     /// When the URL includes a prefix, it's presumed to come from a PEP 508 requirement; when it's
    /// excluded, it's presumed to come from `tool.uv.sources`.
-    fn try_from(url_in: Url) -> Result<Self, Self::Error> {
+    fn try_from(url_in: DisplaySafeUrl) -> Result<Self, Self::Error> {
         let subdirectory = get_subdirectory(&url_in).map(PathBuf::into_boxed_path);
         let url = url_in
             .as_str()
             .strip_prefix("git+")
             .unwrap_or(url_in.as_str());
-        let url = Url::parse(url).map_err(|err| ParsedUrlError::UrlParse(url.to_string(), err))?;
+        let url = DisplaySafeUrl::parse(url)
+            .map_err(|err| ParsedUrlError::UrlParse(url.to_string(), err))?;
         let url = GitUrl::try_from(url)?;
         Ok(Self { url, subdirectory })
     }
@@ -283,14 +294,18 @@ impl TryFrom<Url> for ParsedGitUrl {
 /// * A source dist with a recognizable extension but invalid name: `https://github.com/foo-labs/foo/archive/master.zip#egg=pkg&subdirectory=packages/bar`
 #[derive(Debug, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
 pub struct ParsedArchiveUrl {
-    pub url: Url,
+    pub url: DisplaySafeUrl,
     pub subdirectory: Option<Box<Path>>,
     pub ext: DistExtension,
 }
 
 impl ParsedArchiveUrl {
     /// Construct a [`ParsedArchiveUrl`] from a URL requirement source.
-    pub fn from_source(location: Url, subdirectory: Option<Box<Path>>, ext: DistExtension) -> Self {
+    pub fn from_source(
+        location: DisplaySafeUrl,
+        subdirectory: Option<Box<Path>>,
+        ext: DistExtension,
+    ) -> Self {
         Self {
             url: location,
             subdirectory,
@@ -299,10 +314,10 @@ impl ParsedArchiveUrl {
     }
 }
 
-impl TryFrom<Url> for ParsedArchiveUrl {
+impl TryFrom<DisplaySafeUrl> for ParsedArchiveUrl {
     type Error = ParsedUrlError;
 
-    fn try_from(mut url: Url) -> Result<Self, Self::Error> {
+    fn try_from(mut url: DisplaySafeUrl) -> Result<Self, Self::Error> {
         // Extract the `#subdirectory` fragment, if present.
         let subdirectory = get_subdirectory(&url).map(PathBuf::into_boxed_path);
         url.set_fragment(None);
@@ -338,10 +353,10 @@ fn get_subdirectory(url: &Url) -> Option<PathBuf> {
     Some(PathBuf::from(subdirectory))
 }
 
-impl TryFrom<Url> for ParsedUrl {
+impl TryFrom<DisplaySafeUrl> for ParsedUrl {
     type Error = ParsedUrlError;
 
-    fn try_from(url: Url) -> Result<Self, Self::Error> {
+    fn try_from(url: DisplaySafeUrl) -> Result<Self, Self::Error> {
         if let Some((prefix, ..)) = url.scheme().split_once('+') {
             match prefix {
                 "git" => Ok(Self::Git(ParsedGitUrl::try_from(url)?)),
@@ -464,7 +479,7 @@ impl From<&ParsedGitUrl> for DirectUrl {
     }
 }
 
-impl From<ParsedUrl> for Url {
+impl From<ParsedUrl> for DisplaySafeUrl {
     fn from(value: ParsedUrl) -> Self {
         match value {
             ParsedUrl::Path(value) => value.into(),
@@ -475,19 +490,19 @@ impl From<ParsedUrl> for Url {
     }
 }
 
-impl From<ParsedPathUrl> for Url {
+impl From<ParsedPathUrl> for DisplaySafeUrl {
     fn from(value: ParsedPathUrl) -> Self {
         value.url
     }
 }
 
-impl From<ParsedDirectoryUrl> for Url {
+impl From<ParsedDirectoryUrl> for DisplaySafeUrl {
     fn from(value: ParsedDirectoryUrl) -> Self {
         value.url
     }
 }
 
-impl From<ParsedArchiveUrl> for Url {
+impl From<ParsedArchiveUrl> for DisplaySafeUrl {
     fn from(value: ParsedArchiveUrl) -> Self {
         let mut url = value.url;
         if let Some(subdirectory) = value.subdirectory {
@@ -497,7 +512,7 @@ impl From<ParsedArchiveUrl> for Url {
     }
 }
 
-impl From<ParsedGitUrl> for Url {
+impl From<ParsedGitUrl> for DisplaySafeUrl {
     fn from(value: ParsedGitUrl) -> Self {
         let mut url = Self::parse(&format!("{}{}", "git+", Self::from(value.url).as_str()))
             .expect("Git URL is invalid");
@@ -511,33 +526,36 @@ impl From<ParsedGitUrl> for Url {
 #[cfg(test)]
 mod tests {
     use anyhow::Result;
-    use url::Url;
 
     use crate::parsed_url::ParsedUrl;
+    use uv_redacted::DisplaySafeUrl;
 
     #[test]
     fn direct_url_from_url() -> Result<()> {
-        let expected = Url::parse("git+https://github.com/pallets/flask.git")?;
-        let actual = Url::from(ParsedUrl::try_from(expected.clone())?);
-        assert_eq!(expected, actual);
-
-        let expected = Url::parse("git+https://github.com/pallets/flask.git#subdirectory=pkg_dir")?;
-        let actual = Url::from(ParsedUrl::try_from(expected.clone())?);
-        assert_eq!(expected, actual);
-
-        let expected = Url::parse("git+https://github.com/pallets/flask.git@2.0.0")?;
-        let actual = Url::from(ParsedUrl::try_from(expected.clone())?);
+        let expected = DisplaySafeUrl::parse("git+https://github.com/pallets/flask.git")?;
+        let actual = DisplaySafeUrl::from(ParsedUrl::try_from(expected.clone())?);
         assert_eq!(expected, actual);
 
         let expected =
-            Url::parse("git+https://github.com/pallets/flask.git@2.0.0#subdirectory=pkg_dir")?;
-        let actual = Url::from(ParsedUrl::try_from(expected.clone())?);
+            DisplaySafeUrl::parse("git+https://github.com/pallets/flask.git#subdirectory=pkg_dir")?;
+        let actual = DisplaySafeUrl::from(ParsedUrl::try_from(expected.clone())?);
+        assert_eq!(expected, actual);
+
+        let expected = DisplaySafeUrl::parse("git+https://github.com/pallets/flask.git@2.0.0")?;
+        let actual = DisplaySafeUrl::from(ParsedUrl::try_from(expected.clone())?);
+        assert_eq!(expected, actual);
+
+        let expected = DisplaySafeUrl::parse(
+            "git+https://github.com/pallets/flask.git@2.0.0#subdirectory=pkg_dir",
+        )?;
+        let actual = DisplaySafeUrl::from(ParsedUrl::try_from(expected.clone())?);
         assert_eq!(expected, actual);
 
         // TODO(charlie): Preserve other fragments.
-        let expected =
-            Url::parse("git+https://github.com/pallets/flask.git#egg=flask&subdirectory=pkg_dir")?;
-        let actual = Url::from(ParsedUrl::try_from(expected.clone())?);
+        let expected = DisplaySafeUrl::parse(
+            "git+https://github.com/pallets/flask.git#egg=flask&subdirectory=pkg_dir",
+        )?;
+        let actual = DisplaySafeUrl::from(ParsedUrl::try_from(expected.clone())?);
         assert_ne!(expected, actual);
 
         Ok(())
@@ -546,8 +564,8 @@ mod tests {
     #[test]
     #[cfg(unix)]
     fn direct_url_from_url_absolute() -> Result<()> {
-        let expected = Url::parse("file:///path/to/directory")?;
-        let actual = Url::from(ParsedUrl::try_from(expected.clone())?);
+        let expected = DisplaySafeUrl::parse("file:///path/to/directory")?;
+        let actual = DisplaySafeUrl::from(ParsedUrl::try_from(expected.clone())?);
         assert_eq!(expected, actual);
         Ok(())
     }
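The test changes above exercise a round trip through the new conversions: a `DisplaySafeUrl` is classified into a `ParsedUrl` (Git, archive, directory, or path) and converted back. A short sketch of that flow, assuming `ParsedUrl` is re-exported from the `uv_pypi_types` crate root:

```rust
use uv_pypi_types::ParsedUrl;
use uv_redacted::DisplaySafeUrl;

fn main() -> anyhow::Result<()> {
    let original = DisplaySafeUrl::parse("git+https://github.com/pallets/flask.git@2.0.0")?;

    // Classify the URL; the `git+` prefix selects the Git variant.
    let parsed = ParsedUrl::try_from(original.clone())?;

    // Convert back to a URL; per the TODO above, fragments other than
    // `subdirectory` are not preserved on the round trip.
    let round_tripped = DisplaySafeUrl::from(parsed);
    assert_eq!(original, round_tripped);
    Ok(())
}
```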
diff --git a/crates/uv-python/Cargo.toml b/crates/uv-python/Cargo.toml
index 25317017b..d008b2d4e 100644
--- a/crates/uv-python/Cargo.toml
+++ b/crates/uv-python/Cargo.toml
@@ -20,6 +20,7 @@ uv-cache = { workspace = true }
 uv-cache-info = { workspace = true }
 uv-cache-key = { workspace = true }
 uv-client = { workspace = true }
+uv-configuration = { workspace = true }
 uv-dirs = { workspace = true }
 uv-distribution-filename = { workspace = true }
 uv-extract = { workspace = true }
@@ -29,6 +30,7 @@ uv-pep440 = { workspace = true }
 uv-pep508 = { workspace = true }
 uv-platform-tags = { workspace = true }
 uv-pypi-types = { workspace = true }
+uv-redacted = { workspace = true }
 uv-state = { workspace = true }
 uv-static = { workspace = true }
 uv-trampoline-builder = { workspace = true }
@@ -37,11 +39,14 @@ uv-warnings = { workspace = true }
 anyhow = { workspace = true }
 clap = { workspace = true, optional = true }
 configparser = { workspace = true }
+dunce = { workspace = true }
 fs-err = { workspace = true, features = ["tokio"] }
 futures = { workspace = true }
 goblin = { workspace = true, default-features = false }
+indexmap = { workspace = true }
 itertools = { workspace = true }
 owo-colors = { workspace = true }
+ref-cast = { workspace = true }
 regex = { workspace = true }
 reqwest = { workspace = true }
 reqwest-middleware = { workspace = true }
diff --git a/crates/uv-python/download-metadata.json b/crates/uv-python/download-metadata.json
index 08e4a3901..8c7ffec4c 100644
--- a/crates/uv-python/download-metadata.json
+++
b/crates/uv-python/download-metadata.json @@ -1,4 +1,3268 @@ { + "cpython-3.14.0b4-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "94b80254a7e50dd2d82d323a0bffdc59772b2f04b0f0c044bc4d56d696249eb2", + "variant": null + }, + "cpython-3.14.0b4-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "2155f60b2a8a1448b2c4852a27887be2e9fe8e910bac1a75b342e44884a191b5", + "variant": null + }, + "cpython-3.14.0b4-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f76fb1a88e722f9cae8b82b9851b736968582527d8a1212ab3b918b2012ce0a6", + "variant": null + }, + "cpython-3.14.0b4-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "c358e87ac84d228e191a22d2447c60e1cb15e6cbb753c397b0e9b9da9c557ce0", + "variant": null + }, + "cpython-3.14.0b4-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "a426e05b3d8a20dfbda84162ef75ed3590e7137436623b93d136c084d0688690", + "variant": null + }, + "cpython-3.14.0b4-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "b835aac7264b64652007f5210369d5fe1b8d1629befbb8d00e40a891cd039f67", + "variant": null + }, + "cpython-3.14.0b4-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "0ad96a96ae32f5979f2bd9e6992ecf122819ceb06711439c66b9f8a3dc1eaba4", + 
"variant": null + }, + "cpython-3.14.0b4-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "18763ccce35baeb1960e043f9bd4be3a36a511acc6844b91381532ee5b7c6da8", + "variant": null + }, + "cpython-3.14.0b4-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "3d07868b329c7c9b7ae5a52af35c27d0b20b5a7f6f574a3bedb5836b4bb337d7", + "variant": null + }, + "cpython-3.14.0b4-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "82ee7827c1f75a7b5150f731ddf1dc312c7958c741a6746967fb8a5656c85b91", + "variant": null + }, + "cpython-3.14.0b4-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c96dd14927c89392bd0ff3264e4b7bdfeea76979f544ee30260151c913046396", + "variant": null + }, + "cpython-3.14.0b4-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "ae82acb77c69c506a799bd7022fe9a22508814fe76d0d7e53c1f2f60b5fc77d6", + "variant": null + }, + "cpython-3.14.0b4-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "9fdb71600bbdcae5dd47426972d1d0af03a2f7d98ac44fbb63284203738fda2c", + "variant": null + }, + "cpython-3.14.0b4-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "f864428b9b6b5938efeb93526d52ec685377672ad292e4b2eee62cb6107933e1", + "variant": null + }, + 
"cpython-3.14.0b4-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "0d3f7f0c8b881bcdff08d14a0999c736f13e309e663edd0739a2db327c43e4c2", + "variant": null + }, + "cpython-3.14.0b4-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "11443f91bbda5f3d440908f20bfafd549dad5357e705f1e85273ebb6db0206f3", + "variant": null + }, + "cpython-3.14.0b4-windows-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "61bef0ff22c3117795c55d5e8e2c87956a94fbb4725e03231f360b7c68ba5358", + "variant": null + }, + "cpython-3.14.0b4-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "bcf229f25c12f81169b1f1d207a719fc2908f4e6ba5b61404787710d3b1e2120", + "variant": null + }, + "cpython-3.14.0b4-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "8255b31a40867eb52ff1a2e476f56c697a717e6193d313413c788b0fbdd28a3c", + "variant": null + }, + "cpython-3.14.0b4+freethreaded-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "ce28498dcf2c5c4d3c964e6e44ff44e5b1b72a4234f807e2ff121393ed40442e", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "a7d63512a17522d7c76c7bafa27b49a35f4f5f74b5140be209ca17c0cad15737", + "variant": "freethreaded" + }, + 
"cpython-3.14.0b4+freethreaded-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "0250288ab21cfd14caa826056de7203baa19ed7e85198c19e6dcdd8b2124ae0e", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", + "sha256": "c0bd17a6409c21fb10b075449511c09940b53438bf785cd20db1f2e5d15ade30", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", + "sha256": "d747055b6b5878dcf6b9d425b0a7ea3fa7b33fe241b31681e28f56d5ed86ed5d", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "756376b22bf237646f7bb519bee69b1704d369a6ca5941b5ff83d5b2d022612b", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "25dbe52c44b42914d9343d456dc17fbcbf234ab1f0fd0be00cae27c6e336546b", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "7ebb845ee94ae870e13146de0052251d48d584363c1b374f84fbdeb8e7936350", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "0df5305c3b95f53f7f2db762be2badf752477c359146155f8b9658b71aff2128", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "c6beef48f6a2ca49da0b2798e5dc9c45233a8f0b6fa778616ba7cfdcd66f85a6", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "31587432be64d6913317919c239ef84ae4c78a7b11f95e8d48b81dc820021be3", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "302a23af192207337db2c2268a3fed98f13845ad5324f1ff97baa68807098513", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "39747d608a5400b0fa37fbddef606678f8552fdf907f43b1d8a475436c413aa9", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "9870447eb095027df97a1e412eff378fb78872a527dc6adeffc901fff8a40d70", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "78adac3ab0696380ebdbceb96924d0f033e20b033e3a1633aa54df0295407292", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-linux-x86_64_v4-musl": { + "name": "cpython", + 
"arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "59f92039b72eca4cfb4639699bc97bbb0de6b866a7894bac9cf132374cf5aa1a", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-windows-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "37fac713d3b25731f134c9c6b1c9021ffb2aacda630010ffa15497446655179f", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "5a7d61b1863960dab6f78027b5edc543ee41d0a45f7851413951389b842385c8", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+freethreaded-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "e503ec18fd8b1d0fcb94ded5a67be4a88334d5b101dc485b0281577ae84a6acc", + "variant": "freethreaded" + }, + "cpython-3.14.0b4+debug-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "6bf05e71ef3cf092d0f40d992ea192016327468992e5e0b7bde8ac48d6b9c145", + "variant": "debug" + }, + "cpython-3.14.0b4+debug-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "9b73df95176c383e4af6027b78da060c69892914bfc195107084b21281f09bfd", + "variant": "debug" + }, + "cpython-3.14.0b4+debug-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "2d325c459c761b4bca5e2005aeccc889ef62ee4b0811d9252e22817f3037825e", + "variant": "debug" + }, + 
"cpython-3.14.0b4+debug-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "1c49311aae1ade3afd9d39091897d2b1307aeadfdde87e5099e07b0fdc32bc2f", + "variant": "debug" + }, + "cpython-3.14.0b4+debug-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "ad52ff04ef3fc78430b8b0623a0442088dc4e8c6835fce6957e251676942ebbf", + "variant": "debug" + }, + "cpython-3.14.0b4+debug-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "6865d4830ef7beaa99dd817df0c49bb0d380b9a0c822be6f8ca090f9a568df81", + "variant": "debug" + }, + "cpython-3.14.0b4+debug-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "db9c32e119c58d9f25745599efaa383be06323ca8d8524a6c50b62367b058b93", + "variant": "debug" + }, + "cpython-3.14.0b4+debug-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "39dece02d5b286e7d9ffbbacdd730db0d64b881bb2b2edd3b721be23c4e89609", + "variant": "debug" + }, + "cpython-3.14.0b4+debug-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "90453b5f3d982604a950e5f362b192889f82524257d2fa8bf979b270e8bdb370", + "variant": "debug" + }, + "cpython-3.14.0b4+debug-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "d070ef11038828a1326c230c45782c70f02a6b89504af76cc95f0778db20caac", + "variant": "debug" + }, + "cpython-3.14.0b4+debug-linux-x86_64_v3-gnu": { + "name": 
"cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "baf92ab8fa281f72a8e8b4a1975a931876866b69aebed1eb94dafeaa219f788d", + "variant": "debug" + }, + "cpython-3.14.0b4+debug-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "3a92a638ef08b058eebf806ecb0134aa9467c554512fd2082e6ecd1a6c517fdd", + "variant": "debug" + }, + "cpython-3.14.0b4+debug-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "7144cb9ac62b0084b8421b83e90aab0ed6e704cc5f63ba1c16f8216971d11857", + "variant": "debug" + }, + "cpython-3.14.0b4+debug-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.14.0b4%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "bef1d2f0e3f32667366655e8333ef1f92ab07cd7b988da110f3970a5d671e3a3", + "variant": "debug" + }, + "cpython-3.14.0b3-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "0b948f37363193fcf5e20c2e887183467907f1b6d04420fc5a0c0c7c421e7b12", + "variant": null + }, + "cpython-3.14.0b3-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "47f21cf35481e5ba8e4e6b35c4dd549b0463d0f1dc24134d6e7fcc832a292869", + "variant": null + }, + "cpython-3.14.0b3-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "2935079dd417d8940955f0b083be698ae27a1d65f947614c36ce5e4ea509c812", + "variant": null + }, + "cpython-3.14.0b3-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + 
}, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "b64dfec2a7016ae5fa5340298f46c05df0c93a30021c009fd3db9b97a5cad92b", + "variant": null + }, + "cpython-3.14.0b3-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "7139f66c73f09f8ed3fcd840e08b85dc591fe8df048cfa5c48dc695a68f74149", + "variant": null + }, + "cpython-3.14.0b3-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "5210b912d9dc1e7ee9fc215972c7c254ddaf9d64ad293f42af1a819896a4cbed", + "variant": null + }, + "cpython-3.14.0b3-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "66a8f6825c5e1b289bfd62370b4cc6c9b5212a91b0440dcf5408c4e3bcfcdddd", + "variant": null + }, + "cpython-3.14.0b3-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "2af3a5d27e7fd49b5796a35c1f4a17848d9e5d40c946b9e690d7c27e527d99d8", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "17643efc55b6b68b4fa7b3a5e43abb0ea31b4f03942e2d17bd04c5cd5be52c52", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "1c35d7e5ac357d012d3c265da406e331535bf9fa5e29454b190ac8cc0c57dd40", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, 
+ "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "8d7c283a6f9e18377776968c5d5fcce9ff0a9c833c4f6c64d8f804da743e0e9d", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "75d5b65bae7b39f3e35a30070a7ccef0c773b1976e764c7fb68ba840a3ad0594", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "db25121d9a35f1613e281ead33903a7e6489d0506207451ef49d82eb71d722df", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "31cbe24575231d706937802a8f81536d11dd79f8c9cd7981b8f93b970a8e8481", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "3c98b94dfc77c9d6369c3cdc09e03abc0dad2ead2f40a6b52d1b119bdcb33ab7", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "0742eb6b381fdb6b57983f8a5918dd9e154953f959f2be5a203699e5b1901c1b", + "variant": null + }, + "cpython-3.14.0b3-windows-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "62dc6ff21cbbf2c216f1b9f573ed8e0433c0f7185280a13b2b2f3a81ac862b90", + "variant": null + }, + "cpython-3.14.0b3-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + 
"prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "0fc98664753360e23eaf3aa169657627ca5766141a49e1cfb0397895cbb47826", + "variant": null + }, + "cpython-3.14.0b3-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "5b5ef4c03b4e2aaab389f10b973914780d76bd82eeaeb3c305239a57aba2e367", + "variant": null + }, + "cpython-3.14.0b3+freethreaded-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "d19213021f5fd039d7021ccb41698cc99ca313064d7c1cc9b5ef8f831abb9961", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "26ec6697bbb38c3fa6275e79e110854b2585914ca503c65916478e7ca8d0491b", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-aarch64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "b01cc74173515cc3733f0af62b7d574364c1c68daf3ad748bca47e4328770cde", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", + "sha256": "199ff8d1366007d666840a7320b0a44e6bab0aa0ee1e13e9247d3ec610ed9d45", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", + "sha256": "e62adb4c3c7549bb909556457ac7863b98073bdcf5e6d9ffec52182b0fe32ccd", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": 
"powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "1f093e0c3532e27744e3fb73a8c738355910b6bfa195039e4f73b4f48c1bc4fc", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "73162a5da31cc1e410d456496114f8e5ee7243bc7bbe0e087b1ea50f0fdc6774", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "045017e60f1298111e8ccfec6afbe47abe56f82997258c8754009269a5343736", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "081f0147d8f4479764d6a3819f67275be3306003366eda9ecb9ee844f2f611be", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "3e20f3c4757ca3d3738e2b4ed9bb7ce1b6b868b0f92e1766549b58bdfdf6ad79", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "7b50ca3a919531e6d175308d53efa0ccd3d21438ac735a51c7fdcd74c5316f99", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "6787ae8dfa33268ae3336d9f2ff7107bb9da5714757cab2aed20bf916835888f", + 
"variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "6f16bffec9ad3717498b379b5640956abeb39b830ae390bb650585beac14b974", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "651aef6d3640db60dbb0c28c68d194846053b3d08085427e1c9c76eb13de5e46", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "637097da9317bd1af34a2f3baab76d98fb11aee3fb887dec4e829616d944cdb8", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "f607cd590190311cbe5f85d82d4220eb5b71416486b827e99b93ca1c341f2045", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-windows-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-aarch64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "331816d79cd78eaadba5ae6cdd3a243771199d0ca07057e7a452158dd4a7edcc", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "2e55b7204f391fbe653168e6004daf5ed624d890ab7dd7d5aa7f7234e271ae47", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "8de6235b29396e3b25fc3ade166c49506171ec464cda46987ef9641dd9a44071", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+debug-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "9bc39c6c669aaba047690395bf0955062aa80edb4fd92c59ada03a18a3df1234", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "544dc6759e2d7af367eeb5d3c45116c52c33054a730e120a8ea442e6e8b9d091", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "3e91cd08cefd404d55003ec25347ae9d591e72ee77a00e2a172ce206c34f5ecc", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "b5679a4176431600ce146a6783046bbac84721d99ff91ead0b8eef1538514369", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "9724b0ebf2a8f80c1dd76bcb9880297bb2a95010bc707868145d9a1cfa0857de", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "23ca40a78ad8a61fc820d58b71e5aeb3b5f88ed7e449a04c0515b37041e8e644", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "84129181fc24fd5fd39a8dc83c5eb4dd7c51a9f105bd1b22733dba2d52da9f38", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "bfaaabee0e9cab4a7967be9759140830de1994c8f87e8e05bee5ec7fd6a99b69", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "c14586447c4ef79ab875b7b7b8a13e6d05eaec8627f187067e02f4b026023db6", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "69c477df92e4332382e9a1b3177155b1c2c9e6612366b385409bd17f18c49a70", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "5a9c969834b90307152a8bdcef27a2797288fdfecb92911e0ebc17ec5747ccbf", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "02fad0b21f30b42742468107fe33eb23d307ba2c5670b0baa11e33fc30160fba", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "4e110ee96813a907c7468f5c1317046c5e5ba10e2fe23b2c3d30f1ee6b4bc5c7", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "6015df86031d23765c7f4c8a02e1aa3e3b5e4a5fe9f2747fcdc37d28e3f8a0f5", + "variant": "debug" + }, + "cpython-3.14.0b2-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "af7fb49f2c00905ce822fde97d9437d34a4dd3786575a1daaad1e51a44fb21dd", + "variant": null + }, + "cpython-3.14.0b2-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "dea974e250afcf4c28936e35af5025072295a711d72fa65eed0679170c70c278", + "variant": null + }, + "cpython-3.14.0b2-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "6cbb5d50fa2c34b76b98ce68e38b5ae1fee4712faf4dc7c9db0127bba91269f1", + "variant": null + }, + "cpython-3.14.0b2-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "0ea973d82fcacd8d5c954780264f57a4040927da6b7bd7b7d9eb7a166073cda5", + "variant": null + }, + "cpython-3.14.0b2-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "f77ba3ab0c86d6b77ede290302d938db0e43611c54bc07e5bbf2c13c9df0c53e", + "variant": null + }, + "cpython-3.14.0b2-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "30eef4d101df58ec61444b4d0049b2a9920d60c36dbb891f34add7baac0169ab", + "variant": null + }, + "cpython-3.14.0b2-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "6ea4e6ee0afe34d474b23678ddf59ed941e68eda5f99a354d716b7b72d75f173", + "variant": null + }, + "cpython-3.14.0b2-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "cf1d0b4ec5555edcf56f6dcb88e8a6702a33fb5ea54854645a8227de1e5f4474", + "variant": null + }, + "cpython-3.14.0b2-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "b922eda4390cc5cb737e32ea8d466efeaee2d693a5f2e8a3269ccc2b5710c328", + "variant": null + }, + "cpython-3.14.0b2-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "0de7671e828ae554941f445dbd631dd8bf08c80152b169ffc4c29ca65db299c1", + "variant": null + }, + "cpython-3.14.0b2-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "dd4cbfe0ed2aedf6ff7f0dd2ee23b417b89092477e90dcf3fd4852571bc64e2d", + "variant": null + }, + "cpython-3.14.0b2-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "d9e57cd25e4619ec9f9be43f12e3a884d729b7226bd4c8843845428e445b2c74", + "variant": null + }, + "cpython-3.14.0b2-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "cb82c15f230612b29838b3cc0948ed71d8e622dbbb4875b8951e7d61e2acbb8c", + "variant": null + }, + "cpython-3.14.0b2-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "1f33a310cbd2683bad82ed898bab3df4e46f13210534f785edd37389b306d814", + "variant": null + }, + "cpython-3.14.0b2-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f899450ad15000086a4972ff47344730b4840bcb86f3a10cdfa27de50702dea1", + "variant": null + }, + "cpython-3.14.0b2-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "4fba9d463ea71b13d8130d2079b59f5964bb9f2333d223219ed1a27ba53de048", + "variant": null + }, + "cpython-3.14.0b2-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "04eaebe1eb4241666792ab184f8ad55fd25a2410594c7104e4f034eaa91bbff1", + "variant": null + }, + "cpython-3.14.0b2-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "4d7e811323a839d9ce3a6a38b36422f89bb2c0f136a040055aa6002bade12b79", + "variant": null + }, + "cpython-3.14.0b2+freethreaded-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "1ae31adfed2a8425f08a945869d3bfd910e97acd150465de257d3ae3da37dc7c", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "4e022b8b7a1b2986aa5780fae34b5a89a1ac5ed11bea0c3349e674a6cb7e31c1", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": 
"b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-aarch64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "f5fcf5e8310244ccd346aab2abdc2650ffb900a429cfb732c4884e238cba1782", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", + "sha256": "bfe7516a665eac9dde00184874e9da6b59c7875786b21fb40084da786c6e07b1", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", + "sha256": "9af53a8e77cc1d44d6eceaa861f53d42532bc3c8d1fa4b711a20ea5416def27e", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "c1177510c359494b6a70601d9c810cdfc662f834c1d686abd487eb89d7a577ef", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "cb0f2d86b20f47c70a9c8647b01a35ab7d53cbcbde9ab89ffc8aacafb36cc2e4", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "f38f126b31a55f37829ee581979214a6d2ac8a985ed7915b42c99d52af329d9f", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "61383d43f639533a5105abad376bc497cc94dde8a1ed294f523d534c8cd99a8e", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-linux-x86_64-musl": { + "name": "cpython", + "arch": { + 
"family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "3abf3d3db7f9dc39aab7ac1736095e27a0aa262b7b0c85d86029ae79cf977c7f", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "be07688729954618ddacc1398301767589cd361dd1b3cf5d86e4e11607bf5f4e", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "94b86091807906bfc2b8e7407741db19e73267bd658dad921953a5c21ed38844", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "e4dfa7560730249ae469c03975192ef4a52a484a0a7e12c6bba9a0421726f55c", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "0b22e8a273d62994e8512159202cef2b884a92420955ef6cc889f4fa2364518f", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "f3e1e0c669a963d842d8d2bd4a39128e85cf862c0e289264360fda756cce9106", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": 
"a8e6cf95ff812c9caf07fb51eee1c4a154d60cb4f0ae9ee7d2bac9ec072226be", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "6ede2041eb5ef6bd2d0274ba13a732d75773e44159cd679c73912eefdb64ff87", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+freethreaded-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "35abc125304ec81a7be0d7ac54f515e7addd7dcba912882210d37720eaab1d7e", + "variant": "freethreaded" + }, + "cpython-3.14.0b2+debug-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "7bae32bc84fa6f2db3446e6f17fbbd81a6bd7646dc3265b965dd1feed266cf33", + "variant": "debug" + }, + "cpython-3.14.0b2+debug-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "b96995dc30f0d1aa8a70e95d44a7a9012ccf417b2a3b6d623ce1cfe5525c56d3", + "variant": "debug" + }, + "cpython-3.14.0b2+debug-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "aea6f36f26da7585b99d4ac03359e3c216bb653abf81c56de78f89dacb85a8fd", + "variant": "debug" + }, + "cpython-3.14.0b2+debug-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "93688398c2fbc76c8e3866bde6fd0d1bcf5cd70de3c45e257f644d081ac62af4", + "variant": "debug" + }, + "cpython-3.14.0b2+debug-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-riscv64-unknown-linux-gnu-debug-full.tar.zst", + 
"sha256": "618da441de84f996da3a19a15a46af47ec8230d89d324dde336dbf11ef6cc39e", + "variant": "debug" + }, + "cpython-3.14.0b2+debug-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "97bee0620e7fcf81b412cc8cd9347eb03ae3d811f9a42352a4f46cfa5a793969", + "variant": "debug" + }, + "cpython-3.14.0b2+debug-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "6b4fff14c05d4523da9146946e4114545b0e1d5b90590683a54e6dbb80da3e4d", + "variant": "debug" + }, + "cpython-3.14.0b2+debug-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "97ba08f74fbd95530b8f21801dedacf31afd84275d6d8b517b82f8d4ecba6fbb", + "variant": "debug" + }, + "cpython-3.14.0b2+debug-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "1b275b7421602d7acb3071e455ae9359049c8b06c0b0041cb742004551829475", + "variant": "debug" + }, + "cpython-3.14.0b2+debug-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "82e2f2161f87db164b5ce083452fef3eec5d5dc3d9a66512656858f754ad4461", + "variant": "debug" + }, + "cpython-3.14.0b2+debug-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "504148dcec0c7dff501ac70a9a9716e17d06e284012b170072f7dc4eaa156324", + "variant": "debug" + }, + "cpython-3.14.0b2+debug-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": 
"cbd83ce9bd3a64061a3c16f74e40786071c7612f01996a0b4e4133fc278e554e", + "variant": "debug" + }, + "cpython-3.14.0b2+debug-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "25cdd993de5994d0997db82f032411e30de7dde3aface32d1dd1880b28c4fe26", + "variant": "debug" + }, + "cpython-3.14.0b2+debug-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.14.0b2%2B20250612-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "b79f712b646a39c1af9e91a45962f768542a7830be00f674bd0955867696c1f0", + "variant": "debug" + }, + "cpython-3.14.0b1-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "1d118780ec1b610ca7520379f5b6c68314f0672145348e06bb29668fe2967657", + "variant": null + }, + "cpython-3.14.0b1-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "bedcafacb966f0259b32944388039ad906e649588f91c9fa8892c8c58eb10ed8", + "variant": null + }, + "cpython-3.14.0b1-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "a0ddc9b714fa7236869190308852d0ce3a9292b6dcfa14544cceef6b2e9a478a", + "variant": null + }, + "cpython-3.14.0b1-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "8acfd7f51db4c1bbd4b885c693520b2ed82429d00e8b5715d22b7dd0a436bf00", + "variant": null + }, + "cpython-3.14.0b1-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": 
"220b888323546fa466514c455b9cd4e136138a29d24991f6441fc6f3c66a702e", + "variant": null + }, + "cpython-3.14.0b1-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "b865a3e7e86de1c05a20e38d683f58fd6720f6fd7921be6f66a28faab90afe23", + "variant": null + }, + "cpython-3.14.0b1-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "43789b114c6bf708822040ad74ee9fefdd6ac8acc52c9d14b4d6a306f9fc2fce", + "variant": null + }, + "cpython-3.14.0b1-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "9961f062bd6d2045ed84510a940f65ecce4ebc416d87ebe3c9865fe73648db77", + "variant": null + }, + "cpython-3.14.0b1-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "7239666ecd4c205dbbb9f2defa52f91cf2a59d2e1cd5a041b5e9e0c8c07af1a0", + "variant": null + }, + "cpython-3.14.0b1-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "4bb35000a3a99b05f1ab290ea1c360e5e0530a0bcd3d9aed0f1635da0d97762f", + "variant": null + }, + "cpython-3.14.0b1-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "cbd0cd0209a3514fa7ba80be8ba72e88571562eee9331f6a4bf803e70f59b9eb", + "variant": null + }, + "cpython-3.14.0b1-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": 
"814e5a91c1fe706da55f0ae8a447ea9e5a22f197be4064105bcfedf1960e2cda", + "variant": null + }, + "cpython-3.14.0b1-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "370c2188baf500e810bde23b2c3ba45c60cb4f53b0381816ef9443b96742e925", + "variant": null + }, + "cpython-3.14.0b1-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "c1ab6f3fc5e413d2c7496c7844856fc0f4cdebe7b24eb34769d399ab4f830e44", + "variant": null + }, + "cpython-3.14.0b1-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "48e01a7d254226fd5d7aeec633f9b4a9e8805a16996eebe370139967fe69c09e", + "variant": null + }, + "cpython-3.14.0b1-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "1d102b29e869d3ec8a01afc02f132a34f378c777ca2db00bad95ad4571b9ac82", + "variant": null + }, + "cpython-3.14.0b1-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "6cb3871821917e9d0f2faf1d9e6eb798d487d6bcb353035dbf3c282d263634f1", + "variant": null + }, + "cpython-3.14.0b1-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "ad78cd205c5b71bd8790e4eee5a8775b5cbf5b6f9593b5e4f09dc0ed0591b216", + "variant": null + }, + "cpython-3.14.0b1+freethreaded-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": 
"3c70e6c641041fac01e8aa6f42c3d261f9b5038edebbed92f2bc63c738fabf80", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "50c0d4b010ad52915e642533969ba6d9191cefd62a8c194cd4e741909e1888af", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-aarch64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "50dae9b70d85dc2222a12907c0bc1fd5d387dce72da51a3a2f0e00d2e8ae2fe0", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", + "sha256": "62ea0a1210e33b4dd64cbe6f3fd2e667e5aa6915594a7dcc08866813f38ad263", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", + "sha256": "5fa4c36b14ad504a76617f9e7e3e07d244a19a9605a08bd9ebcb074f2b6d419e", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "93b89f74bc1b0a5c9bbb9f7a5a9e845b77b296a3fca1fdf652844354068b5510", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "6f26da3dd497793e3d32559ffd4e3e7d7fa25dc923e1d490f022a41179169dd2", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "44ed2d3bc3a67c88a5d13fb855252952659186fbe1f7c489e241971f9b5cbdac", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "72fce8adbbd9e1a3cae96fa11c908930d039780f148562c8a0a169cf0950254e", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "f107d98f8d9390ccad38a94914ff1886a9feb1dee147596b24c196761484d6e9", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "c3555140cab901f4f1fccc61162ebe53a9b43298230b579f2ecf8080f19f043a", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "ade8e50da56ee8d492db40059dcc99cf2587f4742e1ddc47e8fe77c9d15789c7", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "505a9fd0e88ea1c11ee383661bae3e8b4a87973072ec7b068f8605619d095568", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "774074106198623f7ce18dc7419a7ac2cf03caab4b4a49acebb83a697a768a50", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + 
"family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "ff3d37f009adeaf1214a03c34db0f97418c6cbe95d2e84c8795a33ef64d71ebe", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "389a789233e9a1911bc77d4bc3a11d99481ad56953d0e33f9301c8451c197b77", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "ab67b1623761b8da2a7c25f38ebab71f9dca5ed19868bb97ca345d2799e49970", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+freethreaded-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "8c478ba0bf2081f9abe4dc422ac9ab02319ef7b476ce9fe98999a7e89bf478c2", + "variant": "freethreaded" + }, + "cpython-3.14.0b1+debug-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "67d969d97d9a6cefc780fc898ebf1eaf90b4652512ebb60de14cb4290f48bd83", + "variant": "debug" + }, + "cpython-3.14.0b1+debug-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "355b9b829aa0736acdb48145235025ece06406b932647ad5af7ef0db781e0acf", + "variant": "debug" + }, + "cpython-3.14.0b1+debug-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "125e2b5a470157d9bbdb782bca691a922c490e01fae29859adac9ab861a70ff3", + "variant": "debug" + }, + 
"cpython-3.14.0b1+debug-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "16cd16598fcdd61c66107ba468e3e57e4ac4dbdd578d418c532631475251ee09", + "variant": "debug" + }, + "cpython-3.14.0b1+debug-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "326b87cba49d447764d6235120025b28b4672331dcb89858944c86e8018d968b", + "variant": "debug" + }, + "cpython-3.14.0b1+debug-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "8bd0aad1f68a465f86bbd2167dd8beba5714726f7c84c9104f1c4f52f5258355", + "variant": "debug" + }, + "cpython-3.14.0b1+debug-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "01dcd2188bd60d4f77d3289d5532f0ba027d8ebebe972147e93f5a34ebecb038", + "variant": "debug" + }, + "cpython-3.14.0b1+debug-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "a1eff13c5e1c068326220878b254572d0d7a2bbde064a5037ac9ec296f5fe36b", + "variant": "debug" + }, + "cpython-3.14.0b1+debug-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "3bb1a5e372e866ae7d54b2d47ffe0ff25a2f1971de2682205bdd208b2a6b22c5", + "variant": "debug" + }, + "cpython-3.14.0b1+debug-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "1fe182c5091519b55e2d6d50925f6ef707036204d1fe83a25dbac7106479488b", + "variant": "debug" + }, + "cpython-3.14.0b1+debug-linux-x86_64_v3-gnu": { + "name": 
"cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "93b6648fb818004b97399ad7bbca16a22f3b03aff13ce964bbc9e55db4f7f6c7", + "variant": "debug" + }, + "cpython-3.14.0b1+debug-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "4feff760f2a5f4e289bc0416996d224034ef877f142d5062a9fef0bf41848ad5", + "variant": "debug" + }, + "cpython-3.14.0b1+debug-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "1b46954388e4a198b931c015ec5ef323eeed893f9857ba07465939c639a4cde5", + "variant": "debug" + }, + "cpython-3.14.0b1+debug-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250604/cpython-3.14.0b1%2B20250604-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "e375062a77f341f34fe57e9109bd22108b1a7abf0fa5d6069a781e266213b0e9", + "variant": "debug" + }, "cpython-3.14.0a7-darwin-aarch64-none": { "name": "cpython", "arch": { @@ -11,8 +3275,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "994e6578a141a2532a08fd52caa0a81c8b467dd16fe6aa56ca65340e9a782a14", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "c0a9a1f6357ebaa1dac4c95b030e28e4661c7290712ffe45283b012548cfc7e8", "variant": null }, "cpython-3.14.0a7-darwin-x86_64-none": { @@ -27,8 +3291,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "d0a96579e6a16fbef927ab792e058124bf6470193ad68bf1d5ae1c713b4ecb5e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "40723e37765edfc2f674b7c9d32eba49cf92d76ed23f1179c98df467f1ed9f87", "variant": null }, "cpython-3.14.0a7-linux-aarch64-gnu": { @@ -43,8 +3307,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": 
"431cfe70c2cd7c8db86ffd5bd72942bb0dc68a5fbb739c34583c87a84791709b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "b43063632403f8f788e6257efa98e94fb322e44aea3937de61900f4e912809bf", "variant": null }, "cpython-3.14.0a7-linux-armv7-gnueabi": { @@ -59,8 +3323,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "01f8be027cba057dc5f28d4951be29bb0303004d5156652fb6cdbc57bcf91548", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "d22dc5493701a23c8e743be0a10379c9a49055e32f6c6a11bdb9e1ab0e36214e", "variant": null }, "cpython-3.14.0a7-linux-armv7-gnueabihf": { @@ -75,8 +3339,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "78c1433ef65374cf78269cac437f014eb50418c9944c5d5529f0b32d773c5fd2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "41b40a7c66a23aa8219a89d3d0afb8fad7cd1a855d572d580a8d8b3bd83ba602", "variant": null }, "cpython-3.14.0a7-linux-powerpc64le-gnu": { @@ -91,8 +3355,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "b575c16b29bfe268fbaca36e1b8de69574aa6f03c26263a3298549db0104fa31", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "5aa1f4072605f390398d603f72c68d12d789babfeae623cf954e91ba38bab618", "variant": null }, "cpython-3.14.0a7-linux-riscv64-gnu": { @@ -107,8 +3371,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "40afe32356426676a5ed87392e3ed62c34ca7edef3f280c247d66a14ba2d2be8", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f77976f484fd95a2561e2eb22aa2c59774ebbf298a41189c49c768651cf50807", "variant": null }, "cpython-3.14.0a7-linux-s390x-gnu": { @@ -123,8 +3387,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "bdd05314591298e0b790c1335e9bfa92c381e9dfc1407991c6b7e32f9a59b0c1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c065aa26639f8f5c29a64231140e4bcdcc41beae54ff7323e6a19625f871bb88", "variant": null }, 
"cpython-3.14.0a7-linux-x86_64-gnu": { @@ -139,8 +3403,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "0f58dc070b204b30154bad355e7230fcb666d0dca46065aecb265f7a12060099", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "b9ab2566b1da8bc048e92332c33439cd57b251bd3bef19f3f7408cb9297b2b06", "variant": null }, "cpython-3.14.0a7-linux-x86_64-musl": { @@ -155,8 +3419,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "e11937873931763c5aa1cb817c61ccf7df1b550e3f5152a6f640b486317af9f2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "ccd1c60189509639f1b3fed98d2e67bd9f3154105fb9e9015df0521e964aeb97", "variant": null }, "cpython-3.14.0a7-linux-x86_64_v2-gnu": { @@ -171,8 +3435,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "f0aceae1049c5822a5bb50b6065518528dc02b66f23f16e63edc9b16f2fc8c4e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f74649ff49d6a7757172dd7cd9848a5a0f5c45355ba460ae516d4cdbfdee6ace", "variant": null }, "cpython-3.14.0a7-linux-x86_64_v2-musl": { @@ -187,8 +3451,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "0511f00c255de20d208ea36a1c5c3d24a0f9c3e2b0aade6d67f2c63a67e92960", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "474e11660a70aeb2c46e16b2c908f5f1b3d8d595706c6de7baf32311c8bfd2c7", "variant": null }, "cpython-3.14.0a7-linux-x86_64_v3-gnu": { @@ -203,8 +3467,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "9e0276ea35be901e8cccf4cb93a9a81bcd327b9fef33367a4d5dcc2da77f34c7", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "2632aafa675c4d6ee909813f607124359b023e9953e5af0d7d8dcc7ba59cb38c", "variant": null }, "cpython-3.14.0a7-linux-x86_64_v3-musl": { @@ -219,8 +3483,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": 
"acdf795c7eeb12773c12e1010afc428b1a52a84b4498d689d60d549e7efe8e5d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "7fcd54eae0c46988e9a27a6ca320a944b029137fdcc7527d8f5b70ee731369d7", "variant": null }, "cpython-3.14.0a7-linux-x86_64_v4-gnu": { @@ -235,8 +3499,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "302fec41bc8399d587795b3736171edca3b1e28c31ba22bf66463e62b7811775", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "5dee18dd1c196d29835d96f6e73ace234f92484b1be90a17724e40b7e62e5688", "variant": null }, "cpython-3.14.0a7-linux-x86_64_v4-musl": { @@ -251,8 +3515,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "b559ee7edfacac7e7a8a50fa2c09803f7ec5efdefee11f0324edf33fdc429e55", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "1973f72623f2bdae0644669acb93c486440301e3abe195442a5e4ab842772717", "variant": null }, "cpython-3.14.0a7-windows-i686-none": { @@ -267,8 +3531,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "5ca21c4697f12e126fa161d234aa7960b2cb6b39c6512a3bb573d8bfc4488acb", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "40b868b3c6016848bee1f68bc9dc5ac3b9c752a7e7dba5bc0820cef30bf8e68a", "variant": null }, "cpython-3.14.0a7-windows-x86_64-none": { @@ -283,8 +3547,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "311582db816f4afa8e893df11a47063449fb93f6d0e943d479a9a561f6699587", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "afd79938fd5ad5e53d10999ec0cb5961cce901c08684ce053f9d7c4753fac93e", "variant": null }, "cpython-3.14.0a7+freethreaded-darwin-aarch64-none": { @@ -299,8 +3563,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "51d386a579b1cc5b8823f031d217104e242cfdf59305c73a15178fdf81b57508", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "beec4a58cbce817991873ab9c5183736a6b183836594591c2dde6ef8202ba7f9", "variant": "freethreaded" }, 
"cpython-3.14.0a7+freethreaded-darwin-x86_64-none": { @@ -315,8 +3579,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "2a481eeef0677fda69e9a50ed4b1e93e798f45d7ee4b3609b55c28d58346a621", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "a57a76bdad4dbb36f4108525de5f99ed9af79a90e755357b8c5b83b172e7329b", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-linux-aarch64-gnu": { @@ -331,8 +3595,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-aarch64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "732e654d0f7d88139c822c68cf2d61e8fa018d0a1880c90eb3e717aa09fd38b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-aarch64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "e7892d12d5f0adea5c69dbe88e3fbf2da8928e1703e376dab55c8ac63889ad8c", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-linux-armv7-gnueabi": { @@ -347,8 +3611,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", - "sha256": "1df454b382866a8ed32c20b8fd5c4cedf9d08a1d578370596770a0568c60f09e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", + "sha256": "aa4d583594d276872a11b048aee08a32fd54d19eecac0853e37ba61d080b9847", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-linux-armv7-gnueabihf": { @@ -363,8 +3627,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", - "sha256": "705b0a76de9b28028fd05ab77048d2a9a8c8f602d4cdc2e9eeab3cb612ce7075", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", + "sha256": "257eabb6ca5af8701039594547f2a0eb2d7da4465d093b7ac7348281c220638d", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-linux-powerpc64le-gnu": { @@ -379,8 +3643,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "43fd4d1dad4c936c84a68a0600bc3f09f9dae785ba1c5a691a88c767046ba23f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "33fdd706f6970ad753c103e9460d7f15bcdb210c2aa474d9cea5eba056cceac9", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-linux-riscv64-gnu": { @@ -395,8 +3659,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "9a0c60c6458489625cf810d95f08481d80cde16e7521f20b6aa4aac09845c83c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "3b9afccb8fe37433beb2f4bfb34a4abc891809902260a1888ebe5e508a408e6f", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-linux-s390x-gnu": { @@ -411,8 +3675,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "a441d84fc15976d696465e8a571a84e9b31793500cffc546a32eced8bcb820f0", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "86fa5bbbfa14abad66e4bd2d5aacf3232a20aa04511ace4c232ff6c94fab22a3", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-linux-x86_64-gnu": { @@ -427,8 +3691,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "cf2ec648f120e39c3d98672de512457c8c3a79d30e0bf62c965eaa50f49510bc", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "c786b1b66a4397df8110c338cdf6150c8dc9b007db9dcd9976b555bf6f2f7433", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-linux-x86_64-musl": { @@ -443,8 +3707,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "ffebe43cc4e0d3c6b0d82fb5850c0e8244c3b4435cef70e1445ce948c6806515", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "777f4f9ef674271f5cf71798b39a4643c780060b5963bbfc5b10087a93a8309c", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-linux-x86_64_v2-gnu": { @@ -459,8 +3723,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "36fde0973e5404c6a9ab532d4403af22913c3b34ad4f37326f0cf7bbfe5ceef9", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "c3c06888d6cddcbd1800669c65f34e9b3c397be046728863baaaa797cc914764", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-linux-x86_64_v2-musl": { @@ -475,8 +3739,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": 
"14b652b2b2c6e3eb822eab77668f766a469226871e50761084da35e5d4a63187", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "8fa63c8d8c093956e2804075a5c36436a838d81400699018eb662cc5ee10ff9a", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-linux-x86_64_v3-gnu": { @@ -491,8 +3755,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "522af25d5bac66aee434d8727d962409965a1cdee8fd55be0bee7be43237cb8f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "91d6e06ee7b2801e7047708c32bd1ce17cbc03bb9bf0b614e6406649b4f6d2ec", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-linux-x86_64_v3-musl": { @@ -507,8 +3771,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "0daa63bb24806309edd8f271652e0dfe87eaa556b59ae932472d9d3b7f2c3e83", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "40aee5a2d415160533dfffd83affe585d9e804ab1ab281bd2810b735b12bf3d5", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-linux-x86_64_v4-gnu": { @@ -523,8 +3787,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "2510b3cf65ea3d719cb635d0662d938073c8cac485f92603152b48d48cf83136", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "1d4e62eb975557e45278419f2e1f09ba9c085d2a60f336b16a885f435c90f7f3", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-linux-x86_64_v4-musl": { @@ -539,8 +3803,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "d6efaf05af5bec73398f6d56d352b54f263c111aaa7fe6d8aa965619c6ca3577", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "37e0dca4ff55277aeb6265e995d47f2561942c47040f081545fe607b896be257", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-windows-i686-none": { @@ -555,8 +3819,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", - "sha256": "fb6cc4df84dfae1f6b95c5d630942311c43c08e2e622adc0bf537bee8e4ef77d", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "d79f3e425308cbf80f7a1951788c67d599faf2e23a58208e9a2b58a4e63a0d08", "variant": "freethreaded" }, "cpython-3.14.0a7+freethreaded-windows-x86_64-none": { @@ -571,8 +3835,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", - "sha256": "c56984e4919641a2940a20eca59c009db4f0b064d85911a54414e3097f2a855a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "2298ddc7a698d81d1711c0f91ffe45fc09b4bef2c59484bc65b1781378bc2886", "variant": "freethreaded" }, "cpython-3.14.0a7+debug-linux-aarch64-gnu": { @@ -587,8 +3851,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "6e0314f7dd1a90afe023816284684bcb98c45124fb1eb74ff6b61ac98f4f7f27", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "08b23869e0073c3cc58f6dc2eddb41e17ebdc7825dec91ec404dd6046dcf4b2a", "variant": "debug" }, "cpython-3.14.0a7+debug-linux-armv7-gnueabi": { @@ -603,8 +3867,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "1aed3ac0310bd355687dfbb492552ba8a56f903c7fe34370956e1579b5b8327d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "c30b6f4921c7d1ea5544f508473008cfb6f3d4505691d48e7a4d39f5f5559a48", "variant": "debug" }, "cpython-3.14.0a7+debug-linux-armv7-gnueabihf": { @@ -619,8 +3883,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "4b92533ef23ac48372f4bf2c702f32d8ee0991b30f6292944f6639d958a6b26e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "8eb7ff2720311781d9a37453a99c07e22c0aac8039d99cb246ad3a8cc85b15fc", "variant": "debug" }, "cpython-3.14.0a7+debug-linux-powerpc64le-gnu": { @@ -635,8 +3899,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "2c105e22401e12a7e36cf9f96a6c132437435bed58eb23f53a56a435249984d7", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "2964ba15edc67f5315f2210a4d7d4ab49183c0eb5a5ef9f151bb93ad9da6b01d", "variant": "debug" }, "cpython-3.14.0a7+debug-linux-riscv64-gnu": { @@ -651,8 +3915,8 @@ "minor": 14, "patch": 0, 
"prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "c4135fef50c4ddc02ef5c33a97b2e544457a6e2499b62463474033c2c159afba", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "9c06c2989c5213d00e4ba3125d190a579af5fb4ee2dcc248553110f21469e41d", "variant": "debug" }, "cpython-3.14.0a7+debug-linux-s390x-gnu": { @@ -667,8 +3931,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "cb8af9562df510a81a1903fc68a8fcffad44037804f87214b0039469eb69cf18", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "cbd62a74802ed9a5d2d230f45030b036feb3e1330af444fe5b2de3cac574b774", "variant": "debug" }, "cpython-3.14.0a7+debug-linux-x86_64-gnu": { @@ -683,8 +3947,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "cbdd584283f5eb5e07e349973521b71c9003c979c1928682bf9a93e5139127f4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "3efec407a79d3fec8198369017c36c86e947149e77c37d5f68adb52400548cf8", "variant": "debug" }, "cpython-3.14.0a7+debug-linux-x86_64-musl": { @@ -699,8 +3963,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "9679585887e8b6c70af08362622528ecd3be05abc097aeeafc8d5d9d58b3b81c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "1db695dd97e668a87553f99de20ca610a77642575e4eb6d77be771149b66e563", "variant": "debug" }, "cpython-3.14.0a7+debug-linux-x86_64_v2-gnu": { @@ -715,8 +3979,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "5cb8dec58ec9234a0a8a6d58268f107db0348d9afa209d2a2c03f7149ebf8396", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "41d0fe18a59ab9bd9c0c1da645034b59017a6bdd010dd11588d43802529ec886", "variant": "debug" }, "cpython-3.14.0a7+debug-linux-x86_64_v2-musl": { @@ -731,8 +3995,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "891cb125475a5dfb8c3dff6e64153a35364160d304bcb8f9a86011c0b623ee47", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "7b50de24eee33e13b019449a2d8a13a37ee6ee657844698e74ddb26d951b8868", "variant": "debug" }, "cpython-3.14.0a7+debug-linux-x86_64_v3-gnu": { @@ -747,8 +4011,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "1b72112fb722b3d062971aaf90e94d0a05e764d0bbc85fb76f803fea747667fe", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "2ee68e03497168a80cb8dd769cb2fea31a6fc031610c254cc586d4ab91a6e8a3", "variant": "debug" }, "cpython-3.14.0a7+debug-linux-x86_64_v3-musl": { @@ -763,8 +4027,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "1c2102849391faf93f65b529699438d603a7f1ae56944b9197b9727945b3c7fb", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "446b67d3b5a03a421f0e396a1d78cfba45b794e5b46d6507b765a94fac04a212", "variant": "debug" }, "cpython-3.14.0a7+debug-linux-x86_64_v4-gnu": { @@ -779,8 +4043,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "0967fcb6c5124318dc6e672aec7e93153c2dd61caa8fad016ed4fa75ef5d10cd", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "f331755c9182b5fd78958513a4c9a148e6a8a65189a68f6d7be63a251aec7118", "variant": "debug" }, "cpython-3.14.0a7+debug-linux-x86_64_v4-musl": { @@ -795,8 +4059,8 @@ "minor": 14, "patch": 0, "prerelease": "a7", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.14.0a7%2B20250517-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": "6bbb06cf928e7b5da9c4ca8f38ffac5930ad7234f910c06d4272764f28ff0360", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.14.0a7%2B20250529-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "420e3b318e83fe6b4cfc4791b36e86976d2832a5c8e05f37fd89f1c2ed7ef453", "variant": "debug" }, "cpython-3.14.0a6-darwin-aarch64-none": { @@ -3119,6 +6383,1638 @@ "sha256": "a72cb0116a6511c20de6350a1fc664b96f8f5af4bdc10abdaa7208526a729fad", "variant": "debug" }, + "cpython-3.13.5-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "08d840adc7dd1724bd7c25141a0207f8343808749fa67e608d8007b46429c196", + "variant": null + }, + "cpython-3.13.5-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": 
null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "5277dc381e94abde80989841f3015df2aba33894893c4a31d63400887bdefd2d", + "variant": null + }, + "cpython-3.13.5-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "82d8a025b43c9127d47490a7070aa5d8bfede2d1deb5161c0f4c2355396f9e5d", + "variant": null + }, + "cpython-3.13.5-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "6aa50bf3245364091a7e5ca6b88166f960c2268586c33e295069645815f16195", + "variant": null + }, + "cpython-3.13.5-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "5f776b18951b9a0507e64e890796113a16b18adb93a01d4f84c922e2564dab43", + "variant": null + }, + "cpython-3.13.5-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "b74b79e5a65c84ed732071fd7b445a51b86c03ef18643b87c0fe5c96242e629b", + "variant": null + }, + "cpython-3.13.5-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "652416183693219b1f0f1f2a8d2a595f75f8c94e8c7b8b25ecd312ec1fdbb36e", + "variant": null + }, + "cpython-3.13.5-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "29a7140db0cbd1426f450cd419a8b5892a4a72d7ef74c1760940dd656f8eaded", + "variant": null + }, + "cpython-3.13.5-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + 
"prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "e42827755c227d3ea31b0c887230db1cd411e8bddf84f16341a989de2d352c51", + "variant": null + }, + "cpython-3.13.5-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "a652ff101318b7bd7a06181df679e2e76d592ebe70dbc4ca5db97b572889d93f", + "variant": null + }, + "cpython-3.13.5-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "dd945e6178236e2eee27b9de8e6d0b2ef9c6f905185a177676d608e42d81bebb", + "variant": null + }, + "cpython-3.13.5-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "af86120b3c3c48afdd512a798c1df2e01e7404875d5b54fc7bbde23f8b004265", + "variant": null + }, + "cpython-3.13.5-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c13783eae63223bced84ec976be9ad87d5b2ab3d9ba80c4f678520a4763410ba", + "variant": null + }, + "cpython-3.13.5-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "5e7433fd471a8d2a5dfa9b062b3c1af108eef5958e74d123de963c5d018b3086", + "variant": null + }, + "cpython-3.13.5-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "14a4301952bf11ddf023e27ff5810963bf5a165946009f72c18bdd53f22450c0", + "variant": null + }, + "cpython-3.13.5-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "583b793e600a9d55b941092de2f4f7426acaac7e7430ed9a36586f7a1754a8ea", + "variant": null + }, + "cpython-3.13.5-windows-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "0e95119f5d018ec18bcf9ee57c91e13c9ffda2a5da5fa14f578498f8ec6e4ac0", + "variant": null + }, + "cpython-3.13.5-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "a877e912a7fc298e2b8ee349ed86bee00ac551232faebf258b790e334208f9d2", + "variant": null + }, + "cpython-3.13.5-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "bf9d014f24aa15f2ae37814e748773e395cbec111e368a91cdbcb4372bdff7c5", + "variant": null + }, + "cpython-3.13.5+freethreaded-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "61862be1c897fff1d5ec772be045d1af44846ffd4a6186247cc11e5e9ae3d247", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "a51777a7a3d4b4860dd761dbcce85a8e9589031293a2f91f4a6a3679c3d0f5a8", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "e907a33d468de5f3936e73a0e6281a40307207acf62d59a34a1ef5a703816810", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", + "sha256": "fa495608f0bb7debc53a5d7e9bd10a328e7f087bba5b14203512902ead9e6142", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", + "sha256": "5316526a325b72a7e6a75f5c0ba8f2f4d1cbab8c8f0516f76055f7a178666f21", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "23770a0b9e176b8ca1bbbecd86029d4c9961fa8b88d0b0d584b14f0ad7a5dccc", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "0f111d4619843451a0edd13e145fc3b1ea44aecf8d7a92184dcd4a9ed0a063c4", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "0a6df4acd93d29b0d94aa92fa46482f10bbcfe1b1e608e26909f608691c7f512", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "2c49314909be249c90071a54168f80d4cbf27ecbec7d464f8743d84427c5b7b1", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "e27a15c987d616763619413b2d7122d1f4ba66a66c564c2ab4a22fb1f95c826d", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": 
"linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "6882afc2e308561b8c1a23187c0439116434aae8573fd6e6dbdce60e3af79db5", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "a8ef0d7a50a2616b2a1f8a5d7a3b52fa69085e6a75a6f7d3f318f7c132abfe16", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "ab2e44c83245d18226f1fce26b09218de866048ecb515b50b8174ba75c182b4e", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "bad372bd5e38ff42064907b95273736137485ffdc6ff1d90b2e49f8df2829abb", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "d12f4ecb61ae7ced3723173aa0a5ddaea395e098bfede57497426c65b5776b82", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "734233279cbab1f882f6e6b7d1a403695379aaba7473ba865b9741b860833076", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-windows-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "51d116a7f93654d602d7e503e3c7132ae4f10e5a8e8fbe7e2ceb9e550f11051a", + "variant": "freethreaded" + }, + 
"cpython-3.13.5+freethreaded-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "d4461149a95fd6d9c97d01afb42561c4b687d08526c84e8ff9658d26514450eb", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "eb704f14608176fc8d8d8d08ca5b7e7de14c982b12cd447727bf79b1d2b72ac7", + "variant": "freethreaded" + }, + "cpython-3.13.5+debug-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "defdf6ddc233f8e97cc26afaa341651791c6085a59e02a1ab14cf8a981cdc7bf", + "variant": "debug" + }, + "cpython-3.13.5+debug-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "69308c195ebc63543efa8f09fabb4a6fa2fc575019bd1afbc36c66858d2122c4", + "variant": "debug" + }, + "cpython-3.13.5+debug-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "ad3c911764e60a94c073c57361dc44ed1e04885652cabb1d1f3a1d11d466650d", + "variant": "debug" + }, + "cpython-3.13.5+debug-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "bd91893c42edc3b23ee45df6fff77250dab8f94646bbdf2087c0a209231f210d", + "variant": "debug" + }, + "cpython-3.13.5+debug-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "7f3e649685358af0d78c8d7dcc4d357d5674e24aeaecbcc309ce83d5694821ce", + "variant": "debug" + }, + "cpython-3.13.5+debug-linux-s390x-gnu": { 
+ "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "fc013b0375c357286bf6886c0160c9a7fca774869c8a5896114ac1bf338f0b2e", + "variant": "debug" + }, + "cpython-3.13.5+debug-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "3502c7c36500fa1a84096f0e9c04dc036f3dbbae117d6b86d05b0a71a65e53cb", + "variant": "debug" + }, + "cpython-3.13.5+debug-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "b42647c29dca10e55ceeaa10b6425f4ff851721376b4b9de82ce10c21da2b5f2", + "variant": "debug" + }, + "cpython-3.13.5+debug-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "5dee021b1e82ddeacae72fdee5ba6d2727faf1b39b8d4b9361a7961e5321c347", + "variant": "debug" + }, + "cpython-3.13.5+debug-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "623e2fedb44f5c8c123371a9e82771792d1a64ea11cb963259947679c1bb7027", + "variant": "debug" + }, + "cpython-3.13.5+debug-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "f24df9f31d052c4e9cabec7a897d78ceccf9fb90a6edaa6f4f128e49d5f27162", + "variant": "debug" + }, + "cpython-3.13.5+debug-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "2821ef432b962ab4968e339f8d55a790eb64e266ccba674837589d58fb40f0d0", + "variant": "debug" + }, + "cpython-3.13.5+debug-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": 
"gnu", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "8f9f953c202e0f6b5f7e7abff2b34beaff7a627d1f7ff8cdfe4d29f4fc12f067", + "variant": "debug" + }, + "cpython-3.13.5+debug-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.13.5%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "5c0740e8df7d69b4e2ead4f11db97e3d884e77377d84cbf6fba58077043388fb", + "variant": "debug" + }, + "cpython-3.13.4-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "59183bd70b0facb184fc5e7bd18488595d90ac5eb010ac1ee63eea7db4e230a1", + "variant": null + }, + "cpython-3.13.4-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "b219b93c37ffefe46495feff6d996048706832307de454efa60a09b392f887db", + "variant": null + }, + "cpython-3.13.4-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "fef6c8c96260eff9258aeb38bbfc90a9d662e683f8fe6946a941c954053bdb71", + "variant": null + }, + "cpython-3.13.4-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "8cd822d146401242df2c056f4cff5febc116b7f36ce22dc798c8f3d2a2fd222c", + "variant": null + }, + "cpython-3.13.4-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "1ad613b25e5a695eb820ddd430114d6b52c5f4591ad9ef94f909528b7bdcc68f", + "variant": null + }, + "cpython-3.13.4-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "cb02c98ba48d7bd1d70176ed615e8aa026c913f4db6fda5077f42547b0e0bd75", + "variant": null + }, + "cpython-3.13.4-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "608d17e3bd00af8d3a9c85b7d1938348181471575774ea2d8ee866733a36ca76", + "variant": null + }, + "cpython-3.13.4-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "97b632b31628123a65b89543091cf26c48b8f6785baca41f76dcae08e6821e63", + "variant": null + }, + "cpython-3.13.4-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "ffd0b484e40e4fffdfcac265560e109456f802485f1f27e3cd314763b2b1587c", + "variant": null + }, + "cpython-3.13.4-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "8d60e688b5dc3b5a2676fde68546316af10cbd31b0632867b385dbbd7353c3bf", + "variant": null + }, + "cpython-3.13.4-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "de338569a53d35b75503c52c4e74a91d246475eebb33681be665880bfd61ce3d", + "variant": null + }, + "cpython-3.13.4-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "26491581911355ac39fa8322f0a55e56a225be80a913786c3c30ad076445a083", + "variant": null + }, + "cpython-3.13.4-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "0a6eaa2778f6b2bedd6e3c3d763c2f379f0ea99f87ae2fbf01483ab9b2e17925", + "variant": null + }, + "cpython-3.13.4-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "df24b661532df4d6c3c0de66a7f25d317d494a658f95d0aa1c6755dc4e536f49", + "variant": null + }, + "cpython-3.13.4-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "7fb2e42a9922cee1533ceba19590a779970333f068b519bb94df33e8be7917ad", + "variant": null + }, + "cpython-3.13.4-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "01b7b4ec668da9ea13f1b2583b357c7cd709ac124b45e29b76fd0a8e54c4984f", + "variant": null + }, + "cpython-3.13.4-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "d5100077b9753db01714e2ff1e95c628df023bb2da35adc9b9116eb56e45f27f", + "variant": null + }, + "cpython-3.13.4-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "1e6348c237d4665c5327683030594b4f93405ae19ad7dabfb1951d12bb266944", + "variant": null + }, + "cpython-3.13.4+freethreaded-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "278dccade56b4bbeecb9a613b77012cf5c1433a5e9b8ef99230d5e61f31d9e02", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "64ab7ac8c88002d9ba20a92f72945bfa350268e944a7922500af75d20330574d", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-aarch64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "b1c1bd6ab9ef95b464d92a6a911cef1a8d9f0b0f6a192f694ef18ed15d882edf", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", + "sha256": "5eeaf5b2cd44b51647ae66b1b75fb078d39b890f9f73752ea3eec50b5901bbff", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", + "sha256": "1bfcd7636c9a5d66b3fc72e4ecdddaac92a305a3239edc526bfbe4f83bdd16ca", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "ed66ae213a62b286b9b7338b816ccd2815f5248b7a28a185dc8159fe004149ae", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "913264545215236660e4178bc3e5b57a20a444a8deb5c11680c95afc960b4016", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "7556a38ab5e507c1ec22bc38f9859982bc956cab7f4de05a2faac114feb306db", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", 
+ "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "864df6e6819e8f8e855ce30f34410fdc5867d0616e904daeb9a40e5806e970d7", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "1001cce26923d89fa72a52ad3bbda5cfaee7bff11291d921dec4151846ef0c3c", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "50f1fcf56521d5751272071b698594960616f7be3ccffee5c071aa0115d6791a", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "976dc818dcc468c760fbd6fb2553318a49eec6aad8dcc4c4b1a44b0be3359783", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "1525c19758e4b08ca03fc6650815107561e396c8905cfb3bbc9adccdbe5a5af5", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "7965bd6354f2db624e904b9f58bb27883f2e6ad7ecb65612c328462fb65d6ce0", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "9244c970b5eba41b2ef20eb1f6ffb992d2cf9a2a2e0611d62c2ec5070047577b", + "variant": "freethreaded" + }, + 
"cpython-3.13.4+freethreaded-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "720ed989b925c5be3919dcb8738addcd1ce68444d5e081b041229d88ed743e19", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "595786a6613306b74118c56e29ab430ed0eb08d82c4103b7778e827c5e43f8cf", + "variant": "freethreaded" + }, + "cpython-3.13.4+freethreaded-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "9457504547edb2e0156bf76b53c7e4941c7f61c0eff9fd5f4d816d3df51c58e3", + "variant": "freethreaded" + }, + "cpython-3.13.4+debug-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "979fae2f42587a9b91dc4628786798c61410e24c26209d3666dde94531137f1f", + "variant": "debug" + }, + "cpython-3.13.4+debug-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "e7ef86f07ce18805efdf9b21a7e7b9d9c06e44157991aaa72f71cdecb53183e4", + "variant": "debug" + }, + "cpython-3.13.4+debug-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "e40b3fdfab879973e45363f159856a8c81570b16505f8862d17dc2d6749d4515", + "variant": "debug" + }, + "cpython-3.13.4+debug-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "2c4b8c4a451466770fa81e23647109b8c0446a56f056db955fc0fe8e3bb9dfc0", + "variant": "debug" + }, + 
"cpython-3.13.4+debug-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "e40baeae33d0cab3437d2c39c8dbef959b62c289a2c2a17426cf4e6a1f11629f", + "variant": "debug" + }, + "cpython-3.13.4+debug-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "e2a403cf6a923253a3b330c6ed827f12832bf087964aece3c71cee61cf4f4570", + "variant": "debug" + }, + "cpython-3.13.4+debug-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "d5c95731b75dd789b137ad30dd71904b89781735142fc31c9f72949c46a57ffe", + "variant": "debug" + }, + "cpython-3.13.4+debug-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "3d8f42bfe6033e16c60e06151d27dece67974560c392f13f0598d4641bb0470b", + "variant": "debug" + }, + "cpython-3.13.4+debug-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "84a6287669650bbd09cf4b86e01075f8ad38cda2c8580e0c8c568074f452feb5", + "variant": "debug" + }, + "cpython-3.13.4+debug-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "99b1306dad6d0be44d8fd0ba556473b385259030cf0c27349342f072f2822890", + "variant": "debug" + }, + "cpython-3.13.4+debug-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "44f62c784f32b456a5719ad11ed8ba8a97890f8e9dc9c9764f178c64ad30a6ba", + "variant": "debug" + }, + "cpython-3.13.4+debug-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": 
"v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "a9a87fb2789b6dfbcd7d1a79c846d208f0c0a7969759ad84742786c24da78047", + "variant": "debug" + }, + "cpython-3.13.4+debug-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "377c8b6d6da681959abd464fac093564b55fe63231fe27b6327d18bbbe13f843", + "variant": "debug" + }, + "cpython-3.13.4+debug-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 13, + "patch": 4, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250610/cpython-3.13.4%2B20250610-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "f00a51117fabcfac19b9db628db64c8afe549d19c4a5b5c78bf9848f79536d15", + "variant": "debug" + }, "cpython-3.13.3-darwin-aarch64-none": { "name": "cpython", "arch": { @@ -3131,8 +8027,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "4f193061d652943c62e7f1f51da54cf023ddeb252952d0317309512dbf1b05e2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "ad2afb2c030665b3a9617a30b1fe60024ddcaf97042ba37f8d14e9e1c0bd4aa9", "variant": null }, "cpython-3.13.3-darwin-x86_64-none": { @@ -3147,8 +8043,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "9235654170de8e707186ca6412446011033138f260fb528fb4a36ae9bd2bc160", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "84f0a473dca76e1fd0f5906256be47ae6798021536a8f5f99031354e6c7ea08e", "variant": null }, "cpython-3.13.3-linux-aarch64-gnu": { @@ -3163,8 +8059,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "8e0402f50fa9cb074900fc2dd2223b8fca8e23b89ed8ffe5f1a7e3978436e1e7", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "ac752f8f26f96fc58944e2c3b6528b5ed8964e745ff86e2aebf0ee07f0c9d11d", "variant": null }, "cpython-3.13.3-linux-armv7-gnueabi": { @@ -3179,8 +8075,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - 
"sha256": "6adee38f520f7f55d828462668f364203c1cfa4d214fec48faa233194acb29f3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "dec611dc17e159f664e8f06649db030552b2c22e594148b744083e57d3cebff6", "variant": null }, "cpython-3.13.3-linux-armv7-gnueabihf": { @@ -3195,8 +8091,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "d8c4585d56536cda5b9e04d4b74b22e787cefcfac7667c989395e1e4a71273cc", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "bc6a401763796a59cfb4c03c3fffc147075ffa7fd40b03e278b3fb971fa3c94a", "variant": null }, "cpython-3.13.3-linux-powerpc64le-gnu": { @@ -3211,8 +8107,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "2f9ab0fad39d39331d8f35d9499eb05ac59293a61f888239f5be710fcdcbd72c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "d493458b93ead052014619f781910e8ad4ff46b54cefee950b7e9e335ea2d6d8", "variant": null }, "cpython-3.13.3-linux-riscv64-gnu": { @@ -3227,8 +8123,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "6e6d289e6662f47f1d29d00b1bec6c1c307a800fbf40daa79facd0000873ed70", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "523478f62f9addbd15b06cf7d01ffca417892289f627a243f13d7074532bd57e", "variant": null }, "cpython-3.13.3-linux-s390x-gnu": { @@ -3243,8 +8139,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "95dc29cbd6642e47e94788b693bc2962108ed6de266a388e07d45f8f4152401e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f400097360f81dc3aad6cc5bdb0e2dddd900ef7ed0ec634653ef14274af83206", "variant": null }, "cpython-3.13.3-linux-x86_64-gnu": { @@ -3259,8 +8155,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "1a3512a5aa3f5522b4df1e7ac54c1d4b0ca8d0ee608af724cccb7aa21e4da6fa", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "532a6db30bcf8f8609a0c4e0a7c985315572364c19868bc9650a64df534954e7", "variant": null }, "cpython-3.13.3-linux-x86_64-musl": { @@ -3275,8 
+8171,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "f47691e5ef866b03946b8932aa4f3161f7eecc5007a38697ee7d0388b1dccc9d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "880f87d939c05b72a03bf8748d931b44a4e7379b7c2afa39af7b5002f71df51c", "variant": null }, "cpython-3.13.3-linux-x86_64_v2-gnu": { @@ -3291,8 +8187,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "68f65b2e8098ea332ae06f2afe984f6532a42eaa9a6fc5393c4421840d8dfd70", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "604d75bfff38827172803dfd9a586e0b28cd2e9581f30ed7a0aa6f64e10311ac", "variant": null }, "cpython-3.13.3-linux-x86_64_v2-musl": { @@ -3307,8 +8203,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "ae1b682221f80963d6bc106f22f1fc389f90b206ad6ec37d35acfae9fa8945a6", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "db6c394f1a2517d467990393606ce54f181aa0f114ca7b17e084fe2f07a96d2f", "variant": null }, "cpython-3.13.3-linux-x86_64_v3-gnu": { @@ -3323,8 +8219,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "473501dc352f96005695befc3c38879e1ac745439ffa870191b887e42ccfba44", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "63849f204f54057fb715e952d9f10ebf3215abac4d002fc7e3cfe78d99d35082", "variant": null }, "cpython-3.13.3-linux-x86_64_v3-musl": { @@ -3339,8 +8235,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "6f800e9f1850bc1b8cad543ca3dd6b7a1b00794109c18d6918ed15449cdb4027", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "ee9417e4d3063be13f71033e2079b96906f2147fdb75b636da0867d22575e9cd", "variant": null }, "cpython-3.13.3-linux-x86_64_v4-gnu": { @@ -3355,8 +8251,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "2904b578bc14f643e18fe7ed576bc4f7354c765d4403481ce672f3dd56487c06", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "18b4e2d2d5c3e9eaa714801f4de056ee99057aa47f4f976729cd79b10ec6e98c", "variant": null }, "cpython-3.13.3-linux-x86_64_v4-musl": { @@ -3371,8 +8267,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "7dc24e123f25be963afb52957835264666159eca4150d989badc45ea3b804ca1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "29843f0b22d9c27289f14f8451b38e33763361e4fe85d3e0d986ae7de1397cec", "variant": null }, "cpython-3.13.3-windows-i686-none": { @@ -3387,8 +8283,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "005197756b1adbf0526aec5bfeba079c745689739a03a9ca45f4057101307f3e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "033d8f36393a9f117ebfd170c98c04f06ee41a992174bb8b478a033808374257", "variant": null }, "cpython-3.13.3-windows-x86_64-none": { @@ -3403,8 +8299,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "893f7a2f5210f0187f93069e073a96f77110d104386ea5c611de08d355aea149", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "35ca5521b4e634f64075ca5b5002b72e63a1cb27f9f87bcd60a289348aea61a3", "variant": null }, "cpython-3.13.3+freethreaded-darwin-aarch64-none": { @@ -3419,8 +8315,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "f0f3259acfbbc8558fff704641909852767204c06d7a0f494edc6b36ebb70c10", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "d1f00e1322f985daed5a812d7b212cb96f30dedf14575e7d5622d99f1c03ee95", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-darwin-x86_64-none": { @@ -3435,8 +8331,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "21704d7516f44b680f99f963e22b65f0e0a62c79b061934107f6c2127ab62390", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "7dcc77f8bfb8f958b3b375b30a72371b0aeda5e9973aba269f041eb093705c35", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-linux-aarch64-gnu": { @@ -3451,8 +8347,8 @@ "minor": 13, 
"patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-aarch64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "3afe0576eb01c2b70cb349012b955c14d9e3e6d12796d593276812b9a1f037e5", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-aarch64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "d4d17846fa0d5bd617c74e7e7a0af40587f79fcf3b571362c97eee66da4d21ad", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-linux-armv7-gnueabi": { @@ -3467,8 +8363,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", - "sha256": "7368cb693ea1f9920223accb088ffc5b6447f705f282438c40b8c3125cb98514", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", + "sha256": "b25185c94ad5a4a20206292a5f3ddbca40c33d894217743fe4fbde9f06cea87a", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-linux-armv7-gnueabihf": { @@ -3483,8 +8379,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", - "sha256": "d64184f04967dcb2f16f2b8646971edd5ecb6568ac05ecfac1e4cd8c8b5944c9", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", + "sha256": "843de709240cfb76c202f1bbc48ccea13aeec15fc45b178744d1136bd05c553b", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-linux-powerpc64le-gnu": { @@ -3499,8 +8395,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "af050103145e9ee18580ac83e1fe28611ab055baef338f61dfe20b4759b5102d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "8aa9429d5e7af5b41bead51d3a5dd64b37c44b5ba29cd7d346e0a5a39d92f46a", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-linux-riscv64-gnu": { @@ -3515,8 +8411,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "25d60ca5e606517464c74721eadcd7452fe0a5c1ea569fa9526e620e51d37c8c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "ebf36a1012e591b822e2d78c4b49cc4cf86a2d6ff939f1d96f70ba2d0066abab", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-linux-s390x-gnu": { @@ -3531,8 +8427,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": 
"d8a0c8354231cb81353378b5448d36435a99dc1794ac03fdb1abd989c594c81b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "0ca47518e3587c4fe483a69e2887872a537842a90f499089fc42f0b0df91ba8d", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-linux-x86_64-gnu": { @@ -3547,8 +8443,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "03a6517368860793cd7ecd9ef1bc122fba0c7ed696729a3f4d44e252e233f6e2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "9041b0fd8c4ede100c73afcb6c5a56febf8de6f2134d89740483c2da74e3c7e6", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-linux-x86_64-musl": { @@ -3563,8 +8459,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "9ca7cd220fddd3f016943e4ec67c25eb112bd0f4f0aff220c3cb61bd842d8ef2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "3624ce44194219c9de6d2cfecbb7fc95c7c74ef0c2f98dc91709b7e42e940700", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-linux-x86_64_v2-gnu": { @@ -3579,8 +8475,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "6fde32077c8c4c8f4645b8469249ff3240c26ddb00c1b9f125805dd9a80901c4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "db3551fa250d2077989ed5082cae92dbae19bf5635f5e0b8a91e600fe56abb27", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-linux-x86_64_v2-musl": { @@ -3595,8 +8491,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "c1274f41ab2458d3ca66a3cec83cd47ce36c56bbba1fe44cb57652487225016e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "5f4f5b8bf02e4f51cdf95c2639ee11d86ac7b5c5177003bb66780d904711ebf3", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-linux-x86_64_v3-gnu": { @@ -3611,8 +8507,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "18ab4e32bac487e6b2857edb0ab44b980a241498f52525f02013277ee3c79228", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "23ed687e43069c628ea363b52271e5d4c0b3cb83eaa40abf1649b8b5ae954b91", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-linux-x86_64_v3-musl": { @@ -3627,8 +8523,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "06758779e8d580e861218557890db7e71d0dadd90c3b62df2d372e16af165456", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "f79ef983120f69aed4e3bbdd49b5dda04a707a83fb767a87385945f6315e9a74", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-linux-x86_64_v4-gnu": { @@ -3643,8 +8539,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "ec2cd01197c32486d489c874ec2dcb395ccd360270d2625a74d70ebc23af0be7", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "6f0a074de33aaea17fbb227e3b42e5c9762b382bf0f683a26b36a7e2004e1d1f", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-linux-x86_64_v4-musl": { @@ -3659,8 +8555,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "afa3e0c7d03bd81a2b1435332133b5cf7827005092606f9faf8ed9947bbcf3da", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "ee66f43f39d3ade36fcee0c12db223a116ead4173e761e23098582f0be12cb0b", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-windows-i686-none": { @@ -3675,8 +8571,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", - "sha256": "b8657e5ede4d695510aca412f3a2d8ac24b256ea93d5355693f51bc222e0164e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "005998b6503fe3b40cd16f040ca8bbb13947cc1336d37bafbe57ab1745f2fb54", "variant": "freethreaded" }, "cpython-3.13.3+freethreaded-windows-x86_64-none": { @@ -3691,8 +8587,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", - "sha256": "6ba9b9b9fe58ebc3d621e27390c9919cc30e5ab4d13efa5c62b92777c710eaae", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": 
"c833faa78fe5349ed1b3e96fec14e9dba0930087eb303d478cff560207a2f7f7", "variant": "freethreaded" }, "cpython-3.13.3+debug-linux-aarch64-gnu": { @@ -3707,8 +8603,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "0b198290b99fdb5e3fd063316a4be08d384f16ba4ee744fca11a727c0b1c351e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "24e6806bd3de6fce6c13c3a3cf019620188abb61af633484a8176e1b20277579", "variant": "debug" }, "cpython-3.13.3+debug-linux-armv7-gnueabi": { @@ -3723,8 +8619,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "68791cdcb2dd4a9d9e8b17faa17d842a2606034d3531dcfc0448d02099518cd5", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "2ea85ef688f55aa04d4e17e2ccd8b7512f1d583fb6c80113324e14886b0bcfbd", "variant": "debug" }, "cpython-3.13.3+debug-linux-armv7-gnueabihf": { @@ -3739,8 +8635,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "3ed563ace9dc194d57190b46449e195a813b684e1625169445ecdb904444feb8", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "b2c2fe16eee1d3a8c6b3e945123be4827332ae9cceaa2c00ceb7bf7a6097f25e", "variant": "debug" }, "cpython-3.13.3+debug-linux-powerpc64le-gnu": { @@ -3755,8 +8651,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "f8c4e5c186ea0511cc5560f53ec55ceab6fce63b4eb31fa18ba80a3a11881d07", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "ffcb7ea0fde00eddec0f9c7ba0edc9b6a148f7f7b62c800e7b94d669ce5f2473", "variant": "debug" }, "cpython-3.13.3+debug-linux-riscv64-gnu": { @@ -3771,8 +8667,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "a1680e70cf0170e25189a4bb9f820a1d9a3d253395e796239ad559cb2995a368", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "7f3c45567dcc373d85e5bbc0059ff88cc2822d665493bc04f7f69c0984154adc", "variant": "debug" }, "cpython-3.13.3+debug-linux-s390x-gnu": { @@ -3787,8 +8683,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "758ef540c6c784ebb00bf9329a60834ecd561ff5e3ed732657768d3061a6cfd1", + 
"url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "2732219e696079dea33e850b502759bbb31f41bbb5dc302a5f2b66db07e2503e", "variant": "debug" }, "cpython-3.13.3+debug-linux-x86_64-gnu": { @@ -3803,8 +8699,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "ef3145e10a9dfeba3767d3d733adcfcb022712ad9858ebfc748d6fcef4cf6b6e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "9b82eb4d21da2d0335a75a2d14b32aa67eb50246e6c68ebc5d1665c09066251e", "variant": "debug" }, "cpython-3.13.3+debug-linux-x86_64-musl": { @@ -3819,8 +8715,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "d51ce78673fc6be6a67537ffd1ded5803ed970ca4bc073367d3e2d7bc718ab7f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "50bf5bfeb627be2967eae44fdfc204ee7a22f57fbb406f76565656de30d6f4c1", "variant": "debug" }, "cpython-3.13.3+debug-linux-x86_64_v2-gnu": { @@ -3835,8 +8731,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "c5857d5cf48ab5e25a9c058fd42254a5618f333962d68f4ea4802b31f465697d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "02a0592880e2f5a47e4ec058d6509cbebac9e01484758b71476b54bd55e8a781", "variant": "debug" }, "cpython-3.13.3+debug-linux-x86_64_v2-musl": { @@ -3851,8 +8747,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "bf820cf7fc67599ac79ff453b9263821190276f60666721b58907e22737dd98a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "a028e693fa4a657850d79a3127c631a9c32dd99ef4a12dfcc60b70e197a043a8", "variant": "debug" }, "cpython-3.13.3+debug-linux-x86_64_v3-gnu": { @@ -3867,8 +8763,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "a330a58eb250438606abfbed3d3a6124455b8b3bad1ac202d1b87fa4a88c41fa", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "456cf4f3ef5d290fc049a2229728d31a3b05875e24229e57892bc09e50abbdbf", "variant": "debug" }, "cpython-3.13.3+debug-linux-x86_64_v3-musl": { @@ -3883,8 +8779,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "82fb671013c623b852ae9edb504d327c186309680249431c538efd7c64ab3fe5", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "953b87558849565a9b3ea88fe7ad2a145d94c78c7f10ff0d6e34ecc4dfefc211", "variant": "debug" }, "cpython-3.13.3+debug-linux-x86_64_v4-gnu": { @@ -3899,8 +8795,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "b90b05f3267307001587dac966e3ede923117535a3b0928c6929052f8d11ccb7", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "92abd78070e35028cf19101e248d3c69efe5be7abdd834decc4f112f130acaf1", "variant": "debug" }, "cpython-3.13.3+debug-linux-x86_64_v4-musl": { @@ -3915,8 +8811,8 @@ "minor": 13, "patch": 3, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.13.3%2B20250517-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": "08029d75de62ea4ea235a8143aa7e9d87b8d3950a08e5668f05f0b364b11cd99", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.13.3%2B20250529-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "07cc45e76b280660183923fb1e998b509e1dcbad706b7d00d0b3f7d24cd85cfb", "variant": "debug" }, "cpython-3.13.2-darwin-aarch64-none": { @@ -6527,6 +11423,534 @@ "sha256": "bd021bd31769abec42a07cf77cc4937dc83a0713b5038269e62e268f0e9639d1", "variant": "debug" }, + "cpython-3.12.11-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "0a5748a455ebd0ef0419bffa0b239c1596ea021937fa4c9eb3b8893cf7b46d48", + "variant": null + }, + "cpython-3.12.11-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "1154b0be69bdd8c144272cee596181f096577d535bff1548f8df49e0d7d9c721", + "variant": null + }, + "cpython-3.12.11-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "73a22b9fa275682f326393df8f8afe82c302330e760bf9b4667378a3a98613ba", + "variant": null + }, + "cpython-3.12.11-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": 
"gnueabi", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "6a60953cc821d673bf67724d05a430576d0921a60cfceeca11af5a758bd3ae71", + "variant": null + }, + "cpython-3.12.11-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "1f8b03c8bf51f36f659961364f9d78a093af84305bbe416f95b5ecb64a11314d", + "variant": null + }, + "cpython-3.12.11-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "10164c4c0e7f9a29024677226bc5f7c0b8b2b6ac5109a0d51a0fb7963f4bec48", + "variant": null + }, + "cpython-3.12.11-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f47a3ad7d96ba16b8b38f68f69296e0dca1e910b8ff9b89dd9e9309fab9aa379", + "variant": null + }, + "cpython-3.12.11-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "0714bccd13e1bfd7cce812255f4ba960b9ac5eb0a8b876daef7f8796dbd79c7a", + "variant": null + }, + "cpython-3.12.11-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "e42c16fe50fda85dad3f5042b6d507476ea8e88c0f039018fef0680038d87c17", + "variant": null + }, + "cpython-3.12.11-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "3676e47a82e674878b986a6ba05d5e2829cb8061bfda3c72258c232ad2a5c9f1", + "variant": null + }, + "cpython-3.12.11-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", 
+ "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "ddf0c26a2df22156672e7476fda10845056d13d4b5223de6ba054d25bfcd9d3c", + "variant": null + }, + "cpython-3.12.11-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "2be8e228b2a698b66f9d96819bcc6f31ac5bdc773f6ec6dbd917ab351d665da2", + "variant": null + }, + "cpython-3.12.11-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "820174fbb713495a1beecd087cc651d2d4f1d10b1bb2e308c61aecec006fea0a", + "variant": null + }, + "cpython-3.12.11-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "5cfc247d6ee2303c98fecddfbdf6ddd2e0d44c59a033cb47a3eb6ab4bd236933", + "variant": null + }, + "cpython-3.12.11-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "01519be2a0930f86a43ac93f25fb0f44b3dbf8077ecd23c98c5b3011150ef16a", + "variant": null + }, + "cpython-3.12.11-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "93a9714ef88ece8575707e1841369b753f9d320c42832efffda8df8dfcbd9ca7", + "variant": null + }, + "cpython-3.12.11-windows-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "512ae77ca0afe3a81d990c975548f052b9cde78187190eb5457b3b9cdad37a9c", + "variant": null + }, + "cpython-3.12.11-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "c815e6eadc40013227269d4999d5aef856c4967e175beedadef60e429275be57", + "variant": null + }, + "cpython-3.12.11-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "49911a479230f9a0ad33fc6742229128249f695502360dab3f5fd9096585e9a5", + "variant": null + }, + "cpython-3.12.11+debug-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "aed96d0c279ff78619991fadf2ef85539d9ca208f2204ea252d3197b82092e37", + "variant": "debug" + }, + "cpython-3.12.11+debug-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "360e6b2b9bf34d8fb086c43f3b0ce95e7918a458b491c6d85bf2624ab7e75ae3", + "variant": "debug" + }, + "cpython-3.12.11+debug-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "fffb9b6c2e81b03aa8a1d8932a351da172cd6069bbdc192f020c8862d262eab5", + "variant": "debug" + }, + "cpython-3.12.11+debug-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "a8bed95f73ccd6451cad69163ef7097bfc17eda984d2932a93e2dda639f06ff2", + "variant": "debug" + }, + "cpython-3.12.11+debug-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "395d73e73ff0d0085ddb83f15d51375c655756e28b0e44c0266eb49f8d2b2f27", + "variant": "debug" + }, + "cpython-3.12.11+debug-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "097dc82abc3805b8e1721e67869fd4ae6419fb9089d7289aec4dd61b9c834db4", + "variant": "debug" + }, + "cpython-3.12.11+debug-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "d11f20d2adaa582ac3e3ab6f56a3c1f4e468e1aa4712d6fe76dd2776fdb28330", + "variant": "debug" + }, + "cpython-3.12.11+debug-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "a4cfaa4c7915c35ecf4a15a3f25cdda68b1e2de06280cfe98680b4eed3e11ac1", + "variant": "debug" + }, + "cpython-3.12.11+debug-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "e040fa65666bd109534c8ed4c70d198954a28e87dffbab1b138a55c8c98c4db5", + "variant": "debug" + }, + "cpython-3.12.11+debug-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "89504b7f5fba85aa2644be63aa9377e69e56f6c6f4c57a96e0a6050e95e2b8d8", + "variant": "debug" + }, + "cpython-3.12.11+debug-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "5eb9cb98d4528045f1e03373373ddb783fbbf6646e3d0e683fb563e5f1d198e6", + "variant": "debug" + }, + "cpython-3.12.11+debug-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "0d463ebb5c0886e019c54e07963965ee53c52d01e42b3ca8a994e8599c2d7242", + "variant": "debug" + }, + "cpython-3.12.11+debug-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "94924bb8ca1f03bf06c87554be2ea50ff8db47f2a3b02c5ff3b27d5a502d5fe4", + "variant": "debug" + }, + "cpython-3.12.11+debug-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.12.11%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "47d315cae2b1cd67155cd072410e4a6c0f428e78f09bb5da9ff7eb08480c05c4", + "variant": "debug" + }, "cpython-3.12.10-darwin-aarch64-none": { "name": "cpython", "arch": { @@ -6539,8 +11963,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "c07581d02ba192ac56bb5d84b8c6ced909fbd8dfed9f9a79ed8f2730fba8303e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "2579c9ffcbb0a745f1de3d645612c7466269eb9a23316a54ece49d50c1c9122f", "variant": null }, "cpython-3.12.10-darwin-x86_64-none": { @@ -6555,8 +11979,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "1ba64c9307d3e6ba6ced6cb317987ba0c671d5c9bd78f58cc19015dd107ffc32", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "f171af578ecdbbcf63cbb0abd08f382df3e0d091e98c3161b3f84cd9d0acc0fd", "variant": null }, "cpython-3.12.10-linux-aarch64-gnu": { @@ -6571,8 +11995,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "012bba52c1a824e357b0152c2c6366f3ed65551f1751330ebb6c2f6bb730d987", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "235d0666528a8e948896f68c08964dc8dd14a54c68e5cbd4924594d2e20f81b9", "variant": null }, "cpython-3.12.10-linux-armv7-gnueabi": { @@ -6587,8 +12011,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "fa390dc35e1238c236d69300f51b4fe6fd244877e3937b2d5e2351ba59ca1514", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "1d4363dbe5d1a9ee1e2ede25144669c0cce0c4668801b52fa9bb6e36dfa60a52", "variant": null }, "cpython-3.12.10-linux-armv7-gnueabihf": { @@ -6603,8 +12027,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "371bc28f0c84b97d6c933202cfe0eee4cb88bde3e17b1249321562f82bd7c0e1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "aa147159baed2a49f5d60eb94964f047e791a33378f074efd056a299c8542144", "variant": null }, "cpython-3.12.10-linux-powerpc64le-gnu": { @@ -6619,8 +12043,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "2710f0d239533467d053ace4837240135add9bfcb7d252586b760ac5fecebe6e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "ed464e49d045f25f91850ec17978c852658f9b7339c4dd7ec522454bec31e334", "variant": null }, "cpython-3.12.10-linux-riscv64-gnu": { @@ -6635,8 +12059,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "a554d3bae75d861013592dd9410f8a20c9f23019e6f9b669588e60b1debbc0ee", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "1625136e4d97b1e155e21738071254c1a76dd12f9bf3c452eb3e34e44d3e74a8", "variant": null }, "cpython-3.12.10-linux-s390x-gnu": { @@ -6651,8 +12075,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "c30eff1446320b5b0f8c34f3ab791316a317d6e75cee7ec2558d58dce2eb5cad", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "d92cb4f241787b664d28336073128061a9fb8bdb27d533b60b7dbc4cb6039faf", "variant": null }, "cpython-3.12.10-linux-x86_64-gnu": { @@ -6667,8 +12091,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "0356bd1db292781a20e011d789f0c41ea5ec04731c8236c8185b53a54faf2871", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f50d08e0304b15d752b22932047135c1dc02f81977b284e51288be9d90627bca", "variant": null }, "cpython-3.12.10-linux-x86_64-musl": { @@ -6683,8 +12107,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "c203193ab325dfa9934c8b49a2f26a682124b783a28d6f63525b9a5d8561e28b", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "ebcb774652aeaa10c8b9a5825be257e7b21b604aac1bac2d3f6f1403de116ce8", "variant": null }, "cpython-3.12.10-linux-x86_64_v2-gnu": { @@ -6699,8 +12123,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "66db7f515fc78cca52ed899ff693246137ad625e149506234cd3da09892f5cc1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "eb36df47a9acedb63eb08054cd44454706b6b1f34f8d53a8837f0e9a18580774", "variant": null }, "cpython-3.12.10-linux-x86_64_v2-musl": { @@ -6715,8 +12139,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "16ee2af7a14fc5396ab0655910cb0772c7855458c53194750766c8e3314c3ed8", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "ac119a114f578c823d1b701aa24c07f53a631c843eb37278c08be54d0786f642", "variant": null }, "cpython-3.12.10-linux-x86_64_v3-gnu": { @@ -6731,8 +12155,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "3f2701780f8bf805ff76091ba62363fec9a2a9a3779d8976fdcc7df86d83e61b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "a5e05cf6c5d8243f3a7f4acc6cfe7c82d43395ded4348e6126974f28ab245ff7", "variant": null }, "cpython-3.12.10-linux-x86_64_v3-musl": { @@ -6747,8 +12171,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "263f6531f65d158b5d2b13108d89c6a754e9041955a286ffcb41f81ab6b16b0f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "dcd8d2421531cd246e0eec1915022539d1aa3dd8c54b16cfdd81eab187a1fab5", "variant": null }, "cpython-3.12.10-linux-x86_64_v4-gnu": { @@ -6763,8 +12187,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "70a8b1627cbd9a380f2b824e40671faf6211f3a39a0fc65f87f7514f145298fa", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "9971057c0bb2051f344d84167c01a18a16f8331d706c2e2f4923acc78c921f9d", "variant": null }, "cpython-3.12.10-linux-x86_64_v4-musl": { @@ -6779,8 +12203,8 @@ "minor": 12, "patch": 
10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "ef5d2b78fbfdd31d804c3b99080cd3b81d3aff2c7351fbf4d22eef11d82f82ba", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "2104bc96bd5c91f04162053579f3af1b959f3d56acd1f3e7794abd587a839c9b", "variant": null }, "cpython-3.12.10-windows-i686-none": { @@ -6795,8 +12219,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "bb2b16b038b75000dc88433e9e4d3ec3872a7fa479f06444e4edfaaf0cb36859", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "17d998bc06ca212b99fd6c53ac8d790c5d6682778165cf351f304ee9f00143fa", "variant": null }, "cpython-3.12.10-windows-x86_64-none": { @@ -6811,8 +12235,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "dddc99752696e4e6c277ba7e579b7cac179ce45761534cfbf7ea34f264a277e0", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "ca22a9a9e64ecab6d0b5de7cdf8b679ccaa41e9def6aaa2b4aaa6bb23ec7aaba", "variant": null }, "cpython-3.12.10+debug-linux-aarch64-gnu": { @@ -6827,8 +12251,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "b3539001dd396b1e7924f99bdb143c236f21b2d1819e3ecebaf4ad506cedf7c1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "513210be95022ec8c9430ef5e6ba737464555cc89f920b3b169fbe25d8154337", "variant": "debug" }, "cpython-3.12.10+debug-linux-armv7-gnueabi": { @@ -6843,8 +12267,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "5ff4f42cc2b07e35a947019d7807b3f45f60c2c76025129c02b497454b4dab42", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "032757c9e80a10dba9c0593cf712eeeb102f29a7a6b63af787522b992a5f3ecb", "variant": "debug" }, "cpython-3.12.10+debug-linux-armv7-gnueabihf": { @@ -6859,8 +12283,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "6d0b3bbb8d5fd3bcc9b14362d71c222c5963d0244c7953859c35b67c3fd2a5f7", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "efcb79096c0cebed26549d9d96885342099abbf049f546d9a5d838629d0e71f2", "variant": "debug" }, "cpython-3.12.10+debug-linux-powerpc64le-gnu": { @@ -6875,8 +12299,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "b63eb2d5618265dfd6f23c7804385920d1f0e0da1d3463d6102286cf8edb377b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "738d34301ff341b112d45e41d40163fff8f3a7e33f021b072f005414d4337294", "variant": "debug" }, "cpython-3.12.10+debug-linux-riscv64-gnu": { @@ -6891,8 +12315,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "9bd1343110617690347cabf41d757c79168d9a30bca7fb963ab2ead0e67b6da1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "9ea8e9a31faf1808b4980ce28688d1c4d11fe4542ddbfeb2d6311b1c0677eabb", "variant": "debug" }, "cpython-3.12.10+debug-linux-s390x-gnu": { @@ -6907,8 +12331,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "29ab23ee497150c9635b898745815bd67ce02c23ee39c67dea4e4a325d2c826d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "7a3df4d795570a98c499fd6cf7ebfc2f650508146976672abe72493062383c31", "variant": "debug" }, "cpython-3.12.10+debug-linux-x86_64-gnu": { @@ -6923,8 +12347,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "445c3f5f96a8a9fb3c3796fc25e22bbde2662a8bcfb7bc86ae9a8dac73ec4bce", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "02769598fa503296392bdd0f7bdcab9545915f796d49c331f8ba7da8f83b9849", "variant": "debug" }, "cpython-3.12.10+debug-linux-x86_64-musl": { @@ -6939,8 +12363,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "4d67a1632ebe3d78ca4eb0e3f47c6bc7860112f0da0052205fdcd9f8ad4718eb", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "b781f551a1bab2ef8ad70d5fc1e963a055bb530037f4cf86a75e94ac6d21bcf7", "variant": "debug" }, "cpython-3.12.10+debug-linux-x86_64_v2-gnu": { @@ -6955,8 +12379,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "1ae6653aa76802e9e92ffb993a721d1dde05996f05c15625957953dddba92314", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "90775f0bbdc20517578c768e6676ae902a68a2f7b8e1d4dbd439f1f92ce697e8", "variant": "debug" }, "cpython-3.12.10+debug-linux-x86_64_v2-musl": { @@ -6971,8 +12395,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "8463dfd1da5aaee82570025bf4fe8162bfc4e86fee9658ce4f567397e6d716b6", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "09f1c0c12543c8bcbbbecdc4e1b4ecd9566dd4ce4f1049e9abdab2443a544b25", "variant": "debug" }, "cpython-3.12.10+debug-linux-x86_64_v3-gnu": { @@ -6987,8 +12411,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "e7d57644f852e057c34ce88d2350f1455576822359c044fa5ac00dba163b0717", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "20f1e5c60e4e6fd33c38792bae86ef4f1a0b41c27f474888086cab5477c599ec", "variant": "debug" }, "cpython-3.12.10+debug-linux-x86_64_v3-musl": { @@ -7003,8 +12427,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "b4346f29f0621ca52a092d382d07a8d42ca323cdecc7b352b9658323d37a5f03", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "ac1e67a11822425b1108c0a74508cefabdb3691b6781c2976c91a3562e60874d", "variant": "debug" }, "cpython-3.12.10+debug-linux-x86_64_v4-gnu": { @@ -7019,8 +12443,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "123a8ccc93871fdcd5205029fbf3a3d4008c6e40495c82328a564a6e0e2dd3f1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "1202222040db10748c730d25a87ba5cd2edc4ff3d67ae3e0cb5d7b241e4d859a", "variant": "debug" }, "cpython-3.12.10+debug-linux-x86_64_v4-musl": { @@ -7035,8 +12459,8 @@ "minor": 12, "patch": 10, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.12.10%2B20250517-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": "944c8feeca9f0e5cbaf93bfc3442b00031f2fa0b1bcae06b428d8ed161ac172d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.12.10%2B20250529-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + 
"sha256": "f6f8457586193d3b6e7b05fefda9b822fabb18f2d58b2ece3b221bc6b6b39865", "variant": "debug" }, "cpython-3.12.9-darwin-aarch64-none": { @@ -10559,6 +15983,534 @@ "sha256": "3c900c3495453cfa7e7626026ef0d8be3adf589b2b810969f6a9f44dba3c129d", "variant": "debug" }, + "cpython-3.11.13-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "cb07230fc0946bab64762b2a97cca278c32c0fa4b1cf5c5c3eb848f08757498a", + "variant": null + }, + "cpython-3.11.13-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "1eec204b5dffad8a430c2380fd14895fad2b47406f6d69e07f00b954ffdb8064", + "variant": null + }, + "cpython-3.11.13-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c5155a27d8e8df696eff8c39b1b37e5330f12a764fdf79b5f52ea2deb98a73a0", + "variant": null + }, + "cpython-3.11.13-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "680ecfd9fc09d62dbe68cfb201e567086e3df9a27d061d9bcde78fad4f7f4d94", + "variant": null + }, + "cpython-3.11.13-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "af2508bfab6c90a28d7e271e9c1cede875769556f3537fc7b0e3b6dd1f1c92b7", + "variant": null + }, + "cpython-3.11.13-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c83b749e3908140dec9ffadbf6b3f98bacaf4ca2230ead6adbd8a0923eebf362", + "variant": null + }, + "cpython-3.11.13-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "7f0dfc489925e04ba015f170f4f30309330fae711d28bc4ed11ff13b9c3d9443", + "variant": null + }, + "cpython-3.11.13-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "603e7bad4e81cee7d4c1c9ca3cb5573036fb1d226a9a9634ca0763120740d8ff", + "variant": null + }, + "cpython-3.11.13-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "e50197b0784baaf2d47c8c8773daa4600b2809330829565e9f31e6cfbc657eae", + "variant": null + }, + "cpython-3.11.13-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "a233b0492531f187ac33ecfd466debf21537a8b3ae90d799758808d74af09162", + "variant": null + }, + "cpython-3.11.13-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "5f970ce2eecd824c367132c4fd8d066a0af3d079e46acf972e672588a578b246", + "variant": null + }, + "cpython-3.11.13-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "a2df9657ecbecce2a50f8bb27cb8755d54c478195d49558de1c9c56f5de84033", + "variant": null + }, + "cpython-3.11.13-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c30fd4073a10ac6ee0b8719d106bb6195ca73b7f85340aac6e33069869ae4ee8", + "variant": null + }, + "cpython-3.11.13-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "cd15f24848c848b058a41dd0b05c4e5beca692d2c60c962fcb912fffc690afef", + "variant": null + }, + "cpython-3.11.13-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "8c390cae0b2d163f18117cae43bcbe430e58146d97e0c39b4afe72842e55f5fc", + "variant": null + }, + "cpython-3.11.13-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "f2ac3addbdf3c08ccf2320bdbed20213b45acd3399d44a990046f09dd883824e", + "variant": null + }, + "cpython-3.11.13-windows-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "84058f18798534e76f6b9d15b96c41116aad0055e01c6e3ab2ab02db24826b9a", + "variant": null + }, + "cpython-3.11.13-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "8044a253950315481784b9f4764e1025b0d4a7a2760b7a82df849f4667113f80", + "variant": null + }, + "cpython-3.11.13-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "43a574437fb7e11c439e13d84dd094fa25c741d32f9245c5ffc0e5f9523aafa9", + "variant": null + }, + "cpython-3.11.13+debug-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "b6ca253ced82c9575935a32d327d29dcffa9cb15963b9331c621ac91aa151933", + "variant": "debug" + }, + "cpython-3.11.13+debug-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "3e02d8ff6b63bb83a9b4cbf428d75c90d06f79df211fa176d291f3864c1e77df", + "variant": "debug" + }, + "cpython-3.11.13+debug-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "c7f9429f877d9e78a1b7e71c83b2beea38a727f239899ed325b3648e4e4cc1bf", + "variant": "debug" + }, + "cpython-3.11.13+debug-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "1f47dd100661489bf86befae148ce290009b91a7b62994f087136916ba4cfe4f", + "variant": "debug" + }, + "cpython-3.11.13+debug-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "47c5cae609e683e59bf6aff225c06216305b939374476a4cf796d65888a00436", + "variant": "debug" + }, + "cpython-3.11.13+debug-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "7c16d22e0eeddfec0275f413ccca73c62ba55736230e889e5e78213e456bae1c", + "variant": "debug" + }, + "cpython-3.11.13+debug-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "22b0309a7232568c054790a23979f490143c2a65f5b4638b52ebfa2e02ad7b20", + "variant": "debug" + }, + "cpython-3.11.13+debug-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "6a3c83db95e39a68ace7515787be03e77993f023bb0c908eaed4cf79480f24d4", + "variant": "debug" + }, + "cpython-3.11.13+debug-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "0d7a5be35f70db94f151656a912fd66e0c001c515969007906b3f97c3fe46364", + "variant": "debug" + }, + "cpython-3.11.13+debug-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "7c4ae94fe3f488027f1a97f304ef4dbe2d83f4b97381b5d6dd5552ce01065027", + "variant": "debug" + }, + "cpython-3.11.13+debug-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "5fec7d7868079bd9107c190a3187d3bffe8e3a0214d09f8ce7fbe02788f6030d", + "variant": "debug" + }, + "cpython-3.11.13+debug-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "ac5f52aca1051354e336448634b8e544476198d1f8db73f0bcd6dff64267cf9e", + "variant": "debug" + }, + "cpython-3.11.13+debug-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "467cee90b4081db0ddfef98e213bf9b69355068c2899853c7cf38bea44661fd5", + "variant": "debug" + }, + "cpython-3.11.13+debug-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.11.13%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "1ac6812cca22b1d3c70b932d5f6f6da0bc693a532e78132661f856bafcd40e2b", + "variant": "debug" + }, "cpython-3.11.12-darwin-aarch64-none": { "name": "cpython", "arch": { @@ -10571,8 +16523,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "82ffd1ecf04d447580b40d5c5abb5bf12c6838b009e1e75e92dae05debfc9986", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "9279f496a7f16fa4da822337819ead9fafd68f1c16e15cef26bf32ed57f24e0b", "variant": null }, "cpython-3.11.12-darwin-x86_64-none": { @@ -10587,8 +16539,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "987d5d7b1cf57d2d7a3000b201b6890ab200558de929a80fad153207fe557a3b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "6f944e7797f0fca3859db19a6344f01527616eadb088b5b4c3974682f144f9db", "variant": null }, "cpython-3.11.12-linux-aarch64-gnu": { @@ -10603,8 +16555,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "dab026afde9ae182c42812178dd710782a9d0b53865aaef8c673fc4f430be19a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "ae5bed638f9116faf1b488b9058e478835538f3a748b50fa2919e832fb3dda28", "variant": null }, "cpython-3.11.12-linux-armv7-gnueabi": { @@ -10619,8 +16571,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "905cbb787fc8cc1d0b801d943f77d9153ec5cd96b92e4bb97e5ecaae53f38639", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "6dd6efe5dc8258400110aaca83a1b8ecc6b0415dde15cecff160a93622b9acc3", "variant": null }, "cpython-3.11.12-linux-armv7-gnueabihf": { @@ -10635,8 +16587,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "3b75f963ae738e5f3a9bb62bf0052010458a779ab249d90ccd0563c8e8ef962c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "f9cc0802ebceccb808bd30dee00ff2274c8b4cc7e29bc9e19af4c550743a3d01", "variant": null }, "cpython-3.11.12-linux-powerpc64le-gnu": { @@ -10651,8 +16603,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "6df8eb270225e7c94a7d8cf6971937ea6c18929ff4cb7ebc40317d92a8f4f8af", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "04073a80743a45be1451995824b7a7c7cc32846fc0ef7257e77a17e2f9536a18", "variant": null }, "cpython-3.11.12-linux-riscv64-gnu": { @@ -10667,8 +16619,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "6c7d91801db35ba7a6675bff98fd6aa64c920f9662653e0a0f96f653dd74987d", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "70df92cfc732c12efe7f2446bfda160155bc4157e29a7b882f9e1fb64e63406e", "variant": null }, "cpython-3.11.12-linux-s390x-gnu": { @@ -10683,8 +16635,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "f4e8364261a4a9f2f9c8e08c77ed8b27e04858dc21c2cd57b702874b572bcdaf", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "9220c0001a0fe55a6ed9f5bb93c8c4180d536a27e70f085b273b8bc616f9d640", "variant": null }, "cpython-3.11.12-linux-x86_64-gnu": { @@ -10699,8 +16651,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "625786f96cbb94f011aca6694ac6694fb9675ff2f184d96129584d6d3e912c37", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "a8c2320f1df72d02d58f3f7ab0899bff7946e5efcc723ae2ff3113ad8bb645c0", "variant": null }, "cpython-3.11.12-linux-x86_64-musl": { @@ -10715,8 +16667,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "b34b94fa23dba347ab0c757df789bfe4345ccac248f2f47423fbf4e7c6cda570", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "009de744b61ebca3e75a6298fc1a395090d5f4891ce22e4b8f5dc9aa04c4afbb", "variant": null }, "cpython-3.11.12-linux-x86_64_v2-gnu": { @@ -10731,8 +16683,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "3a2af35debaf7eecae468565108485cb7d7f9d981cc5ce3a2f6f3fbbe911c1f7", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f6c59b5b5cc8f2097dcde5beb744ed691af8f2f401b65f72d6e9cef078b6647d", "variant": null }, "cpython-3.11.12-linux-x86_64_v2-musl": { @@ -10747,8 +16699,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "30d159c7ab103884c613b16c56675a57d8f8e0e853d396dacf6039e228d67a17", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "3ef08e1e7866054aa8cbdc4e49a3593c1446427602e4c2bccd20d15792774bdd", "variant": null }, "cpython-3.11.12-linux-x86_64_v3-gnu": { @@ -10763,8 +16715,8 @@ "minor": 11, "patch": 12, "prerelease": "", - 
"url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "ec15c4c6009b4c49105fa276b22d938ad2ff4d30ab1b44ed6838b96aa0dbaab9", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "78151687c55765bdc99e40cc7e3a93ae703f65ae38121794db519cd713ea6df9", "variant": null }, "cpython-3.11.12-linux-x86_64_v3-musl": { @@ -10779,8 +16731,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "a558e1a4a83d6e14cc79207858955e1b5ddc0c2c369683ee62eef4502ea06904", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "7bd6b725c2c4176df2ff6c8ab4cf438254cf8fa3ccb21b6cb4cd93524a2db754", "variant": null }, "cpython-3.11.12-linux-x86_64_v4-gnu": { @@ -10795,8 +16747,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "c3b1dc1d214dc3a948db37c60c306e294fb3139994410a25044485e9974531ff", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "af12ca1ad0530b86c2fa1dfa849b1d3b3467006183fba850f08ebe520d90e181", "variant": null }, "cpython-3.11.12-linux-x86_64_v4-musl": { @@ -10811,8 +16763,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "8dbc75adf487af88076c149e726060e81c6e189e521628553f644728bff7fd20", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "e5900b751700d2636905f3f70d4424796b1921b508446888bb34f842236b32d7", "variant": null }, "cpython-3.11.12-windows-i686-none": { @@ -10827,8 +16779,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "7de30c544eee857ec7bc81301679253a7fedb3610b9dc7901983732b070bf6fb", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "5957094ab6b8dad69852851b2737c26ba786c178edaf4a7db17b65f32327fa32", "variant": null }, "cpython-3.11.12-windows-x86_64-none": { @@ -10843,8 +16795,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "019a0a4decbb7d63a00619bd3bcdb709ef397a316dd752881c8329b58a7a8976", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "f794b1e74349a989d1b9a466d2207527d4e16a36c92a1277802458293bc4be31", "variant": null }, "cpython-3.11.12+debug-linux-aarch64-gnu": { @@ -10859,8 +16811,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "7db4630b160e9cb4ba4f3ae6b6e7f21c401c3c9c23bf695f1171b3b8d3f4e61f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "c30d81d45b679c29bb6c8a1aad684fe58c9a70520e39e9244ddf0f3483c3f26c", "variant": "debug" }, "cpython-3.11.12+debug-linux-armv7-gnueabi": { @@ -10875,8 +16827,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "1c0727060e7ce2aedd8de582583b6176c8963b68fd8089a3ba4d76c72602904b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "c88899c4862f8f6c5108f15abb566916c3d3f925302d9ca8e515471d09bfd11c", "variant": "debug" }, "cpython-3.11.12+debug-linux-armv7-gnueabihf": { @@ -10891,8 +16843,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "b7cb897b3a1ebc0a8ca63ee19ea363a2fa49cae68841b1d8508524cf907b7ac5", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "1a852bfa6d302255b7a6c7b0801821523315e3963161826fb3b562908a29d7bc", "variant": "debug" }, "cpython-3.11.12+debug-linux-powerpc64le-gnu": { @@ -10907,8 +16859,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "94348c6332b8d1a183351ae6fb5b068087a2809e1d9950b014edf13458fc42a7", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "5b37bacfbf9a145f4656a2288ae7749f7f93e6b8595bb24a32efc3df222b2666", "variant": "debug" }, "cpython-3.11.12+debug-linux-riscv64-gnu": { @@ -10923,8 +16875,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "4b3e98c381c2c28aeac865b51b173788435d3f110a827dec4a05b044edb52d43", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "5e094a46a84f2ced1318e627768fd00c3996064f7ce30c9e350f94563e350541", "variant": "debug" }, "cpython-3.11.12+debug-linux-s390x-gnu": { @@ -10939,8 +16891,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "104c9c84aaa55ea3e35d21a642b3608a5539c71e7527612af3dcabdfc9823b6d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "8e833b7740f46568915b62265a29b71a51c260622d61e14d4a9a8e9b24c1ebe0", "variant": "debug" }, "cpython-3.11.12+debug-linux-x86_64-gnu": { @@ -10955,8 +16907,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "785a646eb324ab4b9b60b5a6a4ea7e1d988cd1a1aad94e84d6c4330d4f7b1b4a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "d177fd1da66ac95c3ee93dd8c2b5afac3813a945f7a10910e352cef789a189f4", "variant": "debug" }, "cpython-3.11.12+debug-linux-x86_64-musl": { @@ -10971,8 +16923,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "289bd1744f7f2551349a8a0f5396319ca8a95ff1a090ccc26e79a7baf0939a04", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "7427f91ad9246dd24de1490111bbbd07ae29f0393067e2c77e5551ada309a155", "variant": "debug" }, "cpython-3.11.12+debug-linux-x86_64_v2-gnu": { @@ -10987,8 +16939,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "f017d3d3ddb86fbbcbf25bccce16fe3f0adda216d4da151e08bd5cf2e558b911", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "d1b80d5381144f0439cced6cebf55e1b509c98f7aa049dde5ea018b25807fd15", "variant": "debug" }, "cpython-3.11.12+debug-linux-x86_64_v2-musl": { @@ -11003,8 +16955,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "598fd9a15e2bed4a332c3e0f16bf8067157cbf02dca756db31969d412f35b14d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "1f3937108093b9123ce4afaa2cec0c4b81be0c717ac7d62a69dd2b1d3cb2207b", "variant": "debug" }, "cpython-3.11.12+debug-linux-x86_64_v3-gnu": { @@ -11019,8 +16971,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "23a56e232a1d2f22deb077b94cc789cfcb5af016005328528028bebd42ab55b4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": 
"24fbb0c7936afdec8ccc711fba8d84d9e4ab951f3673fcfbe8875a2abd813ba9", "variant": "debug" }, "cpython-3.11.12+debug-linux-x86_64_v3-musl": { @@ -11035,8 +16987,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "51abe73b49df97b3302ef2a3d2a76953192b43e81d6f925dd550ef4da09dc666", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "b15c97d3c31e27a7414bb8ec08635e1e6a94b42364fcc5b108d88d62f3009685", "variant": "debug" }, "cpython-3.11.12+debug-linux-x86_64_v4-gnu": { @@ -11051,8 +17003,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "70921231816f3c09ce00a67fce340313366718214cc4b9496cb5e7f2da8f9fb5", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "f5599cced22dba53b51e7cefd72c325887e0384b9dae7ac1e34517ad230c521d", "variant": "debug" }, "cpython-3.11.12+debug-linux-x86_64_v4-musl": { @@ -11067,8 +17019,8 @@ "minor": 11, "patch": 12, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.11.12%2B20250517-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": "55628ed69db855a8f57c693635949a1aed17bf7ebc7b17e6c19739062a41091f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.11.12%2B20250529-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "1512b6d9a883a305c30eaefc3ade96d377a1bb86f99f727f92a8df22bc52b1dd", "variant": "debug" }, "cpython-3.11.11-darwin-aarch64-none": { @@ -14335,6 +20287,518 @@ "sha256": "abf6c9a813e3c600b095ccfe0fb8c21e2b4156df342e9cf4ea34cb5759a0ff1c", "variant": "debug" }, + "cpython-3.10.18-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "73939b9c93d50163cd0f1af8b3ce751c941a3a8d6eba9c08edcc9235dc5888c7", + "variant": null + }, + "cpython-3.10.18-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "1ba1523d81d042a516068b98ded99d3490d3f4bb6c214fc468b62dadde88e5ac", + "variant": null + }, + "cpython-3.10.18-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": 
"54c490a7f22ac03171334e5265081ca90d75ca0525b154b001f0ee96ad961c18", + "variant": null + }, + "cpython-3.10.18-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "56ca1369651cb56221053676d206aa675ee91ddad5de71cb8de7e357f213ff59", + "variant": null + }, + "cpython-3.10.18-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "eacff45758c90b3cdd4456a31b1217d665e122df8b5a0b8b238efcc59b8d8867", + "variant": null + }, + "cpython-3.10.18-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "6e4180591050ec321a76ac278f9eab9c80017136293ce965229f3cbea3a1a855", + "variant": null + }, + "cpython-3.10.18-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "ef176d45d3199989df3563e8a578fb00084190fa139ecc752debdee7d9acc77d", + "variant": null + }, + "cpython-3.10.18-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f744cbebf0cc0236fd234aa99ae799105ed2edb0a01cf3fe9991d6dd85bd157c", + "variant": null + }, + "cpython-3.10.18-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "ba282bc7e494c38c7f5483437fd1108e1d55f0b24effb3eb5b28e03966667d7c", + "variant": null + }, + "cpython-3.10.18-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "0502186e5ccc85134a2c7d11913198eb5319477da1702deb5d4b89c3f692b166", + 
"variant": null + }, + "cpython-3.10.18-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "ddd7ff4a13131c29011dd508d2f398c95977dc5c055be891835a3aa12df7acfa", + "variant": null + }, + "cpython-3.10.18-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "feb3d0c6ddfa959948321d6ac3de32d5cde32fe50135862c65165c9415cafedf", + "variant": null + }, + "cpython-3.10.18-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "69c634bf5c979ca3d6fac7e5a34613915e55fc6671bfb0dee7470f3960a649ee", + "variant": null + }, + "cpython-3.10.18-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "dbe2e101bb60277ef0f9354b7f0b1aaa85b07dec3a12ca72ae133baa080deeca", + "variant": null + }, + "cpython-3.10.18-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "a6b2530a580061eb9d08168ac5e8808b8df1d2e7b8dd683c424b59cc9124a3a2", + "variant": null + }, + "cpython-3.10.18-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "3a2abc86a8e740d4e7dddcd697781630d9d9e6ce538095b43a4789a531f8239b", + "variant": null + }, + "cpython-3.10.18-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "1326fb50a7f39ff80b338a95c47acbeda30f484ee28ff168c3e395320345ee01", + "variant": null + }, + "cpython-3.10.18-windows-x86_64-none": { + 
"name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "0dec10054eefa76d4e47e8f53d9993e51a6d76252d9f8e5162b1b9805e6ffc20", + "variant": null + }, + "cpython-3.10.18+debug-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "ed4d68544efef0d7c158c4464d8e3b4407a02e2ea014e76dfa65fddfd49384af", + "variant": "debug" + }, + "cpython-3.10.18+debug-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "39fdc60b2645262ef658ebbf5edfaffd655524855d3aa35bfb05a149a271e4f5", + "variant": "debug" + }, + "cpython-3.10.18+debug-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "cf0c02ab4b46c9b6a0854e5bd9da9b322d8d91ae5803190b798ff15cb25ab153", + "variant": "debug" + }, + "cpython-3.10.18+debug-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "e9f346d7fa001e85cea92cf027b924c2095d54f7db297287b2df550f04e6c304", + "variant": "debug" + }, + "cpython-3.10.18+debug-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "c11eba8055c7bb643f55694fb1828d8d13e4ade2cb3ec60d8d9bb38fbf7500d8", + "variant": "debug" + }, + "cpython-3.10.18+debug-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "c7b407062dc86e011c2e3d8f5f0e1db8d8eac3124e4d0b597f561d7f7b2a8723", + "variant": "debug" + }, + "cpython-3.10.18+debug-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null 
+ }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "1ba2a0159629d92207966cbf2038774afd0f78cc59e94efb8a86e88a32563bdd", + "variant": "debug" + }, + "cpython-3.10.18+debug-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "ebee02e3380e50e394962697dc4d4c845f60ac356da88f671be563ef0dafaa9b", + "variant": "debug" + }, + "cpython-3.10.18+debug-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "4de984931af2c4a2b18139ff123843671c5037900524065c2fef26ff3d1a5771", + "variant": "debug" + }, + "cpython-3.10.18+debug-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "fd97d5565e0fb98ad78db65f107789e287f84c53f4d9f3ccb37fdd5f3849288b", + "variant": "debug" + }, + "cpython-3.10.18+debug-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "ea450da681ab3fdef0da5181d90ebff7331ce1f7f827bb3b56657badc4127fad", + "variant": "debug" + }, + "cpython-3.10.18+debug-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "ff9fe8b880460ce9529db369e2becca20a7e6a042df2deba2277e35c5cdcd35a", + "variant": "debug" + }, + "cpython-3.10.18+debug-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 10, + "patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "c1a1d9661cf1d45096478fefd1e70ff6d0cbc419194cf094414d24fa336f5116", + "variant": "debug" + }, + "cpython-3.10.18+debug-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 10, + 
"patch": 18, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.10.18%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "2bf809a85ffc45a37b32d5107f1a3ee8a6d12f07bb5fd3ad26ba16501418a8a7", + "variant": "debug" + }, "cpython-3.10.17-darwin-aarch64-none": { "name": "cpython", "arch": { @@ -14347,8 +20811,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "1555a3d4149924471c2e1027fd5985a67b3779196bfecba1ea455d04e787d322", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "366d037181b1cea0a7a8b1457874a0cdfbb795815d07ae25c55ef1461aa487ef", "variant": null }, "cpython-3.10.17-darwin-x86_64-none": { @@ -14363,8 +20827,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "6cd2984ea44579d099bb7dd4e0f2874e31ec37ec2e85088be9978629ffb83982", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "cb45a45bcbdf00a4808f48fbf344f597a01e66c5ed83a7e388883c86844bd2f1", "variant": null }, "cpython-3.10.17-linux-aarch64-gnu": { @@ -14379,8 +20843,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "9136fee0f3f020a4b7afee1f3179920db98822c8fd9b0288dc0671a6823e01e2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "aaf1fd370ab3ae31807c8d7d5aff0b8d2abb370c12d17a561178ceb842314f2a", "variant": null }, "cpython-3.10.17-linux-armv7-gnueabi": { @@ -14395,8 +20859,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "e6df245a7f4f91d29f331a21c4fb9e7835dfd8870b77f3fa1a1825a361611247", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "a8861876b7bdf8cec9a8278bd79c009fb130dcfae657fb3e0e676b57ffed1b43", "variant": null }, "cpython-3.10.17-linux-armv7-gnueabihf": { @@ -14411,8 +20875,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "4b2dd5d055bdc4a5b5e9b72813d7f4d644070b7fd74ae7cd0d7aa0b4b7ccd102", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "210d6c9559f8f1224eb32c6e4e02a2ce4f9ba8b304bb5b5356a8fa68331c35ad", "variant": null }, "cpython-3.10.17-linux-powerpc64le-gnu": { @@ 
-14427,8 +20891,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "bcbc96cbbe9308b5b59ed2ee3c29966d464dd7fdd5765c63e900ad30a7262ecf", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "e5a9d5761c452d2e233f4a52909700b77426f8211d3dc9e259cce55e7cb94ed9", "variant": null }, "cpython-3.10.17-linux-riscv64-gnu": { @@ -14443,8 +20907,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "6edbf5c64182d775deb1f14775ef20e71c5a7728de3fab4442b8bfdd6e842828", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "e55eeabde02dd08fccd7664750bed9dcc83150c9ff6f5e3c7de62457f681e56d", "variant": null }, "cpython-3.10.17-linux-s390x-gnu": { @@ -14459,8 +20923,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "55ea4cc18593daec33282ba5499d91e44ad759734ee1bfbde0036c8bfe1b73d6", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "ead1b5ba133d6646a2065e8d81ff5476dd9103cd35bf68d123ec66d47aff7baf", "variant": null }, "cpython-3.10.17-linux-x86_64-gnu": { @@ -14475,8 +20939,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "322cb251eba1be573d025265fe9fdfcb91998de283265304de1c868e0908bfb1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "e3abc6e300ccdfe5e8faf220d0682dc8eae4d438b96b7d312b32d50a4e536d21", "variant": null }, "cpython-3.10.17-linux-x86_64-musl": { @@ -14491,8 +20955,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "5f39c748f671c1200090790e27d0d56128fe91d3f7e8a4387b0ad4a7f82d60f5", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "32bf6162838c5b2fde22eeab84d6537037cf443323a4ec5921d8619c75de6378", "variant": null }, "cpython-3.10.17-linux-x86_64_v2-gnu": { @@ -14507,8 +20971,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "1826fa93c79dc399a425c43448f57ccf000d972c78dec688b99737312b998417", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f4b1ad8ec67363f4e6241c581c4e0fc804378f243f65bf13f72f6c3fb049f3aa", "variant": null }, "cpython-3.10.17-linux-x86_64_v2-musl": { @@ -14523,8 +20987,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "eecef0af8c155dbe9cefb9bedff63fa3e4996d91ffce96c5ad91754ef9a685c1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "af472547f8567ddb893d3321d2213d0a92322b873b7534d8206e65c5fa2b642c", "variant": null }, "cpython-3.10.17-linux-x86_64_v3-gnu": { @@ -14539,8 +21003,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "6694cb082e9df7309af033dbc8f95fed5a2a2ba2a13de7604d03381430cd8245", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "2397fe96f854f30d30b87aec7094d44e84dce791a28df1f90bd04c631f1dd347", "variant": null }, "cpython-3.10.17-linux-x86_64_v3-musl": { @@ -14555,8 +21019,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "80eb25461151df22284af9de65ae47131d83a284143eda62af4d8f47b0a1ef0e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "97f4ace9d7432cfca9322a23c2838f54c712622064c52f6b5e7ba1907d0bdcda", "variant": null }, "cpython-3.10.17-linux-x86_64_v4-gnu": { @@ -14571,8 +21035,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "5d345006b4b8b425a0a744539cab29b78cb5b4ee6f14108fbec47bbad7dc725b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "766c94f2ab33be4fb2ad2fd4d4f8d026d21ba4b74d29aaab7295252968c3cf9b", "variant": null }, "cpython-3.10.17-linux-x86_64_v4-musl": { @@ -14587,8 +21051,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "6c097820d2f0da5fa0a3a9192d3d1b79933ada766c74f868769d2e82017e18ef", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "99ee84e6b1643fc3589e2f62130d1a9013c914773dc4074b25b80cc534185ebf", "variant": null }, "cpython-3.10.17-windows-i686-none": { @@ -14603,8 +21067,8 @@ "minor": 10, 
"patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "e94c73ed200d5543e09b872bc85c795b8005504171a4e6f2010e2e28bc61577f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "f411aa385e5034c1eb07e0c038296725a3cc357d9b5942cc7d350ac916a77b77", "variant": null }, "cpython-3.10.17-windows-x86_64-none": { @@ -14619,8 +21083,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "00ff1e1d31cf7f973cbb3d6389f6cf92ae8da55d299c1f339043c81721f23f2c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "775558a9ad71d42afb168ff76f6db6fa4bf512e933b888e55c8f0557860eac83", "variant": null }, "cpython-3.10.17+debug-linux-aarch64-gnu": { @@ -14635,8 +21099,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "a40928607745eaefe0072efabbd63b8d6e2d275e4be97cce44b3df07f6d8e76b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "84566e47b3fc09e5d8b7d51d8761d95a235a521cfc7e62ae71beb031d0b37feb", "variant": "debug" }, "cpython-3.10.17+debug-linux-armv7-gnueabi": { @@ -14651,8 +21115,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "bd2e549313001b5f5a5b05b9764495bd9afe31455fd7226117f8e72a41d241fe", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "25dc261e3704c66dba349f2ef14bd725f7c8fccc27a79d16b1fc6e29427bdfcb", "variant": "debug" }, "cpython-3.10.17+debug-linux-armv7-gnueabihf": { @@ -14667,8 +21131,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "765f9c644c305ab36ec08827e7ceb1f25b3941968ca7a15640462470283b4b4f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "ee3f482a927a5d7c1617280bddf58eeaedc591826c9c318d2cc3229167965742", "variant": "debug" }, "cpython-3.10.17+debug-linux-powerpc64le-gnu": { @@ -14683,8 +21147,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "299db3b606c1000b9f24e5ec038c80f02d88442a411c11c8a8a08b85bc1d753d", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "8cc701740649bce2c60978d4a6838d401a2567d81156522ac50c88ff9de2b87a", "variant": "debug" }, "cpython-3.10.17+debug-linux-riscv64-gnu": { @@ -14699,8 +21163,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "044bbdccce545166ce84c8f3250faba1a4c5409b473e97e38a95e9ec16e361c8", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "b27de0dbff584735505073779c20348036bad7804db5b2bd2f0102420389da46", "variant": "debug" }, "cpython-3.10.17+debug-linux-s390x-gnu": { @@ -14715,8 +21179,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "a6a8ebff520837e92e6223d4261286a6443eb9a89f245268d0d6f553940b1a35", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "0b7184152655145a52f9a24016bf4f344f532fa455875433d8dbd6e232c932e9", "variant": "debug" }, "cpython-3.10.17+debug-linux-x86_64-gnu": { @@ -14731,8 +21195,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "bb34d1532186d935dfe1fc4e6d671341e4c5905cca2390f0457608e70fd8b605", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "d023386aa62e6d991b69d59a9eb29ea422e3e8e71c71654828aee5b078f55296", "variant": "debug" }, "cpython-3.10.17+debug-linux-x86_64-musl": { @@ -14747,8 +21211,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "13a06dcaf00545ca37258270f7f8c92e64c719485b4084bffe2cf8ec51ecb97d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "8c9edbf1e0ba8b19c63f493a7dd9bae3bbe84a4eb603bb5df50a2c6d2e9c7a3b", "variant": "debug" }, "cpython-3.10.17+debug-linux-x86_64_v2-gnu": { @@ -14763,8 +21227,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "6735a10ef8e5c9fc666e8043618dffdecbdc92f8892e3605584ca92417e6642e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "9f28ffc7c31662ad4757d57cfe77862b8c73a741169e9a4f760388154a9fed26", "variant": "debug" }, "cpython-3.10.17+debug-linux-x86_64_v2-musl": { @@ -14779,8 +21243,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "3152ba36ae323a17e0fcad2b4bf5ac381a2d53ecaa448b0fbf592d095c456751", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "6ca2a607c716568c8efa3990b518b900229b9bdc121baa5eb894b9c20b43d760", "variant": "debug" }, "cpython-3.10.17+debug-linux-x86_64_v3-gnu": { @@ -14795,8 +21259,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "15678465487552d92ec0100424f5d40c4cd9b3aa96f36879517d291d20912002", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "f5ce71fd8e96ecfb2bc3f788c5d8594302b1defcbecf7e53b2138032a9063bb2", "variant": "debug" }, "cpython-3.10.17+debug-linux-x86_64_v3-musl": { @@ -14811,8 +21275,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "7b95a1221bbb742e954c183e1245ae81f81193b4b7d23637123371475fa81dac", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "f2ce95ead101812bd5603ae8bf1607ea6cb0be9c4f5479a282e7758d58a3e287", "variant": "debug" }, "cpython-3.10.17+debug-linux-x86_64_v4-gnu": { @@ -14827,8 +21291,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "04bab0d566cc9294140c05cf268cf076d9392f33593b716307952314854a1ee7", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "3f42d84369ce6c49eec3efe7bb74d05dfbd2d90dc634fb432c373ef78abd3104", "variant": "debug" }, "cpython-3.10.17+debug-linux-x86_64_v4-musl": { @@ -14843,8 +21307,8 @@ "minor": 10, "patch": 17, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.10.17%2B20250517-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": "6c63189c87b26f068c91bf90d4a9d4fbf9f53793bae79aae32d91a2a52615ede", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.10.17%2B20250529-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "0cf0b666dca349a9d21b76f55c3f2b5d39de9398a1401d80bfce1f75efed6dbc", "variant": "debug" }, "cpython-3.10.16-darwin-aarch64-none": { @@ -19263,6 +25727,518 @@ "sha256": null, "variant": "debug" }, + "cpython-3.9.23-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": 
"3ab0d1885fee62dadc1123f0b23814e51b6abe5dcf6182a0c9af6cfc69764741", + "variant": null + }, + "cpython-3.9.23-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "0fbb8bcc5d203b83ba1e63f9b8b1debe9162c22dd0f7481543f310b298255d6a", + "variant": null + }, + "cpython-3.9.23-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "da2e4a73d7318241031d87da2acb7da99070f94d715b8c9f8c973a5d586b20a6", + "variant": null + }, + "cpython-3.9.23-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "41599a37d0f6fa48b44183d15a7c98a299839b83fa28774ff3f01d28500da9a6", + "variant": null + }, + "cpython-3.9.23-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "2263daa7d9cda3e53449091dc86aa7931409721031bad1a1a160b214777c5cd6", + "variant": null + }, + "cpython-3.9.23-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "fc068ac5cf5e4effc74e2b63e34c2618e5a838737a19ca8f7f17cc2f10e44f26", + "variant": null + }, + "cpython-3.9.23-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "5475f1106abed1b1163fa7964f8f8e834cbdafc26ddb9ab79cc5c10fb8110457", + "variant": null + }, + "cpython-3.9.23-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "2d571c79b0722488b4980badb163ebd83e48b02b5a125239c67239df8dd37476", + "variant": null + }, + 
"cpython-3.9.23-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "7932256affbd8fe7e055fb54715dae47e4557919bfe84bb8f33260a7a792633a", + "variant": null + }, + "cpython-3.9.23-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "64c4bb8c76b50f264a6900f3391156efd0c39ad75447f1b561aa0b150069e361", + "variant": null + }, + "cpython-3.9.23-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c2bdab1548c60ed0bda4c69bea6dd17569c1d681065ed5ec5395175ed165f47a", + "variant": null + }, + "cpython-3.9.23-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "61b59f2c19575acd088e1d63ca95e810e8e2b1af20f37d7acebf90f864c22ca4", + "variant": null + }, + "cpython-3.9.23-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f791037703a7370783c853bb406034532599ff561dfbf5bc67d44323d131b3c3", + "variant": null + }, + "cpython-3.9.23-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "88c3ad43158942c232039752e4d269cd89e282795e4c7f863f76f3e307b852f4", + "variant": null + }, + "cpython-3.9.23-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "0a71dcb46a9ff949f7672f65090d210ee79d80846f10629e3f234eb7f5fe58e8", + "variant": null + }, + "cpython-3.9.23-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": 
"x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "cd574a9a36a729aa964e1c52bb3084a36350d905c4d16427d85dd3f80e1b3dcd", + "variant": null + }, + "cpython-3.9.23-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "f5b6a6185ed80463160cbd95e520d8d741873736d816ac314d3e08d61f4df222", + "variant": null + }, + "cpython-3.9.23-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "a8f80f8da7901fba2b271cdc5351a79b3d12fd95ee50cc4fe78410dc693eb150", + "variant": null + }, + "cpython-3.9.23+debug-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "c00ba3d83356c187e39c9d6b1541733299a675663690dc1b49c62a152d2db191", + "variant": "debug" + }, + "cpython-3.9.23+debug-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "eb4875c6220036fd1b40af4d885823057122d61fc60f0b2c364065259adad0cc", + "variant": "debug" + }, + "cpython-3.9.23+debug-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "eca68cac8c0880f08de5c1bcae91ff0bd7fe64e5788a433fc182a5e037af671c", + "variant": "debug" + }, + "cpython-3.9.23+debug-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "5ffc8d84b6098cfa5e2e3aaedcc3e130809d5caa1958d5155995ed3df15d8cc7", + "variant": "debug" + }, + "cpython-3.9.23+debug-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + 
"minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "d7f38d5539d7a0b15ce6071ba3290ce1a4ac2da3bd490d023b4d7b36c6c33c89", + "variant": "debug" + }, + "cpython-3.9.23+debug-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "14250195a8c4c42fa9b22e7ca70ac5be3fe5e0ca81239c0672043eddeb6bb96e", + "variant": "debug" + }, + "cpython-3.9.23+debug-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "846ad94f04ca8413762e6cfaee752156bbaa75f3ec030bcc235453f708e3577c", + "variant": "debug" + }, + "cpython-3.9.23+debug-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "4ef30683e0dd6a08a6ef591ab37a218baa42a7352f5c3951131538ab0ef83865", + "variant": "debug" + }, + "cpython-3.9.23+debug-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "8964daf898c112bc5caa9499e8d1ba4c0d82911b4c3e07044c7f5abf489b97c6", + "variant": "debug" + }, + "cpython-3.9.23+debug-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "868f2f3e994992a1b68eb051fa2678a2e57bbbe1fcfc9f48461b0d2d87c5b6a8", + "variant": "debug" + }, + "cpython-3.9.23+debug-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "1616c6f535b6edf4160ee97b9beca8146f9cd77a4de8c240a0a3f095a09795e9", + "variant": "debug" + }, + "cpython-3.9.23+debug-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "1f9d7987734042d04badc60686f5503eb373ea8b7b7f3ade6a58a37f7d808265", + "variant": "debug" + }, + "cpython-3.9.23+debug-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "4b8f925b20b6b74c1eb48fa869ee79cde20745fb93c83776e5c71924448e7e53", + "variant": "debug" + }, + "cpython-3.9.23+debug-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 9, + "patch": 23, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250712/cpython-3.9.23%2B20250712-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "ecab1905698e5dd4a11c46a1dc6be49cf0e37f70b81191adbb7dad6e453906cb", + "variant": "debug" + }, "cpython-3.9.22-darwin-aarch64-none": { "name": "cpython", "arch": { @@ -19275,8 +26251,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "90e82a402311bade9663940b37b3c9cd09b158824aaaacfa24d68c1ecb7e2ce1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "165cb2574669b3df82e40db8cb01bf4bbc9bb594dc09f9ae34d313ecd27ec7b8", "variant": null }, "cpython-3.9.22-darwin-x86_64-none": { @@ -19291,8 +26267,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "a004114e69f431288096e6eed2fd4d28e1bf632e66f6c37d178fcfa9483601a1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "fbb4a50fffb19d5e0d33fcd53474c6e70b578d20d80b4841bceb22df596208fa", "variant": null }, "cpython-3.9.22-linux-aarch64-gnu": { @@ -19307,8 +26283,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "83cf6f2f7856185c6930432eb6d79c08a1225f55bb2a2866387ec87a7d5fc9a9", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "42772f60d3c73be63e8b5abaa08ffc23e56458893fe5764ad7586e116a8f6acd", "variant": null }, "cpython-3.9.22-linux-armv7-gnueabi": { @@ -19323,8 +26299,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "2df7ae5fbcf884db7419c6d11560cc51c3b1137f71f16a15e513c90403b9924a", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "592435ea8f991c965d996dcb36b5df5d789f9a3872e404932a8c73dfc02b1a40", "variant": null }, "cpython-3.9.22-linux-armv7-gnueabihf": { @@ -19339,8 +26315,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "67196f34e4a04af94ad7c54b02ec7a02e29c3be61f92c2b82d9087fdfc2aa29a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "bf011ef076e05617e65aaac001c23bb78bd50ad3b47ec74ccfc56a612a38e003", "variant": null }, "cpython-3.9.22-linux-powerpc64le-gnu": { @@ -19355,8 +26331,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "f66356c75929f8e17fb4e87a4a325d9bfd08aa9730bf02d0e5b56681b9454933", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "8b4561950089e30b130670952156f8ac86ee8a079ba91d7aaae97bd28f47c0e5", "variant": null }, "cpython-3.9.22-linux-riscv64-gnu": { @@ -19371,8 +26347,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "c2ca5c9d24fe73abe09ebd2d839d65867bbed3c0959c4b98923912d238d74bbd", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "b151539a8b645ef360f395278f911ae6a71e9a45599e72f8c78b426d328fcc1b", "variant": null }, "cpython-3.9.22-linux-s390x-gnu": { @@ -19387,8 +26363,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "5a31bd2915728c33b7e6d12b423c9f16dffaab766119a04d7152682578120191", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "a98bdb10fe26dcd4e7074607ad7909d037fcafad33fa169946bbe6a868bd77f1", "variant": null }, "cpython-3.9.22-linux-x86_64-gnu": { @@ -19403,8 +26379,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "c84a6a05d250168a85cfa2f432c61bb66ba5a85d3c45524d2024e9eb7c649d24", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "4aa03eb60fc6934e40d94ed9166f74ada85d851434667afcd152d46e1d995dbf", "variant": null }, "cpython-3.9.22-linux-x86_64-musl": { @@ -19419,8 +26395,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "773b3842fc691188ec6cfb1ba970be30fe3af5708deb456d5fb09e07776d0e9f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "eddccfaa41df310c9d93158221e8c7daac86eec8786c2e9890db42f15d03109e", "variant": null }, "cpython-3.9.22-linux-x86_64_v2-gnu": { @@ -19435,8 +26411,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "aea9b28f06ab11f9fc533852fe8e8e035cf675486c6e31c1cd823c2f16f7b701", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "55d3ca738f8ea5f46e2e3c3e9c50849b94fa08c643bfcd4db9215585255ba564", "variant": null }, "cpython-3.9.22-linux-x86_64_v2-musl": { @@ -19451,8 +26427,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "66adc7f9ff5cfc8d9dab99a400bd09660658c9b96abb9abaa5d9779bef6e411c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "68fbf8bab8f0b526179fbbe59a26c4d3a297128d15ba10d502804e16a25d941c", "variant": null }, "cpython-3.9.22-linux-x86_64_v3-gnu": { @@ -19467,8 +26443,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "640ad1b6be2c68a1d8e6268211e69ccbeb04ff8de5c60554809dc86d7f110672", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "9d2b43a36187288ecb0ee2e48201d4f16e88545d666af5c39d4c2bf7bdf267bc", "variant": null }, "cpython-3.9.22-linux-x86_64_v3-musl": { @@ -19483,8 +26459,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "e0011f581688c486e9ae5c5cf523071c920e9225e1b8894a45d41f6772c73f13", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "ac20b667db74690d60f0d55f5845392c07f639b57502e09b156afcdde098562a", "variant": null }, "cpython-3.9.22-linux-x86_64_v4-gnu": { @@ -19499,8 +26475,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "15746c09f68f19104f4db554ef8d6b5b51bdf16d84a4712e1a3dfcb48cab48e9", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "e15226686318a682efebaed8d0310a8035234d883b6bab6882c2f8c3dc5a1135", "variant": null }, "cpython-3.9.22-linux-x86_64_v4-musl": { @@ -19515,8 +26491,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "23abb8514d2eb1131b4cb3f5feed7c65ac5683533c03d5d67952a4a1ea427042", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "e25ed29cc1d51ae0dbe7315c14ba1aae206c0d4b75d9e5364d6a2ad4f99bbb21", "variant": null }, "cpython-3.9.22-windows-i686-none": { @@ -19531,8 +26507,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "c684ac8bd2dd9737246421490f18c1685cd6b3667bd6f197e4806f7360f2e31f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "437e2d4b16cfdc5fcd66b88d0966db8880170ac9cb1b068e777c6521acced3db", "variant": null }, "cpython-3.9.22-windows-x86_64-none": { @@ -19547,8 +26523,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "e0f63c83b575d49b5cedd9a9f2765db8348fc8a5a3402e69cdcde9e1cfdbcbba", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "10cb302c27a5e58ecd7aa2684a731e32924677114728626664c1cecce6e960a0", "variant": null }, "cpython-3.9.22+debug-linux-aarch64-gnu": { @@ -19563,8 +26539,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "5e67b76c2a5f2ae21c9d86baf8131d018f99c0ae525f0cb31eeac84f0b5da5d8", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "9ebbafc96803cd5c8ec25fadaa0c97b874397de06c91747f7977515407e75c93", "variant": "debug" }, "cpython-3.9.22+debug-linux-armv7-gnueabi": { @@ -19579,8 +26555,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "f32eb374a8abb8a614e806032fc9d40fc38c339f3af433e8c923e4058320918a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "251e6320de280131b76d11b62798726c3b4d4b808327a344f725c62d5cde0031", "variant": "debug" }, "cpython-3.9.22+debug-linux-armv7-gnueabihf": { @@ -19595,8 +26571,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "f9498b60c1056265b5df1eed0b2125f37c6e878a45367d60023387f3c6767cb6", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "a0ddc173dcc1c21f10e04b1d6048d52d8f65c9a0d36bb0d2f67803db43ca5367", "variant": "debug" }, "cpython-3.9.22+debug-linux-powerpc64le-gnu": { @@ -19611,8 +26587,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "1ebba8cd604bd81f9894a7593c4a9403cb0169c51ce90396ee8d5ece9f86c177", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "c8cc3490d167e9edb037886181e4b8f73ddf3cd62751e211c1a04532138474a6", "variant": "debug" }, "cpython-3.9.22+debug-linux-riscv64-gnu": { @@ -19627,8 +26603,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "6caae063207c3d7491a78470127b250497c6a6d524b56654db5ebeef3d3cdb14", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "1d35753162dff1824418dba8ea9c9136174602ab9f5509720e0d35984fee492b", "variant": "debug" }, "cpython-3.9.22+debug-linux-s390x-gnu": { @@ -19643,8 +26619,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "dde791eab923c035a81179c210d32c145dc43e401ceea39c37e6e8fef66139d2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "b5f8c29f427bcdf809e0d43fb89e901b2f6d03d4fd4878892b441472bb30eba9", "variant": "debug" }, "cpython-3.9.22+debug-linux-x86_64-gnu": { @@ -19659,8 +26635,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "5406685459e0baa23280e3abaadb275cbed99e734825c85d3f3b14c6eb5e84ab", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "7d1f28b1f24a44f90f03a847b61768f8883f51fc1a73fab438da4f36fa935480", "variant": "debug" }, "cpython-3.9.22+debug-linux-x86_64-musl": { @@ -19675,8 +26651,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "c39e3bc6a484fe474aa0ebd5855f6c0efe281cff2dd53cccd1dcced4e3f85410", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": 
"02ed324049aea894aadf1adf27b9de1d43d7957668713f1bbc4fcbf2a5af96a7", "variant": "debug" }, "cpython-3.9.22+debug-linux-x86_64_v2-gnu": { @@ -19691,8 +26667,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "af16da6574fa8019e946870823fc95f5e0bcdef30644f89da1d9d8212688f619", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "cab5c3d2060d5f5eff81e4cdaaee58f254e2c2a49209d9843988987d1ff8de50", "variant": "debug" }, "cpython-3.9.22+debug-linux-x86_64_v2-musl": { @@ -19707,8 +26683,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "af8e632c83cbb4b5f8cc447bcaeea2286fd9ff85a27ec74c09ab0599db2e9e82", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "acd1fe7146f33582b0498976b41df949dbe01dec2c602d1e7041fd4ee1763e40", "variant": "debug" }, "cpython-3.9.22+debug-linux-x86_64_v3-gnu": { @@ -19723,8 +26699,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "ef8b2e8bd9b4790ee91d9aa6481714ee045af40de32e63dfef0989d85416100b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "4df3bf083ec4cfc9f295505b468d9c7f4801c583433a69de2e1cb7a17d740a48", "variant": "debug" }, "cpython-3.9.22+debug-linux-x86_64_v3-musl": { @@ -19739,8 +26715,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "389b989618a7ba7e3a2a01f16fd6fc9da22f25155b2e17f04bf9055246c4fd65", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "4b48015067981315c071aad0fe30c83a9fa0d6c6a24990f8bfef902bdb6acef9", "variant": "debug" }, "cpython-3.9.22+debug-linux-x86_64_v4-gnu": { @@ -19755,8 +26731,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "b9f0f68aeb1c68f7a9ab7f4635d24f68936ae76b003b70ff6c017a179bd58238", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "fd5b4a9c69af62f878d6252c15a12a1b8c99e3702007cbcd5d9a8314408b8c81", "variant": "debug" }, "cpython-3.9.22+debug-linux-x86_64_v4-musl": { @@ -19771,8 +26747,8 @@ "minor": 9, "patch": 22, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250517/cpython-3.9.22%2B20250517-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": 
"9fbbd8b798874230aac19b543961e2de5cc09b170d6ea31e98a694af581ab644", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250529/cpython-3.9.22%2B20250529-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "5816ee798d1089e26c342e044f671345fb7c68293d5320f7c65578fe768eb4ac", "variant": "debug" }, "cpython-3.9.21-darwin-aarch64-none": { @@ -26991,6 +33967,102 @@ "sha256": null, "variant": "debug" }, + "pypy-3.11.13-darwin-aarch64-none": { + "name": "pypy", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://downloads.python.org/pypy/pypy3.11-v7.3.20-macos_arm64.tar.bz2", + "sha256": "84a48e09c97f57df62cc9f01b7a6d8c3e306b6270671d871aa8ab8c06945940d", + "variant": null + }, + "pypy-3.11.13-darwin-x86_64-none": { + "name": "pypy", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://downloads.python.org/pypy/pypy3.11-v7.3.20-macos_x86_64.tar.bz2", + "sha256": "bb3ae80cf5fca5044af2e42933e7692c7c5e76a828ce0eb6404a5d5da83b313c", + "variant": null + }, + "pypy-3.11.13-linux-aarch64-gnu": { + "name": "pypy", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://downloads.python.org/pypy/pypy3.11-v7.3.20-aarch64.tar.bz2", + "sha256": "9347fe691a07fd9df17a1b186554fb9d9e6210178ffef19520a579ce1f9eb741", + "variant": null + }, + "pypy-3.11.13-linux-i686-gnu": { + "name": "pypy", + "arch": { + "family": "i686", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://downloads.python.org/pypy/pypy3.11-v7.3.20-linux32.tar.bz2", + "sha256": "d08ce15dd61e9ace5e010b047104f0137110a258184e448ea8239472f10cf99b", + "variant": null + }, + "pypy-3.11.13-linux-x86_64-gnu": { + "name": "pypy", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://downloads.python.org/pypy/pypy3.11-v7.3.20-linux64.tar.bz2", + "sha256": "1410db3a7ae47603e2b7cbfd7ff6390b891b2e041c9eb4f1599f333677bccb3e", + "variant": null + }, + "pypy-3.11.13-windows-x86_64-none": { + "name": "pypy", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://downloads.python.org/pypy/pypy3.11-v7.3.20-win64.zip", + "sha256": "a8d36f6ceb1d9be6cf24a73b0ba103e7567e396b2f7a33426b05e4a06330755b", + "variant": null + }, "pypy-3.11.11-darwin-aarch64-none": { "name": "pypy", "arch": { diff --git a/crates/uv-python/fetch-download-metadata.py b/crates/uv-python/fetch-download-metadata.py index f3976ee5f..08adaecea 100755 --- a/crates/uv-python/fetch-download-metadata.py +++ b/crates/uv-python/fetch-download-metadata.py @@ -630,7 +630,9 @@ class GraalPyFinder(Finder): for download in batch: url = download.url + ".sha256" checksum_requests.append(self.client.get(url)) - for download, resp in zip(batch, await asyncio.gather(*checksum_requests)): + for download, resp in zip( + batch, await asyncio.gather(*checksum_requests), strict=False + ): try: resp.raise_for_status() except httpx.HTTPStatusError as e: @@ -729,7 +731,7 @@ async def find() -> None: } if 
token: headers["Authorization"] = "Bearer " + token - client = httpx.AsyncClient(follow_redirects=True, headers=headers, timeout=15) + client = httpx.AsyncClient(follow_redirects=True, headers=headers, timeout=60) finders = [ CPythonFinder(client), diff --git a/crates/uv-python/python/get_interpreter_info.py b/crates/uv-python/python/get_interpreter_info.py index 2b1ee09cc..8e9fc37fd 100644 --- a/crates/uv-python/python/get_interpreter_info.py +++ b/crates/uv-python/python/get_interpreter_info.py @@ -39,10 +39,9 @@ if hasattr(sys, "implementation"): # GraalPy reports the CPython version as sys.implementation.version, # so we need to discover the GraalPy version from the cache_tag import re + implementation_version = re.sub( - r"graalpy(\d)(\d+)-\d+", - r"\1.\2", - sys.implementation.cache_tag + r"graalpy(\d)(\d+)-\d+", r"\1.\2", sys.implementation.cache_tag ) else: implementation_version = format_full_version(sys.implementation.version) @@ -510,6 +509,24 @@ def get_operating_system_and_architecture(): "major": int(version[0]), "minor": int(version[1]), } + elif operating_system == "emscripten": + pyodide_abi_version = sysconfig.get_config_var("PYODIDE_ABI_VERSION") + if not pyodide_abi_version: + print( + json.dumps( + { + "result": "error", + "kind": "emscripten_not_pyodide", + } + ) + ) + sys.exit(0) + version = pyodide_abi_version.split("_") + operating_system = { + "name": "pyodide", + "major": int(version[0]), + "minor": int(version[1]), + } elif operating_system in [ "freebsd", "netbsd", @@ -565,7 +582,6 @@ def main() -> None: elif os_and_arch["os"]["name"] == "musllinux": manylinux_compatible = True - # By default, pip uses sysconfig on Python 3.10+. # But Python distributors can override this decision by setting: # sysconfig._PIP_USE_SYSCONFIG = True / False @@ -590,7 +606,7 @@ def main() -> None: except (ImportError, AttributeError): pass - import distutils.dist + import distutils.dist # noqa: F401 except ImportError: # We require distutils, but it's not installed; this is fairly # common in, e.g., deadsnakes where distutils is packaged @@ -623,7 +639,10 @@ def main() -> None: # Prior to the introduction of `sysconfig` patching, python-build-standalone installations would always use # "/install" as the prefix. With `sysconfig` patching, we rewrite the prefix to match the actual installation # location. So in newer versions, we also write a dedicated flag to indicate standalone builds. 
- "standalone": sysconfig.get_config_var("prefix") == "/install" or bool(sysconfig.get_config_var("PYTHON_BUILD_STANDALONE")), + "standalone": ( + sysconfig.get_config_var("prefix") == "/install" + or bool(sysconfig.get_config_var("PYTHON_BUILD_STANDALONE")) + ), "scheme": get_scheme(use_sysconfig_scheme), "virtualenv": get_virtualenv(), "platform": os_and_arch, diff --git a/crates/uv-python/python/packaging/_elffile.py b/crates/uv-python/python/packaging/_elffile.py index f7a02180b..8dc7fb32a 100644 --- a/crates/uv-python/python/packaging/_elffile.py +++ b/crates/uv-python/python/packaging/_elffile.py @@ -69,8 +69,7 @@ class ELFFile: }[(self.capacity, self.encoding)] except KeyError: raise ELFInvalid( - f"unrecognized capacity ({self.capacity}) or " - f"encoding ({self.encoding})" + f"unrecognized capacity ({self.capacity}) or encoding ({self.encoding})" ) try: diff --git a/crates/uv-python/python/packaging/_manylinux.py b/crates/uv-python/python/packaging/_manylinux.py index 3c3aa54cb..7b52a5581 100644 --- a/crates/uv-python/python/packaging/_manylinux.py +++ b/crates/uv-python/python/packaging/_manylinux.py @@ -161,8 +161,7 @@ def _parse_glibc_version(version_str: str) -> _GLibCVersion: m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) if not m: warnings.warn( - f"Expected glibc version with 2 components major.minor," - f" got: {version_str}", + f"Expected glibc version with 2 components major.minor, got: {version_str}", RuntimeWarning, ) return _GLibCVersion(-1, -1) @@ -232,7 +231,7 @@ def platform_tags(archs: Sequence[str]) -> Iterator[str]: if set(archs) & {"x86_64", "i686"}: # On x86/i686 also oldest glibc to be supported is (2, 5). too_old_glibc2 = _GLibCVersion(2, 4) - current_glibc = _GLibCVersion(*_get_glibc_version()) + current_glibc = _get_glibc_version() glibc_max_list = [current_glibc] # We can assume compatibility across glibc major versions. # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 @@ -252,11 +251,9 @@ def platform_tags(archs: Sequence[str]) -> Iterator[str]: min_minor = -1 for glibc_minor in range(glibc_max.minor, min_minor, -1): glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) - tag = "manylinux_{}_{}".format(*glibc_version) if _is_compatible(arch, glibc_version): - yield f"{tag}_{arch}" - # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. - if glibc_version in _LEGACY_MANYLINUX_MAP: - legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] - if _is_compatible(arch, glibc_version): + yield "manylinux_{}_{}_{}".format(*glibc_version, arch) + # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. 
+ legacy_tag = _LEGACY_MANYLINUX_MAP.get(glibc_version) + if legacy_tag: yield f"{legacy_tag}_{arch}" diff --git a/crates/uv-python/python/ruff.toml b/crates/uv-python/python/ruff.toml new file mode 100644 index 000000000..5e6921be4 --- /dev/null +++ b/crates/uv-python/python/ruff.toml @@ -0,0 +1,2 @@ +# It is important retain compatibility when querying interpreters +target-version = "py37" diff --git a/crates/uv-python/src/discovery.rs b/crates/uv-python/src/discovery.rs index a90b26d65..67f8f37ff 100644 --- a/crates/uv-python/src/discovery.rs +++ b/crates/uv-python/src/discovery.rs @@ -8,6 +8,7 @@ use std::{env, io, iter}; use std::{path::Path, path::PathBuf, str::FromStr}; use thiserror::Error; use tracing::{debug, instrument, trace}; +use uv_configuration::PreviewMode; use which::{which, which_all}; use uv_cache::Cache; @@ -20,12 +21,12 @@ use uv_pep440::{ use uv_static::EnvVars; use uv_warnings::warn_user_once; -use crate::downloads::PythonDownloadRequest; +use crate::downloads::{PlatformRequest, PythonDownloadRequest}; use crate::implementation::ImplementationName; use crate::installation::PythonInstallation; use crate::interpreter::Error as InterpreterError; use crate::interpreter::{StatusCodeError, UnexpectedResponseError}; -use crate::managed::ManagedPythonInstallations; +use crate::managed::{ManagedPythonInstallations, PythonMinorVersionLink}; #[cfg(windows)] use crate::microsoft_store::find_microsoft_store_pythons; use crate::virtualenv::Error as VirtualEnvError; @@ -35,12 +36,12 @@ use crate::virtualenv::{ }; #[cfg(windows)] use crate::windows_registry::{WindowsPython, registry_pythons}; -use crate::{BrokenSymlink, Interpreter, PythonVersion}; +use crate::{BrokenSymlink, Interpreter, PythonInstallationKey, PythonVersion}; /// A request to find a Python installation. /// /// See [`PythonRequest::from_str`]. -#[derive(Debug, Clone, PartialEq, Eq, Default)] +#[derive(Debug, Clone, PartialEq, Eq, Default, Hash)] pub enum PythonRequest { /// An appropriate default Python installation /// @@ -67,6 +68,26 @@ pub enum PythonRequest { Key(PythonDownloadRequest), } +impl<'a> serde::Deserialize<'a> for PythonRequest { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'a>, + { + let s = String::deserialize(deserializer)?; + Ok(PythonRequest::parse(&s)) + } +} + +impl serde::Serialize for PythonRequest { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let s = self.to_canonical_string(); + serializer.serialize_str(&s) + } +} + #[derive(Debug, Default, Clone, Copy, PartialEq, Eq, serde::Deserialize)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] #[cfg_attr(feature = "clap", derive(clap::ValueEnum))] @@ -153,7 +174,7 @@ pub enum PythonVariant { } /// A Python discovery version request. -#[derive(Clone, Debug, Default, PartialEq, Eq)] +#[derive(Clone, Debug, Default, PartialEq, Eq, Hash)] pub enum VersionRequest { /// Allow an appropriate default Python version. #[default] @@ -222,8 +243,9 @@ pub enum Error { PythonSource, ), - /// An error was encountered when interacting with a managed Python installation. - #[error(transparent)] + /// An error was encountered while trying to find a managed Python installation matching the + /// current platform. + #[error("Failed to discover managed Python installations")] ManagedPython(#[from] crate::managed::Error), /// An error was encountered when inspecting a virtual environment. 
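// Illustrative sketch, not part of the diff: the `serde::Serialize`/`Deserialize` impls
// added for `PythonRequest` above round-trip the request through its canonical string
// form. `serde_json` is only an assumed example format here, and the exact canonical
// string and equality after re-parsing are assumptions rather than guarantees.
fn python_request_serde_roundtrip() -> Result<(), serde_json::Error> {
    let request = PythonRequest::parse("pypy@3.12");
    // Serializes as a plain string via `to_canonical_string`, e.g. "pypy@3.12".
    let encoded = serde_json::to_string(&request)?;
    // Deserializes by re-parsing the string with `PythonRequest::parse`.
    let decoded: PythonRequest = serde_json::from_str(&encoded)?;
    debug_assert_eq!(decoded, request);
    Ok(())
}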
@@ -238,6 +260,10 @@ pub enum Error { #[error("Invalid version request: {0}")] InvalidVersionRequest(String), + /// The @latest version request was given + #[error("Requesting the 'latest' Python version is not yet supported")] + LatestVersionRequest, + // TODO(zanieb): Is this error case necessary still? We should probably drop it. #[error("Interpreter discovery for `{0}` requires `{1}` but only `{2}` is allowed")] SourceNotAllowed(PythonRequest, PythonSource, PythonPreference), @@ -307,7 +333,9 @@ fn python_executables_from_virtual_environments<'a>() fn python_executables_from_installed<'a>( version: &'a VersionRequest, implementation: Option<&'a ImplementationName>, + platform: PlatformRequest, preference: PythonPreference, + preview: PreviewMode, ) -> Box> + 'a> { let from_managed_installations = iter::once_with(move || { ManagedPythonInstallations::from_settings(None) @@ -318,19 +346,44 @@ fn python_executables_from_installed<'a>( installed_installations.root().user_display() ); let installations = installed_installations.find_matching_current_platform()?; - // Check that the Python version satisfies the request to avoid unnecessary interpreter queries later + // Check that the Python version and platform satisfy the request to avoid unnecessary interpreter queries later Ok(installations .into_iter() .filter(move |installation| { - if version.matches_version(&installation.version()) { - true - } else { - debug!("Skipping incompatible managed installation `{installation}`"); - false + if !version.matches_version(&installation.version()) { + debug!("Skipping managed installation `{installation}`: does not satisfy `{version}`"); + return false; } + if !platform.matches(installation.key()) { + debug!("Skipping managed installation `{installation}`: does not satisfy `{platform}`"); + return false; + } + true }) .inspect(|installation| debug!("Found managed installation `{installation}`")) - .map(|installation| (PythonSource::Managed, installation.executable(false)))) + .map(move |installation| { + // If it's not a patch version request, then attempt to read the stable + // minor version link. + let executable = version + .patch() + .is_none() + .then(|| { + PythonMinorVersionLink::from_installation( + &installation, + preview, + ) + .filter(PythonMinorVersionLink::exists) + .map( + |minor_version_link| { + minor_version_link.symlink_executable.clone() + }, + ) + }) + .flatten() + .unwrap_or_else(|| installation.executable(false)); + (PythonSource::Managed, executable) + }) + ) }) }) .flatten_ok(); @@ -410,17 +463,20 @@ fn python_executables_from_installed<'a>( /// Lazily iterate over all discoverable Python executables. /// -/// Note that Python executables may be excluded by the given [`EnvironmentPreference`] and -/// [`PythonPreference`]. However, these filters are only applied for performance. We cannot -/// guarantee that the [`EnvironmentPreference`] is satisfied until we query the interpreter. +/// Note that Python executables may be excluded by the given [`EnvironmentPreference`], +/// [`PythonPreference`], and [`PlatformRequest`]. However, these filters are only applied for +/// performance. We cannot guarantee that the all requests or preferences are satisfied until we +/// query the interpreter. /// /// See [`python_executables_from_installed`] and [`python_executables_from_virtual_environments`] /// for more information on discovery. 
fn python_executables<'a>( version: &'a VersionRequest, implementation: Option<&'a ImplementationName>, + platform: PlatformRequest, environments: EnvironmentPreference, preference: PythonPreference, + preview: PreviewMode, ) -> Box> + 'a> { // Always read from `UV_INTERNAL__PARENT_INTERPRETER` — it could be a system interpreter let from_parent_interpreter = iter::once_with(|| { @@ -440,7 +496,8 @@ fn python_executables<'a>( .flatten(); let from_virtual_environments = python_executables_from_virtual_environments(); - let from_installed = python_executables_from_installed(version, implementation, preference); + let from_installed = + python_executables_from_installed(version, implementation, platform, preference, preview); // Limit the search to the relevant environment preference; this avoids unnecessary work like // traversal of the file system. Subsequent filtering should be done by the caller with @@ -625,25 +682,37 @@ fn find_all_minor( /// Lazily iterate over all discoverable Python interpreters. /// -/// Note interpreters may be excluded by the given [`EnvironmentPreference`] and [`PythonPreference`]. +/// Note interpreters may be excluded by the given [`EnvironmentPreference`], [`PythonPreference`], +/// [`VersionRequest`], or [`PlatformRequest`]. +/// +/// The [`PlatformRequest`] is currently only applied to managed Python installations before querying +/// the interpreter. The caller is responsible for ensuring it is applied otherwise. /// /// See [`python_executables`] for more information on discovery. fn python_interpreters<'a>( version: &'a VersionRequest, implementation: Option<&'a ImplementationName>, + platform: PlatformRequest, environments: EnvironmentPreference, preference: PythonPreference, cache: &'a Cache, + preview: PreviewMode, ) -> impl Iterator> + 'a { python_interpreters_from_executables( // Perform filtering on the discovered executables based on their source. This avoids // unnecessary interpreter queries, which are generally expensive. We'll filter again // with `interpreter_satisfies_environment_preference` after querying. - python_executables(version, implementation, environments, preference).filter_ok( - move |(source, path)| { - source_satisfies_environment_preference(*source, path, environments) - }, - ), + python_executables( + version, + implementation, + platform, + environments, + preference, + preview, + ) + .filter_ok(move |(source, path)| { + source_satisfies_environment_preference(*source, path, environments) + }), cache, ) .filter_ok(move |(source, interpreter)| { @@ -819,13 +888,8 @@ impl Error { | InterpreterError::BrokenSymlink(BrokenSymlink { path, .. 
}) => { // If the interpreter is from an active, valid virtual environment, we should // fail because it's broken - if let Some(Ok(true)) = matches!(source, PythonSource::ActiveEnvironment) - .then(|| { - path.parent() - .and_then(Path::parent) - .map(|path| path.join("pyvenv.cfg").try_exists()) - }) - .flatten() + if matches!(source, PythonSource::ActiveEnvironment) + && uv_fs::is_virtualenv_executable(path) { true } else { @@ -882,6 +946,7 @@ pub fn find_python_installations<'a>( environments: EnvironmentPreference, preference: PythonPreference, cache: &'a Cache, + preview: PreviewMode, ) -> Box> + 'a> { let sources = DiscoveryPreferences { python_preference: preference, @@ -966,17 +1031,27 @@ pub fn find_python_installations<'a>( } PythonRequest::Any => Box::new({ debug!("Searching for any Python interpreter in {sources}"); - python_interpreters(&VersionRequest::Any, None, environments, preference, cache) - .map_ok(|tuple| Ok(PythonInstallation::from_tuple(tuple))) + python_interpreters( + &VersionRequest::Any, + None, + PlatformRequest::default(), + environments, + preference, + cache, + preview, + ) + .map_ok(|tuple| Ok(PythonInstallation::from_tuple(tuple))) }), PythonRequest::Default => Box::new({ debug!("Searching for default Python interpreter in {sources}"); python_interpreters( &VersionRequest::Default, None, + PlatformRequest::default(), environments, preference, cache, + preview, ) .map_ok(|tuple| Ok(PythonInstallation::from_tuple(tuple))) }), @@ -986,8 +1061,16 @@ pub fn find_python_installations<'a>( } Box::new({ debug!("Searching for {request} in {sources}"); - python_interpreters(version, None, environments, preference, cache) - .map_ok(|tuple| Ok(PythonInstallation::from_tuple(tuple))) + python_interpreters( + version, + None, + PlatformRequest::default(), + environments, + preference, + cache, + preview, + ) + .map_ok(|tuple| Ok(PythonInstallation::from_tuple(tuple))) }) } PythonRequest::Implementation(implementation) => Box::new({ @@ -995,9 +1078,11 @@ pub fn find_python_installations<'a>( python_interpreters( &VersionRequest::Default, Some(implementation), + PlatformRequest::default(), environments, preference, cache, + preview, ) .filter_ok(|(_source, interpreter)| { interpreter @@ -1015,9 +1100,11 @@ pub fn find_python_installations<'a>( python_interpreters( version, Some(implementation), + PlatformRequest::default(), environments, preference, cache, + preview, ) .filter_ok(|(_source, interpreter)| { interpreter @@ -1038,9 +1125,11 @@ pub fn find_python_installations<'a>( python_interpreters( request.version().unwrap_or(&VersionRequest::Default), request.implementation(), + request.platform(), environments, preference, cache, + preview, ) .filter_ok(|(_source, interpreter)| request.satisfied_by_interpreter(interpreter)) .map_ok(|tuple| Ok(PythonInstallation::from_tuple(tuple))) @@ -1058,8 +1147,10 @@ pub(crate) fn find_python_installation( environments: EnvironmentPreference, preference: PythonPreference, cache: &Cache, + preview: PreviewMode, ) -> Result { - let installations = find_python_installations(request, environments, preference, cache); + let installations = + find_python_installations(request, environments, preference, cache, preview); let mut first_prerelease = None; let mut first_error = None; for result in installations { @@ -1150,17 +1241,18 @@ pub(crate) fn find_python_installation( /// /// See [`find_python_installation`] for more details on installation discovery. 
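// Illustrative sketch, not part of the diff: the change above replaces an inline check,
// which walked two directories up from the interpreter executable and looked for a
// `pyvenv.cfg`, with `uv_fs::is_virtualenv_executable(path)`. The stand-in below, with an
// assumed name and signature, mirrors the removed inline logic rather than the helper's
// actual implementation.
fn looks_like_virtualenv_executable(executable: &std::path::Path) -> bool {
    executable
        .parent() // e.g. `.venv/bin` or `.venv\Scripts`
        .and_then(std::path::Path::parent) // e.g. `.venv`
        .map(|root| root.join("pyvenv.cfg").exists())
        .unwrap_or(false)
}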
#[instrument(skip_all, fields(request))] -pub fn find_best_python_installation( +pub(crate) fn find_best_python_installation( request: &PythonRequest, environments: EnvironmentPreference, preference: PythonPreference, cache: &Cache, + preview: PreviewMode, ) -> Result { debug!("Starting Python discovery for {}", request); // First, check for an exact match (or the first available version if no Python version was provided) debug!("Looking for exact match for request {request}"); - let result = find_python_installation(request, environments, preference, cache); + let result = find_python_installation(request, environments, preference, cache, preview); match result { Ok(Ok(installation)) => { warn_on_unsupported_python(installation.interpreter()); @@ -1188,7 +1280,7 @@ pub fn find_best_python_installation( _ => None, } { debug!("Looking for relaxed patch version {request}"); - let result = find_python_installation(&request, environments, preference, cache); + let result = find_python_installation(&request, environments, preference, cache, preview); match result { Ok(Ok(installation)) => { warn_on_unsupported_python(installation.interpreter()); @@ -1205,14 +1297,16 @@ pub fn find_best_python_installation( debug!("Looking for a default Python installation"); let request = PythonRequest::Default; Ok( - find_python_installation(&request, environments, preference, cache)?.map_err(|err| { - // Use a more general error in this case since we looked for multiple versions - PythonNotFound { - request, - python_preference: err.python_preference, - environment_preference: err.environment_preference, - } - }), + find_python_installation(&request, environments, preference, cache, preview)?.map_err( + |err| { + // Use a more general error in this case since we looked for multiple versions + PythonNotFound { + request, + python_preference: err.python_preference, + environment_preference: err.environment_preference, + } + }, + ), ) } @@ -1339,7 +1433,7 @@ pub(crate) fn is_windows_store_shim(path: &Path) -> bool { 0, buf.as_mut_ptr().cast(), buf.len() as u32 * 2, - &mut bytes_returned, + &raw mut bytes_returned, std::ptr::null_mut(), ) != 0 }; @@ -1388,55 +1482,36 @@ impl PythonVariant { impl PythonRequest { /// Create a request from a string. /// - /// This cannot fail, which means weird inputs will be parsed as [`PythonRequest::File`] or [`PythonRequest::ExecutableName`]. + /// This cannot fail, which means weird inputs will be parsed as [`PythonRequest::File`] or + /// [`PythonRequest::ExecutableName`]. + /// + /// This is intended for parsing the argument to the `--python` flag. See also + /// [`try_from_tool_name`][Self::try_from_tool_name] below. pub fn parse(value: &str) -> Self { + let lowercase_value = &value.to_ascii_lowercase(); + // Literals, e.g. `any` or `default` - if value.eq_ignore_ascii_case("any") { + if lowercase_value == "any" { return Self::Any; } - if value.eq_ignore_ascii_case("default") { + if lowercase_value == "default" { return Self::Default; } - // e.g. `3.12.1`, `312`, or `>=3.12` - if let Ok(version) = VersionRequest::from_str(value) { - return Self::Version(version); - } - // e.g. `python3.12.1` - if let Some(remainder) = value.strip_prefix("python") { - if let Ok(version) = VersionRequest::from_str(remainder) { - return Self::Version(version); - } - } - // e.g. 
`pypy@3.12` - if let Some((first, second)) = value.split_once('@') { - if let Ok(implementation) = ImplementationName::from_str(first) { - if let Ok(version) = VersionRequest::from_str(second) { - return Self::ImplementationVersion(implementation, version); - } - } - } - for implementation in - ImplementationName::long_names().chain(ImplementationName::short_names()) - { - if let Some(remainder) = value.to_ascii_lowercase().strip_prefix(implementation) { - // e.g. `pypy` - if remainder.is_empty() { - return Self::Implementation( - // Safety: The name matched the possible names above - ImplementationName::from_str(implementation).unwrap(), - ); - } - // e.g. `pypy3.12` or `pp312` - if let Ok(version) = VersionRequest::from_str(remainder) { - return Self::ImplementationVersion( - // Safety: The name matched the possible names above - ImplementationName::from_str(implementation).unwrap(), - version, - ); - } - } + // the prefix of e.g. `python312` and the empty prefix of bare versions, e.g. `312` + let abstract_version_prefixes = ["python", ""]; + let all_implementation_names = + ImplementationName::long_names().chain(ImplementationName::short_names()); + // Abstract versions like `python@312`, `python312`, or `312`, plus implementations and + // implementation versions like `pypy`, `pypy@312` or `pypy312`. + if let Ok(Some(request)) = Self::parse_versions_and_implementations( + abstract_version_prefixes, + all_implementation_names, + lowercase_value, + ) { + return request; } + let value_as_path = PathBuf::from(value); // e.g. /path/to/.venv if value_as_path.is_dir() { @@ -1447,7 +1522,7 @@ impl PythonRequest { return Self::File(value_as_path); } - // e.g. path/to/python on Windows, where path/to/python is the true path + // e.g. path/to/python on Windows, where path/to/python.exe is the true path #[cfg(windows)] if value_as_path.extension().is_none() { let value_as_path = value_as_path.with_extension(EXE_SUFFIX); @@ -1490,6 +1565,143 @@ impl PythonRequest { Self::ExecutableName(value.to_string()) } + /// Try to parse a tool name as a Python version, e.g. `uvx python311`. + /// + /// The `PythonRequest::parse` constructor above is intended for the `--python` flag, where the + /// value is unambiguously a Python version. This alternate constructor is intended for `uvx` + /// or `uvx --from`, where the executable could be either a Python version or a package name. + /// There are several differences in behavior: + /// + /// - This only supports long names, including e.g. `pypy39` but **not** `pp39` or `39`. + /// - On Windows only, this allows `pythonw` as an alias for `python`. + /// - This allows `python` by itself (and on Windows, `pythonw`) as an alias for `default`. + /// + /// This can only return `Err` if `@` is used. Otherwise, if no match is found, it returns + /// `Ok(None)`. + pub fn try_from_tool_name(value: &str) -> Result, Error> { + let lowercase_value = &value.to_ascii_lowercase(); + // Omitting the empty string from these lists excludes bare versions like "39". + let abstract_version_prefixes = if cfg!(windows) { + &["python", "pythonw"][..] + } else { + &["python"][..] + }; + // e.g. just `python` + if abstract_version_prefixes.contains(&lowercase_value.as_str()) { + return Ok(Some(Self::Default)); + } + Self::parse_versions_and_implementations( + abstract_version_prefixes.iter().copied(), + ImplementationName::long_names(), + lowercase_value, + ) + } + + /// Take a value like `"python3.11"`, check whether it matches a set of abstract python + /// prefixes (e.g. 
`"python"`, `"pythonw"`, or even `""`) or a set of specific Python + /// implementations (e.g. `"cpython"` or `"pypy"`, possibly with abbreviations), and if so try + /// to parse its version. + /// + /// This can only return `Err` if `@` is used, see + /// [`try_split_prefix_and_version`][Self::try_split_prefix_and_version] below. Otherwise, if + /// no match is found, it returns `Ok(None)`. + fn parse_versions_and_implementations<'a>( + // typically "python", possibly also "pythonw" or "" (for bare versions) + abstract_version_prefixes: impl IntoIterator, + // expected to be either long_names() or all names + implementation_names: impl IntoIterator, + // the string to parse + lowercase_value: &str, + ) -> Result, Error> { + for prefix in abstract_version_prefixes { + if let Some(version_request) = + Self::try_split_prefix_and_version(prefix, lowercase_value)? + { + // e.g. `python39` or `python@39` + // Note that e.g. `python` gets handled elsewhere, if at all. (It's currently + // allowed in tool executables but not in --python flags.) + return Ok(Some(Self::Version(version_request))); + } + } + for implementation in implementation_names { + if lowercase_value == implementation { + return Ok(Some(Self::Implementation( + // e.g. `pypy` + // Safety: The name matched the possible names above + ImplementationName::from_str(implementation).unwrap(), + ))); + } + if let Some(version_request) = + Self::try_split_prefix_and_version(implementation, lowercase_value)? + { + // e.g. `pypy39` + return Ok(Some(Self::ImplementationVersion( + // Safety: The name matched the possible names above + ImplementationName::from_str(implementation).unwrap(), + version_request, + ))); + } + } + Ok(None) + } + + /// Take a value like `"python3.11"`, check whether it matches a target prefix (e.g. + /// `"python"`, `"pypy"`, or even `""`), and if so try to parse its version. + /// + /// Failing to match the prefix (e.g. `"notpython3.11"`) or failing to parse a version (e.g. + /// `"python3notaversion"`) is not an error, and those cases return `Ok(None)`. The `@` + /// separator is optional, and this function can only return `Err` if `@` is used. There are + /// two error cases: + /// + /// - The value starts with `@` (e.g. `@3.11`). + /// - The prefix is a match, but the version is invalid (e.g. `python@3.not.a.version`). + fn try_split_prefix_and_version( + prefix: &str, + lowercase_value: &str, + ) -> Result, Error> { + if lowercase_value.starts_with('@') { + return Err(Error::InvalidVersionRequest(lowercase_value.to_string())); + } + let Some(rest) = lowercase_value.strip_prefix(prefix) else { + return Ok(None); + }; + // Just the prefix by itself (e.g. "python") is handled elsewhere. + if rest.is_empty() { + return Ok(None); + } + // The @ separator is optional. If it's present, the right half must be a version, and + // parsing errors are raised to the caller. + if let Some(after_at) = rest.strip_prefix('@') { + if after_at == "latest" { + // Handle `@latest` as a special case. It's still an error for now, but we plan to + // support it. TODO(zanieb): Add `PythonRequest::Latest` + return Err(Error::LatestVersionRequest); + } + return after_at.parse().map(Some); + } + // The @ was not present, so if the version fails to parse just return Ok(None). For + // example, python3stuff. + Ok(rest.parse().ok()) + } + + /// Check if this request includes a specific patch version. 
+ pub fn includes_patch(&self) -> bool { + match self { + PythonRequest::Default => false, + PythonRequest::Any => false, + PythonRequest::Version(version_request) => version_request.patch().is_some(), + PythonRequest::Directory(..) => false, + PythonRequest::File(..) => false, + PythonRequest::ExecutableName(..) => false, + PythonRequest::Implementation(..) => false, + PythonRequest::ImplementationVersion(_, version) => version.patch().is_some(), + PythonRequest::Key(request) => request + .version + .as_ref() + .is_some_and(|request| request.patch().is_some()), + } + } + /// Check if a given interpreter satisfies the interpreter request. pub fn satisfied(&self, interpreter: &Interpreter, cache: &Cache) -> bool { /// Returns `true` if the two paths refer to the same interpreter executable. @@ -1931,6 +2143,11 @@ impl fmt::Display for ExecutableName { } impl VersionRequest { + /// Derive a [`VersionRequest::MajorMinor`] from a [`PythonInstallationKey`] + pub fn major_minor_request_from_key(key: &PythonInstallationKey) -> Self { + Self::MajorMinor(key.major, key.minor, key.variant) + } + /// Return possible executable names for the given version request. pub(crate) fn executable_names( &self, @@ -2360,6 +2577,12 @@ impl FromStr for VersionRequest { type Err = Error; fn from_str(s: &str) -> Result { + // Stripping the 't' suffix produces awkward error messages if the user tries a version + // like "latest". HACK: If the version is all letters, don't even try to parse it further. + if s.chars().all(char::is_alphabetic) { + return Err(Error::InvalidVersionRequest(s.to_string())); + } + // Check if the version request is for a free-threaded Python version let (s, variant) = s .strip_suffix('t') @@ -2722,7 +2945,7 @@ mod tests { use crate::{ discovery::{PythonRequest, VersionRequest}, - downloads::PythonDownloadRequest, + downloads::{ArchRequest, PythonDownloadRequest}, implementation::ImplementationName, platform::{Arch, Libc, Os}, }; @@ -2812,10 +3035,10 @@ mod tests { PythonVariant::Default )), implementation: Some(ImplementationName::CPython), - arch: Some(Arch { + arch: Some(ArchRequest::Explicit(Arch { family: Architecture::Aarch64(Aarch64Architecture::Aarch64), variant: None - }), + })), os: Some(Os(target_lexicon::OperatingSystem::Darwin(None))), libc: Some(Libc::None), prereleases: None @@ -2847,10 +3070,10 @@ mod tests { PythonVariant::Default )), implementation: None, - arch: Some(Arch { + arch: Some(ArchRequest::Explicit(Arch { family: Architecture::Aarch64(Aarch64Architecture::Aarch64), variant: None - }), + })), os: None, libc: None, prereleases: None @@ -3321,4 +3544,30 @@ mod tests { &["python3.13rc2", "python3.13", "python3", "python"], ); } + + #[test] + fn test_try_split_prefix_and_version() { + assert!(matches!( + PythonRequest::try_split_prefix_and_version("prefix", "prefix"), + Ok(None), + )); + assert!(matches!( + PythonRequest::try_split_prefix_and_version("prefix", "prefix3"), + Ok(Some(_)), + )); + assert!(matches!( + PythonRequest::try_split_prefix_and_version("prefix", "prefix@3"), + Ok(Some(_)), + )); + assert!(matches!( + PythonRequest::try_split_prefix_and_version("prefix", "prefix3notaversion"), + Ok(None), + )); + // Version parsing errors are only raised if @ is present. + assert!( + PythonRequest::try_split_prefix_and_version("prefix", "prefix@3notaversion").is_err() + ); + // @ is not allowed if the prefix is empty. 
+ assert!(PythonRequest::try_split_prefix_and_version("", "@3").is_err()); + } } diff --git a/crates/uv-python/src/downloads.rs b/crates/uv-python/src/downloads.rs index c4a342856..ad516d096 100644 --- a/crates/uv-python/src/downloads.rs +++ b/crates/uv-python/src/downloads.rs @@ -12,7 +12,7 @@ use futures::TryStreamExt; use itertools::Itertools; use once_cell::sync::OnceCell; use owo_colors::OwoColorize; -use reqwest_retry::RetryPolicy; +use reqwest_retry::{RetryError, RetryPolicy}; use serde::Deserialize; use thiserror::Error; use tokio::io::{AsyncRead, AsyncWriteExt, BufWriter, ReadBuf}; @@ -26,6 +26,7 @@ use uv_distribution_filename::{ExtensionError, SourceDistExtension}; use uv_extract::hash::Hasher; use uv_fs::{Simplified, rename_with_retry}; use uv_pypi_types::{HashAlgorithm, HashDigest}; +use uv_redacted::DisplaySafeUrl; use uv_static::EnvVars; use crate::PythonVariant; @@ -51,9 +52,15 @@ pub enum Error { #[error("Invalid request key (too many parts): {0}")] TooManyParts(String), #[error("Failed to download {0}")] - NetworkError(Url, #[source] WrappedReqwestError), + NetworkError(DisplaySafeUrl, #[source] WrappedReqwestError), + #[error("Request failed after {retries} retries")] + NetworkErrorWithRetries { + #[source] + err: Box, + retries: u32, + }, #[error("Failed to download {0}")] - NetworkMiddlewareError(Url, #[source] anyhow::Error), + NetworkMiddlewareError(DisplaySafeUrl, #[source] anyhow::Error), #[error("Failed to extract archive: {0}")] ExtractError(String, #[source] uv_extract::Error), #[error("Failed to hash installation")] @@ -90,14 +97,12 @@ pub enum Error { NoDownloadFound(PythonDownloadRequest), #[error("A mirror was provided via `{0}`, but the URL does not match the expected format: {0}")] Mirror(&'static str, &'static str), - #[error(transparent)] + #[error("Failed to determine the libc used on the current platform")] LibcDetection(#[from] LibcDetectionError), - #[error( - "Remote python downloads JSON is not yet supported, please use a local path (without `file://` prefix)" - )] - RemoteJSONNotSupported(), - #[error("The json of the python downloads is invalid: {0}")] - InvalidPythonDownloadsJSON(String, #[source] serde_json::Error), + #[error("Remote Python downloads JSON is not yet supported, please use a local path")] + RemoteJSONNotSupported, + #[error("The JSON of the python downloads is invalid: {0}")] + InvalidPythonDownloadsJSON(PathBuf, #[source] serde_json::Error), #[error("An offline Python installation was requested, but {file} (from {url}) is missing in {}", python_builds_dir.user_display())] OfflinePythonMissing { file: Box, @@ -106,18 +111,45 @@ pub enum Error { }, } -#[derive(Debug, PartialEq, Clone)] +impl Error { + // Return the number of attempts that were made to complete this request before this error was + // returned. Note that e.g. 3 retries equates to 4 attempts. + // + // It's easier to do arithmetic with "attempts" instead of "retries", because if you have + // nested retry loops you can just add up all the attempts directly, while adding up the + // retries requires +1/-1 adjustments. + fn attempts(&self) -> u32 { + // Unfortunately different variants of `Error` track retry counts in different ways. We + // could consider unifying the variants we handle here in `Error::from_reqwest_middleware` + // instead, but both approaches will be fragile as new variants get added over time. + if let Error::NetworkErrorWithRetries { retries, .. } = self { + return retries + 1; + } + // TODO(jack): let-chains are stable as of Rust 1.88. 
We should use them here as soon as + // our rust-version is high enough. + if let Error::NetworkMiddlewareError(_, anyhow_error) = self { + if let Some(RetryError::WithRetries { retries, .. }) = + anyhow_error.downcast_ref::() + { + return retries + 1; + } + } + 1 + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Hash)] pub struct ManagedPythonDownload { key: PythonInstallationKey, url: &'static str, sha256: Option<&'static str>, } -#[derive(Debug, Clone, Default, Eq, PartialEq)] +#[derive(Debug, Clone, Default, Eq, PartialEq, Hash)] pub struct PythonDownloadRequest { pub(crate) version: Option, pub(crate) implementation: Option, - pub(crate) arch: Option, + pub(crate) arch: Option, pub(crate) os: Option, pub(crate) libc: Option, @@ -126,11 +158,88 @@ pub struct PythonDownloadRequest { pub(crate) prereleases: Option, } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ArchRequest { + Explicit(Arch), + Environment(Arch), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +pub struct PlatformRequest { + pub(crate) os: Option, + pub(crate) arch: Option, + pub(crate) libc: Option, +} + +impl PlatformRequest { + /// Check if this platform request is satisfied by an installation key. + pub fn matches(&self, key: &PythonInstallationKey) -> bool { + if let Some(os) = self.os { + if key.os != os { + return false; + } + } + + if let Some(arch) = self.arch { + if !arch.satisfied_by(key.arch) { + return false; + } + } + + if let Some(libc) = self.libc { + if key.libc != libc { + return false; + } + } + + true + } +} + +impl Display for PlatformRequest { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut parts = Vec::new(); + if let Some(os) = &self.os { + parts.push(os.to_string()); + } + if let Some(arch) = &self.arch { + parts.push(arch.to_string()); + } + if let Some(libc) = &self.libc { + parts.push(libc.to_string()); + } + write!(f, "{}", parts.join("-")) + } +} + +impl Display for ArchRequest { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Explicit(arch) | Self::Environment(arch) => write!(f, "{arch}"), + } + } +} + +impl ArchRequest { + pub(crate) fn satisfied_by(self, arch: Arch) -> bool { + match self { + Self::Explicit(request) => request == arch, + Self::Environment(env) => env.supports(arch), + } + } + + pub fn inner(&self) -> Arch { + match self { + Self::Explicit(arch) | Self::Environment(arch) => *arch, + } + } +} + impl PythonDownloadRequest { pub fn new( version: Option, implementation: Option, - arch: Option, + arch: Option, os: Option, libc: Option, prereleases: Option, @@ -159,7 +268,7 @@ impl PythonDownloadRequest { #[must_use] pub fn with_arch(mut self, arch: Arch) -> Self { - self.arch = Some(arch); + self.arch = Some(ArchRequest::Explicit(arch)); self } @@ -203,7 +312,11 @@ impl PythonDownloadRequest { .with_version(version.clone()), ), PythonRequest::Key(request) => Some(request.clone()), - PythonRequest::Default | PythonRequest::Any => Some(Self::default()), + PythonRequest::Any => Some(Self { + prereleases: Some(true), // Explicitly allow pre-releases for PythonRequest::Any + ..Self::default() + }), + PythonRequest::Default => Some(Self::default()), // We can't download a managed installation for these request kinds PythonRequest::Directory(_) | PythonRequest::ExecutableName(_) @@ -216,7 +329,7 @@ impl PythonDownloadRequest { /// Platform information is pulled from the environment. 
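// Illustrative sketch, not part of the diff: a `PlatformRequest` only constrains the
// fields that are set, and `ArchRequest::Environment` accepts any architecture the
// running machine supports rather than requiring an exact match. The helper name is an
// assumption, and the struct literal relies on the crate-private fields defined above.
fn matches_current_platform(key: &PythonInstallationKey) -> bool {
    let request = PlatformRequest {
        os: Some(Os::from_env()),
        arch: Some(ArchRequest::Environment(Arch::from_env())),
        // Leaving `libc` unset means any libc is acceptable.
        libc: None,
    };
    request.matches(key)
}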
pub fn fill_platform(mut self) -> Result { if self.arch.is_none() { - self.arch = Some(Arch::from_env()); + self.arch = Some(ArchRequest::Environment(Arch::from_env())); } if self.os.is_none() { self.os = Some(Os::from_env()); @@ -235,18 +348,6 @@ impl PythonDownloadRequest { Ok(self) } - /// Construct a new [`PythonDownloadRequest`] with platform information from the environment. - pub fn from_env() -> Result { - Ok(Self::new( - None, - None, - Some(Arch::from_env()), - Some(Os::from_env()), - Some(Libc::from_env()?), - None, - )) - } - pub fn implementation(&self) -> Option<&ImplementationName> { self.implementation.as_ref() } @@ -255,7 +356,7 @@ impl PythonDownloadRequest { self.version.as_ref() } - pub fn arch(&self) -> Option<&Arch> { + pub fn arch(&self) -> Option<&ArchRequest> { self.arch.as_ref() } @@ -285,7 +386,7 @@ impl PythonDownloadRequest { } if let Some(arch) = &self.arch { - if !arch.supports(key.arch) { + if !arch.satisfied_by(key.arch) { return false; } } @@ -363,7 +464,7 @@ impl PythonDownloadRequest { } if let Some(arch) = self.arch() { let interpreter_arch = Arch::from(&interpreter.platform().arch()); - if &interpreter_arch != arch { + if !arch.satisfied_by(interpreter_arch) { debug!( "Skipping interpreter at `{executable}`: architecture `{interpreter_arch}` does not match request `{arch}`" ); @@ -392,6 +493,15 @@ impl PythonDownloadRequest { } true } + + /// Extract the platform components of this request. + pub fn platform(&self) -> PlatformRequest { + PlatformRequest { + os: self.os, + arch: self.arch, + libc: self.libc, + } + } } impl From<&ManagedPythonInstallation> for PythonDownloadRequest { @@ -405,7 +515,7 @@ impl From<&ManagedPythonInstallation> for PythonDownloadRequest { "Managed Python installations are expected to always have known implementation names, found {name}" ), }, - Some(key.arch), + Some(ArchRequest::Explicit(key.arch)), Some(key.os), Some(key.libc), Some(key.prerelease.is_some()), @@ -475,7 +585,7 @@ impl FromStr for PythonDownloadRequest { ); } 3 => os = Some(Os::from_str(part)?), - 4 => arch = Some(Arch::from_str(part)?), + 4 => arch = Some(ArchRequest::Explicit(Arch::from_str(part)?)), 5 => libc = Some(Libc::from_str(part)?), _ => return Err(Error::TooManyParts(s.to_string())), } @@ -554,20 +664,26 @@ impl ManagedPythonDownload { let json_downloads: HashMap = if let Some(json_source) = python_downloads_json_url { - if Url::parse(json_source).is_ok() { - return Err(Error::RemoteJSONNotSupported()); - } - - let file = match fs_err::File::open(json_source) { - Ok(file) => file, - Err(e) => { Err(Error::Io(e)) }?, + // Windows paths are also valid URLs + let json_source = if let Ok(url) = Url::parse(json_source) { + if let Ok(path) = url.to_file_path() { + Cow::Owned(path) + } else if matches!(url.scheme(), "http" | "https") { + return Err(Error::RemoteJSONNotSupported); + } else { + Cow::Borrowed(Path::new(json_source)) + } + } else { + Cow::Borrowed(Path::new(json_source)) }; + let file = fs_err::File::open(json_source.as_ref())?; + serde_json::from_reader(file) - .map_err(|e| Error::InvalidPythonDownloadsJSON(json_source.to_string(), e))? + .map_err(|e| Error::InvalidPythonDownloadsJSON(json_source.to_path_buf(), e))? } else { serde_json::from_str(BUILTIN_PYTHON_DOWNLOADS_JSON).map_err(|e| { - Error::InvalidPythonDownloadsJSON("EMBEDDED IN THE BINARY".to_string(), e) + Error::InvalidPythonDownloadsJSON(PathBuf::from("EMBEDDED IN THE BINARY"), e) })? 
}; @@ -606,7 +722,8 @@ impl ManagedPythonDownload { pypy_install_mirror: Option<&str>, reporter: Option<&dyn Reporter>, ) -> Result { - let mut n_past_retries = 0; + let mut total_attempts = 0; + let mut retried_here = false; let start_time = SystemTime::now(); let retry_policy = client.retry_policy(); loop { @@ -621,25 +738,41 @@ impl ManagedPythonDownload { reporter, ) .await; - if result - .as_ref() - .err() - .is_some_and(|err| is_extended_transient_error(err)) - { - let retry_decision = retry_policy.should_retry(start_time, n_past_retries); - if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision { - debug!( - "Transient failure while handling response for {}; retrying...", - self.key() - ); - let duration = execute_after - .duration_since(SystemTime::now()) - .unwrap_or_else(|_| Duration::default()); - tokio::time::sleep(duration).await; - n_past_retries += 1; - continue; + let result = match result { + Ok(download_result) => Ok(download_result), + Err(err) => { + // Inner retry loops (e.g. `reqwest-retry` middleware) might make more than one + // attempt per error we see here. + total_attempts += err.attempts(); + // We currently interpret e.g. "3 retries" to mean we should make 4 attempts. + let n_past_retries = total_attempts - 1; + if is_extended_transient_error(&err) { + let retry_decision = retry_policy.should_retry(start_time, n_past_retries); + if let reqwest_retry::RetryDecision::Retry { execute_after } = + retry_decision + { + debug!( + "Transient failure while handling response for {}; retrying...", + self.key() + ); + let duration = execute_after + .duration_since(SystemTime::now()) + .unwrap_or_else(|_| Duration::default()); + tokio::time::sleep(duration).await; + retried_here = true; + continue; // Retry. + } + } + if retried_here { + Err(Error::NetworkErrorWithRetries { + err: Box::new(err), + retries: n_past_retries, + }) + } else { + Err(err) + } } - } + }; return result; } } @@ -683,7 +816,9 @@ impl ManagedPythonDownload { let temp_dir = tempfile::tempdir_in(scratch_dir).map_err(Error::DownloadDirError)?; - if let Some(python_builds_dir) = env::var_os(EnvVars::UV_PYTHON_CACHE_DIR) { + if let Some(python_builds_dir) = + env::var_os(EnvVars::UV_PYTHON_CACHE_DIR).filter(|s| !s.is_empty()) + { let python_builds_dir = PathBuf::from(python_builds_dir); fs_err::create_dir_all(&python_builds_dir)?; let hash_prefix = match self.sha256 { @@ -772,7 +907,7 @@ impl ManagedPythonDownload { // Extract the top-level directory. 
let mut extracted = match uv_extract::strip_component(temp_dir.path()) { Ok(top_level) => top_level, - Err(uv_extract::Error::NonSingularArchive(_)) => temp_dir.into_path(), + Err(uv_extract::Error::NonSingularArchive(_)) => temp_dir.keep(), Err(err) => return Err(Error::ExtractError(filename.to_string(), err)), }; @@ -1060,11 +1195,26 @@ fn parse_json_downloads( } impl Error { - pub(crate) fn from_reqwest(url: Url, err: reqwest::Error) -> Self { - Self::NetworkError(url, WrappedReqwestError::from(err)) + pub(crate) fn from_reqwest( + url: DisplaySafeUrl, + err: reqwest::Error, + retries: Option, + ) -> Self { + let err = Self::NetworkError(url, WrappedReqwestError::from(err)); + if let Some(retries) = retries { + Self::NetworkErrorWithRetries { + err: Box::new(err), + retries, + } + } else { + err + } } - pub(crate) fn from_reqwest_middleware(url: Url, err: reqwest_middleware::Error) -> Self { + pub(crate) fn from_reqwest_middleware( + url: DisplaySafeUrl, + err: reqwest_middleware::Error, + ) -> Self { match err { reqwest_middleware::Error::Middleware(error) => { Self::NetworkMiddlewareError(url, error) @@ -1155,6 +1305,7 @@ async fn read_url( url: &Url, client: &BaseClient, ) -> Result<(impl AsyncRead + Unpin, Option), Error> { + let url = DisplaySafeUrl::from(url.clone()); if url.scheme() == "file" { // Loads downloaded distribution from the given `file://` URL. let path = url @@ -1167,16 +1318,21 @@ async fn read_url( Ok((Either::Left(reader), Some(size))) } else { let response = client - .for_host(url) - .get(url.clone()) + .for_host(&url) + .get(Url::from(url.clone())) .send() .await .map_err(|err| Error::from_reqwest_middleware(url.clone(), err))?; - // Ensure the request was successful. - response - .error_for_status_ref() - .map_err(|err| Error::from_reqwest(url.clone(), err))?; + let retry_count = response + .extensions() + .get::() + .map(|retries| retries.value()); + + // Check the status code. + let response = response + .error_for_status() + .map_err(|err| Error::from_reqwest(url, err, retry_count))?; let size = response.content_length(); let stream = response diff --git a/crates/uv-python/src/environment.rs b/crates/uv-python/src/environment.rs index de6cf59d8..02f9fd683 100644 --- a/crates/uv-python/src/environment.rs +++ b/crates/uv-python/src/environment.rs @@ -1,5 +1,4 @@ use std::borrow::Cow; -use std::env; use std::fmt; use std::path::{Path, PathBuf}; use std::sync::Arc; @@ -8,7 +7,7 @@ use owo_colors::OwoColorize; use tracing::debug; use uv_cache::Cache; -use uv_cache_key::cache_digest; +use uv_configuration::PreviewMode; use uv_fs::{LockedFile, Simplified}; use uv_pep440::Version; @@ -154,6 +153,7 @@ impl PythonEnvironment { request: &PythonRequest, preference: EnvironmentPreference, cache: &Cache, + preview: PreviewMode, ) -> Result { let installation = match find_python_installation( request, @@ -161,6 +161,7 @@ impl PythonEnvironment { // Ignore managed installations when looking for environments PythonPreference::OnlySystem, cache, + preview, )? { Ok(installation) => installation, Err(err) => return Err(EnvironmentNotFound::from(err).into()), @@ -316,23 +317,7 @@ impl PythonEnvironment { /// Grab a file lock for the environment to prevent concurrent writes across processes. pub async fn lock(&self) -> Result { - if let Some(target) = self.0.interpreter.target() { - // If we're installing into a `--target`, use a target-specific lockfile. 
- LockedFile::acquire(target.root().join(".lock"), target.root().user_display()).await - } else if let Some(prefix) = self.0.interpreter.prefix() { - // Likewise, if we're installing into a `--prefix`, use a prefix-specific lockfile. - LockedFile::acquire(prefix.root().join(".lock"), prefix.root().user_display()).await - } else if self.0.interpreter.is_virtualenv() { - // If the environment a virtualenv, use a virtualenv-specific lockfile. - LockedFile::acquire(self.0.root.join(".lock"), self.0.root.user_display()).await - } else { - // Otherwise, use a global lockfile. - LockedFile::acquire( - env::temp_dir().join(format!("uv-{}.lock", cache_digest(&self.0.root))), - self.0.root.user_display(), - ) - .await - } + self.0.interpreter.lock().await } /// Return the [`Interpreter`] for this environment. diff --git a/crates/uv-python/src/implementation.rs b/crates/uv-python/src/implementation.rs index ffc61dac7..4393d56f4 100644 --- a/crates/uv-python/src/implementation.rs +++ b/crates/uv-python/src/implementation.rs @@ -44,6 +44,13 @@ impl ImplementationName { Self::GraalPy => "GraalPy", } } + + pub fn executable_name(self) -> &'static str { + match self { + Self::CPython => "python", + Self::PyPy | Self::GraalPy => self.into(), + } + } } impl LenientImplementationName { @@ -53,6 +60,13 @@ impl LenientImplementationName { Self::Unknown(name) => name, } } + + pub fn executable_name(&self) -> &str { + match self { + Self::Known(implementation) => implementation.executable_name(), + Self::Unknown(name) => name, + } + } } impl From<&ImplementationName> for &'static str { diff --git a/crates/uv-python/src/installation.rs b/crates/uv-python/src/installation.rs index 9d558f3ff..a5dbb55f2 100644 --- a/crates/uv-python/src/installation.rs +++ b/crates/uv-python/src/installation.rs @@ -1,10 +1,14 @@ use std::fmt; +use std::hash::{Hash, Hasher}; use std::str::FromStr; +use indexmap::IndexMap; +use ref_cast::RefCast; use tracing::{debug, info}; use uv_cache::Cache; use uv_client::BaseClientBuilder; +use uv_configuration::PreviewMode; use uv_pep440::{Prerelease, Version}; use crate::discovery::{ @@ -54,8 +58,10 @@ impl PythonInstallation { environments: EnvironmentPreference, preference: PythonPreference, cache: &Cache, + preview: PreviewMode, ) -> Result { - let installation = find_python_installation(request, environments, preference, cache)??; + let installation = + find_python_installation(request, environments, preference, cache, preview)??; Ok(installation) } @@ -66,12 +72,14 @@ impl PythonInstallation { environments: EnvironmentPreference, preference: PythonPreference, cache: &Cache, + preview: PreviewMode, ) -> Result { Ok(find_best_python_installation( request, environments, preference, cache, + preview, )??) 
} @@ -89,26 +97,19 @@ impl PythonInstallation { python_install_mirror: Option<&str>, pypy_install_mirror: Option<&str>, python_downloads_json_url: Option<&str>, + preview: PreviewMode, ) -> Result { let request = request.unwrap_or(&PythonRequest::Default); // Search for the installation - let err = match Self::find(request, environments, preference, cache) { + let err = match Self::find(request, environments, preference, cache, preview) { Ok(installation) => return Ok(installation), Err(err) => err, }; - let downloads_enabled = preference.allows_managed() - && python_downloads.is_automatic() - && client_builder.connectivity.is_online(); - - if !downloads_enabled { - return Err(err); - } - match err { // If Python is missing, we should attempt a download - Error::MissingPython(_) => {} + Error::MissingPython(..) => {} // If we raised a non-critical error, we should attempt a download Error::Discovery(ref err) if !err.is_critical() => {} // Otherwise, this is fatal @@ -116,46 +117,116 @@ impl PythonInstallation { } // If we can't convert the request to a download, throw the original error - let Some(request) = PythonDownloadRequest::from_request(request) else { + let Some(download_request) = PythonDownloadRequest::from_request(request) else { return Err(err); }; - debug!("Requested Python not found, checking for available download..."); - match Self::fetch( - request.fill()?, + let downloads_enabled = preference.allows_managed() + && python_downloads.is_automatic() + && client_builder.connectivity.is_online(); + + let download = download_request.clone().fill().map(|request| { + ManagedPythonDownload::from_request(&request, python_downloads_json_url) + }); + + // Regardless of whether downloads are enabled, we want to determine if the download is + // available to power error messages. However, if downloads aren't enabled, we don't want to + // report any errors related to them. + let download = match download { + Ok(Ok(download)) => Some(download), + // If the download cannot be found, return the _original_ discovery error + Ok(Err(downloads::Error::NoDownloadFound(_))) => { + if downloads_enabled { + debug!("No downloads are available for {request}"); + return Err(err); + } + None + } + Err(err) | Ok(Err(err)) => { + if downloads_enabled { + // We failed to determine the platform information + return Err(err.into()); + } + None + } + }; + + let Some(download) = download else { + // N.B. We should only be in this case when downloads are disabled; when downloads are + // enabled, we should fail eagerly when something goes wrong with the download. + debug_assert!(!downloads_enabled); + return Err(err); + }; + + // If the download is available, but not usable, we attach a hint to the original error. 
+ if !downloads_enabled { + let for_request = match request { + PythonRequest::Default | PythonRequest::Any => String::new(), + _ => format!(" for {request}"), + }; + + match python_downloads { + PythonDownloads::Automatic => {} + PythonDownloads::Manual => { + return Err(err.with_missing_python_hint(format!( + "A managed Python download is available{for_request}, but Python downloads are set to 'manual', use `uv python install {}` to install the required version", + request.to_canonical_string(), + ))); + } + PythonDownloads::Never => { + return Err(err.with_missing_python_hint(format!( + "A managed Python download is available{for_request}, but Python downloads are set to 'never'" + ))); + } + } + + match preference { + PythonPreference::OnlySystem => { + return Err(err.with_missing_python_hint(format!( + "A managed Python download is available{for_request}, but the Python preference is set to 'only system'" + ))); + } + PythonPreference::Managed + | PythonPreference::OnlyManaged + | PythonPreference::System => {} + } + + if !client_builder.connectivity.is_online() { + return Err(err.with_missing_python_hint(format!( + "A managed Python download is available{for_request}, but uv is set to offline mode" + ))); + } + + return Err(err); + } + + Self::fetch( + download, client_builder, cache, reporter, python_install_mirror, pypy_install_mirror, - python_downloads_json_url, + preview, ) .await - { - Ok(installation) => Ok(installation), - // Throw the original error if we couldn't find a download - Err(Error::Download(downloads::Error::NoDownloadFound(_))) => Err(err), - // But if the download failed, throw that error - Err(err) => Err(err), - } } /// Download and install the requested installation. pub async fn fetch( - request: PythonDownloadRequest, + download: &'static ManagedPythonDownload, client_builder: &BaseClientBuilder<'_>, cache: &Cache, reporter: Option<&dyn Reporter>, python_install_mirror: Option<&str>, pypy_install_mirror: Option<&str>, - python_downloads_json_url: Option<&str>, + preview: PreviewMode, ) -> Result { let installations = ManagedPythonInstallations::from_settings(None)?.init()?; let installations_dir = installations.root(); let scratch_dir = installations.scratch(); let _lock = installations.lock().await?; - let download = ManagedPythonDownload::from_request(&request, python_downloads_json_url)?; let client = client_builder.build(); info!("Fetching requested Python..."); @@ -180,6 +251,21 @@ impl PythonInstallation { installed.ensure_externally_managed()?; installed.ensure_sysconfig_patched()?; installed.ensure_canonical_executables()?; + + let minor_version = installed.minor_version_key(); + let highest_patch = installations + .find_all()? 
+ .filter(|installation| installation.minor_version_key() == minor_version) + .filter_map(|installation| installation.version().patch()) + .fold(0, std::cmp::max); + if installed + .version() + .patch() + .is_some_and(|p| p >= highest_patch) + { + installed.ensure_minor_version_link(preview)?; + } + if let Err(e) = installed.ensure_dylib_patched() { e.warn_user(&installed); } @@ -340,6 +426,14 @@ impl PythonInstallationKey { format!("{}.{}.{}", self.major, self.minor, self.patch) } + pub fn major(&self) -> u8 { + self.major + } + + pub fn minor(&self) -> u8 { + self.minor + } + pub fn arch(&self) -> &Arch { &self.arch } @@ -483,8 +577,119 @@ impl Ord for PythonInstallationKey { .cmp(&other.implementation) .then_with(|| self.version().cmp(&other.version())) .then_with(|| self.os.to_string().cmp(&other.os.to_string())) - .then_with(|| self.arch.to_string().cmp(&other.arch.to_string())) + // Architectures are sorted in preferred order, with native architectures first + .then_with(|| self.arch.cmp(&other.arch).reverse()) .then_with(|| self.libc.to_string().cmp(&other.libc.to_string())) - .then_with(|| self.variant.cmp(&other.variant).reverse()) // we want Default to come first + // Python variants are sorted in preferred order, with `Default` first + .then_with(|| self.variant.cmp(&other.variant).reverse()) + } +} + +/// A view into a [`PythonInstallationKey`] that excludes the patch and prerelease versions. +#[derive(Clone, Eq, Ord, PartialOrd, RefCast)] +#[repr(transparent)] +pub struct PythonInstallationMinorVersionKey(PythonInstallationKey); + +impl PythonInstallationMinorVersionKey { + /// Cast a `&PythonInstallationKey` to a `&PythonInstallationMinorVersionKey` using ref-cast. + #[inline] + pub fn ref_cast(key: &PythonInstallationKey) -> &Self { + RefCast::ref_cast(key) + } + + /// Takes an [`IntoIterator`] of [`ManagedPythonInstallation`]s and returns an [`FxHashMap`] from + /// [`PythonInstallationMinorVersionKey`] to the installation with highest [`PythonInstallationKey`] + /// for that minor version key. + #[inline] + pub fn highest_installations_by_minor_version_key<'a, I>( + installations: I, + ) -> IndexMap + where + I: IntoIterator, + { + let mut minor_versions = IndexMap::default(); + for installation in installations { + minor_versions + .entry(installation.minor_version_key().clone()) + .and_modify(|high_installation: &mut ManagedPythonInstallation| { + if installation.key() >= high_installation.key() { + *high_installation = installation.clone(); + } + }) + .or_insert_with(|| installation.clone()); + } + minor_versions + } +} + +impl fmt::Display for PythonInstallationMinorVersionKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // Display every field on the wrapped key except the patch + // and prerelease (with special formatting for the variant). + let variant = match self.0.variant { + PythonVariant::Default => String::new(), + PythonVariant::Freethreaded => format!("+{}", self.0.variant), + }; + write!( + f, + "{}-{}.{}{}-{}-{}-{}", + self.0.implementation, + self.0.major, + self.0.minor, + variant, + self.0.os, + self.0.arch, + self.0.libc, + ) + } +} + +impl fmt::Debug for PythonInstallationMinorVersionKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // Display every field on the wrapped key except the patch + // and prerelease. 
+ f.debug_struct("PythonInstallationMinorVersionKey") + .field("implementation", &self.0.implementation) + .field("major", &self.0.major) + .field("minor", &self.0.minor) + .field("variant", &self.0.variant) + .field("os", &self.0.os) + .field("arch", &self.0.arch) + .field("libc", &self.0.libc) + .finish() + } +} + +impl PartialEq for PythonInstallationMinorVersionKey { + fn eq(&self, other: &Self) -> bool { + // Compare every field on the wrapped key except the patch + // and prerelease. + self.0.implementation == other.0.implementation + && self.0.major == other.0.major + && self.0.minor == other.0.minor + && self.0.os == other.0.os + && self.0.arch == other.0.arch + && self.0.libc == other.0.libc + && self.0.variant == other.0.variant + } +} + +impl Hash for PythonInstallationMinorVersionKey { + fn hash(&self, state: &mut H) { + // Hash every field on the wrapped key except the patch + // and prerelease. + self.0.implementation.hash(state); + self.0.major.hash(state); + self.0.minor.hash(state); + self.0.os.hash(state); + self.0.arch.hash(state); + self.0.libc.hash(state); + self.0.variant.hash(state); + } +} + +impl From for PythonInstallationMinorVersionKey { + fn from(key: PythonInstallationKey) -> Self { + PythonInstallationMinorVersionKey(key) } } diff --git a/crates/uv-python/src/interpreter.rs b/crates/uv-python/src/interpreter.rs index 2f85fa042..0f074ebb6 100644 --- a/crates/uv-python/src/interpreter.rs +++ b/crates/uv-python/src/interpreter.rs @@ -1,10 +1,10 @@ use std::borrow::Cow; use std::env::consts::ARCH; use std::fmt::{Display, Formatter}; -use std::io; use std::path::{Path, PathBuf}; use std::process::{Command, ExitStatus}; use std::sync::OnceLock; +use std::{env, io}; use configparser::ini::Ini; use fs_err as fs; @@ -17,7 +17,7 @@ use tracing::{debug, trace, warn}; use uv_cache::{Cache, CacheBucket, CachedByTimestamp, Freshness}; use uv_cache_info::Timestamp; use uv_cache_key::cache_digest; -use uv_fs::{PythonExt, Simplified, write_atomic_sync}; +use uv_fs::{LockedFile, PythonExt, Simplified, write_atomic_sync}; use uv_install_wheel::Layout; use uv_pep440::Version; use uv_pep508::{MarkerEnvironment, StringVersion}; @@ -26,6 +26,7 @@ use uv_platform_tags::{Tags, TagsError}; use uv_pypi_types::{ResolverMarkerEnvironment, Scheme}; use crate::implementation::LenientImplementationName; +use crate::managed::ManagedPythonInstallations; use crate::platform::{Arch, Libc, Os}; use crate::pointer_size::PointerSize; use crate::{ @@ -168,7 +169,7 @@ impl Interpreter { Ok(path) => path, Err(err) => { warn!("Failed to find base Python executable: {err}"); - uv_fs::canonicalize_executable(base_executable)? + canonicalize_executable(base_executable)? } }; Ok(base_python) @@ -263,6 +264,21 @@ impl Interpreter { self.prefix.is_some() } + /// Returns `true` if this interpreter is managed by uv. + /// + /// Returns `false` if we cannot determine the path of the uv managed Python interpreters. + pub fn is_managed(&self) -> bool { + let Ok(installations) = ManagedPythonInstallations::from_settings(None) else { + return false; + }; + + installations + .find_all() + .into_iter() + .flatten() + .any(|install| install.path() == self.sys_base_prefix) + } + /// Returns `Some` if the environment is externally managed, optionally including an error /// message from the `EXTERNALLY-MANAGED` file. /// @@ -483,10 +499,19 @@ impl Interpreter { /// `python-build-standalone`. 
/// /// See: + #[cfg(unix)] pub fn is_standalone(&self) -> bool { self.standalone } + + /// Returns `true` if an [`Interpreter`] may be a `python-build-standalone` interpreter. + // TODO(john): Replace this approach with patching sysconfig on Windows to + // set `PYTHON_BUILD_STANDALONE=1`. + #[cfg(windows)] + pub fn is_standalone(&self) -> bool { + self.standalone || (self.is_managed() && self.markers().implementation_name() == "cpython") + } + /// Return the [`Layout`] environment used to install wheels into this interpreter. pub fn layout(&self) -> Layout { Layout { @@ -581,6 +606,54 @@ impl Interpreter { .into_iter() .any(|default_name| name == default_name.to_string()) } + + /// Grab a file lock for the environment to prevent concurrent writes across processes. + pub async fn lock(&self) -> Result { + if let Some(target) = self.target() { + // If we're installing into a `--target`, use a target-specific lockfile. + LockedFile::acquire(target.root().join(".lock"), target.root().user_display()).await + } else if let Some(prefix) = self.prefix() { + // Likewise, if we're installing into a `--prefix`, use a prefix-specific lockfile. + LockedFile::acquire(prefix.root().join(".lock"), prefix.root().user_display()).await + } else if self.is_virtualenv() { + // If the environment is a virtualenv, use a virtualenv-specific lockfile. + LockedFile::acquire( + self.sys_prefix.join(".lock"), + self.sys_prefix.user_display(), + ) + .await + } else { + // Otherwise, use a global lockfile. + LockedFile::acquire( + env::temp_dir().join(format!("uv-{}.lock", cache_digest(&self.sys_executable))), + self.sys_prefix.user_display(), + ) + .await + } + } +} + +/// Calls `fs_err::canonicalize` on Unix. On Windows, avoids attempting to resolve symlinks +/// but will resolve junctions if they are part of a trampoline target. +pub fn canonicalize_executable(path: impl AsRef) -> std::io::Result { + let path = path.as_ref(); + debug_assert!( + path.is_absolute(), + "path must be absolute: {}", + path.display() + ); + + #[cfg(windows)] + { + if let Ok(Some(launcher)) = uv_trampoline_builder::Launcher::try_from_path(path) { + Ok(dunce::canonicalize(launcher.python_path)?) + } else { + Ok(path.to_path_buf()) + } + } + + #[cfg(unix)] + fs_err::canonicalize(path) } /// The `EXTERNALLY-MANAGED` file in a Python installation. @@ -757,6 +830,8 @@ pub enum InterpreterInfoError { python_major: usize, python_minor: usize, }, + #[error("Only Pyodide is supported for Emscripten Python")] + EmscriptenNotPyodide, } #[derive(Debug, Deserialize, Serialize, Clone)] @@ -892,6 +967,31 @@ impl InterpreterInfo { pub(crate) fn query_cached(executable: &Path, cache: &Cache) -> Result { let absolute = std::path::absolute(executable)?; + // Provide a better error message if the link is broken or the file does not exist. Since + // `canonicalize_executable` does not resolve the file on Windows, we must re-use this logic + // for the subsequent metadata read as we may not have actually resolved the path. + let handle_io_error = |err: io::Error| -> Error { + if err.kind() == io::ErrorKind::NotFound { + // Check if it looks like a venv interpreter where the underlying Python + // installation was removed.
+ if absolute + .symlink_metadata() + .is_ok_and(|metadata| metadata.is_symlink()) + { + Error::BrokenSymlink(BrokenSymlink { + path: executable.to_path_buf(), + venv: uv_fs::is_virtualenv_executable(executable), + }) + } else { + Error::NotFound(executable.to_path_buf()) + } + } else { + err.into() + } + }; + + let canonical = canonicalize_executable(&absolute).map_err(handle_io_error)?; + let cache_entry = cache.entry( CacheBucket::Interpreter, // Shard interpreter metadata by host architecture, operating system, and version, to @@ -902,38 +1002,18 @@ impl InterpreterInfo { sys_info::os_release().unwrap_or_default(), )), // We use the absolute path for the cache entry to avoid cache collisions for relative - // paths. But we don't to query the executable with symbolic links resolved. - format!("{}.msgpack", cache_digest(&absolute)), + // paths. But we don't want to query the executable with symbolic links resolved because + // that can change reported values, e.g., `sys.executable`. We include the canonical + // path in the cache entry as well, otherwise we can have cache collisions if an + // absolute path refers to different interpreters with matching ctimes, e.g., if you + // have a `.venv/bin/python` pointing to both Python 3.12 and Python 3.13 that were + // modified at the same time. + format!("{}.msgpack", cache_digest(&(&absolute, &canonical))), ); // We check the timestamp of the canonicalized executable to check if an underlying // interpreter has been modified. - let modified = uv_fs::canonicalize_executable(&absolute) - .and_then(Timestamp::from_path) - .map_err(|err| { - if err.kind() == io::ErrorKind::NotFound { - // Check if it looks like a venv interpreter where the underlying Python - // installation was removed. - if absolute - .symlink_metadata() - .is_ok_and(|metadata| metadata.is_symlink()) - { - let venv = executable - .parent() - .and_then(Path::parent) - .map(|path| path.join("pyvenv.cfg").is_file()) - .unwrap_or(false); - Error::BrokenSymlink(BrokenSymlink { - path: executable.to_path_buf(), - venv, - }) - } else { - Error::NotFound(executable.to_path_buf()) - } - } else { - err.into() - } - })?; + let modified = Timestamp::from_path(canonical).map_err(handle_io_error)?; // Read from the cache. if cache @@ -945,7 +1025,7 @@ impl InterpreterInfo { Ok(cached) => { if cached.timestamp == modified { trace!( - "Cached interpreter info for Python {}, skipping probing: {}", + "Found cached interpreter info for Python {}, skipping query of: {}", cached.data.markers.python_full_version(), executable.user_display() ); diff --git a/crates/uv-python/src/lib.rs b/crates/uv-python/src/lib.rs index 024cd5cbc..ea6f0db61 100644 --- a/crates/uv-python/src/lib.rs +++ b/crates/uv-python/src/lib.rs @@ -1,4 +1,5 @@ //! Find requested Python interpreters and query interpreters for information. 
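The `uv-python/src/lib.rs` hunk that follows threads an optional hint string through the `MissingPython` error variant instead of introducing a new error type. The sketch below is a minimal, self-contained illustration of that pattern; the names are simplified stand-ins, not uv's actual types.

use std::fmt;

#[derive(Debug)]
#[allow(dead_code)]
enum Error {
    // The second field carries an optional, human-readable hint.
    MissingPython(String, Option<String>),
    Other(String),
}

impl Error {
    // Attach a hint only when the error is the "missing Python" case;
    // every other variant passes through unchanged.
    fn with_missing_python_hint(self, hint: String) -> Self {
        match self {
            Error::MissingPython(err, _) => Error::MissingPython(err, Some(hint)),
            other => other,
        }
    }
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Error::MissingPython(err, Some(hint)) => write!(f, "{err}\n\nhint: {hint}"),
            Error::MissingPython(err, None) => write!(f, "{err}"),
            Error::Other(err) => write!(f, "{err}"),
        }
    }
}

fn main() {
    let err = Error::MissingPython("No interpreter found for Python 3.13".to_string(), None)
        .with_missing_python_hint("Python downloads are set to 'never'".to_string());
    println!("{err}");
}

Because `with_missing_python_hint` is a no-op for other variants, callers can attach hints unconditionally without first checking which error they hold.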
+use owo_colors::OwoColorize; use thiserror::Error; #[cfg(test)] @@ -9,10 +10,15 @@ pub use crate::discovery::{ PythonPreference, PythonRequest, PythonSource, PythonVariant, VersionRequest, find_python_installations, }; +pub use crate::downloads::PlatformRequest; pub use crate::environment::{InvalidEnvironmentKind, PythonEnvironment}; -pub use crate::implementation::ImplementationName; -pub use crate::installation::{PythonInstallation, PythonInstallationKey}; -pub use crate::interpreter::{BrokenSymlink, Error as InterpreterError, Interpreter}; +pub use crate::implementation::{ImplementationName, LenientImplementationName}; +pub use crate::installation::{ + PythonInstallation, PythonInstallationKey, PythonInstallationMinorVersionKey, +}; +pub use crate::interpreter::{ + BrokenSymlink, Error as InterpreterError, Interpreter, canonicalize_executable, +}; pub use crate::pointer_size::PointerSize; pub use crate::prefix::Prefix; pub use crate::python_version::PythonVersion; @@ -88,8 +94,8 @@ pub enum Error { #[error(transparent)] KeyError(#[from] installation::PythonInstallationKeyError), - #[error(transparent)] - MissingPython(#[from] PythonNotFound), + #[error("{}{}", .0, if let Some(hint) = .1 { format!("\n\n{}{} {hint}", "hint".bold().cyan(), ":".bold()) } else { String::new() })] + MissingPython(PythonNotFound, Option), #[error(transparent)] MissingEnvironment(#[from] environment::EnvironmentNotFound), @@ -98,6 +104,21 @@ pub enum Error { InvalidEnvironment(#[from] environment::InvalidEnvironment), } +impl Error { + pub(crate) fn with_missing_python_hint(self, hint: String) -> Self { + match self { + Error::MissingPython(err, _) => Error::MissingPython(err, Some(hint)), + _ => self, + } + } +} + +impl From for Error { + fn from(err: PythonNotFound) -> Self { + Error::MissingPython(err, None) + } +} + // The mock interpreters are not valid on Windows so we don't have unit test coverage there // TODO(zanieb): We should write a mock interpreter script that works on Windows #[cfg(all(test, unix))] @@ -114,6 +135,7 @@ mod tests { use indoc::{formatdoc, indoc}; use temp_env::with_vars; use test_log::test; + use uv_configuration::PreviewMode; use uv_static::EnvVars; use uv_cache::Cache; @@ -446,6 +468,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::default(), &context.cache, + PreviewMode::Disabled, ) }); assert!( @@ -460,6 +483,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::default(), &context.cache, + PreviewMode::Disabled, ) }); assert!( @@ -484,6 +508,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::default(), &context.cache, + PreviewMode::Disabled, ) }); assert!( @@ -505,6 +530,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::default(), &context.cache, + PreviewMode::Disabled, ) })??; assert!( @@ -566,6 +592,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::default(), &context.cache, + PreviewMode::Disabled, ) })??; assert!( @@ -597,6 +624,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::default(), &context.cache, + PreviewMode::Disabled, ) }); assert!( @@ -633,6 +661,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::default(), &context.cache, + PreviewMode::Disabled, ) })??; assert!( @@ -664,6 +693,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -685,6 +715,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + 
PreviewMode::Disabled, ) })??; assert_eq!( @@ -710,6 +741,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -735,6 +767,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -757,6 +790,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; @@ -790,6 +824,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; @@ -823,6 +858,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -844,6 +880,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -865,6 +902,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; @@ -898,6 +936,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; @@ -934,6 +973,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert!( @@ -964,6 +1004,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert!( @@ -998,6 +1039,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1023,6 +1065,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1049,6 +1092,7 @@ mod tests { EnvironmentPreference::OnlyVirtual, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1073,6 +1117,7 @@ mod tests { EnvironmentPreference::OnlyVirtual, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )?; @@ -1094,6 +1139,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1116,6 +1162,7 @@ mod tests { EnvironmentPreference::OnlyVirtual, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1148,6 +1195,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1168,6 +1216,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1194,6 +1243,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; @@ -1211,6 +1261,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; @@ -1239,6 +1290,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1276,6 +1328,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1303,6 +1356,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1327,6 +1381,7 @@ mod tests { EnvironmentPreference::ExplicitSystem, PythonPreference::OnlySystem, &context.cache, + 
PreviewMode::Disabled, ) }, )??; @@ -1351,6 +1406,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1375,6 +1431,7 @@ mod tests { EnvironmentPreference::OnlyVirtual, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1412,6 +1469,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1439,6 +1497,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1455,6 +1514,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1471,6 +1531,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -1492,6 +1553,7 @@ mod tests { EnvironmentPreference::OnlyVirtual, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -1508,6 +1570,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )?; @@ -1529,6 +1592,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1543,6 +1607,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -1556,6 +1621,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -1584,6 +1650,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1599,6 +1666,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1628,6 +1696,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1643,6 +1712,7 @@ mod tests { EnvironmentPreference::ExplicitSystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1658,6 +1728,7 @@ mod tests { EnvironmentPreference::OnlyVirtual, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1673,6 +1744,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1696,6 +1768,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1710,6 +1783,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1733,6 +1807,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1752,6 +1827,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1780,6 +1856,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1801,6 +1878,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ 
-1830,6 +1908,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1845,6 +1924,7 @@ mod tests { EnvironmentPreference::ExplicitSystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -1871,6 +1951,7 @@ mod tests { EnvironmentPreference::ExplicitSystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -1895,6 +1976,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -1911,6 +1993,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1925,6 +2008,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1950,6 +2034,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1964,6 +2049,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1989,6 +2075,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2015,6 +2102,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2041,6 +2129,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2067,6 +2156,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2093,6 +2183,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2120,6 +2211,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -2141,6 +2233,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2155,6 +2248,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2180,6 +2274,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2194,6 +2289,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2231,6 +2327,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -2248,6 +2345,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -2289,6 +2387,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -2306,6 +2405,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -2342,6 +2442,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -2364,6 +2465,7 @@ mod tests { 
EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -2386,6 +2488,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -2424,6 +2527,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; @@ -2476,6 +2580,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; diff --git a/crates/uv-python/src/libc.rs b/crates/uv-python/src/libc.rs index 974680ac8..40950ae08 100644 --- a/crates/uv-python/src/libc.rs +++ b/crates/uv-python/src/libc.rs @@ -31,12 +31,14 @@ pub enum LibcDetectionError { #[source] err: io::Error, }, - #[error("Could not find glibc version in output of: `ldd --version`")] - InvalidLddOutputGnu, + #[error("Could not find glibc version in output of: `{0} --version`")] + InvalidLdSoOutputGnu(PathBuf), #[error("Could not find musl version in output of: `{0}`")] - InvalidLddOutputMusl(PathBuf), + InvalidLdSoOutputMusl(PathBuf), #[error("Could not read ELF interpreter from any of the following paths: {0}")] CoreBinaryParsing(String), + #[error("Failed to find any common binaries to determine libc from: {0}")] + NoCommonBinariesFound(String), #[error("Failed to determine libc")] Io(#[from] io::Error), } @@ -73,49 +75,55 @@ pub(crate) fn detect_linux_libc() -> Result { ); } } - match detect_glibc_version_from_ldd(&ld_path) { + match detect_glibc_version_from_ld(&ld_path) { Ok(os_version) => return Ok(os_version), Err(err) => { - trace!("Tried to find glibc version from `ldd --version`, but failed: {err}"); + trace!( + "Tried to find glibc version from `{} --version`, but failed: {}", + ld_path.simplified_display(), + err + ); } } Err(LibcDetectionError::NoLibcFound) } // glibc version is taken from `std/sys/unix/os.rs`. -fn detect_glibc_version_from_ldd(ldd: &Path) -> Result { - let output = Command::new(ldd) +fn detect_glibc_version_from_ld(ld_so: &Path) -> Result { + let output = Command::new(ld_so) .args(["--version"]) .output() .map_err(|err| LibcDetectionError::FailedToRun { libc: "glibc", - program: format!("{} --version", ldd.user_display()), + program: format!("{} --version", ld_so.user_display()), err, })?; - if let Some(os) = glibc_ldd_output_to_version("stdout", &output.stdout) { + if let Some(os) = glibc_ld_output_to_version("stdout", &output.stdout) { return Ok(os); } - if let Some(os) = glibc_ldd_output_to_version("stderr", &output.stderr) { + if let Some(os) = glibc_ld_output_to_version("stderr", &output.stderr) { return Ok(os); } - Err(LibcDetectionError::InvalidLddOutputGnu) + Err(LibcDetectionError::InvalidLdSoOutputGnu( + ld_so.to_path_buf(), + )) } -/// Parse `ldd --version` output. +/// Parse output `/lib64/ld-linux-x86-64.so.2 --version` and equivalent ld.so files. /// /// Example: `ld.so (Ubuntu GLIBC 2.39-0ubuntu8.3) stable release version 2.39.`. -fn glibc_ldd_output_to_version(kind: &str, output: &[u8]) -> Option { +fn glibc_ld_output_to_version(kind: &str, output: &[u8]) -> Option { static RE: LazyLock = LazyLock::new(|| Regex::new(r"ld.so \(.+\) .* ([0-9]+\.[0-9]+)").unwrap()); let output = String::from_utf8_lossy(output); - trace!("{kind} output from `ldd --version`: {output:?}"); + trace!("{kind} output from `ld.so --version`: {output:?}"); let (_, [version]) = RE.captures(output.as_ref()).map(|c| c.extract())?; // Parse the input as "x.y" glibc version. 
let mut parsed_ints = version.split('.').map(str::parse).fuse(); let major = parsed_ints.next()?.ok()?; let minor = parsed_ints.next()?.ok()?; - trace!("Found manylinux {major}.{minor} in {kind} of `ldd --version`"); + trace!("Found manylinux {major}.{minor} in {kind} of ld.so version"); Some(LibcVersion::Manylinux { major, minor }) } @@ -166,7 +174,7 @@ fn detect_musl_version(ld_path: impl AsRef) -> Result Result { // See: https://github.com/astral-sh/uv/issues/1810 // See: https://github.com/astral-sh/uv/issues/4242#issuecomment-2306164449 let attempts = ["/bin/sh", "/usr/bin/env", "/bin/dash", "/bin/ls"]; + let mut found_anything = false; for path in attempts { - if let Some(ld_path) = find_ld_path_at(path) { - return Ok(ld_path); + if std::fs::exists(path).ok() == Some(true) { + found_anything = true; + if let Some(ld_path) = find_ld_path_at(path) { + return Ok(ld_path); + } } } - Err(LibcDetectionError::CoreBinaryParsing(attempts.join(", "))) + let attempts_string = attempts.join(", "); + if !found_anything { + // Known failure cases here include running the distroless Docker images directly + // (depending on what subcommand you use) and certain Nix setups. See: + // https://github.com/astral-sh/uv/issues/8635 + Err(LibcDetectionError::NoCommonBinariesFound(attempts_string)) + } else { + Err(LibcDetectionError::CoreBinaryParsing(attempts_string)) + } } /// Attempt to find the path to the `ld` executable by @@ -244,8 +264,8 @@ mod tests { use indoc::indoc; #[test] - fn parse_ldd_output() { - let ver_str = glibc_ldd_output_to_version( + fn parse_ld_so_output() { + let ver_str = glibc_ld_output_to_version( "stdout", indoc! {br"ld.so (Ubuntu GLIBC 2.39-0ubuntu8.3) stable release version 2.39. Copyright (C) 2024 Free Software Foundation, Inc. diff --git a/crates/uv-python/src/managed.rs b/crates/uv-python/src/managed.rs index acdeb4e46..ad1dacac6 100644 --- a/crates/uv-python/src/managed.rs +++ b/crates/uv-python/src/managed.rs @@ -2,6 +2,8 @@ use core::fmt; use std::cmp::Reverse; use std::ffi::OsStr; use std::io::{self, Write}; +#[cfg(windows)] +use std::os::windows::fs::MetadataExt; use std::path::{Path, PathBuf}; use std::str::FromStr; @@ -10,8 +12,11 @@ use itertools::Itertools; use same_file::is_same_file; use thiserror::Error; use tracing::{debug, warn}; +use uv_configuration::PreviewMode; +#[cfg(windows)] +use windows_sys::Win32::Storage::FileSystem::FILE_ATTRIBUTE_REPARSE_POINT; -use uv_fs::{LockedFile, Simplified, symlink_or_copy_file}; +use uv_fs::{LockedFile, Simplified, replace_symlink, symlink_or_copy_file}; use uv_state::{StateBucket, StateStore}; use uv_static::EnvVars; use uv_trampoline_builder::{Launcher, windows_python_launcher}; @@ -25,7 +30,9 @@ use crate::libc::LibcDetectionError; use crate::platform::Error as PlatformError; use crate::platform::{Arch, Libc, Os}; use crate::python_version::PythonVersion; -use crate::{PythonRequest, PythonVariant, macos_dylib, sysconfig}; +use crate::{ + PythonInstallationMinorVersionKey, PythonRequest, PythonVariant, macos_dylib, sysconfig, +}; #[derive(Error, Debug)] pub enum Error { @@ -51,6 +58,8 @@ pub enum Error { }, #[error("Missing expected Python executable at {}", _0.user_display())] MissingExecutable(PathBuf), + #[error("Missing expected target directory for Python minor version link at {}", _0.user_display())] + MissingPythonMinorVersionLinkTargetDirectory(PathBuf), #[error("Failed to create canonical Python executable at {} from {}", to.user_display(), from.user_display())] CanonicalizeExecutable { from: PathBuf, @@ -65,6 
+74,13 @@ pub enum Error { #[source] err: io::Error, }, + #[error("Failed to create Python minor version link directory at {} from {}", to.user_display(), from.user_display())] + PythonMinorVersionLinkDirectory { + from: PathBuf, + to: PathBuf, + #[source] + err: io::Error, + }, #[error("Failed to create directory for Python executable link at {}", to.user_display())] ExecutableDirectory { to: PathBuf, @@ -87,7 +103,7 @@ pub enum Error { AbsolutePath(PathBuf, #[source] io::Error), #[error(transparent)] NameParseError(#[from] installation::PythonInstallationKeyError), - #[error(transparent)] + #[error("Failed to determine the libc used on the current platform")] LibcDetection(#[from] LibcDetectionError), #[error(transparent)] MacOsDylib(#[from] macos_dylib::Error), @@ -339,18 +355,14 @@ impl ManagedPythonInstallation { /// The path to this managed installation's Python executable. /// - /// If the installation has multiple execututables i.e., `python`, `python3`, etc., this will + /// If the installation has multiple executables i.e., `python`, `python3`, etc., this will /// return the _canonical_ executable name which the other names link to. On Unix, this is /// `python{major}.{minor}{variant}` and on Windows, this is `python{exe}`. /// /// If windowed is true, `pythonw.exe` is selected over `python.exe` on windows, with no changes /// on non-windows. pub fn executable(&self, windowed: bool) -> PathBuf { - let implementation = match self.implementation() { - ImplementationName::CPython => "python", - ImplementationName::PyPy => "pypy", - ImplementationName::GraalPy => "graalpy", - }; + let implementation = self.implementation().executable_name(); let version = match self.implementation() { ImplementationName::CPython => { @@ -383,13 +395,11 @@ impl ManagedPythonInstallation { exe = std::env::consts::EXE_SUFFIX ); - let executable = if cfg!(unix) || *self.implementation() == ImplementationName::GraalPy { - self.python_dir().join("bin").join(name) - } else if cfg!(windows) { - self.python_dir().join(name) - } else { - unimplemented!("Only Windows and Unix systems are supported.") - }; + let executable = executable_path_from_base( + self.python_dir().as_path(), + &name, + &LenientImplementationName::from(*self.implementation()), + ); // Workaround for python-build-standalone v20241016 which is missing the standard // `python.exe` executable in free-threaded distributions on Windows. @@ -442,6 +452,10 @@ impl ManagedPythonInstallation { &self.key } + pub fn minor_version_key(&self) -> &PythonInstallationMinorVersionKey { + PythonInstallationMinorVersionKey::ref_cast(&self.key) + } + pub fn satisfies(&self, request: &PythonRequest) -> bool { match request { PythonRequest::File(path) => self.executable(false) == *path, @@ -503,6 +517,30 @@ impl ManagedPythonInstallation { Ok(()) } + /// Ensure the environment contains the symlink directory (or junction on Windows) + /// pointing to the patch directory for this minor version. + pub fn ensure_minor_version_link(&self, preview: PreviewMode) -> Result<(), Error> { + if let Some(minor_version_link) = PythonMinorVersionLink::from_installation(self, preview) { + minor_version_link.create_directory()?; + } + Ok(()) + } + + /// If the environment contains a symlink directory (or junction on Windows), + /// update it to the latest patch directory for this minor version. 
+ /// + /// Unlike [`ensure_minor_version_link`], will not create a new symlink directory + /// if one doesn't already exist, + pub fn update_minor_version_link(&self, preview: PreviewMode) -> Result<(), Error> { + if let Some(minor_version_link) = PythonMinorVersionLink::from_installation(self, preview) { + if !minor_version_link.exists() { + return Ok(()); + } + minor_version_link.create_directory()?; + } + Ok(()) + } + /// Ensure the environment is marked as externally managed with the /// standard `EXTERNALLY-MANAGED` file. pub fn ensure_externally_managed(&self) -> Result<(), Error> { @@ -567,54 +605,8 @@ impl ManagedPythonInstallation { Ok(()) } - /// Create a link to the managed Python executable. - /// - /// If the file already exists at the target path, an error will be returned. - pub fn create_bin_link(&self, target: &Path) -> Result<(), Error> { - let python = self.executable(false); - - let bin = target.parent().ok_or(Error::NoExecutableDirectory)?; - fs_err::create_dir_all(bin).map_err(|err| Error::ExecutableDirectory { - to: bin.to_path_buf(), - err, - })?; - - if cfg!(unix) { - // Note this will never copy on Unix — we use it here to allow compilation on Windows - match symlink_or_copy_file(&python, target) { - Ok(()) => Ok(()), - Err(err) if err.kind() == io::ErrorKind::NotFound => { - Err(Error::MissingExecutable(python.clone())) - } - Err(err) => Err(Error::LinkExecutable { - from: python, - to: target.to_path_buf(), - err, - }), - } - } else if cfg!(windows) { - // TODO(zanieb): Install GUI launchers as well - let launcher = windows_python_launcher(&python, false)?; - - // OK to use `std::fs` here, `fs_err` does not support `File::create_new` and we attach - // error context anyway - #[allow(clippy::disallowed_types)] - { - std::fs::File::create_new(target) - .and_then(|mut file| file.write_all(launcher.as_ref())) - .map_err(|err| Error::LinkExecutable { - from: python, - to: target.to_path_buf(), - err, - }) - } - } else { - unimplemented!("Only Windows and Unix systems are supported.") - } - } - /// Returns `true` if the path is a link to this installation's binary, e.g., as created by - /// [`ManagedPythonInstallation::create_bin_link`]. + /// [`create_bin_link`]. pub fn is_bin_link(&self, path: &Path) -> bool { if cfg!(unix) { is_same_file(path, self.executable(false)).unwrap_or_default() @@ -625,7 +617,11 @@ impl ManagedPythonInstallation { if !matches!(launcher.kind, uv_trampoline_builder::LauncherKind::Python) { return false; } - launcher.python_path == self.executable(false) + // We canonicalize the target path of the launcher in case it includes a minor version + // junction directory. If canonicalization fails, we check against the launcher path + // directly. + dunce::canonicalize(&launcher.python_path).unwrap_or(launcher.python_path) + == self.executable(false) } else { unreachable!("Only Windows and Unix are supported") } @@ -669,6 +665,229 @@ impl ManagedPythonInstallation { } } +/// A representation of a minor version symlink directory (or junction on Windows) +/// linking to the home directory of a Python installation. +#[derive(Clone, Debug)] +pub struct PythonMinorVersionLink { + /// The symlink directory (or junction on Windows). + pub symlink_directory: PathBuf, + /// The full path to the executable including the symlink directory + /// (or junction on Windows). + pub symlink_executable: PathBuf, + /// The target directory for the symlink. This is the home directory for + /// a Python installation. 
+ pub target_directory: PathBuf, +} + +impl PythonMinorVersionLink { + /// Attempt to derive a path from an executable path that substitutes a minor + /// version symlink directory (or junction on Windows) for the patch version + /// directory. + /// + /// The implementation is expected to be CPython and, on Unix, the base Python is + /// expected to be in `/bin/` on Unix. If either condition isn't true, + /// return [`None`]. + /// + /// # Examples + /// + /// ## Unix + /// For a Python 3.10.8 installation in `/path/to/uv/python/cpython-3.10.8-macos-aarch64-none/bin/python3.10`, + /// the symlink directory would be `/path/to/uv/python/cpython-3.10-macos-aarch64-none` and the executable path including the + /// symlink directory would be `/path/to/uv/python/cpython-3.10-macos-aarch64-none/bin/python3.10`. + /// + /// ## Windows + /// For a Python 3.10.8 installation in `C:\path\to\uv\python\cpython-3.10.8-windows-x86_64-none\python.exe`, + /// the junction would be `C:\path\to\uv\python\cpython-3.10-windows-x86_64-none` and the executable path including the + /// junction would be `C:\path\to\uv\python\cpython-3.10-windows-x86_64-none\python.exe`. + pub fn from_executable( + executable: &Path, + key: &PythonInstallationKey, + preview: PreviewMode, + ) -> Option { + let implementation = key.implementation(); + if !matches!( + implementation, + LenientImplementationName::Known(ImplementationName::CPython) + ) { + // We don't currently support transparent upgrades for PyPy or GraalPy. + return None; + } + let executable_name = executable + .file_name() + .expect("Executable file name should exist"); + let symlink_directory_name = PythonInstallationMinorVersionKey::ref_cast(key).to_string(); + let parent = executable + .parent() + .expect("Executable should have parent directory"); + + // The home directory of the Python installation + let target_directory = if cfg!(unix) { + if parent + .components() + .next_back() + .is_some_and(|c| c.as_os_str() == "bin") + { + parent.parent()?.to_path_buf() + } else { + return None; + } + } else if cfg!(windows) { + parent.to_path_buf() + } else { + unimplemented!("Only Windows and Unix systems are supported.") + }; + let symlink_directory = target_directory.with_file_name(symlink_directory_name); + // If this would create a circular link, return `None`. + if target_directory == symlink_directory { + return None; + } + // The full executable path including the symlink directory (or junction). + let symlink_executable = executable_path_from_base( + symlink_directory.as_path(), + &executable_name.to_string_lossy(), + implementation, + ); + let minor_version_link = Self { + symlink_directory, + symlink_executable, + target_directory, + }; + // If preview mode is disabled, still return a `MinorVersionSymlink` for + // existing symlinks, allowing continued operations without the `--preview` + // flag after initial symlink directory installation. 
+ if preview.is_disabled() && !minor_version_link.exists() { + return None; + } + Some(minor_version_link) + } + + pub fn from_installation( + installation: &ManagedPythonInstallation, + preview: PreviewMode, + ) -> Option { + PythonMinorVersionLink::from_executable( + installation.executable(false).as_path(), + installation.key(), + preview, + ) + } + + pub fn create_directory(&self) -> Result<(), Error> { + match replace_symlink( + self.target_directory.as_path(), + self.symlink_directory.as_path(), + ) { + Ok(()) => { + debug!( + "Created link {} -> {}", + &self.symlink_directory.user_display(), + &self.target_directory.user_display(), + ); + } + Err(err) if err.kind() == io::ErrorKind::NotFound => { + return Err(Error::MissingPythonMinorVersionLinkTargetDirectory( + self.target_directory.clone(), + )); + } + Err(err) if err.kind() == io::ErrorKind::AlreadyExists => {} + Err(err) => { + return Err(Error::PythonMinorVersionLinkDirectory { + from: self.symlink_directory.clone(), + to: self.target_directory.clone(), + err, + }); + } + } + Ok(()) + } + + pub fn exists(&self) -> bool { + #[cfg(unix)] + { + self.symlink_directory + .symlink_metadata() + .map(|metadata| metadata.file_type().is_symlink()) + .unwrap_or(false) + } + #[cfg(windows)] + { + self.symlink_directory + .symlink_metadata() + .is_ok_and(|metadata| { + // Check that this is a reparse point, which indicates this + // is a symlink or junction. + (metadata.file_attributes() & FILE_ATTRIBUTE_REPARSE_POINT) != 0 + }) + } + } +} + +/// Derive the full path to an executable from the given base path and executable +/// name. On Unix, this is, e.g., `/bin/python3.10`. On Windows, this is, +/// e.g., `\python.exe`. +fn executable_path_from_base( + base: &Path, + executable_name: &str, + implementation: &LenientImplementationName, +) -> PathBuf { + if cfg!(unix) + || matches!( + implementation, + &LenientImplementationName::Known(ImplementationName::GraalPy) + ) + { + base.join("bin").join(executable_name) + } else if cfg!(windows) { + base.join(executable_name) + } else { + unimplemented!("Only Windows and Unix systems are supported.") + } +} + +/// Create a link to a managed Python executable. +/// +/// If the file already exists at the link path, an error will be returned. 
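A rough usage sketch (illustrative, not taken from the patch) of how the minor-version link API above composes into an "ensure" flow, assuming `installation: &ManagedPythonInstallation` and `preview: PreviewMode` are in scope and the caller returns `Result<(), Error>`:

    // Illustrative sketch only — composes the API introduced above.
    if let Some(link) = PythonMinorVersionLink::from_installation(installation, preview) {
        // Create (or replace) the minor-version directory, e.g.
        // `cpython-3.10-macos-aarch64-none`, pointing at the patch-version home directory.
        link.create_directory()?;
        debug_assert!(link.exists());
    }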
+pub fn create_link_to_executable(link: &Path, executable: PathBuf) -> Result<(), Error> { + let link_parent = link.parent().ok_or(Error::NoExecutableDirectory)?; + fs_err::create_dir_all(link_parent).map_err(|err| Error::ExecutableDirectory { + to: link_parent.to_path_buf(), + err, + })?; + + if cfg!(unix) { + // Note this will never copy on Unix — we use it here to allow compilation on Windows + match symlink_or_copy_file(&executable, link) { + Ok(()) => Ok(()), + Err(err) if err.kind() == io::ErrorKind::NotFound => { + Err(Error::MissingExecutable(executable.clone())) + } + Err(err) => Err(Error::LinkExecutable { + from: executable, + to: link.to_path_buf(), + err, + }), + } + } else if cfg!(windows) { + // TODO(zanieb): Install GUI launchers as well + let launcher = windows_python_launcher(&executable, false)?; + + // OK to use `std::fs` here, `fs_err` does not support `File::create_new` and we attach + // error context anyway + #[allow(clippy::disallowed_types)] + { + std::fs::File::create_new(link) + .and_then(|mut file| file.write_all(launcher.as_ref())) + .map_err(|err| Error::LinkExecutable { + from: executable, + to: link.to_path_buf(), + err, + }) + } + } else { + unimplemented!("Only Windows and Unix systems are supported.") + } +} + // TODO(zanieb): Only used in tests now. /// Generate a platform portion of a key from the environment. pub fn platform_key_from_env() -> Result { diff --git a/crates/uv-python/src/platform.rs b/crates/uv-python/src/platform.rs index 0e64a0fea..ce8620ae2 100644 --- a/crates/uv-python/src/platform.rs +++ b/crates/uv-python/src/platform.rs @@ -18,7 +18,7 @@ pub enum Error { } /// Architecture variants, e.g., with support for different instruction sets -#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)] +#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash, Ord, PartialOrd)] pub enum ArchVariant { /// Targets 64-bit Intel/AMD CPUs newer than Nehalem (2008). /// Includes SSE3, SSE4 and other post-2003 CPU instructions. @@ -37,6 +37,54 @@ pub struct Arch { pub(crate) variant: Option, } +impl Ord for Arch { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + if self.family == other.family { + return self.variant.cmp(&other.variant); + } + + // For the time being, manually make aarch64 windows disfavored + // on its own host platform, because most packages don't have wheels for + // aarch64 windows, making emulation more useful than native execution! + // + // The reason we do this in "sorting" and not "supports" is so that we don't + // *refuse* to use an aarch64 windows pythons if they happen to be installed + // and nothing else is available. + // + // Similarly if someone manually requests an aarch64 windows install, we + // should respect that request (this is the way users should "override" + // this behaviour). 
+ let preferred = if cfg!(all(windows, target_arch = "aarch64")) { + Arch { + family: target_lexicon::Architecture::X86_64, + variant: None, + } + } else { + // Prefer native architectures + Arch::from_env() + }; + + match ( + self.family == preferred.family, + other.family == preferred.family, + ) { + (true, true) => unreachable!(), + (true, false) => std::cmp::Ordering::Less, + (false, true) => std::cmp::Ordering::Greater, + (false, false) => { + // Both non-preferred, fallback to lexicographic order + self.family.to_string().cmp(&other.family.to_string()) + } + } + } +} + +impl PartialOrd for Arch { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + #[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)] pub struct Os(pub(crate) target_lexicon::OperatingSystem); @@ -116,7 +164,12 @@ impl Arch { // TODO: Implement `variant` support checks // Windows ARM64 runs emulated x86_64 binaries transparently - if cfg!(windows) && matches!(self.family, target_lexicon::Architecture::Aarch64(_)) { + // Similarly, macOS aarch64 runs emulated x86_64 binaries transparently if you have Rosetta + // installed. We don't try to be clever and check if that's the case here, we just assume + // that if x86_64 distributions are available, they're usable. + if (cfg!(windows) || cfg!(target_os = "macos")) + && matches!(self.family, target_lexicon::Architecture::Aarch64(_)) + { return other.family == target_lexicon::Architecture::X86_64; } @@ -316,6 +369,10 @@ impl From<&uv_platform_tags::Arch> for Arch { ), variant: None, }, + uv_platform_tags::Arch::Wasm32 => Self { + family: target_lexicon::Architecture::Wasm32, + variant: None, + }, } } } @@ -348,6 +405,9 @@ impl From<&uv_platform_tags::Os> for Os { uv_platform_tags::Os::NetBsd { .. } => Self(target_lexicon::OperatingSystem::Netbsd), uv_platform_tags::Os::OpenBsd { .. } => Self(target_lexicon::OperatingSystem::Openbsd), uv_platform_tags::Os::Windows => Self(target_lexicon::OperatingSystem::Windows), + uv_platform_tags::Os::Pyodide { .. } => { + Self(target_lexicon::OperatingSystem::Emscripten) + } } } } diff --git a/crates/uv-python/src/python_version.rs b/crates/uv-python/src/python_version.rs index 30dfccecd..c5d8f6365 100644 --- a/crates/uv-python/src/python_version.rs +++ b/crates/uv-python/src/python_version.rs @@ -1,3 +1,5 @@ +#[cfg(feature = "schemars")] +use std::borrow::Cow; use std::fmt::{Display, Formatter}; use std::ops::Deref; use std::str::FromStr; @@ -5,7 +7,7 @@ use std::str::FromStr; use uv_pep440::Version; use uv_pep508::{MarkerEnvironment, StringVersion}; -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct PythonVersion(StringVersion); impl From for PythonVersion { @@ -65,26 +67,16 @@ impl FromStr for PythonVersion { #[cfg(feature = "schemars")] impl schemars::JsonSchema for PythonVersion { - fn schema_name() -> String { - String::from("PythonVersion") + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("PythonVersion") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - schemars::schema::SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - string: Some(Box::new(schemars::schema::StringValidation { - pattern: Some(r"^3\.\d+(\.\d+)?$".to_string()), - ..schemars::schema::StringValidation::default() - })), - metadata: Some(Box::new(schemars::schema::Metadata { - description: Some( - "A Python version specifier, e.g. 
`3.11` or `3.12.4`.".to_string(), - ), - ..schemars::schema::Metadata::default() - })), - ..schemars::schema::SchemaObject::default() - } - .into() + fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "string", + "pattern": r"^3\.\d+(\.\d+)?$", + "description": "A Python version specifier, e.g. `3.11` or `3.12.4`." + }) } } diff --git a/crates/uv-python/src/sysconfig/generated_mappings.rs b/crates/uv-python/src/sysconfig/generated_mappings.rs new file mode 100644 index 000000000..54170aba5 --- /dev/null +++ b/crates/uv-python/src/sysconfig/generated_mappings.rs @@ -0,0 +1,94 @@ +//! DO NOT EDIT +//! +//! Generated with `cargo run dev generate-sysconfig-metadata` +//! Targets from +//! +#![allow(clippy::all)] +#![cfg_attr(any(), rustfmt::skip)] + +use std::collections::BTreeMap; +use std::sync::LazyLock; + +use crate::sysconfig::replacements::{ReplacementEntry, ReplacementMode}; + +/// Mapping for sysconfig keys to lookup and replace with the appropriate entry. +pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock>> = LazyLock::new(|| { + BTreeMap::from_iter([ + ("BLDSHARED".to_string(), vec![ + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mipsel-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/powerpc64le-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/riscv64-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/s390x-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/x86_64-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "clang".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "musl-clang".to_string() }, to: "cc".to_string() }, + ]), + ("CC".to_string(), vec![ + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mipsel-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/powerpc64le-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/riscv64-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/s390x-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/x86_64-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + 
ReplacementEntry { mode: ReplacementMode::Partial { from: "clang".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "musl-clang".to_string() }, to: "cc".to_string() }, + ]), + ("CXX".to_string(), vec![ + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mipsel-linux-gnu-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/powerpc64le-linux-gnu-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/riscv64-linux-gnu-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/s390x-linux-gnu-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/x86_64-linux-gnu-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "clang++".to_string() }, to: "c++".to_string() }, + ]), + ("LDCXXSHARED".to_string(), vec![ + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mipsel-linux-gnu-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/powerpc64le-linux-gnu-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/riscv64-linux-gnu-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/s390x-linux-gnu-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/x86_64-linux-gnu-g++".to_string() }, to: "c++".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "clang++".to_string() }, to: "c++".to_string() }, + ]), + ("LDSHARED".to_string(), vec![ + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mipsel-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/powerpc64le-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/riscv64-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: 
"/usr/bin/s390x-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/x86_64-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "clang".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "musl-clang".to_string() }, to: "cc".to_string() }, + ]), + ("LINKCC".to_string(), vec![ + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mipsel-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/powerpc64le-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/riscv64-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/s390x-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/x86_64-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "clang".to_string() }, to: "cc".to_string() }, + ReplacementEntry { mode: ReplacementMode::Partial { from: "musl-clang".to_string() }, to: "cc".to_string() }, + ]), + ("AR".to_string(), vec![ + ReplacementEntry { + mode: ReplacementMode::Full, + to: "ar".to_string(), + }, + ]), + ]) +}); diff --git a/crates/uv-python/src/sysconfig/mod.rs b/crates/uv-python/src/sysconfig/mod.rs index 822408024..9aa2323bf 100644 --- a/crates/uv-python/src/sysconfig/mod.rs +++ b/crates/uv-python/src/sysconfig/mod.rs @@ -25,131 +25,20 @@ //! ``` use std::borrow::Cow; -use std::collections::BTreeMap; use std::io::Write; use std::path::{Path, PathBuf}; use std::str::FromStr; -use std::sync::LazyLock; use itertools::{Either, Itertools}; use tracing::trace; +use crate::sysconfig::generated_mappings::DEFAULT_VARIABLE_UPDATES; use crate::sysconfig::parser::{Error as ParseError, SysconfigData, Value}; mod cursor; +mod generated_mappings; mod parser; - -/// Replacement mode for sysconfig values. -#[derive(Debug)] -enum ReplacementMode { - Partial { from: String }, - Full, -} - -/// A replacement entry to patch in sysconfig data. -#[derive(Debug)] -struct ReplacementEntry { - mode: ReplacementMode, - to: String, -} - -impl ReplacementEntry { - /// Patches a sysconfig value either partially (replacing a specific word) or fully. - fn patch(&self, entry: &str) -> String { - match &self.mode { - ReplacementMode::Partial { from } => entry - .split_whitespace() - .map(|word| if word == from { &self.to } else { word }) - .collect::>() - .join(" "), - ReplacementMode::Full => self.to.clone(), - } - } -} - -/// Mapping for sysconfig keys to lookup and replace with the appropriate entry. 
-static DEFAULT_VARIABLE_UPDATES: LazyLock>> = - LazyLock::new(|| { - BTreeMap::from_iter([ - ( - "CC".to_string(), - vec![ - ReplacementEntry { - mode: ReplacementMode::Partial { - from: "clang".to_string(), - }, - to: "cc".to_string(), - }, - ReplacementEntry { - mode: ReplacementMode::Partial { - from: "/usr/bin/aarch64-linux-gnu-gcc".to_string(), - }, - to: "cc".to_string(), - }, - ], - ), - ( - "CXX".to_string(), - vec![ - ReplacementEntry { - mode: ReplacementMode::Partial { - from: "clang++".to_string(), - }, - to: "c++".to_string(), - }, - ReplacementEntry { - mode: ReplacementMode::Partial { - from: "/usr/bin/x86_64-linux-gnu-g++".to_string(), - }, - to: "c++".to_string(), - }, - ], - ), - ( - "BLDSHARED".to_string(), - vec![ReplacementEntry { - mode: ReplacementMode::Partial { - from: "clang".to_string(), - }, - to: "cc".to_string(), - }], - ), - ( - "LDSHARED".to_string(), - vec![ReplacementEntry { - mode: ReplacementMode::Partial { - from: "clang".to_string(), - }, - to: "cc".to_string(), - }], - ), - ( - "LDCXXSHARED".to_string(), - vec![ReplacementEntry { - mode: ReplacementMode::Partial { - from: "clang++".to_string(), - }, - to: "c++".to_string(), - }], - ), - ( - "LINKCC".to_string(), - vec![ReplacementEntry { - mode: ReplacementMode::Partial { - from: "clang".to_string(), - }, - to: "cc".to_string(), - }], - ), - ( - "AR".to_string(), - vec![ReplacementEntry { - mode: ReplacementMode::Full, - to: "ar".to_string(), - }], - ), - ]) - }); +mod replacements; /// Update the `sysconfig` data in a Python installation. pub(crate) fn update_sysconfig( @@ -460,7 +349,7 @@ mod tests { // Cross-compiles use GNU let sysconfigdata = [ - ("CC", "/usr/bin/aarch64-linux-gnu-gcc"), + ("CC", "/usr/bin/riscv64-linux-gnu-gcc"), ("CXX", "/usr/bin/x86_64-linux-gnu-g++"), ] .into_iter() diff --git a/crates/uv-python/src/sysconfig/replacements.rs b/crates/uv-python/src/sysconfig/replacements.rs new file mode 100644 index 000000000..63119bcd8 --- /dev/null +++ b/crates/uv-python/src/sysconfig/replacements.rs @@ -0,0 +1,27 @@ +/// Replacement mode for sysconfig values. +#[derive(Debug)] +pub(crate) enum ReplacementMode { + Partial { from: String }, + Full, +} + +/// A replacement entry to patch in sysconfig data. +#[derive(Debug)] +pub(crate) struct ReplacementEntry { + pub(crate) mode: ReplacementMode, + pub(crate) to: String, +} + +impl ReplacementEntry { + /// Patches a sysconfig value either partially (replacing a specific word) or fully. + pub(crate) fn patch(&self, entry: &str) -> String { + match &self.mode { + ReplacementMode::Partial { from } => entry + .split_whitespace() + .map(|word| if word == from { &self.to } else { word }) + .collect::>() + .join(" "), + ReplacementMode::Full => self.to.clone(), + } + } +} diff --git a/crates/uv-python/src/version_files.rs b/crates/uv-python/src/version_files.rs index 894654a3c..a9cd05b7e 100644 --- a/crates/uv-python/src/version_files.rs +++ b/crates/uv-python/src/version_files.rs @@ -37,11 +37,14 @@ pub enum FilePreference { pub struct DiscoveryOptions<'a> { /// The path to stop discovery at. stop_discovery_at: Option<&'a Path>, - /// When `no_config` is set, Python version files will be ignored. + /// Ignore Python version files. /// /// Discovery will still run in order to display a log about the ignored file. no_config: bool, + /// Whether `.python-version` or `.python-versions` should be preferred. preference: FilePreference, + /// Whether to ignore local version files, and only search for a global one. 
+ no_local: bool, } impl<'a> DiscoveryOptions<'a> { @@ -62,6 +65,11 @@ impl<'a> DiscoveryOptions<'a> { ..self } } + + #[must_use] + pub fn with_no_local(self, no_local: bool) -> Self { + Self { no_local, ..self } + } } impl PythonVersionFile { @@ -70,33 +78,38 @@ impl PythonVersionFile { working_directory: impl AsRef, options: &DiscoveryOptions<'_>, ) -> Result, std::io::Error> { - let Some(path) = Self::find_nearest(&working_directory, options) else { - if let Some(stop_discovery_at) = options.stop_discovery_at { - if stop_discovery_at == working_directory.as_ref() { - debug!( - "No Python version file found in workspace: {}", - working_directory.as_ref().display() - ); + let allow_local = !options.no_local; + let Some(path) = allow_local.then(|| { + // First, try to find a local version file. + let local = Self::find_nearest(&working_directory, options); + if local.is_none() { + // Log where we searched for the file, if not found + if let Some(stop_discovery_at) = options.stop_discovery_at { + if stop_discovery_at == working_directory.as_ref() { + debug!( + "No Python version file found in workspace: {}", + working_directory.as_ref().display() + ); + } else { + debug!( + "No Python version file found between working directory `{}` and workspace root `{}`", + working_directory.as_ref().display(), + stop_discovery_at.display() + ); + } } else { debug!( - "No Python version file found between working directory `{}` and workspace root `{}`", - working_directory.as_ref().display(), - stop_discovery_at.display() + "No Python version file found in ancestors of working directory: {}", + working_directory.as_ref().display() ); } - } else { - debug!( - "No Python version file found in ancestors of working directory: {}", - working_directory.as_ref().display() - ); } - // Not found in directory or its ancestors. Looking in user-level config. - return Ok(match user_uv_config_dir() { - Some(user_dir) => Self::discover_user_config(user_dir, options) - .await? - .or(None), - None => None, - }); + local + }).flatten().or_else(|| { + // Search for a global config + Self::find_global(options) + }) else { + return Ok(None); }; if options.no_config { @@ -111,20 +124,9 @@ impl PythonVersionFile { Self::try_from_path(path).await } - pub async fn discover_user_config( - user_config_working_directory: impl AsRef, - options: &DiscoveryOptions<'_>, - ) -> Result, std::io::Error> { - if !options.no_config { - if let Some(path) = - Self::find_in_directory(user_config_working_directory.as_ref(), options) - .into_iter() - .find(|path| path.is_file()) - { - return Self::try_from_path(path).await; - } - } - Ok(None) + fn find_global(options: &DiscoveryOptions<'_>) -> Option { + let user_config_dir = user_uv_config_dir()?; + Self::find_in_directory(&user_config_dir, options) } fn find_nearest(path: impl AsRef, options: &DiscoveryOptions<'_>) -> Option { diff --git a/crates/uv-python/src/virtualenv.rs b/crates/uv-python/src/virtualenv.rs index 7d72188fc..8b51a5e1b 100644 --- a/crates/uv-python/src/virtualenv.rs +++ b/crates/uv-python/src/virtualenv.rs @@ -32,7 +32,6 @@ pub struct VirtualEnvironment { /// A parsed `pyvenv.cfg` #[derive(Debug, Clone)] -#[allow(clippy::struct_excessive_bools)] pub struct PyVenvConfiguration { /// Was the virtual environment created with the `virtualenv` package? pub(crate) virtualenv: bool, @@ -131,14 +130,14 @@ pub(crate) fn virtualenv_from_working_dir() -> Result, Error> { for dir in current_dir.ancestors() { // If we're _within_ a virtualenv, return it. 
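A small illustration of the new `no_local` flag at a hypothetical call site (assumes `DiscoveryOptions` provides a `Default` constructor, as its builder-style `with_*` methods suggest):

    // Skip any local `.python-version`/`.python-versions` file and only consult
    // the user-level configuration directory.
    let options = DiscoveryOptions::default().with_no_local(true);

With `no_local` set, the discovery code above bypasses `find_nearest` entirely and falls through to `find_global`, which reads the user-level uv configuration directory.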
- if dir.join("pyvenv.cfg").is_file() { + if uv_fs::is_virtualenv_base(dir) { return Ok(Some(dir.to_path_buf())); } // Otherwise, search for a `.venv` directory. let dot_venv = dir.join(".venv"); if dot_venv.is_dir() { - if !dot_venv.join("pyvenv.cfg").is_file() { + if !uv_fs::is_virtualenv_base(&dot_venv) { return Err(Error::MissingPyVenvCfg(dot_venv)); } return Ok(Some(dot_venv)); diff --git a/crates/uv-redacted/Cargo.toml b/crates/uv-redacted/Cargo.toml index bf337ad23..52fca3f89 100644 --- a/crates/uv-redacted/Cargo.toml +++ b/crates/uv-redacted/Cargo.toml @@ -16,4 +16,10 @@ doctest = false workspace = true [dependencies] +ref-cast = { workspace = true } +schemars = { workspace = true, optional = true } +serde = { workspace = true } url = { workspace = true } + +[features] +schemars = ["dep:schemars"] diff --git a/crates/uv-redacted/src/lib.rs b/crates/uv-redacted/src/lib.rs index 36ef5a46f..5c9a8e278 100644 --- a/crates/uv-redacted/src/lib.rs +++ b/crates/uv-redacted/src/lib.rs @@ -1,21 +1,227 @@ -use std::borrow::Cow; - +use ref_cast::RefCast; +use serde::{Deserialize, Serialize}; +use std::fmt::{Debug, Display}; +use std::ops::{Deref, DerefMut}; +use std::str::FromStr; use url::Url; -/// Return a version of the URL with redacted credentials, allowing the generic `git` username (without a password) -/// in SSH URLs, as in, `ssh://git@github.com/...`. -pub fn redacted_url(url: &Url) -> Cow<'_, Url> { - if url.username().is_empty() && url.password().is_none() { - return Cow::Borrowed(url); - } - if url.scheme() == "ssh" && url.username() == "git" && url.password().is_none() { - return Cow::Borrowed(url); +/// A [`Url`] wrapper that redacts credentials when displaying the URL. +/// +/// `DisplaySafeUrl` wraps the standard [`url::Url`] type, providing functionality to mask +/// secrets by default when the URL is displayed or logged. This helps prevent accidental +/// exposure of sensitive information in logs and debug output. +/// +/// # Examples +/// +/// ``` +/// use uv_redacted::DisplaySafeUrl; +/// use std::str::FromStr; +/// +/// // Create a `DisplaySafeUrl` from a `&str` +/// let mut url = DisplaySafeUrl::parse("https://user:password@example.com").unwrap(); +/// +/// // Display will mask secrets +/// assert_eq!(url.to_string(), "https://user:****@example.com/"); +/// +/// // You can still access the username and password +/// assert_eq!(url.username(), "user"); +/// assert_eq!(url.password(), Some("password")); +/// +/// // And you can still update the username and password +/// let _ = url.set_username("new_user"); +/// let _ = url.set_password(Some("new_password")); +/// assert_eq!(url.username(), "new_user"); +/// assert_eq!(url.password(), Some("new_password")); +/// +/// // It is also possible to remove the credentials entirely +/// url.remove_credentials(); +/// assert_eq!(url.username(), ""); +/// assert_eq!(url.password(), None); +/// ``` +#[derive(Clone, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize, RefCast)] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[cfg_attr(feature = "schemars", schemars(transparent))] +#[repr(transparent)] +pub struct DisplaySafeUrl(Url); + +impl DisplaySafeUrl { + #[inline] + pub fn parse(input: &str) -> Result { + Ok(Self(Url::parse(input)?)) } - let mut url = url.clone(); - let _ = url.set_username(""); - let _ = url.set_password(None); - Cow::Owned(url) + /// Cast a `&Url` to a `&DisplaySafeUrl` using ref-cast. 
+ #[inline] + pub fn ref_cast(url: &Url) -> &Self { + RefCast::ref_cast(url) + } + + /// Parse a string as an URL, with this URL as the base URL. + #[inline] + pub fn join(&self, input: &str) -> Result { + self.0.join(input).map(DisplaySafeUrl::from) + } + + /// Serialize with Serde using the internal representation of the `Url` struct. + #[inline] + pub fn serialize_internal(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + self.0.serialize_internal(serializer) + } + + /// Serialize with Serde using the internal representation of the `Url` struct. + #[inline] + pub fn deserialize_internal<'de, D>(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + Url::deserialize_internal(deserializer).map(DisplaySafeUrl::from) + } + + #[allow(clippy::result_unit_err)] + pub fn from_file_path>(path: P) -> Result { + Url::from_file_path(path).map(DisplaySafeUrl::from) + } + + /// Remove the credentials from a URL, allowing the generic `git` username (without a password) + /// in SSH URLs, as in, `ssh://git@github.com/...`. + #[inline] + pub fn remove_credentials(&mut self) { + // For URLs that use the `git` convention (i.e., `ssh://git@github.com/...`), avoid dropping the + // username. + if is_ssh_git_username(&self.0) { + return; + } + let _ = self.0.set_username(""); + let _ = self.0.set_password(None); + } + + /// Returns [`Display`] implementation that doesn't mask credentials. + #[inline] + pub fn displayable_with_credentials(&self) -> impl Display { + &self.0 + } +} + +impl Deref for DisplaySafeUrl { + type Target = Url; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for DisplaySafeUrl { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl Display for DisplaySafeUrl { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + display_with_redacted_credentials(&self.0, f) + } +} + +impl Debug for DisplaySafeUrl { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let url = &self.0; + // For URLs that use the `git` convention (i.e., `ssh://git@github.com/...`), avoid masking the + // username. 
+ let (username, password) = if is_ssh_git_username(url) { + (url.username(), None) + } else if url.username() != "" && url.password().is_some() { + (url.username(), Some("****")) + } else if url.username() != "" { + ("****", None) + } else if url.password().is_some() { + ("", Some("****")) + } else { + ("", None) + }; + + f.debug_struct("DisplaySafeUrl") + .field("scheme", &url.scheme()) + .field("cannot_be_a_base", &url.cannot_be_a_base()) + .field("username", &username) + .field("password", &password) + .field("host", &url.host()) + .field("port", &url.port()) + .field("path", &url.path()) + .field("query", &url.query()) + .field("fragment", &url.fragment()) + .finish() + } +} + +impl From for DisplaySafeUrl { + fn from(url: Url) -> Self { + DisplaySafeUrl(url) + } +} + +impl From for Url { + fn from(url: DisplaySafeUrl) -> Self { + url.0 + } +} + +impl FromStr for DisplaySafeUrl { + type Err = url::ParseError; + + fn from_str(input: &str) -> Result { + Ok(Self(Url::from_str(input)?)) + } +} + +fn is_ssh_git_username(url: &Url) -> bool { + matches!(url.scheme(), "ssh" | "git+ssh" | "git+https") + && url.username() == "git" + && url.password().is_none() +} + +fn display_with_redacted_credentials( + url: &Url, + f: &mut std::fmt::Formatter<'_>, +) -> std::fmt::Result { + if url.password().is_none() && url.username() == "" { + return write!(f, "{url}"); + } + + // For URLs that use the `git` convention (i.e., `ssh://git@github.com/...`), avoid dropping the + // username. + if is_ssh_git_username(url) { + return write!(f, "{url}"); + } + + write!(f, "{}://", url.scheme())?; + + if url.username() != "" && url.password().is_some() { + write!(f, "{}", url.username())?; + write!(f, ":****@")?; + } else if url.username() != "" { + write!(f, "****@")?; + } else if url.password().is_some() { + write!(f, ":****@")?; + } + + write!(f, "{}", url.host_str().unwrap_or(""))?; + + if let Some(port) = url.port() { + write!(f, ":{port}")?; + } + + write!(f, "{}", url.path())?; + if let Some(query) = url.query() { + write!(f, "?{query}")?; + } + if let Some(fragment) = url.fragment() { + write!(f, "#{fragment}")?; + } + + Ok(()) } #[cfg(test)] @@ -24,49 +230,138 @@ mod tests { #[test] fn from_url_no_credentials() { - let url = Url::parse("https://pypi-proxy.fly.dev/basic-auth/simple").unwrap(); - let redacted = redacted_url(&url); - assert_eq!(redacted.username(), ""); - assert!(redacted.password().is_none()); - assert_eq!( - format!("{redacted}"), - "https://pypi-proxy.fly.dev/basic-auth/simple" - ); + let url_str = "https://pypi-proxy.fly.dev/basic-auth/simple"; + let url = Url::parse(url_str).unwrap(); + let log_safe_url = DisplaySafeUrl::from(url); + assert_eq!(log_safe_url.username(), ""); + assert!(log_safe_url.password().is_none()); + assert_eq!(log_safe_url.to_string(), url_str); } #[test] fn from_url_username_and_password() { - let url = Url::parse("https://user:pass@pypi-proxy.fly.dev/basic-auth/simple").unwrap(); - let redacted = redacted_url(&url); - assert_eq!(redacted.username(), ""); - assert!(redacted.password().is_none()); + let url_str = "https://user:pass@pypi-proxy.fly.dev/basic-auth/simple"; + let url = Url::parse(url_str).unwrap(); + let log_safe_url = DisplaySafeUrl::from(url); + assert_eq!(log_safe_url.username(), "user"); + assert!(log_safe_url.password().is_some_and(|p| p == "pass")); assert_eq!( - format!("{redacted}"), - "https://pypi-proxy.fly.dev/basic-auth/simple" + log_safe_url.to_string(), + "https://user:****@pypi-proxy.fly.dev/basic-auth/simple" ); } #[test] fn 
from_url_just_password() { - let url = Url::parse("https://:pass@pypi-proxy.fly.dev/basic-auth/simple").unwrap(); - let redacted = redacted_url(&url); - assert_eq!(redacted.username(), ""); - assert!(redacted.password().is_none()); + let url_str = "https://:pass@pypi-proxy.fly.dev/basic-auth/simple"; + let url = Url::parse(url_str).unwrap(); + let log_safe_url = DisplaySafeUrl::from(url); + assert_eq!(log_safe_url.username(), ""); + assert!(log_safe_url.password().is_some_and(|p| p == "pass")); assert_eq!( - format!("{redacted}"), - "https://pypi-proxy.fly.dev/basic-auth/simple" + log_safe_url.to_string(), + "https://:****@pypi-proxy.fly.dev/basic-auth/simple" ); } #[test] fn from_url_just_username() { - let url = Url::parse("https://user@pypi-proxy.fly.dev/basic-auth/simple").unwrap(); - let redacted = redacted_url(&url); - assert_eq!(redacted.username(), ""); - assert!(redacted.password().is_none()); + let url_str = "https://user@pypi-proxy.fly.dev/basic-auth/simple"; + let url = Url::parse(url_str).unwrap(); + let log_safe_url = DisplaySafeUrl::from(url); + assert_eq!(log_safe_url.username(), "user"); + assert!(log_safe_url.password().is_none()); assert_eq!( - format!("{redacted}"), + log_safe_url.to_string(), + "https://****@pypi-proxy.fly.dev/basic-auth/simple" + ); + } + + #[test] + fn from_url_git_username() { + let ssh_str = "ssh://git@github.com/org/repo"; + let ssh_url = DisplaySafeUrl::parse(ssh_str).unwrap(); + assert_eq!(ssh_url.username(), "git"); + assert!(ssh_url.password().is_none()); + assert_eq!(ssh_url.to_string(), ssh_str); + // Test again for the `git+ssh` scheme + let git_ssh_str = "git+ssh://git@github.com/org/repo"; + let git_ssh_url = DisplaySafeUrl::parse(git_ssh_str).unwrap(); + assert_eq!(git_ssh_url.username(), "git"); + assert!(git_ssh_url.password().is_none()); + assert_eq!(git_ssh_url.to_string(), git_ssh_str); + } + + #[test] + fn parse_url_string() { + let url_str = "https://user:pass@pypi-proxy.fly.dev/basic-auth/simple"; + let log_safe_url = DisplaySafeUrl::parse(url_str).unwrap(); + assert_eq!(log_safe_url.username(), "user"); + assert!(log_safe_url.password().is_some_and(|p| p == "pass")); + assert_eq!( + log_safe_url.to_string(), + "https://user:****@pypi-proxy.fly.dev/basic-auth/simple" + ); + } + + #[test] + fn remove_credentials() { + let url_str = "https://user:pass@pypi-proxy.fly.dev/basic-auth/simple"; + let mut log_safe_url = DisplaySafeUrl::parse(url_str).unwrap(); + log_safe_url.remove_credentials(); + assert_eq!(log_safe_url.username(), ""); + assert!(log_safe_url.password().is_none()); + assert_eq!( + log_safe_url.to_string(), "https://pypi-proxy.fly.dev/basic-auth/simple" ); } + + #[test] + fn preserve_ssh_git_username_on_remove_credentials() { + let ssh_str = "ssh://git@pypi-proxy.fly.dev/basic-auth/simple"; + let mut ssh_url = DisplaySafeUrl::parse(ssh_str).unwrap(); + ssh_url.remove_credentials(); + assert_eq!(ssh_url.username(), "git"); + assert!(ssh_url.password().is_none()); + assert_eq!(ssh_url.to_string(), ssh_str); + // Test again for `git+ssh` scheme + let git_ssh_str = "git+ssh://git@pypi-proxy.fly.dev/basic-auth/simple"; + let mut git_shh_url = DisplaySafeUrl::parse(git_ssh_str).unwrap(); + git_shh_url.remove_credentials(); + assert_eq!(git_shh_url.username(), "git"); + assert!(git_shh_url.password().is_none()); + assert_eq!(git_shh_url.to_string(), git_ssh_str); + } + + #[test] + fn displayable_with_credentials() { + let url_str = "https://user:pass@pypi-proxy.fly.dev/basic-auth/simple"; + let log_safe_url = 
DisplaySafeUrl::parse(url_str).unwrap(); + assert_eq!( + log_safe_url.displayable_with_credentials().to_string(), + url_str + ); + } + + #[test] + fn url_join() { + let url_str = "https://token@example.com/abc/"; + let log_safe_url = DisplaySafeUrl::parse(url_str).unwrap(); + let foo_url = log_safe_url.join("foo").unwrap(); + assert_eq!(foo_url.to_string(), "https://****@example.com/abc/foo"); + } + + #[test] + fn log_safe_url_ref() { + let url_str = "https://user:pass@pypi-proxy.fly.dev/basic-auth/simple"; + let url = Url::parse(url_str).unwrap(); + let log_safe_url = DisplaySafeUrl::ref_cast(&url); + assert_eq!(log_safe_url.username(), "user"); + assert!(log_safe_url.password().is_some_and(|p| p == "pass")); + assert_eq!( + log_safe_url.to_string(), + "https://user:****@pypi-proxy.fly.dev/basic-auth/simple" + ); + } } diff --git a/crates/uv-requirements-txt/Cargo.toml b/crates/uv-requirements-txt/Cargo.toml index 617a76123..f82aa2c1c 100644 --- a/crates/uv-requirements-txt/Cargo.toml +++ b/crates/uv-requirements-txt/Cargo.toml @@ -23,6 +23,7 @@ uv-fs = { workspace = true } uv-normalize = { workspace = true } uv-pep508 = { workspace = true } uv-pypi-types = { workspace = true } +uv-redacted = { workspace = true } uv-warnings = { workspace = true } fs-err = { workspace = true } diff --git a/crates/uv-requirements-txt/src/lib.rs b/crates/uv-requirements-txt/src/lib.rs index 439e6f547..b734bf8a2 100644 --- a/crates/uv-requirements-txt/src/lib.rs +++ b/crates/uv-requirements-txt/src/lib.rs @@ -54,6 +54,8 @@ use uv_distribution_types::{ use uv_fs::Simplified; use uv_pep508::{Pep508Error, RequirementOrigin, VerbatimUrl, expand_env_vars}; use uv_pypi_types::VerbatimParsedUrl; +#[cfg(feature = "http")] +use uv_redacted::DisplaySafeUrl; use crate::requirement::EditableError; pub use crate::requirement::RequirementsTxtRequirement; @@ -949,11 +951,11 @@ async fn read_url_to_string( url: path.as_ref().to_owned(), })?; - let url = Url::from_str(path_utf8) + let url = DisplaySafeUrl::from_str(path_utf8) .map_err(|err| RequirementsTxtParserError::InvalidUrl(path_utf8.to_string(), err))?; let response = client .for_host(&url) - .get(url.clone()) + .get(Url::from(url.clone())) .send() .await .map_err(|err| RequirementsTxtParserError::from_reqwest_middleware(url.clone(), err))?; @@ -1047,7 +1049,7 @@ pub enum RequirementsTxtParserError { url: PathBuf, }, #[cfg(feature = "http")] - Reqwest(Url, reqwest_middleware::Error), + Reqwest(DisplaySafeUrl, reqwest_middleware::Error), #[cfg(feature = "http")] InvalidUrl(String, url::ParseError), } @@ -1301,11 +1303,11 @@ impl From for RequirementsTxtParserError { #[cfg(feature = "http")] impl RequirementsTxtParserError { - fn from_reqwest(url: Url, err: reqwest::Error) -> Self { + fn from_reqwest(url: DisplaySafeUrl, err: reqwest::Error) -> Self { Self::Reqwest(url, reqwest_middleware::Error::Reqwest(err)) } - fn from_reqwest_middleware(url: Url, err: reqwest_middleware::Error) -> Self { + fn from_reqwest_middleware(url: DisplaySafeUrl, err: reqwest_middleware::Error) -> Self { Self::Reqwest(url, err) } } @@ -2039,7 +2041,7 @@ mod test { insta::with_settings!({ filters => path_filters(&path_filter(temp_dir.path())), }, { - insta::assert_debug_snapshot!(requirements, @r###" + insta::assert_debug_snapshot!(requirements, @r#" RequirementsTxt { requirements: [], constraints: [], @@ -2050,7 +2052,7 @@ mod test { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", 
@@ -2067,7 +2069,7 @@ mod test { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -2102,7 +2104,7 @@ mod test { no_binary: None, only_binary: None, } - "###); + "#); }); Ok(()) @@ -2187,7 +2189,7 @@ mod test { insta::with_settings!({ filters => path_filters(&path_filter(temp_dir.path())), }, { - insta::assert_debug_snapshot!(requirements, @r###" + insta::assert_debug_snapshot!(requirements, @r#" RequirementsTxt { requirements: [ RequirementEntry { @@ -2333,7 +2335,7 @@ mod test { editables: [], index_url: Some( VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -2359,7 +2361,7 @@ mod test { no_binary: All, only_binary: None, } - "###); + "#); }); Ok(()) @@ -2402,7 +2404,7 @@ mod test { insta::with_settings!({ filters => path_filters(&path_filter(temp_dir.path())), }, { - insta::assert_debug_snapshot!(requirements, @r###" + insta::assert_debug_snapshot!(requirements, @r#" RequirementsTxt { requirements: [ RequirementEntry { @@ -2411,7 +2413,7 @@ mod test { url: VerbatimParsedUrl { parsed_url: Path( ParsedPathUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -2427,7 +2429,7 @@ mod test { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -2460,7 +2462,7 @@ mod test { url: VerbatimParsedUrl { parsed_url: Path( ParsedPathUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -2476,7 +2478,7 @@ mod test { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -2509,7 +2511,7 @@ mod test { url: VerbatimParsedUrl { parsed_url: Path( ParsedPathUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -2525,7 +2527,7 @@ mod test { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -2562,7 +2564,7 @@ mod test { url: VerbatimParsedUrl { parsed_url: Path( ParsedPathUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -2578,7 +2580,7 @@ mod test { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -2611,7 +2613,7 @@ mod test { url: VerbatimParsedUrl { parsed_url: Path( ParsedPathUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -2627,7 +2629,7 @@ mod test { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -2660,7 +2662,7 @@ mod test { url: VerbatimParsedUrl { parsed_url: Path( ParsedPathUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -2676,7 +2678,7 @@ mod test { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -2717,7 +2719,7 @@ mod test { no_binary: None, only_binary: None, } - "###); + "#); }); Ok(()) diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__line-endings-poetry-with-hashes.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__line-endings-poetry-with-hashes.txt.snap index ad5e2a0e6..e13ab75b7 100644 --- 
a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__line-endings-poetry-with-hashes.txt.snap +++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__line-endings-poetry-with-hashes.txt.snap @@ -1,6 +1,7 @@ --- source: crates/uv-requirements-txt/src/lib.rs expression: actual +snapshot_kind: text --- RequirementsTxt { requirements: [ @@ -23,7 +24,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( "/poetry-with-hashes.txt", @@ -54,7 +55,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( "/poetry-with-hashes.txt", @@ -85,7 +86,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0' and sys_platform == 'win32', + marker: python_full_version >= '3.8' and python_full_version < '4' and sys_platform == 'win32', origin: Some( File( "/poetry-with-hashes.txt", @@ -116,7 +117,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( "/poetry-with-hashes.txt", @@ -148,7 +149,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( "/poetry-with-hashes.txt", diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__line-endings-whitespace.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__line-endings-whitespace.txt.snap index b59788026..45b1cc43f 100644 --- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__line-endings-whitespace.txt.snap +++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__line-endings-whitespace.txt.snap @@ -39,7 +39,7 @@ RequirementsTxt { parsed_url: Git( ParsedGitUrl { url: GitUrl { - repository: Url { + repository: DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -61,7 +61,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "git+https", cannot_be_a_base: false, username: "", diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-poetry-with-hashes.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-poetry-with-hashes.txt.snap index ad5e2a0e6..e13ab75b7 100644 --- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-poetry-with-hashes.txt.snap +++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-poetry-with-hashes.txt.snap @@ -1,6 +1,7 @@ --- source: crates/uv-requirements-txt/src/lib.rs expression: actual +snapshot_kind: text --- RequirementsTxt { requirements: [ @@ -23,7 +24,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( "/poetry-with-hashes.txt", @@ -54,7 +55,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( "/poetry-with-hashes.txt", @@ -85,7 +86,7 @@ RequirementsTxt { ), ), ), - marker: 
python_full_version >= '3.8' and python_full_version < '4.0' and sys_platform == 'win32', + marker: python_full_version >= '3.8' and python_full_version < '4' and sys_platform == 'win32', origin: Some( File( "/poetry-with-hashes.txt", @@ -116,7 +117,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( "/poetry-with-hashes.txt", @@ -148,7 +149,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( "/poetry-with-hashes.txt", diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-bare-url.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-bare-url.txt.snap index b0ebd7157..f2187a1a2 100644 --- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-bare-url.txt.snap +++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-bare-url.txt.snap @@ -10,7 +10,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -27,7 +27,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -60,7 +60,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -77,7 +77,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -114,7 +114,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -131,7 +131,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -164,7 +164,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -181,7 +181,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -214,7 +214,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -231,7 +231,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -264,7 +264,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -281,7 +281,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-editable.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-editable.txt.snap index 86f25edf6..222ab6b10 100644 --- 
a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-editable.txt.snap +++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-unix-editable.txt.snap @@ -12,7 +12,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -29,7 +29,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -69,7 +69,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -86,7 +86,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -126,7 +126,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -143,7 +143,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -183,7 +183,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -200,7 +200,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -240,7 +240,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -257,7 +257,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -290,7 +290,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -307,7 +307,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -340,7 +340,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -357,7 +357,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -390,7 +390,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -407,7 +407,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-whitespace.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-whitespace.txt.snap index b59788026..45b1cc43f 100644 --- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-whitespace.txt.snap +++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-whitespace.txt.snap @@ -39,7 +39,7 @@ RequirementsTxt { parsed_url: 
Git( ParsedGitUrl { url: GitUrl { - repository: Url { + repository: DisplaySafeUrl { scheme: "https", cannot_be_a_base: false, username: "", @@ -61,7 +61,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "git+https", cannot_be_a_base: false, username: "", diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-bare-url.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-bare-url.txt.snap index 6f3410ddd..72e1c8635 100644 --- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-bare-url.txt.snap +++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-bare-url.txt.snap @@ -1,5 +1,5 @@ --- -source: crates/requirements-txt/src/lib.rs +source: crates/uv-requirements-txt/src/lib.rs expression: actual --- RequirementsTxt { @@ -10,7 +10,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -27,7 +27,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -60,7 +60,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -77,7 +77,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -114,7 +114,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -131,7 +131,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -164,7 +164,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -181,7 +181,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -214,7 +214,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -231,7 +231,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -264,7 +264,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -281,7 +281,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-editable.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-editable.txt.snap index 12909f7e8..84ae22816 100644 --- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-editable.txt.snap +++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-windows-editable.txt.snap @@ -12,7 +12,7 @@ 
RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -29,7 +29,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -69,7 +69,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -86,7 +86,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -126,7 +126,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -143,7 +143,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -183,7 +183,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -200,7 +200,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -240,7 +240,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -257,7 +257,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -290,7 +290,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -307,7 +307,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -340,7 +340,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -357,7 +357,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -390,7 +390,7 @@ RequirementsTxt { url: VerbatimParsedUrl { parsed_url: Directory( ParsedDirectoryUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", @@ -407,7 +407,7 @@ RequirementsTxt { }, ), verbatim: VerbatimUrl { - url: Url { + url: DisplaySafeUrl { scheme: "file", cannot_be_a_base: false, username: "", diff --git a/crates/uv-requirements/Cargo.toml b/crates/uv-requirements/Cargo.toml index d3fbf929b..25da28b7a 100644 --- a/crates/uv-requirements/Cargo.toml +++ b/crates/uv-requirements/Cargo.toml @@ -28,6 +28,7 @@ uv-git = { workspace = true } uv-normalize = { workspace = true } uv-pep508 = { workspace = true } uv-pypi-types = { workspace = true } +uv-redacted = { workspace = true } uv-requirements-txt = { workspace = true, features = ["http"] } uv-resolver = { workspace = true, features = ["clap"] } uv-types = { workspace = true } diff --git a/crates/uv-requirements/src/lib.rs b/crates/uv-requirements/src/lib.rs index 812f9141f..68fe84abc 100644 --- a/crates/uv-requirements/src/lib.rs +++ 
b/crates/uv-requirements/src/lib.rs @@ -31,6 +31,9 @@ pub enum Error { #[error(transparent)] WheelFilename(#[from] uv_distribution_filename::WheelFilenameError), + #[error("Failed to construct HTTP client")] + ClientError(#[source] anyhow::Error), + #[error(transparent)] Io(#[from] std::io::Error), } diff --git a/crates/uv-requirements/src/source_tree.rs b/crates/uv-requirements/src/source_tree.rs index 4e7ab2b47..39fbe453b 100644 --- a/crates/uv-requirements/src/source_tree.rs +++ b/crates/uv-requirements/src/source_tree.rs @@ -1,13 +1,13 @@ -use std::path::{Path, PathBuf}; +use std::borrow::Cow; +use std::path::Path; use std::sync::Arc; -use std::{borrow::Cow, collections::BTreeMap}; use anyhow::{Context, Result}; use futures::TryStreamExt; use futures::stream::FuturesOrdered; use url::Url; -use uv_configuration::{DependencyGroups, ExtrasSpecification}; +use uv_configuration::ExtrasSpecification; use uv_distribution::{DistributionDatabase, FlatRequiresDist, Reporter, RequiresDist}; use uv_distribution_types::Requirement; use uv_distribution_types::{ @@ -16,6 +16,7 @@ use uv_distribution_types::{ use uv_fs::Simplified; use uv_normalize::{ExtraName, PackageName}; use uv_pep508::RequirementOrigin; +use uv_redacted::DisplaySafeUrl; use uv_resolver::{InMemoryIndex, MetadataResponse}; use uv_types::{BuildContext, HashStrategy}; @@ -36,8 +37,6 @@ pub struct SourceTreeResolution { pub struct SourceTreeResolver<'a, Context: BuildContext> { /// The extras to include when resolving requirements. extras: &'a ExtrasSpecification, - /// The groups to include when resolving requirements. - groups: &'a BTreeMap, /// The hash policy to enforce. hasher: &'a HashStrategy, /// The in-memory index for resolving dependencies. @@ -50,14 +49,12 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> { /// Instantiate a new [`SourceTreeResolver`] for a given set of `source_trees`. pub fn new( extras: &'a ExtrasSpecification, - groups: &'a BTreeMap, hasher: &'a HashStrategy, index: &'a InMemoryIndex, database: DistributionDatabase<'a, Context>, ) -> Self { Self { extras, - groups, hasher, index, database, @@ -100,46 +97,17 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> { let mut requirements = Vec::new(); - // Resolve any groups associated with this path - let default_groups = DependencyGroups::default(); - let groups = self.groups.get(path).unwrap_or(&default_groups); - // Flatten any transitive extras and include dependencies // (unless something like --only-group was passed) - if groups.prod() { - requirements.extend( - FlatRequiresDist::from_requirements(metadata.requires_dist, &metadata.name) - .into_iter() - .map(|requirement| Requirement { - origin: Some(origin.clone()), - marker: requirement.marker.simplify_extras(&extras), - ..requirement - }), - ); - } - - // Apply dependency-groups - for (group_name, group) in &metadata.dependency_groups { - if groups.contains(group_name) { - requirements.extend(group.iter().cloned().map(|group| Requirement { - origin: Some(RequirementOrigin::Group( - path.to_path_buf(), - metadata.name.clone(), - group_name.clone(), - )), - ..group - })); - } - } - // Complain if dependency groups are named that don't appear. 
- for name in groups.explicit_names() { - if !metadata.dependency_groups.contains_key(name) { - return Err(anyhow::anyhow!( - "The dependency group '{name}' was not found in the project: {}", - path.user_display() - )); - } - } + requirements.extend( + FlatRequiresDist::from_requirements(metadata.requires_dist, &metadata.name) + .into_iter() + .map(|requirement| Requirement { + origin: Some(origin.clone()), + marker: requirement.marker.simplify_extras(&extras), + ..requirement + }), + ); let requirements = requirements.into_boxed_slice(); let project = metadata.name; @@ -180,7 +148,7 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> { return Ok(metadata); } - let Ok(url) = Url::from_directory_path(source_tree) else { + let Ok(url) = Url::from_directory_path(source_tree).map(DisplaySafeUrl::from) else { return Err(anyhow::anyhow!("Failed to convert path to URL")); }; let source = SourceUrl::Directory(DirectorySourceUrl { diff --git a/crates/uv-requirements/src/specification.rs b/crates/uv-requirements/src/specification.rs index a0b122de8..4c5741392 100644 --- a/crates/uv-requirements/src/specification.rs +++ b/crates/uv-requirements/src/specification.rs @@ -290,52 +290,18 @@ impl RequirementsSpecification { if !groups.is_empty() { let mut group_specs = BTreeMap::new(); for (path, groups) in groups { - // Conceptually pip `--group` flags just add the group referred to by the file. - // In uv semantics this would be like `--only-group`, however if you do this: - // - // uv pip install -r pyproject.toml --group pyproject.toml:foo - // - // We don't want to discard the package listed by `-r` in the way `--only-group` - // would. So we check to see if any other source wants to add this path, and use - // that to determine if we're doing `--group` or `--only-group` semantics. - // - // Note that it's fine if a file gets referred to multiple times by - // different-looking paths (like `./pyproject.toml` vs `pyproject.toml`). We're - // specifically trying to disambiguate in situations where the `--group` *happens* - // to match with an unrelated argument, and `--only-group` would be overzealous! - let source_exists_without_group = requirement_sources - .iter() - .any(|source| source.source_trees.contains(&path)); - let (group, only_group) = if source_exists_without_group { - (groups, Vec::new()) - } else { - (Vec::new(), groups) - }; let group_spec = DependencyGroups::from_args( false, false, false, - group, + Vec::new(), Vec::new(), false, - only_group, + groups, false, ); - - // If we're doing `--only-group` semantics it's because only `--group` flags referred - // to this file, and so we need to make sure to add it to the list of sources! 
- if !source_exists_without_group { - let source = Self::from_source( - &RequirementsSource::PyprojectToml(path.clone()), - client_builder, - ) - .await?; - requirement_sources.push(source); - } - group_specs.insert(path, group_spec); } - spec.groups = group_specs; } diff --git a/crates/uv-resolver/Cargo.toml b/crates/uv-resolver/Cargo.toml index 79645906a..715dacab8 100644 --- a/crates/uv-resolver/Cargo.toml +++ b/crates/uv-resolver/Cargo.toml @@ -33,6 +33,7 @@ uv-pep508 = { workspace = true } uv-platform-tags = { workspace = true } uv-pypi-types = { workspace = true } uv-python = { workspace = true } +uv-redacted = { workspace = true } uv-requirements-txt = { workspace = true } uv-small-str = { workspace = true } uv-static = { workspace = true } diff --git a/crates/uv-resolver/src/candidate_selector.rs b/crates/uv-resolver/src/candidate_selector.rs index b0fe74409..e03302966 100644 --- a/crates/uv-resolver/src/candidate_selector.rs +++ b/crates/uv-resolver/src/candidate_selector.rs @@ -13,10 +13,9 @@ use uv_normalize::PackageName; use uv_pep440::Version; use uv_types::InstalledPackagesProvider; -use crate::preferences::{Entry, Preferences}; +use crate::preferences::{Entry, PreferenceSource, Preferences}; use crate::prerelease::{AllowPrerelease, PrereleaseStrategy}; use crate::resolution_mode::ResolutionStrategy; -use crate::universal_marker::UniversalMarker; use crate::version_map::{VersionMap, VersionMapDistHandle}; use crate::{Exclusions, Manifest, Options, ResolverEnvironment}; @@ -188,7 +187,7 @@ impl CandidateSelector { if index.is_some_and(|index| !entry.index().matches(index)) { return None; } - Either::Left(std::iter::once((entry.marker(), entry.pin().version()))) + Either::Left(std::iter::once((entry.pin().version(), entry.source()))) } [..] => { type Entries<'a> = SmallVec<[&'a Entry; 3]>; @@ -219,7 +218,7 @@ impl CandidateSelector { Either::Right( preferences .into_iter() - .map(|entry| (entry.marker(), entry.pin().version())), + .map(|entry| (entry.pin().version(), entry.source())), ) } }; @@ -238,7 +237,7 @@ impl CandidateSelector { /// Return the first preference that satisfies the current range and is allowed. fn get_preferred_from_iter<'a, InstalledPackages: InstalledPackagesProvider>( &'a self, - preferences: impl Iterator, + preferences: impl Iterator, package_name: &'a PackageName, range: &Range, version_maps: &'a [VersionMap], @@ -246,7 +245,7 @@ impl CandidateSelector { reinstall: bool, env: &ResolverEnvironment, ) -> Option> { - for (marker, version) in preferences { + for (version, source) in preferences { // Respect the version range for this requirement. if !range.contains(version) { continue; @@ -290,9 +289,14 @@ impl CandidateSelector { let allow = match self.prerelease_strategy.allows(package_name, env) { AllowPrerelease::Yes => true, AllowPrerelease::No => false, - // If the pre-release is "global" (i.e., provided via a lockfile, rather than - // a fork), accept it unless pre-releases are completely banned. - AllowPrerelease::IfNecessary => marker.is_true(), + // If the pre-release was provided via an existing file, rather than from the + // current solve, accept it unless pre-releases are completely banned. 
+ AllowPrerelease::IfNecessary => match source { + PreferenceSource::Resolver => false, + PreferenceSource::Lock + | PreferenceSource::Environment + | PreferenceSource::RequirementsTxt => true, + }, }; if !allow { continue; diff --git a/crates/uv-resolver/src/error.rs b/crates/uv-resolver/src/error.rs index e3bb9cf23..0916f54ac 100644 --- a/crates/uv-resolver/src/error.rs +++ b/crates/uv-resolver/src/error.rs @@ -3,6 +3,8 @@ use std::fmt::Formatter; use std::sync::Arc; use indexmap::IndexSet; +use itertools::Itertools; +use owo_colors::OwoColorize; use pubgrub::{ DefaultStringReporter, DerivationTree, Derived, External, Range, Ranges, Reporter, Term, }; @@ -13,8 +15,11 @@ use uv_distribution_types::{ DerivationChain, DistErrorKind, IndexCapabilities, IndexLocations, IndexUrl, RequestedDist, }; use uv_normalize::{ExtraName, InvalidNameError, PackageName}; -use uv_pep440::{LocalVersionSlice, LowerBound, Version}; +use uv_pep440::{LocalVersionSlice, LowerBound, Version, VersionSpecifier}; +use uv_pep508::{MarkerEnvironment, MarkerExpression, MarkerTree, MarkerValueVersion}; use uv_platform_tags::Tags; +use uv_pypi_types::ParsedUrl; +use uv_redacted::DisplaySafeUrl; use uv_static::EnvVars; use crate::candidate_selector::CandidateSelector; @@ -54,11 +59,14 @@ pub enum ResolveError { } else { format!(" in {env}") }, - urls.join("\n- "), + urls.iter() + .map(|url| format!("{}{}", DisplaySafeUrl::from(url.clone()), if url.is_editable() { " (editable)" } else { "" })) + .collect::>() + .join("\n- ") )] ConflictingUrls { package_name: PackageName, - urls: Vec, + urls: Vec, env: ResolverEnvironment, }, @@ -69,11 +77,14 @@ pub enum ResolveError { } else { format!(" in {env}") }, - indexes.join("\n- "), + indexes.iter() + .map(std::string::ToString::to_string) + .collect::>() + .join("\n- ") )] ConflictingIndexesForEnvironment { package_name: PackageName, - indexes: Vec, + indexes: Vec, env: ResolverEnvironment, }, @@ -146,7 +157,7 @@ impl From> for ResolveError { } } -pub(crate) type ErrorTree = DerivationTree, UnavailableReason>; +pub type ErrorTree = DerivationTree, UnavailableReason>; /// A wrapper around [`pubgrub::error::NoSolutionError`] that displays a resolution failure report. pub struct NoSolutionError { @@ -163,6 +174,7 @@ pub struct NoSolutionError { fork_urls: ForkUrls, fork_indexes: ForkIndexes, env: ResolverEnvironment, + current_environment: MarkerEnvironment, tags: Option, workspace_members: BTreeSet, options: Options, @@ -184,6 +196,7 @@ impl NoSolutionError { fork_urls: ForkUrls, fork_indexes: ForkIndexes, env: ResolverEnvironment, + current_environment: MarkerEnvironment, tags: Option, workspace_members: BTreeSet, options: Options, @@ -202,6 +215,7 @@ impl NoSolutionError { fork_urls, fork_indexes, env, + current_environment, tags, workspace_members, options, @@ -353,6 +367,58 @@ impl NoSolutionError { pub fn header(&self) -> NoSolutionHeader { NoSolutionHeader::new(self.env.clone()) } + + /// Get the conflict derivation tree for external analysis + pub fn derivation_tree(&self) -> &ErrorTree { + &self.error + } + + /// Hint at limiting the resolver environment if universal resolution failed for a target + /// that is not the current platform or not the current Python version. + fn hint_disjoint_targets(&self, f: &mut Formatter) -> std::fmt::Result { + // Only applicable to universal resolution. 
+ let Some(markers) = self.env.fork_markers() else { + return Ok(()); + }; + + // TODO(konsti): This is a crude approximation to telling the user the difference + // between their Python version and the relevant Python version range from the marker. + let current_python_version = self.current_environment.python_version().version.clone(); + let current_python_marker = MarkerTree::expression(MarkerExpression::Version { + key: MarkerValueVersion::PythonVersion, + specifier: VersionSpecifier::equals_version(current_python_version.clone()), + }); + if markers.is_disjoint(current_python_marker) { + write!( + f, + "\n\n{}{} While the active Python version is {}, \ + the resolution failed for other Python versions supported by your \ + project. Consider limiting your project's supported Python versions \ + using `requires-python`.", + "hint".bold().cyan(), + ":".bold(), + current_python_version, + )?; + } else if !markers.evaluate(&self.current_environment, &[]) { + write!( + f, + "\n\n{}{} The resolution failed for an environment that is not the current one, \ + consider limiting the environments with `tool.uv.environments`.", + "hint".bold().cyan(), + ":".bold(), + )?; + } + Ok(()) + } + + /// Get the packages that are involved in this error. + pub fn packages(&self) -> impl Iterator { + self.error + .packages() + .into_iter() + .filter_map(|p| p.name()) + .unique() + } } impl std::fmt::Debug for NoSolutionError { @@ -372,6 +438,7 @@ impl std::fmt::Debug for NoSolutionError { fork_urls, fork_indexes, env, + current_environment, tags, workspace_members, options, @@ -389,6 +456,7 @@ impl std::fmt::Debug for NoSolutionError { .field("fork_urls", fork_urls) .field("fork_indexes", fork_indexes) .field("env", env) + .field("current_environment", current_environment) .field("tags", tags) .field("workspace_members", workspace_members) .field("options", options) @@ -473,6 +541,8 @@ impl std::fmt::Display for NoSolutionError { write!(f, "\n\n{hint}")?; } + self.hint_disjoint_targets(f)?; + Ok(()) } } @@ -1166,6 +1236,69 @@ impl SentinelRange<'_> { } } +/// A prefix match, e.g., `==2.4.*`, which is desugared to a range like `>=2.4.dev0,<2.5.dev0`. +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct PrefixMatch<'a> { + version: &'a Version, +} + +impl<'a> PrefixMatch<'a> { + /// Determine whether a given range is equivalent to a prefix match (e.g., `==2.4.*`). + /// + /// Prefix matches are desugared to (e.g.) `>=2.4.dev0,<2.5.dev0`, but we want to render them + /// as `==2.4.*` in error messages. + pub(crate) fn from_range(lower: &'a Bound, upper: &'a Bound) -> Option { + let Bound::Included(lower) = lower else { + return None; + }; + let Bound::Excluded(upper) = upper else { + return None; + }; + if lower.is_pre() || lower.is_post() || lower.is_local() { + return None; + } + if upper.is_pre() || upper.is_post() || upper.is_local() { + return None; + } + if lower.dev() != Some(0) { + return None; + } + if upper.dev() != Some(0) { + return None; + } + if lower.release().len() != upper.release().len() { + return None; + } + + // All segments should be the same, except the last one, which should be incremented. 
+ let num_segments = lower.release().len(); + for (i, (lower, upper)) in lower + .release() + .iter() + .zip(upper.release().iter()) + .enumerate() + { + if i == num_segments - 1 { + if lower + 1 != *upper { + return None; + } + } else { + if lower != upper { + return None; + } + } + } + + Some(PrefixMatch { version: lower }) + } +} + +impl std::fmt::Display for PrefixMatch<'_> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "=={}.*", self.version.only_release()) + } +} + #[derive(Debug)] pub struct NoSolutionHeader { /// The [`ResolverEnvironment`] that caused the failure. diff --git a/crates/uv-resolver/src/exclude_newer.rs b/crates/uv-resolver/src/exclude_newer.rs index 40ec009f8..65fa55cfe 100644 --- a/crates/uv-resolver/src/exclude_newer.rs +++ b/crates/uv-resolver/src/exclude_newer.rs @@ -1,3 +1,5 @@ +#[cfg(feature = "schemars")] +use std::borrow::Cow; use std::str::FromStr; use jiff::{Timestamp, ToSpan, tz::TimeZone}; @@ -67,25 +69,15 @@ impl std::fmt::Display for ExcludeNewer { #[cfg(feature = "schemars")] impl schemars::JsonSchema for ExcludeNewer { - fn schema_name() -> String { - "ExcludeNewer".to_string() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("ExcludeNewer") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - schemars::schema::SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - string: Some(Box::new(schemars::schema::StringValidation { - pattern: Some( - r"^\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2}))?$".to_string(), - ), - ..schemars::schema::StringValidation::default() - })), - metadata: Some(Box::new(schemars::schema::Metadata { - description: Some("Exclude distributions uploaded after the given timestamp.\n\nAccepts both RFC 3339 timestamps (e.g., `2006-12-02T02:07:43Z`) and local dates in the same format (e.g., `2006-12-02`).".to_string()), - ..schemars::schema::Metadata::default() - })), - ..schemars::schema::SchemaObject::default() - } - .into() + fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "string", + "pattern": r"^\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2}))?$", + "description": "Exclude distributions uploaded after the given timestamp.\n\nAccepts both RFC 3339 timestamps (e.g., `2006-12-02T02:07:43Z`) and local dates in the same format (e.g., `2006-12-02`).", + }) } } diff --git a/crates/uv-resolver/src/fork_indexes.rs b/crates/uv-resolver/src/fork_indexes.rs index 5b39fb626..7283b5cbc 100644 --- a/crates/uv-resolver/src/fork_indexes.rs +++ b/crates/uv-resolver/src/fork_indexes.rs @@ -24,7 +24,7 @@ impl ForkIndexes { ) -> Result<(), ResolveError> { if let Some(previous) = self.0.insert(package_name.clone(), index.clone()) { if &previous != index { - let mut conflicts = vec![previous.url.to_string(), index.url.to_string()]; + let mut conflicts = vec![previous.url, index.url.clone()]; conflicts.sort(); return Err(ResolveError::ConflictingIndexesForEnvironment { package_name: package_name.clone(), diff --git a/crates/uv-resolver/src/fork_urls.rs b/crates/uv-resolver/src/fork_urls.rs index dc1b067c4..dd69f7bf7 100644 --- a/crates/uv-resolver/src/fork_urls.rs +++ b/crates/uv-resolver/src/fork_urls.rs @@ -2,7 +2,6 @@ use std::collections::hash_map::Entry; use rustc_hash::FxHashMap; -use uv_distribution_types::Verbatim; use uv_normalize::PackageName; use uv_pypi_types::VerbatimParsedUrl; @@ -34,10 +33,8 @@ impl ForkUrls { match 
self.0.entry(package_name.clone()) { Entry::Occupied(previous) => { if previous.get() != url { - let mut conflicting_url = vec![ - previous.get().verbatim.verbatim().to_string(), - url.verbatim.verbatim().to_string(), - ]; + let mut conflicting_url = + vec![previous.get().parsed_url.clone(), url.parsed_url.clone()]; conflicting_url.sort(); return Err(ResolveError::ConflictingUrls { package_name: package_name.clone(), diff --git a/crates/uv-resolver/src/lib.rs b/crates/uv-resolver/src/lib.rs index 3285f9a6a..e91df3a7e 100644 --- a/crates/uv-resolver/src/lib.rs +++ b/crates/uv-resolver/src/lib.rs @@ -1,5 +1,5 @@ pub use dependency_mode::DependencyMode; -pub use error::{NoSolutionError, NoSolutionHeader, ResolveError, SentinelRange}; +pub use error::{ErrorTree, NoSolutionError, NoSolutionHeader, ResolveError, SentinelRange}; pub use exclude_newer::ExcludeNewer; pub use exclusions::Exclusions; pub use flat_index::{FlatDistributions, FlatIndex}; @@ -14,7 +14,6 @@ pub use options::{Flexibility, Options, OptionsBuilder}; pub use preferences::{Preference, PreferenceError, Preferences}; pub use prerelease::PrereleaseMode; pub use python_requirement::PythonRequirement; -pub use requires_python::{RequiresPython, RequiresPythonRange}; pub use resolution::{ AnnotationStyle, ConflictingDistributionError, DisplayResolutionGraph, ResolverOutput, }; @@ -55,10 +54,9 @@ mod options; mod pins; mod preferences; mod prerelease; -mod pubgrub; +pub mod pubgrub; mod python_requirement; mod redirect; -mod requires_python; mod resolution; mod resolution_mode; mod resolver; diff --git a/crates/uv-resolver/src/lock/export/pylock_toml.rs b/crates/uv-resolver/src/lock/export/pylock_toml.rs index 1ab2ce152..d2c2383a5 100644 --- a/crates/uv-resolver/src/lock/export/pylock_toml.rs +++ b/crates/uv-resolver/src/lock/export/pylock_toml.rs @@ -13,7 +13,8 @@ use url::Url; use uv_cache_key::RepositoryUrl; use uv_configuration::{ - BuildOptions, DependencyGroupsWithDefaults, ExtrasSpecificationWithDefaults, InstallOptions, + BuildOptions, DependencyGroupsWithDefaults, EditableMode, ExtrasSpecificationWithDefaults, + InstallOptions, }; use uv_distribution_filename::{ BuildTag, DistExtension, ExtensionError, SourceDistExtension, SourceDistFilename, @@ -22,8 +23,8 @@ use uv_distribution_filename::{ use uv_distribution_types::{ BuiltDist, DirectUrlBuiltDist, DirectUrlSourceDist, DirectorySourceDist, Dist, Edge, FileLocation, GitSourceDist, IndexUrl, Name, Node, PathBuiltDist, PathSourceDist, - RegistryBuiltDist, RegistryBuiltWheel, RegistrySourceDist, RemoteSource, Resolution, - ResolvedDist, SourceDist, ToUrlError, UrlString, + RegistryBuiltDist, RegistryBuiltWheel, RegistrySourceDist, RemoteSource, RequiresPython, + Resolution, ResolvedDist, SourceDist, ToUrlError, UrlString, }; use uv_fs::{PortablePathBuf, relative_to}; use uv_git::{RepositoryReference, ResolvedRepositoryReference}; @@ -33,12 +34,13 @@ use uv_pep440::Version; use uv_pep508::{MarkerEnvironment, MarkerTree, VerbatimUrl}; use uv_platform_tags::{TagCompatibility, TagPriority, Tags}; use uv_pypi_types::{HashDigests, Hashes, ParsedGitUrl, VcsKind}; +use uv_redacted::DisplaySafeUrl; use uv_small_str::SmallString; use crate::lock::export::ExportableRequirements; use crate::lock::{Source, WheelTagHint, each_element_on_its_line_array}; use crate::resolution::ResolutionGraphNode; -use crate::{Installable, LockError, RequiresPython, ResolverOutput}; +use crate::{Installable, LockError, ResolverOutput}; #[derive(Debug, thiserror::Error)] pub enum PylockTomlErrorKind { @@ -93,7 
+95,7 @@ pub enum PylockTomlErrorKind { #[error("`packages.vcs` entry for `{0}` must have a `url` or `path`")] VcsMissingPathUrl(PackageName), #[error("URL must end in a valid wheel filename: `{0}`")] - UrlMissingFilename(Url), + UrlMissingFilename(DisplaySafeUrl), #[error("Path must end in a valid wheel filename: `{0}`")] PathMissingFilename(Box), #[error("Failed to convert path to URL")] @@ -204,7 +206,7 @@ pub struct PylockTomlPackage { #[serde(skip_serializing_if = "Option::is_none")] pub version: Option, #[serde(skip_serializing_if = "Option::is_none")] - pub index: Option, + pub index: Option, #[serde( skip_serializing_if = "uv_pep508::marker::ser::is_empty", serialize_with = "uv_pep508::marker::ser::serialize", @@ -247,7 +249,7 @@ struct PylockTomlDirectory { struct PylockTomlVcs { r#type: VcsKind, #[serde(skip_serializing_if = "Option::is_none")] - url: Option, + url: Option, #[serde(skip_serializing_if = "Option::is_none")] path: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -261,7 +263,7 @@ struct PylockTomlVcs { #[serde(rename_all = "kebab-case")] struct PylockTomlArchive { #[serde(skip_serializing_if = "Option::is_none")] - url: Option, + url: Option, #[serde(skip_serializing_if = "Option::is_none")] path: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -284,7 +286,7 @@ struct PylockTomlSdist { #[serde(skip_serializing_if = "Option::is_none")] name: Option, #[serde(skip_serializing_if = "Option::is_none")] - url: Option, + url: Option, #[serde(skip_serializing_if = "Option::is_none")] path: Option, #[serde( @@ -305,7 +307,7 @@ struct PylockTomlWheel { #[serde(skip_serializing_if = "Option::is_none")] name: Option, #[serde(skip_serializing_if = "Option::is_none")] - url: Option, + url: Option, #[serde(skip_serializing_if = "Option::is_none")] path: Option, #[serde( @@ -618,6 +620,7 @@ impl<'lock> PylockToml { extras: &ExtrasSpecificationWithDefaults, dev: &DependencyGroupsWithDefaults, annotate: bool, + editable: EditableMode, install_options: &'lock InstallOptions, ) -> Result { // Extract the packages from the lock file. @@ -732,7 +735,10 @@ impl<'lock> PylockToml { .unwrap_or_else(|_| sdist.install_path.to_path_buf()) .into_boxed_path(), ), - editable: Some(sdist.editable), + editable: match editable { + EditableMode::NonEditable => None, + EditableMode::Editable => Some(sdist.editable), + }, subdirectory: None, }), _ => None, @@ -1324,7 +1330,7 @@ impl PylockTomlWheel { &self, install_path: &Path, name: &PackageName, - index: Option<&Url>, + index: Option<&DisplaySafeUrl>, ) -> Result { let filename = self.filename(name)?.into_owned(); @@ -1332,7 +1338,8 @@ impl PylockTomlWheel { UrlString::from(url) } else if let Some(path) = self.path.as_ref() { let path = install_path.join(path); - let url = Url::from_file_path(path).map_err(|()| PylockTomlErrorKind::PathToUrl)?; + let url = DisplaySafeUrl::from_file_path(path) + .map_err(|()| PylockTomlErrorKind::PathToUrl)?; UrlString::from(url) } else { return Err(PylockTomlErrorKind::WheelMissingPathUrl(name.clone())); @@ -1408,8 +1415,10 @@ impl PylockTomlVcs { let mut url = if let Some(url) = self.url.as_ref() { url.clone() } else if let Some(path) = self.path.as_ref() { - Url::from_directory_path(install_path.join(path)) - .map_err(|()| PylockTomlErrorKind::PathToUrl)? 
+ DisplaySafeUrl::from( + Url::from_directory_path(install_path.join(path)) + .map_err(|()| PylockTomlErrorKind::PathToUrl)?, + ) } else { return Err(PylockTomlErrorKind::VcsMissingPathUrl(name.clone())); }; @@ -1427,7 +1436,7 @@ impl PylockTomlVcs { }; // Reconstruct the PEP 508-compatible URL from the `GitSource`. - let url = Url::from(ParsedGitUrl { + let url = DisplaySafeUrl::from(ParsedGitUrl { url: git_url.clone(), subdirectory: subdirectory.clone(), }); @@ -1469,7 +1478,7 @@ impl PylockTomlSdist { install_path: &Path, name: &PackageName, version: Option<&Version>, - index: Option<&Url>, + index: Option<&DisplaySafeUrl>, ) -> Result { let filename = self.filename(name)?.into_owned(); let ext = SourceDistExtension::from_path(filename.as_ref())?; @@ -1485,7 +1494,8 @@ impl PylockTomlSdist { UrlString::from(url) } else if let Some(path) = self.path.as_ref() { let path = install_path.join(path); - let url = Url::from_file_path(path).map_err(|()| PylockTomlErrorKind::PathToUrl)?; + let url = DisplaySafeUrl::from_file_path(path) + .map_err(|()| PylockTomlErrorKind::PathToUrl)?; UrlString::from(url) } else { return Err(PylockTomlErrorKind::SdistMissingPathUrl(name.clone())); diff --git a/crates/uv-resolver/src/lock/export/requirements_txt.rs b/crates/uv-resolver/src/lock/export/requirements_txt.rs index fe1e17046..4cca73a34 100644 --- a/crates/uv-resolver/src/lock/export/requirements_txt.rs +++ b/crates/uv-resolver/src/lock/export/requirements_txt.rs @@ -13,6 +13,7 @@ use uv_fs::Simplified; use uv_git_types::GitReference; use uv_normalize::PackageName; use uv_pypi_types::{ParsedArchiveUrl, ParsedGitUrl}; +use uv_redacted::DisplaySafeUrl; use crate::lock::export::{ExportableRequirement, ExportableRequirements}; use crate::lock::{Package, PackageId, Source}; @@ -94,7 +95,7 @@ impl std::fmt::Display for RequirementsTxtExport<'_> { .expect("Internal Git URLs must have supported schemes"); // Reconstruct the PEP 508-compatible URL from the `GitSource`. - let url = Url::from(ParsedGitUrl { + let url = DisplaySafeUrl::from(ParsedGitUrl { url: git_url.clone(), subdirectory: git.subdirectory.clone(), }); @@ -102,12 +103,19 @@ impl std::fmt::Display for RequirementsTxtExport<'_> { write!(f, "{} @ {}", package.id.name, url)?; } Source::Direct(url, direct) => { - let url = Url::from(ParsedArchiveUrl { + let url = DisplaySafeUrl::from(ParsedArchiveUrl { url: url.to_url().map_err(|_| std::fmt::Error)?, subdirectory: direct.subdirectory.clone(), ext: DistExtension::Source(SourceDistExtension::TarGz), }); - write!(f, "{} @ {}", package.id.name, url)?; + write!( + f, + "{} @ {}", + package.id.name, + // TODO(zanieb): We should probably omit passwords here by default, but we + // should change it in a breaking release and allow opt-in to include them. + url.displayable_with_credentials() + )?; } Source::Path(path) | Source::Directory(path) => { if path.is_absolute() { diff --git a/crates/uv-resolver/src/lock/installable.rs b/crates/uv-resolver/src/lock/installable.rs index 4522a243d..e3cdbf019 100644 --- a/crates/uv-resolver/src/lock/installable.rs +++ b/crates/uv-resolver/src/lock/installable.rs @@ -91,7 +91,8 @@ pub trait Installable<'lock> { } } - // Add the workspace packages to the queue. + // Initialize the workspace roots. + let mut roots = vec![]; for root_name in self.roots() { let dist = self .lock() @@ -114,6 +115,12 @@ pub trait Installable<'lock> { // Add an edge from the root. petgraph.add_edge(root, index, Edge::Prod(MarkerTree::TRUE)); + // Push the package onto the queue. 
+ roots.push((dist, index)); + } + + // Add the workspace dependencies to the queue. + for (dist, index) in roots { if dev.prod() { // Push its dependencies onto the queue. queue.push_back((dist, None)); diff --git a/crates/uv-resolver/src/lock/mod.rs b/crates/uv-resolver/src/lock/mod.rs index 6dab3b4b1..7ca100fd8 100644 --- a/crates/uv-resolver/src/lock/mod.rs +++ b/crates/uv-resolver/src/lock/mod.rs @@ -29,8 +29,8 @@ use uv_distribution_types::{ BuiltDist, DependencyMetadata, DirectUrlBuiltDist, DirectUrlSourceDist, DirectorySourceDist, Dist, DistributionMetadata, FileLocation, GitSourceDist, IndexLocations, IndexMetadata, IndexUrl, Name, PathBuiltDist, PathSourceDist, RegistryBuiltDist, RegistryBuiltWheel, - RegistrySourceDist, RemoteSource, Requirement, RequirementSource, ResolvedDist, StaticMetadata, - ToUrlError, UrlString, redact_credentials, + RegistrySourceDist, RemoteSource, Requirement, RequirementSource, RequiresPython, ResolvedDist, + SimplifiedMarkerTree, StaticMetadata, ToUrlError, UrlString, }; use uv_fs::{PortablePath, PortablePathBuf, relative_to}; use uv_git::{RepositoryReference, ResolvedRepositoryReference}; @@ -45,6 +45,7 @@ use uv_pypi_types::{ ConflictPackage, Conflicts, HashAlgorithm, HashDigest, HashDigests, Hashes, ParsedArchiveUrl, ParsedGitUrl, }; +use uv_redacted::DisplaySafeUrl; use uv_small_str::SmallString; use uv_types::{BuildContext, HashStrategy}; use uv_workspace::WorkspaceMember; @@ -56,12 +57,10 @@ pub use crate::lock::export::{PylockToml, PylockTomlErrorKind}; pub use crate::lock::installable::Installable; pub use crate::lock::map::PackageMap; pub use crate::lock::tree::TreeDisplay; -use crate::requires_python::SimplifiedMarkerTree; use crate::resolution::{AnnotatedDist, ResolutionGraphNode}; use crate::universal_marker::{ConflictMarker, UniversalMarker}; use crate::{ - ExcludeNewer, InMemoryIndex, MetadataResponse, PrereleaseMode, RequiresPython, ResolutionMode, - ResolverOutput, + ExcludeNewer, InMemoryIndex, MetadataResponse, PrereleaseMode, ResolutionMode, ResolverOutput, }; mod export; @@ -769,6 +768,36 @@ impl Lock { } } + /// Checks whether the new requires-python specification is disjoint with + /// the fork markers in this lock file. + /// + /// If they are disjoint, then the union of the fork markers along with the + /// given requires-python specification (converted to a marker tree) are + /// returned. + /// + /// When disjoint, the fork markers in the lock file should be dropped and + /// not used. + pub fn requires_python_coverage( + &self, + new_requires_python: &RequiresPython, + ) -> Result<(), (MarkerTree, MarkerTree)> { + let fork_markers_union = if self.fork_markers().is_empty() { + self.requires_python.to_marker_tree() + } else { + let mut fork_markers_union = MarkerTree::FALSE; + for fork_marker in self.fork_markers() { + fork_markers_union.or(fork_marker.pep508()); + } + fork_markers_union + }; + let new_requires_python = new_requires_python.to_marker_tree(); + if fork_markers_union.is_disjoint(new_requires_python) { + Err((fork_markers_union, new_requires_python)) + } else { + Ok(()) + } + } + /// Returns the TOML representation of this lockfile. 
pub fn to_toml(&self) -> Result { // Catch a lockfile where the union of fork markers doesn't cover the supported @@ -1144,7 +1173,9 @@ impl Lock { Some( FlatRequiresDist::from_requirements(requires_dist.clone(), &package.id.name) .into_iter() - .map(|requirement| normalize_requirement(requirement, root)) + .map(|requirement| { + normalize_requirement(requirement, root, &self.requires_python) + }) .collect::, _>>()?, ) } else { @@ -1153,14 +1184,14 @@ impl Lock { // Validate the `requires-dist` metadata. let expected: BTreeSet<_> = Box::into_iter(requires_dist) - .map(|requirement| normalize_requirement(requirement, root)) + .map(|requirement| normalize_requirement(requirement, root, &self.requires_python)) .collect::>()?; let actual: BTreeSet<_> = package .metadata .requires_dist .iter() .cloned() - .map(|requirement| normalize_requirement(requirement, root)) + .map(|requirement| normalize_requirement(requirement, root, &self.requires_python)) .collect::>()?; if expected != actual && flattened.is_none_or(|expected| expected != actual) { @@ -1180,7 +1211,9 @@ impl Lock { Ok::<_, LockError>(( group, Box::into_iter(requirements) - .map(|requirement| normalize_requirement(requirement, root)) + .map(|requirement| { + normalize_requirement(requirement, root, &self.requires_python) + }) .collect::>()?, )) }) @@ -1196,7 +1229,9 @@ impl Lock { requirements .iter() .cloned() - .map(|requirement| normalize_requirement(requirement, root)) + .map(|requirement| { + normalize_requirement(requirement, root, &self.requires_python) + }) .collect::>()?, )) }) @@ -1263,14 +1298,14 @@ impl Lock { let expected: BTreeSet<_> = requirements .iter() .cloned() - .map(|requirement| normalize_requirement(requirement, root)) + .map(|requirement| normalize_requirement(requirement, root, &self.requires_python)) .collect::>()?; let actual: BTreeSet<_> = self .manifest .requirements .iter() .cloned() - .map(|requirement| normalize_requirement(requirement, root)) + .map(|requirement| normalize_requirement(requirement, root, &self.requires_python)) .collect::>()?; if expected != actual { return Ok(SatisfiesResult::MismatchedRequirements(expected, actual)); @@ -1282,14 +1317,14 @@ impl Lock { let expected: BTreeSet<_> = constraints .iter() .cloned() - .map(|requirement| normalize_requirement(requirement, root)) + .map(|requirement| normalize_requirement(requirement, root, &self.requires_python)) .collect::>()?; let actual: BTreeSet<_> = self .manifest .constraints .iter() .cloned() - .map(|requirement| normalize_requirement(requirement, root)) + .map(|requirement| normalize_requirement(requirement, root, &self.requires_python)) .collect::>()?; if expected != actual { return Ok(SatisfiesResult::MismatchedConstraints(expected, actual)); @@ -1301,14 +1336,14 @@ impl Lock { let expected: BTreeSet<_> = overrides .iter() .cloned() - .map(|requirement| normalize_requirement(requirement, root)) + .map(|requirement| normalize_requirement(requirement, root, &self.requires_python)) .collect::>()?; let actual: BTreeSet<_> = self .manifest .overrides .iter() .cloned() - .map(|requirement| normalize_requirement(requirement, root)) + .map(|requirement| normalize_requirement(requirement, root, &self.requires_python)) .collect::>()?; if expected != actual { return Ok(SatisfiesResult::MismatchedOverrides(expected, actual)); @@ -1320,14 +1355,14 @@ impl Lock { let expected: BTreeSet<_> = build_constraints .iter() .cloned() - .map(|requirement| normalize_requirement(requirement, root)) + .map(|requirement| normalize_requirement(requirement, 
root, &self.requires_python)) .collect::>()?; let actual: BTreeSet<_> = self .manifest .build_constraints .iter() .cloned() - .map(|requirement| normalize_requirement(requirement, root)) + .map(|requirement| normalize_requirement(requirement, root, &self.requires_python)) .collect::>()?; if expected != actual { return Ok(SatisfiesResult::MismatchedBuildConstraints( @@ -1347,7 +1382,9 @@ impl Lock { requirements .iter() .cloned() - .map(|requirement| normalize_requirement(requirement, root)) + .map(|requirement| { + normalize_requirement(requirement, root, &self.requires_python) + }) .collect::>()?, )) }) @@ -1363,7 +1400,9 @@ impl Lock { requirements .iter() .cloned() - .map(|requirement| normalize_requirement(requirement, root)) + .map(|requirement| { + normalize_requirement(requirement, root, &self.requires_python) + }) .collect::>()?, )) }) @@ -1394,7 +1433,7 @@ impl Lock { .into_iter() .filter_map(|index| match index.url() { IndexUrl::Pypi(_) | IndexUrl::Url(_) => { - Some(UrlString::from(index.url().redacted().as_ref())) + Some(UrlString::from(index.url().without_credentials().as_ref())) } IndexUrl::Path(_) => None, }) @@ -2228,7 +2267,7 @@ impl Package { Source::Direct(url, direct) => { let filename: WheelFilename = self.wheels[best_wheel_index].filename.clone(); - let url = Url::from(ParsedArchiveUrl { + let url = DisplaySafeUrl::from(ParsedArchiveUrl { url: url.to_url().map_err(LockErrorKind::InvalidUrl)?, subdirectory: direct.subdirectory.clone(), ext: DistExtension::Wheel, @@ -2332,7 +2371,13 @@ impl Package { let sdist = match &self.id.source { Source::Path(path) => { // A direct path source can also be a wheel, so validate the extension. - let DistExtension::Source(ext) = DistExtension::from_path(path)? else { + let DistExtension::Source(ext) = DistExtension::from_path(path).map_err(|err| { + LockErrorKind::MissingExtension { + id: self.id.clone(), + err, + } + })? + else { return Ok(None); }; let install_path = absolute_path(workspace_root, path)?; @@ -2390,7 +2435,7 @@ impl Package { GitUrl::from_commit(url, GitReference::from(git.kind.clone()), git.precise)?; // Reconstruct the PEP 508-compatible URL from the `GitSource`. - let url = Url::from(ParsedGitUrl { + let url = DisplaySafeUrl::from(ParsedGitUrl { url: git_url.clone(), subdirectory: git.subdirectory.clone(), }); @@ -2405,11 +2450,18 @@ impl Package { } Source::Direct(url, direct) => { // A direct URL source can also be a wheel, so validate the extension. - let DistExtension::Source(ext) = DistExtension::from_path(url.as_ref())? else { + let DistExtension::Source(ext) = + DistExtension::from_path(url.base_str()).map_err(|err| { + LockErrorKind::MissingExtension { + id: self.id.clone(), + err, + } + })? 
+ else { return Ok(None); }; let location = url.to_url().map_err(LockErrorKind::InvalidUrl)?; - let url = Url::from(ParsedArchiveUrl { + let url = DisplaySafeUrl::from(ParsedArchiveUrl { url: location.clone(), subdirectory: direct.subdirectory.clone(), ext: DistExtension::Source(ext), @@ -2444,7 +2496,12 @@ impl Package { .ok_or_else(|| LockErrorKind::MissingFilename { id: self.id.clone(), })?; - let ext = SourceDistExtension::from_path(filename.as_ref())?; + let ext = SourceDistExtension::from_path(filename.as_ref()).map_err(|err| { + LockErrorKind::MissingExtension { + id: self.id.clone(), + err, + } + })?; let file = Box::new(uv_distribution_types::File { dist_info_metadata: false, filename: SmallString::from(filename), @@ -2484,18 +2541,41 @@ impl Package { .as_ref() .expect("version for registry source"); - let file_path = sdist.path().ok_or_else(|| LockErrorKind::MissingPath { - name: name.clone(), - version: version.clone(), - })?; - let file_url = Url::from_file_path(workspace_root.join(path).join(file_path)) - .map_err(|()| LockErrorKind::PathToUrl)?; + let file_url = match sdist { + SourceDist::Url { url: file_url, .. } => { + FileLocation::AbsoluteUrl(file_url.clone()) + } + SourceDist::Path { + path: file_path, .. + } => { + let file_path = workspace_root.join(path).join(file_path); + let file_url = + DisplaySafeUrl::from_file_path(&file_path).map_err(|()| { + LockErrorKind::PathToUrl { + path: file_path.into_boxed_path(), + } + })?; + FileLocation::AbsoluteUrl(UrlString::from(file_url)) + } + SourceDist::Metadata { .. } => { + return Err(LockErrorKind::MissingPath { + name: name.clone(), + version: version.clone(), + } + .into()); + } + }; let filename = sdist .filename() .ok_or_else(|| LockErrorKind::MissingFilename { id: self.id.clone(), })?; - let ext = SourceDistExtension::from_path(filename.as_ref())?; + let ext = SourceDistExtension::from_path(filename.as_ref()).map_err(|err| { + LockErrorKind::MissingExtension { + id: self.id.clone(), + err, + } + })?; let file = Box::new(uv_distribution_types::File { dist_info_metadata: false, filename: SmallString::from(filename), @@ -2505,9 +2585,10 @@ impl Package { requires_python: None, size: sdist.size(), upload_time_utc_ms: sdist.upload_time().map(Timestamp::as_millisecond), - url: FileLocation::AbsoluteUrl(UrlString::from(file_url)), + url: file_url, yanked: None, }); + let index = IndexUrl::from( VerbatimUrl::from_absolute_path(workspace_root.join(path)) .map_err(LockErrorKind::RegistryVerbatimUrl)?, @@ -2837,6 +2918,34 @@ struct PackageMetadata { dependency_groups: BTreeMap>, } +impl PackageMetadata { + fn unwire(self, requires_python: &RequiresPython) -> PackageMetadata { + // We need to complexify these markers so things like + // `requires_python < '0'` get normalized to False + let unwire_requirements = |requirements: BTreeSet| -> BTreeSet { + requirements + .into_iter() + .map(|mut requirement| { + let complexified_marker = + requires_python.complexify_markers(requirement.marker); + requirement.marker = complexified_marker; + requirement + }) + .collect() + }; + + PackageMetadata { + requires_dist: unwire_requirements(self.requires_dist), + provides_extras: self.provides_extras, + dependency_groups: self + .dependency_groups + .into_iter() + .map(|(group, requirements)| (group, unwire_requirements(requirements))) + .collect(), + } + } +} + impl PackageWire { fn unwire( self, @@ -2865,9 +2974,10 @@ impl PackageWire { .map(|dep| dep.unwire(requires_python, unambiguous_package_ids)) .collect() }; + Ok(Package { id: self.id, 
- metadata: self.metadata, + metadata: self.metadata.unwire(requires_python), sdist: self.sdist, wheels: self.wheels, fork_markers: self @@ -3153,12 +3263,14 @@ impl Source { match index_url { IndexUrl::Pypi(_) | IndexUrl::Url(_) => { // Remove any sensitive credentials from the index URL. - let redacted = index_url.redacted(); + let redacted = index_url.without_credentials(); let source = RegistrySource::Url(UrlString::from(redacted.as_ref())); Ok(Source::Registry(source)) } IndexUrl::Path(url) => { - let path = url.to_file_path().map_err(|()| LockErrorKind::UrlToPath)?; + let path = url + .to_file_path() + .map_err(|()| LockErrorKind::UrlToPath { url: url.to_url() })?; let path = relative_to(&path, root) .or_else(|_| std::path::absolute(&path)) .map_err(LockErrorKind::IndexRelativePath)?; @@ -3366,7 +3478,7 @@ impl TryFrom for Source { match wire { Registry { registry } => Ok(Source::Registry(registry.into())), Git { git } => { - let url = Url::parse(&git) + let url = DisplaySafeUrl::parse(&git) .map_err(|err| SourceParseError::InvalidUrl { given: git.to_string(), err, @@ -3591,14 +3703,6 @@ impl SourceDist { } } - fn path(&self) -> Option<&Path> { - match &self { - SourceDist::Metadata { .. } => None, - SourceDist::Url { .. } => None, - SourceDist::Path { path, .. } => Some(path), - } - } - pub(crate) fn hash(&self) -> Option<&Hash> { match &self { SourceDist::Metadata { metadata } => metadata.hash.as_ref(), @@ -3718,34 +3822,60 @@ impl SourceDist { })) } IndexUrl::Path(path) => { - let index_path = path.to_file_path().map_err(|()| LockErrorKind::UrlToPath)?; - let reg_dist_path = reg_dist + let index_path = path + .to_file_path() + .map_err(|()| LockErrorKind::UrlToPath { url: path.to_url() })?; + let url = reg_dist .file .url .to_url() - .map_err(LockErrorKind::InvalidUrl)? - .to_file_path() - .map_err(|()| LockErrorKind::UrlToPath)?; - let path = relative_to(®_dist_path, index_path) - .or_else(|_| std::path::absolute(®_dist_path)) - .map_err(LockErrorKind::DistributionRelativePath)? - .into_boxed_path(); - let hash = reg_dist.file.hashes.iter().max().cloned().map(Hash::from); - let size = reg_dist.file.size; - let upload_time = reg_dist - .file - .upload_time_utc_ms - .map(Timestamp::from_millisecond) - .transpose() - .map_err(LockErrorKind::InvalidTimestamp)?; - Ok(Some(SourceDist::Path { - path, - metadata: SourceDistMetadata { - hash, - size, - upload_time, - }, - })) + .map_err(LockErrorKind::InvalidUrl)?; + + if url.scheme() == "file" { + let reg_dist_path = url + .to_file_path() + .map_err(|()| LockErrorKind::UrlToPath { url })?; + let path = relative_to(®_dist_path, index_path) + .or_else(|_| std::path::absolute(®_dist_path)) + .map_err(LockErrorKind::DistributionRelativePath)? 
+ .into_boxed_path(); + let hash = reg_dist.file.hashes.iter().max().cloned().map(Hash::from); + let size = reg_dist.file.size; + let upload_time = reg_dist + .file + .upload_time_utc_ms + .map(Timestamp::from_millisecond) + .transpose() + .map_err(LockErrorKind::InvalidTimestamp)?; + Ok(Some(SourceDist::Path { + path, + metadata: SourceDistMetadata { + hash, + size, + upload_time, + }, + })) + } else { + let url = normalize_file_location(®_dist.file.url) + .map_err(LockErrorKind::InvalidUrl) + .map_err(LockError::from)?; + let hash = reg_dist.file.hashes.iter().max().cloned().map(Hash::from); + let size = reg_dist.file.size; + let upload_time = reg_dist + .file + .upload_time_utc_ms + .map(Timestamp::from_millisecond) + .transpose() + .map_err(LockErrorKind::InvalidTimestamp)?; + Ok(Some(SourceDist::Url { + url, + metadata: SourceDistMetadata { + hash, + size, + upload_time, + }, + })) + } } } } @@ -3874,12 +4004,12 @@ impl From for GitReference { } } -/// Construct the lockfile-compatible [`URL`] for a [`GitSourceDist`]. -fn locked_git_url(git_dist: &GitSourceDist) -> Url { +/// Construct the lockfile-compatible [`DisplaySafeUrl`] for a [`GitSourceDist`]. +fn locked_git_url(git_dist: &GitSourceDist) -> DisplaySafeUrl { let mut url = git_dist.git.repository().clone(); - // Redact the credentials. - redact_credentials(&mut url); + // Remove the credentials. + url.remove_credentials(); // Clear out any existing state. url.set_fragment(None); @@ -4048,25 +4178,46 @@ impl Wheel { }) } IndexUrl::Path(path) => { - let index_path = path.to_file_path().map_err(|()| LockErrorKind::UrlToPath)?; - let wheel_path = wheel - .file - .url - .to_url() - .map_err(LockErrorKind::InvalidUrl)? + let index_path = path .to_file_path() - .map_err(|()| LockErrorKind::UrlToPath)?; - let path = relative_to(&wheel_path, index_path) - .or_else(|_| std::path::absolute(&wheel_path)) - .map_err(LockErrorKind::DistributionRelativePath)? - .into_boxed_path(); - Ok(Wheel { - url: WheelWireSource::Path { path }, - hash: None, - size: None, - upload_time: None, - filename, - }) + .map_err(|()| LockErrorKind::UrlToPath { url: path.to_url() })?; + let wheel_url = wheel.file.url.to_url().map_err(LockErrorKind::InvalidUrl)?; + + if wheel_url.scheme() == "file" { + let wheel_path = wheel_url + .to_file_path() + .map_err(|()| LockErrorKind::UrlToPath { url: wheel_url })?; + let path = relative_to(&wheel_path, index_path) + .or_else(|_| std::path::absolute(&wheel_path)) + .map_err(LockErrorKind::DistributionRelativePath)? + .into_boxed_path(); + Ok(Wheel { + url: WheelWireSource::Path { path }, + hash: None, + size: None, + upload_time: None, + filename, + }) + } else { + let url = normalize_file_location(&wheel.file.url) + .map_err(LockErrorKind::InvalidUrl) + .map_err(LockError::from)?; + let hash = wheel.file.hashes.iter().max().cloned().map(Hash::from); + let size = wheel.file.size; + let upload_time = wheel + .file + .upload_time_utc_ms + .map(Timestamp::from_millisecond) + .transpose() + .map_err(LockErrorKind::InvalidTimestamp)?; + Ok(Wheel { + url: WheelWireSource::Url { url }, + hash, + size, + filename, + upload_time, + }) + } } } } @@ -4104,8 +4255,10 @@ impl Wheel { match source { RegistrySource::Url(url) => { - let file_url = match &self.url { - WheelWireSource::Url { url } => url, + let file_location = match &self.url { + WheelWireSource::Url { url: file_url } => { + FileLocation::AbsoluteUrl(file_url.clone()) + } WheelWireSource::Path { .. } | WheelWireSource::Filename { .. 
} => { return Err(LockErrorKind::MissingUrl { name: filename.name, @@ -4121,7 +4274,7 @@ impl Wheel { requires_python: None, size: self.size, upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond), - url: FileLocation::AbsoluteUrl(file_url.clone()), + url: file_location, yanked: None, }); let index = IndexUrl::from(VerbatimUrl::from_url( @@ -4134,9 +4287,21 @@ impl Wheel { }) } RegistrySource::Path(index_path) => { - let file_path = match &self.url { - WheelWireSource::Path { path } => path, - WheelWireSource::Url { .. } | WheelWireSource::Filename { .. } => { + let file_location = match &self.url { + WheelWireSource::Url { url: file_url } => { + FileLocation::AbsoluteUrl(file_url.clone()) + } + WheelWireSource::Path { path: file_path } => { + let file_path = root.join(index_path).join(file_path); + let file_url = + DisplaySafeUrl::from_file_path(&file_path).map_err(|()| { + LockErrorKind::PathToUrl { + path: file_path.into_boxed_path(), + } + })?; + FileLocation::AbsoluteUrl(UrlString::from(file_url)) + } + WheelWireSource::Filename { .. } => { return Err(LockErrorKind::MissingPath { name: filename.name, version: filename.version, @@ -4144,8 +4309,6 @@ impl Wheel { .into()); } }; - let file_url = Url::from_file_path(root.join(index_path).join(file_path)) - .map_err(|()| LockErrorKind::PathToUrl)?; let file = Box::new(uv_distribution_types::File { dist_info_metadata: false, filename: SmallString::from(filename.to_string()), @@ -4153,7 +4316,7 @@ impl Wheel { requires_python: None, size: self.size, upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond), - url: FileLocation::AbsoluteUrl(UrlString::from(file_url)), + url: file_location, yanked: None, }); let index = IndexUrl::from( @@ -4527,13 +4690,13 @@ impl From for Hashes { /// Convert a [`FileLocation`] into a normalized [`UrlString`]. fn normalize_file_location(location: &FileLocation) -> Result { match location { - FileLocation::AbsoluteUrl(absolute) => Ok(absolute.without_fragment()), + FileLocation::AbsoluteUrl(absolute) => Ok(absolute.without_fragment().into_owned()), FileLocation::RelativeUrl(_, _) => Ok(normalize_url(location.to_url()?)), } } -/// Convert a [`Url`] into a normalized [`UrlString`] by removing the fragment. -fn normalize_url(mut url: Url) -> UrlString { +/// Convert a [`DisplaySafeUrl`] into a normalized [`UrlString`] by removing the fragment. +fn normalize_url(mut url: DisplaySafeUrl) -> UrlString { url.set_fragment(None); UrlString::from(url) } @@ -4546,9 +4709,11 @@ fn normalize_url(mut url: Url) -> UrlString { /// 2. Ensures that the lock and install paths are appropriately framed with respect to the /// current [`Workspace`]. /// 3. Removes the `origin` field, which is only used in `requirements.txt`. +/// 4. Simplifies the markers using the provided [`RequiresPython`] instance. fn normalize_requirement( mut requirement: Requirement, root: &Path, + requires_python: &RequiresPython, ) -> Result { // Sort the extras and groups for consistency. requirement.extras.sort(); @@ -4565,8 +4730,8 @@ fn normalize_requirement( let git = { let mut repository = git.repository().clone(); - // Redact the credentials. - redact_credentials(&mut repository); + // Remove the credentials. + repository.remove_credentials(); // Remove the fragment and query from the URL; they're already present in the source. repository.set_fragment(None); @@ -4576,7 +4741,7 @@ fn normalize_requirement( }; // Reconstruct the PEP 508 URL from the underlying data. 
- let url = Url::from(ParsedGitUrl { + let url = DisplaySafeUrl::from(ParsedGitUrl { url: git.clone(), subdirectory: subdirectory.clone(), }); @@ -4585,7 +4750,7 @@ fn normalize_requirement( name: requirement.name, extras: requirement.extras, groups: requirement.groups, - marker: requirement.marker, + marker: requires_python.simplify_markers(requirement.marker), source: RequirementSource::Git { git, subdirectory, @@ -4608,7 +4773,7 @@ fn normalize_requirement( name: requirement.name, extras: requirement.extras, groups: requirement.groups, - marker: requirement.marker, + marker: requires_python.simplify_markers(requirement.marker), source: RequirementSource::Path { install_path, ext, @@ -4632,7 +4797,7 @@ fn normalize_requirement( name: requirement.name, extras: requirement.extras, groups: requirement.groups, - marker: requirement.marker, + marker: requires_python.simplify_markers(requirement.marker), source: RequirementSource::Directory { install_path, editable, @@ -4651,7 +4816,7 @@ fn normalize_requirement( let index = index .map(|index| index.url.into_url()) .map(|mut index| { - redact_credentials(&mut index); + index.remove_credentials(); index }) .map(|index| IndexMetadata::from(IndexUrl::from(VerbatimUrl::from_url(index)))); @@ -4659,7 +4824,7 @@ fn normalize_requirement( name: requirement.name, extras: requirement.extras, groups: requirement.groups, - marker: requirement.marker, + marker: requires_python.simplify_markers(requirement.marker), source: RequirementSource::Registry { specifier, index, @@ -4674,14 +4839,14 @@ fn normalize_requirement( ext, url: _, } => { - // Redact the credentials. - redact_credentials(&mut location); + // Remove the credentials. + location.remove_credentials(); // Remove the fragment from the URL; it's already present in the source. location.set_fragment(None); // Reconstruct the PEP 508 URL from the underlying data. 
- let url = Url::from(ParsedArchiveUrl { + let url = DisplaySafeUrl::from(ParsedArchiveUrl { url: location.clone(), subdirectory: subdirectory.clone(), ext, @@ -4691,7 +4856,7 @@ fn normalize_requirement( name: requirement.name, extras: requirement.extras, groups: requirement.groups, - marker: requirement.marker, + marker: requires_python.simplify_markers(requirement.marker), source: RequirementSource::Url { location, subdirectory, @@ -5040,32 +5205,23 @@ impl std::fmt::Display for WheelTagHint { } else { format!("`{}`", best.cyan()) }; - if let Some(version) = version { - write!( - f, - "{}{} You're on {}, but `{}` ({}) only has wheels for the following platform{s}: {}", - "hint".bold().cyan(), - ":".bold(), - best, - package.cyan(), - format!("v{version}").cyan(), - tags.iter() - .map(|tag| format!("`{}`", tag.cyan())) - .join(", "), - ) + let package_ref = if let Some(version) = version { + format!("`{}` ({})", package.cyan(), format!("v{version}").cyan()) } else { - write!( - f, - "{}{} You're on {}, but `{}` only has wheels for the following platform{s}: {}", - "hint".bold().cyan(), - ":".bold(), - best, - package.cyan(), - tags.iter() - .map(|tag| format!("`{}`", tag.cyan())) - .join(", "), - ) - } + format!("`{}`", package.cyan()) + }; + writeln!( + f, + "{}{} You're on {}, but {} only has wheels for the following platform{s}: {}; consider adding your platform to `{}` to ensure uv resolves to a version with compatible wheels", + "hint".bold().cyan(), + ":".bold(), + best, + package_ref, + tags.iter() + .map(|tag| format!("`{}`", tag.cyan())) + .join(", "), + "tool.uv.required-environments".green() + ) } else { if let Some(version) = version { write!( @@ -5159,8 +5315,13 @@ enum LockErrorKind { ), /// An error that occurs when the extension can't be determined /// for a given wheel or source distribution. - #[error("Failed to parse file extension; expected one of: {0}")] - MissingExtension(#[from] ExtensionError), + #[error("Failed to parse file extension for `{id}`; expected one of: {err}", id = id.cyan())] + MissingExtension { + /// The filename that was expected to have an extension. + id: PackageId, + /// The list of valid extensions that were expected. + err: ExtensionError, + }, /// Failed to parse a Git source URL. #[error("Failed to parse Git URL")] InvalidGitSourceUrl( @@ -5358,11 +5519,11 @@ enum LockErrorKind { VerbatimUrlError, ), /// An error that occurs when converting a path to a URL. - #[error("Failed to convert path to URL")] - PathToUrl, + #[error("Failed to convert path to URL: {path}", path = path.display().cyan())] + PathToUrl { path: Box }, /// An error that occurs when converting a URL to a path - #[error("Failed to convert URL to path")] - UrlToPath, + #[error("Failed to convert URL to path: {url}", url = url.cyan())] + UrlToPath { url: DisplaySafeUrl }, /// An error that occurs when multiple packages with the same /// name were found when identifying the root packages. 
#[error("Found multiple packages matching `{name}`", name = name.cyan())] diff --git a/crates/uv-resolver/src/marker.rs b/crates/uv-resolver/src/marker.rs index 1bb938a33..b63d51401 100644 --- a/crates/uv-resolver/src/marker.rs +++ b/crates/uv-resolver/src/marker.rs @@ -5,7 +5,7 @@ use std::ops::Bound; use uv_pep440::{LowerBound, UpperBound, Version}; use uv_pep508::{CanonicalMarkerValueVersion, MarkerTree, MarkerTreeKind}; -use crate::requires_python::RequiresPythonRange; +use uv_distribution_types::RequiresPythonRange; /// Returns the bounding Python versions that can satisfy the [`MarkerTree`], if it's constrained. pub(crate) fn requires_python(tree: MarkerTree) -> Option { diff --git a/crates/uv-resolver/src/preferences.rs b/crates/uv-resolver/src/preferences.rs index 116d94d87..51e325d68 100644 --- a/crates/uv-resolver/src/preferences.rs +++ b/crates/uv-resolver/src/preferences.rs @@ -34,6 +34,8 @@ pub struct Preference { /// is part of, otherwise `None`. fork_markers: Vec, hashes: HashDigests, + /// The source of the preference. + source: PreferenceSource, } impl Preference { @@ -73,6 +75,7 @@ impl Preference { .map(String::as_str) .map(HashDigest::from_str) .collect::>()?, + source: PreferenceSource::RequirementsTxt, })) } @@ -91,6 +94,7 @@ impl Preference { index: PreferenceIndex::from(package.index(install_path)?), fork_markers: package.fork_markers().to_vec(), hashes: HashDigests::empty(), + source: PreferenceSource::Lock, })) } @@ -112,6 +116,7 @@ impl Preference { // `pylock.toml` doesn't have fork annotations. fork_markers: vec![], hashes: HashDigests::empty(), + source: PreferenceSource::Lock, })) } @@ -127,6 +132,7 @@ impl Preference { index: PreferenceIndex::Any, fork_markers: vec![], hashes: HashDigests::empty(), + source: PreferenceSource::Environment, }) } @@ -171,11 +177,24 @@ impl From> for PreferenceIndex { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(crate) enum PreferenceSource { + /// The preference is from an installed package in the environment. + Environment, + /// The preference is from a `uv.ock` file. + Lock, + /// The preference is from a `requirements.txt` file. + RequirementsTxt, + /// The preference is from the current solve. + Resolver, +} + #[derive(Debug, Clone)] pub(crate) struct Entry { marker: UniversalMarker, index: PreferenceIndex, pin: Pin, + source: PreferenceSource, } impl Entry { @@ -193,6 +212,11 @@ impl Entry { pub(crate) fn pin(&self) -> &Pin { &self.pin } + + /// Return the source of the entry. + pub(crate) fn source(&self) -> PreferenceSource { + self.source + } } /// A set of pinned packages that should be preserved during resolution, if possible. 
@@ -245,6 +269,7 @@ impl Preferences { version: preference.version, hashes: preference.hashes, }, + source: preference.source, }); } else { for fork_marker in preference.fork_markers { @@ -255,6 +280,7 @@ impl Preferences { version: preference.version.clone(), hashes: preference.hashes.clone(), }, + source: preference.source, }); } } @@ -270,11 +296,13 @@ impl Preferences { index: Option, markers: UniversalMarker, pin: impl Into, + source: PreferenceSource, ) { self.0.entry(package_name).or_default().push(Entry { marker: markers, index: PreferenceIndex::from(index), pin: pin.into(), + source, }); } diff --git a/crates/uv-resolver/src/pubgrub/mod.rs b/crates/uv-resolver/src/pubgrub/mod.rs index f4802a2ca..bd58fbc72 100644 --- a/crates/uv-resolver/src/pubgrub/mod.rs +++ b/crates/uv-resolver/src/pubgrub/mod.rs @@ -1,6 +1,6 @@ pub(crate) use crate::pubgrub::dependencies::PubGrubDependency; pub(crate) use crate::pubgrub::distribution::PubGrubDistribution; -pub(crate) use crate::pubgrub::package::{PubGrubPackage, PubGrubPackageInner, PubGrubPython}; +pub use crate::pubgrub::package::{PubGrubPackage, PubGrubPackageInner, PubGrubPython}; pub(crate) use crate::pubgrub::priority::{PubGrubPriorities, PubGrubPriority, PubGrubTiebreaker}; pub(crate) use crate::pubgrub::report::PubGrubReportFormatter; diff --git a/crates/uv-resolver/src/pubgrub/package.rs b/crates/uv-resolver/src/pubgrub/package.rs index 8c40f8080..2e67a715a 100644 --- a/crates/uv-resolver/src/pubgrub/package.rs +++ b/crates/uv-resolver/src/pubgrub/package.rs @@ -9,7 +9,7 @@ use crate::python_requirement::PythonRequirement; /// [`Arc`] wrapper around [`PubGrubPackageInner`] to make cloning (inside PubGrub) cheap. #[derive(Debug, Clone, Eq, Hash, PartialEq, PartialOrd, Ord)] -pub(crate) struct PubGrubPackage(Arc); +pub struct PubGrubPackage(Arc); impl Deref for PubGrubPackage { type Target = PubGrubPackageInner; @@ -39,7 +39,7 @@ impl From for PubGrubPackage { /// package (e.g., `black[colorama]`), and mark it as a dependency of the real package (e.g., /// `black`). We then discard the virtual packages at the end of the resolution process. #[derive(Debug, Clone, Eq, Hash, PartialEq, PartialOrd, Ord)] -pub(crate) enum PubGrubPackageInner { +pub enum PubGrubPackageInner { /// The root package, which is used to start the resolution process. Root(Option), /// A Python version. @@ -295,7 +295,7 @@ impl PubGrubPackage { } #[derive(Debug, Copy, Clone, Eq, PartialEq, PartialOrd, Hash, Ord)] -pub(crate) enum PubGrubPython { +pub enum PubGrubPython { /// The Python version installed in the current environment. Installed, /// The Python version for which dependencies are being resolved. 
diff --git a/crates/uv-resolver/src/pubgrub/report.rs b/crates/uv-resolver/src/pubgrub/report.rs index 06d11aad6..5c62f0b1f 100644 --- a/crates/uv-resolver/src/pubgrub/report.rs +++ b/crates/uv-resolver/src/pubgrub/report.rs @@ -11,14 +11,14 @@ use rustc_hash::FxHashMap; use uv_configuration::{IndexStrategy, NoBinary, NoBuild}; use uv_distribution_types::{ IncompatibleDist, IncompatibleSource, IncompatibleWheel, Index, IndexCapabilities, - IndexLocations, IndexMetadata, IndexUrl, + IndexLocations, IndexMetadata, IndexUrl, RequiresPython, }; use uv_normalize::PackageName; use uv_pep440::{Version, VersionSpecifiers}; use uv_platform_tags::{AbiTag, IncompatibleTag, LanguageTag, PlatformTag, Tags}; use crate::candidate_selector::CandidateSelector; -use crate::error::ErrorTree; +use crate::error::{ErrorTree, PrefixMatch}; use crate::fork_indexes::ForkIndexes; use crate::fork_urls::ForkUrls; use crate::prerelease::AllowPrerelease; @@ -27,9 +27,7 @@ use crate::python_requirement::{PythonRequirement, PythonRequirementSource}; use crate::resolver::{ MetadataUnavailable, UnavailablePackage, UnavailableReason, UnavailableVersion, }; -use crate::{ - Flexibility, InMemoryIndex, Options, RequiresPython, ResolverEnvironment, VersionsResponse, -}; +use crate::{Flexibility, InMemoryIndex, Options, ResolverEnvironment, VersionsResponse}; #[derive(Debug)] pub(crate) struct PubGrubReportFormatter<'a> { @@ -946,17 +944,30 @@ impl PubGrubReportFormatter<'_> { hints: &mut IndexSet, ) { let any_prerelease = set.iter().any(|(start, end)| { + // Ignore, e.g., `>=2.4.dev0,<2.5.dev0`, which is the desugared form of `==2.4.*`. + if PrefixMatch::from_range(start, end).is_some() { + return false; + } + let is_pre1 = match start { Bound::Included(version) => version.any_prerelease(), Bound::Excluded(version) => version.any_prerelease(), Bound::Unbounded => false, }; + if is_pre1 { + return true; + } + let is_pre2 = match end { Bound::Included(version) => version.any_prerelease(), Bound::Excluded(version) => version.any_prerelease(), Bound::Unbounded => false, }; - is_pre1 || is_pre2 + if is_pre2 { + return true; + } + + false }); if any_prerelease { @@ -1518,7 +1529,7 @@ impl std::fmt::Display for PubGrubHint { "hint".bold().cyan(), ":".bold(), name.cyan(), - found_index.redacted().cyan(), + found_index.without_credentials().cyan(), PackageRange::compatibility(&PubGrubPackage::base(name), range, None).cyan(), next_index.cyan(), "--index-strategy unsafe-best-match".green(), @@ -1530,7 +1541,7 @@ impl std::fmt::Display for PubGrubHint { "{}{} An index URL ({}) could not be queried due to a lack of valid authentication credentials ({}).", "hint".bold().cyan(), ":".bold(), - index.redacted().cyan(), + index.without_credentials().cyan(), "401 Unauthorized".red(), ) } @@ -1540,7 +1551,7 @@ impl std::fmt::Display for PubGrubHint { "{}{} An index URL ({}) could not be queried due to a lack of valid authentication credentials ({}).", "hint".bold().cyan(), ":".bold(), - index.redacted().cyan(), + index.without_credentials().cyan(), "403 Forbidden".red(), ) } @@ -1930,11 +1941,11 @@ impl std::fmt::Display for PackageRange<'_> { PackageRangeKind::Available => write!(f, "are available:")?, } } - for segment in &segments { + for (lower, upper) in &segments { if segments.len() > 1 { write!(f, "\n ")?; } - match segment { + match (lower, upper) { (Bound::Unbounded, Bound::Unbounded) => match self.kind { PackageRangeKind::Dependency => write!(f, "{package}")?, PackageRangeKind::Compatibility => write!(f, "all versions of {package}")?, @@ 
-1950,7 +1961,13 @@ impl std::fmt::Display for PackageRange<'_> { write!(f, "{package}>={v},<={b}")?; } } - (Bound::Included(v), Bound::Excluded(b)) => write!(f, "{package}>={v},<{b}")?, + (Bound::Included(v), Bound::Excluded(b)) => { + if let Some(prefix) = PrefixMatch::from_range(lower, upper) { + write!(f, "{package}{prefix}")?; + } else { + write!(f, "{package}>={v},<{b}")?; + } + } (Bound::Excluded(v), Bound::Unbounded) => write!(f, "{package}>{v}")?, (Bound::Excluded(v), Bound::Included(b)) => write!(f, "{package}>{v},<={b}")?, (Bound::Excluded(v), Bound::Excluded(b)) => write!(f, "{package}>{v},<{b}")?, diff --git a/crates/uv-resolver/src/python_requirement.rs b/crates/uv-resolver/src/python_requirement.rs index 178b77866..0dce9b4f7 100644 --- a/crates/uv-resolver/src/python_requirement.rs +++ b/crates/uv-resolver/src/python_requirement.rs @@ -1,11 +1,10 @@ use std::collections::Bound; +use uv_distribution_types::{RequiresPython, RequiresPythonRange}; use uv_pep440::Version; use uv_pep508::{MarkerEnvironment, MarkerTree}; use uv_python::{Interpreter, PythonVersion}; -use crate::{RequiresPython, RequiresPythonRange}; - #[derive(Debug, Clone, Eq, PartialEq)] pub struct PythonRequirement { source: PythonRequirementSource, diff --git a/crates/uv-resolver/src/redirect.rs b/crates/uv-resolver/src/redirect.rs index 830962d9a..7d2539aba 100644 --- a/crates/uv-resolver/src/redirect.rs +++ b/crates/uv-resolver/src/redirect.rs @@ -1,8 +1,7 @@ -use url::Url; - use uv_git::GitResolver; use uv_pep508::VerbatimUrl; use uv_pypi_types::{ParsedGitUrl, ParsedUrl, VerbatimParsedUrl}; +use uv_redacted::DisplaySafeUrl; /// Map a URL to a precise URL, if possible. pub(crate) fn url_to_precise(url: VerbatimParsedUrl, git: &GitResolver) -> VerbatimParsedUrl { @@ -26,7 +25,7 @@ pub(crate) fn url_to_precise(url: VerbatimParsedUrl, git: &GitResolver) -> Verba url: new_git_url, subdirectory: subdirectory.clone(), }; - let new_url = Url::from(new_parsed_url.clone()); + let new_url = DisplaySafeUrl::from(new_parsed_url.clone()); let new_verbatim_url = apply_redirect(&url.verbatim, new_url); VerbatimParsedUrl { parsed_url: ParsedUrl::Git(new_parsed_url), @@ -36,7 +35,7 @@ pub(crate) fn url_to_precise(url: VerbatimParsedUrl, git: &GitResolver) -> Verba /// Given a [`VerbatimUrl`] and a redirect, apply the redirect to the URL while preserving as much /// of the verbatim representation as possible. -fn apply_redirect(url: &VerbatimUrl, redirect: Url) -> VerbatimUrl { +fn apply_redirect(url: &VerbatimUrl, redirect: DisplaySafeUrl) -> VerbatimUrl { let redirect = VerbatimUrl::from_url(redirect); // The redirect should be the "same" URL, but with a specific commit hash added after the `@`. @@ -85,9 +84,8 @@ fn apply_redirect(url: &VerbatimUrl, redirect: Url) -> VerbatimUrl { #[cfg(test)] mod tests { - use url::Url; - use uv_pep508::VerbatimUrl; + use uv_redacted::DisplaySafeUrl; use crate::redirect::apply_redirect; @@ -97,8 +95,9 @@ mod tests { // to the given representation. let verbatim = VerbatimUrl::parse_url("https://github.com/flask.git")? .with_given("git+https://github.com/flask.git"); - let redirect = - Url::parse("https://github.com/flask.git@b90a4f1f4a370e92054b9cc9db0efcb864f87ebe")?; + let redirect = DisplaySafeUrl::parse( + "https://github.com/flask.git@b90a4f1f4a370e92054b9cc9db0efcb864f87ebe", + )?; let expected = VerbatimUrl::parse_url( "https://github.com/flask.git@b90a4f1f4a370e92054b9cc9db0efcb864f87ebe", @@ -111,8 +110,9 @@ mod tests { // representation. 
let verbatim = VerbatimUrl::parse_url("https://github.com/flask.git@main")? .with_given("git+https://${DOMAIN}.com/flask.git@main"); - let redirect = - Url::parse("https://github.com/flask.git@b90a4f1f4a370e92054b9cc9db0efcb864f87ebe")?; + let redirect = DisplaySafeUrl::parse( + "https://github.com/flask.git@b90a4f1f4a370e92054b9cc9db0efcb864f87ebe", + )?; let expected = VerbatimUrl::parse_url( "https://github.com/flask.git@b90a4f1f4a370e92054b9cc9db0efcb864f87ebe", @@ -123,8 +123,9 @@ mod tests { // If there's a conflict after the `@`, discard the original representation. let verbatim = VerbatimUrl::parse_url("https://github.com/flask.git@main")? .with_given("git+https://github.com/flask.git@${TAG}"); - let redirect = - Url::parse("https://github.com/flask.git@b90a4f1f4a370e92054b9cc9db0efcb864f87ebe")?; + let redirect = DisplaySafeUrl::parse( + "https://github.com/flask.git@b90a4f1f4a370e92054b9cc9db0efcb864f87ebe", + )?; let expected = VerbatimUrl::parse_url( "https://github.com/flask.git@b90a4f1f4a370e92054b9cc9db0efcb864f87ebe", @@ -134,7 +135,7 @@ mod tests { // We should preserve subdirectory fragments. let verbatim = VerbatimUrl::parse_url("https://github.com/flask.git#subdirectory=src")? .with_given("git+https://github.com/flask.git#subdirectory=src"); - let redirect = Url::parse( + let redirect = DisplaySafeUrl::parse( "https://github.com/flask.git@b90a4f1f4a370e92054b9cc9db0efcb864f87ebe#subdirectory=src", )?; diff --git a/crates/uv-resolver/src/resolution/display.rs b/crates/uv-resolver/src/resolution/display.rs index 2968074b7..318fb4e54 100644 --- a/crates/uv-resolver/src/resolution/display.rs +++ b/crates/uv-resolver/src/resolution/display.rs @@ -14,7 +14,6 @@ use crate::{ResolverEnvironment, ResolverOutput}; /// A [`std::fmt::Display`] implementation for the resolution graph. #[derive(Debug)] -#[allow(clippy::struct_excessive_bools)] pub struct DisplayResolutionGraph<'a> { /// The underlying graph. resolution: &'a ResolverOutput, @@ -290,7 +289,7 @@ impl std::fmt::Display for DisplayResolutionGraph<'_> { // `# from https://pypi.org/simple`). 
if self.include_index_annotation { if let Some(index) = node.dist.index() { - let url = index.redacted(); + let url = index.without_credentials(); writeln!(f, "{}", format!(" # from {url}").green())?; } } diff --git a/crates/uv-resolver/src/resolution/output.rs b/crates/uv-resolver/src/resolution/output.rs index 5df5ae6c3..928b9c605 100644 --- a/crates/uv-resolver/src/resolution/output.rs +++ b/crates/uv-resolver/src/resolution/output.rs @@ -12,8 +12,8 @@ use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet}; use uv_configuration::{Constraints, Overrides}; use uv_distribution::Metadata; use uv_distribution_types::{ - Dist, DistributionMetadata, Edge, IndexUrl, Name, Node, Requirement, ResolutionDiagnostic, - ResolvedDist, VersionId, VersionOrUrlRef, + Dist, DistributionMetadata, Edge, IndexUrl, Name, Node, Requirement, RequiresPython, + ResolutionDiagnostic, ResolvedDist, VersionId, VersionOrUrlRef, }; use uv_git::GitResolver; use uv_normalize::{ExtraName, GroupName, PackageName}; @@ -30,8 +30,7 @@ use crate::resolution_mode::ResolutionStrategy; use crate::resolver::{Resolution, ResolutionDependencyEdge, ResolutionPackage}; use crate::universal_marker::{ConflictMarker, UniversalMarker}; use crate::{ - InMemoryIndex, MetadataResponse, Options, PythonRequirement, RequiresPython, ResolveError, - VersionsResponse, + InMemoryIndex, MetadataResponse, Options, PythonRequirement, ResolveError, VersionsResponse, }; /// The output of a successful resolution. diff --git a/crates/uv-resolver/src/resolution/requirements_txt.rs b/crates/uv-resolver/src/resolution/requirements_txt.rs index 5ad6480c2..bcdef207b 100644 --- a/crates/uv-resolver/src/resolution/requirements_txt.rs +++ b/crates/uv-resolver/src/resolution/requirements_txt.rs @@ -4,16 +4,16 @@ use std::path::Path; use itertools::Itertools; -use uv_distribution_types::{DistributionMetadata, Name, ResolvedDist, Verbatim, VersionOrUrlRef}; +use uv_distribution_types::{ + DistributionMetadata, Name, RequiresPython, ResolvedDist, SimplifiedMarkerTree, Verbatim, + VersionOrUrlRef, +}; use uv_normalize::{ExtraName, PackageName}; use uv_pep440::Version; use uv_pep508::{MarkerTree, Scheme, split_scheme}; use uv_pypi_types::HashDigest; -use crate::{ - requires_python::{RequiresPython, SimplifiedMarkerTree}, - resolution::AnnotatedDist, -}; +use crate::resolution::AnnotatedDist; #[derive(Debug, Clone)] /// A pinned package with its resolved distribution and all the extras that were pinned for it. diff --git a/crates/uv-resolver/src/resolver/availability.rs b/crates/uv-resolver/src/resolver/availability.rs index d2e9296b9..64721b4b6 100644 --- a/crates/uv-resolver/src/resolver/availability.rs +++ b/crates/uv-resolver/src/resolver/availability.rs @@ -7,7 +7,7 @@ use uv_platform_tags::{AbiTag, Tags}; /// The reason why a package or a version cannot be used. #[derive(Debug, Clone, Eq, PartialEq)] -pub(crate) enum UnavailableReason { +pub enum UnavailableReason { /// The entire package cannot be used. Package(UnavailablePackage), /// A single version cannot be used. @@ -29,7 +29,7 @@ impl Display for UnavailableReason { /// Most variant are from [`MetadataResponse`] without the error source, since we don't format /// the source and we want to merge unavailable messages across versions. #[derive(Debug, Clone, Eq, PartialEq)] -pub(crate) enum UnavailableVersion { +pub enum UnavailableVersion { /// Version is incompatible because it has no usable distributions IncompatibleDist(IncompatibleDist), /// The wheel metadata was found, but could not be parsed. 
@@ -123,7 +123,7 @@ impl From<&MetadataUnavailable> for UnavailableVersion { /// The package is unavailable and cannot be used. #[derive(Debug, Clone, Eq, PartialEq)] -pub(crate) enum UnavailablePackage { +pub enum UnavailablePackage { /// Index lookups were disabled (i.e., `--no-index`) and the package was not found in a flat index (i.e. from `--find-links`). NoIndex, /// Network requests were disabled (i.e., `--offline`), and the package was not found in the cache. diff --git a/crates/uv-resolver/src/resolver/environment.rs b/crates/uv-resolver/src/resolver/environment.rs index 2a87bac47..6e816f991 100644 --- a/crates/uv-resolver/src/resolver/environment.rs +++ b/crates/uv-resolver/src/resolver/environment.rs @@ -1,14 +1,14 @@ use std::sync::Arc; use tracing::trace; +use uv_distribution_types::{RequiresPython, RequiresPythonRange}; use uv_pep440::VersionSpecifiers; use uv_pep508::{MarkerEnvironment, MarkerTree}; use uv_pypi_types::{ConflictItem, ConflictItemRef, ResolverMarkerEnvironment}; use crate::pubgrub::{PubGrubDependency, PubGrubPackage}; -use crate::requires_python::RequiresPythonRange; use crate::resolver::ForkState; use crate::universal_marker::{ConflictMarker, UniversalMarker}; -use crate::{PythonRequirement, RequiresPython, ResolveError}; +use crate::{PythonRequirement, ResolveError}; /// Represents one or more marker environments for a resolution. /// @@ -198,6 +198,14 @@ impl ResolverEnvironment { crate::marker::requires_python(pep508_marker) } + /// For a universal resolution, return the markers of the current fork. + pub(crate) fn fork_markers(&self) -> Option { + match self.kind { + Kind::Specific { .. } => None, + Kind::Universal { markers, .. } => Some(markers), + } + } + /// Narrow this environment given the forking markers. /// /// This effectively intersects any markers in this environment with the @@ -620,7 +628,7 @@ mod tests { use uv_pep440::{LowerBound, UpperBound, Version}; use uv_pep508::{MarkerEnvironment, MarkerEnvironmentBuilder}; - use crate::requires_python::{RequiresPython, RequiresPythonRange}; + use uv_distribution_types::{RequiresPython, RequiresPythonRange}; use super::*; diff --git a/crates/uv-resolver/src/resolver/mod.rs b/crates/uv-resolver/src/resolver/mod.rs index 0194611d4..32d684f04 100644 --- a/crates/uv-resolver/src/resolver/mod.rs +++ b/crates/uv-resolver/src/resolver/mod.rs @@ -31,7 +31,9 @@ use uv_distribution_types::{ use uv_git::GitResolver; use uv_normalize::{ExtraName, GroupName, PackageName}; use uv_pep440::{MIN_VERSION, Version, VersionSpecifiers, release_specifiers_to_ranges}; -use uv_pep508::{MarkerExpression, MarkerOperator, MarkerTree, MarkerValueString}; +use uv_pep508::{ + MarkerEnvironment, MarkerExpression, MarkerOperator, MarkerTree, MarkerValueString, +}; use uv_platform_tags::Tags; use uv_pypi_types::{ConflictItem, ConflictItemRef, Conflicts, VerbatimParsedUrl}; use uv_types::{BuildContext, HashStrategy, InstalledPackagesProvider}; @@ -45,7 +47,7 @@ use crate::fork_strategy::ForkStrategy; use crate::fork_urls::ForkUrls; use crate::manifest::Manifest; use crate::pins::FilePins; -use crate::preferences::Preferences; +use crate::preferences::{PreferenceSource, Preferences}; use crate::pubgrub::{ PubGrubDependency, PubGrubDistribution, PubGrubPackage, PubGrubPackageInner, PubGrubPriorities, PubGrubPython, @@ -74,7 +76,10 @@ use crate::resolver::system::SystemDependency; pub(crate) use crate::resolver::urls::Urls; use crate::universal_marker::{ConflictMarker, UniversalMarker}; use crate::yanks::AllowedYanks; -use 
crate::{DependencyMode, Exclusions, FlatIndex, Options, ResolutionMode, VersionMap, marker}; +use crate::{ + DependencyMode, ExcludeNewer, Exclusions, FlatIndex, Options, ResolutionMode, VersionMap, + marker, +}; pub(crate) use provider::MetadataUnavailable; use uv_torch::TorchStrategy; @@ -115,6 +120,8 @@ struct ResolverState { dependency_mode: DependencyMode, hasher: HashStrategy, env: ResolverEnvironment, + // The environment of the current Python interpreter. + current_environment: MarkerEnvironment, tags: Option, python_requirement: PythonRequirement, conflicts: Conflicts, @@ -158,6 +165,7 @@ impl<'a, Context: BuildContext, InstalledPackages: InstalledPackagesProvider> options: Options, python_requirement: &'a PythonRequirement, env: ResolverEnvironment, + current_environment: &MarkerEnvironment, conflicts: Conflicts, tags: Option<&'a Tags>, flat_index: &'a FlatIndex, @@ -184,6 +192,7 @@ impl<'a, Context: BuildContext, InstalledPackages: InstalledPackagesProvider> options, hasher, env, + current_environment, tags.cloned(), python_requirement, conflicts, @@ -206,6 +215,7 @@ impl options: Options, hasher: &HashStrategy, env: ResolverEnvironment, + current_environment: &MarkerEnvironment, tags: Option, python_requirement: &PythonRequirement, conflicts: Conflicts, @@ -234,6 +244,7 @@ impl hasher: hasher.clone(), locations: locations.clone(), env, + current_environment: current_environment.clone(), tags, python_requirement: python_requirement.clone(), conflicts, @@ -354,6 +365,8 @@ impl ResolverState ResolverState ResolverState ResolverState, visited: &FxHashSet, ) -> ResolveError { err = NoSolutionError::collapse_local_version_segments(NoSolutionError::collapse_proxies( @@ -2556,10 +2572,27 @@ impl ResolverState= exclude_newer.timestamp_millis() + }) + }) { + continue; + } + } + + package_versions.insert(version.clone()); + } } // Track the indexes in which the package is available. @@ -2589,6 +2622,7 @@ impl ResolverState; pub type WheelMetadataResult = Result; diff --git a/crates/uv-resolver/src/resolver/reporter.rs b/crates/uv-resolver/src/resolver/reporter.rs index f2bf0f006..f2eeea3fa 100644 --- a/crates/uv-resolver/src/resolver/reporter.rs +++ b/crates/uv-resolver/src/resolver/reporter.rs @@ -1,9 +1,8 @@ use std::sync::Arc; -use url::Url; - use uv_distribution_types::{BuildableSource, VersionOrUrlRef}; use uv_normalize::PackageName; +use uv_redacted::DisplaySafeUrl; pub type BuildId = usize; @@ -31,10 +30,10 @@ pub trait Reporter: Send + Sync { fn on_download_complete(&self, name: &PackageName, id: usize); /// Callback to invoke when a repository checkout begins. - fn on_checkout_start(&self, url: &Url, rev: &str) -> usize; + fn on_checkout_start(&self, url: &DisplaySafeUrl, rev: &str) -> usize; /// Callback to invoke when a repository checkout completes. 
- fn on_checkout_complete(&self, url: &Url, rev: &str, id: usize); + fn on_checkout_complete(&self, url: &DisplaySafeUrl, rev: &str, id: usize); } impl dyn Reporter { @@ -62,11 +61,11 @@ impl uv_distribution::Reporter for Facade { self.reporter.on_build_complete(source, id); } - fn on_checkout_start(&self, url: &Url, rev: &str) -> usize { + fn on_checkout_start(&self, url: &DisplaySafeUrl, rev: &str) -> usize { self.reporter.on_checkout_start(url, rev) } - fn on_checkout_complete(&self, url: &Url, rev: &str, id: usize) { + fn on_checkout_complete(&self, url: &DisplaySafeUrl, rev: &str, id: usize) { self.reporter.on_checkout_complete(url, rev, id); } diff --git a/crates/uv-resolver/src/resolver/system.rs b/crates/uv-resolver/src/resolver/system.rs index 49c23952b..a815697da 100644 --- a/crates/uv-resolver/src/resolver/system.rs +++ b/crates/uv-resolver/src/resolver/system.rs @@ -1,10 +1,10 @@ use std::str::FromStr; use pubgrub::Ranges; -use url::Url; use uv_normalize::PackageName; use uv_pep440::Version; +use uv_redacted::DisplaySafeUrl; use uv_torch::TorchBackend; use crate::pubgrub::{PubGrubDependency, PubGrubPackage, PubGrubPackageInner}; @@ -21,13 +21,19 @@ impl SystemDependency { /// Extract a [`SystemDependency`] from an index URL. /// /// For example, given `https://download.pytorch.org/whl/cu124`, returns CUDA 12.4. - pub(super) fn from_index(index: &Url) -> Option { + pub(super) fn from_index(index: &DisplaySafeUrl) -> Option { let backend = TorchBackend::from_index(index)?; - let cuda_version = backend.cuda_version()?; - Some(Self { - name: PackageName::from_str("cuda").unwrap(), - version: cuda_version, - }) + if let Some(cuda_version) = backend.cuda_version() { + Some(Self { + name: PackageName::from_str("cuda").unwrap(), + version: cuda_version, + }) + } else { + backend.rocm_version().map(|rocm_version| Self { + name: PackageName::from_str("rocm").unwrap(), + version: rocm_version, + }) + } } } @@ -51,22 +57,21 @@ impl From for PubGrubDependency { mod tests { use std::str::FromStr; - use url::Url; - use uv_normalize::PackageName; use uv_pep440::Version; + use uv_redacted::DisplaySafeUrl; use crate::resolver::system::SystemDependency; #[test] fn pypi() { - let url = Url::parse("https://pypi.org/simple").unwrap(); + let url = DisplaySafeUrl::parse("https://pypi.org/simple").unwrap(); assert_eq!(SystemDependency::from_index(&url), None); } #[test] fn pytorch_cuda_12_4() { - let url = Url::parse("https://download.pytorch.org/whl/cu124").unwrap(); + let url = DisplaySafeUrl::parse("https://download.pytorch.org/whl/cu124").unwrap(); assert_eq!( SystemDependency::from_index(&url), Some(SystemDependency { @@ -78,7 +83,13 @@ mod tests { #[test] fn pytorch_cpu() { - let url = Url::parse("https://download.pytorch.org/whl/cpu").unwrap(); + let url = DisplaySafeUrl::parse("https://download.pytorch.org/whl/cpu").unwrap(); + assert_eq!(SystemDependency::from_index(&url), None); + } + + #[test] + fn pytorch_xpu() { + let url = DisplaySafeUrl::parse("https://download.pytorch.org/whl/xpu").unwrap(); assert_eq!(SystemDependency::from_index(&url), None); } } diff --git a/crates/uv-resolver/src/resolver/urls.rs b/crates/uv-resolver/src/resolver/urls.rs index a41f33371..73d190b4a 100644 --- a/crates/uv-resolver/src/resolver/urls.rs +++ b/crates/uv-resolver/src/resolver/urls.rs @@ -4,7 +4,6 @@ use same_file::is_same_file; use tracing::debug; use uv_cache_key::CanonicalUrl; -use uv_distribution_types::Verbatim; use uv_git::GitResolver; use uv_normalize::PackageName; use uv_pep508::{MarkerTree, 
VerbatimUrl}; @@ -170,8 +169,8 @@ impl Urls { let [allowed_url] = matching_urls.as_slice() else { let mut conflicting_urls: Vec<_> = matching_urls .into_iter() - .map(|parsed_url| parsed_url.verbatim.verbatim().to_string()) - .chain(std::iter::once(verbatim_url.verbatim().to_string())) + .map(|parsed_url| parsed_url.parsed_url.clone()) + .chain(std::iter::once(parsed_url.clone())) .collect(); conflicting_urls.sort(); return Err(ResolveError::ConflictingUrls { diff --git a/crates/uv-resolver/src/version_map.rs b/crates/uv-resolver/src/version_map.rs index 44e70e73b..63132ad0d 100644 --- a/crates/uv-resolver/src/version_map.rs +++ b/crates/uv-resolver/src/version_map.rs @@ -11,7 +11,8 @@ use uv_configuration::BuildOptions; use uv_distribution_filename::{DistFilename, WheelFilename}; use uv_distribution_types::{ HashComparison, IncompatibleSource, IncompatibleWheel, IndexUrl, PrioritizedDist, - RegistryBuiltWheel, RegistrySourceDist, SourceDistCompatibility, WheelCompatibility, + RegistryBuiltWheel, RegistrySourceDist, RequiresPython, SourceDistCompatibility, + WheelCompatibility, }; use uv_normalize::PackageName; use uv_pep440::Version; @@ -21,7 +22,7 @@ use uv_types::HashStrategy; use uv_warnings::warn_user_once; use crate::flat_index::FlatDistributions; -use crate::{ExcludeNewer, RequiresPython, yanks::AllowedYanks}; +use crate::{ExcludeNewer, yanks::AllowedYanks}; /// A map from versions to distributions. #[derive(Debug)] @@ -344,7 +345,6 @@ struct VersionMapEager { /// avoiding another conversion step into a fully filled out `VersionMap` can /// provide substantial savings in some cases. #[derive(Debug)] -#[allow(clippy::struct_excessive_bools)] struct VersionMapLazy { /// A map from version to possibly-initialized distribution. map: BTreeMap, diff --git a/crates/uv-scripts/Cargo.toml b/crates/uv-scripts/Cargo.toml index 32a6b68b9..124eb1fea 100644 --- a/crates/uv-scripts/Cargo.toml +++ b/crates/uv-scripts/Cargo.toml @@ -14,12 +14,15 @@ workspace = true uv-pep440 = { workspace = true } uv-pep508 = { workspace = true } uv-pypi-types = { workspace = true } +uv-redacted = { workspace = true } uv-settings = { workspace = true } +uv-warnings = { workspace = true } uv-workspace = { workspace = true } fs-err = { workspace = true, features = ["tokio"] } indoc = { workspace = true } memchr = { workspace = true } +regex = { workspace = true } serde = { workspace = true, features = ["derive"] } thiserror = { workspace = true } toml = { workspace = true } diff --git a/crates/uv-scripts/src/lib.rs b/crates/uv-scripts/src/lib.rs index 5f169f126..b80cdc219 100644 --- a/crates/uv-scripts/src/lib.rs +++ b/crates/uv-scripts/src/lib.rs @@ -12,7 +12,9 @@ use url::Url; use uv_pep440::VersionSpecifiers; use uv_pep508::PackageName; use uv_pypi_types::VerbatimParsedUrl; +use uv_redacted::DisplaySafeUrl; use uv_settings::{GlobalOptions, ResolverInstallerOptions}; +use uv_warnings::warn_user; use uv_workspace::pyproject::Sources; static FINDER: LazyLock = LazyLock::new(|| Finder::new(b"# /// script")); @@ -25,7 +27,7 @@ pub enum Pep723Item { /// A PEP 723 script provided via `stdin`. Stdin(Pep723Metadata), /// A PEP 723 script provided via a remote URL. 
- Remote(Pep723Metadata, Url), + Remote(Pep723Metadata, DisplaySafeUrl), } impl Pep723Item { @@ -237,11 +239,25 @@ impl Pep723Script { let metadata = serialize_metadata(&default_metadata); let script = if let Some(existing_contents) = existing_contents { + let (mut shebang, contents) = extract_shebang(&existing_contents)?; + if !shebang.is_empty() { + shebang.push_str("\n#\n"); + // If the shebang doesn't contain `uv`, it's probably something like + // `#! /usr/bin/env python`, which isn't going to respect the inline metadata. + // Issue a warning for users who might not know that. + // TODO: There are a lot of mistakes we could consider detecting here, like + // `uv run` without `--script` when the file doesn't end in `.py`. + if !regex::Regex::new(r"\buv\b").unwrap().is_match(&shebang) { + warn_user!( + "If you execute {} directly, it might ignore its inline metadata.\nConsider replacing its shebang with: {}", + file.to_string_lossy().cyan(), + "#!/usr/bin/env -S uv run --script".cyan(), + ); + } + } indoc::formatdoc! {r" - {metadata} - {content} - ", - content = String::from_utf8(existing_contents).map_err(|err| Pep723Error::Utf8(err.utf8_error()))?} + {shebang}{metadata} + {contents}" } } else { indoc::formatdoc! {r#" {metadata} diff --git a/crates/uv-settings/Cargo.toml b/crates/uv-settings/Cargo.toml index fd4c3c739..1cc376eea 100644 --- a/crates/uv-settings/Cargo.toml +++ b/crates/uv-settings/Cargo.toml @@ -28,10 +28,12 @@ uv-options-metadata = { workspace = true } uv-pep508 = { workspace = true } uv-pypi-types = { workspace = true } uv-python = { workspace = true, features = ["schemars", "clap"] } +uv-redacted = { workspace = true } uv-resolver = { workspace = true, features = ["schemars", "clap"] } uv-static = { workspace = true } uv-torch = { workspace = true, features = ["schemars", "clap"] } uv-warnings = { workspace = true } +uv-workspace = { workspace = true, features = ["schemars", "clap"] } clap = { workspace = true } fs-err = { workspace = true } diff --git a/crates/uv-settings/src/combine.rs b/crates/uv-settings/src/combine.rs index 3469cedce..8edbd2a05 100644 --- a/crates/uv-settings/src/combine.rs +++ b/crates/uv-settings/src/combine.rs @@ -11,8 +11,10 @@ use uv_distribution_types::{Index, IndexUrl, PipExtraIndex, PipFindLinks, PipInd use uv_install_wheel::LinkMode; use uv_pypi_types::{SchemaConflicts, SupportedEnvironments}; use uv_python::{PythonDownloads, PythonPreference, PythonVersion}; +use uv_redacted::DisplaySafeUrl; use uv_resolver::{AnnotationStyle, ExcludeNewer, ForkStrategy, PrereleaseMode, ResolutionMode}; use uv_torch::TorchMode; +use uv_workspace::pyproject_mut::AddBoundsKind; use crate::{FilesystemOptions, Options, PipOptions}; @@ -73,6 +75,7 @@ macro_rules! 
impl_combine_or { }; } +impl_combine_or!(AddBoundsKind); impl_combine_or!(AnnotationStyle); impl_combine_or!(ExcludeNewer); impl_combine_or!(ExportFormat); @@ -82,6 +85,7 @@ impl_combine_or!(IndexStrategy); impl_combine_or!(IndexUrl); impl_combine_or!(KeyringProviderType); impl_combine_or!(LinkMode); +impl_combine_or!(DisplaySafeUrl); impl_combine_or!(NonZeroUsize); impl_combine_or!(PathBuf); impl_combine_or!(PipExtraIndex); diff --git a/crates/uv-settings/src/settings.rs b/crates/uv-settings/src/settings.rs index 58632511b..d80ccce2f 100644 --- a/crates/uv-settings/src/settings.rs +++ b/crates/uv-settings/src/settings.rs @@ -1,7 +1,6 @@ use std::{fmt::Debug, num::NonZeroUsize, path::Path, path::PathBuf}; use serde::{Deserialize, Serialize}; -use url::Url; use uv_cache_info::CacheKey; use uv_configuration::{ @@ -17,9 +16,11 @@ use uv_normalize::{ExtraName, PackageName, PipGroupName}; use uv_pep508::Requirement; use uv_pypi_types::{SupportedEnvironments, VerbatimParsedUrl}; use uv_python::{PythonDownloads, PythonPreference, PythonVersion}; +use uv_redacted::DisplaySafeUrl; use uv_resolver::{AnnotationStyle, ExcludeNewer, ForkStrategy, PrereleaseMode, ResolutionMode}; use uv_static::EnvVars; use uv_torch::TorchMode; +use uv_workspace::pyproject_mut::AddBoundsKind; /// A `pyproject.toml` with an (optional) `[tool.uv]` section. #[allow(dead_code)] @@ -40,6 +41,7 @@ pub(crate) struct Tools { #[derive(Debug, Clone, Default, Deserialize, CombineOptions, OptionsMetadata)] #[serde(from = "OptionsWire", rename_all = "kebab-case")] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[cfg_attr(feature = "schemars", schemars(!from))] pub struct Options { #[serde(flatten)] pub globals: GlobalOptions, @@ -53,6 +55,9 @@ pub struct Options { #[serde(flatten)] pub publish: PublishOptions, + #[serde(flatten)] + pub add: AddOptions, + #[option_group] pub pip: Option, @@ -136,6 +141,9 @@ pub struct Options { #[cfg_attr(feature = "schemars", schemars(skip))] pub default_groups: Option, + #[cfg_attr(feature = "schemars", schemars(skip))] + pub dependency_groups: Option, + #[cfg_attr(feature = "schemars", schemars(skip))] pub managed: Option, @@ -603,16 +611,16 @@ pub struct ResolverInstallerOptions { "# )] pub no_build_isolation_package: Option>, - /// Limit candidate packages to those that were uploaded prior to the given date. + /// Limit candidate packages to those that were uploaded prior to a given point in time. /// - /// Accepts both [RFC 3339](https://www.rfc-editor.org/rfc/rfc3339.html) timestamps (e.g., - /// `2006-12-02T02:07:43Z`) and local dates in the same format (e.g., `2006-12-02`) in your - /// system's configured time zone. + /// Accepts a superset of [RFC 3339](https://www.rfc-editor.org/rfc/rfc3339.html) (e.g., + /// `2006-12-02T02:07:43Z`). A full timestamp is required to ensure that the resolver will + /// behave consistently across timezones. #[option( default = "None", value_type = "str", example = r#" - exclude-newer = "2006-12-02" + exclude-newer = "2006-12-02T02:07:43Z" "# )] pub exclude_newer: Option, @@ -1491,7 +1499,7 @@ pub struct PipOptions { /// Hash-checking mode introduces a number of additional constraints: /// /// - Git dependencies are not supported. - /// - Editable installs are not supported. + /// - Editable installations are not supported. /// - Local dependencies are not supported, unless they point to a specific wheel (`.whl`) or /// source archive (`.zip`, `.tar.gz`), as opposed to a directory. 
#[option( @@ -1837,10 +1845,14 @@ pub struct OptionsWire { // #[serde(flatten)] // publish: PublishOptions - publish_url: Option, + publish_url: Option, trusted_publishing: Option, check_url: Option, + // #[serde(flatten)] + // add: AddOptions + add_bounds: Option, + pip: Option, cache_keys: Option>, @@ -1862,6 +1874,7 @@ pub struct OptionsWire { managed: Option, r#package: Option, default_groups: Option, + dependency_groups: Option, dev_dependencies: Option, // Build backend @@ -1926,9 +1939,11 @@ impl From for Options { workspace, sources, default_groups, + dependency_groups, dev_dependencies, managed, package, + add_bounds: bounds, // Used by the build backend build_backend, } = value; @@ -1996,10 +2011,12 @@ impl From for Options { trusted_publishing, check_url, }, + add: AddOptions { add_bounds: bounds }, workspace, sources, dev_dependencies, default_groups, + dependency_groups, managed, package, } @@ -2019,7 +2036,7 @@ pub struct PublishOptions { publish-url = "https://test.pypi.org/legacy/" "# )] - pub publish_url: Option, + pub publish_url: Option, /// Configure trusted publishing via GitHub Actions. /// @@ -2057,3 +2074,28 @@ pub struct PublishOptions { )] pub check_url: Option, } + +#[derive(Debug, Clone, Default, PartialEq, Eq, Deserialize, CombineOptions, OptionsMetadata)] +#[serde(rename_all = "kebab-case")] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub struct AddOptions { + /// The default version specifier when adding a dependency. + /// + /// When adding a dependency to the project, if no constraint or URL is provided, a constraint + /// is added based on the latest compatible version of the package. By default, a lower bound + /// constraint is used, e.g., `>=1.2.3`. + /// + /// When `--frozen` is provided, no resolution is performed, and dependencies are always added + /// without constraints. + /// + /// This option is in preview and may change in any future release. + #[option( + default = "\"lower\"", + value_type = "str", + example = r#" + add-bounds = "major" + "#, + possible_values = true + )] + pub add_bounds: Option, +} diff --git a/crates/uv-small-str/src/lib.rs b/crates/uv-small-str/src/lib.rs index 7395c090a..1524f1b99 100644 --- a/crates/uv-small-str/src/lib.rs +++ b/crates/uv-small-str/src/lib.rs @@ -147,15 +147,15 @@ impl PartialOrd for rkyv::string::ArchivedString { /// An [`schemars::JsonSchema`] implementation for [`SmallString`]. #[cfg(feature = "schemars")] impl schemars::JsonSchema for SmallString { - fn is_referenceable() -> bool { - String::is_referenceable() + fn inline_schema() -> bool { + true } - fn schema_name() -> String { + fn schema_name() -> Cow<'static, str> { String::schema_name() } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - String::json_schema(_gen) + fn json_schema(generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + String::json_schema(generator) } } diff --git a/crates/uv-static/src/env_vars.rs b/crates/uv-static/src/env_vars.rs index 58191fe64..5b91fccea 100644 --- a/crates/uv-static/src/env_vars.rs +++ b/crates/uv-static/src/env_vars.rs @@ -359,10 +359,6 @@ impl EnvVars { #[attr_hidden] pub const UV_INTERNAL__SHOW_DERIVATION_TREE: &'static str = "UV_INTERNAL__SHOW_DERIVATION_TREE"; - /// Used to set a temporary directory for some tests. - #[attr_hidden] - pub const UV_INTERNAL__TEST_DIR: &'static str = "UV_INTERNAL__TEST_DIR"; - /// Path to system-level configuration directory on Unix systems. 
pub const XDG_CONFIG_DIRS: &'static str = "XDG_CONFIG_DIRS"; @@ -406,6 +402,9 @@ impl EnvVars { /// Timeout (in seconds) for HTTP requests. (default: 30 s) pub const UV_HTTP_TIMEOUT: &'static str = "UV_HTTP_TIMEOUT"; + /// The number of retries for HTTP requests. (default: 3) + pub const UV_HTTP_RETRIES: &'static str = "UV_HTTP_RETRIES"; + /// Timeout (in seconds) for HTTP requests. Equivalent to `UV_HTTP_TIMEOUT`. pub const UV_REQUEST_TIMEOUT: &'static str = "UV_REQUEST_TIMEOUT"; @@ -663,10 +662,17 @@ impl EnvVars { #[attr_hidden] pub const UV_TEST_VENDOR_LINKS_URL: &'static str = "UV_TEST_VENDOR_LINKS_URL"; + /// Used to disable delay for HTTP retries in tests. + pub const UV_TEST_NO_HTTP_RETRY_DELAY: &'static str = "UV_TEST_NO_HTTP_RETRY_DELAY"; + /// Used to set an index url for tests. #[attr_hidden] pub const UV_TEST_INDEX_URL: &'static str = "UV_TEST_INDEX_URL"; + /// Used to set the GitHub fast-path url for tests. + #[attr_hidden] + pub const UV_GITHUB_FAST_PATH_URL: &'static str = "UV_GITHUB_FAST_PATH_URL"; + /// Hide progress messages with non-deterministic order in tests. #[attr_hidden] pub const UV_TEST_NO_CLI_PROGRESS: &'static str = "UV_TEST_NO_CLI_PROGRESS"; @@ -718,13 +724,20 @@ impl EnvVars { /// This is a quasi-standard variable, described, e.g., in `ncurses(3x)`. pub const COLUMNS: &'static str = "COLUMNS"; - /// The CUDA driver version to assume when inferring the PyTorch backend. + /// The CUDA driver version to assume when inferring the PyTorch backend (e.g., `550.144.03`). #[attr_hidden] pub const UV_CUDA_DRIVER_VERSION: &'static str = "UV_CUDA_DRIVER_VERSION"; + /// The AMD GPU architecture to assume when inferring the PyTorch backend (e.g., `gfx1100`). + #[attr_hidden] + pub const UV_AMD_GPU_ARCHITECTURE: &'static str = "UV_AMD_GPU_ARCHITECTURE"; + /// Equivalent to the `--torch-backend` command-line argument (e.g., `cpu`, `cu126`, or `auto`). pub const UV_TORCH_BACKEND: &'static str = "UV_TORCH_BACKEND"; /// Equivalent to the `--project` command-line argument. pub const UV_PROJECT: &'static str = "UV_PROJECT"; + + /// Disable GitHub-specific requests that allow uv to skip `git fetch` in some circumstances. 
+ pub const UV_NO_GITHUB_FAST_PATH: &'static str = "UV_NO_GITHUB_FAST_PATH"; } diff --git a/crates/uv-tool/Cargo.toml b/crates/uv-tool/Cargo.toml index d01a3209d..210c17c00 100644 --- a/crates/uv-tool/Cargo.toml +++ b/crates/uv-tool/Cargo.toml @@ -17,6 +17,7 @@ workspace = true [dependencies] uv-cache = { workspace = true } +uv-configuration = { workspace = true } uv-dirs = { workspace = true } uv-distribution-types = { workspace = true } uv-fs = { workspace = true } diff --git a/crates/uv-tool/src/lib.rs b/crates/uv-tool/src/lib.rs index f85075ea6..ee80a2854 100644 --- a/crates/uv-tool/src/lib.rs +++ b/crates/uv-tool/src/lib.rs @@ -1,6 +1,7 @@ use core::fmt; use fs_err as fs; +use uv_configuration::PreviewMode; use uv_dirs::user_executable_directory; use uv_pep440::Version; use uv_pep508::{InvalidNameError, PackageName}; @@ -257,6 +258,7 @@ impl InstalledTools { &self, name: &PackageName, interpreter: Interpreter, + preview: PreviewMode, ) -> Result { let environment_path = self.tool_dir(name); @@ -286,6 +288,8 @@ impl InstalledTools { false, false, false, + false, + preview, )?; Ok(venv) diff --git a/crates/uv-tool/src/tool.rs b/crates/uv-tool/src/tool.rs index df8571c94..cce3a2f58 100644 --- a/crates/uv-tool/src/tool.rs +++ b/crates/uv-tool/src/tool.rs @@ -7,6 +7,7 @@ use toml_edit::{Array, Item, Table, Value, value}; use uv_distribution_types::Requirement; use uv_fs::{PortablePath, Simplified}; use uv_pypi_types::VerbatimParsedUrl; +use uv_python::PythonRequest; use uv_settings::ToolOptions; /// A tool entry. @@ -22,7 +23,7 @@ pub struct Tool { /// The build constraints requested by the user during installation. build_constraints: Vec, /// The Python requested by the user during installation. - python: Option, + python: Option, /// A mapping of entry point names to their metadata. entrypoints: Vec, /// The [`ToolOptions`] used to install this tool. @@ -40,7 +41,7 @@ struct ToolWire { overrides: Vec, #[serde(default)] build_constraint_dependencies: Vec, - python: Option, + python: Option, entrypoints: Vec, #[serde(default)] options: ToolOptions, @@ -164,7 +165,7 @@ impl Tool { constraints: Vec, overrides: Vec, build_constraints: Vec, - python: Option, + python: Option, entrypoints: impl Iterator, options: ToolOptions, ) -> Self { @@ -280,7 +281,13 @@ impl Tool { } if let Some(ref python) = self.python { - table.insert("python", value(python)); + table.insert( + "python", + value(serde::Serialize::serialize( + &python, + toml_edit::ser::ValueSerializer::new(), + )?), + ); } table.insert("entrypoints", { @@ -327,7 +334,7 @@ impl Tool { &self.build_constraints } - pub fn python(&self) -> &Option { + pub fn python(&self) -> &Option { &self.python } diff --git a/crates/uv-torch/src/accelerator.rs b/crates/uv-torch/src/accelerator.rs index 8ec55ac2a..696adc9a1 100644 --- a/crates/uv-torch/src/accelerator.rs +++ b/crates/uv-torch/src/accelerator.rs @@ -1,3 +1,4 @@ +use std::path::Path; use std::str::FromStr; use tracing::debug; @@ -13,30 +14,58 @@ pub enum AcceleratorError { Version(#[from] uv_pep440::VersionParseError), #[error(transparent)] Utf8(#[from] std::string::FromUtf8Error), + #[error(transparent)] + ParseInt(#[from] std::num::ParseIntError), + #[error("Unknown AMD GPU architecture: {0}")] + UnknownAmdGpuArchitecture(String), } #[derive(Debug, Clone, Eq, PartialEq)] pub enum Accelerator { + /// The CUDA driver version (e.g., `550.144.03`). + /// + /// This is in contrast to the CUDA toolkit version (e.g., `12.8.0`). 
Cuda { driver_version: Version }, + /// The AMD GPU architecture (e.g., `gfx906`). + /// + /// This is in contrast to the user-space ROCm version (e.g., `6.4.0-47`) or the kernel-mode + /// driver version (e.g., `6.12.12`). + Amd { + gpu_architecture: AmdGpuArchitecture, + }, + /// The Intel GPU (XPU). + /// + /// Currently, Intel GPUs do not depend on a driver or toolkit version at this level. + Xpu, } impl std::fmt::Display for Accelerator { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { Self::Cuda { driver_version } => write!(f, "CUDA {driver_version}"), + Self::Amd { gpu_architecture } => write!(f, "AMD {gpu_architecture}"), + Self::Xpu => write!(f, "Intel GPU (XPU)"), } } } impl Accelerator { - /// Detect the CUDA driver version from the system. + /// Detect the GPU driver and/or architecture version from the system. /// /// Query, in order: /// 1. The `UV_CUDA_DRIVER_VERSION` environment variable. - /// 2. `/sys/module/nvidia/version`, which contains the driver version (e.g., `550.144.03`). - /// 3. `/proc/driver/nvidia/version`, which contains the driver version among other information. - /// 4. `nvidia-smi --query-gpu=driver_version --format=csv,noheader`. + /// 2. The `UV_AMD_GPU_ARCHITECTURE` environment variable. + /// 3. `/sys/module/nvidia/version`, which contains the driver version (e.g., `550.144.03`). + /// 4. `/proc/driver/nvidia/version`, which contains the driver version among other information. + /// 5. `nvidia-smi --query-gpu=driver_version --format=csv,noheader`. + /// 6. `rocm_agent_enumerator`, which lists the AMD GPU architectures. + /// 7. `/sys/bus/pci/devices`, filtering for the Intel GPU via PCI. pub fn detect() -> Result, AcceleratorError> { + // Constants used for PCI device detection. + const PCI_BASE_CLASS_MASK: u32 = 0x00ff_0000; + const PCI_BASE_CLASS_DISPLAY: u32 = 0x0003_0000; + const PCI_VENDOR_ID_INTEL: u32 = 0x8086; + // Read from `UV_CUDA_DRIVER_VERSION`. if let Ok(driver_version) = std::env::var(EnvVars::UV_CUDA_DRIVER_VERSION) { let driver_version = Version::from_str(&driver_version)?; @@ -44,6 +73,15 @@ impl Accelerator { return Ok(Some(Self::Cuda { driver_version })); } + // Read from `UV_AMD_GPU_ARCHITECTURE`. + if let Ok(gpu_architecture) = std::env::var(EnvVars::UV_AMD_GPU_ARCHITECTURE) { + let gpu_architecture = AmdGpuArchitecture::from_str(&gpu_architecture)?; + debug!( + "Detected AMD GPU architecture from `UV_AMD_GPU_ARCHITECTURE`: {gpu_architecture}" + ); + return Ok(Some(Self::Amd { gpu_architecture })); + } + // Read from `/sys/module/nvidia/version`. match fs_err::read_to_string("/sys/module/nvidia/version") { Ok(content) => { @@ -100,7 +138,57 @@ impl Accelerator { ); } - debug!("Failed to detect CUDA driver version"); + // Query `rocm_agent_enumerator` to detect the AMD GPU architecture. 
+ // + // See: https://rocm.docs.amd.com/projects/rocminfo/en/latest/how-to/use-rocm-agent-enumerator.html + if let Ok(output) = std::process::Command::new("rocm_agent_enumerator").output() { + if output.status.success() { + let stdout = String::from_utf8(output.stdout)?; + if let Some(gpu_architecture) = stdout + .lines() + .map(str::trim) + .filter_map(|line| AmdGpuArchitecture::from_str(line).ok()) + .min() + { + debug!( + "Detected AMD GPU architecture from `rocm_agent_enumerator`: {gpu_architecture}" + ); + return Ok(Some(Self::Amd { gpu_architecture })); + } + } else { + debug!( + "Failed to query AMD GPU architecture with `rocm_agent_enumerator` with status `{}`: {}", + output.status, + String::from_utf8_lossy(&output.stderr) + ); + } + } + + // Read from `/sys/bus/pci/devices` to filter for Intel GPU via PCI. + match fs_err::read_dir("/sys/bus/pci/devices") { + Ok(entries) => { + for entry in entries.flatten() { + match parse_pci_device_ids(&entry.path()) { + Ok((class, vendor)) => { + if (class & PCI_BASE_CLASS_MASK) == PCI_BASE_CLASS_DISPLAY + && vendor == PCI_VENDOR_ID_INTEL + { + debug!("Detected Intel GPU from PCI: vendor=0x{:04x}", vendor); + return Ok(Some(Self::Xpu)); + } + } + Err(e) => { + debug!("Failed to parse PCI device IDs: {e}"); + } + } + } + } + Err(e) if e.kind() == std::io::ErrorKind::NotFound => {} + Err(e) => return Err(e.into()), + } + + debug!("Failed to detect GPU driver version"); + Ok(None) } } @@ -129,6 +217,79 @@ fn parse_proc_driver_nvidia_version(content: &str) -> Result, Ac Ok(Some(driver_version)) } +/// Reads and parses the PCI class and vendor ID from a given device path under `/sys/bus/pci/devices`. +fn parse_pci_device_ids(device_path: &Path) -> Result<(u32, u32), AcceleratorError> { + // Parse, e.g.: + // ```text + // - `class`: a hexadecimal string such as `0x030000` + // - `vendor`: a hexadecimal string such as `0x8086` + // ``` + let class_content = fs_err::read_to_string(device_path.join("class"))?; + let pci_class = u32::from_str_radix(class_content.trim().trim_start_matches("0x"), 16)?; + + let vendor_content = fs_err::read_to_string(device_path.join("vendor"))?; + let pci_vendor = u32::from_str_radix(vendor_content.trim().trim_start_matches("0x"), 16)?; + + Ok((pci_class, pci_vendor)) +} + +/// A GPU architecture for AMD GPUs. 
+/// +/// See: +#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)] +pub enum AmdGpuArchitecture { + Gfx900, + Gfx906, + Gfx908, + Gfx90a, + Gfx942, + Gfx1030, + Gfx1100, + Gfx1101, + Gfx1102, + Gfx1200, + Gfx1201, +} + +impl FromStr for AmdGpuArchitecture { + type Err = AcceleratorError; + + fn from_str(s: &str) -> Result { + match s { + "gfx900" => Ok(Self::Gfx900), + "gfx906" => Ok(Self::Gfx906), + "gfx908" => Ok(Self::Gfx908), + "gfx90a" => Ok(Self::Gfx90a), + "gfx942" => Ok(Self::Gfx942), + "gfx1030" => Ok(Self::Gfx1030), + "gfx1100" => Ok(Self::Gfx1100), + "gfx1101" => Ok(Self::Gfx1101), + "gfx1102" => Ok(Self::Gfx1102), + "gfx1200" => Ok(Self::Gfx1200), + "gfx1201" => Ok(Self::Gfx1201), + _ => Err(AcceleratorError::UnknownAmdGpuArchitecture(s.to_string())), + } + } +} + +impl std::fmt::Display for AmdGpuArchitecture { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Gfx900 => write!(f, "gfx900"), + Self::Gfx906 => write!(f, "gfx906"), + Self::Gfx908 => write!(f, "gfx908"), + Self::Gfx90a => write!(f, "gfx90a"), + Self::Gfx942 => write!(f, "gfx942"), + Self::Gfx1030 => write!(f, "gfx1030"), + Self::Gfx1100 => write!(f, "gfx1100"), + Self::Gfx1101 => write!(f, "gfx1101"), + Self::Gfx1102 => write!(f, "gfx1102"), + Self::Gfx1200 => write!(f, "gfx1200"), + Self::Gfx1201 => write!(f, "gfx1201"), + } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/uv-torch/src/backend.rs b/crates/uv-torch/src/backend.rs index d3e5afc55..5ad71b385 100644 --- a/crates/uv-torch/src/backend.rs +++ b/crates/uv-torch/src/backend.rs @@ -35,7 +35,6 @@ //! OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE //! OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. //! ``` -//! use std::str::FromStr; use std::sync::LazyLock; @@ -48,7 +47,7 @@ use uv_normalize::PackageName; use uv_pep440::Version; use uv_platform_tags::Os; -use crate::{Accelerator, AcceleratorError}; +use crate::{Accelerator, AcceleratorError, AmdGpuArchitecture}; /// The strategy to use when determining the appropriate PyTorch index. #[derive(Debug, Copy, Clone, Eq, PartialEq, serde::Deserialize, serde::Serialize)] @@ -108,13 +107,86 @@ pub enum TorchMode { Cu90, /// Use the PyTorch index for CUDA 8.0. Cu80, + /// Use the PyTorch index for ROCm 6.3. + #[serde(rename = "rocm6.3")] + #[cfg_attr(feature = "clap", clap(name = "rocm6.3"))] + Rocm63, + /// Use the PyTorch index for ROCm 6.2.4. + #[serde(rename = "rocm6.2.4")] + #[cfg_attr(feature = "clap", clap(name = "rocm6.2.4"))] + Rocm624, + /// Use the PyTorch index for ROCm 6.2. + #[serde(rename = "rocm6.2")] + #[cfg_attr(feature = "clap", clap(name = "rocm6.2"))] + Rocm62, + /// Use the PyTorch index for ROCm 6.1. + #[serde(rename = "rocm6.1")] + #[cfg_attr(feature = "clap", clap(name = "rocm6.1"))] + Rocm61, + /// Use the PyTorch index for ROCm 6.0. + #[serde(rename = "rocm6.0")] + #[cfg_attr(feature = "clap", clap(name = "rocm6.0"))] + Rocm60, + /// Use the PyTorch index for ROCm 5.7. + #[serde(rename = "rocm5.7")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.7"))] + Rocm57, + /// Use the PyTorch index for ROCm 5.6. + #[serde(rename = "rocm5.6")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.6"))] + Rocm56, + /// Use the PyTorch index for ROCm 5.5. + #[serde(rename = "rocm5.5")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.5"))] + Rocm55, + /// Use the PyTorch index for ROCm 5.4.2. 
+ #[serde(rename = "rocm5.4.2")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.4.2"))] + Rocm542, + /// Use the PyTorch index for ROCm 5.4. + #[serde(rename = "rocm5.4")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.4"))] + Rocm54, + /// Use the PyTorch index for ROCm 5.3. + #[serde(rename = "rocm5.3")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.3"))] + Rocm53, + /// Use the PyTorch index for ROCm 5.2. + #[serde(rename = "rocm5.2")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.2"))] + Rocm52, + /// Use the PyTorch index for ROCm 5.1.1. + #[serde(rename = "rocm5.1.1")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.1.1"))] + Rocm511, + /// Use the PyTorch index for ROCm 4.2. + #[serde(rename = "rocm4.2")] + #[cfg_attr(feature = "clap", clap(name = "rocm4.2"))] + Rocm42, + /// Use the PyTorch index for ROCm 4.1. + #[serde(rename = "rocm4.1")] + #[cfg_attr(feature = "clap", clap(name = "rocm4.1"))] + Rocm41, + /// Use the PyTorch index for ROCm 4.0.1. + #[serde(rename = "rocm4.0.1")] + #[cfg_attr(feature = "clap", clap(name = "rocm4.0.1"))] + Rocm401, + /// Use the PyTorch index for Intel XPU. + Xpu, } /// The strategy to use when determining the appropriate PyTorch index. #[derive(Debug, Clone, Eq, PartialEq)] pub enum TorchStrategy { - /// Select the appropriate PyTorch index based on the operating system and CUDA driver version. - Auto { os: Os, driver_version: Version }, + /// Select the appropriate PyTorch index based on the operating system and CUDA driver version (e.g., `550.144.03`). + Cuda { os: Os, driver_version: Version }, + /// Select the appropriate PyTorch index based on the operating system and AMD GPU architecture (e.g., `gfx1100`). + Amd { + os: Os, + gpu_architecture: AmdGpuArchitecture, + }, + /// Select the appropriate PyTorch index based on the operating system and Intel GPU presence. + Xpu { os: Os }, /// Use the specified PyTorch index. Backend(TorchBackend), } @@ -123,16 +195,18 @@ impl TorchStrategy { /// Determine the [`TorchStrategy`] from the given [`TorchMode`], [`Os`], and [`Accelerator`]. pub fn from_mode(mode: TorchMode, os: &Os) -> Result { match mode { - TorchMode::Auto => { - if let Some(Accelerator::Cuda { driver_version }) = Accelerator::detect()? { - Ok(Self::Auto { - os: os.clone(), - driver_version: driver_version.clone(), - }) - } else { - Ok(Self::Backend(TorchBackend::Cpu)) - } - } + TorchMode::Auto => match Accelerator::detect()? 
{ + Some(Accelerator::Cuda { driver_version }) => Ok(Self::Cuda { + os: os.clone(), + driver_version: driver_version.clone(), + }), + Some(Accelerator::Amd { gpu_architecture }) => Ok(Self::Amd { + os: os.clone(), + gpu_architecture, + }), + Some(Accelerator::Xpu) => Ok(Self::Xpu { os: os.clone() }), + None => Ok(Self::Backend(TorchBackend::Cpu)), + }, TorchMode::Cpu => Ok(Self::Backend(TorchBackend::Cpu)), TorchMode::Cu128 => Ok(Self::Backend(TorchBackend::Cu128)), TorchMode::Cu126 => Ok(Self::Backend(TorchBackend::Cu126)), @@ -158,6 +232,23 @@ impl TorchStrategy { TorchMode::Cu91 => Ok(Self::Backend(TorchBackend::Cu91)), TorchMode::Cu90 => Ok(Self::Backend(TorchBackend::Cu90)), TorchMode::Cu80 => Ok(Self::Backend(TorchBackend::Cu80)), + TorchMode::Rocm63 => Ok(Self::Backend(TorchBackend::Rocm63)), + TorchMode::Rocm624 => Ok(Self::Backend(TorchBackend::Rocm624)), + TorchMode::Rocm62 => Ok(Self::Backend(TorchBackend::Rocm62)), + TorchMode::Rocm61 => Ok(Self::Backend(TorchBackend::Rocm61)), + TorchMode::Rocm60 => Ok(Self::Backend(TorchBackend::Rocm60)), + TorchMode::Rocm57 => Ok(Self::Backend(TorchBackend::Rocm57)), + TorchMode::Rocm56 => Ok(Self::Backend(TorchBackend::Rocm56)), + TorchMode::Rocm55 => Ok(Self::Backend(TorchBackend::Rocm55)), + TorchMode::Rocm542 => Ok(Self::Backend(TorchBackend::Rocm542)), + TorchMode::Rocm54 => Ok(Self::Backend(TorchBackend::Rocm54)), + TorchMode::Rocm53 => Ok(Self::Backend(TorchBackend::Rocm53)), + TorchMode::Rocm52 => Ok(Self::Backend(TorchBackend::Rocm52)), + TorchMode::Rocm511 => Ok(Self::Backend(TorchBackend::Rocm511)), + TorchMode::Rocm42 => Ok(Self::Backend(TorchBackend::Rocm42)), + TorchMode::Rocm41 => Ok(Self::Backend(TorchBackend::Rocm41)), + TorchMode::Rocm401 => Ok(Self::Backend(TorchBackend::Rocm401)), + TorchMode::Xpu => Ok(Self::Backend(TorchBackend::Xpu)), } } @@ -177,31 +268,35 @@ impl TorchStrategy { | "torchtext" | "torchvision" | "pytorch-triton" + | "pytorch-triton-rocm" + | "pytorch-triton-xpu" ) } /// Return the appropriate index URLs for the given [`TorchStrategy`]. pub fn index_urls(&self) -> impl Iterator { match self { - TorchStrategy::Auto { os, driver_version } => { + TorchStrategy::Cuda { os, driver_version } => { // If this is a GPU-enabled package, and CUDA drivers are installed, use PyTorch's CUDA // indexes. // // See: https://github.com/pmeier/light-the-torch/blob/33397cbe45d07b51ad8ee76b004571a4c236e37f/light_the_torch/_patch.py#L36-L49 match os { - Os::Manylinux { .. } | Os::Musllinux { .. } => Either::Left(Either::Left( - LINUX_DRIVERS - .iter() - .filter_map(move |(backend, version)| { - if driver_version >= version { - Some(backend.index_url()) - } else { - None - } - }) - .chain(std::iter::once(TorchBackend::Cpu.index_url())), - )), - Os::Windows => Either::Left(Either::Right( + Os::Manylinux { .. } | Os::Musllinux { .. } => { + Either::Left(Either::Left(Either::Left( + LINUX_CUDA_DRIVERS + .iter() + .filter_map(move |(backend, version)| { + if driver_version >= version { + Some(backend.index_url()) + } else { + None + } + }) + .chain(std::iter::once(TorchBackend::Cpu.index_url())), + ))) + } + Os::Windows => Either::Left(Either::Left(Either::Right( WINDOWS_CUDA_VERSIONS .iter() .filter_map(move |(backend, version)| { @@ -212,7 +307,7 @@ impl TorchStrategy { } }) .chain(std::iter::once(TorchBackend::Cpu.index_url())), - )), + ))), Os::Macos { .. } | Os::FreeBsd { .. } | Os::NetBsd { .. } @@ -220,12 +315,62 @@ impl TorchStrategy { | Os::Dragonfly { .. } | Os::Illumos { .. } | Os::Haiku { .. } - | Os::Android { .. 
} => { - Either::Right(std::iter::once(TorchBackend::Cpu.index_url())) + | Os::Android { .. } + | Os::Pyodide { .. } => { + Either::Right(Either::Left(std::iter::once(TorchBackend::Cpu.index_url()))) } } } - TorchStrategy::Backend(backend) => Either::Right(std::iter::once(backend.index_url())), + TorchStrategy::Amd { + os, + gpu_architecture, + } => match os { + Os::Manylinux { .. } | Os::Musllinux { .. } => Either::Left(Either::Right( + LINUX_AMD_GPU_DRIVERS + .iter() + .filter_map(move |(backend, architecture)| { + if gpu_architecture == architecture { + Some(backend.index_url()) + } else { + None + } + }) + .chain(std::iter::once(TorchBackend::Cpu.index_url())), + )), + Os::Windows + | Os::Macos { .. } + | Os::FreeBsd { .. } + | Os::NetBsd { .. } + | Os::OpenBsd { .. } + | Os::Dragonfly { .. } + | Os::Illumos { .. } + | Os::Haiku { .. } + | Os::Android { .. } + | Os::Pyodide { .. } => { + Either::Right(Either::Left(std::iter::once(TorchBackend::Cpu.index_url()))) + } + }, + TorchStrategy::Xpu { os } => match os { + Os::Manylinux { .. } => Either::Right(Either::Right(Either::Left( + std::iter::once(TorchBackend::Xpu.index_url()), + ))), + Os::Windows + | Os::Musllinux { .. } + | Os::Macos { .. } + | Os::FreeBsd { .. } + | Os::NetBsd { .. } + | Os::OpenBsd { .. } + | Os::Dragonfly { .. } + | Os::Illumos { .. } + | Os::Haiku { .. } + | Os::Android { .. } + | Os::Pyodide { .. } => { + Either::Right(Either::Left(std::iter::once(TorchBackend::Cpu.index_url()))) + } + }, + TorchStrategy::Backend(backend) => Either::Right(Either::Right(Either::Right( + std::iter::once(backend.index_url()), + ))), } } } @@ -258,6 +403,23 @@ pub enum TorchBackend { Cu91, Cu90, Cu80, + Rocm63, + Rocm624, + Rocm62, + Rocm61, + Rocm60, + Rocm57, + Rocm56, + Rocm55, + Rocm542, + Rocm54, + Rocm53, + Rocm52, + Rocm511, + Rocm42, + Rocm41, + Rocm401, + Xpu, } impl TorchBackend { @@ -289,6 +451,23 @@ impl TorchBackend { Self::Cu91 => &CU91_INDEX_URL, Self::Cu90 => &CU90_INDEX_URL, Self::Cu80 => &CU80_INDEX_URL, + Self::Rocm63 => &ROCM63_INDEX_URL, + Self::Rocm624 => &ROCM624_INDEX_URL, + Self::Rocm62 => &ROCM62_INDEX_URL, + Self::Rocm61 => &ROCM61_INDEX_URL, + Self::Rocm60 => &ROCM60_INDEX_URL, + Self::Rocm57 => &ROCM57_INDEX_URL, + Self::Rocm56 => &ROCM56_INDEX_URL, + Self::Rocm55 => &ROCM55_INDEX_URL, + Self::Rocm542 => &ROCM542_INDEX_URL, + Self::Rocm54 => &ROCM54_INDEX_URL, + Self::Rocm53 => &ROCM53_INDEX_URL, + Self::Rocm52 => &ROCM52_INDEX_URL, + Self::Rocm511 => &ROCM511_INDEX_URL, + Self::Rocm42 => &ROCM42_INDEX_URL, + Self::Rocm41 => &ROCM41_INDEX_URL, + Self::Rocm401 => &ROCM401_INDEX_URL, + Self::Xpu => &XPU_INDEX_URL, } } @@ -335,6 +514,71 @@ impl TorchBackend { TorchBackend::Cu91 => Some(Version::new([9, 1])), TorchBackend::Cu90 => Some(Version::new([9, 0])), TorchBackend::Cu80 => Some(Version::new([8, 0])), + TorchBackend::Rocm63 => None, + TorchBackend::Rocm624 => None, + TorchBackend::Rocm62 => None, + TorchBackend::Rocm61 => None, + TorchBackend::Rocm60 => None, + TorchBackend::Rocm57 => None, + TorchBackend::Rocm56 => None, + TorchBackend::Rocm55 => None, + TorchBackend::Rocm542 => None, + TorchBackend::Rocm54 => None, + TorchBackend::Rocm53 => None, + TorchBackend::Rocm52 => None, + TorchBackend::Rocm511 => None, + TorchBackend::Rocm42 => None, + TorchBackend::Rocm41 => None, + TorchBackend::Rocm401 => None, + TorchBackend::Xpu => None, + } + } + + /// Returns the ROCM [`Version`] for the given [`TorchBackend`]. 
+ pub fn rocm_version(&self) -> Option { + match self { + TorchBackend::Cpu => None, + TorchBackend::Cu128 => None, + TorchBackend::Cu126 => None, + TorchBackend::Cu125 => None, + TorchBackend::Cu124 => None, + TorchBackend::Cu123 => None, + TorchBackend::Cu122 => None, + TorchBackend::Cu121 => None, + TorchBackend::Cu120 => None, + TorchBackend::Cu118 => None, + TorchBackend::Cu117 => None, + TorchBackend::Cu116 => None, + TorchBackend::Cu115 => None, + TorchBackend::Cu114 => None, + TorchBackend::Cu113 => None, + TorchBackend::Cu112 => None, + TorchBackend::Cu111 => None, + TorchBackend::Cu110 => None, + TorchBackend::Cu102 => None, + TorchBackend::Cu101 => None, + TorchBackend::Cu100 => None, + TorchBackend::Cu92 => None, + TorchBackend::Cu91 => None, + TorchBackend::Cu90 => None, + TorchBackend::Cu80 => None, + TorchBackend::Rocm63 => Some(Version::new([6, 3])), + TorchBackend::Rocm624 => Some(Version::new([6, 2, 4])), + TorchBackend::Rocm62 => Some(Version::new([6, 2])), + TorchBackend::Rocm61 => Some(Version::new([6, 1])), + TorchBackend::Rocm60 => Some(Version::new([6, 0])), + TorchBackend::Rocm57 => Some(Version::new([5, 7])), + TorchBackend::Rocm56 => Some(Version::new([5, 6])), + TorchBackend::Rocm55 => Some(Version::new([5, 5])), + TorchBackend::Rocm542 => Some(Version::new([5, 4, 2])), + TorchBackend::Rocm54 => Some(Version::new([5, 4])), + TorchBackend::Rocm53 => Some(Version::new([5, 3])), + TorchBackend::Rocm52 => Some(Version::new([5, 2])), + TorchBackend::Rocm511 => Some(Version::new([5, 1, 1])), + TorchBackend::Rocm42 => Some(Version::new([4, 2])), + TorchBackend::Rocm41 => Some(Version::new([4, 1])), + TorchBackend::Rocm401 => Some(Version::new([4, 0, 1])), + TorchBackend::Xpu => None, } } } @@ -369,6 +613,23 @@ impl FromStr for TorchBackend { "cu91" => Ok(TorchBackend::Cu91), "cu90" => Ok(TorchBackend::Cu90), "cu80" => Ok(TorchBackend::Cu80), + "rocm6.3" => Ok(TorchBackend::Rocm63), + "rocm6.2.4" => Ok(TorchBackend::Rocm624), + "rocm6.2" => Ok(TorchBackend::Rocm62), + "rocm6.1" => Ok(TorchBackend::Rocm61), + "rocm6.0" => Ok(TorchBackend::Rocm60), + "rocm5.7" => Ok(TorchBackend::Rocm57), + "rocm5.6" => Ok(TorchBackend::Rocm56), + "rocm5.5" => Ok(TorchBackend::Rocm55), + "rocm5.4.2" => Ok(TorchBackend::Rocm542), + "rocm5.4" => Ok(TorchBackend::Rocm54), + "rocm5.3" => Ok(TorchBackend::Rocm53), + "rocm5.2" => Ok(TorchBackend::Rocm52), + "rocm5.1.1" => Ok(TorchBackend::Rocm511), + "rocm4.2" => Ok(TorchBackend::Rocm42), + "rocm4.1" => Ok(TorchBackend::Rocm41), + "rocm4.0.1" => Ok(TorchBackend::Rocm401), + "xpu" => Ok(TorchBackend::Xpu), _ => Err(format!("Unknown PyTorch backend: {s}")), } } @@ -377,7 +638,7 @@ impl FromStr for TorchBackend { /// Linux CUDA driver versions and the corresponding CUDA versions. /// /// See: -static LINUX_DRIVERS: LazyLock<[(TorchBackend, Version); 24]> = LazyLock::new(|| { +static LINUX_CUDA_DRIVERS: LazyLock<[(TorchBackend, Version); 24]> = LazyLock::new(|| { [ // Table 2 from // https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html @@ -450,6 +711,73 @@ static WINDOWS_CUDA_VERSIONS: LazyLock<[(TorchBackend, Version); 24]> = LazyLock ] }); +/// Linux AMD GPU architectures and the corresponding PyTorch backends. +/// +/// These were inferred by running the following snippet for each ROCm version: +/// +/// ```python +/// import torch +/// +/// print(torch.cuda.get_arch_list()) +/// ``` +/// +/// AMD also provides a compatibility matrix: ; +/// however, this list includes a broader array of GPUs than those in the matrix. 
+static LINUX_AMD_GPU_DRIVERS: LazyLock<[(TorchBackend, AmdGpuArchitecture); 44]> = + LazyLock::new(|| { + [ + // ROCm 6.3 + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx900), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx906), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx908), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx90a), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx942), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx1030), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx1100), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx1101), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx1102), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx1200), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx1201), + // ROCm 6.2.4 + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx900), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx906), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx908), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx90a), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx942), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx1030), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx1100), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx1101), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx1102), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx1200), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx1201), + // ROCm 6.2 + (TorchBackend::Rocm62, AmdGpuArchitecture::Gfx900), + (TorchBackend::Rocm62, AmdGpuArchitecture::Gfx906), + (TorchBackend::Rocm62, AmdGpuArchitecture::Gfx908), + (TorchBackend::Rocm62, AmdGpuArchitecture::Gfx90a), + (TorchBackend::Rocm62, AmdGpuArchitecture::Gfx1030), + (TorchBackend::Rocm62, AmdGpuArchitecture::Gfx1100), + (TorchBackend::Rocm62, AmdGpuArchitecture::Gfx942), + // ROCm 6.1 + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx900), + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx906), + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx908), + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx90a), + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx942), + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx1030), + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx1100), + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx1101), + // ROCm 6.0 + (TorchBackend::Rocm60, AmdGpuArchitecture::Gfx900), + (TorchBackend::Rocm60, AmdGpuArchitecture::Gfx906), + (TorchBackend::Rocm60, AmdGpuArchitecture::Gfx908), + (TorchBackend::Rocm60, AmdGpuArchitecture::Gfx90a), + (TorchBackend::Rocm60, AmdGpuArchitecture::Gfx1030), + (TorchBackend::Rocm60, AmdGpuArchitecture::Gfx1100), + (TorchBackend::Rocm60, AmdGpuArchitecture::Gfx942), + ] + }); + static CPU_INDEX_URL: LazyLock = LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/cpu").unwrap()); static CU128_INDEX_URL: LazyLock = @@ -500,3 +828,37 @@ static CU90_INDEX_URL: LazyLock = LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/cu90").unwrap()); static CU80_INDEX_URL: LazyLock = LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/cu80").unwrap()); +static ROCM63_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm6.3").unwrap()); +static ROCM624_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm6.2.4").unwrap()); +static ROCM62_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm6.2").unwrap()); +static ROCM61_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm6.1").unwrap()); +static 
ROCM60_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm6.0").unwrap()); +static ROCM57_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.7").unwrap()); +static ROCM56_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.6").unwrap()); +static ROCM55_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.5").unwrap()); +static ROCM542_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.4.2").unwrap()); +static ROCM54_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.4").unwrap()); +static ROCM53_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.3").unwrap()); +static ROCM52_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.2").unwrap()); +static ROCM511_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.1.1").unwrap()); +static ROCM42_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm4.2").unwrap()); +static ROCM41_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm4.1").unwrap()); +static ROCM401_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm4.0.1").unwrap()); +static XPU_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/xpu").unwrap()); diff --git a/crates/uv-trampoline-builder/src/lib.rs b/crates/uv-trampoline-builder/src/lib.rs index 15b435ec5..2e1cde872 100644 --- a/crates/uv-trampoline-builder/src/lib.rs +++ b/crates/uv-trampoline-builder/src/lib.rs @@ -521,7 +521,7 @@ if __name__ == "__main__": } #[test] - #[ignore] + #[ignore = "This test will spawn a GUI and wait until you close the window."] fn gui_launcher() -> Result<()> { // Create Temp Dirs let temp_dir = assert_fs::TempDir::new()?; diff --git a/crates/uv-trampoline/Cargo.lock b/crates/uv-trampoline/Cargo.lock index 89a7c5979..37edf9ede 100644 --- a/crates/uv-trampoline/Cargo.lock +++ b/crates/uv-trampoline/Cargo.lock @@ -100,9 +100,9 @@ dependencies = [ [[package]] name = "windows" -version = "0.61.1" +version = "0.61.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5ee8f3d025738cb02bad7868bbb5f8a6327501e870bf51f1b455b0a2454a419" +checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893" dependencies = [ "windows-collections", "windows-core", @@ -122,9 +122,9 @@ dependencies = [ [[package]] name = "windows-core" -version = "0.61.0" +version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" dependencies = [ "windows-implement", "windows-interface", @@ -135,12 +135,13 @@ dependencies = [ [[package]] name = "windows-future" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a1d6bbefcb7b60acd19828e1bc965da6fcf18a7e39490c5f8be71e54a19ba32" +checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" dependencies = [ "windows-core", "windows-link", + "windows-threading", ] [[package]] @@ -167,9 
+168,9 @@ dependencies = [ [[package]] name = "windows-link" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" [[package]] name = "windows-numerics" @@ -183,18 +184,27 @@ dependencies = [ [[package]] name = "windows-result" -version = "0.3.2" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" dependencies = [ "windows-link", ] [[package]] name = "windows-strings" -version = "0.4.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-threading" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" dependencies = [ "windows-link", ] diff --git a/crates/uv-trampoline/src/bounce.rs b/crates/uv-trampoline/src/bounce.rs index 1e90f035d..8d658bdab 100644 --- a/crates/uv-trampoline/src/bounce.rs +++ b/crates/uv-trampoline/src/bounce.rs @@ -78,7 +78,34 @@ fn make_child_cmdline() -> CString { // Only execute the trampoline again if it's a script, otherwise, just invoke Python. match kind { - TrampolineKind::Python => {} + TrampolineKind::Python => { + // SAFETY: `std::env::set_var` is safe to call on Windows, and + // this code only ever runs on Windows. + unsafe { + // Setting this env var will cause `getpath.py` to set + // `executable` to the path to this trampoline. This is + // the approach taken by CPython for Python Launchers + // (in `launcher.c`). This allows virtual environments to + // be correctly detected when using trampolines. + std::env::set_var("__PYVENV_LAUNCHER__", &executable_name); + + // If this is not a virtual environment and `PYTHONHOME` has + // not been set, then set `PYTHONHOME` to the parent directory of + // the executable. This ensures that the correct installation + // directories are added to `sys.path` when running with a junction + // trampoline. + let python_home_set = + std::env::var("PYTHONHOME").is_ok_and(|home| !home.is_empty()); + if !is_virtualenv(python_exe.as_path()) && !python_home_set { + std::env::set_var( + "PYTHONHOME", + python_exe + .parent() + .expect("Python executable should have a parent directory"), + ); + } + } + } TrampolineKind::Script => { // Use the full executable name because CMD only passes the name of the executable (but not the path) // when e.g. invoking `black` instead of `/Scripts/black` and Python then fails @@ -118,6 +145,20 @@ fn push_quoted_path(path: &Path, command: &mut Vec) { command.extend(br#"""#); } +/// Checks if the given executable is part of a virtual environment +/// +/// Checks if a `pyvenv.cfg` file exists in grandparent directory of the given executable. +/// PEP 405 specifies a more robust procedure (checking both the parent and grandparent +/// directory and then scanning for a `home` key), but in practice we have found this to +/// be unnecessary. 
+fn is_virtualenv(executable: &Path) -> bool { + executable + .parent() + .and_then(Path::parent) + .map(|path| path.join("pyvenv.cfg").is_file()) + .unwrap_or(false) +} + /// Reads the executable binary from the back to find: /// /// * The path to the Python executable @@ -240,10 +281,18 @@ fn read_trampoline_metadata(executable_name: &Path) -> (TrampolineKind, PathBuf) parent_dir.join(path) }; - // NOTICE: dunce adds 5kb~ - let path = dunce::canonicalize(path.as_path()).unwrap_or_else(|_| { - error_and_exit("Failed to canonicalize script path"); - }); + let path = if !path.is_absolute() || matches!(kind, TrampolineKind::Script) { + // NOTICE: dunce adds 5kb~ + // TODO(john): In order to avoid resolving junctions and symlinks for relative paths and + // scripts, we can consider reverting https://github.com/astral-sh/uv/pull/5750/files#diff-969979506be03e89476feade2edebb4689a9c261f325988d3c7efc5e51de26d1L273-L277. + dunce::canonicalize(path.as_path()).unwrap_or_else(|_| { + error_and_exit("Failed to canonicalize script path"); + }) + } else { + // For Python trampolines with absolute paths, we skip `dunce::canonicalize` to + // avoid resolving junctions. + path + }; (kind, path) } diff --git a/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-console.exe b/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-console.exe index 3b7f76564..5f2d6115e 100755 Binary files a/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-console.exe and b/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-console.exe differ diff --git a/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-gui.exe b/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-gui.exe index 74080d4db..3a5a2e348 100755 Binary files a/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-gui.exe and b/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-gui.exe differ diff --git a/crates/uv-trampoline/trampolines/uv-trampoline-i686-console.exe b/crates/uv-trampoline/trampolines/uv-trampoline-i686-console.exe index 3fd1e0aff..bdc225e4d 100755 Binary files a/crates/uv-trampoline/trampolines/uv-trampoline-i686-console.exe and b/crates/uv-trampoline/trampolines/uv-trampoline-i686-console.exe differ diff --git a/crates/uv-trampoline/trampolines/uv-trampoline-i686-gui.exe b/crates/uv-trampoline/trampolines/uv-trampoline-i686-gui.exe index 4221696a1..d6753380d 100755 Binary files a/crates/uv-trampoline/trampolines/uv-trampoline-i686-gui.exe and b/crates/uv-trampoline/trampolines/uv-trampoline-i686-gui.exe differ diff --git a/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-console.exe b/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-console.exe index 5b8fa6acc..b93c242e7 100755 Binary files a/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-console.exe and b/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-console.exe differ diff --git a/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-gui.exe b/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-gui.exe index 8cb19cf8f..c81d8e4e5 100755 Binary files a/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-gui.exe and b/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-gui.exe differ diff --git a/crates/uv-types/Cargo.toml b/crates/uv-types/Cargo.toml index 08a00e006..f29af4ca4 100644 --- a/crates/uv-types/Cargo.toml +++ b/crates/uv-types/Cargo.toml @@ -27,12 +27,13 @@ uv-pep440 = { workspace = true } uv-pep508 = { workspace = true } uv-pypi-types = { workspace = true } uv-python = { workspace = true } +uv-redacted = { workspace = true } 
uv-workspace = { workspace = true } anyhow = { workspace = true } +dashmap = { workspace = true } rustc-hash = { workspace = true } thiserror = { workspace = true } -url = { workspace = true } [features] default = [] diff --git a/crates/uv-types/src/builds.rs b/crates/uv-types/src/builds.rs index ea5e0b6a3..e8c622057 100644 --- a/crates/uv-types/src/builds.rs +++ b/crates/uv-types/src/builds.rs @@ -1,3 +1,9 @@ +use std::path::Path; +use std::sync::Arc; + +use dashmap::DashMap; + +use uv_configuration::{BuildKind, SourceStrategy}; use uv_pep508::PackageName; use uv_python::PythonEnvironment; @@ -37,3 +43,42 @@ impl BuildIsolation<'_> { } } } + +/// A key for the build cache, which includes the interpreter, source root, subdirectory, source +/// strategy, and build kind. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct BuildKey { + pub base_python: Box, + pub source_root: Box, + pub subdirectory: Option>, + pub source_strategy: SourceStrategy, + pub build_kind: BuildKind, +} + +/// An arena of in-process builds. +#[derive(Debug)] +pub struct BuildArena(Arc>); + +impl Default for BuildArena { + fn default() -> Self { + Self(Arc::new(DashMap::new())) + } +} + +impl Clone for BuildArena { + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl BuildArena { + /// Insert a build entry into the arena. + pub fn insert(&self, key: BuildKey, value: T) { + self.0.insert(key, value); + } + + /// Remove a build entry from the arena. + pub fn remove(&self, key: &BuildKey) -> Option { + self.0.remove(key).map(|entry| entry.1) + } +} diff --git a/crates/uv-types/src/hash.rs b/crates/uv-types/src/hash.rs index f48ce87ce..fe472b349 100644 --- a/crates/uv-types/src/hash.rs +++ b/crates/uv-types/src/hash.rs @@ -2,7 +2,6 @@ use std::str::FromStr; use std::sync::Arc; use rustc_hash::FxHashMap; -use url::Url; use uv_configuration::HashCheckingMode; use uv_distribution_types::{ @@ -12,6 +11,7 @@ use uv_distribution_types::{ use uv_normalize::PackageName; use uv_pep440::Version; use uv_pypi_types::{HashDigest, HashDigests, HashError, ResolverMarkerEnvironment}; +use uv_redacted::DisplaySafeUrl; #[derive(Debug, Default, Clone)] pub enum HashStrategy { @@ -76,7 +76,7 @@ impl HashStrategy { } /// Return the [`HashPolicy`] for the given direct URL package. - pub fn get_url(&self, url: &Url) -> HashPolicy { + pub fn get_url(&self, url: &DisplaySafeUrl) -> HashPolicy { match self { Self::None => HashPolicy::None, Self::Generate(mode) => HashPolicy::Generate(*mode), @@ -109,7 +109,7 @@ impl HashStrategy { } /// Returns `true` if the given direct URL package is allowed. - pub fn allows_url(&self, url: &Url) -> bool { + pub fn allows_url(&self, url: &DisplaySafeUrl) -> bool { match self { Self::None => true, Self::Generate(_) => true, diff --git a/crates/uv-types/src/traits.rs b/crates/uv-types/src/traits.rs index 6f724b27a..a95367fef 100644 --- a/crates/uv-types/src/traits.rs +++ b/crates/uv-types/src/traits.rs @@ -18,6 +18,8 @@ use uv_pep508::PackageName; use uv_python::{Interpreter, PythonEnvironment}; use uv_workspace::WorkspaceCache; +use crate::BuildArena; + /// Avoids cyclic crate dependencies between resolver, installer and builder. /// /// To resolve the dependencies of a packages, we may need to build one or more source @@ -67,6 +69,9 @@ pub trait BuildContext { /// Return a reference to the Git resolver. fn git(&self) -> &GitResolver; + /// Return a reference to the build arena. + fn build_arena(&self) -> &BuildArena; + /// Return a reference to the discovered registry capabilities. 
fn capabilities(&self) -> &IndexCapabilities; @@ -180,13 +185,13 @@ pub trait InstalledPackagesProvider: Clone + Send + Sync + 'static { pub struct EmptyInstalledPackages; impl InstalledPackagesProvider for EmptyInstalledPackages { - fn get_packages(&self, _name: &PackageName) -> Vec<&InstalledDist> { - Vec::new() - } - fn iter(&self) -> impl Iterator { std::iter::empty() } + + fn get_packages(&self, _name: &PackageName) -> Vec<&InstalledDist> { + Vec::new() + } } /// [`anyhow::Error`]-like wrapper type for [`BuildDispatch`] method return values, that also makes diff --git a/crates/uv-version/Cargo.toml b/crates/uv-version/Cargo.toml index 2eac37f29..f1b47dd1d 100644 --- a/crates/uv-version/Cargo.toml +++ b/crates/uv-version/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv-version" -version = "0.7.6" +version = "0.7.20" edition = { workspace = true } rust-version = { workspace = true } homepage = { workspace = true } diff --git a/crates/uv-virtualenv/Cargo.toml b/crates/uv-virtualenv/Cargo.toml index e9610176b..cb0ae1b9d 100644 --- a/crates/uv-virtualenv/Cargo.toml +++ b/crates/uv-virtualenv/Cargo.toml @@ -20,6 +20,7 @@ doctest = false workspace = true [dependencies] +uv-configuration = { workspace = true } uv-fs = { workspace = true } uv-pypi-types = { workspace = true } uv-python = { workspace = true } diff --git a/crates/uv-virtualenv/src/lib.rs b/crates/uv-virtualenv/src/lib.rs index 8c4e1feab..277ab6a8c 100644 --- a/crates/uv-virtualenv/src/lib.rs +++ b/crates/uv-virtualenv/src/lib.rs @@ -3,6 +3,7 @@ use std::path::Path; use thiserror::Error; +use uv_configuration::PreviewMode; use uv_python::{Interpreter, PythonEnvironment}; mod virtualenv; @@ -15,6 +16,8 @@ pub enum Error { "Could not find a suitable Python executable for the virtual environment based on the interpreter: {0}" )] NotFound(String), + #[error(transparent)] + Python(#[from] uv_python::managed::Error), } /// The value to use for the shell prompt when inside a virtual environment. @@ -50,6 +53,8 @@ pub fn create_venv( allow_existing: bool, relocatable: bool, seed: bool, + upgradeable: bool, + preview: PreviewMode, ) -> Result { // Create the virtualenv at the given location. let virtualenv = virtualenv::create( @@ -60,6 +65,8 @@ pub fn create_venv( allow_existing, relocatable, seed, + upgradeable, + preview, )?; // Create the corresponding `PythonEnvironment`. diff --git a/crates/uv-virtualenv/src/virtualenv.rs b/crates/uv-virtualenv/src/virtualenv.rs index a641e5541..bad380c4c 100644 --- a/crates/uv-virtualenv/src/virtualenv.rs +++ b/crates/uv-virtualenv/src/virtualenv.rs @@ -10,8 +10,10 @@ use fs_err::File; use itertools::Itertools; use tracing::debug; +use uv_configuration::PreviewMode; use uv_fs::{CWD, Simplified, cachedir}; use uv_pypi_types::Scheme; +use uv_python::managed::{PythonMinorVersionLink, create_link_to_executable}; use uv_python::{Interpreter, VirtualEnvironment}; use uv_shell::escape_posix_for_single_quotes; use uv_version::version; @@ -53,6 +55,8 @@ pub(crate) fn create( allow_existing: bool, relocatable: bool, seed: bool, + upgradeable: bool, + preview: PreviewMode, ) -> Result { // Determine the base Python executable; that is, the Python executable that should be // considered the "base" for the virtual environment. 
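The remaining `virtualenv.rs` hunks below wire upgradeable virtual environments through a stable minor-version link: rather than recording the fully versioned interpreter path, the venv's `python` resolves through a per-minor-version symlink directory (a junction on Windows) that uv can repoint when a patch release is installed. This only applies when the caller passes `upgradeable`, the interpreter is a standalone (uv-managed) build, and preview mode is enabled. The snippet below is a conceptual, Unix-only sketch of that layout, not part of the diff; the paths and directory names are illustrative and do not reflect uv's actual install layout.

```rust
use std::fs;
use std::os::unix::fs::symlink;
use std::path::Path;

fn main() -> std::io::Result<()> {
    // Hypothetical managed-Python root (illustrative paths only).
    let root = Path::new("/tmp/uv-demo/pythons");
    fs::create_dir_all(root.join("cpython-3.12.4/bin"))?;
    fs::write(root.join("cpython-3.12.4/bin/python3"), "")?;

    // A stable per-minor-version link that can be repointed on patch upgrades.
    let minor_link = root.join("cpython-3.12");
    let _ = fs::remove_file(&minor_link);
    symlink(root.join("cpython-3.12.4"), &minor_link)?;

    // The venv's interpreter resolves *through* the stable link, so repointing
    // `cpython-3.12` at a future `cpython-3.12.5` upgrades the venv in place.
    let venv_bin = Path::new("/tmp/uv-demo/venv/bin");
    fs::create_dir_all(venv_bin)?;
    let _ = fs::remove_file(venv_bin.join("python3"));
    symlink(minor_link.join("bin/python3"), venv_bin.join("python3"))?;

    assert!(venv_bin.join("python3").exists());
    Ok(())
}
```

In the actual change, the link target is resolved via `PythonMinorVersionLink::from_executable` (see the hunks that follow), and the venv falls back to the fully versioned base path when no such link exists.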
@@ -81,7 +85,7 @@ pub(crate) fn create( } else if metadata.is_dir() { if allow_existing { debug!("Allowing existing directory"); - } else if location.join("pyvenv.cfg").is_file() { + } else if uv_fs::is_virtualenv_base(location) { debug!("Removing existing directory"); // On Windows, if the current executable is in the directory, guard against @@ -143,13 +147,51 @@ pub(crate) fn create( // Create a `.gitignore` file to ignore all files in the venv. fs::write(location.join(".gitignore"), "*")?; + let mut using_minor_version_link = false; + let executable_target = if upgradeable && interpreter.is_standalone() { + if let Some(minor_version_link) = PythonMinorVersionLink::from_executable( + base_python.as_path(), + &interpreter.key(), + preview, + ) { + if !minor_version_link.exists() { + base_python.clone() + } else { + let debug_symlink_term = if cfg!(windows) { + "junction" + } else { + "symlink directory" + }; + debug!( + "Using {} {} instead of base Python path: {}", + debug_symlink_term, + &minor_version_link.symlink_directory.display(), + &base_python.display() + ); + using_minor_version_link = true; + minor_version_link.symlink_executable.clone() + } + } else { + base_python.clone() + } + } else { + base_python.clone() + }; + // Per PEP 405, the Python `home` is the parent directory of the interpreter. - let python_home = base_python.parent().ok_or_else(|| { - io::Error::new( - io::ErrorKind::NotFound, - "The Python interpreter needs to have a parent directory", - ) - })?; + // In preview mode, for standalone interpreters, this `home` value will include a + // symlink directory on Unix or junction on Windows to enable transparent Python patch + // upgrades. + let python_home = executable_target + .parent() + .ok_or_else(|| { + io::Error::new( + io::ErrorKind::NotFound, + "The Python interpreter needs to have a parent directory", + ) + })? + .to_path_buf(); + let python_home = python_home.as_path(); // Different names for the python interpreter fs::create_dir_all(&scripts)?; @@ -157,7 +199,7 @@ pub(crate) fn create( #[cfg(unix)] { - uv_fs::replace_symlink(&base_python, &executable)?; + uv_fs::replace_symlink(&executable_target, &executable)?; uv_fs::replace_symlink( "python", scripts.join(format!("python{}", interpreter.python_major())), @@ -184,91 +226,102 @@ pub(crate) fn create( } } - // No symlinking on Windows, at least not on a regular non-dev non-admin Windows install. + // On Windows, we use trampolines that point to an executable target. For standalone + // interpreters, this target path includes a minor version junction to enable + // transparent upgrades. 
if cfg!(windows) { - copy_launcher_windows( - WindowsExecutable::Python, - interpreter, - &base_python, - &scripts, - python_home, - )?; - - if interpreter.markers().implementation_name() == "graalpy" { - copy_launcher_windows( - WindowsExecutable::GraalPy, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PythonMajor, - interpreter, - &base_python, - &scripts, - python_home, - )?; + if using_minor_version_link { + let target = scripts.join(WindowsExecutable::Python.exe(interpreter)); + create_link_to_executable(target.as_path(), executable_target.clone()) + .map_err(Error::Python)?; + let targetw = scripts.join(WindowsExecutable::Pythonw.exe(interpreter)); + create_link_to_executable(targetw.as_path(), executable_target) + .map_err(Error::Python)?; } else { copy_launcher_windows( - WindowsExecutable::Pythonw, + WindowsExecutable::Python, interpreter, &base_python, &scripts, python_home, )?; - } - if interpreter.markers().implementation_name() == "pypy" { - copy_launcher_windows( - WindowsExecutable::PythonMajor, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PythonMajorMinor, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PyPy, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PyPyMajor, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PyPyMajorMinor, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PyPyw, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PyPyMajorMinorw, - interpreter, - &base_python, - &scripts, - python_home, - )?; + if interpreter.markers().implementation_name() == "graalpy" { + copy_launcher_windows( + WindowsExecutable::GraalPy, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PythonMajor, + interpreter, + &base_python, + &scripts, + python_home, + )?; + } else { + copy_launcher_windows( + WindowsExecutable::Pythonw, + interpreter, + &base_python, + &scripts, + python_home, + )?; + } + + if interpreter.markers().implementation_name() == "pypy" { + copy_launcher_windows( + WindowsExecutable::PythonMajor, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PythonMajorMinor, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PyPy, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PyPyMajor, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PyPyMajorMinor, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PyPyw, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PyPyMajorMinorw, + interpreter, + &base_python, + &scripts, + python_home, + )?; + } } } diff --git a/crates/uv-warnings/src/lib.rs b/crates/uv-warnings/src/lib.rs index 9ed4c646e..2b664be8d 100644 --- a/crates/uv-warnings/src/lib.rs +++ b/crates/uv-warnings/src/lib.rs @@ -13,27 +13,27 @@ pub static ENABLED: AtomicBool = AtomicBool::new(false); /// Enable user-facing warnings. 
pub fn enable() { - ENABLED.store(true, std::sync::atomic::Ordering::SeqCst); + ENABLED.store(true, std::sync::atomic::Ordering::Relaxed); } /// Disable user-facing warnings. pub fn disable() { - ENABLED.store(false, std::sync::atomic::Ordering::SeqCst); + ENABLED.store(false, std::sync::atomic::Ordering::Relaxed); } /// Warn a user, if warnings are enabled. #[macro_export] macro_rules! warn_user { - ($($arg:tt)*) => { + ($($arg:tt)*) => {{ use $crate::anstream::eprintln; use $crate::owo_colors::OwoColorize; - if $crate::ENABLED.load(std::sync::atomic::Ordering::SeqCst) { + if $crate::ENABLED.load(std::sync::atomic::Ordering::Relaxed) { let message = format!("{}", format_args!($($arg)*)); let formatted = message.bold(); eprintln!("{}{} {formatted}", "warning".yellow().bold(), ":".bold()); } - }; + }}; } pub static WARNINGS: LazyLock>> = LazyLock::new(Mutex::default); @@ -42,11 +42,11 @@ pub static WARNINGS: LazyLock>> = LazyLock::new(Mutex::d /// message. #[macro_export] macro_rules! warn_user_once { - ($($arg:tt)*) => { + ($($arg:tt)*) => {{ use $crate::anstream::eprintln; use $crate::owo_colors::OwoColorize; - if $crate::ENABLED.load(std::sync::atomic::Ordering::SeqCst) { + if $crate::ENABLED.load(std::sync::atomic::Ordering::Relaxed) { if let Ok(mut states) = $crate::WARNINGS.lock() { let message = format!("{}", format_args!($($arg)*)); if states.insert(message.clone()) { @@ -54,5 +54,5 @@ macro_rules! warn_user_once { } } } - }; + }}; } diff --git a/crates/uv-workspace/Cargo.toml b/crates/uv-workspace/Cargo.toml index 59bc02f29..36059f10f 100644 --- a/crates/uv-workspace/Cargo.toml +++ b/crates/uv-workspace/Cargo.toml @@ -18,6 +18,7 @@ workspace = true [dependencies] uv-build-backend = { workspace = true, features = ["schemars"] } uv-cache-key = { workspace = true } +uv-configuration = { workspace = true } uv-distribution-types = { workspace = true } uv-fs = { workspace = true, features = ["tokio", "schemars"] } uv-git-types = { workspace = true } @@ -27,9 +28,11 @@ uv-options-metadata = { workspace = true } uv-pep440 = { workspace = true } uv-pep508 = { workspace = true } uv-pypi-types = { workspace = true } +uv-redacted = { workspace = true } uv-static = { workspace = true } uv-warnings = { workspace = true } +clap = { workspace = true, optional = true } fs-err = { workspace = true } glob = { workspace = true } itertools = { workspace = true } @@ -42,7 +45,6 @@ tokio = { workspace = true } toml = { workspace = true } toml_edit = { workspace = true } tracing = { workspace = true } -url = { workspace = true } [dev-dependencies] anyhow = { workspace = true } @@ -52,7 +54,7 @@ regex = { workspace = true } tempfile = { workspace = true } [features] -schemars = ["dep:schemars", "uv-pypi-types/schemars"] +schemars = ["dep:schemars", "uv-pypi-types/schemars", "uv-redacted/schemars"] [package.metadata.cargo-shear] ignored = ["uv-options-metadata"] diff --git a/crates/uv-workspace/src/dependency_groups.rs b/crates/uv-workspace/src/dependency_groups.rs index e6964544a..2dc2090bf 100644 --- a/crates/uv-workspace/src/dependency_groups.rs +++ b/crates/uv-workspace/src/dependency_groups.rs @@ -1,32 +1,106 @@ -use std::collections::BTreeMap; use std::collections::btree_map::Entry; use std::str::FromStr; +use std::{collections::BTreeMap, path::Path}; use thiserror::Error; use tracing::error; +use uv_distribution_types::RequiresPython; +use uv_fs::Simplified; use uv_normalize::{DEV_DEPENDENCIES, GroupName}; +use uv_pep440::VersionSpecifiers; use uv_pep508::Pep508Error; use 
uv_pypi_types::{DependencyGroupSpecifier, VerbatimParsedUrl}; +use crate::pyproject::{DependencyGroupSettings, PyProjectToml, ToolUvDependencyGroups}; + /// PEP 735 dependency groups, with any `include-group` entries resolved. #[derive(Debug, Default, Clone)] -pub struct FlatDependencyGroups( - BTreeMap>>, -); +pub struct FlatDependencyGroups(BTreeMap); + +#[derive(Debug, Default, Clone)] +pub struct FlatDependencyGroup { + pub requirements: Vec>, + pub requires_python: Option, +} impl FlatDependencyGroups { + /// Gather and flatten all the dependency-groups defined in the given pyproject.toml + /// + /// The path is only used in diagnostics. + pub fn from_pyproject_toml( + path: &Path, + pyproject_toml: &PyProjectToml, + ) -> Result { + // First, collect `tool.uv.dev_dependencies` + let dev_dependencies = pyproject_toml + .tool + .as_ref() + .and_then(|tool| tool.uv.as_ref()) + .and_then(|uv| uv.dev_dependencies.as_ref()); + + // Then, collect `dependency-groups` + let dependency_groups = pyproject_toml + .dependency_groups + .iter() + .flatten() + .collect::>(); + + // Get additional settings + let empty_settings = ToolUvDependencyGroups::default(); + let group_settings = pyproject_toml + .tool + .as_ref() + .and_then(|tool| tool.uv.as_ref()) + .and_then(|uv| uv.dependency_groups.as_ref()) + .unwrap_or(&empty_settings); + + // Flatten the dependency groups. + let mut dependency_groups = FlatDependencyGroups::from_dependency_groups( + &dependency_groups, + group_settings.inner(), + ) + .map_err(|err| DependencyGroupError { + package: pyproject_toml + .project + .as_ref() + .map(|project| project.name.to_string()) + .unwrap_or_default(), + path: path.user_display().to_string(), + error: err.with_dev_dependencies(dev_dependencies), + })?; + + // Add the `dev` group, if the legacy `dev-dependencies` is defined. + // + // NOTE: the fact that we do this out here means that nothing can inherit from + // the legacy dev-dependencies group (or define a group requires-python for it). + // This is intentional, we want groups to be defined in a standard interoperable + // way, and letting things include-group a group that isn't defined would be a + // mess for other python tools. + if let Some(dev_dependencies) = dev_dependencies { + dependency_groups + .entry(DEV_DEPENDENCIES.clone()) + .or_insert_with(FlatDependencyGroup::default) + .requirements + .extend(dev_dependencies.clone()); + } + + Ok(dependency_groups) + } + /// Resolve the dependency groups (which may contain references to other groups) into concrete /// lists of requirements. - pub fn from_dependency_groups( + fn from_dependency_groups( groups: &BTreeMap<&GroupName, &Vec>, - ) -> Result { + settings: &BTreeMap, + ) -> Result { fn resolve_group<'data>( - resolved: &mut BTreeMap>>, + resolved: &mut BTreeMap, groups: &'data BTreeMap<&GroupName, &Vec>, + settings: &BTreeMap, name: &'data GroupName, parents: &mut Vec<&'data GroupName>, - ) -> Result<(), DependencyGroupError> { + ) -> Result<(), DependencyGroupErrorInner> { let Some(specifiers) = groups.get(name) else { // Missing group let parent_name = parents @@ -34,7 +108,7 @@ impl FlatDependencyGroups { .last() .copied() .expect("parent when group is missing"); - return Err(DependencyGroupError::GroupNotFound( + return Err(DependencyGroupErrorInner::GroupNotFound( name.clone(), parent_name.clone(), )); @@ -42,7 +116,7 @@ impl FlatDependencyGroups { // "Dependency Group Includes MUST NOT include cycles, and tools SHOULD report an error if they detect a cycle." 
if parents.contains(&name) { - return Err(DependencyGroupError::DependencyGroupCycle(Cycle( + return Err(DependencyGroupErrorInner::DependencyGroupCycle(Cycle( parents.iter().copied().cloned().collect(), ))); } @@ -54,13 +128,14 @@ impl FlatDependencyGroups { parents.push(name); let mut requirements = Vec::with_capacity(specifiers.len()); + let mut requires_python_intersection = VersionSpecifiers::empty(); for specifier in *specifiers { match specifier { DependencyGroupSpecifier::Requirement(requirement) => { match uv_pep508::Requirement::::from_str(requirement) { Ok(requirement) => requirements.push(requirement), Err(err) => { - return Err(DependencyGroupError::GroupParseError( + return Err(DependencyGroupErrorInner::GroupParseError( name.clone(), requirement.clone(), Box::new(err), @@ -69,72 +144,107 @@ impl FlatDependencyGroups { } } DependencyGroupSpecifier::IncludeGroup { include_group } => { - resolve_group(resolved, groups, include_group, parents)?; - requirements - .extend(resolved.get(include_group).into_iter().flatten().cloned()); + resolve_group(resolved, groups, settings, include_group, parents)?; + if let Some(included) = resolved.get(include_group) { + requirements.extend(included.requirements.iter().cloned()); + + // Intersect the requires-python for this group with the included group's + requires_python_intersection = requires_python_intersection + .into_iter() + .chain(included.requires_python.clone().into_iter().flatten()) + .collect(); + } } DependencyGroupSpecifier::Object(map) => { - return Err(DependencyGroupError::DependencyObjectSpecifierNotSupported( - name.clone(), - map.clone(), - )); + return Err( + DependencyGroupErrorInner::DependencyObjectSpecifierNotSupported( + name.clone(), + map.clone(), + ), + ); } } } + + let empty_settings = DependencyGroupSettings::default(); + let DependencyGroupSettings { requires_python } = + settings.get(name).unwrap_or(&empty_settings); + if let Some(requires_python) = requires_python { + // Intersect the requires-python for this group to get the final requires-python + // that will be used by interpreter discovery and checking. + requires_python_intersection = requires_python_intersection + .into_iter() + .chain(requires_python.clone()) + .collect(); + + // Add the group requires-python as a marker to each requirement + // We don't use `requires_python_intersection` because each `include-group` + // should already have its markers applied to these. + for requirement in &mut requirements { + let extra_markers = + RequiresPython::from_specifiers(requires_python).to_marker_tree(); + requirement.marker.and(extra_markers); + } + } + parents.pop(); - resolved.insert(name.clone(), requirements); + resolved.insert( + name.clone(), + FlatDependencyGroup { + requirements, + requires_python: if requires_python_intersection.is_empty() { + None + } else { + Some(requires_python_intersection) + }, + }, + ); Ok(()) } + // Validate the settings + for (group_name, ..) in settings { + if !groups.contains_key(group_name) { + return Err(DependencyGroupErrorInner::SettingsGroupNotFound( + group_name.clone(), + )); + } + } + let mut resolved = BTreeMap::new(); for name in groups.keys() { let mut parents = Vec::new(); - resolve_group(&mut resolved, groups, name, &mut parents)?; + resolve_group(&mut resolved, groups, settings, name, &mut parents)?; } Ok(Self(resolved)) } /// Return the requirements for a given group, if any. 
- pub fn get( - &self, - group: &GroupName, - ) -> Option<&Vec>> { + pub fn get(&self, group: &GroupName) -> Option<&FlatDependencyGroup> { self.0.get(group) } /// Return the entry for a given group, if any. - pub fn entry( - &mut self, - group: GroupName, - ) -> Entry>> { + pub fn entry(&mut self, group: GroupName) -> Entry { self.0.entry(group) } /// Consume the [`FlatDependencyGroups`] and return the inner map. - pub fn into_inner(self) -> BTreeMap>> { + pub fn into_inner(self) -> BTreeMap { self.0 } } -impl FromIterator<(GroupName, Vec>)> - for FlatDependencyGroups -{ - fn from_iter< - T: IntoIterator>)>, - >( - iter: T, - ) -> Self { +impl FromIterator<(GroupName, FlatDependencyGroup)> for FlatDependencyGroups { + fn from_iter>(iter: T) -> Self { Self(iter.into_iter().collect()) } } impl IntoIterator for FlatDependencyGroups { - type Item = (GroupName, Vec>); - type IntoIter = std::collections::btree_map::IntoIter< - GroupName, - Vec>, - >; + type Item = (GroupName, FlatDependencyGroup); + type IntoIter = std::collections::btree_map::IntoIter; fn into_iter(self) -> Self::IntoIter { self.0.into_iter() @@ -142,7 +252,24 @@ impl IntoIterator for FlatDependencyGroups { } #[derive(Debug, Error)] -pub enum DependencyGroupError { +#[error("{} has malformed dependency groups", if path.is_empty() && package.is_empty() { + "Project".to_string() +} else if path.is_empty() { + format!("Project `{package}`") +} else if package.is_empty() { + format!("`{path}`") +} else { + format!("Project `{package} @ {path}`") +})] +pub struct DependencyGroupError { + package: String, + path: String, + #[source] + error: DependencyGroupErrorInner, +} + +#[derive(Debug, Error)] +pub enum DependencyGroupErrorInner { #[error("Failed to parse entry in group `{0}`: `{1}`")] GroupParseError( GroupName, @@ -159,9 +286,15 @@ pub enum DependencyGroupError { DependencyGroupCycle(Cycle), #[error("Group `{0}` contains an unknown dependency object specifier: {1:?}")] DependencyObjectSpecifierNotSupported(GroupName, BTreeMap), + #[error("Failed to find group `{0}` specified in `[tool.uv.dependency-groups]`")] + SettingsGroupNotFound(GroupName), + #[error( + "`[tool.uv.dependency-groups]` specifies the `dev` group, but only `tool.uv.dev-dependencies` was found. To reference the `dev` group, remove the `tool.uv.dev-dependencies` section and add any development dependencies to the `dev` entry in the `[dependency-groups]` table instead." + )] + SettingsDevGroupInclude, } -impl DependencyGroupError { +impl DependencyGroupErrorInner { /// Enrich a [`DependencyGroupError`] with the `tool.uv.dev-dependencies` metadata, if applicable. 
#[must_use] pub fn with_dev_dependencies( @@ -169,10 +302,15 @@ impl DependencyGroupError { dev_dependencies: Option<&Vec>>, ) -> Self { match self { - DependencyGroupError::GroupNotFound(group, parent) + Self::GroupNotFound(group, parent) if dev_dependencies.is_some() && group == *DEV_DEPENDENCIES => { - DependencyGroupError::DevGroupInclude(parent) + Self::DevGroupInclude(parent) + } + Self::SettingsGroupNotFound(group) + if dev_dependencies.is_some() && group == *DEV_DEPENDENCIES => + { + Self::SettingsDevGroupInclude } _ => self, } diff --git a/crates/uv-workspace/src/lib.rs b/crates/uv-workspace/src/lib.rs index 83be6bd88..0e1b3974c 100644 --- a/crates/uv-workspace/src/lib.rs +++ b/crates/uv-workspace/src/lib.rs @@ -1,6 +1,6 @@ pub use workspace::{ - DiscoveryOptions, MemberDiscovery, ProjectWorkspace, VirtualProject, Workspace, WorkspaceCache, - WorkspaceError, WorkspaceMember, + DiscoveryOptions, MemberDiscovery, ProjectWorkspace, RequiresPythonSources, VirtualProject, + Workspace, WorkspaceCache, WorkspaceError, WorkspaceMember, }; pub mod dependency_groups; diff --git a/crates/uv-workspace/src/pyproject.rs b/crates/uv-workspace/src/pyproject.rs index 4efd322cc..124a62881 100644 --- a/crates/uv-workspace/src/pyproject.rs +++ b/crates/uv-workspace/src/pyproject.rs @@ -6,6 +6,8 @@ //! //! Then lowers them into a dependency specification. +#[cfg(feature = "schemars")] +use std::borrow::Cow; use std::collections::BTreeMap; use std::fmt::Formatter; use std::ops::Deref; @@ -17,18 +19,19 @@ use owo_colors::OwoColorize; use rustc_hash::{FxBuildHasher, FxHashSet}; use serde::{Deserialize, Deserializer, Serialize, de::IntoDeserializer, de::SeqAccess}; use thiserror::Error; -use url::Url; use uv_build_backend::BuildBackendSettings; use uv_distribution_types::{Index, IndexName, RequirementSource}; use uv_fs::{PortablePathBuf, relative_to}; use uv_git_types::GitReference; use uv_macros::OptionsMetadata; use uv_normalize::{DefaultGroups, ExtraName, GroupName, PackageName}; +use uv_options_metadata::{OptionSet, OptionsMetadata, Visit}; use uv_pep440::{Version, VersionSpecifiers}; use uv_pep508::MarkerTree; use uv_pypi_types::{ Conflicts, DependencyGroups, SchemaConflicts, SupportedEnvironments, VerbatimParsedUrl, }; +use uv_redacted::DisplaySafeUrl; #[derive(Error, Debug)] pub enum PyprojectTomlError { @@ -353,6 +356,24 @@ pub struct ToolUv { )] pub default_groups: Option, + /// Additional settings for `dependency-groups`. + /// + /// Currently this can only be used to add `requires-python` constraints + /// to dependency groups (typically to inform uv that your dev tooling + /// has a higher python requirement than your actual project). + /// + /// This cannot be used to define dependency groups, use the top-level + /// `[dependency-groups]` table for that. + #[option( + default = "[]", + value_type = "dict", + example = r#" + [tool.uv.dependency-groups] + my-group = {requires-python = ">=3.12"} + "# + )] + pub dependency_groups: Option, + /// The project's development dependencies. /// /// Development dependencies will be installed by default in `uv run` and `uv sync`, but will @@ -591,7 +612,7 @@ pub struct ToolUv { /// Note that those settings only apply when using the `uv_build` backend, other build backends /// (such as hatchling) have their own configuration. 
#[option_group] - pub build_backend: Option, + pub build_backend: Option, } #[derive(Default, Debug, Clone, PartialEq, Eq)] @@ -653,6 +674,77 @@ impl<'de> serde::de::Deserialize<'de> for ToolUvSources { } } +#[derive(Default, Debug, Clone, PartialEq, Eq)] +#[cfg_attr(test, derive(Serialize))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub struct ToolUvDependencyGroups(BTreeMap); + +impl ToolUvDependencyGroups { + /// Returns the underlying `BTreeMap` of group names to settings. + pub fn inner(&self) -> &BTreeMap { + &self.0 + } + + /// Convert the [`ToolUvDependencyGroups`] into its inner `BTreeMap`. + #[must_use] + pub fn into_inner(self) -> BTreeMap { + self.0 + } +} + +/// Ensure that all keys in the TOML table are unique. +impl<'de> serde::de::Deserialize<'de> for ToolUvDependencyGroups { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct SourcesVisitor; + + impl<'de> serde::de::Visitor<'de> for SourcesVisitor { + type Value = ToolUvDependencyGroups; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + formatter.write_str("a map with unique keys") + } + + fn visit_map(self, mut access: M) -> Result + where + M: serde::de::MapAccess<'de>, + { + let mut groups = BTreeMap::new(); + while let Some((key, value)) = + access.next_entry::()? + { + match groups.entry(key) { + std::collections::btree_map::Entry::Occupied(entry) => { + return Err(serde::de::Error::custom(format!( + "duplicate settings for dependency group `{}`", + entry.key() + ))); + } + std::collections::btree_map::Entry::Vacant(entry) => { + entry.insert(value); + } + } + } + Ok(ToolUvDependencyGroups(groups)) + } + } + + deserializer.deserialize_map(SourcesVisitor) + } +} + +#[derive(Deserialize, Default, Debug, Clone, PartialEq, Eq)] +#[cfg_attr(test, derive(Serialize))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[serde(rename_all = "kebab-case")] +pub struct DependencyGroupSettings { + /// Version of python to require when installing this group + #[cfg_attr(feature = "schemars", schemars(with = "Option"))] + pub requires_python: Option, +} + #[derive(Deserialize, OptionsMetadata, Default, Debug, Clone, PartialEq, Eq)] #[cfg_attr(test, derive(Serialize))] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] @@ -724,12 +816,12 @@ impl<'de> serde::Deserialize<'de> for SerdePattern { #[cfg(feature = "schemars")] impl schemars::JsonSchema for SerdePattern { - fn schema_name() -> String { - ::schema_name() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("SerdePattern") } - fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - ::json_schema(r#gen) + fn json_schema(generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + ::json_schema(generator) } } @@ -891,7 +983,7 @@ pub enum Source { /// ``` Git { /// The repository URL (without the `git+` prefix). - git: Url, + git: DisplaySafeUrl, /// The path to the directory with the `pyproject.toml`, if it's not in the archive root. subdirectory: Option, // Only one of the three may be used; we'll validate this later and emit a custom error. 
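As a quick illustration of the new `[tool.uv.dependency-groups]` settings table introduced above, here is a minimal sketch of parsing it with the `toml` crate. This is not part of the diff; it assumes the `pyproject` module is publicly reachable as `uv_workspace::pyproject` and that the elided `requires_python` field type is `Option<VersionSpecifiers>`, as implied by the intersection logic in `dependency_groups.rs` earlier in this diff.

```rust
use uv_workspace::pyproject::ToolUvDependencyGroups;

fn main() {
    // Two groups with their own interpreter constraints, as they would appear
    // under `[tool.uv.dependency-groups]` in a `pyproject.toml`.
    let settings: ToolUvDependencyGroups = toml::from_str(
        r#"
dev = { requires-python = ">=3.12" }
docs = { requires-python = ">=3.10" }
"#,
    )
    .expect("well-formed dependency-group settings should deserialize");

    for (group, entry) in settings.inner() {
        // Each group's `requires-python` is later intersected with any included
        // groups' constraints (see `FlatDependencyGroups::from_dependency_groups`).
        println!("{group}: requires-python = {:?}", entry.requires_python);
    }
}
```

Note that the custom deserializer above rejects duplicate group keys explicitly, so the same guarantee holds even for input formats whose parsers do not enforce key uniqueness themselves.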
@@ -915,7 +1007,7 @@ pub enum Source { /// flask = { url = "https://files.pythonhosted.org/packages/61/80/ffe1da13ad9300f87c93af113edd0638c75138c42a0994becfacac078c06/flask-3.0.3-py3-none-any.whl" } /// ``` Url { - url: Url, + url: DisplaySafeUrl, /// For source distributions, the path to the directory with the `pyproject.toml`, if it's /// not in the archive root. subdirectory: Option, @@ -989,12 +1081,12 @@ impl<'de> Deserialize<'de> for Source { #[derive(Deserialize, Debug, Clone)] #[serde(rename_all = "kebab-case", deny_unknown_fields)] struct CatchAll { - git: Option, + git: Option, subdirectory: Option, rev: Option, tag: Option, branch: Option, - url: Option, + url: Option, path: Option, editable: Option, package: Option, @@ -1083,7 +1175,7 @@ impl<'de> Deserialize<'de> for Source { // If the user prefixed the URL with `git+`, strip it. let git = if let Some(git) = git.as_str().strip_prefix("git+") { - Url::parse(git).map_err(serde::de::Error::custom)? + DisplaySafeUrl::parse(git).map_err(serde::de::Error::custom)? } else { git }; @@ -1594,3 +1686,44 @@ pub enum DependencyType { /// A dependency in `dependency-groups.{0}`. Group(GroupName), } + +#[derive(Debug, Clone, PartialEq, Eq)] +#[cfg_attr(test, derive(Serialize))] +pub struct BuildBackendSettingsSchema; + +impl<'de> Deserialize<'de> for BuildBackendSettingsSchema { + fn deserialize(_deserializer: D) -> Result + where + D: Deserializer<'de>, + { + Ok(BuildBackendSettingsSchema) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BuildBackendSettingsSchema { + fn schema_name() -> Cow<'static, str> { + BuildBackendSettings::schema_name() + } + + fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema { + BuildBackendSettings::json_schema(generator) + } +} + +impl OptionsMetadata for BuildBackendSettingsSchema { + fn record(visit: &mut dyn Visit) { + BuildBackendSettings::record(visit); + } + + fn documentation() -> Option<&'static str> { + BuildBackendSettings::documentation() + } + + fn metadata() -> OptionSet + where + Self: Sized + 'static, + { + BuildBackendSettings::metadata() + } +} diff --git a/crates/uv-workspace/src/pyproject_mut.rs b/crates/uv-workspace/src/pyproject_mut.rs index 36f6b3083..73e3833ae 100644 --- a/crates/uv-workspace/src/pyproject_mut.rs +++ b/crates/uv-workspace/src/pyproject_mut.rs @@ -1,13 +1,13 @@ +use itertools::Itertools; +use serde::{Deserialize, Serialize}; +use std::fmt::{Display, Formatter}; use std::path::Path; use std::str::FromStr; -use std::{fmt, mem}; - -use itertools::Itertools; +use std::{fmt, iter, mem}; use thiserror::Error; use toml_edit::{ Array, ArrayOfTables, DocumentMut, Formatted, Item, RawString, Table, TomlError, Value, }; -use url::Url; use uv_cache_key::CanonicalUrl; use uv_distribution_types::Index; @@ -15,6 +15,7 @@ use uv_fs::PortablePath; use uv_normalize::GroupName; use uv_pep440::{Version, VersionParseError, VersionSpecifier, VersionSpecifiers}; use uv_pep508::{ExtraName, MarkerTree, PackageName, Requirement, VersionOrUrl}; +use uv_redacted::DisplaySafeUrl; use crate::pyproject::{DependencyType, Source}; @@ -50,6 +51,8 @@ pub enum Error { package_name: PackageName, requirements: Vec, }, + #[error("Unknown bound kind {0}")] + UnknownBoundKind(String), } /// The result of editing an array in a TOML document. @@ -83,6 +86,169 @@ impl ArrayEdit { } } +/// The default version specifier when adding a dependency.
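+/// For a `1.2.3` input, the four kinds produce `>=1.2.3` (lower), `>=1.2.3, <2.0.0` (major), `>=1.2.3, <1.3.0` (minor), and `==1.2.3` (exact); see the tests at the bottom of this file.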
+// While PEP 440 allows an arbitrary number of version digits, the `major` and `minor` build on +// most projects sticking to two or three components and a SemVer-ish versioning system, so can +// bump the major or minor version of a major.minor or major.minor.patch input version. +#[derive(Clone, Copy, Debug, Default, Deserialize, PartialEq, Eq, Serialize)] +#[serde(rename_all = "kebab-case")] +#[cfg_attr(feature = "clap", derive(clap::ValueEnum))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub enum AddBoundsKind { + /// Only a lower bound, e.g., `>=1.2.3`. + #[default] + Lower, + /// Allow the same major version, similar to the semver caret, e.g., `>=1.2.3, <2.0.0`. + /// + /// Leading zeroes are skipped, e.g. `>=0.1.2, <0.2.0`. + Major, + /// Allow the same minor version, similar to the semver tilde, e.g., `>=1.2.3, <1.3.0`. + /// + /// Leading zeroes are skipped, e.g. `>=0.1.2, <0.1.3`. + Minor, + /// Pin the exact version, e.g., `==1.2.3`. + /// + /// This option is not recommended, as versions are already pinned in the uv lockfile. + Exact, +} + +impl Display for AddBoundsKind { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + match self { + Self::Lower => write!(f, "lower"), + Self::Major => write!(f, "major"), + Self::Minor => write!(f, "minor"), + Self::Exact => write!(f, "exact"), + } + } +} + +impl AddBoundsKind { + fn specifiers(self, version: Version) -> VersionSpecifiers { + // Nomenclature: "major" is the most significant component of the version, "minor" is the + // second most significant component, so most versions are either major.minor.patch or + // 0.major.minor. + match self { + AddBoundsKind::Lower => { + VersionSpecifiers::from(VersionSpecifier::greater_than_equal_version(version)) + } + AddBoundsKind::Major => { + let leading_zeroes = version + .release() + .iter() + .take_while(|digit| **digit == 0) + .count(); + + // Special case: The version is 0. + if leading_zeroes == version.release().len() { + let upper_bound = Version::new( + [0, 1] + .into_iter() + .chain(iter::repeat_n(0, version.release().iter().skip(2).len())), + ); + return VersionSpecifiers::from_iter([ + VersionSpecifier::greater_than_equal_version(version), + VersionSpecifier::less_than_version(upper_bound), + ]); + } + + // Compute the new major version and pad it to the same length: + // 1.2.3 -> 2.0.0 + // 1.2 -> 2.0 + // 1 -> 2 + // We ignore leading zeroes, adding Semver-style semantics to 0.x versions, too: + // 0.1.2 -> 0.2.0 + // 0.0.1 -> 0.0.2 + let major = version.release().get(leading_zeroes).copied().unwrap_or(0); + // The length of the lower bound minus the leading zero and bumped component. + let trailing_zeros = version.release().iter().skip(leading_zeroes + 1).len(); + let upper_bound = Version::new( + iter::repeat_n(0, leading_zeroes) + .chain(iter::once(major + 1)) + .chain(iter::repeat_n(0, trailing_zeros)), + ); + + VersionSpecifiers::from_iter([ + VersionSpecifier::greater_than_equal_version(version), + VersionSpecifier::less_than_version(upper_bound), + ]) + } + AddBoundsKind::Minor => { + let leading_zeroes = version + .release() + .iter() + .take_while(|digit| **digit == 0) + .count(); + + // Special case: The version is 0. 
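+ // For example, `0` becomes `>=0, <0.0.1` and `0.0.0.0` becomes `>=0.0.0.0, <0.0.1.0`.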
+ if leading_zeroes == version.release().len() { + let upper_bound = [0, 0, 1] + .into_iter() + .chain(iter::repeat_n(0, version.release().iter().skip(3).len())); + return VersionSpecifiers::from_iter([ + VersionSpecifier::greater_than_equal_version(version), + VersionSpecifier::less_than_version(Version::new(upper_bound)), + ]); + } + + // If both major and minor version are 0, the concept of bumping the minor version + // instead of the major version is not useful. Instead, we bump the next + // non-zero part of the version. This avoids extending the three components of 0.0.1 + // to the four components of 0.0.1.1. + if leading_zeroes >= 2 { + let most_significant = + version.release().get(leading_zeroes).copied().unwrap_or(0); + // The length of the lower bound minus the leading zero and bumped component. + let trailing_zeros = version.release().iter().skip(leading_zeroes + 1).len(); + let upper_bound = Version::new( + iter::repeat_n(0, leading_zeroes) + .chain(iter::once(most_significant + 1)) + .chain(iter::repeat_n(0, trailing_zeros)), + ); + return VersionSpecifiers::from_iter([ + VersionSpecifier::greater_than_equal_version(version), + VersionSpecifier::less_than_version(upper_bound), + ]); + } + + // Compute the new minor version and pad it to the same length where possible: + // 1.2.3 -> 1.3.0 + // 1.2 -> 1.3 + // 1 -> 1.1 + // We ignore leading zero, adding Semver-style semantics to 0.x versions, too: + // 0.1.2 -> 0.1.3 + // 0.0.1 -> 0.0.2 + + // If the version has only one digit, say `1`, or if there are only leading zeroes, + // pad with zeroes. + let major = version.release().get(leading_zeroes).copied().unwrap_or(0); + let minor = version + .release() + .get(leading_zeroes + 1) + .copied() + .unwrap_or(0); + let upper_bound = Version::new( + iter::repeat_n(0, leading_zeroes) + .chain(iter::once(major)) + .chain(iter::once(minor + 1)) + .chain(iter::repeat_n( + 0, + version.release().iter().skip(leading_zeroes + 2).len(), + )), + ); + + VersionSpecifiers::from_iter([ + VersionSpecifier::greater_than_equal_version(version), + VersionSpecifier::less_than_version(upper_bound), + ]) + } + AddBoundsKind::Exact => { + VersionSpecifiers::from_iter([VersionSpecifier::equals_version(version)]) + } + } + } +} + /// Specifies whether dependencies are added to a script file or a `pyproject.toml` file. #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum DependencyTarget { @@ -171,6 +337,7 @@ impl PyProjectTomlMut { &mut self, req: &Requirement, source: Option<&Source>, + raw: bool, ) -> Result { // Get or create `project.dependencies`. let dependencies = self @@ -180,7 +347,7 @@ impl PyProjectTomlMut { .as_array_mut() .ok_or(Error::MalformedDependencies)?; - let edit = add_dependency(req, dependencies, source.is_some())?; + let edit = add_dependency(req, dependencies, source.is_some(), raw)?; if let Some(source) = source { self.add_source(&req.name, source)?; @@ -196,6 +363,7 @@ impl PyProjectTomlMut { &mut self, req: &Requirement, source: Option<&Source>, + raw: bool, ) -> Result { // Get or create `tool.uv.dev-dependencies`. 
let dev_dependencies = self @@ -213,7 +381,7 @@ impl PyProjectTomlMut { .as_array_mut() .ok_or(Error::MalformedDependencies)?; - let edit = add_dependency(req, dev_dependencies, source.is_some())?; + let edit = add_dependency(req, dev_dependencies, source.is_some(), raw)?; if let Some(source) = source { self.add_source(&req.name, source)?; @@ -267,7 +435,7 @@ impl PyProjectTomlMut { if table .get("url") .and_then(|item| item.as_str()) - .and_then(|url| Url::parse(url).ok()) + .and_then(|url| DisplaySafeUrl::parse(url).ok()) .is_some_and(|url| { CanonicalUrl::new(&url) == CanonicalUrl::new(index.url.url()) }) @@ -304,10 +472,10 @@ impl PyProjectTomlMut { if table .get("url") .and_then(|item| item.as_str()) - .and_then(|url| Url::parse(url).ok()) + .and_then(|url| DisplaySafeUrl::parse(url).ok()) .is_none_or(|url| CanonicalUrl::new(&url) != CanonicalUrl::new(index.url.url())) { - let mut formatted = Formatted::new(index.url.redacted().to_string()); + let mut formatted = Formatted::new(index.url.without_credentials().to_string()); if let Some(value) = table.get("url").and_then(Item::as_value) { if let Some(prefix) = value.decor().prefix() { formatted.decor_mut().set_prefix(prefix.clone()); @@ -365,7 +533,7 @@ impl PyProjectTomlMut { if table .get("url") .and_then(|item| item.as_str()) - .and_then(|url| Url::parse(url).ok()) + .and_then(|url| DisplaySafeUrl::parse(url).ok()) .is_some_and(|url| CanonicalUrl::new(&url) == CanonicalUrl::new(index.url.url())) { return false; @@ -400,6 +568,7 @@ impl PyProjectTomlMut { group: &ExtraName, req: &Requirement, source: Option<&Source>, + raw: bool, ) -> Result { // Get or create `project.optional-dependencies`. let optional_dependencies = self @@ -428,7 +597,7 @@ impl PyProjectTomlMut { .as_array_mut() .ok_or(Error::MalformedDependencies)?; - let added = add_dependency(req, group, source.is_some())?; + let added = add_dependency(req, group, source.is_some(), raw)?; // If `project.optional-dependencies` is an inline table, reformat it. // @@ -457,6 +626,7 @@ impl PyProjectTomlMut { group: &GroupName, req: &Requirement, source: Option<&Source>, + raw: bool, ) -> Result { // Get or create `dependency-groups`. let dependency_groups = self @@ -492,7 +662,7 @@ impl PyProjectTomlMut { .as_array_mut() .ok_or(Error::MalformedDependencies)?; - let added = add_dependency(req, group, source.is_some())?; + let added = add_dependency(req, group, source.is_some(), raw)?; // To avoid churn in pyproject.toml, we only sort new group keys if the // existing keys were sorted. @@ -519,22 +689,19 @@ impl PyProjectTomlMut { Ok(added) } - /// Set the minimum version for an existing dependency. - pub fn set_dependency_minimum_version( + /// Set the constraint for a requirement for an existing dependency. + pub fn set_dependency_bound( &mut self, dependency_type: &DependencyType, index: usize, version: Version, + bound_kind: AddBoundsKind, ) -> Result<(), Error> { let group = match dependency_type { - DependencyType::Production => self.set_project_dependency_minimum_version()?, - DependencyType::Dev => self.set_dev_dependency_minimum_version()?, - DependencyType::Optional(extra) => { - self.set_optional_dependency_minimum_version(extra)? - } - DependencyType::Group(group) => { - self.set_dependency_group_requirement_minimum_version(group)? 
- } + DependencyType::Production => self.dependencies_array()?, + DependencyType::Dev => self.dev_dependencies_array()?, + DependencyType::Optional(extra) => self.optional_dependencies_array(extra)?, + DependencyType::Group(group) => self.dependency_groups_array(group)?, }; let Some(req) = group.get(index) else { @@ -545,16 +712,16 @@ impl PyProjectTomlMut { .as_str() .and_then(try_parse_requirement) .ok_or(Error::MalformedDependencies)?; - req.version_or_url = Some(VersionOrUrl::VersionSpecifier(VersionSpecifiers::from( - VersionSpecifier::greater_than_equal_version(version), - ))); + req.version_or_url = Some(VersionOrUrl::VersionSpecifier( + bound_kind.specifiers(version), + )); group.replace(index, req.to_string()); Ok(()) } - /// Set the minimum version for an existing dependency in `project.dependencies`. - fn set_project_dependency_minimum_version(&mut self) -> Result<&mut Array, Error> { + /// Get the TOML array for `project.dependencies`. + fn dependencies_array(&mut self) -> Result<&mut Array, Error> { // Get or create `project.dependencies`. let dependencies = self .project()? @@ -566,8 +733,8 @@ impl PyProjectTomlMut { Ok(dependencies) } - /// Set the minimum version for an existing dependency in `tool.uv.dev-dependencies`. - fn set_dev_dependency_minimum_version(&mut self) -> Result<&mut Array, Error> { + /// Get the TOML array for `tool.uv.dev-dependencies`. + fn dev_dependencies_array(&mut self) -> Result<&mut Array, Error> { // Get or create `tool.uv.dev-dependencies`. let dev_dependencies = self .doc @@ -587,11 +754,8 @@ impl PyProjectTomlMut { Ok(dev_dependencies) } - /// Set the minimum version for an existing dependency in `project.optional-dependencies`. - fn set_optional_dependency_minimum_version( - &mut self, - group: &ExtraName, - ) -> Result<&mut Array, Error> { + /// Get the TOML array for a `project.optional-dependencies` entry. + fn optional_dependencies_array(&mut self, group: &ExtraName) -> Result<&mut Array, Error> { // Get or create `project.optional-dependencies`. let optional_dependencies = self .project()? @@ -619,11 +783,8 @@ impl PyProjectTomlMut { Ok(group) } - /// Set the minimum version for an existing dependency in `dependency-groups`. - fn set_dependency_group_requirement_minimum_version( - &mut self, - group: &GroupName, - ) -> Result<&mut Array, Error> { + /// Get the TOML array for a `dependency-groups` entry. + fn dependency_groups_array(&mut self, group: &GroupName) -> Result<&mut Array, Error> { // Get or create `dependency-groups`. 
let dependency_groups = self .doc @@ -999,6 +1160,7 @@ pub fn add_dependency( req: &Requirement, deps: &mut Array, has_source: bool, + raw: bool, ) -> Result { let mut to_replace = find_dependencies(&req.name, Some(&req.marker), deps); @@ -1057,7 +1219,11 @@ pub fn add_dependency( Sort::Unsorted }; - let req_string = req.to_string(); + let req_string = if raw { + req.displayable_with_credentials().to_string() + } else { + req.to_string() + }; let index = match sort { Sort::CaseInsensitive => deps.iter().position(|dep| { dep.as_str().is_some_and(|dep| { @@ -1476,7 +1642,9 @@ fn split_specifiers(req: &str) -> (&str, &str) { #[cfg(test)] mod test { - use super::split_specifiers; + use super::{AddBoundsKind, split_specifiers}; + use std::str::FromStr; + use uv_pep440::Version; #[test] fn split() { @@ -1497,4 +1665,107 @@ mod test { ) ); } + + #[test] + fn bound_kind_to_specifiers_exact() { + let tests = [ + ("0", "==0"), + ("0.0", "==0.0"), + ("0.0.0", "==0.0.0"), + ("0.1", "==0.1"), + ("0.0.1", "==0.0.1"), + ("0.0.0.1", "==0.0.0.1"), + ("1.0.0", "==1.0.0"), + ("1.2", "==1.2"), + ("1.2.3", "==1.2.3"), + ("1.2.3.4", "==1.2.3.4"), + ("1.2.3.4a1.post1", "==1.2.3.4a1.post1"), + ]; + + for (version, expected) in tests { + let actual = AddBoundsKind::Exact + .specifiers(Version::from_str(version).unwrap()) + .to_string(); + assert_eq!(actual, expected, "{version}"); + } + } + + #[test] + fn bound_kind_to_specifiers_lower() { + let tests = [ + ("0", ">=0"), + ("0.0", ">=0.0"), + ("0.0.0", ">=0.0.0"), + ("0.1", ">=0.1"), + ("0.0.1", ">=0.0.1"), + ("0.0.0.1", ">=0.0.0.1"), + ("1", ">=1"), + ("1.0.0", ">=1.0.0"), + ("1.2", ">=1.2"), + ("1.2.3", ">=1.2.3"), + ("1.2.3.4", ">=1.2.3.4"), + ("1.2.3.4a1.post1", ">=1.2.3.4a1.post1"), + ]; + + for (version, expected) in tests { + let actual = AddBoundsKind::Lower + .specifiers(Version::from_str(version).unwrap()) + .to_string(); + assert_eq!(actual, expected, "{version}"); + } + } + + #[test] + fn bound_kind_to_specifiers_major() { + let tests = [ + ("0", ">=0, <0.1"), + ("0.0", ">=0.0, <0.1"), + ("0.0.0", ">=0.0.0, <0.1.0"), + ("0.0.0.0", ">=0.0.0.0, <0.1.0.0"), + ("0.1", ">=0.1, <0.2"), + ("0.0.1", ">=0.0.1, <0.0.2"), + ("0.0.1.1", ">=0.0.1.1, <0.0.2.0"), + ("0.0.0.1", ">=0.0.0.1, <0.0.0.2"), + ("1", ">=1, <2"), + ("1.0.0", ">=1.0.0, <2.0.0"), + ("1.2", ">=1.2, <2.0"), + ("1.2.3", ">=1.2.3, <2.0.0"), + ("1.2.3.4", ">=1.2.3.4, <2.0.0.0"), + ("1.2.3.4a1.post1", ">=1.2.3.4a1.post1, <2.0.0.0"), + ]; + + for (version, expected) in tests { + let actual = AddBoundsKind::Major + .specifiers(Version::from_str(version).unwrap()) + .to_string(); + assert_eq!(actual, expected, "{version}"); + } + } + + #[test] + fn bound_kind_to_specifiers_minor() { + let tests = [ + ("0", ">=0, <0.0.1"), + ("0.0", ">=0.0, <0.0.1"), + ("0.0.0", ">=0.0.0, <0.0.1"), + ("0.0.0.0", ">=0.0.0.0, <0.0.1.0"), + ("0.1", ">=0.1, <0.1.1"), + ("0.0.1", ">=0.0.1, <0.0.2"), + ("0.0.1.1", ">=0.0.1.1, <0.0.2.0"), + ("0.0.0.1", ">=0.0.0.1, <0.0.0.2"), + ("1", ">=1, <1.1"), + ("1.0.0", ">=1.0.0, <1.1.0"), + ("1.2", ">=1.2, <1.3"), + ("1.2.3", ">=1.2.3, <1.3.0"), + ("1.2.3.4", ">=1.2.3.4, <1.3.0.0"), + ("1.2.3.4a1.post1", ">=1.2.3.4a1.post1, <1.3.0.0"), + ]; + + for (version, expected) in tests { + let actual = AddBoundsKind::Minor + .specifiers(Version::from_str(version).unwrap()) + .to_string(); + assert_eq!(actual, expected, "{version}"); + } + } } diff --git a/crates/uv-workspace/src/workspace.rs b/crates/uv-workspace/src/workspace.rs index 3caaa8f8c..1349d739c 100644 --- 
a/crates/uv-workspace/src/workspace.rs +++ b/crates/uv-workspace/src/workspace.rs @@ -8,6 +8,7 @@ use glob::{GlobError, PatternError, glob}; use rustc_hash::{FxHashMap, FxHashSet}; use tracing::{debug, trace, warn}; +use uv_configuration::DependencyGroupsWithDefaults; use uv_distribution_types::{Index, Requirement, RequirementSource}; use uv_fs::{CWD, Simplified}; use uv_normalize::{DEV_DEPENDENCIES, GroupName, PackageName}; @@ -17,7 +18,7 @@ use uv_pypi_types::{Conflicts, SupportedEnvironments, VerbatimParsedUrl}; use uv_static::EnvVars; use uv_warnings::warn_user_once; -use crate::dependency_groups::{DependencyGroupError, FlatDependencyGroups}; +use crate::dependency_groups::{DependencyGroupError, FlatDependencyGroup, FlatDependencyGroups}; use crate::pyproject::{ Project, PyProjectToml, PyprojectTomlError, Sources, ToolUvSources, ToolUvWorkspace, }; @@ -95,6 +96,8 @@ pub struct DiscoveryOptions { pub members: MemberDiscovery, } +pub type RequiresPythonSources = BTreeMap<(PackageName, Option), VersionSpecifiers>; + /// A workspace, consisting of a root directory and members. See [`ProjectWorkspace`]. #[derive(Debug, Clone)] #[cfg_attr(test, derive(serde::Serialize))] @@ -413,15 +416,44 @@ impl Workspace { } /// Returns an iterator over the `requires-python` values for each member of the workspace. - pub fn requires_python(&self) -> impl Iterator { - self.packages().iter().filter_map(|(name, member)| { - member + pub fn requires_python( + &self, + groups: &DependencyGroupsWithDefaults, + ) -> Result { + let mut requires = RequiresPythonSources::new(); + for (name, member) in self.packages() { + // Get the top-level requires-python for this package, which is always active + // + // Arguably we could check groups.prod() to disable this, since, the requires-python + // of the project is *technically* not relevant if you're doing `--only-group`, but, + // that would be a big surprising change so let's *not* do that until someone asks! + let top_requires = member .pyproject_toml() .project .as_ref() .and_then(|project| project.requires_python.as_ref()) - .map(|requires_python| (name, requires_python)) - }) + .map(|requires_python| ((name.to_owned(), None), requires_python.clone())); + requires.extend(top_requires); + + // Get the requires-python for each enabled group on this package + // We need to do full flattening here because include-group can transfer requires-python + let dependency_groups = + FlatDependencyGroups::from_pyproject_toml(member.root(), &member.pyproject_toml)?; + let group_requires = + dependency_groups + .into_iter() + .filter_map(move |(group_name, flat_group)| { + if groups.contains(&group_name) { + flat_group.requires_python.map(|requires_python| { + ((name.to_owned(), Some(group_name)), requires_python) + }) + } else { + None + } + }); + requires.extend(group_requires); + } + Ok(requires) } /// Returns any requirements that are exclusive to the workspace root, i.e., not included in @@ -439,12 +471,9 @@ impl Workspace { /// corresponding `pyproject.toml`. /// /// Otherwise, returns an empty list. - pub fn dependency_groups( + pub fn workspace_dependency_groups( &self, - ) -> Result< - BTreeMap>>, - DependencyGroupError, - > { + ) -> Result, DependencyGroupError> { if self .packages .values() @@ -455,35 +484,10 @@ impl Workspace { Ok(BTreeMap::default()) } else { // Otherwise, return the dependency groups in the non-project workspace root. 
- // First, collect `tool.uv.dev_dependencies` - let dev_dependencies = self - .pyproject_toml - .tool - .as_ref() - .and_then(|tool| tool.uv.as_ref()) - .and_then(|uv| uv.dev_dependencies.as_ref()); - - // Then, collect `dependency-groups` - let dependency_groups = self - .pyproject_toml - .dependency_groups - .iter() - .flatten() - .collect::>(); - - // Flatten the dependency groups. - let mut dependency_groups = - FlatDependencyGroups::from_dependency_groups(&dependency_groups) - .map_err(|err| err.with_dev_dependencies(dev_dependencies))?; - - // Add the `dev` group, if `dev-dependencies` is defined. - if let Some(dev_dependencies) = dev_dependencies { - dependency_groups - .entry(DEV_DEPENDENCIES.clone()) - .or_insert_with(Vec::new) - .extend(dev_dependencies.clone()); - } - + let dependency_groups = FlatDependencyGroups::from_pyproject_toml( + &self.install_path, + &self.pyproject_toml, + )?; Ok(dependency_groups.into_inner()) } } @@ -1430,6 +1434,33 @@ impl VirtualProject { path: &Path, options: &DiscoveryOptions, cache: &WorkspaceCache, + ) -> Result { + Self::discover_impl(path, options, cache, false).await + } + + /// Equivalent to [`VirtualProject::discover`] but consider it acceptable for + /// both `[project]` and `[tool.uv.workspace]` to be missing. + /// + /// If they are, we act as if an empty `[tool.uv.workspace]` was found. + pub async fn discover_defaulted( + path: &Path, + options: &DiscoveryOptions, + cache: &WorkspaceCache, + ) -> Result { + Self::discover_impl(path, options, cache, true).await + } + + /// Find the current project or virtual workspace root, given the current directory. + /// + /// Similar to calling [`ProjectWorkspace::discover`] with a fallback to [`Workspace::discover`], + /// but avoids rereading the `pyproject.toml` (and relying on error-handling as control flow). + /// + /// This method requires an absolute path and panics otherwise. + async fn discover_impl( + path: &Path, + options: &DiscoveryOptions, + cache: &WorkspaceCache, + default_missing_workspace: bool, ) -> Result { assert!( path.is_absolute(), @@ -1493,6 +1524,24 @@ impl VirtualProject { ) .await?; + Ok(Self::NonProject(workspace)) + } else if default_missing_workspace { + // Otherwise it's a pyproject.toml that maybe contains dependency-groups + // that we want to treat like a project/workspace to handle those uniformly + let project_path = std::path::absolute(project_root) + .map_err(WorkspaceError::Normalize)? 
+ .clone(); + + let workspace = Workspace::collect_members( + project_path, + ToolUvWorkspace::default(), + pyproject_toml, + None, + options, + cache, + ) + .await?; + Ok(Self::NonProject(workspace)) } else { Err(WorkspaceError::MissingProject(pyproject_path)) @@ -1818,6 +1867,7 @@ mod tests { "managed": null, "package": null, "default-groups": null, + "dependency-groups": null, "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, @@ -1913,6 +1963,7 @@ mod tests { "managed": null, "package": null, "default-groups": null, + "dependency-groups": null, "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, @@ -2123,6 +2174,7 @@ mod tests { "managed": null, "package": null, "default-groups": null, + "dependency-groups": null, "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, @@ -2230,6 +2282,7 @@ mod tests { "managed": null, "package": null, "default-groups": null, + "dependency-groups": null, "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, @@ -2350,6 +2403,7 @@ mod tests { "managed": null, "package": null, "default-groups": null, + "dependency-groups": null, "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, @@ -2444,6 +2498,7 @@ mod tests { "managed": null, "package": null, "default-groups": null, + "dependency-groups": null, "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml index f4382cdea..7fa28ed67 100644 --- a/crates/uv/Cargo.toml +++ b/crates/uv/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv" -version = "0.7.6" +version = "0.7.20" edition = { workspace = true } rust-version = { workspace = true } homepage = { workspace = true } @@ -58,7 +58,7 @@ uv-types = { workspace = true } uv-version = { workspace = true } uv-virtualenv = { workspace = true } uv-warnings = { workspace = true } -uv-workspace = { workspace = true } +uv-workspace = { workspace = true, features = ["clap"] } anstream = { workspace = true } anyhow = { workspace = true } @@ -70,10 +70,12 @@ clap = { workspace = true, features = ["derive", "string", "wrap_help"] } console = { workspace = true } ctrlc = { workspace = true } dotenvy = { workspace = true } +dunce = { workspace = true } flate2 = { workspace = true, default-features = false } fs-err = { workspace = true, features = ["tokio"] } futures = { workspace = true } http = { workspace = true } +indexmap = { workspace = true } indicatif = { workspace = true } indoc = { workspace = true } itertools = { workspace = true } @@ -112,7 +114,6 @@ assert_cmd = { version = "2.0.16" } assert_fs = { version = "1.1.2" } base64 = { workspace = true } byteorder = { version = "1.5.0" } -etcetera = { workspace = true } filetime = { version = "0.2.25" } flate2 = { workspace = true, default-features = false } ignore = { version = "0.4.23" } @@ -124,6 +125,7 @@ reqwest = { workspace = true, features = ["blocking"], default-features = false similar = { version = "2.6.0" } tar = { workspace = true } tempfile = { workspace = true } +whoami = { version = "1.6.0" } wiremock = { workspace = true } zip = { workspace = true } @@ -156,6 +158,7 @@ default-tests = [ "pypi", "python", "python-managed", + "python-eol", "slow-tests", "test-ecosystem" ] @@ -169,12 +172,20 @@ pypi = [] python = [] # Introduces a testing dependency on a local Python installation with specific patch versions. 
python-patch = [] +# Introduces a testing dependency on a local Python installation with an EOL version. +python-eol = [] # Introduces a testing dependency on managed Python installations. python-managed = [] # Include "slow" test cases. slow-tests = [] # Includes test cases that require ecosystem packages test-ecosystem = [] +# Build uvw binary on Windows +windows-gui-bin = [] [package.metadata.dist] dist = true + +[[bin]] +name = "uvw" +required-features = ["windows-gui-bin"] diff --git a/crates/uv/src/bin/uvw.rs b/crates/uv/src/bin/uvw.rs new file mode 100644 index 000000000..3b4ea2288 --- /dev/null +++ b/crates/uv/src/bin/uvw.rs @@ -0,0 +1,111 @@ +#![cfg_attr(windows, windows_subsystem = "windows")] + +use std::convert::Infallible; +use std::path::{Path, PathBuf}; +use std::process::{Command, ExitCode, ExitStatus}; + +/// Spawns a command exec style. +fn exec_spawn(cmd: &mut Command) -> std::io::Result { + #[cfg(unix)] + { + use std::os::unix::process::CommandExt; + let err = cmd.exec(); + Err(err) + } + #[cfg(windows)] + { + use std::os::windows::process::CommandExt; + + const CREATE_NO_WINDOW: u32 = 0x0800_0000; + + cmd.stdin(std::process::Stdio::inherit()); + let status = cmd.creation_flags(CREATE_NO_WINDOW).status()?; + + #[allow(clippy::exit)] + std::process::exit(status.code().unwrap()) + } +} + +/// Assuming the binary is called something like `uvw@1.2.3(.exe)`, compute the `@1.2.3(.exe)` part +/// so that we can preferentially find `uv@1.2.3(.exe)`, for folks who like managing multiple +/// installs in this way. +fn get_uvw_suffix(current_exe: &Path) -> Option<&str> { + let os_file_name = current_exe.file_name()?; + let file_name_str = os_file_name.to_str()?; + file_name_str.strip_prefix("uvw") +} + +/// Gets the path to `uv`, given info about `uvw` +fn get_uv_path(current_exe_parent: &Path, uvw_suffix: Option<&str>) -> std::io::Result { + // First try to find a matching suffixed `uv`, e.g. `uv@1.2.3(.exe)` + let uv_with_suffix = uvw_suffix.map(|suffix| current_exe_parent.join(format!("uv{suffix}"))); + if let Some(uv_with_suffix) = &uv_with_suffix { + #[allow(clippy::print_stderr, reason = "printing a very rare warning")] + match uv_with_suffix.try_exists() { + Ok(true) => return Ok(uv_with_suffix.to_owned()), + Ok(false) => { /* definitely not there, proceed to fallback */ } + Err(err) => { + // We don't know if `uv@1.2.3` exists, something errored when checking. + // We *could* blindly use `uv@1.2.3` in this case, as the code below does, however + // in this extremely narrow corner case it's *probably* better to default to `uv`, + // since we don't want to mess up existing users who weren't using suffixes? + eprintln!( + "warning: failed to determine if `{}` exists, trying `uv` instead: {err}", + uv_with_suffix.display() + ); + } + } + } + + // Then just look for good ol' `uv` + let uv = current_exe_parent.join(format!("uv{}", std::env::consts::EXE_SUFFIX)); + // If we are sure the `uv` binary does not exist, display a clearer error message. + // If we're not certain if uv exists (try_exists == Err), keep going and hope it works. 
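+ // (A suffixed install like `uvw@1.2.3.exe` will already have tried `uv@1.2.3.exe` above, so this is the plain `uv` fallback.)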
+ if matches!(uv.try_exists(), Ok(false)) { + let message = if let Some(uv_with_suffix) = uv_with_suffix { + format!( + "Could not find the `uv` binary at either of:\n {}\n {}", + uv_with_suffix.display(), + uv.display(), + ) + } else { + format!("Could not find the `uv` binary at: {}", uv.display()) + }; + Err(std::io::Error::new(std::io::ErrorKind::NotFound, message)) + } else { + Ok(uv) + } +} + +fn run() -> std::io::Result { + let current_exe = std::env::current_exe()?; + let Some(bin) = current_exe.parent() else { + return Err(std::io::Error::new( + std::io::ErrorKind::NotFound, + "Could not determine the location of the `uvw` binary", + )); + }; + let uvw_suffix = get_uvw_suffix(¤t_exe); + let uv = get_uv_path(bin, uvw_suffix)?; + let args = std::env::args_os() + // Skip the `uvw` name + .skip(1) + .collect::>(); + + let mut cmd = Command::new(uv); + cmd.args(&args); + match exec_spawn(&mut cmd)? {} +} + +#[allow(clippy::print_stderr)] +fn main() -> ExitCode { + let result = run(); + match result { + // Fail with 2 if the status cannot be cast to an exit code + Ok(status) => u8::try_from(status.code().unwrap_or(2)).unwrap_or(2).into(), + Err(err) => { + eprintln!("error: {err}"); + ExitCode::from(2) + } + } +} diff --git a/crates/uv/src/commands/run.rs b/crates/uv/src/child.rs similarity index 81% rename from crates/uv/src/commands/run.rs rename to crates/uv/src/child.rs index 4d827d194..6a8033fff 100644 --- a/crates/uv/src/commands/run.rs +++ b/crates/uv/src/child.rs @@ -9,35 +9,35 @@ use crate::commands::ExitStatus; /// long as the command is the last thing that runs in this process; otherwise, we'd need to restore /// the default signal handlers after the command completes. pub(crate) async fn run_to_completion(mut handle: Child) -> anyhow::Result { - // On Unix, shells will send SIGINT to the active process group when a user presses `Ctrl-C`. In - // general, this means that uv should ignore SIGINT, allowing the child process to cleanly exit - // instead. If uv forwarded the SIGINT immediately, the child process would receive _two_ SIGINT - // signals which has semantic meaning for some programs, i.e., slow exit on the first signal and - // fast exit on the second. The exception to this is if a child process changes its process - // group, in which case the shell will _not_ send SIGINT to the child process and uv must take - // ownership of forwarding the signal. + // On Unix, the terminal driver will send SIGINT to the active process group when a user presses + // `Ctrl-C`. In general, this means that uv should ignore SIGINT, allowing the child process to + // cleanly exit instead. If uv forwarded the SIGINT immediately, the child process would receive + // _two_ SIGINT signals which has semantic meaning for some programs, i.e., slow exit on the + // first signal and fast exit on the second. The exception to this is if a child process changes + // its process group, in which case the terminal driver will _not_ send SIGINT to the child + // process and uv must take ownership of forwarding the signal. // - // Note this assumes an interactive shell. If a signal is sent directly to the uv parent process - // (e.g., `kill -2 `), the process group is not involved and a signal is not sent to the - // child by default. In this context, uv must forward the signal to the child. We work around - // this by forwarding SIGINT if it is received more than once. 
We could attempt to infer if the - // parent is a shell using TTY detection(?), but there hasn't been sufficient motivation to - // explore alternatives yet. + // Note the above only applies in an interactive terminal. If a signal is sent directly to the + // uv parent process (e.g., `kill -2 `), the process group is not involved and a signal is + // not sent to the child by default. In this context, uv must forward the signal to the child. + // uv checks if stdin is a TTY as a heuristic to determine if uv is running in an interactive + // terminal. When not in an interactive terminal, uv will forward SIGINT to the child. // - // Use of SIGTERM is also a bit complicated. If a shell receives a SIGTERM, it just waits for + // Use of SIGTERM is also a bit complicated. If a terminal receives a SIGTERM, it just waits for // its children to exit — multiple SIGTERMs do not have any effect and the signals are not // forwarded to the children. Consequently, the description for SIGINT above does not apply to - // SIGTERM in shells. It is _possible_ to have a parent process that sends a SIGTERM to the - // process group; for example, `tini` supports this via a `-g` option. In this case, it's - // possible that uv will improperly send a second SIGTERM to the child process. However, - // this seems preferable to not forwarding it in the first place. In the Docker case, if `uv` - // is invoked directly (instead of via an init system), it's PID 1 which has a special-cased - // default signal handler for SIGTERM by default. Generally, if a process receives a SIGTERM and - // does not have a SIGTERM handler, it is terminated. However, if PID 1 receives a SIGTERM, it - // is not terminated. In this context, it is essential for uv to forward the SIGTERM to the - // child process or the process will not be killable. + // SIGTERM in the terminal driver. It is _possible_ to have a parent process that sends a + // SIGTERM to the process group; for example, `tini` supports this via a `-g` option. In this + // case, it's possible that uv will improperly send a second SIGTERM to the child process. + // However, this seems preferable to not forwarding it in the first place. In the Docker case, + // if `uv` is invoked directly (instead of via an init system), it's PID 1 which has a + // special-cased default signal handler for SIGTERM by default. Generally, if a process receives + // a SIGTERM and does not have a SIGTERM handler, it is terminated. However, if PID 1 receives a + // SIGTERM, it is not terminated. In this context, it is essential for uv to forward the SIGTERM + // to the child process or the process will not be killable. #[cfg(unix)] let status = { + use std::io::{IsTerminal, stdin}; use std::ops::Deref; use anyhow::Context; @@ -94,7 +94,6 @@ pub(crate) async fn run_to_completion(mut handle: Child) -> anyhow::Result anyhow::Result anyhow::Result anyhow::Result Result { - if list && preview.is_disabled() { - // We need the direct build for list and that is preview only. - writeln!( - printer.stderr(), - "The `--list` option is only available in preview mode; add the `--preview` flag to use `--list`" - )?; - return Ok(BuildResult::Failure); - } - // Extract the resolver settings. let ResolverSettings { index_locations, @@ -214,6 +207,7 @@ async fn build_impl( } = settings; let client_builder = BaseClientBuilder::default() + .retries_from_env()? 
.connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -471,7 +465,8 @@ async fn build_package( // (3) `Requires-Python` in `pyproject.toml` if interpreter_request.is_none() { if let Ok(workspace) = workspace { - interpreter_request = find_requires_python(workspace)? + let groups = DependencyGroupsWithDefaults::none(); + interpreter_request = find_requires_python(workspace, &groups)? .as_ref() .map(RequiresPython::specifiers) .map(|specifiers| { @@ -495,20 +490,12 @@ async fn build_package( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Read build constraints. let build_constraints = @@ -610,10 +597,7 @@ async fn build_package( } BuildAction::List - } else if preview.is_enabled() - && !force_pep517 - && check_direct_build(source.path(), source.path().user_display()) - { + } else if !force_pep517 && check_direct_build(source.path(), source.path().user_display()) { BuildAction::DirectBuild } else { BuildAction::Pep517 diff --git a/crates/uv/src/commands/mod.rs b/crates/uv/src/commands/mod.rs index 8e3c9ef37..d1e647363 100644 --- a/crates/uv/src/commands/mod.rs +++ b/crates/uv/src/commands/mod.rs @@ -29,6 +29,7 @@ pub(crate) use project::remove::remove; pub(crate) use project::run::{RunCommand, run}; pub(crate) use project::sync::sync; pub(crate) use project::tree::tree; +pub(crate) use project::version::{project_version, self_version}; pub(crate) use publish::publish; pub(crate) use python::dir::dir as python_dir; pub(crate) use python::find::find as python_find; @@ -56,7 +57,6 @@ use uv_normalize::PackageName; use uv_python::PythonEnvironment; use uv_scripts::Pep723Script; pub(crate) use venv::venv; -pub(crate) use version::{project_version, self_version}; use crate::printer::Printer; @@ -72,12 +72,10 @@ mod project; mod publish; mod python; pub(crate) mod reporters; -mod run; #[cfg(feature = "self-update")] mod self_update; mod tool; mod venv; -mod version; #[derive(Copy, Clone)] pub(crate) enum ExitStatus { diff --git a/crates/uv/src/commands/pip/check.rs b/crates/uv/src/commands/pip/check.rs index f504503af..bfbb20ee6 100644 --- a/crates/uv/src/commands/pip/check.rs +++ b/crates/uv/src/commands/pip/check.rs @@ -5,6 +5,7 @@ use anyhow::Result; use owo_colors::OwoColorize; use uv_cache::Cache; +use uv_configuration::PreviewMode; use uv_distribution_types::{Diagnostic, InstalledDist}; use uv_installer::{SitePackages, SitePackagesDiagnostic}; use uv_python::{EnvironmentPreference, PythonEnvironment, PythonRequest}; @@ -19,6 +20,7 @@ pub(crate) fn pip_check( system: bool, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { let start = Instant::now(); @@ -27,6 +29,7 @@ pub(crate) fn pip_check( &python.map(PythonRequest::parse).unwrap_or_default(), EnvironmentPreference::from_system_flag(system, false), cache, + preview, )?; report_target_environment(&environment, cache, printer)?; diff --git 
a/crates/uv/src/commands/pip/compile.rs b/crates/uv/src/commands/pip/compile.rs index f6e0908d2..c40716763 100644 --- a/crates/uv/src/commands/pip/compile.rs +++ b/crates/uv/src/commands/pip/compile.rs @@ -3,7 +3,6 @@ use std::env; use std::ffi::OsStr; use std::path::{Path, PathBuf}; use std::str::FromStr; -use std::sync::Arc; use anyhow::{Result, anyhow}; use itertools::Itertools; @@ -21,7 +20,7 @@ use uv_configuration::{KeyringProviderType, TargetTriple}; use uv_dispatch::{BuildDispatch, SharedState}; use uv_distribution_types::{ DependencyMetadata, HashGeneration, Index, IndexLocations, NameRequirementSpecification, - Origin, Requirement, UnresolvedRequirementSpecification, Verbatim, + Origin, Requirement, RequiresPython, UnresolvedRequirementSpecification, Verbatim, }; use uv_fs::{CWD, Simplified}; use uv_git::ResolvedRepositoryReference; @@ -38,8 +37,8 @@ use uv_requirements::{ }; use uv_resolver::{ AnnotationStyle, DependencyMode, DisplayResolutionGraph, ExcludeNewer, FlatIndex, ForkStrategy, - InMemoryIndex, OptionsBuilder, PrereleaseMode, PylockToml, PythonRequirement, RequiresPython, - ResolutionMode, ResolverEnvironment, + InMemoryIndex, OptionsBuilder, PrereleaseMode, PylockToml, PythonRequirement, ResolutionMode, + ResolverEnvironment, }; use uv_torch::{TorchMode, TorchStrategy}; use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy}; @@ -180,6 +179,7 @@ pub(crate) async fn pip_compile( } let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) @@ -271,7 +271,13 @@ pub(crate) async fn pip_compile( let environment_preference = EnvironmentPreference::from_system_flag(system, false); let interpreter = if let Some(python) = python.as_ref() { let request = PythonRequest::parse(python); - PythonInstallation::find(&request, environment_preference, python_preference, &cache) + PythonInstallation::find( + &request, + environment_preference, + python_preference, + &cache, + preview, + ) } else { // TODO(zanieb): The split here hints at a problem with the request abstraction; we should // be able to use `PythonInstallation::find(...)` here. @@ -281,7 +287,13 @@ pub(crate) async fn pip_compile( } else { PythonRequest::default() }; - PythonInstallation::find_best(&request, environment_preference, python_preference, &cache) + PythonInstallation::find_best( + &request, + environment_preference, + python_preference, + &cache, + preview, + ) }? .into_interpreter(); @@ -326,13 +338,12 @@ pub(crate) async fn pip_compile( // Determine the Python requirement, if the user requested a specific version. let python_requirement = if universal { - let requires_python = RequiresPython::greater_than_equal_version( - if let Some(python_version) = python_version.as_ref() { - &python_version.version - } else { - interpreter.python_version() - }, - ); + let requires_python = if let Some(python_version) = python_version.as_ref() { + RequiresPython::greater_than_equal_version(&python_version.version) + } else { + let version = interpreter.python_minor_version(); + RequiresPython::greater_than_equal_version(&version) + }; PythonRequirement::from_requires_python(&interpreter, requires_python) } else if let Some(python_version) = python_version.as_ref() { PythonRequirement::from_python_version(&interpreter, python_version) @@ -376,32 +387,21 @@ pub(crate) async fn pip_compile( no_index, ); - // Add all authenticated sources to the cache. 
- for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Determine the PyTorch backend. - let torch_backend = torch_backend.map(|mode| { - if preview.is_disabled() { - warn_user!("The `--torch-backend` setting is experimental and may change without warning. Pass `--preview` to disable this warning."); - } - - TorchStrategy::from_mode( - mode, - python_platform - .map(TargetTriple::platform) - .as_ref() - .unwrap_or(interpreter.platform()) - .os(), - ) - }).transpose()?; + let torch_backend = torch_backend + .map(|mode| { + TorchStrategy::from_mode( + mode, + python_platform + .map(TargetTriple::platform) + .as_ref() + .unwrap_or(interpreter.platform()) + .os(), + ) + }) + .transpose()?; // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? @@ -517,6 +517,7 @@ pub(crate) async fn pip_compile( tags.as_deref(), resolver_env.clone(), python_requirement, + interpreter.markers(), Conflicts::empty(), &client, &flat_index, diff --git a/crates/uv/src/commands/pip/freeze.rs b/crates/uv/src/commands/pip/freeze.rs index 7ad5517af..8c8491d45 100644 --- a/crates/uv/src/commands/pip/freeze.rs +++ b/crates/uv/src/commands/pip/freeze.rs @@ -6,6 +6,7 @@ use itertools::Itertools; use owo_colors::OwoColorize; use uv_cache::Cache; +use uv_configuration::PreviewMode; use uv_distribution_types::{Diagnostic, InstalledDist, Name}; use uv_installer::SitePackages; use uv_python::{EnvironmentPreference, PythonEnvironment, PythonRequest}; @@ -23,12 +24,14 @@ pub(crate) fn pip_freeze( paths: Option>, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { // Detect the current Python interpreter. let environment = PythonEnvironment::find( &python.map(PythonRequest::parse).unwrap_or_default(), EnvironmentPreference::from_system_flag(system, false), cache, + preview, )?; report_target_environment(&environment, cache, printer)?; diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs index 1cfac27ca..bbfe99c50 100644 --- a/crates/uv/src/commands/pip/install.rs +++ b/crates/uv/src/commands/pip/install.rs @@ -1,12 +1,11 @@ use std::collections::{BTreeMap, BTreeSet}; use std::fmt::Write; use std::path::PathBuf; -use std::sync::Arc; use anyhow::Context; use itertools::Itertools; use owo_colors::OwoColorize; -use tracing::{Level, debug, enabled}; +use tracing::{Level, debug, enabled, warn}; use uv_cache::Cache; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; @@ -100,6 +99,7 @@ pub(crate) async fn pip_install( let start = std::time::Instant::now(); let client_builder = BaseClientBuilder::new() + .retries_from_env()? 
.connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) @@ -182,6 +182,7 @@ pub(crate) async fn pip_install( EnvironmentPreference::from_system_flag(system, false), python_preference, &cache, + preview, )?; report_interpreter(&installation, true, printer)?; PythonEnvironment::from_installation(installation) @@ -193,6 +194,7 @@ pub(crate) async fn pip_install( .unwrap_or_default(), EnvironmentPreference::from_system_flag(system, true), &cache, + preview, )?; report_target_environment(&environment, &cache, printer)?; environment @@ -235,7 +237,13 @@ pub(crate) async fn pip_install( } } - let _lock = environment.lock().await?; + let _lock = environment + .lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); // Determine the markers to use for the resolution. let interpreter = environment.interpreter(); @@ -254,6 +262,7 @@ pub(crate) async fn pip_install( if reinstall.is_none() && upgrade.is_none() && source_trees.is_empty() + && groups.is_empty() && pylock.is_none() && matches!(modifications, Modifications::Sufficient) { @@ -331,32 +340,21 @@ pub(crate) async fn pip_install( no_index, ); - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Determine the PyTorch backend. - let torch_backend = torch_backend.map(|mode| { - if preview.is_disabled() { - warn_user!("The `--torch-backend` setting is experimental and may change without warning. Pass `--preview` to disable this warning."); - } - - TorchStrategy::from_mode( - mode, - python_platform - .map(TargetTriple::platform) - .as_ref() - .unwrap_or(interpreter.platform()) - .os(), - ) - }).transpose()?; + let torch_backend = torch_backend + .map(|mode| { + TorchStrategy::from_mode( + mode, + python_platform + .map(TargetTriple::platform) + .as_ref() + .unwrap_or(interpreter.platform()) + .os(), + ) + }) + .transpose()?; // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? @@ -480,6 +478,7 @@ pub(crate) async fn pip_install( Some(&tags), ResolverEnvironment::specific(marker_env.clone()), python_requirement, + interpreter.markers(), Conflicts::empty(), &client, &flat_index, diff --git a/crates/uv/src/commands/pip/latest.rs b/crates/uv/src/commands/pip/latest.rs index ac3ce7d1f..25da8466c 100644 --- a/crates/uv/src/commands/pip/latest.rs +++ b/crates/uv/src/commands/pip/latest.rs @@ -3,10 +3,10 @@ use tracing::debug; use uv_client::{MetadataFormat, RegistryClient, VersionFiles}; use uv_distribution_filename::DistFilename; -use uv_distribution_types::{IndexCapabilities, IndexMetadataRef, IndexUrl}; +use uv_distribution_types::{IndexCapabilities, IndexMetadataRef, IndexUrl, RequiresPython}; use uv_normalize::PackageName; use uv_platform_tags::Tags; -use uv_resolver::{ExcludeNewer, PrereleaseMode, RequiresPython}; +use uv_resolver::{ExcludeNewer, PrereleaseMode}; use uv_warnings::warn_user_once; /// A client to fetch the latest version of a package from an index. 
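Aside (not part of this diff): the `pip install` changes above downgrade a failed environment lock from a hard error to a warning. A minimal, self-contained sketch of that `inspect_err(..).ok()` pattern, where `acquire` and its `io::Error` input are illustrative names rather than uv APIs:

    use std::io;

    /// Log the failure and continue without a guard instead of propagating the error.
    fn acquire(guard: Result<String, io::Error>) -> Option<String> {
        guard
            .inspect_err(|err| eprintln!("warning: failed to acquire environment lock: {err}"))
            .ok()
    }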
diff --git a/crates/uv/src/commands/pip/list.rs b/crates/uv/src/commands/pip/list.rs index 48786d86c..40e8c770d 100644 --- a/crates/uv/src/commands/pip/list.rs +++ b/crates/uv/src/commands/pip/list.rs @@ -15,16 +15,18 @@ use uv_cache::{Cache, Refresh}; use uv_cache_info::Timestamp; use uv_cli::ListFormat; use uv_client::{BaseClientBuilder, RegistryClientBuilder}; -use uv_configuration::{Concurrency, IndexStrategy, KeyringProviderType}; +use uv_configuration::{Concurrency, IndexStrategy, KeyringProviderType, PreviewMode}; use uv_distribution_filename::DistFilename; -use uv_distribution_types::{Diagnostic, IndexCapabilities, IndexLocations, InstalledDist, Name}; +use uv_distribution_types::{ + Diagnostic, IndexCapabilities, IndexLocations, InstalledDist, Name, RequiresPython, +}; use uv_fs::Simplified; use uv_installer::SitePackages; use uv_normalize::PackageName; use uv_pep440::Version; use uv_python::PythonRequest; use uv_python::{EnvironmentPreference, PythonEnvironment}; -use uv_resolver::{ExcludeNewer, PrereleaseMode, RequiresPython}; +use uv_resolver::{ExcludeNewer, PrereleaseMode}; use crate::commands::ExitStatus; use crate::commands::pip::latest::LatestClient; @@ -52,6 +54,7 @@ pub(crate) async fn pip_list( system: bool, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { // Disallow `--outdated` with `--format freeze`. if outdated && matches!(format, ListFormat::Freeze) { @@ -63,6 +66,7 @@ pub(crate) async fn pip_list( &python.map(PythonRequest::parse).unwrap_or_default(), EnvironmentPreference::from_system_flag(system, false), cache, + preview, )?; report_target_environment(&environment, cache, printer)?; @@ -83,6 +87,7 @@ pub(crate) async fn pip_list( let capabilities = IndexCapabilities::default(); let client_builder = BaseClientBuilder::new() + .retries_from_env()? 
.connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) diff --git a/crates/uv/src/commands/pip/operations.rs b/crates/uv/src/commands/pip/operations.rs index 39734fd40..55ab2aa1b 100644 --- a/crates/uv/src/commands/pip/operations.rs +++ b/crates/uv/src/commands/pip/operations.rs @@ -8,7 +8,6 @@ use std::fmt::Write; use std::path::PathBuf; use std::sync::Arc; use tracing::debug; -use uv_tool::InstalledTools; use uv_cache::Cache; use uv_client::{BaseClientBuilder, RegistryClient}; @@ -17,9 +16,9 @@ use uv_configuration::{ ExtrasSpecification, Overrides, Reinstall, Upgrade, }; use uv_dispatch::BuildDispatch; -use uv_distribution::DistributionDatabase; +use uv_distribution::{DistributionDatabase, SourcedDependencyGroups}; use uv_distribution_types::{ - CachedDist, Diagnostic, InstalledDist, LocalDist, NameRequirementSpecification, + CachedDist, Diagnostic, InstalledDist, LocalDist, NameRequirementSpecification, Requirement, ResolutionDiagnostic, UnresolvedRequirement, UnresolvedRequirementSpecification, }; use uv_distribution_types::{ @@ -29,6 +28,7 @@ use uv_fs::Simplified; use uv_install_wheel::LinkMode; use uv_installer::{Plan, Planner, Preparer, SitePackages}; use uv_normalize::{GroupName, PackageName}; +use uv_pep508::{MarkerEnvironment, RequirementOrigin}; use uv_platform_tags::Tags; use uv_pypi_types::{Conflicts, ResolverMarkerEnvironment}; use uv_python::{PythonEnvironment, PythonInstallation}; @@ -40,7 +40,8 @@ use uv_resolver::{ DependencyMode, Exclusions, FlatIndex, InMemoryIndex, Manifest, Options, Preference, Preferences, PythonRequirement, Resolver, ResolverEnvironment, ResolverOutput, }; -use uv_types::{HashStrategy, InFlight, InstalledPackagesProvider}; +use uv_tool::InstalledTools; +use uv_types::{BuildContext, HashStrategy, InFlight, InstalledPackagesProvider}; use uv_warnings::warn_user; use crate::commands::pip::loggers::{DefaultInstallLogger, InstallLogger, ResolveLogger}; @@ -119,6 +120,7 @@ pub(crate) async fn resolve( tags: Option<&Tags>, resolver_env: ResolverEnvironment, python_requirement: PythonRequirement, + current_environment: &MarkerEnvironment, conflicts: Conflicts, client: &RegistryClient, flat_index: &FlatIndex, @@ -164,7 +166,6 @@ pub(crate) async fn resolve( if !source_trees.is_empty() { let resolutions = SourceTreeResolver::new( extras, - groups, hasher, index, DistributionDatabase::new(client, build_dispatch, concurrency.downloads), @@ -210,6 +211,47 @@ pub(crate) async fn resolve( ); } + for (pyproject_path, groups) in groups { + let metadata = SourcedDependencyGroups::from_virtual_project( + pyproject_path, + None, + build_dispatch.locations(), + build_dispatch.sources(), + build_dispatch.workspace_cache(), + ) + .await + .map_err(|e| { + anyhow!( + "Failed to read dependency groups from: {}\n{}", + pyproject_path.display(), + e + ) + })?; + + // Complain if dependency groups are named that don't appear. 
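+ // For example, requesting a group that the target pyproject.toml does not define (e.g. `--group docs` with no `docs` group) should fail here rather than being silently ignored.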
+ for name in groups.explicit_names() { + if !metadata.dependency_groups.contains_key(name) { + return Err(anyhow!( + "The dependency group '{name}' was not found in the project: {}", + pyproject_path.user_display() + ))?; + } + } + // Apply dependency-groups + for (group_name, group) in &metadata.dependency_groups { + if groups.contains(group_name) { + requirements.extend(group.iter().cloned().map(|group| Requirement { + origin: Some(RequirementOrigin::Group( + pyproject_path.clone(), + metadata.name.clone(), + group_name.clone(), + )), + ..group + })); + } + } + } + requirements }; @@ -303,6 +345,7 @@ pub(crate) async fn resolve( options, &python_requirement, resolver_env, + current_environment, conflicts, tags, flat_index, diff --git a/crates/uv/src/commands/pip/show.rs b/crates/uv/src/commands/pip/show.rs index a77c29cd5..4d2b3c3a7 100644 --- a/crates/uv/src/commands/pip/show.rs +++ b/crates/uv/src/commands/pip/show.rs @@ -7,6 +7,7 @@ use owo_colors::OwoColorize; use rustc_hash::FxHashMap; use uv_cache::Cache; +use uv_configuration::PreviewMode; use uv_distribution_types::{Diagnostic, Name}; use uv_fs::Simplified; use uv_install_wheel::read_record_file; @@ -27,6 +28,7 @@ pub(crate) fn pip_show( files: bool, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { if packages.is_empty() { #[allow(clippy::print_stderr)] @@ -46,6 +48,7 @@ pub(crate) fn pip_show( &python.map(PythonRequest::parse).unwrap_or_default(), EnvironmentPreference::from_system_flag(system, false), cache, + preview, )?; report_target_environment(&environment, cache, printer)?; diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs index 7831eee04..6858ddad0 100644 --- a/crates/uv/src/commands/pip/sync.rs +++ b/crates/uv/src/commands/pip/sync.rs @@ -1,10 +1,9 @@ use std::collections::{BTreeMap, BTreeSet}; use std::fmt::Write; -use std::sync::Arc; use anyhow::{Context, Result}; use owo_colors::OwoColorize; -use tracing::debug; +use tracing::{debug, warn}; use uv_cache::Cache; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; @@ -82,6 +81,7 @@ pub(crate) async fn pip_sync( preview: PreviewMode, ) -> Result { let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) @@ -157,6 +157,7 @@ pub(crate) async fn pip_sync( EnvironmentPreference::from_system_flag(system, false), python_preference, &cache, + preview, )?; report_interpreter(&installation, true, printer)?; PythonEnvironment::from_installation(installation) @@ -168,6 +169,7 @@ pub(crate) async fn pip_sync( .unwrap_or_default(), EnvironmentPreference::from_system_flag(system, true), &cache, + preview, )?; report_target_environment(&environment, &cache, printer)?; environment @@ -210,7 +212,13 @@ pub(crate) async fn pip_sync( } } - let _lock = environment.lock().await?; + let _lock = environment + .lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); let interpreter = environment.interpreter(); @@ -265,32 +273,21 @@ pub(crate) async fn pip_sync( no_index, ); - // Add all authenticated sources to the cache. 
- for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Determine the PyTorch backend. - let torch_backend = torch_backend.map(|mode| { - if preview.is_disabled() { - warn_user!("The `--torch-backend` setting is experimental and may change without warning. Pass `--preview` to disable this warning."); - } - - TorchStrategy::from_mode( - mode, - python_platform - .map(TargetTriple::platform) - .as_ref() - .unwrap_or(interpreter.platform()) - .os(), - ) - }).transpose()?; + let torch_backend = torch_backend + .map(|mode| { + TorchStrategy::from_mode( + mode, + python_platform + .map(TargetTriple::platform) + .as_ref() + .unwrap_or(interpreter.platform()) + .os(), + ) + }) + .transpose()?; // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? @@ -416,6 +413,7 @@ pub(crate) async fn pip_sync( Some(&tags), ResolverEnvironment::specific(marker_env.clone()), python_requirement, + interpreter.markers(), Conflicts::empty(), &client, &flat_index, diff --git a/crates/uv/src/commands/pip/tree.rs b/crates/uv/src/commands/pip/tree.rs index 05290ffd0..81a566b8e 100644 --- a/crates/uv/src/commands/pip/tree.rs +++ b/crates/uv/src/commands/pip/tree.rs @@ -13,15 +13,15 @@ use tokio::sync::Semaphore; use uv_cache::{Cache, Refresh}; use uv_cache_info::Timestamp; use uv_client::{BaseClientBuilder, RegistryClientBuilder}; -use uv_configuration::{Concurrency, IndexStrategy, KeyringProviderType}; -use uv_distribution_types::{Diagnostic, IndexCapabilities, IndexLocations, Name}; +use uv_configuration::{Concurrency, IndexStrategy, KeyringProviderType, PreviewMode}; +use uv_distribution_types::{Diagnostic, IndexCapabilities, IndexLocations, Name, RequiresPython}; use uv_installer::SitePackages; use uv_normalize::PackageName; use uv_pep440::Version; use uv_pep508::{Requirement, VersionOrUrl}; use uv_pypi_types::{ResolutionMetadata, ResolverMarkerEnvironment, VerbatimParsedUrl}; use uv_python::{EnvironmentPreference, PythonEnvironment, PythonRequest}; -use uv_resolver::{ExcludeNewer, PrereleaseMode, RequiresPython}; +use uv_resolver::{ExcludeNewer, PrereleaseMode}; use crate::commands::ExitStatus; use crate::commands::pip::latest::LatestClient; @@ -52,12 +52,14 @@ pub(crate) async fn pip_tree( system: bool, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { // Detect the current Python interpreter. let environment = PythonEnvironment::find( &python.map(PythonRequest::parse).unwrap_or_default(), EnvironmentPreference::from_system_flag(system, false), cache, + preview, )?; report_target_environment(&environment, cache, printer)?; @@ -84,6 +86,7 @@ pub(crate) async fn pip_tree( let capabilities = IndexCapabilities::default(); let client_builder = BaseClientBuilder::new() + .retries_from_env()? 
.connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) diff --git a/crates/uv/src/commands/pip/uninstall.rs b/crates/uv/src/commands/pip/uninstall.rs index 787ba5aae..f617a0203 100644 --- a/crates/uv/src/commands/pip/uninstall.rs +++ b/crates/uv/src/commands/pip/uninstall.rs @@ -3,11 +3,11 @@ use std::fmt::Write; use anyhow::Result; use itertools::{Either, Itertools}; use owo_colors::OwoColorize; -use tracing::debug; +use tracing::{debug, warn}; use uv_cache::Cache; use uv_client::BaseClientBuilder; -use uv_configuration::{DryRun, KeyringProviderType}; +use uv_configuration::{DryRun, KeyringProviderType, PreviewMode}; use uv_distribution_types::Requirement; use uv_distribution_types::{InstalledMetadata, Name, UnresolvedRequirement}; use uv_fs::Simplified; @@ -37,10 +37,12 @@ pub(crate) async fn pip_uninstall( network_settings: &NetworkSettings, dry_run: DryRun, printer: Printer, + preview: PreviewMode, ) -> Result { let start = std::time::Instant::now(); let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) @@ -57,6 +59,7 @@ pub(crate) async fn pip_uninstall( .unwrap_or_default(), EnvironmentPreference::from_system_flag(system, true), &cache, + preview, )?; report_target_environment(&environment, &cache, printer)?; @@ -98,7 +101,13 @@ pub(crate) async fn pip_uninstall( } } - let _lock = environment.lock().await?; + let _lock = environment + .lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); // Index the current `site-packages` directory. let site_packages = uv_installer::SitePackages::from_environment(&environment)?; diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs index aaaeb8f3c..d65866483 100644 --- a/crates/uv/src/commands/project/add.rs +++ b/crates/uv/src/commands/project/add.rs @@ -10,29 +10,31 @@ use anyhow::{Context, Result, bail}; use itertools::Itertools; use owo_colors::OwoColorize; use rustc_hash::{FxBuildHasher, FxHashMap}; -use tracing::debug; +use tracing::{debug, warn}; use url::Url; use uv_cache::Cache; use uv_cache_key::RepositoryUrl; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ - Concurrency, Constraints, DependencyGroups, DevMode, DryRun, EditableMode, ExtrasSpecification, - InstallOptions, PreviewMode, SourceStrategy, + Concurrency, Constraints, DependencyGroups, DependencyGroupsWithDefaults, DevMode, DryRun, + EditableMode, ExtrasSpecification, ExtrasSpecificationWithDefaults, InstallOptions, + PreviewMode, SourceStrategy, }; use uv_dispatch::BuildDispatch; use uv_distribution::DistributionDatabase; use uv_distribution_types::{ - Index, IndexName, IndexUrls, NameRequirementSpecification, Requirement, RequirementSource, - UnresolvedRequirement, VersionId, redact_credentials, + Index, IndexName, IndexUrl, IndexUrls, NameRequirementSpecification, Requirement, + RequirementSource, UnresolvedRequirement, VersionId, }; -use uv_fs::Simplified; +use uv_fs::{LockedFile, Simplified}; use uv_git::GIT_STORE; use uv_git_types::GitReference; -use uv_normalize::{DEV_DEPENDENCIES, DefaultExtras, PackageName}; +use uv_normalize::{DEV_DEPENDENCIES, DefaultExtras, DefaultGroups, PackageName}; use uv_pep508::{ExtraName, MarkerTree, UnnamedRequirement, VersionOrUrl}; use uv_pypi_types::{ParsedUrl, VerbatimParsedUrl}; use uv_python::{Interpreter, 
PythonDownloads, PythonEnvironment, PythonPreference, PythonRequest}; +use uv_redacted::DisplaySafeUrl; use uv_requirements::{NamedRequirementsResolver, RequirementsSource, RequirementsSpecification}; use uv_resolver::FlatIndex; use uv_scripts::{Pep723ItemRef, Pep723Metadata, Pep723Script}; @@ -40,7 +42,7 @@ use uv_settings::PythonInstallMirrors; use uv_types::{BuildIsolation, HashStrategy}; use uv_warnings::warn_user_once; use uv_workspace::pyproject::{DependencyType, Source, SourceError, Sources, ToolUvSources}; -use uv_workspace::pyproject_mut::{ArrayEdit, DependencyTarget, PyProjectTomlMut}; +use uv_workspace::pyproject_mut::{AddBoundsKind, ArrayEdit, DependencyTarget, PyProjectTomlMut}; use uv_workspace::{DiscoveryOptions, VirtualProject, Workspace, WorkspaceCache}; use crate::commands::pip::loggers::{ @@ -73,13 +75,15 @@ pub(crate) async fn add( editable: Option, dependency_type: DependencyType, raw: bool, + bounds: Option, indexes: Vec, rev: Option, tag: Option, branch: Option, - extras: Vec, + extras_of_dependency: Vec, package: Option, python: Option, + workspace: bool, install_mirrors: PythonInstallMirrors, settings: ResolverInstallerSettings, network_settings: NetworkSettings, @@ -93,6 +97,10 @@ pub(crate) async fn add( printer: Printer, preview: PreviewMode, ) -> Result { + if bounds.is_some() && preview.is_disabled() { + warn_user_once!("The bounds option is in preview and may change in any future release."); + } + for source in &requirements { match source { RequirementsSource::PyprojectToml(_) => { @@ -116,7 +124,35 @@ pub(crate) async fn add( let reporter = PythonDownloadReporter::single(printer); - let target = if let Some(script) = script { + // Determine what defaults/extras we're explicitly enabling + let (extras, groups) = match &dependency_type { + DependencyType::Production => { + let extras = ExtrasSpecification::from_extra(vec![]); + let groups = DependencyGroups::from_dev_mode(DevMode::Exclude); + (extras, groups) + } + DependencyType::Dev => { + let extras = ExtrasSpecification::from_extra(vec![]); + let groups = DependencyGroups::from_dev_mode(DevMode::Include); + (extras, groups) + } + DependencyType::Optional(extra_name) => { + let extras = ExtrasSpecification::from_extra(vec![extra_name.clone()]); + let groups = DependencyGroups::from_dev_mode(DevMode::Exclude); + (extras, groups) + } + DependencyType::Group(group_name) => { + let extras = ExtrasSpecification::from_extra(vec![]); + let groups = DependencyGroups::from_group(group_name.clone()); + (extras, groups) + } + }; + // Default extras currently always disabled + let defaulted_extras = extras.with_defaults(DefaultExtras::default()); + // Default groups we need the actual project for, interpreter discovery will use this! + let defaulted_groups; + + let mut target = if let Some(script) = script { // If we found a PEP 723 script and the user provided a project-only setting, warn. if package.is_some() { warn_user_once!( @@ -140,6 +176,7 @@ pub(crate) async fn add( } let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -160,12 +197,16 @@ pub(crate) async fn add( &client_builder, cache, &reporter, + preview, ) .await?; Pep723Script::init(&path, requires_python.specifiers()).await? } }; + // Scripts don't actually have groups + defaulted_groups = groups.with_defaults(DefaultGroups::default()); + // Discover the interpreter. 
let interpreter = ScriptInterpreter::discover( Pep723ItemRef::Script(&script), @@ -179,6 +220,7 @@ pub(crate) async fn add( active, cache, printer, + preview, ) .await? .into_interpreter(); @@ -228,11 +270,16 @@ pub(crate) async fn add( } } + // Enable the default groups of the project + defaulted_groups = + groups.with_defaults(default_dependency_groups(project.pyproject_toml())?); + if frozen || no_sync { // Discover the interpreter. let interpreter = ProjectInterpreter::discover( project.workspace(), project_dir, + &defaulted_groups, python.as_deref().map(PythonRequest::parse), &network_settings, python_preference, @@ -243,6 +290,7 @@ pub(crate) async fn add( active, cache, printer, + preview, ) .await? .into_interpreter(); @@ -252,6 +300,7 @@ pub(crate) async fn add( // Discover or create the virtual environment. let environment = ProjectEnvironment::get_or_init( project.workspace(), + &defaulted_groups, python.as_deref().map(PythonRequest::parse), &install_mirrors, &network_settings, @@ -263,6 +312,7 @@ pub(crate) async fn add( cache, DryRun::Disabled, printer, + preview, ) .await? .into_environment()?; @@ -271,7 +321,16 @@ pub(crate) async fn add( } }; + let _lock = target + .acquire_lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); + let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(settings.resolver.keyring_provider) @@ -324,16 +383,7 @@ pub(crate) async fn add( let hasher = HashStrategy::default(); let sources = SourceStrategy::Enabled; - // Add all authenticated sources to the cache. - for index in settings.resolver.index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + settings.resolver.index_locations.cache_index_credentials(); // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? @@ -431,6 +481,9 @@ pub(crate) async fn add( } } + // Store the content prior to any modifications. + let snapshot = target.snapshot().await?; + // If the user provides a single, named index, pin all requirements to that index. let index = indexes .first() @@ -441,7 +494,72 @@ pub(crate) async fn add( debug!("Pinning all requirements to index: `{index}`"); }); - // Add the requirements to the `pyproject.toml` or script. + // Track modification status, for reverts. + let mut modified = false; + + // If `--workspace` is provided, add any members to the `workspace` section of the + // `pyproject.toml` file. + if workspace { + let AddTarget::Project(project, python_target) = target else { + unreachable!("`--workspace` and `--script` are conflicting options"); + }; + + let workspace = project.workspace(); + let mut toml = PyProjectTomlMut::from_toml( + &workspace.pyproject_toml().raw, + DependencyTarget::PyProjectToml, + )?; + + // Check each requirement to see if it's a path dependency + for requirement in &requirements { + if let RequirementSource::Directory { install_path, .. } = &requirement.source { + let absolute_path = if install_path.is_absolute() { + install_path.to_path_buf() + } else { + project.root().join(install_path) + }; + + // Check if the path is not already included in the workspace. 
+ if !workspace.includes(&absolute_path)? { + let relative_path = absolute_path + .strip_prefix(workspace.install_path()) + .unwrap_or(&absolute_path); + + toml.add_workspace(relative_path)?; + modified |= true; + + writeln!( + printer.stderr(), + "Added `{}` to workspace members", + relative_path.user_display().cyan() + )?; + } + } + } + + // If we modified the workspace root, we need to reload it entirely, since this can impact + // the discovered members, etc. + target = if modified { + let workspace_content = toml.to_string(); + fs_err::write( + workspace.install_path().join("pyproject.toml"), + &workspace_content, + )?; + + AddTarget::Project( + VirtualProject::discover( + project.root(), + &DiscoveryOptions::default(), + &WorkspaceCache::default(), + ) + .await?, + python_target, + ) + } else { + AddTarget::Project(project, python_target) + } + } + let mut toml = match &target { AddTarget::Script(script, _) => { PyProjectTomlMut::from_toml(&script.metadata.raw, DependencyTarget::Script) @@ -451,6 +569,7 @@ DependencyTarget::PyProjectToml, ), }?; + let edits = edits( requirements, &target, @@ -460,11 +579,31 @@ rev.as_deref(), tag.as_deref(), branch.as_deref(), - &extras, + &extras_of_dependency, index, &mut toml, )?; + // Validate any indexes that were provided on the command-line to ensure + // they point to existing non-empty directories when using path URLs. + let mut valid_indexes = Vec::with_capacity(indexes.len()); + for index in indexes { + if let IndexUrl::Path(url) = &index.url { + let path = url + .to_file_path() + .map_err(|()| anyhow::anyhow!("Invalid file path in index URL: {url}"))?; + if !path.is_dir() { + bail!("Directory not found for index: {url}"); + } + if fs_err::read_dir(&path)?.next().is_none() { + warn_user_once!("Index directory `{url}` is empty, skipping"); + continue; + } + } + valid_indexes.push(index); + } + let indexes = valid_indexes; + // Add any indexes that were provided on the command-line, in priority order. if !raw { let urls = IndexUrls::from_indexes(indexes); @@ -476,7 +615,7 @@ let content = toml.to_string(); // Save the modified `pyproject.toml` or script. - let modified = target.write(&content)?; + modified |= target.write(&content)?; // If `--frozen`, exit early. There's no reason to lock and sync, since we don't need a `uv.lock` // to exist at all. @@ -496,9 +635,6 @@ } } - // Store the content prior to any modifications. - let snapshot = target.snapshot().await?; - // Update the `pyproject.toml` in-memory. let target = target.update(&content)?; @@ -530,8 +666,10 @@ lock_state, sync_state, locked, - &dependency_type, + &defaulted_extras, + &defaulted_groups, raw, + bounds, constraints, &settings, &network_settings, @@ -627,7 +765,7 @@ } }; - // Redact any credentials. By default, we avoid writing sensitive credentials to files that + // Remove any credentials. By default, we avoid writing sensitive credentials to files that // will be checked into version control (e.g., `pyproject.toml` and `uv.lock`). Instead, // we store the credentials in a global store, and reuse them during resolution. The // expectation is that subsequent resolution steps will succeed by reading from (e.g.) the @@ -649,7 +787,7 @@ GIT_STORE.insert(RepositoryUrl::new(&git), credentials); // Redact the credentials.
- redact_credentials(&mut git); + git.remove_credentials(); } Some(Source::Git { git, @@ -705,13 +843,15 @@ fn edits( // Update the `pyproject.toml`. let edit = match &dependency_type { - DependencyType::Production => toml.add_dependency(&requirement, source.as_ref())?, - DependencyType::Dev => toml.add_dev_dependency(&requirement, source.as_ref())?, + DependencyType::Production => { + toml.add_dependency(&requirement, source.as_ref(), raw)? + } + DependencyType::Dev => toml.add_dev_dependency(&requirement, source.as_ref(), raw)?, DependencyType::Optional(extra) => { - toml.add_optional_dependency(extra, &requirement, source.as_ref())? + toml.add_optional_dependency(extra, &requirement, source.as_ref(), raw)? } DependencyType::Group(group) => { - toml.add_dependency_group_requirement(group, &requirement, source.as_ref())? + toml.add_dependency_group_requirement(group, &requirement, source.as_ref(), raw)? } }; @@ -754,8 +894,10 @@ async fn lock_and_sync( lock_state: UniversalState, sync_state: PlatformState, locked: bool, - dependency_type: &DependencyType, + extras: &ExtrasSpecificationWithDefaults, + groups: &DependencyGroupsWithDefaults, raw: bool, + bound_kind: Option, constraints: Vec, settings: &ResolverInstallerSettings, network_settings: &NetworkSettings, @@ -777,6 +919,7 @@ async fn lock_and_sync( Box::new(DefaultResolveLogger), concurrency, cache, + &WorkspaceCache::default(), printer, preview, ) @@ -831,6 +974,15 @@ async fn lock_and_sync( None => true, }; if !is_empty { + if let Some(bound_kind) = bound_kind { + writeln!( + printer.stderr(), + "{} Using explicit requirement `{}` over bounds preference `{}`", + "note:".bold(), + edit.requirement, + bound_kind + )?; + } continue; } @@ -843,7 +995,12 @@ async fn lock_and_sync( // For example, convert `1.2.3+local` to `1.2.3`. let minimum = (*minimum).clone().without_local(); - toml.set_dependency_minimum_version(&edit.dependency_type, *index, minimum)?; + toml.set_dependency_bound( + &edit.dependency_type, + *index, + minimum, + bound_kind.unwrap_or_default(), + )?; modified = true; } @@ -863,6 +1020,7 @@ async fn lock_and_sync( // Invalidate the project metadata. if let AddTarget::Project(VirtualProject::Project(ref project), _) = target { let url = Url::from_file_path(project.project_root()) + .map(DisplaySafeUrl::from) .expect("project root is a valid URL"); let version_id = VersionId::from_url(&url); let existing = lock_state.index().distributions().remove(&version_id); @@ -883,6 +1041,7 @@ async fn lock_and_sync( Box::new(SummaryResolveLogger), concurrency, cache, + &WorkspaceCache::default(), printer, preview, ) @@ -902,36 +1061,6 @@ async fn lock_and_sync( return Ok(()); }; - // Sync the environment. - let (extras, dev) = match dependency_type { - DependencyType::Production => { - let extras = ExtrasSpecification::from_extra(vec![]); - let dev = DependencyGroups::from_dev_mode(DevMode::Exclude); - (extras, dev) - } - DependencyType::Dev => { - let extras = ExtrasSpecification::from_extra(vec![]); - let dev = DependencyGroups::from_dev_mode(DevMode::Include); - (extras, dev) - } - DependencyType::Optional(extra_name) => { - let extras = ExtrasSpecification::from_extra(vec![extra_name.clone()]); - let dev = DependencyGroups::from_dev_mode(DevMode::Exclude); - (extras, dev) - } - DependencyType::Group(group_name) => { - let extras = ExtrasSpecification::from_extra(vec![]); - let dev = DependencyGroups::from_group(group_name.clone()); - (extras, dev) - } - }; - - // Determine the default groups to include. 
- let default_groups = default_dependency_groups(project.pyproject_toml())?; - - // Determine the default extras to include. - let default_extras = DefaultExtras::default(); - // Identify the installation target. let target = match &project { VirtualProject::Project(project) => InstallTarget::Project { @@ -948,11 +1077,12 @@ async fn lock_and_sync( project::sync::do_sync( target, venv, - &extras.with_defaults(default_extras), - &dev.with_defaults(default_groups), + extras, + groups, EditableMode::Editable, InstallOptions::default(), Modifications::Sufficient, + None, settings.into(), network_settings, &sync_state, @@ -960,6 +1090,7 @@ async fn lock_and_sync( installer_metadata, concurrency, cache, + WorkspaceCache::default(), DryRun::Disabled, printer, preview, @@ -1127,6 +1258,15 @@ impl<'lock> From<&'lock AddTarget> for LockTarget<'lock> { } impl AddTarget { + /// Acquire a file lock mapped to the underlying interpreter to prevent concurrent + /// modifications. + pub(super) async fn acquire_lock(&self) -> Result { + match self { + Self::Script(_, interpreter) => interpreter.lock().await, + Self::Project(_, python_target) => python_target.interpreter().lock().await, + } + } + /// Returns the [`Interpreter`] for the target. pub(super) fn interpreter(&self) -> &Interpreter { match self { @@ -1226,6 +1366,16 @@ impl AddTargetSnapshot { Ok(()) } Self::Project(project, lock) => { + // Write the workspace `pyproject.toml` back to disk. + let workspace = project.workspace(); + if workspace.install_path() != project.root() { + debug!("Reverting changes to workspace `pyproject.toml`"); + fs_err::write( + workspace.install_path().join("pyproject.toml"), + workspace.pyproject_toml().as_ref(), + )?; + } + // Write the `pyproject.toml` back to disk. debug!("Reverting changes to `pyproject.toml`"); fs_err::write( diff --git a/crates/uv/src/commands/project/environment.rs b/crates/uv/src/commands/project/environment.rs index 5d35f3bf8..a3cda28c1 100644 --- a/crates/uv/src/commands/project/environment.rs +++ b/crates/uv/src/commands/project/environment.rs @@ -1,10 +1,13 @@ +use std::path::Path; + use tracing::debug; use uv_cache::{Cache, CacheBucket}; use uv_cache_key::{cache_digest, hash_digest}; use uv_configuration::{Concurrency, Constraints, PreviewMode}; use uv_distribution_types::{Name, Resolution}; -use uv_python::{Interpreter, PythonEnvironment}; +use uv_fs::PythonExt; +use uv_python::{Interpreter, PythonEnvironment, canonicalize_executable}; use crate::commands::pip::loggers::{InstallLogger, ResolveLogger}; use crate::commands::pip::operations::Modifications; @@ -41,13 +44,16 @@ impl CachedEnvironment { printer: Printer, preview: PreviewMode, ) -> Result { - let interpreter = Self::base_interpreter(interpreter, cache)?; + // Resolve the "base" interpreter, which resolves to an underlying parent interpreter if the + // given interpreter is a virtual environment. + let base_interpreter = Self::base_interpreter(interpreter, cache)?; // Resolve the requirements with the interpreter. let resolution = Resolution::from( resolve_environment( spec, - &interpreter, + &base_interpreter, + build_constraints.clone(), &settings.resolver, network_settings, state, @@ -69,12 +75,34 @@ impl CachedEnvironment { hash_digest(&distributions) }; - // Hash the interpreter based on its path. - // TODO(charlie): Come up with a robust hash for the interpreter. - let interpreter_hash = cache_digest(&interpreter.sys_executable()); + // Construct a hash for the environment. 
+ // + // Use the canonicalized base interpreter path since that's the interpreter we performed the + // resolution with and the interpreter the environment will be created with. + // + // We also include the canonicalized `sys.prefix` of the non-base interpreter, that is, the + // virtual environment's path. Originally, we shared cached environments independent of the + // environment they'd be layered on top of. However, this causes collisions as the overlay + // `.pth` file can be overridden by another instance of uv. Including this element in the key + // avoids this problem at the cost of creating separate cached environments for identical + // `--with` invocations across projects. We use `sys.prefix` rather than `sys.executable` so + // we can canonicalize it without invalidating the purpose of the element — it'd probably be + // safe to just use the absolute `sys.executable` as well. + // + // TODO(zanieb): Since we're not sharing these environments across projects, we should move + // [`CachedEnvironment::set_overlay`] etc. here since the values there should be constant + // now. + // + // TODO(zanieb): We should include the version of the base interpreter in the hash, so if + // the interpreter at the canonicalized path changes versions we construct a new + // environment. + let environment_hash = cache_digest(&( + &canonicalize_executable(base_interpreter.sys_executable())?, + &interpreter.sys_prefix().canonicalize()?, + )); // Search in the content-addressed cache. - let cache_entry = cache.entry(CacheBucket::Environments, interpreter_hash, resolution_hash); + let cache_entry = cache.entry(CacheBucket::Environments, environment_hash, resolution_hash); if cache.refresh().is_none() { if let Ok(root) = cache.resolve_link(cache_entry.path()) { @@ -88,12 +116,14 @@ let temp_dir = cache.venv_dir()?; let venv = uv_virtualenv::create_venv( temp_dir.path(), - interpreter, + base_interpreter, uv_virtualenv::Prompt::None, false, false, true, false, + false, + preview, )?; sync_environment( @@ -114,9 +144,7 @@ .await?; // Now that the environment is complete, sync it to its content-addressed location. - let id = cache - .persist(temp_dir.into_path(), cache_entry.path()) - .await?; + let id = cache.persist(temp_dir.keep(), cache_entry.path()).await?; let root = cache.archive(&id); Ok(Self(PythonEnvironment::from_root(root, cache)?)) @@ -168,6 +196,30 @@ Ok(()) } + /// Set the `extends-environment` key in the `pyvenv.cfg` file to the given path. + /// + /// Ephemeral environments created by `uv run --with` extend a parent (virtual or system) + /// environment by adding a `.pth` file to the ephemeral environment's `site-packages` + /// directory. The `.pth` file contains Python code to dynamically add the parent + /// environment's `site-packages` directory to Python's import search paths in addition to + /// the ephemeral environment's `site-packages` directory. This works well at runtime, but + /// is too dynamic for static analysis tools like ty to understand. As such, we + /// additionally write the `sys.prefix` of the parent environment to the + /// `extends-environment` key of the ephemeral environment's `pyvenv.cfg` file, making it + /// easier for these tools to statically and reliably understand the relationship between + /// the two environments.
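The `environment_hash` introduced above replaces the old digest of the interpreter's executable path. As a rough, std-only sketch of the idea (this is not uv's `cache_digest` or `canonicalize_executable`, just an illustration of what goes into the key), the cache entry is derived from the canonicalized base interpreter executable plus the canonicalized `sys.prefix` of the environment being layered on:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::io;
use std::path::Path;

/// Sketch of the new cache key: both paths are canonicalized so that symlinked
/// interpreters and virtual environments map to the same entry, while two
/// different target environments never share one overlay entry.
fn environment_key(base_executable: &Path, target_sys_prefix: &Path) -> io::Result<u64> {
    let mut hasher = DefaultHasher::new();
    base_executable.canonicalize()?.hash(&mut hasher);
    target_sys_prefix.canonicalize()?.hash(&mut hasher);
    Ok(hasher.finish())
}
```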
+ #[allow(clippy::result_large_err)] + pub(crate) fn set_parent_environment( + &self, + parent_environment_sys_prefix: &Path, + ) -> Result<(), ProjectError> { + self.0.set_pyvenv_cfg( + "extends-environment", + &parent_environment_sys_prefix.escape_for_python(), + )?; + Ok(()) + } + /// Return the [`Interpreter`] to use for the cached environment, based on a given /// [`Interpreter`]. /// diff --git a/crates/uv/src/commands/project/export.rs b/crates/uv/src/commands/project/export.rs index e33de8231..c14bfd904 100644 --- a/crates/uv/src/commands/project/export.rs +++ b/crates/uv/src/commands/project/export.rs @@ -61,7 +61,7 @@ pub(crate) async fn export( install_options: InstallOptions, output_file: Option, extras: ExtrasSpecification, - dev: DependencyGroups, + groups: DependencyGroups, editable: EditableMode, locked: bool, frozen: bool, @@ -122,7 +122,7 @@ pub(crate) async fn export( ExportTarget::Script(_) => DefaultExtras::default(), }; - let dev = dev.with_defaults(default_groups); + let groups = groups.with_defaults(default_groups); let extras = extras.with_defaults(default_extras); // Find an interpreter for the project, unless `--frozen` is set. @@ -142,12 +142,14 @@ pub(crate) async fn export( Some(false), cache, printer, + preview, ) .await? .into_interpreter(), ExportTarget::Project(project) => ProjectInterpreter::discover( project.workspace(), project_dir, + &groups, python.as_deref().map(PythonRequest::parse), &network_settings, python_preference, @@ -158,6 +160,7 @@ pub(crate) async fn export( Some(false), cache, printer, + preview, ) .await? .into_interpreter(), @@ -190,6 +193,7 @@ pub(crate) async fn export( Box::new(DefaultResolveLogger), concurrency, cache, + &workspace_cache, printer, preview, ) @@ -206,7 +210,7 @@ pub(crate) async fn export( }; // Validate that the set of requested extras and development groups are compatible. - detect_conflicts(&lock, &extras, &dev)?; + detect_conflicts(&lock, &extras, &groups)?; // Identify the installation target. let target = match &target { @@ -259,7 +263,7 @@ pub(crate) async fn export( // Validate that the set of requested extras and development groups are defined in the lockfile. target.validate_extras(&extras)?; - target.validate_groups(&dev)?; + target.validate_groups(&groups)?; // Write the resolved dependencies to the output channel. 
let mut writer = OutputWriter::new(!quiet || output_file.is_none(), output_file.as_deref()); @@ -306,7 +310,7 @@ pub(crate) async fn export( &target, &prune, &extras, - &dev, + &groups, include_annotations, editable, hashes, @@ -328,8 +332,9 @@ pub(crate) async fn export( &target, &prune, &extras, - &dev, + &groups, include_annotations, + editable, &install_options, )?; diff --git a/crates/uv/src/commands/project/init.rs b/crates/uv/src/commands/project/init.rs index 91e2fdc0c..9ff321a72 100644 --- a/crates/uv/src/commands/project/init.rs +++ b/crates/uv/src/commands/project/init.rs @@ -4,13 +4,15 @@ use std::fmt::Write; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::str::FromStr; +use uv_distribution_types::RequiresPython; use tracing::{debug, trace, warn}; use uv_cache::Cache; use uv_cli::AuthorFrom; use uv_client::BaseClientBuilder; use uv_configuration::{ - PreviewMode, ProjectBuildBackend, VersionControlError, VersionControlSystem, + DependencyGroupsWithDefaults, PreviewMode, ProjectBuildBackend, VersionControlError, + VersionControlSystem, }; use uv_fs::{CWD, Simplified}; use uv_git::GIT; @@ -21,9 +23,9 @@ use uv_python::{ PythonPreference, PythonRequest, PythonVariant, PythonVersionFile, VersionFileDiscoveryOptions, VersionRequest, }; -use uv_resolver::RequiresPython; use uv_scripts::{Pep723Script, ScriptTag}; use uv_settings::PythonInstallMirrors; +use uv_static::EnvVars; use uv_warnings::warn_user_once; use uv_workspace::pyproject_mut::{DependencyTarget, PyProjectTomlMut}; use uv_workspace::{DiscoveryOptions, MemberDiscovery, Workspace, WorkspaceCache, WorkspaceError}; @@ -85,6 +87,7 @@ pub(crate) async fn init( pin_python, package, no_config, + preview, ) .await?; @@ -200,6 +203,7 @@ async fn init_script( pin_python: bool, package: bool, no_config: bool, + preview: PreviewMode, ) -> Result<()> { if no_workspace { warn_user_once!("`--no-workspace` is a no-op for Python scripts, which are standalone"); @@ -214,6 +218,7 @@ async fn init_script( warn_user_once!("`--package` is a no-op for Python scripts, which are standalone"); } let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -256,6 +261,7 @@ async fn init_script( &client_builder, cache, &reporter, + preview, ) .await?; @@ -343,6 +349,7 @@ async fn init_project( let reporter = PythonDownloadReporter::single(printer); let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -432,6 +439,7 @@ async fn init_project( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); @@ -459,6 +467,7 @@ async fn init_project( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); @@ -501,7 +510,7 @@ async fn init_project( (requires_python, python_request) } else if let Some(requires_python) = workspace .as_ref() - .map(find_requires_python) + .map(|workspace| find_requires_python(workspace, &DependencyGroupsWithDefaults::none())) .transpose()? 
.flatten() { @@ -525,6 +534,7 @@ async fn init_project( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); @@ -552,6 +562,7 @@ async fn init_project( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); @@ -1235,6 +1246,7 @@ fn detect_git_repository(path: &Path) -> GitDiscoveryResult { let Ok(output) = Command::new(git) .arg("rev-parse") .arg("--is-inside-work-tree") + .env(EnvVars::LC_ALL, "C") .current_dir(path) .output() else { diff --git a/crates/uv/src/commands/project/install_target.rs b/crates/uv/src/commands/project/install_target.rs index d225114c9..b0f20e76f 100644 --- a/crates/uv/src/commands/project/install_target.rs +++ b/crates/uv/src/commands/project/install_target.rs @@ -165,11 +165,18 @@ impl<'lock> InstallTarget<'lock> { .requirements() .into_iter() .map(Cow::Owned) - .chain(workspace.dependency_groups().ok().into_iter().flat_map( - |dependency_groups| { - dependency_groups.into_values().flatten().map(Cow::Owned) - }, - )) + .chain( + workspace + .workspace_dependency_groups() + .ok() + .into_iter() + .flat_map(|dependency_groups| { + dependency_groups + .into_values() + .flat_map(|group| group.requirements) + .map(Cow::Owned) + }), + ) .chain(workspace.packages().values().flat_map(|member| { // Iterate over all dependencies in each member. let dependencies = member @@ -316,9 +323,15 @@ impl<'lock> InstallTarget<'lock> { let known_groups = member_packages .iter() .flat_map(|package| package.dependency_groups().keys().map(Cow::Borrowed)) - .chain(workspace.dependency_groups().ok().into_iter().flat_map( - |dependency_groups| dependency_groups.into_keys().map(Cow::Owned), - )) + .chain( + workspace + .workspace_dependency_groups() + .ok() + .into_iter() + .flat_map(|dependency_groups| { + dependency_groups.into_keys().map(Cow::Owned) + }), + ) .collect::>(); for group in groups.explicit_names() { diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs index 93b51a60a..f79557d9e 100644 --- a/crates/uv/src/commands/project/lock.rs +++ b/crates/uv/src/commands/project/lock.rs @@ -12,13 +12,14 @@ use tracing::debug; use uv_cache::Cache; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ - Concurrency, Constraints, DryRun, ExtrasSpecification, PreviewMode, Reinstall, Upgrade, + Concurrency, Constraints, DependencyGroupsWithDefaults, DryRun, ExtrasSpecification, + PreviewMode, Reinstall, Upgrade, }; use uv_dispatch::BuildDispatch; use uv_distribution::DistributionDatabase; use uv_distribution_types::{ DependencyMetadata, HashGeneration, Index, IndexLocations, NameRequirementSpecification, - Requirement, UnresolvedRequirementSpecification, + Requirement, RequiresPython, UnresolvedRequirementSpecification, }; use uv_git::ResolvedRepositoryReference; use uv_normalize::{GroupName, PackageName}; @@ -28,7 +29,7 @@ use uv_python::{Interpreter, PythonDownloads, PythonEnvironment, PythonPreferenc use uv_requirements::ExtrasResolver; use uv_requirements::upgrade::{LockedRequirements, read_lock_requirements}; use uv_resolver::{ - FlatIndex, InMemoryIndex, Lock, Options, OptionsBuilder, PythonRequirement, RequiresPython, + FlatIndex, InMemoryIndex, Lock, Options, OptionsBuilder, PythonRequirement, ResolverEnvironment, 
ResolverManifest, SatisfiesResult, UniversalMarker, }; use uv_scripts::{Pep723ItemRef, Pep723Script}; @@ -98,6 +99,7 @@ pub(crate) async fn lock( let script = match script { Some(ScriptPath::Path(path)) => { let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -113,6 +115,7 @@ pub(crate) async fn lock( &client_builder, cache, &reporter, + preview, ) .await?; Some(Pep723Script::init(&path, requires_python.specifiers()).await?) @@ -142,6 +145,8 @@ pub(crate) async fn lock( LockTarget::Workspace(workspace) => ProjectInterpreter::discover( workspace, project_dir, + // Don't enable any groups' requires-python for interpreter discovery + &DependencyGroupsWithDefaults::none(), python.as_deref().map(PythonRequest::parse), &network_settings, python_preference, @@ -152,6 +157,7 @@ pub(crate) async fn lock( Some(false), cache, printer, + preview, ) .await? .into_interpreter(), @@ -167,6 +173,7 @@ pub(crate) async fn lock( Some(false), cache, printer, + preview, ) .await? .into_interpreter(), @@ -193,6 +200,7 @@ pub(crate) async fn lock( Box::new(DefaultResolveLogger), concurrency, cache, + &workspace_cache, printer, preview, ) @@ -257,6 +265,7 @@ pub(super) struct LockOperation<'env> { logger: Box, concurrency: Concurrency, cache: &'env Cache, + workspace_cache: &'env WorkspaceCache, printer: Printer, preview: PreviewMode, } @@ -271,6 +280,7 @@ impl<'env> LockOperation<'env> { logger: Box, concurrency: Concurrency, cache: &'env Cache, + workspace_cache: &'env WorkspaceCache, printer: Printer, preview: PreviewMode, ) -> Self { @@ -283,6 +293,7 @@ impl<'env> LockOperation<'env> { logger, concurrency, cache, + workspace_cache, printer, preview, } @@ -328,6 +339,7 @@ impl<'env> LockOperation<'env> { self.logger, self.concurrency, self.cache, + self.workspace_cache, self.printer, self.preview, ) @@ -366,6 +378,7 @@ impl<'env> LockOperation<'env> { self.logger, self.concurrency, self.cache, + self.workspace_cache, self.printer, self.preview, ) @@ -396,6 +409,7 @@ async fn do_lock( logger: Box, concurrency: Concurrency, cache: &Cache, + workspace_cache: &WorkspaceCache, printer: Printer, preview: PreviewMode, ) -> Result { @@ -437,8 +451,8 @@ async fn do_lock( let build_constraints = target.lower(build_constraints, index_locations, *sources)?; let dependency_groups = dependency_groups .into_iter() - .map(|(name, requirements)| { - let requirements = target.lower(requirements, index_locations, *sources)?; + .map(|(name, group)| { + let requirements = target.lower(group.requirements, index_locations, *sources)?; Ok((name, requirements)) }) .collect::, ProjectError>>()?; @@ -575,21 +589,13 @@ async fn do_lock( // Initialize the client. let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(*keyring_provider) .allow_insecure_host(network_settings.allow_insecure_host.clone()); - // Add all authenticated sources to the cache. 
- for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); for index in target.indexes() { if let Some(credentials) = index.credentials() { @@ -648,8 +654,6 @@ async fn do_lock( FlatIndex::from_entries(entries, None, &hasher, build_options) }; - let workspace_cache = WorkspaceCache::default(); - // Create a build dispatch. let build_dispatch = BuildDispatch::new( &client, @@ -668,7 +672,7 @@ async fn do_lock( &build_hasher, *exclude_newer, *sources, - workspace_cache, + workspace_cache.clone(), concurrency, preview, ); @@ -822,6 +826,7 @@ async fn do_lock( None, resolver_env, python_requirement, + interpreter.markers(), conflicts.clone(), &client, &flat_index, @@ -930,7 +935,7 @@ impl ValidatedLock { lock.prerelease_mode().cyan(), options.prerelease_mode.cyan() ); - return Ok(Self::Unusable(lock)); + return Ok(Self::Preferable(lock)); } if lock.fork_strategy() != options.fork_strategy { let _ = writeln!( @@ -979,13 +984,54 @@ impl ValidatedLock { return Ok(Self::Unusable(lock)); } Upgrade::Packages(_) => { - // If the user specified `--upgrade-package`, then at best we can prefer some of - // the existing versions. - debug!("Ignoring existing lockfile due to `--upgrade-package`"); - return Ok(Self::Preferable(lock)); + // This is handled below, after some checks regarding fork + // markers. In particular, we'd like to return `Preferable` + // here, but we shouldn't if the fork markers cannot be + // reused. } } + // NOTE: It's important that this appears before any possible path that + // returns `Self::Preferable`. In particular, if our fork markers are + // bunk, then we shouldn't return a result that indicates we should try + // to re-use the existing fork markers. + if let Err((fork_markers_union, environments_union)) = lock.check_marker_coverage() { + warn_user!( + "Ignoring existing lockfile due to fork markers not covering the supported environments: `{}` vs `{}`", + fork_markers_union + .try_to_string() + .unwrap_or("true".to_string()), + environments_union + .try_to_string() + .unwrap_or("true".to_string()), + ); + return Ok(Self::Versions(lock)); + } + + // NOTE: Similarly as above, this should also appear before any + // possible code path that can return `Self::Preferable`. + if let Err((fork_markers_union, requires_python_marker)) = + lock.requires_python_coverage(requires_python) + { + warn_user!( + "Ignoring existing lockfile due to fork markers being disjoint with `requires-python`: `{}` vs `{}`", + fork_markers_union + .try_to_string() + .unwrap_or("true".to_string()), + requires_python_marker + .try_to_string() + .unwrap_or("true".to_string()), + ); + return Ok(Self::Versions(lock)); + } + + if let Upgrade::Packages(_) = upgrade { + // If the user specified `--upgrade-package`, then at best we can prefer some of + // the existing versions. + debug!("Ignoring existing lockfile due to `--upgrade-package`"); + return Ok(Self::Preferable(lock)); + } + // If the Requires-Python bound has changed, we have to perform a clean resolution, since // the set of `resolution-markers` may no longer cover the entire supported Python range. 
if lock.requires_python().range() != requires_python.range() { @@ -1018,19 +1064,6 @@ impl ValidatedLock { return Ok(Self::Versions(lock)); } - if let Err((fork_markers_union, environments_union)) = lock.check_marker_coverage() { - warn_user!( - "Ignoring existing lockfile due to fork markers not covering the supported environments: `{}` vs `{}`", - fork_markers_union - .try_to_string() - .unwrap_or("true".to_string()), - environments_union - .try_to_string() - .unwrap_or("true".to_string()), - ); - return Ok(Self::Versions(lock)); - } - // If the set of required platforms has changed, we have to perform a clean resolution. let expected = lock.simplified_required_environments(); let actual = required_environments diff --git a/crates/uv/src/commands/project/lock_target.rs b/crates/uv/src/commands/project/lock_target.rs index cb45aa8ec..4618b3b84 100644 --- a/crates/uv/src/commands/project/lock_target.rs +++ b/crates/uv/src/commands/project/lock_target.rs @@ -3,15 +3,15 @@ use std::path::{Path, PathBuf}; use itertools::Either; -use uv_configuration::SourceStrategy; +use uv_configuration::{DependencyGroupsWithDefaults, SourceStrategy}; use uv_distribution::LoweredRequirement; -use uv_distribution_types::{Index, IndexLocations, Requirement}; +use uv_distribution_types::{Index, IndexLocations, Requirement, RequiresPython}; use uv_normalize::{GroupName, PackageName}; use uv_pep508::RequirementOrigin; use uv_pypi_types::{Conflicts, SupportedEnvironments, VerbatimParsedUrl}; -use uv_resolver::{Lock, LockVersion, RequiresPython, VERSION}; +use uv_resolver::{Lock, LockVersion, VERSION}; use uv_scripts::Pep723Script; -use uv_workspace::dependency_groups::DependencyGroupError; +use uv_workspace::dependency_groups::{DependencyGroupError, FlatDependencyGroup}; use uv_workspace::{Workspace, WorkspaceMember}; use crate::commands::project::{ProjectError, find_requires_python}; @@ -100,12 +100,9 @@ impl<'lock> LockTarget<'lock> { /// attached to any members within the target. 
pub(crate) fn dependency_groups( self, - ) -> Result< - BTreeMap>>, - DependencyGroupError, - > { + ) -> Result, DependencyGroupError> { match self { - Self::Workspace(workspace) => workspace.dependency_groups(), + Self::Workspace(workspace) => workspace.workspace_dependency_groups(), Self::Script(_) => Ok(BTreeMap::new()), } } @@ -219,7 +216,11 @@ impl<'lock> LockTarget<'lock> { #[allow(clippy::result_large_err)] pub(crate) fn requires_python(self) -> Result, ProjectError> { match self { - Self::Workspace(workspace) => find_requires_python(workspace), + Self::Workspace(workspace) => { + // When locking, don't try to enforce requires-python bounds that appear on groups + let groups = DependencyGroupsWithDefaults::none(); + find_requires_python(workspace, &groups) + } Self::Script(script) => Ok(script .metadata .requires_python diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs index 9adeb7216..1a0274cac 100644 --- a/crates/uv/src/commands/project/mod.rs +++ b/crates/uv/src/commands/project/mod.rs @@ -18,13 +18,14 @@ use uv_configuration::{ use uv_dispatch::{BuildDispatch, SharedState}; use uv_distribution::{DistributionDatabase, LoweredRequirement}; use uv_distribution_types::{ - Index, Requirement, Resolution, UnresolvedRequirement, UnresolvedRequirementSpecification, + Index, Requirement, RequiresPython, Resolution, UnresolvedRequirement, + UnresolvedRequirementSpecification, }; use uv_fs::{CWD, LockedFile, Simplified}; use uv_git::ResolvedRepositoryReference; use uv_installer::{SatisfiesResult, SitePackages}; use uv_normalize::{DEV_DEPENDENCIES, DefaultGroups, ExtraName, GroupName, PackageName}; -use uv_pep440::{Version, VersionSpecifiers}; +use uv_pep440::{TildeVersionSpecifier, Version, VersionSpecifiers}; use uv_pep508::MarkerTreeContents; use uv_pypi_types::{ConflictPackage, ConflictSet, Conflicts}; use uv_python::{ @@ -35,8 +36,8 @@ use uv_python::{ use uv_requirements::upgrade::{LockedRequirements, read_lock_requirements}; use uv_requirements::{NamedRequirementsResolver, RequirementsSpecification}; use uv_resolver::{ - FlatIndex, Lock, OptionsBuilder, Preference, PythonRequirement, RequiresPython, - ResolverEnvironment, ResolverOutput, + FlatIndex, Lock, OptionsBuilder, Preference, PythonRequirement, ResolverEnvironment, + ResolverOutput, }; use uv_scripts::Pep723ItemRef; use uv_settings::PythonInstallMirrors; @@ -45,7 +46,7 @@ use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy}; use uv_warnings::{warn_user, warn_user_once}; use uv_workspace::dependency_groups::DependencyGroupError; use uv_workspace::pyproject::PyProjectToml; -use uv_workspace::{Workspace, WorkspaceCache}; +use uv_workspace::{RequiresPythonSources, Workspace, WorkspaceCache}; use crate::commands::pip::loggers::{InstallLogger, ResolveLogger}; use crate::commands::pip::operations::{Changelog, Modifications}; @@ -67,6 +68,7 @@ pub(crate) mod remove; pub(crate) mod run; pub(crate) mod sync; pub(crate) mod tree; +pub(crate) mod version; #[derive(thiserror::Error, Debug)] pub(crate) enum ProjectError { @@ -107,19 +109,28 @@ pub(crate) enum ProjectError { Conflict(#[from] ConflictError), #[error( - "The requested interpreter resolved to Python {0}, which is incompatible with the project's Python requirement: `{1}`" + "The requested interpreter resolved to Python {_0}, which is incompatible with the project's Python requirement: `{_1}`{}", + format_optional_requires_python_sources(_2, *_3) )] - RequestedPythonProjectIncompatibility(Version, RequiresPython), + 
RequestedPythonProjectIncompatibility(Version, RequiresPython, RequiresPythonSources, bool), #[error( - "The Python request from `{0}` resolved to Python {1}, which is incompatible with the project's Python requirement: `{2}`. Use `uv python pin` to update the `.python-version` file to a compatible version." + "The Python request from `{_0}` resolved to Python {_1}, which is incompatible with the project's Python requirement: `{_2}`{}\nUse `uv python pin` to update the `.python-version` file to a compatible version", + format_optional_requires_python_sources(_3, *_4) )] - DotPythonVersionProjectIncompatibility(String, Version, RequiresPython), + DotPythonVersionProjectIncompatibility( + String, + Version, + RequiresPython, + RequiresPythonSources, + bool, + ), #[error( - "The resolved Python interpreter (Python {0}) is incompatible with the project's Python requirement: `{1}`" + "The resolved Python interpreter (Python {_0}) is incompatible with the project's Python requirement: `{_1}`{}", + format_optional_requires_python_sources(_2, *_3) )] - RequiresPythonProjectIncompatibility(Version, RequiresPython), + RequiresPythonProjectIncompatibility(Version, RequiresPython, RequiresPythonSources, bool), #[error( "The requested interpreter resolved to Python {0}, which is incompatible with the script's Python requirement: `{1}`" @@ -136,34 +147,6 @@ pub(crate) enum ProjectError { )] RequiresPythonScriptIncompatibility(Version, RequiresPython), - #[error("The requested interpreter resolved to Python {0}, which is incompatible with the project's Python requirement: `{1}`. However, a workspace member (`{member}`) supports Python {3}. To install the workspace member on its own, navigate to `{path}`, then run `{venv}` followed by `{install}`.", member = _2.cyan(), venv = format!("uv venv --python {_0}").green(), install = "uv pip install -e .".green(), path = _4.user_display().cyan() )] - RequestedMemberIncompatibility( - Version, - RequiresPython, - PackageName, - VersionSpecifiers, - PathBuf, - ), - - #[error("The Python request from `{0}` resolved to Python {1}, which is incompatible with the project's Python requirement: `{2}`. However, a workspace member (`{member}`) supports Python {4}. To install the workspace member on its own, navigate to `{path}`, then run `{venv}` followed by `{install}`.", member = _3.cyan(), venv = format!("uv venv --python {_1}").green(), install = "uv pip install -e .".green(), path = _5.user_display().cyan() )] - DotPythonVersionMemberIncompatibility( - String, - Version, - RequiresPython, - PackageName, - VersionSpecifiers, - PathBuf, - ), - - #[error("The resolved Python interpreter (Python {0}) is incompatible with the project's Python requirement: `{1}`. However, a workspace member (`{member}`) supports Python {3}. 
To install the workspace member on its own, navigate to `{path}`, then run `{venv}` followed by `{install}`.", member = _2.cyan(), venv = format!("uv venv --python {_0}").green(), install = "uv pip install -e .".green(), path = _4.user_display().cyan() )] - RequiresPythonMemberIncompatibility( - Version, - RequiresPython, - PackageName, - VersionSpecifiers, - PathBuf, - ), - #[error("Group `{0}` is not defined in the project's `dependency-groups` table")] MissingGroupProject(GroupName), @@ -193,8 +176,11 @@ pub(crate) enum ProjectError { #[error("Environment markers `{0}` don't overlap with Python requirement `{1}`")] DisjointEnvironment(MarkerTreeContents, VersionSpecifiers), - #[error("The workspace contains conflicting Python requirements:\n{}", _0.iter().map(|(name, specifiers)| format!("- `{name}`: `{specifiers}`")).join("\n"))] - DisjointRequiresPython(BTreeMap), + #[error( + "Found conflicting Python requirements:\n{}", + format_requires_python_sources(_0) + )] + DisjointRequiresPython(BTreeMap<(PackageName, Option), VersionSpecifiers>), #[error("Environment marker is empty")] EmptyEnvironment, @@ -285,7 +271,7 @@ pub(crate) struct ConflictError { /// The items from the set that were enabled, and thus create the conflict. pub(crate) conflicts: Vec, /// Enabled dependency groups with defaults applied. - pub(crate) dev: DependencyGroupsWithDefaults, + pub(crate) groups: DependencyGroupsWithDefaults, } impl std::fmt::Display for ConflictError { @@ -337,7 +323,7 @@ impl std::fmt::Display for ConflictError { .iter() .map(|conflict| match conflict { ConflictPackage::Group(group) - if self.dev.contains_because_default(group) => + if self.groups.contains_because_default(group) => format!("`{group}` (enabled by default)"), ConflictPackage::Group(group) => format!("`{group}`"), ConflictPackage::Extra(..) => unreachable!(), @@ -357,7 +343,7 @@ impl std::fmt::Display for ConflictError { let conflict = match conflict { ConflictPackage::Extra(extra) => format!("extra `{extra}`"), ConflictPackage::Group(group) - if self.dev.contains_because_default(group) => + if self.groups.contains_because_default(group) => { format!("group `{group}` (enabled by default)") } @@ -428,23 +414,40 @@ impl PlatformState { #[allow(clippy::result_large_err)] pub(crate) fn find_requires_python( workspace: &Workspace, + groups: &DependencyGroupsWithDefaults, ) -> Result, ProjectError> { + let requires_python = workspace.requires_python(groups)?; // If there are no `Requires-Python` specifiers in the workspace, return `None`. - if workspace.requires_python().next().is_none() { + if requires_python.is_empty() { return Ok(None); } - match RequiresPython::intersection( - workspace - .requires_python() - .map(|(.., specifiers)| specifiers), - ) { + for ((package, group), specifiers) in &requires_python { + if let [spec] = &specifiers[..] { + if let Some(spec) = TildeVersionSpecifier::from_specifier_ref(spec) { + if spec.has_patch() { + continue; + } + let (lower, upper) = spec.bounding_specifiers(); + let spec_0 = spec.with_patch_version(0); + let (lower_0, upper_0) = spec_0.bounding_specifiers(); + warn_user_once!( + "The `requires-python` specifier (`{spec}`) in `{package}{group}` \ + uses the tilde specifier (`~=`) without a patch version. This will be \ + interpreted as `{lower}, {upper}`. Did you mean `{spec_0}` to constrain the \ + version as `{lower_0}, {upper_0}`? 
We recommend only using \ + the tilde specifier with a patch version to avoid ambiguity.", + group = if let Some(group) = group { + format!(":{group}") + } else { + String::new() + }, + ); + } + } + } + match RequiresPython::intersection(requires_python.iter().map(|(.., specifiers)| specifiers)) { Some(requires_python) => Ok(Some(requires_python)), - None => Err(ProjectError::DisjointRequiresPython( - workspace - .requires_python() - .map(|(name, specifiers)| (name.clone(), specifiers.clone())) - .collect(), - )), + None => Err(ProjectError::DisjointRequiresPython(requires_python)), } } @@ -456,6 +459,7 @@ pub(crate) fn find_requires_python( pub(crate) fn validate_project_requires_python( interpreter: &Interpreter, workspace: Option<&Workspace>, + groups: &DependencyGroupsWithDefaults, requires_python: &RequiresPython, source: &PythonRequestSource, ) -> Result<(), ProjectError> { @@ -463,57 +467,24 @@ pub(crate) fn validate_project_requires_python( return Ok(()); } - // If the Python version is compatible with one of the workspace _members_, raise - // a dedicated error. For example, if the workspace root requires Python >=3.12, but - // a library in the workspace is compatible with Python >=3.8, the user may attempt - // to sync on Python 3.8. This will fail, but we should provide a more helpful error - // message. - for (name, member) in workspace.into_iter().flat_map(Workspace::packages) { - let Some(project) = member.pyproject_toml().project.as_ref() else { - continue; - }; - let Some(specifiers) = project.requires_python.as_ref() else { - continue; - }; - if specifiers.contains(interpreter.python_version()) { - return match source { - PythonRequestSource::UserRequest => { - Err(ProjectError::RequestedMemberIncompatibility( - interpreter.python_version().clone(), - requires_python.clone(), - name.clone(), - specifiers.clone(), - member.root().clone(), - )) - } - PythonRequestSource::DotPythonVersion(file) => { - Err(ProjectError::DotPythonVersionMemberIncompatibility( - file.path().user_display().to_string(), - interpreter.python_version().clone(), - requires_python.clone(), - name.clone(), - specifiers.clone(), - member.root().clone(), - )) - } - PythonRequestSource::RequiresPython => { - Err(ProjectError::RequiresPythonMemberIncompatibility( - interpreter.python_version().clone(), - requires_python.clone(), - name.clone(), - specifiers.clone(), - member.root().clone(), - )) - } - }; - } - } + // Find all the individual requires_python constraints that conflict + let conflicting_requires = workspace + .and_then(|workspace| workspace.requires_python(groups).ok()) + .into_iter() + .flatten() + .filter(|(.., requires)| !requires.contains(interpreter.python_version())) + .collect::(); + let workspace_non_trivial = workspace + .map(|workspace| workspace.packages().len() > 1) + .unwrap_or(false); match source { PythonRequestSource::UserRequest => { Err(ProjectError::RequestedPythonProjectIncompatibility( interpreter.python_version().clone(), requires_python.clone(), + conflicting_requires, + workspace_non_trivial, )) } PythonRequestSource::DotPythonVersion(file) => { @@ -521,12 +492,16 @@ pub(crate) fn validate_project_requires_python( file.path().user_display().to_string(), interpreter.python_version().clone(), requires_python.clone(), + conflicting_requires, + workspace_non_trivial, )) } PythonRequestSource::RequiresPython => { Err(ProjectError::RequiresPythonProjectIncompatibility( interpreter.python_version().clone(), requires_python.clone(), + conflicting_requires, + 
workspace_non_trivial, )) } } @@ -674,6 +649,7 @@ impl ScriptInterpreter { active: Option, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { // For now, we assume that scripts are never evaluated in the context of a workspace. let workspace = None; @@ -714,6 +690,7 @@ impl ScriptInterpreter { } let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -731,13 +708,20 @@ impl ScriptInterpreter { install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); if let Err(err) = match requires_python { Some((requires_python, RequiresPythonSource::Project)) => { - validate_project_requires_python(&interpreter, workspace, &requires_python, &source) + validate_project_requires_python( + &interpreter, + workspace, + &DependencyGroupsWithDefaults::none(), + &requires_python, + &source, + ) } Some((requires_python, RequiresPythonSource::Script)) => { validate_script_requires_python(&interpreter, &requires_python, &source) @@ -810,7 +794,7 @@ pub(crate) enum EnvironmentIncompatibilityError { RequiresPython(EnvironmentKind, RequiresPython), #[error( - "The interpreter in the {0} environment has different version ({1}) than it was created with ({2})" + "The interpreter in the {0} environment has a different version ({1}) than it was created with ({2})" )] PyenvVersionConflict(EnvironmentKind, Version, Version), } @@ -826,8 +810,8 @@ fn environment_is_usable( if let Some((cfg_version, int_version)) = environment.get_pyvenv_version_conflict() { return Err(EnvironmentIncompatibilityError::PyenvVersionConflict( kind, - cfg_version, int_version, + cfg_version, )); } @@ -873,6 +857,7 @@ impl ProjectInterpreter { pub(crate) async fn discover( workspace: &Workspace, project_dir: &Path, + groups: &DependencyGroupsWithDefaults, python_request: Option, network_settings: &NetworkSettings, python_preference: PythonPreference, @@ -883,14 +868,21 @@ impl ProjectInterpreter { active: Option, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { // Resolve the Python request and requirement for the workspace. let WorkspacePython { source, python_request, requires_python, - } = WorkspacePython::from_request(python_request, Some(workspace), project_dir, no_config) - .await?; + } = WorkspacePython::from_request( + python_request, + Some(workspace), + groups, + project_dir, + no_config, + ) + .await?; // Read from the virtual environment first. let root = workspace.venv(active); @@ -955,6 +947,7 @@ impl ProjectInterpreter { } let client_builder = BaseClientBuilder::default() + .retries_from_env()? 
.connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -973,6 +966,7 @@ impl ProjectInterpreter { install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await?; @@ -1001,6 +995,7 @@ impl ProjectInterpreter { validate_project_requires_python( &interpreter, Some(workspace), + groups, requires_python, &source, )?; @@ -1080,10 +1075,14 @@ impl WorkspacePython { pub(crate) async fn from_request( python_request: Option, workspace: Option<&Workspace>, + groups: &DependencyGroupsWithDefaults, project_dir: &Path, no_config: bool, ) -> Result { - let requires_python = workspace.map(find_requires_python).transpose()?.flatten(); + let requires_python = workspace + .map(|workspace| find_requires_python(workspace, groups)) + .transpose()? + .flatten(); let workspace_root = workspace.map(Workspace::install_path); @@ -1164,6 +1163,8 @@ impl ScriptPython { } = WorkspacePython::from_request( python_request, workspace, + // Scripts have no groups to hang requires-python settings off of + &DependencyGroupsWithDefaults::none(), script.path().and_then(Path::parent).unwrap_or(&**CWD), no_config, ) @@ -1230,6 +1231,7 @@ impl ProjectEnvironment { /// Initialize a virtual environment for the current project. pub(crate) async fn get_or_init( workspace: &Workspace, + groups: &DependencyGroupsWithDefaults, python: Option, install_mirrors: &PythonInstallMirrors, network_settings: &NetworkSettings, @@ -1241,13 +1243,25 @@ impl ProjectEnvironment { cache: &Cache, dry_run: DryRun, printer: Printer, + preview: PreviewMode, ) -> Result { // Lock the project environment to avoid synchronization issues. - let _lock = ProjectInterpreter::lock(workspace).await?; + let _lock = ProjectInterpreter::lock(workspace) + .await + .inspect_err(|err| { + warn!("Failed to acquire project environment lock: {err}"); + }) + .ok(); + + let upgradeable = preview.is_enabled() + && python + .as_ref() + .is_none_or(|request| !request.includes_patch()); match ProjectInterpreter::discover( workspace, workspace.install_path().as_ref(), + groups, python, network_settings, python_preference, @@ -1258,6 +1272,7 @@ impl ProjectEnvironment { active, cache, printer, + preview, ) .await? { @@ -1327,6 +1342,8 @@ impl ProjectEnvironment { false, false, false, + upgradeable, + preview, )?; return Ok(if replace { Self::WouldReplace(root, environment, temp_dir) @@ -1364,6 +1381,8 @@ impl ProjectEnvironment { false, false, false, + upgradeable, + preview, )?; if replace { @@ -1447,9 +1466,19 @@ impl ScriptEnvironment { cache: &Cache, dry_run: DryRun, printer: Printer, + preview: PreviewMode, ) -> Result { // Lock the script environment to avoid synchronization issues. - let _lock = ScriptInterpreter::lock(script).await?; + let _lock = ScriptInterpreter::lock(script) + .await + .inspect_err(|err| { + warn!("Failed to acquire script environment lock: {err}"); + }) + .ok(); + + let upgradeable = python_request + .as_ref() + .is_none_or(|request| !request.includes_patch()); match ScriptInterpreter::discover( script, @@ -1463,6 +1492,7 @@ impl ScriptEnvironment { active, cache, printer, + preview, ) .await? 
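Several call sites above stop treating a failed environment-lock acquisition as fatal: the error is logged with `inspect_err` and then discarded with `.ok()`, so the guard becomes an `Option` and the operation proceeds unlocked if locking is unavailable. A small standalone sketch of that standard-library pattern, with a made-up `acquire_lock` standing in for `ProjectInterpreter::lock`.

use std::io;

// Hypothetical stand-in for the real lock acquisition, which can fail on
// filesystems that don't support advisory locks.
fn acquire_lock() -> io::Result<LockGuard> {
    Err(io::Error::new(io::ErrorKind::Unsupported, "flock not supported"))
}

struct LockGuard;

fn main() {
    // Log the failure but keep going: holding the guard is best-effort.
    // While `_lock` is `Some`, the lock is held; on failure we run unlocked.
    let _lock: Option<LockGuard> = acquire_lock()
        .inspect_err(|err| eprintln!("Failed to acquire environment lock: {err}"))
        .ok();
}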
{ @@ -1495,6 +1525,8 @@ impl ScriptEnvironment { false, false, false, + upgradeable, + preview, )?; return Ok(if root.exists() { Self::WouldReplace(root, environment, temp_dir) @@ -1529,6 +1561,8 @@ impl ScriptEnvironment { false, false, false, + upgradeable, + preview, )?; Ok(if replaced { @@ -1624,21 +1658,14 @@ pub(crate) async fn resolve_names( } = settings; let client_builder = BaseClientBuilder::new() + .retries_from_env() + .map_err(uv_requirements::Error::ClientError)? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(*keyring_provider) .allow_insecure_host(network_settings.allow_insecure_host.clone()); - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? @@ -1707,7 +1734,7 @@ pub(crate) async fn resolve_names( } #[derive(Debug, Clone)] -pub(crate) enum PreferenceSource<'lock> { +pub(crate) enum PreferenceLocation<'lock> { /// The preferences should be extracted from a lockfile. Lock { lock: &'lock Lock, @@ -1722,7 +1749,7 @@ pub(crate) struct EnvironmentSpecification<'lock> { /// The requirements to include in the environment. requirements: RequirementsSpecification, /// The preferences to respect when resolving. - preferences: Option>, + preferences: Option>, } impl From for EnvironmentSpecification<'_> { @@ -1735,9 +1762,9 @@ impl From for EnvironmentSpecification<'_> { } impl<'lock> EnvironmentSpecification<'lock> { - /// Set the [`PreferenceSource`] for the specification. + /// Set the [`PreferenceLocation`] for the specification. #[must_use] - pub(crate) fn with_preferences(self, preferences: PreferenceSource<'lock>) -> Self { + pub(crate) fn with_preferences(self, preferences: PreferenceLocation<'lock>) -> Self { Self { preferences: Some(preferences), ..self @@ -1749,6 +1776,7 @@ impl<'lock> EnvironmentSpecification<'lock> { pub(crate) async fn resolve_environment( spec: EnvironmentSpecification<'_>, interpreter: &Interpreter, + build_constraints: Constraints, settings: &ResolverSettings, network_settings: &NetworkSettings, state: &PlatformState, @@ -1789,6 +1817,7 @@ pub(crate) async fn resolve_environment( } = spec.requirements; let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(*keyring_provider) @@ -1799,16 +1828,7 @@ pub(crate) async fn resolve_environment( let marker_env = interpreter.resolver_marker_environment(); let python_requirement = PythonRequirement::from_interpreter(interpreter); - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? 
@@ -1845,7 +1865,6 @@ pub(crate) async fn resolve_environment( let extras = ExtrasSpecification::default(); let groups = BTreeMap::new(); let hasher = HashStrategy::default(); - let build_constraints = Constraints::default(); let build_hasher = HashStrategy::default(); // When resolving from an interpreter, we assume an empty environment, so reinstalls and @@ -1855,7 +1874,7 @@ pub(crate) async fn resolve_environment( // If an existing lockfile exists, build up a set of preferences. let preferences = match spec.preferences { - Some(PreferenceSource::Lock { lock, install_path }) => { + Some(PreferenceLocation::Lock { lock, install_path }) => { let LockedRequirements { preferences, git } = read_lock_requirements(lock, install_path, &upgrade)?; @@ -1867,7 +1886,7 @@ pub(crate) async fn resolve_environment( preferences } - Some(PreferenceSource::Entries(entries)) => entries, + Some(PreferenceLocation::Entries(entries)) => entries, None => vec![], }; @@ -1923,6 +1942,7 @@ pub(crate) async fn resolve_environment( Some(tags), ResolverEnvironment::specific(marker_env), python_requirement, + interpreter.markers(), Conflicts::empty(), &client, &flat_index, @@ -1969,6 +1989,7 @@ pub(crate) async fn sync_environment( } = settings; let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) @@ -1980,16 +2001,7 @@ pub(crate) async fn sync_environment( let interpreter = venv.interpreter(); let tags = venv.interpreter().tags()?; - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? @@ -2141,6 +2153,7 @@ pub(crate) async fn update_environment( } = settings; let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(*keyring_provider) @@ -2195,16 +2208,7 @@ pub(crate) async fn update_environment( } } - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? 
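The repeated credential-seeding loops above collapse into a single `index_locations.cache_index_credentials()` call. Judging from the removed code, the helper presumably centralizes that per-index work: store each index's credentials under both its URL and its root URL. A rough, self-contained model with hypothetical types; the real helper writes into uv_auth's process-global store rather than an explicit map.

use std::collections::HashMap;
use std::sync::Arc;

// Hypothetical stand-ins for uv's index and credential types; the point is the
// shape of the refactor, not the real API.
#[derive(Clone)]
struct Credentials(String);

struct Index {
    url: String,
    root_url: Option<String>,
    credentials: Option<Credentials>,
}

struct IndexLocations {
    indexes: Vec<Index>,
}

impl IndexLocations {
    // Roughly what the new one-liner appears to wrap: seed a credential cache
    // for every configured index (and its root URL), as the removed loops did.
    fn cache_index_credentials(&self, cache: &mut HashMap<String, Arc<Credentials>>) {
        for index in &self.indexes {
            if let Some(credentials) = &index.credentials {
                let credentials = Arc::new(credentials.clone());
                cache.insert(index.url.clone(), credentials.clone());
                if let Some(root_url) = &index.root_url {
                    cache.insert(root_url.clone(), credentials.clone());
                }
            }
        }
    }
}

fn main() {
    let locations = IndexLocations {
        indexes: vec![Index {
            url: "https://pypi.internal/simple".to_string(),
            root_url: Some("https://pypi.internal".to_string()),
            credentials: Some(Credentials("token".to_string())),
        }],
    };
    let mut cache = HashMap::new();
    locations.cache_index_credentials(&mut cache);
    assert_eq!(cache.len(), 2);
}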
@@ -2295,6 +2299,7 @@ pub(crate) async fn update_environment( Some(tags), ResolverEnvironment::specific(marker_env.clone()), python_requirement, + venv.interpreter().markers(), Conflicts::empty(), &client, &flat_index, @@ -2358,6 +2363,7 @@ pub(crate) async fn init_script_python_requirement( client_builder: &BaseClientBuilder<'_>, cache: &Cache, reporter: &PythonDownloadReporter, + preview: PreviewMode, ) -> anyhow::Result { let python_request = if let Some(request) = python { // (1) Explicit request from user @@ -2389,6 +2395,7 @@ pub(crate) async fn init_script_python_requirement( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); @@ -2431,7 +2438,7 @@ pub(crate) fn default_dependency_groups( pub(crate) fn detect_conflicts( lock: &Lock, extras: &ExtrasSpecification, - dev: &DependencyGroupsWithDefaults, + groups: &DependencyGroupsWithDefaults, ) -> Result<(), ProjectError> { // Note that we need to collect all extras and groups that match in // a particular set, since extras can be declared as conflicting with @@ -2450,7 +2457,7 @@ pub(crate) fn detect_conflicts( } if item .group() - .map(|group| dev.contains(group)) + .map(|group| groups.contains(group)) .unwrap_or(false) { conflicts.push(item.conflict().clone()); @@ -2460,7 +2467,7 @@ pub(crate) fn detect_conflicts( return Err(ProjectError::Conflict(ConflictError { set: set.clone(), conflicts, - dev: dev.clone(), + groups: groups.clone(), })); } } @@ -2674,6 +2681,50 @@ fn cache_name(name: &str) -> Option> { } } +fn format_requires_python_sources(conflicts: &RequiresPythonSources) -> String { + conflicts + .iter() + .map(|((package, group), specifiers)| { + if let Some(group) = group { + format!("- {package}:{group}: {specifiers}") + } else { + format!("- {package}: {specifiers}") + } + }) + .join("\n") +} + +fn format_optional_requires_python_sources( + conflicts: &RequiresPythonSources, + workspace_non_trivial: bool, +) -> String { + // If there's lots of conflicts, print a list + if conflicts.len() > 1 { + return format!( + ".\nThe following `requires-python` declarations do not permit this version:\n{}", + format_requires_python_sources(conflicts) + ); + } + // If there's one conflict, give a clean message + if conflicts.len() == 1 { + let ((package, group), _) = conflicts.iter().next().unwrap(); + if let Some(group) = group { + if workspace_non_trivial { + return format!( + " (from workspace member `{package}`'s `tool.uv.dependency-groups.{group}.requires-python`)." 
+ ); + } + return format!(" (from `tool.uv.dependency-groups.{group}.requires-python`)."); + } + if workspace_non_trivial { + return format!(" (from workspace member `{package}`'s `project.requires-python`)."); + } + return " (from `project.requires-python`)".to_owned(); + } + // Otherwise don't elaborate + String::new() +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/uv/src/commands/project/remove.rs b/crates/uv/src/commands/project/remove.rs index 336641fce..50615699e 100644 --- a/crates/uv/src/commands/project/remove.rs +++ b/crates/uv/src/commands/project/remove.rs @@ -5,7 +5,7 @@ use std::str::FromStr; use anyhow::{Context, Result}; use owo_colors::OwoColorize; -use tracing::debug; +use tracing::{debug, warn}; use uv_cache::Cache; use uv_configuration::{ @@ -13,7 +13,7 @@ use uv_configuration::{ PreviewMode, }; use uv_fs::Simplified; -use uv_normalize::{DEV_DEPENDENCIES, DefaultExtras}; +use uv_normalize::{DEV_DEPENDENCIES, DefaultExtras, DefaultGroups}; use uv_pep508::PackageName; use uv_python::{PythonDownloads, PythonPreference, PythonRequest}; use uv_scripts::{Pep723ItemRef, Pep723Metadata, Pep723Script}; @@ -202,6 +202,14 @@ pub(crate) async fn remove( // Update the `pypackage.toml` in-memory. let target = target.update(&content)?; + // Determine enabled groups and extras + let default_groups = match &target { + RemoveTarget::Project(project) => default_dependency_groups(project.pyproject_toml())?, + RemoveTarget::Script(_) => DefaultGroups::default(), + }; + let groups = DependencyGroups::default().with_defaults(default_groups); + let extras = ExtrasSpecification::default().with_defaults(DefaultExtras::default()); + // Convert to an `AddTarget` by attaching the appropriate interpreter or environment. let target = match target { RemoveTarget::Project(project) => { @@ -210,6 +218,7 @@ pub(crate) async fn remove( let interpreter = ProjectInterpreter::discover( project.workspace(), project_dir, + &groups, python.as_deref().map(PythonRequest::parse), &network_settings, python_preference, @@ -220,6 +229,7 @@ pub(crate) async fn remove( active, cache, printer, + preview, ) .await? .into_interpreter(); @@ -229,6 +239,7 @@ pub(crate) async fn remove( // Discover or create the virtual environment. let environment = ProjectEnvironment::get_or_init( project.workspace(), + &groups, python.as_deref().map(PythonRequest::parse), &install_mirrors, &network_settings, @@ -240,6 +251,7 @@ pub(crate) async fn remove( cache, DryRun::Disabled, printer, + preview, ) .await? .into_environment()?; @@ -260,6 +272,7 @@ pub(crate) async fn remove( active, cache, printer, + preview, ) .await? .into_interpreter(); @@ -268,6 +281,14 @@ pub(crate) async fn remove( } }; + let _lock = target + .acquire_lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); + // Determine the lock mode. let mode = if locked { LockMode::Locked(target.interpreter()) @@ -287,6 +308,7 @@ pub(crate) async fn remove( Box::new(DefaultResolveLogger), concurrency, cache, + &WorkspaceCache::default(), printer, preview, ) @@ -312,12 +334,6 @@ pub(crate) async fn remove( return Ok(ExitStatus::Success); }; - // Determine the default groups to include. - let default_groups = default_dependency_groups(project.pyproject_toml())?; - - // Determine the default extras to include. - let default_extras = DefaultExtras::default(); - // Identify the installation target. 
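The `format_requires_python_sources` helpers added to the project module above decide how `requires-python` conflicts read: a single incompatible declaration is attributed inline, naming the workspace member and dependency group where relevant, while several declarations are rendered as a bulleted list. A self-contained sketch of the list case over plain tuples, with made-up package and group names; uv's real keys are `(PackageName, Option<GroupName>)`.

// (package, optional dependency-group) -> requires-python specifiers
type Source = ((String, Option<String>), String);

fn format_sources(conflicts: &[Source]) -> String {
    conflicts
        .iter()
        .map(|((package, group), specifiers)| match group {
            Some(group) => format!("- {package}:{group}: {specifiers}"),
            None => format!("- {package}: {specifiers}"),
        })
        .collect::<Vec<_>>()
        .join("\n")
}

fn main() {
    let conflicts: Vec<Source> = vec![
        (("app".to_string(), None), ">=3.12".to_string()),
        (("lib".to_string(), Some("dev".to_string())), ">=3.9,<3.11".to_string()),
    ];
    // With more than one conflicting declaration, the error appends a list like:
    //   - app: >=3.12
    //   - lib:dev: >=3.9,<3.11
    println!(
        "Found conflicting Python requirements:\n{}",
        format_sources(&conflicts)
    );
}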
let target = match &project { VirtualProject::Project(project) => InstallTarget::Project { @@ -336,11 +352,12 @@ pub(crate) async fn remove( match project::sync::do_sync( target, venv, - &ExtrasSpecification::default().with_defaults(default_extras), - &DependencyGroups::default().with_defaults(default_groups), + &extras, + &groups, EditableMode::Editable, InstallOptions::default(), Modifications::Exact, + None, (&settings).into(), &network_settings, &state, @@ -348,6 +365,7 @@ pub(crate) async fn remove( installer_metadata, concurrency, cache, + WorkspaceCache::default(), DryRun::Disabled, printer, preview, diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs index 038891779..63850f563 100644 --- a/crates/uv/src/commands/project/run.rs +++ b/crates/uv/src/commands/project/run.rs @@ -30,6 +30,7 @@ use uv_python::{ PythonInstallation, PythonPreference, PythonRequest, PythonVersionFile, VersionFileDiscoveryOptions, }; +use uv_redacted::DisplaySafeUrl; use uv_requirements::{RequirementsSource, RequirementsSpecification}; use uv_resolver::{Installable, Lock, Preference}; use uv_scripts::Pep723Item; @@ -39,6 +40,7 @@ use uv_static::EnvVars; use uv_warnings::warn_user; use uv_workspace::{DiscoveryOptions, VirtualProject, Workspace, WorkspaceCache, WorkspaceError}; +use crate::child::run_to_completion; use crate::commands::pip::loggers::{ DefaultInstallLogger, DefaultResolveLogger, SummaryInstallLogger, SummaryResolveLogger, }; @@ -48,13 +50,12 @@ use crate::commands::project::install_target::InstallTarget; use crate::commands::project::lock::LockMode; use crate::commands::project::lock_target::LockTarget; use crate::commands::project::{ - EnvironmentSpecification, PreferenceSource, ProjectEnvironment, ProjectError, + EnvironmentSpecification, PreferenceLocation, ProjectEnvironment, ProjectError, ScriptEnvironment, ScriptInterpreter, UniversalState, WorkspacePython, default_dependency_groups, script_specification, update_environment, validate_project_requires_python, }; use crate::commands::reporters::PythonDownloadReporter; -use crate::commands::run::run_to_completion; use crate::commands::{ExitStatus, diagnostics, project}; use crate::printer::Printer; use crate::settings::{NetworkSettings, ResolverInstallerSettings}; @@ -77,7 +78,7 @@ pub(crate) async fn run( no_project: bool, no_config: bool, extras: ExtrasSpecification, - dev: DependencyGroups, + groups: DependencyGroups, editable: EditableMode, modifications: Modifications, python: Option, @@ -234,10 +235,19 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl cache, DryRun::Disabled, printer, + preview, ) .await? .into_environment()?; + let _lock = environment + .lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); + // Determine the lock mode. 
let mode = if frozen { LockMode::Frozen @@ -260,6 +270,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl }, concurrency, cache, + &workspace_cache, printer, preview, ) @@ -290,10 +301,11 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl target, &environment, &extras.with_defaults(DefaultExtras::default()), - &dev.with_defaults(DefaultGroups::default()), + &groups.with_defaults(DefaultGroups::default()), editable, install_options, modifications, + None, (&settings).into(), &network_settings, &sync_state, @@ -305,6 +317,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl installer_metadata, concurrency, cache, + workspace_cache.clone(), DryRun::Disabled, printer, preview, @@ -358,6 +371,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl cache, DryRun::Disabled, printer, + preview, ) .await? .into_environment()?; @@ -379,6 +393,14 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl ) }); + let _lock = environment + .lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); + match update_environment( environment, spec, @@ -400,7 +422,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl installer_metadata, concurrency, cache, - workspace_cache, + workspace_cache.clone(), DryRun::Disabled, printer, preview, @@ -432,6 +454,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl active.map_or(Some(false), Some), cache, printer, + preview, ) .await? .into_interpreter(); @@ -445,6 +468,8 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl false, false, false, + false, + preview, )?; Some(environment.into_interpreter()) @@ -455,7 +480,6 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl }; // Discover and sync the base environment. - let workspace_cache = WorkspaceCache::default(); let temp_dir; let base_interpreter = if let Some(script_interpreter) = script_interpreter { // If we found a PEP 723 script and the user provided a project-only setting, warn. 
@@ -467,7 +491,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl if !extras.is_empty() { warn_user!("Extras are not supported for Python scripts with inline metadata"); } - for flag in dev.history().as_flags_pretty() { + for flag in groups.history().as_flags_pretty() { warn_user!("`{flag}` is not supported for Python scripts with inline metadata"); } if all_packages { @@ -542,7 +566,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl for flag in extras.history().as_flags_pretty() { warn_user!("`{flag}` has no effect when used alongside `--no-project`"); } - for flag in dev.history().as_flags_pretty() { + for flag in groups.history().as_flags_pretty() { warn_user!("`{flag}` has no effect when used alongside `--no-project`"); } if locked { @@ -559,7 +583,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl for flag in extras.history().as_flags_pretty() { warn_user!("`{flag}` has no effect when used outside of a project"); } - for flag in dev.history().as_flags_pretty() { + for flag in groups.history().as_flags_pretty() { warn_user!("`{flag}` has no effect when used outside of a project"); } if locked { @@ -582,6 +606,11 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl project.workspace().install_path().display() ); } + // Determine the groups and extras to include. + let default_groups = default_dependency_groups(project.pyproject_toml())?; + let default_extras = DefaultExtras::default(); + let groups = groups.with_defaults(default_groups); + let extras = extras.with_defaults(default_extras); let venv = if isolated { debug!("Creating isolated virtual environment"); @@ -589,6 +618,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl // If we're isolating the environment, use an ephemeral virtual environment as the // base environment for the project. let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -601,6 +631,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl } = WorkspacePython::from_request( python.as_deref().map(PythonRequest::parse), Some(project.workspace()), + &groups, project_dir, no_config, ) @@ -617,6 +648,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); @@ -625,6 +657,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl validate_project_requires_python( &interpreter, Some(project.workspace()), + &groups, requires_python, &source, )?; @@ -640,12 +673,15 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl false, false, false, + false, + preview, )? } else { // If we're not isolating the environment, reuse the base environment for the // project. ProjectEnvironment::get_or_init( project.workspace(), + &groups, python.as_deref().map(PythonRequest::parse), &install_mirrors, &network_settings, @@ -657,6 +693,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl cache, DryRun::Disabled, printer, + preview, ) .await? .into_environment()? 
@@ -676,13 +713,13 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl .map(|lock| (lock, project.workspace().install_path().to_owned())); } } else { - // Validate that any referenced dependency groups are defined in the workspace. - - // Determine the default groups to include. - let default_groups = default_dependency_groups(project.pyproject_toml())?; - - // Determine the default extras to include. - let default_extras = DefaultExtras::default(); + let _lock = venv + .lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); // Determine the lock mode. let mode = if frozen { @@ -705,6 +742,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl }, concurrency, cache, + &workspace_cache, printer, preview, ) @@ -768,21 +806,19 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl }; let install_options = InstallOptions::default(); - let dev = dev.with_defaults(default_groups); - let extras = extras.with_defaults(default_extras); - // Validate that the set of requested extras and development groups are defined in the lockfile. target.validate_extras(&extras)?; - target.validate_groups(&dev)?; + target.validate_groups(&groups)?; match project::sync::do_sync( target, &venv, &extras, - &dev, + &groups, editable, install_options, modifications, + None, (&settings).into(), &network_settings, &sync_state, @@ -794,6 +830,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl installer_metadata, concurrency, cache, + workspace_cache.clone(), DryRun::Disabled, printer, preview, @@ -823,6 +860,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl let interpreter = { let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -852,6 +890,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await?; @@ -871,6 +910,8 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl false, false, false, + false, + preview, )?; venv.into_interpreter() } else { @@ -890,6 +931,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl None } else { let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -921,10 +963,10 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl let spec = EnvironmentSpecification::from(spec).with_preferences( if let Some((lock, install_path)) = base_lock.as_ref() { // If we have a lockfile, use the locked versions as preferences. - PreferenceSource::Lock { lock, install_path } + PreferenceLocation::Lock { lock, install_path } } else { // Otherwise, extract preferences from the base environment. 
- PreferenceSource::Entries( + PreferenceLocation::Entries( base_site_packages .iter() .filter_map(Preference::from_installed) @@ -991,6 +1033,16 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl site_packages.escape_for_python() ))?; + // Write the `sys.prefix` of the parent environment to the `extends-environment` key of the `pyvenv.cfg` + // file. This helps out static-analysis tools such as ty (see docs on + // `CachedEnvironment::set_parent_environment`). + // + // Note that we do this even if the parent environment is not a virtual environment. + // For ephemeral environments created by `uv run --with`, the parent environment's + // `site-packages` directory is added to `sys.path` even if the parent environment is not + // a virtual environment and even if `--system-site-packages` was not explicitly selected. + ephemeral_env.set_parent_environment(base_interpreter.sys_prefix())?; + // If `--system-site-packages` is enabled, add the system site packages to the ephemeral // environment. if base_interpreter.is_virtualenv() @@ -1198,7 +1250,7 @@ pub(crate) enum RunCommand { /// Execute a `pythonw` script provided via `stdin`. PythonGuiStdin(Vec, Vec), /// Execute a Python script provided via a remote URL. - PythonRemote(Url, tempfile::NamedTempFile, Vec), + PythonRemote(DisplaySafeUrl, tempfile::NamedTempFile, Vec), /// Execute an external command. External(OsString, Vec), /// Execute an empty command (in practice, `python` with no arguments). @@ -1464,7 +1516,7 @@ impl RunCommand { // We don't do this check on Windows since the file path would // be invalid anyway, and thus couldn't refer to a local file. if !cfg!(unix) || matches!(target_path.try_exists(), Ok(false)) { - let url = Url::parse(&target.to_string_lossy())?; + let url = DisplaySafeUrl::parse(&target.to_string_lossy())?; let file_stem = url .path_segments() @@ -1477,11 +1529,16 @@ impl RunCommand { .tempfile()?; let client = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()) .build(); - let response = client.for_host(&url).get(url.clone()).send().await?; + let response = client + .for_host(&url) + .get(Url::from(url.clone())) + .send() + .await?; // Stream the response to the file. 
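The `Url` to `DisplaySafeUrl` swap above (from the `uv_redacted` crate) is, going by the name, about not echoing credentials embedded in a URL: the wrapper keeps the full URL for the actual request but masks the password wherever the value is displayed or logged. An illustrative newtype in the same spirit, built directly on the `url` crate rather than on uv_redacted's actual implementation.

use std::fmt;
use url::Url;

// Illustrative newtype: holds the real URL for requests, but hides any
// embedded password when formatted for logs or error messages.
struct RedactedUrl(Url);

impl fmt::Display for RedactedUrl {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut url = self.0.clone();
        if url.password().is_some() {
            // Ignore the error: some URL schemes cannot carry a password anyway.
            let _ = url.set_password(Some("****"));
        }
        write!(f, "{url}")
    }
}

fn main() {
    let url = Url::parse("https://ferris:hunter2@example.com/simple").unwrap();
    let url = RedactedUrl(url);
    // Prints `https://ferris:****@example.com/simple`; the inner URL still
    // holds the real password for the actual request.
    println!("Uploading to {url}");
}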
let mut writer = file.as_file(); diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index 8e78836b4..a9a161527 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -6,13 +6,14 @@ use std::sync::Arc; use anyhow::{Context, Result}; use itertools::Itertools; use owo_colors::OwoColorize; +use tracing::warn; use uv_cache::Cache; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ Concurrency, Constraints, DependencyGroups, DependencyGroupsWithDefaults, DryRun, EditableMode, ExtrasSpecification, ExtrasSpecificationWithDefaults, HashCheckingMode, InstallOptions, - PreviewMode, + PreviewMode, TargetTriple, }; use uv_dispatch::BuildDispatch; use uv_distribution_types::{ @@ -33,8 +34,9 @@ use uv_workspace::pyproject::Source; use uv_workspace::{DiscoveryOptions, MemberDiscovery, VirtualProject, Workspace, WorkspaceCache}; use crate::commands::pip::loggers::{DefaultInstallLogger, DefaultResolveLogger, InstallLogger}; -use crate::commands::pip::operations; use crate::commands::pip::operations::Modifications; +use crate::commands::pip::resolution_markers; +use crate::commands::pip::{operations, resolution_tags}; use crate::commands::project::install_target::InstallTarget; use crate::commands::project::lock::{LockMode, LockOperation, LockResult}; use crate::commands::project::lock_target::LockTarget; @@ -57,11 +59,12 @@ pub(crate) async fn sync( all_packages: bool, package: Option, extras: ExtrasSpecification, - dev: DependencyGroups, + groups: DependencyGroups, editable: EditableMode, install_options: InstallOptions, modifications: Modifications, python: Option, + python_platform: Option, install_mirrors: PythonInstallMirrors, python_preference: PythonPreference, python_downloads: PythonDownloads, @@ -116,23 +119,24 @@ pub(crate) async fn sync( SyncTarget::Project(project) }; - // Determine the default groups to include. + // Determine the groups and extras to include. let default_groups = match &target { SyncTarget::Project(project) => default_dependency_groups(project.pyproject_toml())?, SyncTarget::Script(..) => DefaultGroups::default(), }; - - // Determine the default extras to include. let default_extras = match &target { SyncTarget::Project(_project) => DefaultExtras::default(), SyncTarget::Script(..) => DefaultExtras::default(), }; + let groups = groups.with_defaults(default_groups); + let extras = extras.with_defaults(default_extras); // Discover or create the virtual environment. let environment = match &target { SyncTarget::Project(project) => SyncEnvironment::Project( ProjectEnvironment::get_or_init( project.workspace(), + &groups, python.as_deref().map(PythonRequest::parse), &install_mirrors, &network_settings, @@ -144,6 +148,7 @@ pub(crate) async fn sync( cache, dry_run, printer, + preview, ) .await?, ), @@ -161,11 +166,20 @@ pub(crate) async fn sync( cache, dry_run, printer, + preview, ) .await?, ), }; + let _lock = environment + .lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); + // Notify the user of any environment changes. 
match &environment { SyncEnvironment::Project(ProjectEnvironment::Existing(environment)) @@ -319,7 +333,7 @@ pub(crate) async fn sync( installer_metadata, concurrency, cache, - workspace_cache, + workspace_cache.clone(), dry_run, printer, preview, @@ -366,6 +380,7 @@ pub(crate) async fn sync( Box::new(DefaultResolveLogger), concurrency, cache, + &workspace_cache, printer, preview, ) @@ -435,11 +450,12 @@ pub(crate) async fn sync( match do_sync( sync_target, &environment, - &extras.with_defaults(default_extras), - &dev.with_defaults(default_groups), + &extras, + &groups, editable, install_options, modifications, + python_platform.as_ref(), (&settings).into(), &network_settings, &state, @@ -447,6 +463,7 @@ pub(crate) async fn sync( installer_metadata, concurrency, cache, + workspace_cache, dry_run, printer, preview, @@ -571,10 +588,11 @@ pub(super) async fn do_sync( target: InstallTarget<'_>, venv: &PythonEnvironment, extras: &ExtrasSpecificationWithDefaults, - dev: &DependencyGroupsWithDefaults, + groups: &DependencyGroupsWithDefaults, editable: EditableMode, install_options: InstallOptions, modifications: Modifications, + python_platform: Option<&TargetTriple>, settings: InstallerSettingsRef<'_>, network_settings: &NetworkSettings, state: &PlatformState, @@ -582,6 +600,7 @@ pub(super) async fn do_sync( installer_metadata: bool, concurrency: Concurrency, cache: &Cache, + workspace_cache: WorkspaceCache, dry_run: DryRun, printer: Printer, preview: PreviewMode, @@ -604,6 +623,7 @@ pub(super) async fn do_sync( } = settings; let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .keyring(keyring_provider) @@ -622,14 +642,14 @@ pub(super) async fn do_sync( } // Validate that the set of requested extras and development groups are compatible. - detect_conflicts(target.lock(), extras, dev)?; + detect_conflicts(target.lock(), extras, groups)?; // Validate that the set of requested extras and development groups are defined in the lockfile. target.validate_extras(extras)?; - target.validate_groups(dev)?; + target.validate_groups(groups)?; // Determine the markers to use for resolution. - let marker_env = venv.interpreter().resolver_marker_environment(); + let marker_env = resolution_markers(None, python_platform, venv.interpreter()); // Validate that the platform is supported by the lockfile. let environments = target.lock().supported_environments(); @@ -655,15 +675,15 @@ pub(super) async fn do_sync( } } - // Determine the tags to use for resolution. - let tags = venv.interpreter().tags()?; + // Determine the tags to use for the resolution. + let tags = resolution_tags(None, python_platform, venv.interpreter())?; // Read the lockfile. let resolution = target.to_resolution( &marker_env, - tags, + &tags, extras, - dev, + groups, build_options, &install_options, )?; @@ -674,16 +694,7 @@ pub(super) async fn do_sync( // If necessary, convert editable to non-editable distributions. let resolution = apply_editable_mode(resolution, editable); - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Populate credentials from the target. 
store_credentials_from_target(target); @@ -722,7 +733,7 @@ pub(super) async fn do_sync( let entries = client .fetch_all(index_locations.flat_indexes().map(Index::url)) .await?; - FlatIndex::from_entries(entries, Some(tags), &hasher, build_options) + FlatIndex::from_entries(entries, Some(&tags), &hasher, build_options) }; // Create a build dispatch. @@ -743,7 +754,7 @@ pub(super) async fn do_sync( &build_hasher, exclude_newer, sources, - WorkspaceCache::default(), + workspace_cache.clone(), concurrency, preview, ); @@ -762,7 +773,7 @@ pub(super) async fn do_sync( index_locations, config_setting, &hasher, - tags, + &tags, &client, state.in_flight(), concurrency, @@ -841,7 +852,7 @@ fn apply_editable_mode(resolution: Resolution, editable: EditableMode) -> Resolu /// These credentials can come from any of `tool.uv.sources`, `tool.uv.dev-dependencies`, /// `project.dependencies`, and `project.optional-dependencies`. fn store_credentials_from_target(target: InstallTarget<'_>) { - // Iterate over any idnexes in the target. + // Iterate over any indexes in the target. for index in target.indexes() { if let Some(credentials) = index.credentials() { let credentials = Arc::new(credentials); diff --git a/crates/uv/src/commands/project/tree.rs b/crates/uv/src/commands/project/tree.rs index 6bf57d1a7..cd1339d3e 100644 --- a/crates/uv/src/commands/project/tree.rs +++ b/crates/uv/src/commands/project/tree.rs @@ -34,7 +34,7 @@ use crate::settings::{NetworkSettings, ResolverSettings}; #[allow(clippy::fn_params_excessive_bools)] pub(crate) async fn tree( project_dir: &Path, - dev: DependencyGroups, + groups: DependencyGroups, locked: bool, frozen: bool, universal: bool, @@ -71,11 +71,12 @@ pub(crate) async fn tree( LockTarget::Workspace(&workspace) }; - // Determine the default groups to include. - let defaults = match target { + // Determine the groups to include. + let default_groups = match target { LockTarget::Workspace(workspace) => default_dependency_groups(workspace.pyproject_toml())?, LockTarget::Script(_) => DefaultGroups::default(), }; + let groups = groups.with_defaults(default_groups); let native_tls = network_settings.native_tls; @@ -96,12 +97,14 @@ pub(crate) async fn tree( Some(false), cache, printer, + preview, ) .await? .into_interpreter(), LockTarget::Workspace(workspace) => ProjectInterpreter::discover( workspace, project_dir, + &groups, python.as_deref().map(PythonRequest::parse), network_settings, python_preference, @@ -112,6 +115,7 @@ pub(crate) async fn tree( Some(false), cache, printer, + preview, ) .await? .into_interpreter(), @@ -142,6 +146,7 @@ pub(crate) async fn tree( Box::new(DefaultResolveLogger), concurrency, cache, + &WorkspaceCache::default(), printer, preview, ) @@ -210,6 +215,7 @@ pub(crate) async fn tree( let client = RegistryClientBuilder::new( cache.clone().with_refresh(Refresh::All(Timestamp::now())), ) + .retries_from_env()? 
.native_tls(network_settings.native_tls) .connectivity(network_settings.connectivity) .allow_insecure_host(network_settings.allow_insecure_host.clone()) @@ -271,7 +277,7 @@ pub(crate) async fn tree( depth.into(), &prune, &package, - &dev.with_defaults(defaults), + &groups, no_dedupe, invert, ); diff --git a/crates/uv/src/commands/project/version.rs b/crates/uv/src/commands/project/version.rs new file mode 100644 index 000000000..ed1e9e246 --- /dev/null +++ b/crates/uv/src/commands/project/version.rs @@ -0,0 +1,700 @@ +use std::fmt::Write; +use std::path::Path; +use std::str::FromStr; + +use anyhow::{Context, Result, anyhow}; +use owo_colors::OwoColorize; + +use tracing::debug; +use uv_cache::Cache; +use uv_cli::version::VersionInfo; +use uv_cli::{VersionBump, VersionFormat}; +use uv_configuration::{ + Concurrency, DependencyGroups, DependencyGroupsWithDefaults, DryRun, EditableMode, + ExtrasSpecification, InstallOptions, PreviewMode, +}; +use uv_fs::Simplified; +use uv_normalize::DefaultExtras; +use uv_pep440::{BumpCommand, PrereleaseKind, Version}; +use uv_pep508::PackageName; +use uv_python::{PythonDownloads, PythonPreference, PythonRequest}; +use uv_settings::PythonInstallMirrors; +use uv_warnings::warn_user; +use uv_workspace::pyproject_mut::Error; +use uv_workspace::{ + DiscoveryOptions, WorkspaceCache, + pyproject_mut::{DependencyTarget, PyProjectTomlMut}, +}; +use uv_workspace::{VirtualProject, Workspace}; + +use crate::commands::pip::loggers::{DefaultInstallLogger, DefaultResolveLogger}; +use crate::commands::pip::operations::Modifications; +use crate::commands::project::add::{AddTarget, PythonTarget}; +use crate::commands::project::install_target::InstallTarget; +use crate::commands::project::lock::LockMode; +use crate::commands::project::{ + ProjectEnvironment, ProjectError, ProjectInterpreter, UniversalState, default_dependency_groups, +}; +use crate::commands::{ExitStatus, diagnostics, project}; +use crate::printer::Printer; +use crate::settings::{NetworkSettings, ResolverInstallerSettings}; + +/// Display version information for uv itself (`uv self version`) +pub(crate) fn self_version( + short: bool, + output_format: VersionFormat, + printer: Printer, +) -> Result { + let version_info = uv_cli::version::uv_self_version(); + print_version(version_info, None, short, output_format, printer)?; + + Ok(ExitStatus::Success) +} + +/// Read or update project version (`uv version`) +#[allow(clippy::fn_params_excessive_bools)] +pub(crate) async fn project_version( + value: Option, + mut bump: Vec, + short: bool, + output_format: VersionFormat, + strict: bool, + project_dir: &Path, + package: Option, + dry_run: bool, + locked: bool, + frozen: bool, + active: Option, + no_sync: bool, + python: Option, + install_mirrors: PythonInstallMirrors, + settings: ResolverInstallerSettings, + network_settings: NetworkSettings, + python_preference: PythonPreference, + python_downloads: PythonDownloads, + installer_metadata: bool, + concurrency: Concurrency, + no_config: bool, + cache: &Cache, + printer: Printer, + preview: PreviewMode, +) -> Result { + // Read the metadata + let project = match find_target(project_dir, package.as_ref()).await { + Ok(target) => target, + Err(err) => { + // If strict, hard bail on failing to find the pyproject.toml + if strict { + return Err(err)?; + } + // Otherwise, warn and provide fallback to the old `uv version` from before 0.7.0 + warn_user!( + "Failed to read project metadata ({err}). Running `{}` for compatibility. 
This fallback will be removed in the future; pass `--preview` to force an error.", + "uv self version".green() + ); + return self_version(short, output_format, printer); + } + }; + + let pyproject_path = project.root().join("pyproject.toml"); + let Some(name) = project.project_name().cloned() else { + return Err(anyhow!( + "Missing `project.name` field in: {}", + pyproject_path.user_display() + )); + }; + + // Short-circuit early for a frozen read + let is_read_only = value.is_none() && bump.is_empty(); + if frozen && is_read_only { + return Box::pin(print_frozen_version( + project, + &name, + project_dir, + active, + python, + install_mirrors, + &settings, + network_settings, + python_preference, + python_downloads, + concurrency, + no_config, + cache, + short, + output_format, + printer, + preview, + )) + .await; + } + + let mut toml = PyProjectTomlMut::from_toml( + project.pyproject_toml().raw.as_ref(), + DependencyTarget::PyProjectToml, + )?; + + let old_version = toml.version().map_err(|err| match err { + Error::MalformedWorkspace => { + if toml.has_dynamic_version() { + anyhow!( + "We cannot get or set dynamic project versions in: {}", + pyproject_path.user_display() + ) + } else { + anyhow!( + "There is no 'project.version' field in: {}", + pyproject_path.user_display() + ) + } + } + err => { + anyhow!("{err}: {}", pyproject_path.user_display()) + } + })?; + + // Figure out new metadata + let new_version = if let Some(value) = value { + match Version::from_str(&value) { + Ok(version) => Some(version), + Err(err) => match &*value { + "major" | "minor" | "patch" | "alpha" | "beta" | "rc" | "dev" | "post" + | "stable" => { + return Err(anyhow!( + "Invalid version `{value}`, did you mean to pass `--bump {value}`?" + )); + } + _ => { + return Err(err)?; + } + }, + } + } else if !bump.is_empty() { + // While we can rationalize many of these combinations of operations together, + // we want to conservatively refuse to support any of them until users demand it. + // + // The most complex thing we *do* allow is `--bump major --bump beta --bump dev` + // because that makes perfect sense and is reasonable to do. + let release_components: Vec<_> = bump + .iter() + .filter(|bump| { + matches!( + bump, + VersionBump::Major | VersionBump::Minor | VersionBump::Patch + ) + }) + .collect(); + let prerelease_components: Vec<_> = bump + .iter() + .filter(|bump| { + matches!( + bump, + VersionBump::Alpha | VersionBump::Beta | VersionBump::Rc | VersionBump::Dev + ) + }) + .collect(); + let post_count = bump + .iter() + .filter(|bump| *bump == &VersionBump::Post) + .count(); + let stable_count = bump + .iter() + .filter(|bump| *bump == &VersionBump::Stable) + .count(); + + // Very little reason to do "bump to stable" and then do other things, + // even if we can make sense of it. + if stable_count > 0 && bump.len() > 1 { + let components = bump + .iter() + .map(ToString::to_string) + .collect::>() + .join(", "); + return Err(anyhow!( + "`--bump stable` cannot be used with another `--bump` value, got: {components}" + )); + } + + // Very little reason to "bump to post" and then do other things, + // how is it a post-release otherwise? + if post_count > 0 && bump.len() > 1 { + let components = bump + .iter() + .map(ToString::to_string) + .collect::>() + .join(", "); + return Err(anyhow!( + "`--bump post` cannot be used with another `--bump` value, got: {components}" + )); + } + + // `--bump major --bump minor` makes perfect sense (1.2.3 => 2.1.0) + // ...but it's weird and probably a mistake? 
+ // `--bump major --bump major` perfect sense (1.2.3 => 3.0.0) + // ...but it's weird and probably a mistake? + if release_components.len() > 1 { + let components = release_components + .iter() + .map(ToString::to_string) + .collect::>() + .join(", "); + return Err(anyhow!( + "Only one release version component can be provided to `--bump`, got: {components}" + )); + } + + // `--bump alpha --bump beta` is basically completely incoherent + // `--bump beta --bump beta` makes perfect sense (1.2.3b4 => 1.2.3b6) + // ...but it's weird and probably a mistake? + // `--bump beta --bump dev` makes perfect sense (1.2.3 => 1.2.3b1.dev1) + // ...but we want to discourage mixing `dev` with pre-releases + if prerelease_components.len() > 1 { + let components = prerelease_components + .iter() + .map(ToString::to_string) + .collect::>() + .join(", "); + return Err(anyhow!( + "Only one pre-release version component can be provided to `--bump`, got: {components}" + )); + } + + // Sort the given commands so the user doesn't have to care about + // the ordering of `--bump minor --bump beta` (only one ordering is ever useful) + bump.sort(); + + // Apply all the bumps + let mut new_version = old_version.clone(); + for bump in &bump { + let command = match *bump { + VersionBump::Major => BumpCommand::BumpRelease { index: 0 }, + VersionBump::Minor => BumpCommand::BumpRelease { index: 1 }, + VersionBump::Patch => BumpCommand::BumpRelease { index: 2 }, + VersionBump::Alpha => BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Alpha, + }, + VersionBump::Beta => BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Beta, + }, + VersionBump::Rc => BumpCommand::BumpPrerelease { + kind: PrereleaseKind::Rc, + }, + VersionBump::Post => BumpCommand::BumpPost, + VersionBump::Dev => BumpCommand::BumpDev, + VersionBump::Stable => BumpCommand::MakeStable, + }; + new_version.bump(command); + } + + if new_version <= old_version { + if old_version.is_stable() && new_version.is_pre() { + return Err(anyhow!( + "{old_version} => {new_version} didn't increase the version; when bumping to a pre-release version you also need to increase a release version component, e.g., with `--bump `" + )); + } + return Err(anyhow!( + "{old_version} => {new_version} didn't increase the version; provide the exact version to force an update" + )); + } + + Some(new_version) + } else { + None + }; + + // Update the toml and lock + let status = if dry_run { + ExitStatus::Success + } else if let Some(new_version) = &new_version { + let project = update_project(project, new_version, &mut toml, &pyproject_path)?; + Box::pin(lock_and_sync( + project, + project_dir, + locked, + frozen, + active, + no_sync, + python, + install_mirrors, + &settings, + network_settings, + python_preference, + python_downloads, + installer_metadata, + concurrency, + no_config, + cache, + printer, + preview, + )) + .await? + } else { + debug!("No changes to version; skipping update"); + ExitStatus::Success + }; + + // Report the results + let old_version = VersionInfo::new(Some(&name), &old_version); + let new_version = new_version.map(|version| VersionInfo::new(Some(&name), &version)); + print_version(old_version, new_version, short, output_format, printer)?; + + Ok(status) +} + +/// Find the pyproject.toml we're modifying +/// +/// Note that `uv version` never needs to support PEP 723 scripts, as those are unversioned. +async fn find_target(project_dir: &Path, package: Option<&PackageName>) -> Result { + // Find the project in the workspace. 
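The `--bump` handling above accepts several flags at once but only coherent combinations: `stable` and `post` must appear alone, at most one release component (`major`, `minor`, `patch`) and at most one pre-release component (`alpha`, `beta`, `rc`, `dev`) may be given, and the flags are sorted before being applied so the result does not depend on their order on the command line. A compact sketch of the same checks over plain strings; uv's real code uses the `VersionBump` enum and `uv_pep440::BumpCommand`.

// Mirrors the combination checks above; plain strings stand in for `VersionBump`.
fn validate_bumps(bumps: &[&str]) -> Result<(), String> {
    let releases = bumps
        .iter()
        .filter(|b| matches!(**b, "major" | "minor" | "patch"))
        .count();
    let prereleases = bumps
        .iter()
        .filter(|b| matches!(**b, "alpha" | "beta" | "rc" | "dev"))
        .count();
    let stable = bumps.iter().filter(|b| **b == "stable").count();
    let post = bumps.iter().filter(|b| **b == "post").count();

    if stable > 0 && bumps.len() > 1 {
        return Err("`--bump stable` cannot be combined with other bumps".to_string());
    }
    if post > 0 && bumps.len() > 1 {
        return Err("`--bump post` cannot be combined with other bumps".to_string());
    }
    if releases > 1 {
        return Err("at most one of major/minor/patch".to_string());
    }
    if prereleases > 1 {
        return Err("at most one of alpha/beta/rc/dev".to_string());
    }
    Ok(())
}

fn main() {
    // By analogy with the `1.2.3 => 1.2.3b1.dev1` example above, `1.2.3` with
    // `--bump major --bump beta --bump dev` would presumably become `2.0.0b1.dev1`.
    assert!(validate_bumps(&["major", "beta", "dev"]).is_ok());
    assert!(validate_bumps(&["major", "minor"]).is_err());
    assert!(validate_bumps(&["stable", "dev"]).is_err());
}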
+ // No workspace caching since `uv version` changes the workspace definition. + let project = if let Some(package) = package { + VirtualProject::Project( + Workspace::discover( + project_dir, + &DiscoveryOptions::default(), + &WorkspaceCache::default(), + ) + .await? + .with_current_project(package.clone()) + .with_context(|| format!("Package `{package}` not found in workspace"))?, + ) + } else { + VirtualProject::discover( + project_dir, + &DiscoveryOptions::default(), + &WorkspaceCache::default(), + ) + .await? + }; + Ok(project) +} + +/// Update the pyproject.toml on-disk and in-memory with a new version +fn update_project( + project: VirtualProject, + new_version: &Version, + toml: &mut PyProjectTomlMut, + pyproject_path: &Path, +) -> Result { + // Save to disk + toml.set_version(new_version)?; + let content = toml.to_string(); + fs_err::write(pyproject_path, &content)?; + + // Update the `pyproject.toml` in-memory. + let project = project + .with_pyproject_toml(toml::from_str(&content).map_err(ProjectError::PyprojectTomlParse)?) + .ok_or(ProjectError::PyprojectTomlUpdate)?; + + Ok(project) +} + +/// Do the minimal work to try to find the package in the lockfile and print its version +async fn print_frozen_version( + project: VirtualProject, + name: &PackageName, + project_dir: &Path, + active: Option, + python: Option, + install_mirrors: PythonInstallMirrors, + settings: &ResolverInstallerSettings, + network_settings: NetworkSettings, + python_preference: PythonPreference, + python_downloads: PythonDownloads, + concurrency: Concurrency, + no_config: bool, + cache: &Cache, + short: bool, + output_format: VersionFormat, + printer: Printer, + preview: PreviewMode, +) -> Result { + // Discover the interpreter (this is the same interpreter --no-sync uses). + let interpreter = ProjectInterpreter::discover( + project.workspace(), + project_dir, + &DependencyGroupsWithDefaults::none(), + python.as_deref().map(PythonRequest::parse), + &network_settings, + python_preference, + python_downloads, + &install_mirrors, + false, + no_config, + active, + cache, + printer, + preview, + ) + .await? + .into_interpreter(); + + let target = AddTarget::Project(project, Box::new(PythonTarget::Interpreter(interpreter))); + + // Initialize any shared state. + let state = UniversalState::default(); + + // Lock and sync the environment, if necessary. + let lock = match project::lock::LockOperation::new( + LockMode::Frozen, + &settings.resolver, + &network_settings, + &state, + Box::new(DefaultResolveLogger), + concurrency, + cache, + &WorkspaceCache::default(), + printer, + preview, + ) + .execute((&target).into()) + .await + { + Ok(result) => result.into_lock(), + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::native_tls(network_settings.native_tls) + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())); + } + Err(err) => return Err(err.into()), + }; + + // Try to find the package of interest in the lock + let Some(package) = lock + .packages() + .iter() + .find(|package| package.name() == name) + else { + return Err(anyhow!( + "Failed to find the {name}'s version in the frozen lockfile" + )); + }; + let Some(version) = package.version() else { + return Err(anyhow!( + "Failed to find the {name}'s version in the frozen lockfile" + )); + }; + + // Finally, print! 
+ let old_version = VersionInfo::new(Some(name), version); + print_version(old_version, None, short, output_format, printer)?; + + Ok(ExitStatus::Success) +} + +/// Re-lock and re-sync the project after a series of edits. +#[allow(clippy::fn_params_excessive_bools)] +async fn lock_and_sync( + project: VirtualProject, + project_dir: &Path, + locked: bool, + frozen: bool, + active: Option, + no_sync: bool, + python: Option, + install_mirrors: PythonInstallMirrors, + settings: &ResolverInstallerSettings, + network_settings: NetworkSettings, + python_preference: PythonPreference, + python_downloads: PythonDownloads, + installer_metadata: bool, + concurrency: Concurrency, + no_config: bool, + cache: &Cache, + printer: Printer, + preview: PreviewMode, +) -> Result { + // If frozen, don't touch the lock or sync at all + if frozen { + return Ok(ExitStatus::Success); + } + + // Determine the groups and extras that should be enabled. + let default_groups = default_dependency_groups(project.pyproject_toml())?; + let default_extras = DefaultExtras::default(); + let groups = DependencyGroups::default().with_defaults(default_groups); + let extras = ExtrasSpecification::default().with_defaults(default_extras); + let install_options = InstallOptions::default(); + + // Convert to an `AddTarget` by attaching the appropriate interpreter or environment. + let target = if no_sync { + // Discover the interpreter. + let interpreter = ProjectInterpreter::discover( + project.workspace(), + project_dir, + &groups, + python.as_deref().map(PythonRequest::parse), + &network_settings, + python_preference, + python_downloads, + &install_mirrors, + false, + no_config, + active, + cache, + printer, + preview, + ) + .await? + .into_interpreter(); + + AddTarget::Project(project, Box::new(PythonTarget::Interpreter(interpreter))) + } else { + // Discover or create the virtual environment. + let environment = ProjectEnvironment::get_or_init( + project.workspace(), + &groups, + python.as_deref().map(PythonRequest::parse), + &install_mirrors, + &network_settings, + python_preference, + python_downloads, + no_sync, + no_config, + active, + cache, + DryRun::Disabled, + printer, + preview, + ) + .await? + .into_environment()?; + + AddTarget::Project(project, Box::new(PythonTarget::Environment(environment))) + }; + + // Determine the lock mode. + let mode = if locked { + LockMode::Locked(target.interpreter()) + } else { + LockMode::Write(target.interpreter()) + }; + + // Initialize any shared state. + let state = UniversalState::default(); + let workspace_cache = WorkspaceCache::default(); + + // Lock and sync the environment, if necessary. + let lock = match project::lock::LockOperation::new( + mode, + &settings.resolver, + &network_settings, + &state, + Box::new(DefaultResolveLogger), + concurrency, + cache, + &workspace_cache, + printer, + preview, + ) + .execute((&target).into()) + .await + { + Ok(result) => result.into_lock(), + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::native_tls(network_settings.native_tls) + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())); + } + Err(err) => return Err(err.into()), + }; + + let AddTarget::Project(project, environment) = target else { + // If we're not adding to a project, exit early. + return Ok(ExitStatus::Success); + }; + + let PythonTarget::Environment(venv) = &*environment else { + // If we're not syncing, exit early. 
+ return Ok(ExitStatus::Success); + }; + + // Perform a full sync, because we don't know what exactly is affected by the version. + + // Identify the installation target. + let target = match &project { + VirtualProject::Project(project) => InstallTarget::Project { + workspace: project.workspace(), + name: project.project_name(), + lock: &lock, + }, + VirtualProject::NonProject(workspace) => InstallTarget::NonProjectWorkspace { + workspace, + lock: &lock, + }, + }; + + let state = state.fork(); + + match project::sync::do_sync( + target, + venv, + &extras, + &groups, + EditableMode::Editable, + install_options, + Modifications::Sufficient, + None, + settings.into(), + &network_settings, + &state, + Box::new(DefaultInstallLogger), + installer_metadata, + concurrency, + cache, + workspace_cache, + DryRun::Disabled, + printer, + preview, + ) + .await + { + Ok(()) => {} + Err(ProjectError::Operation(err)) => { + return diagnostics::OperationDiagnostic::native_tls(network_settings.native_tls) + .report(err) + .map_or(Ok(ExitStatus::Failure), |err| Err(err.into())); + } + Err(err) => return Err(err.into()), + } + + Ok(ExitStatus::Success) +} + +fn print_version( + old_version: VersionInfo, + new_version: Option, + short: bool, + output_format: VersionFormat, + printer: Printer, +) -> Result<()> { + match output_format { + VersionFormat::Text => { + if let Some(name) = &old_version.package_name { + if !short { + write!(printer.stdout(), "{name} ")?; + } + } + if let Some(new_version) = new_version { + if short { + writeln!(printer.stdout(), "{}", new_version.cyan())?; + } else { + writeln!( + printer.stdout(), + "{} => {}", + old_version.cyan(), + new_version.cyan() + )?; + } + } else { + writeln!(printer.stdout(), "{}", old_version.cyan())?; + } + } + VersionFormat::Json => { + let final_version = new_version.unwrap_or(old_version); + let string = serde_json::to_string_pretty(&final_version)?; + writeln!(printer.stdout(), "{string}")?; + } + } + Ok(()) +} diff --git a/crates/uv/src/commands/publish.rs b/crates/uv/src/commands/publish.rs index 98d7a0699..e7f5e00a2 100644 --- a/crates/uv/src/commands/publish.rs +++ b/crates/uv/src/commands/publish.rs @@ -8,7 +8,6 @@ use console::Term; use owo_colors::OwoColorize; use tokio::sync::Semaphore; use tracing::{debug, info}; -use url::Url; use uv_auth::Credentials; use uv_cache::Cache; use uv_client::{AuthIntegration, BaseClient, BaseClientBuilder, RegistryClientBuilder}; @@ -17,6 +16,7 @@ use uv_distribution_types::{Index, IndexCapabilities, IndexLocations, IndexUrl}; use uv_publish::{ CheckUrlClient, TrustedPublishResult, check_trusted_publishing, files_for_publishing, upload, }; +use uv_redacted::DisplaySafeUrl; use uv_warnings::warn_user_once; use crate::commands::reporters::PublishReporter; @@ -26,7 +26,7 @@ use crate::settings::NetworkSettings; pub(crate) async fn publish( paths: Vec, - publish_url: Url, + publish_url: DisplaySafeUrl, trusted_publishing: TrustedPublishing, keyring_provider: KeyringProviderType, network_settings: &NetworkSettings, @@ -95,6 +95,7 @@ pub(crate) async fn publish( false, ); let registry_client_builder = RegistryClientBuilder::new(cache.clone()) + .retries_from_env()? .native_tls(network_settings.native_tls) .connectivity(network_settings.connectivity) .allow_insecure_host(network_settings.allow_insecure_host.clone()) @@ -196,7 +197,7 @@ enum Prompt { /// /// Returns the publish URL, the username and the password. 
async fn gather_credentials( - mut publish_url: Url, + mut publish_url: DisplaySafeUrl, mut username: Option, mut password: Option, trusted_publishing: TrustedPublishing, @@ -205,7 +206,7 @@ async fn gather_credentials( check_url: Option<&IndexUrl>, prompt: Prompt, printer: Printer, -) -> Result<(Url, Credentials)> { +) -> Result<(DisplaySafeUrl, Credentials)> { // Support reading username and password from the URL, for symmetry with the index API. if let Some(url_password) = publish_url.password() { if password.is_some_and(|password| password != url_password) { @@ -296,7 +297,7 @@ async fn gather_credentials( if let Some(username) = &username { debug!("Fetching password from keyring"); if let Some(keyring_password) = keyring_provider - .fetch(&publish_url, Some(username)) + .fetch(DisplaySafeUrl::ref_cast(&publish_url), Some(username)) .await .as_ref() .and_then(|credentials| credentials.password()) @@ -342,13 +343,14 @@ mod tests { use std::str::FromStr; use insta::assert_snapshot; - use url::Url; + + use uv_redacted::DisplaySafeUrl; async fn get_credentials( - url: Url, + url: DisplaySafeUrl, username: Option, password: Option, - ) -> Result<(Url, Credentials)> { + ) -> Result<(DisplaySafeUrl, Credentials)> { let client = BaseClientBuilder::new().build(); gather_credentials( url, @@ -366,10 +368,10 @@ mod tests { #[tokio::test] async fn username_password_sources() { - let example_url = Url::from_str("https://example.com").unwrap(); - let example_url_username = Url::from_str("https://ferris@example.com").unwrap(); + let example_url = DisplaySafeUrl::from_str("https://example.com").unwrap(); + let example_url_username = DisplaySafeUrl::from_str("https://ferris@example.com").unwrap(); let example_url_username_password = - Url::from_str("https://ferris:f3rr1s@example.com").unwrap(); + DisplaySafeUrl::from_str("https://ferris:f3rr1s@example.com").unwrap(); let (publish_url, credentials) = get_credentials(example_url.clone(), None, None) .await diff --git a/crates/uv/src/commands/python/find.rs b/crates/uv/src/commands/python/find.rs index 63e25fed1..e188e9d20 100644 --- a/crates/uv/src/commands/python/find.rs +++ b/crates/uv/src/commands/python/find.rs @@ -3,6 +3,7 @@ use std::fmt::Write; use std::path::Path; use uv_cache::Cache; +use uv_configuration::{DependencyGroupsWithDefaults, PreviewMode}; use uv_fs::Simplified; use uv_python::{ EnvironmentPreference, PythonDownloads, PythonInstallation, PythonPreference, PythonRequest, @@ -31,6 +32,7 @@ pub(crate) async fn find( python_preference: PythonPreference, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { let environment_preference = if system { EnvironmentPreference::OnlySystem @@ -56,6 +58,8 @@ pub(crate) async fn find( } }; + // Don't enable the requires-python settings on groups + let groups = DependencyGroupsWithDefaults::none(); let WorkspacePython { source, python_request, @@ -63,6 +67,7 @@ pub(crate) async fn find( } = WorkspacePython::from_request( request.map(|request| PythonRequest::parse(&request)), project.as_ref().map(VirtualProject::workspace), + &groups, project_dir, no_config, ) @@ -73,6 +78,7 @@ pub(crate) async fn find( environment_preference, python_preference, cache, + preview, )?; // Warn if the discovered Python version is incompatible with the current workspace @@ -80,6 +86,7 @@ pub(crate) async fn find( match validate_project_requires_python( python.interpreter(), project.as_ref().map(VirtualProject::workspace), + &groups, &requires_python, &source, ) { @@ -116,6 +123,7 @@ pub(crate) async fn 
find_script( no_config: bool, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { let interpreter = match ScriptInterpreter::discover( script, @@ -129,6 +137,7 @@ pub(crate) async fn find_script( Some(false), cache, printer, + preview, ) .await { diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs index 7e190d31d..8c8387d07 100644 --- a/crates/uv/src/commands/python/install.rs +++ b/crates/uv/src/commands/python/install.rs @@ -1,11 +1,14 @@ use std::borrow::Cow; +use std::collections::BTreeMap; use std::fmt::Write; use std::io::ErrorKind; use std::path::{Path, PathBuf}; +use std::str::FromStr; use anyhow::{Error, Result}; use futures::StreamExt; use futures::stream::FuturesUnordered; +use indexmap::IndexSet; use itertools::{Either, Itertools}; use owo_colors::OwoColorize; use rustc_hash::{FxHashMap, FxHashSet}; @@ -13,14 +16,17 @@ use tracing::{debug, trace}; use uv_configuration::PreviewMode; use uv_fs::Simplified; -use uv_python::downloads::{self, DownloadResult, ManagedPythonDownload, PythonDownloadRequest}; +use uv_python::downloads::{ + self, ArchRequest, DownloadResult, ManagedPythonDownload, PythonDownloadRequest, +}; use uv_python::managed::{ - ManagedPythonInstallation, ManagedPythonInstallations, python_executable_dir, + ManagedPythonInstallation, ManagedPythonInstallations, PythonMinorVersionLink, + create_link_to_executable, python_executable_dir, }; use uv_python::platform::{Arch, Libc}; use uv_python::{ - PythonDownloads, PythonInstallationKey, PythonRequest, PythonVersionFile, - VersionFileDiscoveryOptions, VersionFilePreference, + PythonDownloads, PythonInstallationKey, PythonInstallationMinorVersionKey, PythonRequest, + PythonVersionFile, VersionFileDiscoveryOptions, VersionFilePreference, VersionRequest, }; use uv_shell::Shell; use uv_trampoline_builder::{Launcher, LauncherKind}; @@ -32,7 +38,7 @@ use crate::commands::{ExitStatus, elapsed}; use crate::printer::Printer; use crate::settings::NetworkSettings; -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] struct InstallRequest { /// The original request from the user request: PythonRequest, @@ -61,7 +67,9 @@ impl InstallRequest { Ok(download) => download, Err(downloads::Error::NoDownloadFound(request)) if request.libc().is_some_and(Libc::is_musl) - && request.arch().is_some_and(Arch::is_arm) => + && request + .arch() + .is_some_and(|arch| Arch::is_arm(&arch.inner())) => { return Err(anyhow::anyhow!( "uv does not yet provide musl Python distributions on aarch64." @@ -80,6 +88,10 @@ impl InstallRequest { fn matches_installation(&self, installation: &ManagedPythonInstallation) -> bool { self.download_request.satisfied_by_key(installation.key()) } + + fn python_request(&self) -> &PythonRequest { + &self.request + } } impl std::fmt::Display for InstallRequest { @@ -130,6 +142,7 @@ pub(crate) async fn install( install_dir: Option, targets: Vec, reinstall: bool, + upgrade: bool, force: bool, python_install_mirror: Option, pypy_install_mirror: Option, @@ -151,34 +164,66 @@ pub(crate) async fn install( return Ok(ExitStatus::Failure); } + if upgrade && preview.is_disabled() { + warn_user!( + "`uv python upgrade` is experimental and may change without warning. 
Pass `--preview` to disable this warning" + ); + } + if default && targets.len() > 1 { anyhow::bail!("The `--default` flag cannot be used with multiple targets"); } + // Read the existing installations, lock the directory for the duration + let installations = ManagedPythonInstallations::from_settings(install_dir.clone())?.init()?; + let installations_dir = installations.root(); + let scratch_dir = installations.scratch(); + let _lock = installations.lock().await?; + let existing_installations: Vec<_> = installations + .find_all()? + .inspect(|installation| trace!("Found existing installation {}", installation.key())) + .collect(); + // Resolve the requests let mut is_default_install = false; + let mut is_unspecified_upgrade = false; let requests: Vec<_> = if targets.is_empty() { - PythonVersionFile::discover( - project_dir, - &VersionFileDiscoveryOptions::default() - .with_no_config(no_config) - .with_preference(VersionFilePreference::Versions), - ) - .await? - .map(PythonVersionFile::into_versions) - .unwrap_or_else(|| { - // If no version file is found and no requests were made - is_default_install = true; - vec![if reinstall { - // On bare `--reinstall`, reinstall all Python versions - PythonRequest::Any - } else { - PythonRequest::Default - }] - }) - .into_iter() - .map(|a| InstallRequest::new(a, python_downloads_json_url.as_deref())) - .collect::>>()? + if upgrade { + is_unspecified_upgrade = true; + let mut minor_version_requests = IndexSet::::default(); + for installation in &existing_installations { + let request = VersionRequest::major_minor_request_from_key(installation.key()); + if let Ok(request) = InstallRequest::new( + PythonRequest::Version(request), + python_downloads_json_url.as_deref(), + ) { + minor_version_requests.insert(request); + } + } + minor_version_requests.into_iter().collect::>() + } else { + PythonVersionFile::discover( + project_dir, + &VersionFileDiscoveryOptions::default() + .with_no_config(no_config) + .with_preference(VersionFilePreference::Versions), + ) + .await? + .map(PythonVersionFile::into_versions) + .unwrap_or_else(|| { + // If no version file is found and no requests were made + is_default_install = true; + vec![if reinstall { + // On bare `--reinstall`, reinstall all Python versions + PythonRequest::Any + } else { + PythonRequest::Default + }] + }) + .into_iter() + .map(|a| InstallRequest::new(a, python_downloads_json_url.as_deref())) + .collect::>>()? + } } else { targets .iter() @@ -188,18 +233,39 @@ pub(crate) async fn install( }; let Some(first_request) = requests.first() else { + if upgrade { + writeln!( + printer.stderr(), + "There are no installed versions to upgrade" + )?; + } return Ok(ExitStatus::Success); }; - // Read the existing installations, lock the directory for the duration - let installations = ManagedPythonInstallations::from_settings(install_dir)?.init()?; - let installations_dir = installations.root(); - let scratch_dir = installations.scratch(); - let _lock = installations.lock().await?; - let existing_installations: Vec<_> = installations - .find_all()? 
- .inspect(|installation| trace!("Found existing installation {}", installation.key())) - .collect(); + let requested_minor_versions = requests + .iter() + .filter_map(|request| { + if let PythonRequest::Version(VersionRequest::MajorMinor(major, minor, ..)) = + request.python_request() + { + uv_pep440::Version::from_str(&format!("{major}.{minor}")).ok() + } else { + None + } + }) + .collect::>(); + + if upgrade + && requests + .iter() + .any(|request| request.request.includes_patch()) + { + writeln!( + printer.stderr(), + "error: `uv python upgrade` only accepts minor versions" + )?; + return Ok(ExitStatus::Failure); + } // Find requests that are already satisfied let mut changelog = Changelog::default(); @@ -257,15 +323,20 @@ pub(crate) async fn install( } } } - (vec![], unsatisfied) } else { // If we can find one existing installation that matches the request, it is satisfied requests.iter().partition_map(|request| { - if let Some(installation) = existing_installations - .iter() - .find(|installation| request.matches_installation(installation)) - { + if let Some(installation) = existing_installations.iter().find(|installation| { + if upgrade { + // If this is an upgrade, the requested version is a minor version + // but the requested download is the highest patch for that minor + // version. We need to install it unless an exact match is found. + request.download.key() == installation.key() + } else { + request.matches_installation(installation) + } + }) { debug!( "Found `{}` for request `{}`", installation.key().green(), @@ -305,6 +376,7 @@ pub(crate) async fn install( // Download and unpack the Python versions concurrently let client = uv_client::BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()) @@ -333,6 +405,7 @@ pub(crate) async fn install( let mut errors = vec![]; let mut downloaded = Vec::with_capacity(downloads.len()); + let mut requests_by_new_installation = BTreeMap::new(); while let Some((download, result)) = tasks.next().await { match result { Ok(download_result) => { @@ -344,10 +417,19 @@ pub(crate) async fn install( let installation = ManagedPythonInstallation::new(path, download); changelog.installed.insert(installation.key().clone()); + for request in &requests { + // Take note of which installations satisfied which requests + if request.matches_installation(&installation) { + requests_by_new_installation + .entry(installation.key().clone()) + .or_insert(Vec::new()) + .push(request); + } + } if changelog.existing.contains(installation.key()) { changelog.uninstalled.insert(installation.key().clone()); } - downloaded.push(installation); + downloaded.push(installation.clone()); } Err(err) => { errors.push((download.key().clone(), anyhow::Error::new(err))); @@ -383,18 +465,24 @@ pub(crate) async fn install( .expect("We should have a bin directory with preview enabled") .as_path(); + let upgradeable = (default || is_default_install) + || requested_minor_versions.contains(&installation.key().version().python_version()); + create_bin_links( installation, bin, reinstall, force, default, + upgradeable, + upgrade, is_default_install, first_request, &existing_installations, &installations, &mut changelog, &mut errors, + preview, )?; if preview.is_enabled() { @@ -405,19 +493,92 @@ pub(crate) async fn install( } } + let minor_versions = + PythonInstallationMinorVersionKey::highest_installations_by_minor_version_key( + installations + 
.iter() + .copied() + .chain(existing_installations.iter()), + ); + + for installation in minor_versions.values() { + if upgrade { + // During an upgrade, update existing symlinks but avoid + // creating new ones. + installation.update_minor_version_link(preview)?; + } else { + installation.ensure_minor_version_link(preview)?; + } + } + if changelog.installed.is_empty() && errors.is_empty() { if is_default_install { writeln!( printer.stderr(), "Python is already installed. Use `uv python install ` to install another version.", )?; + } else if upgrade && requests.is_empty() { + writeln!( + printer.stderr(), + "There are no installed versions to upgrade" + )?; } else if requests.len() > 1 { - writeln!(printer.stderr(), "All requested versions already installed")?; + if upgrade { + if is_unspecified_upgrade { + writeln!( + printer.stderr(), + "All versions already on latest supported patch release" + )?; + } else { + writeln!( + printer.stderr(), + "All requested versions already on latest supported patch release" + )?; + } + } else { + writeln!(printer.stderr(), "All requested versions already installed")?; + } } return Ok(ExitStatus::Success); } if !changelog.installed.is_empty() { + for install_key in &changelog.installed { + // Make a note if the selected python is non-native for the architecture, + // if none of the matching user requests were explicit + let native_arch = Arch::from_env(); + if install_key.arch().family() != native_arch.family() { + let not_explicit = + requests_by_new_installation + .get(install_key) + .and_then(|requests| { + let all_non_explicit = requests.iter().all(|request| { + if let PythonRequest::Key(key) = &request.request { + !matches!(key.arch(), Some(ArchRequest::Explicit(_))) + } else { + true + } + }); + if all_non_explicit { + requests.iter().next() + } else { + None + } + }); + if let Some(not_explicit) = not_explicit { + let native_request = + not_explicit.download_request.clone().with_arch(native_arch); + writeln!( + printer.stderr(), + "{} uv selected a Python distribution with an emulated architecture ({}) for your platform because support for the native architecture ({}) is not yet mature; to override this behaviour, request the native architecture explicitly with: {}", + "note:".bold(), + install_key.arch(), + native_arch, + native_request + )?; + } + } + } if changelog.installed.len() == 1 { let installed = changelog.installed.iter().next().unwrap(); // Ex) "Installed Python 3.9.7 in 1.68s" @@ -518,12 +679,15 @@ fn create_bin_links( reinstall: bool, force: bool, default: bool, + upgradeable: bool, + upgrade: bool, is_default_install: bool, first_request: &InstallRequest, existing_installations: &[ManagedPythonInstallation], installations: &[&ManagedPythonInstallation], changelog: &mut Changelog, errors: &mut Vec<(PythonInstallationKey, Error)>, + preview: PreviewMode, ) -> Result<(), Error> { let targets = if (default || is_default_install) && first_request.matches_installation(installation) { @@ -538,7 +702,19 @@ fn create_bin_links( for target in targets { let target = bin.join(target); - match installation.create_bin_link(&target) { + let executable = if upgradeable { + if let Some(minor_version_link) = + PythonMinorVersionLink::from_installation(installation, preview) + { + minor_version_link.symlink_executable.clone() + } else { + installation.executable(false) + } + } else { + installation.executable(false) + }; + + match create_link_to_executable(&target, executable.clone()) { Ok(()) => { debug!( "Installed executable at `{}` for {}", @@ 
-587,13 +763,23 @@ fn create_bin_links( // There's an existing executable we don't manage, require `--force` if valid_link { if !force { - errors.push(( - installation.key().clone(), - anyhow::anyhow!( - "Executable already exists at `{}` but is not managed by uv; use `--force` to replace it", - to.simplified_display() - ), - )); + if upgrade { + warn_user!( + "Executable already exists at `{}` but is not managed by uv; use `uv python install {}.{}{} --force` to replace it", + to.simplified_display(), + installation.key().major(), + installation.key().minor(), + installation.key().variant().suffix() + ); + } else { + errors.push(( + installation.key().clone(), + anyhow::anyhow!( + "Executable already exists at `{}` but is not managed by uv; use `--force` to replace it", + to.simplified_display() + ), + )); + } continue; } debug!( @@ -674,7 +860,7 @@ fn create_bin_links( .remove(&target); } - installation.create_bin_link(&target)?; + create_link_to_executable(&target, executable)?; debug!( "Updated executable at `{}` to {}", target.simplified_display(), @@ -745,8 +931,7 @@ fn warn_if_not_on_path(bin: &Path) { /// Find the [`ManagedPythonInstallation`] corresponding to an executable link installed at the /// given path, if any. /// -/// Like [`ManagedPythonInstallation::is_bin_link`], but this method will only resolve the -/// given path one time. +/// Will resolve symlinks on Unix. On Windows, will resolve the target link for a trampoline. fn find_matching_bin_link<'a>( mut installations: impl Iterator, path: &Path, @@ -755,13 +940,13 @@ fn find_matching_bin_link<'a>( if !path.is_symlink() { return None; } - path.read_link().ok()? + fs_err::canonicalize(path).ok()? } else if cfg!(windows) { let launcher = Launcher::try_from_path(path).ok()??; if !matches!(launcher.kind, LauncherKind::Python) { return None; } - launcher.python_path + dunce::canonicalize(launcher.python_path).ok()? } else { unreachable!("Only Windows and Unix are supported") }; diff --git a/crates/uv/src/commands/python/list.rs b/crates/uv/src/commands/python/list.rs index a7f43ccdc..2cd54747c 100644 --- a/crates/uv/src/commands/python/list.rs +++ b/crates/uv/src/commands/python/list.rs @@ -2,6 +2,7 @@ use serde::Serialize; use std::collections::BTreeSet; use std::fmt::Write; use uv_cli::PythonListFormat; +use uv_configuration::PreviewMode; use uv_pep440::Version; use anyhow::Result; @@ -64,6 +65,7 @@ pub(crate) async fn list( python_downloads: PythonDownloads, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { let request = request.as_deref().map(PythonRequest::parse); let base_download_request = if python_preference == PythonPreference::OnlySystem { @@ -124,6 +126,7 @@ pub(crate) async fn list( EnvironmentPreference::OnlySystem, python_preference, cache, + preview, ) // Raise discovery errors if critical .filter(|result| { @@ -167,16 +170,20 @@ pub(crate) async fn list( } } - // Only show the latest patch version for each download unless all were requested + // Only show the latest patch version for each download unless all were requested. + // + // We toggle off platforms/arches based unless all_platforms/all_arches because + // we want to only show the "best" option for each version by default, even + // if e.g. the x86_32 build would also work on x86_64. if !matches!(kind, Kind::System) { if let [major, minor, ..] 
= *key.version().release() { if !seen_minor.insert(( - *key.os(), + all_platforms.then_some(*key.os()), major, minor, key.variant(), key.implementation(), - *key.arch(), + all_arches.then_some(*key.arch()), *key.libc(), )) { if matches!(kind, Kind::Download) && !all_versions { @@ -186,13 +193,13 @@ pub(crate) async fn list( } if let [major, minor, patch] = *key.version().release() { if !seen_patch.insert(( - *key.os(), + all_platforms.then_some(*key.os()), major, minor, patch, key.variant(), key.implementation(), - *key.arch(), + all_arches.then_some(*key.arch()), key.libc(), )) { if matches!(kind, Kind::Download) { diff --git a/crates/uv/src/commands/python/pin.rs b/crates/uv/src/commands/python/pin.rs index 40dbfa1d9..f4d10cdfa 100644 --- a/crates/uv/src/commands/python/pin.rs +++ b/crates/uv/src/commands/python/pin.rs @@ -7,28 +7,40 @@ use owo_colors::OwoColorize; use tracing::debug; use uv_cache::Cache; +use uv_client::BaseClientBuilder; +use uv_configuration::{DependencyGroupsWithDefaults, PreviewMode}; use uv_dirs::user_uv_config_dir; use uv_fs::Simplified; use uv_python::{ - EnvironmentPreference, PYTHON_VERSION_FILENAME, PythonInstallation, PythonPreference, - PythonRequest, PythonVersionFile, VersionFileDiscoveryOptions, + EnvironmentPreference, PYTHON_VERSION_FILENAME, PythonDownloads, PythonInstallation, + PythonPreference, PythonRequest, PythonVersionFile, VersionFileDiscoveryOptions, }; +use uv_settings::PythonInstallMirrors; use uv_warnings::warn_user_once; use uv_workspace::{DiscoveryOptions, VirtualProject, WorkspaceCache}; -use crate::commands::{ExitStatus, project::find_requires_python}; +use crate::commands::{ + ExitStatus, project::find_requires_python, reporters::PythonDownloadReporter, +}; use crate::printer::Printer; +use crate::settings::NetworkSettings; /// Pin to a specific Python version. +#[allow(clippy::fn_params_excessive_bools)] pub(crate) async fn pin( project_dir: &Path, request: Option, resolved: bool, python_preference: PythonPreference, + python_downloads: PythonDownloads, no_project: bool, global: bool, + rm: bool, + install_mirrors: PythonInstallMirrors, + network_settings: NetworkSettings, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { let workspace_cache = WorkspaceCache::default(); let virtual_project = if no_project { @@ -45,16 +57,29 @@ pub(crate) async fn pin( } }; - let version_file = if global { - if let Some(path) = user_uv_config_dir() { - PythonVersionFile::discover_user_config(path, &VersionFileDiscoveryOptions::default()) - .await - } else { - Ok(None) - } - } else { - PythonVersionFile::discover(project_dir, &VersionFileDiscoveryOptions::default()).await - }; + // Search for an existing file, we won't necessarily write to this, we'll construct a target + // path if there's a request later on. + let version_file = PythonVersionFile::discover( + project_dir, + &VersionFileDiscoveryOptions::default().with_no_local(global), + ) + .await; + + if rm { + let Some(file) = version_file? 
else { + if global { + bail!("No global Python pin found"); + } + bail!("No Python version file found"); + }; + fs_err::tokio::remove_file(file.path()).await?; + writeln!( + printer.stdout(), + "Removed Python version file at `{}`", + file.path().user_display() + )?; + return Ok(ExitStatus::Success); + } let Some(request) = request else { // Display the current pinned Python version @@ -67,12 +92,13 @@ pub(crate) async fn pin( virtual_project, python_preference, cache, + preview, ); } } return Ok(ExitStatus::Success); } - bail!("No pinned Python version found") + bail!("No Python version file found; specify a version to create one") }; let request = PythonRequest::parse(&request); @@ -80,18 +106,41 @@ pub(crate) async fn pin( bail!("Requests for arbitrary names (e.g., `{name}`) are not supported in version files"); } - let python = match PythonInstallation::find( - &request, + let client_builder = BaseClientBuilder::new() + .retries_from_env()? + .connectivity(network_settings.connectivity) + .native_tls(network_settings.native_tls) + .allow_insecure_host(network_settings.allow_insecure_host.clone()); + let reporter = PythonDownloadReporter::single(printer); + + let python = match PythonInstallation::find_or_download( + Some(&request), EnvironmentPreference::OnlySystem, python_preference, + python_downloads, + &client_builder, cache, - ) { + Some(&reporter), + install_mirrors.python_install_mirror.as_deref(), + install_mirrors.pypy_install_mirror.as_deref(), + install_mirrors.python_downloads_json_url.as_deref(), + preview, + ) + .await + { Ok(python) => Some(python), // If no matching Python version is found, don't fail unless `resolved` was requested - Err(uv_python::Error::MissingPython(err)) if !resolved => { + Err(uv_python::Error::MissingPython(err, ..)) if !resolved => { + // N.B. We omit the hint and just show the inner error message warn_user_once!("{err}"); None } + // If there was some other error, log it + Err(err) if !resolved => { + debug!("{err}"); + None + } + // If `resolved` was requested, we must find an interpreter — fail otherwise Err(err) => return Err(err.into()), }; @@ -213,6 +262,7 @@ fn warn_if_existing_pin_incompatible_with_project( virtual_project: &VirtualProject, python_preference: PythonPreference, cache: &Cache, + preview: PreviewMode, ) { // Check if the pinned version is compatible with the project. if let Some(pin_version) = pep440_version_from_request(pin) { @@ -237,6 +287,7 @@ fn warn_if_existing_pin_incompatible_with_project( EnvironmentPreference::OnlySystem, python_preference, cache, + preview, ) { Ok(python) => { let python_version = python.python_version(); @@ -276,6 +327,9 @@ struct Pin<'a> { /// Checks if the pinned Python version is compatible with the workspace/project's `Requires-Python`. 
fn assert_pin_compatible_with_project(pin: &Pin, virtual_project: &VirtualProject) -> Result<()> { + // Don't factor in requires-python settings on dependency-groups + let groups = DependencyGroupsWithDefaults::none(); + let (requires_python, project_type) = match virtual_project { VirtualProject::Project(project_workspace) => { debug!( @@ -283,7 +337,8 @@ fn assert_pin_compatible_with_project(pin: &Pin, virtual_project: &VirtualProjec project_workspace.project_name(), project_workspace.workspace().install_path().display() ); - let requires_python = find_requires_python(project_workspace.workspace())?; + + let requires_python = find_requires_python(project_workspace.workspace(), &groups)?; (requires_python, "project") } VirtualProject::NonProject(workspace) => { @@ -291,7 +346,7 @@ fn assert_pin_compatible_with_project(pin: &Pin, virtual_project: &VirtualProjec "Discovered virtual workspace at: {}", workspace.install_path().display() ); - let requires_python = find_requires_python(workspace)?; + let requires_python = find_requires_python(workspace, &groups)?; (requires_python, "workspace") } }; diff --git a/crates/uv/src/commands/python/uninstall.rs b/crates/uv/src/commands/python/uninstall.rs index ac159344c..642942d07 100644 --- a/crates/uv/src/commands/python/uninstall.rs +++ b/crates/uv/src/commands/python/uninstall.rs @@ -5,6 +5,7 @@ use std::path::PathBuf; use anyhow::Result; use futures::StreamExt; use futures::stream::FuturesUnordered; +use indexmap::IndexSet; use itertools::Itertools; use owo_colors::OwoColorize; use rustc_hash::{FxHashMap, FxHashSet}; @@ -13,8 +14,10 @@ use tracing::{debug, warn}; use uv_configuration::PreviewMode; use uv_fs::Simplified; use uv_python::downloads::PythonDownloadRequest; -use uv_python::managed::{ManagedPythonInstallations, python_executable_dir}; -use uv_python::{PythonInstallationKey, PythonRequest}; +use uv_python::managed::{ + ManagedPythonInstallations, PythonMinorVersionLink, python_executable_dir, +}; +use uv_python::{PythonInstallationKey, PythonInstallationMinorVersionKey, PythonRequest}; use crate::commands::python::install::format_executables; use crate::commands::python::{ChangeEvent, ChangeEventKind}; @@ -87,7 +90,6 @@ async fn do_uninstall( // Always include pre-releases in uninstalls .map(|result| result.map(|request| request.with_prereleases(true))) .collect::>>()?; - let installed_installations: Vec<_> = installations.find_all()?.collect(); let mut matching_installations = BTreeSet::default(); for (request, download_request) in requests.iter().zip(download_requests) { @@ -198,13 +200,13 @@ async fn do_uninstall( }); } - let mut uninstalled = vec![]; + let mut uninstalled = IndexSet::::default(); let mut errors = vec![]; while let Some((key, result)) = tasks.next().await { if let Err(err) = result { errors.push((key.clone(), anyhow::Error::new(err))); } else { - uninstalled.push(key.clone()); + uninstalled.insert(key.clone()); } } @@ -218,29 +220,86 @@ async fn do_uninstall( uv_python::windows_registry::remove_orphan_registry_entries(&installed_installations); } + // Read all existing managed installations and find the highest installed patch + // for each installed minor version. Ensure the minor version link directory + // is still valid. 
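+    // For example, if 3.12.6 was just uninstalled but 3.12.4 remains, the `3.12` minor-version
+    // link should be repointed at 3.12.4; if no 3.12.x remains, the link itself is removed below.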
+ let uninstalled_minor_versions: IndexSet<_> = uninstalled + .iter() + .map(PythonInstallationMinorVersionKey::ref_cast) + .collect(); + let remaining_installations: Vec<_> = installed_installations + .into_iter() + .filter(|installation| !uninstalled.contains(installation.key())) + .collect(); + + let remaining_minor_versions = + PythonInstallationMinorVersionKey::highest_installations_by_minor_version_key( + remaining_installations.iter(), + ); + + for (_, installation) in remaining_minor_versions + .iter() + .filter(|(minor_version, _)| uninstalled_minor_versions.contains(minor_version)) + { + installation.update_minor_version_link(preview)?; + } + // For each uninstalled installation, check if there are no remaining installations + // for its minor version. If there are none remaining, remove the symlink directory + // (or junction on Windows) if it exists. + for installation in &matching_installations { + if !remaining_minor_versions.contains_key(installation.minor_version_key()) { + if let Some(minor_version_link) = + PythonMinorVersionLink::from_installation(installation, preview) + { + if minor_version_link.exists() { + let result = if cfg!(windows) { + fs_err::remove_dir(minor_version_link.symlink_directory.as_path()) + } else { + fs_err::remove_file(minor_version_link.symlink_directory.as_path()) + }; + if result.is_err() { + return Err(anyhow::anyhow!( + "Failed to remove symlink directory {}", + minor_version_link.symlink_directory.display() + )); + } + let symlink_term = if cfg!(windows) { + "junction" + } else { + "symlink directory" + }; + debug!( + "Removed {}: {}", + symlink_term, + minor_version_link.symlink_directory.to_string_lossy() + ); + } + } + } + } + // Report on any uninstalled installations. - if !uninstalled.is_empty() { - if let [uninstalled] = uninstalled.as_slice() { + if let Some(first_uninstalled) = uninstalled.first() { + if uninstalled.len() == 1 { // Ex) "Uninstalled Python 3.9.7 in 1.68s" writeln!( printer.stderr(), "{}", format!( "Uninstalled {} {}", - format!("Python {}", uninstalled.version()).bold(), + format!("Python {}", first_uninstalled.version()).bold(), format!("in {}", elapsed(start.elapsed())).dimmed() ) .dimmed() )?; } else { // Ex) "Uninstalled 2 versions in 1.68s" - let s = if uninstalled.len() == 1 { "" } else { "s" }; writeln!( printer.stderr(), "{}", format!( "Uninstalled {} {}", - format!("{} version{s}", uninstalled.len()).bold(), + format!("{} versions", uninstalled.len()).bold(), format!("in {}", elapsed(start.elapsed())).dimmed() ) .dimmed() diff --git a/crates/uv/src/commands/reporters.rs b/crates/uv/src/commands/reporters.rs index ed0f11bc6..3943c941e 100644 --- a/crates/uv/src/commands/reporters.rs +++ b/crates/uv/src/commands/reporters.rs @@ -8,8 +8,6 @@ use std::time::Duration; use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; use owo_colors::OwoColorize; use rustc_hash::FxHashMap; -use url::Url; -use uv_redacted::redacted_url; use crate::commands::human_readable_bytes; use crate::printer::Printer; @@ -20,6 +18,7 @@ use uv_distribution_types::{ use uv_normalize::PackageName; use uv_pep440::Version; use uv_python::PythonInstallationKey; +use uv_redacted::DisplaySafeUrl; use uv_static::EnvVars; /// Since downloads, fetches and builds run in parallel, their message output order is @@ -359,8 +358,7 @@ impl ProgressReporter { self.on_request_start(Direction::Upload, name, size) } - fn on_checkout_start(&self, url: &Url, rev: &str) -> usize { - let url = redacted_url(url); + fn on_checkout_start(&self, url: 
&DisplaySafeUrl, rev: &str) -> usize { let ProgressMode::Multi { multi_progress, state, @@ -390,8 +388,7 @@ impl ProgressReporter { id } - fn on_checkout_complete(&self, url: &Url, rev: &str, id: usize) { - let url = redacted_url(url); + fn on_checkout_complete(&self, url: &DisplaySafeUrl, rev: &str, id: usize) { let ProgressMode::Multi { state, multi_progress, @@ -481,11 +478,11 @@ impl uv_installer::PrepareReporter for PrepareReporter { self.reporter.on_download_complete(id); } - fn on_checkout_start(&self, url: &Url, rev: &str) -> usize { + fn on_checkout_start(&self, url: &DisplaySafeUrl, rev: &str) -> usize { self.reporter.on_checkout_start(url, rev) } - fn on_checkout_complete(&self, url: &Url, rev: &str, id: usize) { + fn on_checkout_complete(&self, url: &DisplaySafeUrl, rev: &str, id: usize) { self.reporter.on_checkout_complete(url, rev, id); } } @@ -545,11 +542,11 @@ impl uv_resolver::ResolverReporter for ResolverReporter { self.reporter.on_build_complete(source, id); } - fn on_checkout_start(&self, url: &Url, rev: &str) -> usize { + fn on_checkout_start(&self, url: &DisplaySafeUrl, rev: &str) -> usize { self.reporter.on_checkout_start(url, rev) } - fn on_checkout_complete(&self, url: &Url, rev: &str, id: usize) { + fn on_checkout_complete(&self, url: &DisplaySafeUrl, rev: &str, id: usize) { self.reporter.on_checkout_complete(url, rev, id); } @@ -587,11 +584,11 @@ impl uv_distribution::Reporter for ResolverReporter { self.reporter.on_download_complete(id); } - fn on_checkout_start(&self, url: &Url, rev: &str) -> usize { + fn on_checkout_start(&self, url: &DisplaySafeUrl, rev: &str) -> usize { self.reporter.on_checkout_start(url, rev) } - fn on_checkout_complete(&self, url: &Url, rev: &str, id: usize) { + fn on_checkout_complete(&self, url: &DisplaySafeUrl, rev: &str, id: usize) { self.reporter.on_checkout_complete(url, rev, id); } } diff --git a/crates/uv/src/commands/self_update.rs b/crates/uv/src/commands/self_update.rs index 00741f725..4b4fd4830 100644 --- a/crates/uv/src/commands/self_update.rs +++ b/crates/uv/src/commands/self_update.rs @@ -10,6 +10,7 @@ use uv_fs::Simplified; use crate::commands::ExitStatus; use crate::printer::Printer; +use crate::settings::NetworkSettings; /// Attempt to update the uv binary. pub(crate) async fn self_update( @@ -17,7 +18,23 @@ pub(crate) async fn self_update( token: Option, dry_run: bool, printer: Printer, + network_settings: NetworkSettings, ) -> Result { + if network_settings.connectivity.is_offline() { + writeln!( + printer.stderr(), + "{}", + format_args!( + concat!( + "{}{} Self-update is not possible because network connectivity is disabled (i.e., with `--offline`)" + ), + "error".red().bold(), + ":".bold() + ) + )?; + return Ok(ExitStatus::Failure); + } + let mut updater = AxoUpdater::new_for("uv"); updater.disable_installer_output(); @@ -46,6 +63,14 @@ pub(crate) async fn self_update( return Ok(ExitStatus::Error); }; + // If we know what our version is, ignore whatever the receipt thinks it is! + // This makes us behave better if someone manually installs a random version of uv + // in a way that doesn't update the receipt. + if let Ok(version) = env!("CARGO_PKG_VERSION").parse() { + // This is best-effort, it's fine if it fails (also it can't actually fail) + let _ = updater.set_current_version(version); + } + // Ensure the receipt is for the current binary. If it's not, then the user likely has multiple // uv binaries installed, and the current binary was _not_ installed via the standalone // installation scripts. 
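Editor's note: the self_update.rs hunk above adds two behavioral guards: the command now fails fast when connectivity is forced offline, and it prefers the version compiled into the binary over whatever the install receipt records. The following is a rough, self-contained sketch of that control flow only; `Connectivity` and `Receipt` are stand-in types for illustration, not uv's actual implementation.

// Illustrative sketch, not part of the diff.
#[derive(PartialEq)]
enum Connectivity {
    Online,
    Offline,
}

struct Receipt {
    version: String, // version recorded by the installer's receipt file
}

fn effective_current_version(receipt: &Receipt) -> String {
    // Best-effort: prefer the version baked in at compile time so a manually
    // installed binary with a stale receipt still compares against itself.
    let compiled = env!("CARGO_PKG_VERSION");
    if compiled.is_empty() {
        receipt.version.clone()
    } else {
        compiled.to_string()
    }
}

fn self_update(connectivity: &Connectivity, receipt: &Receipt) -> Result<(), String> {
    if *connectivity == Connectivity::Offline {
        // Mirrors the new early return: no update attempt while `--offline` is in effect.
        return Err("self-update is not possible while network connectivity is disabled".into());
    }
    println!(
        "would check for releases newer than {}",
        effective_current_version(receipt)
    );
    Ok(())
}

fn main() {
    let receipt = Receipt { version: "0.0.0".to_string() };
    assert!(self_update(&Connectivity::Offline, &receipt).is_err());
    self_update(&Connectivity::Online, &receipt).unwrap();
}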
diff --git a/crates/uv/src/commands/tool/common.rs b/crates/uv/src/commands/tool/common.rs index 77aba8619..ffc1b5645 100644 --- a/crates/uv/src/commands/tool/common.rs +++ b/crates/uv/src/commands/tool/common.rs @@ -7,6 +7,7 @@ use std::{collections::BTreeSet, ffi::OsString}; use tracing::{debug, warn}; use uv_cache::Cache; use uv_client::BaseClientBuilder; +use uv_configuration::PreviewMode; use uv_distribution_types::Requirement; use uv_distribution_types::{InstalledDist, Name}; use uv_fs::Simplified; @@ -80,6 +81,7 @@ pub(crate) async fn refine_interpreter( python_preference: PythonPreference, python_downloads: PythonDownloads, cache: &Cache, + preview: PreviewMode, ) -> anyhow::Result, ProjectError> { let pip::operations::Error::Resolve(uv_resolver::ResolveError::NoSolution(no_solution_err)) = err @@ -151,6 +153,7 @@ pub(crate) async fn refine_interpreter( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); @@ -158,14 +161,18 @@ pub(crate) async fn refine_interpreter( Ok(Some(interpreter)) } -/// Installs tool executables for a given package and handles any conflicts. -pub(crate) fn install_executables( +/// Finalizes a tool installation, after creation of an environment. +/// +/// Installs tool executables for a given package, handling any conflicts. +/// +/// Adds a receipt for the tool. +pub(crate) fn finalize_tool_install( environment: &PythonEnvironment, name: &PackageName, installed_tools: &InstalledTools, options: ToolOptions, force: bool, - python: Option, + python: Option, requirements: Vec, constraints: Vec, overrides: Vec, @@ -211,7 +218,7 @@ pub(crate) fn install_executables( if target_entry_points.is_empty() { writeln!( printer.stdout(), - "No executables are provided by `{from}`", + "No executables are provided by package `{from}`; removing tool", from = name.cyan() )?; @@ -347,7 +354,9 @@ fn hint_executable_from_dependency( let command = format!("uv tool install {}", package.name()); writeln!( printer.stdout(), - "However, an executable with the name `{}` is available via dependency `{}`.\nDid you mean `{}`?", + "{}{} An executable with the name `{}` is available via dependency `{}`.\n Did you mean `{}`?", + "hint".bold().cyan(), + ":".bold(), name.cyan(), package.name().cyan(), command.bold(), @@ -356,7 +365,9 @@ fn hint_executable_from_dependency( packages => { writeln!( printer.stdout(), - "However, an executable with the name `{}` is available via the following dependencies::", + "{}{} An executable with the name `{}` is available via the following dependencies::", + "hint".bold().cyan(), + ":".bold(), name.cyan(), )?; @@ -365,7 +376,7 @@ fn hint_executable_from_dependency( } writeln!( printer.stdout(), - "Did you mean to install one of them instead?" + " Did you mean to install one of them instead?" 
)?; } } diff --git a/crates/uv/src/commands/tool/install.rs b/crates/uv/src/commands/tool/install.rs index bf05142f9..27f18abe4 100644 --- a/crates/uv/src/commands/tool/install.rs +++ b/crates/uv/src/commands/tool/install.rs @@ -33,7 +33,9 @@ use crate::commands::project::{ EnvironmentSpecification, PlatformState, ProjectError, resolve_environment, resolve_names, sync_environment, update_environment, }; -use crate::commands::tool::common::{install_executables, refine_interpreter, remove_entrypoints}; +use crate::commands::tool::common::{ + finalize_tool_install, refine_interpreter, remove_entrypoints, +}; use crate::commands::tool::{Target, ToolRequest}; use crate::commands::{diagnostics, reporters::PythonDownloadReporter}; use crate::printer::Printer; @@ -64,6 +66,7 @@ pub(crate) async fn install( preview: PreviewMode, ) -> Result { let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -85,6 +88,7 @@ pub(crate) async fn install( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); @@ -94,12 +98,13 @@ pub(crate) async fn install( let workspace_cache = WorkspaceCache::default(); let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); // Parse the input requirement. - let request = ToolRequest::parse(&package, from.as_deref()); + let request = ToolRequest::parse(&package, from.as_deref())?; // If the user passed, e.g., `ruff@latest`, refresh the cache. let cache = if request.is_latest() { @@ -109,9 +114,12 @@ pub(crate) async fn install( }; // Resolve the `--from` requirement. - let from = match &request.target { + let from = match &request { // Ex) `ruff` - Target::Unspecified(from) => { + ToolRequest::Package { + executable, + target: Target::Unspecified(from), + } => { let source = if editable { RequirementsSource::from_editable(from)? } else { @@ -122,7 +130,7 @@ pub(crate) async fn install( .requirements; // If the user provided an executable name, verify that it matches the `--from` requirement. - let executable = if let Some(executable) = request.executable { + let executable = if let Some(executable) = executable { let Ok(executable) = PackageName::from_str(executable) else { bail!( "Package requirement (`{from}`) provided with `--from` conflicts with install request (`{executable}`)", @@ -165,7 +173,10 @@ pub(crate) async fn install( requirement } // Ex) `ruff@0.6.0` - Target::Version(.., name, extras, version) => { + ToolRequest::Package { + target: Target::Version(.., name, extras, version), + .. + } => { if editable { bail!("`--editable` is only supported for local packages"); } @@ -186,7 +197,10 @@ pub(crate) async fn install( } } // Ex) `ruff@latest` - Target::Latest(.., name, extras) => { + ToolRequest::Package { + target: Target::Latest(.., name, extras), + .. + } => { if editable { bail!("`--editable` is only supported for local packages"); } @@ -204,16 +218,16 @@ pub(crate) async fn install( origin: None, } } + // Ex) `python` + ToolRequest::Python { .. } => { + return Err(anyhow::anyhow!( + "Cannot install Python with `{}`. 
Did you mean to use `{}`?", + "uv tool install".cyan(), + "uv python install".cyan(), + )); + } }; - if from.name.as_str().eq_ignore_ascii_case("python") { - return Err(anyhow::anyhow!( - "Cannot install Python with `{}`. Did you mean to use `{}`?", - "uv tool install".cyan(), - "uv python install".cyan(), - )); - } - // If the user passed, e.g., `ruff@latest`, we need to mark it as upgradable. let settings = if request.is_latest() { ResolverInstallerSettings { @@ -465,6 +479,7 @@ pub(crate) async fn install( let resolution = resolve_environment( spec.clone(), &interpreter, + Constraints::from_requirements(build_constraints.iter().cloned()), &settings.resolver, &network_settings, &state, @@ -497,6 +512,7 @@ pub(crate) async fn install( python_preference, python_downloads, &cache, + preview, ) .await .ok() @@ -517,6 +533,7 @@ pub(crate) async fn install( match resolve_environment( spec, &interpreter, + Constraints::from_requirements(build_constraints.iter().cloned()), &settings.resolver, &network_settings, &state, @@ -543,7 +560,7 @@ pub(crate) async fn install( }, }; - let environment = installed_tools.create_environment(&from.name, interpreter)?; + let environment = installed_tools.create_environment(&from.name, interpreter, preview)?; // At this point, we removed any existing environment, so we should remove any of its // executables. @@ -583,13 +600,13 @@ pub(crate) async fn install( } }; - install_executables( + finalize_tool_install( &environment, &from.name, &installed_tools, options, force || invalid_tool_receipt, - python, + python_request, requirements, constraints, overrides, diff --git a/crates/uv/src/commands/tool/list.rs b/crates/uv/src/commands/tool/list.rs index 4def9cfe4..21b16b020 100644 --- a/crates/uv/src/commands/tool/list.rs +++ b/crates/uv/src/commands/tool/list.rs @@ -13,10 +13,12 @@ use crate::commands::ExitStatus; use crate::printer::Printer; /// List installed tools. 
+#[allow(clippy::fn_params_excessive_bools)] pub(crate) async fn list( show_paths: bool, show_version_specifiers: bool, show_with: bool, + show_extras: bool, cache: &Cache, printer: Printer, ) -> Result { @@ -80,6 +82,21 @@ pub(crate) async fn list( }) .unwrap_or_default(); + let extra_requirements = show_extras + .then(|| { + tool.requirements() + .iter() + .filter(|req| req.name == name) + .flat_map(|req| req.extras.iter()) // Flatten the extras from all matching requirements + .peekable() + }) + .take_if(|extras| extras.peek().is_some()) + .map(|extras| { + let extras_str = extras.map(ToString::to_string).join(", "); + format!(" [extras: {extras_str}]") + }) + .unwrap_or_default(); + let with_requirements = show_with .then(|| { tool.requirements() @@ -100,14 +117,20 @@ pub(crate) async fn list( writeln!( printer.stdout(), "{} ({})", - format!("{name} v{version}{version_specifier}{with_requirements}").bold(), + format!( + "{name} v{version}{version_specifier}{extra_requirements}{with_requirements}" + ) + .bold(), installed_tools.tool_dir(&name).simplified_display().cyan(), )?; } else { writeln!( printer.stdout(), "{}", - format!("{name} v{version}{version_specifier}{with_requirements}").bold() + format!( + "{name} v{version}{version_specifier}{extra_requirements}{with_requirements}" + ) + .bold() )?; } diff --git a/crates/uv/src/commands/tool/mod.rs b/crates/uv/src/commands/tool/mod.rs index c690136e5..474f27d89 100644 --- a/crates/uv/src/commands/tool/mod.rs +++ b/crates/uv/src/commands/tool/mod.rs @@ -4,6 +4,7 @@ use tracing::debug; use uv_normalize::{ExtraName, PackageName}; use uv_pep440::Version; +use uv_python::PythonRequest; mod common; pub(crate) mod dir; @@ -16,44 +17,60 @@ pub(crate) mod upgrade; /// A request to run or install a tool (e.g., `uvx ruff@latest`). #[derive(Debug, Clone, PartialEq, Eq)] -pub(crate) struct ToolRequest<'a> { - /// The executable name (e.g., `ruff`), if specified explicitly. - pub(crate) executable: Option<&'a str>, - /// The target to install or run (e.g., `ruff@latest` or `ruff==0.6.0`). - pub(crate) target: Target<'a>, +pub(crate) enum ToolRequest<'a> { + // Running the interpreter directly e.g. `uvx python` or `uvx pypy@3.8` + Python { + /// The executable name (e.g., `bash`), if the interpreter was given via --from. + executable: Option<&'a str>, + // The interpreter to install or run (e.g., `python@3.8` or `pypy311`. + request: PythonRequest, + }, + // Running a Python package + Package { + /// The executable name (e.g., `ruff`), if the target was given via --from. + executable: Option<&'a str>, + /// The target to install or run (e.g., `ruff@latest` or `ruff==0.6.0`). + target: Target<'a>, + }, } impl<'a> ToolRequest<'a> { /// Parse a tool request into an executable name and a target. - pub(crate) fn parse(command: &'a str, from: Option<&'a str>) -> Self { - if let Some(from) = from { - let target = Target::parse(from); - Self { - executable: Some(command), - target, - } - } else { - let target = Target::parse(command); - Self { - executable: None, - target, - } - } - } - - /// Returns whether the target package is Python. - pub(crate) fn is_python(&self) -> bool { - let name = match self.target { - Target::Unspecified(name) => name, - Target::Version(name, ..) => name, - Target::Latest(name, ..) => name, + pub(crate) fn parse(command: &'a str, from: Option<&'a str>) -> anyhow::Result { + // If --from is used, the command could be an arbitrary binary in the PATH (e.g. `bash`), + // and we don't try to parse it. 
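+    // For example, `uvx --from httpie http` parses `httpie` as the target and treats `http`
+    // as the executable to run, while a bare `uvx ruff@latest` parses the command itself.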
+ let (component_to_parse, executable) = match from { + Some(from) => (from, Some(command)), + None => (command, None), }; - name.eq_ignore_ascii_case("python") || cfg!(windows) && name.eq_ignore_ascii_case("pythonw") + + // First try parsing the command as a Python interpreter, like `python`, `python39`, or + // `pypy@39`. `pythonw` is also allowed on Windows. This overlaps with how `--python` flag + // values are parsed, but see `PythonRequest::parse` vs `PythonRequest::try_from_tool_name` + // for the differences. + if let Some(python_request) = PythonRequest::try_from_tool_name(component_to_parse)? { + Ok(Self::Python { + request: python_request, + executable, + }) + } else { + // Otherwise the command is a Python package, like `ruff` or `ruff@0.6.0`. + Ok(Self::Package { + target: Target::parse(component_to_parse), + executable, + }) + } } /// Returns `true` if the target is `latest`. pub(crate) fn is_latest(&self) -> bool { - matches!(self.target, Target::Latest(..)) + matches!( + self, + Self::Package { + target: Target::Latest(..), + .. + } + ) } } diff --git a/crates/uv/src/commands/tool/run.rs b/crates/uv/src/commands/tool/run.rs index 42f9e99a5..f6b79774c 100644 --- a/crates/uv/src/commands/tool/run.rs +++ b/crates/uv/src/commands/tool/run.rs @@ -29,7 +29,6 @@ use uv_installer::{SatisfiesResult, SitePackages}; use uv_normalize::PackageName; use uv_pep440::{VersionSpecifier, VersionSpecifiers}; use uv_pep508::MarkerTree; -use uv_python::VersionRequest; use uv_python::{ EnvironmentPreference, PythonDownloads, PythonEnvironment, PythonInstallation, PythonPreference, PythonRequest, @@ -43,6 +42,7 @@ use uv_warnings::warn_user; use uv_warnings::warn_user_once; use uv_workspace::WorkspaceCache; +use crate::child::run_to_completion; use crate::commands::ExitStatus; use crate::commands::pip::loggers::{ DefaultInstallLogger, DefaultResolveLogger, SummaryInstallLogger, SummaryResolveLogger, @@ -52,7 +52,6 @@ use crate::commands::project::{ EnvironmentSpecification, PlatformState, ProjectError, resolve_names, }; use crate::commands::reporters::PythonDownloadReporter; -use crate::commands::run::run_to_completion; use crate::commands::tool::common::{matching_packages, refine_interpreter}; use crate::commands::tool::{Target, ToolRequest}; use crate::commands::{diagnostics, project::environment::CachedEnvironment}; @@ -249,7 +248,7 @@ pub(crate) async fn run( } } - let request = ToolRequest::parse(target, from.as_deref()); + let request = ToolRequest::parse(target, from.as_deref())?; // If the user passed, e.g., `ruff@latest`, refresh the cache. let cache = if request.is_latest() { @@ -322,7 +321,7 @@ pub(crate) async fn run( // Check if the provided command is not part of the executables for the `from` package, // and if it's provided by another package in the environment. let provider_hints = match &from { - ToolRequirement::Python => None, + ToolRequirement::Python { .. } => None, ToolRequirement::Package { requirement, .. } => Some(ExecutableProviderHints::new( executable, requirement, @@ -637,7 +636,9 @@ impl std::fmt::Display for ExecutableProviderHints<'_> { #[derive(Debug)] #[allow(clippy::large_enum_variant)] pub(crate) enum ToolRequirement { - Python, + Python { + executable: String, + }, Package { executable: String, requirement: Requirement, @@ -647,7 +648,7 @@ pub(crate) enum ToolRequirement { impl ToolRequirement { fn executable(&self) -> &str { match self { - ToolRequirement::Python => "python", + ToolRequirement::Python { executable, .. 
} => executable, ToolRequirement::Package { executable, .. } => executable, } } @@ -656,7 +657,7 @@ impl ToolRequirement { impl std::fmt::Display for ToolRequirement { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - ToolRequirement::Python => write!(f, "python"), + ToolRequirement::Python { .. } => write!(f, "python"), ToolRequirement::Package { requirement, .. } => write!(f, "{requirement}"), } } @@ -689,42 +690,50 @@ async fn get_or_create_environment( preview: PreviewMode, ) -> Result<(ToolRequirement, PythonEnvironment), ProjectError> { let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); let reporter = PythonDownloadReporter::single(printer); - // Check if the target is `python` - let python_request = if request.is_python() { - let target_request = match &request.target { - Target::Unspecified(_) => None, - Target::Version(_, _, _, version) => Some(PythonRequest::Version( - VersionRequest::from_str(&version.to_string()).map_err(anyhow::Error::from)?, - )), - // TODO(zanieb): Add `PythonRequest::Latest` - Target::Latest(_, _, _) => { - return Err(anyhow::anyhow!( - "Requesting the 'latest' Python version is not yet supported" - ) - .into()); - } - }; + // Figure out what Python we're targeting, either explicitly like `uvx python@3`, or via the + // -p/--python flag. + let python_request = match request { + ToolRequest::Python { + request: tool_python_request, + .. + } => { + match python { + None => Some(tool_python_request.clone()), - if let Some(target_request) = &target_request { - if let Some(python) = python { - return Err(anyhow::anyhow!( - "Received multiple Python version requests: `{}` and `{}`", - python.to_string().cyan(), - target_request.to_canonical_string().cyan(), - ) - .into()); + // The user is both invoking a python interpreter directly and also supplying the + // -p/--python flag. Cases like `uvx -p pypy python` are allowed, for two reasons: + // 1) Previously this was the only way to invoke e.g. PyPy via `uvx`, and it's nice + // to remain compatible with that. 2) A script might define an alias like `uvx + // --python $MY_PYTHON ...`, and it's nice to be able to run the interpreter + // directly while sticking to that alias. + // + // However, we want to error out if we see conflicting or redundant versions like + // `uvx -p python38 python39`. + // + // Note that a command like `uvx default` doesn't bring us here. ToolRequest::parse + // returns ToolRequest::Package rather than ToolRequest::Python in that case. See + // PythonRequest::try_from_tool_name. + Some(python_flag) => { + if tool_python_request != &PythonRequest::Default { + return Err(anyhow::anyhow!( + "Received multiple Python version requests: `{}` and `{}`", + python_flag.to_string().cyan(), + tool_python_request.to_canonical_string().cyan() + ) + .into()); + } + Some(PythonRequest::parse(python_flag)) + } } } - - target_request.or_else(|| python.map(PythonRequest::parse)) - } else { - python.map(PythonRequest::parse) + ToolRequest::Package { .. } => python.map(PythonRequest::parse), }; // Discover an interpreter. @@ -739,6 +748,7 @@ async fn get_or_create_environment( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? 
.into_interpreter(); @@ -747,117 +757,112 @@ async fn get_or_create_environment( let state = PlatformState::default(); let workspace_cache = WorkspaceCache::default(); - let from = if request.is_python() { - ToolRequirement::Python - } else { - let (executable, requirement) = match &request.target { - // Ex) `ruff>=0.6.0` - Target::Unspecified(requirement) => { - let spec = RequirementsSpecification::parse_package(requirement)?; + let from = match request { + ToolRequest::Python { + executable: request_executable, + .. + } => ToolRequirement::Python { + executable: request_executable.unwrap_or("python").to_string(), + }, + ToolRequest::Package { + executable: request_executable, + target, + } => { + let (executable, requirement) = match target { + // Ex) `ruff>=0.6.0` + Target::Unspecified(requirement) => { + let spec = RequirementsSpecification::parse_package(requirement)?; - // Extract the verbatim executable name, if possible. - let name = match &spec.requirement { - UnresolvedRequirement::Named(..) => { - // Identify the package name from the PEP 508 specifier. - // - // For example, given `ruff>=0.6.0`, extract `ruff`, to use as the executable name. - let content = requirement.trim(); - let index = content - .find(|c| !matches!(c, 'A'..='Z' | 'a'..='z' | '0'..='9' | '-' | '_' | '.')) - .unwrap_or(content.len()); - Some(&content[..index]) - } - UnresolvedRequirement::Unnamed(..) => None, - }; + // Extract the verbatim executable name, if possible. + let name = match &spec.requirement { + UnresolvedRequirement::Named(..) => { + // Identify the package name from the PEP 508 specifier. + // + // For example, given `ruff>=0.6.0`, extract `ruff`, to use as the executable name. + let content = requirement.trim(); + let index = content + .find(|c| !matches!(c, 'A'..='Z' | 'a'..='z' | '0'..='9' | '-' | '_' | '.')) + .unwrap_or(content.len()); + Some(&content[..index]) + } + UnresolvedRequirement::Unnamed(..) => None, + }; - if let UnresolvedRequirement::Named(requirement) = &spec.requirement { - if requirement.name.as_str() == "python" { - return Err(anyhow::anyhow!( - "Using `{}` is not supported. Use `{}` instead.", - "--from python".cyan(), - "python@".cyan(), - ) - .into()); - } + let requirement = resolve_names( + vec![spec], + &interpreter, + settings, + network_settings, + &state, + concurrency, + cache, + &workspace_cache, + printer, + preview, + ) + .await? + .pop() + .unwrap(); + + // Prefer, in order: + // 1. The verbatim executable provided by the user, independent of the requirement (as in: `uvx --from package executable`). + // 2. The verbatim executable provided by the user as a named requirement (as in: `uvx change_wheel_version`). + // 3. The resolved package name (as in: `uvx git+https://github.com/pallets/flask`). 
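// Illustrative sketch (simplified, not part of the patch): the verbatim executable name is
// the longest leading run of package-name characters, so `ruff>=0.6.0`, `flask[dotenv]==3.0`,
// and a bare `change_wheel_version` all keep their original spelling.
fn verbatim_name(requirement: &str) -> &str {
    let content = requirement.trim();
    let index = content
        .find(|c: char| !matches!(c, 'A'..='Z' | 'a'..='z' | '0'..='9' | '-' | '_' | '.'))
        .unwrap_or(content.len());
    &content[..index]
}

fn main() {
    assert_eq!(verbatim_name("ruff>=0.6.0"), "ruff");
    assert_eq!(verbatim_name(" flask[dotenv]==3.0 "), "flask");
    assert_eq!(verbatim_name("change_wheel_version"), "change_wheel_version");
}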
+ let executable = request_executable + .map(ToString::to_string) + .or_else(|| name.map(ToString::to_string)) + .unwrap_or_else(|| requirement.name.to_string()); + + (executable, requirement) } + // Ex) `ruff@0.6.0` + Target::Version(executable, name, extras, version) => { + let executable = request_executable + .map(ToString::to_string) + .unwrap_or_else(|| (*executable).to_string()); + let requirement = Requirement { + name: name.clone(), + extras: extras.clone(), + groups: Box::new([]), + marker: MarkerTree::default(), + source: RequirementSource::Registry { + specifier: VersionSpecifiers::from(VersionSpecifier::equals_version( + version.clone(), + )), + index: None, + conflict: None, + }, + origin: None, + }; - let requirement = resolve_names( - vec![spec], - &interpreter, - settings, - network_settings, - &state, - concurrency, - cache, - &workspace_cache, - printer, - preview, - ) - .await? - .pop() - .unwrap(); + (executable, requirement) + } + // Ex) `ruff@latest` + Target::Latest(executable, name, extras) => { + let executable = request_executable + .map(ToString::to_string) + .unwrap_or_else(|| (*executable).to_string()); + let requirement = Requirement { + name: name.clone(), + extras: extras.clone(), + groups: Box::new([]), + marker: MarkerTree::default(), + source: RequirementSource::Registry { + specifier: VersionSpecifiers::empty(), + index: None, + conflict: None, + }, + origin: None, + }; - // Prefer, in order: - // 1. The verbatim executable provided by the user, independent of the requirement (as in: `uvx --from package executable`). - // 2. The verbatim executable provided by the user as a named requirement (as in: `uvx change_wheel_version`). - // 3. The resolved package name (as in: `uvx git+https://github.com/pallets/flask`). - let executable = request - .executable - .map(ToString::to_string) - .or_else(|| name.map(ToString::to_string)) - .unwrap_or_else(|| requirement.name.to_string()); + (executable, requirement) + } + }; - (executable, requirement) + ToolRequirement::Package { + executable, + requirement, } - // Ex) `ruff@0.6.0` - Target::Version(executable, name, extras, version) => { - let executable = request - .executable - .map(ToString::to_string) - .unwrap_or_else(|| (*executable).to_string()); - let requirement = Requirement { - name: name.clone(), - extras: extras.clone(), - groups: Box::new([]), - marker: MarkerTree::default(), - source: RequirementSource::Registry { - specifier: VersionSpecifiers::from(VersionSpecifier::equals_version( - version.clone(), - )), - index: None, - conflict: None, - }, - origin: None, - }; - - (executable, requirement) - } - // Ex) `ruff@latest` - Target::Latest(executable, name, extras) => { - let executable = request - .executable - .map(ToString::to_string) - .unwrap_or_else(|| (*executable).to_string()); - let requirement = Requirement { - name: name.clone(), - extras: extras.clone(), - groups: Box::new([]), - marker: MarkerTree::default(), - source: RequirementSource::Registry { - specifier: VersionSpecifiers::empty(), - index: None, - conflict: None, - }, - origin: None, - }; - - (executable, requirement) - } - }; - - ToolRequirement::Package { - executable, - requirement, } }; @@ -875,7 +880,7 @@ async fn get_or_create_environment( let requirements = { let mut requirements = Vec::with_capacity(1 + with.len()); match &from { - ToolRequirement::Python => {} + ToolRequirement::Python { .. } => {} ToolRequirement::Package { requirement, .. 
} => requirements.push(requirement.clone()), } requirements.extend( @@ -1033,6 +1038,7 @@ async fn get_or_create_environment( python_preference, python_downloads, cache, + preview, ) .await .ok() diff --git a/crates/uv/src/commands/tool/upgrade.rs b/crates/uv/src/commands/tool/upgrade.rs index 9f4d3bcab..9d2d32a21 100644 --- a/crates/uv/src/commands/tool/upgrade.rs +++ b/crates/uv/src/commands/tool/upgrade.rs @@ -29,7 +29,7 @@ use crate::commands::project::{ }; use crate::commands::reporters::PythonDownloadReporter; use crate::commands::tool::common::remove_entrypoints; -use crate::commands::{ExitStatus, conjunction, tool::common::install_executables}; +use crate::commands::{ExitStatus, conjunction, tool::common::finalize_tool_install}; use crate::printer::Printer; use crate::settings::{NetworkSettings, ResolverInstallerSettings}; @@ -80,6 +80,7 @@ pub(crate) async fn upgrade( let reporter = PythonDownloadReporter::single(printer); let client_builder = BaseClientBuilder::new() + .retries_from_env()? .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) .allow_insecure_host(network_settings.allow_insecure_host.clone()); @@ -99,6 +100,7 @@ pub(crate) async fn upgrade( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(), @@ -297,6 +299,7 @@ async fn upgrade_tool( let resolution = resolve_environment( spec.into(), interpreter, + build_constraints.clone(), &settings.resolver, network_settings, &state, @@ -308,7 +311,7 @@ async fn upgrade_tool( ) .await?; - let environment = installed_tools.create_environment(name, interpreter.clone())?; + let environment = installed_tools.create_environment(name, interpreter.clone(), preview)?; let environment = sync_environment( environment, @@ -375,7 +378,7 @@ async fn upgrade_tool( remove_entrypoints(&existing_tool_receipt); // If we modified the target tool, reinstall the entrypoints. - install_executables( + finalize_tool_install( &environment, name, installed_tools, diff --git a/crates/uv/src/commands/venv.rs b/crates/uv/src/commands/venv.rs index c0cf03921..6d6e15758 100644 --- a/crates/uv/src/commands/venv.rs +++ b/crates/uv/src/commands/venv.rs @@ -13,14 +13,15 @@ use thiserror::Error; use uv_cache::Cache; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ - BuildOptions, Concurrency, ConfigSettings, Constraints, IndexStrategy, KeyringProviderType, - NoBinary, NoBuild, PreviewMode, SourceStrategy, + BuildOptions, Concurrency, ConfigSettings, Constraints, DependencyGroups, IndexStrategy, + KeyringProviderType, NoBinary, NoBuild, PreviewMode, SourceStrategy, }; use uv_dispatch::{BuildDispatch, SharedState}; use uv_distribution_types::Requirement; use uv_distribution_types::{DependencyMetadata, Index, IndexLocations}; use uv_fs::Simplified; use uv_install_wheel::LinkMode; +use uv_normalize::DefaultGroups; use uv_python::{ EnvironmentPreference, PythonDownloads, PythonInstallation, PythonPreference, PythonRequest, }; @@ -39,12 +40,14 @@ use crate::commands::reporters::PythonDownloadReporter; use crate::printer::Printer; use crate::settings::NetworkSettings; +use super::project::default_dependency_groups; + /// Create a virtual environment. 
#[allow(clippy::unnecessary_wraps, clippy::fn_params_excessive_bools)] pub(crate) async fn venv( project_dir: &Path, path: Option, - python_request: Option<&str>, + python_request: Option, install_mirrors: PythonInstallMirrors, python_preference: PythonPreference, python_downloads: PythonDownloads, @@ -127,7 +130,7 @@ enum VenvError { async fn venv_impl( project_dir: &Path, path: Option, - python_request: Option<&str>, + python_request: Option, install_mirrors: PythonInstallMirrors, link_mode: LinkMode, index_locations: &IndexLocations, @@ -190,6 +193,9 @@ async fn venv_impl( .unwrap_or(PathBuf::from(".venv")), ); + // TODO(zanieb): We don't use [`BaseClientBuilder::retries_from_env`] here because it's a pain + // to map into a miette diagnostic. We should just remove miette diagnostics here, we're not + // using them elsewhere. let client_builder = BaseClientBuilder::default() .connectivity(network_settings.connectivity) .native_tls(network_settings.native_tls) @@ -197,13 +203,21 @@ async fn venv_impl( let reporter = PythonDownloadReporter::single(printer); + // If the default dependency-groups demand a higher requires-python + // we should bias an empty venv to that to avoid churn. + let default_groups = match &project { + Some(project) => default_dependency_groups(project.pyproject_toml()).into_diagnostic()?, + None => DefaultGroups::default(), + }; + let groups = DependencyGroups::default().with_defaults(default_groups); let WorkspacePython { source, python_request, requires_python, } = WorkspacePython::from_request( - python_request.map(PythonRequest::parse), + python_request, project.as_ref().map(VirtualProject::workspace), + &groups, project_dir, no_config, ) @@ -223,6 +237,7 @@ async fn venv_impl( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await .into_diagnostic()?; @@ -230,22 +245,14 @@ async fn venv_impl( python.into_interpreter() }; - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Check if the discovered Python version is incompatible with the current workspace if let Some(requires_python) = requires_python { match validate_project_requires_python( &interpreter, project.as_ref().map(VirtualProject::workspace), + &groups, &requires_python, &source, ) { @@ -264,6 +271,11 @@ async fn venv_impl( ) .into_diagnostic()?; + let upgradeable = preview.is_enabled() + && python_request + .as_ref() + .is_none_or(|request| !request.includes_patch()); + // Create the virtual environment. 
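// Illustrative sketch (hypothetical encoding of the request, simplified from the `upgradeable`
// check above): `Option::is_none_or` is true for `None` and otherwise applies the predicate, so
// the new environment counts as upgradeable when preview mode is enabled and either no Python
// was requested or the request leaves the patch version open (`3.12` rather than `3.12.4`).
fn upgradeable(preview_enabled: bool, requested_patch: Option<Option<u8>>) -> bool {
    // `None` = no -p/--python request, `Some(None)` = request without a patch version,
    // `Some(Some(p))` = request pinned to a patch release.
    preview_enabled && requested_patch.is_none_or(|patch| patch.is_none())
}

fn main() {
    assert!(upgradeable(true, None)); // `uv venv`
    assert!(upgradeable(true, Some(None))); // `uv venv -p 3.12`
    assert!(!upgradeable(true, Some(Some(4)))); // `uv venv -p 3.12.4`
    assert!(!upgradeable(false, None)); // preview disabled
}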
let venv = uv_virtualenv::create_venv( &path, @@ -273,6 +285,8 @@ async fn venv_impl( allow_existing, relocatable, seed, + upgradeable, + preview, ) .map_err(VenvError::Creation)?; diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index 05174a947..84d889599 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -27,7 +27,7 @@ use uv_cli::SelfUpdateArgs; use uv_cli::{ BuildBackendCommand, CacheCommand, CacheNamespace, Cli, Commands, PipCommand, PipNamespace, ProjectCommand, PythonCommand, PythonNamespace, SelfCommand, SelfNamespace, ToolCommand, - ToolNamespace, TopLevelArgs, VersionArgs, compat::CompatArgs, + ToolNamespace, TopLevelArgs, compat::CompatArgs, }; use uv_configuration::min_stack_size; use uv_fs::{CWD, Simplified}; @@ -35,6 +35,7 @@ use uv_fs::{CWD, Simplified}; use uv_pep440::release_specifiers_to_ranges; use uv_pep508::VersionOrUrl; use uv_pypi_types::{ParsedDirectoryUrl, ParsedUrl}; +use uv_python::PythonRequest; use uv_requirements::RequirementsSource; use uv_requirements_txt::RequirementsTxtRequirement; use uv_scripts::{Pep723Error, Pep723Item, Pep723ItemRef, Pep723Metadata, Pep723Script}; @@ -51,6 +52,7 @@ use crate::settings::{ PublishSettings, }; +pub(crate) mod child; pub(crate) mod commands; pub(crate) mod logging; pub(crate) mod printer; @@ -347,9 +349,9 @@ async fn run(mut cli: Cli) -> Result { // Configure the `tracing` crate, which controls internal logging. #[cfg(feature = "tracing-durations-export")] - let (duration_layer, _duration_guard) = logging::setup_duration()?; + let (durations_layer, _duration_guard) = logging::setup_durations()?; #[cfg(not(feature = "tracing-durations-export"))] - let duration_layer = None::; + let durations_layer = None::; logging::setup_logging( match globals.verbose { 0 => logging::Level::Off, @@ -357,7 +359,7 @@ async fn run(mut cli: Cli) -> Result { 2 => logging::Level::TraceUv, 3.. => logging::Level::TraceAll, }, - duration_layer, + durations_layer, globals.color, )?; @@ -399,7 +401,7 @@ async fn run(mut cli: Cli) -> Result { }))?; // Don't initialize the rayon threadpool yet, this is too costly when we're doing a noop sync. 
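// Illustrative sketch (hypothetical static, simplified from the `RAYON_PARALLELISM` store just
// below): the parallelism setting is a write-once configuration cell that later reads consume on
// their own, with no other data that must become visible together with it, so a `Relaxed`
// store/load is sufficient for this pattern and the stronger `SeqCst` ordering adds nothing.
use std::sync::atomic::{AtomicUsize, Ordering};

static PARALLELISM: AtomicUsize = AtomicUsize::new(0);

fn configure(installs: usize) {
    PARALLELISM.store(installs, Ordering::Relaxed);
}

fn worker_threads() -> usize {
    match PARALLELISM.load(Ordering::Relaxed) {
        0 => std::thread::available_parallelism().map_or(1, |n| n.get()),
        n => n,
    }
}

fn main() {
    configure(4);
    assert_eq!(worker_threads(), 4);
}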
- uv_configuration::RAYON_PARALLELISM.store(globals.concurrency.installs, Ordering::SeqCst); + uv_configuration::RAYON_PARALLELISM.store(globals.concurrency.installs, Ordering::Relaxed); debug!("uv {}", uv_cli::version::uv_self_version()); @@ -793,6 +795,7 @@ async fn run(mut cli: Cli) -> Result { &globals.network_settings, args.dry_run, printer, + globals.preview, ) .await } @@ -814,6 +817,7 @@ async fn run(mut cli: Cli) -> Result { args.paths, &cache, printer, + globals.preview, ) } Commands::Pip(PipNamespace { @@ -845,6 +849,7 @@ async fn run(mut cli: Cli) -> Result { args.settings.system, &cache, printer, + globals.preview, ) .await } @@ -866,6 +871,7 @@ async fn run(mut cli: Cli) -> Result { args.files, &cache, printer, + globals.preview, ) } Commands::Pip(PipNamespace { @@ -897,6 +903,7 @@ async fn run(mut cli: Cli) -> Result { args.settings.system, &cache, printer, + globals.preview, ) .await } @@ -915,6 +922,7 @@ async fn run(mut cli: Cli) -> Result { args.settings.system, &cache, printer, + globals.preview, ) } Commands::Cache(CacheNamespace { @@ -1016,10 +1024,13 @@ async fn run(mut cli: Cli) -> Result { } }); + let python_request: Option = + args.settings.python.as_deref().map(PythonRequest::parse); + commands::venv( &project_dir, args.path, - args.settings.python.as_deref(), + python_request, args.settings.install_mirrors, globals.python_preference, globals.python_downloads, @@ -1046,6 +1057,7 @@ async fn run(mut cli: Cli) -> Result { } Commands::Project(project) => { Box::pin(run_project( + cli.top_level.global_args.project.is_some(), project, &project_dir, run_command, @@ -1066,7 +1078,16 @@ async fn run(mut cli: Cli) -> Result { token, dry_run, }), - }) => commands::self_update(target_version, token, dry_run, printer).await, + }) => { + commands::self_update( + target_version, + token, + dry_run, + printer, + globals.network_settings, + ) + .await + } Commands::Self_(SelfNamespace { command: SelfCommand::Version { @@ -1084,33 +1105,6 @@ async fn run(mut cli: Cli) -> Result { is not available. Please use your package manager to update uv." ); } - Commands::Version(VersionArgs { - value, - bump, - dry_run, - short, - output_format, - }) => { - // If they specified any of these flags, they probably don't mean `uv self version` - let strict = cli.top_level.global_args.project.is_some() - || globals.preview.is_enabled() - || dry_run - || bump.is_some() - || value.is_some(); - commands::project_version( - &project_dir, - value, - bump, - dry_run, - short, - output_format, - strict, - &workspace_cache, - printer, - ) - .await - } - Commands::GenerateShellCompletion(args) => { args.shell.generate(&mut Cli::command(), &mut stdout()); Ok(ExitStatus::Success) @@ -1306,6 +1300,7 @@ async fn run(mut cli: Cli) -> Result { args.show_paths, args.show_version_specifiers, args.show_with, + args.show_extras, &cache, printer, ) @@ -1386,6 +1381,7 @@ async fn run(mut cli: Cli) -> Result { globals.python_downloads, &cache, printer, + globals.preview, ) .await } @@ -1395,12 +1391,43 @@ async fn run(mut cli: Cli) -> Result { // Resolve the settings from the command-line arguments and workspace configuration. let args = settings::PythonInstallSettings::resolve(args, filesystem); show_settings!(args); + // TODO(john): If we later want to support `--upgrade`, we need to replace this. 
+ let upgrade = false; commands::python_install( &project_dir, args.install_dir, args.targets, args.reinstall, + upgrade, + args.force, + args.python_install_mirror, + args.pypy_install_mirror, + args.python_downloads_json_url, + globals.network_settings, + args.default, + globals.python_downloads, + cli.top_level.no_config, + globals.preview, + printer, + ) + .await + } + Commands::Python(PythonNamespace { + command: PythonCommand::Upgrade(args), + }) => { + // Resolve the settings from the command-line arguments and workspace configuration. + let args = settings::PythonUpgradeSettings::resolve(args, filesystem); + show_settings!(args); + let reinstall = false; + let upgrade = true; + + commands::python_install( + &project_dir, + args.install_dir, + args.targets, + reinstall, + upgrade, args.force, args.python_install_mirror, args.pypy_install_mirror, @@ -1449,6 +1476,7 @@ async fn run(mut cli: Cli) -> Result { cli.top_level.no_config, &cache, printer, + globals.preview, ) .await } else { @@ -1462,6 +1490,7 @@ async fn run(mut cli: Cli) -> Result { globals.python_preference, &cache, printer, + globals.preview, ) .await } @@ -1480,10 +1509,15 @@ async fn run(mut cli: Cli) -> Result { args.request, args.resolved, globals.python_preference, + globals.python_downloads, args.no_project, args.global, + args.rm, + args.install_mirrors, + globals.network_settings, &cache, printer, + globals.preview, ) .await } @@ -1614,6 +1648,7 @@ async fn run(mut cli: Cli) -> Result { /// Run a [`ProjectCommand`]. async fn run_project( + project_was_explicit: bool, project_command: Box, project_dir: &Path, command: Option, @@ -1715,7 +1750,7 @@ async fn run_project( args.no_project, no_config, args.extras, - args.dev, + args.groups, args.editable, args.modifications, args.python, @@ -1763,11 +1798,12 @@ async fn run_project( args.all_packages, args.package, args.extras, - args.dev, + args.groups, args.editable, args.install_options, args.modifications, args.python, + args.python_platform, args.install_mirrors, globals.python_preference, globals.python_downloads, @@ -1923,6 +1959,7 @@ async fn run_project( args.editable, args.dependency_type, args.raw, + args.bounds, args.indexes, args.rev, args.tag, @@ -1930,6 +1967,7 @@ async fn run_project( args.extras, args.package, args.python, + args.workspace, args.install_mirrors, args.settings, globals.network_settings, @@ -1989,6 +2027,53 @@ async fn run_project( )) .await } + ProjectCommand::Version(args) => { + // Resolve the settings from the command-line arguments and workspace configuration. + let args = settings::VersionSettings::resolve(args, filesystem); + show_settings!(args); + + // Initialize the cache. 
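// Illustrative sketch (hypothetical types, condensed from the dispatch above): both subcommands
// funnel into the same install routine; `uv python install` pins `upgrade = false` (a real
// `--upgrade` flag is left as a TODO), while the new `uv python upgrade` pins
// `reinstall = false, upgrade = true`, so the shared code only branches on the two booleans.
enum PythonCommand {
    Install { reinstall: bool },
    Upgrade,
}

fn python_install(targets: &[&str], reinstall: bool, upgrade: bool) {
    println!("install {targets:?} (reinstall: {reinstall}, upgrade: {upgrade})");
}

fn dispatch(command: PythonCommand, targets: &[&str]) {
    match command {
        PythonCommand::Install { reinstall } => python_install(targets, reinstall, false),
        PythonCommand::Upgrade => python_install(targets, false, true),
    }
}

fn main() {
    dispatch(PythonCommand::Install { reinstall: true }, &["3.12"]);
    dispatch(PythonCommand::Upgrade, &["3.13", "3.12"]);
}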
+ let cache = cache.init()?.with_refresh( + args.refresh + .combine(Refresh::from(args.settings.reinstall.clone())) + .combine(Refresh::from(args.settings.resolver.upgrade.clone())), + ); + + // If they specified any of these flags, they probably don't mean `uv self version` + let strict = project_was_explicit + || globals.preview.is_enabled() + || args.dry_run + || !args.bump.is_empty() + || args.value.is_some() + || args.package.is_some(); + Box::pin(commands::project_version( + args.value, + args.bump, + args.short, + args.output_format, + strict, + project_dir, + args.package, + args.dry_run, + args.locked, + args.frozen, + args.active, + args.no_sync, + args.python, + args.install_mirrors, + args.settings, + globals.network_settings, + globals.python_preference, + globals.python_downloads, + globals.installer_metadata, + globals.concurrency, + no_config, + &cache, + printer, + globals.preview, + )) + .await + } ProjectCommand::Tree(args) => { // Resolve the settings from the command-line arguments and workspace configuration. let args = settings::TreeSettings::resolve(args, filesystem); @@ -2006,7 +2091,7 @@ async fn run_project( Box::pin(commands::tree( project_dir, - args.dev, + args.groups, args.locked, args.frozen, args.universal, @@ -2058,7 +2143,7 @@ async fn run_project( args.install_options, args.output_file, args.extras, - args.dev, + args.groups, args.editable, args.locked, args.frozen, diff --git a/crates/uv/src/logging.rs b/crates/uv/src/logging.rs index 98dad22c3..d09cbc3f9 100644 --- a/crates/uv/src/logging.rs +++ b/crates/uv/src/logging.rs @@ -114,7 +114,7 @@ where /// includes targets and timestamps, along with all `uv=debug` messages by default. pub(crate) fn setup_logging( level: Level, - durations: impl Layer + Send + Sync, + durations_layer: Option + Send + Sync>, color: ColorChoice, ) -> anyhow::Result<()> { // We use directives here to ensure `RUST_LOG` can override them @@ -137,12 +137,14 @@ pub(crate) fn setup_logging( } }; - // Only record our own spans. - let durations_layer = durations.with_filter( - tracing_subscriber::filter::Targets::new() - .with_target("", tracing::level_filters::LevelFilter::INFO), - ); - + // Avoid setting the default log level to INFO + let durations_layer = durations_layer.map(|durations_layer| { + durations_layer.with_filter( + // Only record our own spans + tracing_subscriber::filter::Targets::new() + .with_target("", tracing::level_filters::LevelFilter::INFO), + ) + }); let filter = EnvFilter::builder() .with_default_directive(default_directive) .from_env() @@ -205,7 +207,7 @@ pub(crate) fn setup_logging( /// Setup the `TRACING_DURATIONS_FILE` environment variable to enable tracing durations. 
#[cfg(feature = "tracing-durations-export")] -pub(crate) fn setup_duration() -> anyhow::Result<( +pub(crate) fn setup_durations() -> anyhow::Result<( Option>, Option, )> { diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs index f11ca7834..f89704d45 100644 --- a/crates/uv/src/settings.rs +++ b/crates/uv/src/settings.rs @@ -4,16 +4,15 @@ use std::path::PathBuf; use std::process; use std::str::FromStr; -use url::Url; - use uv_cache::{CacheArgs, Refresh}; use uv_cli::comma::CommaSeparatedRequirements; use uv_cli::{ AddArgs, ColorChoice, ExternalCommand, GlobalArgs, InitArgs, ListFormat, LockArgs, Maybe, PipCheckArgs, PipCompileArgs, PipFreezeArgs, PipInstallArgs, PipListArgs, PipShowArgs, PipSyncArgs, PipTreeArgs, PipUninstallArgs, PythonFindArgs, PythonInstallArgs, PythonListArgs, - PythonListFormat, PythonPinArgs, PythonUninstallArgs, RemoveArgs, RunArgs, SyncArgs, - ToolDirArgs, ToolInstallArgs, ToolListArgs, ToolRunArgs, ToolUninstallArgs, TreeArgs, VenvArgs, + PythonListFormat, PythonPinArgs, PythonUninstallArgs, PythonUpgradeArgs, RemoveArgs, RunArgs, + SyncArgs, ToolDirArgs, ToolInstallArgs, ToolListArgs, ToolRunArgs, ToolUninstallArgs, TreeArgs, + VenvArgs, VersionArgs, VersionBump, VersionFormat, }; use uv_cli::{ AuthorFrom, BuildArgs, ExportArgs, PublishArgs, PythonDirArgs, ResolverInstallerArgs, @@ -34,6 +33,7 @@ use uv_normalize::{PackageName, PipGroupName}; use uv_pep508::{ExtraName, MarkerTree, RequirementOrigin}; use uv_pypi_types::SupportedEnvironments; use uv_python::{Prefix, PythonDownloads, PythonPreference, PythonVersion, Target}; +use uv_redacted::DisplaySafeUrl; use uv_resolver::{ AnnotationStyle, DependencyMode, ExcludeNewer, ForkStrategy, PrereleaseMode, ResolutionMode, }; @@ -45,6 +45,7 @@ use uv_static::EnvVars; use uv_torch::TorchMode; use uv_warnings::warn_user_once; use uv_workspace::pyproject::DependencyType; +use uv_workspace::pyproject_mut::AddBoundsKind; use crate::commands::ToolRunCommand; use crate::commands::{InitKind, InitProjectKind, pip::operations::Modifications}; @@ -53,7 +54,6 @@ use crate::commands::{InitKind, InitProjectKind, pip::operations::Modifications} const PYPI_PUBLISH_URL: &str = "https://upload.pypi.org/legacy/"; /// The resolved global settings to use for any invocation of the CLI. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct GlobalSettings { pub(crate) required_version: Option, @@ -118,16 +118,20 @@ impl GlobalSettings { }, show_settings: args.show_settings, preview: PreviewMode::from( - flag(args.preview, args.no_preview) + flag(args.preview, args.no_preview, "preview") .combine(workspace.and_then(|workspace| workspace.globals.preview)) .unwrap_or(false), ), python_preference, - python_downloads: flag(args.allow_python_downloads, args.no_python_downloads) - .map(PythonDownloads::from) - .combine(env(env::UV_PYTHON_DOWNLOADS)) - .combine(workspace.and_then(|workspace| workspace.globals.python_downloads)) - .unwrap_or_default(), + python_downloads: flag( + args.allow_python_downloads, + args.no_python_downloads, + "python-downloads", + ) + .map(PythonDownloads::from) + .combine(env(env::UV_PYTHON_DOWNLOADS)) + .combine(workspace.and_then(|workspace| workspace.globals.python_downloads)) + .unwrap_or_default(), // Disable the progress bar with `RUST_LOG` to avoid progress fragments interleaving // with log messages. 
no_progress: args.no_progress || std::env::var_os(EnvVars::RUST_LOG).is_some(), @@ -161,7 +165,7 @@ pub(crate) struct NetworkSettings { impl NetworkSettings { pub(crate) fn resolve(args: &GlobalArgs, workspace: Option<&FilesystemOptions>) -> Self { - let connectivity = if flag(args.offline, args.no_offline) + let connectivity = if flag(args.offline, args.no_offline, "offline") .combine(workspace.and_then(|workspace| workspace.globals.offline)) .unwrap_or(false) { @@ -169,7 +173,7 @@ impl NetworkSettings { } else { Connectivity::Online }; - let native_tls = flag(args.native_tls, args.no_native_tls) + let native_tls = flag(args.native_tls, args.no_native_tls, "native-tls") .combine(workspace.and_then(|workspace| workspace.globals.native_tls)) .unwrap_or(false); let allow_insecure_host = args @@ -198,7 +202,6 @@ impl NetworkSettings { } /// The resolved cache settings to use for any invocation of the CLI. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct CacheSettings { pub(crate) no_cache: bool, @@ -221,7 +224,6 @@ impl CacheSettings { } /// The resolved settings to use for a `init` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct InitSettings { pub(crate) path: Option, @@ -276,8 +278,12 @@ impl InitSettings { (_, _, _) => unreachable!("`app`, `lib`, and `script` are mutually exclusive"), }; - let package = flag(package || build_backend.is_some(), no_package || r#virtual) - .unwrap_or(kind.packaged_by_default()); + let package = flag( + package || build_backend.is_some(), + no_package || r#virtual, + "virtual", + ) + .unwrap_or(kind.packaged_by_default()); let install_mirrors = filesystem .map(|fs| fs.install_mirrors.clone()) @@ -297,7 +303,7 @@ impl InitSettings { build_backend, no_readme: no_readme || bare, author_from, - pin_python: flag(pin_python, no_pin_python).unwrap_or(!bare), + pin_python: flag(pin_python, no_pin_python, "pin-python").unwrap_or(!bare), no_workspace, python: python.and_then(Maybe::into_option), install_mirrors, @@ -306,13 +312,12 @@ impl InitSettings { } /// The resolved settings to use for a `run` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct RunSettings { pub(crate) locked: bool, pub(crate) frozen: bool, pub(crate) extras: ExtrasSpecification, - pub(crate) dev: DependencyGroups, + pub(crate) groups: DependencyGroups, pub(crate) editable: EditableMode, pub(crate) modifications: Modifications, pub(crate) with: Vec, @@ -401,9 +406,9 @@ impl RunSettings { false, // TODO(blueraft): support only_extra vec![], - flag(all_extras, no_all_extras).unwrap_or_default(), + flag(all_extras, no_all_extras, "all-extras").unwrap_or_default(), ), - dev: DependencyGroups::from_args( + groups: DependencyGroups::from_args( dev, no_dev, only_dev, @@ -414,7 +419,7 @@ impl RunSettings { all_groups, ), editable: EditableMode::from_args(no_editable), - modifications: if flag(exact, inexact).unwrap_or(false) { + modifications: if flag(exact, inexact, "inexact").unwrap_or(false) { Modifications::Exact } else { Modifications::Sufficient @@ -437,7 +442,7 @@ impl RunSettings { package, no_project, no_sync, - active: flag(active, no_active), + active: flag(active, no_active, "active"), python: python.and_then(Maybe::into_option), refresh: Refresh::from(refresh), settings: ResolverInstallerSettings::combine( @@ -453,7 +458,6 @@ impl RunSettings { } /// The resolved settings to use for a `tool run` invocation. 
-#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct ToolRunSettings { pub(crate) command: Option, @@ -585,7 +589,6 @@ impl ToolRunSettings { } /// The resolved settings to use for a `tool install` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct ToolInstallSettings { pub(crate) package: String, @@ -680,7 +683,6 @@ impl ToolInstallSettings { } /// The resolved settings to use for a `tool upgrade` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct ToolUpgradeSettings { pub(crate) names: Vec, @@ -775,12 +777,12 @@ impl ToolUpgradeSettings { } /// The resolved settings to use for a `tool list` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct ToolListSettings { pub(crate) show_paths: bool, pub(crate) show_version_specifiers: bool, pub(crate) show_with: bool, + pub(crate) show_extras: bool, } impl ToolListSettings { @@ -791,6 +793,7 @@ impl ToolListSettings { show_paths, show_version_specifiers, show_with, + show_extras, python_preference: _, no_python_downloads: _, } = args; @@ -799,12 +802,12 @@ impl ToolListSettings { show_paths, show_version_specifiers, show_with, + show_extras, } } } /// The resolved settings to use for a `tool uninstall` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct ToolUninstallSettings { pub(crate) name: Vec, @@ -823,7 +826,6 @@ impl ToolUninstallSettings { } /// The resolved settings to use for a `tool dir` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct ToolDirSettings { pub(crate) bin: bool, @@ -850,7 +852,6 @@ pub(crate) enum PythonListKinds { } /// The resolved settings to use for a `tool run` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PythonListSettings { pub(crate) request: Option, @@ -910,7 +911,6 @@ impl PythonListSettings { } /// The resolved settings to use for a `python dir` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PythonDirSettings { pub(crate) bin: bool, @@ -927,7 +927,6 @@ impl PythonDirSettings { } /// The resolved settings to use for a `python install` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PythonInstallSettings { pub(crate) install_dir: Option, @@ -982,9 +981,61 @@ impl PythonInstallSettings { } } -/// The resolved settings to use for a `python uninstall` invocation. +/// The resolved settings to use for a `python upgrade` invocation. #[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] +pub(crate) struct PythonUpgradeSettings { + pub(crate) install_dir: Option, + pub(crate) targets: Vec, + pub(crate) force: bool, + pub(crate) python_install_mirror: Option, + pub(crate) pypy_install_mirror: Option, + pub(crate) python_downloads_json_url: Option, + pub(crate) default: bool, +} + +impl PythonUpgradeSettings { + /// Resolve the [`PythonUpgradeSettings`] from the CLI and filesystem configuration. 
+ #[allow(clippy::needless_pass_by_value)] + pub(crate) fn resolve(args: PythonUpgradeArgs, filesystem: Option) -> Self { + let options = filesystem.map(FilesystemOptions::into_options); + let (python_mirror, pypy_mirror, python_downloads_json_url) = match options { + Some(options) => ( + options.install_mirrors.python_install_mirror, + options.install_mirrors.pypy_install_mirror, + options.install_mirrors.python_downloads_json_url, + ), + None => (None, None, None), + }; + let python_mirror = args.mirror.or(python_mirror); + let pypy_mirror = args.pypy_mirror.or(pypy_mirror); + let python_downloads_json_url = + args.python_downloads_json_url.or(python_downloads_json_url); + let force = false; + let default = false; + + let PythonUpgradeArgs { + install_dir, + targets, + mirror: _, + pypy_mirror: _, + python_downloads_json_url: _, + } = args; + + Self { + install_dir, + targets, + force, + python_install_mirror: python_mirror, + pypy_install_mirror: pypy_mirror, + python_downloads_json_url, + default, + } + } +} + +/// The resolved settings to use for a `python uninstall` invocation. +#[derive(Debug, Clone)] pub(crate) struct PythonUninstallSettings { pub(crate) install_dir: Option, pub(crate) targets: Vec, @@ -1013,7 +1064,6 @@ impl PythonUninstallSettings { } /// The resolved settings to use for a `python find` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PythonFindSettings { pub(crate) request: Option, @@ -1039,38 +1089,46 @@ impl PythonFindSettings { request, show_version, no_project, - system: flag(system, no_system).unwrap_or_default(), + system: flag(system, no_system, "system").unwrap_or_default(), } } } /// The resolved settings to use for a `python pin` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PythonPinSettings { pub(crate) request: Option, pub(crate) resolved: bool, pub(crate) no_project: bool, pub(crate) global: bool, + pub(crate) rm: bool, + pub(crate) install_mirrors: PythonInstallMirrors, } impl PythonPinSettings { /// Resolve the [`PythonPinSettings`] from the CLI and workspace configuration. 
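// Illustrative sketch (hypothetical field names, condensed from `PythonUpgradeSettings::resolve`
// above): command-line values take precedence and the filesystem configuration only fills the
// gaps, which is what the `args.mirror.or(python_mirror)` chains express.
#[derive(Default)]
struct Mirrors {
    python: Option<String>,
    pypy: Option<String>,
}

fn resolve(cli: Mirrors, config: Option<Mirrors>) -> Mirrors {
    let config = config.unwrap_or_default();
    Mirrors {
        python: cli.python.or(config.python),
        pypy: cli.pypy.or(config.pypy),
    }
}

fn main() {
    let cli = Mirrors {
        python: Some("https://mirror.example/cpython".into()),
        pypy: None,
    };
    let config = Some(Mirrors {
        python: Some("https://ignored.example".into()),
        pypy: Some("https://mirror.example/pypy".into()),
    });
    let resolved = resolve(cli, config);
    assert_eq!(resolved.python.as_deref(), Some("https://mirror.example/cpython"));
    assert_eq!(resolved.pypy.as_deref(), Some("https://mirror.example/pypy"));
}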
#[allow(clippy::needless_pass_by_value)] - pub(crate) fn resolve(args: PythonPinArgs, _filesystem: Option) -> Self { + pub(crate) fn resolve(args: PythonPinArgs, filesystem: Option) -> Self { let PythonPinArgs { request, no_resolved, resolved, no_project, global, + rm, } = args; + let install_mirrors = filesystem + .map(|fs| fs.install_mirrors.clone()) + .unwrap_or_default(); + Self { request, - resolved: flag(resolved, no_resolved).unwrap_or(false), + resolved: flag(resolved, no_resolved, "resolved").unwrap_or(false), no_project, global, + rm, + install_mirrors, } } } @@ -1085,13 +1143,14 @@ pub(crate) struct SyncSettings { pub(crate) script: Option, pub(crate) active: Option, pub(crate) extras: ExtrasSpecification, - pub(crate) dev: DependencyGroups, + pub(crate) groups: DependencyGroups, pub(crate) editable: EditableMode, pub(crate) install_options: InstallOptions, pub(crate) modifications: Modifications, pub(crate) all_packages: bool, pub(crate) package: Option, pub(crate) python: Option, + pub(crate) python_platform: Option, pub(crate) install_mirrors: PythonInstallMirrors, pub(crate) refresh: Refresh, pub(crate) settings: ResolverInstallerSettings, @@ -1132,6 +1191,7 @@ impl SyncSettings { package, script, python, + python_platform, check, no_check, } = args; @@ -1145,7 +1205,7 @@ impl SyncSettings { filesystem, ); - let check = flag(check, no_check).unwrap_or_default(); + let check = flag(check, no_check, "check").unwrap_or_default(); let dry_run = if check { DryRun::Check } else { @@ -1157,7 +1217,7 @@ impl SyncSettings { frozen, dry_run, script, - active: flag(active, no_active), + active: flag(active, no_active, "active"), extras: ExtrasSpecification::from_args( extra.unwrap_or_default(), no_extra, @@ -1165,9 +1225,9 @@ impl SyncSettings { false, // TODO(blueraft): support only_extra vec![], - flag(all_extras, no_all_extras).unwrap_or_default(), + flag(all_extras, no_all_extras, "all-extras").unwrap_or_default(), ), - dev: DependencyGroups::from_args( + groups: DependencyGroups::from_args( dev, no_dev, only_dev, @@ -1183,7 +1243,7 @@ impl SyncSettings { no_install_workspace, no_install_package, ), - modifications: if flag(exact, inexact).unwrap_or(true) { + modifications: if flag(exact, inexact, "inexact").unwrap_or(true) { Modifications::Exact } else { Modifications::Sufficient @@ -1191,6 +1251,7 @@ impl SyncSettings { all_packages, package, python: python.and_then(Maybe::into_option), + python_platform, refresh: Refresh::from(refresh), settings, install_mirrors, @@ -1261,12 +1322,14 @@ pub(crate) struct AddSettings { pub(crate) editable: Option, pub(crate) extras: Vec, pub(crate) raw: bool, + pub(crate) bounds: Option, pub(crate) rev: Option, pub(crate) tag: Option, pub(crate) branch: Option, pub(crate) package: Option, pub(crate) script: Option, pub(crate) python: Option, + pub(crate) workspace: bool, pub(crate) install_mirrors: PythonInstallMirrors, pub(crate) refresh: Refresh, pub(crate) indexes: Vec, @@ -1289,6 +1352,7 @@ impl AddSettings { no_editable, extra, raw, + bounds, rev, tag, branch, @@ -1303,6 +1367,7 @@ impl AddSettings { package, script, python, + workspace, } = args; let dependency_type = if let Some(extra) = optional { @@ -1334,6 +1399,12 @@ impl AddSettings { ) .collect::>(); + // Warn user if an ambiguous relative path was passed as a value for + // `--index` or `--default-index`. + indexes + .iter() + .for_each(|index| index.url().warn_on_disambiguated_relative_path()); + // If the user passed an `--index-url` or `--extra-index-url`, warn. 
if installer .index_args @@ -1370,14 +1441,16 @@ impl AddSettings { } let install_mirrors = filesystem - .clone() + .as_ref() .map(|fs| fs.install_mirrors.clone()) .unwrap_or_default(); + let bounds = bounds.or(filesystem.as_ref().and_then(|fs| fs.add.add_bounds)); + Self { locked, frozen, - active: flag(active, no_active), + active: flag(active, no_active, "active"), no_sync, packages, requirements, @@ -1388,13 +1461,15 @@ impl AddSettings { marker, dependency_type, raw, + bounds, rev, tag, branch, package, script, python: python.and_then(Maybe::into_option), - editable: flag(editable, no_editable), + workspace, + editable: flag(editable, no_editable, "editable"), extras: extra.unwrap_or_default(), refresh: Refresh::from(refresh), indexes, @@ -1470,7 +1545,7 @@ impl RemoveSettings { Self { locked, frozen, - active: flag(active, no_active), + active: flag(active, no_active, "active"), no_sync, packages, dependency_type, @@ -1487,11 +1562,79 @@ impl RemoveSettings { } } +/// The resolved settings to use for a `version` invocation. +#[allow(clippy::struct_excessive_bools, dead_code)] +#[derive(Debug, Clone)] +pub(crate) struct VersionSettings { + pub(crate) value: Option, + pub(crate) bump: Vec, + pub(crate) short: bool, + pub(crate) output_format: VersionFormat, + pub(crate) dry_run: bool, + pub(crate) locked: bool, + pub(crate) frozen: bool, + pub(crate) active: Option, + pub(crate) no_sync: bool, + pub(crate) package: Option, + pub(crate) python: Option, + pub(crate) install_mirrors: PythonInstallMirrors, + pub(crate) refresh: Refresh, + pub(crate) settings: ResolverInstallerSettings, +} + +impl VersionSettings { + /// Resolve the [`RemoveSettings`] from the CLI and filesystem configuration. + #[allow(clippy::needless_pass_by_value)] + pub(crate) fn resolve(args: VersionArgs, filesystem: Option) -> Self { + let VersionArgs { + value, + bump, + short, + output_format, + dry_run, + no_sync, + locked, + frozen, + active, + no_active, + installer, + build, + refresh, + package, + python, + } = args; + + let install_mirrors = filesystem + .clone() + .map(|fs| fs.install_mirrors.clone()) + .unwrap_or_default(); + + Self { + value, + bump, + short, + output_format, + dry_run, + locked, + frozen, + active: flag(active, no_active, "active"), + no_sync, + package, + python: python.and_then(Maybe::into_option), + refresh: Refresh::from(refresh), + settings: ResolverInstallerSettings::combine( + resolver_installer_options(installer, build), + filesystem, + ), + install_mirrors, + } + } +} + /// The resolved settings to use for a `tree` invocation. 
-#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct TreeSettings { - pub(crate) dev: DependencyGroups, + pub(crate) groups: DependencyGroups, pub(crate) locked: bool, pub(crate) frozen: bool, pub(crate) universal: bool, @@ -1539,7 +1682,7 @@ impl TreeSettings { .unwrap_or_default(); Self { - dev: DependencyGroups::from_args( + groups: DependencyGroups::from_args( dev, no_dev, only_dev, @@ -1577,7 +1720,7 @@ pub(crate) struct ExportSettings { pub(crate) package: Option, pub(crate) prune: Vec, pub(crate) extras: ExtrasSpecification, - pub(crate) dev: DependencyGroups, + pub(crate) groups: DependencyGroups, pub(crate) editable: EditableMode, pub(crate) hashes: bool, pub(crate) install_options: InstallOptions, @@ -1650,9 +1793,9 @@ impl ExportSettings { false, // TODO(blueraft): support only_extra vec![], - flag(all_extras, no_all_extras).unwrap_or_default(), + flag(all_extras, no_all_extras, "all-extras").unwrap_or_default(), ), - dev: DependencyGroups::from_args( + groups: DependencyGroups::from_args( dev, no_dev, only_dev, @@ -1663,7 +1806,7 @@ impl ExportSettings { all_groups, ), editable: EditableMode::from_args(no_editable), - hashes: flag(hashes, no_hashes).unwrap_or(true), + hashes: flag(hashes, no_hashes, "hashes").unwrap_or(true), install_options: InstallOptions::new( no_emit_project, no_emit_workspace, @@ -1672,8 +1815,8 @@ impl ExportSettings { output_file, locked, frozen, - include_annotations: flag(annotate, no_annotate).unwrap_or(true), - include_header: flag(header, no_header).unwrap_or(true), + include_annotations: flag(annotate, no_annotate, "annotate").unwrap_or(true), + include_header: flag(header, no_header, "header").unwrap_or(true), script, python: python.and_then(Maybe::into_option), refresh: Refresh::from(refresh), @@ -1684,7 +1827,6 @@ impl ExportSettings { } /// The resolved settings to use for a `pip compile` invocation. 
-#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipCompileSettings { pub(crate) format: Option, @@ -1827,30 +1969,42 @@ impl PipCompileSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - no_build: flag(no_build, build), + system: flag(system, no_system, "system"), + no_build: flag(no_build, build, "build"), no_binary, only_binary, extra, - all_extras: flag(all_extras, no_all_extras), - no_deps: flag(no_deps, deps), + all_extras: flag(all_extras, no_all_extras, "all-extras"), + no_deps: flag(no_deps, deps, "deps"), group: Some(group), output_file, - no_strip_extras: flag(no_strip_extras, strip_extras), - no_strip_markers: flag(no_strip_markers, strip_markers), - no_annotate: flag(no_annotate, annotate), - no_header: flag(no_header, header), + no_strip_extras: flag(no_strip_extras, strip_extras, "strip-extras"), + no_strip_markers: flag(no_strip_markers, strip_markers, "strip-markers"), + no_annotate: flag(no_annotate, annotate, "annotate"), + no_header: flag(no_header, header, "header"), custom_compile_command, - generate_hashes: flag(generate_hashes, no_generate_hashes), + generate_hashes: flag(generate_hashes, no_generate_hashes, "generate-hashes"), python_version, python_platform, - universal: flag(universal, no_universal), + universal: flag(universal, no_universal, "universal"), no_emit_package, - emit_index_url: flag(emit_index_url, no_emit_index_url), - emit_find_links: flag(emit_find_links, no_emit_find_links), - emit_build_options: flag(emit_build_options, no_emit_build_options), - emit_marker_expression: flag(emit_marker_expression, no_emit_marker_expression), - emit_index_annotation: flag(emit_index_annotation, no_emit_index_annotation), + emit_index_url: flag(emit_index_url, no_emit_index_url, "emit-index-url"), + emit_find_links: flag(emit_find_links, no_emit_find_links, "emit-find-links"), + emit_build_options: flag( + emit_build_options, + no_emit_build_options, + "emit-build-options", + ), + emit_marker_expression: flag( + emit_marker_expression, + no_emit_marker_expression, + "emit-marker-expression", + ), + emit_index_annotation: flag( + emit_index_annotation, + no_emit_index_annotation, + "emit-index-annotation", + ), annotation_style, torch_backend, ..PipOptions::from(resolver) @@ -1862,7 +2016,6 @@ impl PipCompileSettings { } /// The resolved settings to use for a `pip sync` invocation. 
-#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipSyncSettings { pub(crate) src_file: Vec, @@ -1923,22 +2076,27 @@ impl PipSyncSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - break_system_packages: flag(break_system_packages, no_break_system_packages), + system: flag(system, no_system, "system"), + break_system_packages: flag( + break_system_packages, + no_break_system_packages, + "break-system-packages", + ), target, prefix, - require_hashes: flag(require_hashes, no_require_hashes), - verify_hashes: flag(verify_hashes, no_verify_hashes), - no_build: flag(no_build, build), + require_hashes: flag(require_hashes, no_require_hashes, "require-hashes"), + verify_hashes: flag(verify_hashes, no_verify_hashes, "verify-hashes"), + no_build: flag(no_build, build, "build"), no_binary, only_binary, allow_empty_requirements: flag( allow_empty_requirements, no_allow_empty_requirements, + "allow-empty-requirements", ), python_version, python_platform, - strict: flag(strict, no_strict), + strict: flag(strict, no_strict, "strict"), torch_backend, ..PipOptions::from(installer) }, @@ -1949,7 +2107,6 @@ impl PipSyncSettings { } /// The resolved settings to use for a `pip install` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipInstallSettings { pub(crate) package: Vec, @@ -2073,7 +2230,7 @@ impl PipInstallSettings { constraints_from_workspace, overrides_from_workspace, build_constraints_from_workspace, - modifications: if flag(exact, inexact).unwrap_or(false) { + modifications: if flag(exact, inexact, "inexact").unwrap_or(false) { Modifications::Exact } else { Modifications::Sufficient @@ -2082,22 +2239,26 @@ impl PipInstallSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - break_system_packages: flag(break_system_packages, no_break_system_packages), + system: flag(system, no_system, "system"), + break_system_packages: flag( + break_system_packages, + no_break_system_packages, + "break-system-packages", + ), target, prefix, - no_build: flag(no_build, build), + no_build: flag(no_build, build, "build"), no_binary, only_binary, - strict: flag(strict, no_strict), + strict: flag(strict, no_strict, "strict"), extra, - all_extras: flag(all_extras, no_all_extras), + all_extras: flag(all_extras, no_all_extras, "all-extras"), group: Some(group), - no_deps: flag(no_deps, deps), + no_deps: flag(no_deps, deps, "deps"), python_version, python_platform, - require_hashes: flag(require_hashes, no_require_hashes), - verify_hashes: flag(verify_hashes, no_verify_hashes), + require_hashes: flag(require_hashes, no_require_hashes, "require-hashes"), + verify_hashes: flag(verify_hashes, no_verify_hashes, "verify-hashes"), torch_backend, ..PipOptions::from(installer) }, @@ -2108,7 +2269,6 @@ impl PipInstallSettings { } /// The resolved settings to use for a `pip uninstall` invocation. 
-#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipUninstallSettings { pub(crate) package: Vec, @@ -2142,8 +2302,12 @@ impl PipUninstallSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - break_system_packages: flag(break_system_packages, no_break_system_packages), + system: flag(system, no_system, "system"), + break_system_packages: flag( + break_system_packages, + no_break_system_packages, + "break-system-packages", + ), target, prefix, keyring_provider, @@ -2156,7 +2320,6 @@ impl PipUninstallSettings { } /// The resolved settings to use for a `pip freeze` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipFreezeSettings { pub(crate) exclude_editable: bool, @@ -2184,8 +2347,8 @@ impl PipFreezeSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - strict: flag(strict, no_strict), + system: flag(system, no_system, "system"), + strict: flag(strict, no_strict, "strict"), ..PipOptions::default() }, filesystem, @@ -2195,7 +2358,6 @@ impl PipFreezeSettings { } /// The resolved settings to use for a `pip list` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipListSettings { pub(crate) editable: Option, @@ -2225,15 +2387,15 @@ impl PipListSettings { } = args; Self { - editable: flag(editable, exclude_editable), + editable: flag(editable, exclude_editable, "exclude-editable"), exclude, format, - outdated: flag(outdated, no_outdated).unwrap_or(false), + outdated: flag(outdated, no_outdated, "outdated").unwrap_or(false), settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - strict: flag(strict, no_strict), + system: flag(system, no_system, "system"), + strict: flag(strict, no_strict, "strict"), ..PipOptions::from(fetch) }, filesystem, @@ -2243,7 +2405,6 @@ impl PipListSettings { } /// The resolved settings to use for a `pip show` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipShowSettings { pub(crate) package: Vec, @@ -2271,8 +2432,8 @@ impl PipShowSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - strict: flag(strict, no_strict), + system: flag(system, no_system, "system"), + strict: flag(strict, no_strict, "strict"), ..PipOptions::default() }, filesystem, @@ -2282,7 +2443,6 @@ impl PipShowSettings { } /// The resolved settings to use for a `pip tree` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipTreeSettings { pub(crate) show_version_specifiers: bool, @@ -2321,8 +2481,8 @@ impl PipTreeSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - strict: flag(strict, no_strict), + system: flag(system, no_system, "system"), + strict: flag(strict, no_strict, "strict"), ..PipOptions::from(fetch) }, filesystem, @@ -2332,7 +2492,6 @@ impl PipTreeSettings { } /// The resolved settings to use for a `pip check` invocation. 
-#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipCheckSettings { pub(crate) settings: PipSettings, @@ -2351,7 +2510,7 @@ impl PipCheckSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), + system: flag(system, no_system, "system"), ..PipOptions::default() }, filesystem, @@ -2361,7 +2520,6 @@ impl PipCheckSettings { } /// The resolved settings to use for a `build` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct BuildSettings { pub(crate) src: Option, @@ -2419,15 +2577,15 @@ impl BuildSettings { sdist, wheel, list, - build_logs: flag(build_logs, no_build_logs).unwrap_or(true), + build_logs: flag(build_logs, no_build_logs, "build-logs").unwrap_or(true), build_constraints: build_constraints .into_iter() .filter_map(Maybe::into_option) .collect(), force_pep517, hash_checking: HashCheckingMode::from_args( - flag(require_hashes, no_require_hashes), - flag(verify_hashes, no_verify_hashes), + flag(require_hashes, no_require_hashes, "require-hashes"), + flag(verify_hashes, no_verify_hashes, "verify-hashes"), ), python: python.and_then(Maybe::into_option), refresh: Refresh::from(refresh), @@ -2438,7 +2596,6 @@ impl BuildSettings { } /// The resolved settings to use for a `venv` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct VenvSettings { pub(crate) seed: bool, @@ -2487,7 +2644,7 @@ impl VenvSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), + system: flag(system, no_system, "system"), index_strategy, keyring_provider, exclude_newer, @@ -2525,7 +2682,6 @@ pub(crate) struct InstallerSettingsRef<'a> { /// /// Combines the `[tool.uv]` persistent configuration with the command-line arguments /// ([`ResolverArgs`], represented as [`ResolverOptions`]). -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone, Default)] pub(crate) struct ResolverSettings { pub(crate) build_options: BuildOptions, @@ -2615,7 +2771,6 @@ impl From for ResolverSettings { /// /// Represents the shared settings that are used across all uv commands outside the `pip` API. /// Analogous to the settings contained in the `[tool.uv]` table, combined with [`ResolverInstallerArgs`]. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone, Default)] pub(crate) struct ResolverInstallerSettings { pub(crate) resolver: ResolverSettings, @@ -2705,7 +2860,6 @@ impl From for ResolverInstallerSettings { /// /// Represents the shared settings that are used across all `pip` commands. Analogous to the /// settings contained in the `[tool.uv.pip]` table. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipSettings { pub(crate) index_locations: IndexLocations, @@ -3082,7 +3236,6 @@ impl<'a> From<&'a ResolverInstallerSettings> for InstallerSettingsRef<'a> { } /// The resolved settings to use for an invocation of the `uv publish` CLI. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PublishSettings { // CLI only, see [`PublishArgs`] for docs. @@ -3092,7 +3245,7 @@ pub(crate) struct PublishSettings { pub(crate) index: Option, // Both CLI and configuration. 
- pub(crate) publish_url: Url, + pub(crate) publish_url: DisplaySafeUrl, pub(crate) trusted_publishing: TrustedPublishing, pub(crate) keyring_provider: KeyringProviderType, pub(crate) check_url: Option, @@ -3137,7 +3290,7 @@ impl PublishSettings { publish_url: args .publish_url .combine(publish_url) - .unwrap_or_else(|| Url::parse(PYPI_PUBLISH_URL).unwrap()), + .unwrap_or_else(|| DisplaySafeUrl::parse(PYPI_PUBLISH_URL).unwrap()), trusted_publishing: trusted_publishing .combine(args.trusted_publishing) .unwrap_or_default(), diff --git a/crates/uv/tests/it/branching_urls.rs b/crates/uv/tests/it/branching_urls.rs index 09c77ef59..a02ec0de3 100644 --- a/crates/uv/tests/it/branching_urls.rs +++ b/crates/uv/tests/it/branching_urls.rs @@ -14,6 +14,7 @@ use crate::common::{TestContext, make_project, uv_snapshot}; /// ] /// ``` #[test] +#[cfg(feature = "pypi")] fn branching_urls_disjoint() -> Result<()> { let context = TestContext::new("3.12"); @@ -47,6 +48,7 @@ fn branching_urls_disjoint() -> Result<()> { /// ] /// ``` #[test] +#[cfg(feature = "pypi")] fn branching_urls_overlapping() -> Result<()> { let context = TestContext::new("3.12"); @@ -83,6 +85,7 @@ fn branching_urls_overlapping() -> Result<()> { /// a -> b -> b2 -> https://../iniconfig-2.0.0-py3-none-any.whl /// ``` #[test] +#[cfg(feature = "pypi")] fn root_package_splits_but_transitive_conflict() -> Result<()> { let context = TestContext::new("3.12"); @@ -151,6 +154,7 @@ fn root_package_splits_but_transitive_conflict() -> Result<()> { /// a -> b -> b2 ; python_version >= '3.12' -> https://../iniconfig-2.0.0-py3-none-any.whl /// ``` #[test] +#[cfg(feature = "pypi")] fn root_package_splits_transitive_too() -> Result<()> { let context = TestContext::new("3.12"); @@ -356,6 +360,7 @@ fn root_package_splits_transitive_too() -> Result<()> { /// a -> b2 ; python_version >= '3.12' -> iniconfig==2.0.0 /// ``` #[test] +#[cfg(feature = "pypi")] fn root_package_splits_other_dependencies_too() -> Result<()> { let context = TestContext::new("3.12"); @@ -539,6 +544,7 @@ fn root_package_splits_other_dependencies_too() -> Result<()> { /// ] /// ``` #[test] +#[cfg(feature = "pypi")] fn branching_between_registry_and_direct_url() -> Result<()> { let context = TestContext::new("3.12"); @@ -624,7 +630,7 @@ fn branching_between_registry_and_direct_url() -> Result<()> { /// ] /// ``` #[test] -#[cfg(feature = "git")] +#[cfg(all(feature = "git", feature = "pypi"))] fn branching_urls_of_different_sources_disjoint() -> Result<()> { let context = TestContext::new("3.12"); @@ -708,7 +714,7 @@ fn branching_urls_of_different_sources_disjoint() -> Result<()> { /// ] /// ``` #[test] -#[cfg(feature = "git")] +#[cfg(all(feature = "git", feature = "pypi"))] fn branching_urls_of_different_sources_conflict() -> Result<()> { let context = TestContext::new("3.12"); diff --git a/crates/uv/tests/it/build.rs b/crates/uv/tests/it/build.rs index dc74d263f..3d08a90d4 100644 --- a/crates/uv/tests/it/build.rs +++ b/crates/uv/tests/it/build.rs @@ -1,4 +1,4 @@ -use crate::common::{TestContext, uv_snapshot}; +use crate::common::{DEFAULT_PYTHON_VERSION, TestContext, uv_snapshot}; use anyhow::Result; use assert_cmd::assert::OutputAssertExt; use assert_fs::prelude::*; @@ -7,7 +7,6 @@ use indoc::indoc; use insta::assert_snapshot; use predicates::prelude::predicate; use std::env::current_dir; -use std::process::Command; use zip::ZipArchive; #[test] @@ -16,7 +15,7 @@ fn build_basic() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), 
(r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -134,7 +133,7 @@ fn build_sdist() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -190,7 +189,7 @@ fn build_wheel() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -246,7 +245,7 @@ fn build_sdist_wheel() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -304,7 +303,7 @@ fn build_wheel_from_sdist() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -413,7 +412,7 @@ fn build_fail() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -489,7 +488,6 @@ fn build_workspace() -> Result<()> { .filters() .into_iter() .chain([ - (r"exit code: 1", "exit status: 1"), (r"\\\.", ""), (r"\[project\]", "[PKG]"), (r"\[member\]", "[PKG]"), @@ -695,7 +693,6 @@ fn build_all_with_failure() -> Result<()> { .filters() .into_iter() .chain([ - (r"exit code: 1", "exit status: 1"), (r"\\\.", ""), (r"\[project\]", "[PKG]"), (r"\[member-\w+\]", "[PKG]"), @@ -841,7 +838,7 @@ fn build_constraints() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -898,11 +895,11 @@ fn build_constraints() -> Result<()> { #[test] fn build_sha() -> Result<()> { - let context = TestContext::new("3.8"); + let context = TestContext::new(DEFAULT_PYTHON_VERSION); let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -1188,7 +1185,7 @@ fn build_tool_uv_sources() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let build = context.temp_dir.child("backend"); @@ -1338,7 +1335,6 @@ fn build_non_package() -> Result<()> { .filters() .into_iter() .chain([ - (r"exit code: 1", "exit status: 1"), (r"\\\.", ""), (r"\[project\]", "[PKG]"), (r"\[member\]", "[PKG]"), @@ -1857,7 +1853,7 @@ fn build_unconfigured_setuptools() -> Result<()> { + greet==0.1.0 (from file://[TEMP_DIR]/) "###); - uv_snapshot!(context.filters(), Command::new(context.interpreter()).arg("-c").arg("import greet"), @r###" + uv_snapshot!(context.filters(), context.python_command().arg("-c").arg("import greet"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -1931,7 +1927,7 @@ fn build_with_nonnormalized_name() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ 
-1982,3 +1978,60 @@ fn build_with_nonnormalized_name() -> Result<()> { Ok(()) } + +/// Check that `--force-pep517` is respected. +/// +/// The error messages for a broken project are different for direct builds vs. PEP 517. +#[test] +fn force_pep517() -> Result<()> { + // We need to use a real `uv_build` package. + let context = TestContext::new("3.12").with_exclude_newer("2025-05-27T00:00:00Z"); + + context + .init() + .arg("--build-backend") + .arg("uv") + .assert() + .success(); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! {r#" + [project] + name = "project" + version = "1.0.0" + + [tool.uv.build-backend] + module-name = "does_not_exist" + + [build-system] + requires = ["uv_build>=0.5.15,<10000"] + build-backend = "uv_build" + "#})?; + + uv_snapshot!(context.filters(), context.build().env("RUST_BACKTRACE", "0"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + Building source distribution (uv build backend)... + × Failed to build `[TEMP_DIR]/` + ╰─▶ Expected a Python module at: `src/does_not_exist/__init__.py` + "); + + uv_snapshot!(context.filters(), context.build().arg("--force-pep517").env("RUST_BACKTRACE", "0"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + Building source distribution... + Error: Missing module directory for `does_not_exist` in `src`. Found: `temp` + × Failed to build `[TEMP_DIR]/` + ├─▶ The build backend returned an error + ╰─▶ Call to `uv_build.build_sdist` failed (exit status: 1) + hint: This usually indicates a problem with the package or the build environment. + "); + + Ok(()) +} diff --git a/crates/uv/tests/it/build_backend.rs b/crates/uv/tests/it/build_backend.rs index ee4cde32b..b3bd337ae 100644 --- a/crates/uv/tests/it/build_backend.rs +++ b/crates/uv/tests/it/build_backend.rs @@ -1,10 +1,10 @@ use crate::common::{TestContext, uv_snapshot, venv_bin_path}; use anyhow::Result; use assert_cmd::assert::OutputAssertExt; -use assert_fs::fixture::{FileWriteStr, PathChild}; +use assert_fs::fixture::{FileWriteStr, PathChild, PathCreateDir}; use flate2::bufread::GzDecoder; use fs_err::File; -use indoc::indoc; +use indoc::{formatdoc, indoc}; use std::env; use std::io::BufReader; use std::path::Path; @@ -24,6 +24,7 @@ const BUILT_BY_UV_TEST_SCRIPT: &str = indoc! {r#" /// /// We can't test end-to-end here including the PEP 517 bridge code since we don't have a uv wheel. #[test] +#[cfg(feature = "pypi")] fn built_by_uv_direct_wheel() -> Result<()> { let context = TestContext::new("3.12"); let built_by_uv = Path::new("../../scripts/packages/built-by-uv"); @@ -49,13 +50,9 @@ fn built_by_uv_direct_wheel() -> Result<()> { .assert() .success(); - uv_snapshot!(context - .run() - .arg("python") + uv_snapshot!(context.python_command() .arg("-c") - .arg(BUILT_BY_UV_TEST_SCRIPT) - // Python on windows - .env(EnvVars::PYTHONUTF8, "1"), @r###" + .arg(BUILT_BY_UV_TEST_SCRIPT), @r###" success: true exit_code: 0 ----- stdout ----- @@ -83,6 +80,7 @@ fn built_by_uv_direct_wheel() -> Result<()> { /// We can't test end-to-end here including the PEP 517 bridge code since we don't have a uv wheel, /// so we call the build backend directly. 
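Note (editorial): the `(r"exit code: 1", "exit status: 1")` pairs removed from the tests above are (regex, replacement) filters applied to captured output before snapshot comparison; the Windows-only `exit code:` → `exit status:` normalization now lives in the shared `TestContext` (see the `common/mod.rs` hunk further down). Below is a small, self-contained illustration of how such filter pairs behave; it uses the `regex` crate directly and is not uv's implementation.

use regex::Regex;

// Apply (pattern, replacement) pairs in order, mirroring how snapshot filters work.
fn apply_filters(mut output: String, filters: &[(&str, &str)]) -> String {
    for (pattern, replacement) in filters {
        let re = Regex::new(pattern).expect("invalid filter regex");
        output = re.replace_all(&output, *replacement).into_owned();
    }
    output
}

fn main() {
    let filters = [(r"exit code: ", "exit status: "), (r"\\\.", "")];
    let raw = "Call to `uv_build.build_sdist` failed (exit code: 1)".to_string();
    assert_eq!(
        apply_filters(raw, &filters),
        "Call to `uv_build.build_sdist` failed (exit status: 1)"
    );
}
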
diff --git a/crates/uv/tests/it/build_backend.rs b/crates/uv/tests/it/build_backend.rs
index ee4cde32b..b3bd337ae 100644
--- a/crates/uv/tests/it/build_backend.rs
+++ b/crates/uv/tests/it/build_backend.rs
@@ -1,10 +1,10 @@
 use crate::common::{TestContext, uv_snapshot, venv_bin_path};
 use anyhow::Result;
 use assert_cmd::assert::OutputAssertExt;
-use assert_fs::fixture::{FileWriteStr, PathChild};
+use assert_fs::fixture::{FileWriteStr, PathChild, PathCreateDir};
 use flate2::bufread::GzDecoder;
 use fs_err::File;
-use indoc::indoc;
+use indoc::{formatdoc, indoc};
 use std::env;
 use std::io::BufReader;
 use std::path::Path;
@@ -24,6 +24,7 @@ const BUILT_BY_UV_TEST_SCRIPT: &str = indoc! {r#"
 ///
 /// We can't test end-to-end here including the PEP 517 bridge code since we don't have a uv wheel.
 #[test]
+#[cfg(feature = "pypi")]
 fn built_by_uv_direct_wheel() -> Result<()> {
     let context = TestContext::new("3.12");
     let built_by_uv = Path::new("../../scripts/packages/built-by-uv");
@@ -49,13 +50,9 @@ fn built_by_uv_direct_wheel() -> Result<()> {
         .assert()
         .success();

-    uv_snapshot!(context
-        .run()
-        .arg("python")
+    uv_snapshot!(context.python_command()
         .arg("-c")
-        .arg(BUILT_BY_UV_TEST_SCRIPT)
-        // Python on windows
-        .env(EnvVars::PYTHONUTF8, "1"), @r###"
+        .arg(BUILT_BY_UV_TEST_SCRIPT), @r###"
     success: true
     exit_code: 0
     ----- stdout -----
@@ -83,6 +80,7 @@ fn built_by_uv_direct_wheel() -> Result<()> {
 /// We can't test end-to-end here including the PEP 517 bridge code since we don't have a uv wheel,
 /// so we call the build backend directly.
 #[test]
+#[cfg(feature = "pypi")]
 fn built_by_uv_direct() -> Result<()> {
     let context = TestContext::new("3.12");
     let built_by_uv = Path::new("../../scripts/packages/built-by-uv");
@@ -136,13 +134,9 @@ fn built_by_uv_direct() -> Result<()> {

     drop(wheel_dir);

-    uv_snapshot!(context
-        .run()
-        .arg("python")
+    uv_snapshot!(context.python_command()
         .arg("-c")
-        .arg(BUILT_BY_UV_TEST_SCRIPT)
-        // Python on windows
-        .env(EnvVars::PYTHONUTF8, "1"), @r###"
+        .arg(BUILT_BY_UV_TEST_SCRIPT), @r###"
     success: true
     exit_code: 0
     ----- stdout -----
@@ -160,13 +154,15 @@ fn built_by_uv_direct() -> Result<()> {
 /// We can't test end-to-end here including the PEP 517 bridge code since we don't have a uv wheel,
 /// so we call the build backend directly.
 #[test]
+#[cfg(feature = "pypi")]
 fn built_by_uv_editable() -> Result<()> {
     let context = TestContext::new("3.12");
     let built_by_uv = Path::new("../../scripts/packages/built-by-uv");

     // Without the editable, pytest fails.
     context.pip_install().arg("pytest").assert().success();
-    Command::new(context.interpreter())
+    context
+        .python_command()
         .arg("-m")
         .arg("pytest")
         .current_dir(built_by_uv)
@@ -197,7 +193,7 @@ fn built_by_uv_editable() -> Result<()> {
     drop(wheel_dir);

     // Now, pytest passes.
-    uv_snapshot!(Command::new(context.interpreter())
+    uv_snapshot!(context.python_command()
         .arg("-m")
         .arg("pytest")
         // Avoid showing absolute paths and column dependent layout
@@ -228,7 +224,6 @@ fn preserve_executable_bit() -> Result<()> {
         .init()
         .arg("--build-backend")
         .arg("uv")
-        .arg("--preview")
         .arg(&project_dir)
         .assert()
         .success();
@@ -320,8 +315,7 @@ fn rename_module() -> Result<()> {
     uv_snapshot!(context
         .build_backend()
         .arg("build-wheel")
-        .arg(temp_dir.path())
-        .env("UV_PREVIEW", "1"), @r###"
+        .arg(temp_dir.path()), @r###"
     success: true
     exit_code: 0
     ----- stdout -----
@@ -337,11 +331,9 @@ fn rename_module() -> Result<()> {
         .success();

     // Importing the module with the `module-name` name succeeds.
-    uv_snapshot!(Command::new(context.interpreter())
+    uv_snapshot!(context.python_command()
         .arg("-c")
-        .arg("import bar")
-        // Python on windows
-        .env(EnvVars::PYTHONUTF8, "1"), @r###"
+        .arg("import bar"), @r###"
     success: true
     exit_code: 0
     ----- stdout -----
@@ -351,11 +343,9 @@ fn rename_module() -> Result<()> {
     "###);

     // Importing the package name fails, it was overridden by `module-name`.
-    uv_snapshot!(Command::new(context.interpreter())
+    uv_snapshot!(context.python_command()
         .arg("-c")
-        .arg("import foo")
-        // Python on windows
-        .env(EnvVars::PYTHONUTF8, "1"), @r###"
+        .arg("import foo"), @r###"
     success: false
     exit_code: 1
     ----- stdout -----
@@ -399,8 +389,7 @@ fn rename_module_editable_build() -> Result<()> {
     uv_snapshot!(context
         .build_backend()
         .arg("build-editable")
-        .arg(temp_dir.path())
-        .env("UV_PREVIEW", "1"), @r###"
+        .arg(temp_dir.path()), @r###"
     success: true
     exit_code: 0
     ----- stdout -----
@@ -416,11 +405,9 @@ fn rename_module_editable_build() -> Result<()> {
         .success();

     // Importing the module with the `module-name` name succeeds.
-    uv_snapshot!(Command::new(context.interpreter())
+    uv_snapshot!(context.python_command()
         .arg("-c")
-        .arg("import bar")
-        // Python on windows
-        .env(EnvVars::PYTHONUTF8, "1"), @r###"
+        .arg("import bar"), @r###"
     success: true
     exit_code: 0
     ----- stdout -----
@@ -467,7 +454,7 @@ fn build_module_name_normalization() -> Result<()> {
     ----- stdout -----

     ----- stderr -----
-    error: Missing module directory for `Django_plugin` in `src`. Found: ``
+    error: Expected a Python module at: `src/Django_plugin/__init__.py`
     ");

     fs_err::create_dir_all(context.temp_dir.join("src/Django_plugin"))?;
@@ -481,7 +468,7 @@ fn build_module_name_normalization() -> Result<()> {
     ----- stdout -----

     ----- stderr -----
-    error: Expected a Python module directory at: `src/Django_plugin/__init__.py`
+    error: Expected a Python module at: `src/Django_plugin/__init__.py`
     ");

     // Use `Django_plugin` instead of `django_plugin`
@@ -511,11 +498,9 @@ fn build_module_name_normalization() -> Result<()> {
         .assert()
         .success();

-    uv_snapshot!(Command::new(context.interpreter())
+    uv_snapshot!(context.python_command()
         .arg("-c")
-        .arg("import Django_plugin")
-        // Python on windows
-        .env(EnvVars::PYTHONUTF8, "1"), @r"
+        .arg("import Django_plugin"), @r"
     success: true
     exit_code: 0
     ----- stdout -----
@@ -580,8 +565,7 @@ fn build_sdist_with_long_path() -> Result<()> {
     uv_snapshot!(context
         .build_backend()
         .arg("build-sdist")
-        .arg(temp_dir.path())
-        .env("UV_PREVIEW", "1"), @r###"
+        .arg(temp_dir.path()), @r###"
     success: true
     exit_code: 0
     ----- stdout -----
@@ -614,14 +598,13 @@ fn sdist_error_without_module() -> Result<()> {
     uv_snapshot!(context
         .build_backend()
         .arg("build-sdist")
-        .arg(temp_dir.path())
-        .env("UV_PREVIEW", "1"), @r"
+        .arg(temp_dir.path()), @r"
     success: false
     exit_code: 2
     ----- stdout -----

     ----- stderr -----
-    error: Missing source directory at: `src`
+    error: Expected a Python module at: `src/foo/__init__.py`
     ");

     fs_err::create_dir(context.temp_dir.join("src"))?;
@@ -629,14 +612,276 @@ fn sdist_error_without_module() -> Result<()> {
     uv_snapshot!(context
         .build_backend()
         .arg("build-sdist")
-        .arg(temp_dir.path())
-        .env("UV_PREVIEW", "1"), @r"
+        .arg(temp_dir.path()), @r"
     success: false
     exit_code: 2
     ----- stdout -----

     ----- stderr -----
-    error: Missing module directory for `foo` in `src`. Found: ``
+    error: Expected a Python module at: `src/foo/__init__.py`
+    ");
+
+    Ok(())
+}
+
+#[test]
+fn complex_namespace_packages() -> Result<()> {
+    let context = TestContext::new("3.12");
+    let dist = context.temp_dir.child("dist");
+    dist.create_dir_all()?;
+
+    let init_py_a = indoc! {"
+        def one():
+            return 1
+    "};
+
+    let init_py_b = indoc! {"
+        from complex_project.part_a import one
+
+        def two():
+            return one() + one()
+    "};
+
+    let projects = [
+        ("complex-project", "part_a", init_py_a),
+        ("complex-project", "part_b", init_py_b),
+    ];
+
+    for (project_name, part_name, init_py) in projects {
+        let project = context
+            .temp_dir
+            .child(format!("{project_name}-{part_name}"));
+        let project_name_dist_info = project_name.replace('-', "_");
+        let pyproject_toml = formatdoc! {r#"
{r#" + [project] + name = "{project_name}-{part_name}" + version = "1.0.0" + + [tool.uv.build-backend] + module-name = "{project_name_dist_info}.{part_name}" + + [build-system] + requires = ["uv_build>=0.5.15,<10000"] + build-backend = "uv_build" + "# + }; + project.child("pyproject.toml").write_str(&pyproject_toml)?; + + project + .child("src") + .child(project_name_dist_info) + .child(part_name) + .child("__init__.py") + .write_str(init_py)?; + + context + .build() + .arg(project.path()) + .arg("--out-dir") + .arg(dist.path()) + .assert() + .success(); + } + + uv_snapshot!( + context.filters(), + context + .pip_install() + .arg("complex-project-part-a") + .arg("complex-project-part-b") + .arg("--offline") + .arg("--find-links") + .arg(dist.path()), + @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + Prepared 2 packages in [TIME] + Installed 2 packages in [TIME] + + complex-project-part-a==1.0.0 + + complex-project-part-b==1.0.0 + " + ); + + uv_snapshot!(context.python_command() + .arg("-c") + .arg("from complex_project.part_b import two; print(two())"), + @r" + success: true + exit_code: 0 + ----- stdout ----- + 2 + + ----- stderr ----- + " + ); + + // Test editable installs + uv_snapshot!( + context.filters(), + context + .pip_install() + .arg("-e") + .arg("complex-project-part_a") + .arg("-e") + .arg("complex-project-part_b") + .arg("--offline"), + @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + Prepared 2 packages in [TIME] + Uninstalled 2 packages in [TIME] + Installed 2 packages in [TIME] + - complex-project-part-a==1.0.0 + + complex-project-part-a==1.0.0 (from file://[TEMP_DIR]/complex-project-part_a) + - complex-project-part-b==1.0.0 + + complex-project-part-b==1.0.0 (from file://[TEMP_DIR]/complex-project-part_b) + " + ); + + uv_snapshot!(context.python_command() + .arg("-c") + .arg("from complex_project.part_b import two; print(two())"), + @r" + success: true + exit_code: 0 + ----- stdout ----- + 2 + + ----- stderr ----- + " + ); + Ok(()) +} + +/// Test that a symlinked file (here: license) gets included. +#[test] +#[cfg(unix)] +fn symlinked_file() -> Result<()> { + let context = TestContext::new("3.12"); + + let project = context.temp_dir.child("project"); + context + .init() + .arg("--build-backend") + .arg("uv") + .arg(project.path()) + .assert() + .success(); + + project.child("pyproject.toml").write_str(indoc! 
{r#" + [project] + name = "project" + version = "1.0.0" + license-files = ["LICENSE"] + + [build-system] + requires = ["uv_build>=0.5.15,<10000"] + build-backend = "uv_build" + "# + })?; + + let license_file = context.temp_dir.child("LICENSE"); + let license_symlink = project.child("LICENSE"); + + let license_text = "Project license"; + license_file.write_str(license_text)?; + fs_err::os::unix::fs::symlink(license_file.path(), license_symlink.path())?; + + uv_snapshot!(context + .build_backend() + .arg("build-sdist") + .arg(context.temp_dir.path()) + .current_dir(project.path()), @r" + success: true + exit_code: 0 + ----- stdout ----- + project-1.0.0.tar.gz + + ----- stderr ----- + "); + + uv_snapshot!(context + .build_backend() + .arg("build-wheel") + .arg(context.temp_dir.path()) + .current_dir(project.path()), @r" + success: true + exit_code: 0 + ----- stdout ----- + project-1.0.0-py3-none-any.whl + + ----- stderr ----- + "); + + uv_snapshot!(context.filters(), context.pip_install().arg("project-1.0.0-py3-none-any.whl"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + project==1.0.0 (from file://[TEMP_DIR]/project-1.0.0-py3-none-any.whl) + "); + + // Check that we included the actual license text and not a broken symlink. + let installed_license = context + .site_packages() + .join("project-1.0.0.dist-info") + .join("licenses") + .join("LICENSE"); + assert!( + fs_err::symlink_metadata(&installed_license)? + .file_type() + .is_file() + ); + let license = fs_err::read_to_string(&installed_license)?; + assert_eq!(license, license_text); + + Ok(()) +} + +/// Ignore invalid build backend settings when not building. +/// +/// They may be from another `uv_build` version that has a different schema. +#[test] +fn invalid_build_backend_settings_are_ignored() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! {r#" + [project] + name = "built-by-uv" + version = "0.1.0" + requires-python = ">=3.12" + + [tool.uv.build-backend] + # Error: `source-include` must be a list + source-include = "data/build-script.py" + + [build-system] + requires = ["uv_build>=10000,<10001"] + build-backend = "uv_build" + "#})?; + + // Since we are not building, this must pass without complaining about the error in + // `tool.uv.build-backend`. + uv_snapshot!(context.filters(), context.lock(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] "); Ok(()) diff --git a/crates/uv/tests/it/cache_prune.rs b/crates/uv/tests/it/cache_prune.rs index d760786d7..a6ec48bd4 100644 --- a/crates/uv/tests/it/cache_prune.rs +++ b/crates/uv/tests/it/cache_prune.rs @@ -250,19 +250,19 @@ fn prune_unzipped() -> Result<()> { requirements_txt.write_str(indoc! { r" iniconfig " })?; - uv_snapshot!(&filters, context.pip_install().arg("-r").arg("requirements.txt").arg("--offline"), @r###" + uv_snapshot!(&filters, context.pip_install().arg("-r").arg("requirements.txt").arg("--offline"), @r" success: false exit_code: 1 ----- stdout ----- ----- stderr ----- × No solution found when resolving dependencies: - ╰─▶ Because iniconfig<=2.0.0 needs to be downloaded from a registry and you require iniconfig, we can conclude that your requirements are unsatisfiable. 
+      ╰─▶ Because all versions of iniconfig need to be downloaded from a registry and you require iniconfig, we can conclude that your requirements are unsatisfiable.
       hint: Pre-releases are available for `iniconfig` in the requested range (e.g., 0.2.dev0), but pre-releases weren't enabled (try: `--prerelease=allow`)
       hint: Packages were unavailable because the network was disabled. When the network is disabled, registry packages may only be read from the cache.
-    "###);
+    ");

     Ok(())
 }
diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs
index e21314456..90f436f6f 100644
--- a/crates/uv/tests/it/common/mod.rs
+++ b/crates/uv/tests/it/common/mod.rs
@@ -13,7 +13,6 @@ use assert_cmd::assert::{Assert, OutputAssertExt};
 use assert_fs::assert::PathAssert;
 use assert_fs::fixture::{ChildPath, PathChild, PathCopy, PathCreateDir, SymlinkToFile};
 use base64::{Engine, prelude::BASE64_STANDARD as base64};
-use etcetera::BaseStrategy;
 use futures::StreamExt;
 use indoc::formatdoc;
 use itertools::Itertools;
@@ -22,6 +21,7 @@ use regex::Regex;
 use tokio::io::AsyncWriteExt;

 use uv_cache::Cache;
+use uv_configuration::PreviewMode;
 use uv_fs::Simplified;
 use uv_python::managed::ManagedPythonInstallations;
 use uv_python::{
@@ -32,7 +32,8 @@ use uv_static::EnvVars;
 // Exclude any packages uploaded after this date.
 static EXCLUDE_NEWER: &str = "2024-03-25T00:00:00Z";

-pub const PACKSE_VERSION: &str = "0.3.46";
+pub const PACKSE_VERSION: &str = "0.3.47";
+pub const DEFAULT_PYTHON_VERSION: &str = "3.12";

 /// Using a find links url allows using `--index-url` instead of `--extra-index-url` in tests
 /// to prevent dependency confusion attacks against our test suite.
@@ -65,7 +66,7 @@ pub const INSTA_FILTERS: &[(&str, &str)] = &[
     (r"uv\.exe", "uv"),
     // uv version display
     (
-        r"uv(-.*)? \d+\.\d+\.\d+(\+\d+)?( \(.*\))?",
+        r"uv(-.*)? \d+\.\d+\.\d+(-(alpha|beta|rc)\.\d+)?(\+\d+)?( \([^)]*\))?",
         r"uv [VERSION] ([COMMIT] DATE)",
     ),
     // Trim end-of-line whitespaces, to allow removing them on save.
@@ -194,6 +195,12 @@ impl TestContext {
             "managed installations, search path, or registry".to_string(),
             "[PYTHON SOURCES]".to_string(),
         ));
+        self.filters.push((
+            "registry or search path".to_string(),
+            "[PYTHON SOURCES]".to_string(),
+        ));
+        self.filters
+            .push(("search path".to_string(), "[PYTHON SOURCES]".to_string()));
         self
     }

@@ -243,6 +250,30 @@ impl TestContext {
         self
     }

+    /// Filtering for various keys in a `pyvenv.cfg` file that will vary
+    /// depending on the specific machine used:
+    /// - `home = foo/bar/baz/python3.X.X/bin`
+    /// - `uv = X.Y.Z`
+    /// - `extends-environment = `
+    #[must_use]
+    pub fn with_pyvenv_cfg_filters(mut self) -> Self {
+        let added_filters = [
+            (r"home = .+".to_string(), "home = [PYTHON_HOME]".to_string()),
+            (
+                r"uv = \d+\.\d+\.\d+(-(alpha|beta|rc)\.\d+)?(\+\d+)?".to_string(),
+                "uv = [UV_VERSION]".to_string(),
+            ),
+            (
+                r"extends-environment = .+".to_string(),
+                "extends-environment = [PARENT_VENV]".to_string(),
+            ),
+        ];
+        for filter in added_filters {
+            self.filters.insert(0, filter);
+        }
+        self
+    }
+
     /// Add extra filtering for ` -> ` symlink display for Python versions in the test
     /// context, e.g., for use in `uv python list`.
     #[must_use]
@@ -381,25 +412,20 @@ impl TestContext {
         self
     }

-    /// Discover the path to the XDG state directory. We use this, rather than the OS-specific
-    /// temporary directory, because on macOS (and Windows on GitHub Actions), they involve
-    /// symlinks. (On macOS, the temporary directory is, like `/var/...`, which resolves to
-    /// `/private/var/...`.)
+    /// Default to the canonicalized path to the temp directory. We need to do this because on
+    /// macOS (and Windows on GitHub Actions) the standard temp dir is a symlink. (On macOS, the
+    /// temporary directory is, like `/var/...`, which resolves to `/private/var/...`.)
     ///
     /// It turns out that, at least on macOS, if we pass a symlink as `current_dir`, it gets
     /// _immediately_ resolved (such that if you call `current_dir` in the running `Command`, it
-    /// returns resolved symlink). This is problematic, as we _don't_ want to resolve symlinks
-    /// for user-provided paths.
+    /// returns resolved symlink). This breaks some snapshot tests, since we _don't_ want to
+    /// resolve symlinks for user-provided paths.
     pub fn test_bucket_dir() -> PathBuf {
-        env::var(EnvVars::UV_INTERNAL__TEST_DIR)
-            .map(PathBuf::from)
-            .unwrap_or_else(|_| {
-                etcetera::base_strategy::choose_base_strategy()
-                    .expect("Failed to find base strategy")
-                    .data_dir()
-                    .join("uv")
-                    .join("tests")
-            })
+        std::env::temp_dir()
+            .simple_canonicalize()
+            .expect("failed to canonicalize temp dir")
+            .join("uv")
+            .join("tests")
     }

     /// Create a new test context with multiple Python versions.
@@ -497,6 +523,8 @@ impl TestContext {
         if cfg!(windows) {
             filters.push((" --link-mode ".to_string(), String::new()));
             filters.push((r#"link-mode = "copy"\n"#.to_string(), String::new()));
+            // Unix uses "exit status", Windows uses "exit code"
+            filters.push((r"exit code: ".to_string(), "exit status: ".to_string()));
         }

         filters.extend(
@@ -625,6 +653,8 @@ impl TestContext {
             format!("https://raw.githubusercontent.com/astral-sh/packse/{PACKSE_VERSION}/"),
             "https://raw.githubusercontent.com/astral-sh/packse/PACKSE_VERSION/".to_string(),
         ));
+        // For wiremock tests
+        filters.push((r"127\.0\.0\.1:\d*".to_string(), "[LOCALHOST]".to_string()));

         Self {
             root: ChildPath::new(root.path()),
@@ -720,6 +750,7 @@ impl TestContext {
             .env_remove(EnvVars::UV_CACHE_DIR)
             .env_remove(EnvVars::UV_TOOL_BIN_DIR)
             .env_remove(EnvVars::XDG_CONFIG_HOME)
+            .env_remove(EnvVars::XDG_DATA_HOME)
             .current_dir(self.temp_dir.path());

         for (key, value) in &self.extra_env {
@@ -878,6 +909,13 @@ impl TestContext {
         command
     }

+    pub fn self_update(&self) -> Command {
+        let mut command = self.new_command();
+        command.arg("self").arg("update");
+        self.add_shared_options(&mut command, false);
+        command
+    }
+
     /// Create a `uv publish` command with options shared across scenarios.
     pub fn publish(&self) -> Command {
         let mut command = self.new_command();
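Note (editorial): `test_bucket_dir` above now canonicalizes the OS temp directory (the `simple_canonicalize` call presumably comes from the `uv_fs::Simplified` trait imported at the top of the file) instead of deriving an XDG data path. The following standalone snippet, using only the standard library rather than uv's helper, illustrates the symlink behaviour the doc comment describes:

use std::io;

fn main() -> io::Result<()> {
    // On macOS the temp dir is typically under `/var/...`, a symlink to `/private/var/...`.
    // Passing the uncanonicalized path as `current_dir` means a child process observes the
    // resolved path, which makes path-based snapshots unstable.
    let tmp = std::env::temp_dir();
    let canonical = tmp.canonicalize()?;
    println!("temp dir:      {}", tmp.display());
    println!("canonicalized: {}", canonical.display());
    Ok(())
}
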
@@ -892,8 +930,7 @@ impl TestContext {
             .arg("python")
             .arg("find")
             .env(EnvVars::UV_PREVIEW, "1")
-            .env(EnvVars::UV_PYTHON_INSTALL_DIR, "")
-            .current_dir(&self.temp_dir);
+            .env(EnvVars::UV_PYTHON_INSTALL_DIR, "");
         self.add_shared_options(&mut command, false);
         command
     }
@@ -904,8 +941,7 @@ impl TestContext {
         command
             .arg("python")
             .arg("list")
-            .env(EnvVars::UV_PYTHON_INSTALL_DIR, "")
-            .current_dir(&self.temp_dir);
+            .env(EnvVars::UV_PYTHON_INSTALL_DIR, "");
         self.add_shared_options(&mut command, false);
         command
     }
@@ -914,10 +950,7 @@ impl TestContext {
     pub fn python_install(&self) -> Command {
         let mut command = self.new_command();
         self.add_shared_options(&mut command, true);
-        command
-            .arg("python")
-            .arg("install")
-            .current_dir(&self.temp_dir);
+        command.arg("python").arg("install");
         command
     }

@@ -925,10 +958,15 @@ impl TestContext {
     pub fn python_uninstall(&self) -> Command {
         let mut command = self.new_command();
         self.add_shared_options(&mut command, true);
+        command.arg("python").arg("uninstall");
         command
-            .arg("python")
-            .arg("uninstall")
-            .current_dir(&self.temp_dir);
+    }
+
+    /// Create a `uv python upgrade` command with options shared across scenarios.
+    pub fn python_upgrade(&self) -> Command {
+        let mut command = self.new_command();
+        self.add_shared_options(&mut command, true);
+        command.arg("python").arg("upgrade");
         command
     }

@@ -1058,15 +1096,30 @@ impl TestContext {
     }

     pub fn interpreter(&self) -> PathBuf {
-        venv_to_interpreter(&self.venv)
+        let venv = &self.venv;
+        if cfg!(unix) {
+            venv.join("bin").join("python")
+        } else if cfg!(windows) {
+            venv.join("Scripts").join("python.exe")
+        } else {
+            unimplemented!("Only Windows and Unix are supported")
+        }
+    }
+
+    pub fn python_command(&self) -> Command {
+        let mut command = self.new_command_with(&self.interpreter());
+        command
+            // Our tests change files in <1s, so we must disable CPython bytecode caching or we'll get stale files
+            // https://github.com/python/cpython/issues/75953
+            .arg("-B")
+            // Python on windows
+            .env(EnvVars::PYTHONUTF8, "1");
+        command
     }

     /// Run the given python code and check whether it succeeds.
     pub fn assert_command(&self, command: &str) -> Assert {
-        self.new_command_with(&venv_to_interpreter(&self.venv))
-            // Our tests change files in <1s, so we must disable CPython bytecode caching or we'll get stale files
-            // https://github.com/python/cpython/issues/75953
-            .arg("-B")
+        self.python_command()
             .arg("-c")
             .arg(command)
             .current_dir(&self.temp_dir)
@@ -1075,10 +1128,7 @@ impl TestContext {

     /// Run the given python file and check whether it succeeds.
     pub fn assert_file(&self, file: impl AsRef) -> Assert {
-        self.new_command_with(&venv_to_interpreter(&self.venv))
-            // Our tests change files in <1s, so we must disable CPython bytecode caching or we'll get stale files
-            // https://github.com/python/cpython/issues/75953
-            .arg("-B")
+        self.python_command()
             .arg(file.as_ref())
             .current_dir(&self.temp_dir)
             .assert()
@@ -1093,6 +1143,12 @@ impl TestContext {
             .stdout(version);
     }

+    /// Assert a package is not installed.
+    pub fn assert_not_installed(&self, package: &'static str) {
+        self.assert_command(format!("import {package}").as_str())
+            .failure();
+    }
+
     /// Generate various escaped regex patterns for the given path.
     pub fn path_patterns(path: impl AsRef) -> Vec {
         let mut patterns = Vec::new();
@@ -1320,16 +1376,6 @@ pub fn venv_bin_path(venv: impl AsRef) -> PathBuf {
     }
 }

-pub fn venv_to_interpreter(venv: &Path) -> PathBuf {
-    if cfg!(unix) {
-        venv.join("bin").join("python")
-    } else if cfg!(windows) {
-        venv.join("Scripts").join("python.exe")
-    } else {
-        unimplemented!("Only Windows and Unix are supported")
-    }
-}
-
 /// Get the path to the python interpreter for a specific python version.
 pub fn get_python(version: &PythonVersion) -> PathBuf {
     ManagedPythonInstallations::from_settings(None)
@@ -1399,6 +1445,7 @@ pub fn python_installations_for_versions(
         EnvironmentPreference::OnlySystem,
         PythonPreference::Managed,
         &cache,
+        PreviewMode::Disabled,
     ) {
         python.into_interpreter().sys_executable().to_owned()
     } else {
@@ -1602,6 +1649,8 @@ pub const READ_ONLY_GITHUB_TOKEN_2: &[&str] = &[
     "SHIzUG1tRVZRSHMzQTl2a3NiVnB4Tmk0eTR3R2JVYklLck1qY05naHhMSFVMTDZGVElIMXNYeFhYN2gK",
 ];

+pub const READ_ONLY_GITHUB_SSH_DEPLOY_KEY: &str = "LS0tLS1CRUdJTiBPUEVOU1NIIFBSSVZBVEUgS0VZLS0tLS0KYjNCbGJuTnphQzFyWlhrdGRqRUFBQUFBQkc1dmJtVUFBQUFFYm05dVpRQUFBQUFBQUFBQkFBQUFNd0FBQUF0emMyZ3RaVwpReU5UVXhPUUFBQUNBeTF1SnNZK1JXcWp1NkdIY3Z6a3AwS21yWDEwdmo3RUZqTkpNTkRqSGZPZ0FBQUpqWUpwVnAyQ2FWCmFRQUFBQXR6YzJndFpXUXlOVFV4T1FBQUFDQXkxdUpzWStSV3FqdTZHSGN2emtwMEttclgxMHZqN0VGak5KTU5EakhmT2cKQUFBRUMwbzBnd1BxbGl6TFBJOEFXWDVaS2dVZHJyQ2ptMDhIQm9FenB4VDg3MXBqTFc0bXhqNUZhcU83b1lkeS9PU25RcQphdGZYUytQc1FXTTBrdzBPTWQ4NkFBQUFFR3R2Ym5OMGFVQmhjM1J5WVd3dWMyZ0JBZ01FQlE9PQotLS0tLUVORCBPUEVOU1NIIFBSSVZBVEUgS0VZLS0tLS0K";
+
 /// Decode a split, base64 encoded authentication token.
 /// We split and encode the token to bypass revoke by GitHub's secret scanning
 pub fn decode_token(content: &[&str]) -> String {
@@ -1630,9 +1679,13 @@ pub async fn download_to_disk(url: &str, path: &Path) {
     let client = uv_client::BaseClientBuilder::new()
         .allow_insecure_host(trusted_hosts)
         .build();
-    let url: reqwest::Url = url.parse().unwrap();
-    let client = client.for_host(&url);
-    let response = client.request(http::Method::GET, url).send().await.unwrap();
+    let url = url.parse().unwrap();
+    let response = client
+        .for_host(&url)
+        .get(reqwest::Url::from(url))
+        .send()
+        .await
+        .unwrap();

     let mut file = tokio::fs::File::create(path).await.unwrap();
     let mut stream = response.bytes_stream();
diff --git a/crates/uv/tests/it/ecosystem.rs b/crates/uv/tests/it/ecosystem.rs
index e96dca62c..a3804f426 100644
--- a/crates/uv/tests/it/ecosystem.rs
+++ b/crates/uv/tests/it/ecosystem.rs
@@ -73,8 +73,8 @@ fn saleor() -> Result<()> {
 // Currently ignored because the project doesn't build with `uv` yet.
 //
 // Source: https://github.com/apache/airflow/blob/c55438d9b2eb9b6680641eefdd0cbc67a28d1d29/pyproject.toml
-#[ignore]
 #[test]
+#[ignore = "Airflow doesn't build with `uv` yet"]
 fn airflow() -> Result<()> {
     lock_ecosystem_package("3.12", "airflow")
 }
diff --git a/crates/uv/tests/it/edit.rs b/crates/uv/tests/it/edit.rs
index dda76b0b9..ddaed434f 100644
--- a/crates/uv/tests/it/edit.rs
+++ b/crates/uv/tests/it/edit.rs
@@ -14,6 +14,7 @@ use assert_fs::prelude::*;
 use indoc::{formatdoc, indoc};
 use insta::assert_snapshot;
 use std::path::Path;
+use url::Url;
 use uv_fs::Simplified;
 use wiremock::{Mock, MockServer, ResponseTemplate, matchers::method};

@@ -405,6 +406,8 @@ fn add_git_private_source() -> Result<()> {
 fn add_git_private_raw() -> Result<()> {
     let context = TestContext::new("3.12");
     let token = decode_token(READ_ONLY_GITHUB_TOKEN);
+    let mut filters = context.filters();
+    filters.push((&token, "***"));

     let pyproject_toml = context.temp_dir.child("pyproject.toml");
     pyproject_toml.write_str(indoc! {r#"
@@ -415,7 +418,7 @@ fn add_git_private_raw() -> Result<()> {
         dependencies = []
     "#})?;

-    uv_snapshot!(context.filters(), context.add().arg(format!("uv-private-pypackage @ git+https://{token}@github.com/astral-test/uv-private-pypackage")).arg("--raw-sources"), @r"
+    uv_snapshot!(filters, context.add().arg(format!("uv-private-pypackage @ git+https://{token}@github.com/astral-test/uv-private-pypackage")).arg("--raw-sources"), @r"
     success: true
     exit_code: 0
     ----- stdout -----
@@ -429,16 +432,11 @@ fn add_git_private_raw() -> Result<()> {

     let pyproject_toml = context.read("pyproject.toml");

-    let filters: Vec<_> = [(token.as_str(), "***")]
-        .into_iter()
-        .chain(context.filters())
-        .collect();
-
     insta::with_settings!({
-        filters => filters
+        filters => filters.clone()
     }, {
         assert_snapshot!(
-            pyproject_toml, @r###"
+            pyproject_toml, @r#"
         [project]
         name = "project"
         version = "0.1.0"
@@ -446,14 +444,14 @@ fn add_git_private_raw() -> Result<()> {
         dependencies = [
             "uv-private-pypackage @ git+https://***@github.com/astral-test/uv-private-pypackage",
         ]
-        "###
+        "#
         );
     });

     let lock = context.read("uv.lock");

     insta::with_settings!({
-        filters => context.filters(),
+        filters => filters.clone(),
     }, {
         assert_snapshot!(
             lock, @r#"
@@ -484,7 +482,7 @@ fn add_git_private_raw() -> Result<()> {
     });

     // Install from the lockfile.
-    uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
+    uv_snapshot!(filters, context.sync().arg("--frozen"), @r"
     success: true
     exit_code: 0
     ----- stdout -----
@@ -496,6 +494,90 @@ fn add_git_private_raw() -> Result<()> {
     Ok(())
 }

+#[tokio::test]
+#[cfg(feature = "git")]
+async fn add_git_private_rate_limited_by_github_rest_api_403_response() -> Result<()> {
+    let context = TestContext::new("3.12");
+    let token = decode_token(READ_ONLY_GITHUB_TOKEN);
+
+    let server = MockServer::start().await;
+    Mock::given(method("GET"))
+        .respond_with(ResponseTemplate::new(403))
+        .expect(1)
+        .mount(&server)
+        .await;
+
+    let pyproject_toml = context.temp_dir.child("pyproject.toml");
+    pyproject_toml.write_str(indoc! {r#"
+        [project]
+        name = "project"
+        version = "0.1.0"
+        requires-python = ">=3.12"
+        dependencies = []
+    "#})?;
+
+    uv_snapshot!(context.filters(), context
+        .add()
+        .arg(format!("uv-private-pypackage @ git+https://{token}@github.com/astral-test/uv-private-pypackage"))
+        .env("UV_GITHUB_FAST_PATH_URL", server.uri()), @r"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+
+    ----- stderr -----
+    Resolved 2 packages in [TIME]
+    Prepared 1 package in [TIME]
+    Installed 1 package in [TIME]
+     + uv-private-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-private-pypackage@d780faf0ac91257d4d5a4f0c5a0e4509608c0071)
+    ");
+
+    Ok(())
+}
+
+#[tokio::test]
+#[cfg(feature = "git")]
+async fn add_git_private_rate_limited_by_github_rest_api_429_response() -> Result<()> {
+    use uv_client::DEFAULT_RETRIES;
+
+    let context = TestContext::new("3.12");
+    let token = decode_token(READ_ONLY_GITHUB_TOKEN);
+
+    let server = MockServer::start().await;
+    Mock::given(method("GET"))
+        .respond_with(ResponseTemplate::new(429))
+        .expect(1 + u64::from(DEFAULT_RETRIES)) // Middleware retries on 429 by default
+        .mount(&server)
+        .await;
+
+    let pyproject_toml = context.temp_dir.child("pyproject.toml");
+    pyproject_toml.write_str(indoc! {r#"
+        [project]
+        name = "project"
+        version = "0.1.0"
+        requires-python = ">=3.12"
+        dependencies = []
+    "#})?;
+
+    uv_snapshot!(context.filters(), context
+        .add()
+        .arg(format!("uv-private-pypackage @ git+https://{token}@github.com/astral-test/uv-private-pypackage"))
+        .env(EnvVars::UV_GITHUB_FAST_PATH_URL, server.uri())
+        .env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true")
+        .env_remove(EnvVars::UV_HTTP_RETRIES), @r"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+
+    ----- stderr -----
+    Resolved 2 packages in [TIME]
+    Prepared 1 package in [TIME]
+    Installed 1 package in [TIME]
+     + uv-private-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-private-pypackage@d780faf0ac91257d4d5a4f0c5a0e4509608c0071)
+    ");
+
+    Ok(())
+}
+
 #[test]
 #[cfg(feature = "git")]
 fn add_git_error() -> Result<()> {
@@ -2016,6 +2098,42 @@ fn remove_both_dev() -> Result<()> {
     Ok(())
 }

+/// Do not allow add for groups in scripts.
+#[test]
+fn disallow_group_script_add() -> Result<()> {
+    let context = TestContext::new("3.12");
+
+    let script = context.temp_dir.child("main.py");
+    script.write_str(indoc! {r#"
+        # /// script
+        # requires-python = ">=3.13"
+        # dependencies = []
+        #
+        # ///
+    "#})?;
+
+    uv_snapshot!(context.filters(), context
+        .add()
+        .arg("--group")
+        .arg("dev")
+        .arg("anyio==3.7.0")
+        .arg("--script")
+        .arg("main.py"), @r###"
+    success: false
+    exit_code: 2
+    ----- stdout -----
+
+    ----- stderr -----
+    error: the argument '--group ' cannot be used with '--script